Video improvements
This commit is contained in: parent 80342695fb, commit 261fc96d02
@@ -488,6 +488,9 @@ open class GalleryControllerNode: ASDisplayNode, ASScrollViewDelegate, ASGestureRecognizerDelegate {
                return
            }

+           if distanceFromEquilibrium < -1.0, let centralItemNode = self.pager.centralItemNode(), centralItemNode.maybePerformActionForSwipeDownDismiss() {
+           }
+
            if let backgroundColor = self.backgroundNode.backgroundColor {
                self.backgroundNode.layer.animate(from: backgroundColor, to: UIColor(white: 0.0, alpha: 0.0).cgColor, keyPath: "backgroundColor", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.2, removeOnCompletion: false)
            }
@@ -109,6 +109,10 @@ open class GalleryItemNode: ASDisplayNode {
        return false
    }

+   open func maybePerformActionForSwipeDownDismiss() -> Bool {
+       return false
+   }
+
    open func contentSize() -> CGSize? {
        return nil
    }
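The new hook above defaults to false, so only item nodes that can actually handle a swipe-down (for example by entering picture-in-picture) opt in, as the gallery-node hunk before it shows. A minimal sketch of a subclass opting in; the class name is hypothetical:

    // Hypothetical subclass opting into the new hook; the gallery node calls
    // this during a downward dismissal drag and skips its own dismissal
    // animation when the item reports that it handled the gesture.
    final class ExampleVideoItemNode: GalleryItemNode {
        override func maybePerformActionForSwipeDownDismiss() -> Bool {
            // e.g. enter picture-in-picture here instead of dismissing
            return true
        }
    }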
@@ -177,7 +177,7 @@ final class GalleryRateToastComponent: Component {
                content: LottieComponent.AppBundleContent(name: "video_toast_speedup"),
                color: .white,
                startingPosition: .begin,
-               loop: true
+               loop: false
            )),
            environment: {},
            containerSize: CGSize(width: 60.0, height: 60.0)
@@ -1783,7 +1783,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
            if let item = strongSelf.item, let _ = item.content as? PlatformVideoContent {
                strongSelf.videoNode?.play()
            } else {
-               strongSelf.videoNode?.playOnceWithSound(playAndRecord: false, actionAtEnd: isAnimated ? .loop : strongSelf.actionAtEnd)
+               strongSelf.videoNode?.playOnceWithSound(playAndRecord: false, seek: .none, actionAtEnd: isAnimated ? .loop : strongSelf.actionAtEnd)
            }

            if let playbackRate = strongSelf.playbackRate {
@@ -1837,7 +1837,14 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
                return
            }

-           if let status = status, status.duration >= 60.0 * 10.0 {
+           if let status = status {
+               let shouldStorePlaybacksState: Bool
+               #if DEBUG
+               shouldStorePlaybacksState = status.duration >= 10.0
+               #else
+               shouldStorePlaybacksState = status.duration >= 60.0 * 10.0
+               #endif
+
                var timestamp: Double?
                if status.timestamp > 5.0 && status.timestamp < status.duration - 5.0 {
                    timestamp = status.timestamp
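The hunk above changes when playback position is persisted: release builds still require at least ten minutes of duration, while debug builds store state for anything ten seconds or longer, which makes the feature testable on short clips. In isolation the gating pattern looks like this (helper name hypothetical):

    // Hypothetical helper mirroring the #if DEBUG gating above.
    func shouldStorePlaybackState(duration: Double) -> Bool {
        #if DEBUG
        return duration >= 10.0
        #else
        return duration >= 60.0 * 10.0 // ten minutes
        #endif
    }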
@@ -2763,12 +2770,20 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
    }

    override func maybePerformActionForSwipeDismiss() -> Bool {
-       if let data = self.context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_disable_swipe_pip"] {
-           return false
+       if let data = self.context.currentAppConfiguration.with({ $0 }).data {
+           if let _ = data["ios_killswitch_disable_swipe_pip"] {
+               return false
+           }
+           if let value = data["video_swipe_up_to_close"] as? Double, value == 1.0 {
+               addAppLogEvent(postbox: self.context.account.postbox, type: "swipe_up_close", peerId: self.context.account.peerId)
+
+               return false
+           }
        }

        if #available(iOS 15.0, *) {
            if let nativePictureInPictureContent = self.nativePictureInPictureContent as? NativePictureInPictureContentImpl {
+               addAppLogEvent(postbox: self.context.account.postbox, type: "swipe_up_pip", peerId: self.context.account.peerId)
                nativePictureInPictureContent.beginPictureInPicture()
                return true
            }
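The rewritten check reads two server-driven flags from the app configuration: the existing kill switch that disables swipe-to-PiP entirely, and a new `video_swipe_up_to_close` experiment that logs the gesture and falls back to a plain close. A condensed sketch of the decision, assuming the configuration data behaves like the `[String: Any]` dictionary used above:

    // Hypothetical standalone version of the flag logic: returns true when
    // a swipe-up should close the gallery instead of entering PiP.
    func swipeUpClosesInsteadOfPiP(configData: [String: Any]) -> Bool {
        if configData["ios_killswitch_disable_swipe_pip"] != nil {
            return true // PiP-on-swipe is killswitched; fall back to close
        }
        if let value = configData["video_swipe_up_to_close"] as? Double, value == 1.0 {
            return true // the experiment explicitly prefers close
        }
        return false
    }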
@@ -2776,6 +2791,11 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
        return false
    }

+   override func maybePerformActionForSwipeDownDismiss() -> Bool {
+       addAppLogEvent(postbox: self.context.account.postbox, type: "swipe_down_close", peerId: self.context.account.peerId)
+       return false
+   }
+
    override func title() -> Signal<String, NoError> {
        return self._title.get()
    }
@@ -2981,6 +3001,8 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
                if !didExpand {
                    self.activePictureInPictureController = nil
                    self.activePictureInPictureNavigationController = nil
+
+                   addAppLogEvent(postbox: self.context.account.postbox, type: "pip_close_btn", peerId: self.context.account.peerId)
                }
            }, expand: { [weak self] completion in
                didExpand = true
@@ -3013,6 +3035,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
    @objc func pictureInPictureButtonPressed() {
        if #available(iOS 15.0, *) {
            if let nativePictureInPictureContent = self.nativePictureInPictureContent as? NativePictureInPictureContentImpl {
+               addAppLogEvent(postbox: self.context.account.postbox, type: "pip_btn", peerId: self.context.account.peerId)
                nativePictureInPictureContent.beginPictureInPicture()
                return
            }
@@ -120,16 +120,18 @@ public final class ChunkMediaPlayerPart {
    public let endTime: Double
    public let file: TempBoxFile
    public let clippedStartTime: Double?
+   public let codecName: String?

    public var id: Id {
        return Id(rawValue: self.file.path)
    }

-   public init(startTime: Double, clippedStartTime: Double? = nil, endTime: Double, file: TempBoxFile) {
+   public init(startTime: Double, clippedStartTime: Double? = nil, endTime: Double, file: TempBoxFile, codecName: String?) {
        self.startTime = startTime
        self.clippedStartTime = clippedStartTime
        self.endTime = endTime
        self.file = file
+       self.codecName = codecName
    }
}
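`ChunkMediaPlayerPart` now carries the codec name so the player can route AV1 chunks to a different reader (see `ChunkMediaPlayerV2` below). Note that a part's identity remains its backing file path, so two parts wrapping the same temp file compare equal even if their clip times differ; a toy model of that identity rule:

    // Toy model of the Id rule above; names are hypothetical stand-ins.
    struct PartId: Hashable { let rawValue: String }
    struct Part {
        let startTime: Double
        let endTime: Double
        let filePath: String
        var id: PartId { PartId(rawValue: filePath) } // identity = file path
    }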
@@ -666,7 +668,8 @@ private final class ChunkMediaPlayerContext {
                    startTime: part.startTime,
                    clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
                    endTime: part.endTime,
-                   file: part.file
+                   file: part.file,
+                   codecName: part.codecName
                ))
                minStartTime = max(minStartTime, partEndTime)
            }
@@ -687,7 +690,8 @@ private final class ChunkMediaPlayerContext {
                    startTime: part.startTime,
                    clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
                    endTime: part.endTime,
-                   file: part.file
+                   file: part.file,
+                   codecName: part.codecName
                ))
                minStartTime = max(minStartTime, partEndTime)
                break
@@ -1040,7 +1044,26 @@ private final class ChunkMediaPlayerContext {
    }
}

-public final class ChunkMediaPlayer {
+public protocol ChunkMediaPlayer: AnyObject {
+    var status: Signal<MediaPlayerStatus, NoError> { get }
+    var audioLevelEvents: Signal<Float, NoError> { get }
+    var actionAtEnd: ChunkMediaPlayerActionAtEnd { get set }
+
+    func play()
+    func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek)
+    func setSoundMuted(soundMuted: Bool)
+    func continueWithOverridingAmbientMode(isAmbient: Bool)
+    func continuePlayingWithoutSound(seek: MediaPlayerSeek)
+    func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool)
+    func setForceAudioToSpeaker(_ value: Bool)
+    func setKeepAudioSessionWhilePaused(_ value: Bool)
+    func pause()
+    func togglePlayPause(faded: Bool)
+    func seek(timestamp: Double, play: Bool?)
+    func setBaseRate(_ baseRate: Double)
+}
+
+public final class ChunkMediaPlayerImpl: ChunkMediaPlayer {
    private let queue = Queue()
    private var contextRef: Unmanaged<ChunkMediaPlayerContext>?
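Extracting the protocol lets call sites stay agnostic about which player backs a video: the existing FFmpeg-clocked implementation (renamed `ChunkMediaPlayerImpl`) or the new `ChunkMediaPlayerV2` added later in this commit. A small sketch of a call site written against the protocol; the function name is hypothetical:

    // Hypothetical call site: works unchanged with Impl or V2, because it
    // touches only the shared protocol surface.
    func beginPlayback(of player: ChunkMediaPlayer) {
        player.actionAtEnd = .stop
        player.setBaseRate(1.0)
        player.playOnceWithSound(playAndRecord: false, seek: .start)
    }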
@@ -1081,7 +1104,8 @@ public final class ChunkMediaPlayer {
        keepAudioSessionWhilePaused: Bool = false,
        continuePlayingWithoutSoundOnLostAudioSession: Bool = false,
        isAudioVideoMessage: Bool = false,
-       onSeeked: (() -> Void)? = nil
+       onSeeked: (() -> Void)? = nil,
+       playerNode: MediaPlayerNode
    ) {
        let audioLevelPipe = self.audioLevelPipe
        self.queue.async {
@@ -1109,6 +1133,8 @@ public final class ChunkMediaPlayer {
            )
            self.contextRef = Unmanaged.passRetained(context)
        }
+
+       self.attachPlayerNode(playerNode)
    }

    deinit {
@@ -1126,7 +1152,7 @@ public final class ChunkMediaPlayer {
        }
    }

-   public func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek = .start) {
+   public func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek) {
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                context.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
@@ -1150,7 +1176,7 @@ public final class ChunkMediaPlayer {
        }
    }

-   public func continuePlayingWithoutSound(seek: MediaPlayerSeek = .start) {
+   public func continuePlayingWithoutSound(seek: MediaPlayerSeek) {
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                context.continuePlayingWithoutSound(seek: seek)
@@ -1190,7 +1216,7 @@ public final class ChunkMediaPlayer {
        }
    }

-   public func togglePlayPause(faded: Bool = false) {
+   public func togglePlayPause(faded: Bool) {
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                context.togglePlayPause(faded: faded)
@@ -1198,7 +1224,7 @@ public final class ChunkMediaPlayer {
        }
    }

-   public func seek(timestamp: Double, play: Bool? = nil) {
+   public func seek(timestamp: Double, play: Bool?) {
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                if let play {
submodules/MediaPlayer/Sources/ChunkMediaPlayerV2.swift (new file, 805 lines)
@@ -0,0 +1,805 @@
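The new file below is the second player implementation. Instead of FFmpeg-driven decode threads and a custom clock, V2 hands demuxed sample buffers to AVFoundation: an AVSampleBufferRenderSynchronizer owns the timeline, an AVSampleBufferDisplayLayer renders video, and an optional AVSampleBufferAudioRenderer renders sound. A self-contained sketch of that skeleton, reduced to the AVFoundation calls used in the file:

    import AVFoundation

    // Minimal skeleton of the V2 pipeline: the synchronizer is the clock,
    // renderers attach to it, and the rate doubles as the play/pause switch.
    let synchronizer = AVSampleBufferRenderSynchronizer()
    let videoLayer = AVSampleBufferDisplayLayer()
    if #available(iOS 17.0, *) {
        synchronizer.addRenderer(videoLayer.sampleBufferRenderer)
    } else {
        synchronizer.addRenderer(videoLayer)
    }
    // Rate 0.0 is paused; a nonzero rate at a given time starts playback there.
    synchronizer.setRate(0.0, time: CMTime(seconds: 0.0, preferredTimescale: 44000))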
import Foundation
import AVFoundation
import TelegramCore
import TelegramAudio
import SwiftSignalKit
import Postbox

public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
    private final class LoadedPart {
        final class Media {
            let queue: Queue
            let tempFile: TempBoxFile
            let mediaType: AVMediaType
            let codecName: String?

            private(set) var reader: MediaDataReader?

            var didBeginReading: Bool = false
            var isFinished: Bool = false

            init(queue: Queue, tempFile: TempBoxFile, mediaType: AVMediaType, codecName: String?) {
                assert(queue.isCurrent())

                self.queue = queue
                self.tempFile = tempFile
                self.mediaType = mediaType
                self.codecName = codecName
            }

            deinit {
                assert(self.queue.isCurrent())
            }

            func load() {
                let reader: MediaDataReader
                if self.mediaType == .video && self.codecName == "av1" {
                    reader = AVAssetVideoDataReader(filePath: self.tempFile.path, isVideo: self.mediaType == .video)
                } else {
                    reader = FFMpegMediaDataReader(filePath: self.tempFile.path, isVideo: self.mediaType == .video)
                }
                if self.mediaType == .video {
                    if reader.hasVideo {
                        self.reader = reader
                    }
                } else {
                    if reader.hasAudio {
                        self.reader = reader
                    }
                }
            }
        }

        final class MediaData {
            let part: ChunkMediaPlayerPart
            let video: Media?
            let audio: Media?

            init(part: ChunkMediaPlayerPart, video: Media?, audio: Media?) {
                self.part = part
                self.video = video
                self.audio = audio
            }
        }

        let part: ChunkMediaPlayerPart

        init(part: ChunkMediaPlayerPart) {
            self.part = part
        }
    }

    private final class LoadedPartsMediaData {
        var ids: [ChunkMediaPlayerPart.Id] = []
        var parts: [ChunkMediaPlayerPart.Id: LoadedPart.MediaData] = [:]
        var notifiedHasSound: Bool = false
        var seekFromMinTimestamp: Double?
    }

    private static let sharedDataQueue = Queue(name: "ChunkMediaPlayerV2-DataQueue")
    private let dataQueue: Queue

    private let audioSessionManager: ManagedAudioSession
    private let onSeeked: (() -> Void)?

    private let renderSynchronizer: AVSampleBufferRenderSynchronizer
    private var videoRenderer: AVSampleBufferDisplayLayer
    private var audioRenderer: AVSampleBufferAudioRenderer?

    private var partsState = ChunkMediaPlayerPartsState(duration: nil, parts: [])
    private var loadedParts: [LoadedPart] = []
    private var loadedPartsMediaData: QueueLocalObject<LoadedPartsMediaData>
    private var hasSound: Bool = false

    private var statusValue: MediaPlayerStatus? {
        didSet {
            if let statusValue = self.statusValue, statusValue != oldValue {
                self.statusPromise.set(statusValue)
            }
        }
    }
    private let statusPromise = ValuePromise<MediaPlayerStatus>()
    public var status: Signal<MediaPlayerStatus, NoError> {
        return self.statusPromise.get()
    }

    public var audioLevelEvents: Signal<Float, NoError> {
        return .never()
    }

    public var actionAtEnd: ChunkMediaPlayerActionAtEnd = .stop

    private var isPlaying: Bool = false
    private var baseRate: Double = 1.0
    private var isSoundEnabled: Bool
    private var isMuted: Bool

    private var seekId: Int = 0
    private var pendingSeekTimestamp: Double?
    private var pendingContinuePlaybackAfterSeekToTimestamp: Double?
    private var shouldNotifySeeked: Bool = false
    private var stoppedAtEnd: Bool = false

    private var renderSynchronizerRate: Double = 0.0
    private var videoIsRequestingMediaData: Bool = false
    private var audioIsRequestingMediaData: Bool = false

    private var partsStateDisposable: Disposable?
    private var updateTimer: Foundation.Timer?

    private var audioSessionDisposable: Disposable?
    private var hasAudioSession: Bool = false

    public init(
        audioSessionManager: ManagedAudioSession,
        partsState: Signal<ChunkMediaPlayerPartsState, NoError>,
        video: Bool,
        playAutomatically: Bool = false,
        enableSound: Bool,
        baseRate: Double = 1.0,
        playAndRecord: Bool = false,
        soundMuted: Bool = false,
        ambient: Bool = false,
        mixWithOthers: Bool = false,
        keepAudioSessionWhilePaused: Bool = false,
        continuePlayingWithoutSoundOnLostAudioSession: Bool = false,
        isAudioVideoMessage: Bool = false,
        onSeeked: (() -> Void)? = nil,
        playerNode: MediaPlayerNode
    ) {
        self.dataQueue = ChunkMediaPlayerV2.sharedDataQueue

        self.audioSessionManager = audioSessionManager
        self.onSeeked = onSeeked

        self.loadedPartsMediaData = QueueLocalObject(queue: self.dataQueue, generate: {
            return LoadedPartsMediaData()
        })

        self.isSoundEnabled = enableSound
        self.isMuted = soundMuted
        self.baseRate = baseRate

        self.renderSynchronizer = AVSampleBufferRenderSynchronizer()
        self.renderSynchronizer.setRate(0.0, time: CMTime(seconds: 0.0, preferredTimescale: 44000))

        if playerNode.videoLayer == nil {
            assertionFailure()
        }
        self.videoRenderer = playerNode.videoLayer ?? AVSampleBufferDisplayLayer()

        self.updateTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true, block: { [weak self] _ in
            guard let self else {
                return
            }
            self.updateInternalState()
        })

        self.partsStateDisposable = (partsState
        |> deliverOnMainQueue).startStrict(next: { [weak self] partsState in
            guard let self else {
                return
            }
            self.partsState = partsState
            self.updateInternalState()
        })

        if #available(iOS 17.0, *) {
            self.renderSynchronizer.addRenderer(self.videoRenderer.sampleBufferRenderer)
        } else {
            self.renderSynchronizer.addRenderer(self.videoRenderer)
        }
    }
    deinit {
        self.partsStateDisposable?.dispose()
        self.updateTimer?.invalidate()
        self.audioSessionDisposable?.dispose()

        if #available(iOS 17.0, *) {
            self.videoRenderer.sampleBufferRenderer.stopRequestingMediaData()
        } else {
            self.videoRenderer.stopRequestingMediaData()
        }

        // Conservatively release AVSampleBufferDisplayLayer reference on main thread to prevent deadlock
        let videoRenderer = self.videoRenderer
        Queue.mainQueue().after(1.0, {
            let _ = videoRenderer.masksToBounds
        })

        if let audioRenderer = self.audioRenderer {
            audioRenderer.stopRequestingMediaData()
        }
    }
    private func updateInternalState() {
        if self.isSoundEnabled && self.hasSound {
            if self.audioSessionDisposable == nil {
                self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
                    audioSessionType: .play(mixWithOthers: false),
                    activateImmediately: false,
                    manualActivate: { [weak self] control in
                        control.setupAndActivate(synchronous: false, { state in
                            Queue.mainQueue().async {
                                guard let self else {
                                    return
                                }
                                self.hasAudioSession = true
                                self.updateInternalState()
                            }
                        })
                    },
                    deactivate: { [weak self] _ in
                        return Signal { subscriber in
                            guard let self else {
                                subscriber.putCompletion()
                                return EmptyDisposable
                            }

                            self.hasAudioSession = false
                            self.updateInternalState()
                            subscriber.putCompletion()

                            return EmptyDisposable
                        }
                        |> runOn(.mainQueue())
                    },
                    headsetConnectionStatusChanged: { _ in },
                    availableOutputsChanged: { _, _ in }
                ))
            }
        } else {
            if let audioSessionDisposable = self.audioSessionDisposable {
                self.audioSessionDisposable = nil
                audioSessionDisposable.dispose()
            }

            self.hasAudioSession = false
        }

        if self.isSoundEnabled && self.hasSound && self.hasAudioSession {
            if self.audioRenderer == nil {
                let audioRenderer = AVSampleBufferAudioRenderer()
                audioRenderer.isMuted = self.isMuted
                self.audioRenderer = audioRenderer
                self.renderSynchronizer.addRenderer(audioRenderer)
            }
        } else {
            if let audioRenderer = self.audioRenderer {
                self.audioRenderer = nil
                audioRenderer.stopRequestingMediaData()
                self.audioIsRequestingMediaData = false
                self.renderSynchronizer.removeRenderer(audioRenderer, at: .invalid)
            }
        }

        let timestamp: CMTime
        if let pendingSeekTimestamp = self.pendingSeekTimestamp {
            timestamp = CMTimeMakeWithSeconds(pendingSeekTimestamp, preferredTimescale: 44000)
        } else {
            timestamp = self.renderSynchronizer.currentTime()
        }
        let timestampSeconds = timestamp.seconds

        var duration: Double = 0.0
        if let partsStateDuration = self.partsState.duration {
            duration = partsStateDuration
        }

        var validParts: [ChunkMediaPlayerPart] = []

        var minStartTime: Double = 0.0
        for i in 0 ..< self.partsState.parts.count {
            let part = self.partsState.parts[i]

            let partStartTime = max(minStartTime, part.startTime)
            let partEndTime = max(partStartTime, part.endTime)
            if partStartTime >= partEndTime {
                continue
            }

            var partMatches = false
            if timestampSeconds >= partStartTime - 0.5 && timestampSeconds < partEndTime + 0.5 {
                partMatches = true
            }

            if partMatches {
                validParts.append(ChunkMediaPlayerPart(
                    startTime: part.startTime,
                    clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
                    endTime: part.endTime,
                    file: part.file,
                    codecName: part.codecName
                ))
                minStartTime = max(minStartTime, partEndTime)
            }
        }

        if let lastValidPart = validParts.last {
            for i in 0 ..< self.partsState.parts.count {
                let part = self.partsState.parts[i]

                let partStartTime = max(minStartTime, part.startTime)
                let partEndTime = max(partStartTime, part.endTime)
                if partStartTime >= partEndTime {
                    continue
                }

                if lastValidPart !== part && partStartTime > (lastValidPart.clippedStartTime ?? lastValidPart.startTime) && partStartTime <= lastValidPart.endTime + 0.5 {
                    validParts.append(ChunkMediaPlayerPart(
                        startTime: part.startTime,
                        clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
                        endTime: part.endTime,
                        file: part.file,
                        codecName: part.codecName
                    ))
                    minStartTime = max(minStartTime, partEndTime)
                    break
                }
            }
        }

        if validParts.isEmpty, let pendingContinuePlaybackAfterSeekToTimestamp = self.pendingContinuePlaybackAfterSeekToTimestamp {
            for part in self.partsState.parts {
                if pendingContinuePlaybackAfterSeekToTimestamp >= part.startTime - 0.2 && pendingContinuePlaybackAfterSeekToTimestamp < part.endTime {
                    self.renderSynchronizer.setRate(Float(self.renderSynchronizerRate), time: CMTimeMakeWithSeconds(part.startTime, preferredTimescale: 44000))
                    break
                }
            }
        }

        self.loadedParts.removeAll(where: { partState in
            if !validParts.contains(where: { $0.id == partState.part.id }) {
                return true
            }
            return false
        })

        for part in validParts {
            if !self.loadedParts.contains(where: { $0.part.id == part.id }) {
                self.loadedParts.append(LoadedPart(part: part))
                self.loadedParts.sort(by: { $0.part.startTime < $1.part.startTime })
            }
        }

        if self.pendingSeekTimestamp != nil {
            return
        }

        let loadedParts = self.loadedParts
        let dataQueue = self.dataQueue
        let isSoundEnabled = self.isSoundEnabled
        self.loadedPartsMediaData.with { [weak self] loadedPartsMediaData in
            loadedPartsMediaData.ids = loadedParts.map(\.part.id)

            for part in loadedParts {
                if let loadedPart = loadedPartsMediaData.parts[part.part.id] {
                    if let audio = loadedPart.audio, audio.didBeginReading, !isSoundEnabled {
                        let cleanAudio = LoadedPart.Media(queue: dataQueue, tempFile: part.part.file, mediaType: .audio, codecName: part.part.codecName)
                        cleanAudio.load()

                        loadedPartsMediaData.parts[part.part.id] = LoadedPart.MediaData(
                            part: part.part,
                            video: loadedPart.video,
                            audio: cleanAudio.reader != nil ? cleanAudio : nil
                        )
                    }
                } else {
                    let video = LoadedPart.Media(queue: dataQueue, tempFile: part.part.file, mediaType: .video, codecName: part.part.codecName)
                    video.load()

                    let audio = LoadedPart.Media(queue: dataQueue, tempFile: part.part.file, mediaType: .audio, codecName: part.part.codecName)
                    audio.load()

                    loadedPartsMediaData.parts[part.part.id] = LoadedPart.MediaData(
                        part: part.part,
                        video: video,
                        audio: audio.reader != nil ? audio : nil
                    )
                }
            }

            var removedKeys: [ChunkMediaPlayerPart.Id] = []
            for (id, _) in loadedPartsMediaData.parts {
                if !loadedPartsMediaData.ids.contains(id) {
                    removedKeys.append(id)
                }
            }
            for id in removedKeys {
                loadedPartsMediaData.parts.removeValue(forKey: id)
            }

            if !loadedPartsMediaData.notifiedHasSound, let part = loadedPartsMediaData.parts.values.first {
                loadedPartsMediaData.notifiedHasSound = true
                let hasSound = part.audio?.reader != nil
                Queue.mainQueue().async {
                    guard let self else {
                        return
                    }
                    if self.hasSound != hasSound {
                        self.hasSound = hasSound
                        self.updateInternalState()
                    }
                }
            }
        }

        var playableDuration: Double = 0.0
        var previousValidPartEndTime: Double?
        for part in self.partsState.parts {
            if let previousValidPartEndTime {
                if part.startTime > previousValidPartEndTime + 0.5 {
                    break
                }
            } else if !validParts.contains(where: { $0.id == part.id }) {
                continue
            }

            let partDuration: Double
            if part.startTime - 0.5 <= timestampSeconds && part.endTime + 0.5 > timestampSeconds {
                partDuration = part.endTime - timestampSeconds
            } else if part.startTime - 0.5 > timestampSeconds {
                partDuration = part.endTime - part.startTime
            } else {
                partDuration = 0.0
            }
            playableDuration += partDuration
            previousValidPartEndTime = part.endTime
        }

        var effectiveRate: Double = 0.0
        let isBuffering: Bool
        if let previousValidPartEndTime, previousValidPartEndTime >= duration - 0.5 {
            isBuffering = false
        } else {
            isBuffering = playableDuration < 1.0
        }
        if self.isPlaying {
            if !isBuffering {
                effectiveRate = self.baseRate
            }
        }
        if !isBuffering {
            self.pendingContinuePlaybackAfterSeekToTimestamp = nil
        }

        //print("timestampSeconds: \(timestampSeconds) rate: \(effectiveRate)")

        if self.renderSynchronizerRate != effectiveRate {
            self.renderSynchronizerRate = effectiveRate
            self.renderSynchronizer.setRate(Float(effectiveRate), time: timestamp)
        }

        if effectiveRate != 0.0 {
            self.triggerRequestMediaData()
        }

        let playbackStatus: MediaPlayerPlaybackStatus
        if isBuffering {
            playbackStatus = .buffering(initial: false, whilePlaying: self.isPlaying, progress: 0.0, display: true)
        } else if self.isPlaying {
            playbackStatus = .playing
        } else {
            playbackStatus = .paused
        }
        self.statusValue = MediaPlayerStatus(
            generationTimestamp: CACurrentMediaTime(),
            duration: duration,
            dimensions: CGSize(),
            timestamp: timestampSeconds,
            baseRate: self.baseRate,
            seekId: self.seekId,
            status: playbackStatus,
            soundEnabled: self.isSoundEnabled
        )

        if self.shouldNotifySeeked {
            self.shouldNotifySeeked = false
            self.onSeeked?()
        }

        if duration > 0.0 && timestampSeconds >= duration - 0.1 {
            if !self.stoppedAtEnd {
                switch self.actionAtEnd {
                case let .loop(f):
                    self.stoppedAtEnd = false
                    self.seek(timestamp: 0.0, play: true, notify: true)
                    f?()
                case .stop:
                    self.stoppedAtEnd = true
                    self.pause()
                case let .action(f):
                    self.stoppedAtEnd = true
                    self.pause()
                    f()
                case let .loopDisablingSound(f):
                    self.stoppedAtEnd = false
                    self.isSoundEnabled = false
                    self.seek(timestamp: 0.0, play: true, notify: true)
                    f()
                }
            }
        }
    }
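`updateInternalState()` above is the heart of V2: it runs on a 60 Hz timer and after every state change, acquiring or releasing the audio session, selecting which chunk files cover the current timestamp, computing buffering state, and driving the synchronizer rate. The part-selection step clips overlapping chunks so each instant of the timeline is covered exactly once; a toy reduction of that rule:

    // Toy reduction of the clipping used above: each accepted chunk advances
    // minStartTime, and later chunks may only start at or after it.
    struct Chunk { let startTime: Double; let endTime: Double }

    func clippedRanges(_ chunks: [Chunk]) -> [(start: Double, end: Double)] {
        var minStartTime = 0.0
        var result: [(start: Double, end: Double)] = []
        for chunk in chunks {
            let start = max(minStartTime, chunk.startTime)
            let end = max(start, chunk.endTime)
            if start >= end { continue } // this chunk's range is already covered
            result.append((start, end))
            minStartTime = max(minStartTime, end)
        }
        return result
    }
    // Example: chunks (0-4) and (3-8) yield ranges (0-4) and (4-8).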
    public func play() {
        self.isPlaying = true
        self.updateInternalState()
    }

    public func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek) {
        self.isPlaying = true
        self.isSoundEnabled = true

        switch seek {
        case .automatic, .none:
            self.updateInternalState()
        case .start:
            self.seek(timestamp: 0.0, play: nil)
        case let .timecode(timestamp):
            self.seek(timestamp: timestamp, play: nil)
        }
    }

    public func setSoundMuted(soundMuted: Bool) {
        if self.isMuted != soundMuted {
            self.isMuted = soundMuted
            if let audioRenderer = self.audioRenderer {
                audioRenderer.isMuted = self.isMuted
            }
        }
    }

    public func continueWithOverridingAmbientMode(isAmbient: Bool) {
    }

    public func continuePlayingWithoutSound(seek: MediaPlayerSeek) {
        self.isSoundEnabled = false
        self.isPlaying = true
        self.updateInternalState()

        switch seek {
        case .automatic, .none:
            break
        case .start:
            self.seek(timestamp: 0.0, play: nil)
        case let .timecode(timestamp):
            self.seek(timestamp: timestamp, play: nil)
        }
    }

    public func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
    }

    public func setForceAudioToSpeaker(_ value: Bool) {
    }

    public func setKeepAudioSessionWhilePaused(_ value: Bool) {
    }

    public func pause() {
        self.isPlaying = false
        self.updateInternalState()
    }

    public func togglePlayPause(faded: Bool) {
        if self.isPlaying {
            self.isPlaying = false
        } else {
            self.isPlaying = true
        }
        self.updateInternalState()
    }

    public func seek(timestamp: Double, play: Bool?) {
        self.seek(timestamp: timestamp, play: play, notify: true)
    }

    private func seek(timestamp: Double, play: Bool?, notify: Bool) {
        self.seekId += 1
        let seekId = self.seekId
        self.pendingSeekTimestamp = timestamp
        self.pendingContinuePlaybackAfterSeekToTimestamp = timestamp
        if let play {
            self.isPlaying = play
        }
        if notify {
            self.shouldNotifySeeked = true
        }

        //print("Seek to \(timestamp)")
        self.renderSynchronizerRate = 0.0
        self.renderSynchronizer.setRate(0.0, time: CMTimeMakeWithSeconds(timestamp, preferredTimescale: 44000))

        self.updateInternalState()

        self.videoIsRequestingMediaData = false
        if #available(iOS 17.0, *) {
            self.videoRenderer.sampleBufferRenderer.stopRequestingMediaData()
        } else {
            self.videoRenderer.stopRequestingMediaData()
        }
        if let audioRenderer = self.audioRenderer {
            self.audioIsRequestingMediaData = false
            audioRenderer.stopRequestingMediaData()
        }

        self.loadedPartsMediaData.with { [weak self] loadedPartsMediaData in
            loadedPartsMediaData.parts.removeAll()
            loadedPartsMediaData.seekFromMinTimestamp = timestamp

            Queue.mainQueue().async {
                guard let self else {
                    return
                }

                if self.seekId == seekId {
                    if #available(iOS 17.0, *) {
                        self.videoRenderer.sampleBufferRenderer.flush()
                    } else {
                        self.videoRenderer.flush()
                    }
                    if let audioRenderer = self.audioRenderer {
                        audioRenderer.flush()
                    }

                    self.pendingSeekTimestamp = nil
                    self.updateInternalState()
                }
            }
        }
    }
    public func setBaseRate(_ baseRate: Double) {
        self.baseRate = baseRate
        self.updateInternalState()
    }

    private func triggerRequestMediaData() {
        let loadedPartsMediaData = self.loadedPartsMediaData

        if !self.videoIsRequestingMediaData && "".isEmpty {
            self.videoIsRequestingMediaData = true

            let videoTarget: AVQueuedSampleBufferRendering
            if #available(iOS 17.0, *) {
                videoTarget = self.videoRenderer.sampleBufferRenderer
            } else {
                videoTarget = self.videoRenderer
            }

            videoTarget.requestMediaDataWhenReady(on: self.dataQueue.queue, using: { [weak self] in
                if let loadedPartsMediaData = loadedPartsMediaData.unsafeGet() {
                    let bufferIsReadyForMoreData = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: videoTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: true)
                    if bufferIsReadyForMoreData {
                        videoTarget.stopRequestingMediaData()
                        Queue.mainQueue().async {
                            guard let self else {
                                return
                            }
                            self.videoIsRequestingMediaData = false
                            self.updateInternalState()
                        }
                    }
                }
            })
        }

        if !self.audioIsRequestingMediaData, let audioRenderer = self.audioRenderer {
            self.audioIsRequestingMediaData = true
            let loadedPartsMediaData = self.loadedPartsMediaData
            let audioTarget = audioRenderer
            audioTarget.requestMediaDataWhenReady(on: self.dataQueue.queue, using: { [weak self] in
                if let loadedPartsMediaData = loadedPartsMediaData.unsafeGet() {
                    let bufferIsReadyForMoreData = ChunkMediaPlayerV2.fillRendererBuffer(bufferTarget: audioTarget, loadedPartsMediaData: loadedPartsMediaData, isVideo: false)
                    if bufferIsReadyForMoreData {
                        audioTarget.stopRequestingMediaData()
                        Queue.mainQueue().async {
                            guard let self else {
                                return
                            }
                            self.audioIsRequestingMediaData = false
                            self.updateInternalState()
                        }
                    }
                }
            })
        }
    }
    private static func fillRendererBuffer(bufferTarget: AVQueuedSampleBufferRendering, loadedPartsMediaData: LoadedPartsMediaData, isVideo: Bool) -> Bool {
        var bufferIsReadyForMoreData = true
        outer: while true {
            if !bufferTarget.isReadyForMoreMediaData {
                bufferIsReadyForMoreData = false
                break
            }
            var hasData = false
            for partId in loadedPartsMediaData.ids {
                guard let loadedPart = loadedPartsMediaData.parts[partId] else {
                    continue
                }
                guard let media = isVideo ? loadedPart.video : loadedPart.audio else {
                    continue
                }
                if media.isFinished {
                    continue
                }
                guard let reader = media.reader else {
                    continue
                }
                media.didBeginReading = true
                if var sampleBuffer = reader.readSampleBuffer() {
                    if let seekFromMinTimestamp = loadedPartsMediaData.seekFromMinTimestamp, CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds < seekFromMinTimestamp {
                        if isVideo {
                            var updatedSampleBuffer: CMSampleBuffer?
                            CMSampleBufferCreateCopy(allocator: nil, sampleBuffer: sampleBuffer, sampleBufferOut: &updatedSampleBuffer)
                            if let updatedSampleBuffer {
                                if let attachments = CMSampleBufferGetSampleAttachmentsArray(updatedSampleBuffer, createIfNecessary: true) {
                                    let attachments = attachments as NSArray
                                    let dict = attachments[0] as! NSMutableDictionary

                                    dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DoNotDisplay as NSString as String)

                                    sampleBuffer = updatedSampleBuffer
                                }
                            }
                        } else {
                            continue outer
                        }
                    }
                    /*if isVideo {
                        print("Enqueue \(isVideo ? "video" : "audio") at \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds) \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value)/\(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).timescale) next \(CMSampleBufferGetPresentationTimeStamp(sampleBuffer).value + CMSampleBufferGetDuration(sampleBuffer).value)")
                    }*/
                    bufferTarget.enqueue(sampleBuffer)
                    hasData = true
                    continue outer
                } else {
                    media.isFinished = true
                }
            }
            if !hasData {
                break
            }
        }

        return bufferIsReadyForMoreData
    }
}
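`fillRendererBuffer` keeps enqueueing samples until the renderer reports back-pressure. The notable trick is the seek path: video frames whose presentation timestamps fall before the seek target are still enqueued, but marked with `kCMSampleAttachmentKey_DoNotDisplay`, so the decoder catches up to the target without flashing stale frames. The marking step in isolation, as a hedged sketch:

    import Foundation
    import CoreMedia

    // Sketch of the decode-only marking used above: copy the buffer and set
    // the DoNotDisplay attachment so it is decoded but never shown.
    func markedDoNotDisplay(_ sampleBuffer: CMSampleBuffer) -> CMSampleBuffer {
        var copy: CMSampleBuffer?
        CMSampleBufferCreateCopy(allocator: nil, sampleBuffer: sampleBuffer, sampleBufferOut: &copy)
        guard let copy, let attachments = CMSampleBufferGetSampleAttachmentsArray(copy, createIfNecessary: true) else {
            return sampleBuffer // fall back to the original on failure
        }
        let dict = (attachments as NSArray)[0] as! NSMutableDictionary
        dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DoNotDisplay as NSString as String)
        return copy
    }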
private func createSampleBuffer(fromSampleBuffer sampleBuffer: CMSampleBuffer, withTimeOffset timeOffset: CMTime, duration: CMTime?) -> CMSampleBuffer? {
    var itemCount: CMItemCount = 0
    var status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &itemCount)
    if status != 0 {
        return nil
    }

    var timingInfo = [CMSampleTimingInfo](repeating: CMSampleTimingInfo(duration: CMTimeMake(value: 0, timescale: 0), presentationTimeStamp: CMTimeMake(value: 0, timescale: 0), decodeTimeStamp: CMTimeMake(value: 0, timescale: 0)), count: itemCount)
    status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: itemCount, arrayToFill: &timingInfo, entriesNeededOut: &itemCount)
    if status != 0 {
        return nil
    }

    if let dur = duration {
        for i in 0 ..< itemCount {
            timingInfo[i].decodeTimeStamp = CMTimeAdd(timingInfo[i].decodeTimeStamp, timeOffset)
            timingInfo[i].presentationTimeStamp = CMTimeAdd(timingInfo[i].presentationTimeStamp, timeOffset)
            timingInfo[i].duration = dur
        }
    } else {
        for i in 0 ..< itemCount {
            timingInfo[i].decodeTimeStamp = CMTimeAdd(timingInfo[i].decodeTimeStamp, timeOffset)
            timingInfo[i].presentationTimeStamp = CMTimeAdd(timingInfo[i].presentationTimeStamp, timeOffset)
        }
    }

    var sampleBufferOffset: CMSampleBuffer?
    CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleTimingEntryCount: itemCount, sampleTimingArray: &timingInfo, sampleBufferOut: &sampleBufferOffset)

    if let output = sampleBufferOffset {
        return output
    } else {
        return nil
    }
}
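The retiming helper above shifts every sample's decode and presentation timestamps by a fixed offset (optionally overriding per-sample durations) and returns a copy built with `CMSampleBufferCreateCopyWithNewTiming`. Hypothetical usage:

    // Hypothetical usage: shift a reader's buffer two seconds later on the
    // timeline, keeping its original per-sample durations.
    func shiftedTwoSecondsLater(_ buffer: CMSampleBuffer) -> CMSampleBuffer? {
        let offset = CMTimeMakeWithSeconds(2.0, preferredTimescale: 44000)
        return createSampleBuffer(fromSampleBuffer: buffer, withTimeOffset: offset, duration: nil)
    }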
@@ -151,4 +151,8 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
        self.codecContext.flushBuffers()
        self.resetDecoderOnNextFrame = true
    }
+
+   func sendEndToDecoder() -> Bool {
+       return true
+   }
}
@@ -469,20 +469,19 @@ final class FFMpegMediaFrameSourceContext: NSObject {
                        }
                    }
                }
-           } else if codecId == FFMpegCodecIdMPEG4 {
-               if let videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromMpeg4CodecData(UInt32(kCMVideoCodecType_MPEG4Video), metrics.width, metrics.height, metrics.extradata, metrics.extradataSize) {
-                   videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
-                   break
-               }
-           } else if codecId == FFMpegCodecIdH264 {
-               if let videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromAVCCodecData(UInt32(kCMVideoCodecType_H264), metrics.width, metrics.height, metrics.extradata, metrics.extradataSize) {
-                   videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
-                   break
-               }
-           } else if codecId == FFMpegCodecIdHEVC {
-               if let videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromHEVCCodecData(UInt32(kCMVideoCodecType_HEVC), metrics.width, metrics.height, metrics.extradata, metrics.extradataSize) {
-                   videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormat: videoFormat, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
-                   break
+           } else {
+               var videoFormatData: FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData?
+               if codecId == FFMpegCodecIdMPEG4 {
+                   videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_MPEG4Video, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
+               } else if codecId == FFMpegCodecIdH264 {
+                   videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_H264, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
+               } else if codecId == FFMpegCodecIdHEVC {
+                   videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_HEVC, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
+               } else if codecId == FFMpegCodecIdAV1 {
+                   videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_AV1, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
+               }
+               if let videoFormatData {
+                   videoStream = StreamContext(index: Int(streamIndex), codecContext: nil, fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormatData: videoFormatData, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
                }
            }
        }
@@ -8,13 +8,13 @@ public final class FFMpegMediaFrameSourceContextHelpers {
        return
    }()

-   static func createFormatDescriptionFromAVCCodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: UnsafePointer<UInt8>, _ extradata_size: Int32) -> CMFormatDescription? {
+   static func createFormatDescriptionFromAVCCodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: Data) -> CMFormatDescription? {
        let par = NSMutableDictionary()
        par.setObject(1 as NSNumber, forKey: "HorizontalSpacing" as NSString)
        par.setObject(1 as NSNumber, forKey: "VerticalSpacing" as NSString)

        let atoms = NSMutableDictionary()
-       atoms.setObject(NSData(bytes: extradata, length: Int(extradata_size)), forKey: "avcC" as NSString)
+       atoms.setObject(extradata as NSData, forKey: "avcC" as NSString)

        let extensions = NSMutableDictionary()
        extensions.setObject("left" as NSString, forKey: "CVImageBufferChromaLocationBottomField" as NSString)
@@ -35,13 +35,13 @@ public final class FFMpegMediaFrameSourceContextHelpers {
        return formatDescription
    }

-   static func createFormatDescriptionFromMpeg4CodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: UnsafePointer<UInt8>, _ extradata_size: Int32) -> CMFormatDescription? {
+   static func createFormatDescriptionFromMpeg4CodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: Data) -> CMFormatDescription? {
        let par = NSMutableDictionary()
        par.setObject(1 as NSNumber, forKey: "HorizontalSpacing" as NSString)
        par.setObject(1 as NSNumber, forKey: "VerticalSpacing" as NSString)

        let atoms = NSMutableDictionary()
-       atoms.setObject(NSData(bytes: extradata, length: Int(extradata_size)), forKey: "esds" as NSString)
+       atoms.setObject(extradata as NSData, forKey: "esds" as NSString)

        let extensions = NSMutableDictionary()
        extensions.setObject("left" as NSString, forKey: "CVImageBufferChromaLocationBottomField" as NSString)
@@ -64,13 +64,13 @@ public final class FFMpegMediaFrameSourceContextHelpers {
        return formatDescription
    }

-   static func createFormatDescriptionFromHEVCCodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: UnsafePointer<UInt8>, _ extradata_size: Int32) -> CMFormatDescription? {
+   static func createFormatDescriptionFromHEVCCodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: Data) -> CMFormatDescription? {
        let par = NSMutableDictionary()
        par.setObject(1 as NSNumber, forKey: "HorizontalSpacing" as NSString)
        par.setObject(1 as NSNumber, forKey: "VerticalSpacing" as NSString)

        let atoms = NSMutableDictionary()
-       atoms.setObject(NSData(bytes: extradata, length: Int(extradata_size)), forKey: "hvcC" as NSString)
+       atoms.setObject(extradata as NSData, forKey: "hvcC" as NSString)

        let extensions = NSMutableDictionary()
        extensions.setObject("left" as NSString, forKey: "CVImageBufferChromaLocationBottomField" as NSString)
@@ -90,4 +90,236 @@ public final class FFMpegMediaFrameSourceContextHelpers {

        return formatDescription
    }

    static func createFormatDescriptionFromAV1CodecData(_ formatId: UInt32, _ width: Int32, _ height: Int32, _ extradata: Data, frameData: Data) -> CMFormatDescription? {
        return createAV1FormatDescription(frameData)

        /*let par = NSMutableDictionary()
        par.setObject(1 as NSNumber, forKey: "HorizontalSpacing" as NSString)
        par.setObject(1 as NSNumber, forKey: "VerticalSpacing" as NSString)

        let atoms = NSMutableDictionary()
        atoms.setObject(extradata as NSData, forKey: "av1C" as NSString)

        let extensions = NSMutableDictionary()
        extensions.setObject("left" as NSString, forKey: "CVImageBufferChromaLocationBottomField" as NSString)
        extensions.setObject("left" as NSString, forKey: "CVImageBufferChromaLocationTopField" as NSString)
        extensions.setObject(0 as NSNumber, forKey: "FullRangeVideo" as NSString)
        extensions.setObject(par, forKey: "CVPixelAspectRatio" as NSString)
        extensions.setObject(atoms, forKey: "SampleDescriptionExtensionAtoms" as NSString)
        extensions.setObject("hevc" as NSString, forKey: "FormatName" as NSString)
        extensions.setObject(0 as NSNumber, forKey: "SpatialQuality" as NSString)
        extensions.setObject(0 as NSNumber, forKey: "Version" as NSString)
        extensions.setObject(0 as NSNumber, forKey: "FullRangeVideo" as NSString)
        extensions.setObject(1 as NSNumber, forKey: "CVFieldCount" as NSString)
        extensions.setObject(24 as NSNumber, forKey: "Depth" as NSString)

        var formatDescription: CMFormatDescription?
        CMVideoFormatDescriptionCreate(allocator: nil, codecType: CMVideoCodecType(formatId), width: width, height: height, extensions: extensions, formatDescriptionOut: &formatDescription)

        return formatDescription*/
    }
}
private func getSequenceHeaderOBU(data: Data) -> (Data, Data)? {
    let originalData = data
    return data.withUnsafeBytes { buffer -> (Data, Data)? in
        let data = buffer.baseAddress!.assumingMemoryBound(to: UInt8.self)

        var index = 0
        while true {
            if index >= buffer.count {
                return nil
            }

            let startIndex = index
            let value = data[index]
            index += 1
            if (value >> 7) != 0 {
                return nil
            }
            let headerType = value >> 3
            let hasPayloadSize = value & 0x02
            if hasPayloadSize == 0 {
                return nil
            }

            let hasExtension = value & 0x04
            if hasExtension != 0 {
                index += 1
            }

            let payloadSize = readULEBSize(data: data, dataSize: buffer.count, index: &index)
            if index + payloadSize >= buffer.count {
                return nil
            }

            if headerType == 1 {
                let fullObu = originalData.subdata(in: startIndex ..< (startIndex + payloadSize + index - startIndex))
                let obuData = originalData.subdata(in: index ..< (index + payloadSize))
                return (fullObu, obuData)
            }

            index += payloadSize
        }

        return nil
    }
}
private func readULEBSize(data: UnsafePointer<UInt8>, dataSize: Int, index: inout Int) -> Int {
    var value = 0
    for cptr in 0 ..< 8 {
        if index >= dataSize {
            return 0
        }

        let dataByte = data[index]
        index += 1
        let decodedByte = dataByte & 0x7f
        // Widen to Int before shifting: shifting the UInt8 itself would zero
        // out every 7-bit group after the first one.
        value |= Int(decodedByte) << (7 * cptr)
        if value >= Int(Int32.max) {
            return 0
        }
        if (dataByte & 0x80) == 0 {
            break
        }
    }
    return value
}
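`readULEBSize` decodes the LEB128-style size field AV1 uses for OBU payload lengths: each byte contributes seven low bits, least-significant group first, and a set high bit means another byte follows. A worked example:

    // Worked example of the encoding read above: 0x96 0x01 decodes as
    // 0x16 + (0x01 << 7) = 22 + 128 = 150.
    let encoded: [UInt8] = [0x96, 0x01]
    var value = 0
    var shift = 0
    for byte in encoded {
        value |= Int(byte & 0x7f) << shift
        shift += 7
        if byte & 0x80 == 0 { break }
    }
    assert(value == 150)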
private struct ParsedSequenceHeaderParameters {
    var height: Int32 = 0
    var width: Int32 = 0

    var profile: UInt8 = 0
    var level: UInt8 = 0

    var high_bitdepth: UInt8 = 0
    var twelve_bit: UInt8 = 0
    var chroma_type: UInt8 = 0
}
private func parseSequenceHeaderOBU(data: Data) -> ParsedSequenceHeaderParameters? {
    var parameters = ParsedSequenceHeaderParameters()

    let bitReader = LsbBitReader(data: data)
    var value: UInt32 = 0

    // Read three bits, profile
    if bitReader.bitsLeft < 3 {
        return nil
    }
    value = bitReader.uint32(fromBits: 3)
    bitReader.advance(by: 3)
    parameters.profile = UInt8(bitPattern: Int8(clamping: value))

    // Read one bit, still picture
    if bitReader.bitsLeft < 1 {
        return nil
    }
    value = bitReader.uint32(fromBits: 1)
    bitReader.advance(by: 1)

    // Read one bit, hdr still picture
    if bitReader.bitsLeft < 1 {
        return nil
    }
    value = bitReader.uint32(fromBits: 1)
    bitReader.advance(by: 1)
    // We only support hdr still picture = 0 for now.
    if value != 0 {
        return nil
    }

    parameters.high_bitdepth = 0
    parameters.twelve_bit = 0
    parameters.chroma_type = 3

    // Read one bit, timing info
    if bitReader.bitsLeft < 1 {
        return nil
    }
    value = bitReader.uint32(fromBits: 1)
    bitReader.advance(by: 1)
    // We only support no timing info for now.
    if value != 0 {
        return nil
    }

    // Read one bit, display mode
    if bitReader.bitsLeft < 1 {
        return nil
    }
    value = bitReader.uint32(fromBits: 1)
    bitReader.advance(by: 1)

    // Read 5 bits, operating_points_cnt_minus_1
    if bitReader.bitsLeft < 5 {
        return nil
    }
    value = bitReader.uint32(fromBits: 5)
    bitReader.advance(by: 5)
    // We only support operating_points_cnt_minus_1 = 0 for now.
    if value != 0 {
        return nil
    }

    // Read 12 bits, operating_point_idc
    if bitReader.bitsLeft < 12 {
        return nil
    }
    value = bitReader.uint32(fromBits: 12)
    bitReader.advance(by: 12)

    // Read 5 bits, level
    if bitReader.bitsLeft < 5 {
        return nil
    }
    value = bitReader.uint32(fromBits: 5)
    bitReader.advance(by: 5)
    parameters.level = UInt8(value)

    // If level >= 4.0, read one bit
    if parameters.level > 7 {
        if bitReader.bitsLeft < 1 {
            return nil
        }
        value = bitReader.uint32(fromBits: 1)
        bitReader.advance(by: 1)
    }

    // Read the number of bits used for the width field
    if bitReader.bitsLeft < 4 {
        return nil
    }
    value = bitReader.uint32(fromBits: 4)
    bitReader.advance(by: 4)
    let widthNumBits = value + 1

    // Read the number of bits used for the height field
    if bitReader.bitsLeft < 4 {
        return nil
    }
    value = bitReader.uint32(fromBits: 4)
    bitReader.advance(by: 4)
    let heightNumBits = value + 1

    // Read the width using that bit count
    if bitReader.bitsLeft < Int(widthNumBits) {
        return nil
    }
    value = bitReader.uint32(fromBits: Int(widthNumBits))
    bitReader.advance(by: Int(widthNumBits))
    parameters.width = Int32(value + 1)

    // Read the height using that bit count
    if bitReader.bitsLeft < Int(heightNumBits) {
        return nil
    }
    value = bitReader.uint32(fromBits: Int(heightNumBits))
    bitReader.advance(by: Int(heightNumBits))
    parameters.height = Int32(value + 1)

    return parameters
}
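The parser above reads just enough of the AV1 sequence header to recover profile, level, and the frame dimensions (each dimension is stored as a bit-width field followed by `value_minus_1`), bailing out on stream features it does not support. Hypothetical usage together with `getSequenceHeaderOBU`; `chunkData` is a placeholder for real demuxed bytes:

    // Hypothetical usage: pull dimensions out of the first chunk of an AV1
    // stream using the two helpers defined above.
    let chunkData = Data() // stand-in for the first video chunk's bytes
    if let (_, payload) = getSequenceHeaderOBU(data: chunkData),
       let params = parseSequenceHeaderOBU(data: payload) {
        print("AV1 \(params.width)x\(params.height), profile \(params.profile), level \(params.level)")
    }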
@@ -1,16 +1,47 @@
import CoreMedia

final class FFMpegMediaPassthroughVideoFrameDecoder: MediaTrackFrameDecoder {
-   private let videoFormat: CMVideoFormatDescription
+   final class VideoFormatData {
+       let codecType: CMVideoCodecType
+       let width: Int32
+       let height: Int32
+       let extraData: Data
+
+       init(codecType: CMVideoCodecType, width: Int32, height: Int32, extraData: Data) {
+           self.codecType = codecType
+           self.width = width
+           self.height = height
+           self.extraData = extraData
+       }
+   }
+
+   private let videoFormatData: VideoFormatData
+   private var videoFormat: CMVideoFormatDescription?
    private let rotationAngle: Double
    private var resetDecoderOnNextFrame = true

-   init(videoFormat: CMVideoFormatDescription, rotationAngle: Double) {
-       self.videoFormat = videoFormat
+   init(videoFormatData: VideoFormatData, rotationAngle: Double) {
+       self.videoFormatData = videoFormatData
        self.rotationAngle = rotationAngle
    }

    func decode(frame: MediaTrackDecodableFrame) -> MediaTrackFrame? {
+       if self.videoFormat == nil {
+           if self.videoFormatData.codecType == kCMVideoCodecType_MPEG4Video {
+               self.videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromMpeg4CodecData(UInt32(kCMVideoCodecType_MPEG4Video), self.videoFormatData.width, self.videoFormatData.height, self.videoFormatData.extraData)
+           } else if self.videoFormatData.codecType == kCMVideoCodecType_H264 {
+               self.videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromAVCCodecData(UInt32(kCMVideoCodecType_H264), self.videoFormatData.width, self.videoFormatData.height, self.videoFormatData.extraData)
+           } else if self.videoFormatData.codecType == kCMVideoCodecType_HEVC {
+               self.videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromHEVCCodecData(UInt32(kCMVideoCodecType_HEVC), self.videoFormatData.width, self.videoFormatData.height, self.videoFormatData.extraData)
+           } else if self.videoFormatData.codecType == kCMVideoCodecType_AV1 {
+               self.videoFormat = FFMpegMediaFrameSourceContextHelpers.createFormatDescriptionFromAV1CodecData(UInt32(kCMVideoCodecType_AV1), self.videoFormatData.width, self.videoFormatData.height, self.videoFormatData.extraData, frameData: frame.copyPacketData())
+           }
+       }
+
+       if self.videoFormat == nil {
+           return nil
+       }
+
        var blockBuffer: CMBlockBuffer?

        let bytes = malloc(Int(frame.packet.size))!
@@ -50,4 +81,8 @@ final class FFMpegMediaPassthroughVideoFrameDecoder: MediaTrackFrameDecoder {
    func reset() {
        self.resetDecoderOnNextFrame = true
    }
+
+   func sendEndToDecoder() -> Bool {
+       return true
+   }
}
@@ -91,7 +91,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
        return self.codecContext.sendEnd()
    }

-   public func receiveFromDecoder(ptsOffset: CMTime?) -> ReceiveResult {
+   public func receiveFromDecoder(ptsOffset: CMTime?, displayImmediately: Bool = true) -> ReceiveResult {
        if self.isError {
            return .error
        }
@@ -112,7 +112,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
        if let ptsOffset = ptsOffset {
            pts = CMTimeAdd(pts, ptsOffset)
        }
-       if let convertedFrame = convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: self.videoFrame.duration > 0 ? CMTimeMake(value: self.videoFrame.duration, timescale: defaultTimescale) : defaultDuration) {
+       if let convertedFrame = convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: self.videoFrame.duration > 0 ? CMTimeMake(value: self.videoFrame.duration, timescale: defaultTimescale) : defaultDuration, displayImmediately: displayImmediately) {
            return .result(convertedFrame)
        } else {
            return .error
@@ -126,7 +126,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
        }
    }

-   public func decode(frame: MediaTrackDecodableFrame, ptsOffset: CMTime?, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true) -> MediaTrackFrame? {
+   public func decode(frame: MediaTrackDecodableFrame, ptsOffset: CMTime?, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true, displayImmediately: Bool = true) -> MediaTrackFrame? {
        if self.isError {
            return nil
        }
@@ -146,7 +146,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
        if let ptsOffset = ptsOffset {
            pts = CMTimeAdd(pts, ptsOffset)
        }
-       return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: frame.duration, forceARGB: forceARGB, unpremultiplyAlpha: unpremultiplyAlpha)
+       return convertVideoFrame(self.videoFrame, pts: pts, dts: pts, duration: frame.duration, forceARGB: forceARGB, unpremultiplyAlpha: unpremultiplyAlpha, displayImmediately: displayImmediately)
    }
}
@ -269,7 +269,7 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
|
||||
return UIImage(cgImage: image, scale: 1.0, orientation: .up)
|
||||
}
|
||||
|
||||
private func convertVideoFrame(_ frame: FFMpegAVFrame, pts: CMTime, dts: CMTime, duration: CMTime, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true) -> MediaTrackFrame? {
|
||||
private func convertVideoFrame(_ frame: FFMpegAVFrame, pts: CMTime, dts: CMTime, duration: CMTime, forceARGB: Bool = false, unpremultiplyAlpha: Bool = true, displayImmediately: Bool = true) -> MediaTrackFrame? {
|
||||
if frame.data[0] == nil {
|
||||
return nil
|
||||
}
|
||||
@ -438,7 +438,9 @@ public final class FFMpegMediaVideoFrameDecoder: MediaTrackFrameDecoder {
|
||||
//dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleBufferAttachmentKey_ResetDecoderBeforeDecoding as NSString as String)
|
||||
}
|
||||
|
||||
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DisplayImmediately as NSString as String)
|
||||
if displayImmediately {
|
||||
dict.setValue(kCFBooleanTrue as AnyObject, forKey: kCMSampleAttachmentKey_DisplayImmediately as NSString as String)
|
||||
}
|
||||
|
||||
let decodedFrame = MediaTrackFrame(type: .video, sampleBuffer: sampleBuffer!, resetDecoder: resetDecoder, decoded: true)
|
||||
|
||||
|
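Note: the effect of the new `displayImmediately` parameter is confined to the sample-buffer attachments. When it is false, `kCMSampleAttachmentKey_DisplayImmediately` is no longer set, so an `AVSampleBufferDisplayLayer` presents the frame according to its timing info instead of as soon as it is enqueued. A minimal standalone sketch of setting that attachment (hypothetical helper, not part of the diff):

import CoreMedia

// Hypothetical helper mirroring what convertVideoFrame does when
// displayImmediately == true: flag the buffer for immediate display.
func markForImmediateDisplay(_ sampleBuffer: CMSampleBuffer) {
    guard let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true), CFArrayGetCount(attachments) > 0 else {
        return
    }
    let dict = unsafeBitCast(CFArrayGetValueAtIndex(attachments, 0), to: CFMutableDictionary.self)
    CFDictionarySetValue(dict, Unmanaged.passUnretained(kCMSampleAttachmentKey_DisplayImmediately).toOpaque(), Unmanaged.passUnretained(kCFBooleanTrue).toOpaque())
}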
submodules/MediaPlayer/Sources/MediaDataReader.swift (new file, 157 lines)
@ -0,0 +1,157 @@
import Foundation
import AVFoundation
import CoreMedia
import FFMpegBinding

protocol MediaDataReader: AnyObject {
    var hasVideo: Bool { get }
    var hasAudio: Bool { get }

    func readSampleBuffer() -> CMSampleBuffer?
}

final class FFMpegMediaDataReader: MediaDataReader {
    private let isVideo: Bool
    private let videoSource: SoftwareVideoReader?
    private let audioSource: SoftwareAudioSource?

    var hasVideo: Bool {
        return self.videoSource != nil
    }

    var hasAudio: Bool {
        return self.audioSource != nil
    }

    init(filePath: String, isVideo: Bool) {
        self.isVideo = isVideo

        if self.isVideo {
            let videoSource = SoftwareVideoReader(path: filePath, hintVP9: false, passthroughDecoder: true)
            if videoSource.hasStream {
                self.videoSource = videoSource
            } else {
                self.videoSource = nil
            }
            self.audioSource = nil
        } else {
            let audioSource = SoftwareAudioSource(path: filePath)
            if audioSource.hasStream {
                self.audioSource = audioSource
            } else {
                self.audioSource = nil
            }
            self.videoSource = nil
        }
    }

    func readSampleBuffer() -> CMSampleBuffer? {
        if let videoSource {
            let frame = videoSource.readFrame()
            if let frame {
                return frame.sampleBuffer
            } else {
                return nil
            }
        } else if let audioSource {
            return audioSource.readSampleBuffer()
        }

        return nil
    }
}

final class AVAssetVideoDataReader: MediaDataReader {
    private let isVideo: Bool
    private var mediaInfo: FFMpegMediaInfo.Info?
    private var assetReader: AVAssetReader?
    private var assetOutput: AVAssetReaderOutput?

    var hasVideo: Bool {
        return self.assetOutput != nil
    }

    var hasAudio: Bool {
        return false
    }

    init(filePath: String, isVideo: Bool) {
        self.isVideo = isVideo

        if self.isVideo {
            guard let video = extractFFMpegMediaInfo(path: filePath)?.video else {
                return
            }
            self.mediaInfo = video

            let asset = AVURLAsset(url: URL(fileURLWithPath: filePath))
            guard let assetReader = try? AVAssetReader(asset: asset) else {
                return
            }
            guard let videoTrack = asset.tracks(withMediaType: .video).first else {
                return
            }
            let videoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: [String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange])
            assetReader.add(videoOutput)
            if assetReader.startReading() {
                self.assetReader = assetReader
                self.assetOutput = videoOutput
            }
        }
    }

    func readSampleBuffer() -> CMSampleBuffer? {
        guard let mediaInfo = self.mediaInfo, let assetReader = self.assetReader, let assetOutput = self.assetOutput else {
            return nil
        }
        var retryCount = 0
        while true {
            if let sampleBuffer = assetOutput.copyNextSampleBuffer() {
                return createSampleBuffer(fromSampleBuffer: sampleBuffer, withTimeOffset: mediaInfo.startTime, duration: nil)
            } else if assetReader.status == .reading && retryCount < 100 {
                Thread.sleep(forTimeInterval: 1.0 / 60.0)
                retryCount += 1
            } else {
                break
            }
        }

        return nil
    }
}

private func createSampleBuffer(fromSampleBuffer sampleBuffer: CMSampleBuffer, withTimeOffset timeOffset: CMTime, duration: CMTime?) -> CMSampleBuffer? {
    var itemCount: CMItemCount = 0
    var status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &itemCount)
    if status != 0 {
        return nil
    }

    var timingInfo = [CMSampleTimingInfo](repeating: CMSampleTimingInfo(duration: CMTimeMake(value: 0, timescale: 0), presentationTimeStamp: CMTimeMake(value: 0, timescale: 0), decodeTimeStamp: CMTimeMake(value: 0, timescale: 0)), count: itemCount)
    status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: itemCount, arrayToFill: &timingInfo, entriesNeededOut: &itemCount)
    if status != 0 {
        return nil
    }

    if let dur = duration {
        for i in 0 ..< itemCount {
            timingInfo[i].decodeTimeStamp = CMTimeAdd(timingInfo[i].decodeTimeStamp, timeOffset)
            timingInfo[i].presentationTimeStamp = CMTimeAdd(timingInfo[i].presentationTimeStamp, timeOffset)
            timingInfo[i].duration = dur
        }
    } else {
        for i in 0 ..< itemCount {
            timingInfo[i].decodeTimeStamp = CMTimeAdd(timingInfo[i].decodeTimeStamp, timeOffset)
            timingInfo[i].presentationTimeStamp = CMTimeAdd(timingInfo[i].presentationTimeStamp, timeOffset)
        }
    }

    var sampleBufferOffset: CMSampleBuffer?
    CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleTimingEntryCount: itemCount, sampleTimingArray: &timingInfo, sampleBufferOut: &sampleBufferOffset)

    if let output = sampleBufferOffset {
        return output
    } else {
        return nil
    }
}
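Note: a usage sketch for the new readers — pick the FFMpeg-backed reader, drain sample buffers until nil, and enqueue them somewhere. `videoPath` and the `AVSampleBufferDisplayLayer` target are assumptions for illustration; the diff itself does not show the consuming side.

import AVFoundation

// Sketch only: drain the new reader and feed a display layer.
let reader: MediaDataReader = FFMpegMediaDataReader(filePath: videoPath, isVideo: true)
if reader.hasVideo {
    let layer = AVSampleBufferDisplayLayer()
    while let sampleBuffer = reader.readSampleBuffer() {
        layer.enqueue(sampleBuffer)
    }
}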
@ -19,6 +19,10 @@ private final class MediaPlayerNodeLayer: AVSampleBufferDisplayLayer {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        assert(Queue.mainQueue().isCurrent())
    }

    override func action(forKey event: String) -> CAAction? {
        return MediaPlayerNodeLayerNullAction()
    }
@ -66,7 +70,7 @@ public final class MediaPlayerNode: ASDisplayNode {

    private var videoNode: MediaPlayerNodeDisplayNode

    private var videoLayer: AVSampleBufferDisplayLayer?
    public private(set) var videoLayer: AVSampleBufferDisplayLayer?

    private let videoQueue: Queue
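Note: making `videoLayer` publicly readable lets callers hand the raw `AVSampleBufferDisplayLayer` to system APIs; native picture-in-picture is the likely consumer, though this diff does not show the call site. A hedged sketch on iOS 15+ (`playerNode` and `pipDelegate` are assumed to exist in scope):

import AVKit

// Sketch, assuming a playback delegate exists; the actual wiring is not in this diff.
if #available(iOS 15.0, *), let layer = playerNode.videoLayer {
    let source = AVPictureInPictureController.ContentSource(sampleBufferDisplayLayer: layer, playbackDelegate: pipDelegate)
    let controller = AVPictureInPictureController(contentSource: source)
    controller.startPictureInPicture()
}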
@ -4,4 +4,5 @@ protocol MediaTrackFrameDecoder {
    func takeQueuedFrame() -> MediaTrackFrame?
    func takeRemainingFrame() -> MediaTrackFrame?
    func reset()
    func sendEndToDecoder() -> Bool
}
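Note: the protocol gains `takeRemainingFrame()` so callers can drain a decoder generically without downcasting to `FFMpegMediaVideoFrameDecoder`. For a decoder that buffers at most one frame, the conformance could be as small as this (the `pendingFrame` field is illustrative, not from the commit):

// Illustrative: how a one-frame-buffer decoder could satisfy the new requirement.
func takeRemainingFrame() -> MediaTrackFrame? {
    let frame = self.pendingFrame
    self.pendingFrame = nil
    return frame
}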
@ -40,11 +40,11 @@ private final class SoftwareVideoStream {
    let timebase: CMTime
    let startTime: CMTime
    let duration: CMTime
    let decoder: FFMpegMediaVideoFrameDecoder
    let decoder: MediaTrackFrameDecoder
    let rotationAngle: Double
    let aspect: Double

    init(index: Int, fps: CMTime, timebase: CMTime, startTime: CMTime, duration: CMTime, decoder: FFMpegMediaVideoFrameDecoder, rotationAngle: Double, aspect: Double) {
    init(index: Int, fps: CMTime, timebase: CMTime, startTime: CMTime, duration: CMTime, decoder: MediaTrackFrameDecoder, rotationAngle: Double, aspect: Double) {
        self.index = index
        self.fps = fps
        self.timebase = timebase
@ -73,7 +73,11 @@ public final class SoftwareVideoSource {

    public private(set) var reportedDuration: CMTime = .invalid

    public init(path: String, hintVP9: Bool, unpremultiplyAlpha: Bool) {
    public var hasStream: Bool {
        return self.videoStream != nil
    }

    public init(path: String, hintVP9: Bool, unpremultiplyAlpha: Bool, passthroughDecoder: Bool = false) {
        let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals

        self.hintVP9 = hintVP9
@ -142,12 +146,30 @@ public final class SoftwareVideoSource {
            let rotationAngle: Double = metrics.rotationAngle
            let aspect = Double(metrics.width) / Double(metrics.height)

            if let codec = FFMpegAVCodec.find(forId: codecId) {
                let codecContext = FFMpegAVCodecContext(codec: codec)
                if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
                    if codecContext.open() {
                        videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
                        break
            if passthroughDecoder {
                var videoFormatData: FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData?
                if codecId == FFMpegCodecIdMPEG4 {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_MPEG4Video, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                } else if codecId == FFMpegCodecIdH264 {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_H264, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                } else if codecId == FFMpegCodecIdHEVC {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_HEVC, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                } else if codecId == FFMpegCodecIdAV1 {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_AV1, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                }

                if let videoFormatData {
                    videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormatData: videoFormatData, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
                    break
                }
            } else {
                if let codec = FFMpegAVCodec.find(forId: codecId) {
                    let codecContext = FFMpegAVCodecContext(codec: codec)
                    if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
                        if codecContext.open() {
                            videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
                            break
                        }
                    }
                }
            }
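Note: the codec-ID-to-`VideoFormatData` mapping above is duplicated verbatim in `SoftwareVideoReader` further down; one way to factor it out would be a small helper keyed on the FFMpeg codec ID. A sketch, not part of the commit; it assumes the `VideoFormatData` field types line up with the stream metrics used above:

// Sketch: shared mapping from FFMpeg codec IDs to passthrough format data.
// The width/height/extraData parameters mirror the stream metrics used above.
private func passthroughFormatData(codecId: Int32, width: Int32, height: Int32, extraData: Data) -> FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData? {
    let codecType: CMVideoCodecType
    switch codecId {
    case FFMpegCodecIdMPEG4: codecType = kCMVideoCodecType_MPEG4Video
    case FFMpegCodecIdH264: codecType = kCMVideoCodecType_H264
    case FFMpegCodecIdHEVC: codecType = kCMVideoCodecType_HEVC
    case FFMpegCodecIdAV1: codecType = kCMVideoCodecType_AV1
    default: return nil
    }
    return FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: codecType, width: width, height: height, extraData: extraData)
}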
@ -255,15 +277,25 @@ public final class SoftwareVideoSource {
            if let maxPts = maxPts, CMTimeCompare(decodableFrame.pts, maxPts) < 0 {
                ptsOffset = maxPts
            }
            result = (videoStream.decoder.decode(frame: decodableFrame, ptsOffset: ptsOffset, forceARGB: self.hintVP9, unpremultiplyAlpha: self.unpremultiplyAlpha), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
            if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
                result = (decoder.decode(frame: decodableFrame, ptsOffset: ptsOffset, forceARGB: self.hintVP9, unpremultiplyAlpha: self.unpremultiplyAlpha), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
            } else {
                result = (videoStream.decoder.decode(frame: decodableFrame), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
            }
        } else {
            result = (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
        }
        if loop {
            let _ = videoStream.decoder.sendEndToDecoder()
            let remainingFrames = videoStream.decoder.receiveRemainingFrames(ptsOffset: maxPts)
            for i in 0 ..< remainingFrames.count {
                self.enqueuedFrames.append((remainingFrames[i], CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), i == remainingFrames.count - 1))
            if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
                let remainingFrames = decoder.receiveRemainingFrames(ptsOffset: maxPts)
                for i in 0 ..< remainingFrames.count {
                    self.enqueuedFrames.append((remainingFrames[i], CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), i == remainingFrames.count - 1))
                }
            } else {
                if let remainingFrame = videoStream.decoder.takeRemainingFrame() {
                    self.enqueuedFrames.append((remainingFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true))
                }
            }
            videoStream.decoder.reset()
            avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
@ -277,10 +309,14 @@ public final class SoftwareVideoSource {

    public func readImage() -> (UIImage?, CGFloat, CGFloat, Bool) {
        if let videoStream = self.videoStream {
            guard let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder else {
                return (nil, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), true)
            }

            for _ in 0 ..< 10 {
                let (decodableFrame, loop) = self.readDecodableFrame()
                if let decodableFrame = decodableFrame {
                    if let renderedFrame = videoStream.decoder.render(frame: decodableFrame) {
                    if let renderedFrame = decoder.render(frame: decodableFrame) {
                        return (renderedFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect), loop)
                    }
                }
@ -327,6 +363,10 @@ public final class SoftwareAudioSource {

    private var hasReadToEnd: Bool = false

    public var hasStream: Bool {
        return self.audioStream != nil
    }

    public init(path: String) {
        let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
@ -507,13 +547,240 @@ public final class SoftwareAudioSource {
    }
}

public final class FFMpegMediaInfo {
    public let startTime: CMTime
    public let duration: CMTime

    public init(startTime: CMTime, duration: CMTime) {
        self.startTime = startTime
        self.duration = duration
final class SoftwareVideoReader {
    private var readingError = false
    private var videoStream: SoftwareVideoStream?
    private var avIoContext: FFMpegAVIOContext?
    private var avFormatContext: FFMpegAVFormatContext?
    private let path: String
    fileprivate let fd: Int32?
    fileprivate let size: Int32

    private var didSendEndToEncoder: Bool = false
    private var hasReadToEnd: Bool = false
    private var enqueuedFrames: [(MediaTrackFrame, CGFloat, CGFloat)] = []

    public private(set) var reportedDuration: CMTime = .invalid

    public var hasStream: Bool {
        return self.videoStream != nil
    }

    public init(path: String, hintVP9: Bool, passthroughDecoder: Bool = false) {
        let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals

        var s = stat()
        stat(path, &s)
        self.size = Int32(s.st_size)

        let fd = open(path, O_RDONLY, S_IRUSR)
        if fd >= 0 {
            self.fd = fd
        } else {
            self.fd = nil
        }

        self.path = path

        let avFormatContext = FFMpegAVFormatContext()
        if hintVP9 {
            avFormatContext.forceVideoCodecId(FFMpegCodecIdVP9)
        }
        let ioBufferSize = 64 * 1024

        let avIoContext = FFMpegAVIOContext(bufferSize: Int32(ioBufferSize), opaqueContext: Unmanaged.passUnretained(self).toOpaque(), readPacket: readPacketCallback, writePacket: nil, seek: seekCallback, isSeekable: true)
        self.avIoContext = avIoContext

        avFormatContext.setIO(self.avIoContext!)

        if !avFormatContext.openInput() {
            self.readingError = true
            return
        }

        if !avFormatContext.findStreamInfo() {
            self.readingError = true
            return
        }

        self.avFormatContext = avFormatContext

        var videoStream: SoftwareVideoStream?

        for streamIndexNumber in avFormatContext.streamIndices(for: FFMpegAVFormatStreamTypeVideo) {
            let streamIndex = streamIndexNumber.int32Value
            if avFormatContext.isAttachedPic(atStreamIndex: streamIndex) {
                continue
            }

            let codecId = avFormatContext.codecId(atStreamIndex: streamIndex)

            let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
            let (fps, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)

            let startTime: CMTime
            let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
            if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) {
                startTime = CMTime(value: 0, timescale: timebase.timescale)
            } else {
                startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
            }
            let duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)

            let metrics = avFormatContext.metricsForStream(at: streamIndex)

            let rotationAngle: Double = metrics.rotationAngle
            let aspect = Double(metrics.width) / Double(metrics.height)

            if passthroughDecoder {
                var videoFormatData: FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData?
                if codecId == FFMpegCodecIdMPEG4 {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_MPEG4Video, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                } else if codecId == FFMpegCodecIdH264 {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_H264, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                } else if codecId == FFMpegCodecIdHEVC {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_HEVC, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                } else if codecId == FFMpegCodecIdAV1 {
                    videoFormatData = FFMpegMediaPassthroughVideoFrameDecoder.VideoFormatData(codecType: kCMVideoCodecType_AV1, width: metrics.width, height: metrics.height, extraData: Data(bytes: metrics.extradata, count: Int(metrics.extradataSize)))
                }

                if let videoFormatData {
                    videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaPassthroughVideoFrameDecoder(videoFormatData: videoFormatData, rotationAngle: rotationAngle), rotationAngle: rotationAngle, aspect: aspect)
                    break
                }
            } else {
                if let codec = FFMpegAVCodec.find(forId: codecId) {
                    let codecContext = FFMpegAVCodecContext(codec: codec)
                    if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
                        if codecContext.open() {
                            videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
                            break
                        }
                    }
                }
            }
        }

        self.reportedDuration = CMTime(seconds: avFormatContext.duration(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))

        self.videoStream = videoStream

        if let videoStream = self.videoStream {
            avFormatContext.seekFrame(forStreamIndex: Int32(videoStream.index), pts: 0, positionOnKeyframe: true)
        }
    }

    deinit {
        if let fd = self.fd {
            close(fd)
        }
    }

    private func readPacketInternal() -> FFMpegPacket? {
        guard let avFormatContext = self.avFormatContext else {
            return nil
        }

        let packet = FFMpegPacket()
        if avFormatContext.readFrame(into: packet) {
            return packet
        } else {
            return nil
        }
    }

    func readDecodableFrame() -> MediaTrackDecodableFrame? {
        if self.hasReadToEnd {
            return nil
        }

        while !self.readingError && !self.hasReadToEnd {
            if let packet = self.readPacketInternal() {
                if let videoStream = self.videoStream, Int(packet.streamIndex) == videoStream.index {
                    let packetPts = packet.pts

                    let pts = CMTimeMake(value: packetPts, timescale: videoStream.timebase.timescale)
                    let dts = CMTimeMake(value: packet.dts, timescale: videoStream.timebase.timescale)

                    let duration: CMTime

                    let frameDuration = packet.duration
                    if frameDuration != 0 {
                        duration = CMTimeMake(value: frameDuration * videoStream.timebase.value, timescale: videoStream.timebase.timescale)
                    } else {
                        duration = videoStream.fps
                    }

                    let frame = MediaTrackDecodableFrame(type: .video, packet: packet, pts: pts, dts: dts, duration: duration)
                    return frame
                }
            } else {
                self.hasReadToEnd = true
            }
        }

        return nil
    }

    public func readFrame() -> MediaTrackFrame? {
        guard let videoStream = self.videoStream else {
            return nil
        }

        while !self.readingError && !self.hasReadToEnd {
            if let decodableFrame = self.readDecodableFrame() {
                var result: (MediaTrackFrame?, CGFloat, CGFloat)
                if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
                    result = (decoder.decode(frame: decodableFrame, ptsOffset: nil, forceARGB: false, unpremultiplyAlpha: false, displayImmediately: false), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
                } else {
                    result = (videoStream.decoder.decode(frame: decodableFrame), CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect))
                }
                if let frame = result.0 {
                    return frame
                }
            } else {
                break
            }
        }

        if !self.readingError && self.hasReadToEnd && !self.didSendEndToEncoder {
            self.didSendEndToEncoder = true
            let _ = videoStream.decoder.sendEndToDecoder()

            if let decoder = videoStream.decoder as? FFMpegMediaVideoFrameDecoder {
                let remainingFrames = decoder.receiveRemainingFrames(ptsOffset: nil)
                for i in 0 ..< remainingFrames.count {
                    self.enqueuedFrames.append((remainingFrames[i], CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)))
                }
            } else {
                if let remainingFrame = videoStream.decoder.takeRemainingFrame() {
                    self.enqueuedFrames.append((remainingFrame, CGFloat(videoStream.rotationAngle), CGFloat(videoStream.aspect)))
                }
            }
        }

        if !self.enqueuedFrames.isEmpty {
            let result = self.enqueuedFrames.removeFirst()
            return result.0
        } else {
            return nil
        }
    }
}
public final class FFMpegMediaInfo {
    public struct Info {
        public let startTime: CMTime
        public let duration: CMTime
        public let codecName: String?
    }

    public let audio: Info?
    public let video: Info?

    public init(audio: Info?, video: Info?) {
        self.audio = audio
        self.video = video
    }
}

@ -580,7 +847,7 @@ public func extractFFMpegMediaInfo(path: String) -> FFMpegMediaInfo? {
        return nil
    }

    var streamInfos: [(isVideo: Bool, info: FFMpegMediaInfo)] = []
    var streamInfos: [(isVideo: Bool, info: FFMpegMediaInfo.Info)] = []

    for typeIndex in 0 ..< 1 {
        let isVideo = typeIndex == 0
@ -604,15 +871,21 @@ public func extractFFMpegMediaInfo(path: String) -> FFMpegMediaInfo? {
            var duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)
            duration = CMTimeMaximum(CMTime(value: 0, timescale: duration.timescale), CMTimeSubtract(duration, startTime))

            streamInfos.append((isVideo: isVideo, info: FFMpegMediaInfo(startTime: startTime, duration: duration)))
            var codecName: String?
            let codecId = avFormatContext.codecId(atStreamIndex: streamIndex)
            if codecId == FFMpegCodecIdMPEG4 {
                codecName = "mpeg4"
            } else if codecId == FFMpegCodecIdH264 {
                codecName = "h264"
            } else if codecId == FFMpegCodecIdHEVC {
                codecName = "hevc"
            } else if codecId == FFMpegCodecIdAV1 {
                codecName = "av1"
            }

            streamInfos.append((isVideo: isVideo, info: FFMpegMediaInfo.Info(startTime: startTime, duration: duration, codecName: codecName)))
        }
    }

    if let video = streamInfos.first(where: \.isVideo) {
        return video.info
    } else if let stream = streamInfos.first {
        return stream.info
    } else {
        return nil
    }
    return FFMpegMediaInfo(audio: streamInfos.first(where: { !$0.isVideo })?.info, video: streamInfos.first(where: { $0.isVideo })?.info)
}
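Note: with the new shape, `extractFFMpegMediaInfo` returns per-track info instead of a single stream, and the codec name travels with the video track. A consumption sketch (`fragmentPath` is an assumed local file path):

// Sketch: probing a downloaded fragment before enqueueing it.
if let info = extractFFMpegMediaInfo(path: fragmentPath) {
    let start = info.video?.startTime ?? info.audio?.startTime ?? .zero
    if info.video?.codecName == "av1" {
        // e.g. only accept the fragment when hardware AV1 decode exists
    }
    print("fragment starts at \(start.seconds)s")
}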
@ -903,8 +903,17 @@ public final class ManagedAudioSessionImpl: NSObject, ManagedAudioSession {
                break
        }

        try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
        try AVAudioSession.sharedInstance().setMode(mode)
        if #available(iOS 13.0, *) {
            #if DEBUG && false
            try AVAudioSession.sharedInstance().setCategory(nativeCategory, mode: mode, policy: .longFormVideo, options: options)
            #else
            try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
            try AVAudioSession.sharedInstance().setMode(mode)
            #endif
        } else {
            try AVAudioSession.sharedInstance().setCategory(nativeCategory, options: options)
            try AVAudioSession.sharedInstance().setMode(mode)
        }
        if AVAudioSession.sharedInstance().categoryOptions != options {
            switch type {
                case .voiceCall, .videoCall:
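Note: the `#if DEBUG && false` branch folds category, mode, and routing policy into a single call, but is compiled out here; both live paths keep the two-call form. A self-contained sketch of the single-call variant on iOS 13+ (category, mode, and options values are illustrative):

import AVFoundation

// Sketch of the single-call configuration kept disabled in the commit.
do {
    try AVAudioSession.sharedInstance().setCategory(.playback, mode: .moviePlayback, policy: .longFormVideo, options: [])
} catch {
    print("audio session configuration failed: \(error)")
}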
@ -796,7 +796,12 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
        return { [weak self] context, presentationData, dateTimeFormat, message, associatedData, attributes, media, mediaIndex, dateAndStatus, automaticDownload, peerType, peerId, sizeCalculation, layoutConstants, contentMode, presentationContext in
            let _ = peerType

            let useInlineHLS = "".isEmpty
            var useInlineHLS = true
            if let data = context.currentAppConfiguration.with({ $0 }).data {
                if let value = data["ios_inline_hls"] as? Double {
                    useInlineHLS = value != 0.0
                }
            }

            var nativeSize: CGSize

@ -1790,7 +1795,9 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
                                fileReference: .message(message: MessageReference(message), media: updatedVideoFile),
                                loopVideo: loopVideo,
                                enableSound: false,
                                fetchAutomatically: false
                                fetchAutomatically: false,
                                onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false),
                                autoFetchFullSizeThumbnail: true
                            )
                        } else {
                            videoContent = NativeVideoContent(
File diff suppressed because one or more lines are too long
@ -209,7 +209,7 @@ export class HlsPlayerInstance {
            this.hls.autoLevelCapping = level;
        } else {
            this.hls.autoLevelCapping = -1;
            this.hls.currentLevel = -1;
            //this.hls.currentLevel = -1;
        }
    }
@ -198,8 +198,11 @@ public final class HLSVideoContent: UniversalVideoContent {
    let enableSound: Bool
    let baseRate: Double
    let fetchAutomatically: Bool
    let onlyFullSizeThumbnail: Bool
    let useLargeThumbnail: Bool
    let autoFetchFullSizeThumbnail: Bool

    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true) {
    public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true, onlyFullSizeThumbnail: Bool = false, useLargeThumbnail: Bool = false, autoFetchFullSizeThumbnail: Bool = false) {
        self.id = id
        self.userLocation = userLocation
        self.nativeId = id
@ -211,10 +214,13 @@ public final class HLSVideoContent: UniversalVideoContent {
        self.enableSound = enableSound
        self.baseRate = baseRate
        self.fetchAutomatically = fetchAutomatically
        self.onlyFullSizeThumbnail = onlyFullSizeThumbnail
        self.useLargeThumbnail = useLargeThumbnail
        self.autoFetchFullSizeThumbnail = autoFetchFullSizeThumbnail
    }

    public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
        return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
        return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically, onlyFullSizeThumbnail: self.onlyFullSizeThumbnail, useLargeThumbnail: self.useLargeThumbnail, autoFetchFullSizeThumbnail: self.autoFetchFullSizeThumbnail)
    }

    public func isEqual(to other: UniversalVideoContent) -> Bool {
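Note: callers opt into the new thumbnail behaviour per call site through the defaulted parameters. A construction sketch (`id`, `userLocation`, and `fileReference` are assumed to exist in scope; the other values are illustrative):

// Sketch: HLS content that eagerly fetches a large full-size thumbnail.
let content = HLSVideoContent(
    id: id,
    userLocation: userLocation,
    fileReference: fileReference,
    enableSound: false,
    fetchAutomatically: false,
    onlyFullSizeThumbnail: false,
    useLargeThumbnail: true,
    autoFetchFullSizeThumbnail: true
)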
@ -1030,7 +1030,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod

    private var contextDisposable: Disposable?

    init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool) {
    init(accountId: AccountRecordId, postbox: Postbox, audioSessionManager: ManagedAudioSession, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool, loopVideo: Bool, enableSound: Bool, baseRate: Double, fetchAutomatically: Bool, onlyFullSizeThumbnail: Bool, useLargeThumbnail: Bool, autoFetchFullSizeThumbnail: Bool) {
        self.instanceId = HLSVideoJSNativeContentNode.nextInstanceId
        HLSVideoJSNativeContentNode.nextInstanceId += 1

@ -1071,8 +1071,21 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
        intrinsicDimensions.height = floor(intrinsicDimensions.height / UIScreenScale)
        self.intrinsicDimensions = intrinsicDimensions

        self.playerNode = MediaPlayerNode()

        var onSeeked: (() -> Void)?
        self.player = ChunkMediaPlayer(
        /*self.player = ChunkMediaPlayerV2(
            audioSessionManager: audioSessionManager,
            partsState: self.chunkPlayerPartsState.get(),
            video: true,
            enableSound: self.enableSound,
            baseRate: baseRate,
            onSeeked: {
                onSeeked?()
            },
            playerNode: self.playerNode
        )*/
        self.player = ChunkMediaPlayerImpl(
            postbox: postbox,
            audioSessionManager: audioSessionManager,
            partsState: self.chunkPlayerPartsState.get(),
@ -1081,21 +1094,19 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
            baseRate: baseRate,
            onSeeked: {
                onSeeked?()
            }
            },
            playerNode: self.playerNode
        )

        self.playerNode = MediaPlayerNode()
        self.player.attachPlayerNode(self.playerNode)

        super.init()

        self.contextDisposable = SharedHLSVideoJSContext.shared.register(context: self)

        self.playerNode.frame = CGRect(origin: CGPoint(), size: self.intrinsicDimensions)

        let thumbnailVideoReference = HLSVideoContent.minimizedHLSQuality(file: fileReference)?.file ?? fileReference
        //let thumbnailVideoReference = HLSVideoContent.minimizedHLSQuality(file: fileReference)?.file ?? fileReference

        self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: self.userLocation, videoReference: thumbnailVideoReference, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true) |> map { [weak self] getSize, getData in
        self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: userLocation, videoReference: fileReference, previewSourceFileReference: nil, imageReference: nil, onlyFullSize: onlyFullSizeThumbnail, useLargeThumbnail: useLargeThumbnail, autoFetchFullSizeThumbnail: autoFetchFullSizeThumbnail || fileReference.media.isInstantVideo) |> map { [weak self] getSize, getData in
            Queue.mainQueue().async {
                if let strongSelf = self, strongSelf.dimensions == nil {
                    if let dimensions = getSize() {
@ -1298,7 +1309,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
    }

    fileprivate func onSetCurrentTime(timestamp: Double) {
        self.player.seek(timestamp: timestamp)
        self.player.seek(timestamp: timestamp, play: nil)
    }

    fileprivate func onSetPlaybackRate(playbackRate: Double) {
@ -1449,7 +1460,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod

    func togglePlayPause() {
        assert(Queue.mainQueue().isCurrent())
        self.player.togglePlayPause()
        self.player.togglePlayPause(faded: false)
    }

    func setSoundEnabled(_ value: Bool) {
@ -1802,7 +1813,7 @@ private final class SourceBuffer {
            return
        }

        if let fragmentInfo = extractFFMpegMediaInfo(path: tempFile.path) {
        if let fragmentInfoSet = extractFFMpegMediaInfo(path: tempFile.path), let fragmentInfo = fragmentInfoSet.audio ?? fragmentInfoSet.video {
            Queue.mainQueue().async {
                guard let self else {
                    completion(RangeSet())
@ -1818,10 +1829,13 @@ private final class SourceBuffer {

                    completion(self.ranges)
                } else {
                    let videoCodecName: String? = fragmentInfoSet.video?.codecName

                    let item = ChunkMediaPlayerPart(
                        startTime: fragmentInfo.startTime.seconds,
                        endTime: fragmentInfo.startTime.seconds + fragmentInfo.duration.seconds,
                        file: tempFile
                        file: tempFile,
                        codecName: videoCodecName
                    )
                    self.items.append(item)
                    self.updateRanges()
@ -1852,7 +1866,7 @@ private final class SourceBuffer {

    func remove(start: Double, end: Double, completion: @escaping (RangeSet<Double>) -> Void) {
        self.items.removeAll(where: { item in
            if item.startTime >= start && item.endTime <= end {
            if item.startTime + 0.5 >= start && item.endTime - 0.5 <= end {
                return true
            } else {
                return false
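Note: the widened comparison in `remove` evicts fragments whose edges fall within half a second of the removal window, so fragments no longer survive eviction because of fractional timestamp drift between the HLS playlist and the demuxed file. The predicate in isolation (hypothetical free function for illustration):

// Sketch: a fragment is evicted unless it extends more than 0.5 s
// beyond either edge of the [start, end] removal window.
func shouldEvict(itemStart: Double, itemEnd: Double, start: Double, end: Double) -> Bool {
    return itemStart + 0.5 >= start && itemEnd - 0.5 <= end
}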
@ -26,7 +26,7 @@ public enum NativeVideoContentId: Hashable {
    case profileVideo(Int64, String?)
}

private let isAv1Supported: Bool = {
private let isHadrwareAv1Supported: Bool = {
    let value = VTIsHardwareDecodeSupported(kCMVideoCodecType_AV1)
    return value
}()
@ -70,11 +70,11 @@ public final class NativeVideoContent: UniversalVideoContent {
            return true
        }

        /*if videoCodec == "av1" {
            if isAv1Supported {
        if videoCodec == "av1" {
            if isHadrwareAv1Supported {
                return true
            }
        }*/
        }

        return false
    }
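Note: un-commenting this branch means "av1"-tagged qualities are now playable natively, but only behind the hardware probe; software AV1 decode stays off. Condensed, the gate reads roughly as below (the function name and the surrounding "h264"/"h265" checks are assumptions about the elided context):

import VideoToolbox

// Sketch of the effective codec gate after this change.
func isVideoCodecPlayable(_ videoCodec: String) -> Bool {
    if videoCodec == "h264" || videoCodec == "h265" {
        return true
    }
    if videoCodec == "av1" {
        // Hardware-only: VTIsHardwareDecodeSupported probes the decoder once.
        return VTIsHardwareDecodeSupported(kCMVideoCodecType_AV1)
    }
    return false
}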