[WIP] Livestream V2

Isaac 2024-12-31 14:03:02 +08:00
parent 45fa1b5ddb
commit ad0188f0ed
9 changed files with 532 additions and 61 deletions

View File

@@ -103,7 +103,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
case playlistPlayback(Bool)
case enableQuickReactionSwitch(Bool)
case disableReloginTokens(Bool)
case disableCallV2(Bool)
case liveStreamV2(Bool)
case experimentalCallMute(Bool)
case conferenceCalls(Bool)
case playerV2(Bool)
@@ -133,7 +133,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return DebugControllerSection.web.rawValue
case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
return DebugControllerSection.experiments.rawValue
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .disableCallV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation:
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation:
return DebugControllerSection.experiments.rawValue
case .logTranslationRecognition, .resetTranslationStates:
return DebugControllerSection.translation.rawValue
@@ -246,7 +246,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return 49
case .enableQuickReactionSwitch:
return 50
case .disableCallV2:
case .liveStreamV2:
return 51
case .experimentalCallMute:
return 52
@@ -1328,12 +1328,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
})
}).start()
})
case let .disableCallV2(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Disable Video Chat V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
case let .liveStreamV2(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Live Stream V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
settings.disableCallV2 = value
settings.liveStreamV2 = value
return PreferencesEntry(settings)
})
}).start()
@@ -1587,7 +1587,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
}
entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
entries.append(.enableQuickReactionSwitch(!experimentalSettings.disableQuickReaction))
entries.append(.disableCallV2(experimentalSettings.disableCallV2))
entries.append(.liveStreamV2(experimentalSettings.liveStreamV2))
entries.append(.experimentalCallMute(experimentalSettings.experimentalCallMute))
entries.append(.conferenceCalls(experimentalSettings.conferenceCalls))
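
Note: later hunks in this commit read the toggle back synchronously through the shared settings accessor. A minimal sketch of gating on the new flag (accessor and view names as they appear below; the surrounding scope is assumed):

// Sketch: gate the new pipeline on the experimental flag.
// `sharedContext` is assumed to be a SharedAccountContext in scope.
if sharedContext.immediateExperimentalUISettings.liveStreamV2 {
    // Route stream playback through LivestreamVideoViewV1.
} else {
    // Keep the existing ProxyVideoView-based path.
}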

View File

@@ -34,7 +34,7 @@ public final class ChunkMediaPlayerPart {
}
deinit {
TempBox.shared.dispose(self.file)
//TempBox.shared.dispose(self.file)
}
}
@@ -43,17 +43,19 @@ public final class ChunkMediaPlayerPart {
public let content: TempFile
public let clippedStartTime: Double?
public let codecName: String?
public let offsetTime: Double
public var id: Id {
return .tempFile(path: self.content.file.path)
}
public init(startTime: Double, clippedStartTime: Double? = nil, endTime: Double, content: TempFile, codecName: String?) {
public init(startTime: Double, clippedStartTime: Double? = nil, endTime: Double, content: TempFile, codecName: String?, offsetTime: Double) {
self.startTime = startTime
self.clippedStartTime = clippedStartTime
self.endTime = endTime
self.content = content
self.codecName = codecName
self.offsetTime = offsetTime
}
}
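
As the call sites later in this commit suggest, startTime/endTime place a part on the player's timeline, while the new offsetTime rebases the file's internal timestamps (each fetched part is a standalone container whose timestamps start near zero). A hypothetical construction of the second one-second part of a stream:

// Hypothetical: second 1 s part of a live stream. `tempFile` (a TempBoxFile)
// is assumed to exist; the file's own timestamps start near 0 and are
// shifted by offsetTime when read.
let part = ChunkMediaPlayerPart(
    startTime: 1.0,
    endTime: 2.0,
    content: ChunkMediaPlayerPart.TempFile(file: tempFile),
    codecName: nil,
    offsetTime: 1.0
)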

View File

@@ -77,19 +77,21 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
let content: Content
let mediaType: AVMediaType
let codecName: String?
let offset: Double
private(set) var reader: MediaDataReader?
var didBeginReading: Bool = false
var isFinished: Bool = false
init(queue: Queue, content: Content, mediaType: AVMediaType, codecName: String?) {
init(queue: Queue, content: Content, mediaType: AVMediaType, codecName: String?, offset: Double) {
assert(queue.isCurrent())
self.queue = queue
self.content = content
self.mediaType = mediaType
self.codecName = codecName
self.offset = offset
}
deinit {
@@ -425,7 +427,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
endTime: part.endTime,
content: part.content,
codecName: part.codecName
codecName: part.codecName,
offsetTime: part.offsetTime
))
minStartTime = max(minStartTime, partEndTime)
}
@@ -447,7 +450,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
endTime: part.endTime,
content: part.content,
codecName: part.codecName
codecName: part.codecName,
offsetTime: part.offsetTime
))
minStartTime = max(minStartTime, partEndTime)
break
@@ -519,7 +523,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
queue: dataQueue,
content: .tempFile(part.part.content),
mediaType: .audio,
codecName: part.part.codecName
codecName: part.part.codecName,
offset: part.part.offsetTime
)
cleanAudio.load(params: mediaDataReaderParams)
@@ -533,7 +538,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
queue: dataQueue,
content: .tempFile(part.part.content),
mediaType: .video,
codecName: part.part.codecName
codecName: part.part.codecName,
offset: part.part.offsetTime
)
video.load(params: mediaDataReaderParams)
@@ -541,7 +547,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
queue: dataQueue,
content: .tempFile(part.part.content),
mediaType: .audio,
codecName: part.part.codecName
codecName: part.part.codecName,
offset: part.part.offsetTime
)
audio.load(params: mediaDataReaderParams)
@@ -622,7 +629,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
queue: dataQueue,
content: .directStream(media),
mediaType: .video,
codecName: media.codecName
codecName: media.codecName,
offset: 0.0
)
}
video?.load(params: mediaDataReaderParams)
@@ -632,7 +640,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
queue: dataQueue,
content: .directStream(media),
mediaType: .audio,
codecName: media.codecName
codecName: media.codecName,
offset: 0.0
)
}
audio?.load(params: mediaDataReaderParams)
@@ -973,6 +982,11 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
switch reader.readSampleBuffer() {
case let .frame(sampleBuffer):
var sampleBuffer = sampleBuffer
if media.offset != 0.0 {
if let updatedSampleBuffer = createSampleBuffer(fromSampleBuffer: sampleBuffer, withTimeOffset: CMTimeMakeWithSeconds(Float64(media.offset), preferredTimescale: CMSampleBufferGetPresentationTimeStamp(sampleBuffer).timescale), duration: nil) {
sampleBuffer = updatedSampleBuffer
}
}
if let seekFromMinTimestamp = loadedPartsMediaData.seekFromMinTimestamp, CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds < seekFromMinTimestamp {
if isVideo {
var updatedSampleBuffer: CMSampleBuffer?
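
The retiming added above relies on createSampleBuffer(fromSampleBuffer:withTimeOffset:duration:), which is not part of this diff. A plausible sketch of such a helper, following the standard CoreMedia retiming pattern (an assumption, not necessarily the project's actual implementation):

import CoreMedia

func createSampleBuffer(fromSampleBuffer sampleBuffer: CMSampleBuffer, withTimeOffset offset: CMTime, duration: CMTime?) -> CMSampleBuffer? {
    // Copy the buffer's timing entries, shift every timestamp by `offset`,
    // and produce a retimed copy of the original buffer.
    var count: CMItemCount = 0
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &count)
    var timingInfo = [CMSampleTimingInfo](repeating: CMSampleTimingInfo(), count: count)
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: count, arrayToFill: &timingInfo, entriesNeededOut: nil)
    for i in 0 ..< count {
        // Adding to an invalid decode timestamp leaves it invalid, so no
        // validity check is needed.
        timingInfo[i].decodeTimeStamp = CMTimeAdd(timingInfo[i].decodeTimeStamp, offset)
        timingInfo[i].presentationTimeStamp = CMTimeAdd(timingInfo[i].presentationTimeStamp, offset)
        if let duration {
            timingInfo[i].duration = duration
        }
    }
    var result: CMSampleBuffer?
    CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleTimingEntryCount: count, sampleTimingArray: &timingInfo, sampleBufferOut: &result)
    return result
}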

View File

@@ -0,0 +1,279 @@
import Foundation
import UIKit
import AVFoundation
import SwiftSignalKit
import UniversalMediaPlayer
import Postbox
import TelegramCore
import AccountContext
import TelegramAudio
import Display
import TelegramVoip
import RangeSet
import ManagedFile
import FFMpegBinding
import TelegramUniversalVideoContent
final class LivestreamVideoViewV1: UIView {
private final class PartContext {
let part: DirectMediaStreamingContext.Playlist.Part
let disposable = MetaDisposable()
var resolvedTimeOffset: Double?
var data: TempBoxFile?
var info: FFMpegMediaInfo?
init(part: DirectMediaStreamingContext.Playlist.Part) {
self.part = part
}
deinit {
self.disposable.dispose()
}
}
private let context: AccountContext
private let audioSessionManager: ManagedAudioSession
private let call: PresentationGroupCall
private let chunkPlayerPartsState = Promise<ChunkMediaPlayerPartsState>(ChunkMediaPlayerPartsState(duration: 10000000.0, content: .parts([])))
private var parts: [ChunkMediaPlayerPart] = [] {
didSet {
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: 10000000.0, content: .parts(self.parts))))
}
}
private let player: ChunkMediaPlayer
private let playerNode: MediaPlayerNode
private var playerStatus: MediaPlayerStatus?
private var playerStatusDisposable: Disposable?
private var streamingContextDisposable: Disposable?
private var streamingContext: DirectMediaStreamingContext?
private var playlistDisposable: Disposable?
private var partContexts: [Int: PartContext] = [:]
private var requestedSeekTimestamp: Double?
init(
context: AccountContext,
audioSessionManager: ManagedAudioSession,
call: PresentationGroupCall
) {
self.context = context
self.audioSessionManager = audioSessionManager
self.call = call
self.playerNode = MediaPlayerNode()
var onSeeked: (() -> Void)?
self.player = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioSessionManager: audioSessionManager,
source: .externalParts(self.chunkPlayerPartsState.get()),
video: true,
enableSound: true,
baseRate: 1.0,
onSeeked: {
onSeeked?()
},
playerNode: self.playerNode
)
super.init(frame: CGRect())
self.addSubview(self.playerNode.view)
onSeeked = {
}
self.playerStatusDisposable = (self.player.status
|> deliverOnMainQueue).startStrict(next: { [weak self] status in
guard let self else {
return
}
self.updatePlayerStatus(status: status)
})
var didProcessFramesToDisplay = false
self.playerNode.isHidden = true
self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard let self, !didProcessFramesToDisplay else {
return
}
didProcessFramesToDisplay = true
self.playerNode.isHidden = false
}
if let call = call as? PresentationGroupCallImpl {
self.streamingContextDisposable = (call.externalMediaStream.get()
|> deliverOnMainQueue).startStrict(next: { [weak self] externalMediaStream in
guard let self else {
return
}
self.streamingContext = externalMediaStream
self.resetPlayback()
})
}
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
self.playerStatusDisposable?.dispose()
self.streamingContextDisposable?.dispose()
self.playlistDisposable?.dispose()
}
private func updatePlayerStatus(status: MediaPlayerStatus) {
self.playerStatus = status
self.updatePlaybackPositionIfNeeded()
}
private func resetPlayback() {
self.parts = []
self.playlistDisposable?.dispose()
self.playlistDisposable = nil
guard let streamingContext = self.streamingContext else {
return
}
self.playlistDisposable = (streamingContext.playlistData()
|> deliverOnMainQueue).startStrict(next: { [weak self] playlist in
guard let self else {
return
}
self.updatePlaylist(playlist: playlist)
})
}
private func updatePlaylist(playlist: DirectMediaStreamingContext.Playlist) {
var validPartIds: [Int] = []
for part in playlist.parts.prefix(4) {
validPartIds.append(part.index)
if self.partContexts[part.index] == nil {
let partContext = PartContext(part: part)
self.partContexts[part.index] = partContext
if let streamingContext = self.streamingContext {
partContext.disposable.set((streamingContext.partData(index: part.index)
|> deliverOn(Queue.concurrentDefaultQueue())
|> map { data -> (file: TempBoxFile, info: FFMpegMediaInfo)? in
guard let data else {
return nil
}
let tempFile = TempBox.shared.tempFile(fileName: "part.mp4")
if let _ = try? data.write(to: URL(fileURLWithPath: tempFile.path), options: .atomic) {
if let info = extractFFMpegMediaInfo(path: tempFile.path) {
return (tempFile, info)
} else {
return nil
}
} else {
TempBox.shared.dispose(tempFile)
return nil
}
}
|> deliverOnMainQueue).startStrict(next: { [weak self, weak partContext] fileAndInfo in
guard let self, let partContext else {
return
}
if let (file, info) = fileAndInfo {
partContext.data = file
partContext.info = info
} else {
partContext.data = nil
}
self.updatePartContexts()
}))
}
}
}
var removedPartIds: [Int] = []
for (id, _) in self.partContexts {
if !validPartIds.contains(id) {
removedPartIds.append(id)
}
}
for id in removedPartIds {
self.partContexts.removeValue(forKey: id)
}
}
private func updatePartContexts() {
var readyParts: [ChunkMediaPlayerPart] = []
let sortedContexts = self.partContexts.values.sorted(by: { $0.part.timestamp < $1.part.timestamp })
outer: for i in 0 ..< sortedContexts.count {
let partContext = sortedContexts[i]
if let data = partContext.data {
let offsetTime: Double
if i != 0 {
var foundOffset: Double?
inner: for j in 0 ..< i {
let previousContext = sortedContexts[j]
if previousContext.part.index == partContext.part.index - 1 {
if let previousInfo = previousContext.info {
if let previousResolvedOffset = previousContext.resolvedTimeOffset {
if let audio = previousInfo.audio {
foundOffset = previousResolvedOffset + audio.duration.seconds
} else {
foundOffset = partContext.part.timestamp
}
}
}
break inner
}
}
if let foundOffset {
partContext.resolvedTimeOffset = foundOffset
offsetTime = foundOffset
} else {
continue outer
}
} else {
if let resolvedOffset = partContext.resolvedTimeOffset {
offsetTime = resolvedOffset
} else {
offsetTime = partContext.part.timestamp
partContext.resolvedTimeOffset = offsetTime
}
}
readyParts.append(ChunkMediaPlayerPart(
startTime: partContext.part.timestamp,
endTime: partContext.part.timestamp + partContext.part.duration,
content: ChunkMediaPlayerPart.TempFile(file: data),
codecName: nil,
offsetTime: offsetTime
))
}
}
readyParts.sort(by: { $0.startTime < $1.startTime })
self.parts = readyParts
self.updatePlaybackPositionIfNeeded()
}
private func updatePlaybackPositionIfNeeded() {
if let part = self.parts.first {
if let playerStatus = self.playerStatus, playerStatus.timestamp < part.startTime {
if self.requestedSeekTimestamp != part.startTime {
self.requestedSeekTimestamp = part.startTime
self.player.seek(timestamp: part.startTime, play: true)
}
}
}
}
public func update(size: CGSize, transition: ContainedViewLayoutTransition) {
//transition.updateFrame(view: self.playerNode.view, frame: CGRect(origin: CGPoint(), size: size))
self.playerNode.frame = CGRect(origin: CGPoint(), size: size)
}
}
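
A toy model of the offset resolution in updatePartContexts above (simplified, with hypothetical type and function names): a part inherits the previous part's resolved offset plus that part's measured audio duration, so drift between nominal one-second segment timestamps and actual encoded durations does not accumulate; a part without a resolvable predecessor falls back to its nominal playlist timestamp.

struct PartSummary {
    let index: Int
    let nominalTimestamp: Double
    let audioDuration: Double? // from FFMpegMediaInfo, when available
}

func resolveOffsets(_ parts: [PartSummary]) -> [Int: Double] {
    var resolved: [Int: Double] = [:]
    for part in parts.sorted(by: { $0.nominalTimestamp < $1.nominalTimestamp }) {
        if let previous = parts.first(where: { $0.index == part.index - 1 }),
           let previousOffset = resolved[previous.index],
           let previousAudioDuration = previous.audioDuration {
            // Chain: this part starts where the previous part's audio ended.
            resolved[part.index] = previousOffset + previousAudioDuration
        } else {
            // First part in the window (or a gap): use the nominal timestamp.
            resolved[part.index] = part.nominalTimestamp
        }
    }
    return resolved
}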

View File

@@ -11,6 +11,7 @@ import SwiftSignalKit
import AvatarNode
import Postbox
import TelegramVoip
import ComponentDisplayAdapters
final class MediaStreamVideoComponent: Component {
let call: PresentationGroupCallImpl
@@ -157,7 +158,7 @@ final class MediaStreamVideoComponent: Component {
private var lastPresentation: UIView?
private var pipTrackDisplayLink: CADisplayLink?
private var livePlayerView: ProxyVideoView?
private var livestreamVideoView: LivestreamVideoViewV1?
override init(frame: CGRect) {
self.blurTintView = UIView()
@@ -500,9 +501,9 @@ final class MediaStreamVideoComponent: Component {
var isVideoVisible = component.isVisible
if !wasVisible && component.isVisible {
if !self.wasVisible && component.isVisible {
videoView.layer.animateAlpha(from: 0, to: 1, duration: 0.2)
} else if wasVisible && !component.isVisible {
} else if self.wasVisible && !component.isVisible {
videoView.layer.animateAlpha(from: 1, to: 0, duration: 0.2)
}
@@ -522,7 +523,6 @@ final class MediaStreamVideoComponent: Component {
videoFrameUpdateTransition.setFrame(view: videoView, frame: newVideoFrame, completion: nil)
if let videoBlurView = self.videoBlurView {
videoBlurView.updateIsEnabled(component.isVisible)
if component.isFullscreen {
videoFrameUpdateTransition.setFrame(view: videoBlurView, frame: CGRect(
@@ -545,16 +545,16 @@ final class MediaStreamVideoComponent: Component {
videoFrameUpdateTransition.setFrame(layer: self.videoBlurSolidMask, frame: self.videoBlurGradientMask.bounds)
}
if self.livePlayerView == nil {
let livePlayerView = ProxyVideoView(context: component.call.accountContext, call: component.call)
self.livePlayerView = livePlayerView
livePlayerView.layer.masksToBounds = true
self.addSubview(livePlayerView)
livePlayerView.frame = newVideoFrame
livePlayerView.layer.cornerRadius = videoCornerRadius
livePlayerView.update(size: newVideoFrame.size)
if component.call.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 && self.livestreamVideoView == nil {
let livestreamVideoView = LivestreamVideoViewV1(context: component.call.accountContext, audioSessionManager: component.call.accountContext.sharedContext.mediaManager.audioSession, call: component.call)
self.livestreamVideoView = livestreamVideoView
livestreamVideoView.layer.masksToBounds = true
self.addSubview(livestreamVideoView)
livestreamVideoView.frame = newVideoFrame
livestreamVideoView.layer.cornerRadius = videoCornerRadius
livestreamVideoView.update(size: newVideoFrame.size, transition: .immediate)
var pictureInPictureController: AVPictureInPictureController? = nil
/*var pictureInPictureController: AVPictureInPictureController? = nil
if #available(iOS 15.0, *) {
pictureInPictureController = AVPictureInPictureController(contentSource: AVPictureInPictureController.ContentSource(playerLayer: livePlayerView.playerLayer))
pictureInPictureController?.playerLayer.masksToBounds = false
@@ -570,12 +570,14 @@ final class MediaStreamVideoComponent: Component {
if #available(iOS 14.0, *) {
pictureInPictureController?.requiresLinearPlayback = true
}
self.pictureInPictureController = pictureInPictureController
self.pictureInPictureController = pictureInPictureController*/
}
if let livePlayerView = self.livePlayerView {
videoFrameUpdateTransition.setFrame(view: livePlayerView, frame: newVideoFrame, completion: nil)
videoFrameUpdateTransition.setCornerRadius(layer: livePlayerView.layer, cornerRadius: videoCornerRadius)
livePlayerView.update(size: newVideoFrame.size)
if let livestreamVideoView = self.livestreamVideoView {
videoFrameUpdateTransition.setFrame(view: livestreamVideoView, frame: newVideoFrame, completion: nil)
videoFrameUpdateTransition.setCornerRadius(layer: livestreamVideoView.layer, cornerRadius: videoCornerRadius)
livestreamVideoView.update(size: newVideoFrame.size, transition: transition.containedViewLayoutTransition)
videoView.isHidden = true
}
} else {
videoSize = CGSize(width: 16 / 9 * 100.0, height: 100.0).aspectFitted(.init(width: availableSize.width - videoInset * 2, height: availableSize.height))
@@ -583,30 +585,33 @@ final class MediaStreamVideoComponent: Component {
let loadingBlurViewFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) / 2.0), y: floor((availableSize.height - videoSize.height) / 2.0)), size: videoSize)
if loadingBlurView.frame == .zero {
loadingBlurView.frame = loadingBlurViewFrame
if self.loadingBlurView.frame == .zero {
self.loadingBlurView.frame = loadingBlurViewFrame
} else {
// Using ComponentTransition.setFrame on UIVisualEffectView causes instant update of sublayers
switch videoFrameUpdateTransition.animation {
case let .curve(duration, curve):
UIView.animate(withDuration: duration, delay: 0, options: curve.containedViewLayoutTransitionCurve.viewAnimationOptions, animations: { [self] in
loadingBlurView.frame = loadingBlurViewFrame
UIView.animate(withDuration: duration, delay: 0, options: curve.containedViewLayoutTransitionCurve.viewAnimationOptions, animations: { [weak self] in
guard let self else {
return
}
self.loadingBlurView.frame = loadingBlurViewFrame
})
default:
loadingBlurView.frame = loadingBlurViewFrame
self.loadingBlurView.frame = loadingBlurViewFrame
}
}
videoFrameUpdateTransition.setCornerRadius(layer: loadingBlurView.layer, cornerRadius: videoCornerRadius)
videoFrameUpdateTransition.setFrame(view: placeholderView, frame: loadingBlurViewFrame)
videoFrameUpdateTransition.setCornerRadius(layer: placeholderView.layer, cornerRadius: videoCornerRadius)
placeholderView.clipsToBounds = true
placeholderView.subviews.forEach {
videoFrameUpdateTransition.setFrame(view: $0, frame: placeholderView.bounds)
videoFrameUpdateTransition.setCornerRadius(layer: self.loadingBlurView.layer, cornerRadius: videoCornerRadius)
videoFrameUpdateTransition.setFrame(view: self.placeholderView, frame: loadingBlurViewFrame)
videoFrameUpdateTransition.setCornerRadius(layer: self.placeholderView.layer, cornerRadius: videoCornerRadius)
self.placeholderView.clipsToBounds = true
self.placeholderView.subviews.forEach {
videoFrameUpdateTransition.setFrame(view: $0, frame: self.placeholderView.bounds)
}
let initialShimmerBounds = shimmerBorderLayer.bounds
videoFrameUpdateTransition.setFrame(layer: shimmerBorderLayer, frame: loadingBlurView.bounds)
let initialShimmerBounds = self.shimmerBorderLayer.bounds
videoFrameUpdateTransition.setFrame(layer: self.shimmerBorderLayer, frame: loadingBlurView.bounds)
let borderMask = CAShapeLayer()
let initialPath = CGPath(roundedRect: .init(x: 0, y: 0, width: initialShimmerBounds.width, height: initialShimmerBounds.height), cornerWidth: videoCornerRadius, cornerHeight: videoCornerRadius, transform: nil)
@@ -617,11 +622,10 @@ final class MediaStreamVideoComponent: Component {
borderMask.fillColor = UIColor.white.withAlphaComponent(0.4).cgColor
borderMask.strokeColor = UIColor.white.withAlphaComponent(0.7).cgColor
borderMask.lineWidth = 3
shimmerBorderLayer.mask = borderMask
shimmerBorderLayer.cornerRadius = videoCornerRadius
if !self.hadVideo {
self.shimmerBorderLayer.mask = borderMask
self.shimmerBorderLayer.cornerRadius = videoCornerRadius
if !self.hadVideo && !component.call.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
if self.noSignalTimer == nil {
if #available(iOS 10.0, *) {
let noSignalTimer = Timer(timeInterval: 20.0, repeats: false, block: { [weak self] _ in

View File

@@ -261,7 +261,7 @@ private extension PresentationGroupCallState {
private enum CurrentImpl {
case call(OngoingGroupCallContext)
case mediaStream(WrappedMediaStreamingContext)
case externalMediaStream(ExternalMediaStreamingContext)
case externalMediaStream(DirectMediaStreamingContext)
}
private extension CurrentImpl {
@@ -627,7 +627,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var currentConnectionMode: OngoingGroupCallContext.ConnectionMode = .none
private var didInitializeConnectionMode: Bool = false
let externalMediaStream = Promise<ExternalMediaStreamingContext>()
let externalMediaStream = Promise<DirectMediaStreamingContext>()
private var screencastCallContext: OngoingGroupCallContext?
private var screencastBufferServerContext: IpcGroupCallBufferAppContext?
@@ -922,7 +922,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.encryptionKey = encryptionKey
self.sharedAudioDevice = sharedAudioDevice
if self.sharedAudioDevice == nil {
if self.sharedAudioDevice == nil && !accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
var didReceiveAudioOutputs = false
if !audioSession.getIsHeadsetPluggedIn() {
@@ -1639,7 +1639,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.internalState = internalState
self.internalStatePromise.set(.single(internalState))
if let audioSessionControl = audioSessionControl, previousControl == nil {
if !self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2, let audioSessionControl = audioSessionControl, previousControl == nil {
if self.isStream {
audioSessionControl.setOutputMode(.system)
} else {
@@ -1693,7 +1693,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
genericCallContext = current
} else {
if self.isStream, self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
let externalMediaStream = ExternalMediaStreamingContext(id: self.internalId, rejoinNeeded: { [weak self] in
let externalMediaStream = DirectMediaStreamingContext(id: self.internalId, rejoinNeeded: { [weak self] in
Queue.mainQueue().async {
guard let strongSelf = self else {
return

View File

@@ -1093,7 +1093,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.playerNode.frame = CGRect(origin: CGPoint(), size: self.intrinsicDimensions)
/*var didProcessFramesToDisplay = false
var didProcessFramesToDisplay = false
self.playerNode.isHidden = true
self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard let self, !didProcessFramesToDisplay else {
@@ -1101,7 +1101,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
didProcessFramesToDisplay = true
self.playerNode.isHidden = false
}*/
}
let thumbnailVideoReference = HLSVideoContent.minimizedHLSQuality(file: fileReference, codecConfiguration: self.codecConfiguration)?.file ?? fileReference
@@ -1834,7 +1834,8 @@ private final class SourceBuffer {
startTime: fragmentInfo.startTime.seconds,
endTime: fragmentInfo.startTime.seconds + fragmentInfo.duration.seconds,
content: ChunkMediaPlayerPart.TempFile(file: tempFile),
codecName: videoCodecName
codecName: videoCodecName,
offsetTime: 0.0
)
self.items.append(item)
self.updateRanges()

View File

@@ -277,7 +277,7 @@ private enum PlayerImpl {
}
}
extension ChunkMediaPlayerV2.MediaDataReaderParams {
public extension ChunkMediaPlayerV2.MediaDataReaderParams {
init(context: AccountContext) {
var useV2Reader = true
if let data = context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_video_v2_reader"] as? Double {

View File

@@ -342,6 +342,177 @@ public final class ExternalMediaStreamingContext: SharedHLSServerSource {
}
}
public final class DirectMediaStreamingContext {
public struct Playlist: Equatable {
public struct Part: Equatable {
public let index: Int
public let timestamp: Double
public let duration: Double
public init(index: Int, timestamp: Double, duration: Double) {
self.index = index
self.timestamp = timestamp
self.duration = duration
}
}
public var parts: [Part]
public init(parts: [Part]) {
self.parts = parts
}
}
private final class Impl {
let queue: Queue
private var broadcastPartsSource: BroadcastPartSource?
private let resetPlaylistDisposable = MetaDisposable()
private let updatePlaylistDisposable = MetaDisposable()
let playlistData = Promise<Playlist>()
init(queue: Queue, rejoinNeeded: @escaping () -> Void) {
self.queue = queue
}
deinit {
self.resetPlaylistDisposable.dispose()
self.updatePlaylistDisposable.dispose()
}
func setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData?) {
if let audioStreamData {
let broadcastPartsSource = NetworkBroadcastPartSource(queue: self.queue, engine: audioStreamData.engine, callId: audioStreamData.callId, accessHash: audioStreamData.accessHash, isExternalStream: audioStreamData.isExternalStream)
self.broadcastPartsSource = broadcastPartsSource
self.updatePlaylistDisposable.set(nil)
let queue = self.queue
self.resetPlaylistDisposable.set(broadcastPartsSource.requestTime(completion: { [weak self] timestamp in
queue.async {
guard let self else {
return
}
let segmentDuration: Int64 = 1000
var adjustedTimestamp: Int64 = 0
if timestamp > 0 {
adjustedTimestamp = timestamp / segmentDuration * segmentDuration - 4 * segmentDuration
}
if adjustedTimestamp > 0 {
self.beginUpdatingPlaylist(initialHeadTimestamp: adjustedTimestamp)
}
}
}))
}
}
private func beginUpdatingPlaylist(initialHeadTimestamp: Int64) {
let segmentDuration: Int64 = 1000
var timestamp = initialHeadTimestamp
self.updatePlaylist(headTimestamp: timestamp)
self.updatePlaylistDisposable.set((
Signal<Void, NoError>.single(Void())
|> delay(1.0, queue: self.queue)
|> restart
|> deliverOn(self.queue)
).start(next: { [weak self] _ in
guard let self else {
return
}
timestamp += segmentDuration
self.updatePlaylist(headTimestamp: timestamp)
}))
}
private func updatePlaylist(headTimestamp: Int64) {
let segmentDuration: Int64 = 1000
let headIndex = headTimestamp / segmentDuration
let minIndex = headIndex - 20
var parts: [Playlist.Part] = []
for index in minIndex ... headIndex {
parts.append(DirectMediaStreamingContext.Playlist.Part(
index: Int(index),
timestamp: Double(index),
duration: 1.0
))
}
self.playlistData.set(.single(Playlist(parts: parts)))
}
func partData(index: Int) -> Signal<Data?, NoError> {
let segmentDuration: Int64 = 1000
let timestamp = Int64(index) * segmentDuration
//print("Player: request part(q: \(quality)) \(index) -> \(timestamp)")
guard let broadcastPartsSource = self.broadcastPartsSource else {
return .single(nil)
}
return Signal { subscriber in
return broadcastPartsSource.requestPart(
timestampMilliseconds: timestamp,
durationMilliseconds: segmentDuration,
subject: .video(channelId: 1, quality: .full),
completion: { part in
var data = part.oggData
if data.count > 32 {
data = data.subdata(in: 32 ..< data.count)
}
subscriber.putNext(data)
},
rejoinNeeded: {
//TODO
}
)
}
}
}
private let queue = Queue()
let internalId: CallSessionInternalId
private let impl: QueueLocalObject<Impl>
private var hlsServerDisposable: Disposable?
public init(id: CallSessionInternalId, rejoinNeeded: @escaping () -> Void) {
self.internalId = id
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, rejoinNeeded: rejoinNeeded)
})
}
deinit {
}
public func setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData?) {
self.impl.with { impl in
impl.setAudioStreamData(audioStreamData: audioStreamData)
}
}
public func playlistData() -> Signal<Playlist, NoError> {
return self.impl.signalWith { impl, subscriber in
impl.playlistData.get().start(next: subscriber.putNext)
}
}
public func partData(index: Int) -> Signal<Data?, NoError> {
return self.impl.signalWith { impl, subscriber in
impl.partData(index: index).start(next: subscriber.putNext)
}
}
}
public protocol SharedHLSServerSource: AnyObject {
var id: String { get }
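
For reference, the head alignment in setAudioStreamData rounds the server time down to a whole segment and steps back four segments, and updatePlaylist then publishes a trailing window of 21 one-second parts. A worked example of that arithmetic (the server timestamp value is assumed for illustration):

// Worked example of the head alignment above (all values in milliseconds).
let segmentDuration: Int64 = 1000
let serverTimestamp: Int64 = 1_735_632_182_345
let adjusted = serverTimestamp / segmentDuration * segmentDuration - 4 * segmentDuration
// 1735632182345 -> 1735632182000 (segment boundary) -> 1735632178000 (4 s behind live)

// updatePlaylist then derives a trailing window of 21 one-second parts:
let headIndex = adjusted / segmentDuration // 1735632178
let indices = (headIndex - 20) ... headIndex // 21 segment indices, oldest first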