[WIP] Livestream V2

Isaac 2024-12-31 14:03:02 +08:00
parent 45fa1b5ddb
commit ad0188f0ed
9 changed files with 532 additions and 61 deletions

View File

@@ -103,7 +103,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
    case playlistPlayback(Bool)
    case enableQuickReactionSwitch(Bool)
    case disableReloginTokens(Bool)
-   case disableCallV2(Bool)
+   case liveStreamV2(Bool)
    case experimentalCallMute(Bool)
    case conferenceCalls(Bool)
    case playerV2(Bool)
@@ -133,7 +133,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
            return DebugControllerSection.web.rawValue
        case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
            return DebugControllerSection.experiments.rawValue
-       case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .disableCallV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation:
+       case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .conferenceCalls, .playerV2, .benchmarkReflectors, .enableLocalTranslation:
            return DebugControllerSection.experiments.rawValue
        case .logTranslationRecognition, .resetTranslationStates:
            return DebugControllerSection.translation.rawValue
@@ -246,7 +246,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
            return 49
        case .enableQuickReactionSwitch:
            return 50
-       case .disableCallV2:
+       case .liveStreamV2:
            return 51
        case .experimentalCallMute:
            return 52
@@ -1328,12 +1328,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
                    })
                }).start()
            })
-       case let .disableCallV2(value):
-           return ItemListSwitchItem(presentationData: presentationData, title: "Disable Video Chat V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
+       case let .liveStreamV2(value):
+           return ItemListSwitchItem(presentationData: presentationData, title: "Live Stream V2", value: value, sectionId: self.section, style: .blocks, updated: { value in
                let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
                    transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
                        var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
-                       settings.disableCallV2 = value
+                       settings.liveStreamV2 = value
                        return PreferencesEntry(settings)
                    })
                }).start()
@@ -1587,7 +1587,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
    }
    entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
    entries.append(.enableQuickReactionSwitch(!experimentalSettings.disableQuickReaction))
-   entries.append(.disableCallV2(experimentalSettings.disableCallV2))
+   entries.append(.liveStreamV2(experimentalSettings.liveStreamV2))
    entries.append(.experimentalCallMute(experimentalSettings.experimentalCallMute))
    entries.append(.conferenceCalls(experimentalSettings.conferenceCalls))

View File

@@ -34,7 +34,7 @@ public final class ChunkMediaPlayerPart {
        }
        deinit {
-           TempBox.shared.dispose(self.file)
+           //TempBox.shared.dispose(self.file)
        }
    }
@@ -43,17 +43,19 @@ public final class ChunkMediaPlayerPart {
    public let content: TempFile
    public let clippedStartTime: Double?
    public let codecName: String?
+   public let offsetTime: Double
    public var id: Id {
        return .tempFile(path: self.content.file.path)
    }
-   public init(startTime: Double, clippedStartTime: Double? = nil, endTime: Double, content: TempFile, codecName: String?) {
+   public init(startTime: Double, clippedStartTime: Double? = nil, endTime: Double, content: TempFile, codecName: String?, offsetTime: Double) {
        self.startTime = startTime
        self.clippedStartTime = clippedStartTime
        self.endTime = endTime
        self.content = content
        self.codecName = codecName
+       self.offsetTime = offsetTime
    }
}

View File

@@ -77,19 +77,21 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
        let content: Content
        let mediaType: AVMediaType
        let codecName: String?
+       let offset: Double
        private(set) var reader: MediaDataReader?
        var didBeginReading: Bool = false
        var isFinished: Bool = false
-       init(queue: Queue, content: Content, mediaType: AVMediaType, codecName: String?) {
+       init(queue: Queue, content: Content, mediaType: AVMediaType, codecName: String?, offset: Double) {
            assert(queue.isCurrent())
            self.queue = queue
            self.content = content
            self.mediaType = mediaType
            self.codecName = codecName
+           self.offset = offset
        }
        deinit {
@@ -425,7 +427,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
                endTime: part.endTime,
                content: part.content,
-               codecName: part.codecName
+               codecName: part.codecName,
+               offsetTime: part.offsetTime
            ))
            minStartTime = max(minStartTime, partEndTime)
        }
@@ -447,7 +450,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                clippedStartTime: partStartTime == part.startTime ? nil : partStartTime,
                endTime: part.endTime,
                content: part.content,
-               codecName: part.codecName
+               codecName: part.codecName,
+               offsetTime: part.offsetTime
            ))
            minStartTime = max(minStartTime, partEndTime)
            break
@@ -519,7 +523,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                queue: dataQueue,
                content: .tempFile(part.part.content),
                mediaType: .audio,
-               codecName: part.part.codecName
+               codecName: part.part.codecName,
+               offset: part.part.offsetTime
            )
            cleanAudio.load(params: mediaDataReaderParams)
@@ -533,7 +538,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                queue: dataQueue,
                content: .tempFile(part.part.content),
                mediaType: .video,
-               codecName: part.part.codecName
+               codecName: part.part.codecName,
+               offset: part.part.offsetTime
            )
            video.load(params: mediaDataReaderParams)
@@ -541,7 +547,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                queue: dataQueue,
                content: .tempFile(part.part.content),
                mediaType: .audio,
-               codecName: part.part.codecName
+               codecName: part.part.codecName,
+               offset: part.part.offsetTime
            )
            audio.load(params: mediaDataReaderParams)
@@ -622,7 +629,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                queue: dataQueue,
                content: .directStream(media),
                mediaType: .video,
-               codecName: media.codecName
+               codecName: media.codecName,
+               offset: 0.0
            )
        }
        video?.load(params: mediaDataReaderParams)
@@ -632,7 +640,8 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
                queue: dataQueue,
                content: .directStream(media),
                mediaType: .audio,
-               codecName: media.codecName
+               codecName: media.codecName,
+               offset: 0.0
            )
        }
        audio?.load(params: mediaDataReaderParams)
@@ -973,6 +982,11 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
            switch reader.readSampleBuffer() {
            case let .frame(sampleBuffer):
                var sampleBuffer = sampleBuffer
+               if media.offset != 0.0 {
+                   if let updatedSampleBuffer = createSampleBuffer(fromSampleBuffer: sampleBuffer, withTimeOffset: CMTimeMakeWithSeconds(Float64(media.offset), preferredTimescale: CMSampleBufferGetPresentationTimeStamp(sampleBuffer).timescale), duration: nil) {
+                       sampleBuffer = updatedSampleBuffer
+                   }
+               }
                if let seekFromMinTimestamp = loadedPartsMediaData.seekFromMinTimestamp, CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds < seekFromMinTimestamp {
                    if isVideo {
                        var updatedSampleBuffer: CMSampleBuffer?
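
Note on the retiming call above: createSampleBuffer(fromSampleBuffer:withTimeOffset:duration:) is not part of this diff and is assumed to exist elsewhere in the codebase. A minimal sketch of a helper with that shape, built on Core Media's standard retiming APIs (an illustration under that assumption, not the commit's actual implementation):

import CoreMedia

func createSampleBuffer(fromSampleBuffer sampleBuffer: CMSampleBuffer, withTimeOffset timeOffset: CMTime, duration: CMTime?) -> CMSampleBuffer? {
    // Copy the timing entries for every sample in the buffer.
    var itemCount: CMItemCount = 0
    guard CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: 0, arrayToFill: nil, entriesNeededOut: &itemCount) == noErr else {
        return nil
    }
    var timingInfo = [CMSampleTimingInfo](repeating: CMSampleTimingInfo(), count: itemCount)
    guard CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, entryCount: itemCount, arrayToFill: &timingInfo, entriesNeededOut: &itemCount) == noErr else {
        return nil
    }
    // Shift presentation (and, when present, decode) timestamps by the offset.
    for i in 0 ..< timingInfo.count {
        timingInfo[i].presentationTimeStamp = CMTimeAdd(timingInfo[i].presentationTimeStamp, timeOffset)
        if timingInfo[i].decodeTimeStamp.isValid {
            timingInfo[i].decodeTimeStamp = CMTimeAdd(timingInfo[i].decodeTimeStamp, timeOffset)
        }
        if let duration {
            timingInfo[i].duration = duration
        }
    }
    // Produce a copy of the buffer carrying the shifted timing.
    var updatedBuffer: CMSampleBuffer?
    CMSampleBufferCreateCopyWithNewTiming(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleTimingEntryCount: timingInfo.count, sampleTimingArray: &timingInfo, sampleBufferOut: &updatedBuffer)
    return updatedBuffer
}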

View File

@@ -0,0 +1,279 @@
import Foundation
import UIKit
import AVFoundation
import SwiftSignalKit
import UniversalMediaPlayer
import Postbox
import TelegramCore
import AccountContext
import TelegramAudio
import Display
import TelegramVoip
import RangeSet
import ManagedFile
import FFMpegBinding
import TelegramUniversalVideoContent

final class LivestreamVideoViewV1: UIView {
    private final class PartContext {
        let part: DirectMediaStreamingContext.Playlist.Part
        let disposable = MetaDisposable()
        var resolvedTimeOffset: Double?
        var data: TempBoxFile?
        var info: FFMpegMediaInfo?

        init(part: DirectMediaStreamingContext.Playlist.Part) {
            self.part = part
        }

        deinit {
            self.disposable.dispose()
        }
    }

    private let context: AccountContext
    private let audioSessionManager: ManagedAudioSession
    private let call: PresentationGroupCall

    private let chunkPlayerPartsState = Promise<ChunkMediaPlayerPartsState>(ChunkMediaPlayerPartsState(duration: 10000000.0, content: .parts([])))
    private var parts: [ChunkMediaPlayerPart] = [] {
        didSet {
            self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: 10000000.0, content: .parts(self.parts))))
        }
    }

    private let player: ChunkMediaPlayer
    private let playerNode: MediaPlayerNode
    private var playerStatus: MediaPlayerStatus?
    private var playerStatusDisposable: Disposable?

    private var streamingContextDisposable: Disposable?
    private var streamingContext: DirectMediaStreamingContext?
    private var playlistDisposable: Disposable?

    private var partContexts: [Int: PartContext] = [:]

    private var requestedSeekTimestamp: Double?

    init(
        context: AccountContext,
        audioSessionManager: ManagedAudioSession,
        call: PresentationGroupCall
    ) {
        self.context = context
        self.audioSessionManager = audioSessionManager
        self.call = call

        self.playerNode = MediaPlayerNode()

        var onSeeked: (() -> Void)?
        self.player = ChunkMediaPlayerV2(
            params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
            audioSessionManager: audioSessionManager,
            source: .externalParts(self.chunkPlayerPartsState.get()),
            video: true,
            enableSound: true,
            baseRate: 1.0,
            onSeeked: {
                onSeeked?()
            },
            playerNode: self.playerNode
        )

        super.init(frame: CGRect())

        self.addSubview(self.playerNode.view)

        onSeeked = {
        }

        self.playerStatusDisposable = (self.player.status
        |> deliverOnMainQueue).startStrict(next: { [weak self] status in
            guard let self else {
                return
            }
            self.updatePlayerStatus(status: status)
        })

        var didProcessFramesToDisplay = false
        self.playerNode.isHidden = true
        self.playerNode.hasSentFramesToDisplay = { [weak self] in
            guard let self, !didProcessFramesToDisplay else {
                return
            }
            didProcessFramesToDisplay = true
            self.playerNode.isHidden = false
        }

        if let call = call as? PresentationGroupCallImpl {
            self.streamingContextDisposable = (call.externalMediaStream.get()
            |> deliverOnMainQueue).startStrict(next: { [weak self] externalMediaStream in
                guard let self else {
                    return
                }
                self.streamingContext = externalMediaStream
                self.resetPlayback()
            })
        }
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        self.playerStatusDisposable?.dispose()
        self.streamingContextDisposable?.dispose()
        self.playlistDisposable?.dispose()
    }

    private func updatePlayerStatus(status: MediaPlayerStatus) {
        self.playerStatus = status
        self.updatePlaybackPositionIfNeeded()
    }

    private func resetPlayback() {
        self.parts = []
        self.playlistDisposable?.dispose()
        self.playlistDisposable = nil

        guard let streamingContext = self.streamingContext else {
            return
        }

        self.playlistDisposable = (streamingContext.playlistData()
        |> deliverOnMainQueue).startStrict(next: { [weak self] playlist in
            guard let self else {
                return
            }
            self.updatePlaylist(playlist: playlist)
        })
    }

    private func updatePlaylist(playlist: DirectMediaStreamingContext.Playlist) {
        var validPartIds: [Int] = []
        for part in playlist.parts.prefix(upTo: 4) {
            validPartIds.append(part.index)
            if self.partContexts[part.index] == nil {
                let partContext = PartContext(part: part)
                self.partContexts[part.index] = partContext

                if let streamingContext = self.streamingContext {
                    partContext.disposable.set((streamingContext.partData(index: part.index)
                    |> deliverOn(Queue.concurrentDefaultQueue())
                    |> map { data -> (file: TempBoxFile, info: FFMpegMediaInfo)? in
                        guard let data else {
                            return nil
                        }
                        let tempFile = TempBox.shared.tempFile(fileName: "part.mp4")
                        if let _ = try? data.write(to: URL(fileURLWithPath: tempFile.path), options: .atomic) {
                            if let info = extractFFMpegMediaInfo(path: tempFile.path) {
                                return (tempFile, info)
                            } else {
                                return nil
                            }
                        } else {
                            TempBox.shared.dispose(tempFile)
                            return nil
                        }
                    }
                    |> deliverOnMainQueue).startStrict(next: { [weak self, weak partContext] fileAndInfo in
                        guard let self, let partContext else {
                            return
                        }
                        if let (file, info) = fileAndInfo {
                            partContext.data = file
                            partContext.info = info
                        } else {
                            partContext.data = nil
                        }
                        self.updatePartContexts()
                    }))
                }
            }
        }

        var removedPartIds: [Int] = []
        for (id, _) in self.partContexts {
            if !validPartIds.contains(id) {
                removedPartIds.append(id)
            }
        }
        for id in removedPartIds {
            self.partContexts.removeValue(forKey: id)
        }
    }

    private func updatePartContexts() {
        var readyParts: [ChunkMediaPlayerPart] = []
        let sortedContexts = self.partContexts.values.sorted(by: { $0.part.timestamp < $1.part.timestamp })
        outer: for i in 0 ..< sortedContexts.count {
            let partContext = sortedContexts[i]
            if let data = partContext.data {
                let offsetTime: Double
                if i != 0 {
                    var foundOffset: Double?
                    inner: for j in 0 ..< i {
                        let previousContext = sortedContexts[j]
                        if previousContext.part.index == partContext.part.index - 1 {
                            if let previousInfo = previousContext.info {
                                if let previousResolvedOffset = previousContext.resolvedTimeOffset {
                                    if let audio = previousInfo.audio {
                                        foundOffset = previousResolvedOffset + audio.duration.seconds
                                    } else {
                                        foundOffset = partContext.part.timestamp
                                    }
                                }
                            }
                            break inner
                        }
                    }
                    if let foundOffset {
                        partContext.resolvedTimeOffset = foundOffset
                        offsetTime = foundOffset
                    } else {
                        continue outer
                    }
                } else {
                    if let resolvedOffset = partContext.resolvedTimeOffset {
                        offsetTime = resolvedOffset
                    } else {
                        offsetTime = partContext.part.timestamp
                        partContext.resolvedTimeOffset = offsetTime
                    }
                }
                readyParts.append(ChunkMediaPlayerPart(
                    startTime: partContext.part.timestamp,
                    endTime: partContext.part.timestamp + partContext.part.duration,
                    content: ChunkMediaPlayerPart.TempFile(file: data),
                    codecName: nil,
                    offsetTime: offsetTime
                ))
            }
        }
        readyParts.sort(by: { $0.startTime < $1.startTime })
        self.parts = readyParts

        self.updatePlaybackPositionIfNeeded()
    }

    private func updatePlaybackPositionIfNeeded() {
        if let part = self.parts.first {
            if let playerStatus = self.playerStatus, playerStatus.timestamp < part.startTime {
                if self.requestedSeekTimestamp != part.startTime {
                    self.requestedSeekTimestamp = part.startTime
                    self.player.seek(timestamp: part.startTime, play: true)
                }
            }
        }
    }

    public func update(size: CGSize, transition: ContainedViewLayoutTransition) {
        //transition.updateFrame(view: self.playerNode.view, frame: CGRect(origin: CGPoint(), size: size))
        self.playerNode.frame = CGRect(origin: CGPoint(), size: size)
    }
}
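
The resolvedTimeOffset chaining in updatePartContexts() compensates for each downloaded MP4 part carrying media timestamps that restart near zero: the first available part is anchored at its playlist timestamp, and each later part at the previous part's resolved offset plus the previous part's measured audio duration, falling back to the playlist timestamp when no audio track is present. A simplified model of that arithmetic, with hypothetical numbers (not taken from this commit):

// Hypothetical: three consecutive 1 s playlist parts whose measured
// audio durations drift slightly from the nominal duration.
let playlistTimestamps: [Double] = [120.0, 121.0, 122.0]
let measuredAudioDurations: [Double] = [1.02, 0.98, 1.00]

var resolvedOffsets: [Double] = []
for i in 0 ..< playlistTimestamps.count {
    if i == 0 {
        // First part: anchored at its playlist timestamp.
        resolvedOffsets.append(playlistTimestamps[i])
    } else {
        // Later parts: previous offset plus the previous part's measured duration,
        // so consecutive parts stay gapless even when durations drift.
        resolvedOffsets.append(resolvedOffsets[i - 1] + measuredAudioDurations[i - 1])
    }
}
// resolvedOffsets == [120.0, 121.02, 122.0]

The real method additionally defers a part (continue outer) until its predecessor's offset has been resolved.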

View File

@@ -11,6 +11,7 @@ import SwiftSignalKit
import AvatarNode
import Postbox
import TelegramVoip
+import ComponentDisplayAdapters

final class MediaStreamVideoComponent: Component {
    let call: PresentationGroupCallImpl
@@ -157,7 +158,7 @@ final class MediaStreamVideoComponent: Component {
        private var lastPresentation: UIView?
        private var pipTrackDisplayLink: CADisplayLink?
-       private var livePlayerView: ProxyVideoView?
+       private var livestreamVideoView: LivestreamVideoViewV1?

        override init(frame: CGRect) {
            self.blurTintView = UIView()
@@ -500,9 +501,9 @@ final class MediaStreamVideoComponent: Component {
                var isVideoVisible = component.isVisible
-               if !wasVisible && component.isVisible {
+               if !self.wasVisible && component.isVisible {
                    videoView.layer.animateAlpha(from: 0, to: 1, duration: 0.2)
-               } else if wasVisible && !component.isVisible {
+               } else if self.wasVisible && !component.isVisible {
                    videoView.layer.animateAlpha(from: 1, to: 0, duration: 0.2)
                }
@@ -522,7 +523,6 @@ final class MediaStreamVideoComponent: Component {
                videoFrameUpdateTransition.setFrame(view: videoView, frame: newVideoFrame, completion: nil)
                if let videoBlurView = self.videoBlurView {
-                   videoBlurView.updateIsEnabled(component.isVisible)
                    if component.isFullscreen {
                        videoFrameUpdateTransition.setFrame(view: videoBlurView, frame: CGRect(
@@ -545,16 +545,16 @@ final class MediaStreamVideoComponent: Component {
                    videoFrameUpdateTransition.setFrame(layer: self.videoBlurSolidMask, frame: self.videoBlurGradientMask.bounds)
                }
-               if self.livePlayerView == nil {
+               if component.call.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 && self.livestreamVideoView == nil {
-                   let livePlayerView = ProxyVideoView(context: component.call.accountContext, call: component.call)
+                   let livestreamVideoView = LivestreamVideoViewV1(context: component.call.accountContext, audioSessionManager: component.call.accountContext.sharedContext.mediaManager.audioSession, call: component.call)
-                   self.livePlayerView = livePlayerView
+                   self.livestreamVideoView = livestreamVideoView
-                   livePlayerView.layer.masksToBounds = true
+                   livestreamVideoView.layer.masksToBounds = true
-                   self.addSubview(livePlayerView)
+                   self.addSubview(livestreamVideoView)
-                   livePlayerView.frame = newVideoFrame
+                   livestreamVideoView.frame = newVideoFrame
-                   livePlayerView.layer.cornerRadius = videoCornerRadius
+                   livestreamVideoView.layer.cornerRadius = videoCornerRadius
-                   livePlayerView.update(size: newVideoFrame.size)
+                   livestreamVideoView.update(size: newVideoFrame.size, transition: .immediate)
-                   var pictureInPictureController: AVPictureInPictureController? = nil
+                   /*var pictureInPictureController: AVPictureInPictureController? = nil
                    if #available(iOS 15.0, *) {
                        pictureInPictureController = AVPictureInPictureController(contentSource: AVPictureInPictureController.ContentSource(playerLayer: livePlayerView.playerLayer))
                        pictureInPictureController?.playerLayer.masksToBounds = false
@@ -570,12 +570,14 @@ final class MediaStreamVideoComponent: Component {
                    if #available(iOS 14.0, *) {
                        pictureInPictureController?.requiresLinearPlayback = true
                    }
-                   self.pictureInPictureController = pictureInPictureController
+                   self.pictureInPictureController = pictureInPictureController*/
                }
-               if let livePlayerView = self.livePlayerView {
+               if let livestreamVideoView = self.livestreamVideoView {
-                   videoFrameUpdateTransition.setFrame(view: livePlayerView, frame: newVideoFrame, completion: nil)
+                   videoFrameUpdateTransition.setFrame(view: livestreamVideoView, frame: newVideoFrame, completion: nil)
-                   videoFrameUpdateTransition.setCornerRadius(layer: livePlayerView.layer, cornerRadius: videoCornerRadius)
+                   videoFrameUpdateTransition.setCornerRadius(layer: livestreamVideoView.layer, cornerRadius: videoCornerRadius)
-                   livePlayerView.update(size: newVideoFrame.size)
+                   livestreamVideoView.update(size: newVideoFrame.size, transition: transition.containedViewLayoutTransition)
+                   videoView.isHidden = true
                }
            } else {
                videoSize = CGSize(width: 16 / 9 * 100.0, height: 100.0).aspectFitted(.init(width: availableSize.width - videoInset * 2, height: availableSize.height))
@@ -583,30 +585,33 @@ final class MediaStreamVideoComponent: Component {
                let loadingBlurViewFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) / 2.0), y: floor((availableSize.height - videoSize.height) / 2.0)), size: videoSize)
-               if loadingBlurView.frame == .zero {
+               if self.loadingBlurView.frame == .zero {
-                   loadingBlurView.frame = loadingBlurViewFrame
+                   self.loadingBlurView.frame = loadingBlurViewFrame
                } else {
                    // Using ComponentTransition.setFrame on UIVisualEffectView causes instant update of sublayers
                    switch videoFrameUpdateTransition.animation {
                    case let .curve(duration, curve):
-                       UIView.animate(withDuration: duration, delay: 0, options: curve.containedViewLayoutTransitionCurve.viewAnimationOptions, animations: { [self] in
+                       UIView.animate(withDuration: duration, delay: 0, options: curve.containedViewLayoutTransitionCurve.viewAnimationOptions, animations: { [weak self] in
-                           loadingBlurView.frame = loadingBlurViewFrame
+                           guard let self else {
+                               return
+                           }
+                           self.loadingBlurView.frame = loadingBlurViewFrame
                        })
                    default:
-                       loadingBlurView.frame = loadingBlurViewFrame
+                       self.loadingBlurView.frame = loadingBlurViewFrame
                    }
                }
-               videoFrameUpdateTransition.setCornerRadius(layer: loadingBlurView.layer, cornerRadius: videoCornerRadius)
+               videoFrameUpdateTransition.setCornerRadius(layer: self.loadingBlurView.layer, cornerRadius: videoCornerRadius)
-               videoFrameUpdateTransition.setFrame(view: placeholderView, frame: loadingBlurViewFrame)
+               videoFrameUpdateTransition.setFrame(view: self.placeholderView, frame: loadingBlurViewFrame)
-               videoFrameUpdateTransition.setCornerRadius(layer: placeholderView.layer, cornerRadius: videoCornerRadius)
+               videoFrameUpdateTransition.setCornerRadius(layer: self.placeholderView.layer, cornerRadius: videoCornerRadius)
-               placeholderView.clipsToBounds = true
+               self.placeholderView.clipsToBounds = true
-               placeholderView.subviews.forEach {
+               self.placeholderView.subviews.forEach {
-                   videoFrameUpdateTransition.setFrame(view: $0, frame: placeholderView.bounds)
+                   videoFrameUpdateTransition.setFrame(view: $0, frame: self.placeholderView.bounds)
                }
-               let initialShimmerBounds = shimmerBorderLayer.bounds
+               let initialShimmerBounds = self.shimmerBorderLayer.bounds
-               videoFrameUpdateTransition.setFrame(layer: shimmerBorderLayer, frame: loadingBlurView.bounds)
+               videoFrameUpdateTransition.setFrame(layer: self.shimmerBorderLayer, frame: loadingBlurView.bounds)
                let borderMask = CAShapeLayer()
                let initialPath = CGPath(roundedRect: .init(x: 0, y: 0, width: initialShimmerBounds.width, height: initialShimmerBounds.height), cornerWidth: videoCornerRadius, cornerHeight: videoCornerRadius, transform: nil)
@@ -617,11 +622,10 @@ final class MediaStreamVideoComponent: Component {
                borderMask.fillColor = UIColor.white.withAlphaComponent(0.4).cgColor
                borderMask.strokeColor = UIColor.white.withAlphaComponent(0.7).cgColor
                borderMask.lineWidth = 3
-               shimmerBorderLayer.mask = borderMask
+               self.shimmerBorderLayer.mask = borderMask
-               shimmerBorderLayer.cornerRadius = videoCornerRadius
+               self.shimmerBorderLayer.cornerRadius = videoCornerRadius
-               if !self.hadVideo {
+               if !self.hadVideo && !component.call.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
                    if self.noSignalTimer == nil {
                        if #available(iOS 10.0, *) {
                            let noSignalTimer = Timer(timeInterval: 20.0, repeats: false, block: { [weak self] _ in

View File

@@ -261,7 +261,7 @@ private extension PresentationGroupCallState {
private enum CurrentImpl {
    case call(OngoingGroupCallContext)
    case mediaStream(WrappedMediaStreamingContext)
-   case externalMediaStream(ExternalMediaStreamingContext)
+   case externalMediaStream(DirectMediaStreamingContext)
}

private extension CurrentImpl {
@@ -627,7 +627,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
    private var currentConnectionMode: OngoingGroupCallContext.ConnectionMode = .none
    private var didInitializeConnectionMode: Bool = false
-   let externalMediaStream = Promise<ExternalMediaStreamingContext>()
+   let externalMediaStream = Promise<DirectMediaStreamingContext>()
    private var screencastCallContext: OngoingGroupCallContext?
    private var screencastBufferServerContext: IpcGroupCallBufferAppContext?
@@ -922,7 +922,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        self.encryptionKey = encryptionKey
        self.sharedAudioDevice = sharedAudioDevice
-       if self.sharedAudioDevice == nil {
+       if self.sharedAudioDevice == nil && !accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
            var didReceiveAudioOutputs = false
            if !audioSession.getIsHeadsetPluggedIn() {
@@ -1639,7 +1639,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
        self.internalState = internalState
        self.internalStatePromise.set(.single(internalState))
-       if let audioSessionControl = audioSessionControl, previousControl == nil {
+       if !self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2, let audioSessionControl = audioSessionControl, previousControl == nil {
            if self.isStream {
                audioSessionControl.setOutputMode(.system)
            } else {
@@ -1693,7 +1693,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                genericCallContext = current
            } else {
                if self.isStream, self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
-                   let externalMediaStream = ExternalMediaStreamingContext(id: self.internalId, rejoinNeeded: { [weak self] in
+                   let externalMediaStream = DirectMediaStreamingContext(id: self.internalId, rejoinNeeded: { [weak self] in
                        Queue.mainQueue().async {
                            guard let strongSelf = self else {
                                return

View File

@@ -1093,7 +1093,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
        self.playerNode.frame = CGRect(origin: CGPoint(), size: self.intrinsicDimensions)
-       /*var didProcessFramesToDisplay = false
+       var didProcessFramesToDisplay = false
        self.playerNode.isHidden = true
        self.playerNode.hasSentFramesToDisplay = { [weak self] in
            guard let self, !didProcessFramesToDisplay else {
@@ -1101,7 +1101,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
            }
            didProcessFramesToDisplay = true
            self.playerNode.isHidden = false
-       }*/
+       }
        let thumbnailVideoReference = HLSVideoContent.minimizedHLSQuality(file: fileReference, codecConfiguration: self.codecConfiguration)?.file ?? fileReference
@@ -1834,7 +1834,8 @@ private final class SourceBuffer {
            startTime: fragmentInfo.startTime.seconds,
            endTime: fragmentInfo.startTime.seconds + fragmentInfo.duration.seconds,
            content: ChunkMediaPlayerPart.TempFile(file: tempFile),
-           codecName: videoCodecName
+           codecName: videoCodecName,
+           offsetTime: 0.0
        )
        self.items.append(item)
        self.updateRanges()

View File

@@ -277,7 +277,7 @@ private enum PlayerImpl {
    }
}

-extension ChunkMediaPlayerV2.MediaDataReaderParams {
+public extension ChunkMediaPlayerV2.MediaDataReaderParams {
    init(context: AccountContext) {
        var useV2Reader = true
        if let data = context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_video_v2_reader"] as? Double {

View File

@@ -342,6 +342,177 @@ public final class ExternalMediaStreamingContext: SharedHLSServerSource {
    }
}

public final class DirectMediaStreamingContext {
    public struct Playlist: Equatable {
        public struct Part: Equatable {
            public let index: Int
            public let timestamp: Double
            public let duration: Double

            public init(index: Int, timestamp: Double, duration: Double) {
                self.index = index
                self.timestamp = timestamp
                self.duration = duration
            }
        }

        public var parts: [Part]

        public init(parts: [Part]) {
            self.parts = parts
        }
    }

    private final class Impl {
        let queue: Queue

        private var broadcastPartsSource: BroadcastPartSource?

        private let resetPlaylistDisposable = MetaDisposable()
        private let updatePlaylistDisposable = MetaDisposable()

        let playlistData = Promise<Playlist>()

        init(queue: Queue, rejoinNeeded: @escaping () -> Void) {
            self.queue = queue
        }

        deinit {
            self.updatePlaylistDisposable.dispose()
        }

        func setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData?) {
            if let audioStreamData {
                let broadcastPartsSource = NetworkBroadcastPartSource(queue: self.queue, engine: audioStreamData.engine, callId: audioStreamData.callId, accessHash: audioStreamData.accessHash, isExternalStream: audioStreamData.isExternalStream)
                self.broadcastPartsSource = broadcastPartsSource

                self.updatePlaylistDisposable.set(nil)

                let queue = self.queue
                self.resetPlaylistDisposable.set(broadcastPartsSource.requestTime(completion: { [weak self] timestamp in
                    queue.async {
                        guard let self else {
                            return
                        }

                        let segmentDuration: Int64 = 1000
                        var adjustedTimestamp: Int64 = 0
                        if timestamp > 0 {
                            adjustedTimestamp = timestamp / segmentDuration * segmentDuration - 4 * segmentDuration
                        }

                        if adjustedTimestamp > 0 {
                            self.beginUpdatingPlaylist(initialHeadTimestamp: adjustedTimestamp)
                        }
                    }
                }))
            }
        }

        private func beginUpdatingPlaylist(initialHeadTimestamp: Int64) {
            let segmentDuration: Int64 = 1000

            var timestamp = initialHeadTimestamp
            self.updatePlaylist(headTimestamp: timestamp)

            self.updatePlaylistDisposable.set((
                Signal<Void, NoError>.single(Void())
                |> delay(1.0, queue: self.queue)
                |> restart
                |> deliverOn(self.queue)
            ).start(next: { [weak self] _ in
                guard let self else {
                    return
                }
                timestamp += segmentDuration
                self.updatePlaylist(headTimestamp: timestamp)
            }))
        }

        private func updatePlaylist(headTimestamp: Int64) {
            let segmentDuration: Int64 = 1000
            let headIndex = headTimestamp / segmentDuration
            let minIndex = headIndex - 20

            var parts: [Playlist.Part] = []
            for index in minIndex ... headIndex {
                parts.append(DirectMediaStreamingContext.Playlist.Part(
                    index: Int(index),
                    timestamp: Double(index),
                    duration: 1.0
                ))
            }

            self.playlistData.set(.single(Playlist(parts: parts)))
        }

        func partData(index: Int) -> Signal<Data?, NoError> {
            let segmentDuration: Int64 = 1000
            let timestamp = Int64(index) * segmentDuration

            //print("Player: request part(q: \(quality)) \(index) -> \(timestamp)")

            guard let broadcastPartsSource = self.broadcastPartsSource else {
                return .single(nil)
            }

            return Signal { subscriber in
                return broadcastPartsSource.requestPart(
                    timestampMilliseconds: timestamp,
                    durationMilliseconds: segmentDuration,
                    subject: .video(channelId: 1, quality: .full),
                    completion: { part in
                        var data = part.oggData
                        if data.count > 32 {
                            data = data.subdata(in: 32 ..< data.count)
                        }
                        subscriber.putNext(data)
                    },
                    rejoinNeeded: {
                        //TODO
                    }
                )
            }
        }
    }

    private let queue = Queue()
    let internalId: CallSessionInternalId
    private let impl: QueueLocalObject<Impl>
    private var hlsServerDisposable: Disposable?

    public init(id: CallSessionInternalId, rejoinNeeded: @escaping () -> Void) {
        self.internalId = id

        let queue = self.queue
        self.impl = QueueLocalObject(queue: queue, generate: {
            return Impl(queue: queue, rejoinNeeded: rejoinNeeded)
        })
    }

    deinit {
    }

    public func setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData?) {
        self.impl.with { impl in
            impl.setAudioStreamData(audioStreamData: audioStreamData)
        }
    }

    public func playlistData() -> Signal<Playlist, NoError> {
        return self.impl.signalWith { impl, subscriber in
            impl.playlistData.get().start(next: subscriber.putNext)
        }
    }

    public func partData(index: Int) -> Signal<Data?, NoError> {
        return self.impl.signalWith { impl, subscriber in
            impl.partData(index: index).start(next: subscriber.putNext)
        }
    }
}

public protocol SharedHLSServerSource: AnyObject {
    var id: String { get }
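
For the playlist bootstrapping in setAudioStreamData above: requestTime returns server time in milliseconds, which is aligned down to a segment boundary and then stepped back four segments so playback starts behind the live edge. A worked example of that arithmetic with a hypothetical timestamp (Int64 division truncates):

let timestamp: Int64 = 1735600123456 // hypothetical server time, milliseconds
let segmentDuration: Int64 = 1000

// 1735600123456 / 1000 * 1000 == 1735600123000 (aligned down to a segment boundary),
// minus 4 * 1000 == 1735600119000, i.e. four one-second segments behind the live head.
let adjustedTimestamp = timestamp / segmentDuration * segmentDuration - 4 * segmentDuration

From there, beginUpdatingPlaylist(initialHeadTimestamp:) advances the head by one segmentDuration each second, and updatePlaylist(headTimestamp:) publishes the trailing 21 segments (indices headIndex - 20 through headIndex).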