Video player improvements

Isaac 2024-10-18 17:38:41 +04:00
parent 9a55df8fc9
commit 1e75c0fa02
10 changed files with 703 additions and 232 deletions

View File

@ -53,6 +53,7 @@ swift_library(
"//submodules/TelegramUI/Components/Ads/AdsInfoScreen",
"//submodules/TelegramUI/Components/Ads/AdsReportScreen",
"//submodules/UrlHandling",
"//submodules/TelegramUI/Components/SaveProgressScreen",
],
visibility = [
"//visibility:public",

View File

@ -27,6 +27,7 @@ import Pasteboard
import AdUI
import AdsInfoScreen
import AdsReportScreen
import SaveProgressScreen
public enum UniversalVideoGalleryItemContentInfo {
case message(Message, Int?)
@ -3238,28 +3239,104 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
}
}
if let (message, maybeFile, _) = strongSelf.contentInfo(), let file = maybeFile, !message.isCopyProtected() && !item.peerIsCopyProtected && message.paidContent == nil && !(item.content is HLSVideoContent) {
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Gallery_SaveVideo, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { _, f in
f(.default)
if let (message, maybeFile, _) = strongSelf.contentInfo(), let file = maybeFile, !message.isCopyProtected() && !item.peerIsCopyProtected && message.paidContent == nil {
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.Gallery_SaveVideo, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Download"), color: theme.actionSheet.primaryTextColor) }, action: { c, _ in
guard let self else {
c?.dismiss(result: .default, completion: nil)
return
}
if let strongSelf = self {
switch strongSelf.fetchStatus {
if let content = item.content as? HLSVideoContent {
guard let videoNode = self.videoNode, let qualityState = videoNode.videoQualityState(), !qualityState.available.isEmpty else {
return
}
if qualityState.available.isEmpty {
return
}
guard let qualitySet = HLSQualitySet(baseFile: content.fileReference) else {
return
}
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: self.presentationData.strings.Common_Back, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Back"), color: theme.actionSheet.primaryTextColor)
}, iconPosition: .left, action: { c, _ in
c?.popItems()
})))
for quality in qualityState.available {
guard let qualityFile = qualitySet.qualityFiles[quality] else {
continue
}
guard let qualityFileSize = qualityFile.media.size else {
continue
}
let fileSizeString = dataSizeString(qualityFileSize, formatting: DataSizeStringFormatting(presentationData: self.presentationData))
items.append(.action(ContextMenuActionItem(text: "\(quality)p (\(fileSizeString))", icon: { _ in
return nil
}, action: { [weak self] c, _ in
c?.dismiss(result: .default, completion: nil)
guard let self else {
return
}
guard let controller = self.galleryController() else {
return
}
let saveScreen = SaveProgressScreen(context: self.context, content: .progress(self.presentationData.strings.Story_TooltipSaving, 0.0))
controller.present(saveScreen, in: .current)
let stringSaving = self.presentationData.strings.Story_TooltipSaving
let stringSaved = self.presentationData.strings.Story_TooltipSaved
let saveFileReference: AnyMediaReference = qualityFile.abstract
let saveSignal = SaveToCameraRoll.saveToCameraRoll(context: self.context, postbox: self.context.account.postbox, userLocation: .peer(message.id.peerId), mediaReference: saveFileReference)
let disposable = (saveSignal
|> deliverOnMainQueue).start(next: { [weak saveScreen] progress in
guard let saveScreen else {
return
}
saveScreen.content = .progress(stringSaving, progress)
}, completed: { [weak saveScreen] in
guard let saveScreen else {
return
}
saveScreen.content = .completion(stringSaved)
Queue.mainQueue().after(3.0, { [weak saveScreen] in
saveScreen?.dismiss()
})
})
saveScreen.cancelled = {
disposable.dispose()
}
})))
}
c?.pushItems(items: .single(ContextController.Items(content: .list(items))))
} else {
c?.dismiss(result: .default, completion: nil)
switch self.fetchStatus {
case .Local:
let _ = (SaveToCameraRoll.saveToCameraRoll(context: strongSelf.context, postbox: strongSelf.context.account.postbox, userLocation: .peer(message.id.peerId), mediaReference: .message(message: MessageReference(message), media: file))
|> deliverOnMainQueue).start(completed: {
guard let strongSelf = self else {
let _ = (SaveToCameraRoll.saveToCameraRoll(context: self.context, postbox: self.context.account.postbox, userLocation: .peer(message.id.peerId), mediaReference: .message(message: MessageReference(message), media: file))
|> deliverOnMainQueue).start(completed: { [weak self] in
guard let self else {
return
}
guard let controller = strongSelf.galleryController() else {
guard let controller = self.galleryController() else {
return
}
controller.present(UndoOverlayController(presentationData: strongSelf.presentationData, content: .mediaSaved(text: strongSelf.presentationData.strings.Gallery_VideoSaved), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root))
controller.present(UndoOverlayController(presentationData: self.presentationData, content: .mediaSaved(text: self.presentationData.strings.Gallery_VideoSaved), elevatedLayout: false, animateInAsReplacement: false, action: { _ in return false }), in: .window(.root))
})
default:
guard let controller = strongSelf.galleryController() else {
guard let controller = self.galleryController() else {
return
}
controller.present(textAlertController(context: strongSelf.context, title: nil, text: strongSelf.presentationData.strings.Gallery_WaitForVideoDownoad, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
controller.present(textAlertController(context: self.context, title: nil, text: self.presentationData.strings.Gallery_WaitForVideoDownoad, actions: [TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
}
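
For HLS content, the updated save action above no longer saves immediately: it pushes a nested context menu with one entry per available quality (labelled with its file size), then drives a SaveProgressScreen from the save-to-camera-roll signal and keeps the disposable around so the overlay's cancel handler can stop the download. A minimal standalone sketch of that progress-plus-cancel pattern, using plain Foundation types instead of the Telegram-internal Signal/SaveProgressScreen APIs (all names below are illustrative):

import Foundation

// Hypothetical stand-ins for SaveProgressScreen and the save signal's disposable.
final class ProgressOverlay {
    func update(progress: Double) { print("saving... \(Int(progress * 100))%") }
    func showCompleted() { print("saved") }
}

final class SaveTask {
    private var isCancelled = false
    private let overlay: ProgressOverlay

    init(overlay: ProgressOverlay) {
        self.overlay = overlay
    }

    // Simulates a chunked download, reporting progress until it is cancelled or finishes,
    // mirroring the progress/completed/cancelled branches in the diff above.
    func start(totalBytes: Int, chunkSize: Int, completion: @escaping () -> Void) {
        var received = 0
        func step() {
            guard !self.isCancelled else { return }
            received = min(received + chunkSize, totalBytes)
            self.overlay.update(progress: Double(received) / Double(totalBytes))
            if received == totalBytes {
                self.overlay.showCompleted()
                completion()
            } else {
                DispatchQueue.main.asyncAfter(deadline: .now() + 0.1, execute: step)
            }
        }
        step()
    }

    // Counterpart of `saveScreen.cancelled = { disposable.dispose() }` in the diff.
    func cancel() { self.isCancelled = true }
}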

View File

@ -63,13 +63,6 @@ public enum ChunkMediaPlayerPlayOnceWithSoundActionAtEnd {
case repeatIfNeeded
}
public enum ChunkMediaPlayerSeek {
case none
case start
case automatic
case timecode(Double)
}
public enum ChunkMediaPlayerStreaming {
case none
case conservative
@ -165,9 +158,11 @@ private final class ChunkMediaPlayerContext {
private var keepAudioSessionWhilePaused: Bool
private var continuePlayingWithoutSoundOnLostAudioSession: Bool
private let isAudioVideoMessage: Bool
private let onSeeked: () -> Void
private var seekId: Int = 0
private var initialSeekTimestamp: Double?
private var notifySeeked: Bool = false
private let loadedState: ChunkMediaPlayerLoadedState
private var isSeeking: Bool = false
@ -206,7 +201,8 @@ private final class ChunkMediaPlayerContext {
mixWithOthers: Bool,
keepAudioSessionWhilePaused: Bool,
continuePlayingWithoutSoundOnLostAudioSession: Bool,
isAudioVideoMessage: Bool
isAudioVideoMessage: Bool,
onSeeked: @escaping () -> Void
) {
assert(queue.isCurrent())
@ -225,6 +221,7 @@ private final class ChunkMediaPlayerContext {
self.keepAudioSessionWhilePaused = keepAudioSessionWhilePaused
self.continuePlayingWithoutSoundOnLostAudioSession = continuePlayingWithoutSoundOnLostAudioSession
self.isAudioVideoMessage = isAudioVideoMessage
self.onSeeked = onSeeked
self.videoRenderer = VideoPlayerProxy(queue: queue)
@ -261,7 +258,7 @@ private final class ChunkMediaPlayerContext {
self.pause(lostAudioSession: true, faded: false)
}
} else {
self.seek(timestamp: 0.0, action: .play)
self.seek(timestamp: 0.0, action: .play, notify: true)
}
}
}
@ -354,7 +351,7 @@ private final class ChunkMediaPlayerContext {
self.partsDisposable?.dispose()
}
fileprivate func seek(timestamp: Double) {
fileprivate func seek(timestamp: Double, notify: Bool) {
assert(self.queue.isCurrent())
let action: ChunkMediaPlayerPlaybackAction
@ -364,10 +361,10 @@ private final class ChunkMediaPlayerContext {
case .playing:
action = .play
}
self.seek(timestamp: timestamp, action: action)
self.seek(timestamp: timestamp, action: action, notify: notify)
}
fileprivate func seek(timestamp: Double, action: ChunkMediaPlayerPlaybackAction) {
fileprivate func seek(timestamp: Double, action: ChunkMediaPlayerPlaybackAction, notify: Bool) {
assert(self.queue.isCurrent())
self.isSeeking = true
@ -375,6 +372,7 @@ private final class ChunkMediaPlayerContext {
self.seekId += 1
self.initialSeekTimestamp = timestamp
self.notifySeeked = true
switch action {
case .play:
@ -421,11 +419,11 @@ private final class ChunkMediaPlayerContext {
timestamp = self.initialSeekTimestamp ?? 0.0
}
self.seek(timestamp: timestamp, action: .play)
self.seek(timestamp: timestamp, action: .play, notify: false)
}
}
fileprivate func playOnceWithSound(playAndRecord: Bool, seek: ChunkMediaPlayerSeek = .start) {
fileprivate func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek = .start) {
assert(self.queue.isCurrent())
if !self.enableSound {
@ -446,10 +444,10 @@ private final class ChunkMediaPlayerContext {
} else {
timestamp = 0.0
}
self.seek(timestamp: timestamp, action: .play)
self.seek(timestamp: timestamp, action: .play, notify: true)
} else {
if case let .timecode(time) = seek {
self.seek(timestamp: Double(time), action: .play)
self.seek(timestamp: Double(time), action: .play, notify: true)
} else if case .playing = self.state {
} else {
self.play()
@ -471,7 +469,7 @@ private final class ChunkMediaPlayerContext {
}
}
fileprivate func continuePlayingWithoutSound(seek: ChunkMediaPlayerSeek) {
fileprivate func continuePlayingWithoutSound(seek: MediaPlayerSeek) {
if self.enableSound {
self.lastStatusUpdateTimestamp = nil
@ -493,7 +491,7 @@ private final class ChunkMediaPlayerContext {
timestamp = 0.0
}
self.seek(timestamp: timestamp, action: .play)
self.seek(timestamp: timestamp, action: .play, notify: true)
}
}
}
@ -632,6 +630,8 @@ private final class ChunkMediaPlayerContext {
return
}
}
} else {
self.initialSeekTimestamp = nil
}
self.loadedState.partStates.removeAll(where: { partState in
@ -733,11 +733,7 @@ private final class ChunkMediaPlayerContext {
}
}
//TODO
var performActionAtEndNow = false
if !"".isEmpty {
performActionAtEndNow = true
}
var worstStatus: MediaTrackFrameBufferStatus?
for status in [videoStatus, audioStatus] {
@ -800,6 +796,11 @@ private final class ChunkMediaPlayerContext {
} else {
rate = 0.0
}
//print("finished timestamp: \(timestamp), finishedAt: \(finishedAt), duration: \(duration)")
if duration > 0.0 && timestamp >= finishedAt && finishedAt >= duration - 0.2 {
performActionAtEndNow = true
}
} else if case .buffering = worstStatus {
bufferingProgress = 0.0
rate = 0.0
@ -808,6 +809,10 @@ private final class ChunkMediaPlayerContext {
bufferingProgress = 0.0
}
if duration > 0.0 && timestamp >= duration {
performActionAtEndNow = true
}
var reportRate = rate
if let controlTimebase = self.loadedState.controlTimebase {
@ -829,12 +834,10 @@ private final class ChunkMediaPlayerContext {
}
}
//TODO
if let controlTimebase = self.loadedState.controlTimebase, let videoTrackFrameBuffer = self.loadedState.partStates.first?.mediaBuffers?.videoBuffer, videoTrackFrameBuffer.hasFrames {
self.videoRenderer.state = (controlTimebase.timebase, true, videoTrackFrameBuffer.rotationAngle, videoTrackFrameBuffer.aspect)
}
//TODO
if let audioRenderer = self.audioRenderer {
let queue = self.queue
audioRenderer.requestedFrames = true
@ -903,13 +906,18 @@ private final class ChunkMediaPlayerContext {
self.playerStatus.set(.single(status))
let _ = self.playerStatusValue.swap(status)
}
if self.notifySeeked {
self.notifySeeked = false
self.onSeeked()
}
if performActionAtEndNow {
/*if !self.stoppedAtEnd {
if !self.stoppedAtEnd {
switch self.actionAtEnd {
case let .loop(f):
self.stoppedAtEnd = false
self.seek(timestamp: 0.0, action: .play)
self.seek(timestamp: 0.0, action: .play, notify: true)
f?()
case .stop:
self.stoppedAtEnd = true
@ -921,10 +929,10 @@ private final class ChunkMediaPlayerContext {
case let .loopDisablingSound(f):
self.stoppedAtEnd = false
self.enableSound = false
self.seek(timestamp: 0.0, action: .play)
self.seek(timestamp: 0.0, action: .play, notify: true)
f()
}
}*/
}
}
}
}
@ -969,7 +977,8 @@ public final class ChunkMediaPlayer {
mixWithOthers: Bool = false,
keepAudioSessionWhilePaused: Bool = false,
continuePlayingWithoutSoundOnLostAudioSession: Bool = false,
isAudioVideoMessage: Bool = false
isAudioVideoMessage: Bool = false,
onSeeked: (() -> Void)? = nil
) {
let audioLevelPipe = self.audioLevelPipe
self.queue.async {
@ -990,7 +999,10 @@ public final class ChunkMediaPlayer {
mixWithOthers: mixWithOthers,
keepAudioSessionWhilePaused: keepAudioSessionWhilePaused,
continuePlayingWithoutSoundOnLostAudioSession: continuePlayingWithoutSoundOnLostAudioSession,
isAudioVideoMessage: isAudioVideoMessage
isAudioVideoMessage: isAudioVideoMessage,
onSeeked: {
onSeeked?()
}
)
self.contextRef = Unmanaged.passRetained(context)
}
@ -1011,7 +1023,7 @@ public final class ChunkMediaPlayer {
}
}
public func playOnceWithSound(playAndRecord: Bool, seek: ChunkMediaPlayerSeek = .start) {
public func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek = .start) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
@ -1035,7 +1047,7 @@ public final class ChunkMediaPlayer {
}
}
public func continuePlayingWithoutSound(seek: ChunkMediaPlayerSeek = .start) {
public func continuePlayingWithoutSound(seek: MediaPlayerSeek = .start) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.continuePlayingWithoutSound(seek: seek)
@ -1086,10 +1098,10 @@ public final class ChunkMediaPlayer {
public func seek(timestamp: Double, play: Bool? = nil) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
if let play = play {
context.seek(timestamp: timestamp, action: play ? .play : .pause)
if let play {
context.seek(timestamp: timestamp, action: play ? .play : .pause, notify: false)
} else {
context.seek(timestamp: timestamp)
context.seek(timestamp: timestamp, notify: false)
}
}
}
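
The ChunkMediaPlayer changes above thread a notify flag through the seek paths: user-visible seeks record a pending notification, and the context fires the injected onSeeked callback on the next status update, while internal re-seeks (for example the resume from initialSeekTimestamp, or the public seek(timestamp:play:)) pass notify: false. A simplified standalone sketch of that one-shot notification pattern (types and names here are illustrative, not the real ChunkMediaPlayerContext):

// Illustrative one-shot "seeked" notification; a sketch, not the Telegram implementation.
final class SeekNotifier {
    private var pendingNotification = false
    private let onSeeked: () -> Void

    init(onSeeked: @escaping () -> Void) {
        self.onSeeked = onSeeked
    }

    // Called from the seek paths; user-visible seeks pass notify: true.
    func didSeek(notify: Bool) {
        if notify {
            self.pendingNotification = true
        }
    }

    // Called from the periodic status update; delivers the callback at most once per seek.
    func statusTick() {
        if self.pendingNotification {
            self.pendingNotification = false
            self.onSeeked()
        }
    }
}

let notifier = SeekNotifier(onSeeked: { print("seeked") })
notifier.didSeek(notify: true)
notifier.statusTick() // prints "seeked"
notifier.statusTick() // silent: the flag was consumed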

View File

@ -410,7 +410,7 @@ final class PeerAllowedReactionsScreenComponent: Component {
chatPeerId: nil,
selectedItems: Set(),
backgroundIconColor: nil,
hasSearch: false,
hasSearch: true,
forceHasPremium: true
)
self.emojiContentDisposable = (emojiContent

File diff suppressed because one or more lines are too long

View File

@ -83,6 +83,7 @@ export class SourceBufferStub extends EventTarget {
this.buffered._ranges = ranges;
this.mediaSource._reopen();
this.mediaSource.emitUpdatedBuffer();
this.updating = false;
this.dispatchEvent(new Event('update'));
@ -122,6 +123,7 @@ export class SourceBufferStub extends EventTarget {
this.buffered._ranges = ranges;
this.mediaSource._reopen();
this.mediaSource.emitUpdatedBuffer();
this.updating = false;
this.dispatchEvent(new Event('update'));
@ -162,6 +164,17 @@ export class MediaSourceStub extends EventTarget {
return true;
}
emitUpdatedBuffer() {
this.dispatchEvent(new Event("bufferChanged"));
}
getBufferedRanges() {
if (this.sourceBuffers._buffers.length != 0) {
return this.sourceBuffers._buffers[0].buffered._ranges;
}
return [];
}
addSourceBuffer(mimeType) {
if (this.readyState !== 'open') {
throw new DOMException('MediaSource is not open', 'InvalidStateError');
@ -170,6 +183,8 @@ export class MediaSourceStub extends EventTarget {
this.sourceBuffers._add(sourceBuffer);
this.activeSourceBuffers._add(sourceBuffer);
this.dispatchEvent(new Event("bufferChanged"));
window.bridgeInvokeAsync(this.bridgeId, "MediaSource", "updateSourceBuffers", {
"ids": this.sourceBuffers._buffers.map((sb) => sb.bridgeId)
}).then((result) => {
@ -184,6 +199,8 @@ export class MediaSourceStub extends EventTarget {
}
this.activeSourceBuffers._remove(sourceBuffer);
this.dispatchEvent(new Event("bufferChanged"));
window.bridgeInvokeAsync(this.bridgeId, "MediaSource", "updateSourceBuffers", {
"ids": this.sourceBuffers._buffers.map((sb) => sb.bridgeId)
}).then((result) => {

View File

@ -14,7 +14,7 @@ export class VideoElementStub extends EventTarget {
this._currentTime = 0.0;
this.duration = NaN;
this.paused = true;
this.playbackRate = 1.0;
this._playbackRate = 1.0;
this.volume = 1.0;
this.muted = false;
this.readyState = 0;
@ -30,6 +30,7 @@ export class VideoElementStub extends EventTarget {
this.videoHeight = 0;
this.textTracks = new TextTrackListStub();
this.isWaiting = false;
this.currentMedia = null;
window.bridgeInvokeAsync(this.bridgeId, "VideoElement", "constructor", {
"instanceId": this.instanceId
@ -63,14 +64,36 @@ export class VideoElementStub extends EventTarget {
}
}
get playbackRate() {
return this._playbackRate;
}
set playbackRate(value) {
this._playbackRate = value;
window.bridgeInvokeAsync(this.bridgeId, "VideoElement", "setPlaybackRate", {
"instanceId": this.instanceId,
"playbackRate": value
}).then((result) => {
})
}
get src() {
return this._src;
}
set src(value) {
if (this.currentMedia) {
this.currentMedia.removeEventListener("bufferChanged", false);
}
this._src = value;
var media = window.mediaSourceMap[this._src];
this.currentMedia = media;
if (media) {
media.addEventListener("bufferChanged", () => {
this.updateBufferedFromMediaSource();
}, false);
window.bridgeInvokeAsync(this.bridgeId, "VideoElement", "setMediaSource", {
"instanceId": this.instanceId,
"mediaSourceId": media.bridgeId
@ -90,16 +113,13 @@ export class VideoElementStub extends EventTarget {
return fragment.querySelectorAll('*');
}
bridgeUpdateBuffered(value) {
const updatedRanges = value;
var ranges = [];
for (var i = 0; i < updatedRanges.length; i += 2) {
ranges.push({
start: updatedRanges[i],
end: updatedRanges[i + 1]
});
updateBufferedFromMediaSource() {
var currentMedia = this.currentMedia;
if (currentMedia) {
this.buffered._ranges = currentMedia.getBufferedRanges();
} else {
this.buffered._ranges = [];
}
this.buffered._ranges = ranges;
}
bridgeUpdateStatus(dict) {
@ -160,10 +180,6 @@ export class VideoElementStub extends EventTarget {
return 'probably';
}
_getMedia() {
return window.mediaSourceMap[this.src];
}
addTextTrack(kind, label, language) {
const textTrack = new TextTrackStub(kind, label, language);
this.textTracks._add(textTrack);
@ -172,4 +188,8 @@ export class VideoElementStub extends EventTarget {
load() {
}
notifySeeked() {
this.dispatchEvent(new Event('seeked'));
}
}

View File

@ -0,0 +1,150 @@
function base64ToArrayBuffer(base64) {
var binaryString = atob(base64);
var bytes = new Uint8Array(binaryString.length);
for (var i = 0; i < binaryString.length; i++) {
bytes[i] = binaryString.charCodeAt(i);
}
return bytes.buffer;
}
export class XMLHttpRequestStub extends EventTarget {
constructor() {
super();
this.bridgeId = window.nextInternalId;
window.nextInternalId += 1;
this.readyState = 0;
this.status = 0;
this.statusText = "";
this.responseText = "";
this.responseXML = null;
this._responseData = null;
this.onreadystatechange = null;
this._requestHeaders = {};
this._responseHeaders = {};
this._method = "";
this._url = "";
this._async = true;
this._user = null;
this._password = null;
this._responseType = "";
}
open(method, url, async = true, user = null, password = null) {
this._method = method;
this._url = url;
this._async = async;
this._user = user;
this._password = password;
this.readyState = 1; // Opened
this._triggerReadyStateChange();
}
setRequestHeader(header, value) {
this._requestHeaders[header] = value;
}
getResponseHeader(header) {
return this._responseHeaders[header.toLowerCase()] || null;
}
getAllResponseHeaders() {
return Object.entries(this._responseHeaders)
.map(([header, value]) => `${header}: ${value}`)
.join('\r\n');
}
send(body = null) {
this.readyState = 2;
this._triggerReadyStateChange();
this.readyState = 3; // Loading
this._triggerReadyStateChange();
this.dispatchEvent(new Event("loadstart"));
window.bridgeInvokeAsync(this.bridgeId, "XMLHttpRequest", "load", {
"id": this.bridgeId,
"url": this._url,
"requestHeaders": this._requestHeaders
}).then((result) => {
if (result["error"]) {
this.dispatchEvent(new Event("error"));
} else {
this.status = result["status"];
this.statusText = result["statusText"];
if (result["responseData"]) {
if (this._responseType === "arraybuffer") {
this._responseData = base64ToArrayBuffer(result["responseData"]);
} else {
this.responseText = atob(result["responseData"]);
}
this.responseXML = null;
} else {
this.response = null;
this.responseText = result["responseText"] || null;
this.responseXML = result["responseXML"] || null;
}
this._responseHeaders = result["responseHeaders"];
this.readyState = 4; // Done
this._triggerReadyStateChange();
this.dispatchEvent(new Event("load"));
}
this.dispatchEvent(new Event("loadend"));
});
}
abort() {
this.dispatchEvent(new Event("abort"));
window.bridgeInvokeAsync(this.bridgeId, "XMLHttpRequest", "abort", {
"id": this.bridgeId
});
this.readyState = 0;
this.status = 0;
this.statusText = '';
this.responseText = '';
this.responseXML = null;
this._responseHeaders = {};
this._triggerReadyStateChange();
}
overrideMimeType(mime) {
}
set responseType(type) {
this._responseType = type;
}
get responseType() {
return this._responseType;
}
get response() {
if (this._responseType === '' || this._responseType === 'text') {
return this.responseText;
}
return this._responseData;
}
_triggerReadyStateChange() {
this.dispatchEvent(new Event('readystatechange'));
if (typeof this.onreadystatechange === 'function') {
this.onreadystatechange();
}
}
// Additional methods to simulate responses
_setResponse(status, statusText, responseText, responseHeaders = {}) {
this.status = status;
this.statusText = statusText;
this.responseText = responseText;
this._responseHeaders = responseHeaders;
}
}

View File

@ -1,6 +1,7 @@
import Hls from "hls.js";
import { VideoElementStub } from "./VideoElementStub.js"
import { MediaSourceStub, SourceBufferStub } from "./MediaSourceStub.js"
import { XMLHttpRequestStub } from "./XMLHttpRequestStub.js"
window.bridgeObjectMap = {};
window.bridgeCallbackMap = {};
@ -48,6 +49,8 @@ if (typeof window !== 'undefined') {
window.MediaSource = MediaSourceStub;
window.ManagedMediaSource = MediaSourceStub;
window.SourceBuffer = SourceBufferStub;
window.XMLHttpRequest = XMLHttpRequestStub;
URL.createObjectURL = function(ms) {
const url = "blob:mock-media-source:" + ms.internalId;
window.mediaSourceMap[url] = ms;
@ -66,6 +69,7 @@ export class HlsPlayerInstance {
this.id = id;
this.isManifestParsed = false;
this.currentTimeUpdateTimeout = null;
this.notifySeekedOnNextStatusUpdate = false;
this.video = new VideoElementStub(this.id);
}
@ -186,6 +190,15 @@ export class HlsPlayerInstance {
this.currentTimeUpdateTimeout = null;
}
}
if (this.notifySeekedOnNextStatusUpdate) {
this.notifySeekedOnNextStatusUpdate = false;
this.video.notifySeeked();
}
}
playerNotifySeekedOnNextStatusUpdate() {
this.notifySeekedOnNextStatusUpdate = true;
}
refreshPlayerCurrentTime() {

View File

@ -17,6 +17,21 @@ import ManagedFile
import FFMpegBinding
import RangeSet
private func parseRange(from rangeString: String) -> Range<Int>? {
guard rangeString.hasPrefix("bytes=") else {
return nil
}
let rangeValues = rangeString.dropFirst("bytes=".count).split(separator: "-")
guard rangeValues.count == 2,
let start = Int(rangeValues[0]),
let end = Int(rangeValues[1]) else {
return nil
}
return start ..< end
}
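
The new parseRange helper turns an HTTP Range header such as "bytes=0-1023" into a half-open Swift Range<Int>. HTTP byte ranges are inclusive, which is why the request handler further down widens the upper bound (requestRange.upperBound + 1) before asking the source for file data. A quick sketch of the expected behaviour (the assertions are mine, not part of the diff):

// Assumes the parseRange(from:) helper defined above.
let parsed = parseRange(from: "bytes=0-1023")
assert(parsed == (0 ..< 1023))                 // half-open; the caller widens this to 0 ..< 1024
assert(parseRange(from: "bytes=500-") == nil)  // open-ended ranges are not handled here
assert(parseRange(from: "0-1023") == nil)      // missing "bytes=" prefix
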
final class HLSJSServerSource: SharedHLSServer.Source {
let id: String
let postbox: Postbox
@ -253,59 +268,6 @@ final class HLSJSServerSource: SharedHLSServer.Source {
}
}
final class HLSJSHTMLServerSource: SharedHLSServer.Source {
let id: String
init() {
self.id = UUID().uuidString
}
deinit {
}
func arbitraryFileData(path: String) -> Signal<(data: Data, contentType: String)?, NoError> {
return Signal { subscriber in
let bundle = Bundle(for: HLSJSServerSource.self)
let bundlePath = bundle.bundlePath + "/HlsBundle.bundle"
if let data = try? Data(contentsOf: URL(fileURLWithPath: bundlePath + "/" + path)) {
let mimeType: String
let pathExtension = (path as NSString).pathExtension
if pathExtension == "html" {
mimeType = "text/html"
} else if pathExtension == "js" {
mimeType = "application/javascript"
mimeType = "application/javascript"
} else {
mimeType = "application/octet-stream"
}
subscriber.putNext((data, mimeType))
} else {
subscriber.putNext(nil)
}
subscriber.putCompletion()
return EmptyDisposable
}
}
func masterPlaylistData() -> Signal<String, NoError> {
return .never()
}
func playlistData(quality: Int) -> Signal<String, NoError> {
return .never()
}
func partData(index: Int, quality: Int) -> Signal<Data?, NoError> {
return .never()
}
func fileData(id: Int64, range: Range<Int>) -> Signal<(TempBoxFile, Range<Int>, Int)?, NoError> {
return .never()
}
}
private class WeakScriptMessageHandler: NSObject, WKScriptMessageHandler {
private let f: (WKScriptMessage) -> ()
@ -320,7 +282,7 @@ private class WeakScriptMessageHandler: NSObject, WKScriptMessageHandler {
}
}
private final class SharedHLSVideoWebView {
private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
private final class ContextReference {
weak var contentNode: HLSVideoJSNativeContentNode?
@ -329,11 +291,27 @@ private final class SharedHLSVideoWebView {
}
}
private enum ResponseError {
case badRequest
case notFound
case internalServerError
var httpStatus: (Int, String) {
switch self {
case .badRequest:
return (400, "Bad Request")
case .notFound:
return (404, "Not Found")
case .internalServerError:
return (500, "Internal Server Error")
}
}
}
static let shared: SharedHLSVideoWebView = SharedHLSVideoWebView()
private var contextReferences: [Int: ContextReference] = [:]
let htmlSource: HLSJSHTMLServerSource
let webView: WKWebView
var videoElements: [Int: VideoElement] = [:]
@ -343,11 +321,9 @@ private final class SharedHLSVideoWebView {
private var isWebViewReady: Bool = false
private var pendingInitializeInstanceIds: [(id: Int, urlPrefix: String)] = []
private var serverDisposable: Disposable?
private var tempTasks: [Int: URLSessionTask] = [:]
init() {
self.htmlSource = HLSJSHTMLServerSource()
override init() {
let config = WKWebViewConfiguration()
config.allowsInlineMediaPlayback = true
config.mediaTypesRequiringUserActionForPlayback = []
@ -381,6 +357,10 @@ private final class SharedHLSVideoWebView {
self.webView.isInspectable = isDebug
}
super.init()
self.webView.navigationDelegate = self
handleScriptMessage = { [weak self] message in
Queue.mainQueue().async {
guard let self else {
@ -474,21 +454,12 @@ private final class SharedHLSVideoWebView {
}
}
let htmlSourceId = self.htmlSource.id
self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: self.htmlSource, completion: { [weak self] in
Queue.mainQueue().async {
guard let self else {
return
}
let htmlUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(htmlSourceId)/index.html"
self.webView.load(URLRequest(url: URL(string: htmlUrl)!))
}
})
let bundle = Bundle(for: SharedHLSVideoWebView.self)
let bundlePath = bundle.bundlePath + "/HlsBundle.bundle"
self.webView.loadFileURL(URL(fileURLWithPath: bundlePath + "/index.html"), allowingReadAccessTo: URL(fileURLWithPath: bundlePath))
}
deinit {
self.serverDisposable?.dispose()
}
private func bridgeInvoke(
@ -534,6 +505,21 @@ private final class SharedHLSVideoWebView {
instance.onSetCurrentTime(timestamp: currentTime)
}
completion([:])
} else if (methodName == "setPlaybackRate") {
guard let instanceId = params["instanceId"] as? Int else {
assertionFailure()
return
}
guard let playbackRate = params["playbackRate"] as? Double else {
assertionFailure()
return
}
if let instance = self.contextReferences[instanceId]?.contentNode {
instance.onSetPlaybackRate(playbackRate: playbackRate)
}
completion([:])
} else if (methodName == "play") {
guard let instanceId = params["instanceId"] as? Int else {
@ -657,6 +643,215 @@ private final class SharedHLSVideoWebView {
sourceBuffer.abortOperation()
completion([:])
}
} else if className == "XMLHttpRequest" {
if methodName == "load" {
guard let id = params["id"] as? Int else {
assertionFailure()
return
}
guard let url = params["url"] as? String else {
assertionFailure()
return
}
guard let requestHeaders = params["requestHeaders"] as? [String: String] else {
assertionFailure()
return
}
guard let parsedUrl = URL(string: url) else {
assertionFailure()
return
}
guard let host = parsedUrl.host, host == "server" else {
completion(["error": 1])
return
}
var requestPath = parsedUrl.path
if requestPath.hasPrefix("/") {
requestPath = String(requestPath[requestPath.index(after: requestPath.startIndex) ..< requestPath.endIndex])
}
guard let firstSlash = requestPath.range(of: "/") else {
completion(["error": 1])
return
}
var requestRange: Range<Int>?
if let rangeString = requestHeaders["Range"] {
requestRange = parseRange(from: rangeString)
}
let streamId = String(requestPath[requestPath.startIndex ..< firstSlash.lowerBound])
var handlerFound = false
for (_, contextReference) in self.contextReferences {
if let context = contextReference.contentNode, let source = context.playerSource, source.id == streamId {
handlerFound = true
let filePath = String(requestPath[firstSlash.upperBound...])
if filePath == "master.m3u8" {
let _ = (source.masterPlaylistData()
|> deliverOn(.mainQueue())
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
self.sendResponseAndClose(id: id, data: result.data(using: .utf8)!, completion: completion)
})
} else if filePath.hasPrefix("hls_level_") && filePath.hasSuffix(".m3u8") {
guard let levelIndex = Int(String(filePath[filePath.index(filePath.startIndex, offsetBy: "hls_level_".count) ..< filePath.index(filePath.endIndex, offsetBy: -".m3u8".count)])) else {
self.sendErrorAndClose(id: id, error: .notFound, completion: completion)
return
}
let _ = (source.playlistData(quality: levelIndex)
|> deliverOn(.mainQueue())
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
self.sendResponseAndClose(id: id, data: result.data(using: .utf8)!, completion: completion)
})
} else if filePath.hasPrefix("partfile") && filePath.hasSuffix(".mp4") {
let fileId = String(filePath[filePath.index(filePath.startIndex, offsetBy: "partfile".count) ..< filePath.index(filePath.endIndex, offsetBy: -".mp4".count)])
guard let fileIdValue = Int64(fileId) else {
self.sendErrorAndClose(id: id, error: .notFound, completion: completion)
return
}
guard let requestRange else {
self.sendErrorAndClose(id: id, error: .badRequest, completion: completion)
return
}
let _ = (source.fileData(id: fileIdValue, range: requestRange.lowerBound ..< requestRange.upperBound + 1)
|> deliverOn(.mainQueue())
//|> timeout(5.0, queue: self.queue, alternate: .single(nil))
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
if let (tempFile, tempFileRange, totalSize) = result {
self.sendResponseFileAndClose(id: id, file: tempFile, fileRange: tempFileRange, range: requestRange, totalSize: totalSize, completion: completion)
} else {
self.sendErrorAndClose(id: id, error: .internalServerError, completion: completion)
}
})
}
break
}
}
if (!handlerFound) {
completion(["error": 1])
}
/*var request = URLRequest(url: URL(string: url)!)
for (key, value) in requestHeaders {
request.setValue(value, forHTTPHeaderField: key)
}
let isCompleted = Atomic<Bool>(value: false)
let task = URLSession.shared.dataTask(with: request, completionHandler: { [weak self] data, response, error in
Queue.mainQueue().async {
guard let self else {
return
}
if isCompleted.swap(true) {
return
}
self.tempTasks.removeValue(forKey: id)
if let _ = error {
completion([
"error": 1
])
} else {
if let response = response as? HTTPURLResponse {
completion([
"status": response.statusCode,
"statusText": "OK",
"responseData": data?.base64EncodedString() ?? "",
"responseHeaders": response.allHeaderFields as? [String: String] ?? [:]
])
let _ = response
/*if let response = response as? HTTPURLResponse, let requestUrl {
if let updatedResponse = HTTPURLResponse(
url: requestUrl,
statusCode: response.statusCode,
httpVersion: "HTTP/1.1",
headerFields: response.allHeaderFields as? [String: String] ?? [:]
) {
sourceTask.didReceive(updatedResponse)
} else {
sourceTask.didReceive(response)
}
} else {
sourceTask.didReceive(response)
}*/
}
}
}
})
self.tempTasks[id] = task
task.resume()*/
} else if methodName == "abort" {
guard let id = params["id"] as? Int else {
assertionFailure()
return
}
if let task = self.tempTasks.removeValue(forKey: id) {
task.cancel()
}
completion([:])
}
}
}
private func sendErrorAndClose(id: Int, error: ResponseError, completion: @escaping ([String: Any]) -> Void) {
let (code, status) = error.httpStatus
completion([
"status": code,
"statusText": status,
"responseData": "",
"responseHeaders": [
"Content-Type": "text/html"
] as [String: String]
])
}
private func sendResponseAndClose(id: Int, data: Data, contentType: String = "application/octet-stream", completion: @escaping ([String: Any]) -> Void) {
completion([
"status": 200,
"statusText": "OK",
"responseData": data.base64EncodedString(),
"responseHeaders": [
"Content-Type": contentType,
"Content-Length": "\(data.count)"
] as [String: String]
])
}
private func sendResponseFileAndClose(id: Int, file: TempBoxFile, fileRange: Range<Int>, range: Range<Int>, totalSize: Int, completion: @escaping ([String: Any]) -> Void) {
if let data = try? Data(contentsOf: URL(fileURLWithPath: file.path), options: .mappedIfSafe).subdata(in: fileRange) {
completion([
"status": 200,
"statusText": "OK",
"responseData": data.base64EncodedString(),
"responseHeaders": [
"Content-Type": "application/octet-stream",
"Content-Range": "bytes \(range.lowerBound)-\(range.upperBound)/\(totalSize)",
"Content-Length": "\(fileRange.upperBound - fileRange.lowerBound)"
] as [String: String]
])
} else {
self.sendErrorAndClose(id: id, error: .internalServerError, completion: completion)
}
}
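
With the local HTTP server path removed, the JS XMLHttpRequest stub forwards requests over the bridge and the helpers above synthesize the response: bodies travel as base64 strings and the headers are assembled by hand. For a "bytes=0-1023" request against a 10,000-byte part file, the values would come out as in this hedged illustration (the numbers are chosen for the example, not taken from the diff):

// Illustrative header construction matching sendResponseFileAndClose above.
let requestRange = 0 ..< 1023          // as returned by parseRange(from: "bytes=0-1023")
let fetchedRange = requestRange.lowerBound ..< requestRange.upperBound + 1 // 1024 bytes fetched
let totalSize = 10_000
let contentRange = "bytes \(requestRange.lowerBound)-\(requestRange.upperBound)/\(totalSize)"
let contentLength = "\(fetchedRange.count)"
// contentRange == "bytes 0-1023/10000" (inclusive last byte), contentLength == "1024"
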
@ -754,7 +949,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private let audioSessionDisposable = MetaDisposable()
private var hasAudioSession = false
private let playerSource: HLSJSServerSource?
fileprivate let playerSource: HLSJSServerSource?
private var serverDisposable: Disposable?
private let playbackCompletedListeners = Bag<() -> Void>()
@ -816,7 +1011,6 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private var hasRequestedPlayerLoad: Bool = false
private var requestedPlaying: Bool = false
private var requestedBaseRate: Double = 1.0
private var requestedLevelIndex: Int?
@ -870,13 +1064,17 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
intrinsicDimensions.height = floor(intrinsicDimensions.height / UIScreenScale)
self.intrinsicDimensions = intrinsicDimensions
var onSeeked: (() -> Void)?
self.player = ChunkMediaPlayer(
postbox: postbox,
audioSessionManager: audioSessionManager,
partsState: self.chunkPlayerPartsState.get(),
video: true,
enableSound: true,
baseRate: baseRate
baseRate: baseRate,
onSeeked: {
onSeeked?()
}
)
self.playerNode = MediaPlayerNode()
@ -912,19 +1110,6 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self._bufferingStatus.set(.single(nil))
if let playerSource = self.playerSource {
let playerSourceId = playerSource.id
self.serverDisposable = SharedHLSServer.shared.registerPlayer(source: playerSource, completion: { [weak self] in
Queue.mainQueue().async {
guard let self else {
return
}
SharedHLSVideoWebView.shared.initializeWhenReady(context: self, urlPrefix: "/\(playerSourceId)/")
}
})
}
self.didBecomeActiveObserver = NotificationCenter.default.addObserver(forName: UIApplication.willEnterForegroundNotification, object: nil, queue: nil, using: { [weak self] _ in
let _ = self
})
@ -946,6 +1131,19 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
self.updateStatus()
})
onSeeked = { [weak self] in
Queue.mainQueue().async {
guard let self else {
return
}
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerNotifySeekedOnNextStatusUpdate();", completionHandler: nil)
}
}
if let playerSource {
SharedHLSVideoWebView.shared.initializeWhenReady(context: self, urlPrefix: "http://server/\(playerSource.id)/")
}
}
deinit {
@ -1049,12 +1247,6 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetBaseRate(\(self.requestedBaseRate));", completionHandler: nil)
if self.requestedPlaying {
self.requestPlay()
} else {
self.requestPause()
}
}
self.updateStatus()
@ -1070,6 +1262,10 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.player.seek(timestamp: timestamp)
}
fileprivate func onSetPlaybackRate(playbackRate: Double) {
self.player.setBaseRate(playbackRate)
}
fileprivate func onPlay() {
self.player.play()
}
@ -1161,13 +1357,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
let bufferedRanges = sourceBuffer.ranges
if let (bridgeId, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) {
let result = serializeRanges(bufferedRanges)
let jsonResult = try! JSONSerialization.data(withJSONObject: result)
let jsonResultString = String(data: jsonResult, encoding: .utf8)!
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.bridgeObjectMap[\(bridgeId)].bridgeUpdateBuffered(\(jsonResultString));", completionHandler: nil)
if let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) {
if let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoWebView.shared.mediaSources[mediaSourceId] {
if let duration = mediaSource.duration {
var mappedRanges = RangeSet<Int64>()
@ -1208,82 +1398,26 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
if !self.initializedStatus {
self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.requestedBaseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
}
/*if !self.hasAudioSession {
self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
Queue.mainQueue().async {
guard let self else {
return
}
self.hasAudioSession = true
self.requestPlay()
}
}, deactivate: { [weak self] _ in
return Signal { subscriber in
if let self {
self.hasAudioSession = false
self.requestPause()
}
subscriber.putCompletion()
return EmptyDisposable
}
|> runOn(.mainQueue())
}))
} else*/ do {
self.requestPlay()
}
}
private func requestPlay() {
self.requestedPlaying = true
if self.playerIsReady {
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerPlay();", completionHandler: nil)
}
self.updateStatus()
}
private func requestPause() {
self.requestedPlaying = false
if self.playerIsReady {
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerPause();", completionHandler: nil)
}
self.updateStatus()
self.player.play()
}
func pause() {
assert(Queue.mainQueue().isCurrent())
self.requestPause()
self.player.pause()
}
func togglePlayPause() {
assert(Queue.mainQueue().isCurrent())
if self.requestedPlaying {
self.pause()
} else {
self.play()
}
self.player.togglePlayPause()
}
func setSoundEnabled(_ value: Bool) {
assert(Queue.mainQueue().isCurrent())
/*if value {
if !self.hasAudioSession {
self.audioSessionDisposable.set(self.audioSessionManager.push(audioSessionType: .play(mixWithOthers: false), activate: { [weak self] _ in
self?.hasAudioSession = true
self?.player?.volume = 1.0
}, deactivate: { [weak self] _ in
self?.hasAudioSession = false
self?.player?.pause()
return .complete()
}))
}
if value {
self.player.playOnceWithSound(playAndRecord: false, seek: .none)
} else {
self.player?.volume = 0.0
self.hasAudioSession = false
self.audioSessionDisposable.set(nil)
}*/
self.player.continuePlayingWithoutSound(seek: .none)
}
}
func seek(_ timestamp: Double) {
@ -1294,28 +1428,75 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetIsMuted(false);", completionHandler: nil)
assert(Queue.mainQueue().isCurrent())
let action = { [weak self] in
Queue.mainQueue().async {
self?.performActionAtEnd()
}
}
switch actionAtEnd {
case .loop:
self.player.actionAtEnd = .loop({})
case .loopDisablingSound:
self.player.actionAtEnd = .loopDisablingSound(action)
case .stop:
self.player.actionAtEnd = .action(action)
case .repeatIfNeeded:
let _ = (self.player.status
|> deliverOnMainQueue
|> take(1)).start(next: { [weak self] status in
guard let strongSelf = self else {
return
}
if status.timestamp > status.duration * 0.1 {
strongSelf.player.actionAtEnd = .loop({ [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.player.actionAtEnd = .loopDisablingSound(action)
})
} else {
strongSelf.player.actionAtEnd = .loopDisablingSound(action)
}
})
}
self.play()
self.player.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
}
func setSoundMuted(soundMuted: Bool) {
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetIsMuted(\(soundMuted));", completionHandler: nil)
self.player.setSoundMuted(soundMuted: soundMuted)
}
func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
}
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool) {
assert(Queue.mainQueue().isCurrent())
self.player.setForceAudioToSpeaker(forceAudioToSpeaker)
}
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
SharedHLSVideoWebView.shared.webView.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetIsMuted(true);", completionHandler: nil)
self.hasAudioSession = false
self.audioSessionDisposable.set(nil)
assert(Queue.mainQueue().isCurrent())
let action = { [weak self] in
Queue.mainQueue().async {
self?.performActionAtEnd()
}
}
switch actionAtEnd {
case .loop:
self.player.actionAtEnd = .loop({})
case .loopDisablingSound, .repeatIfNeeded:
self.player.actionAtEnd = .loopDisablingSound(action)
case .stop:
self.player.actionAtEnd = .action(action)
}
self.player.continuePlayingWithoutSound()
}
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
self.player.setContinuePlayingWithoutSoundOnLostAudioSession(value)
}
func setBaseRate(_ baseRate: Double) {