Video improvements

Isaac 2024-11-08 16:26:01 +01:00
parent bae29f301e
commit 3797f3af4f
31 changed files with 773 additions and 314 deletions

View File

@ -259,7 +259,7 @@ public func galleryItemForEntry(
}
if isHLS {
content = HLSVideoContent(id: .message(message.id, message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos)
content = HLSVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), streamVideo: streamVideos, loopVideo: loopVideos)
} else {
content = NativeVideoContent(id: .message(message.stableId, file.fileId), userLocation: .peer(message.id.peerId), fileReference: .message(message: MessageReference(message), media: file), imageReference: mediaImage.flatMap({ ImageMediaReference.message(message: MessageReference(message), media: $0) }), streamVideo: .conservative, loopVideo: loopVideos, tempFilePath: tempFilePath, captureProtected: captureProtected, storeAfterDownload: generateStoreAfterDownload?(message, file))
}
@ -1364,7 +1364,7 @@ public class GalleryController: ViewController, StandalonePresentableController,
})
let disableTapNavigation = !(self.context.sharedContext.currentMediaDisplaySettings.with { $0 }.showNextMediaOnTap)
self.displayNode = GalleryControllerNode(controllerInteraction: controllerInteraction, disableTapNavigation: disableTapNavigation)
self.displayNode = GalleryControllerNode(context: self.context, controllerInteraction: controllerInteraction, disableTapNavigation: disableTapNavigation)
self.displayNodeDidLoad()
self.galleryNode.statusBar = self.statusBar

View File

@ -5,8 +5,11 @@ import Display
import Postbox
import SwipeToDismissGesture
import AccountContext
import UndoUI
open class GalleryControllerNode: ASDisplayNode, ASScrollViewDelegate, ASGestureRecognizerDelegate {
private let context: AccountContext
public var statusBar: StatusBar?
public var navigationBar: NavigationBar? {
didSet {
@ -48,7 +51,8 @@ open class GalleryControllerNode: ASDisplayNode, ASScrollViewDelegate, ASGesture
}
}
public init(controllerInteraction: GalleryControllerInteraction, pageGap: CGFloat = 20.0, disableTapNavigation: Bool = false) {
public init(context: AccountContext, controllerInteraction: GalleryControllerInteraction, pageGap: CGFloat = 20.0, disableTapNavigation: Bool = false) {
self.context = context
self.backgroundNode = ASDisplayNode()
self.backgroundNode.backgroundColor = UIColor.black
self.scrollView = UIScrollView()
@ -471,6 +475,16 @@ open class GalleryControllerNode: ASDisplayNode, ASScrollViewDelegate, ASGesture
let minimalDismissDistance = scrollView.contentSize.height / 12.0
if abs(velocity.y) > 1.0 || abs(distanceFromEquilibrium) > minimalDismissDistance {
if distanceFromEquilibrium > 1.0, let centralItemNode = self.pager.centralItemNode(), centralItemNode.maybePerformActionForSwipeDismiss() {
if let chatController = self.baseNavigationController()?.topViewController as? ChatController {
let presentationData = self.context.sharedContext.currentPresentationData.with({ $0 })
//TODO:localize
chatController.present(UndoOverlayController(
presentationData: presentationData,
content: .hidArchive(title: "Video Minimized", text: "Swipe down on a video to close it.", undo: false),
elevatedLayout: false, action: { _ in true }
), in: .current)
}
return
}
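Note on the hunk above: GalleryControllerNode now takes an AccountContext solely so it can read the current presentation data and show this "Video Minimized" tip, which is why every GalleryControllerNode (and subclass) call site in this commit gains the same context: argument. Call-site sketch, mirroring the updated initializer from this diff:

self.displayNode = GalleryControllerNode(
    context: self.context,
    controllerInteraction: controllerInteraction,
    disableTapNavigation: disableTapNavigation
)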

View File

@ -254,7 +254,7 @@ public final class SecretMediaPreviewController: ViewController {
}, controller: { [weak self] in
return self
})
self.displayNode = SecretMediaPreviewControllerNode(controllerInteraction: controllerInteraction)
self.displayNode = SecretMediaPreviewControllerNode(context: self.context, controllerInteraction: controllerInteraction)
self.displayNodeDidLoad()
self.controllerNode.statusPressed = { [weak self] _ in

View File

@ -377,7 +377,7 @@ public class InstantPageGalleryController: ViewController, StandalonePresentable
}, controller: { [weak self] in
return self
})
self.displayNode = GalleryControllerNode(controllerInteraction: controllerInteraction)
self.displayNode = GalleryControllerNode(context: self.context, controllerInteraction: controllerInteraction)
self.displayNodeDidLoad()
self.galleryNode.statusBar = self.statusBar

View File

@ -230,45 +230,6 @@ private final class ChunkMediaPlayerContext {
self.loadedState = ChunkMediaPlayerLoadedState()
let queue = self.queue
let audioRendererContext = MediaPlayerAudioRenderer(
audioSession: .manager(self.audioSessionManager),
forAudioVideoMessage: self.isAudioVideoMessage,
playAndRecord: self.playAndRecord,
soundMuted: self.soundMuted,
ambient: self.ambient,
mixWithOthers: self.mixWithOthers,
forceAudioToSpeaker: self.forceAudioToSpeaker,
baseRate: self.baseRate,
audioLevelPipe: self.audioLevelPipe,
updatedRate: { [weak self] in
queue.async {
guard let self else {
return
}
self.tick()
}
},
audioPaused: { [weak self] in
queue.async {
guard let self else {
return
}
if self.enableSound {
if self.continuePlayingWithoutSoundOnLostAudioSession {
self.continuePlayingWithoutSound(seek: .start)
} else {
self.pause(lostAudioSession: true, faded: false)
}
} else {
self.seek(timestamp: 0.0, action: .play, notify: true)
}
}
}
)
self.audioRenderer = MediaPlayerAudioRendererContext(renderer: audioRendererContext)
self.loadedState.controlTimebase = ChunkMediaPlayerControlTimebase(timebase: audioRendererContext.audioTimebase, isAudio: true)
self.videoRenderer.visibilityUpdated = { [weak self] value in
assert(queue.isCurrent())
@ -328,9 +289,6 @@ private final class ChunkMediaPlayerContext {
return .noFrames
})
audioRendererContext.start()
self.tick()
let tickTimer = SwiftSignalKit.Timer(timeout: 1.0 / 25.0, repeat: true, completion: { [weak self] in
self?.tick()
}, queue: self.queue)
@ -344,6 +302,8 @@ private final class ChunkMediaPlayerContext {
self.partsState = partsState
self.tick()
})
self.tick()
}
deinit {
@ -457,6 +417,7 @@ private final class ChunkMediaPlayerContext {
} else {
timestamp = 0.0
}
let _ = timestamp
self.seek(timestamp: timestamp, action: .play, notify: true)
} else {
if case let .timecode(time) = seek {
@ -598,12 +559,31 @@ private final class ChunkMediaPlayerContext {
}
timestamp = max(0.0, timestamp)
if let firstPart = self.loadedState.partStates.first, let mediaBuffers = firstPart.mediaBuffers, mediaBuffers.videoBuffer != nil, mediaBuffers.audioBuffer == nil {
// No audio
var disableAudio = false
if !self.enableSound {
disableAudio = true
}
var hasAudio = false
if let firstPart = self.loadedState.partStates.first, let mediaBuffers = firstPart.mediaBuffers, mediaBuffers.videoBuffer != nil {
if mediaBuffers.audioBuffer != nil {
hasAudio = true
} else {
disableAudio = true
}
}
if disableAudio {
var resetTimebase = false
if self.audioRenderer != nil {
self.audioRenderer?.renderer.stop()
self.audioRenderer = nil
resetTimebase = true
}
if self.loadedState.controlTimebase == nil {
resetTimebase = true
}
if resetTimebase {
var timebase: CMTimebase?
CMTimebaseCreateWithSourceClock(allocator: nil, sourceClock: CMClockGetHostTimeClock(), timebaseOut: &timebase)
let controlTimebase = ChunkMediaPlayerControlTimebase(timebase: timebase!, isAudio: false)
@ -611,6 +591,50 @@ private final class ChunkMediaPlayerContext {
self.loadedState.controlTimebase = controlTimebase
}
} else if hasAudio {
if self.audioRenderer == nil {
let queue = self.queue
let audioRendererContext = MediaPlayerAudioRenderer(
audioSession: .manager(self.audioSessionManager),
forAudioVideoMessage: self.isAudioVideoMessage,
playAndRecord: self.playAndRecord,
soundMuted: self.soundMuted,
ambient: self.ambient,
mixWithOthers: self.mixWithOthers,
forceAudioToSpeaker: self.forceAudioToSpeaker,
baseRate: self.baseRate,
audioLevelPipe: self.audioLevelPipe,
updatedRate: { [weak self] in
queue.async {
guard let self else {
return
}
self.tick()
}
},
audioPaused: { [weak self] in
queue.async {
guard let self else {
return
}
if self.enableSound {
if self.continuePlayingWithoutSoundOnLostAudioSession {
self.continuePlayingWithoutSound(seek: .start)
} else {
self.pause(lostAudioSession: true, faded: false)
}
} else {
self.seek(timestamp: 0.0, action: .play, notify: true)
}
}
}
)
self.audioRenderer = MediaPlayerAudioRendererContext(renderer: audioRendererContext)
self.loadedState.controlTimebase = ChunkMediaPlayerControlTimebase(timebase: audioRendererContext.audioTimebase, isAudio: true)
audioRendererContext.flushBuffers(at: CMTimeMakeWithSeconds(timestamp, preferredTimescale: 44000), completion: {})
audioRendererContext.start()
}
}
//print("Timestamp: \(timestamp)")
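The hunks above move audio-renderer creation out of init: the renderer (and its audio timebase) is now created lazily once a loaded part actually contains an audio buffer, and when audio is absent or sound is disabled the player falls back to a plain host-clock CMTimebase. A minimal, self-contained sketch of that fallback, using the same CoreMedia call as the diff (the helper name and the set-time/set-rate lines are illustrative, not from the commit):

import CoreMedia

// Builds the silent-playback clock the player switches to when a chunk has no
// audio track (or sound is disabled): a CMTimebase driven by the host clock.
func makeHostClockTimebase() -> CMTimebase? {
    var timebase: CMTimebase?
    CMTimebaseCreateWithSourceClock(allocator: nil,
                                    sourceClock: CMClockGetHostTimeClock(),
                                    timebaseOut: &timebase)
    guard let timebase = timebase else { return nil }
    // Illustrative: position the timebase at zero and run it at normal speed.
    let _ = CMTimebaseSetTime(timebase, time: .zero)
    let _ = CMTimebaseSetRate(timebase, rate: 1.0)
    return timebase
}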

View File

@ -187,7 +187,7 @@ class SecureIdDocumentGalleryController: ViewController, StandalonePresentableCo
}, controller: { [weak self] in
return self
})
self.displayNode = GalleryControllerNode(controllerInteraction: controllerInteraction)
self.displayNode = GalleryControllerNode(context: self.context, controllerInteraction: controllerInteraction)
self.displayNodeDidLoad()
self.galleryNode.statusBar = self.statusBar

View File

@ -633,7 +633,7 @@ public class AvatarGalleryController: ViewController, StandalonePresentableContr
}, controller: { [weak self] in
return self
})
self.displayNode = GalleryControllerNode(controllerInteraction: controllerInteraction)
self.displayNode = GalleryControllerNode(context: self.context, controllerInteraction: controllerInteraction)
self.displayNodeDidLoad()
self.galleryNode.pager.updateOnReplacement = true

View File

@ -472,8 +472,8 @@ open class TelegramBaseController: ViewController, KeyShortcutResponder {
let controller = UndoOverlayController(
presentationData: presentationData,
content: .universal(
animation: "anim_profileunmute",
scale: 0.075,
animation: "anim_set_notification",
scale: 0.06,
colors: [
"Middle.Group 1.Fill 1": UIColor.white,
"Top.Group 1.Fill 1": UIColor.white,

View File

@ -796,6 +796,8 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
return { [weak self] context, presentationData, dateTimeFormat, message, associatedData, attributes, media, mediaIndex, dateAndStatus, automaticDownload, peerType, peerId, sizeCalculation, layoutConstants, contentMode, presentationContext in
let _ = peerType
let useInlineHLS = "".isEmpty
var nativeSize: CGSize
let isSecretMedia = message.containsSecretMedia
@ -1270,7 +1272,9 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
var passFile = true
if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) {
file = minimizedQualityFile.file.media
if !useInlineHLS {
file = minimizedQualityFile.file.media
}
if hlsInlinePlaybackRange == nil {
passFile = false
}
@ -1395,7 +1399,9 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
var passFile = true
if NativeVideoContent.isHLSVideo(file: file), let minimizedQualityFile = HLSVideoContent.minimizedHLSQuality(file: .message(message: MessageReference(message), media: file)) {
file = minimizedQualityFile.file.media
if !useInlineHLS {
file = minimizedQualityFile.file.media
}
if hlsInlinePlaybackRange == nil {
passFile = false
}
@ -1777,27 +1783,38 @@ public final class ChatMessageInteractiveMediaNode: ASDisplayNode, GalleryItemTr
let loopVideo = updatedVideoFile.isAnimated
let videoContent: UniversalVideoContent
videoContent = NativeVideoContent(
id: .message(message.stableId, updatedVideoFile.fileId),
userLocation: .peer(message.id.peerId),
fileReference: .message(message: MessageReference(message), media: updatedVideoFile),
limitedFileRange: hlsInlinePlaybackRange,
streamVideo: streamVideo ? .conservative : .none,
loopVideo: loopVideo,
enableSound: false,
fetchAutomatically: false,
onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false),
autoFetchFullSizeThumbnail: true,
continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo,
placeholderColor: emptyColor,
captureProtected: message.isCopyProtected() || isExtendedMedia,
storeAfterDownload: { [weak context] in
guard let context, let peerId else {
return
if useInlineHLS && NativeVideoContent.isHLSVideo(file: updatedVideoFile) {
videoContent = HLSVideoContent(
id: .message(message.stableId, updatedVideoFile.fileId),
userLocation: .peer(message.id.peerId),
fileReference: .message(message: MessageReference(message), media: updatedVideoFile),
loopVideo: loopVideo,
enableSound: false,
fetchAutomatically: false
)
} else {
videoContent = NativeVideoContent(
id: .message(message.stableId, updatedVideoFile.fileId),
userLocation: .peer(message.id.peerId),
fileReference: .message(message: MessageReference(message), media: updatedVideoFile),
limitedFileRange: hlsInlinePlaybackRange,
streamVideo: streamVideo ? .conservative : .none,
loopVideo: loopVideo,
enableSound: false,
fetchAutomatically: false,
onlyFullSizeThumbnail: (onlyFullSizeVideoThumbnail ?? false),
autoFetchFullSizeThumbnail: true,
continuePlayingWithoutSoundOnLostAudioSession: isInlinePlayableVideo,
placeholderColor: emptyColor,
captureProtected: message.isCopyProtected() || isExtendedMedia,
storeAfterDownload: { [weak context] in
guard let context, let peerId else {
return
}
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
}
let _ = storeDownloadedMedia(storeManager: context.downloadedMediaStoreManager, media: .message(message: MessageReference(message), media: updatedVideoFile), peerId: peerId).startStandalone()
}
)
)
}
let videoNode = UniversalVideoNode(accountId: context.account.id, postbox: context.account.postbox, audioSession: mediaManager.audioSession, manager: mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
videoNode.isUserInteractionEnabled = false
videoNode.ownsContentNodeUpdated = { [weak self] owns in

View File

@ -8159,7 +8159,7 @@ final class PeerInfoScreenNode: ViewControllerTracingNode, PeerInfoScreenNodePro
canCreateStream = true
}
case let channel as TelegramChannel:
if channel.flags.contains(.isCreator) {
if channel.hasPermission(.manageCalls) {
canCreateStream = true
credentialsPromise = Promise()
credentialsPromise?.set(context.engine.calls.getGroupCallStreamCredentials(peerId: peerId, revokePreviousCredentials: false) |> `catch` { _ -> Signal<GroupCallStreamCredentials, NoError> in return .never() })

View File

@ -446,7 +446,7 @@ public class WallpaperGalleryController: ViewController {
}, controller: { [weak self] in
return self
})
self.displayNode = WallpaperGalleryControllerNode(controllerInteraction: controllerInteraction, pageGap: 0.0, disableTapNavigation: true)
self.displayNode = WallpaperGalleryControllerNode(context: self.context, controllerInteraction: controllerInteraction, pageGap: 0.0, disableTapNavigation: true)
self.displayNodeDidLoad()
(self.displayNode as? WallpaperGalleryControllerNode)?.nativeStatusBar = self.statusBar

View File

@ -42,7 +42,6 @@ import MediaEditor
import TelegramUIDeclareEncodables
import ContextMenuScreen
import MetalEngine
import TranslateUI
#if canImport(AppCenter)
import AppCenter
@ -363,13 +362,6 @@ private func extractAccountManagerState(records: AccountRecordsView<TelegramAcco
UIDevice.current.isBatteryMonitoringEnabled = true
}
#if DEBUG
if #available(iOS 18.0, *) {
let translationService = ExperimentalInternalTranslationServiceImpl(view: hostView.containerView)
engineExperimentalInternalTranslationService = translationService
}
#endif
let clearNotificationsManager = ClearNotificationsManager(getNotificationIds: { completion in
if #available(iOS 10.0, *) {
UNUserNotificationCenter.current().getDeliveredNotifications(completionHandler: { notifications in

View File

@ -126,6 +126,19 @@ import AdsInfoScreen
extension ChatControllerImpl {
func loadDisplayNodeImpl() {
if #available(iOS 18.0, *) {
if self.context.sharedContext.immediateExperimentalUISettings.enableLocalTranslation {
if engineExperimentalInternalTranslationService == nil, let hostView = self.context.sharedContext.mainWindow?.hostView {
let translationService = ExperimentalInternalTranslationServiceImpl(view: hostView.containerView)
engineExperimentalInternalTranslationService = translationService
}
} else {
if engineExperimentalInternalTranslationService != nil {
engineExperimentalInternalTranslationService = nil
}
}
}
self.displayNode = ChatControllerNode(context: self.context, chatLocation: self.chatLocation, chatLocationContextHolder: self.chatLocationContextHolder, subject: self.subject, controllerInteraction: self.controllerInteraction!, chatPresentationInterfaceState: self.presentationInterfaceState, automaticMediaDownloadSettings: self.automaticMediaDownloadSettings, navigationBar: self.navigationBar, statusBar: self.statusBar, backgroundNode: self.chatBackgroundNode, controller: self)
if let currentItem = self.tempVoicePlaylistCurrentItem {
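The block above creates the experimental on-device translation service lazily when the chat node loads and the enableLocalTranslation flag is on, and tears it down when the flag is off, replacing the unconditional DEBUG-only creation removed from the app delegate earlier in this diff. A generic sketch of that flag-gated pattern (all names here are hypothetical, not the real service):

final class ExperimentalService {}

var sharedExperimentalService: ExperimentalService?

// Create on demand while the feature flag is enabled; release as soon as it is not.
func syncExperimentalService(isEnabled: Bool) {
    if isEnabled {
        if sharedExperimentalService == nil {
            sharedExperimentalService = ExperimentalService()
        }
    } else if sharedExperimentalService != nil {
        sharedExperimentalService = nil
    }
}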

View File

@ -216,7 +216,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.allowWebViewInspection = try container.decodeIfPresent(Bool.self, forKey: "allowWebViewInspection") ?? false
self.disableReloginTokens = try container.decodeIfPresent(Bool.self, forKey: "disableReloginTokens") ?? false
self.liveStreamV2 = try container.decodeIfPresent(Bool.self, forKey: "liveStreamV2") ?? false
self.dynamicStreaming = try container.decodeIfPresent(Bool.self, forKey: "dynamicStreaming") ?? false
self.dynamicStreaming = try container.decodeIfPresent(Bool.self, forKey: "dynamicStreaming_v2") ?? false
self.enableLocalTranslation = try container.decodeIfPresent(Bool.self, forKey: "enableLocalTranslation") ?? false
}
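Only the storage key changes here ("dynamicStreaming" to "dynamicStreaming_v2"), not the Swift property, so any previously persisted value stops being read and the flag effectively resets to its default. A self-contained sketch of the same trick with a plain Codable type (the Settings struct is hypothetical):

import Foundation

struct Settings: Codable {
    var dynamicStreaming: Bool

    enum CodingKeys: String, CodingKey {
        // Bumping the key orphans any value stored under the old "dynamicStreaming" key.
        case dynamicStreaming = "dynamicStreaming_v2"
    }

    init() {
        self.dynamicStreaming = false
    }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        self.dynamicStreaming = try container.decodeIfPresent(Bool.self, forKey: .dynamicStreaming) ?? false
    }
}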

View File

@ -0,0 +1,38 @@
class ConsolePolyfill {
constructor() {
}
log(...messageArgs) {
var string = "";
for (const arg of messageArgs) {
string += arg;
}
_JsCorePolyfills.consoleLog(string);
}
error(...messageArgs) {
var string = "";
for (const arg of messageArgs) {
string += arg;
}
_JsCorePolyfills.consoleLog(string);
}
}
class PerformancePolyfill {
constructor() {
}
now() {
return _JsCorePolyfills.performanceNow();
}
}
console = new ConsolePolyfill();
performance = new PerformancePolyfill();
self = {
console: console,
performance: performance
};
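These polyfills assume a native object named _JsCorePolyfills has been injected into the JavaScriptCore context; the real export (JsCorePolyfills / JsCorePolyfillsExport) appears further down in this commit. A minimal, self-contained Swift sketch of that kind of JSExport bridge (protocol and class names here are illustrative):

import Foundation
import JavaScriptCore

@objc protocol PolyfillsExport: JSExport {
    func consoleLog(_ message: String)
    func performanceNow() -> Double
}

@objc final class Polyfills: NSObject, PolyfillsExport {
    func consoleLog(_ message: String) {
        print("[js] \(message)")
    }
    func performanceNow() -> Double {
        return CFAbsoluteTimeGetCurrent()
    }
}

let context: JSContext = JSContext()
// Expose the native object under the name the polyfill script expects.
context.setObject(Polyfills(), forKeyedSubscript: "_JsCorePolyfills" as NSString)
context.evaluateScript("_JsCorePolyfills.consoleLog('hello from JS')")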

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
/*! https://mths.be/base64 v1.0.0 by @mathias | MIT license */

View File

@ -1,6 +1,7 @@
#!/bin/sh
rm -rf ../HlsBundle
mkdir ../HlsBundle
mkdir -p ../HlsBundle
rm -rf ../HlsBundle/index
mkdir ../HlsBundle/index
npm run build-$1
cp ./dist/* ../HlsBundle/
cp ./dist/* ../HlsBundle/index/

View File

@ -9,6 +9,8 @@
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"base-64": "^1.0.0",
"event-target-polyfill": "^0.0.4",
"hls.js": "^1.5.15"
},
"devDependencies": {
@ -657,6 +659,11 @@
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
"dev": true
},
"node_modules/base-64": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/base-64/-/base-64-1.0.0.tgz",
"integrity": "sha512-kwDPIFCGx0NZHog36dj+tHiwP4QMzsZ3AgMViUBKI0+V5n4U0ufTCUMhnQ04diaRI8EX/QcPfql7zlhZ7j4zgg=="
},
"node_modules/batch": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz",
@ -1453,6 +1460,11 @@
"node": ">= 0.6"
}
},
"node_modules/event-target-polyfill": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/event-target-polyfill/-/event-target-polyfill-0.0.4.tgz",
"integrity": "sha512-Gs6RLjzlLRdT8X9ZipJdIZI/Y6/HhRLyq9RdDlCsnpxr/+Nn6bU2EFGuC94GjxqhM+Nmij2Vcq98yoHrU8uNFQ=="
},
"node_modules/eventemitter3": {
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",

View File

@ -25,6 +25,8 @@
"webpack-merge": "^6.0.1"
},
"dependencies": {
"base-64": "^1.0.0",
"event-target-polyfill": "^0.0.4",
"hls.js": "^1.5.15"
}
}

View File

@ -109,8 +109,15 @@ export class VideoElementStub extends EventTarget {
}
querySelectorAll(name) {
const fragment = document.createDocumentFragment();
return fragment.querySelectorAll('*');
if (global.isJsCore) {
return [];
} else {
const fragment = document.createDocumentFragment();
return fragment.querySelectorAll('*');
}
}
removeChild(child) {
}
updateBufferedFromMediaSource() {

View File

@ -1,8 +1,60 @@
import Hls from "hls.js";
import "event-target-polyfill";
import {decode, encode} from "base-64";
import { VideoElementStub } from "./VideoElementStub.js"
import { MediaSourceStub, SourceBufferStub } from "./MediaSourceStub.js"
import { XMLHttpRequestStub } from "./XMLHttpRequestStub.js"
global.isJsCore = false;
if (!global.btoa) {
global.btoa = encode;
}
if (!global.atob) {
global.atob = decode;
}
if (typeof window === 'undefined') {
global.isJsCore = true;
global.navigator = {
userAgent: "Telegram"
};
global.now = function() {
return _JsCorePolyfills.performanceNow();
};
global.window = {
};
global.URL = {
};
window.webkit = {
};
window.webkit.messageHandlers = {
};
window.webkit.messageHandlers.performAction = {
};
window.webkit.messageHandlers.performAction.postMessage = function(dict) {
_JsCorePolyfills.postMessage(dict);
};
global.self.location = {
href: "http://127.0.0.1"
};
global.self.setTimeout = global.setTimeout;
global.self.setInterval = global.setInterval;
global.self.clearTimeout = global.clearTimeout;
global.self.clearInterval = global.clearTimeout;
global.self.URL = global.URL;
global.self.Date = global.Date;
}
import Hls from "hls.js";
window.bridgeObjectMap = {};
window.bridgeCallbackMap = {};
@ -50,12 +102,25 @@ if (typeof window !== 'undefined') {
window.ManagedMediaSource = MediaSourceStub;
window.SourceBuffer = SourceBufferStub;
window.XMLHttpRequest = XMLHttpRequestStub;
URL.createObjectURL = function(ms) {
const url = "blob:mock-media-source:" + ms.internalId;
window.mediaSourceMap[url] = ms;
return url;
};
URL.revokeObjectURL = function(url) {
};
if (global.isJsCore) {
global.HTMLVideoElement = VideoElementStub;
global.self.MediaSource = window.MediaSource;
global.self.ManagedMediaSource = window.ManagedMediaSource;
global.self.SourceBuffer = window.SourceBuffer;
global.self.XMLHttpRequest = window.XMLHttpRequest;
global.self.HTMLVideoElement = VideoElementStub;
}
}
function postPlayerEvent(id, eventName, eventData) {
@ -139,6 +204,15 @@ export class HlsPlayerInstance {
}
}
playerSetCapAutoLevel(level) {
if (level >= 0) {
this.hls.autoLevelCapping = level;
} else {
this.hls.autoLevelCapping = -1;
this.hls.currentLevel = -1;
}
}
playerSeek(value) {
this.video.currentTime = value;
}
@ -236,3 +310,7 @@ window.hlsPlayer_destroyInstance = function(id) {
}
window.bridgeInvokeCallback = bridgeInvokeCallback;
if (global.isJsCore) {
window.onload();
}

View File

@ -3,7 +3,7 @@ const common = require('./webpack.common.js');
module.exports = merge(common, {
mode: 'development',
devtool: 'source-map',
devtool: 'inline-source-map',
devServer: {
static: './dist',
},

View File

@ -188,7 +188,7 @@ public final class HLSVideoContent: UniversalVideoContent {
}
public let id: AnyHashable
public let nativeId: PlatformVideoContentId
public let nativeId: NativeVideoContentId
let userLocation: MediaResourceUserLocation
public let fileReference: FileMediaReference
public let dimensions: CGSize
@ -199,7 +199,7 @@ public final class HLSVideoContent: UniversalVideoContent {
let baseRate: Double
let fetchAutomatically: Bool
public init(id: PlatformVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true) {
public init(id: NativeVideoContentId, userLocation: MediaResourceUserLocation, fileReference: FileMediaReference, streamVideo: Bool = false, loopVideo: Bool = false, enableSound: Bool = true, baseRate: Double = 1.0, fetchAutomatically: Bool = true) {
self.id = id
self.userLocation = userLocation
self.nativeId = id
@ -218,9 +218,9 @@ public final class HLSVideoContent: UniversalVideoContent {
}
public func isEqual(to other: UniversalVideoContent) -> Bool {
if let other = other as? HLSVideoContent {
if case let .message(_, stableId, _) = self.nativeId {
if case .message(_, stableId, _) = other.nativeId {
if let other = other as? NativeVideoContent {
if case let .message(stableId, _) = self.nativeId {
if case .message(stableId, _) = other.nativeId {
if self.fileReference.media.isInstantVideo {
return true
}

View File

@ -4,7 +4,6 @@ import SwiftSignalKit
import UniversalMediaPlayer
import Postbox
import TelegramCore
import WebKit
import AsyncDisplayKit
import AccountContext
import TelegramAudio
@ -327,21 +326,11 @@ final class HLSJSServerSource: SharedHLSServer.Source {
}
}
private class WeakScriptMessageHandler: NSObject, WKScriptMessageHandler {
private let f: (WKScriptMessage) -> ()
init(_ f: @escaping (WKScriptMessage) -> ()) {
self.f = f
super.init()
}
func userContentController(_ controller: WKUserContentController, didReceive scriptMessage: WKScriptMessage) {
self.f(scriptMessage)
}
protocol HLSJSContext: AnyObject {
func evaluateJavaScript(_ string: String)
}
private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
private final class SharedHLSVideoJSContext: NSObject {
private final class ContextReference {
weak var contentNode: HLSVideoJSNativeContentNode?
@ -367,17 +356,17 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
}
}
static let shared: SharedHLSVideoWebView = SharedHLSVideoWebView()
static let shared: SharedHLSVideoJSContext = SharedHLSVideoJSContext()
private var contextReferences: [Int: ContextReference] = [:]
var webView: WKWebView?
var jsContext: HLSJSContext?
var videoElements: [Int: VideoElement] = [:]
var mediaSources: [Int: MediaSource] = [:]
var sourceBuffers: [Int: SourceBuffer] = [:]
private var isWebViewReady: Bool = false
private var isJsContextReady: Bool = false
private var pendingInitializeInstanceIds: [(id: Int, urlPrefix: String)] = []
private var tempTasks: [Int: URLSessionTask] = [:]
@ -392,64 +381,24 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
self.emptyTimer?.invalidate()
}
private func createWebView() {
let config = WKWebViewConfiguration()
config.allowsInlineMediaPlayback = true
config.mediaTypesRequiringUserActionForPlayback = []
config.allowsPictureInPictureMediaPlayback = true
let userController = WKUserContentController()
var handleScriptMessage: ((WKScriptMessage) -> Void)?
userController.add(WeakScriptMessageHandler { message in
handleScriptMessage?(message)
}, name: "performAction")
let isDebug: Bool
#if DEBUG
isDebug = true
#else
isDebug = false
#endif
config.userContentController = userController
let webView = WKWebView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 100.0)), configuration: config)
self.webView = webView
webView.scrollView.isScrollEnabled = false
webView.allowsLinkPreview = false
webView.allowsBackForwardNavigationGestures = false
webView.accessibilityIgnoresInvertColors = true
webView.scrollView.contentInsetAdjustmentBehavior = .never
webView.alpha = 0.0
if #available(iOS 16.4, *) {
webView.isInspectable = isDebug
}
webView.navigationDelegate = self
handleScriptMessage = { [weak self] message in
private func createJsContext() {
let handleScriptMessage: ([String: Any]) -> Void = { [weak self] message in
Queue.mainQueue().async {
guard let self else {
return
}
guard let body = message.body as? [String: Any] else {
return
}
guard let eventName = body["event"] as? String else {
guard let eventName = message["event"] as? String else {
return
}
switch eventName {
case "windowOnLoad":
self.isWebViewReady = true
self.isJsContextReady = true
self.initializePendingInstances()
case "bridgeInvoke":
guard let eventData = body["data"] as? [String: Any] else {
guard let eventData = message["data"] as? [String: Any] else {
return
}
guard let bridgeId = eventData["bridgeId"] as? Int else {
@ -478,31 +427,31 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
}
let jsonResult = try! JSONSerialization.data(withJSONObject: result)
let jsonResultString = String(data: jsonResult, encoding: .utf8)!
self.webView?.evaluateJavaScript("bridgeInvokeCallback(\(callbackId), \(jsonResultString));", completionHandler: nil)
self.jsContext?.evaluateJavaScript("window.bridgeInvokeCallback(\(callbackId), \(jsonResultString));")
}
)
case "playerStatus":
guard let instanceId = body["instanceId"] as? Int else {
guard let instanceId = message["instanceId"] as? Int else {
return
}
guard let instance = self.contextReferences[instanceId]?.contentNode else {
self.contextReferences.removeValue(forKey: instanceId)
return
}
guard let eventData = body["data"] as? [String: Any] else {
guard let eventData = message["data"] as? [String: Any] else {
return
}
instance.onPlayerStatusUpdated(eventData: eventData)
case "playerCurrentTime":
guard let instanceId = body["instanceId"] as? Int else {
guard let instanceId = message["instanceId"] as? Int else {
return
}
guard let instance = self.contextReferences[instanceId]?.contentNode else {
self.contextReferences.removeValue(forKey: instanceId)
return
}
guard let eventData = body["data"] as? [String: Any] else {
guard let eventData = message["data"] as? [String: Any] else {
return
}
guard let value = eventData["value"] as? Double else {
@ -523,18 +472,20 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
}
}
self.isWebViewReady = false
self.isJsContextReady = false
let bundle = Bundle(for: SharedHLSVideoWebView.self)
let bundlePath = bundle.bundlePath + "/HlsBundle.bundle"
webView.loadFileURL(URL(fileURLWithPath: bundlePath + "/index.html"), allowingReadAccessTo: URL(fileURLWithPath: bundlePath))
/*#if DEBUG
self.jsContext = WebViewNativeJSContextImpl(handleScriptMessage: handleScriptMessage)
#else*/
self.jsContext = WebViewHLSJSContextImpl(handleScriptMessage: handleScriptMessage)
//#endif
}
private func disposeWebView() {
if let _ = self.webView {
self.webView = nil
private func disposeJsContext() {
if let _ = self.jsContext {
self.jsContext = nil
}
self.isWebViewReady = false
self.isJsContextReady = false
}
private func bridgeInvoke(
@ -551,7 +502,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
return
}
let videoElement = VideoElement(instanceId: instanceId)
SharedHLSVideoWebView.shared.videoElements[bridgeId] = videoElement
SharedHLSVideoJSContext.shared.videoElements[bridgeId] = videoElement
completion([:])
} else if (methodName == "setMediaSource") {
guard let instanceId = params["instanceId"] as? Int else {
@ -562,7 +513,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
assertionFailure()
return
}
guard let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == instanceId }) else {
guard let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.instanceId == instanceId }) else {
return
}
videoElement.mediaSourceId = mediaSourceId
@ -622,14 +573,14 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
} else if (className == "MediaSource") {
if (methodName == "constructor") {
let mediaSource = MediaSource()
SharedHLSVideoWebView.shared.mediaSources[bridgeId] = mediaSource
SharedHLSVideoJSContext.shared.mediaSources[bridgeId] = mediaSource
completion([:])
} else if (methodName == "setDuration") {
guard let duration = params["duration"] as? Double else {
assertionFailure()
return
}
guard let mediaSource = SharedHLSVideoWebView.shared.mediaSources[bridgeId] else {
guard let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[bridgeId] else {
assertionFailure()
return
}
@ -639,7 +590,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
durationUpdated = true
}
guard let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.mediaSourceId == bridgeId }) else {
guard let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.mediaSourceId == bridgeId }) else {
return
}
@ -654,13 +605,13 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
assertionFailure()
return
}
guard let mediaSource = SharedHLSVideoWebView.shared.mediaSources[bridgeId] else {
guard let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[bridgeId] else {
assertionFailure()
return
}
mediaSource.sourceBufferIds = ids
guard let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.mediaSourceId == bridgeId }) else {
guard let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.mediaSourceId == bridgeId }) else {
return
}
@ -679,7 +630,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
return
}
let sourceBuffer = SourceBuffer(mediaSourceId: mediaSourceId, mimeType: mimeType)
SharedHLSVideoWebView.shared.sourceBuffers[bridgeId] = sourceBuffer
SharedHLSVideoJSContext.shared.sourceBuffers[bridgeId] = sourceBuffer
completion([:])
} else if (methodName == "appendBuffer") {
@ -691,7 +642,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
assertionFailure()
return
}
guard let sourceBuffer = SharedHLSVideoWebView.shared.sourceBuffers[bridgeId] else {
guard let sourceBuffer = SharedHLSVideoJSContext.shared.sourceBuffers[bridgeId] else {
assertionFailure()
return
}
@ -703,7 +654,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
assertionFailure()
return
}
guard let sourceBuffer = SharedHLSVideoWebView.shared.sourceBuffers[bridgeId] else {
guard let sourceBuffer = SharedHLSVideoJSContext.shared.sourceBuffers[bridgeId] else {
assertionFailure()
return
}
@ -711,7 +662,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
completion(["ranges": serializeRanges(bufferedRanges)])
})
} else if methodName == "abort" {
guard let sourceBuffer = SharedHLSVideoWebView.shared.sourceBuffers[bridgeId] else {
guard let sourceBuffer = SharedHLSVideoJSContext.shared.sourceBuffers[bridgeId] else {
assertionFailure()
return
}
@ -766,51 +717,38 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
let filePath = String(requestPath[firstSlash.upperBound...])
if filePath == "master.m3u8" {
let _ = (source.masterPlaylistData()
|> deliverOn(.mainQueue())
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
self.sendResponseAndClose(id: id, data: result.data(using: .utf8)!, completion: completion)
|> take(1)).start(next: { result in
SharedHLSVideoJSContext.sendResponseAndClose(id: id, data: result.data(using: .utf8)!, completion: completion)
})
} else if filePath.hasPrefix("hls_level_") && filePath.hasSuffix(".m3u8") {
guard let levelIndex = Int(String(filePath[filePath.index(filePath.startIndex, offsetBy: "hls_level_".count) ..< filePath.index(filePath.endIndex, offsetBy: -".m3u8".count)])) else {
self.sendErrorAndClose(id: id, error: .notFound, completion: completion)
SharedHLSVideoJSContext.sendErrorAndClose(id: id, error: .notFound, completion: completion)
return
}
let _ = (source.playlistData(quality: levelIndex)
|> deliverOn(.mainQueue())
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
self.sendResponseAndClose(id: id, data: result.data(using: .utf8)!, completion: completion)
|> take(1)).start(next: { result in
SharedHLSVideoJSContext.sendResponseAndClose(id: id, data: result.data(using: .utf8)!, completion: completion)
})
} else if filePath.hasPrefix("partfile") && filePath.hasSuffix(".mp4") {
let fileId = String(filePath[filePath.index(filePath.startIndex, offsetBy: "partfile".count) ..< filePath.index(filePath.endIndex, offsetBy: -".mp4".count)])
guard let fileIdValue = Int64(fileId) else {
self.sendErrorAndClose(id: id, error: .notFound, completion: completion)
SharedHLSVideoJSContext.sendErrorAndClose(id: id, error: .notFound, completion: completion)
return
}
guard let requestRange else {
self.sendErrorAndClose(id: id, error: .badRequest, completion: completion)
SharedHLSVideoJSContext.sendErrorAndClose(id: id, error: .badRequest, completion: completion)
return
}
let _ = (source.fileData(id: fileIdValue, range: requestRange.lowerBound ..< requestRange.upperBound + 1)
|> deliverOn(.mainQueue())
//|> timeout(5.0, queue: self.queue, alternate: .single(nil))
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
|> take(1)).start(next: { result in
if let (tempFile, tempFileRange, totalSize) = result {
self.sendResponseFileAndClose(id: id, file: tempFile, fileRange: tempFileRange, range: requestRange, totalSize: totalSize, completion: completion)
SharedHLSVideoJSContext.sendResponseFileAndClose(id: id, file: tempFile, fileRange: tempFileRange, range: requestRange, totalSize: totalSize, completion: completion)
} else {
self.sendErrorAndClose(id: id, error: .internalServerError, completion: completion)
SharedHLSVideoJSContext.sendErrorAndClose(id: id, error: .internalServerError, completion: completion)
}
})
}
@ -822,58 +760,6 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
if (!handlerFound) {
completion(["error": 1])
}
/*var request = URLRequest(url: URL(string: url)!)
for (key, value) in requestHeaders {
request.setValue(value, forHTTPHeaderField: key)
}
let isCompleted = Atomic<Bool>(value: false)
let task = URLSession.shared.dataTask(with: request, completionHandler: { [weak self] data, response, error in
Queue.mainQueue().async {
guard let self else {
return
}
if isCompleted.swap(true) {
return
}
self.tempTasks.removeValue(forKey: id)
if let _ = error {
completion([
"error": 1
])
} else {
if let response = response as? HTTPURLResponse {
completion([
"status": response.statusCode,
"statusText": "OK",
"responseData": data?.base64EncodedString() ?? "",
"responseHeaders": response.allHeaderFields as? [String: String] ?? [:]
])
let _ = response
/*if let response = response as? HTTPURLResponse, let requestUrl {
if let updatedResponse = HTTPURLResponse(
url: requestUrl,
statusCode: response.statusCode,
httpVersion: "HTTP/1.1",
headerFields: response.allHeaderFields as? [String: String] ?? [:]
) {
sourceTask.didReceive(updatedResponse)
} else {
sourceTask.didReceive(response)
}
} else {
sourceTask.didReceive(response)
}*/
}
}
}
})
self.tempTasks[id] = task
task.resume()*/
} else if methodName == "abort" {
guard let id = params["id"] as? Int else {
assertionFailure()
@ -889,7 +775,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
}
}
private func sendErrorAndClose(id: Int, error: ResponseError, completion: @escaping ([String: Any]) -> Void) {
private static func sendErrorAndClose(id: Int, error: ResponseError, completion: @escaping ([String: Any]) -> Void) {
let (code, status) = error.httpStatus
completion([
"status": code,
@ -901,7 +787,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
])
}
private func sendResponseAndClose(id: Int, data: Data, contentType: String = "application/octet-stream", completion: @escaping ([String: Any]) -> Void) {
private static func sendResponseAndClose(id: Int, data: Data, contentType: String = "application/octet-stream", completion: @escaping ([String: Any]) -> Void) {
completion([
"status": 200,
"statusText": "OK",
@ -913,20 +799,22 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
])
}
private func sendResponseFileAndClose(id: Int, file: TempBoxFile, fileRange: Range<Int>, range: Range<Int>, totalSize: Int, completion: @escaping ([String: Any]) -> Void) {
if let data = try? Data(contentsOf: URL(fileURLWithPath: file.path), options: .mappedIfSafe).subdata(in: fileRange) {
completion([
"status": 200,
"statusText": "OK",
"responseData": data.base64EncodedString(),
"responseHeaders": [
"Content-Type": "application/octet-stream",
"Content-Range": "bytes \(range.lowerBound)-\(range.upperBound)/\(totalSize)",
"Content-Length": "\(fileRange.upperBound - fileRange.lowerBound)"
] as [String: String]
])
} else {
self.sendErrorAndClose(id: id, error: .internalServerError, completion: completion)
private static func sendResponseFileAndClose(id: Int, file: TempBoxFile, fileRange: Range<Int>, range: Range<Int>, totalSize: Int, completion: @escaping ([String: Any]) -> Void) {
Queue.concurrentDefaultQueue().async {
if let data = try? Data(contentsOf: URL(fileURLWithPath: file.path), options: .mappedIfSafe).subdata(in: fileRange) {
completion([
"status": 200,
"statusText": "OK",
"responseData": data.base64EncodedString(),
"responseHeaders": [
"Content-Type": "application/octet-stream",
"Content-Range": "bytes \(range.lowerBound)-\(range.upperBound)/\(totalSize)",
"Content-Length": "\(fileRange.upperBound - fileRange.lowerBound)"
] as [String: String]
])
} else {
SharedHLSVideoJSContext.sendErrorAndClose(id: id, error: .internalServerError, completion: completion)
}
}
}
@ -934,8 +822,8 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
let contextInstanceId = context.instanceId
self.contextReferences[contextInstanceId] = ContextReference(contentNode: context)
if self.webView == nil {
self.createWebView()
if self.jsContext == nil {
self.createJsContext()
}
if let emptyTimer = self.emptyTimer {
@ -960,7 +848,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
}
}
self.webView?.evaluateJavaScript("window.hlsPlayer_destroyInstance(\(contextInstanceId));")
self.jsContext?.evaluateJavaScript("window.hlsPlayer_destroyInstance(\(contextInstanceId));")
if self.contextReferences.isEmpty {
if self.emptyTimer == nil {
@ -972,7 +860,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
self.emptyTimer = nil
}
if self.contextReferences.isEmpty {
self.disposeWebView()
self.disposeJsContext()
}
})
}
@ -984,7 +872,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
func initializeWhenReady(context: HLSVideoJSNativeContentNode, urlPrefix: String) {
self.pendingInitializeInstanceIds.append((context.instanceId, urlPrefix))
if self.isWebViewReady {
if self.isJsContextReady {
self.initializePendingInstances()
}
}
@ -1020,7 +908,7 @@ private final class SharedHLSVideoWebView: NSObject, WKNavigationDelegate {
""")
}
self.webView?.evaluateJavaScript(userScriptJs)
self.jsContext?.evaluateJavaScript(userScriptJs)
}
}
@ -1056,6 +944,8 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private let fileReference: FileMediaReference
private let approximateDuration: Double
private let intrinsicDimensions: CGSize
private var enableSound: Bool
private let audioSessionManager: ManagedAudioSession
private let audioSessionDisposable = MetaDisposable()
@ -1150,6 +1040,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.audioSessionManager = audioSessionManager
self.userLocation = userLocation
self.requestedBaseRate = baseRate
self.enableSound = enableSound
if var dimensions = fileReference.media.dimensions {
if let thumbnail = fileReference.media.previewRepresentations.first {
@ -1186,7 +1077,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
audioSessionManager: audioSessionManager,
partsState: self.chunkPlayerPartsState.get(),
video: true,
enableSound: true,
enableSound: self.enableSound,
baseRate: baseRate,
onSeeked: {
onSeeked?()
@ -1198,7 +1089,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
super.init()
self.contextDisposable = SharedHLSVideoWebView.shared.register(context: self)
self.contextDisposable = SharedHLSVideoJSContext.shared.register(context: self)
self.playerNode.frame = CGRect(origin: CGPoint(), size: self.intrinsicDimensions)
@ -1255,12 +1146,12 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
guard let self else {
return
}
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerNotifySeekedOnNextStatusUpdate();", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerNotifySeekedOnNextStatusUpdate();")
}
}
if let playerSource {
SharedHLSVideoWebView.shared.initializeWhenReady(context: self, urlPrefix: "http://server/\(playerSource.id)/")
SharedHLSVideoJSContext.shared.initializeWhenReady(context: self, urlPrefix: "http://server/\(playerSource.id)/")
}
}
@ -1380,13 +1271,21 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
selectedLevelIndex = self.playerAvailableLevels.sorted(by: { $0.value.height > $1.value.height }).first?.key
}
if let selectedLevelIndex {
var effectiveSelectedLevelIndex = selectedLevelIndex
if !self.enableSound {
effectiveSelectedLevelIndex = self.resolveCurrentLevelIndex() ?? -1
}
self.hasRequestedPlayerLoad = true
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerLoad(\(selectedLevelIndex));", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("""
window.hlsPlayer_instances[\(self.instanceId)].playerSetCapAutoLevel(\(self.resolveCurrentLevelIndex() ?? -1));
window.hlsPlayer_instances[\(self.instanceId)].playerLoad(\(effectiveSelectedLevelIndex));
""")
}
}
}
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetBaseRate(\(self.requestedBaseRate));", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetBaseRate(\(self.requestedBaseRate));")
}
self.updateStatus()
@ -1415,13 +1314,13 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
fileprivate func onMediaSourceDurationUpdated() {
guard let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) else {
guard let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) else {
return
}
guard let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoWebView.shared.mediaSources[mediaSourceId] else {
guard let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[mediaSourceId] else {
return
}
guard let sourceBufferId = mediaSource.sourceBufferIds.first, let sourceBuffer = SharedHLSVideoWebView.shared.sourceBuffers[sourceBufferId] else {
guard let sourceBufferId = mediaSource.sourceBufferIds.first, let sourceBuffer = SharedHLSVideoJSContext.shared.sourceBuffers[sourceBufferId] else {
return
}
@ -1429,13 +1328,13 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
fileprivate func onMediaSourceBuffersUpdated() {
guard let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) else {
guard let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) else {
return
}
guard let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoWebView.shared.mediaSources[mediaSourceId] else {
guard let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[mediaSourceId] else {
return
}
guard let sourceBufferId = mediaSource.sourceBufferIds.first, let sourceBuffer = SharedHLSVideoWebView.shared.sourceBuffers[sourceBufferId] else {
guard let sourceBufferId = mediaSource.sourceBufferIds.first, let sourceBuffer = SharedHLSVideoJSContext.shared.sourceBuffers[sourceBufferId] else {
return
}
@ -1446,7 +1345,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
guard let self, let sourceBuffer else {
return
}
guard let mediaSource = SharedHLSVideoWebView.shared.mediaSources[sourceBuffer.mediaSourceId] else {
guard let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[sourceBuffer.mediaSourceId] else {
return
}
self.chunkPlayerPartsState.set(.single(ChunkMediaPlayerPartsState(duration: mediaSource.duration, parts: sourceBuffer.items)))
@ -1459,7 +1358,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
private func updatePlayerStatus(status: MediaPlayerStatus) {
self._status.set(status)
if let (bridgeId, _) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) {
if let (bridgeId, _) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) {
var isPlaying: Bool = false
var isBuffering = false
switch status.status {
@ -1480,25 +1379,25 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
let jsonResult = try! JSONSerialization.data(withJSONObject: result)
let jsonResultString = String(data: jsonResult, encoding: .utf8)!
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.bridgeObjectMap[\(bridgeId)].bridgeUpdateStatus(\(jsonResultString));", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("window.bridgeObjectMap[\(bridgeId)].bridgeUpdateStatus(\(jsonResultString));")
}
}
private func updateBuffered() {
guard let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) else {
guard let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) else {
return
}
guard let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoWebView.shared.mediaSources[mediaSourceId] else {
guard let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[mediaSourceId] else {
return
}
guard let sourceBufferId = mediaSource.sourceBufferIds.first, let sourceBuffer = SharedHLSVideoWebView.shared.sourceBuffers[sourceBufferId] else {
guard let sourceBufferId = mediaSource.sourceBufferIds.first, let sourceBuffer = SharedHLSVideoJSContext.shared.sourceBuffers[sourceBufferId] else {
return
}
let bufferedRanges = sourceBuffer.ranges
if let (_, videoElement) = SharedHLSVideoWebView.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) {
if let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoWebView.shared.mediaSources[mediaSourceId] {
if let (_, videoElement) = SharedHLSVideoJSContext.shared.videoElements.first(where: { $0.value.instanceId == self.instanceId }) {
if let mediaSourceId = videoElement.mediaSourceId, let mediaSource = SharedHLSVideoJSContext.shared.mediaSources[mediaSourceId] {
if let duration = mediaSource.duration {
var mappedRanges = RangeSet<Int64>()
for range in bufferedRanges.ranges {
@ -1538,7 +1437,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
func play() {
assert(Queue.mainQueue().isCurrent())
if !self.initializedStatus {
self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.requestedBaseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: true))
self._status.set(MediaPlayerStatus(generationTimestamp: 0.0, duration: Double(self.approximateDuration), dimensions: CGSize(), timestamp: 0.0, baseRate: self.requestedBaseRate, seekId: self.seekId, status: .buffering(initial: true, whilePlaying: true, progress: 0.0, display: true), soundEnabled: self.enableSound))
}
self.player.play()
}
@ -1555,10 +1454,14 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
func setSoundEnabled(_ value: Bool) {
assert(Queue.mainQueue().isCurrent())
if value {
self.player.playOnceWithSound(playAndRecord: false, seek: .none)
} else {
self.player.continuePlayingWithoutSound(seek: .none)
if self.enableSound != value {
self.enableSound = value
if value {
self.player.playOnceWithSound(playAndRecord: false, seek: .none)
} else {
self.player.continuePlayingWithoutSound(seek: .none)
}
self.updateInternalQualityLevel()
}
}
@ -1566,7 +1469,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
assert(Queue.mainQueue().isCurrent())
self.seekId += 1
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSeek(\(timestamp));", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSeek(\(timestamp));")
}
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd) {
@ -1576,6 +1479,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self?.performActionAtEnd()
}
}
self.enableSound = true
switch actionAtEnd {
case .loop:
self.player.actionAtEnd = .loop({})
@ -1604,6 +1508,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
}
self.player.playOnceWithSound(playAndRecord: playAndRecord, seek: seek)
self.updateInternalQualityLevel()
}
func setSoundMuted(soundMuted: Bool) {
@ -1626,6 +1531,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self?.performActionAtEnd()
}
}
self.enableSound = false
switch actionAtEnd {
case .loop:
self.player.actionAtEnd = .loop({})
@ -1635,6 +1541,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.player.actionAtEnd = .action(action)
}
self.player.continuePlayingWithoutSound(seek: .none)
self.updateInternalQualityLevel()
}
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
@ -1644,15 +1551,41 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
func setBaseRate(_ baseRate: Double) {
self.requestedBaseRate = baseRate
if self.playerIsReady {
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetBaseRate(\(self.requestedBaseRate));", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetBaseRate(\(self.requestedBaseRate));")
}
self.updateStatus()
}
private func resolveCurrentLevelIndex() -> Int? {
if self.enableSound {
return self.requestedLevelIndex
} else {
var foundIndex: Int?
if let minQualityFile = HLSVideoContent.minimizedHLSQuality(file: self.fileReference)?.file, let dimensions = minQualityFile.media.dimensions {
for (index, level) in self.playerAvailableLevels {
if level.width == Int(dimensions.width) && level.height == Int(dimensions.height) {
foundIndex = index
break
}
}
}
return foundIndex
}
}
private func updateInternalQualityLevel() {
if self.playerIsReady {
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("""
window.hlsPlayer_instances[\(self.instanceId)].playerSetCapAutoLevel(\(self.resolveCurrentLevelIndex() ?? -1));
""")
}
}
func setVideoQuality(_ videoQuality: UniversalVideoContentVideoQuality) {
self.preferredVideoQuality = videoQuality
switch videoQuality {
let resolvedVideoQuality = self.preferredVideoQuality
switch resolvedVideoQuality {
case .auto:
self.requestedLevelIndex = nil
case let .quality(quality):
@ -1666,7 +1599,9 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
self.updateVideoQualityState()
if self.playerIsReady {
SharedHLSVideoWebView.shared.webView?.evaluateJavaScript("window.hlsPlayer_instances[\(self.instanceId)].playerSetLevel(\(self.requestedLevelIndex ?? -1));", completionHandler: nil)
SharedHLSVideoJSContext.shared.jsContext?.evaluateJavaScript("""
window.hlsPlayer_instances[\(self.instanceId)].playerSetLevel(\(self.requestedLevelIndex ?? -1));
""")
}
}
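The net effect of this file's changes: SharedHLSVideoJSContext now talks to the hls.js player only through HLSJSContext.evaluateJavaScript(_:) plus a [String: Any] message callback, so the WKWebView-backed and JavaScriptCore-backed implementations are interchangeable. A stub conforming to that protocol, purely for illustration (EchoJSContext is not part of the commit):

final class EchoJSContext: HLSJSContext {
    private let handleScriptMessage: ([String: Any]) -> Void

    init(handleScriptMessage: @escaping ([String: Any]) -> Void) {
        self.handleScriptMessage = handleScriptMessage
    }

    func evaluateJavaScript(_ string: String) {
        // A real backend runs the script in its JS engine; this stub only echoes
        // it back through the same message channel the player status updates use.
        self.handleScriptMessage(["event": "evaluated", "data": ["script": string]])
    }
}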

View File

@ -0,0 +1,76 @@
import Foundation
import UIKit
@preconcurrency import WebKit
import SwiftSignalKit
private class WeakScriptMessageHandler: NSObject, WKScriptMessageHandler {
private let f: (WKScriptMessage) -> ()
init(_ f: @escaping (WKScriptMessage) -> ()) {
self.f = f
super.init()
}
func userContentController(_ controller: WKUserContentController, didReceive scriptMessage: WKScriptMessage) {
self.f(scriptMessage)
}
}
final class WebViewHLSJSContextImpl: HLSJSContext {
let webView: WKWebView
init(handleScriptMessage: @escaping ([String: Any]) -> Void) {
let config = WKWebViewConfiguration()
config.allowsInlineMediaPlayback = true
config.mediaTypesRequiringUserActionForPlayback = []
config.allowsPictureInPictureMediaPlayback = true
let userController = WKUserContentController()
var handleScriptMessageImpl: (([String: Any]) -> Void)?
userController.add(WeakScriptMessageHandler { message in
guard let body = message.body as? [String: Any] else {
return
}
handleScriptMessageImpl?(body)
}, name: "performAction")
let isDebug: Bool
#if DEBUG
isDebug = true
#else
isDebug = false
#endif
config.userContentController = userController
let webView = WKWebView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 100.0)), configuration: config)
self.webView = webView
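// The web view only hosts the player script and is never meant to be seen: keep it
// non-scrolling, non-interactive and fully transparent.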
webView.scrollView.isScrollEnabled = false
webView.allowsLinkPreview = false
webView.allowsBackForwardNavigationGestures = false
webView.accessibilityIgnoresInvertColors = true
webView.scrollView.contentInsetAdjustmentBehavior = .never
webView.alpha = 0.0
if #available(iOS 16.4, *) {
webView.isInspectable = isDebug
}
handleScriptMessageImpl = { message in
Queue.mainQueue().async {
handleScriptMessage(message)
}
}
let bundle = Bundle(for: WebViewHLSJSContextImpl.self)
let bundlePath = bundle.bundlePath + "/HlsBundle.bundle"
webView.loadFileURL(URL(fileURLWithPath: bundlePath + "/index.html"), allowingReadAccessTo: URL(fileURLWithPath: bundlePath))
}
func evaluateJavaScript(_ string: String) {
self.webView.evaluateJavaScript(string, completionHandler: nil)
}
}

View File

@ -0,0 +1,248 @@
import Foundation
import UIKit
import JavaScriptCore
import TelegramCore
import SwiftSignalKit
private var ObjCKey_ContextReference: Int?
@objc private protocol JsCorePolyfillsExport: JSExport {
func postMessage(_ object: JSValue)
func consoleLog(_ object: JSValue)
func consoleLog(_ object: JSValue, _ arg1: JSValue)
func consoleLog(_ object: JSValue, _ arg1: JSValue, _ arg2: JSValue)
func performanceNow() -> Double
}
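// Bridges the minimal browser API surface the player bundle relies on (postMessage,
// console.log and performance.now counterparts, presumably wired up by
// headless_prologue.js) into the headless JSContext; posted messages are delivered to
// the owner on the main queue.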
@objc private final class JsCorePolyfills: NSObject, JsCorePolyfillsExport {
private let queue: Queue
private let context: WebViewNativeJSContextImpl.Reference
init(queue: Queue, context: WebViewNativeJSContextImpl.Reference) {
self.queue = queue
self.context = context
super.init()
}
@objc func postMessage(_ object: JSValue) {
guard object.isObject else {
return
}
guard let message = object.toDictionary() as? [String: Any] else {
return
}
let context = self.context
self.queue.async {
guard let context = context.context else {
return
}
let handleScriptMessage = context.handleScriptMessage
Queue.mainQueue().async {
handleScriptMessage(message)
}
}
}
@objc func consoleLog(_ object: JSValue) {
#if DEBUG
print("\(object)")
#endif
}
@objc func consoleLog(_ object: JSValue, _ arg1: JSValue) {
#if DEBUG
print("\(object) \(arg1)")
#endif
}
@objc func consoleLog(_ object: JSValue, _ arg1: JSValue, _ arg2: JSValue) {
#if DEBUG
print("\(object) \(arg1) \(arg2)")
#endif
}
@objc func performanceNow() -> Double {
return CFAbsoluteTimeGetCurrent()
}
}
@objc private protocol TimerJSExport: JSExport {
func setTimeout(_ callback: JSValue, _ ms: Double) -> Int32
func setInterval(_ callback: JSValue, _ ms: Double) -> Int32
func clearTimeout(_ id: Int32)
}
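// Implements the JS setTimeout/setInterval/clearTimeout polyfills on top of
// SwiftSignalKit timers that fire on the JS context's queue.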
@objc private class TimeoutPolyfill: NSObject, TimerJSExport {
private let queue: Queue
private var timers: [Int32: SwiftSignalKit.Timer] = [:]
private var nextId: Int32 = 0
init(queue: Queue) {
self.queue = queue
}
deinit {
for (_, timer) in self.timers {
timer.invalidate()
}
}
func register(jsContext: JSContext) {
jsContext.evaluateScript("""
function setTimeout(...args) {
if (args.length === 0) {
return -1;
}
const [callback, delay = 0, ...callbackArgs] = args;
return _timeoutPolyfill.setTimeout(() => {
callback(...callbackArgs);
}, delay);
}
function setInterval(...args) {
if (args.length === 0) {
return -1;
}
const [callback, delay = 0, ...callbackArgs] = args;
return _timeoutPolyfill.setInterval(() => {
callback(...callbackArgs);
}, delay);
}
function clearTimeout(identifier) {
_timeoutPolyfill.clearTimeout(identifier)
}
function clearInterval(identifier) {
_timeoutPolyfill.clearTimeout(identifier)
}
"""
)
}
func clearTimeout(_ id: Int32) {
let timer = self.timers.removeValue(forKey: id)
timer?.invalidate()
}
func setTimeout(_ callback: JSValue, _ ms: Double) -> Int32 {
return self.createTimer(callback: callback, ms: ms, repeats: false)
}
func setInterval(_ callback: JSValue, _ ms: Double) -> Int32 {
return self.createTimer(callback: callback, ms: ms, repeats: true)
}
func createTimer(callback: JSValue, ms: Double, repeats: Bool) -> Int32 {
let timeInterval = ms / 1000.0
let id = self.nextId
self.nextId += 1
let timer = SwiftSignalKit.Timer(timeout: timeInterval, repeat: repeats, completion: { [weak self] in
guard let self else {
return
}
callback.call(withArguments: nil)
if !repeats {
self.timers.removeValue(forKey: id)
}
}, queue: self.queue)
self.timers[id] = timer
timer.start()
return id
}
}
final class WebViewNativeJSContextImpl: HLSJSContext {
fileprivate final class Reference {
weak var context: WebViewNativeJSContextImpl.Impl?
init(context: WebViewNativeJSContextImpl.Impl) {
self.context = context
}
}
fileprivate final class Impl {
let queue: Queue
let context: JSContext
let handleScriptMessage: ([String: Any]) -> Void
init(queue: Queue, handleScriptMessage: @escaping ([String: Any]) -> Void) {
self.queue = queue
self.context = JSContext()
self.handleScriptMessage = handleScriptMessage
#if DEBUG
if #available(iOS 16.4, *) {
self.context.isInspectable = true
}
#endif
self.context.exceptionHandler = { context, exception in
if let exception {
Logger.shared.log("WebViewNativeJSContextImpl", "JS exception: \(exception)")
#if DEBUG
print("JS exception: \(exception)")
#endif
}
}
let timeoutPolyfill = TimeoutPolyfill(queue: self.queue)
self.context.setObject(timeoutPolyfill, forKeyedSubscript: "_timeoutPolyfill" as (NSCopying & NSObjectProtocol))
timeoutPolyfill.register(jsContext: self.context)
self.context.setObject(JsCorePolyfills(queue: self.queue, context: Reference(context: self)), forKeyedSubscript: "_JsCorePolyfills" as (NSCopying & NSObjectProtocol))
let bundle = Bundle(for: WebViewHLSJSContextImpl.self)
let bundlePath = bundle.bundlePath + "/HlsBundle.bundle"
if let indexJsString = try? String(contentsOf: URL(fileURLWithPath: bundlePath + "/headless_prologue.js"), encoding: .utf8) {
self.context.evaluateScript(indexJsString, withSourceURL: URL(fileURLWithPath: "index/index.bundle.js"))
} else {
assertionFailure()
}
if let indexJsString = try? String(contentsOf: URL(fileURLWithPath: bundlePath + "/index.bundle.js"), encoding: .utf8) {
self.context.evaluateScript(indexJsString, withSourceURL: URL(fileURLWithPath: "index.bundle.js"))
} else {
assertionFailure()
}
}
deinit {
print("WebViewNativeJSContextImpl.deinit")
}
func evaluateJavaScript(_ string: String) {
self.context.evaluateScript(string)
}
}
static let sharedQueue = Queue(name: "WebViewNativeJSContextImpl", qos: .default)
private let queue: Queue
private let impl: QueueLocalObject<Impl>
init(handleScriptMessage: @escaping ([String: Any]) -> Void) {
let queue = WebViewNativeJSContextImpl.sharedQueue
self.queue = queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, handleScriptMessage: handleScriptMessage)
})
}
func evaluateJavaScript(_ string: String) {
self.impl.with { impl in
impl.evaluateJavaScript(string)
}
}
}
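A minimal usage sketch, not taken from this diff: the headless context is constructed with a message handler and then driven with the same calls the player node issues; the instance index below is an assumption, since real indices come from each content node's instanceId.

let headlessContext = WebViewNativeJSContextImpl(handleScriptMessage: { message in
    // Delivered on the main queue via JsCorePolyfills.postMessage; the message schema is
    // defined by the bundled JS player.
    print("player message: \(message)")
})
headlessContext.evaluateJavaScript("window.hlsPlayer_instances[0].playerSetBaseRate(1.25);")

All JSContext work is confined to the shared WebViewNativeJSContextImpl queue, so evaluateJavaScript is safe to call from the main thread; replies hop back to the main queue before reaching the handler.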

View File

@ -230,7 +230,7 @@ class WebSearchGalleryController: ViewController {
}, controller: { [weak self] in
return self
})
self.displayNode = GalleryControllerNode(controllerInteraction: controllerInteraction)
self.displayNode = GalleryControllerNode(context: self.context, controllerInteraction: controllerInteraction)
self.displayNodeDidLoad()
self.galleryNode.statusBar = self.statusBar