Video experiments

Isaac 2024-10-08 20:55:22 +04:00
parent 939cff61c7
commit 8f03bc89bf
11 changed files with 30839 additions and 6 deletions

View File

@@ -44,6 +44,7 @@ extern int FFMpegCodecIdVP9;
 - (bool)isAttachedPicAtStreamIndex:(int32_t)streamIndex;
 - (int)codecIdAtStreamIndex:(int32_t)streamIndex;
 - (double)duration;
+- (int64_t)startTimeAtStreamIndex:(int32_t)streamIndex;
 - (int64_t)durationAtStreamIndex:(int32_t)streamIndex;
 - (bool)codecParamsAtStreamIndex:(int32_t)streamIndex toContext:(FFMpegAVCodecContext *)context;
 - (FFMpegFpsAndTimebase)fpsAndTimebaseForStreamIndex:(int32_t)streamIndex defaultTimeBase:(CMTime)defaultTimeBase;

View File

@@ -103,6 +103,10 @@ int FFMpegCodecIdVP9 = AV_CODEC_ID_VP9;
     return (double)_impl->duration / AV_TIME_BASE;
 }
 
+- (int64_t)startTimeAtStreamIndex:(int32_t)streamIndex {
+    return _impl->streams[streamIndex]->start_time;
+}
+
 - (int64_t)durationAtStreamIndex:(int32_t)streamIndex {
     return _impl->streams[streamIndex]->duration;
 }

View File

@@ -38,15 +38,17 @@ private final class SoftwareVideoStream {
     let index: Int
     let fps: CMTime
     let timebase: CMTime
+    let startTime: CMTime
     let duration: CMTime
     let decoder: FFMpegMediaVideoFrameDecoder
     let rotationAngle: Double
     let aspect: Double
 
-    init(index: Int, fps: CMTime, timebase: CMTime, duration: CMTime, decoder: FFMpegMediaVideoFrameDecoder, rotationAngle: Double, aspect: Double) {
+    init(index: Int, fps: CMTime, timebase: CMTime, startTime: CMTime, duration: CMTime, decoder: FFMpegMediaVideoFrameDecoder, rotationAngle: Double, aspect: Double) {
         self.index = index
         self.fps = fps
         self.timebase = timebase
+        self.startTime = startTime
         self.duration = duration
         self.decoder = decoder
         self.rotationAngle = rotationAngle
@@ -126,6 +128,13 @@ public final class SoftwareVideoSource {
         let fpsAndTimebase = avFormatContext.fpsAndTimebase(forStreamIndex: streamIndex, defaultTimeBase: CMTimeMake(value: 1, timescale: 40000))
         let (fps, timebase) = (fpsAndTimebase.fps, fpsAndTimebase.timebase)
 
+        let startTime: CMTime
+        let rawStartTime = avFormatContext.startTime(atStreamIndex: streamIndex)
+        if rawStartTime == Int64(bitPattern: 0x8000000000000000 as UInt64) {
+            startTime = CMTime(value: 0, timescale: timebase.timescale)
+        } else {
+            startTime = CMTimeMake(value: rawStartTime, timescale: timebase.timescale)
+        }
         let duration = CMTimeMake(value: avFormatContext.duration(atStreamIndex: streamIndex), timescale: timebase.timescale)
 
         let metrics = avFormatContext.metricsForStream(at: streamIndex)
@@ -137,7 +146,7 @@ public final class SoftwareVideoSource {
         let codecContext = FFMpegAVCodecContext(codec: codec)
         if avFormatContext.codecParams(atStreamIndex: streamIndex, to: codecContext) {
             if codecContext.open() {
-                videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
+                videoStream = SoftwareVideoStream(index: Int(streamIndex), fps: fps, timebase: timebase, startTime: startTime, duration: duration, decoder: FFMpegMediaVideoFrameDecoder(codecContext: codecContext), rotationAngle: rotationAngle, aspect: aspect)
                 break
             }
         }
@@ -222,6 +231,13 @@ public final class SoftwareVideoSource {
         }
     }
 
+    public func readTrackInfo() -> (offset: CMTime, duration: CMTime)? {
+        guard let videoStream = self.videoStream else {
+            return nil
+        }
+        return (videoStream.startTime, CMTimeMaximum(CMTime(value: 0, timescale: videoStream.duration.timescale), CMTimeSubtract(videoStream.duration, videoStream.startTime)))
+    }
+
     public func readFrame(maxPts: CMTime?) -> (MediaTrackFrame?, CGFloat, CGFloat, Bool) {
         guard let videoStream = self.videoStream, let avFormatContext = self.avFormatContext else {
             return (nil, 0.0, 1.0, false)

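Note on the hunks above (not part of the commit): avFormatContext.startTime(atStreamIndex:) surfaces FFmpeg's stream start_time, and 0x8000000000000000 is the bit pattern of AV_NOPTS_VALUE, i.e. the stream declares no start time. A minimal Swift sketch of the same normalization that SoftwareVideoStream and readTrackInfo perform, with illustrative names and values:

import CoreMedia

// Illustrative helper, not from the commit: mirrors the start-time/duration handling above.
func normalizedTrackInfo(rawStartTime: Int64, rawDuration: Int64, timescale: CMTimeScale) -> (offset: CMTime, duration: CMTime) {
    // AV_NOPTS_VALUE: FFmpeg's "no timestamp" marker, compared via its bit pattern as in the diff.
    let noPtsValue = Int64(bitPattern: 0x8000000000000000 as UInt64)
    let startTime = rawStartTime == noPtsValue
        ? CMTime(value: 0, timescale: timescale)
        : CMTimeMake(value: rawStartTime, timescale: timescale)
    let duration = CMTimeMake(value: rawDuration, timescale: timescale)
    // Clamp at zero so the reported duration can never go negative.
    let playable = CMTimeMaximum(CMTime(value: 0, timescale: timescale), CMTimeSubtract(duration, startTime))
    return (startTime, playable)
}

// Example: a stream that starts 0.5 s into a 10 s timeline at a 40000 Hz timebase.
let info = normalizedTrackInfo(rawStartTime: 20000, rawDuration: 400000, timescale: 40000)
// info.offset   == CMTime(value: 20000, timescale: 40000)  -> 0.5 s
// info.duration == CMTime(value: 380000, timescale: 40000) -> 9.5 s

Taking the maximum against a zero-valued CMTime is what keeps readTrackInfo from reporting a negative length when a stream's start time sits at or past its declared duration.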
View File

@@ -7098,10 +7098,7 @@ final class VoiceChatContextReferenceContentSource: ContextReferenceContentSourc
 }
 
 public func shouldUseV2VideoChatImpl(context: AccountContext) -> Bool {
-    var useV2 = false
-    if let data = context.currentAppConfiguration.with({ $0 }).data, let _ = data["ios_killswitch_enable_videochatui_v2"] {
-        useV2 = true
-    }
+    var useV2 = true
     if context.sharedContext.immediateExperimentalUISettings.disableCallV2 {
         useV2 = false
     }

View File

@ -1,4 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load(
"@build_bazel_rules_apple//apple:resources.bzl",
"apple_resource_bundle",
"apple_resource_group",
)
load("//build-system/bazel-utils:plist_fragment.bzl",
"plist_fragment",
)
filegroup(
name = "HlsBundleContents",
srcs = glob([
"HlsBundle/**",
]),
visibility = ["//visibility:public"],
)
plist_fragment(
name = "HlsBundleInfoPlist",
extension = "plist",
template =
"""
<key>CFBundleIdentifier</key>
<string>org.telegram.TelegramUniversalVideoContent</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleName</key>
<string>TelegramUniversalVideoContent</string>
"""
)
apple_resource_bundle(
name = "HlsBundle",
infoplists = [
":HlsBundleInfoPlist",
],
resources = [
":HlsBundleContents",
],
)
swift_library(
name = "TelegramUniversalVideoContent",
@ -9,6 +49,9 @@ swift_library(
copts = [
"-warnings-as-errors",
],
data = [
":HlsBundle",
],
deps = [
"//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display",

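The filegroup, apple_resource_bundle, and data wiring above packages everything under HlsBundle/ into an HlsBundle resource bundle that ships with TelegramUniversalVideoContent. A minimal sketch, not part of the commit (the locator class and the index.html page name are assumptions), of how code in this module could resolve those resources at runtime:

import Foundation

// Hypothetical locator, not from the commit: resolve the packaged HlsBundle resources.
private final class HlsBundleLocator {
    static func playerPageURL() -> URL? {
        // Look inside this module's own bundle, where the apple_resource_bundle is embedded.
        let hostBundle = Bundle(for: HlsBundleLocator.self)
        guard let bundleURL = hostBundle.url(forResource: "HlsBundle", withExtension: "bundle"),
              let resourceBundle = Bundle(url: bundleURL) else {
            return nil
        }
        // "index.html" is an assumed entry-page name; see the dev page later in this diff.
        return resourceBundle.url(forResource: "index", withExtension: "html")
    }
}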
File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,9 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<title>Development</title>
+<meta name="viewport" content="width=device-width, initial-scale=1"></head>
+<body>
+<script src="runtime.bundle.js"></script><script src="index.bundle.js"></script><script src="print.bundle.js"></script></body>
+</html>

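The page above is the webpack development harness bundled into HlsBundle alongside runtime.bundle.js, index.bundle.js, and print.bundle.js. As a rough sketch of how such a local page can be hosted (WKWebView is an assumption here; the commit's HLSVideoJSNativeContentNode may drive the scripts through a different mechanism):

import WebKit

// Sketch only: load the bundled page and grant read access to its directory so the
// sibling *.bundle.js files referenced by the <script> tags can also be fetched.
func makePlayerWebView(pageURL: URL) -> WKWebView {
    let configuration = WKWebViewConfiguration()
    configuration.allowsInlineMediaPlayback = true // keep playback inside the view
    let webView = WKWebView(frame: .zero, configuration: configuration)
    webView.loadFileURL(pageURL, allowingReadAccessTo: pageURL.deletingLastPathComponent())
    return webView
}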
View File

@@ -0,0 +1,27 @@
"use strict";
(self["webpackChunkmy3d"] = self["webpackChunkmy3d"] || []).push([["print"],{
/***/ "./src/print.js":
/*!**********************!*\
!*** ./src/print.js ***!
\**********************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "default": () => (/* binding */ printMe)
/* harmony export */ });
function printMe() {
console.log('I get called from print.js1234!');
}
/***/ })
},
/******/ __webpack_require__ => { // webpackRuntimeModules
/******/ var __webpack_exec__ = (moduleId) => (__webpack_require__(__webpack_require__.s = moduleId))
/******/ var __webpack_exports__ = (__webpack_exec__("./src/print.js"));
/******/ }
]);
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoicHJpbnQuYnVuZGxlLmpzIiwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7QUFBZTtBQUNmO0FBQ0EiLCJzb3VyY2VzIjpbIndlYnBhY2s6Ly9teTNkLy4vc3JjL3ByaW50LmpzIl0sInNvdXJjZXNDb250ZW50IjpbImV4cG9ydCBkZWZhdWx0IGZ1bmN0aW9uIHByaW50TWUoKSB7XG4gIGNvbnNvbGUubG9nKCdJIGdldCBjYWxsZWQgZnJvbSBwcmludC5qczEyMzQhJyk7XG59XG4iXSwibmFtZXMiOltdLCJzb3VyY2VSb290IjoiIn0=

File diff suppressed because one or more lines are too long

View File

@@ -215,7 +215,11 @@ public final class HLSVideoContent: UniversalVideoContent {
     public func makeContentNode(accountId: AccountRecordId, postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode {
         if #available(iOS 17.1, *) {
+            #if DEBUG
+            return HLSVideoJSNativeContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
+            #else
             return HLSVideoJSContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
+            #endif
         } else {
             return HLSVideoAVContentNode(accountId: accountId, postbox: postbox, audioSessionManager: audioSession, userLocation: self.userLocation, fileReference: self.fileReference, streamVideo: self.streamVideo, loopVideo: self.loopVideo, enableSound: self.enableSound, baseRate: self.baseRate, fetchAutomatically: self.fetchAutomatically)
         }