Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Commit 9fcc3248fa
@@ -1 +1,2 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
spm-files

.gitignore (vendored)
@@ -73,3 +73,4 @@ buildServer.json
.build/**
Telegram.LSP.json
**/.build/**
spm-files

.vscode/settings.json (vendored)
@@ -11,5 +11,8 @@
    },
    "search.exclude": {
        ".git/**": true
    },
    "files.associations": {
        "memory": "cpp"
    }
}

@@ -55,6 +55,10 @@ load("@build_bazel_rules_apple//apple:resources.bzl",
    "swift_intent_library",
)

load("//build-system/bazel-utils:spm.bzl",
    "generate_spm",
)

config_setting(
    name = "debug",
    values = {

@@ -952,29 +956,6 @@ plist_fragment(
    )
)

ios_framework(
    name = "TelegramApiFramework",
    bundle_id = "{telegram_bundle_id}.TelegramApi".format(
        telegram_bundle_id = telegram_bundle_id,
    ),
    families = [
        "iphone",
        "ipad",
    ],
    infoplists = [
        ":TelegramApiInfoPlist",
        ":BuildNumberInfoPlist",
        ":VersionInfoPlist",
        ":RequiredDeviceCapabilitiesPlist",
    ],
    minimum_os_version = minimum_os_version,
    extension_safe = True,
    ipa_post_processor = strip_framework,
    deps = [
        "//submodules/TelegramApi:TelegramApi",
    ],
)

plist_fragment(
    name = "TelegramCoreInfoPlist",
    extension = "plist",
@@ -2022,7 +2003,45 @@ xcodeproj(
    default_xcode_configuration = "Debug"
)

# Temporary targets used to simplify webrtc build tests
# Temporary targets used to simplify build tests

ios_application(
    name = "spm_build_app",
    bundle_id = "{telegram_bundle_id}".format(
        telegram_bundle_id = telegram_bundle_id,
    ),
    families = ["iphone", "ipad"],
    minimum_os_version = minimum_os_version,
    provisioning_profile = select({
        ":disableProvisioningProfilesSetting": None,
        "//conditions:default": "@build_configuration//provisioning:Telegram.mobileprovision",
    }),
    entitlements = ":TelegramEntitlements.entitlements",
    infoplists = [
        ":TelegramInfoPlist",
        ":BuildNumberInfoPlist",
        ":VersionInfoPlist",
        ":RequiredDeviceCapabilitiesPlist",
        ":UrlTypesInfoPlist",
    ],
    deps = [
        #"//submodules/MtProtoKit",
        #"//submodules/SSignalKit/SwiftSignalKit",
        #"//submodules/Postbox",
        #"//submodules/TelegramApi",
        #"//submodules/TelegramCore",
        #"//submodules/FFMpegBinding",
        "//submodules/Display",
        #"//third-party/webrtc",
    ],
)

generate_spm(
    name = "spm_build_root",
    deps = [
        ":spm_build_app",
    ]
)

ios_application(
    name = "webrtc_build_test",
@@ -2044,7 +2063,7 @@ ios_application(
        ":UrlTypesInfoPlist",
    ],
    deps = [
        "//third-party/webrtc:webrtc_lib",
        "//third-party/webrtc:webrtc",
    ],
)

@@ -336,7 +336,7 @@ private final class EmbeddedBroadcastUploadImpl: BroadcastUploadImpl {
    let logsPath = rootPath + "/logs/broadcast-logs"
    let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)

    let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type"
    let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type-v2"

    var useIPCContext = false
    if let typeData = try? Data(contentsOf: URL(fileURLWithPath: embeddedBroadcastImplementationTypePath)), let type = String(data: typeData, encoding: .utf8) {

@@ -35,7 +35,8 @@ public final class ViewController: UIViewController {
    isRemoteAudioMuted: false,
    localVideo: nil,
    remoteVideo: nil,
    isRemoteBatteryLow: false
    isRemoteBatteryLow: false,
    enableVideoSharpening: false
)

private var currentLayout: (size: CGSize, insets: UIEdgeInsets)?

@@ -393,6 +393,58 @@ class BazelCommandLine:
        print(subprocess.list2cmdline(combined_arguments))
        call_executable(combined_arguments)

    def get_spm_aspect_invocation(self):
        combined_arguments = [
            self.build_environment.bazel_path
        ]
        combined_arguments += self.get_startup_bazel_arguments()
        combined_arguments += ['build']

        if self.custom_target is not None:
            combined_arguments += [self.custom_target]
        else:
            combined_arguments += ['Telegram/Telegram']

        if self.continue_on_error:
            combined_arguments += ['--keep_going']
        if self.show_actions:
            combined_arguments += ['--subcommands']

        if self.enable_sandbox:
            combined_arguments += ['--spawn_strategy=sandboxed']

        if self.disable_provisioning_profiles:
            combined_arguments += ['--//Telegram:disableProvisioningProfiles']

        if self.configuration_path is None:
            raise Exception('configuration_path is not defined')

        combined_arguments += [
            '--override_repository=build_configuration={}'.format(self.configuration_path)
        ]

        combined_arguments += self.common_args
        combined_arguments += self.common_build_args
        combined_arguments += self.get_define_arguments()
        combined_arguments += self.get_additional_build_arguments()

        if self.remote_cache is not None:
            combined_arguments += [
                '--remote_cache={}'.format(self.remote_cache),
                '--experimental_remote_downloader={}'.format(self.remote_cache)
            ]
        elif self.cache_dir is not None:
            combined_arguments += [
                '--disk_cache={path}'.format(path=self.cache_dir)
            ]

        combined_arguments += self.configuration_args

        combined_arguments += ['--aspects', '//build-system/bazel-utils:spm.bzl%spm_text_aspect']

        print(subprocess.list2cmdline(combined_arguments))
        call_executable(combined_arguments)

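For orientation only, a rough Python sketch (not part of the diff) of the kind of command line the method above ends up assembling; the bazel path, target, and configuration path are placeholders, and the class's startup/common/configuration arguments are omitted:

# Illustrative sketch: approximate shape of the invocation built by
# get_spm_aspect_invocation() above. Paths and the target are placeholders.
example_invocation = [
    "bazel",  # self.build_environment.bazel_path
    "build",
    "Telegram/Telegram",  # default when no --target is passed
    "--override_repository=build_configuration=/path/to/build-configuration",
    "--aspects", "//build-system/bazel-utils:spm.bzl%spm_text_aspect",
]
print(" ".join(example_invocation))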
def clean(bazel, arguments):
    bazel_command_line = BazelCommandLine(
@@ -696,6 +748,36 @@ def query(bazel, arguments):
    bazel_command_line.invoke_query(query_args)


def get_spm_aspect_invocation(bazel, arguments):
    bazel_command_line = BazelCommandLine(
        bazel=bazel,
        override_bazel_version=arguments.overrideBazelVersion,
        override_xcode_version=arguments.overrideXcodeVersion,
        bazel_user_root=arguments.bazelUserRoot
    )

    if arguments.cacheDir is not None:
        bazel_command_line.add_cache_dir(arguments.cacheDir)
    elif arguments.cacheHost is not None:
        bazel_command_line.add_remote_cache(arguments.cacheHost)

    resolve_configuration(
        base_path=os.getcwd(),
        bazel_command_line=bazel_command_line,
        arguments=arguments,
        additional_codesigning_output_path=None
    )

    bazel_command_line.set_configuration(arguments.configuration)
    bazel_command_line.set_build_number(arguments.buildNumber)
    bazel_command_line.set_custom_target(arguments.target)
    bazel_command_line.set_continue_on_error(False)
    bazel_command_line.set_show_actions(False)
    bazel_command_line.set_enable_sandbox(False)
    bazel_command_line.set_split_swiftmodules(False)

    bazel_command_line.get_spm_aspect_invocation()


def add_codesigning_common_arguments(current_parser: argparse.ArgumentParser):
    configuration_group = current_parser.add_mutually_exclusive_group(required=True)
    configuration_group.add_argument(
@@ -1121,6 +1203,38 @@ if __name__ == '__main__':
        metavar='query_string'
    )

    spm_parser = subparsers.add_parser('spm', help='Generate SPM package')
    spm_parser.add_argument(
        '--target',
        type=str,
        help='A custom bazel target name to build.',
        metavar='target_name'
    )
    spm_parser.add_argument(
        '--buildNumber',
        required=False,
        type=int,
        default=10000,
        help='Build number.',
        metavar='number'
    )
    spm_parser.add_argument(
        '--configuration',
        choices=[
            'debug_universal',
            'debug_arm64',
            'debug_armv7',
            'debug_sim_arm64',
            'release_sim_arm64',
            'release_arm64',
            'release_armv7',
            'release_universal'
        ],
        required=True,
        help='Build configuration'
    )
    add_codesigning_common_arguments(spm_parser)

    if len(sys.argv) < 2:
        parser.print_help()
        sys.exit(1)
@@ -1229,6 +1343,8 @@ if __name__ == '__main__':
            test(bazel=bazel_path, arguments=args)
        elif args.commandName == 'query':
            query(bazel=bazel_path, arguments=args)
        elif args.commandName == 'spm':
            get_spm_aspect_invocation(bazel=bazel_path, arguments=args)
        else:
            raise Exception('Unknown command')
    except KeyboardInterrupt:

@@ -1 +1 @@
Subproject commit 44b6f046d95b84933c1149fbf7f9d81fd4e32020
Subproject commit 41929acc4c7c1da973c77871d0375207b9d0806f

build-system/bazel-utils/spm.bzl (new file, 447 lines)
@@ -0,0 +1,447 @@
|
||||
load("@build_bazel_rules_swift//swift:swift.bzl", "SwiftInfo")
|
||||
load("@bazel_skylib//lib:paths.bzl", "paths")
|
||||
load("@bazel_skylib//lib:dicts.bzl", "dicts")
|
||||
|
||||
# Define provider to propagate data
|
||||
SPMModulesInfo = provider(
|
||||
fields = {
|
||||
"modules": "Dictionary of module information",
|
||||
"transitive_sources": "Depset of all transitive source files",
|
||||
}
|
||||
)
|
||||
|
||||
_IGNORE_CC_LIBRARY_ATTRS = [
|
||||
"data",
|
||||
"applicable_licenses",
|
||||
"alwayslink",
|
||||
"aspect_hints",
|
||||
"compatible_with",
|
||||
"deprecation",
|
||||
"exec_compatible_with",
|
||||
"exec_properties",
|
||||
"expect_failure",
|
||||
"features",
|
||||
"generator_function",
|
||||
"generator_location",
|
||||
"generator_name",
|
||||
"generator_platform",
|
||||
"generator_script",
|
||||
"generator_tool",
|
||||
"generator_toolchain",
|
||||
"generator_toolchain_type",
|
||||
"licenses",
|
||||
"linkstamp",
|
||||
"linkstatic",
|
||||
"name",
|
||||
"restricted_to",
|
||||
"tags",
|
||||
"target_compatible_with",
|
||||
"testonly",
|
||||
"to_json",
|
||||
"to_proto",
|
||||
"toolchains",
|
||||
"transitive_configs",
|
||||
"visibility",
|
||||
"win_def_file",
|
||||
"linkopts",
|
||||
]
|
||||
|
||||
_IGNORE_CC_LIBRARY_EMPTY_ATTRS = [
|
||||
"additional_compiler_inputs",
|
||||
"additional_linker_inputs",
|
||||
"hdrs_check",
|
||||
"implementation_deps",
|
||||
"include_prefix",
|
||||
"strip_include_prefix",
|
||||
"local_defines",
|
||||
]
|
||||
|
||||
_CC_LIBRARY_ATTRS = {
|
||||
"copts": [],
|
||||
"defines": [],
|
||||
"deps": [],
|
||||
"hdrs": [],
|
||||
"includes": [],
|
||||
"srcs": [],
|
||||
"textual_hdrs": [],
|
||||
}
|
||||
|
||||
_CC_LIBRARY_REQUIRED_ATTRS = {
|
||||
}
|
||||
|
||||
_IGNORE_OBJC_LIBRARY_ATTRS = [
|
||||
"data",
|
||||
"alwayslink",
|
||||
"applicable_licenses",
|
||||
"aspect_hints",
|
||||
"compatible_with",
|
||||
"enable_modules",
|
||||
"exec_compatible_with",
|
||||
"exec_properties",
|
||||
"expect_failure",
|
||||
"features",
|
||||
"generator_function",
|
||||
"generator_location",
|
||||
"generator_name",
|
||||
"deprecation",
|
||||
"module_name",
|
||||
"name",
|
||||
"stamp",
|
||||
"tags",
|
||||
"target_compatible_with",
|
||||
"testonly",
|
||||
"to_json",
|
||||
"to_proto",
|
||||
"toolchains",
|
||||
"transitive_configs",
|
||||
"visibility",
|
||||
]
|
||||
|
||||
_IGNORE_OBJC_LIBRARY_EMPTY_ATTRS = [
|
||||
"implementation_deps",
|
||||
"linkopts",
|
||||
"module_map",
|
||||
"non_arc_srcs",
|
||||
"pch",
|
||||
"restricted_to",
|
||||
"textual_hdrs",
|
||||
"sdk_includes",
|
||||
]
|
||||
|
||||
_OBJC_LIBRARY_ATTRS = {
|
||||
"copts": [],
|
||||
"defines": [],
|
||||
"deps": [],
|
||||
"hdrs": [],
|
||||
"srcs": [],
|
||||
"sdk_dylibs": [],
|
||||
"sdk_frameworks": [],
|
||||
"weak_sdk_frameworks": [],
|
||||
"includes": [],
|
||||
}
|
||||
|
||||
_OBJC_LIBRARY_REQUIRED_ATTRS = [
|
||||
"module_name",
|
||||
]
|
||||
|
||||
_IGNORE_SWIFT_LIBRARY_ATTRS = [
|
||||
"data",
|
||||
"always_include_developer_search_paths",
|
||||
"alwayslink",
|
||||
"applicable_licenses",
|
||||
"aspect_hints",
|
||||
"compatible_with",
|
||||
"deprecation",
|
||||
"exec_compatible_with",
|
||||
"exec_properties",
|
||||
"expect_failure",
|
||||
"features",
|
||||
"generated_header_name",
|
||||
"generates_header",
|
||||
"generator_function",
|
||||
"generator_location",
|
||||
"generator_name",
|
||||
"linkstatic",
|
||||
"module_name",
|
||||
"name",
|
||||
"package_name",
|
||||
"restricted_to",
|
||||
"tags",
|
||||
"target_compatible_with",
|
||||
"testonly",
|
||||
"to_json",
|
||||
"to_proto",
|
||||
"toolchains",
|
||||
"transitive_configs",
|
||||
"visibility",
|
||||
]
|
||||
|
||||
_IGNORE_SWIFT_LIBRARY_EMPTY_ATTRS = [
|
||||
"plugins",
|
||||
"private_deps",
|
||||
"swiftc_inputs",
|
||||
]
|
||||
|
||||
_SWIFT_LIBRARY_ATTRS = {
|
||||
"copts": [],
|
||||
"defines": [],
|
||||
"deps": [],
|
||||
"linkopts": [],
|
||||
"srcs": [],
|
||||
}
|
||||
|
||||
_SWIFT_LIBRARY_REQUIRED_ATTRS = [
|
||||
"module_name",
|
||||
]
|
||||
|
||||
_LIBRARY_CONFIGS = {
|
||||
"cc_library": {
|
||||
"ignore_attrs": _IGNORE_CC_LIBRARY_ATTRS,
|
||||
"ignore_empty_attrs": _IGNORE_CC_LIBRARY_EMPTY_ATTRS,
|
||||
"handled_attrs": _CC_LIBRARY_ATTRS,
|
||||
"required_attrs": _CC_LIBRARY_REQUIRED_ATTRS,
|
||||
},
|
||||
"objc_library": {
|
||||
"ignore_attrs": _IGNORE_OBJC_LIBRARY_ATTRS,
|
||||
"ignore_empty_attrs": _IGNORE_OBJC_LIBRARY_EMPTY_ATTRS,
|
||||
"handled_attrs": _OBJC_LIBRARY_ATTRS,
|
||||
"required_attrs": _OBJC_LIBRARY_REQUIRED_ATTRS,
|
||||
},
|
||||
"swift_library": {
|
||||
"ignore_attrs": _IGNORE_SWIFT_LIBRARY_ATTRS,
|
||||
"ignore_empty_attrs": _IGNORE_SWIFT_LIBRARY_EMPTY_ATTRS,
|
||||
"handled_attrs": _SWIFT_LIBRARY_ATTRS,
|
||||
"required_attrs": _SWIFT_LIBRARY_REQUIRED_ATTRS,
|
||||
},
|
||||
}
|
||||
|
||||
def get_rule_atts(rule):
|
||||
if rule.kind in _LIBRARY_CONFIGS:
|
||||
config = _LIBRARY_CONFIGS[rule.kind]
|
||||
ignore_attrs = config["ignore_attrs"]
|
||||
ignore_empty_attrs = config["ignore_empty_attrs"]
|
||||
handled_attrs = config["handled_attrs"]
|
||||
required_attrs = config["required_attrs"]
|
||||
|
||||
for attr_name in dir(rule.attr):
|
||||
if attr_name.startswith("_"):
|
||||
continue
|
||||
if attr_name in ignore_attrs:
|
||||
continue
|
||||
if attr_name in ignore_empty_attrs:
|
||||
attr_value = getattr(rule.attr, attr_name)
|
||||
if attr_value == [] or attr_value == None or attr_value == "":
|
||||
continue
|
||||
else:
|
||||
fail("Attribute {} is not empty: {}".format(attr_name, attr_value))
|
||||
if attr_name in handled_attrs:
|
||||
continue
|
||||
fail("Unknown attribute: {}".format(attr_name))
|
||||
|
||||
result = dict()
|
||||
result["type"] = rule.kind
|
||||
for attr_name in handled_attrs:
|
||||
if hasattr(rule.attr, attr_name):
|
||||
result[attr_name] = getattr(rule.attr, attr_name)
|
||||
else:
|
||||
result[attr_name] = handled_attrs[attr_name] # Use default value
|
||||
for attr_name in required_attrs:
|
||||
if not hasattr(rule.attr, attr_name):
|
||||
if rule.kind == "objc_library" and attr_name == "module_name":
|
||||
result[attr_name] = getattr(rule.attr, "name")
|
||||
else:
|
||||
fail("Required attribute {} is missing".format(attr_name))
|
||||
else:
|
||||
result[attr_name] = getattr(rule.attr, attr_name)
|
||||
result["name"] = getattr(rule.attr, "name")
|
||||
return result
|
||||
elif rule.kind == "ios_application":
|
||||
result = dict()
|
||||
result["type"] = "ios_application"
|
||||
return result
|
||||
elif rule.kind == "generate_spm":
|
||||
result = dict()
|
||||
result["type"] = "root"
|
||||
return result
|
||||
elif rule.kind == "apple_static_xcframework_import":
|
||||
result = dict()
|
||||
result["type"] = "apple_static_xcframework_import"
|
||||
return result
|
||||
else:
|
||||
fail("Unknown rule kind: {}".format(rule.kind))
|
||||
|
||||
def _collect_spm_modules_impl(target, ctx):
|
||||
# Skip targets without DefaultInfo
|
||||
if not DefaultInfo in target:
|
||||
return []
|
||||
|
||||
# Get module name
|
||||
module_name = ctx.label.name
|
||||
if hasattr(ctx.rule.attr, "module_name"):
|
||||
module_name = ctx.rule.attr.module_name or ctx.label.name
|
||||
|
||||
# Collect all modules and transitive sources from dependencies first
|
||||
all_modules = {}
|
||||
dep_transitive_sources_list = []
|
||||
|
||||
if hasattr(ctx.rule.attr, "deps"):
|
||||
for dep in ctx.rule.attr.deps:
|
||||
if SPMModulesInfo in dep:
|
||||
# Merge the modules dictionaries
|
||||
for label, info in dep[SPMModulesInfo].modules.items():
|
||||
all_modules[label] = info
|
||||
# Add transitive sources depset from dependency to the list
|
||||
dep_transitive_sources_list.append(dep[SPMModulesInfo].transitive_sources)
|
||||
|
||||
# Merge all transitive sources from dependencies
|
||||
transitive_sources_from_deps = depset(transitive = dep_transitive_sources_list)
|
||||
|
||||
# Keep this for debugging later
|
||||
# if result_attrs["type"] == "swift_library":
|
||||
# print("Processing rule {}".format(ctx.label.name))
|
||||
# print("ctx.rule.kind = {}".format(ctx.rule.kind))
|
||||
# for attr_name in dir(ctx.rule.attr):
|
||||
# print(" attr1: {}".format(attr_name))
|
||||
|
||||
result_attrs = get_rule_atts(ctx.rule)
|
||||
|
||||
sources = []
|
||||
current_target_src_files = []
|
||||
if "srcs" in result_attrs:
|
||||
for src_target in result_attrs["srcs"]:
|
||||
src_files = src_target.files.to_list()
|
||||
for f in src_files:
|
||||
if f.extension in ["swift", "cc", "cpp", "h", "m", "mm", "s", "S"]:
|
||||
current_target_src_files.append(f)
|
||||
for src_file in src_files:
|
||||
sources.append(src_file.path)
|
||||
current_target_sources = depset(current_target_src_files)
|
||||
|
||||
headers = []
|
||||
current_target_hdr_files = []
|
||||
if "hdrs" in result_attrs:
|
||||
for hdr_target in result_attrs["hdrs"]:
|
||||
hdr_files = hdr_target.files.to_list()
|
||||
for f in hdr_files:
|
||||
current_target_hdr_files.append(f)
|
||||
for hdr_file in hdr_files:
|
||||
headers.append(hdr_file.path)
|
||||
current_target_headers = depset(current_target_hdr_files)
|
||||
|
||||
module_type = result_attrs["type"]
|
||||
|
||||
if module_type == "root":
|
||||
pass
|
||||
elif module_type == "apple_static_xcframework_import":
|
||||
pass
|
||||
elif module_type == "objc_library" or module_type == "swift_library" or module_type == "cc_library":
|
||||
# Collect dependency labels
|
||||
dep_names = []
|
||||
if "deps" in result_attrs:
|
||||
for dep in result_attrs["deps"]:
|
||||
if hasattr(dep, "label"):
|
||||
dep_label = str(dep.label)
|
||||
dep_name = dep_label.split(":")[-1]
|
||||
dep_names.append(dep_name)
|
||||
else:
|
||||
fail("Missing dependency label")
|
||||
|
||||
if module_type == "objc_library" or module_type == "swift_library":
|
||||
if result_attrs["module_name"] != result_attrs["name"]:
|
||||
fail("Module name mismatch: {} != {}".format(result_attrs["module_name"], result_attrs["name"]))
|
||||
|
||||
# Extract the path from the label
|
||||
# Example: @//path/ModuleName:ModuleSubname -> path/ModuleName
|
||||
if not str(ctx.label).startswith("@//"):
|
||||
fail("Invalid label: {}".format(ctx.label))
|
||||
module_path = str(ctx.label).split(":")[0].split("@//")[1]
|
||||
|
||||
if module_type == "objc_library":
|
||||
module_info = {
|
||||
"name": result_attrs["name"],
|
||||
"type": module_type,
|
||||
"path": module_path,
|
||||
"defines": result_attrs["defines"],
|
||||
"deps": dep_names,
|
||||
"sources": sorted(sources + headers),
|
||||
"module_name": module_name,
|
||||
"copts": result_attrs["copts"],
|
||||
"sdk_frameworks": result_attrs["sdk_frameworks"],
|
||||
"sdk_dylibs": result_attrs["sdk_dylibs"],
|
||||
"weak_sdk_frameworks": result_attrs["weak_sdk_frameworks"],
|
||||
"includes": result_attrs["includes"],
|
||||
}
|
||||
elif module_type == "cc_library":
|
||||
module_info = {
|
||||
"name": result_attrs["name"],
|
||||
"type": module_type,
|
||||
"path": module_path,
|
||||
"defines": result_attrs["defines"],
|
||||
"deps": dep_names,
|
||||
"sources": sorted(sources + headers),
|
||||
"module_name": module_name,
|
||||
"copts": result_attrs["copts"],
|
||||
"includes": result_attrs["includes"],
|
||||
}
|
||||
elif module_type == "swift_library":
|
||||
module_info = {
|
||||
"name": result_attrs["name"],
|
||||
"type": module_type,
|
||||
"path": module_path,
|
||||
"deps": dep_names,
|
||||
"sources": sorted(sources),
|
||||
"module_name": module_name,
|
||||
"copts": result_attrs["copts"],
|
||||
}
|
||||
else:
|
||||
fail("Unknown module type: {}".format(module_type))
|
||||
|
||||
if result_attrs["name"] in all_modules:
|
||||
fail("Duplicate module name: {}".format(result_attrs["name"]))
|
||||
all_modules[result_attrs["name"]] = module_info
|
||||
elif result_attrs["type"] == "ios_application":
|
||||
pass
|
||||
else:
|
||||
fail("Unknown rule type: {}".format(ctx.rule.kind))
|
||||
|
||||
# Add current target's sources and headers to the transitive set
|
||||
final_transitive_sources = depset(transitive = [
|
||||
transitive_sources_from_deps,
|
||||
current_target_sources,
|
||||
current_target_headers,
|
||||
])
|
||||
|
||||
# Return both the SPM output files and the provider with modules data and sources
|
||||
return [
|
||||
SPMModulesInfo(
|
||||
modules = all_modules,
|
||||
transitive_sources = final_transitive_sources,
|
||||
),
|
||||
]
|
||||
|
||||
spm_modules_aspect = aspect(
|
||||
implementation = _collect_spm_modules_impl,
|
||||
attr_aspects = ["deps"],
|
||||
)
|
||||
|
||||
def _generate_spm_impl(ctx):
|
||||
outputs = []
|
||||
dep_transitive_sources_list = []
|
||||
|
||||
if len(ctx.attr.deps) != 1:
|
||||
fail("generate_spm must have exactly one dependency")
|
||||
if SPMModulesInfo not in ctx.attr.deps[0]:
|
||||
fail("generate_spm must have a dependency with SPMModulesInfo provider")
|
||||
|
||||
spm_info = ctx.attr.deps[0][SPMModulesInfo]
|
||||
modules = spm_info.modules
|
||||
|
||||
# Declare and write the modules JSON file
|
||||
modules_json_out = ctx.actions.declare_file("%s_modules.json" % ctx.label.name)
|
||||
ctx.actions.write(
|
||||
output = modules_json_out,
|
||||
content = json.encode_indent(modules, indent = " "), # Use encode_indent for readability
|
||||
)
|
||||
outputs.append(modules_json_out)
|
||||
|
||||
for dep in ctx.attr.deps:
|
||||
if SPMModulesInfo in dep:
|
||||
# Add transitive sources depset from dependency
|
||||
dep_transitive_sources_list.append(dep[SPMModulesInfo].transitive_sources)
|
||||
|
||||
# Merge all transitive sources from dependencies
|
||||
transitive_sources_from_deps = depset(transitive = dep_transitive_sources_list)
|
||||
|
||||
# Return DefaultInfo containing only the output files in the 'files' field,
|
||||
# but include the transitive sources in 'runfiles' to enforce the dependency.
|
||||
return [DefaultInfo(
|
||||
files = depset(outputs),
|
||||
runfiles = ctx.runfiles(transitive_files = transitive_sources_from_deps),
|
||||
)]
|
||||
|
||||
generate_spm = rule(
|
||||
implementation = _generate_spm_impl,
|
||||
attrs = {
|
||||
'deps' : attr.label_list(aspects = [spm_modules_aspect]),
|
||||
},
|
||||
)
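As a rough illustration (not part of the diff), the modules JSON written by the generate_spm rule above is a dictionary keyed by module name, with the fields the aspect assembles for each library kind; build-system/generate_spm.py then consumes it as plain JSON. The sketch below uses a hypothetical swift_library entry and made-up paths:

# Hypothetical example of one entry in the modules JSON emitted by generate_spm,
# mirroring the fields assembled for a swift_library in the aspect above.
import json

example_modules = {
    "ExampleModule": {  # hypothetical module name
        "name": "ExampleModule",
        "type": "swift_library",
        "path": "submodules/ExampleModule",
        "deps": ["SwiftSignalKit"],
        "sources": ["submodules/ExampleModule/Sources/Example.swift"],
        "module_name": "ExampleModule",
        "copts": [],
    }
}

# generate_spm.py reads the real file the same way:
# with open("bazel-bin/Telegram/spm_build_root_modules.json") as f:
#     modules = json.load(f)
print(json.dumps(example_modules, indent=2))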

build-system/generate_spm.py (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
#! /usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
|
||||
# Read the modules JSON file
|
||||
modules_json_path = "bazel-bin/Telegram/spm_build_root_modules.json"
|
||||
|
||||
with open(modules_json_path, 'r') as f:
|
||||
modules = json.load(f)
|
||||
|
||||
# Clean spm-files
|
||||
spm_files_dir = "spm-files"
|
||||
if os.path.exists(spm_files_dir):
|
||||
shutil.rmtree(spm_files_dir)
|
||||
if not os.path.exists(spm_files_dir):
|
||||
os.makedirs(spm_files_dir)
|
||||
|
||||
combined_lines = []
|
||||
combined_lines.append("// swift-tools-version: 6.0")
|
||||
combined_lines.append("// The swift-tools-version declares the minimum version of Swift required to build this package.")
|
||||
combined_lines.append("")
|
||||
combined_lines.append("import PackageDescription")
|
||||
combined_lines.append("")
|
||||
combined_lines.append("let package = Package(")
|
||||
combined_lines.append(" name: \"Telegram\",")
|
||||
combined_lines.append(" platforms: [")
|
||||
combined_lines.append(" .iOS(.v13)")
|
||||
combined_lines.append(" ],")
|
||||
combined_lines.append(" products: [")
|
||||
|
||||
for name, module in sorted(modules.items()):
|
||||
if module["type"] == "objc_library" or module["type"] == "swift_library" or module["type"] == "cc_library":
|
||||
combined_lines.append(" .library(name: \"%s\", targets: [\"%s\"])," % (module["name"], module["name"]))
|
||||
|
||||
combined_lines.append(" ],")
|
||||
combined_lines.append(" targets: [")
|
||||
|
||||
for name, module in sorted(modules.items()):
|
||||
module_type = module["type"]
|
||||
if module_type == "objc_library" or module_type == "cc_library" or module_type == "swift_library":
|
||||
combined_lines.append(" .target(")
|
||||
combined_lines.append(" name: \"%s\"," % name)
|
||||
|
||||
linked_directory = None
|
||||
has_non_linked_sources = False
|
||||
for source in module["sources"]:
|
||||
if source.startswith("bazel-out/"):
|
||||
linked_directory = "spm-files/" + name
|
||||
else:
|
||||
has_non_linked_sources = True
|
||||
if linked_directory and has_non_linked_sources:
|
||||
print("Module {} has both regular and generated sources".format(name))
|
||||
sys.exit(1)
|
||||
if linked_directory:
|
||||
os.makedirs(linked_directory)
|
||||
|
||||
combined_lines.append(" dependencies: [")
|
||||
for dep in module["deps"]:
|
||||
combined_lines.append(" .target(name: \"%s\")," % dep)
|
||||
combined_lines.append(" ],")
|
||||
|
||||
if linked_directory:
|
||||
combined_lines.append(" path: \"%s\"," % linked_directory)
|
||||
else:
|
||||
combined_lines.append(" path: \"%s\"," % module["path"])
|
||||
|
||||
combined_lines.append(" exclude: [")
|
||||
exclude_files_and_dirs = []
|
||||
if not linked_directory:
|
||||
for root, dirs, files in os.walk(module["path"]):
|
||||
rel_path = os.path.relpath(root, module["path"])
|
||||
if rel_path == ".":
|
||||
rel_path = ""
|
||||
else:
|
||||
rel_path += "/"
|
||||
|
||||
# Add directories that should be excluded
|
||||
for d in dirs:
|
||||
dir_path = os.path.join(rel_path, d)
|
||||
if any(component.startswith('.') for component in dir_path.split('/')):
|
||||
continue
|
||||
# Check if any source file is under this directory
|
||||
has_source = False
|
||||
for source in module["sources"]:
|
||||
rel_source = source[len(module["path"]) + 1:]
|
||||
if rel_source.startswith(dir_path + "/"):
|
||||
has_source = True
|
||||
break
|
||||
if not has_source:
|
||||
exclude_files_and_dirs.append(dir_path)
|
||||
|
||||
# Add files that should be excluded
|
||||
for f in files:
|
||||
file_path = os.path.join(rel_path, f)
|
||||
if any(component.startswith('.') for component in file_path.split('/')):
|
||||
continue
|
||||
if file_path not in [source[len(module["path"]) + 1:] for source in module["sources"]]:
|
||||
exclude_files_and_dirs.append(file_path)
|
||||
for item in exclude_files_and_dirs:
|
||||
combined_lines.append(" \"%s\"," % item)
|
||||
combined_lines.append(" ],")
|
||||
|
||||
combined_lines.append(" sources: [")
|
||||
for source in module["sources"]:
|
||||
if source.endswith(('.h', '.hpp')):
|
||||
continue
|
||||
linked_source_file_names = []
|
||||
if not source.startswith(module["path"]):
|
||||
if source.startswith("bazel-out/"):
|
||||
if not linked_directory:
|
||||
print("Source {} is a generated file, but module {} has no linked directory".format(source, name))
|
||||
sys.exit(1)
|
||||
if module["path"] in source:
|
||||
source_file_name = source[source.index(module["path"]) + len(module["path"]) + 1:]
|
||||
else:
|
||||
print("Source {} is not inside module path {}".format(source, module["path"]))
|
||||
sys.exit(1)
|
||||
if source_file_name in linked_source_file_names:
|
||||
print("Source {} is a duplicate".format(source))
|
||||
sys.exit(1)
|
||||
|
||||
linked_source_file_names.append(source_file_name)
|
||||
|
||||
# Create any parent directories needed for the source file
|
||||
symlink_location = os.path.join(linked_directory, source_file_name)
|
||||
source_dir = os.path.dirname(symlink_location)
|
||||
if not os.path.exists(source_dir):
|
||||
os.makedirs(source_dir)
|
||||
|
||||
# Calculate the relative path from the symlink location back to the workspace root
|
||||
num_parent_dirs = 2 + source_file_name.count(os.path.sep)
|
||||
relative_prefix = "".join(["../"] * num_parent_dirs)
|
||||
symlink_target = relative_prefix + source
|
||||
|
||||
os.symlink(symlink_target, symlink_location)
|
||||
relative_source = source_file_name
|
||||
combined_lines.append(" \"%s\"," % relative_source)
|
||||
else:
|
||||
print("Source {} is not inside module path {}".format(source, module["path"]))
|
||||
sys.exit(1)
|
||||
else:
|
||||
relative_source = source[len(module["path"]) + 1:]
|
||||
combined_lines.append(" \"%s\"," % relative_source)
|
||||
combined_lines.append(" ],")
|
||||
if module_type == "objc_library" or module_type == "cc_library":
|
||||
if len(module["includes"]) == 0:
|
||||
combined_lines.append(" publicHeadersPath: \"\",")
|
||||
elif len(module["includes"]) == 1:
|
||||
combined_lines.append(" publicHeadersPath: \"%s\"," % module["includes"][0])
|
||||
else:
|
||||
print("Multiple includes are not supported yet: {}".format(module["includes"]))
|
||||
sys.exit(1)
|
||||
combined_lines.append(" cSettings: [")
|
||||
combined_lines.append(" .unsafeFlags([")
|
||||
for flag in module["copts"]:
|
||||
# Escape C-string entities in flag
|
||||
escaped_flag = flag.replace('\\', '\\\\').replace('"', '\\"')
|
||||
combined_lines.append(" \"%s\"," % escaped_flag)
|
||||
combined_lines.append(" ])")
|
||||
combined_lines.append(" ],")
|
||||
combined_lines.append(" linkerSettings: [")
|
||||
if module_type == "objc_library":
|
||||
for framework in module["sdk_frameworks"]:
|
||||
combined_lines.append(" .linkedFramework(\"%s\")," % framework)
|
||||
for dylib in module["sdk_dylibs"]:
|
||||
combined_lines.append(" .linkedLibrary(\"%s\")," % dylib)
|
||||
combined_lines.append(" ]")
|
||||
|
||||
elif module_type == "swift_library":
|
||||
combined_lines.append(" swiftSettings: [")
|
||||
combined_lines.append(" .swiftLanguageMode(.v5),")
|
||||
combined_lines.append(" .unsafeFlags([")
|
||||
for flag in module["copts"]:
|
||||
combined_lines.append(" \"%s\"," % flag)
|
||||
combined_lines.append(" ])")
|
||||
combined_lines.append(" ]")
|
||||
combined_lines.append(" ),")
|
||||
elif module["type"] == "root":
|
||||
pass
|
||||
else:
|
||||
print("Unknown module type: {}".format(module["type"]))
|
||||
sys.exit(1)
|
||||
|
||||
combined_lines.append(" ]")
|
||||
combined_lines.append(")")
|
||||
combined_lines.append("")
|
||||
|
||||
with open("Package.swift", "w") as f:
|
||||
f.write("\n".join(combined_lines))
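A small self-contained check (hypothetical paths, not part of the diff) of the parent-directory arithmetic used for the symlinks above: a link created at spm-files/<module>/<relative source> has to climb over the module directory, the spm-files directory, and any subdirectories of the relative source path to get back to the workspace root, which is where the 2 + source_file_name.count(os.path.sep) term comes from.

# Hypothetical paths; verifies the "../" prefix depth used when symlinking
# generated sources into spm-files/<module>/.
import os

source = "bazel-out/ios-arm64/bin/submodules/Example/Gen/File.swift"  # hypothetical
module_name = "Example"
source_file_name = "Gen/File.swift"  # source path relative to the module

symlink_location = os.path.join("spm-files", module_name, source_file_name)
num_parent_dirs = 2 + source_file_name.count(os.path.sep)
symlink_target = "".join(["../"] * num_parent_dirs) + source

# Resolving the target from the symlink's directory lands back on the original path.
resolved = os.path.normpath(os.path.join(os.path.dirname(symlink_location), symlink_target))
assert resolved == source, resolved
print(symlink_location, "->", symlink_target)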
|
@ -33,101 +33,6 @@ private final class ChunkMediaPlayerExternalSourceImpl: ChunkMediaPlayerSourceIm
|
||||
}
|
||||
|
||||
public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
public final class AudioContext {
|
||||
fileprivate let audioSessionManager: ManagedAudioSession
|
||||
private var audioSessionDisposable: Disposable?
|
||||
private(set) var hasAudioSession: Bool = false
|
||||
private(set) var isAmbientMode: Bool = false
|
||||
private(set) var isInitialized: Bool = false
|
||||
|
||||
private var updatedListeners = Bag<() -> Void>()
|
||||
|
||||
public init(
|
||||
audioSessionManager: ManagedAudioSession
|
||||
) {
|
||||
self.audioSessionManager = audioSessionManager
|
||||
}
|
||||
|
||||
deinit {
|
||||
self.audioSessionDisposable?.dispose()
|
||||
}
|
||||
|
||||
func onUpdated(_ f: @escaping () -> Void) -> Disposable {
|
||||
let index = self.updatedListeners.add(f)
|
||||
return ActionDisposable { [weak self] in
|
||||
Queue.mainQueue().async {
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.updatedListeners.remove(index)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setIsAmbient(isAmbient: Bool) {
|
||||
self.hasAudioSession = false
|
||||
|
||||
for f in self.updatedListeners.copyItems() {
|
||||
f()
|
||||
}
|
||||
|
||||
self.audioSessionDisposable?.dispose()
|
||||
self.audioSessionDisposable = nil
|
||||
}
|
||||
|
||||
func update(type: ManagedAudioSessionType?) {
|
||||
if let type {
|
||||
if self.audioSessionDisposable == nil {
|
||||
self.isInitialized = true
|
||||
|
||||
self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
|
||||
audioSessionType: type,
|
||||
activateImmediately: false,
|
||||
manualActivate: { [weak self] control in
|
||||
control.setupAndActivate(synchronous: false, { state in
|
||||
Queue.mainQueue().async {
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.hasAudioSession = true
|
||||
for f in self.updatedListeners.copyItems() {
|
||||
f()
|
||||
}
|
||||
}
|
||||
})
|
||||
},
|
||||
deactivate: { [weak self] _ in
|
||||
return Signal { subscriber in
|
||||
guard let self else {
|
||||
subscriber.putCompletion()
|
||||
return EmptyDisposable
|
||||
}
|
||||
|
||||
self.hasAudioSession = false
|
||||
for f in self.updatedListeners.copyItems() {
|
||||
f()
|
||||
}
|
||||
subscriber.putCompletion()
|
||||
|
||||
return EmptyDisposable
|
||||
}
|
||||
|> runOn(.mainQueue())
|
||||
},
|
||||
headsetConnectionStatusChanged: { _ in },
|
||||
availableOutputsChanged: { _, _ in }
|
||||
))
|
||||
}
|
||||
} else {
|
||||
if let audioSessionDisposable = self.audioSessionDisposable {
|
||||
self.audioSessionDisposable = nil
|
||||
audioSessionDisposable.dispose()
|
||||
}
|
||||
|
||||
self.hasAudioSession = false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public enum SourceDescription {
|
||||
public final class ResourceDescription {
|
||||
public let postbox: Postbox
|
||||
@ -261,10 +166,10 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
private let dataQueue: Queue
|
||||
|
||||
private let mediaDataReaderParams: MediaDataReaderParams
|
||||
private let audioSessionManager: ManagedAudioSession
|
||||
private let onSeeked: (() -> Void)?
|
||||
private weak var playerNode: MediaPlayerNode?
|
||||
|
||||
private let audioContext: AudioContext
|
||||
private let renderSynchronizer: AVSampleBufferRenderSynchronizer
|
||||
private var videoRenderer: AVSampleBufferDisplayLayer
|
||||
private var audioRenderer: AVSampleBufferAudioRenderer?
|
||||
@ -293,20 +198,13 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
}
|
||||
|
||||
public var actionAtEnd: MediaPlayerActionAtEnd = .stop
|
||||
public weak var migrateToNextPlayerOnEnd: ChunkMediaPlayerV2? {
|
||||
didSet {
|
||||
if self.migrateToNextPlayerOnEnd !== oldValue {
|
||||
self.updateInternalState()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private var didSeekOnce: Bool = false
|
||||
private var isPlaying: Bool = false
|
||||
private var baseRate: Double = 1.0
|
||||
private var isSoundEnabled: Bool
|
||||
private var isMuted: Bool
|
||||
private var initialIsAmbient: Bool
|
||||
private var isAmbientMode: Bool
|
||||
|
||||
private var seekId: Int = 0
|
||||
private var seekTimestamp: Double = 0.0
|
||||
@ -325,11 +223,12 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
private var partsStateDisposable: Disposable?
|
||||
private var updateTimer: Foundation.Timer?
|
||||
|
||||
private var audioContextUpdatedDisposable: Disposable?
|
||||
private var audioSessionDisposable: Disposable?
|
||||
private var hasAudioSession: Bool = false
|
||||
|
||||
public init(
|
||||
params: MediaDataReaderParams,
|
||||
audioContext: AudioContext,
|
||||
audioSessionManager: ManagedAudioSession,
|
||||
source: SourceDescription,
|
||||
video: Bool,
|
||||
playAutomatically: Bool = false,
|
||||
@ -348,7 +247,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
self.dataQueue = ChunkMediaPlayerV2.sharedDataQueue
|
||||
|
||||
self.mediaDataReaderParams = params
|
||||
self.audioContext = audioContext
|
||||
self.audioSessionManager = audioSessionManager
|
||||
self.onSeeked = onSeeked
|
||||
self.playerNode = playerNode
|
||||
|
||||
@ -358,7 +257,7 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
|
||||
self.isSoundEnabled = enableSound
|
||||
self.isMuted = soundMuted
|
||||
self.initialIsAmbient = ambient
|
||||
self.isAmbientMode = ambient
|
||||
self.baseRate = baseRate
|
||||
|
||||
self.renderSynchronizer = AVSampleBufferRenderSynchronizer()
|
||||
@ -397,19 +296,12 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
} else {
|
||||
self.renderSynchronizer.addRenderer(self.videoRenderer)
|
||||
}
|
||||
|
||||
self.audioContextUpdatedDisposable = self.audioContext.onUpdated({ [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.updateInternalState()
|
||||
})
|
||||
}
|
||||
|
||||
deinit {
|
||||
self.partsStateDisposable?.dispose()
|
||||
self.updateTimer?.invalidate()
|
||||
self.audioContextUpdatedDisposable?.dispose()
|
||||
self.audioSessionDisposable?.dispose()
|
||||
|
||||
if #available(iOS 17.0, *) {
|
||||
self.videoRenderer.sampleBufferRenderer.stopRequestingMediaData()
|
||||
@ -429,19 +321,51 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
}
|
||||
|
||||
private func updateInternalState() {
|
||||
var audioSessionType: ManagedAudioSessionType?
|
||||
if self.isSoundEnabled && self.hasSound {
|
||||
let isAmbient: Bool
|
||||
if self.audioContext.isInitialized {
|
||||
isAmbient = self.audioContext.isAmbientMode
|
||||
} else {
|
||||
isAmbient = self.initialIsAmbient
|
||||
if self.audioSessionDisposable == nil {
|
||||
self.audioSessionDisposable = self.audioSessionManager.push(params: ManagedAudioSessionClientParams(
|
||||
audioSessionType: self.isAmbientMode ? .ambient : .play(mixWithOthers: false),
|
||||
activateImmediately: false,
|
||||
manualActivate: { [weak self] control in
|
||||
control.setupAndActivate(synchronous: false, { state in
|
||||
Queue.mainQueue().async {
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
audioSessionType = isAmbient ? .ambient : .play(mixWithOthers: false)
|
||||
self.hasAudioSession = true
|
||||
self.updateInternalState()
|
||||
}
|
||||
})
|
||||
},
|
||||
deactivate: { [weak self] _ in
|
||||
return Signal { subscriber in
|
||||
guard let self else {
|
||||
subscriber.putCompletion()
|
||||
return EmptyDisposable
|
||||
}
|
||||
self.audioContext.update(type: audioSessionType)
|
||||
|
||||
if self.isSoundEnabled && self.hasSound && self.audioContext.hasAudioSession {
|
||||
self.hasAudioSession = false
|
||||
self.updateInternalState()
|
||||
subscriber.putCompletion()
|
||||
|
||||
return EmptyDisposable
|
||||
}
|
||||
|> runOn(.mainQueue())
|
||||
},
|
||||
headsetConnectionStatusChanged: { _ in },
|
||||
availableOutputsChanged: { _, _ in }
|
||||
))
|
||||
}
|
||||
} else {
|
||||
if let audioSessionDisposable = self.audioSessionDisposable {
|
||||
self.audioSessionDisposable = nil
|
||||
audioSessionDisposable.dispose()
|
||||
}
|
||||
|
||||
self.hasAudioSession = false
|
||||
}
|
||||
|
||||
if self.isSoundEnabled && self.hasSound && self.hasAudioSession {
|
||||
if self.audioRenderer == nil {
|
||||
let audioRenderer = AVSampleBufferAudioRenderer()
|
||||
audioRenderer.isMuted = self.isMuted
|
||||
@ -875,9 +799,13 @@ public final class ChunkMediaPlayerV2: ChunkMediaPlayer {
|
||||
}
|
||||
|
||||
public func continueWithOverridingAmbientMode(isAmbient: Bool) {
|
||||
if self.audioContext.isAmbientMode != isAmbient {
|
||||
self.initialIsAmbient = isAmbient
|
||||
self.audioContext.setIsAmbient(isAmbient: isAmbient)
|
||||
if self.isAmbientMode != isAmbient {
|
||||
self.isAmbientMode = isAmbient
|
||||
|
||||
self.hasAudioSession = false
|
||||
self.updateInternalState()
|
||||
self.audioSessionDisposable?.dispose()
|
||||
self.audioSessionDisposable = nil
|
||||
|
||||
let currentTimestamp: CMTime
|
||||
if let pendingSeekTimestamp = self.pendingSeekTimestamp {
|
||||
|

@@ -167,6 +167,11 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
    self.conferenceAddParticipant?()
}

var enableVideoSharpening = false
if let data = call.context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
    enableVideoSharpening = value != 0.0
}

self.callScreenState = PrivateCallScreen.State(
    strings: presentationData.strings,
    lifecycleState: .connecting,

@@ -180,7 +185,8 @@
    remoteVideo: nil,
    isRemoteBatteryLow: false,
    isEnergySavingEnabled: !self.sharedContext.energyUsageSettings.fullTranslucency,
    isConferencePossible: false
    isConferencePossible: false,
    enableVideoSharpening: enableVideoSharpening
)

self.isMicrophoneMutedDisposable = (call.isMuted

@@ -70,7 +70,7 @@ final class LivestreamVideoViewV1: UIView {
    var onSeeked: (() -> Void)?
    self.player = ChunkMediaPlayerV2(
        params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
        audioContext: ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager),
        audioSessionManager: audioSessionManager,
        source: .externalParts(self.chunkPlayerPartsState.get()),
        video: true,
        enableSound: true,

@@ -1160,7 +1160,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
    useIPCContext = value != 0.0
}

let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type"
let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type-v2"

let screencastIPCContext: ScreencastIPCContext
if useIPCContext {
@ -7,6 +7,7 @@ import BalancedTextComponent
|
||||
import TelegramPresentationData
|
||||
import CallsEmoji
|
||||
import ImageBlur
|
||||
import HierarchyTrackingLayer
|
||||
|
||||
private final class EmojiContainerView: UIView {
|
||||
private let maskImageView: UIImageView?
|
||||
@ -207,6 +208,7 @@ private final class EmojiItemComponent: Component {
|
||||
}
|
||||
|
||||
final class View: UIView {
|
||||
private let hierarchyTrackingLayer: HierarchyTrackingLayer
|
||||
private let containerView: EmojiContainerView
|
||||
private let measureEmojiView = ComponentView<Empty>()
|
||||
private var pendingContainerView: EmojiContainerView?
|
||||
@ -219,11 +221,22 @@ private final class EmojiItemComponent: Component {
|
||||
private var pendingEmojiValues: [String]?
|
||||
|
||||
override init(frame: CGRect) {
|
||||
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
|
||||
self.containerView = EmojiContainerView(hasMask: true)
|
||||
|
||||
super.init(frame: frame)
|
||||
|
||||
self.layer.addSublayer(self.hierarchyTrackingLayer)
|
||||
self.addSubview(self.containerView)
|
||||
|
||||
self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if value {
|
||||
self.state?.updated(transition: .immediate)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
|

@@ -275,7 +275,7 @@ final class VideoChatParticipantThumbnailComponent: Component {
if let current = self.videoLayer {
    videoLayer = current
} else {
    videoLayer = PrivateCallVideoLayer()
    videoLayer = PrivateCallVideoLayer(enableSharpening: false)
    self.videoLayer = videoLayer
    self.extractedContainerView.contentView.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
    self.extractedContainerView.contentView.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
@ -51,6 +51,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
let contentInsets: UIEdgeInsets
|
||||
let controlInsets: UIEdgeInsets
|
||||
let interfaceOrientation: UIInterfaceOrientation
|
||||
let enableVideoSharpening: Bool
|
||||
let action: (() -> Void)?
|
||||
let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?
|
||||
let activatePinch: ((PinchSourceContainerNode) -> Void)?
|
||||
@ -70,6 +71,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
contentInsets: UIEdgeInsets,
|
||||
controlInsets: UIEdgeInsets,
|
||||
interfaceOrientation: UIInterfaceOrientation,
|
||||
enableVideoSharpening: Bool,
|
||||
action: (() -> Void)?,
|
||||
contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?,
|
||||
activatePinch: ((PinchSourceContainerNode) -> Void)?,
|
||||
@ -88,6 +90,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
self.contentInsets = contentInsets
|
||||
self.controlInsets = controlInsets
|
||||
self.interfaceOrientation = interfaceOrientation
|
||||
self.enableVideoSharpening = enableVideoSharpening
|
||||
self.action = action
|
||||
self.contextAction = contextAction
|
||||
self.activatePinch = activatePinch
|
||||
@ -128,6 +131,9 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
if lhs.interfaceOrientation != rhs.interfaceOrientation {
|
||||
return false
|
||||
}
|
||||
if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
|
||||
return false
|
||||
}
|
||||
if (lhs.action == nil) != (rhs.action == nil) {
|
||||
return false
|
||||
}
|
||||
@ -525,7 +531,7 @@ final class VideoChatParticipantVideoComponent: Component {
|
||||
resetVideoSource = true
|
||||
}
|
||||
} else {
|
||||
videoLayer = PrivateCallVideoLayer()
|
||||
videoLayer = PrivateCallVideoLayer(enableSharpening: component.enableVideoSharpening)
|
||||
self.videoLayer = videoLayer
|
||||
videoLayer.opacity = 0.0
|
||||
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
|
||||
|
@ -152,6 +152,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
let expandedInsets: UIEdgeInsets
|
||||
let safeInsets: UIEdgeInsets
|
||||
let interfaceOrientation: UIInterfaceOrientation
|
||||
let enableVideoSharpening: Bool
|
||||
let openParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
|
||||
let openInvitedParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
|
||||
let updateMainParticipant: (VideoParticipantKey?, Bool?) -> Void
|
||||
@ -173,6 +174,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
expandedInsets: UIEdgeInsets,
|
||||
safeInsets: UIEdgeInsets,
|
||||
interfaceOrientation: UIInterfaceOrientation,
|
||||
enableVideoSharpening: Bool,
|
||||
openParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
|
||||
openInvitedParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
|
||||
updateMainParticipant: @escaping (VideoParticipantKey?, Bool?) -> Void,
|
||||
@ -193,6 +195,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
self.expandedInsets = expandedInsets
|
||||
self.safeInsets = safeInsets
|
||||
self.interfaceOrientation = interfaceOrientation
|
||||
self.enableVideoSharpening = enableVideoSharpening
|
||||
self.openParticipantContextMenu = openParticipantContextMenu
|
||||
self.openInvitedParticipantContextMenu = openInvitedParticipantContextMenu
|
||||
self.updateMainParticipant = updateMainParticipant
|
||||
@ -239,6 +242,9 @@ final class VideoChatParticipantsComponent: Component {
|
||||
if lhs.interfaceOrientation != rhs.interfaceOrientation {
|
||||
return false
|
||||
}
|
||||
if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
@ -1074,6 +1080,7 @@ final class VideoChatParticipantsComponent: Component {
|
||||
contentInsets: itemContentInsets,
|
||||
controlInsets: itemControlInsets,
|
||||
interfaceOrientation: component.interfaceOrientation,
|
||||
enableVideoSharpening: component.enableVideoSharpening,
|
||||
action: { [weak self] in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
|
@ -235,6 +235,8 @@ final class VideoChatScreenComponent: Component {
|
||||
let participants = ComponentView<Empty>()
|
||||
var scheduleInfo: ComponentView<Empty>?
|
||||
|
||||
var enableVideoSharpening: Bool = false
|
||||
|
||||
var reconnectedAsEventsDisposable: Disposable?
|
||||
var memberEventsDisposable: Disposable?
|
||||
|
||||
@ -1244,6 +1246,11 @@ final class VideoChatScreenComponent: Component {
|
||||
self.invitedPeers.removeAll(where: { invitedPeer in members.participants.contains(where: { $0.id == .peer(invitedPeer.peer.id) }) })
|
||||
}
|
||||
self.callState = component.initialData.callState
|
||||
|
||||
self.enableVideoSharpening = false
|
||||
if let data = component.initialCall.accountContext.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
|
||||
self.enableVideoSharpening = value != 0.0
|
||||
}
|
||||
}
|
||||
|
||||
var call: VideoChatCall
|
||||
@ -1359,7 +1366,7 @@ final class VideoChatScreenComponent: Component {
|
||||
return false
|
||||
}
|
||||
if participant.videoDescription != nil || participant.presentationDescription != nil {
|
||||
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
|
||||
if let participantPeer = participant.peer, participantPeer.id != groupCall.accountContext.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -1421,7 +1428,7 @@ final class VideoChatScreenComponent: Component {
|
||||
var speakingParticipantPeers: [EnginePeer] = []
|
||||
if let members, !members.speakingParticipants.isEmpty {
|
||||
for participant in members.participants {
|
||||
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
|
||||
if let participantPeer = participant.peer, participantPeer.id != groupCall.accountContext.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
|
||||
speakingParticipantPeers.append(participantPeer)
|
||||
}
|
||||
}
|
||||
@ -1698,7 +1705,7 @@ final class VideoChatScreenComponent: Component {
|
||||
return false
|
||||
}
|
||||
if participant.videoDescription != nil || participant.presentationDescription != nil {
|
||||
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
|
||||
if let participantPeer = participant.peer, participantPeer.id != conferenceSource.context.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -1760,7 +1767,7 @@ final class VideoChatScreenComponent: Component {
|
||||
var speakingParticipantPeers: [EnginePeer] = []
|
||||
if !members.speakingParticipants.isEmpty {
|
||||
for participant in members.participants {
|
||||
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
|
||||
if let participantPeer = participant.peer, participantPeer.id != conferenceSource.context.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
|
||||
speakingParticipantPeers.append(participantPeer)
|
||||
}
|
||||
}
|
||||
@ -2501,6 +2508,7 @@ final class VideoChatScreenComponent: Component {
|
||||
expandedInsets: participantsExpandedInsets,
|
||||
safeInsets: participantsSafeInsets,
|
||||
interfaceOrientation: environment.orientation ?? .portrait,
|
||||
enableVideoSharpening: self.enableVideoSharpening,
|
||||
openParticipantContextMenu: { [weak self] id, sourceView, gesture in
|
||||
guard let self else {
|
||||
return
|
||||
|

@@ -5,6 +5,21 @@ import MetalPerformanceShaders
import Accelerate
import MetalEngine

private func makeSharpenKernel(device: MTLDevice, sharpeningStrength: Float) -> MPSImageConvolution {
    let centerWeight = 1.0 + 6.0 * sharpeningStrength
    let adjacentWeight = -1.0 * sharpeningStrength
    let diagonalWeight = -0.5 * sharpeningStrength

    let sharpenWeights: [Float] = [
        diagonalWeight, adjacentWeight, diagonalWeight,
        adjacentWeight, centerWeight, adjacentWeight,
        diagonalWeight, adjacentWeight, diagonalWeight
    ]
    let result = MPSImageConvolution(device: device, kernelWidth: 3, kernelHeight: 3, weights: sharpenWeights)
    result.edgeMode = .clamp
    return result
}

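A quick sanity check (not part of the diff) on the kernel above: for any sharpening strength s, the weights are one center at 1 + 6s, four edge neighbours at -s, and four diagonals at -0.5s, so they sum to 1 + 6s - 4s - 2s = 1 and the convolution preserves overall brightness while boosting local contrast.

# Verifies that the 3x3 sharpen kernel weights from makeSharpenKernel sum to 1
# for any strength, so flat regions keep their brightness.
def sharpen_weights(strength: float) -> list[float]:
    center = 1.0 + 6.0 * strength
    adjacent = -1.0 * strength
    diagonal = -0.5 * strength
    return [
        diagonal, adjacent, diagonal,
        adjacent, center, adjacent,
        diagonal, adjacent, diagonal,
    ]

for s in (0.0, 1.4, 2.0):  # 1.4 is the default strength set in the layer's BlurState
    w = sharpen_weights(s)
    assert abs(sum(w) - 1.0) < 1e-9
    print(s, sum(w))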
public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
|
||||
public var internalData: MetalEngineSubjectInternalData?
|
||||
|
||||
@ -17,6 +32,9 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
let computePipelineStateVertical: MTLComputePipelineState
|
||||
let downscaleKernel: MPSImageBilinearScale
|
||||
|
||||
var sharpeningStrength: Float = 0.0
|
||||
var sharpenKernel: MPSImageConvolution
|
||||
|
||||
required init?(device: MTLDevice) {
|
||||
guard let library = metalLibrary(device: device) else {
|
||||
return nil
|
||||
@ -52,6 +70,14 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
self.computePipelineStateVertical = computePipelineStateVertical
|
||||
|
||||
self.downscaleKernel = MPSImageBilinearScale(device: device)
|
||||
|
||||
self.sharpeningStrength = 1.4
|
||||
self.sharpenKernel = makeSharpenKernel(device: device, sharpeningStrength: self.sharpeningStrength)
|
||||
}
|
||||
|
||||
func updateSharpeningStrength(device: MTLDevice, sharpeningStrength: Float) {
|
||||
self.sharpeningStrength = sharpeningStrength
|
||||
self.sharpenKernel = makeSharpenKernel(device: device, sharpeningStrength: self.sharpeningStrength)
|
||||
}
|
||||
}
|
||||
|
||||
@ -83,20 +109,25 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
}
|
||||
}
|
||||
|
||||
private let enableSharpening: Bool
|
||||
|
||||
public var renderSpec: RenderLayerSpec?
|
||||
|
||||
private var rgbaTexture: PooledTexture?
|
||||
private var sharpenedTexture: PooledTexture?
|
||||
private var downscaledTexture: PooledTexture?
|
||||
private var blurredHorizontalTexture: PooledTexture?
|
||||
private var blurredVerticalTexture: PooledTexture?
|
||||
|
||||
override public init() {
|
||||
public init(enableSharpening: Bool) {
|
||||
self.enableSharpening = enableSharpening
|
||||
self.blurredLayer = MetalEngineSubjectLayer()
|
||||
|
||||
super.init()
|
||||
}
|
||||
|
||||
override public init(layer: Any) {
|
||||
self.enableSharpening = false
|
||||
self.blurredLayer = MetalEngineSubjectLayer()
|
||||
|
||||
super.init(layer: layer)
|
||||
@ -121,6 +152,9 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
if self.rgbaTexture == nil || self.rgbaTexture?.spec != rgbaTextureSpec {
|
||||
self.rgbaTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
|
||||
}
|
||||
if self.sharpenedTexture == nil || self.sharpenedTexture?.spec != rgbaTextureSpec {
|
||||
self.sharpenedTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
|
||||
}
|
||||
if self.downscaledTexture == nil {
|
||||
self.downscaledTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 128, height: 128, pixelFormat: .rgba8UnsignedNormalized))
|
||||
}
|
||||
@ -135,10 +169,26 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
return
|
||||
}
|
||||
|
||||
let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
|
||||
var outputTexture = rgbaTexture
|
||||
|
||||
var sharpenedTexture: TexturePlaceholder?
|
||||
if self.enableSharpening && rgbaTextureSpec.width * rgbaTextureSpec.height >= 800 * 480 {
|
||||
sharpenedTexture = self.sharpenedTexture?.get(context: context)
|
||||
if let sharpenedTexture {
|
||||
outputTexture = sharpenedTexture
|
||||
}
|
||||
}
|
||||
|
||||
if let sharpenedTexture {
|
||||
let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, sharpenedTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, sharpenedTexture in
|
||||
guard let rgbaTexture else {
|
||||
return
|
||||
}
|
||||
guard let sharpenedTexture else {
|
||||
return
|
||||
}
|
||||
|
||||
do {
|
||||
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
|
||||
return
|
||||
}
|
||||
@ -162,7 +212,46 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
|
||||
|
||||
computeEncoder.endEncoding()
|
||||
}
|
||||
|
||||
do {
|
||||
|
||||
blurState.sharpenKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: sharpenedTexture)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
|
||||
guard let rgbaTexture else {
|
||||
return
|
||||
}
|
||||
|
||||
do {
|
||||
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
|
||||
return
|
||||
}
|
||||
|
||||
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
|
||||
let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
|
||||
|
||||
switch videoTextures.textureLayout {
|
||||
case let .biPlanar(biPlanar):
|
||||
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
|
||||
computeEncoder.setTexture(biPlanar.y, index: 0)
|
||||
computeEncoder.setTexture(biPlanar.uv, index: 1)
|
||||
computeEncoder.setTexture(rgbaTexture, index: 2)
|
||||
case let .triPlanar(triPlanar):
|
||||
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
|
||||
computeEncoder.setTexture(triPlanar.y, index: 0)
|
||||
computeEncoder.setTexture(triPlanar.u, index: 1)
|
||||
computeEncoder.setTexture(triPlanar.u, index: 2)
|
||||
computeEncoder.setTexture(rgbaTexture, index: 3)
|
||||
}
|
||||
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
|
||||
|
||||
computeEncoder.endEncoding()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if !self.blurredLayer.isHidden {
|
||||
guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
|
||||
@ -228,8 +317,8 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
})
|
||||
}
|
||||
|
||||
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
|
||||
guard let rgbaTexture else {
|
||||
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: outputTexture.placeholer, commands: { encoder, placement, outputTexture in
|
||||
guard let outputTexture else {
|
||||
return
|
||||
}
|
||||
|
||||
@ -244,7 +333,7 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
|
||||
)
|
||||
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
|
||||
|
||||
encoder.setFragmentTexture(rgbaTexture, index: 0)
|
||||
encoder.setFragmentTexture(outputTexture, index: 0)
|
||||
|
||||
var brightness: Float = 1.0
|
||||
var saturation: Float = 1.0
|
||||
|
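A note on the dispatch math in the hunks above: the compute passes use a fixed 16x16 threadgroup and size the grid with integer ceiling division, so frames whose dimensions are not multiples of 16 are still fully covered; the sharpening path is additionally gated to frames of at least 800x480 pixels. A minimal helper expressing the same calculation (the name is illustrative) might look like this:

import Metal

func threadgroupCountCovering(_ texture: MTLTexture, threadgroupSize: MTLSize) -> MTLSize {
    // Integer ceiling division: (n + d - 1) / d groups of size d always cover n items.
    return MTLSize(
        width: (texture.width + threadgroupSize.width - 1) / threadgroupSize.width,
        height: (texture.height + threadgroupSize.height - 1) / threadgroupSize.height,
        depth: 1
    )
}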
@ -128,6 +128,7 @@ final class VideoContainerView: HighlightTrackingButton {
}

let key: Key
let enableSharpening: Bool

let videoContainerLayer: VideoContainerLayer
var videoContainerLayerTaken: Bool = false
@ -211,8 +212,9 @@ final class VideoContainerView: HighlightTrackingButton {

var pressAction: (() -> Void)?

init(key: Key) {
init(key: Key, enableSharpening: Bool) {
self.key = key
self.enableSharpening = enableSharpening

self.videoContainerLayer = VideoContainerLayer()
self.videoContainerLayer.backgroundColor = nil
@ -223,7 +225,7 @@ final class VideoContainerView: HighlightTrackingButton {
self.videoContainerLayer.contentsLayer.cornerCurve = .circular
}

self.videoLayer = PrivateCallVideoLayer()
self.videoLayer = PrivateCallVideoLayer(enableSharpening: self.enableSharpening)
self.videoLayer.masksToBounds = true
self.videoLayer.isDoubleSided = false
if #available(iOS 13.0, *) {
@ -454,7 +456,7 @@ final class VideoContainerView: HighlightTrackingButton {
let previousVideoLayer = self.videoLayer
self.disappearingVideoLayer = DisappearingVideo(flipAnimationInfo: flipAnimationInfo, videoLayer: self.videoLayer, videoMetrics: videoMetrics)

self.videoLayer = PrivateCallVideoLayer()
self.videoLayer = PrivateCallVideoLayer(enableSharpening: self.enableSharpening)
self.videoLayer.opacity = previousVideoLayer.opacity
self.videoLayer.masksToBounds = true
self.videoLayer.isDoubleSided = false

@ -81,6 +81,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
public var isRemoteBatteryLow: Bool
public var isEnergySavingEnabled: Bool
public var isConferencePossible: Bool
public var enableVideoSharpening: Bool

public init(
strings: PresentationStrings,
@ -95,7 +96,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
remoteVideo: VideoSource?,
isRemoteBatteryLow: Bool,
isEnergySavingEnabled: Bool,
isConferencePossible: Bool
isConferencePossible: Bool,
enableVideoSharpening: Bool
) {
self.strings = strings
self.lifecycleState = lifecycleState
@ -110,6 +112,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
self.isRemoteBatteryLow = isRemoteBatteryLow
self.isEnergySavingEnabled = isEnergySavingEnabled
self.isConferencePossible = isConferencePossible
self.enableVideoSharpening = enableVideoSharpening
}

public static func ==(lhs: State, rhs: State) -> Bool {
@ -152,6 +155,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
if lhs.isConferencePossible != rhs.isConferencePossible {
return false
}
if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
return false
}
return true
}
}
@ -994,7 +1000,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
videoContainerView = current
} else {
animateIn = true
videoContainerView = VideoContainerView(key: videoContainerKey)
videoContainerView = VideoContainerView(key: videoContainerKey, enableSharpening: params.state.enableVideoSharpening)
switch videoContainerKey {
case .foreground:
self.overlayContentsView.layer.addSublayer(videoContainerView.blurredContainerLayer)

@ -469,13 +469,7 @@ public final class ChatChannelSubscriberInputPanelNode: ChatInputPanelNode {

self.giftButton.isHidden = false
self.helpButton.isHidden = true
//TODO:release
self.suggestedPostButton.isHidden = false
self.presentGiftOrSuggestTooltip()
} else if case .broadcast = peer.info {
self.giftButton.isHidden = true
self.helpButton.isHidden = true
self.suggestedPostButton.isHidden = false
self.suggestedPostButton.isHidden = true
self.presentGiftOrSuggestTooltip()
} else if peer.flags.contains(.isGigagroup), self.action == .muteNotifications || self.action == .unmuteNotifications {
self.giftButton.isHidden = true

@ -6,7 +6,6 @@ import SwiftSignalKit
import TelegramCore
import Postbox
import TelegramPresentationData
import UniversalMediaPlayer

public final class StoryContentItem: Equatable {
public final class ExternalState {
@ -33,7 +32,6 @@ public final class StoryContentItem: Equatable {
public final class SharedState {
public var replyDrafts: [StoryId: NSAttributedString] = [:]
public var baseRate: Double = 1.0
public var audioContext: ChunkMediaPlayerV2.AudioContext?

public init() {
}
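The SharedState hunk above touches an optional ChunkMediaPlayerV2.AudioContext. For context, the reuse-or-create pattern that pairs with such a shared field (it appears verbatim in the StoryItemContentComponent hunks further down) can be written as a small helper. This is a sketch only: the helper name is illustrative and the ManagedAudioSession parameter type is an assumption based on how mediaManager.audioSession is used elsewhere in this diff:

func sharedAudioContext(for sharedState: StoryContentItem.SharedState, audioSessionManager: ManagedAudioSession) -> ChunkMediaPlayerV2.AudioContext {
    if let current = sharedState.audioContext {
        return current
    }
    // Create once and cache on the shared state so every story player in the set reuses the same audio context.
    let audioContext = ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager)
    sharedState.audioContext = audioContext
    return audioContext
}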
@ -15,275 +15,6 @@ import ButtonComponent
import MultilineTextComponent
import TelegramPresentationData

private protocol StoryVideoView: UIView {
var audioMode: StoryContentItem.AudioMode { get set }
var playbackCompleted: (() -> Void)? { get set }
var status: Signal<MediaPlayerStatus?, NoError> { get }

func play()
func pause()
func seek(timestamp: Double)
func setSoundMuted(soundMuted: Bool)
func continueWithOverridingAmbientMode(isAmbient: Bool)
func setBaseRate(baseRate: Double)
func update(size: CGSize, transition: ComponentTransition)
}

private final class LegacyStoryVideoView: UIView, StoryVideoView {
private let videoNode: UniversalVideoNode

var audioMode: StoryContentItem.AudioMode
var playbackCompleted: (() -> Void)?

var status: Signal<MediaPlayerStatus?, NoError> {
return self.videoNode.status
}

init(
context: AccountContext,
file: FileMediaReference,
audioMode: StoryContentItem.AudioMode,
baseRate: Double,
isCaptureProtected: Bool
) {
self.audioMode = audioMode

var userLocation: MediaResourceUserLocation = .other
switch file {
case let .story(peer, _, _):
userLocation = .peer(peer.id)
default:
break
}
var hasSentFramesToDisplay: (() -> Void)?
self.videoNode = UniversalVideoNode(
context: context,
postbox: context.account.postbox,
audioSession: context.sharedContext.mediaManager.audioSession,
manager: context.sharedContext.mediaManager.universalVideoManager,
decoration: StoryVideoDecoration(),
content: NativeVideoContent(
id: .contextResult(0, "\(UInt64.random(in: 0 ... UInt64.max))"),
userLocation: userLocation,
fileReference: file,
imageReference: nil,
streamVideo: .story,
loopVideo: true,
enableSound: true,
soundMuted: audioMode == .off,
beginWithAmbientSound: audioMode == .ambient,
mixWithOthers: true,
useLargeThumbnail: false,
autoFetchFullSizeThumbnail: false,
tempFilePath: nil,
captureProtected: isCaptureProtected,
hintDimensions: file.media.dimensions?.cgSize,
storeAfterDownload: nil,
displayImage: false,
hasSentFramesToDisplay: {
hasSentFramesToDisplay?()
}
),
priority: .gallery
)
self.videoNode.isHidden = true
self.videoNode.setBaseRate(baseRate)

super.init(frame: CGRect())

hasSentFramesToDisplay = { [weak self] in
guard let self else {
return
}
self.videoNode.isHidden = false
}

self.videoNode.playbackCompleted = { [weak self] in
guard let self else {
return
}
self.playbackCompleted?()
}

self.addSubview(self.videoNode.view)

self.videoNode.ownsContentNodeUpdated = { [weak self] value in
guard let self else {
return
}
if value {
self.videoNode.seek(0.0)
if self.audioMode != .off {
self.videoNode.playOnceWithSound(playAndRecord: false, actionAtEnd: .stop)
} else {
self.videoNode.play()
}
}
}
self.videoNode.canAttachContent = true
}

required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

func play() {
self.videoNode.play()
}

func pause() {
self.videoNode.pause()
}

func seek(timestamp: Double) {
self.videoNode.seek(timestamp)
}

func setSoundMuted(soundMuted: Bool) {
self.videoNode.setSoundMuted(soundMuted: soundMuted)
}

func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.videoNode.continueWithOverridingAmbientMode(isAmbient: isAmbient)
}

func setBaseRate(baseRate: Double) {
self.videoNode.setBaseRate(baseRate)
}

func update(size: CGSize, transition: ComponentTransition) {
transition.setFrame(view: self.videoNode.view, frame: CGRect(origin: CGPoint(), size: size))
self.videoNode.updateLayout(size: size, transition: transition.containedViewLayoutTransition)
}
}

private final class ModernStoryVideoView: UIView, StoryVideoView {
private let player: ChunkMediaPlayerV2
private let playerNode: MediaPlayerNode

var audioMode: StoryContentItem.AudioMode
var playbackCompleted: (() -> Void)?
var isFirstPlay: Bool = true

var status: Signal<MediaPlayerStatus?, NoError> {
return self.player.status |> map(Optional.init)
}

init(
context: AccountContext,
audioContext: ChunkMediaPlayerV2.AudioContext,
file: FileMediaReference,
audioMode: StoryContentItem.AudioMode,
baseRate: Double,
isCaptureProtected: Bool
) {
self.audioMode = audioMode

self.playerNode = MediaPlayerNode(
backgroundThread: false,
captureProtected: isCaptureProtected
)

var userLocation: MediaResourceUserLocation = .other
switch file {
case let .story(peer, _, _):
userLocation = .peer(peer.id)
default:
break
}

self.player = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioContext: audioContext,
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
postbox: context.account.postbox,
size: file.media.size ?? 0,
reference: file.resourceReference(file.media.resource),
userLocation: userLocation,
userContentType: .story,
statsCategory: statsCategoryForFileWithAttributes(file.media.attributes),
fetchAutomatically: false
)),
video: true,
playAutomatically: false,
enableSound: true,
baseRate: baseRate,
soundMuted: audioMode == .off,
ambient: audioMode == .ambient,
mixWithOthers: true,
continuePlayingWithoutSoundOnLostAudioSession: false,
isAudioVideoMessage: false,
playerNode: self.playerNode
)
self.playerNode.isHidden = true
self.player.setBaseRate(baseRate)

super.init(frame: CGRect())

self.addSubview(self.playerNode.view)

self.playerNode.hasSentFramesToDisplay = { [weak self] in
guard let self else {
return
}
self.playerNode.isHidden = false
}

self.player.actionAtEnd = .action({ [weak self] in
guard let self else {
return
}
self.playbackCompleted?()
})
}

required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

func play() {
if self.isFirstPlay {
self.isFirstPlay = false

if self.audioMode != .off {
self.player.playOnceWithSound(playAndRecord: false, seek: .start)
} else {
self.player.play()
}
} else {
self.player.play()
}
}

func pause() {
self.player.pause()
}

func seek(timestamp: Double) {
self.player.seek(timestamp: timestamp, play: nil)
}

func setSoundMuted(soundMuted: Bool) {
self.player.setSoundMuted(soundMuted: soundMuted)
}

func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.player.continueWithOverridingAmbientMode(isAmbient: isAmbient)
}

func setBaseRate(baseRate: Double) {
self.player.setBaseRate(baseRate)
}

func update(size: CGSize, transition: ComponentTransition) {
transition.containedViewLayoutTransition.updateFrame(node: self.playerNode, frame: CGRect(origin: CGPoint(), size: size))
}

func updateNext(nextVideoView: ModernStoryVideoView?) {
self.player.migrateToNextPlayerOnEnd = nextVideoView?.player
}
}
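ModernStoryVideoView.updateNext(nextVideoView:) above hands the following item's player to migrateToNextPlayerOnEnd, presumably so playback can hand off to the next story's player without rebuilding the pipeline. A sketch of wiring an ordered list of such views (only updateNext comes from the diff; the helper itself is illustrative):

func chainStoryVideoViews(_ views: [ModernStoryVideoView]) {
    for (index, view) in views.enumerated() {
        // The last view has no successor, so it receives nil and simply stops at the end.
        let next = index + 1 < views.count ? views[index + 1] : nil
        view.updateNext(nextVideoView: next)
    }
}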
final class StoryItemContentComponent: Component {
typealias EnvironmentType = StoryContentItem.Environment

@ -360,11 +91,10 @@ final class StoryItemContentComponent: Component {
final class View: StoryContentItem.View {
private let imageView: StoryItemImageView
private let overlaysView: StoryItemOverlaysView
private var videoNode: UniversalVideoNode?
private var loadingEffectView: StoryItemLoadingEffectView?
private var loadingEffectAppearanceTimer: SwiftSignalKit.Timer?

private var videoView: StoryVideoView?

private var mediaAreasEffectView: StoryItemLoadingEffectView?

private var currentMessageMedia: EngineMedia?
@ -399,8 +129,6 @@ final class StoryItemContentComponent: Component {
private var fetchPriorityResourceId: String?
private var currentFetchPriority: (isMain: Bool, disposable: Disposable)?

private weak var nextItemView: StoryItemContentComponent.View?

override init(frame: CGRect) {
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
self.imageView = StoryItemImageView()
@ -458,7 +186,10 @@ final class StoryItemContentComponent: Component {
}

private func initializeVideoIfReady(update: Bool) {
if self.videoView != nil {
if self.videoNode != nil {
return
}
if case .pause = self.progressMode {
return
}

@ -466,49 +197,48 @@ final class StoryItemContentComponent: Component {
return
}

var useLegacyImplementation = true
if let data = component.context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_video_legacystoryplayer"] as? Double {
useLegacyImplementation = value != 0.0
}

if case .pause = self.progressMode {
if useLegacyImplementation {
if case let .file(file) = currentMessageMedia, let peerReference = PeerReference(component.peer._asPeer()) {
if self.videoNode == nil {
let videoNode = UniversalVideoNode(
context: component.context,
postbox: component.context.account.postbox,
audioSession: component.context.sharedContext.mediaManager.audioSession,
manager: component.context.sharedContext.mediaManager.universalVideoManager,
decoration: StoryVideoDecoration(),
content: NativeVideoContent(
id: .contextResult(0, "\(UInt64.random(in: 0 ... UInt64.max))"),
userLocation: .peer(peerReference.id),
fileReference: .story(peer: peerReference, id: component.item.id, media: file),
imageReference: nil,
streamVideo: .story,
loopVideo: true,
enableSound: true,
soundMuted: component.audioMode == .off,
beginWithAmbientSound: component.audioMode == .ambient,
mixWithOthers: true,
useLargeThumbnail: false,
autoFetchFullSizeThumbnail: false,
tempFilePath: nil,
captureProtected: component.item.isForwardingDisabled,
hintDimensions: file.dimensions?.cgSize,
storeAfterDownload: nil,
displayImage: false,
hasSentFramesToDisplay: { [weak self] in
guard let self else {
return
}
self.videoNode?.isHidden = false
}

if case let .file(file) = currentMessageMedia, let peerReference = PeerReference(component.peer._asPeer()) {
if self.videoView == nil {
let videoView: StoryVideoView
if useLegacyImplementation {
videoView = LegacyStoryVideoView(
context: component.context,
file: .story(peer: peerReference, id: component.item.id, media: file),
audioMode: component.audioMode,
baseRate: component.baseRate,
isCaptureProtected: component.item.isForwardingDisabled
),
priority: .gallery
)
} else {
let audioContext: ChunkMediaPlayerV2.AudioContext
if let current = self.environment?.sharedState.audioContext {
audioContext = current
} else {
audioContext = ChunkMediaPlayerV2.AudioContext(audioSessionManager: component.context.sharedContext.mediaManager.audioSession)
self.environment?.sharedState.audioContext = audioContext
}
videoView = ModernStoryVideoView(
context: component.context,
audioContext: audioContext,
file: .story(peer: peerReference, id: component.item.id, media: file),
audioMode: component.audioMode,
baseRate: component.baseRate,
isCaptureProtected: component.item.isForwardingDisabled
)
}
self.videoView = videoView
self.insertSubview(videoView, aboveSubview: self.imageView)
videoNode.isHidden = true
videoNode.setBaseRate(component.baseRate)

videoView.playbackCompleted = { [weak self] in
self.videoNode = videoNode
self.insertSubview(videoNode.view, aboveSubview: self.imageView)

videoNode.playbackCompleted = { [weak self] in
guard let self else {
return
}
@ -523,24 +253,38 @@ final class StoryItemContentComponent: Component {
if shouldLoop {
self.rewind()

if let videoView = self.videoView {
if let videoNode = self.videoNode {
if self.contentLoaded {
videoView.play()
videoNode.play()
}
}
} else {
self.environment?.presentationProgressUpdated(1.0, false, true)
}
}
videoNode.ownsContentNodeUpdated = { [weak self] value in
guard let self, let component = self.component else {
return
}
if value {
self.videoNode?.seek(0.0)
if component.audioMode != .off {
self.videoNode?.playOnceWithSound(playAndRecord: false, actionAtEnd: .stop)
} else {
self.videoNode?.play()
}
}
}
videoNode.canAttachContent = true
if update {
self.state?.updated(transition: .immediate)
}
}
}

if let videoView = self.videoView {
if let videoNode = self.videoNode {
if self.videoProgressDisposable == nil {
self.videoProgressDisposable = (videoView.status
self.videoProgressDisposable = (videoNode.status
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self, let status else {
return
@ -552,19 +296,9 @@ final class StoryItemContentComponent: Component {
}
})
}

let canPlay = self.progressMode != .pause && self.contentLoaded && self.hierarchyTrackingLayer.isInHierarchy

if canPlay {
videoView.play()
} else {
videoView.pause()
}
}

self.updateVideoNextItem()
}

override func setProgressMode(_ progressMode: StoryContentItem.ProgressMode) {
if self.progressMode != progressMode {
self.progressMode = progressMode
@ -576,62 +310,48 @@ final class StoryItemContentComponent: Component {
}
}

func setNextItemView(nextItemView: StoryItemContentComponent.View?) {
if self.nextItemView !== nextItemView {
self.nextItemView = nextItemView
self.updateVideoNextItem()
}
}

private func updateVideoNextItem() {
if let videoView = self.videoView as? ModernStoryVideoView {
let nextVideoView = self.nextItemView?.videoView as? ModernStoryVideoView
videoView.updateNext(nextVideoView: nextVideoView)
}
}

override func rewind() {
self.currentProgressTimerValue = 0.0
if let videoView = self.videoView {
if let videoNode = self.videoNode {
if self.contentLoaded {
videoView.seek(timestamp: 0.0)
videoNode.seek(0.0)
}
}
}

override func leaveAmbientMode() {
if let videoView = self.videoView {
if let videoNode = self.videoNode {
self.ignoreBufferingTimestamp = CFAbsoluteTimeGetCurrent()
videoView.setSoundMuted(soundMuted: false)
videoView.continueWithOverridingAmbientMode(isAmbient: false)
videoNode.setSoundMuted(soundMuted: false)
videoNode.continueWithOverridingAmbientMode(isAmbient: false)
}
}

override func enterAmbientMode(ambient: Bool) {
if let videoView = self.videoView {
if let videoNode = self.videoNode {
self.ignoreBufferingTimestamp = CFAbsoluteTimeGetCurrent()
if ambient {
videoView.continueWithOverridingAmbientMode(isAmbient: true)
videoNode.continueWithOverridingAmbientMode(isAmbient: true)
} else {
videoView.setSoundMuted(soundMuted: true)
videoNode.setSoundMuted(soundMuted: true)
}
}
}

override func setBaseRate(_ baseRate: Double) {
if let videoView = self.videoView {
videoView.setBaseRate(baseRate: baseRate)
if let videoNode = self.videoNode {
videoNode.setBaseRate(baseRate)
}
}

private func updateProgressMode(update: Bool) {
if let videoView = self.videoView {
if let videoNode = self.videoNode {
let canPlay = self.progressMode != .pause && self.contentLoaded && self.hierarchyTrackingLayer.isInHierarchy

if canPlay {
videoView.play()
videoNode.play()
} else {
videoView.pause()
videoNode.pause()
}
}

@ -846,11 +566,11 @@ final class StoryItemContentComponent: Component {

private var isSeeking = false
func seekTo(_ timestamp: Double, apply: Bool) {
guard let videoView = self.videoView else {
guard let videoNode = self.videoNode else {
return
}
if apply {
videoView.seek(timestamp: min(timestamp, self.effectiveDuration - 0.3))
videoNode.seek(min(timestamp, self.effectiveDuration - 0.3))
}
self.isSeeking = true
self.updateVideoPlaybackProgress(timestamp)
@ -868,10 +588,6 @@ final class StoryItemContentComponent: Component {
let environment = environment[StoryContentItem.Environment.self].value
self.environment = environment

if let videoView = self.videoView {
videoView.audioMode = component.audioMode
}

var synchronousLoad = false
if let hint = transition.userData(Hint.self) {
synchronousLoad = hint.synchronousLoad
@ -916,12 +632,12 @@ final class StoryItemContentComponent: Component {
self.currentMessageMedia = messageMedia
reloadMedia = true

if let videoView = self.videoView {
if let videoNode = self.videoNode {
self.videoProgressDisposable?.dispose()
self.videoProgressDisposable = nil

self.videoView = nil
videoView.removeFromSuperview()
self.videoNode = nil
videoNode.view.removeFromSuperview()
}
}
self.currentMessageMetadataMedia = component.item.media
@ -1051,10 +767,10 @@ final class StoryItemContentComponent: Component {
}
let _ = imageSize

if let videoView = self.videoView {
if let videoNode = self.videoNode {
let videoSize = dimensions.aspectFilled(availableSize)
videoView.frame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) * 0.5), y: floor((availableSize.height - videoSize.height) * 0.5)), size: videoSize)
videoView.update(size: videoSize, transition: .immediate)
videoNode.frame = CGRect(origin: CGPoint(x: floor((availableSize.width - videoSize.width) * 0.5), y: floor((availableSize.height - videoSize.height) * 0.5)), size: videoSize)
videoNode.updateLayout(size: videoSize, transition: .immediate)
}
}
}
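The initializeVideoIfReady hunk above reads the "ios_video_legacystoryplayer" switch as a Double from the current AppConfiguration data dictionary and treats any non-zero value as enabled. A generalized helper for that lookup, sketched under the assumption that numeric flags are stored as Double (as in the hunk) and with an illustrative name, could look like:

func appConfigurationFlag(_ context: AccountContext, key: String, defaultValue: Bool) -> Bool {
    guard let data = context.currentAppConfiguration.with({ $0 }).data, let value = data[key] as? Double else {
        return defaultValue
    }
    return value != 0.0
}

With that helper, the lookup in the hunk becomes appConfigurationFlag(component.context, key: "ios_video_legacystoryplayer", defaultValue: true).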
@ -1478,7 +1478,7 @@ public final class StoryItemSetContainerComponent: Component {
}

if itemLayout.contentScaleFraction <= 0.0001 && !self.preparingToDisplayViewList {
if index != centralIndex && index != centralIndex + 1 {
if index != centralIndex {
itemVisible = false
}
}
@ -1870,19 +1870,6 @@ public final class StoryItemSetContainerComponent: Component {
}
}

for i in 0 ..< component.slice.allItems.count {
guard let visibleItem = self.visibleItems[component.slice.allItems[i].id] else {
continue
}
var nextVisibleItem: VisibleItem?
if i != component.slice.allItems.count - 1 {
nextVisibleItem = self.visibleItems[component.slice.allItems[i + 1].id]
}
if let itemView = visibleItem.view.view as? StoryItemContentComponent.View {
itemView.setNextItemView(nextItemView: nextVisibleItem?.view.view as? StoryItemContentComponent.View)
}
}

self.trulyValidIds = trulyValidIds

var removeIds: [StoryId] = []

@ -4131,7 +4131,8 @@ extension ChatControllerImpl {
strongSelf.present(UndoOverlayController(presentationData: strongSelf.presentationData, content: .info(title: nil, text: strongSelf.presentationData.strings.Conversation_GigagroupDescription, timeout: nil, customUndoText: nil), elevatedLayout: false, action: { _ in return true }), in: .current)
}
}, openSuggestPost: { [weak self] in
guard let self else {
let _ = self
/*guard let self else {
return
}
guard let peerId = self.chatLocation.peerId else {
@ -4152,7 +4153,7 @@ extension ChatControllerImpl {
)
chatController.navigationPresentation = .modal

self.push(chatController)
self.push(chatController)*/
}, editMessageMedia: { [weak self] messageId, draw in
if let strongSelf = self {
strongSelf.controllerInteraction?.editMessageMedia(messageId, draw)

@ -1093,7 +1093,7 @@ final class HLSVideoJSNativeContentNode: ASDisplayNode, UniversalVideoContentNod
var onSeeked: (() -> Void)?
self.player = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioContext: ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager),
audioSessionManager: audioSessionManager,
source: .externalParts(self.chunkPlayerPartsState.get()),
video: true,
enableSound: self.enableSound,

@ -520,7 +520,7 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
} else {
let mediaPlayer = ChunkMediaPlayerV2(
params: ChunkMediaPlayerV2.MediaDataReaderParams(context: context),
audioContext: ChunkMediaPlayerV2.AudioContext(audioSessionManager: audioSessionManager),
audioSessionManager: audioSessionManager,
source: .directFetch(ChunkMediaPlayerV2.SourceDescription.ResourceDescription(
postbox: postbox,
size: selectedFile.size ?? 0,

@ -1631,7 +1631,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
tgcalls::Register<tgcalls::InstanceImpl>();
//tgcalls::Register<tgcalls::InstanceV2_4_0_0Impl>();
tgcalls::Register<tgcalls::InstanceV2Impl>();
tgcalls::Register<tgcalls::InstanceV2ReferenceImpl>();
});