Merge branch 'temp-ppp'

# Conflicts:
#	submodules/TelegramUI/Sources/AccountContext.swift
Isaac 2025-03-14 15:37:17 +01:00
commit 1cd1c1e0b3
11 changed files with 571 additions and 63 deletions

View File

@ -105,3 +105,8 @@ load("@build_bazel_rules_apple//apple:apple.bzl", "provisioning_profile_reposito
provisioning_profile_repository(
name = "local_provisioning_profiles",
)
local_repository(
name = "build_configuration",
path = "build-input/configuration-repository",
)

View File

@ -315,7 +315,6 @@ class BazelCommandLine:
print(subprocess.list2cmdline(combined_arguments))
call_executable(combined_arguments)
def invoke_test(self):
combined_arguments = [
self.build_environment.bazel_path
@ -356,6 +355,44 @@ class BazelCommandLine:
print(subprocess.list2cmdline(combined_arguments))
call_executable(combined_arguments)
def invoke_query(self, query_args):
combined_arguments = [
self.build_environment.bazel_path
]
combined_arguments += self.get_startup_bazel_arguments()
combined_arguments += ['aquery']
if self.configuration_path is None:
raise Exception('configuration_path is not defined')
combined_arguments += [
'--override_repository=build_configuration={}'.format(self.configuration_path)
]
combined_arguments += [
'-c', 'dbg',
'--ios_multi_cpus=sim_arm64',
]
combined_arguments += self.get_define_arguments()
if self.remote_cache is not None:
combined_arguments += [
'--remote_cache={}'.format(self.remote_cache),
'--experimental_remote_downloader={}'.format(self.remote_cache)
]
elif self.cache_dir is not None:
combined_arguments += [
'--disk_cache={path}'.format(path=self.cache_dir)
]
# Add user-provided query arguments
combined_arguments += query_args
print('TelegramBuild: running')
print(subprocess.list2cmdline(combined_arguments))
call_executable(combined_arguments)
def clean(bazel, arguments):
bazel_command_line = BazelCommandLine(
@ -613,6 +650,36 @@ def test(bazel, arguments):
bazel_command_line.invoke_test()
def query(bazel, arguments):
bazel_command_line = BazelCommandLine(
bazel=bazel,
override_bazel_version=arguments.overrideBazelVersion,
override_xcode_version=arguments.overrideXcodeVersion,
bazel_user_root=arguments.bazelUserRoot
)
if arguments.cacheDir is not None:
bazel_command_line.add_cache_dir(arguments.cacheDir)
elif arguments.cacheHost is not None:
bazel_command_line.add_remote_cache(arguments.cacheHost)
# Resolve configuration if needed
if arguments.configurationPath is not None:
resolve_configuration(
base_path=os.getcwd(),
bazel_command_line=bazel_command_line,
arguments=arguments,
additional_codesigning_output_path=None
)
# Parse the query arguments
query_args = []
if arguments.queryArgs:
query_args = shlex.split(arguments.queryArgs)
bazel_command_line.invoke_query(query_args)
def add_codesigning_common_arguments(current_parser: argparse.ArgumentParser):
configuration_group = current_parser.add_mutually_exclusive_group(required=True)
configuration_group.add_argument(
@ -971,6 +1038,73 @@ if __name__ == '__main__':
help='Path to IPA 2 file.'
)
query_parser = subparsers.add_parser('query', help='Run arbitrary bazel queries')
# Configuration is optional for queries
query_parser.add_argument(
'--configurationPath',
required=False,
help='''
Path to a json containing build configuration.
See build-system/appstore-configuration.json for an example.
''',
metavar='path'
)
# Codesigning arguments are optional for queries
query_parser.add_argument(
'--gitCodesigningRepository',
required=False,
help='''
If specified, certificates and provisioning profiles will be loaded from git.
TELEGRAM_CODESIGNING_GIT_PASSWORD environment variable must be set.
''',
metavar='path'
)
query_parser.add_argument(
'--codesigningInformationPath',
required=False,
help='''
Use signing certificates and provisioning profiles from a local directory.
''',
metavar='command'
)
query_parser.add_argument(
'--xcodeManagedCodesigning',
action='store_true',
help='''
Let Xcode manage your certificates and provisioning profiles.
''',
)
query_parser.add_argument(
'--gitCodesigningType',
choices=[
'development',
'adhoc',
'appstore',
'enterprise'
],
required=False,
help='''
The name of the folder to use inside "profiles" folder in the git repository.
Required if gitCodesigningRepository is specified.
''',
metavar='type'
)
query_parser.add_argument(
'--gitCodesigningUseCurrent',
action='store_true',
required=False,
default=False,
help='''
Always refresh codesigning repository.
'''
)
query_parser.add_argument(
'--queryArgs',
required=True,
help='The query command and arguments to pass to bazel.',
metavar='query_string'
)
if len(sys.argv) < 2:
parser.print_help()
sys.exit(1)
@ -1077,6 +1211,8 @@ if __name__ == '__main__':
)
elif args.commandName == 'test':
test(bazel=bazel_path, arguments=args)
elif args.commandName == 'query':
query(bazel=bazel_path, arguments=args)
else:
raise Exception('Unknown command')
except KeyboardInterrupt:
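For context (not part of the commit): the new query subcommand ultimately runs bazel aquery with the same configuration, compilation-mode and cache flags as a debug simulator build, and appends whatever --queryArgs expands to via shlex.split. Note that invoke_query raises if no configuration path has been resolved, so --configurationPath is required in practice even though the parser marks it optional. A rough sketch of the resulting invocation, with hypothetical paths and an example query expression:

import shlex

# Hypothetical values; the real ones come from BazelCommandLine and the CLI arguments.
bazel_path = 'bazel'
configuration_path = '/path/to/configuration-repository'
query_args = shlex.split('deps(//Telegram:Telegram) --output=summary')

combined_arguments = (
    [bazel_path]                       # plus the usual startup arguments
    + ['aquery']                       # the bazel command is fixed to aquery
    + ['--override_repository=build_configuration={}'.format(configuration_path)]
    + ['-c', 'dbg', '--ios_multi_cpus=sim_arm64']
    + ['--disk_cache=/path/to/cache']  # or --remote_cache=... when --cacheHost is used
    + query_args                       # user-provided query expression and flags
)
print(shlex.join(combined_arguments))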

View File

@ -98,7 +98,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
case browserExperiment(Bool)
case localTranscription(Bool)
case enableReactionOverrides(Bool)
case storiesExperiment(Bool)
case compressedEmojiCache(Bool)
case storiesJpegExperiment(Bool)
case conferenceDebug(Bool)
case enableQuickReactionSwitch(Bool)
@ -133,7 +133,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return DebugControllerSection.web.rawValue
case .keepChatNavigationStack, .skipReadHistory, .dustEffect, .crashOnSlowQueries, .crashOnMemoryPressure:
return DebugControllerSection.experiments.rawValue
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .conferenceDebug, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .playerV2, .devRequests, .fakeAds, .enableLocalTranslation:
case .clearTips, .resetNotifications, .crash, .fillLocalSavedMessageCache, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .resetTagHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .compressedEmojiCache, .storiesJpegExperiment, .conferenceDebug, .enableQuickReactionSwitch, .experimentalCompatibility, .enableDebugDataDisplay, .rippleEffect, .browserExperiment, .localTranscription, .enableReactionOverrides, .restorePurchases, .disableReloginTokens, .liveStreamV2, .experimentalCallMute, .playerV2, .devRequests, .fakeAds, .enableLocalTranslation:
return DebugControllerSection.experiments.rawValue
case .logTranslationRecognition, .resetTranslationStates:
return DebugControllerSection.translation.rawValue
@ -236,7 +236,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return 44
case .resetTranslationStates:
return 45
case .storiesExperiment:
case .compressedEmojiCache:
return 46
case .storiesJpegExperiment:
return 47
@ -1288,12 +1288,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
})
}).start()
})
case let .storiesExperiment(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Story Search Debug", value: value, sectionId: self.section, style: .blocks, updated: { value in
case let .compressedEmojiCache(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Compressed Emoji Cache", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
var settings = settings?.get(ExperimentalUISettings.self) ?? ExperimentalUISettings.defaultSettings
settings.storiesExperiment = value
settings.compressedEmojiCache = value
return PreferencesEntry(settings)
})
}).start()
@ -1535,11 +1535,10 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
entries.append(.logTranslationRecognition(experimentalSettings.logLanguageRecognition))
entries.append(.resetTranslationStates)
if case .internal = sharedContext.applicationBindings.appBuildType {
entries.append(.storiesExperiment(experimentalSettings.storiesExperiment))
entries.append(.storiesJpegExperiment(experimentalSettings.storiesJpegExperiment))
entries.append(.disableReloginTokens(experimentalSettings.disableReloginTokens))
}
entries.append(.compressedEmojiCache(experimentalSettings.compressedEmojiCache))
entries.append(.storiesJpegExperiment(experimentalSettings.storiesJpegExperiment))
entries.append(.disableReloginTokens(experimentalSettings.disableReloginTokens))
entries.append(.conferenceDebug(experimentalSettings.conferenceDebug))
entries.append(.enableQuickReactionSwitch(!experimentalSettings.disableQuickReaction))
entries.append(.liveStreamV2(experimentalSettings.liveStreamV2))

View File

@ -499,6 +499,7 @@ public final class ReactionContextNode: ASDisplayNode, ASScrollViewDelegate {
self.animationCache = animationCache
self.animationRenderer = MultiAnimationRendererImpl()
(self.animationRenderer as? MultiAnimationRendererImpl)?.useYuvA = context.sharedContext.immediateExperimentalUISettings.compressedEmojiCache
self.backgroundMaskNode = ASDisplayNode()
self.backgroundNode = ReactionContextBackgroundNode(largeCircleSize: largeCircleSize, smallCircleSize: smallCircleSize, maskNode: self.backgroundMaskNode)

View File

@ -65,8 +65,6 @@ void combineYUVAPlanesIntoARGB(uint8_t *argb, uint8_t const *inY, uint8_t const
vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_709_2, &pixelRange, &info, kvImage420Yp8_Cb8_Cr8, kvImageARGB8888, 0);
});
vImage_Error error = kvImageNoError;
vImage_Buffer destArgb;
destArgb.data = (void *)argb;
destArgb.width = width;
@ -97,15 +95,8 @@ void combineYUVAPlanesIntoARGB(uint8_t *argb, uint8_t const *inY, uint8_t const
srcA.height = height;
srcA.rowBytes = width;
error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp, &srcCb, &srcCr, &destArgb, &info, permuteMap, 255, kvImageDoNotTile);
error = vImageOverwriteChannels_ARGB8888(&srcA, &destArgb, &destArgb, 1 << 0, kvImageDoNotTile);
if (error != kvImageNoError) {
}
//error = vImageOverwriteChannels_ARGB8888(&srcYp, &destArgb, &destArgb, 1 << 1, kvImageDoNotTile);
//error = vImageOverwriteChannels_ARGB8888(&srcYp, &destArgb, &destArgb, 1 << 2, kvImageDoNotTile);
//error = vImageOverwriteChannels_ARGB8888(&srcYp, &destArgb, &destArgb, 1 << 3, kvImageDoNotTile);
vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&srcYp, &srcCb, &srcCr, &destArgb, &info, permuteMap, 255, kvImageDoNotTile);
vImageOverwriteChannels_ARGB8888(&srcA, &destArgb, &destArgb, 1 << 0, kvImageDoNotTile);
}
void scaleImagePlane(uint8_t *outPlane, int outWidth, int outHeight, int outBytesPerRow, uint8_t const *inPlane, int inWidth, int inHeight, int inBytesPerRow) {

View File

@ -417,7 +417,20 @@ public final class InlineStickerItemLayer: MultiAnimationRenderTarget {
}
override public var contents: Any? {
didSet {
get {
return super.contents
} set(value) {
#if targetEnvironment(simulator)
if let value, CFGetTypeID(value as CFTypeRef) == CVPixelBufferGetTypeID() {
let pixelBuffer = value as! CVPixelBuffer
super.contents = CVPixelBufferGetIOSurface(pixelBuffer)
} else {
super.contents = value
}
#else
super.contents = value
#endif
if let mirrorLayer = self.mirrorLayer {
mirrorLayer.contents = self.contents
}

View File

@ -10,6 +10,7 @@ import AccountContext
import TelegramPresentationData
import EmojiTextAttachmentView
import EmojiStatusComponent
import CoreVideo
final class EmojiKeyboardCloneItemLayer: SimpleLayer {
}
@ -79,7 +80,20 @@ public final class EmojiKeyboardItemLayer: MultiAnimationRenderTarget {
}
override public var contents: Any? {
didSet {
get {
return super.contents
} set(value) {
#if targetEnvironment(simulator)
if let value, CFGetTypeID(value as CFTypeRef) == CVPixelBufferGetTypeID() {
let pixelBuffer = value as! CVPixelBuffer
super.contents = CVPixelBufferGetIOSurface(pixelBuffer)
} else {
super.contents = value
}
#else
super.contents = value
#endif
self.onContentsUpdate()
if let cloneLayer = self.cloneLayer {
cloneLayer.contents = self.contents

View File

@ -4,6 +4,7 @@ import SwiftSignalKit
import Display
import AnimationCache
import Accelerate
import IOSurface
public protocol MultiAnimationRenderer: AnyObject {
func add(target: MultiAnimationRenderTarget, cache: AnimationCache, itemId: String, unique: Bool, size: CGSize, fetch: @escaping (AnimationCacheFetchOptions) -> Disposable) -> Disposable
@ -89,12 +90,21 @@ private final class LoadFrameGroupTask {
}
}
private var yuvToRgbConversion: vImage_YpCbCrToARGB = {
var info = vImage_YpCbCrToARGB()
var pixelRange = vImage_YpCbCrPixelRange(Yp_bias: 16, CbCr_bias: 128, YpRangeMax: 235, CbCrRangeMax: 240, YpMax: 255, YpMin: 0, CbCrMax: 255, CbCrMin: 0)
vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_709_2, &pixelRange, &info, kvImage420Yp8_Cb8_Cr8, kvImageARGB8888, 0)
return info
}()
private final class ItemAnimationContext {
fileprivate final class Frame {
let frame: AnimationCacheItemFrame
let duration: Double
let image: UIImage
let badgeImage: UIImage?
let contentsAsImage: UIImage?
let contentsAsCVPixelBuffer: CVPixelBuffer?
let size: CGSize
var remainingDuration: Double
@ -120,11 +130,101 @@ private final class ItemAnimationContext {
return nil
}
self.image = image
self.contentsAsImage = image
self.contentsAsCVPixelBuffer = nil
self.size = CGSize(width: CGFloat(width), height: CGFloat(height))
self.badgeImage = nil
default:
return nil
case let .yuva(y, u, v, a):
var pixelBuffer: CVPixelBuffer? = nil
let _ = CVPixelBufferCreate(kCFAllocatorDefault, y.width, y.height, kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar, [
kCVPixelBufferIOSurfacePropertiesKey: NSDictionary()
] as CFDictionary, &pixelBuffer)
guard let pixelBuffer else {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
defer {
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
}
guard let baseAddressY = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0) else {
return nil
}
guard let baseAddressCbCr = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1) else {
return nil
}
guard let baseAddressA = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 2) else {
return nil
}
let dstBufferY = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: baseAddressY), height: vImagePixelCount(y.height), width: vImagePixelCount(y.width), rowBytes: CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0))
let dstBufferCbCr = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: baseAddressCbCr), height: vImagePixelCount(y.height / 2), width: vImagePixelCount(y.width / 2), rowBytes: CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1))
let dstBufferA = vImage_Buffer(data: UnsafeMutableRawPointer(mutating: baseAddressA), height: vImagePixelCount(y.height), width: vImagePixelCount(y.width), rowBytes: CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 2))
y.data.withUnsafeBytes { (yBytes: UnsafeRawBufferPointer) -> Void in
if dstBufferY.rowBytes == y.bytesPerRow {
memcpy(dstBufferY.data, yBytes.baseAddress!, yBytes.count)
} else {
for i in 0 ..< y.height {
memcpy(dstBufferY.data.advanced(by: dstBufferY.rowBytes * i), yBytes.baseAddress!.advanced(by: y.bytesPerRow * i), y.bytesPerRow)
}
}
}
a.data.withUnsafeBytes { (aBytes: UnsafeRawBufferPointer) -> Void in
if dstBufferA.rowBytes == a.bytesPerRow {
memcpy(dstBufferA.data, aBytes.baseAddress!, aBytes.count)
} else {
for i in 0 ..< y.height {
memcpy(dstBufferA.data.advanced(by: dstBufferA.rowBytes * i), aBytes.baseAddress!.advanced(by: a.bytesPerRow * i), a.bytesPerRow)
}
}
}
u.data.withUnsafeBytes { (uBytes: UnsafeRawBufferPointer) -> Void in
v.data.withUnsafeBytes { (vBytes: UnsafeRawBufferPointer) -> Void in
let sourceU = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: uBytes.baseAddress!),
height: vImagePixelCount(u.height),
width: vImagePixelCount(u.width),
rowBytes: u.bytesPerRow
)
let sourceV = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: vBytes.baseAddress!),
height: vImagePixelCount(v.height),
width: vImagePixelCount(v.width),
rowBytes: v.bytesPerRow
)
withUnsafePointer(to: sourceU, { sourceU in
withUnsafePointer(to: sourceV, { sourceV in
var srcPlanarBuffers: [
UnsafePointer<vImage_Buffer>?
] = [sourceU, sourceV]
var destChannels: [UnsafeMutableRawPointer?] = [
dstBufferCbCr.data.advanced(by: 1),
dstBufferCbCr.data
]
let channelCount = 2
vImageConvert_PlanarToChunky8(
&srcPlanarBuffers,
&destChannels,
UInt32(channelCount),
MemoryLayout<Pixel_8>.stride * channelCount,
vImagePixelCount(u.width),
vImagePixelCount(u.height),
dstBufferCbCr.rowBytes,
vImage_Flags(kvImageDoNotTile)
)
})
})
}
}
self.contentsAsImage = nil
self.contentsAsCVPixelBuffer = pixelBuffer
self.size = CGSize(width: CGFloat(y.width), height: CGFloat(y.height))
}
}
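Side note (not part of the diff): the point of the tri-planar kCVPixelFormatType_420YpCbCr8VideoRange_8A_TriPlanar buffer is memory. Ignoring row padding and alignment, a W x H frame stored this way takes a full-resolution Y plane, a quarter-resolution interleaved CbCr plane and a full-resolution alpha plane:

\[
\text{bytes}_{\text{YpCbCr8\_8A}} = \underbrace{WH}_{Y} + \underbrace{2\cdot\tfrac{W}{2}\cdot\tfrac{H}{2}}_{CbCr} + \underbrace{WH}_{A} = 2.5\,WH
\qquad\text{vs.}\qquad
\text{bytes}_{\text{BGRA}} = 4\,WH
\]

i.e. roughly a 37.5% reduction per cached emoji frame compared to the RGBA path, at the cost of chroma subsampling.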
@ -221,8 +321,163 @@ private final class ItemAnimationContext {
self.blurredRepresentationValue = context.generateImage()
return self.blurredRepresentationValue
default:
return nil
case let .yuva(y, u, v, a):
let blurredWidth = 12
let blurredHeight = 12
let size = CGSize(width: blurredWidth, height: blurredHeight)
var sourceY = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: y.data.withUnsafeBytes { $0.baseAddress! }),
height: vImagePixelCount(y.height),
width: vImagePixelCount(y.width),
rowBytes: y.bytesPerRow
)
var sourceU = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: u.data.withUnsafeBytes { $0.baseAddress! }),
height: vImagePixelCount(u.height),
width: vImagePixelCount(u.width),
rowBytes: u.bytesPerRow
)
var sourceV = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: v.data.withUnsafeBytes { $0.baseAddress! }),
height: vImagePixelCount(v.height),
width: vImagePixelCount(v.width),
rowBytes: v.bytesPerRow
)
var sourceA = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: a.data.withUnsafeBytes { $0.baseAddress! }),
height: vImagePixelCount(a.height),
width: vImagePixelCount(a.width),
rowBytes: a.bytesPerRow
)
let scaledYData = malloc(blurredWidth * blurredHeight)!
defer {
free(scaledYData)
}
let scaledUData = malloc(blurredWidth * blurredHeight / 4)!
defer {
free(scaledUData)
}
let scaledVData = malloc(blurredWidth * blurredHeight / 4)!
defer {
free(scaledVData)
}
let scaledAData = malloc(blurredWidth * blurredHeight)!
defer {
free(scaledAData)
}
var scaledY = vImage_Buffer(
data: scaledYData,
height: vImagePixelCount(blurredHeight),
width: vImagePixelCount(blurredWidth),
rowBytes: blurredWidth
)
var scaledU = vImage_Buffer(
data: scaledUData,
height: vImagePixelCount(blurredHeight / 2),
width: vImagePixelCount(blurredWidth / 2),
rowBytes: blurredWidth / 2
)
var scaledV = vImage_Buffer(
data: scaledVData,
height: vImagePixelCount(blurredHeight / 2),
width: vImagePixelCount(blurredWidth / 2),
rowBytes: blurredWidth / 2
)
var scaledA = vImage_Buffer(
data: scaledAData,
height: vImagePixelCount(blurredHeight),
width: vImagePixelCount(blurredWidth),
rowBytes: blurredWidth
)
vImageScale_Planar8(&sourceY, &scaledY, nil, vImage_Flags(kvImageHighQualityResampling))
vImageScale_Planar8(&sourceU, &scaledU, nil, vImage_Flags(kvImageHighQualityResampling))
vImageScale_Planar8(&sourceV, &scaledV, nil, vImage_Flags(kvImageHighQualityResampling))
vImageScale_Planar8(&sourceA, &scaledA, nil, vImage_Flags(kvImageHighQualityResampling))
guard let context = DrawingContext(size: size, scale: 1.0, clear: true) else {
return nil
}
var destinationBuffer = vImage_Buffer(
data: context.bytes,
height: vImagePixelCount(blurredHeight),
width: vImagePixelCount(blurredWidth),
rowBytes: context.bytesPerRow
)
var result = kvImageNoError
var permuteMap: [UInt8] = [1, 2, 3, 0]
result = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&scaledY, &scaledU, &scaledV, &destinationBuffer, &yuvToRgbConversion, &permuteMap, 255, vImage_Flags(kvImageDoNotTile))
if result != kvImageNoError {
return nil
}
result = vImageOverwriteChannels_ARGB8888(&scaledA, &destinationBuffer, &destinationBuffer, 1 << 0, vImage_Flags(kvImageDoNotTile));
if result != kvImageNoError {
return nil
}
vImageBoxConvolve_ARGB8888(&destinationBuffer,
&destinationBuffer,
nil,
0, 0,
UInt32(15),
UInt32(15),
nil,
vImage_Flags(kvImageTruncateKernel))
let divisor: Int32 = 0x1000
let rwgt: CGFloat = 0.3086
let gwgt: CGFloat = 0.6094
let bwgt: CGFloat = 0.0820
let adjustSaturation: CGFloat = 1.7
let a = (1.0 - adjustSaturation) * rwgt + adjustSaturation
let b = (1.0 - adjustSaturation) * rwgt
let c = (1.0 - adjustSaturation) * rwgt
let d = (1.0 - adjustSaturation) * gwgt
let e = (1.0 - adjustSaturation) * gwgt + adjustSaturation
let f = (1.0 - adjustSaturation) * gwgt
let g = (1.0 - adjustSaturation) * bwgt
let h = (1.0 - adjustSaturation) * bwgt
let i = (1.0 - adjustSaturation) * bwgt + adjustSaturation
let satMatrix: [CGFloat] = [
a, b, c, 0,
d, e, f, 0,
g, h, i, 0,
0, 0, 0, 1
]
var matrix: [Int16] = satMatrix.map { value in
return Int16(value * CGFloat(divisor))
}
vImageMatrixMultiply_ARGB8888(&destinationBuffer, &destinationBuffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
context.withFlippedContext { c in
c.setFillColor((color ?? .white).withMultipliedAlpha(0.6).cgColor)
c.fill(CGRect(origin: CGPoint(), size: size))
}
self.blurredRepresentationValue = context.generateImage()
return self.blurredRepresentationValue
}
}
}
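For reference (not part of the diff): the constants above match the classic luminance-weighted saturation matrix, with weights r_w = 0.3086, g_w = 0.6094, b_w = 0.0820 and saturation factor s = 1.7:

\[
M(s) = (1-s)\begin{pmatrix} r_w & r_w & r_w \\ g_w & g_w & g_w \\ b_w & b_w & b_w \end{pmatrix} + s\,I_3
\]

In this family of matrices, s = 1 is the identity, s = 0 collapses the image to weighted-luminance grayscale, and s = 1.7 boosts saturation of the blurred thumbnail. Because vImageMatrixMultiply_ARGB8888 takes Int16 coefficients, each entry is scaled by the 0x1000 divisor before the multiply, and the extra fourth row and column leave the remaining channel untouched.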
@ -230,6 +485,8 @@ private final class ItemAnimationContext {
static let queue0 = Queue(name: "ItemAnimationContext-0", qos: .default)
static let queue1 = Queue(name: "ItemAnimationContext-1", qos: .default)
private let useYuvA: Bool
private let cache: AnimationCache
let queueAffinity: Int
private let stateUpdated: () -> Void
@ -253,9 +510,10 @@ private final class ItemAnimationContext {
let targets = Bag<Weak<MultiAnimationRenderTarget>>()
init(cache: AnimationCache, queueAffinity: Int, itemId: String, size: CGSize, fetch: @escaping (AnimationCacheFetchOptions) -> Disposable, stateUpdated: @escaping () -> Void) {
init(cache: AnimationCache, queueAffinity: Int, itemId: String, size: CGSize, useYuvA: Bool, fetch: @escaping (AnimationCacheFetchOptions) -> Disposable, stateUpdated: @escaping () -> Void) {
self.cache = cache
self.queueAffinity = queueAffinity
self.useYuvA = useYuvA
self.stateUpdated = stateUpdated
self.disposable = cache.get(sourceId: itemId, size: size, fetch: fetch).start(next: { [weak self] result in
@ -300,7 +558,11 @@ private final class ItemAnimationContext {
for target in self.targets.copyItems() {
if let target = target.value {
target.transitionToContents(currentFrame.image.cgImage!, didLoop: false)
if let image = currentFrame.contentsAsImage {
target.transitionToContents(image.cgImage!, didLoop: false)
} else if let pixelBuffer = currentFrame.contentsAsCVPixelBuffer {
target.transitionToContents(pixelBuffer, didLoop: false)
}
if let blurredRepresentationTarget = target.blurredRepresentationTarget {
blurredRepresentationTarget.contents = currentFrame.blurredRepresentation(color: target.blurredRepresentationBackgroundColor)?.cgImage
@ -321,9 +583,15 @@ private final class ItemAnimationContext {
func updateAddedTarget(target: MultiAnimationRenderTarget) {
if let currentFrame = self.currentFrame {
if let cgImage = currentFrame.image.cgImage {
if let cgImage = currentFrame.contentsAsImage?.cgImage {
target.transitionToContents(cgImage, didLoop: false)
if let blurredRepresentationTarget = target.blurredRepresentationTarget {
blurredRepresentationTarget.contents = currentFrame.blurredRepresentation(color: target.blurredRepresentationBackgroundColor)?.cgImage
}
} else if let pixelBuffer = currentFrame.contentsAsCVPixelBuffer {
target.transitionToContents(pixelBuffer, didLoop: false)
if let blurredRepresentationTarget = target.blurredRepresentationTarget {
blurredRepresentationTarget.contents = currentFrame.blurredRepresentation(color: target.blurredRepresentationBackgroundColor)?.cgImage
}
@ -388,12 +656,20 @@ private final class ItemAnimationContext {
self.nextLoadingFrameTaskId += 1
self.loadingFrameTaskId = taskId
let useYuvA = self.useYuvA
return LoadFrameGroupTask(task: { [weak self] in
let currentFrame: (frame: Frame, didLoop: Bool)?
do {
if let (frame, didLoop) = try item.tryWith({ item -> (AnimationCacheItemFrame, Bool)? in
if let result = item.advance(advance: frameAdvance, requestedFormat: .rgba) {
let defaultFormat: AnimationCacheItemFrame.RequestedFormat
if useYuvA {
defaultFormat = .yuva(rowAlignment: 1)
} else {
defaultFormat = .rgba
}
if let result = item.advance(advance: frameAdvance, requestedFormat: defaultFormat) {
return (result.frame, result.didLoop)
} else {
return nil
@ -423,7 +699,11 @@ private final class ItemAnimationContext {
strongSelf.currentFrame = currentFrame.frame
for target in strongSelf.targets.copyItems() {
if let target = target.value {
target.transitionToContents(currentFrame.frame.image.cgImage!, didLoop: currentFrame.didLoop)
if let image = currentFrame.frame.contentsAsImage {
target.transitionToContents(image.cgImage!, didLoop: currentFrame.didLoop)
} else if let pixelBuffer = currentFrame.frame.contentsAsCVPixelBuffer {
target.transitionToContents(pixelBuffer, didLoop: currentFrame.didLoop)
}
if let blurredRepresentationTarget = target.blurredRepresentationTarget {
blurredRepresentationTarget.contents = currentFrame.frame.blurredRepresentation(color: target.blurredRepresentationBackgroundColor)?.cgImage
@ -476,7 +756,7 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
self.stateUpdated = stateUpdated
}
func add(target: MultiAnimationRenderTarget, cache: AnimationCache, itemId: String, unique: Bool, size: CGSize, fetch: @escaping (AnimationCacheFetchOptions) -> Disposable) -> Disposable {
func add(target: MultiAnimationRenderTarget, cache: AnimationCache, itemId: String, unique: Bool, size: CGSize, useYuvA: Bool, fetch: @escaping (AnimationCacheFetchOptions) -> Disposable) -> Disposable {
var uniqueId = 0
if unique {
uniqueId = self.nextUniqueId
@ -490,7 +770,7 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
} else {
let queueAffinity = self.nextQueueAffinity
self.nextQueueAffinity += 1
itemContext = ItemAnimationContext(cache: cache, queueAffinity: queueAffinity, itemId: itemId, size: size, fetch: fetch, stateUpdated: { [weak self] in
itemContext = ItemAnimationContext(cache: cache, queueAffinity: queueAffinity, itemId: itemId, size: size, useYuvA: useYuvA, fetch: fetch, stateUpdated: { [weak self] in
guard let strongSelf = self else {
return
}
@ -545,7 +825,11 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
return false
}
target.contents = loadedFrame.image.cgImage
if let image = loadedFrame.contentsAsImage {
target.contents = image.cgImage
} else if let pixelBuffer = loadedFrame.contentsAsCVPixelBuffer {
target.contents = pixelBuffer
}
target.numFrames = item.numFrames
if let blurredRepresentationTarget = target.blurredRepresentationTarget {
@ -584,12 +868,18 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
}
target.numFrames = item.numFrames
if let loadedFrame = loadedFrame {
if let cgImage = loadedFrame.image.cgImage {
if let cgImage = loadedFrame.contentsAsImage?.cgImage {
if hadIntermediateUpdate {
target.transitionToContents(cgImage, didLoop: false)
} else {
target.contents = cgImage
}
} else if let pixelBuffer = loadedFrame.contentsAsCVPixelBuffer {
if hadIntermediateUpdate {
target.transitionToContents(pixelBuffer, didLoop: false)
} else {
target.contents = pixelBuffer
}
}
if let blurredRepresentationTarget = target.blurredRepresentationTarget {
@ -622,8 +912,10 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
Queue.mainQueue().async {
if let loadedFrame = loadedFrame {
if let cgImage = loadedFrame.image.cgImage {
if let cgImage = loadedFrame.contentsAsImage?.cgImage {
completion(cgImage)
} else {
completion(nil)
}
} else {
completion(nil)
@ -666,6 +958,7 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
public static let firstFrameQueue = Queue(name: "MultiAnimationRenderer-FirstFrame", qos: .userInteractive)
public var useYuvA: Bool = false
private var groupContext: GroupContext?
private var frameSkip: Int
private var displayTimer: Foundation.Timer?
@ -728,7 +1021,7 @@ public final class MultiAnimationRendererImpl: MultiAnimationRenderer {
self.groupContext = groupContext
}
let disposable = groupContext.add(target: target, cache: cache, itemId: itemId, unique: unique, size: size, fetch: fetch)
let disposable = groupContext.add(target: target, cache: cache, itemId: itemId, unique: unique, size: size, useYuvA: self.useYuvA, fetch: fetch)
return ActionDisposable {
disposable.dispose()

View File

@ -323,6 +323,7 @@ public final class AccountContextImpl: AccountContext {
}
})
self.animationRenderer = MultiAnimationRendererImpl()
(self.animationRenderer as? MultiAnimationRendererImpl)?.useYuvA = sharedContext.immediateExperimentalUISettings.compressedEmojiCache
let updatedLimitsConfiguration = account.postbox.preferencesView(keys: [PreferencesKeys.limitsConfiguration])
|> map { preferences -> LimitsConfiguration in
@ -468,6 +469,17 @@ public final class AccountContextImpl: AccountContext {
}
self.isFrozen = isFrozen
})
self.experimentalUISettingsDisposable = (sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.experimentalUISettings])
|> deliverOnMainQueue).start(next: { [weak self] sharedData in
guard let self else {
return
}
guard let settings = sharedData.entries[ApplicationSpecificSharedDataKeys.experimentalUISettings]?.get(ExperimentalUISettings.self) else {
return
}
(self.animationRenderer as? MultiAnimationRendererImpl)?.useYuvA = settings.compressedEmojiCache
})
}
deinit {

View File

@ -38,7 +38,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
public var experimentalCompatibility: Bool
public var enableDebugDataDisplay: Bool
public var rippleEffect: Bool
public var inlineStickers: Bool
public var compressedEmojiCache: Bool
public var localTranscription: Bool
public var enableReactionOverrides: Bool
public var browserExperiment: Bool
@ -81,7 +81,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
experimentalCompatibility: false,
enableDebugDataDisplay: false,
rippleEffect: false,
inlineStickers: false,
compressedEmojiCache: false,
localTranscription: false,
enableReactionOverrides: false,
browserExperiment: false,
@ -125,7 +125,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
experimentalCompatibility: Bool,
enableDebugDataDisplay: Bool,
rippleEffect: Bool,
inlineStickers: Bool,
compressedEmojiCache: Bool,
localTranscription: Bool,
enableReactionOverrides: Bool,
browserExperiment: Bool,
@ -166,7 +166,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.experimentalCompatibility = experimentalCompatibility
self.enableDebugDataDisplay = enableDebugDataDisplay
self.rippleEffect = rippleEffect
self.inlineStickers = inlineStickers
self.compressedEmojiCache = compressedEmojiCache
self.localTranscription = localTranscription
self.enableReactionOverrides = enableReactionOverrides
self.browserExperiment = browserExperiment
@ -211,7 +211,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.experimentalCompatibility = (try container.decodeIfPresent(Int32.self, forKey: "experimentalCompatibility") ?? 0) != 0
self.enableDebugDataDisplay = (try container.decodeIfPresent(Int32.self, forKey: "enableDebugDataDisplay") ?? 0) != 0
self.rippleEffect = (try container.decodeIfPresent(Int32.self, forKey: "rippleEffect") ?? 0) != 0
self.inlineStickers = (try container.decodeIfPresent(Int32.self, forKey: "inlineStickers") ?? 0) != 0
self.compressedEmojiCache = (try container.decodeIfPresent(Int32.self, forKey: "compressedEmojiCache") ?? 0) != 0
self.localTranscription = (try container.decodeIfPresent(Int32.self, forKey: "localTranscription") ?? 0) != 0
self.enableReactionOverrides = try container.decodeIfPresent(Bool.self, forKey: "enableReactionOverrides") ?? false
self.browserExperiment = try container.decodeIfPresent(Bool.self, forKey: "browserExperiment") ?? false
@ -256,7 +256,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
try container.encode((self.experimentalCompatibility ? 1 : 0) as Int32, forKey: "experimentalCompatibility")
try container.encode((self.enableDebugDataDisplay ? 1 : 0) as Int32, forKey: "enableDebugDataDisplay")
try container.encode((self.rippleEffect ? 1 : 0) as Int32, forKey: "rippleEffect")
try container.encode((self.inlineStickers ? 1 : 0) as Int32, forKey: "inlineStickers")
try container.encode((self.compressedEmojiCache ? 1 : 0) as Int32, forKey: "compressedEmojiCache")
try container.encode((self.localTranscription ? 1 : 0) as Int32, forKey: "localTranscription")
try container.encode(self.enableReactionOverrides, forKey: "enableReactionOverrides")
try container.encode(self.browserExperiment, forKey: "browserExperiment")

View File

@ -1,4 +1,5 @@
#import <TgVoipWebrtc/OngoingCallThreadLocalContext.h>
#include <cstdint>
#import "MediaUtils.h"
@ -6,7 +7,6 @@
#import "InstanceImpl.h"
#import "v2/InstanceV2Impl.h"
#import "v2/InstanceV2ReferenceImpl.h"
//#import "v2_4_0_0/InstanceV2_4_0_0Impl.h"
#include "StaticThreads.h"
#import "VideoCaptureInterface.h"
@ -507,8 +507,8 @@ public:
) override {
_mutex.Lock();
if (!_audioTransports.empty()) {
for (size_t i = _audioTransports.size() - 1; i < _audioTransports.size(); i++) {
_audioTransports[_audioTransports.size() - 1]->RecordedDataIsAvailable(
for (size_t i = 0; i < _audioTransports.size(); i++) {
_audioTransports[i]->RecordedDataIsAvailable(
audioSamples,
nSamples,
nBytesPerSample,
@ -542,16 +542,59 @@ public:
int32_t result = 0;
if (!_audioTransports.empty()) {
result = _audioTransports[_audioTransports.size() - 1]->NeedMorePlayData(
nSamples,
nBytesPerSample,
nChannels,
samplesPerSec,
audioSamples,
nSamplesOut,
elapsed_time_ms,
ntp_time_ms
);
if (_audioTransports.size() > 1) {
size_t totalNumSamples = nSamples * nBytesPerSample * nChannels;
if (_mixAudioSamples.size() < totalNumSamples) {
_mixAudioSamples.resize(totalNumSamples);
}
memset(audioSamples, 0, totalNumSamples);
int16_t *resultAudioSamples = (int16_t *)audioSamples;
for (size_t i = 0; i < _audioTransports.size(); i++) {
int64_t localElapsedTimeMs = 0;
int64_t localNtpTimeMs = 0;
size_t localNSamplesOut = 0;
_audioTransports[i]->NeedMorePlayData(
nSamples,
nBytesPerSample,
nChannels,
samplesPerSec,
_mixAudioSamples.data(),
localNSamplesOut,
&localElapsedTimeMs,
&localNtpTimeMs
);
for (size_t j = 0; j < localNSamplesOut; j++) {
int32_t mixedSample = (int32_t)resultAudioSamples[j] + (int32_t)_mixAudioSamples[j];
resultAudioSamples[j] = (int16_t)std::clamp(mixedSample, INT16_MIN, INT16_MAX);
}
if (i == _audioTransports.size() - 1) {
nSamplesOut = localNSamplesOut;
if (elapsed_time_ms) {
*elapsed_time_ms = localElapsedTimeMs;
}
if (ntp_time_ms) {
*ntp_time_ms = localNtpTimeMs;
}
}
}
nSamplesOut = nSamples;
} else {
result = _audioTransports[_audioTransports.size() - 1]->NeedMorePlayData(
nSamples,
nBytesPerSample,
nChannels,
samplesPerSec,
audioSamples,
nSamplesOut,
elapsed_time_ms,
ntp_time_ms
);
}
} else {
nSamplesOut = 0;
}
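In short (not part of the diff): when more than one audio transport is attached, playout data is now produced by summing each transport's samples with int16 saturation,

\[
\text{out}[j] = \operatorname{clamp}\!\Big(\sum_{i} x_i[j],\; -2^{15},\; 2^{15}-1\Big),
\]

while the elapsed/NTP timestamps are taken from the last transport in the list and nSamplesOut is reported as the full requested nSamples.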
@ -620,6 +663,7 @@ private:
bool _isStarted = false;
std::vector<webrtc::AudioTransport *> _audioTransports;
webrtc::Mutex _mutex;
std::vector<int16_t> _mixAudioSamples;
};
class WrappedChildAudioDeviceModule : public tgcalls::DefaultWrappedAudioDeviceModule {