Call improvements

Isaac 2025-05-05 18:04:32 +02:00
parent 65a0b41071
commit 4b90fffb69
13 changed files with 185 additions and 48 deletions

View File

@ -336,7 +336,7 @@ private final class EmbeddedBroadcastUploadImpl: BroadcastUploadImpl {
let logsPath = rootPath + "/logs/broadcast-logs"
let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type"
let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type-v2"
var useIPCContext = false
if let typeData = try? Data(contentsOf: URL(fileURLWithPath: embeddedBroadcastImplementationTypePath)), let type = String(data: typeData, encoding: .utf8) {
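The coordination marker moves to a "-v2" file name, so broadcast extensions built before this change keep reading the old path and never pick up a marker format they do not understand. A minimal sketch of the read side, assuming the main app writes a short UTF-8 marker string (the "ipc" value below is hypothetical; the real marker values are not shown in this diff):

```swift
import Foundation

// Sketch of the marker handshake between the app and the broadcast extension.
func resolveUseIPCContext(rootPath: String) -> Bool {
    // The "-v2" suffix isolates the new format from older builds, which
    // still read the unsuffixed "broadcast-coordination-type" file.
    let markerPath = rootPath + "/broadcast-coordination-type-v2"
    guard let data = try? Data(contentsOf: URL(fileURLWithPath: markerPath)),
          let type = String(data: data, encoding: .utf8) else {
        return false
    }
    return type == "ipc" // hypothetical marker value
}
```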

View File

@ -35,7 +35,8 @@ public final class ViewController: UIViewController {
isRemoteAudioMuted: false,
localVideo: nil,
remoteVideo: nil,
isRemoteBatteryLow: false
isRemoteBatteryLow: false,
enableVideoSharpening: false
)
private var currentLayout: (size: CGSize, insets: UIEdgeInsets)?

View File

@ -167,6 +167,11 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
self.conferenceAddParticipant?()
}
var enableVideoSharpening = true
if let data = call.context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
enableVideoSharpening = value != 0.0
}
self.callScreenState = PrivateCallScreen.State(
strings: presentationData.strings,
lifecycleState: .connecting,
@ -180,7 +185,8 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
remoteVideo: nil,
isRemoteBatteryLow: false,
isEnergySavingEnabled: !self.sharedContext.energyUsageSettings.fullTranslucency,
isConferencePossible: false
isConferencePossible: false,
enableVideoSharpening: enableVideoSharpening
)
self.isMicrophoneMutedDisposable = (call.isMuted
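This screen and the video chat screen later in the commit gate sharpening on the same server flag; the 1:1 screen defaults to enabled when the key is absent, while the sample ViewController above hardcodes it off. A hedged helper showing the shared lookup, assuming the configuration payload is the `[String: Any]` JSON dictionary the diff treats it as (`defaultValue` is a hypothetical parameter standing in for the per-screen defaults):

```swift
// Sketch of the flag lookup repeated in this commit.
func videoSharpeningEnabled(appConfigurationData: [String: Any]?, defaultValue: Bool) -> Bool {
    // Server flags arrive as JSON numbers; any non-zero value enables the feature.
    if let value = appConfigurationData?["ios_call_video_sharpening"] as? Double {
        return value != 0.0
    }
    return defaultValue
}
```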

View File

@ -1160,7 +1160,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
useIPCContext = value != 0.0
}
let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type"
let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type-v2"
let screencastIPCContext: ScreencastIPCContext
if useIPCContext {

View File

@ -7,6 +7,7 @@ import BalancedTextComponent
import TelegramPresentationData
import CallsEmoji
import ImageBlur
import HierarchyTrackingLayer
private final class EmojiContainerView: UIView {
private let maskImageView: UIImageView?
@ -207,6 +208,7 @@ private final class EmojiItemComponent: Component {
}
final class View: UIView {
private let hierarchyTrackingLayer: HierarchyTrackingLayer
private let containerView: EmojiContainerView
private let measureEmojiView = ComponentView<Empty>()
private var pendingContainerView: EmojiContainerView?
@ -219,11 +221,22 @@ private final class EmojiItemComponent: Component {
private var pendingEmojiValues: [String]?
override init(frame: CGRect) {
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
self.containerView = EmojiContainerView(hasMask: true)
super.init(frame: frame)
self.layer.addSublayer(self.hierarchyTrackingLayer)
self.addSubview(self.containerView)
self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in
guard let self else {
return
}
if value {
self.state?.updated(transition: .immediate)
}
}
}
required init?(coder: NSCoder) {
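HierarchyTrackingLayer gives the emoji item a callback whenever it enters or leaves the live layer hierarchy; on re-entry the component forces an immediate state update, so animations that were skipped while the view was detached get re-applied. A minimal sketch of the pattern, assuming `HierarchyTrackingLayer` is the `CALayer` subclass with an `isInHierarchyUpdated` closure used above:

```swift
import UIKit
import HierarchyTrackingLayer

// Refresh state when the view re-enters a window, mirroring the change above.
final class HierarchyAwareView: UIView {
    private let trackingLayer = HierarchyTrackingLayer()

    override init(frame: CGRect) {
        super.init(frame: frame)
        // The tracking layer draws nothing; it only observes attachment.
        layer.addSublayer(trackingLayer)
        trackingLayer.isInHierarchyUpdated = { [weak self] isInHierarchy in
            guard let self, isInHierarchy else {
                return
            }
            // Re-run any update that was dropped while off screen.
            self.setNeedsLayout()
        }
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
```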

View File

@ -275,7 +275,7 @@ final class VideoChatParticipantThumbnailComponent: Component {
if let current = self.videoLayer {
videoLayer = current
} else {
videoLayer = PrivateCallVideoLayer()
videoLayer = PrivateCallVideoLayer(enableSharpening: false)
self.videoLayer = videoLayer
self.extractedContainerView.contentView.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
self.extractedContainerView.contentView.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)

View File

@ -51,6 +51,7 @@ final class VideoChatParticipantVideoComponent: Component {
let contentInsets: UIEdgeInsets
let controlInsets: UIEdgeInsets
let interfaceOrientation: UIInterfaceOrientation
let enableVideoSharpening: Bool
let action: (() -> Void)?
let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?
let activatePinch: ((PinchSourceContainerNode) -> Void)?
@ -70,6 +71,7 @@ final class VideoChatParticipantVideoComponent: Component {
contentInsets: UIEdgeInsets,
controlInsets: UIEdgeInsets,
interfaceOrientation: UIInterfaceOrientation,
enableVideoSharpening: Bool,
action: (() -> Void)?,
contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?,
activatePinch: ((PinchSourceContainerNode) -> Void)?,
@ -88,6 +90,7 @@ final class VideoChatParticipantVideoComponent: Component {
self.contentInsets = contentInsets
self.controlInsets = controlInsets
self.interfaceOrientation = interfaceOrientation
self.enableVideoSharpening = enableVideoSharpening
self.action = action
self.contextAction = contextAction
self.activatePinch = activatePinch
@ -128,6 +131,9 @@ final class VideoChatParticipantVideoComponent: Component {
if lhs.interfaceOrientation != rhs.interfaceOrientation {
return false
}
if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
return false
}
if (lhs.action == nil) != (rhs.action == nil) {
return false
}
@ -525,7 +531,7 @@ final class VideoChatParticipantVideoComponent: Component {
resetVideoSource = true
}
} else {
videoLayer = PrivateCallVideoLayer()
videoLayer = PrivateCallVideoLayer(enableSharpening: component.enableVideoSharpening)
self.videoLayer = videoLayer
videoLayer.opacity = 0.0
self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)

View File

@ -152,6 +152,7 @@ final class VideoChatParticipantsComponent: Component {
let expandedInsets: UIEdgeInsets
let safeInsets: UIEdgeInsets
let interfaceOrientation: UIInterfaceOrientation
let enableVideoSharpening: Bool
let openParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
let openInvitedParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
let updateMainParticipant: (VideoParticipantKey?, Bool?) -> Void
@ -173,6 +174,7 @@ final class VideoChatParticipantsComponent: Component {
expandedInsets: UIEdgeInsets,
safeInsets: UIEdgeInsets,
interfaceOrientation: UIInterfaceOrientation,
enableVideoSharpening: Bool,
openParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
openInvitedParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
updateMainParticipant: @escaping (VideoParticipantKey?, Bool?) -> Void,
@ -193,6 +195,7 @@ final class VideoChatParticipantsComponent: Component {
self.expandedInsets = expandedInsets
self.safeInsets = safeInsets
self.interfaceOrientation = interfaceOrientation
self.enableVideoSharpening = enableVideoSharpening
self.openParticipantContextMenu = openParticipantContextMenu
self.openInvitedParticipantContextMenu = openInvitedParticipantContextMenu
self.updateMainParticipant = updateMainParticipant
@ -239,6 +242,9 @@ final class VideoChatParticipantsComponent: Component {
if lhs.interfaceOrientation != rhs.interfaceOrientation {
return false
}
if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
return false
}
return true
}
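The flag follows the standard component plumbing seen in this file and the previous one: stored as an immutable property, compared in `==`, and forwarded to each child, so a changed server flag invalidates cached state and recreates the video layers. The pattern in compressed form (a hypothetical component, not from the source):

```swift
// Hypothetical minimal component illustrating the plumbing the diff repeats.
final class VideoTileComponent: Equatable {
    let enableVideoSharpening: Bool

    init(enableVideoSharpening: Bool) {
        self.enableVideoSharpening = enableVideoSharpening
    }

    static func ==(lhs: VideoTileComponent, rhs: VideoTileComponent) -> Bool {
        // A differing flag forces the framework to re-render this subtree.
        if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
            return false
        }
        return true
    }
}
```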
@ -1074,6 +1080,7 @@ final class VideoChatParticipantsComponent: Component {
contentInsets: itemContentInsets,
controlInsets: itemControlInsets,
interfaceOrientation: component.interfaceOrientation,
enableVideoSharpening: component.enableVideoSharpening,
action: { [weak self] in
guard let self, let component = self.component else {
return

View File

@ -235,6 +235,8 @@ final class VideoChatScreenComponent: Component {
let participants = ComponentView<Empty>()
var scheduleInfo: ComponentView<Empty>?
var enableVideoSharpening: Bool = false
var reconnectedAsEventsDisposable: Disposable?
var memberEventsDisposable: Disposable?
@ -1244,6 +1246,11 @@ final class VideoChatScreenComponent: Component {
self.invitedPeers.removeAll(where: { invitedPeer in members.participants.contains(where: { $0.id == .peer(invitedPeer.peer.id) }) })
}
self.callState = component.initialData.callState
self.enableVideoSharpening = true
if let data = component.initialCall.accountContext.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
self.enableVideoSharpening = value != 0.0
}
}
var call: VideoChatCall
@ -1359,7 +1366,7 @@ final class VideoChatScreenComponent: Component {
return false
}
if participant.videoDescription != nil || participant.presentationDescription != nil {
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
if let participantPeer = participant.peer, participantPeer.id != groupCall.accountContext.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
return true
}
}
@ -1421,7 +1428,7 @@ final class VideoChatScreenComponent: Component {
var speakingParticipantPeers: [EnginePeer] = []
if let members, !members.speakingParticipants.isEmpty {
for participant in members.participants {
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
if let participantPeer = participant.peer, participantPeer.id != groupCall.accountContext.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
speakingParticipantPeers.append(participantPeer)
}
}
@ -1698,7 +1705,7 @@ final class VideoChatScreenComponent: Component {
return false
}
if participant.videoDescription != nil || participant.presentationDescription != nil {
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
if let participantPeer = participant.peer, participantPeer.id != conferenceSource.context.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
return true
}
}
@ -1760,7 +1767,7 @@ final class VideoChatScreenComponent: Component {
var speakingParticipantPeers: [EnginePeer] = []
if !members.speakingParticipants.isEmpty {
for participant in members.participants {
if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
if let participantPeer = participant.peer, participantPeer.id != conferenceSource.context.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
speakingParticipantPeers.append(participantPeer)
}
}
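The four hunks in this file apply the same fix: the local account's peer is excluded before consulting `speakingParticipants`, so your own microphone activity no longer promotes your own tile or lists you among the speakers. The shared predicate, extracted as a sketch (using the `EnginePeer` types from the diff):

```swift
// Sketch of the predicate the four hunks above repeat inline.
func isRemoteSpeaker(
    _ peer: EnginePeer,
    speakingIds: Set<EnginePeer.Id>,
    localPeerId: EnginePeer.Id
) -> Bool {
    // Skip the local account; only remote peers count as speakers.
    return peer.id != localPeerId && speakingIds.contains(peer.id)
}
```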
@ -2501,6 +2508,7 @@ final class VideoChatScreenComponent: Component {
expandedInsets: participantsExpandedInsets,
safeInsets: participantsSafeInsets,
interfaceOrientation: environment.orientation ?? .portrait,
enableVideoSharpening: self.enableVideoSharpening,
openParticipantContextMenu: { [weak self] id, sourceView, gesture in
guard let self else {
return

View File

@ -5,6 +5,21 @@ import MetalPerformanceShaders
import Accelerate
import MetalEngine
private func makeSharpenKernel(device: MTLDevice, sharpeningStrength: Float) -> MPSImageConvolution {
let centerWeight = 1.0 + 6.0 * sharpeningStrength
let adjacentWeight = -1.0 * sharpeningStrength
let diagonalWeight = -0.5 * sharpeningStrength
let sharpenWeights: [Float] = [
diagonalWeight, adjacentWeight, diagonalWeight,
adjacentWeight, centerWeight, adjacentWeight,
diagonalWeight, adjacentWeight, diagonalWeight
]
let result = MPSImageConvolution(device: device, kernelWidth: 3, kernelHeight: 3, weights: sharpenWeights)
result.edgeMode = .clamp
return result
}
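The weights form a brightness-preserving sharpen kernel: for any strength s they sum to (1 + 6s) + 4(-s) + 4(-0.5s) = 1, so flat regions pass through unchanged while edges are boosted, and s = 0 degenerates to the identity kernel. A quick check:

```swift
// Verify the kernel preserves flat-color (DC) response for any strength.
let strengths: [Float] = [0.0, 0.5, 1.4] // 1.4 is the default installed below
for s in strengths {
    let sum = (1 + 6 * s) + 4 * (-1 * s) + 4 * (-0.5 * s)
    assert(abs(sum - 1) < 1e-4, "kernel weights must sum to 1")
}
```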
public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
public var internalData: MetalEngineSubjectInternalData?
@ -17,6 +32,9 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
let computePipelineStateVertical: MTLComputePipelineState
let downscaleKernel: MPSImageBilinearScale
var sharpeningStrength: Float = 0.0
var sharpenKernel: MPSImageConvolution
required init?(device: MTLDevice) {
guard let library = metalLibrary(device: device) else {
return nil
@ -52,6 +70,14 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
self.computePipelineStateVertical = computePipelineStateVertical
self.downscaleKernel = MPSImageBilinearScale(device: device)
self.sharpeningStrength = 1.4
self.sharpenKernel = makeSharpenKernel(device: device, sharpeningStrength: self.sharpeningStrength)
}
func updateSharpeningStrength(device: MTLDevice, sharpeningStrength: Float) {
self.sharpeningStrength = sharpeningStrength
self.sharpenKernel = makeSharpenKernel(device: device, sharpeningStrength: self.sharpeningStrength)
}
}
@ -83,20 +109,25 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
}
}
private let enableSharpening: Bool
public var renderSpec: RenderLayerSpec?
private var rgbaTexture: PooledTexture?
private var sharpenedTexture: PooledTexture?
private var downscaledTexture: PooledTexture?
private var blurredHorizontalTexture: PooledTexture?
private var blurredVerticalTexture: PooledTexture?
override public init() {
public init(enableSharpening: Bool) {
self.enableSharpening = enableSharpening
self.blurredLayer = MetalEngineSubjectLayer()
super.init()
}
override public init(layer: Any) {
self.enableSharpening = false
self.blurredLayer = MetalEngineSubjectLayer()
super.init(layer: layer)
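`init(layer:)` is the initializer Core Animation invokes when it makes shadow copies of a layer (for example for the presentation tree), so it cannot receive the flag as a parameter; hardcoding false there is safe because the copy never encodes GPU work itself. The pattern in isolation, as a hedged sketch with a hypothetical layer:

```swift
import QuartzCore

// Hypothetical layer demonstrating the init(layer:) copy path.
final class FlaggedLayer: CALayer {
    let enableSharpening: Bool

    init(enableSharpening: Bool) {
        self.enableSharpening = enableSharpening
        super.init()
    }

    override init(layer: Any) {
        // Shadow copies never encode work themselves; default the flag off.
        self.enableSharpening = false
        super.init(layer: layer)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
```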
@ -121,6 +152,9 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
if self.rgbaTexture == nil || self.rgbaTexture?.spec != rgbaTextureSpec {
self.rgbaTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
}
if self.sharpenedTexture == nil || self.sharpenedTexture?.spec != rgbaTextureSpec {
self.sharpenedTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
}
if self.downscaledTexture == nil {
self.downscaledTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 128, height: 128, pixelFormat: .rgba8UnsignedNormalized))
}
@ -135,34 +169,89 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
return
}
let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
guard let rgbaTexture else {
return
}
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return
}
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
switch videoTextures.textureLayout {
case let .biPlanar(biPlanar):
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
computeEncoder.setTexture(biPlanar.y, index: 0)
computeEncoder.setTexture(biPlanar.uv, index: 1)
computeEncoder.setTexture(rgbaTexture, index: 2)
case let .triPlanar(triPlanar):
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
computeEncoder.setTexture(triPlanar.y, index: 0)
computeEncoder.setTexture(triPlanar.u, index: 1)
computeEncoder.setTexture(triPlanar.v, index: 2)
computeEncoder.setTexture(rgbaTexture, index: 3)
}
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
})
var outputTexture = rgbaTexture
var sharpenedTexture: TexturePlaceholder?
if self.enableSharpening && rgbaTextureSpec.width * rgbaTextureSpec.height >= 800 * 480 {
sharpenedTexture = self.sharpenedTexture?.get(context: context)
if let sharpenedTexture {
outputTexture = sharpenedTexture
}
}
if let sharpenedTexture {
let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, sharpenedTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, sharpenedTexture in
guard let rgbaTexture else {
return
}
guard let sharpenedTexture else {
return
}
do {
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return
}
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
switch videoTextures.textureLayout {
case let .biPlanar(biPlanar):
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
computeEncoder.setTexture(biPlanar.y, index: 0)
computeEncoder.setTexture(biPlanar.uv, index: 1)
computeEncoder.setTexture(rgbaTexture, index: 2)
case let .triPlanar(triPlanar):
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
computeEncoder.setTexture(triPlanar.y, index: 0)
computeEncoder.setTexture(triPlanar.u, index: 1)
computeEncoder.setTexture(triPlanar.v, index: 2)
computeEncoder.setTexture(rgbaTexture, index: 3)
}
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
}
do {
blurState.sharpenKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: sharpenedTexture)
}
})
} else {
let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
guard let rgbaTexture else {
return
}
do {
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return
}
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
switch videoTextures.textureLayout {
case let .biPlanar(biPlanar):
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
computeEncoder.setTexture(biPlanar.y, index: 0)
computeEncoder.setTexture(biPlanar.uv, index: 1)
computeEncoder.setTexture(rgbaTexture, index: 2)
case let .triPlanar(triPlanar):
computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
computeEncoder.setTexture(triPlanar.y, index: 0)
computeEncoder.setTexture(triPlanar.u, index: 1)
computeEncoder.setTexture(triPlanar.v, index: 2)
computeEncoder.setTexture(rgbaTexture, index: 3)
}
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
}
})
}
if !self.blurredLayer.isHidden {
guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
@ -228,8 +317,8 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
})
}
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
guard let rgbaTexture else {
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: outputTexture.placeholer, commands: { encoder, placement, outputTexture in
guard let outputTexture else {
return
}
@ -244,7 +333,7 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(rgbaTexture, index: 0)
encoder.setFragmentTexture(outputTexture, index: 0)
var brightness: Float = 1.0
var saturation: Float = 1.0
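End to end, the new path is: the YUV planes are converted into rgbaTexture; when sharpening is active, the MPS convolution then writes rgbaTexture into sharpenedTexture; finally the render pass samples outputTexture, which points at whichever of the two was produced last. Sharpening is only attempted when the frame is at least 800x480, presumably so small, heavily compressed feeds do not just get their compression noise amplified. The gate in compact form:

```swift
// Compact restatement of the gate used above; width and height come from
// the incoming rgbaTextureSpec.
func shouldSharpen(enableSharpening: Bool, width: Int, height: Int) -> Bool {
    // Small feeds skip the extra pass entirely.
    return enableSharpening && width * height >= 800 * 480
}
```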

View File

@ -128,6 +128,7 @@ final class VideoContainerView: HighlightTrackingButton {
}
let key: Key
let enableSharpening: Bool
let videoContainerLayer: VideoContainerLayer
var videoContainerLayerTaken: Bool = false
@ -211,8 +212,9 @@ final class VideoContainerView: HighlightTrackingButton {
var pressAction: (() -> Void)?
init(key: Key) {
init(key: Key, enableSharpening: Bool) {
self.key = key
self.enableSharpening = enableSharpening
self.videoContainerLayer = VideoContainerLayer()
self.videoContainerLayer.backgroundColor = nil
@ -223,7 +225,7 @@ final class VideoContainerView: HighlightTrackingButton {
self.videoContainerLayer.contentsLayer.cornerCurve = .circular
}
self.videoLayer = PrivateCallVideoLayer()
self.videoLayer = PrivateCallVideoLayer(enableSharpening: self.enableSharpening)
self.videoLayer.masksToBounds = true
self.videoLayer.isDoubleSided = false
if #available(iOS 13.0, *) {
@ -454,7 +456,7 @@ final class VideoContainerView: HighlightTrackingButton {
let previousVideoLayer = self.videoLayer
self.disappearingVideoLayer = DisappearingVideo(flipAnimationInfo: flipAnimationInfo, videoLayer: self.videoLayer, videoMetrics: videoMetrics)
self.videoLayer = PrivateCallVideoLayer()
self.videoLayer = PrivateCallVideoLayer(enableSharpening: self.enableSharpening)
self.videoLayer.opacity = previousVideoLayer.opacity
self.videoLayer.masksToBounds = true
self.videoLayer.isDoubleSided = false

View File

@ -81,6 +81,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
public var isRemoteBatteryLow: Bool
public var isEnergySavingEnabled: Bool
public var isConferencePossible: Bool
public var enableVideoSharpening: Bool
public init(
strings: PresentationStrings,
@ -95,7 +96,8 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
remoteVideo: VideoSource?,
isRemoteBatteryLow: Bool,
isEnergySavingEnabled: Bool,
isConferencePossible: Bool
isConferencePossible: Bool,
enableVideoSharpening: Bool
) {
self.strings = strings
self.lifecycleState = lifecycleState
@ -110,6 +112,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
self.isRemoteBatteryLow = isRemoteBatteryLow
self.isEnergySavingEnabled = isEnergySavingEnabled
self.isConferencePossible = isConferencePossible
self.enableVideoSharpening = enableVideoSharpening
}
public static func ==(lhs: State, rhs: State) -> Bool {
@ -152,6 +155,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
if lhs.isConferencePossible != rhs.isConferencePossible {
return false
}
if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
return false
}
return true
}
}
@ -994,7 +1000,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
videoContainerView = current
} else {
animateIn = true
videoContainerView = VideoContainerView(key: videoContainerKey)
videoContainerView = VideoContainerView(key: videoContainerKey, enableSharpening: params.state.enableVideoSharpening)
switch videoContainerKey {
case .foreground:
self.overlayContentsView.layer.addSublayer(videoContainerView.blurredContainerLayer)

View File

@ -1631,7 +1631,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
tgcalls::Register<tgcalls::InstanceImpl>();
//tgcalls::Register<tgcalls::InstanceV2_4_0_0Impl>();
tgcalls::Register<tgcalls::InstanceV2Impl>();
tgcalls::Register<tgcalls::InstanceV2ReferenceImpl>();
});