Mirror of https://github.com/Swiftgram/Telegram-iOS.git

Call improvements

commit 4b90fffb69 (parent 65a0b41071)
@@ -336,7 +336,7 @@ private final class EmbeddedBroadcastUploadImpl: BroadcastUploadImpl {
 let logsPath = rootPath + "/logs/broadcast-logs"
 let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)

-let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type"
+let embeddedBroadcastImplementationTypePath = rootPath + "/broadcast-coordination-type-v2"

 var useIPCContext = false
 if let typeData = try? Data(contentsOf: URL(fileURLWithPath: embeddedBroadcastImplementationTypePath)), let type = String(data: typeData, encoding: .utf8) {
@@ -35,7 +35,8 @@ public final class ViewController: UIViewController {
 isRemoteAudioMuted: false,
 localVideo: nil,
 remoteVideo: nil,
-isRemoteBatteryLow: false
+isRemoteBatteryLow: false,
+enableVideoSharpening: false
 )

 private var currentLayout: (size: CGSize, insets: UIEdgeInsets)?
@@ -166,6 +166,11 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
     }
     self.conferenceAddParticipant?()
 }

+var enableVideoSharpening = true
+if let data = call.context.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
+    enableVideoSharpening = value != 0.0
+}
+
 self.callScreenState = PrivateCallScreen.State(
     strings: presentationData.strings,
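The new enableVideoSharpening flag defaults to true on the one-to-one call screen and is only switched off when the server-side app configuration carries ios_call_video_sharpening = 0. A minimal sketch of the same lookup, assuming the configuration data arrives as the usual [String: Any] JSON dictionary (the helper name is hypothetical):

    // Hedged sketch of the flag lookup used above; appConfigurationData stands in
    // for call.context.currentAppConfiguration.with({ $0 }).data.
    func resolveVideoSharpening(appConfigurationData: [String: Any]?, defaultValue: Bool) -> Bool {
        guard let data = appConfigurationData, let value = data["ios_call_video_sharpening"] as? Double else {
            return defaultValue // flag absent: keep the built-in default
        }
        return value != 0.0 // any non-zero number enables sharpening
    }

The group-call screen further down applies the same lookup when its initial call state is set.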
@@ -180,7 +185,8 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeP
 remoteVideo: nil,
 isRemoteBatteryLow: false,
 isEnergySavingEnabled: !self.sharedContext.energyUsageSettings.fullTranslucency,
-isConferencePossible: false
+isConferencePossible: false,
+enableVideoSharpening: enableVideoSharpening
 )

 self.isMicrophoneMutedDisposable = (call.isMuted
@@ -1160,7 +1160,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     useIPCContext = value != 0.0
 }

-let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type"
+let embeddedBroadcastImplementationTypePath = self.accountContext.sharedContext.basePath + "/broadcast-coordination-type-v2"

 let screencastIPCContext: ScreencastIPCContext
 if useIPCContext {
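Both the app process (PresentationGroupCallImpl) and the broadcast upload extension build the same broadcast-coordination-type path under the shared container, and this commit renames it to broadcast-coordination-type-v2 on both sides. Bumping the file name rather than rewriting its contents means an older extension or app build simply finds no file and falls back to its default, so the two processes cannot disagree about which screencast IPC scheme to use. A hedged sketch of the marker-file handshake, with hypothetical helper names (only the "-v2" path suffix comes from the diff):

    import Foundation

    // Writer side: the app records which coordination scheme it expects.
    func writeBroadcastCoordinationType(_ type: String, basePath: String) {
        let path = basePath + "/broadcast-coordination-type-v2"
        guard let data = type.data(using: .utf8) else {
            return
        }
        try? data.write(to: URL(fileURLWithPath: path), options: .atomic)
    }

    // Reader side: the broadcast extension checks the marker before choosing an IPC context.
    func readBroadcastCoordinationType(basePath: String) -> String? {
        let path = basePath + "/broadcast-coordination-type-v2"
        guard let data = try? Data(contentsOf: URL(fileURLWithPath: path)) else {
            return nil
        }
        return String(data: data, encoding: .utf8)
    }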
@@ -7,6 +7,7 @@ import BalancedTextComponent
 import TelegramPresentationData
 import CallsEmoji
 import ImageBlur
+import HierarchyTrackingLayer

 private final class EmojiContainerView: UIView {
     private let maskImageView: UIImageView?
@@ -207,6 +208,7 @@ private final class EmojiItemComponent: Component {
 }

 final class View: UIView {
+    private let hierarchyTrackingLayer: HierarchyTrackingLayer
     private let containerView: EmojiContainerView
     private let measureEmojiView = ComponentView<Empty>()
     private var pendingContainerView: EmojiContainerView?
@@ -219,11 +221,22 @@ private final class EmojiItemComponent: Component {
 private var pendingEmojiValues: [String]?

 override init(frame: CGRect) {
+    self.hierarchyTrackingLayer = HierarchyTrackingLayer()
     self.containerView = EmojiContainerView(hasMask: true)

     super.init(frame: frame)

+    self.layer.addSublayer(self.hierarchyTrackingLayer)
     self.addSubview(self.containerView)
+
+    self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in
+        guard let self else {
+            return
+        }
+        if value {
+            self.state?.updated(transition: .immediate)
+        }
+    }
 }

 required init?(coder: NSCoder) {
@@ -275,7 +275,7 @@ final class VideoChatParticipantThumbnailComponent: Component {
 if let current = self.videoLayer {
     videoLayer = current
 } else {
-    videoLayer = PrivateCallVideoLayer()
+    videoLayer = PrivateCallVideoLayer(enableSharpening: false)
     self.videoLayer = videoLayer
     self.extractedContainerView.contentView.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
     self.extractedContainerView.contentView.layer.insertSublayer(videoLayer, above: videoLayer.blurredLayer)
@@ -51,6 +51,7 @@ final class VideoChatParticipantVideoComponent: Component {
 let contentInsets: UIEdgeInsets
 let controlInsets: UIEdgeInsets
 let interfaceOrientation: UIInterfaceOrientation
+let enableVideoSharpening: Bool
 let action: (() -> Void)?
 let contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?
 let activatePinch: ((PinchSourceContainerNode) -> Void)?
@@ -70,6 +71,7 @@ final class VideoChatParticipantVideoComponent: Component {
 contentInsets: UIEdgeInsets,
 controlInsets: UIEdgeInsets,
 interfaceOrientation: UIInterfaceOrientation,
+enableVideoSharpening: Bool,
 action: (() -> Void)?,
 contextAction: ((EnginePeer, ContextExtractedContentContainingView, ContextGesture) -> Void)?,
 activatePinch: ((PinchSourceContainerNode) -> Void)?,
@@ -88,6 +90,7 @@ final class VideoChatParticipantVideoComponent: Component {
 self.contentInsets = contentInsets
 self.controlInsets = controlInsets
 self.interfaceOrientation = interfaceOrientation
+self.enableVideoSharpening = enableVideoSharpening
 self.action = action
 self.contextAction = contextAction
 self.activatePinch = activatePinch
@@ -128,6 +131,9 @@ final class VideoChatParticipantVideoComponent: Component {
 if lhs.interfaceOrientation != rhs.interfaceOrientation {
     return false
 }
+if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
+    return false
+}
 if (lhs.action == nil) != (rhs.action == nil) {
     return false
 }
@@ -525,7 +531,7 @@ final class VideoChatParticipantVideoComponent: Component {
         resetVideoSource = true
     }
 } else {
-    videoLayer = PrivateCallVideoLayer()
+    videoLayer = PrivateCallVideoLayer(enableSharpening: component.enableVideoSharpening)
     self.videoLayer = videoLayer
     videoLayer.opacity = 0.0
     self.pinchContainerNode.contentNode.view.layer.insertSublayer(videoLayer.blurredLayer, above: videoBackgroundLayer)
@@ -152,6 +152,7 @@ final class VideoChatParticipantsComponent: Component {
 let expandedInsets: UIEdgeInsets
 let safeInsets: UIEdgeInsets
 let interfaceOrientation: UIInterfaceOrientation
+let enableVideoSharpening: Bool
 let openParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
 let openInvitedParticipantContextMenu: (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void
 let updateMainParticipant: (VideoParticipantKey?, Bool?) -> Void
@@ -173,6 +174,7 @@ final class VideoChatParticipantsComponent: Component {
 expandedInsets: UIEdgeInsets,
 safeInsets: UIEdgeInsets,
 interfaceOrientation: UIInterfaceOrientation,
+enableVideoSharpening: Bool,
 openParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
 openInvitedParticipantContextMenu: @escaping (EnginePeer.Id, ContextExtractedContentContainingView, ContextGesture?) -> Void,
 updateMainParticipant: @escaping (VideoParticipantKey?, Bool?) -> Void,
@@ -193,6 +195,7 @@ final class VideoChatParticipantsComponent: Component {
 self.expandedInsets = expandedInsets
 self.safeInsets = safeInsets
 self.interfaceOrientation = interfaceOrientation
+self.enableVideoSharpening = enableVideoSharpening
 self.openParticipantContextMenu = openParticipantContextMenu
 self.openInvitedParticipantContextMenu = openInvitedParticipantContextMenu
 self.updateMainParticipant = updateMainParticipant
@@ -239,6 +242,9 @@ final class VideoChatParticipantsComponent: Component {
 if lhs.interfaceOrientation != rhs.interfaceOrientation {
     return false
 }
+if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
+    return false
+}
 return true
 }

@@ -1074,6 +1080,7 @@ final class VideoChatParticipantsComponent: Component {
 contentInsets: itemContentInsets,
 controlInsets: itemControlInsets,
 interfaceOrientation: component.interfaceOrientation,
+enableVideoSharpening: component.enableVideoSharpening,
 action: { [weak self] in
     guard let self, let component = self.component else {
         return
@@ -234,6 +234,8 @@ final class VideoChatScreenComponent: Component {

 let participants = ComponentView<Empty>()
 var scheduleInfo: ComponentView<Empty>?

+var enableVideoSharpening: Bool = false
+
 var reconnectedAsEventsDisposable: Disposable?
 var memberEventsDisposable: Disposable?
@@ -1244,6 +1246,11 @@ final class VideoChatScreenComponent: Component {
         self.invitedPeers.removeAll(where: { invitedPeer in members.participants.contains(where: { $0.id == .peer(invitedPeer.peer.id) }) })
     }
     self.callState = component.initialData.callState
+
+    self.enableVideoSharpening = true
+    if let data = component.initialCall.accountContext.currentAppConfiguration.with({ $0 }).data, let value = data["ios_call_video_sharpening"] as? Double {
+        self.enableVideoSharpening = value != 0.0
+    }
 }

 var call: VideoChatCall
@@ -1359,7 +1366,7 @@ final class VideoChatScreenComponent: Component {
     return false
 }
 if participant.videoDescription != nil || participant.presentationDescription != nil {
-    if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
+    if let participantPeer = participant.peer, participantPeer.id != groupCall.accountContext.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
         return true
     }
 }
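The extra participantPeer.id != groupCall.accountContext.account.peerId clause keeps the local user out of the speaking checks, so the user's own microphone activity no longer counts them as a speaking video participant or adds them to the speaking-peers list; the three hunks below apply the same exclusion to the remaining group-call and conference-source paths. A reduced sketch of the filter, with plain stand-in types:

    // Minimal sketch of the exclusion above; Participant and Int64 ids stand in for
    // the EnginePeer-based types, localPeerId for groupCall.accountContext.account.peerId.
    struct Participant {
        let peerId: Int64
        let hasVideo: Bool
    }

    func remoteSpeakingParticipants(_ participants: [Participant], speaking: Set<Int64>, localPeerId: Int64) -> [Participant] {
        return participants.filter { participant in
            participant.peerId != localPeerId && speaking.contains(participant.peerId)
        }
    }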
@@ -1421,7 +1428,7 @@ final class VideoChatScreenComponent: Component {
 var speakingParticipantPeers: [EnginePeer] = []
 if let members, !members.speakingParticipants.isEmpty {
     for participant in members.participants {
-        if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
+        if let participantPeer = participant.peer, participantPeer.id != groupCall.accountContext.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
             speakingParticipantPeers.append(participantPeer)
         }
     }
@@ -1698,7 +1705,7 @@ final class VideoChatScreenComponent: Component {
     return false
 }
 if participant.videoDescription != nil || participant.presentationDescription != nil {
-    if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
+    if let participantPeer = participant.peer, participantPeer.id != conferenceSource.context.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
         return true
     }
 }
@@ -1760,7 +1767,7 @@ final class VideoChatScreenComponent: Component {
 var speakingParticipantPeers: [EnginePeer] = []
 if !members.speakingParticipants.isEmpty {
     for participant in members.participants {
-        if let participantPeer = participant.peer, members.speakingParticipants.contains(participantPeer.id) {
+        if let participantPeer = participant.peer, participantPeer.id != conferenceSource.context.account.peerId, members.speakingParticipants.contains(participantPeer.id) {
             speakingParticipantPeers.append(participantPeer)
         }
     }
@@ -2501,6 +2508,7 @@ final class VideoChatScreenComponent: Component {
 expandedInsets: participantsExpandedInsets,
 safeInsets: participantsSafeInsets,
 interfaceOrientation: environment.orientation ?? .portrait,
+enableVideoSharpening: self.enableVideoSharpening,
 openParticipantContextMenu: { [weak self] id, sourceView, gesture in
     guard let self else {
         return
@@ -5,6 +5,21 @@ import MetalPerformanceShaders
 import Accelerate
 import MetalEngine

+private func makeSharpenKernel(device: MTLDevice, sharpeningStrength: Float) -> MPSImageConvolution {
+    let centerWeight = 1.0 + 6.0 * sharpeningStrength
+    let adjacentWeight = -1.0 * sharpeningStrength
+    let diagonalWeight = -0.5 * sharpeningStrength
+
+    let sharpenWeights: [Float] = [
+        diagonalWeight, adjacentWeight, diagonalWeight,
+        adjacentWeight, centerWeight, adjacentWeight,
+        diagonalWeight, adjacentWeight, diagonalWeight
+    ]
+    let result = MPSImageConvolution(device: device, kernelWidth: 3, kernelHeight: 3, weights: sharpenWeights)
+    result.edgeMode = .clamp
+    return result
+}
+
 public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
     public var internalData: MetalEngineSubjectInternalData?

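makeSharpenKernel builds a standard 3x3 sharpening convolution: the centre tap gets 1 + 6s, the four edge-adjacent taps -s each and the four diagonals -0.5s each, so the weights always sum to 1 + 6s - 4s - 2s = 1 and flat areas keep their brightness while edges are boosted. With the strength of 1.4 set in the initializer below, the centre weight comes out to 9.4. A quick check of the normalization, using the same weight formulas:

    // Sanity check that the 3x3 kernel above is normalized for any strength.
    func sharpenWeightSum(strength: Float) -> Float {
        let center = 1.0 + 6.0 * strength
        let adjacent = -1.0 * strength   // four edge-adjacent taps
        let diagonal = -0.5 * strength   // four diagonal taps
        return center + 4.0 * adjacent + 4.0 * diagonal
    }
    // sharpenWeightSum(strength: 1.4) == 1.0

MPSImageConvolution takes its weights at init time, which is presumably why the layer below rebuilds the kernel in updateSharpeningStrength instead of mutating it.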
@@ -16,6 +31,9 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
 let computePipelineStateHorizontal: MTLComputePipelineState
 let computePipelineStateVertical: MTLComputePipelineState
 let downscaleKernel: MPSImageBilinearScale

+var sharpeningStrength: Float = 0.0
+var sharpenKernel: MPSImageConvolution
+
 required init?(device: MTLDevice) {
     guard let library = metalLibrary(device: device) else {
@@ -52,6 +70,14 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
 self.computePipelineStateVertical = computePipelineStateVertical

 self.downscaleKernel = MPSImageBilinearScale(device: device)
+
+self.sharpeningStrength = 1.4
+self.sharpenKernel = makeSharpenKernel(device: device, sharpeningStrength: self.sharpeningStrength)
+}
+
+func updateSharpeningStrength(device: MTLDevice, sharpeningStrength: Float) {
+    self.sharpeningStrength = sharpeningStrength
+    self.sharpenKernel = makeSharpenKernel(device: device, sharpeningStrength: self.sharpeningStrength)
 }
 }

@@ -82,21 +108,26 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
         self.setNeedsUpdate()
     }
 }

+private let enableSharpening: Bool
+
 public var renderSpec: RenderLayerSpec?

 private var rgbaTexture: PooledTexture?
+private var sharpenedTexture: PooledTexture?
 private var downscaledTexture: PooledTexture?
 private var blurredHorizontalTexture: PooledTexture?
 private var blurredVerticalTexture: PooledTexture?

-override public init() {
+public init(enableSharpening: Bool) {
+    self.enableSharpening = enableSharpening
     self.blurredLayer = MetalEngineSubjectLayer()

     super.init()
 }

 override public init(layer: Any) {
+    self.enableSharpening = false
     self.blurredLayer = MetalEngineSubjectLayer()

     super.init(layer: layer)
@@ -121,6 +152,9 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
 if self.rgbaTexture == nil || self.rgbaTexture?.spec != rgbaTextureSpec {
     self.rgbaTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
 }
+if self.sharpenedTexture == nil || self.sharpenedTexture?.spec != rgbaTextureSpec {
+    self.sharpenedTexture = MetalEngine.shared.pooledTexture(spec: rgbaTextureSpec)
+}
 if self.downscaledTexture == nil {
     self.downscaledTexture = MetalEngine.shared.pooledTexture(spec: TextureSpec(width: 128, height: 128, pixelFormat: .rgba8UnsignedNormalized))
 }
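The sharpened output gets its own pooled texture with the same spec as the RGBA texture, so the convolution never writes over its input; MetalEngine's pooledTexture presumably recycles these per-frame targets. In plain Metal terms, such a target amounts to a shader-read/shader-write texture matching the source, roughly like this hedged sketch:

    import Metal

    // Rough stand-in for what a pooled "sharpened" target needs to be usable as
    // an MPS destination and as a fragment-shader input later in the frame.
    func makeSharpenTarget(device: MTLDevice, width: Int, height: Int) -> MTLTexture? {
        let descriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: .rgba8Unorm,
            width: width,
            height: height,
            mipmapped: false
        )
        descriptor.usage = [.shaderRead, .shaderWrite] // MPS writes it, the render pass samples it
        return device.makeTexture(descriptor: descriptor)
    }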
@@ -134,35 +168,90 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
 guard let rgbaTexture = self.rgbaTexture?.get(context: context) else {
     return
 }

-let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
-    guard let rgbaTexture else {
-        return
-    }
-    guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
-        return
-    }
-
-    let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
-    let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
-
-    switch videoTextures.textureLayout {
-    case let .biPlanar(biPlanar):
-        computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
-        computeEncoder.setTexture(biPlanar.y, index: 0)
-        computeEncoder.setTexture(biPlanar.uv, index: 1)
-        computeEncoder.setTexture(rgbaTexture, index: 2)
-    case let .triPlanar(triPlanar):
-        computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
-        computeEncoder.setTexture(triPlanar.y, index: 0)
-        computeEncoder.setTexture(triPlanar.u, index: 1)
-        computeEncoder.setTexture(triPlanar.u, index: 2)
-        computeEncoder.setTexture(rgbaTexture, index: 3)
-    }
-    computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
-
-    computeEncoder.endEncoding()
-})
+var outputTexture = rgbaTexture
+
+var sharpenedTexture: TexturePlaceholder?
+if self.enableSharpening && rgbaTextureSpec.width * rgbaTextureSpec.height >= 800 * 480 {
+    sharpenedTexture = self.sharpenedTexture?.get(context: context)
+    if let sharpenedTexture {
+        outputTexture = sharpenedTexture
+    }
+}
+
+if let sharpenedTexture {
+    let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, sharpenedTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, sharpenedTexture in
+        guard let rgbaTexture else {
+            return
+        }
+        guard let sharpenedTexture else {
+            return
+        }
+
+        do {
+            guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
+                return
+            }
+
+            let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
+            let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
+
+            switch videoTextures.textureLayout {
+            case let .biPlanar(biPlanar):
+                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
+                computeEncoder.setTexture(biPlanar.y, index: 0)
+                computeEncoder.setTexture(biPlanar.uv, index: 1)
+                computeEncoder.setTexture(rgbaTexture, index: 2)
+            case let .triPlanar(triPlanar):
+                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
+                computeEncoder.setTexture(triPlanar.y, index: 0)
+                computeEncoder.setTexture(triPlanar.u, index: 1)
+                computeEncoder.setTexture(triPlanar.u, index: 2)
+                computeEncoder.setTexture(rgbaTexture, index: 3)
+            }
+            computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
+
+            computeEncoder.endEncoding()
+        }
+
+        do {
+
+            blurState.sharpenKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: sharpenedTexture)
+        }
+    })
+} else {
+    let _ = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture in
+        guard let rgbaTexture else {
+            return
+        }
+
+        do {
+            guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
+                return
+            }
+
+            let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
+            let threadgroupCount = MTLSize(width: (rgbaTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (rgbaTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
+
+            switch videoTextures.textureLayout {
+            case let .biPlanar(biPlanar):
+                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVBiPlanarToRGBA)
+                computeEncoder.setTexture(biPlanar.y, index: 0)
+                computeEncoder.setTexture(biPlanar.uv, index: 1)
+                computeEncoder.setTexture(rgbaTexture, index: 2)
+            case let .triPlanar(triPlanar):
+                computeEncoder.setComputePipelineState(blurState.computePipelineStateYUVTriPlanarToRGBA)
+                computeEncoder.setTexture(triPlanar.y, index: 0)
+                computeEncoder.setTexture(triPlanar.u, index: 1)
+                computeEncoder.setTexture(triPlanar.u, index: 2)
+                computeEncoder.setTexture(rgbaTexture, index: 3)
+            }
+            computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
+
+            computeEncoder.endEncoding()
+        }
+    })
+}

 if !self.blurredLayer.isHidden {
     guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
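Sharpening runs only when enableSharpening is set and the frame covers at least as many pixels as an 800x480 image, so thumbnails and low-resolution video skip the extra pass. When it does run, the YUV-to-RGBA compute pass and the MPSImageConvolution are encoded back to back on the same command buffer: the convolution reads the freshly written rgbaTexture and writes the pooled sharpened texture that the render pass then samples. A simplified, hedged sketch of that ordering (pipeline and texture setup omitted; the single source texture stands in for the YUV planes):

    import Metal
    import MetalPerformanceShaders

    func encodeConvertThenSharpen(
        commandBuffer: MTLCommandBuffer,
        convertPipeline: MTLComputePipelineState,
        sharpen: MPSImageConvolution,
        source: MTLTexture,
        rgbaTexture: MTLTexture,
        sharpenedTexture: MTLTexture
    ) {
        if let encoder = commandBuffer.makeComputeCommandEncoder() {
            encoder.setComputePipelineState(convertPipeline)
            encoder.setTexture(source, index: 0)
            encoder.setTexture(rgbaTexture, index: 1)
            let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
            let threadgroupCount = MTLSize(
                width: (rgbaTexture.width + 15) / 16,
                height: (rgbaTexture.height + 15) / 16,
                depth: 1
            )
            encoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
            encoder.endEncoding()
        }
        // MPS kernels encode themselves; no explicit compute encoder is needed here.
        sharpen.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: sharpenedTexture)
    }

When sharpening is skipped, outputTexture stays pointed at rgbaTexture, so the render pass below is unchanged apart from the variable rename.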
@@ -228,8 +317,8 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
     })
 }

-context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
-    guard let rgbaTexture else {
+context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: outputTexture.placeholer, commands: { encoder, placement, outputTexture in
+    guard let outputTexture else {
         return
     }

@@ -244,7 +333,7 @@ public final class PrivateCallVideoLayer: MetalEngineSubjectLayer, MetalEngineSu
 )
 encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)

-encoder.setFragmentTexture(rgbaTexture, index: 0)
+encoder.setFragmentTexture(outputTexture, index: 0)

 var brightness: Float = 1.0
 var saturation: Float = 1.0
|
@ -128,6 +128,7 @@ final class VideoContainerView: HighlightTrackingButton {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let key: Key
|
let key: Key
|
||||||
|
let enableSharpening: Bool
|
||||||
|
|
||||||
let videoContainerLayer: VideoContainerLayer
|
let videoContainerLayer: VideoContainerLayer
|
||||||
var videoContainerLayerTaken: Bool = false
|
var videoContainerLayerTaken: Bool = false
|
||||||
@@ -211,8 +212,9 @@ final class VideoContainerView: HighlightTrackingButton {

 var pressAction: (() -> Void)?

-init(key: Key) {
+init(key: Key, enableSharpening: Bool) {
     self.key = key
+    self.enableSharpening = enableSharpening

     self.videoContainerLayer = VideoContainerLayer()
     self.videoContainerLayer.backgroundColor = nil
@@ -223,7 +225,7 @@ final class VideoContainerView: HighlightTrackingButton {
     self.videoContainerLayer.contentsLayer.cornerCurve = .circular
 }

-self.videoLayer = PrivateCallVideoLayer()
+self.videoLayer = PrivateCallVideoLayer(enableSharpening: self.enableSharpening)
 self.videoLayer.masksToBounds = true
 self.videoLayer.isDoubleSided = false
 if #available(iOS 13.0, *) {
@@ -454,7 +456,7 @@ final class VideoContainerView: HighlightTrackingButton {
 let previousVideoLayer = self.videoLayer
 self.disappearingVideoLayer = DisappearingVideo(flipAnimationInfo: flipAnimationInfo, videoLayer: self.videoLayer, videoMetrics: videoMetrics)

-self.videoLayer = PrivateCallVideoLayer()
+self.videoLayer = PrivateCallVideoLayer(enableSharpening: self.enableSharpening)
 self.videoLayer.opacity = previousVideoLayer.opacity
 self.videoLayer.masksToBounds = true
 self.videoLayer.isDoubleSided = false
@@ -81,6 +81,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
 public var isRemoteBatteryLow: Bool
 public var isEnergySavingEnabled: Bool
 public var isConferencePossible: Bool
+public var enableVideoSharpening: Bool

 public init(
     strings: PresentationStrings,
|
|||||||
remoteVideo: VideoSource?,
|
remoteVideo: VideoSource?,
|
||||||
isRemoteBatteryLow: Bool,
|
isRemoteBatteryLow: Bool,
|
||||||
isEnergySavingEnabled: Bool,
|
isEnergySavingEnabled: Bool,
|
||||||
isConferencePossible: Bool
|
isConferencePossible: Bool,
|
||||||
|
enableVideoSharpening: Bool
|
||||||
) {
|
) {
|
||||||
self.strings = strings
|
self.strings = strings
|
||||||
self.lifecycleState = lifecycleState
|
self.lifecycleState = lifecycleState
|
||||||
@@ -110,6 +112,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
 self.isRemoteBatteryLow = isRemoteBatteryLow
 self.isEnergySavingEnabled = isEnergySavingEnabled
 self.isConferencePossible = isConferencePossible
+self.enableVideoSharpening = enableVideoSharpening
 }

 public static func ==(lhs: State, rhs: State) -> Bool {
@@ -152,6 +155,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
 if lhs.isConferencePossible != rhs.isConferencePossible {
     return false
 }
+if lhs.enableVideoSharpening != rhs.enableVideoSharpening {
+    return false
+}
 return true
 }
 }
@@ -994,7 +1000,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView, AVPictureInPictu
 videoContainerView = current
 } else {
     animateIn = true
-    videoContainerView = VideoContainerView(key: videoContainerKey)
+    videoContainerView = VideoContainerView(key: videoContainerKey, enableSharpening: params.state.enableVideoSharpening)
     switch videoContainerKey {
     case .foreground:
         self.overlayContentsView.layer.addSublayer(videoContainerView.blurredContainerLayer)
@@ -1631,7 +1631,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
 static dispatch_once_t onceToken;
 dispatch_once(&onceToken, ^{
     tgcalls::Register<tgcalls::InstanceImpl>();
-    //tgcalls::Register<tgcalls::InstanceV2_4_0_0Impl>();
     tgcalls::Register<tgcalls::InstanceV2Impl>();
     tgcalls::Register<tgcalls::InstanceV2ReferenceImpl>();
 });