Various improvements

This commit is contained in:
Ilya Laktyushin 2025-07-20 02:49:04 +01:00
parent fd9b25b527
commit 5a1ea16354
17 changed files with 2552 additions and 23 deletions

View File

@ -14697,3 +14697,5 @@ Sorry for the inconvenience.";
"Premium.PricePer2Years" = "%@/2 years";
"Chat.SensitiveContentShort" = "18+";
"AccessDenied.AgeVerificationCamera" = "Telegram needs access to your camera for age verification.\n\nOpen your device's Settings > Privacy > Camera and set Telegram to ON.";

View File

@ -543,12 +543,13 @@ public final class NavigateToChatControllerParams {
public let changeColors: Bool
public let setupController: (ChatController) -> Void
public let completion: (ChatController) -> Void
public let chatListCompletion: ((ChatListController) -> Void)?
public let chatListCompletion: ((ChatListController) -> Void)
public let pushController: ((ChatController, Bool, @escaping () -> Void) -> Void)?
public let forceOpenChat: Bool
public let customChatNavigationStack: [EnginePeer.Id]?
public let skipAgeVerification: Bool
public init(navigationController: NavigationController, chatController: ChatController? = nil, context: AccountContext, chatLocation: Location, chatLocationContextHolder: Atomic<ChatLocationContextHolder?> = Atomic<ChatLocationContextHolder?>(value: nil), subject: ChatControllerSubject? = nil, botStart: ChatControllerInitialBotStart? = nil, attachBotStart: ChatControllerInitialAttachBotStart? = nil, botAppStart: ChatControllerInitialBotAppStart? = nil, updateTextInputState: ChatTextInputState? = nil, activateInput: ChatControllerActivateInput? = nil, keepStack: NavigateToChatKeepStack = .default, useExisting: Bool = true, useBackAnimation: Bool = false, purposefulAction: (() -> Void)? = nil, scrollToEndIfExists: Bool = false, activateMessageSearch: (ChatSearchDomain, String)? = nil, peekData: ChatPeekTimeout? = nil, peerNearbyData: ChatPeerNearbyData? = nil, reportReason: NavigateToChatControllerParams.ReportReason? = nil, animated: Bool = true, forceAnimatedScroll: Bool = false, options: NavigationAnimationOptions = [], parentGroupId: PeerGroupId? = nil, chatListFilter: Int32? = nil, chatNavigationStack: [ChatNavigationStackItem] = [], changeColors: Bool = false, setupController: @escaping (ChatController) -> Void = { _ in }, pushController: ((ChatController, Bool, @escaping () -> Void) -> Void)? = nil, completion: @escaping (ChatController) -> Void = { _ in }, chatListCompletion: @escaping (ChatListController) -> Void = { _ in }, forceOpenChat: Bool = false, customChatNavigationStack: [EnginePeer.Id]? = nil) {
public init(navigationController: NavigationController, chatController: ChatController? = nil, context: AccountContext, chatLocation: Location, chatLocationContextHolder: Atomic<ChatLocationContextHolder?> = Atomic<ChatLocationContextHolder?>(value: nil), subject: ChatControllerSubject? = nil, botStart: ChatControllerInitialBotStart? = nil, attachBotStart: ChatControllerInitialAttachBotStart? = nil, botAppStart: ChatControllerInitialBotAppStart? = nil, updateTextInputState: ChatTextInputState? = nil, activateInput: ChatControllerActivateInput? = nil, keepStack: NavigateToChatKeepStack = .default, useExisting: Bool = true, useBackAnimation: Bool = false, purposefulAction: (() -> Void)? = nil, scrollToEndIfExists: Bool = false, activateMessageSearch: (ChatSearchDomain, String)? = nil, peekData: ChatPeekTimeout? = nil, peerNearbyData: ChatPeerNearbyData? = nil, reportReason: NavigateToChatControllerParams.ReportReason? = nil, animated: Bool = true, forceAnimatedScroll: Bool = false, options: NavigationAnimationOptions = [], parentGroupId: PeerGroupId? = nil, chatListFilter: Int32? = nil, chatNavigationStack: [ChatNavigationStackItem] = [], changeColors: Bool = false, setupController: @escaping (ChatController) -> Void = { _ in }, pushController: ((ChatController, Bool, @escaping () -> Void) -> Void)? = nil, completion: @escaping (ChatController) -> Void = { _ in }, chatListCompletion: @escaping (ChatListController) -> Void = { _ in }, forceOpenChat: Bool = false, customChatNavigationStack: [EnginePeer.Id]? = nil, skipAgeVerification: Bool = false) {
self.navigationController = navigationController
self.chatController = chatController
self.chatLocationContextHolder = chatLocationContextHolder
@ -582,6 +583,46 @@ public final class NavigateToChatControllerParams {
self.chatListCompletion = chatListCompletion
self.forceOpenChat = forceOpenChat
self.customChatNavigationStack = customChatNavigationStack
self.skipAgeVerification = skipAgeVerification
}
/// Returns a copy of these navigation parameters with `skipAgeVerification` replaced by the
/// given value. Every other field is carried over from `self` unchanged.
/// - Parameter skipAgeVerification: Whether age verification should be bypassed when opening the chat.
/// - Returns: A new `NavigateToChatControllerParams` identical to `self` except for `skipAgeVerification`.
/// NOTE(review): this pass-through list must be kept in sync with the memberwise initializer —
/// any property added to the class needs a matching line here, or copies will silently drop it.
public func withSkipAgeVerification(_ skipAgeVerification: Bool) -> NavigateToChatControllerParams {
return NavigateToChatControllerParams(
navigationController: self.navigationController,
chatController: self.chatController,
context: self.context,
chatLocation: self.chatLocation,
chatLocationContextHolder: self.chatLocationContextHolder,
subject: self.subject,
botStart: self.botStart,
attachBotStart: self.attachBotStart,
botAppStart: self.botAppStart,
updateTextInputState: self.updateTextInputState,
activateInput: self.activateInput,
keepStack: self.keepStack,
useExisting: self.useExisting,
useBackAnimation: self.useBackAnimation,
purposefulAction: self.purposefulAction,
scrollToEndIfExists: self.scrollToEndIfExists,
activateMessageSearch: self.activateMessageSearch,
peekData: self.peekData,
peerNearbyData: self.peerNearbyData,
reportReason: self.reportReason,
animated: self.animated,
forceAnimatedScroll: self.forceAnimatedScroll,
options: self.options,
parentGroupId: self.parentGroupId,
chatListFilter: self.chatListFilter,
chatNavigationStack: self.chatNavigationStack,
changeColors: self.changeColors,
setupController: self.setupController,
pushController: self.pushController,
completion: self.completion,
chatListCompletion: self.chatListCompletion,
forceOpenChat: self.forceOpenChat,
customChatNavigationStack: self.customChatNavigationStack,
skipAgeVerification: skipAgeVerification
)
}
}

View File

@ -16,6 +16,7 @@ public enum DeviceAccessCameraSubject {
case video
case videoCall
case qrCode
case ageVerification
}
public enum DeviceAccessMicrophoneSubject {
@ -332,6 +333,8 @@ public final class DeviceAccess {
text = presentationData.strings.AccessDenied_VideoCallCamera
case .qrCode:
text = presentationData.strings.AccessDenied_QrCamera
case .ageVerification:
text = presentationData.strings.AccessDenied_AgeVerificationCamera
}
present(standardTextAlertController(theme: AlertControllerTheme(presentationData: presentationData), title: presentationData.strings.AccessDenied_Title, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_NotNow, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.AccessDenied_Settings, action: {
openSettings()
@ -356,6 +359,8 @@ public final class DeviceAccess {
text = presentationData.strings.AccessDenied_VideoCallCamera
case .qrCode:
text = presentationData.strings.AccessDenied_QrCamera
case .ageVerification:
text = presentationData.strings.AccessDenied_AgeVerificationCamera
}
}
present(standardTextAlertController(theme: AlertControllerTheme(presentationData: presentationData), title: presentationData.strings.AccessDenied_Title, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_NotNow, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.AccessDenied_Settings, action: {

View File

@ -617,7 +617,7 @@ private func autosaveLabelAndValue(presentationData: PresentationData, settings:
return (label, value)
}
private func dataAndStorageControllerEntries(state: DataAndStorageControllerState, data: DataAndStorageData, presentationData: PresentationData, defaultWebBrowser: String, contentSettingsConfiguration: ContentSettingsConfiguration?, networkUsage: Int64, storageUsage: Int64, mediaAutoSaveSettings: MediaAutoSaveSettings, autosaveExceptionPeers: [EnginePeer.Id: EnginePeer?], mediaSettings: MediaDisplaySettings) -> [DataAndStorageEntry] {
private func dataAndStorageControllerEntries(state: DataAndStorageControllerState, data: DataAndStorageData, presentationData: PresentationData, defaultWebBrowser: String, contentSettingsConfiguration: ContentSettingsConfiguration?, networkUsage: Int64, storageUsage: Int64, mediaAutoSaveSettings: MediaAutoSaveSettings, autosaveExceptionPeers: [EnginePeer.Id: EnginePeer?], mediaSettings: MediaDisplaySettings, showSensitiveContentSetting: Bool) -> [DataAndStorageEntry] {
var entries: [DataAndStorageEntry] = []
entries.append(.storageUsage(presentationData.theme, presentationData.strings.ChatSettings_Cache, dataSizeString(storageUsage, formatting: DataSizeStringFormatting(presentationData: presentationData))))
@ -656,7 +656,7 @@ private func dataAndStorageControllerEntries(state: DataAndStorageControllerStat
entries.append(.raiseToListen(presentationData.theme, presentationData.strings.Settings_RaiseToListen, data.mediaInputSettings.enableRaiseToSpeak))
entries.append(.raiseToListenInfo(presentationData.theme, presentationData.strings.Settings_RaiseToListenInfo))
if !"".isEmpty, let contentSettingsConfiguration = contentSettingsConfiguration, contentSettingsConfiguration.canAdjustSensitiveContent {
if let contentSettingsConfiguration = contentSettingsConfiguration, contentSettingsConfiguration.canAdjustSensitiveContent && showSensitiveContentSetting {
entries.append(.sensitiveContent(presentationData.strings.Settings_SensitiveContent, contentSettingsConfiguration.sensitiveContentEnabled))
entries.append(.sensitiveContentInfo(presentationData.strings.Settings_SensitiveContentInfo))
}
@ -944,6 +944,7 @@ public func dataAndStorageController(context: AccountContext, focusOnItemTag: Da
}
let sensitiveContent = Atomic<Bool?>(value: nil)
let canAdjustSensitiveContent = Atomic<Bool?>(value: nil)
let signal = combineLatest(queue: .mainQueue(),
context.sharedContext.presentationData,
@ -974,9 +975,14 @@ public func dataAndStorageController(context: AccountContext, focusOnItemTag: Da
if previousSensitiveContent != contentSettingsConfiguration?.sensitiveContentEnabled {
animateChanges = true
}
if canAdjustSensitiveContent.with({ $0 }) == nil {
let _ = canAdjustSensitiveContent.swap(contentSettingsConfiguration?.sensitiveContentEnabled)
}
let showSensitiveContentSetting = canAdjustSensitiveContent.with { $0 } ?? false
let controllerState = ItemListControllerState(presentationData: ItemListPresentationData(presentationData), title: .text(presentationData.strings.ChatSettings_Title), leftNavigationButton: nil, rightNavigationButton: nil, backNavigationButton: ItemListBackButton(title: presentationData.strings.Common_Back), animateChanges: false)
let listState = ItemListNodeState(presentationData: ItemListPresentationData(presentationData), entries: dataAndStorageControllerEntries(state: state, data: dataAndStorageData, presentationData: presentationData, defaultWebBrowser: defaultWebBrowser, contentSettingsConfiguration: contentSettingsConfiguration, networkUsage: usageSignal.network, storageUsage: usageSignal.storage, mediaAutoSaveSettings: mediaAutoSaveSettings, autosaveExceptionPeers: autosaveExceptionPeers, mediaSettings: mediaSettings), style: .blocks, ensureVisibleItemTag: focusOnItemTag, emptyStateItem: nil, animateChanges: animateChanges)
let listState = ItemListNodeState(presentationData: ItemListPresentationData(presentationData), entries: dataAndStorageControllerEntries(state: state, data: dataAndStorageData, presentationData: presentationData, defaultWebBrowser: defaultWebBrowser, contentSettingsConfiguration: contentSettingsConfiguration, networkUsage: usageSignal.network, storageUsage: usageSignal.storage, mediaAutoSaveSettings: mediaAutoSaveSettings, autosaveExceptionPeers: autosaveExceptionPeers, mediaSettings: mediaSettings, showSensitiveContentSetting: showSensitiveContentSetting), style: .blocks, ensureVisibleItemTag: focusOnItemTag, emptyStateItem: nil, animateChanges: animateChanges)
return (controllerState, (listState, arguments))
} |> afterDisposed {

View File

@ -482,6 +482,7 @@ swift_library(
"//submodules/TelegramUI/Components/ComposeTodoScreen",
"//submodules/TelegramUI/Components/SuggestedPostApproveAlert",
"//submodules/TelegramUI/Components/Stars/BalanceNeededScreen",
"//submodules/TelegramUI/Components/FaceScanScreen",
"//submodules/ContactsHelper",
] + select({
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,

View File

@ -0,0 +1,41 @@
# Bazel build definition for the FaceScanScreen module (face-scan UI used for age verification).
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "FaceScanScreen",
module_name = "FaceScanScreen",
# All Swift sources under Sources/, including the generated Core ML wrappers.
srcs = glob([
"Sources/**/*.swift",
]),
# Keep the module warning-clean; any new warning breaks the build.
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/TelegramPresentationData",
"//submodules/AccountContext",
"//submodules/AppBundle",
"//submodules/PresentationDataUtils",
"//submodules/TextFormat",
"//submodules/Markdown",
"//submodules/Camera",
"//submodules/FileMediaResourceStatus",
"//submodules/TelegramUIPreferences",
"//submodules/UndoUI",
"//submodules/DeviceAccess",
"//third-party/ZipArchive",
"//submodules/ComponentFlow",
"//submodules/Components/ViewControllerComponent",
"//submodules/Components/MultilineTextComponent",
"//submodules/Components/BalancedTextComponent",
"//submodules/Components/BundleIconComponent",
"//submodules/TelegramUI/Components/PlainButtonComponent",
"//submodules/Components/SheetComponent",
"//submodules/TelegramUI/Components/ButtonComponent",
],
visibility = [
"//visibility:public",
],
)

View File

@ -0,0 +1,299 @@
//
// AgeNet.swift
//
// This file was automatically generated and should not be edited.
//
import CoreML
/// Model Prediction Input Type
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, visionOS 1.0, *)
class AgeNetInput : MLFeatureProvider {
/// input as color (kCVPixelFormatType_32BGRA) image buffer, 112 pixels wide by 112 pixels high
var input: CVPixelBuffer
var featureNames: Set<String> { ["input"] }
// Exposes the single "input" image feature to Core ML; any other name yields nil.
func featureValue(for featureName: String) -> MLFeatureValue? {
if featureName == "input" {
return MLFeatureValue(pixelBuffer: input)
}
return nil
}
init(input: CVPixelBuffer) {
self.input = input
}
// NOTE(review): the helpers below request kCVPixelFormatType_32ARGB while the property's
// doc comment above says 32BGRA. This mismatch is present in the generated code as-is;
// confirm the compiled model's expected pixel format before constructing buffers manually.
// The force-unwraps assume MLFeatureValue(cgImage:/imageAt:) always yields an image buffer
// for image-typed features — presumably guaranteed by Core ML; a failure here is a logic bug.
convenience init(inputWith input: CGImage) throws {
self.init(input: try MLFeatureValue(cgImage: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!)
}
convenience init(inputAt input: URL) throws {
self.init(input: try MLFeatureValue(imageAt: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!)
}
func setInput(with input: CGImage) throws {
self.input = try MLFeatureValue(cgImage: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
}
func setInput(with input: URL) throws {
self.input = try MLFeatureValue(imageAt: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
}
}
/// Model Prediction Output Type
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, visionOS 1.0, *)
class AgeNetOutput : MLFeatureProvider {
/// Source provided by CoreML
private let provider : MLFeatureProvider
/// Identity as 1 by 1 matrix of floats
// Force-unwraps assume the model always emits an "Identity" multi-array feature —
// presumably guaranteed by the compiled model's output description; verify if the model changes.
var Identity: MLMultiArray {
provider.featureValue(for: "Identity")!.multiArrayValue!
}
/// Identity as 1 by 1 matrix of floats
var IdentityShapedArray: MLShapedArray<Float> {
MLShapedArray<Float>(Identity)
}
var featureNames: Set<String> {
provider.featureNames
}
func featureValue(for featureName: String) -> MLFeatureValue? {
provider.featureValue(for: featureName)
}
// try! is acceptable here: wrapping a valid MLMultiArray in a dictionary provider cannot fail.
init(Identity: MLMultiArray) {
self.provider = try! MLDictionaryFeatureProvider(dictionary: ["Identity" : MLFeatureValue(multiArray: Identity)])
}
init(features: MLFeatureProvider) {
self.provider = features
}
}
/// Class for model loading and prediction
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, visionOS 1.0, *)
class AgeNet {
let model: MLModel
/// URL of model assuming it was installed in the same bundle as this class
// Force-unwrap: a missing AgeNet.mlmodelc in the bundle is a packaging error and should crash early.
class var urlOfModelInThisBundle : URL {
let bundle = Bundle(for: self)
return bundle.url(forResource: "AgeNet", withExtension:"mlmodelc")!
}
/**
Construct AgeNet instance with an existing MLModel object.
Usually the application does not use this initializer unless it makes a subclass of AgeNet.
Such application may want to use `MLModel(contentsOfURL:configuration:)` and `AgeNet.urlOfModelInThisBundle` to create a MLModel object to pass-in.
- parameters:
- model: MLModel object
*/
init(model: MLModel) {
self.model = model
}
/**
Construct a model with configuration
- parameters:
- configuration: the desired model configuration
- throws: an NSError object that describes the problem
*/
convenience init(configuration: MLModelConfiguration = MLModelConfiguration()) throws {
try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration)
}
/**
Construct AgeNet instance with explicit path to mlmodelc file
- parameters:
- modelURL: the file url of the model
- throws: an NSError object that describes the problem
*/
convenience init(contentsOf modelURL: URL) throws {
try self.init(model: MLModel(contentsOf: modelURL))
}
/**
Construct a model with URL of the .mlmodelc directory and configuration
- parameters:
- modelURL: the file url of the model
- configuration: the desired model configuration
- throws: an NSError object that describes the problem
*/
convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws {
try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration))
}
/**
Construct AgeNet instance asynchronously with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- configuration: the desired model configuration
- handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
*/
class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<AgeNet, Error>) -> Void) {
load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler)
}
/**
Construct AgeNet instance asynchronously with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- configuration: the desired model configuration
*/
class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> AgeNet {
try await load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration)
}
/**
Construct AgeNet instance asynchronously with URL of the .mlmodelc directory with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- modelURL: the URL to the model
- configuration: the desired model configuration
- handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
*/
class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<AgeNet, Error>) -> Void) {
MLModel.load(contentsOf: modelURL, configuration: configuration) { result in
switch result {
case .failure(let error):
handler(.failure(error))
case .success(let model):
handler(.success(AgeNet(model: model)))
}
}
}
/**
Construct AgeNet instance asynchronously with URL of the .mlmodelc directory with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- modelURL: the URL to the model
- configuration: the desired model configuration
*/
class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> AgeNet {
let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
return AgeNet(model: model)
}
/**
Make a prediction using the structured interface
It uses the default function if the model has multiple functions.
- parameters:
- input: the input to the prediction as AgeNetInput
- throws: an NSError object that describes the problem
- returns: the result of the prediction as AgeNetOutput
*/
func prediction(input: AgeNetInput) throws -> AgeNetOutput {
try prediction(input: input, options: MLPredictionOptions())
}
/**
Make a prediction using the structured interface
It uses the default function if the model has multiple functions.
- parameters:
- input: the input to the prediction as AgeNetInput
- options: prediction options
- throws: an NSError object that describes the problem
- returns: the result of the prediction as AgeNetOutput
*/
func prediction(input: AgeNetInput, options: MLPredictionOptions) throws -> AgeNetOutput {
let outFeatures = try model.prediction(from: input, options: options)
return AgeNetOutput(features: outFeatures)
}
/**
Make an asynchronous prediction using the structured interface
It uses the default function if the model has multiple functions.
- parameters:
- input: the input to the prediction as AgeNetInput
- options: prediction options
- throws: an NSError object that describes the problem
- returns: the result of the prediction as AgeNetOutput
*/
// Same parameter list as the synchronous overload above; Swift disambiguates by async-ness
// at the call site (`try await` selects this one).
@available(macOS 14.0, iOS 17.0, tvOS 17.0, watchOS 10.0, visionOS 1.0, *)
func prediction(input: AgeNetInput, options: MLPredictionOptions = MLPredictionOptions()) async throws -> AgeNetOutput {
let outFeatures = try await model.prediction(from: input, options: options)
return AgeNetOutput(features: outFeatures)
}
/**
Make a prediction using the convenience interface
It uses the default function if the model has multiple functions.
- parameters:
- input: color (kCVPixelFormatType_32BGRA) image buffer, 112 pixels wide by 112 pixels high
- throws: an NSError object that describes the problem
- returns: the result of the prediction as AgeNetOutput
*/
func prediction(input: CVPixelBuffer) throws -> AgeNetOutput {
let input_ = AgeNetInput(input: input)
return try prediction(input: input_)
}
/**
Make a batch prediction using the structured interface
It uses the default function if the model has multiple functions.
- parameters:
- inputs: the inputs to the prediction as [AgeNetInput]
- options: prediction options
- throws: an NSError object that describes the problem
- returns: the result of the prediction as [AgeNetOutput]
*/
func predictions(inputs: [AgeNetInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [AgeNetOutput] {
let batchIn = MLArrayBatchProvider(array: inputs)
let batchOut = try model.predictions(from: batchIn, options: options)
var results : [AgeNetOutput] = []
results.reserveCapacity(inputs.count)
for i in 0..<batchOut.count {
let outProvider = batchOut.features(at: i)
let result = AgeNetOutput(features: outProvider)
results.append(result)
}
return results
}
}

View File

@ -0,0 +1,312 @@
//
// AgeNetLegacy.swift
//
// This file was automatically generated and should not be edited.
//
import CoreML
/// Model Prediction Input Type
@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, visionOS 1.0, *)
class AgeNetLegacyInput : MLFeatureProvider {
/// input as color (kCVPixelFormatType_32BGRA) image buffer, 112 pixels wide by 112 pixels high
var input: CVPixelBuffer
var featureNames: Set<String> { ["input"] }
// Exposes the single "input" image feature to Core ML; any other name yields nil.
func featureValue(for featureName: String) -> MLFeatureValue? {
if featureName == "input" {
return MLFeatureValue(pixelBuffer: input)
}
return nil
}
init(input: CVPixelBuffer) {
self.input = input
}
// NOTE(review): the helpers below request kCVPixelFormatType_32ARGB while the property's
// doc comment above says 32BGRA — same discrepancy as the non-legacy wrapper; confirm the
// compiled model's expected pixel format before constructing buffers manually.
convenience init(inputWith input: CGImage) throws {
self.init(input: try MLFeatureValue(cgImage: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!)
}
convenience init(inputAt input: URL) throws {
self.init(input: try MLFeatureValue(imageAt: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!)
}
func setInput(with input: CGImage) throws {
self.input = try MLFeatureValue(cgImage: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
}
func setInput(with input: URL) throws {
self.input = try MLFeatureValue(imageAt: input, pixelsWide: 112, pixelsHigh: 112, pixelFormatType: kCVPixelFormatType_32ARGB, options: nil).imageBufferValue!
}
}
/// Model Prediction Output Type
@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, visionOS 1.0, *)
class AgeNetLegacyOutput : MLFeatureProvider {
/// Source provided by CoreML
private let provider : MLFeatureProvider
/// Identity as multidimensional array of floats
// Force-unwraps assume the model always emits an "Identity" multi-array feature —
// presumably guaranteed by the compiled model's output description; verify if the model changes.
var Identity: MLMultiArray {
provider.featureValue(for: "Identity")!.multiArrayValue!
}
/// Identity as multidimensional array of floats
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, visionOS 1.0, *)
var IdentityShapedArray: MLShapedArray<Float> {
MLShapedArray<Float>(Identity)
}
var featureNames: Set<String> {
provider.featureNames
}
func featureValue(for featureName: String) -> MLFeatureValue? {
provider.featureValue(for: featureName)
}
// try! is acceptable here: wrapping a valid MLMultiArray in a dictionary provider cannot fail.
init(Identity: MLMultiArray) {
self.provider = try! MLDictionaryFeatureProvider(dictionary: ["Identity" : MLFeatureValue(multiArray: Identity)])
}
init(features: MLFeatureProvider) {
self.provider = features
}
}
/// Class for model loading and prediction
@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 6.0, visionOS 1.0, *)
class AgeNetLegacy {
let model: MLModel
/// URL of model assuming it was installed in the same bundle as this class
class var urlOfModelInThisBundle : URL {
let bundle = Bundle(for: self)
return bundle.url(forResource: "AgeNetLegacy", withExtension:"mlmodelc")!
}
/**
Construct AgeNetLegacy instance with an existing MLModel object.
Usually the application does not use this initializer unless it makes a subclass of AgeNetLegacy.
Such application may want to use `MLModel(contentsOfURL:configuration:)` and `AgeNetLegacy.urlOfModelInThisBundle` to create a MLModel object to pass-in.
- parameters:
- model: MLModel object
*/
init(model: MLModel) {
self.model = model
}
/**
Construct AgeNetLegacy instance by automatically loading the model from the app's bundle.
*/
@available(*, deprecated, message: "Use init(configuration:) instead and handle errors appropriately.")
convenience init() {
try! self.init(contentsOf: type(of:self).urlOfModelInThisBundle)
}
/**
Construct a model with configuration
- parameters:
- configuration: the desired model configuration
- throws: an NSError object that describes the problem
*/
convenience init(configuration: MLModelConfiguration) throws {
try self.init(contentsOf: type(of:self).urlOfModelInThisBundle, configuration: configuration)
}
/**
Construct AgeNetLegacy instance with explicit path to mlmodelc file
- parameters:
- modelURL: the file url of the model
- throws: an NSError object that describes the problem
*/
convenience init(contentsOf modelURL: URL) throws {
try self.init(model: MLModel(contentsOf: modelURL))
}
/**
Construct a model with URL of the .mlmodelc directory and configuration
- parameters:
- modelURL: the file url of the model
- configuration: the desired model configuration
- throws: an NSError object that describes the problem
*/
convenience init(contentsOf modelURL: URL, configuration: MLModelConfiguration) throws {
try self.init(model: MLModel(contentsOf: modelURL, configuration: configuration))
}
/**
Construct AgeNetLegacy instance asynchronously with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- configuration: the desired model configuration
- handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
*/
@available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, visionOS 1.0, *)
class func load(configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<AgeNetLegacy, Error>) -> Void) {
load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration, completionHandler: handler)
}
/**
Construct AgeNetLegacy instance asynchronously with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- configuration: the desired model configuration
*/
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, visionOS 1.0, *)
class func load(configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> AgeNetLegacy {
try await load(contentsOf: self.urlOfModelInThisBundle, configuration: configuration)
}
/**
Construct AgeNetLegacy instance asynchronously with URL of the .mlmodelc directory with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- modelURL: the URL to the model
- configuration: the desired model configuration
- handler: the completion handler to be called when the model loading completes successfully or unsuccessfully
*/
@available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, visionOS 1.0, *)
class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration(), completionHandler handler: @escaping (Swift.Result<AgeNetLegacy, Error>) -> Void) {
MLModel.load(contentsOf: modelURL, configuration: configuration) { result in
switch result {
case .failure(let error):
handler(.failure(error))
case .success(let model):
handler(.success(AgeNetLegacy(model: model)))
}
}
}
/**
Construct AgeNetLegacy instance asynchronously with URL of the .mlmodelc directory with optional configuration.
Model loading may take time when the model content is not immediately available (e.g. encrypted model). Use this factory method especially when the caller is on the main thread.
- parameters:
- modelURL: the URL to the model
- configuration: the desired model configuration
*/
@available(macOS 12.0, iOS 15.0, tvOS 15.0, watchOS 8.0, visionOS 1.0, *)
class func load(contentsOf modelURL: URL, configuration: MLModelConfiguration = MLModelConfiguration()) async throws -> AgeNetLegacy {
let model = try await MLModel.load(contentsOf: modelURL, configuration: configuration)
return AgeNetLegacy(model: model)
}
/**
Make a prediction using the structured interface
It uses the default function if the model has multiple functions.
- parameters:
- input: the input to the prediction as AgeNetLegacyInput
- throws: an NSError object that describes the problem
- returns: the result of the prediction as AgeNetLegacyOutput
*/
func prediction(input: AgeNetLegacyInput) throws -> AgeNetLegacyOutput {
try prediction(input: input, options: MLPredictionOptions())
}
/**
 Make a prediction using the structured interface

 It uses the default function if the model has multiple functions.

 - parameters:
   - input: the input to the prediction as AgeNetLegacyInput
   - options: prediction options

 - throws: an NSError object that describes the problem

 - returns: the result of the prediction as AgeNetLegacyOutput
*/
func prediction(input: AgeNetLegacyInput, options: MLPredictionOptions) throws -> AgeNetLegacyOutput {
    // Run the underlying model and wrap its feature provider in the typed output.
    return AgeNetLegacyOutput(features: try model.prediction(from: input, options: options))
}
/**
 Make an asynchronous prediction using the structured interface

 It uses the default function if the model has multiple functions.

 - parameters:
   - input: the input to the prediction as AgeNetLegacyInput
   - options: prediction options

 - throws: an NSError object that describes the problem

 - returns: the result of the prediction as AgeNetLegacyOutput
*/
@available(macOS 14.0, iOS 17.0, tvOS 17.0, watchOS 10.0, visionOS 1.0, *)
func prediction(input: AgeNetLegacyInput, options: MLPredictionOptions = MLPredictionOptions()) async throws -> AgeNetLegacyOutput {
    // Await the asynchronous CoreML prediction, then wrap its feature provider.
    let predictedFeatures = try await model.prediction(from: input, options: options)
    return AgeNetLegacyOutput(features: predictedFeatures)
}
/**
 Make a prediction using the convenience interface

 It uses the default function if the model has multiple functions.

 - parameters:
   - input: color (kCVPixelFormatType_32BGRA) image buffer, 112 pixels wide by 112 pixels high

 - throws: an NSError object that describes the problem

 - returns: the result of the prediction as AgeNetLegacyOutput
*/
func prediction(input: CVPixelBuffer) throws -> AgeNetLegacyOutput {
    // Wrap the pixel buffer in the typed input and defer to the structured interface.
    return try self.prediction(input: AgeNetLegacyInput(input: input))
}
/**
 Make a batch prediction using the structured interface

 It uses the default function if the model has multiple functions.

 - parameters:
   - inputs: the inputs to the prediction as [AgeNetLegacyInput]
   - options: prediction options

 - throws: an NSError object that describes the problem

 - returns: the result of the prediction as [AgeNetLegacyOutput]
*/
func predictions(inputs: [AgeNetLegacyInput], options: MLPredictionOptions = MLPredictionOptions()) throws -> [AgeNetLegacyOutput] {
    let batchIn = MLArrayBatchProvider(array: inputs)
    let batchOut = try model.predictions(from: batchIn, options: options)
    // Map each feature provider in the batch result to a typed output; `map` over the
    // index range replaces the original manual reserveCapacity/append loop and
    // pre-sizes the result automatically.
    return (0..<batchOut.count).map { index in
        AgeNetLegacyOutput(features: batchOut.features(at: index))
    }
}
}

View File

@ -0,0 +1,124 @@
import Foundation
import UIKit
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
import AccountContext
import FileMediaResourceStatus
import ZipArchive
// NOTE(review): `queue` does not appear to be referenced in this file — confirm it is
// used elsewhere before removing.
private let queue = Queue()

/// Availability of the on-device age-estimation model.
public enum AgeVerificationAvailability {
    /// Model is ready: path to the compiled `.mlmodelc` directory, plus whether it is
    /// the legacy (pre-iOS 15) variant.
    case available(String, Bool)
    /// Model archive is still downloading; associated value is fetch progress.
    case progress(Float)
    /// Model could not be resolved or downloaded.
    case unavailable
}
/// True when running in the iOS simulator, where the CoreML variant must be forced.
/// NOTE(review): not referenced anywhere in this chunk — presumably consulted elsewhere;
/// confirm before removing.
private var forceCoreMLVariant: Bool {
    #if targetEnvironment(simulator)
    return true
    #else
    return false
    #endif
}
/// Destination path for the unzipped, compiled age model (iOS 15+ variant).
private func modelPath() -> String {
    return NSTemporaryDirectory() + "AgeNet.mlmodelc"
}
// Username of the peer whose chat hosts the downloadable model archive.
private let modelPeer = "agecomputation"

/// Destination path for the unzipped, compiled legacy age model (pre-iOS 15 variant).
private func legacyModelPath() -> String {
    return NSTemporaryDirectory() + "AgeNetLegacy.mlmodelc"
}
// Username of the peer hosting the legacy model archive.
private let legacyModelPeer = "agelegacycomputation"
/// Resolves availability of the compiled age-estimation model, downloading and
/// unzipping it from a well-known Telegram peer's chat when not cached yet.
///
/// Emits `.available(path, isLegacy)` immediately when the compiled model already
/// exists on disk, `.progress(x)` while the archive is downloading, and
/// `.unavailable` when the hosting peer or its model message cannot be resolved.
public func ageVerificationAvailability(context: AccountContext) -> Signal<AgeVerificationAvailability, NoError> {
    // Pick the model variant: AgeNet requires iOS 15+, older systems use AgeNetLegacy.
    let compiledModelPath: String
    let modelPeerName: String
    let isLegacy: Bool
    if #available(iOS 15.0, *) {
        compiledModelPath = modelPath()
        modelPeerName = modelPeer
        isLegacy = false
    } else {
        compiledModelPath = legacyModelPath()
        modelPeerName = legacyModelPeer
        isLegacy = true
    }
    // Fast path: the model was already downloaded and unzipped on a previous run.
    if FileManager.default.fileExists(atPath: compiledModelPath) {
        return .single(.available(compiledModelPath, isLegacy))
    }
    return context.engine.peers.resolvePeerByName(name: modelPeerName, referrer: nil)
    |> mapToSignal { result -> Signal<AgeVerificationAvailability, NoError> in
        guard case let .result(maybePeer) = result else {
            // Still resolving; wait for a definitive result.
            return .complete()
        }
        guard let peer = maybePeer else {
            return .single(.unavailable)
        }
        // Inspect the last few messages of the hosting chat for the model archive file.
        return context.account.viewTracker.aroundMessageHistoryViewForLocation(.peer(peerId: peer.id, threadId: nil), index: .lowerBound, anchorIndex: .lowerBound, count: 5, fixedCombinedReadStates: nil)
        |> mapToSignal { view -> Signal<(TelegramMediaFile, EngineMessage)?, NoError> in
            if !view.0.isLoading {
                if let message = view.0.entries.last?.message, let file = message.media.first(where: { $0 is TelegramMediaFile }) as? TelegramMediaFile {
                    return .single((file, EngineMessage(message)))
                } else {
                    return .single(nil)
                }
            } else {
                // History still loading; keep waiting for a loaded view.
                return .complete()
            }
        }
        |> take(1)
        |> mapToSignal { maybeFileAndMessage -> Signal<AgeVerificationAvailability, NoError> in
            if let (file, message) = maybeFileAndMessage {
                let fetchedData = fetchedMediaResource(mediaBox: context.account.postbox.mediaBox, userLocation: .other, userContentType: .file, reference: FileMediaReference.message(message: MessageReference(message._asMessage()), media: file).resourceReference(file.resource))
                enum FetchStatus {
                    case completed(String)
                    case progress(Float)
                    case failed
                }
                // NOTE(review): `.failed` is never produced below, so a failed download
                // surfaces only as a stalled `.progress` stream — confirm this is intended.
                let fetchStatus = Signal<FetchStatus, NoError> { subscriber in
                    let fetchedDisposable = fetchedData.start()
                    let resourceDataDisposable = context.account.postbox.mediaBox.resourceData(file.resource, attemptSynchronously: false).start(next: { next in
                        if next.complete {
                            // Archive fully downloaded: unzip next to the expected compiled-model path.
                            SSZipArchive.unzipFile(atPath: next.path, toDestination: NSTemporaryDirectory())
                            subscriber.putNext(.completed(compiledModelPath))
                            subscriber.putCompletion()
                        }
                    }, error: subscriber.putError, completed: subscriber.putCompletion)
                    // Forward download progress while the file is being fetched.
                    let progressDisposable = messageFileMediaResourceStatus(context: context, file: file, message: message, isRecentActions: false).start(next: { status in
                        switch status.fetchStatus {
                        case let .Remote(progress), let .Fetching(_, progress), let .Paused(progress):
                            subscriber.putNext(.progress(progress))
                        default:
                            break
                        }
                    })
                    return ActionDisposable {
                        fetchedDisposable.dispose()
                        resourceDataDisposable.dispose()
                        progressDisposable.dispose()
                    }
                }
                return fetchStatus
                |> mapToSignal { status -> Signal<AgeVerificationAvailability, NoError> in
                    switch status {
                    case .completed:
                        return .single(.available(compiledModelPath, isLegacy))
                    case let .progress(progress):
                        return .single(.progress(progress))
                    case .failed:
                        return .single(.unavailable)
                    }
                }
            } else {
                return .single(.unavailable)
            }
        }
    }
}

View File

@ -0,0 +1,463 @@
import Foundation
import UIKit
import Display
import ComponentFlow
import SwiftSignalKit
import TelegramCore
import Markdown
import TextFormat
import TelegramPresentationData
import ViewControllerComponent
import SheetComponent
import BalancedTextComponent
import MultilineTextComponent
import BundleIconComponent
import ButtonComponent
import AccountContext
import PresentationDataUtils
import TelegramUIPreferences
import UndoUI
import DeviceAccess
/// Whether the app-level configuration mandates video age verification.
public func requireAgeVerification(context: AccountContext) -> Bool {
    let configurationData = context.currentAppConfiguration.with({ $0 }).data
    // Absent or non-boolean values count as "not required".
    return configurationData?["need_age_video_verification"] as? Bool ?? false
}
/// Whether age verification must be shown before opening `peer`: the global flag must be
/// set, the peer must carry sensitive content on iOS, and the user must not already have
/// the "sensitive" restriction reason whitelisted in their content settings.
public func requireAgeVerification(context: AccountContext, peer: EnginePeer) -> Signal<Bool, NoError> {
    guard requireAgeVerification(context: context), peer._asPeer().hasSensitiveContent(platform: "ios") else {
        return .single(false)
    }
    return context.engine.data.get(TelegramEngine.EngineData.Item.Configuration.ContentSettings())
    |> map { contentSettings in
        return !contentSettings.ignoreContentRestrictionReasons.contains("sensitive")
    }
}
/// Sheet body for the age-verification prompt: icon, close button, title, explanatory
/// text and a "Verify My Age" button that triggers the camera flow via the enclosing
/// `AgeVerificationScreen`.
private final class SheetContent: CombinedComponent {
    typealias EnvironmentType = ViewControllerComponentContainer.Environment

    let context: AccountContext
    // Invoked when the user taps the close button.
    let dismiss: () -> Void

    init(
        context: AccountContext,
        dismiss: @escaping () -> Void
    ) {
        self.context = context
        self.dismiss = dismiss
    }

    static func ==(lhs: SheetContent, rhs: SheetContent) -> Bool {
        if lhs.context !== rhs.context {
            return false
        }
        return true
    }

    final class State: ComponentState {
        // Cached close-button image, regenerated when the theme instance changes.
        var cachedCloseImage: (UIImage, PresentationTheme)?
    }

    func makeState() -> State {
        return State()
    }

    static var body: Body {
        let background = Child(RoundedRectangle.self)
        let icon = Child(ZStack<Empty>.self)
        let closeButton = Child(Button.self)
        let title = Child(Text.self)
        let text = Child(BalancedTextComponent.self)
        let button = Child(ButtonComponent.self)

        return { context in
            let environment = context.environment[EnvironmentType.self]
            let component = context.component
            let state = context.state

            let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
            let theme = presentationData.theme
            //let strings = presentationData.strings

            // Accumulates the sheet's content height; starts with the top inset.
            var contentSize = CGSize(width: context.availableSize.width, height: 18.0)

            // Opaque sheet background, tall enough to cover any resulting content height.
            let background = background.update(
                component: RoundedRectangle(color: theme.actionSheet.opaqueItemBackgroundColor, cornerRadius: 8.0),
                availableSize: CGSize(width: context.availableSize.width, height: 1000.0),
                transition: .immediate
            )
            context.add(background
                .position(CGPoint(x: context.availableSize.width / 2.0, y: background.size.height / 2.0))
            )

            // Circular accent badge with the face-verification glyph on top.
            let icon = icon.update(
                component: ZStack([
                    AnyComponentWithIdentity(
                        id: AnyHashable(0),
                        component: AnyComponent(RoundedRectangle(color: theme.list.itemCheckColors.fillColor, cornerRadius: 45.0, size: CGSize(width: 90.0, height: 90.0)))
                    ),
                    AnyComponentWithIdentity(
                        id: AnyHashable(1),
                        component: AnyComponent(BundleIconComponent(
                            name: "Settings/FaceVerification",
                            tintColor: theme.list.itemCheckColors.foregroundColor
                        ))
                    )
                ]),
                availableSize: CGSize(width: 90.0, height: 90.0),
                transition: .immediate
            )
            context.add(icon
                .position(CGPoint(x: context.availableSize.width / 2.0, y: icon.size.height / 2.0 + 31.0))
            )

            // Reuse the cached close glyph while the theme instance is unchanged.
            let closeImage: UIImage
            if let (image, cacheTheme) = state.cachedCloseImage, theme === cacheTheme {
                closeImage = image
            } else {
                closeImage = generateCloseButtonImage(backgroundColor: UIColor(rgb: 0x808084, alpha: 0.1), foregroundColor: theme.actionSheet.inputClearButtonColor)!
                state.cachedCloseImage = (closeImage, theme)
            }
            let closeButton = closeButton.update(
                component: Button(
                    content: AnyComponent(Image(image: closeImage)),
                    action: {
                        component.dismiss()
                    }
                ),
                availableSize: CGSize(width: 30.0, height: 30.0),
                transition: .immediate
            )
            context.add(closeButton
                .position(CGPoint(x: context.availableSize.width - closeButton.size.width, y: 28.0))
            )

            let constrainedTitleWidth = context.availableSize.width - 16.0 * 2.0

            // Space consumed by the icon area above the title.
            contentSize.height += 124.0

            let title = title.update(
                component: Text(text: "Age Verification", font: Font.bold(24.0), color: theme.list.itemPrimaryTextColor),
                availableSize: CGSize(width: constrainedTitleWidth, height: context.availableSize.height),
                transition: .immediate
            )
            context.add(title
                .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + title.size.height / 2.0))
            )
            contentSize.height += title.size.height
            contentSize.height += 13.0

            // Markdown styling for the explanatory paragraph.
            let textFont = Font.regular(15.0)
            let boldTextFont = Font.semibold(15.0)
            let textColor = theme.actionSheet.primaryTextColor
            let linkColor = theme.actionSheet.controlAccentColor
            let markdownAttributes = MarkdownAttributes(body: MarkdownAttributeSet(font: textFont, textColor: textColor), bold: MarkdownAttributeSet(font: boldTextFont, textColor: textColor), link: MarkdownAttributeSet(font: textFont, textColor: linkColor), linkAttribute: { contents in
                return (TelegramTextAttributes.URL, contents)
            })

            let textString = "To access this content, you must confirm you are at least **18** years old as required by UK law.\n\nThis is a one-time process using your phone's camera. Your selfie will not be stored by Telegram."
            let text = text.update(
                component: BalancedTextComponent(
                    text: .markdown(
                        text: textString,
                        attributes: markdownAttributes
                    ),
                    horizontalAlignment: .center,
                    maximumNumberOfLines: 0,
                    lineSpacing: 0.2
                ),
                availableSize: CGSize(width: constrainedTitleWidth, height: context.availableSize.height),
                transition: .immediate
            )
            context.add(text
                .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + text.size.height / 2.0))
            )
            contentSize.height += text.size.height
            contentSize.height += 23.0

            // The confirm button reports acceptance to the enclosing screen.
            let controller = environment.controller() as? AgeVerificationScreen
            let button = button.update(
                component: ButtonComponent(
                    background: ButtonComponent.Background(
                        color: theme.list.itemCheckColors.fillColor,
                        foreground: theme.list.itemCheckColors.foregroundColor,
                        pressedColor: theme.list.itemCheckColors.fillColor.withMultipliedAlpha(0.9),
                        cornerRadius: 10.0
                    ),
                    content: AnyComponentWithIdentity(
                        id: AnyHashable(0),
                        component: AnyComponent(MultilineTextComponent(text: .plain(NSMutableAttributedString(string: "Verify My Age", font: Font.semibold(17.0), textColor: theme.list.itemCheckColors.foregroundColor, paragraphAlignment: .center))))
                    ),
                    isEnabled: true,
                    displaysProgress: false,
                    action: { [weak controller] in
                        controller?.complete(result: true)
                    }
                ),
                availableSize: CGSize(width: context.availableSize.width - 16.0 * 2.0, height: 50),
                transition: .immediate
            )
            context.add(button
                .clipsToBounds(true)
                .cornerRadius(10.0)
                .position(CGPoint(x: context.availableSize.width / 2.0, y: contentSize.height + button.size.height / 2.0))
            )
            contentSize.height += button.size.height
            contentSize.height += 48.0

            return contentSize
        }
    }
}
/// Wraps `SheetContent` in a `SheetComponent`, handling animated/non-animated dismissal.
/// Every dismissal path reports a negative result via `AgeVerificationScreen.complete`
/// (which is a no-op once a result has already been delivered).
private final class AgeVerificationSheetComponent: CombinedComponent {
    typealias EnvironmentType = ViewControllerComponentContainer.Environment

    private let context: AccountContext

    init(
        context: AccountContext
    ) {
        self.context = context
    }

    static func ==(lhs: AgeVerificationSheetComponent, rhs: AgeVerificationSheetComponent) -> Bool {
        if lhs.context !== rhs.context {
            return false
        }
        return true
    }

    static var body: Body {
        let sheet = Child(SheetComponent<(EnvironmentType)>.self)
        // Slot used to trigger the sheet's dismissal animation from outside.
        let animateOut = StoredActionSlot(Action<Void>.self)

        return { context in
            let environment = context.environment[EnvironmentType.self]
            let controller = environment.controller

            let sheet = sheet.update(
                component: SheetComponent<EnvironmentType>(
                    content: AnyComponent<EnvironmentType>(SheetContent(
                        context: context.component.context,
                        dismiss: {
                            // Close tapped inside the content: animate out, then report a negative result.
                            animateOut.invoke(Action { _ in
                                if let controller = controller() as? AgeVerificationScreen {
                                    controller.complete(result: false)
                                    controller.dismiss(completion: nil)
                                }
                            })
                        }
                    )),
                    backgroundColor: .color(environment.theme.list.modalBlocksBackgroundColor),
                    followContentSizeChanges: true,
                    clipsContent: true,
                    animateOut: animateOut
                ),
                environment: {
                    environment
                    SheetComponentEnvironment(
                        isDisplaying: environment.value.isVisible,
                        isCentered: environment.metrics.widthClass == .regular,
                        hasInputHeight: !environment.inputHeight.isZero,
                        regularMetricsSize: CGSize(width: 430.0, height: 900.0),
                        dismiss: { animated in
                            // System-driven dismissal (swipe, tap outside): same negative-result path,
                            // with or without the out animation.
                            if animated {
                                animateOut.invoke(Action { _ in
                                    if let controller = controller() as? AgeVerificationScreen {
                                        controller.complete(result: false)
                                        controller.dismiss(completion: nil)
                                    }
                                })
                            } else {
                                if let controller = controller() as? AgeVerificationScreen {
                                    controller.complete(result: false)
                                    controller.dismiss(completion: nil)
                                }
                            }
                        }
                    )
                },
                availableSize: context.availableSize,
                transition: context.transition
            )

            context.add(sheet
                .position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height / 2.0))
            )

            return context.availableSize
        }
    }
}
/// Modal sheet asking the user to start age verification. Reports exactly one result
/// through `completion`: `true` (after camera access is granted) together with a signal
/// of model availability, or `false` when dismissed/declined.
public final class AgeVerificationScreen: ViewControllerComponentContainer {
    private let context: AccountContext
    private let completion: (Bool, Signal<AgeVerificationAvailability, NoError>) -> Void
    // Starts model resolution eagerly so the follow-up scan screen can consume it.
    private let promise = Promise<AgeVerificationAvailability>()

    public init(
        context: AccountContext,
        completion: @escaping (Bool, Signal<AgeVerificationAvailability, NoError>) -> Void
    ) {
        self.context = context
        self.completion = completion

        // Kick off model download/resolution immediately, in parallel with the prompt.
        self.promise.set(ageVerificationAvailability(context: context))

        super.init(
            context: context,
            component: AgeVerificationSheetComponent(
                context: context
            ),
            navigationBarAppearance: .none,
            statusBarStyle: .ignore,
            theme: .default
        )

        self.navigationPresentation = .flatModal
    }

    required public init(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // Guards against delivering the completion more than once (multiple dismissal paths
    // in the sheet component can race with the confirm button).
    private var didComplete = false

    /// Delivers the user's decision. On acceptance, camera access is requested first and
    /// the positive completion fires only once access is granted; on refusal the negative
    /// completion fires immediately (the sheet component performs its own dismissal).
    fileprivate func complete(result: Bool) {
        guard !self.didComplete else {
            return
        }
        if result {
            let context = self.context
            let presentationData = context.sharedContext.currentPresentationData.with { $0 }
            DeviceAccess.authorizeAccess(to: .camera(.ageVerification), presentationData: presentationData, present: { c, a in
                c.presentationArguments = a
                context.sharedContext.mainWindow?.present(c, on: .root)
            }, openSettings: {
                context.sharedContext.applicationBindings.openSettings()
            }, { [weak self] granted in
                guard let self, granted else {
                    // Access denied: no result is delivered, leaving the sheet up so the
                    // user can retry or dismiss.
                    return
                }
                self.didComplete = true
                self.completion(true, self.promise.get())
                self.dismissAnimated()
            })
        } else {
            self.didComplete = true
            self.completion(false, self.promise.get())
        }
    }

    /// Plays the sheet's out animation by reaching into the tagged sheet view.
    public func dismissAnimated() {
        if let view = self.node.hostView.findTaggedView(tag: SheetComponent<ViewControllerComponentContainer.Environment>.View.Tag()) as? SheetComponent<ViewControllerComponentContainer.Environment>.View {
            view.dismissAnimated()
        }
    }
}
/// Draws a 30×30 circular close ("X") glyph: a filled circle with two rounded-cap
/// diagonal strokes on top.
func generateCloseButtonImage(backgroundColor: UIColor, foregroundColor: UIColor) -> UIImage? {
    return generateImage(CGSize(width: 30.0, height: 30.0), contextGenerator: { size, context in
        let bounds = CGRect(origin: CGPoint(), size: size)
        context.clear(bounds)

        // Filled circular backdrop.
        context.setFillColor(backgroundColor.cgColor)
        context.fillEllipse(in: bounds)

        // The two diagonals of the "X", stroked with rounded caps.
        context.setLineWidth(2.0)
        context.setLineCap(.round)
        context.setStrokeColor(foregroundColor.cgColor)
        let strokes: [(CGPoint, CGPoint)] = [
            (CGPoint(x: 10.0, y: 10.0), CGPoint(x: 20.0, y: 20.0)),
            (CGPoint(x: 20.0, y: 10.0), CGPoint(x: 10.0, y: 20.0))
        ]
        for (start, end) in strokes {
            context.move(to: start)
            context.addLine(to: end)
            context.strokePath()
        }
    })
}
/// Entry point for the age-verification flow. Skips straight to `completion` when the
/// user already passed verification; otherwise pushes the info sheet, then the face-scan
/// screen, persisting the result and showing a toast on success or failure.
public func presentAgeVerification(context: AccountContext, parentController: ViewController, completion: @escaping () -> Void) {
    let presentationData = context.sharedContext.currentPresentationData.with { $0 }
    let _ = (context.engine.data.get(
        TelegramEngine.EngineData.Item.Configuration.ApplicationSpecificPreference(key: ApplicationSpecificPreferencesKeys.ageVerificationState)
    ) |> deliverOnMainQueue).start(next: { [weak parentController] ageVerificationStatePreference in
        let state = ageVerificationStatePreference?.get(AgeVerificationState.self) ?? AgeVerificationState.default
        if state.verificationPassed {
            // Already verified on this account: nothing to show.
            completion()
        } else {
            let infoScreen = AgeVerificationScreen(context: context, completion: { [weak parentController] check, availability in
                if check {
                    // User accepted and camera access was granted: run the scan.
                    let scanScreen = FaceScanScreen(context: context, availability: availability, completion: { [weak parentController] passed in
                        if passed {
                            // Persist the pass so the flow is one-time per account.
                            let _ = updateAgeVerificationState(engine: context.engine, { _ in
                                return AgeVerificationState(verificationPassed: passed)
                            }).start()
                            completion()
                            // Capture the navigation controller now; `parentController` may be
                            // gone by the time the delayed toast fires.
                            let navigationController = parentController?.navigationController
                            Queue.mainQueue().after(2.0) {
                                let controller = UndoOverlayController(presentationData: presentationData, content: .actionSucceeded(title: "Age check passed!", text: "You can now view this content.", cancel: nil, destructive: false), action: { _ in return true })
                                (navigationController?.viewControllers.last as? ViewController)?.present(controller, in: .window(.root))
                            }
                        } else {
                            let controller = UndoOverlayController(presentationData: presentationData, content: .universal(animation: "anim_banned", scale: 0.066, colors: [:], title: "Age check failed!", text: "Sorry, you can't view this content.", customUndoText: nil, timeout: nil), action: { _ in return true })
                            parentController?.present(controller, in: .window(.root))
                        }
                    })
                    parentController?.push(scanScreen)
                }
            })
            parentController?.push(infoScreen)
        }
    })
}
/// Reads the stored age-verification state (falling back to the default), applies `f`,
/// and persists the transformed state.
func updateAgeVerificationState(engine: TelegramEngine, _ f: @escaping (AgeVerificationState) -> AgeVerificationState) -> Signal<Never, NoError> {
    return engine.preferences.update(id: ApplicationSpecificPreferencesKeys.ageVerificationState, { entry in
        // Missing or undecodable entries are treated as the default state.
        let storedState = entry?.get(AgeVerificationState.self) ?? .default
        return SharedPreferencesEntry(f(storedState))
    })
}
/// Per-account persisted flag recording whether the user has passed age verification.
public struct AgeVerificationState: Equatable, Codable {
    public var verificationPassed: Bool

    // Initial state: verification not yet passed.
    public static var `default`: AgeVerificationState {
        return AgeVerificationState(verificationPassed: false)
    }

    public init(verificationPassed: Bool) {
        self.verificationPassed = verificationPassed
    }

    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: StringCodingKey.self)
        // Stored as Int32 for preference-entry compatibility; any non-zero value counts as passed.
        let rawValue = try container.decode(Int32.self, forKey: "verificationPassed")
        self.verificationPassed = rawValue != 0
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: StringCodingKey.self)
        let rawValue: Int32 = self.verificationPassed ? 1 : 0
        try container.encode(rawValue, forKey: "verificationPassed")
    }
}

View File

@ -0,0 +1,785 @@
import Foundation
import UIKit
import AccountContext
import AsyncDisplayKit
import Display
import SwiftSignalKit
import Camera
import CoreImage
import TelegramPresentationData
import TelegramCore
import Markdown
import TextFormat
import PresentationDataUtils
import ComponentFlow
import ViewControllerComponent
import Vision
import AVFoundation
import AppBundle
import ZipArchive
import PlainButtonComponent
import MultilineTextComponent
// Minimum age (in years) the averaged model estimate must reach for the check to pass.
private let requiredAge = 18
final class FaceScanScreenComponent: Component {
typealias EnvironmentType = ViewControllerComponentContainer.Environment
let context: AccountContext
let availability: Signal<AgeVerificationAvailability, NoError>
// - context: account context used for presentation data and engine access.
// - availability: signal delivering the age-model state (download progress / ready path).
init(
    context: AccountContext,
    availability: Signal<AgeVerificationAvailability, NoError>
) {
    self.context = context
    self.availability = availability
}
// Instances are treated as always-equal, so the component never triggers an update
// from prop changes; `context` and `availability` are set once at creation.
// NOTE(review): confirm this is intentional — a differing `context` would not be
// detected here, unlike the other components in this flow.
static func ==(lhs: FaceScanScreenComponent, rhs: FaceScanScreenComponent) -> Bool {
    return true
}
final class View: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
// MARK: - Camera capture
private let captureSession = AVCaptureSession()
private lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
private let videoDataOutput = AVCaptureVideoDataOutput()
// Dedicated serial queue for per-frame sample-buffer callbacks.
private let videoDataOutputQueue = DispatchQueue(label: "VideoDataOutput", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)

// Vision request created in setupVision(); implicitly unwrapped because it is
// assigned before any frame is processed.
private var faceDetectionRequest: VNDetectFaceRectanglesRequest!

// MARK: - Overlay UI
// Dark overlay with a masked cutout revealing the camera preview.
private let overlayView = UIView()
private let cutoutLayer = CAShapeLayer()
private let frameView = FrameView()

// Number of circular segments the user must sweep through with head motion.
private let numberOfStripes = 16
// Indices of segments already completed in the current attempt.
private var completedAngles: Set<Int> = []

private let instruction = ComponentView<Empty>()
private let cancel = ComponentView<Empty>()

// Most recent cropped face image awaiting age estimation; nil when no face is visible.
private var currentFaceImage: CIImage?

// Scan flow state machine, advanced by handleFaceDetection().
private enum State {
    case waitingForFace
    case positioning
    case readyToStart
    case tracking
    case completed
}
private var processState: State = .waitingForFace

// Last observed head pose (used to filter out jitter between frames).
private var lastFaceYaw: NSNumber?
private var lastFacePitch: NSNumber?
private var lastFaceRoll: NSNumber?

// Reference ("neutral") pose captured when tracking starts; segment angles are relative to it.
private var centerYaw: Double = 0
private var centerPitch: Double = 0
private var centerRoll: Double = 0

// Minimum relative rotation magnitude before a segment can be credited.
private let motionThreshold: Double = 0.07

// Delays face-positioning confirmation and segment dwell, respectively.
private var faceDetectionTimer: Foundation.Timer?
private var segmentTimer: Foundation.Timer?
private var currentSegment: Int?
// Time the head must dwell in a segment before it counts.
private let segmentDwellTime: TimeInterval = 0.05
// Time the face must stay centered before tracking may begin.
private let positioningTime: TimeInterval = 1.0

// MARK: - Guide lines (currently unused by the visible code paths; see the
// commented-out updateTrailingGuideLines call)
private var horizontalGuideLines: [CAShapeLayer] = []
private var verticalGuideLines: [CAShapeLayer] = []
private let maxGuideLines = 30
private let guideLineFadeDuration: TimeInterval = 0.3
private var lastGuideLineUpdate: Date = Date()
private let guideLineUpdateInterval: TimeInterval = 0.01
private var lastGuideLineYaw: Double = 0
private var lastGuideLinePitch: Double = 0
private let angleThreshold: Double = 0.01

// MARK: - Age estimation
// CoreML model wrapped for Vision; nil until the download/availability signal delivers it.
private var ageModel: VNCoreMLModel?
// Age estimates accumulated across sampled frames; averaged at completion.
private var ages: [Double] = []
private var availabilityDisposable: Disposable?

private var isUpdating: Bool = false

private var component: FaceScanScreenComponent?
private(set) weak var state: EmptyComponentState?
private var environment: EnvironmentType?
override init(frame: CGRect) {
    super.init(frame: frame)

    self.backgroundColor = .black

    //self.previewLayer.backgroundColor = UIColor.red.cgColor
    // Camera preview fills the view; the dark overlay above it reveals it through a cutout.
    self.previewLayer.videoGravity = .resizeAspectFill
    self.layer.addSublayer(previewLayer)

    self.overlayView.backgroundColor = UIColor.black
    self.addSubview(overlayView)

    // Even-odd fill rule lets the mask path punch a transparent hole in the overlay.
    self.cutoutLayer.fillRule = .evenOdd
    self.overlayView.layer.mask = self.cutoutLayer

    self.addSubview(self.frameView)
}
// Not supported: this view is only created programmatically.
required init?(coder: NSCoder) {
    preconditionFailure()
}
deinit {
    self.availabilityDisposable?.dispose()
    // Cancel any pending timers so they cannot fire (or retain their closures) after
    // the view is deallocated; they are otherwise only invalidated in resetTracking().
    self.faceDetectionTimer?.invalidate()
    self.segmentTimer?.invalidate()
}
/// Subscribes to model availability and, once the compiled model is on disk, loads the
/// matching CoreML variant and wraps it for use with Vision.
private func setupModel(availability: Signal<AgeVerificationAvailability, NoError>) {
    self.availabilityDisposable = (availability
    |> deliverOnMainQueue).start(next: { [weak self] availability in
        guard let self else {
            return
        }
        if case let .available(path, isLegacy) = availability {
            if isLegacy {
                if let model = try? AgeNetLegacy(contentsOf: URL(fileURLWithPath: path)).model {
                    self.ageModel = try? VNCoreMLModel(for: model)
                }
            } else if #available(iOS 15.0, *) {
                // The non-legacy AgeNet model is only used on iOS 15+.
                if let model = try? AgeNet(contentsOf: URL(fileURLWithPath: path)).model {
                    self.ageModel = try? VNCoreMLModel(for: model)
                }
            }
        }
    })
}
/// Crops the detected face (plus 10% padding) out of the camera frame and orients it
/// upright for the age model.
///
/// - Parameters:
///   - pixelBuffer: The raw camera frame.
///   - faceObservation: Vision face observation with a normalized bounding box.
/// - Returns: The cropped, `.leftMirrored`-oriented face image.
private func extractFaceImage(from pixelBuffer: CVPixelBuffer, faceObservation: VNFaceObservation) -> CIImage? {
    let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
    let imageSize = ciImage.extent.size

    // Remap the normalized Vision box into the buffer's coordinate space.
    // NOTE(review): this swap/flip assumes the buffer orientation matches the
    // `.leftMirrored` orientation used when performing the detection — confirm against
    // captureOutput's request handler.
    let visionRect = faceObservation.boundingBox
    let boundingBox = CGRect(x: 1.0 - visionRect.maxY, y: 1.0 - visionRect.maxX, width: visionRect.height, height: visionRect.width)

    let faceRect = CGRect(
        x: boundingBox.minX * imageSize.width,
        y: boundingBox.minY * imageSize.height,
        width: boundingBox.width * imageSize.width,
        height: boundingBox.height * imageSize.height
    )

    // Pad the crop by 10% on each side, clamped to the image bounds.
    let padding: CGFloat = 0.1
    let paddingX = faceRect.width * padding
    let paddingY = faceRect.height * padding

    let paddedRect = CGRect(
        x: max(0, faceRect.minX - paddingX),
        y: max(0, faceRect.minY - paddingY),
        width: min(imageSize.width - max(0, faceRect.minX - paddingX), faceRect.width + 2 * paddingX),
        height: min(imageSize.height - max(0, faceRect.minY - paddingY), faceRect.height + 2 * paddingY)
    )

    let croppedImage = ciImage.cropped(to: paddedRect)
    let rotatedImage = croppedImage.oriented(.leftMirrored)

    return rotatedImage
}
/// Configures the front wide-angle camera and attaches the video data output whose
/// frames drive face detection, then starts the session off the main thread.
private func setupCamera() {
    guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
        // No front camera (e.g. some simulators): the scan simply never starts.
        return
    }

    do {
        let input = try AVCaptureDeviceInput(device: device)

        self.captureSession.beginConfiguration()
        self.captureSession.sessionPreset = .high

        if self.captureSession.canAddInput(input) {
            self.captureSession.addInput(input)
        }

        if self.captureSession.canAddOutput(self.videoDataOutput) {
            self.captureSession.addOutput(self.videoDataOutput)
            self.videoDataOutput.alwaysDiscardsLateVideoFrames = true
            self.videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        }

        self.captureSession.commitConfiguration()
    } catch {
        print("Failed to setup camera: \(error)")
    }

    // startRunning blocks, so keep it off the main thread.
    Queue.concurrentDefaultQueue().async {
        self.captureSession.startRunning()
    }
}
/// Creates the Vision face-rectangles request; results are forwarded to
/// `handleFaceDetection` on the main queue.
private func setupVision() {
    self.faceDetectionRequest = VNDetectFaceRectanglesRequest { [weak self] detectionRequest, detectionError in
        if detectionError != nil {
            return
        }
        Queue.mainQueue().async {
            self?.handleFaceDetection(detectionRequest)
        }
    }
}
/// Advances the scan state machine from the latest face-detection results (main queue).
/// Requires the face to stay near the frame center; drives positioning → tracking.
private func handleFaceDetection(_ request: VNRequest) {
    // Face pose angles read below (yaw/pitch/roll as used here) require iOS 15.
    // NOTE(review): on earlier systems this returns immediately, so the legacy-model
    // path never progresses through this state machine — confirm intended.
    guard #available(iOS 15.0, *) else {
        return
    }
    guard let observations = request.results as? [VNFaceObservation], let face = observations.first else {
        // Face lost mid-scan: restart from the beginning.
        if self.processState == .tracking || self.processState == .readyToStart {
            self.resetTracking()
        }
        self.currentFaceImage = nil
        return
    }

    guard let yaw = face.yaw,
          let pitch = face.pitch,
          let roll = face.roll else { return }

    // Only proceed while the face is within 0.2 (normalized) of the frame center.
    let faceCenter = CGPoint(x: face.boundingBox.midX, y: face.boundingBox.midY)
    let targetCenter = CGPoint(x: 0.5, y: 0.5)
    let distance = sqrt(pow(faceCenter.x - targetCenter.x, 2) + pow(faceCenter.y - targetCenter.y, 2))

    if distance < 0.2 {
        switch processState {
        case .waitingForFace:
            // Start the positioning countdown; after it elapses the scan may begin.
            self.processState = .positioning
            self.faceDetectionTimer = Timer.scheduledTimer(withTimeInterval: self.positioningTime, repeats: false) { [weak self] _ in
                self?.processState = .readyToStart
                self?.state?.updated(transition: .spring(duration: 0.3))
            }
        case .positioning:
            break
        case .readyToStart:
            // Capture the neutral pose; subsequent motion is measured relative to it.
            self.centerYaw = yaw.doubleValue
            self.centerPitch = pitch.doubleValue
            self.centerRoll = roll.doubleValue
            self.processState = .tracking
        case .tracking:
            self.trackHeadOrientation(yaw: yaw, pitch: pitch, roll: roll)
        case .completed:
            break
        }
    } else if self.processState == .tracking {
        // Face drifted off-center during tracking: restart.
        self.resetTracking()
    }
}
/// Updates segment completion from the current head pose, measured relative to the
/// neutral pose captured when tracking started.
///
/// - Parameters:
///   - yaw: Current face yaw reported by Vision.
///   - pitch: Current face pitch reported by Vision.
///   - roll: Current face roll reported by Vision (stored, not used for segmentation).
private func trackHeadOrientation(yaw: NSNumber, pitch: NSNumber, roll: NSNumber) {
    let relativeYaw = yaw.doubleValue - self.centerYaw
    let relativePitch = pitch.doubleValue - self.centerPitch

    //self.updateTrailingGuideLines(yaw: relativeYaw, pitch: relativePitch)

    // Jitter filter: skip processing when the pose barely changed since the last frame.
    if let lastYaw = self.lastFaceYaw, let lastPitch = self.lastFacePitch {
        let yawChange = abs(yaw.doubleValue - lastYaw.doubleValue)
        let pitchChange = abs(pitch.doubleValue - lastPitch.doubleValue)
        if yawChange < 0.02 && pitchChange < 0.02 {
            return
        }
    }

    // Map the (mirrored) rotation direction onto one of `numberOfStripes` circular segments.
    let mirroredYaw = -relativeYaw
    let flippedPitch = relativePitch
    let angle = atan2(mirroredYaw, flippedPitch) + .pi
    let normalizedAngle = angle / (2 * .pi)
    let segmentIndex = Int(normalizedAngle * Double(self.numberOfStripes)) % self.numberOfStripes

    // Credit the segment only after the head rotation is large enough and the pose has
    // dwelled there for `segmentDwellTime`.
    let rotationMagnitude = sqrt(relativeYaw * relativeYaw + relativePitch * relativePitch)
    if rotationMagnitude > self.motionThreshold {
        if self.currentSegment != segmentIndex {
            self.currentSegment = segmentIndex
            self.segmentTimer?.invalidate()
            // Capture self weakly: the timer retains its closure until it fires, and a
            // strong capture would keep the view alive past dismissal. This also matches
            // the weak capture already used by faceDetectionTimer.
            self.segmentTimer = Timer.scheduledTimer(withTimeInterval: self.segmentDwellTime, repeats: false) { [weak self] _ in
                self?.fillSegment(segmentIndex)
            }
        }
    }

    self.lastFaceYaw = yaw
    self.lastFacePitch = pitch
    self.lastFaceRoll = roll
}
/// Marks a segment as completed; when all segments are done, averages the collected age
/// estimates and reports pass/fail to the enclosing controller. If no estimates were
/// collected (model not yet loaded), tracking restarts instead.
private func fillSegment(_ segmentIndex: Int) {
    guard !self.completedAngles.contains(segmentIndex) else {
        return
    }
    self.completedAngles.insert(segmentIndex)

    if self.completedAngles.count >= self.numberOfStripes {
        // NOTE(review): these delayed closures capture self strongly, keeping the view
        // alive until the completion/restart runs — confirm that is intended.
        Queue.mainQueue().after(0.3, {
            self.processState = .completed
            self.state?.updated(transition: .spring(duration: 0.3))

            Queue.mainQueue().after(1.0) {
                if !self.ages.isEmpty {
                    let averageAge = self.ages.reduce(0, +) / Double(self.ages.count)
                    if let environment = self.environment, let controller = environment.controller() as? FaceScanScreen {
                        controller.completion(averageAge >= Double(requiredAge))
                        controller.dismiss(animated: true)
                    }
                } else {
                    // No age samples yet: restart the sweep so more frames get sampled.
                    self.completedAngles.removeAll()
                    self.processState = .tracking
                    self.state?.updated(transition: .spring(duration: 0.3))
                }
            }
        })
    }
    self.state?.updated(transition: .spring(duration: 0.3))
}
/// Cancels timers and clears all per-attempt state, returning the flow to
/// `.waitingForFace` (used when the face is lost or drifts off-center).
private func resetTracking() {
    self.faceDetectionTimer?.invalidate()
    self.segmentTimer?.invalidate()

    self.processState = .waitingForFace
    self.completedAngles.removeAll()
    self.currentSegment = nil
    self.lastFaceYaw = nil
    self.lastFacePitch = nil
    self.lastFaceRoll = nil
    self.currentFaceImage = nil

    self.state?.updated(transition: .spring(duration: 0.3))
}
// Frame counter used to throttle face extraction / age estimation to every 33rd
// face-bearing frame.
private var tick = 0

/// Per-frame camera callback (invoked on the video-output queue): runs face detection
/// on every frame and, when a face is present, periodically extracts the face image and
/// feeds it to the age model.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

    // `.leftMirrored` matches the front-camera buffer orientation used throughout.
    let requestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: .leftMirrored)

    do {
        try requestHandler.perform([self.faceDetectionRequest])

        if let observations = self.faceDetectionRequest.results, let faceObservation = observations.first {
            tick += 1
            if tick < 33 {
                return
            }
            tick = 0
            self.currentFaceImage = self.extractFaceImage(from: pixelBuffer, faceObservation: faceObservation)
            self.processFace()
        }
    } catch {
        print("Failed to perform request: \(error)")
    }
}
/// Runs the age-estimation model on the most recently extracted face image and appends
/// the resulting estimate to `ages` (on the main queue). No-op while the model is still
/// downloading or no face image is available.
public func processFace() {
    guard let faceImage = self.currentFaceImage else {
        return
    }
    guard let model = self.ageModel else {
        return
    }

    let request = VNCoreMLRequest(model: model) { [weak self] request, error in
        if let results = request.results as? [VNCoreMLFeatureValueObservation], let ageObservation = results.last {
            // First element of the output multi-array is the age estimate; 0 when absent.
            let age = ageObservation.featureValue.multiArrayValue?[0].doubleValue ?? 0
            Queue.mainQueue().async {
                self?.ages.append(age)
            }
        }
    }

    let handler = VNImageRequestHandler(ciImage: faceImage)
    DispatchQueue.global(qos: .userInteractive).async {
        do {
            try handler.perform([request])
        } catch {
            print(error)
        }
    }
}
/// Component update entry point. Performs one-time setup (age model, camera
/// session, Vision requests) on the first call, then lays out the camera
/// preview, the dimmed overlay with its cut-out hole, the frame view, the
/// instruction text and the cancel button for the current `processState`.
func update(component: FaceScanScreenComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
    self.isUpdating = true
    defer {
        self.isUpdating = false
    }
    // First update only: initialize model, capture session and Vision.
    if self.component == nil {
        self.setupModel(availability: component.availability)
        self.setupCamera()
        self.setupVision()
    }
    self.component = component
    self.state = state
    let environment = environment[EnvironmentType.self].value
    self.environment = environment
    let theme = environment.theme
    let strings = environment.strings
    self.overlayView.frame = CGRect(origin: .zero, size: availableSize)
    self.cutoutLayer.frame = CGRect(origin: .zero, size: availableSize)
    // Cut-out path: full-screen rect plus a hole whose shape depends on the
    // scanning phase (circle while tracking, rounded rect while positioning).
    let path = CGMutablePath(rect: overlayView.bounds, transform: nil)
    let radius: CGFloat = 130.0
    let widthRadius = ceil(radius * 1.05)
    let heightRadius = widthRadius //radius //floor(widthRadius * 1.17778)
    let center = CGPoint(x: availableSize.width / 2, y: environment.statusBarHeight + 10.0 + widthRadius * 1.3)
    var previewScale = 1.0
    if self.processState == .tracking || self.processState == .readyToStart || self.processState == .completed {
        // Circular hole; the camera preview shrinks to fit inside it.
        let circlePath = CGPath(roundedRect: CGRect(x: center.x - radius, y: center.y - radius, width: radius * 2, height: radius * 2), cornerWidth: radius, cornerHeight: radius, transform: nil)
        path.addPath(circlePath)
        previewScale = 0.75
    } else {
        // Rounded-rectangle hole used while waiting for / positioning a face.
        let rectanglePath = CGPath(roundedRect: CGRect(x: center.x - widthRadius, y: center.y - heightRadius, width: widthRadius * 2, height: heightRadius * 2), cornerWidth: 20, cornerHeight: 20, transform: nil)
        path.addPath(rectanglePath)
    }
    transition.setShapeLayerPath(layer: self.cutoutLayer, path: path)
    self.previewLayer.bounds = CGRect(origin: .zero, size: availableSize)
    self.previewLayer.position = CGPoint(x: availableSize.width / 2.0, y: availableSize.height / 2.0 - 200)
    transition.setTransform(layer: self.previewLayer, transform: CATransform3DMakeScale(previewScale, previewScale, 1.0))
    let frameViewSize = CGSize(width: 330.0, height: 330.0)
    let frameViewFrame = CGRect(x: (availableSize.width - frameViewSize.width) / 2.0, y: center.y - frameViewSize.height * 0.5, width: frameViewSize.width, height: frameViewSize.height)
    self.frameView.frame = frameViewFrame
    self.frameView.update(size: frameViewFrame.size)
    // Drive the frame view's visual state and pick the matching instruction.
    //TODO:localize
    var instructionString = "Position your face\nwithin the frame"
    switch self.processState {
    case .waitingForFace, .positioning:
        self.frameView.update(state: .viewFinder, transition: .spring(duration: 0.3))
        instructionString = "Position your face\nwithin the frame"
    case .readyToStart:
        self.frameView.update(state: .segments(Set()), transition: .spring(duration: 0.3))
        instructionString = "Move your head slowly to\ncomplete the circle"
    case .tracking:
        self.frameView.update(state: .segments(self.completedAngles), transition: .spring(duration: 0.3))
        instructionString = "Move your head slowly to\ncomplete the circle"
    case .completed:
        self.frameView.update(state: .success, transition: .spring(duration: 0.3))
        instructionString = ""
    }
    let instructionSize = self.instruction.update(
        transition: .immediate,
        component: AnyComponent(
            MultilineTextComponent(
                text: .plain(NSAttributedString(string: instructionString, font: Font.semibold(20.0), textColor: .white)),
                horizontalAlignment: .center,
                maximumNumberOfLines: 3,
                lineSpacing: 0.1
            )
        ),
        environment: {},
        containerSize: availableSize
    )
    let instructionFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - instructionSize.width) / 2.0), y: 484.0), size: instructionSize)
    if let instructionView = self.instruction.view {
        if instructionView.superview == nil {
            self.addSubview(instructionView)
        }
        instructionView.frame = instructionFrame
    }
    // Cancel button dismisses the hosting FaceScanScreen controller.
    let cancelSize = self.cancel.update(
        transition: .immediate,
        component: AnyComponent(
            PlainButtonComponent(
                content: AnyComponent(
                    MultilineTextComponent(
                        text: .plain(NSAttributedString(string: strings.Common_Cancel, font: Font.regular(17.0), textColor: theme.list.itemAccentColor))
                    )
                ),
                action: { [weak self] in
                    guard let self, let environment = self.environment, let controller = environment.controller() as? FaceScanScreen else {
                        return
                    }
                    controller.dismiss(animated: true)
                },
                animateScale: false
            )
        ),
        environment: {},
        containerSize: availableSize
    )
    let cancelFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - cancelSize.width) / 2.0), y: availableSize.height - cancelSize.height - environment.safeInsets.bottom - 22.0), size: cancelSize)
    if let cancelView = self.cancel.view {
        if cancelView.superview == nil {
            self.addSubview(cancelView)
        }
        cancelView.frame = cancelFrame
        // The cancel button fades out once the scan has completed.
        transition.setAlpha(view: cancelView, alpha: self.processState == .completed ? 0.0 : 1.0)
    }
    return availableSize
}
}
/// Creates the component's backing view (ComponentFlow requirement).
func makeView() -> View {
    return View()
}
/// Forwards the component update to the view (standard ComponentFlow plumbing).
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<EnvironmentType>, transition: ComponentTransition) -> CGSize {
    return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}
/// Full-screen modal that hosts `FaceScanScreenComponent` for age
/// verification and reports the result through `completion`.
public final class FaceScanScreen: ViewControllerComponentContainer {
    private let context: AccountContext
    // Invoked by the component with `true` when the estimated age satisfies
    // the required threshold.
    fileprivate let completion: (Bool) -> Void

    public init(
        context: AccountContext,
        availability: Signal<AgeVerificationAvailability, NoError>,
        completion: @escaping (Bool) -> Void
    ) {
        self.context = context
        self.completion = completion

        super.init(context: context, component: FaceScanScreenComponent(
            context: context,
            availability: availability
        ), navigationBarAppearance: .none, theme: .default, updatedPresentationData: nil)

        self.title = ""
        self.statusBar.statusBarStyle = .White
        self.navigationPresentation = .standaloneFlatModal
        self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
    }

    required public init(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    public override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        self.animateIn()
    }

    /// Slides the screen up from the bottom edge, temporarily rounding the
    /// corners to match the device's screen corner radius.
    private func animateIn() {
        guard let layout = self.validLayout else {
            return
        }
        self.view.clipsToBounds = true
        self.view.layer.cornerRadius = layout.deviceMetrics.screenCornerRadius
        self.view.layer.animatePosition(from: CGPoint(x: self.view.layer.position.x, y: self.view.layer.position.y + self.view.layer.bounds.size.height), to: self.view.layer.position, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
            self.view.clipsToBounds = false
        })
    }

    /// Slides the screen down off-screen. The completion is ALWAYS invoked —
    /// even when no layout is available and the animation must be skipped —
    /// so `dismiss(animated:)` can never be left hanging.
    private func animateOut(completion: (() -> Void)? = nil) {
        guard let layout = self.validLayout else {
            // Nothing to animate without a layout; still report completion so
            // the pending super.dismiss(...) is executed.
            completion?()
            return
        }
        self.view.clipsToBounds = true
        self.view.layer.cornerRadius = layout.deviceMetrics.screenCornerRadius
        self.view.layer.animatePosition(from: self.view.layer.position, to: CGPoint(x: self.view.layer.position.x, y: self.view.layer.position.y + self.view.layer.bounds.size.height), duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { _ in
            completion?()
        })
    }

    public override func dismiss(animated flag: Bool, completion: (() -> Void)? = nil) {
        if flag {
            // Run the slide-down animation first, then dismiss without a
            // second system animation.
            self.animateOut(completion: {
                super.dismiss(animated: false, completion: completion)
            })
        } else {
            super.dismiss(animated: flag, completion: completion)
        }
    }
}
extension FaceScanScreenComponent.View {
    /// Spawns short-lived curved guide lines that trail the user's head
    /// rotation, rate-limited by `guideLineUpdateInterval`.
    private func updateTrailingGuideLines(yaw: Double, pitch: Double) {
        let now = Date()
        guard now.timeIntervalSince(lastGuideLineUpdate) >= guideLineUpdateInterval else {
            return
        }
        self.lastGuideLineUpdate = now

        let radius: CGFloat = 128.0
        let center = CGPoint(x: self.bounds.width / 2, y: self.frameView.center.y)

        // Clamp rotation to [-1, 1] at maxRotation of deflection, then
        // overdrive by 1.5x for a more pronounced curvature.
        let maxRotation: Double = 0.5
        let normalizedYaw = min(max(yaw / maxRotation, -1.0), 1.0) * 1.5
        let normalizedPitch = min(max(pitch / maxRotation, -1.0), 1.0) * 1.5

        let yawChange = abs(yaw - lastGuideLineYaw)
        let pitchChange = abs(pitch - lastGuideLinePitch)
        let rotationMagnitude = sqrt(yaw * yaw + pitch * pitch)

        // Only emit new lines when the head has rotated noticeably since the
        // previous emission, to avoid flooding the layer tree.
        if rotationMagnitude > 0.01 && (yawChange > angleThreshold || pitchChange > angleThreshold) {
            if abs(pitch) > 0.01 {
                createHorizontalGuideLine(center: center, radius: radius, curvature: normalizedPitch)
            }
            if abs(yaw) > 0.01 {
                createVerticalGuideLine(center: center, radius: radius, curvature: normalizedYaw)
            }
            lastGuideLineYaw = yaw
            lastGuideLinePitch = pitch
        }

        cleanupOldGuideLines()
    }

    /// Builds a shape layer with the shared glowing guide-line appearance for
    /// the given path. Shared by the horizontal and vertical variants, which
    /// previously duplicated this configuration verbatim.
    private func makeGuideLineLayer(path: UIBezierPath) -> CAShapeLayer {
        let lineLayer = CAShapeLayer()
        lineLayer.path = path.cgPath
        lineLayer.strokeColor = UIColor(rgb: 0xb4f5ff).cgColor
        lineLayer.lineWidth = 3.0
        lineLayer.fillColor = UIColor.clear.cgColor
        lineLayer.lineCap = .round
        lineLayer.opacity = 0.3
        lineLayer.shadowColor = UIColor(rgb: 0xb4f5ff).cgColor
        lineLayer.shadowRadius = 2
        lineLayer.shadowOpacity = 0.5
        lineLayer.shadowOffset = .zero
        return lineLayer
    }

    /// Adds a horizontal guide line bowed by the pitch-derived `curvature`,
    /// fades it out and schedules its removal.
    private func createHorizontalGuideLine(center: CGPoint, radius: CGFloat, curvature: Double) {
        let lineLayer = self.makeGuideLineLayer(path: self.createCurvedHorizontalPath(center: center, radius: radius, curvature: curvature))
        self.layer.addSublayer(lineLayer)
        horizontalGuideLines.append(lineLayer)

        animateLinefadeOut(lineLayer)

        // Remove the layer and its bookkeeping entry once the fade is over.
        DispatchQueue.main.asyncAfter(deadline: .now() + guideLineFadeDuration + 0.1) { [weak self] in
            if let index = self?.horizontalGuideLines.firstIndex(of: lineLayer) {
                self?.horizontalGuideLines.remove(at: index)
            }
            lineLayer.removeFromSuperlayer()
        }
    }

    /// Adds a vertical guide line bowed by the yaw-derived `curvature`,
    /// fades it out and schedules its removal.
    private func createVerticalGuideLine(center: CGPoint, radius: CGFloat, curvature: Double) {
        let lineLayer = self.makeGuideLineLayer(path: self.createCurvedVerticalPath(center: center, radius: radius, curvature: curvature))
        self.layer.addSublayer(lineLayer)
        verticalGuideLines.append(lineLayer)

        animateLinefadeOut(lineLayer)

        DispatchQueue.main.asyncAfter(deadline: .now() + guideLineFadeDuration + 0.1) { [weak self] in
            if let index = self?.verticalGuideLines.firstIndex(of: lineLayer) {
                self?.verticalGuideLines.remove(at: index)
            }
            lineLayer.removeFromSuperlayer()
        }
    }

    /// A horizontal chord through `center`, bowed vertically by `curvature`;
    /// a straight line when |curvature| < 0.05.
    private func createCurvedHorizontalPath(center: CGPoint, radius: CGFloat, curvature: Double) -> UIBezierPath {
        let path = UIBezierPath()
        let startPoint = CGPoint(x: center.x - radius, y: center.y)
        let endPoint = CGPoint(x: center.x + radius, y: center.y)
        if abs(curvature) < 0.05 {
            path.move(to: startPoint)
            path.addLine(to: endPoint)
        } else {
            let curveOffset = CGFloat(curvature) * radius * 0.6
            let controlPoint = CGPoint(x: center.x, y: center.y + curveOffset)
            path.move(to: startPoint)
            path.addQuadCurve(to: endPoint, controlPoint: controlPoint)
        }
        return path
    }

    /// A vertical chord through `center`, bowed horizontally by `curvature`;
    /// a straight line when |curvature| < 0.05.
    private func createCurvedVerticalPath(center: CGPoint, radius: CGFloat, curvature: Double) -> UIBezierPath {
        let path = UIBezierPath()
        let startPoint = CGPoint(x: center.x, y: center.y - radius)
        let endPoint = CGPoint(x: center.x, y: center.y + radius)
        if abs(curvature) < 0.05 {
            path.move(to: startPoint)
            path.addLine(to: endPoint)
        } else {
            let curveOffset = CGFloat(curvature) * radius * 0.6
            let controlPoint = CGPoint(x: center.x + curveOffset, y: center.y)
            path.move(to: startPoint)
            path.addQuadCurve(to: endPoint, controlPoint: controlPoint)
        }
        return path
    }

    /// Fades a guide line (stroke opacity and shadow) to zero over
    /// `guideLineFadeDuration`, holding the final frame.
    /// NOTE(review): the animation fades from 0.5 while the layer's model
    /// opacity is 0.3 — confirm the initial brightness jump is intentional.
    private func animateLinefadeOut(_ layer: CAShapeLayer) {
        let opacityAnimation = CABasicAnimation(keyPath: "opacity")
        opacityAnimation.fromValue = 0.5
        opacityAnimation.toValue = 0.0
        opacityAnimation.duration = guideLineFadeDuration
        opacityAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
        opacityAnimation.fillMode = .forwards
        opacityAnimation.isRemovedOnCompletion = false

        let shadowAnimation = CABasicAnimation(keyPath: "shadowOpacity")
        shadowAnimation.fromValue = 0.5
        shadowAnimation.toValue = 0.0
        shadowAnimation.duration = guideLineFadeDuration
        shadowAnimation.timingFunction = CAMediaTimingFunction(name: .easeOut)
        shadowAnimation.fillMode = .forwards
        shadowAnimation.isRemovedOnCompletion = false

        layer.add(opacityAnimation, forKey: "fadeOut")
        layer.add(shadowAnimation, forKey: "shadowFadeOut")
    }

    /// Caps the number of live guide lines per axis at `maxGuideLines`,
    /// discarding the oldest first.
    private func cleanupOldGuideLines() {
        while horizontalGuideLines.count > maxGuideLines {
            let oldestLine = horizontalGuideLines.removeFirst()
            oldestLine.removeFromSuperlayer()
        }
        while verticalGuideLines.count > maxGuideLines {
            let oldestLine = verticalGuideLines.removeFirst()
            oldestLine.removeFromSuperlayer()
        }
    }

    /// Removes every guide line immediately (both axes).
    private func clearAllGuideLines() {
        for line in horizontalGuideLines {
            line.removeFromSuperlayer()
        }
        self.horizontalGuideLines.removeAll()

        for line in verticalGuideLines {
            line.removeFromSuperlayer()
        }
        self.verticalGuideLines.removeAll()
    }
}

View File

@ -0,0 +1,411 @@
import UIKit
import Display
import ComponentFlow
/// Composite view rendering the face-capture frame. It animates between
/// three visual representations:
/// - a view finder (corner brackets) while the user positions their face,
/// - a segmented progress ring while head rotation is tracked,
/// - a success flash when scanning completes.
/// Transitions are sequenced through `TransitionLayer`; state requests that
/// arrive mid-animation are deferred via `scheduledState`.
final class FrameView: UIView {
    enum State {
        case viewFinder
        // Associated value: indices of the completed rotation segments.
        case segments(Set<Int>)
        case success
        // NOTE(review): .failure is never rendered below (all arms break) —
        // confirm whether this case is reachable.
        case failure
    }

    private let maskLayer = CALayer()
    private let viewFinderLayer = ViewFinderLayer()
    private let transitionLayer = TransitionLayer()
    private let segmentsLayer = SegmentsLayer()

    private var currentState: State = .viewFinder
    // State requested while an animation chain was running; replayed once
    // the chain finishes (see maybeApplyScheduledState).
    private var scheduledState: State?
    private var isTransitioning = false

    override init(frame: CGRect) {
        super.init(frame: frame)

        self.backgroundColor = .clear
        //self.layer.mask = self.maskLayer

        // Only the view finder is visible initially.
        self.transitionLayer.isHidden = true
        self.segmentsLayer.isHidden = true

        self.layer.addSublayer(self.viewFinderLayer)
        self.layer.addSublayer(self.transitionLayer)
        self.layer.addSublayer(self.segmentsLayer)
    }

    required init?(coder: NSCoder) {
        preconditionFailure()
    }

    /// Animates to `state`. If a transition is already in flight the request
    /// is stored in `scheduledState` and applied when the chain completes.
    func update(state: State, transition: ComponentTransition) {
        guard !self.isTransitioning else {
            self.scheduledState = state
            return
        }
        let previousState = self.currentState
        self.currentState = state

        switch state {
        case .viewFinder:
            switch previousState {
            case .viewFinder:
                break
            case .segments:
                // segments -> transition ring -> view finder, each step
                // chained through the previous layer's completion.
                self.isTransitioning = true
                self.segmentsLayer.animateOut(transition: transition) {
                    self.segmentsLayer.isHidden = true
                    self.transitionLayer.isHidden = false
                    self.transitionLayer.animateOut(transition: transition) {
                        self.transitionLayer.isHidden = true
                        self.viewFinderLayer.isHidden = false
                        self.viewFinderLayer.animateIn(transition: transition) {
                            self.isTransitioning = false
                            self.maybeApplyScheduledState()
                        }
                    }
                }
            case .success:
                break
            case .failure:
                break
            }
        case let .segments(segments):
            switch previousState {
            case .viewFinder:
                // view finder -> transition ring -> segments.
                self.isTransitioning = true
                self.viewFinderLayer.animateOut(transition: transition) {
                    self.viewFinderLayer.isHidden = true
                    self.transitionLayer.isHidden = false
                    self.transitionLayer.animateIn(transition: transition) {
                        self.transitionLayer.isHidden = true
                        self.segmentsLayer.isHidden = false
                        self.segmentsLayer.animateIn (transition: transition) {
                            self.isTransitioning = false
                            self.maybeApplyScheduledState()
                        }
                    }
                }
            case .segments:
                // Already showing the ring; just refresh segment fill state.
                self.segmentsLayer.update(segments: segments, transition: transition)
            case .success:
                break
            case .failure:
                break
            }
        case .success:
            // Collapse the segments, then flash the transition ring in green.
            self.isTransitioning = true
            self.segmentsLayer.animateOut(transition: transition) {
                self.segmentsLayer.isHidden = true
                self.transitionLayer.isHidden = false
                self.transitionLayer.update(color: UIColor(rgb: 0x65c466))
                self.transitionLayer.animateOut(transition: transition) {
                    self.isTransitioning = false
                    self.maybeApplyScheduledState()
                }
            }
        case .failure:
            break
        }
    }

    /// Applies a state change that was requested during an animation chain.
    func maybeApplyScheduledState() {
        if !self.isTransitioning, let state = self.scheduledState {
            self.scheduledState = nil
            self.update(state: state, transition: .spring(duration: 0.3))
        }
    }

    /// Lays out the three sublayers for the given size.
    func update(size: CGSize) {
        let bounds = CGRect(origin: .zero, size: size)
        self.maskLayer.frame = bounds

        //let center = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
        //let viewFinderWidth = bounds.width - 34.0
        //let viewFinderSize = CGSize(width: viewFinderWidth, height: floor(viewFinderWidth * 1.17778))
        let viewFinderFrame = bounds.insetBy(dx: 29.0, dy: 29.0) //viewFinderSize.centered(around: center)
        self.viewFinderLayer.update(size: viewFinderFrame.size, closed: false, transition: .immediate)
        self.viewFinderLayer.frame = viewFinderFrame

        let transitionFrame = bounds.insetBy(dx: 29.0, dy: 29.0) //viewFinderSize.centered(around: center)
        self.transitionLayer.update(size: transitionFrame.size)
        self.transitionLayer.frame = transitionFrame

        // The segments ring extends slightly beyond the view finder.
        let segmentsFrame = bounds.insetBy(dx: 15.0, dy: 15.0)
        self.segmentsLayer.update(size: segmentsFrame.size)
        self.segmentsLayer.frame = segmentsFrame
    }
}
// Number of radial tick marks / arc segments forming the progress ring.
private let numberOfSegments = 64
// Stroke width shared by the view-finder brackets and the ring segments.
private let lineWidth: CGFloat = 4.0
/// Draws the four corner brackets of the face view finder as quarter-arc
/// strokes. Animating stroke start/end together with the corner radius
/// morphs the brackets between the open "corners" look and a closed circle.
final class ViewFinderLayer: SimpleLayer {
    private let viewFinderTopLeftLine = SimpleShapeLayer()
    private let viewFinderTopRightLine = SimpleShapeLayer()
    private let viewFinderBottomLeftLine = SimpleShapeLayer()
    private let viewFinderBottomRightLine = SimpleShapeLayer()

    private var viewFinderLines: [SimpleShapeLayer] {
        return [
            self.viewFinderTopLeftLine,
            self.viewFinderTopRightLine,
            self.viewFinderBottomLeftLine,
            self.viewFinderBottomRightLine
        ]
    }

    override init() {
        super.init()

        for line in self.viewFinderLines {
            line.strokeColor = UIColor.white.cgColor
            line.fillColor = UIColor.clear.cgColor
            line.lineWidth = lineWidth
            line.lineCap = .round
            self.addSublayer(line)
        }
    }

    required init?(coder: NSCoder) {
        preconditionFailure()
    }

    // Last size passed to update(size:closed:); nil until first layout.
    private var validLayout: CGSize?

    /// Morphs the brackets into a closed circle. The completion ALWAYS
    /// fires: if no layout exists yet it is invoked immediately, so
    /// FrameView's transition chain (which clears isTransitioning in this
    /// completion) can never deadlock.
    func animateOut(transition: ComponentTransition, completion: @escaping () -> Void) {
        guard let size = self.validLayout else {
            completion()
            return
        }
        self.update(size: size, closed: true, transition: transition, completion: completion)
    }

    /// Morphs the circle back into corner brackets; same completion
    /// guarantee as animateOut.
    func animateIn(transition: ComponentTransition, completion: @escaping () -> Void) {
        guard let size = self.validLayout else {
            completion()
            return
        }
        self.update(size: size, closed: false, transition: transition, completion: completion)
    }

    /// Rebuilds the four corner paths for `size`.
    /// - Parameters:
    ///   - closed: when true the corner radius grows to half the width and
    ///     the strokes cover their whole paths, forming a circle.
    ///   - completion: attached to the top-left path animation only — all
    ///     four lines animate in lockstep.
    func update(size: CGSize, closed: Bool, transition: ComponentTransition, completion: (() -> Void)? = nil) {
        self.validLayout = size

        let cornerRadius = closed ? size.width / 2.0 : 18.0
        // Visible bracket length is trimmed symmetrically via stroke
        // start/end. When closed, strokeStart/strokeEnd are pinned to 0/1,
        // so the fraction below is unused (lineLength is 0 in that case).
        let lineLength = size.width / 2.0 - cornerRadius
        let targetLineLength = 34.0
        let fraction = targetLineLength / lineLength
        let strokeFraction = (1.0 - fraction) / 2.0
        let strokeStart = closed ? 0.0 : strokeFraction
        let strokeEnd = closed ? 1.0 : 1.0 - strokeFraction

        let topLeftPath = CGMutablePath()
        topLeftPath.move(to: CGPoint(x: 0.0, y: size.height / 2.0))
        topLeftPath.addArc(center: CGPoint(x: cornerRadius, y: cornerRadius), radius: cornerRadius, startAngle: -.pi, endAngle: -.pi / 2.0, clockwise: false)
        topLeftPath.addLine(to: CGPoint(x: size.width / 2.0, y: 0.0))
        transition.setShapeLayerPath(layer: self.viewFinderTopLeftLine, path: topLeftPath, completion: { _ in
            completion?()
        })
        transition.setShapeLayerStrokeStart(layer: self.viewFinderTopLeftLine, strokeStart: strokeStart)
        transition.setShapeLayerStrokeEnd(layer: self.viewFinderTopLeftLine, strokeEnd: strokeEnd)

        let topRightPath = CGMutablePath()
        topRightPath.move(to: CGPoint(x: size.width / 2.0, y: 0.0))
        topRightPath.addArc(center: CGPoint(x: size.width - cornerRadius, y: cornerRadius), radius: cornerRadius, startAngle: -.pi / 2.0, endAngle: 0.0, clockwise: false)
        topRightPath.addLine(to: CGPoint(x: size.width, y: size.height / 2.0))
        transition.setShapeLayerPath(layer: self.viewFinderTopRightLine, path: topRightPath)
        transition.setShapeLayerStrokeStart(layer: self.viewFinderTopRightLine, strokeStart: strokeStart)
        transition.setShapeLayerStrokeEnd(layer: self.viewFinderTopRightLine, strokeEnd: strokeEnd)

        let bottomRightPath = CGMutablePath()
        bottomRightPath.move(to: CGPoint(x: size.width, y: size.height / 2.0))
        bottomRightPath.addArc(center: CGPoint(x: size.width - cornerRadius, y: size.height - cornerRadius), radius: cornerRadius, startAngle: 0.0, endAngle: .pi / 2.0, clockwise: false)
        bottomRightPath.addLine(to: CGPoint(x: size.width / 2.0, y: size.height))
        transition.setShapeLayerPath(layer: self.viewFinderBottomRightLine, path: bottomRightPath)
        transition.setShapeLayerStrokeStart(layer: self.viewFinderBottomRightLine, strokeStart: strokeStart)
        transition.setShapeLayerStrokeEnd(layer: self.viewFinderBottomRightLine, strokeEnd: strokeEnd)

        let bottomLeftPath = CGMutablePath()
        bottomLeftPath.move(to: CGPoint(x: size.width / 2.0, y: size.height))
        bottomLeftPath.addArc(center: CGPoint(x: cornerRadius, y: size.height - cornerRadius), radius: cornerRadius, startAngle: .pi / 2.0, endAngle: .pi, clockwise: false)
        bottomLeftPath.addLine(to: CGPoint(x: 0.0, y: size.height / 2.0))
        transition.setShapeLayerPath(layer: self.viewFinderBottomLeftLine, path: bottomLeftPath)
        transition.setShapeLayerStrokeStart(layer: self.viewFinderBottomLeftLine, strokeStart: strokeStart)
        transition.setShapeLayerStrokeEnd(layer: self.viewFinderBottomLeftLine, strokeEnd: strokeEnd)

        for line in self.viewFinderLines {
            line.frame = CGRect(origin: .zero, size: size)
        }
    }
}
/// An intermediate ring of (currently gapless) arc segments used to morph
/// between the view-finder brackets and the tick-mark progress ring.
final class TransitionLayer: SimpleLayer {
    private var segmentLayers: [SimpleShapeLayer] = []

    /// Collapses every arc to a near-invisible sliver around its midpoint.
    /// The completion fires when the first arc finishes (all arcs animate in
    /// lockstep) — or immediately if no layers have been created yet, so
    /// FrameView's transition chain can never stall.
    func animateIn(transition: ComponentTransition, completion: @escaping () -> Void) {
        guard !self.segmentLayers.isEmpty else {
            completion()
            return
        }
        for (i, layer) in self.segmentLayers.enumerated() {
            transition.setShapeLayerStrokeStart(layer: layer, strokeStart: 0.499)
            transition.setShapeLayerStrokeEnd(layer: layer, strokeEnd: 0.501, completion: i == 0 ? { _ in completion() } : nil)
        }
    }

    /// Expands every arc back to its full length; same completion guarantee
    /// as animateIn.
    func animateOut(transition: ComponentTransition, completion: @escaping () -> Void) {
        guard !self.segmentLayers.isEmpty else {
            completion()
            return
        }
        for (i, layer) in self.segmentLayers.enumerated() {
            transition.setShapeLayerStrokeStart(layer: layer, strokeStart: 0.0)
            transition.setShapeLayerStrokeEnd(layer: layer, strokeEnd: 1.0, completion: i == 0 ? { _ in completion() } : nil)
        }
    }

    /// Lazily builds the ring of arc segments; no-op after the first call.
    func setupIfNeeded(size: CGSize) {
        guard self.segmentLayers.isEmpty else {
            return
        }
        let center = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
        let radius: CGFloat = size.width / 2.0
        // Gap is currently zero, so the arcs form a continuous circle; the
        // math is kept general in case a visible gap is wanted later.
        let gapInDegrees: CGFloat = 0.0
        let gapInRadians: CGFloat = gapInDegrees * .pi / 180.0
        let totalGapAngle = CGFloat(numberOfSegments) * gapInRadians
        let totalSegmentAngle = 2 * .pi - totalGapAngle
        let segmentAngle = totalSegmentAngle / CGFloat(numberOfSegments)

        for i in 0 ..< numberOfSegments {
            // Start at 12 o'clock (-pi/2), centered on each tick position.
            let startAngle = -segmentAngle * 0.5 + (CGFloat(i) * (segmentAngle + gapInRadians)) - .pi / 2
            let endAngle = startAngle + segmentAngle
            let path = UIBezierPath(arcCenter: center,
                                    radius: radius,
                                    startAngle: startAngle,
                                    endAngle: endAngle,
                                    clockwise: true)
            let stripeLayer = SimpleShapeLayer()
            stripeLayer.path = path.cgPath
            stripeLayer.strokeColor = UIColor(rgb: 0xaaaaaa).cgColor
            stripeLayer.lineWidth = lineWidth
            stripeLayer.fillColor = UIColor.clear.cgColor
            stripeLayer.lineCap = .round
            self.addSublayer(stripeLayer)
            self.segmentLayers.append(stripeLayer)
        }
    }

    /// Recolors all arcs (used for the green success flash).
    func update(color: UIColor) {
        for layer in self.segmentLayers {
            layer.strokeColor = color.cgColor
        }
    }

    func update(size: CGSize) {
        self.setupIfNeeded(size: size)
    }
}
/// The circular progress ring made of 64 short radial tick marks. Ticks
/// grow outward and turn green as head-rotation segments are completed.
final class SegmentsLayer: SimpleLayer {
    private var segmentLayers: [SimpleShapeLayer] = []

    /// Grows every tick to its resting (partial) length. The completion is
    /// attached to the first tick (all animate in lockstep) and fires
    /// immediately when no layers exist yet, so FrameView's transition chain
    /// can never stall.
    func animateIn(transition: ComponentTransition, completion: @escaping () -> Void) {
        guard !self.segmentLayers.isEmpty else {
            completion()
            return
        }
        for (i, layer) in self.segmentLayers.enumerated() {
            transition.setShapeLayerStrokeStart(layer: layer, strokeStart: 0.0)
            transition.setShapeLayerStrokeEnd(layer: layer, strokeEnd: 0.32, completion: i == 0 ? { _ in completion() } : nil)
        }
    }

    /// Shrinks every tick to a near-zero length; same completion guarantee
    /// as animateIn.
    func animateOut(transition: ComponentTransition, completion: @escaping () -> Void) {
        guard !self.segmentLayers.isEmpty else {
            completion()
            return
        }
        for (i, layer) in self.segmentLayers.enumerated() {
            transition.setShapeLayerStrokeStart(layer: layer, strokeStart: 0.0)
            transition.setShapeLayerStrokeEnd(layer: layer, strokeEnd: 0.001, completion: i == 0 ? { _ in completion() } : nil)
        }
    }

    /// Lazily builds the radial tick marks; no-op after the first call.
    func setupIfNeeded(size: CGSize) {
        guard self.segmentLayers.isEmpty else {
            return
        }
        let center = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
        let innerRadius: CGFloat = size.width / 2.0 - 13.0
        let outerRadius: CGFloat = size.width / 2.0 + 13.0
        let gapInDegrees: CGFloat = 2.0
        let gapInRadians: CGFloat = gapInDegrees * .pi / 180.0
        let totalGapAngle = CGFloat(numberOfSegments) * gapInRadians
        let totalSegmentAngle = 2 * .pi - totalGapAngle
        let segmentAngle = totalSegmentAngle / CGFloat(numberOfSegments)

        for i in 0 ..< numberOfSegments {
            // Ticks start at 12 o'clock (-pi/2) and run clockwise.
            let angle = (CGFloat(i) * (segmentAngle + gapInRadians)) - .pi / 2
            let startPoint = CGPoint(
                x: center.x + innerRadius * cos(angle),
                y: center.y + innerRadius * sin(angle)
            )
            let endPoint = CGPoint(
                x: center.x + outerRadius * cos(angle),
                y: center.y + outerRadius * sin(angle)
            )
            let path = UIBezierPath()
            path.move(to: startPoint)
            path.addLine(to: endPoint)

            let stripeLayer = SimpleShapeLayer()
            stripeLayer.path = path.cgPath
            stripeLayer.strokeColor = UIColor.white.cgColor
            stripeLayer.lineWidth = lineWidth
            stripeLayer.fillColor = UIColor.clear.cgColor
            stripeLayer.lineCap = .round
            // Ticks start collapsed; animateIn grows them to 0.32.
            stripeLayer.strokeStart = 0.0
            stripeLayer.strokeEnd = 0.001
            self.addSublayer(stripeLayer)
            self.segmentLayers.append(stripeLayer)
        }
    }

    /// Highlights the ticks belonging to completed segments.
    /// - Parameter segments: logical segment indices; each expands to 4
    ///   consecutive ticks (64 ticks / 16 segments).
    func update(segments: Set<Int>, transition: ComponentTransition) {
        var mappedSegments = Set<Int>()
        for value in segments {
            for i in 0 ..< 4 {
                mappedSegments.insert(value * 4 + i)
            }
        }
        // Iterate over the layers that actually exist so a call before
        // setupIfNeeded cannot crash with an out-of-range index.
        for (i, stripeLayer) in self.segmentLayers.enumerated() {
            if mappedSegments.contains(i) {
                transition.setShapeLayerStrokeEnd(layer: stripeLayer, strokeEnd: 1.0)
                transition.setShapeLayerStrokeColor(layer: stripeLayer, color: UIColor(rgb: 0x00ca48))
            } else {
                transition.setShapeLayerStrokeEnd(layer: stripeLayer, strokeEnd: 0.32)
                transition.setShapeLayerStrokeColor(layer: stripeLayer, color: UIColor(rgb: 0xaaaaaa))
            }
        }
    }

    func update(size: CGSize) {
        self.setupIfNeeded(size: size)
    }
}

View File

@ -122,9 +122,10 @@ import PeerNameColorScreen
import ChatEmptyNode
import ChatMediaInputStickerGridItem
import AdsInfoScreen
import FaceScanScreen
extension ChatControllerImpl {
func openPeer(peer: EnginePeer?, navigation: ChatControllerInteractionNavigateToPeer, fromMessage: MessageReference?, fromReactionMessageId: MessageId? = nil, expandAvatar: Bool = false, peerTypes: ReplyMarkupButtonAction.PeerTypes? = nil) {
func openPeer(peer: EnginePeer?, navigation: ChatControllerInteractionNavigateToPeer, fromMessage: MessageReference?, fromReactionMessageId: MessageId? = nil, expandAvatar: Bool = false, peerTypes: ReplyMarkupButtonAction.PeerTypes? = nil, skipAgeVerification: Bool = false) {
let _ = self.presentVoiceMessageDiscardAlert(action: {
if case let .peer(currentPeerId) = self.chatLocation, peer?.id == currentPeerId {
switch navigation {
@ -223,11 +224,23 @@ extension ChatControllerImpl {
}
})
} else {
if case let .channel(channel) = peer, channel.isForumOrMonoForum {
self.effectiveNavigationController?.pushViewController(ChatListControllerImpl(context: self.context, location: .forum(peerId: channel.id), controlsHistoryPreload: false, enableDebugActions: false))
} else {
self.effectiveNavigationController?.pushViewController(ChatControllerImpl(context: self.context, chatLocation: .peer(id: peer.id), subject: subject))
}
let _ = (requireAgeVerification(context: self.context, peer: peer)
|> deliverOnMainQueue).start(next: { [weak self] require in
guard let self else {
return
}
if require && !skipAgeVerification {
presentAgeVerification(context: self.context, parentController: self, completion: {
self.openPeer(peer: peer, navigation: navigation, fromMessage: fromMessage, fromReactionMessageId: fromReactionMessageId, expandAvatar: expandAvatar, peerTypes: peerTypes)
})
} else {
if case let .channel(channel) = peer, channel.isForumOrMonoForum {
self.effectiveNavigationController?.pushViewController(ChatListControllerImpl(context: self.context, location: .forum(peerId: channel.id), controlsHistoryPreload: false, enableDebugActions: false))
} else {
self.effectiveNavigationController?.pushViewController(ChatControllerImpl(context: self.context, chatLocation: .peer(id: peer.id), subject: subject))
}
}
})
}
case let .withBotStartPayload(botStart):
self.effectiveNavigationController?.pushViewController(ChatControllerImpl(context: self.context, chatLocation: .peer(id: peer.id), botStart: botStart))

View File

@ -259,7 +259,7 @@ private final class ChatAgeRestrictionAlertContentNode: AlertContentNode {
}
}
public func chatAgeRestrictionAlertController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, completion: @escaping (Bool) -> Void) -> AlertController {
public func chatAgeRestrictionAlertController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, parentController: ViewController, completion: @escaping (Bool) -> Void) -> AlertController {
let theme = defaultDarkColorPresentationTheme
let presentationData: PresentationData
if let updatedPresentationData {
@ -268,7 +268,7 @@ public func chatAgeRestrictionAlertController(context: AccountContext, updatedPr
presentationData = context.sharedContext.currentPresentationData.with { $0 }
}
let strings = presentationData.strings
var dismissImpl: ((Bool) -> Void)?
var getContentNodeImpl: (() -> ChatAgeRestrictionAlertContentNode?)?
let actions: [TextAlertAction] = [TextAlertAction(type: .defaultAction, title: strings.SensitiveContent_ViewAnyway, action: {

View File

@ -140,6 +140,7 @@ import PromptUI
import SuggestedPostApproveAlert
import AVFoundation
import BalanceNeededScreen
import FaceScanScreen
public final class ChatControllerOverlayPresentationData {
public let expandData: (ASDisplayNode?, () -> Void)
@ -4770,10 +4771,20 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
guard let self else {
return
}
let controller = chatAgeRestrictionAlertController(context: self.context, updatedPresentationData: self.updatedPresentationData, completion: { _ in
reveal()
})
self.present(controller, in: .window(.root))
if requireAgeVerification(context: context) {
presentAgeVerification(context: context, parentController: self, completion: {
let _ = updateRemoteContentSettingsConfiguration(postbox: context.account.postbox, network: context.account.network, sensitiveContentEnabled: true).start()
reveal()
})
} else {
let controller = chatAgeRestrictionAlertController(context: self.context, updatedPresentationData: self.updatedPresentationData, parentController: self, completion: { alwaysShow in
if alwaysShow {
let _ = updateRemoteContentSettingsConfiguration(postbox: context.account.postbox, network: context.account.network, sensitiveContentEnabled: true).start()
}
reveal()
})
self.present(controller, in: .window(.root))
}
}, playMessageEffect: { [weak self] message in
guard let self else {
return

View File

@ -20,12 +20,18 @@ import ChatControllerInteraction
import SavedMessagesScreen
import WallpaperGalleryScreen
import ChatMessageNotificationItem
import FaceScanScreen
public func navigateToChatControllerImpl(_ params: NavigateToChatControllerParams) {
if case let .peer(peer) = params.chatLocation {
let _ = params.context.engine.peers.ensurePeerIsLocallyAvailable(peer: peer).startStandalone()
}
var requiresAgeVerification: Signal<Bool, NoError> = .single(false)
if !params.skipAgeVerification, case let .peer(peer) = params.chatLocation {
requiresAgeVerification = requireAgeVerification(context: params.context, peer: peer)
}
var viewForumAsMessages: Signal<Bool, NoError> = .single(false)
if case let .peer(peer) = params.chatLocation, case let .channel(channel) = peer, channel.flags.contains(.isMonoforum) {
if let linkedMonoforumId = channel.linkedMonoforumId {
@ -67,9 +73,18 @@ public func navigateToChatControllerImpl(_ params: NavigateToChatControllerParam
}
}
let _ = (viewForumAsMessages
|> take(1)
|> deliverOnMainQueue).start(next: { viewForumAsMessages in
let _ = combineLatest(
queue: Queue.mainQueue(),
viewForumAsMessages |> take(1),
requiresAgeVerification
).start(next: { viewForumAsMessages, requiresAgeVerification in
if requiresAgeVerification, let parentController = params.navigationController.viewControllers.last as? ViewController {
presentAgeVerification(context: params.context, parentController: parentController, completion: {
navigateToChatControllerImpl(params.withSkipAgeVerification(true))
})
return
}
if case let .peer(peer) = params.chatLocation, case let .channel(channel) = peer, channel.flags.contains(.isForum), !viewForumAsMessages {
for controller in params.navigationController.viewControllers.reversed() {
var chatListController: ChatListControllerImpl?
@ -118,9 +133,7 @@ public func navigateToChatControllerImpl(_ params: NavigateToChatControllerParam
controller.activateSearch(query: activateMessageSearch.1)
}
if let chatListCompletion {
chatListCompletion(controller)
}
chatListCompletion(controller)
})
return

View File

@ -8,6 +8,7 @@ private enum ApplicationSpecificPreferencesKeyValues: Int32 {
case chatListFilterSettings = 18
case widgetSettings = 19
case mediaAutoSaveSettings = 20
case ageVerificationState = 21
}
public struct ApplicationSpecificPreferencesKeys {
@ -16,6 +17,7 @@ public struct ApplicationSpecificPreferencesKeys {
public static let chatListFilterSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.chatListFilterSettings.rawValue)
public static let widgetSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.widgetSettings.rawValue)
public static let mediaAutoSaveSettings = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.mediaAutoSaveSettings.rawValue)
public static let ageVerificationState = applicationSpecificPreferencesKey(ApplicationSpecificPreferencesKeyValues.ageVerificationState.rawValue)
}
private enum ApplicationSpecificSharedDataKeyValues: Int32 {