Merge branch 'master' into beta

# Conflicts:
#	Telegram/Telegram-iOS/en.lproj/Localizable.strings
#	submodules/ChatListUI/Sources/Node/ChatListNodeEntries.swift
#	submodules/ChatListUI/Sources/Node/ChatListStorageInfoItem.swift
#	submodules/TelegramCore/Sources/TelegramEngine/Peers/UpdateBotInfo.swift
#	submodules/TelegramUI/Sources/ChatMessageInteractiveFileNode.swift
#	submodules/TelegramUI/Sources/FetchVideoMediaResource.swift
#	submodules/TelegramUI/Sources/PeerInfo/PeerInfoHeaderNode.swift
#	versions.json
This commit is contained in:
Ali 2023-07-16 20:17:08 +04:00
commit 4a0399a455
1364 changed files with 116326 additions and 44446 deletions

View File

@ -162,6 +162,9 @@ verify_beta_testflight:
submit_appstore:
tags:
- deploy
only:
- beta
- hotfix
stage: submit
needs: []
when: manual

View File

@ -2013,9 +2013,9 @@ xcodeproj(
"Debug": {
"//command_line_option:compilation_mode": "dbg",
},
"Release": {
"//command_line_option:compilation_mode": "opt",
},
#"Release": {
# "//command_line_option:compilation_mode": "opt",
#},
},
default_xcode_configuration = "Debug"

View File

@ -301,15 +301,43 @@ private func testAvatarImage(size: CGSize) -> UIImage? {
return image
}
private func avatarRoundImage(size: CGSize, source: UIImage) -> UIImage? {
private func avatarRoundImage(size: CGSize, source: UIImage, isStory: Bool) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
let context = UIGraphicsGetCurrentContext()
context?.beginPath()
context?.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
context?.clip()
source.draw(in: CGRect(origin: CGPoint(), size: size))
if isStory {
let lineWidth: CGFloat = 2.0
context?.beginPath()
context?.addEllipse(in: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: size.width, height: size.height)).insetBy(dx: lineWidth * 0.5, dy: lineWidth * 0.5))
context?.clip()
let colors: [CGColor] = [
UIColor(rgb: 0x34C76F).cgColor,
UIColor(rgb: 0x3DA1FD).cgColor
]
var locations: [CGFloat] = [0.0, 1.0]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colors as CFArray, locations: &locations)!
context?.drawLinearGradient(gradient, start: CGPoint(x: 0.0, y: 0.0), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
context?.setBlendMode(.copy)
context?.fillEllipse(in: CGRect(origin: CGPoint(), size: size).insetBy(dx: 2.0, dy: 2.0))
context?.setBlendMode(.normal)
context?.beginPath()
context?.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height).insetBy(dx: 4.0, dy: 4.0))
context?.clip()
source.draw(in: CGRect(origin: CGPoint(), size: size).insetBy(dx: 4.0, dy: 4.0))
} else {
context?.beginPath()
context?.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
context?.clip()
source.draw(in: CGRect(origin: CGPoint(), size: size))
}
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
@ -332,12 +360,16 @@ private let gradientColors: [NSArray] = [
[UIColor(rgb: 0xd669ed).cgColor, UIColor(rgb: 0xe0a2f3).cgColor],
]
private func avatarViewLettersImage(size: CGSize, peerId: PeerId, letters: [String]) -> UIImage? {
private func avatarViewLettersImage(size: CGSize, peerId: PeerId, letters: [String], isStory: Bool) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(size, false, 2.0)
let context = UIGraphicsGetCurrentContext()
context?.beginPath()
context?.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
if isStory {
context?.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height).insetBy(dx: 4.0, dy: 4.0))
} else {
context?.addEllipse(in: CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height))
}
context?.clip()
let colorIndex: Int
@ -373,17 +405,38 @@ private func avatarViewLettersImage(size: CGSize, peerId: PeerId, letters: [Stri
CTLineDraw(line, context)
}
context?.translateBy(x: -lineOrigin.x, y: -lineOrigin.y)
if isStory {
context?.resetClip()
let lineWidth: CGFloat = 2.0
context?.setLineWidth(lineWidth)
context?.addEllipse(in: CGRect(origin: CGPoint(x: size.width * 0.5, y: size.height * 0.5), size: CGSize(width: size.width, height: size.height)).insetBy(dx: lineWidth * 0.5, dy: lineWidth * 0.5))
context?.replacePathWithStrokedPath()
context?.clip()
let colors: [CGColor] = [
UIColor(rgb: 0x34C76F).cgColor,
UIColor(rgb: 0x3DA1FD).cgColor
]
var locations: [CGFloat] = [0.0, 1.0]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colors as CFArray, locations: &locations)!
context?.drawLinearGradient(gradient, start: CGPoint(x: 0.0, y: 0.0), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
}
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return image
}
private func avatarImage(path: String?, peerId: PeerId, letters: [String], size: CGSize) -> UIImage {
if let path = path, let image = UIImage(contentsOfFile: path), let roundImage = avatarRoundImage(size: size, source: image) {
private func avatarImage(path: String?, peerId: PeerId, letters: [String], size: CGSize, isStory: Bool) -> UIImage {
if let path = path, let image = UIImage(contentsOfFile: path), let roundImage = avatarRoundImage(size: size, source: image, isStory: isStory) {
return roundImage
} else {
return avatarViewLettersImage(size: size, peerId: peerId, letters: letters)!
return avatarViewLettersImage(size: size, peerId: peerId, letters: letters, isStory: isStory)!
}
}
@ -402,14 +455,15 @@ private func storeTemporaryImage(path: String) -> String {
}
@available(iOS 15.0, *)
private func peerAvatar(mediaBox: MediaBox, accountPeerId: PeerId, peer: Peer) -> INImage? {
private func peerAvatar(mediaBox: MediaBox, accountPeerId: PeerId, peer: Peer, isStory: Bool) -> INImage? {
if let resource = smallestImageRepresentation(peer.profileImageRepresentations)?.resource, let path = mediaBox.completedResourcePath(resource) {
let cachedPath = mediaBox.cachedRepresentationPathForId(resource.id.stringRepresentation, representationId: "intents.png", keepDuration: .shortLived)
if let _ = fileSize(cachedPath) {
let cachedPath = mediaBox.cachedRepresentationPathForId(resource.id.stringRepresentation, representationId: "intents\(isStory ? "-story2" : "").png", keepDuration: .shortLived)
if let _ = fileSize(cachedPath), !"".isEmpty {
return INImage(url: URL(fileURLWithPath: storeTemporaryImage(path: cachedPath)))
} else {
let image = avatarImage(path: path, peerId: peer.id, letters: peer.displayLetters, size: CGSize(width: 50.0, height: 50.0))
let image = avatarImage(path: path, peerId: peer.id, letters: peer.displayLetters, size: CGSize(width: 50.0, height: 50.0), isStory: isStory)
if let data = image.pngData() {
let _ = try? FileManager.default.removeItem(atPath: cachedPath)
let _ = try? data.write(to: URL(fileURLWithPath: cachedPath), options: .atomic)
}
@ -417,11 +471,11 @@ private func peerAvatar(mediaBox: MediaBox, accountPeerId: PeerId, peer: Peer) -
}
}
let cachedPath = mediaBox.cachedRepresentationPathForId("lettersAvatar2-\(peer.displayLetters.joined(separator: ","))", representationId: "intents.png", keepDuration: .shortLived)
let cachedPath = mediaBox.cachedRepresentationPathForId("lettersAvatar2-\(peer.displayLetters.joined(separator: ","))\(isStory ? "-story" : "")", representationId: "intents.png", keepDuration: .shortLived)
if let _ = fileSize(cachedPath) {
return INImage(url: URL(fileURLWithPath: storeTemporaryImage(path: cachedPath)))
} else {
let image = avatarImage(path: nil, peerId: peer.id, letters: peer.displayLetters, size: CGSize(width: 50.0, height: 50.0))
let image = avatarImage(path: nil, peerId: peer.id, letters: peer.displayLetters, size: CGSize(width: 50.0, height: 50.0), isStory: isStory)
if let data = image.pngData() {
let _ = try? data.write(to: URL(fileURLWithPath: cachedPath), options: .atomic)
}
@ -468,9 +522,9 @@ private struct NotificationContent: CustomStringConvertible {
return string
}
mutating func addSenderInfo(mediaBox: MediaBox, accountPeerId: PeerId, peer: Peer, topicTitle: String?, contactIdentifier: String?) {
mutating func addSenderInfo(mediaBox: MediaBox, accountPeerId: PeerId, peer: Peer, topicTitle: String?, contactIdentifier: String?, isStory: Bool) {
if #available(iOS 15.0, *) {
let image = peerAvatar(mediaBox: mediaBox, accountPeerId: accountPeerId, peer: peer)
let image = peerAvatar(mediaBox: mediaBox, accountPeerId: accountPeerId, peer: peer, isStory: isStory)
self.senderImage = image
@ -847,6 +901,7 @@ private final class NotificationServiceHandler {
var peerId: PeerId?
var messageId: MessageId.Id?
var storyId: Int32?
var mediaAttachment: Media?
var downloadNotificationSound: (file: TelegramMediaFile, path: String, fileName: String)?
@ -868,6 +923,9 @@ private final class NotificationServiceHandler {
if let messageIdString = payloadJson["msg_id"] as? String {
messageId = Int32(messageIdString)
}
if let storyIdString = payloadJson["story_id"] as? String {
storyId = Int32(storyIdString)
}
if let fromIdString = payloadJson["from_id"] as? String {
if let userIdValue = Int64(fromIdString) {
@ -917,7 +975,9 @@ private final class NotificationServiceHandler {
enum Action {
case logout
case poll(peerId: PeerId, content: NotificationContent, messageId: MessageId?)
case pollStories(peerId: PeerId, content: NotificationContent, storyId: Int32)
case deleteMessage([MessageId])
case readReactions([MessageId])
case readMessage(MessageId)
case call(CallData)
}
@ -948,6 +1008,20 @@ private final class NotificationServiceHandler {
action = .deleteMessage(messagesDeleted)
}
}
case "READ_REACTION":
if let peerId {
if let messageId = messageId {
action = .readReactions([MessageId(peerId: peerId, namespace: Namespaces.Message.Cloud, id: messageId)])
} else if let messageIds = payloadJson["messages"] as? String {
var messages: [MessageId] = []
for messageId in messageIds.split(separator: ",") {
if let messageIdValue = Int32(messageId) {
messages.append(MessageId(peerId: peerId, namespace: Namespaces.Message.Cloud, id: messageIdValue))
}
}
action = .readReactions(messages)
}
}
case "READ_HISTORY":
if let peerId = peerId {
if let messageIdString = payloadJson["max_id"] as? String {
@ -989,6 +1063,10 @@ private final class NotificationServiceHandler {
messageIdValue = MessageId(peerId: peerId, namespace: Namespaces.Message.Cloud, id: messageId)
}
if let storyId = storyId {
interactionAuthorId = peerId
content.userInfo["story_id"] = "\(storyId)"
}
if peerId.namespace == Namespaces.Peer.CloudUser {
content.userInfo["from_id"] = "\(peerId.id._internalGetInt64Value())"
@ -1060,7 +1138,12 @@ private final class NotificationServiceHandler {
} else {
content.category = category
}
if aps["r"] != nil || aps["react_emoji"] != nil {
content.category = "t"
} else if payloadJson["r"] != nil || payloadJson["react_emoji"] != nil {
content.category = "t"
}
let _ = messageId
@ -1087,7 +1170,11 @@ private final class NotificationServiceHandler {
}
}*/
action = .poll(peerId: peerId, content: content, messageId: messageIdValue)
if let storyId {
action = .pollStories(peerId: peerId, content: content, storyId: storyId)
} else {
action = .poll(peerId: peerId, content: content, messageId: messageIdValue)
}
updateCurrentContent(content)
}
@ -1194,6 +1281,7 @@ private final class NotificationServiceHandler {
let collectedData = Atomic<DataValue>(value: DataValue())
return standaloneMultipartFetch(
accountPeerId: stateManager.accountPeerId,
postbox: stateManager.postbox,
network: stateManager.network,
resource: resource,
@ -1284,6 +1372,7 @@ private final class NotificationServiceHandler {
fetchNotificationSoundSignal = Signal { subscriber in
let collectedData = Atomic<Data>(value: Data())
return standaloneMultipartFetch(
accountPeerId: stateManager.accountPeerId,
postbox: stateManager.postbox,
network: stateManager.network,
resource: resource,
@ -1492,7 +1581,7 @@ private final class NotificationServiceHandler {
return true
})
content.addSenderInfo(mediaBox: stateManager.postbox.mediaBox, accountPeerId: stateManager.accountPeerId, peer: peer, topicTitle: topicTitle, contactIdentifier: foundLocalId)
content.addSenderInfo(mediaBox: stateManager.postbox.mediaBox, accountPeerId: stateManager.accountPeerId, peer: peer, topicTitle: topicTitle, contactIdentifier: foundLocalId, isStory: false)
}
}
@ -1527,6 +1616,253 @@ private final class NotificationServiceHandler {
|> map { _ -> NotificationContent in }
}
var updatedContent = initialContent
strongSelf.pollDisposable.set(pollWithUpdatedContent.start(next: { content in
updatedContent = content
}, completed: {
pollCompletion(updatedContent)
}))
} else {
completed()
}
case let .pollStories(peerId, initialContent, storyId):
Logger.shared.log("NotificationService \(episode)", "Will poll stories for \(peerId)")
if let stateManager = strongSelf.stateManager {
let pollCompletion: (NotificationContent) -> Void = { content in
let content = content
queue.async {
guard let strongSelf = self, let stateManager = strongSelf.stateManager else {
let content = NotificationContent(isLockedMessage: isLockedMessage)
updateCurrentContent(content)
completed()
return
}
var fetchStoriesSignal: Signal<Void, NoError> = .single(Void())
fetchStoriesSignal = _internal_pollPeerStories(postbox: stateManager.postbox, network: stateManager.network, accountPeerId: stateManager.accountPeerId, peerId: peerId)
|> map { _ -> Void in
}
|> then(
stateManager.postbox.transaction { transaction -> (MediaResourceReference, Int64?)? in
guard let state = transaction.getPeerStoryState(peerId: peerId)?.entry.get(Stories.PeerState.self) else {
return nil
}
let firstUnseenItem = transaction.getStoryItems(peerId: peerId).first(where: { entry in
return entry.id > state.maxReadId
})
guard let firstUnseenItem, firstUnseenItem.id == storyId else {
return nil
}
guard let peer = transaction.getPeer(peerId).flatMap(PeerReference.init) else {
return nil
}
if let storyItem = transaction.getStory(id: StoryId(peerId: peerId, id: storyId))?.get(Stories.StoredItem.self), case let .item(item) = storyItem, let media = item.media {
var resource: MediaResource?
var fetchSize: Int64?
if let image = media as? TelegramMediaImage {
resource = largestImageRepresentation(image.representations)?.resource
} else if let file = media as? TelegramMediaFile {
resource = file.resource
for attribute in file.attributes {
if case let .Video(_, _, _, preloadSize) = attribute {
fetchSize = preloadSize.flatMap(Int64.init)
}
}
}
guard let resource else {
return nil
}
return (MediaResourceReference.media(media: .story(peer: peer, id: storyId, media: media), resource: resource), fetchSize)
}
return nil
}
|> mapToSignal { resourceData -> Signal<Void, NoError> in
guard let (resource, _) = resourceData, let resourceValue = resource.resource as? TelegramMultipartFetchableResource else {
return .single(Void())
}
let intervals: Signal<[(Range<Int64>, MediaBoxFetchPriority)], NoError> = .single([(0 ..< Int64.max, MediaBoxFetchPriority.maximum)])
return Signal<Void, NoError> { subscriber in
let collectedData = Atomic<Data>(value: Data())
return standaloneMultipartFetch(
accountPeerId: stateManager.accountPeerId,
postbox: stateManager.postbox,
network: stateManager.network,
resource: resourceValue,
datacenterId: resourceValue.datacenterId,
size: nil,
intervals: intervals,
parameters: MediaResourceFetchParameters(
tag: nil,
info: resourceFetchInfo(reference: resource),
location: .init(peerId: peerId, messageId: nil),
contentType: .other,
isRandomAccessAllowed: true
),
encryptionKey: nil,
decryptedSize: nil,
continueInBackground: false,
useMainConnection: true
).start(next: { result in
switch result {
case let .dataPart(_, data, _, _):
let _ = collectedData.modify { current in
var current = current
current.append(data)
return current
}
default:
break
}
}, error: { _ in
subscriber.putNext(Void())
subscriber.putCompletion()
}, completed: {
stateManager.postbox.mediaBox.storeResourceData(resource.resource.id, data: collectedData.with({ $0 }))
subscriber.putNext(Void())
subscriber.putCompletion()
})
}
}
)
let fetchMediaSignal: Signal<Data?, NoError> = .single(nil)
var fetchNotificationSoundSignal: Signal<Data?, NoError> = .single(nil)
if let (downloadNotificationSound, _, _) = downloadNotificationSound {
var fetchResource: TelegramMultipartFetchableResource?
fetchResource = downloadNotificationSound.resource as? TelegramMultipartFetchableResource
if let resource = fetchResource {
if let path = strongSelf.stateManager?.postbox.mediaBox.completedResourcePath(resource), let data = try? Data(contentsOf: URL(fileURLWithPath: path)) {
fetchNotificationSoundSignal = .single(data)
} else {
let intervals: Signal<[(Range<Int64>, MediaBoxFetchPriority)], NoError> = .single([(0 ..< Int64.max, MediaBoxFetchPriority.maximum)])
fetchNotificationSoundSignal = Signal { subscriber in
let collectedData = Atomic<Data>(value: Data())
return standaloneMultipartFetch(
accountPeerId: stateManager.accountPeerId,
postbox: stateManager.postbox,
network: stateManager.network,
resource: resource,
datacenterId: resource.datacenterId,
size: nil,
intervals: intervals,
parameters: MediaResourceFetchParameters(
tag: nil,
info: resourceFetchInfo(resource: resource),
location: nil,
contentType: .other,
isRandomAccessAllowed: true
),
encryptionKey: nil,
decryptedSize: nil,
continueInBackground: false,
useMainConnection: true
).start(next: { result in
switch result {
case let .dataPart(_, data, _, _):
let _ = collectedData.modify { current in
var current = current
current.append(data)
return current
}
default:
break
}
}, error: { _ in
subscriber.putNext(nil)
subscriber.putCompletion()
}, completed: {
subscriber.putNext(collectedData.with({ $0 }))
subscriber.putCompletion()
})
}
}
}
}
Logger.shared.log("NotificationService \(episode)", "Will fetch media")
let _ = (combineLatest(queue: queue,
fetchMediaSignal
|> timeout(10.0, queue: queue, alternate: .single(nil)),
fetchNotificationSoundSignal
|> timeout(10.0, queue: queue, alternate: .single(nil)),
fetchStoriesSignal
|> timeout(10.0, queue: queue, alternate: .single(Void()))
)
|> deliverOn(queue)).start(next: { mediaData, notificationSoundData, _ in
guard let strongSelf = self, let _ = strongSelf.stateManager else {
completed()
return
}
Logger.shared.log("NotificationService \(episode)", "Did fetch media \(mediaData == nil ? "Non-empty" : "Empty")")
if let notificationSoundData = notificationSoundData {
Logger.shared.log("NotificationService \(episode)", "Did fetch notificationSoundData")
if let (_, filePath, _) = downloadNotificationSound {
let _ = try? notificationSoundData.write(to: URL(fileURLWithPath: filePath))
}
}
Logger.shared.log("NotificationService \(episode)", "Updating content to \(content)")
updateCurrentContent(content)
completed()
})
}
}
let pollSignal: Signal<Never, NoError>
pollSignal = .complete()
stateManager.network.shouldKeepConnection.set(.single(true))
let pollWithUpdatedContent: Signal<NotificationContent, NoError>
if interactionAuthorId != nil || messageId != nil {
pollWithUpdatedContent = stateManager.postbox.transaction { transaction -> NotificationContent in
var content = initialContent
if let interactionAuthorId = interactionAuthorId {
if inAppNotificationSettings.displayNameOnLockscreen, let peer = transaction.getPeer(interactionAuthorId) {
var foundLocalId: String?
transaction.enumerateDeviceContactImportInfoItems({ _, value in
if let value = value as? TelegramDeviceContactImportedData {
switch value {
case let .imported(data, _, peerId):
if peerId == interactionAuthorId {
foundLocalId = data.localIdentifiers.first
return false
}
default:
break
}
}
return true
})
content.addSenderInfo(mediaBox: stateManager.postbox.mediaBox, accountPeerId: stateManager.accountPeerId, peer: peer, topicTitle: topicTitle, contactIdentifier: foundLocalId, isStory: false)
}
}
return content
}
|> then(
pollSignal
|> map { _ -> NotificationContent in }
)
} else {
pollWithUpdatedContent = pollSignal
|> map { _ -> NotificationContent in }
}
var updatedContent = initialContent
strongSelf.pollDisposable.set(pollWithUpdatedContent.start(next: { content in
updatedContent = content
@ -1588,6 +1924,45 @@ private final class NotificationServiceHandler {
}
})
})
case let .readReactions(ids):
Logger.shared.log("NotificationService \(episode)", "Will read reactions \(ids)")
UNUserNotificationCenter.current().getDeliveredNotifications(completionHandler: { notifications in
var removeIdentifiers: [String] = []
for notification in notifications {
if notification.request.content.categoryIdentifier != "t" {
continue
}
if let peerIdString = notification.request.content.userInfo["peerId"] as? String, let peerIdValue = Int64(peerIdString), let messageIdString = notification.request.content.userInfo["msg_id"] as? String, let messageIdValue = Int32(messageIdString) {
for id in ids {
if PeerId(peerIdValue) == id.peerId && messageIdValue == id.id {
removeIdentifiers.append(notification.request.identifier)
}
}
}
}
let completeRemoval: () -> Void = {
guard let strongSelf = self else {
return
}
var content = NotificationContent(isLockedMessage: nil)
Logger.shared.log("NotificationService \(episode)", "Updating content to \(content)")
updateCurrentContent(content)
completed()
}
if !removeIdentifiers.isEmpty {
Logger.shared.log("NotificationService \(episode)", "Will try to remove \(removeIdentifiers.count) notifications")
UNUserNotificationCenter.current().removeDeliveredNotifications(withIdentifiers: removeIdentifiers)
queue.after(1.0, {
completeRemoval()
})
} else {
completeRemoval()
}
})
case let .readMessage(id):
Logger.shared.log("NotificationService \(episode)", "Will read message \(id)")
let _ = (stateManager.postbox.transaction { transaction -> Void in

View File

@ -36,7 +36,11 @@ func unreadMessages(account: Account) -> Signal<[INMessage], NoError> {
|> mapToSignal { view -> Signal<[INMessage], NoError> in
var signals: [Signal<[INMessage], NoError>] = []
for entry in view.0.entries {
if case let .MessageEntry(index, _, readState, isMuted, _, _, _, _, _, _, _, _, _) = entry {
if case let .MessageEntry(entryData) = entry {
let index = entryData.index
let readState = entryData.readState
let isMuted = entryData.isRemovedFromTotalUnreadCount
if index.messageIndex.id.peerId.namespace != Namespaces.Peer.CloudUser {
continue
}

View File

@ -1,5 +1,7 @@
import UIKit
// Custom UIApplication subclass, exposed to Objective-C under the name "Application"
// so it can be referenced by string (e.g. as the principal class passed to
// UIApplicationMain / in Info.plist).
@objc(Application) class Application: UIApplication {
// Forwards every event unchanged to UIKit. No extra behavior is added here —
// presumably this override exists as a hook point for intercepting events
// (touches, shakes, remote-control events) later; TODO confirm intent.
override func sendEvent(_ event: UIEvent) {
super.sendEvent(event)
}
}

View File

@ -263,6 +263,12 @@
"PUSH_CHAT_REQ_JOINED" = "%2$@|%1$@ was accepted into the group";
"PUSH_STORY_NOTEXT" = "%1$@|posted a story";
"PUSH_MESSAGE_STORY" = "%1$@|shared a story with you";
"PUSH_MESSAGE_STORY_MENTION" = "%1$@|mentioned you in a story";
"PUSH_CHANNEL_MESSAGE_STORY" = "%1$@|shared a story";
"PUSH_CHAT_MESSAGE_STORY" = "%2$@|%1$@ shared a story to the group";
"LOCAL_MESSAGE_FWDS" = "%1$@ forwarded you %2$d messages";
"LOCAL_CHANNEL_MESSAGE_FWDS" = "%1$@ posted %2$d forwarded messages";
"LOCAL_CHAT_MESSAGE_FWDS" = "%1$@ forwarded %2$d messages";
@ -963,6 +969,8 @@
"PrivacySettings.LastSeenContactsMinus" = "My Contacts (-%@)";
"PrivacySettings.LastSeenContactsMinusPlus" = "My Contacts (-%@, +%@)";
"PrivacySettings.LastSeenNobodyPlus" = "Nobody (+%@)";
"PrivacySettings.LastSeenCloseFriendsPlus" = "Close Friends (+%@)";
"PrivacySettings.LastSeenCloseFriends" = "Close Friends";
"PrivacySettings.SecurityTitle" = "SECURITY";
@ -2054,6 +2062,7 @@
"StickerPack.Share" = "Share";
"StickerPack.Send" = "Send Sticker";
"StickerPack.AddSticker" = "Add Sticker";
"StickerPack.RemoveStickerCount_1" = "Remove 1 Sticker";
"StickerPack.RemoveStickerCount_2" = "Remove 2 Stickers";
@ -5823,6 +5832,8 @@ Sorry for the inconvenience.";
"VoiceChat.Audio" = "audio";
"VoiceChat.Leave" = "leave";
"LiveStream.Expand" = "expand";
"VoiceChat.SpeakPermissionEveryone" = "New participants can speak";
"VoiceChat.SpeakPermissionAdmin" = "New participants are muted";
"VoiceChat.Share" = "Share Invite Link";
@ -5959,7 +5970,9 @@ Sorry for the inconvenience.";
"LiveStream.RecordingInProgress" = "Live stream is being recorded";
"VoiceChat.StopRecordingTitle" = "Stop Recording?";
"VoiceChat.StopRecordingStop" = "Stop";
"VoiceChat.StopRecordingStop" = "Stop Recording";
"LiveStream.StopLiveStream" = "Stop Live Stream";
"VoiceChat.RecordingSaved" = "Audio saved to **Saved Messages**.";
@ -6899,7 +6912,7 @@ Sorry for the inconvenience.";
"SponsoredMessageMenu.Info" = "What are sponsored\nmessages?";
"SponsoredMessageInfoScreen.Title" = "What are sponsored messages?";
"SponsoredMessageInfoScreen.Text" = "Unlike other apps, Telegram never uses your private data to target ads. You are seeing this message only because someone chose this public one-to-many channel as a space to promote their messages. This means that no user data is mined or analyzed to display ads, and every user viewing a channel on Telegram sees the same sponsored message.\n\nUnlike other apps, Telegram doesn't track whether you tapped on a sponsored message and doesn't profile you based on your activity. We also prevent external links in sponsored messages to ensure that third parties can't spy on our users. We believe that everyone has the right to privacy, and technological platforms should respect that.\n\nTelegram offers free and unlimited service to hundreds of millions of users, which involves significant server and traffic costs. In order to remain independent and stay true to its values, Telegram developed a paid tool to promote messages with user privacy in mind. We welcome responsible advertisers at:\n[url]\nAds should no longer be synonymous with abuse of user privacy. Let us redefine how a tech company should operate — together.";
"SponsoredMessageInfoScreen.MarkdownText" = "Unlike other apps, Telegram never uses your private data to target ads. [Learn more in the Privacy Policy](https://telegram.org/privacy#5-6-no-ads-based-on-user-data)\nYou are seeing this message only because someone chose this public one-to-many channel as a space to promote their messages. This means that no user data is mined or analyzed to display ads, and every user viewing a channel on Telegram sees the same sponsored message.\n\nUnlike other apps, Telegram doesn't track whether you tapped on a sponsored message and doesn't profile you based on your activity. We also prevent external links in sponsored messages to ensure that third parties can't spy on our users. We believe that everyone has the right to privacy, and technological platforms should respect that.\n\nTelegram offers free and unlimited service to hundreds of millions of users, which involves significant server and traffic costs. In order to remain independent and stay true to its values, Telegram developed a paid tool to promote messages with user privacy in mind. We welcome responsible advertisers at:\n[url]\nAds should no longer be synonymous with abuse of user privacy. Let us redefine how a tech company should operate — together.";
"SponsoredMessageInfo.Action" = "Learn More";
"SponsoredMessageInfo.Url" = "https://telegram.org/ads";
@ -7078,6 +7091,7 @@ Sorry for the inconvenience.";
"Time.HoursAgo_many" = "%@ hours ago";
"Time.HoursAgo_0" = "%@ hours ago";
"Time.AtDate" = "%@";
"Time.AtPreciseDate" = "%@ at %@";
"Stickers.ShowMore" = "Show More";
@ -7420,6 +7434,7 @@ Sorry for the inconvenience.";
"LiveStream.NoViewers" = "No viewers";
"LiveStream.ViewerCount_1" = "1 viewer";
"LiveStream.ViewerCount_any" = "%@ viewers";
"LiveStream.Watching" = "watching";
"LiveStream.NoSignalAdminText" = "Oops! Telegram doesn't see any stream\ncoming from your streaming app.\n\nPlease make sure you entered the right Server\nURL and Stream Key in your app.";
"LiveStream.NoSignalUserText" = "%@ is currently not broadcasting live\nstream data to Telegram.";
@ -7536,6 +7551,7 @@ Sorry for the inconvenience.";
"PeerInfo.AutoDeleteSettingOther" = "Other...";
"PeerInfo.AutoDeleteDisable" = "Disable";
"PeerInfo.AutoDeleteInfo" = "Automatically delete messages sent in this chat after a certain period of time.";
"PeerInfo.ChannelAutoDeleteInfo" = "Automatically delete messages sent in this channel after a certain period of time.";
"PeerInfo.ClearMessages" = "Clear Messages";
"PeerInfo.ClearConfirmationUser" = "Are you sure you want to delete all messages with %@?";
@ -9106,11 +9122,6 @@ Sorry for the inconvenience.";
"Wallpaper.ApplyForAll" = "Apply For All Chats";
"Wallpaper.ApplyForChat" = "Apply For This Chat";
"ChatList.ChatFolderUpdateCount_1" = "1 new chat";
"ChatList.ChatFolderUpdateCount_any" = "%d new chats";
"ChatList.ChatFolderUpdateHintTitle" = "You can join %@";
"ChatList.ChatFolderUpdateHintText" = "Tap here to view them";
"Premium.MaxSharedFolderMembershipText" = "You can only add **%1$@** shareable folders. Upgrade to **Telegram Premium** to increase this limit up to **%2$@**.";
"Premium.MaxSharedFolderMembershipNoPremiumText" = "You can only add **%1$@** shareable folders. We are working to let you increase this limit in the future.";
"Premium.MaxSharedFolderMembershipFinalText" = "Sorry, you can only add **%1$@** shareable folders.";
@ -9342,3 +9353,240 @@ Sorry for the inconvenience.";
"ChatList.PremiumRestoreDiscountTitle" = "Get Premium back with up to %@ off";
"ChatList.PremiumRestoreDiscountText" = "Your Telegram Premium has recently expired. Tap here to extend it.";
"Notification.LockScreenReactionPlaceholder" = "Reaction";
"UserInfo.BotNamePlaceholder" = "Bot Name";
"ChatList.PremiumRestoreDiscountTitle" = "Get Premium back with up to %@ off";
"ChatList.PremiumRestoreDiscountText" = "Your Telegram Premium has recently expired. Tap here to extend it.";
"Login.ErrorAppOutdated" = "Please update Telegram to the latest version to log in.";
"Login.GetCodeViaFragment" = "Get a code via Fragment";
"Privacy.Bio" = "Bio";
"Privacy.Bio.WhoCanSeeMyBio" = "WHO CAN SEE MY BIO";
"Privacy.Bio.CustomHelp" = "You can restrict who can see your profile bio with granular precision.";
"Privacy.Bio.AlwaysShareWith.Title" = "Always Share With";
"Privacy.Bio.NeverShareWith.Title" = "Never Share With";
"Conversation.OpenLink" = "OPEN LINK";
"Paint.Flip" = "Flip";
"Message.ForwardedStoryShort" = "Forwarded Story\nFrom: %@";
"Message.ForwardedExpiredStoryShort" = "Expired Story\nFrom: %@";
"Conversation.StoryForwardTooltip.Chat.One" = "Story forwarded to **%@**";
"Conversation.StoryForwardTooltip.TwoChats.One" = "Story forwarded to to **%@** and **%@**";
"Conversation.StoryForwardTooltip.ManyChats.One" = "Story forwarded to to **%@** and %@ others";
"Conversation.StoryForwardTooltip.SavedMessages.One" = "Story forwarded to to **Saved Messages**";
"Conversation.StoryMentionTextOutgoing" = "You mentioned %@\nin a story";
"Conversation.StoryMentionTextIncoming" = "%@ mentioned you\nin a story";
"Conversation.StoryExpiredMentionTextOutgoing" = "The story where you mentioned %@\n is no longer available";
"Conversation.StoryExpiredMentionTextIncoming" = "The story you were mentioned in\nis no longer available";
"ChatList.ArchiveStoryCount_1" = "1 story";
"ChatList.ArchiveStoryCount_any" = "%d stories";
"Notification.Story" = "Story";
"ChatList.StoryFeedTooltip" = "Tap above to view updates\nfrom %@";
"StoryFeed.ContextAddStory" = "Add Story";
"StoryFeed.ContextSavedStories" = "Saved Stories";
"StoryFeed.ContextArchivedStories" = "Archived Stories";
"StoryFeed.ContextOpenChat" = "Send Message";
"StoryFeed.ContextOpenProfile" = "View Profile";
"StoryFeed.ContextNotifyOn" = "Notify About Stories";
"StoryFeed.ContextNotifyOff" = "Do Not Notify About Stories";
"StoryFeed.ContextArchive" = "Hide Stories";
"StoryFeed.ContextUnarchive" = "Unhide Stories";
"StoryFeed.TooltipNotifyOn" = "You will now get a notification whenever **%@** posts a story.";
"StoryFeed.TooltipNotifyOff" = "You will no longer receive a notification when **%@** posts a story.";
"StoryFeed.TooltipArchive" = "Stories from **%@** will now be shown in Archived Chats.";
"StoryFeed.TooltipUnarchive" = "Stories from **%@** will now be shown in Chats.";
"ChatList.Archive.ContextSettings" = "Archive Settings";
"ChatList.Archive.ContextInfo" = "How Does It Work?";
"ChatList.ContextSelectChats" = "Select Chats";
"StoryFeed.TooltipPremiumPosting" = "Posting stories is currently available only\nto subscribers of [Telegram Premium]().";
"StoryFeed.TooltipStoryLimitValue_1" = "1 story";
"StoryFeed.TooltipStoryLimitValue_any" = "%d stories";
"StoryFeed.TooltipStoryLimit" = "You can't post more than **%@** stories in **24 hours**.";
"StoryFeed.MyStory" = "My Story";
"StoryFeed.MyUploading" = "Uploading...";
"MediaPicker.AddImage" = "Add Image";
"Premium.Stories" = "Story Posting";
"Premium.StoriesInfo" = "Be one of the first to share your stories with your contacts or an unlimited audience.";
"Premium.Stories.Proceed" = "Unlock Story Posting";
"AutoDownloadSettings.OnForContacts" = "On for contacts";
"AutoDownloadSettings.StoriesSectionHeader" = "AUTO-DOWNLOAD STORIES";
"AutoDownloadSettings.StoriesArchivedContacts" = "Archived Contacts";
"AutoDownloadSettings.StoriesTitle" = "Stories";
"Notifications.TopChats" = "Top 5";
"Notifications.Stories" = "Stories";
"Settings.MyStories" = "My Stories";
"Settings.StoriesArchive" = "Stories Archive";
"ArchiveSettings.Title" = "Archive Settings";
"ArchiveSettings.UnmutedChatsHeader" = "UNMUTED CHATS";
"ArchiveSettings.UnmutedChatsFooter" = "Keep archived chats in the Archive even if they are unmuted and get a new message.";
"ArchiveSettings.FolderChatsHeader" = "CHATS FROM FOLDERS";
"ArchiveSettings.FolderChatsFooter" = "Keep archived chats from folders in the Archive even if they are unmuted and get a new message.";
"ArchiveSettings.UnknownChatsHeader" = "NEW CHATS FROM UNKNOWN USERS";
"ArchiveSettings.UnknownChatsFooter" = "Automatically archive and mute new private chats, groups and channels from non-contacts.";
"ArchiveSettings.KeepArchived" = "Always Keep Archived";
"ArchiveSettings.TooltipPremiumRequired" = "This setting is available only to the subscribers of [Telegram Premium]().";
"NotificationSettings.Stories.ShowAll" = "Show All Notifications";
"NotificationSettings.Stories.ShowImportant" = "Show Important Notifications";
"NotificationSettings.Stories.ShowImportantFooter" = "Always on for top 5 contacts.";
"NotificationSettings.Stories.DisplayAuthorName" = "Display Author Name";
"NotificationSettings.Stories.AutomaticValue" = "%@ (automatic)";
"NotificationSettings.Stories.CompactShowName" = "Show name";
"NotificationSettings.Stories.CompactHideName" = "Hide name";
"Notifications.StoriesTitle" = "Stories";
"Message.Story" = "Story";
"Notification.Exceptions.StoriesHeader" = "STORY NOTIFICATIONS";
"Notification.Exceptions.StoriesDisplayAuthorName" = "DISPLAY AUTHOR NAME";
"StorageManagement.SectionStories" = "Stories";
"PeerInfo.PaneStories" = "Stories";
"Story.TooltipExpired" = "This story is no longer available";
"Chat.ReplyExpiredStory" = "Expired story";
"Chat.ReplyStory" = "Story";
"Chat.StoryMentionAction" = "View Story";
"StoryList.ContextSaveToGallery" = "Save to Gallery";
"StoryList.ContextShowArchive" = "Show Archive";
"StoryList.TooltipStoriesDeleted_1" = "1 story deleted.";
"StoryList.TooltipStoriesDeleted_any" = "%d stories deleted.";
"Story.TooltipSaving" = "Saving";
"Story.TooltipSaved" = "Saved";
"StoryList.SaveToProfile" = "Save to Profile";
"StoryList.TooltipStoriesSavedToProfile_1" = "Story saved to your profile";
"StoryList.TooltipStoriesSavedToProfile_any" = "%d stories saved to your profile.";
"StoryList.TooltipStoriesSavedToProfileText" = "Saved stories can be viewed by others on your profile until you remove them.";
"StoryList.TitleSaved" = "My Stories";
"StoryList.TitleArchive" = "Stories Archive";
"StoryList.SubtitleSelected_1" = "1 story selected";
"StoryList.SubtitleSelected_any" = "%d stories selected";
"StoryList.SubtitleSaved_1" = "1 saved story";
"StoryList.SubtitleSaved_any" = "%d saved stories";
"StoryList.SubtitleCount_1" = "1 story";
"StoryList.SubtitleCount_any" = "%d stories";
"StoryList.ArchiveDescription" = "Only you can see archived stories unless you choose to save them to your profile.";
"StoryList.SavedEmptyState.Title" = "No saved stories";
"StoryList.SavedEmptyState.Text" = "Open the Archive to select stories you\nwant to be displayed in your profile.";
"StoryList.ArchivedEmptyState.Title" = "No Archived Stories";
"StoryList.ArchivedEmptyState.Text" = "Upload a new story to view it here";
"StoryList.SavedEmptyAction" = "Open Archive";
"ArchiveInfo.Title" = "This is Your Archive";
"ArchiveInfo.TextKeepArchivedUnmuted" = "Archived chats will remain in the Archive when you receive a new message. [Tap to change >]()";
"ArchiveInfo.TextKeepArchivedDefault" = "When you receive a new message, muted chats will remain in the Archive, while unmuted chats will be moved to Chats. [Tap to change >]()";
"ArchiveInfo.ChatsTitle" = "Archived Chats";
"ArchiveInfo.ChatsText" = "Move any chat into your Archive and back by swiping on it.";
"ArchiveInfo.HideTitle" = "Hiding Archive";
"ArchiveInfo.HideText" = "Hide the Archive from your Main screen by swiping on it.";
"ArchiveInfo.StoriesTitle" = "Stories";
"ArchiveInfo.StoriesText" = "Archive Stories from your contacts separately from chats with them.";
"ArchiveInfo.CloseAction" = "Got it";
"Story.HeaderYourStory" = "Your story";
"Story.HeaderEdited" = "edited";
"Story.CaptionShowMore" = "Show more";
"Story.UnsupportedText" = "This story is not supported by\nyour version of Telegram.";
"Story.UnsupportedAction" = "Update Telegram";
"Story.ScreenshotBlockedTitle" = "Screenshot Blocked";
"Story.ScreenshotBlockedText" = "The story you tried to take a\nscreenshot of is protected from\ncopying by its creator.";
"Story.Footer.NoViews" = "No views";
"Story.Footer.Views_1" = "1 view";
"Story.Footer.Views_any" = "%d views";
"Story.Footer.Uploading" = "Uploading...";
"Story.FooterReplyUnavailable" = "You can't reply to this story";
"Story.InputPlaceholderReplyPrivately" = "Reply Privately...";
"Story.ContextDeleteStory" = "Delete Story";
"Story.TooltipPrivacyCloseFriendsMy" = "Only people from your close friends list will see this story.";
"Story.TooltipPrivacyCloseFriends" = "You are seeing this story because you have\nbeen added to %@'s list of close friends.";
"Story.ToastViewInChat" = "View in Chat";
"Story.ToastReactionSent" = "Reaction Sent.";
"Story.PrivacyTooltipContacts" = "This story is shown to all your contacts.";
"Story.PrivacyTooltipCloseFriends" = "This story is shown to your close friends.";
"Story.PrivacyTooltipSelectedContacts" = "This story is shown to selected contacts.";
"Story.PrivacyTooltipNobody" = "This story is shown only to you.";
"Story.PrivacyTooltipEveryone" = "This story is shown to everyone.";
"Story.ContextPrivacy.LabelCloseFriends" = "Close Friends";
"Story.ContextPrivacy.LabelContactsExcept" = "Contacts (-%@)";
"Story.ContextPrivacy.LabelContacts" = "Contacts";
"Story.ContextPrivacy.LabelOnlySelected_1" = "1 Person";
"Story.ContextPrivacy.LabelOnlySelected_any" = "%d People";
"Story.ContextPrivacy.LabelOnlyMe" = "Only Me";
"Story.ContextPrivacy.LabelEveryone" = "Everyone";
"Story.Context.Privacy" = "Who Can See";
"Story.Context.Edit" = "Edit Story";
"Story.Context.SaveToProfile" = "Save to Profile";
"Story.Context.RemoveFromProfile" = "Remove from Profile";
"Story.ToastRemovedFromProfileText" = "Story removed from your profile";
"Story.ToastSavedToProfileTitle" = "Story saved to your profile";
"Story.ToastSavedToProfileText" = "Saved stories can be viewed by others on your profile until you remove them.";
"Story.Context.SaveToGallery" = "Save to Gallery";
"Story.Context.CopyLink" = "Copy Link";
"Story.ToastLinkCopied" = "Link copied.";
"Story.Context.Share" = "Share";
"Story.Context.Report" = "Report";
"Story.Context.EmbeddedStickersValue_1" = "1 pack";
"Story.Context.EmbeddedStickersValue_any" = "%d packs";
"Story.Context.EmbeddedStickers" = "This story contains stickers from [%@]().";
"Story.Context.EmbeddedEmojiPack" = "This story contains\n#[%@]() emoji.";
"Story.Context.EmbeddedStickerPack" = "This story contains\n#[%@]() stickers.";
"Story.TooltipVideoHasNoSound" = "This video has no sound";
"Story.TooltipMessageScheduled" = "Message Scheduled";
"Story.TooltipMessageSent" = "Message Sent";

View File

@ -20,7 +20,6 @@ swift_library(
"//submodules/Postbox:Postbox",
"//submodules/TelegramCore:TelegramCore",
"//submodules/MusicAlbumArtResources:MusicAlbumArtResources",
"//submodules/MeshAnimationCache:MeshAnimationCache",
"//submodules/Utils/RangeSet:RangeSet",
"//submodules/InAppPurchaseManager:InAppPurchaseManager",
"//submodules/TextFormat:TextFormat",

View File

@ -10,10 +10,10 @@ import AsyncDisplayKit
import Display
import DeviceLocationManager
import TemporaryCachedPeerDataManager
import MeshAnimationCache
import InAppPurchaseManager
import AnimationCache
import MultiAnimationRenderer
import Photos
public final class TelegramApplicationOpenUrlCompletion {
public let completion: (Bool) -> Void
@ -299,6 +299,7 @@ public enum ResolvedUrl {
case invoice(slug: String, invoice: TelegramMediaInvoice?)
case premiumOffer(reference: String?)
case chatFolder(slug: String)
case story(peerId: PeerId, id: Int32)
}
public enum NavigateToChatKeepStack {
@ -465,8 +466,9 @@ public final class NavigateToChatControllerParams {
public let changeColors: Bool
public let setupController: (ChatController) -> Void
public let completion: (ChatController) -> Void
public let pushController: ((ChatController, Bool, @escaping () -> Void) -> Void)?
public init(navigationController: NavigationController, chatController: ChatController? = nil, context: AccountContext, chatLocation: Location, chatLocationContextHolder: Atomic<ChatLocationContextHolder?> = Atomic<ChatLocationContextHolder?>(value: nil), subject: ChatControllerSubject? = nil, botStart: ChatControllerInitialBotStart? = nil, attachBotStart: ChatControllerInitialAttachBotStart? = nil, botAppStart: ChatControllerInitialBotAppStart? = nil, updateTextInputState: ChatTextInputState? = nil, activateInput: ChatControllerActivateInput? = nil, keepStack: NavigateToChatKeepStack = .default, useExisting: Bool = true, useBackAnimation: Bool = false, purposefulAction: (() -> Void)? = nil, scrollToEndIfExists: Bool = false, activateMessageSearch: (ChatSearchDomain, String)? = nil, peekData: ChatPeekTimeout? = nil, peerNearbyData: ChatPeerNearbyData? = nil, reportReason: ReportReason? = nil, animated: Bool = true, options: NavigationAnimationOptions = [], parentGroupId: PeerGroupId? = nil, chatListFilter: Int32? = nil, chatNavigationStack: [ChatNavigationStackItem] = [], changeColors: Bool = false, setupController: @escaping (ChatController) -> Void = { _ in }, completion: @escaping (ChatController) -> Void = { _ in }) {
public init(navigationController: NavigationController, chatController: ChatController? = nil, context: AccountContext, chatLocation: Location, chatLocationContextHolder: Atomic<ChatLocationContextHolder?> = Atomic<ChatLocationContextHolder?>(value: nil), subject: ChatControllerSubject? = nil, botStart: ChatControllerInitialBotStart? = nil, attachBotStart: ChatControllerInitialAttachBotStart? = nil, botAppStart: ChatControllerInitialBotAppStart? = nil, updateTextInputState: ChatTextInputState? = nil, activateInput: ChatControllerActivateInput? = nil, keepStack: NavigateToChatKeepStack = .default, useExisting: Bool = true, useBackAnimation: Bool = false, purposefulAction: (() -> Void)? = nil, scrollToEndIfExists: Bool = false, activateMessageSearch: (ChatSearchDomain, String)? = nil, peekData: ChatPeekTimeout? = nil, peerNearbyData: ChatPeerNearbyData? = nil, reportReason: ReportReason? = nil, animated: Bool = true, options: NavigationAnimationOptions = [], parentGroupId: PeerGroupId? = nil, chatListFilter: Int32? = nil, chatNavigationStack: [ChatNavigationStackItem] = [], changeColors: Bool = false, setupController: @escaping (ChatController) -> Void = { _ in }, pushController: ((ChatController, Bool, @escaping () -> Void) -> Void)? = nil, completion: @escaping (ChatController) -> Void = { _ in }) {
self.navigationController = navigationController
self.chatController = chatController
self.chatLocationContextHolder = chatLocationContextHolder
@ -494,6 +496,7 @@ public final class NavigateToChatControllerParams {
self.chatNavigationStack = chatNavigationStack
self.changeColors = changeColors
self.setupController = setupController
self.pushController = pushController
self.completion = completion
}
}
@ -739,6 +742,65 @@ public protocol AppLockContext: AnyObject {
public protocol RecentSessionsController: AnyObject {
}
public protocol AttachmentFileController: AnyObject {
}
/// Parameters describing where the story camera is presented from.
public struct StoryCameraTransitionIn {
    /// The originating view; stored weakly so the transition data does not
    /// retain the source view hierarchy.
    public weak var sourceView: UIView?
    /// Frame of the transition origin.
    public let sourceRect: CGRect
    /// Corner radius associated with the source.
    public let sourceCornerRadius: CGFloat

    public init(sourceView: UIView, sourceRect: CGRect, sourceCornerRadius: CGFloat) {
        self.sourceCornerRadius = sourceCornerRadius
        self.sourceRect = sourceRect
        self.sourceView = sourceView
    }
}
/// Parameters describing where the story camera is dismissed to.
public struct StoryCameraTransitionOut {
    /// The destination view; stored weakly so the transition data does not
    /// retain the destination view hierarchy.
    public weak var destinationView: UIView?
    /// Frame of the transition destination.
    public let destinationRect: CGRect
    /// Corner radius associated with the destination.
    public let destinationCornerRadius: CGFloat

    public init(destinationView: UIView, destinationRect: CGRect, destinationCornerRadius: CGFloat) {
        self.destinationCornerRadius = destinationCornerRadius
        self.destinationRect = destinationRect
        self.destinationView = destinationView
    }
}
/// Callback bundle used to drive the story camera's presentation transition.
public struct StoryCameraTransitionInCoordinator {
    /// Starts the (non-interactive) presentation animation.
    public let animateIn: () -> Void
    /// Advances an interactive transition to the given progress value.
    public let updateTransitionProgress: (CGFloat) -> Void
    /// Finishes an interactive transition from the given progress and velocity.
    public let completeWithTransitionProgressAndVelocity: (CGFloat, CGFloat) -> Void

    public init(animateIn: @escaping () -> Void, updateTransitionProgress: @escaping (CGFloat) -> Void, completeWithTransitionProgressAndVelocity: @escaping (CGFloat, CGFloat) -> Void) {
        self.completeWithTransitionProgressAndVelocity = completeWithTransitionProgressAndVelocity
        self.updateTransitionProgress = updateTransitionProgress
        self.animateIn = animateIn
    }
}
/// Interface exposed by the application's root navigation controller.
public protocol TelegramRootControllerInterface: NavigationController {
    /// Opens the story camera, optionally animating from `transitionIn`.
    /// `transitionedIn` is invoked in connection with the presentation transition;
    /// `transitionOut` supplies the dismissal destination (the `Bool` parameter's
    /// meaning is not visible here — NOTE(review): confirm against the implementation).
    /// Returns a coordinator for driving the transition, or nil.
    @discardableResult
    func openStoryCamera(transitionIn: StoryCameraTransitionIn?, transitionedIn: @escaping () -> Void, transitionOut: @escaping (Bool) -> StoryCameraTransitionOut?) -> StoryCameraTransitionInCoordinator?
    /// Returns the contacts controller, if available.
    func getContactsController() -> ViewController?
    /// Returns the chats controller, if available.
    func getChatsController() -> ViewController?
}
public protocol SharedAccountContext: AnyObject {
var sharedContainerPath: String { get }
var basePath: String { get }
@ -806,6 +868,11 @@ public protocol SharedAccountContext: AnyObject {
func makePrivacyAndSecurityController(context: AccountContext) -> ViewController
func makeSetupTwoFactorAuthController(context: AccountContext) -> ViewController
func makeStorageManagementController(context: AccountContext) -> ViewController
func makeAttachmentFileController(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, bannedSendMedia: (Int32, Bool)?, presentGallery: @escaping () -> Void, presentFiles: @escaping () -> Void, send: @escaping (AnyMediaReference) -> Void) -> AttachmentFileController
func makeGalleryCaptionPanelView(context: AccountContext, chatLocation: ChatLocation, customEmojiAvailable: Bool, present: @escaping (ViewController) -> Void, presentInGlobalOverlay: @escaping (ViewController) -> Void) -> NSObject?
func makeHashtagSearchController(context: AccountContext, peer: EnginePeer?, query: String, all: Bool) -> ViewController
func makeMyStoriesController(context: AccountContext, isArchive: Bool) -> ViewController
func makeArchiveSettingsController(context: AccountContext) -> ViewController
func navigateToChatController(_ params: NavigateToChatControllerParams)
func navigateToForumChannel(context: AccountContext, peerId: EnginePeer.Id, navigationController: NavigationController)
func navigateToForumThread(context: AccountContext, peerId: EnginePeer.Id, threadId: Int64, messageId: EngineMessage.Id?, navigationController: NavigationController, activateInput: ChatControllerActivateInput?, keepStack: NavigateToChatKeepStack) -> Signal<Never, NoError>
@ -832,10 +899,14 @@ public protocol SharedAccountContext: AnyObject {
func makePremiumLimitController(context: AccountContext, subject: PremiumLimitSubject, count: Int32, action: @escaping () -> Void) -> ViewController
func makeStickerPackScreen(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, mainStickerPack: StickerPackReference, stickerPacks: [StickerPackReference], loadedStickerPacks: [LoadedStickerPack], parentNavigationController: NavigationController?, sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?) -> ViewController
func makeMediaPickerScreen(context: AccountContext, completion: @escaping (Any) -> Void) -> ViewController
func makeStoryMediaPickerScreen(context: AccountContext, getSourceRect: @escaping () -> CGRect, completion: @escaping (Any, UIView, CGRect, UIImage?, @escaping (Bool?) -> (UIView, CGRect)?, @escaping () -> Void) -> Void, dismissed: @escaping () -> Void) -> ViewController
func makeProxySettingsController(sharedContext: SharedAccountContext, account: UnauthorizedAccount) -> ViewController
func makeInstalledStickerPacksController(context: AccountContext, mode: InstalledStickerPacksControllerMode) -> ViewController
func makeInstalledStickerPacksController(context: AccountContext, mode: InstalledStickerPacksControllerMode, forceTheme: PresentationTheme?) -> ViewController
func makeDebugSettingsController(context: AccountContext?) -> ViewController?
@ -843,6 +914,9 @@ public protocol SharedAccountContext: AnyObject {
var hasOngoingCall: ValuePromise<Bool> { get }
var immediateHasOngoingCall: Bool { get }
var enablePreloads: Promise<Bool> { get }
var hasPreloadBlockingContent: Promise<Bool> { get }
var hasGroupCallOnScreen: Signal<Bool, NoError> { get }
var currentGroupCallController: ViewController? { get }
@ -872,6 +946,7 @@ public enum PremiumIntroSource {
case voiceToText
case fasterDownload
case translation
case stories
}
public enum PremiumDemoSubject {
@ -889,6 +964,7 @@ public enum PremiumDemoSubject {
case animatedEmoji
case emojiStatus
case translation
case stories
}
public enum PremiumLimitSubject {
@ -935,7 +1011,6 @@ public protocol AccountContext: AnyObject {
var currentCountriesConfiguration: Atomic<CountriesConfiguration> { get }
var cachedGroupCallContexts: AccountGroupCallContextCache { get }
var meshAnimationCache: MeshAnimationCache { get }
var animationCache: AnimationCache { get }
var animationRenderer: MultiAnimationRenderer { get }
@ -1006,3 +1081,58 @@ public struct AntiSpamBotConfiguration {
}
}
}
/// Server-driven configuration controlling whether story posting is available.
public struct StoriesConfiguration {
    public enum PostingAvailability {
        case enabled
        case premium
        case disabled
    }

    /// Fallback used when the app configuration carries no value: posting disabled.
    static var defaultValue: StoriesConfiguration {
        return StoriesConfiguration(posting: .disabled)
    }

    public let posting: PostingAvailability

    fileprivate init(posting: PostingAvailability) {
        self.posting = posting
    }

    /// Parses the `stories_posting` key from the app configuration.
    /// Any value other than "enabled" or "premium" (or a missing/non-string
    /// value) resolves to `.disabled`.
    public static func with(appConfiguration: AppConfiguration) -> StoriesConfiguration {
        guard let data = appConfiguration.data, let postingString = data["stories_posting"] as? String else {
            return .defaultValue
        }
        let posting: PostingAvailability
        switch postingString {
        case "enabled":
            posting = .enabled
        case "premium":
            posting = .premium
        default:
            posting = .disabled
        }
        return StoriesConfiguration(posting: posting)
    }
}
/// Server-driven configuration for sticker/emoji search suggestions.
public struct StickersSearchConfiguration {
    /// Fallback used when the app configuration carries no value:
    /// local suggestions remain enabled.
    static var defaultValue: StickersSearchConfiguration {
        return StickersSearchConfiguration(disableLocalSuggestions: false)
    }

    public let disableLocalSuggestions: Bool

    fileprivate init(disableLocalSuggestions: Bool) {
        self.disableLocalSuggestions = disableLocalSuggestions
    }

    /// Parses the `stickers_emoji_suggest_only_api` key from the app
    /// configuration; a missing or non-boolean value yields the default.
    public static func with(appConfiguration: AppConfiguration) -> StickersSearchConfiguration {
        guard let data = appConfiguration.data, let suggestOnlyApi = data["stickers_emoji_suggest_only_api"] as? Bool else {
            return .defaultValue
        }
        return StickersSearchConfiguration(disableLocalSuggestions: suggestOnlyApi)
    }
}

View File

@ -1,13 +1,13 @@
import Foundation
import UIKit
import TelegramCore
import Postbox
import TextFormat
import AsyncDisplayKit
import Display
import SwiftSignalKit
import TelegramPresentationData
import TelegramUIPreferences
import Postbox
public final class ChatMessageItemAssociatedData: Equatable {
public enum ChannelDiscussionGroupStatus: Equatable {
@ -49,8 +49,9 @@ public final class ChatMessageItemAssociatedData: Equatable {
public let topicAuthorId: EnginePeer.Id?
public let hasBots: Bool
public let translateToLanguage: String?
public let maxReadStoryId: Int32?
public init(automaticDownloadPeerType: MediaAutoDownloadPeerType, automaticDownloadPeerId: EnginePeer.Id?, automaticDownloadNetworkType: MediaAutoDownloadNetworkType, isRecentActions: Bool = false, subject: ChatControllerSubject? = nil, contactsPeerIds: Set<EnginePeer.Id> = Set(), channelDiscussionGroup: ChannelDiscussionGroupStatus = .unknown, animatedEmojiStickers: [String: [StickerPackItem]] = [:], additionalAnimatedEmojiStickers: [String: [Int: StickerPackItem]] = [:], forcedResourceStatus: FileMediaResourceStatus? = nil, currentlyPlayingMessageId: EngineMessage.Index? = nil, isCopyProtectionEnabled: Bool = false, availableReactions: AvailableReactions?, defaultReaction: MessageReaction.Reaction?, isPremium: Bool, accountPeer: EnginePeer?, forceInlineReactions: Bool = false, alwaysDisplayTranscribeButton: DisplayTranscribeButton = DisplayTranscribeButton(canBeDisplayed: false, displayForNotConsumed: false), topicAuthorId: EnginePeer.Id? = nil, hasBots: Bool = false, translateToLanguage: String? = nil) {
public init(automaticDownloadPeerType: MediaAutoDownloadPeerType, automaticDownloadPeerId: EnginePeer.Id?, automaticDownloadNetworkType: MediaAutoDownloadNetworkType, isRecentActions: Bool = false, subject: ChatControllerSubject? = nil, contactsPeerIds: Set<EnginePeer.Id> = Set(), channelDiscussionGroup: ChannelDiscussionGroupStatus = .unknown, animatedEmojiStickers: [String: [StickerPackItem]] = [:], additionalAnimatedEmojiStickers: [String: [Int: StickerPackItem]] = [:], forcedResourceStatus: FileMediaResourceStatus? = nil, currentlyPlayingMessageId: EngineMessage.Index? = nil, isCopyProtectionEnabled: Bool = false, availableReactions: AvailableReactions?, defaultReaction: MessageReaction.Reaction?, isPremium: Bool, accountPeer: EnginePeer?, forceInlineReactions: Bool = false, alwaysDisplayTranscribeButton: DisplayTranscribeButton = DisplayTranscribeButton(canBeDisplayed: false, displayForNotConsumed: false), topicAuthorId: EnginePeer.Id? = nil, hasBots: Bool = false, translateToLanguage: String? = nil, maxReadStoryId: Int32? = nil) {
self.automaticDownloadPeerType = automaticDownloadPeerType
self.automaticDownloadPeerId = automaticDownloadPeerId
self.automaticDownloadNetworkType = automaticDownloadNetworkType
@ -72,6 +73,7 @@ public final class ChatMessageItemAssociatedData: Equatable {
self.alwaysDisplayTranscribeButton = alwaysDisplayTranscribeButton
self.hasBots = hasBots
self.translateToLanguage = translateToLanguage
self.maxReadStoryId = maxReadStoryId
}
public static func == (lhs: ChatMessageItemAssociatedData, rhs: ChatMessageItemAssociatedData) -> Bool {
@ -135,6 +137,9 @@ public final class ChatMessageItemAssociatedData: Equatable {
if lhs.translateToLanguage != rhs.translateToLanguage {
return false
}
if lhs.maxReadStoryId != rhs.maxReadStoryId {
return false
}
return true
}
}
@ -197,11 +202,11 @@ public struct ChatControllerInitialBotStart {
}
public struct ChatControllerInitialAttachBotStart {
public let botId: PeerId
public let botId: EnginePeer.Id
public let payload: String?
public let justInstalled: Bool
public init(botId: PeerId, payload: String?, justInstalled: Bool) {
public init(botId: EnginePeer.Id, payload: String?, justInstalled: Bool) {
self.botId = botId
self.payload = payload
self.justInstalled = justInstalled
@ -313,6 +318,9 @@ public enum ChatTextInputStateTextAttributeType: Codable, Equatable {
case textMention(EnginePeer.Id)
case textUrl(String)
case customEmoji(stickerPack: StickerPackReference?, fileId: Int64)
case strikethrough
case underline
case spoiler
public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: StringCodingKey.self)
@ -334,6 +342,12 @@ public enum ChatTextInputStateTextAttributeType: Codable, Equatable {
let stickerPack = try container.decodeIfPresent(StickerPackReference.self, forKey: "s")
let fileId = try container.decode(Int64.self, forKey: "f")
self = .customEmoji(stickerPack: stickerPack, fileId: fileId)
case 6:
self = .strikethrough
case 7:
self = .underline
case 8:
self = .spoiler
default:
assertionFailure()
self = .bold
@ -359,6 +373,12 @@ public enum ChatTextInputStateTextAttributeType: Codable, Equatable {
try container.encode(5 as Int32, forKey: "t")
try container.encodeIfPresent(stickerPack, forKey: "s")
try container.encode(fileId, forKey: "f")
case .strikethrough:
try container.encode(6 as Int32, forKey: "t")
case .underline:
try container.encode(7 as Int32, forKey: "t")
case .spoiler:
try container.encode(8 as Int32, forKey: "t")
}
}
}
@ -426,6 +446,12 @@ public struct ChatTextInputStateText: Codable, Equatable {
parsedAttributes.append(ChatTextInputStateTextAttribute(type: .textUrl(value.url), range: range.location ..< (range.location + range.length)))
} else if key == ChatTextInputAttributes.customEmoji, let value = value as? ChatTextInputTextCustomEmojiAttribute {
parsedAttributes.append(ChatTextInputStateTextAttribute(type: .customEmoji(stickerPack: nil, fileId: value.fileId), range: range.location ..< (range.location + range.length)))
} else if key == ChatTextInputAttributes.strikethrough {
parsedAttributes.append(ChatTextInputStateTextAttribute(type: .strikethrough, range: range.location ..< (range.location + range.length)))
} else if key == ChatTextInputAttributes.underline {
parsedAttributes.append(ChatTextInputStateTextAttribute(type: .underline, range: range.location ..< (range.location + range.length)))
} else if key == ChatTextInputAttributes.spoiler {
parsedAttributes.append(ChatTextInputStateTextAttribute(type: .spoiler, range: range.location ..< (range.location + range.length)))
}
}
})
@ -464,6 +490,12 @@ public struct ChatTextInputStateText: Codable, Equatable {
result.addAttribute(ChatTextInputAttributes.textUrl, value: ChatTextInputTextUrlAttribute(url: url), range: NSRange(location: attribute.range.lowerBound, length: attribute.range.count))
case let .customEmoji(_, fileId):
result.addAttribute(ChatTextInputAttributes.customEmoji, value: ChatTextInputTextCustomEmojiAttribute(interactivelySelectedFromPackId: nil, fileId: fileId, file: nil), range: NSRange(location: attribute.range.lowerBound, length: attribute.range.count))
case .strikethrough:
result.addAttribute(ChatTextInputAttributes.strikethrough, value: true as NSNumber, range: NSRange(location: attribute.range.lowerBound, length: attribute.range.count))
case .underline:
result.addAttribute(ChatTextInputAttributes.underline, value: true as NSNumber, range: NSRange(location: attribute.range.lowerBound, length: attribute.range.count))
case .spoiler:
result.addAttribute(ChatTextInputAttributes.spoiler, value: true as NSNumber, range: NSRange(location: attribute.range.lowerBound, length: attribute.range.count))
}
}
return result
@ -472,7 +504,7 @@ public struct ChatTextInputStateText: Codable, Equatable {
public enum ChatControllerSubject: Equatable {
public enum MessageSubject: Equatable {
case id(MessageId)
case id(EngineMessage.Id)
case timestamp(Int32)
}
@ -617,7 +649,7 @@ public final class PeerInfoNavigationSourceTag {
}
public protocol PeerInfoScreen: ViewController {
var peerId: PeerId { get }
}
public protocol ChatController: ViewController {
@ -652,20 +684,21 @@ public enum FileMediaResourcePlaybackStatus: Equatable {
public struct FileMediaResourceStatus: Equatable {
public var mediaStatus: FileMediaResourceMediaStatus
public var fetchStatus: MediaResourceStatus
public var fetchStatus: EngineMediaResource.FetchStatus
public init(mediaStatus: FileMediaResourceMediaStatus, fetchStatus: MediaResourceStatus) {
public init(mediaStatus: FileMediaResourceMediaStatus, fetchStatus: EngineMediaResource.FetchStatus) {
self.mediaStatus = mediaStatus
self.fetchStatus = fetchStatus
}
}
public enum FileMediaResourceMediaStatus: Equatable {
case fetchStatus(MediaResourceStatus)
case fetchStatus(EngineMediaResource.FetchStatus)
case playbackStatus(FileMediaResourcePlaybackStatus)
}
public protocol ChatMessageItemNodeProtocol: ListViewItemNode {
func targetReactionView(value: MessageReaction.Reaction) -> UIView?
func targetForStoryTransition(id: StoryId) -> UIView?
func contentFrame() -> CGRect
}

View File

@ -1,12 +1,11 @@
import Foundation
import UIKit
import Postbox
import Display
import TelegramCore
public enum ChatListControllerLocation: Equatable {
case chatList(groupId: EngineChatList.Group)
case forum(peerId: PeerId)
case forum(peerId: EnginePeer.Id)
}
public protocol ChatListController: ViewController {
@ -22,4 +21,6 @@ public protocol ChatListController: ViewController {
func playSignUpCompletedAnimation()
func navigateToFolder(folderId: Int32, completion: @escaping () -> Void)
func openStories(peerId: EnginePeer.Id)
}

View File

@ -2,13 +2,12 @@ import Foundation
import UIKit
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
import TelegramPresentationData
public struct ChatListNodeAdditionalCategory {
public enum Appearance {
case option
public enum Appearance: Equatable {
case option(sectionTitle: String?)
case action
}
@ -18,7 +17,7 @@ public struct ChatListNodeAdditionalCategory {
public var title: String
public var appearance: Appearance
public init(id: Int, icon: UIImage?, smallIcon: UIImage?, title: String, appearance: Appearance = .option) {
public init(id: Int, icon: UIImage?, smallIcon: UIImage?, title: String, appearance: Appearance = .option(sectionTitle: nil)) {
self.id = id
self.icon = icon
self.smallIcon = smallIcon
@ -41,18 +40,20 @@ public enum ContactMultiselectionControllerMode {
public struct ChatSelection {
public var title: String
public var searchPlaceholder: String
public var selectedChats: Set<PeerId>
public var selectedChats: Set<EnginePeer.Id>
public var additionalCategories: ContactMultiselectionControllerAdditionalCategories?
public var chatListFilters: [ChatListFilter]?
public var displayAutoremoveTimeout: Bool
public var displayPresence: Bool
public init(
title: String,
searchPlaceholder: String,
selectedChats: Set<PeerId>,
selectedChats: Set<EnginePeer.Id>,
additionalCategories: ContactMultiselectionControllerAdditionalCategories?,
chatListFilters: [ChatListFilter]?,
displayAutoremoveTimeout: Bool = false
displayAutoremoveTimeout: Bool = false,
displayPresence: Bool = false
) {
self.title = title
self.searchPlaceholder = searchPlaceholder
@ -60,6 +61,7 @@ public enum ContactMultiselectionControllerMode {
self.additionalCategories = additionalCategories
self.chatListFilters = chatListFilters
self.displayAutoremoveTimeout = displayAutoremoveTimeout
self.displayPresence = displayPresence
}
}
@ -71,8 +73,8 @@ public enum ContactMultiselectionControllerMode {
public enum ContactListFilter {
case excludeSelf
case exclude([PeerId])
case disable([PeerId])
case exclude([EnginePeer.Id])
case disable([EnginePeer.Id])
}
public final class ContactMultiselectionControllerParams {

View File

@ -6,6 +6,7 @@ public protocol ContactSelectionController: ViewController {
var result: Signal<([ContactListPeer], ContactListAction, Bool, Int32?, NSAttributedString?)?, NoError> { get }
var displayProgress: Bool { get set }
var dismissed: (() -> Void)? { get set }
var presentScheduleTimePicker: (@escaping (Int32) -> Void) -> Void { get set }
func dismissSearch()
}

View File

@ -1,6 +1,5 @@
import Foundation
import Contacts
import Postbox
import TelegramCore
public final class DeviceContactPhoneNumberData: Equatable {
@ -190,18 +189,18 @@ public let phonebookUsernamePathPrefix = "@id"
private let phonebookUsernamePrefix = "https://t.me/" + phonebookUsernamePathPrefix
public extension DeviceContactUrlData {
convenience init(appProfile: PeerId) {
convenience init(appProfile: EnginePeer.Id) {
self.init(label: "Telegram", value: "\(phonebookUsernamePrefix)\(appProfile.id._internalGetInt64Value())")
}
}
public func parseAppSpecificContactReference(_ value: String) -> PeerId? {
public func parseAppSpecificContactReference(_ value: String) -> EnginePeer.Id? {
if !value.hasPrefix(phonebookUsernamePrefix) {
return nil
}
let idString = String(value[value.index(value.startIndex, offsetBy: phonebookUsernamePrefix.count)...])
if let id = Int64(idString) {
return PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(id))
return EnginePeer.Id(namespace: Namespaces.Peer.CloudUser, id: EnginePeer.Id.Id._internalFromInt64Value(id))
}
return nil
}
@ -466,8 +465,8 @@ public extension DeviceContactExtendedData {
}
public extension DeviceContactExtendedData {
convenience init?(peer: Peer) {
guard let user = peer as? TelegramUser else {
convenience init?(peer: EnginePeer) {
guard case let .user(user) = peer else {
return nil
}
var phoneNumbers: [DeviceContactPhoneNumberData] = []

View File

@ -1,5 +1,4 @@
import Foundation
import Postbox
import TelegramCore
import TelegramUIPreferences
import SwiftSignalKit
@ -12,10 +11,10 @@ public protocol DeviceContactDataManager: AnyObject {
func basicDataForNormalizedPhoneNumber(_ normalizedNumber: DeviceContactNormalizedPhoneNumber) -> Signal<[(DeviceContactStableId, DeviceContactBasicData)], NoError>
func extendedData(stableId: DeviceContactStableId) -> Signal<DeviceContactExtendedData?, NoError>
func importable() -> Signal<[DeviceContactNormalizedPhoneNumber: ImportableDeviceContactData], NoError>
func appSpecificReferences() -> Signal<[PeerId: DeviceContactBasicDataWithReference], NoError>
func search(query: String) -> Signal<[DeviceContactStableId: (DeviceContactBasicData, PeerId?)], NoError>
func appSpecificReferences() -> Signal<[EnginePeer.Id: DeviceContactBasicDataWithReference], NoError>
func search(query: String) -> Signal<[DeviceContactStableId: (DeviceContactBasicData, EnginePeer.Id?)], NoError>
func appendContactData(_ contactData: DeviceContactExtendedData, to stableId: DeviceContactStableId) -> Signal<DeviceContactExtendedData?, NoError>
func appendPhoneNumber(_ phoneNumber: DeviceContactPhoneNumberData, to stableId: DeviceContactStableId) -> Signal<DeviceContactExtendedData?, NoError>
func createContactWithData(_ contactData: DeviceContactExtendedData) -> Signal<(DeviceContactStableId, DeviceContactExtendedData)?, NoError>
func deleteContactWithAppSpecificReference(peerId: PeerId) -> Signal<Never, NoError>
func deleteContactWithAppSpecificReference(peerId: EnginePeer.Id) -> Signal<Never, NoError>
}

View File

@ -1,6 +1,5 @@
import Foundation
import TelegramCore
import Postbox
import TelegramUIPreferences
import SwiftSignalKit

View File

@ -86,8 +86,15 @@ public struct FetchManagerPriorityKey: Comparable {
}
}
public enum FetchManagerLocation: Hashable {
public enum FetchManagerLocation: Hashable, CustomStringConvertible {
case chat(PeerId)
public var description: String {
switch self {
case let .chat(peerId):
return "chat:\(peerId)"
}
}
}
public enum FetchManagerForegroundDirection {

View File

@ -18,7 +18,7 @@ public func isMediaStreamable(message: Message, media: TelegramMediaFile) -> Boo
return false
}
for attribute in media.attributes {
if case let .Video(_, _, flags) = attribute {
if case let .Video(_, _, flags, _) = attribute {
if flags.contains(.supportsStreaming) {
return true
}
@ -41,7 +41,7 @@ public func isMediaStreamable(media: TelegramMediaFile) -> Bool {
return false
}
for attribute in media.attributes {
if case let .Video(_, _, flags) = attribute {
if case let .Video(_, _, flags, _) = attribute {
if flags.contains(.supportsStreaming) {
return true
}

View File

@ -95,8 +95,8 @@ public enum PeerMessagesPlaylistLocation: Equatable, SharedMediaPlaylistLocation
}
}
public func peerMessageMediaPlayerType(_ message: Message) -> MediaManagerPlayerType? {
func extractFileMedia(_ message: Message) -> TelegramMediaFile? {
public func peerMessageMediaPlayerType(_ message: EngineMessage) -> MediaManagerPlayerType? {
func extractFileMedia(_ message: EngineMessage) -> TelegramMediaFile? {
var file: TelegramMediaFile?
for media in message.media {
if let media = media as? TelegramMediaFile {
@ -120,7 +120,7 @@ public func peerMessageMediaPlayerType(_ message: Message) -> MediaManagerPlayer
return nil
}
public func peerMessagesMediaPlaylistAndItemId(_ message: Message, isRecentActions: Bool, isGlobalSearch: Bool, isDownloadList: Bool) -> (SharedMediaPlaylistId, SharedMediaPlaylistItemId)? {
public func peerMessagesMediaPlaylistAndItemId(_ message: EngineMessage, isRecentActions: Bool, isGlobalSearch: Bool, isDownloadList: Bool) -> (SharedMediaPlaylistId, SharedMediaPlaylistItemId)? {
if isGlobalSearch && !isDownloadList {
return (PeerMessagesMediaPlaylistId.custom, PeerMessagesMediaPlaylistItemId(messageId: message.id, messageIndex: message.index))
} else if isRecentActions && !isDownloadList {

View File

@ -31,7 +31,7 @@ public final class OpenChatMessageParams {
public let modal: Bool
public let dismissInput: () -> Void
public let present: (ViewController, Any?) -> Void
public let transitionNode: (MessageId, Media) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?
public let transitionNode: (MessageId, Media, Bool) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?
public let addToTransitionSurface: (UIView) -> Void
public let openUrl: (String) -> Void
public let openPeer: (Peer, ChatControllerInteractionNavigateToPeer) -> Void
@ -60,7 +60,7 @@ public final class OpenChatMessageParams {
modal: Bool = false,
dismissInput: @escaping () -> Void,
present: @escaping (ViewController, Any?) -> Void,
transitionNode: @escaping (MessageId, Media) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?,
transitionNode: @escaping (MessageId, Media, Bool) -> (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?,
addToTransitionSurface: @escaping (UIView) -> Void,
openUrl: @escaping (String) -> Void,
openPeer: @escaping (Peer, ChatControllerInteractionNavigateToPeer) -> Void,

View File

@ -1,7 +1,6 @@
import Foundation
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
import TelegramPresentationData
@ -48,7 +47,7 @@ public final class PeerSelectionControllerParams {
public let hasContactSelector: Bool
public let hasGlobalSearch: Bool
public let title: String?
public let attemptSelection: ((Peer, Int64?) -> Void)?
public let attemptSelection: ((EnginePeer, Int64?) -> Void)?
public let createNewGroup: (() -> Void)?
public let pretendPresentedInModal: Bool
public let multipleSelection: Bool
@ -57,7 +56,7 @@ public final class PeerSelectionControllerParams {
public let selectForumThreads: Bool
public let hasCreation: Bool
public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, filter: ChatListNodePeersFilter = [.onlyWriteable], requestPeerType: [ReplyMarkupButtonRequestPeerType]? = nil, forumPeerId: EnginePeer.Id? = nil, hasFilters: Bool = false, hasChatListSelector: Bool = true, hasContactSelector: Bool = true, hasGlobalSearch: Bool = true, title: String? = nil, attemptSelection: ((Peer, Int64?) -> Void)? = nil, createNewGroup: (() -> Void)? = nil, pretendPresentedInModal: Bool = false, multipleSelection: Bool = false, forwardedMessageIds: [EngineMessage.Id] = [], hasTypeHeaders: Bool = false, selectForumThreads: Bool = false, hasCreation: Bool = false) {
public init(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)? = nil, filter: ChatListNodePeersFilter = [.onlyWriteable], requestPeerType: [ReplyMarkupButtonRequestPeerType]? = nil, forumPeerId: EnginePeer.Id? = nil, hasFilters: Bool = false, hasChatListSelector: Bool = true, hasContactSelector: Bool = true, hasGlobalSearch: Bool = true, title: String? = nil, attemptSelection: ((EnginePeer, Int64?) -> Void)? = nil, createNewGroup: (() -> Void)? = nil, pretendPresentedInModal: Bool = false, multipleSelection: Bool = false, forwardedMessageIds: [EngineMessage.Id] = [], hasTypeHeaders: Bool = false, selectForumThreads: Bool = false, hasCreation: Bool = false) {
self.context = context
self.updatedPresentationData = updatedPresentationData
self.filter = filter
@ -87,8 +86,8 @@ public enum AttachmentTextInputPanelSendMode {
}
public protocol PeerSelectionController: ViewController {
var peerSelected: ((Peer, Int64?) -> Void)? { get set }
var multiplePeersSelected: (([Peer], [PeerId: Peer], NSAttributedString, AttachmentTextInputPanelSendMode, ChatInterfaceForwardOptionsState?) -> Void)? { get set }
var peerSelected: ((EnginePeer, Int64?) -> Void)? { get set }
var multiplePeersSelected: (([EnginePeer], [EnginePeer.Id: EnginePeer], NSAttributedString, AttachmentTextInputPanelSendMode, ChatInterfaceForwardOptionsState?) -> Void)? { get set }
var inProgress: Bool { get set }
var customDismiss: (() -> Void)? { get set }
}

View File

@ -1,24 +1,23 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Postbox
import TelegramCore
import SwiftSignalKit
import TelegramAudio
public enum RequestCallResult {
case requested
case alreadyInProgress(PeerId?)
case alreadyInProgress(EnginePeer.Id?)
}
public enum JoinGroupCallManagerResult {
case joined
case alreadyInProgress(PeerId?)
case alreadyInProgress(EnginePeer.Id?)
}
public enum RequestScheduleGroupCallResult {
case success
case alreadyInProgress(PeerId?)
case alreadyInProgress(EnginePeer.Id?)
}
public struct CallAuxiliaryServer {
@ -135,11 +134,11 @@ public protocol PresentationCall: AnyObject {
var context: AccountContext { get }
var isIntegratedWithCallKit: Bool { get }
var internalId: CallSessionInternalId { get }
var peerId: PeerId { get }
var peerId: EnginePeer.Id { get }
var isOutgoing: Bool { get }
var isVideo: Bool { get }
var isVideoPossible: Bool { get }
var peer: Peer? { get }
var peer: EnginePeer? { get }
var state: Signal<PresentationCallState, NoError> { get }
var audioLevel: Signal<Float, NoError> { get }
@ -199,10 +198,10 @@ public struct PresentationGroupCallState: Equatable {
case muted
}
public var myPeerId: PeerId
public var myPeerId: EnginePeer.Id
public var networkState: NetworkState
public var canManageCall: Bool
public var adminIds: Set<PeerId>
public var adminIds: Set<EnginePeer.Id>
public var muteState: GroupCallParticipantsContext.Participant.MuteState?
public var defaultParticipantMuteState: DefaultParticipantMuteState?
public var recordingStartTimestamp: Int32?
@ -214,10 +213,10 @@ public struct PresentationGroupCallState: Equatable {
public var isVideoWatchersLimitReached: Bool
public init(
myPeerId: PeerId,
myPeerId: EnginePeer.Id,
networkState: NetworkState,
canManageCall: Bool,
adminIds: Set<PeerId>,
adminIds: Set<EnginePeer.Id>,
muteState: GroupCallParticipantsContext.Participant.MuteState?,
defaultParticipantMuteState: DefaultParticipantMuteState?,
recordingStartTimestamp: Int32?,
@ -249,14 +248,14 @@ public struct PresentationGroupCallSummaryState: Equatable {
public var participantCount: Int
public var callState: PresentationGroupCallState
public var topParticipants: [GroupCallParticipantsContext.Participant]
public var activeSpeakers: Set<PeerId>
public var activeSpeakers: Set<EnginePeer.Id>
public init(
info: GroupCallInfo?,
participantCount: Int,
callState: PresentationGroupCallState,
topParticipants: [GroupCallParticipantsContext.Participant],
activeSpeakers: Set<PeerId>
activeSpeakers: Set<EnginePeer.Id>
) {
self.info = info
self.participantCount = participantCount
@ -298,13 +297,13 @@ public enum PresentationGroupCallMuteAction: Equatable {
public struct PresentationGroupCallMembers: Equatable {
public var participants: [GroupCallParticipantsContext.Participant]
public var speakingParticipants: Set<PeerId>
public var speakingParticipants: Set<EnginePeer.Id>
public var totalCount: Int
public var loadMoreToken: String?
public init(
participants: [GroupCallParticipantsContext.Participant],
speakingParticipants: Set<PeerId>,
speakingParticipants: Set<EnginePeer.Id>,
totalCount: Int,
loadMoreToken: String?
) {
@ -316,13 +315,13 @@ public struct PresentationGroupCallMembers: Equatable {
}
public final class PresentationGroupCallMemberEvent {
public let peer: Peer
public let peer: EnginePeer
public let isContact: Bool
public let isInChatList: Bool
public let canUnmute: Bool
public let joined: Bool
public init(peer: Peer, isContact: Bool, isInChatList: Bool, canUnmute: Bool, joined: Bool) {
public init(peer: EnginePeer, isContact: Bool, isInChatList: Bool, canUnmute: Bool, joined: Bool) {
self.peer = peer
self.isContact = isContact
self.isInChatList = isInChatList
@ -395,7 +394,7 @@ public protocol PresentationGroupCall: AnyObject {
var account: Account { get }
var accountContext: AccountContext { get }
var internalId: CallSessionInternalId { get }
var peerId: PeerId { get }
var peerId: EnginePeer.Id { get }
var hasVideo: Bool { get }
var hasScreencast: Bool { get }
@ -412,20 +411,20 @@ public protocol PresentationGroupCall: AnyObject {
var stateVersion: Signal<Int, NoError> { get }
var summaryState: Signal<PresentationGroupCallSummaryState?, NoError> { get }
var members: Signal<PresentationGroupCallMembers?, NoError> { get }
var audioLevels: Signal<[(PeerId, UInt32, Float, Bool)], NoError> { get }
var audioLevels: Signal<[(EnginePeer.Id, UInt32, Float, Bool)], NoError> { get }
var myAudioLevel: Signal<Float, NoError> { get }
var isMuted: Signal<Bool, NoError> { get }
var isNoiseSuppressionEnabled: Signal<Bool, NoError> { get }
var memberEvents: Signal<PresentationGroupCallMemberEvent, NoError> { get }
var reconnectedAsEvents: Signal<Peer, NoError> { get }
var reconnectedAsEvents: Signal<EnginePeer, NoError> { get }
func toggleScheduledSubscription(_ subscribe: Bool)
func schedule(timestamp: Int32)
func startScheduled()
func reconnect(with invite: String)
func reconnect(as peerId: PeerId)
func reconnect(as peerId: EnginePeer.Id)
func leave(terminateIfPossible: Bool) -> Signal<Bool, NoError>
func toggleIsMuted()
@ -438,20 +437,20 @@ public protocol PresentationGroupCall: AnyObject {
func disableScreencast()
func switchVideoCamera()
func updateDefaultParticipantsAreMuted(isMuted: Bool)
func setVolume(peerId: PeerId, volume: Int32, sync: Bool)
func setVolume(peerId: EnginePeer.Id, volume: Int32, sync: Bool)
func setRequestedVideoList(items: [PresentationGroupCallRequestedVideo])
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func playTone(_ tone: PresentationGroupCallTone)
func updateMuteState(peerId: PeerId, isMuted: Bool) -> GroupCallParticipantsContext.Participant.MuteState?
func updateMuteState(peerId: EnginePeer.Id, isMuted: Bool) -> GroupCallParticipantsContext.Participant.MuteState?
func setShouldBeRecording(_ shouldBeRecording: Bool, title: String?, videoOrientation: Bool?)
func updateTitle(_ title: String)
func invitePeer(_ peerId: PeerId) -> Bool
func removedPeer(_ peerId: PeerId)
var invitedPeers: Signal<[PeerId], NoError> { get }
func invitePeer(_ peerId: EnginePeer.Id) -> Bool
func removedPeer(_ peerId: EnginePeer.Id)
var invitedPeers: Signal<[EnginePeer.Id], NoError> { get }
var inviteLinks: Signal<GroupCallInviteLinks?, NoError> { get }
@ -464,8 +463,9 @@ public protocol PresentationGroupCall: AnyObject {
public protocol PresentationCallManager: AnyObject {
var currentCallSignal: Signal<PresentationCall?, NoError> { get }
var currentGroupCallSignal: Signal<PresentationGroupCall?, NoError> { get }
var hasActiveCall: Bool { get }
func requestCall(context: AccountContext, peerId: PeerId, isVideo: Bool, endCurrentIfAny: Bool) -> RequestCallResult
func joinGroupCall(context: AccountContext, peerId: PeerId, invite: String?, requestJoinAsPeerId: ((@escaping (PeerId?) -> Void) -> Void)?, initialCall: EngineGroupCallDescription, endCurrentIfAny: Bool) -> JoinGroupCallManagerResult
func scheduleGroupCall(context: AccountContext, peerId: PeerId, endCurrentIfAny: Bool) -> RequestScheduleGroupCallResult
func requestCall(context: AccountContext, peerId: EnginePeer.Id, isVideo: Bool, endCurrentIfAny: Bool) -> RequestCallResult
func joinGroupCall(context: AccountContext, peerId: EnginePeer.Id, invite: String?, requestJoinAsPeerId: ((@escaping (EnginePeer.Id?) -> Void) -> Void)?, initialCall: EngineGroupCallDescription, endCurrentIfAny: Bool) -> JoinGroupCallManagerResult
func scheduleGroupCall(context: AccountContext, peerId: EnginePeer.Id, endCurrentIfAny: Bool) -> RequestScheduleGroupCallResult
}

View File

@ -1,6 +1,5 @@
import Foundation
import TelegramCore
import Postbox
import TelegramUIPreferences
import SwiftSignalKit
import UniversalMediaPlayer
@ -59,8 +58,8 @@ public struct SharedMediaPlaybackAlbumArt: Equatable {
public enum SharedMediaPlaybackDisplayData: Equatable {
case music(title: String?, performer: String?, albumArt: SharedMediaPlaybackAlbumArt?, long: Bool, caption: NSAttributedString?)
case voice(author: Peer?, peer: Peer?)
case instantVideo(author: Peer?, peer: Peer?, timestamp: Int32)
case voice(author: EnginePeer?, peer: EnginePeer?)
case instantVideo(author: EnginePeer?, peer: EnginePeer?, timestamp: Int32)
public static func ==(lhs: SharedMediaPlaybackDisplayData, rhs: SharedMediaPlaybackDisplayData) -> Bool {
switch lhs {
@ -71,13 +70,13 @@ public enum SharedMediaPlaybackDisplayData: Equatable {
return false
}
case let .voice(lhsAuthor, lhsPeer):
if case let .voice(rhsAuthor, rhsPeer) = rhs, arePeersEqual(lhsAuthor, rhsAuthor), arePeersEqual(lhsPeer, rhsPeer) {
if case let .voice(rhsAuthor, rhsPeer) = rhs, lhsAuthor == rhsAuthor, lhsPeer == rhsPeer {
return true
} else {
return false
}
case let .instantVideo(lhsAuthor, lhsPeer, lhsTimestamp):
if case let .instantVideo(rhsAuthor, rhsPeer, rhsTimestamp) = rhs, arePeersEqual(lhsAuthor, rhsAuthor), arePeersEqual(lhsPeer, rhsPeer), lhsTimestamp == rhsTimestamp {
if case let .instantVideo(rhsAuthor, rhsPeer, rhsTimestamp) = rhs, lhsAuthor == rhsAuthor, lhsPeer == rhsPeer, lhsTimestamp == rhsTimestamp {
return true
} else {
return false
@ -125,10 +124,10 @@ public func areSharedMediaPlaylistItemIdsEqual(_ lhs: SharedMediaPlaylistItemId?
}
public struct PeerMessagesMediaPlaylistItemId: SharedMediaPlaylistItemId {
public let messageId: MessageId
public let messageIndex: MessageIndex
public let messageId: EngineMessage.Id
public let messageIndex: EngineMessage.Index
public init(messageId: MessageId, messageIndex: MessageIndex) {
public init(messageId: EngineMessage.Id, messageIndex: EngineMessage.Index) {
self.messageId = messageId
self.messageIndex = messageIndex
}

View File

@ -23,6 +23,7 @@ public protocol UniversalVideoContentNode: AnyObject {
func setSoundEnabled(_ value: Bool)
func seek(_ timestamp: Double)
func playOnceWithSound(playAndRecord: Bool, seek: MediaPlayerSeek, actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
func continueWithOverridingAmbientMode(isAmbient: Bool)
func setForceAudioToSpeaker(_ forceAudioToSpeaker: Bool)
func continuePlayingWithoutSound(actionAtEnd: MediaPlayerPlayOnceWithSoundActionAtEnd)
func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool)
@ -37,7 +38,7 @@ public protocol UniversalVideoContentNode: AnyObject {
public protocol UniversalVideoContent {
var id: AnyHashable { get }
var dimensions: CGSize { get }
var duration: Int32 { get }
var duration: Double { get }
func makeContentNode(postbox: Postbox, audioSession: ManagedAudioSession) -> UniversalVideoContentNode & ASDisplayNode
@ -283,6 +284,14 @@ public final class UniversalVideoNode: ASDisplayNode {
})
}
public func continueWithOverridingAmbientMode(isAmbient: Bool) {
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode = contentNode {
contentNode.continueWithOverridingAmbientMode(isAmbient: isAmbient)
}
})
}
public func setContinuePlayingWithoutSoundOnLostAudioSession(_ value: Bool) {
self.manager.withUniversalVideoContent(id: self.content.id, { contentNode in
if let contentNode = contentNode {

View File

@ -1,6 +1,6 @@
import Foundation
import SwiftSignalKit
import Postbox
import TelegramCore
public struct WatchRunningTasks: Equatable {
public let running: Bool
@ -18,6 +18,6 @@ public struct WatchRunningTasks: Equatable {
public protocol WatchManager: AnyObject {
var watchAppInstalled: Signal<Bool, NoError> { get }
var navigateToMessageRequested: Signal<MessageId, NoError> { get }
var navigateToMessageRequested: Signal<EngineMessage.Id, NoError> { get }
var runningTasks: Signal<WatchRunningTasks?, NoError> { get }
}

View File

@ -17,6 +17,7 @@ swift_library(
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/AccountContext:AccountContext",
"//submodules/Markdown",
],
visibility = [
"//visibility:public",

View File

@ -7,6 +7,7 @@ import TelegramCore
import TelegramPresentationData
import TelegramUIPreferences
import AccountContext
import Markdown
public final class AdInfoScreen: ViewController {
private final class Node: ViewControllerTracingNode {
@ -84,9 +85,16 @@ public final class AdInfoScreen: ViewController {
self.scrollNode.view.contentInsetAdjustmentBehavior = .never
}
var openUrl: (() -> Void)?
var openUrl: ((String) -> Void)?
let rawText = self.presentationData.strings.SponsoredMessageInfoScreen_Text
#if DEBUG && false
let rawText = "First Line\n**Bold Text** [Description](http://google.com) text\n[url]\nabcdee"
#else
let rawText = self.presentationData.strings.SponsoredMessageInfoScreen_MarkdownText
#endif
let defaultUrl = self.presentationData.strings.SponsoredMessageInfo_Url
var items: [Item] = []
var didAddUrl = false
for component in rawText.components(separatedBy: "[url]") {
@ -100,20 +108,40 @@ public final class AdInfoScreen: ViewController {
let textNode = ImmediateTextNode()
textNode.maximumNumberOfLines = 0
textNode.attributedText = NSAttributedString(string: itemText, font: Font.regular(16.0), textColor: self.presentationData.theme.list.itemPrimaryTextColor)
textNode.attributedText = parseMarkdownIntoAttributedString(itemText, attributes: MarkdownAttributes(
body: MarkdownAttributeSet(font: Font.regular(16.0), textColor: self.presentationData.theme.list.itemPrimaryTextColor),
bold: MarkdownAttributeSet(font: Font.semibold(16.0), textColor: self.presentationData.theme.list.itemPrimaryTextColor),
link: MarkdownAttributeSet(font: Font.regular(16.0), textColor: self.presentationData.theme.list.itemAccentColor),
linkAttribute: { url in
return ("URL", url)
}
))
items.append(.text(textNode))
textNode.highlightAttributeAction = { attributes in
if let _ = attributes[NSAttributedString.Key(rawValue: "URL")] {
return NSAttributedString.Key(rawValue: "URL")
} else {
return nil
}
}
textNode.tapAttributeAction = { attributes, _ in
if let value = attributes[NSAttributedString.Key(rawValue: "URL")] as? String {
openUrl?(value)
}
}
textNode.linkHighlightColor = self.presentationData.theme.list.itemAccentColor.withAlphaComponent(0.5)
if !didAddUrl {
didAddUrl = true
items.append(.link(LinkNode(text: self.presentationData.strings.SponsoredMessageInfo_Url, color: self.presentationData.theme.list.itemAccentColor, action: {
openUrl?()
openUrl?(defaultUrl)
})))
}
}
if !didAddUrl {
didAddUrl = true
items.append(.link(LinkNode(text: self.presentationData.strings.SponsoredMessageInfo_Url, color: self.presentationData.theme.list.itemAccentColor, action: {
openUrl?()
openUrl?(defaultUrl)
})))
}
self.items = items
@ -133,11 +161,11 @@ public final class AdInfoScreen: ViewController {
}
}
openUrl = { [weak self] in
openUrl = { [weak self] url in
guard let strongSelf = self else {
return
}
strongSelf.context.sharedContext.applicationBindings.openUrl(strongSelf.presentationData.strings.SponsoredMessageInfo_Url)
strongSelf.context.sharedContext.applicationBindings.openUrl(url)
}
}

View File

@ -277,10 +277,14 @@ public final class AnimatedAvatarSetNode: ASDisplayNode {
guard let itemNode = self.contentNodes.removeValue(forKey: key) else {
continue
}
itemNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak itemNode] _ in
itemNode?.removeFromSupernode()
})
itemNode.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, removeOnCompletion: false)
if animated {
itemNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak itemNode] _ in
itemNode?.removeFromSupernode()
})
itemNode.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, removeOnCompletion: false)
} else {
itemNode.removeFromSupernode()
}
}
return CGSize(width: contentWidth, height: contentHeight)

View File

@ -277,7 +277,7 @@ public func makeVideoStickerDirectFrameSource(queue: Queue, path: String, width:
return VideoStickerDirectFrameSource(queue: queue, path: path, width: width, height: height, cachePathPrefix: cachePathPrefix, unpremultiplyAlpha: unpremultiplyAlpha)
}
final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
public final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
private let queue: Queue
private let path: String
private let width: Int
@ -285,13 +285,13 @@ final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
private let cache: VideoStickerFrameSourceCache?
private let image: UIImage?
private let bytesPerRow: Int
var frameCount: Int
let frameRate: Int
public var frameCount: Int
public let frameRate: Int
fileprivate var currentFrame: Int
private let source: SoftwareVideoSource?
var frameIndex: Int {
public var frameIndex: Int {
if self.frameCount == 0 {
return 0
} else {
@ -299,7 +299,7 @@ final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
}
}
init?(queue: Queue, path: String, width: Int, height: Int, cachePathPrefix: String?, unpremultiplyAlpha: Bool = true) {
public init?(queue: Queue, path: String, width: Int, height: Int, cachePathPrefix: String?, unpremultiplyAlpha: Bool = true) {
self.queue = queue
self.path = path
self.width = width
@ -334,7 +334,7 @@ final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
assert(self.queue.isCurrent())
}
func takeFrame(draw: Bool) -> AnimatedStickerFrame? {
public func takeFrame(draw: Bool) -> AnimatedStickerFrame? {
let frameIndex: Int
if self.frameCount > 0 {
frameIndex = self.currentFrame % self.frameCount
@ -415,11 +415,11 @@ final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
}
}
func skipToEnd() {
public func skipToEnd() {
self.currentFrame = self.frameCount - 1
}
func skipToFrameIndex(_ index: Int) {
public func skipToFrameIndex(_ index: Int) {
self.currentFrame = index
}
}

View File

@ -2998,7 +2998,7 @@ ASDISPLAYNODE_INLINE BOOL subtreeIsRasterized(ASDisplayNode *node) {
if ([self _implementsDisplay]) {
if (nowDisplay) {
[ASDisplayNode scheduleNodeForRecursiveDisplay:self];
} else {
} else if (!self.disableClearContentsOnHide) {
[[self asyncLayer] cancelAsyncDisplay];
//schedule clear contents on next runloop
dispatch_async(dispatch_get_main_queue(), ^{

View File

@ -567,6 +567,8 @@ AS_EXTERN NSInteger const ASDefaultDrawingPriority;
*/
@property BOOL automaticallyRelayoutOnLayoutMarginsChanges;
@property (nonatomic) bool disableClearContentsOnHide;
@end
/**

View File

@ -3,7 +3,6 @@ import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import Postbox
import TelegramCore
import MobileCoreServices
import TelegramPresentationData

View File

@ -80,6 +80,7 @@ final class AttachmentContainer: ASDisplayNode, UIGestureRecognizerDelegate {
})
self.container.clipsToBounds = true
self.container.overflowInset = overflowInset
self.container.shouldAnimateDisappearance = true
super.init()
@ -539,7 +540,15 @@ final class AttachmentContainer: ASDisplayNode, UIGestureRecognizerDelegate {
controller.setIgnoreAppearanceMethodInvocations(false)
controller.viewDidDisappear(transition.isAnimated)
}
if let (layout, _, coveredByModalTransition) = self.validLayout {
self.update(layout: layout, controllers: [], coveredByModalTransition: coveredByModalTransition, transition: .immediate)
}
completion()
var bounds = self.bounds
bounds.origin.y = 0.0
self.bounds = bounds
return transition
}
}

View File

@ -84,6 +84,7 @@ public protocol AttachmentContainable: ViewController {
var cancelPanGesture: () -> Void { get set }
var isContainerPanning: () -> Bool { get set }
var isContainerExpanded: () -> Bool { get set }
var mediaPickerContext: AttachmentMediaPickerContext? { get }
func isContainerPanningUpdated(_ panning: Bool)
@ -206,7 +207,7 @@ public class AttachmentController: ViewController {
private weak var controller: AttachmentController?
private let dim: ASDisplayNode
private let shadowNode: ASImageNode
private let container: AttachmentContainer
fileprivate let container: AttachmentContainer
private let makeEntityInputView: () -> AttachmentTextInputPanelInputView?
let panel: AttachmentPanel
@ -215,7 +216,7 @@ public class AttachmentController: ViewController {
private var validLayout: ContainerViewLayout?
private var modalProgress: CGFloat = 0.0
private var isDismissing = false
fileprivate var isDismissing = false
private let captionDisposable = MetaDisposable()
private let mediaSelectionCountDisposable = MetaDisposable()
@ -312,6 +313,10 @@ public class AttachmentController: ViewController {
self.container.updateModalProgress = { [weak self] progress, transition in
if let strongSelf = self, let layout = strongSelf.validLayout, !strongSelf.isDismissing {
var transition = transition
if strongSelf.container.supernode == nil {
transition = .animated(duration: 0.4, curve: .spring)
}
strongSelf.controller?.updateModalStyleOverlayTransitionFactor(progress, transition: transition)
strongSelf.modalProgress = progress
@ -644,7 +649,7 @@ public class AttachmentController: ViewController {
} else {
ContainedViewLayoutTransition.animated(duration: 0.3, curve: .linear).updateAlpha(node: self.dim, alpha: 1.0)
let targetPosition = self.container.position
let targetPosition = CGPoint(x: layout.size.width / 2.0, y: layout.size.height / 2.0)
let startPosition = targetPosition.offsetBy(dx: 0.0, dy: layout.size.height)
self.container.position = startPosition
@ -673,6 +678,7 @@ public class AttachmentController: ViewController {
self.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak self] _ in
let _ = self?.container.dismiss(transition: .immediate, completion: completion)
self?.animating = false
self?.layer.removeAllAnimations()
})
} else {
let positionTransition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .easeInOut)
@ -740,12 +746,12 @@ public class AttachmentController: ViewController {
let position: CGPoint
let positionY = layout.size.height - size.height - insets.bottom - 40.0
if let sourceRect = controller.getSourceRect?() {
position = CGPoint(x: floor(sourceRect.midX - size.width / 2.0), y: min(positionY, sourceRect.minY - size.height))
position = CGPoint(x: min(layout.size.width - size.width - 28.0, floor(sourceRect.midX - size.width / 2.0)), y: min(positionY, sourceRect.minY - size.height))
} else {
position = CGPoint(x: masterWidth - 174.0, y: positionY)
}
if controller.isStandalone {
if controller.isStandalone && !controller.forceSourceRect {
var containerY = floorToScreenPixels((layout.size.height - size.height) / 2.0)
if let inputHeight = layout.inputHeight, inputHeight > 88.0 {
containerY = layout.size.height - inputHeight - size.height - 80.0
@ -876,7 +882,7 @@ public class AttachmentController: ViewController {
self.container.update(layout: containerLayout, controllers: controllers, coveredByModalTransition: 0.0, transition: self.switchingController ? .immediate : transition)
if self.container.supernode == nil, !controllers.isEmpty && self.container.isReady {
if self.container.supernode == nil, !controllers.isEmpty && self.container.isReady && !self.isDismissing {
self.wrapperNode.addSubnode(self.container)
if fromMenu, let _ = controller.getInputContainerNode() {
@ -928,6 +934,8 @@ public class AttachmentController: ViewController {
fatalError("init(coder:) has not been implemented")
}
public var forceSourceRect = false
fileprivate var isStandalone: Bool {
return self.buttons.contains(.standalone)
}
@ -964,12 +972,17 @@ public class AttachmentController: ViewController {
self?.didDismiss()
self?._dismiss()
completion?()
self?.dismissedFlag = false
self?.node.isDismissing = false
self?.node.container.removeFromSupernode()
})
}
} else {
self.didDismiss()
self._dismiss()
completion?()
self.node.isDismissing = false
self.node.container.removeFromSupernode()
}
}

View File

@ -221,6 +221,13 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
case .phoneLimitExceeded:
text = strongSelf.presentationData.strings.Login_PhoneFloodError
actions.append(TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {}))
case .appOutdated:
text = strongSelf.presentationData.strings.Login_ErrorAppOutdated
let updateUrl = strongSelf.presentationData.strings.InviteText_URL
let sharedContext = strongSelf.sharedContext
actions.append(TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
sharedContext.applicationBindings.openUrl(updateUrl)
}))
case .phoneBanned:
text = strongSelf.presentationData.strings.Login_PhoneBannedError
actions.append(TextAlertAction(type: .genericAction, title: strongSelf.presentationData.strings.Common_OK, action: {}))
@ -581,6 +588,8 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
if let strongSelf = self, let controller = controller {
controller.inProgress = false
var actions: [TextAlertAction] = [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {})]
let text: String
switch error {
case .limitExceeded:
@ -589,6 +598,13 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
text = strongSelf.presentationData.strings.Login_InvalidPhoneError
case .phoneLimitExceeded:
text = strongSelf.presentationData.strings.Login_PhoneFloodError
case .appOutdated:
text = strongSelf.presentationData.strings.Login_ErrorAppOutdated
let updateUrl = strongSelf.presentationData.strings.InviteText_URL
let sharedContext = strongSelf.sharedContext
actions = [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
sharedContext.applicationBindings.openUrl(updateUrl)
})]
case .phoneBanned:
text = strongSelf.presentationData.strings.Login_PhoneBannedError
case .generic:
@ -597,7 +613,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
text = strongSelf.presentationData.strings.Login_NetworkError
}
controller.present(standardTextAlertController(theme: AlertControllerTheme(presentationData: strongSelf.presentationData), title: nil, text: text, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {})]), in: .window(.root))
controller.present(standardTextAlertController(theme: AlertControllerTheme(presentationData: strongSelf.presentationData), title: nil, text: text, actions: actions), in: .window(.root))
}
}))
}

View File

@ -128,8 +128,9 @@ private final class PhoneAndCountryNode: ASDisplayNode {
if let strongSelf = self {
let _ = strongSelf.processNumberChange(number: strongSelf.phoneInputNode.number)
if strongSelf.hasCountry {
strongSelf.hasNumberUpdated?(!strongSelf.phoneInputNode.codeAndNumber.2.isEmpty)
let isServiceNumber = strongSelf.phoneInputNode.number.hasPrefix("+999")
if strongSelf.hasCountry || isServiceNumber {
strongSelf.hasNumberUpdated?(!strongSelf.phoneInputNode.codeAndNumber.2.isEmpty || isServiceNumber)
} else {
strongSelf.hasNumberUpdated?(false)
}

View File

@ -179,7 +179,7 @@ final class AuthorizationSequenceSignUpControllerNode: ASDisplayNode, UITextFiel
self.addPhotoButton.addTarget(self, action: #selector(self.addPhotoPressed), forControlEvents: .touchUpInside)
self.termsNode.linkHighlightColor = self.theme.list.itemAccentColor.withAlphaComponent(0.5)
self.termsNode.linkHighlightColor = self.theme.list.itemAccentColor.withAlphaComponent(0.2)
self.termsNode.highlightAttributeAction = { attributes in
if let _ = attributes[NSAttributedString.Key(rawValue: TelegramTextAttributes.URL)] {
return NSAttributedString.Key(rawValue: TelegramTextAttributes.URL)

View File

@ -87,7 +87,7 @@ public func authorizationNextOptionText(currentType: SentAuthorizationCodeType,
case .flashCall, .missedCall:
return (NSAttributedString(string: strings.Login_SendCodeViaFlashCall, font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
case .fragment:
return (NSAttributedString(string: "Send code via fragment", font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
return (NSAttributedString(string: strings.Login_GetCodeViaFragment, font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
case .none:
return (NSAttributedString(string: strings.Login_HaveNotReceivedCodeInternal, font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
}
@ -100,7 +100,7 @@ public func authorizationNextOptionText(currentType: SentAuthorizationCodeType,
case .flashCall, .missedCall:
return (NSAttributedString(string: strings.Login_SendCodeViaFlashCall, font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
case .fragment:
return (NSAttributedString(string: "Send code via fragment", font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
return (NSAttributedString(string: strings.Login_GetCodeViaFragment, font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
case .none:
return (NSAttributedString(string: strings.Login_HaveNotReceivedCodeInternal, font: Font.regular(16.0), textColor: accentColor, paragraphAlignment: .center), true)
}

View File

@ -20,6 +20,8 @@ swift_library(
"//submodules/Emoji:Emoji",
"//submodules/TinyThumbnail:TinyThumbnail",
"//submodules/FastBlur:FastBlur",
"//submodules/ComponentFlow",
"//submodules/TelegramUI/Components/Stories/AvatarStoryIndicatorComponent",
],
visibility = [
"//visibility:public",

File diff suppressed because it is too large Load Diff

View File

@ -5,7 +5,6 @@ import Display
import SwiftSignalKit
import UniversalMediaPlayer
import TelegramUniversalVideoContent
import Postbox
import TelegramCore
import AccountContext
import ComponentFlow
@ -207,7 +206,7 @@ public final class AvatarVideoNode: ASDisplayNode {
self.backgroundNode.image = nil
let videoId = photo.id?.id ?? peer.id.id._internalGetInt64Value()
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [])]))
let videoFileReference = FileMediaReference.avatarList(peer: peerReference, media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: 0), partialReference: nil, resource: video.resource, previewRepresentations: photo.representations, videoThumbnails: [], immediateThumbnailData: photo.immediateThumbnailData, mimeType: "video/mp4", size: nil, attributes: [.Animated, .Video(duration: 0, size: video.dimensions, flags: [], preloadSize: nil)]))
let videoContent = NativeVideoContent(id: .profileVideo(videoId, nil), userLocation: .other, fileReference: videoFileReference, streamVideo: isMediaStreamable(resource: video.resource) ? .conservative : .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, useLargeThumbnail: true, autoFetchFullSizeThumbnail: true, startTimestamp: video.startTimestamp, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .clear, captureProtected: false, storeAfterDownload: nil)
if videoContent.id != self.videoContent?.id {
self.videoNode?.removeFromSupernode()

View File

@ -577,7 +577,7 @@ private final class RecurrentConfirmationNode: ASDisplayNode {
let checkSize = CGSize(width: 22.0, height: 22.0)
self.textNode.linkHighlightColor = presentationData.theme.list.itemAccentColor.withAlphaComponent(0.3)
self.textNode.linkHighlightColor = presentationData.theme.list.itemAccentColor.withAlphaComponent(0.2)
let attributedText = parseMarkdownIntoAttributedString(
presentationData.strings.Bot_AccepRecurrentInfo(botName).string,

View File

@ -8,7 +8,6 @@ import TelegramPresentationData
import ItemListUI
import PresentationDataUtils
import PhotoResources
import Postbox
class BotCheckoutHeaderItem: ListViewItem, ItemListItem {
let account: Account
@ -168,7 +167,7 @@ class BotCheckoutHeaderItemNode: ListViewItemNode {
var imageApply: (() -> Void)?
var updatedImageSignal: Signal<(TransformImageArguments) -> DrawingContext?, NoError>?
var updatedFetchSignal: Signal<FetchResourceSourceType, FetchResourceError>?
var updatedFetchSignal: Signal<Never, NoError>?
if let photo = item.invoice.photo, let dimensions = photo.dimensions {
let arguments = TransformImageArguments(corners: ImageCorners(radius: 4.0), imageSize: dimensions.cgSize.aspectFilled(imageSize), boundingSize: imageSize, intrinsicInsets: UIEdgeInsets(), emptyColor: item.theme.list.mediaPlaceholderColor)
imageApply = makeImageLayout(arguments)
@ -184,6 +183,10 @@ class BotCheckoutHeaderItemNode: ListViewItemNode {
break
}
updatedFetchSignal = fetchedMediaResource(mediaBox: item.account.postbox.mediaBox, userLocation: userLocation, userContentType: .image, reference: .standalone(resource: photo.resource))
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
}
}
}
@ -191,7 +194,9 @@ class BotCheckoutHeaderItemNode: ListViewItemNode {
let (botNameLayout, botNameApply) = makeBotNameLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.botName, font: textFont, textColor: item.theme.list.itemSecondaryTextColor), backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: maxTextWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (textLayout, textApply) = makeTextLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.invoice.description, font: textFont, textColor: textColor), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: maxTextWidth, height: maxTextHeight - titleLayout.size.height - titleTextSpacing - botNameLayout.size.height - textBotNameSpacing), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let textLayoutMaxHeight: CGFloat = maxTextHeight - titleLayout.size.height - titleTextSpacing - botNameLayout.size.height - textBotNameSpacing
let textArguments = TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.invoice.description, font: textFont, textColor: textColor), backgroundColor: nil, maximumNumberOfLines: 0, truncationType: .end, constrainedSize: CGSize(width: maxTextWidth, height: textLayoutMaxHeight), alignment: .natural, cutout: nil, insets: UIEdgeInsets())
let (textLayout, textApply) = makeTextLayout(textArguments)
let contentHeight: CGFloat
if let _ = imageApply {

View File

@ -3,7 +3,6 @@ import UIKit
import SwiftSignalKit
import AsyncDisplayKit
import Display
import Postbox
import TelegramCore
import TelegramPresentationData
import AccountContext

View File

@ -3,7 +3,6 @@ import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import Postbox
import TelegramCore
import AccountContext
import TelegramPresentationData
@ -371,7 +370,7 @@ private final class DayComponent: Component {
private var currentSelection: DaySelection?
private(set) var timestamp: Int32?
private(set) var index: MessageIndex?
private(set) var index: EngineMessage.Index?
private var isHighlightingEnabled: Bool = false
init() {
@ -983,12 +982,12 @@ public final class CalendarMessageScreen: ViewController {
private weak var controller: CalendarMessageScreen?
private let context: AccountContext
private let peerId: PeerId
private let peerId: EnginePeer.Id
private let initialTimestamp: Int32
private let enableMessageRangeDeletion: Bool
private let canNavigateToEmptyDays: Bool
private let navigateToOffset: (Int, Int32) -> Void
private let previewDay: (Int32, MessageIndex?, ASDisplayNode, CGRect, ContextGesture) -> Void
private let previewDay: (Int32, EngineMessage.Index?, ASDisplayNode, CGRect, ContextGesture) -> Void
private var presentationData: PresentationData
private var scrollView: Scroller
@ -1019,13 +1018,13 @@ public final class CalendarMessageScreen: ViewController {
init(
controller: CalendarMessageScreen,
context: AccountContext,
peerId: PeerId,
peerId: EnginePeer.Id,
calendarSource: SparseMessageCalendar,
initialTimestamp: Int32,
enableMessageRangeDeletion: Bool,
canNavigateToEmptyDays: Bool,
navigateToOffset: @escaping (Int, Int32) -> Void,
previewDay: @escaping (Int32, MessageIndex?, ASDisplayNode, CGRect, ContextGesture) -> Void
previewDay: @escaping (Int32, EngineMessage.Index?, ASDisplayNode, CGRect, ContextGesture) -> Void
) {
self.controller = controller
self.context = context
@ -1370,7 +1369,7 @@ public final class CalendarMessageScreen: ViewController {
if self.selectionState?.dayRange == nil {
if let selectionToolbarNode = self.selectionToolbarNode {
let toolbarFrame = selectionToolbarNode.view.convert(selectionToolbarNode.bounds, to: self.view)
self.controller?.present(TooltipScreen(account: self.context.account, sharedContext: self.context.sharedContext, text: self.presentationData.strings.MessageCalendar_EmptySelectionTooltip, style: .default, icon: .none, location: .point(toolbarFrame.insetBy(dx: 0.0, dy: 10.0), .bottom), shouldDismissOnTouch: { point in
self.controller?.present(TooltipScreen(account: self.context.account, sharedContext: self.context.sharedContext, text: .plain(text: self.presentationData.strings.MessageCalendar_EmptySelectionTooltip), style: .default, icon: .none, location: .point(toolbarFrame.insetBy(dx: 0.0, dy: 10.0), .bottom), shouldDismissOnTouch: { _, _ in
return .dismiss(consume: false)
}), in: .current)
}
@ -1783,9 +1782,9 @@ public final class CalendarMessageScreen: ViewController {
guard let calendarState = self.calendarState else {
return
}
var messageMap: [Message] = []
var messageMap: [EngineMessage] = []
for (_, entry) in calendarState.messagesByDay {
messageMap.append(entry.message)
messageMap.append(EngineMessage(entry.message))
}
var updatedMedia: [Int: [Int: DayMedia]] = [:]
@ -1805,7 +1804,7 @@ public final class CalendarMessageScreen: ViewController {
mediaLoop: for media in message.media {
switch media {
case _ as TelegramMediaImage, _ as TelegramMediaFile:
updatedMedia[i]![day] = DayMedia(message: EngineMessage(message), media: EngineMedia(media))
updatedMedia[i]![day] = DayMedia(message: message, media: EngineMedia(media))
break mediaLoop
default:
break
@ -1830,13 +1829,13 @@ public final class CalendarMessageScreen: ViewController {
}
private let context: AccountContext
private let peerId: PeerId
private let peerId: EnginePeer.Id
private let calendarSource: SparseMessageCalendar
private let initialTimestamp: Int32
private let enableMessageRangeDeletion: Bool
private let canNavigateToEmptyDays: Bool
private let navigateToDay: (CalendarMessageScreen, Int, Int32) -> Void
private let previewDay: (Int32, MessageIndex?, ASDisplayNode, CGRect, ContextGesture) -> Void
private let previewDay: (Int32, EngineMessage.Index?, ASDisplayNode, CGRect, ContextGesture) -> Void
private var presentationData: PresentationData
@ -1844,13 +1843,13 @@ public final class CalendarMessageScreen: ViewController {
public init(
context: AccountContext,
peerId: PeerId,
peerId: EnginePeer.Id,
calendarSource: SparseMessageCalendar,
initialTimestamp: Int32,
enableMessageRangeDeletion: Bool,
canNavigateToEmptyDays: Bool,
navigateToDay: @escaping (CalendarMessageScreen, Int, Int32) -> Void,
previewDay: @escaping (Int32, MessageIndex?, ASDisplayNode, CGRect, ContextGesture) -> Void
previewDay: @escaping (Int32, EngineMessage.Index?, ASDisplayNode, CGRect, ContextGesture) -> Void
) {
self.context = context
self.peerId = peerId

View File

@ -1,4 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load(
"@build_bazel_rules_apple//apple:resources.bzl",
"apple_resource_bundle",
"apple_resource_group",
)
load("//build-system/bazel-utils:plist_fragment.bzl",
"plist_fragment",
)
filegroup(
name = "CameraMetalResources",
srcs = glob([
"MetalResources/**/*.*",
]),
visibility = ["//visibility:public"],
)
plist_fragment(
name = "CameraBundleInfoPlist",
extension = "plist",
template =
"""
<key>CFBundleIdentifier</key>
<string>org.telegram.Camera</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleName</key>
<string>Camera</string>
"""
)
apple_resource_bundle(
name = "CameraBundle",
infoplists = [
":CameraBundleInfoPlist",
],
resources = [
":CameraMetalResources",
],
)
swift_library(
name = "Camera",
@ -9,10 +49,15 @@ swift_library(
copts = [
"-warnings-as-errors",
],
data = [
":CameraBundle",
],
deps = [
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display",
"//submodules/ImageBlur:ImageBlur",
"//submodules/TelegramCore:TelegramCore",
],
visibility = [
"//visibility:public",

View File

@ -0,0 +1,30 @@
#include <metal_stdlib>
using namespace metal;
// Vertex input/output structure for passing results from vertex shader to fragment shader
struct VertexIO
{
float4 position [[position]];
float2 textureCoord [[user(texturecoord)]];
};
// Vertex shader for a textured quad
vertex VertexIO vertexPassThrough(const device packed_float4 *pPosition [[ buffer(0) ]],
const device packed_float2 *pTexCoords [[ buffer(1) ]],
uint vid [[ vertex_id ]])
{
VertexIO outVertex;
outVertex.position = pPosition[vid];
outVertex.textureCoord = pTexCoords[vid];
return outVertex;
}
// Fragment shader for a textured quad
fragment half4 fragmentPassThrough(VertexIO inputFragment [[ stage_in ]],
texture2d<half> inputTexture [[ texture(0) ]],
sampler samplr [[ sampler(0) ]])
{
return inputTexture.sample(samplr, inputFragment.textureCoord);
}

View File

@ -1,21 +1,123 @@
import Foundation
import UIKit
import SwiftSignalKit
import AVFoundation
import CoreImage
import TelegramCore
final class CameraSession {
private let singleSession: AVCaptureSession?
private let multiSession: Any?
let hasMultiCam: Bool
init() {
if #available(iOS 13.0, *), AVCaptureMultiCamSession.isMultiCamSupported {
self.multiSession = AVCaptureMultiCamSession()
self.singleSession = nil
self.hasMultiCam = true
} else {
self.singleSession = AVCaptureSession()
self.multiSession = nil
self.hasMultiCam = false
}
self.session.sessionPreset = .inputPriority
}
var session: AVCaptureSession {
if #available(iOS 13.0, *), let multiSession = self.multiSession as? AVCaptureMultiCamSession {
return multiSession
} else if let session = self.singleSession {
return session
} else {
fatalError()
}
}
var supportsDualCam: Bool {
return self.multiSession != nil
}
}
final class CameraDeviceContext {
private weak var session: CameraSession?
private weak var previewView: CameraSimplePreviewView?
private let exclusive: Bool
private let additional: Bool
let device = CameraDevice()
let input = CameraInput()
let output: CameraOutput
init(session: CameraSession, exclusive: Bool, additional: Bool) {
self.session = session
self.exclusive = exclusive
self.additional = additional
self.output = CameraOutput(exclusive: exclusive)
}
func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool) {
guard let session = self.session else {
return
}
self.previewView = previewView
self.device.configure(for: session, position: position, dual: !exclusive || additional)
self.device.configureDeviceFormat(maxDimensions: self.preferredMaxDimensions, maxFramerate: self.preferredMaxFrameRate)
self.input.configure(for: session, device: self.device, audio: audio)
self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio, photo: photo, metadata: metadata)
self.output.configureVideoStabilization()
self.device.resetZoom(neutral: self.exclusive || !self.additional)
}
func invalidate() {
guard let session = self.session else {
return
}
self.output.invalidate(for: session)
self.input.invalidate(for: session)
}
private var preferredMaxDimensions: CMVideoDimensions {
if self.additional {
return CMVideoDimensions(width: 1920, height: 1440)
} else {
return CMVideoDimensions(width: 1920, height: 1080)
}
}
private var preferredMaxFrameRate: Double {
if !self.exclusive {
return 30.0
}
switch DeviceModel.current {
case .iPhone14ProMax, .iPhone13ProMax:
return 60.0
default:
return 30.0
}
}
}
private final class CameraContext {
private let queue: Queue
private let session = AVCaptureSession()
private let device: CameraDevice
private let input = CameraInput()
private let output = CameraOutput()
private let session: CameraSession
private var mainDeviceContext: CameraDeviceContext?
private var additionalDeviceContext: CameraDeviceContext?
private let cameraImageContext = CIContext()
private let initialConfiguration: Camera.Configuration
private var invalidated = false
private var previousSampleBuffer: CMSampleBuffer?
var processSampleBuffer: ((CMSampleBuffer) -> Void)?
private let detectedCodesPipe = ValuePipe<[CameraCode]>()
fileprivate let modeChangePromise = ValuePromise<Camera.ModeChange>(.none)
var previewNode: CameraPreviewNode? {
didSet {
@ -23,87 +125,414 @@ private final class CameraContext {
}
}
init(queue: Queue, configuration: Camera.Configuration) {
self.queue = queue
self.initialConfiguration = configuration
self.device = CameraDevice()
self.device.configure(for: self.session, position: configuration.position)
self.session.beginConfiguration()
self.session.sessionPreset = configuration.preset
self.input.configure(for: self.session, device: self.device, audio: configuration.audio)
self.output.configure(for: self.session)
self.session.commitConfiguration()
self.output.processSampleBuffer = { [weak self] sampleBuffer, connection in
if let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video {
self?.previousSampleBuffer = sampleBuffer
self?.previewNode?.enqueue(sampleBuffer)
var previewView: CameraPreviewView?
var simplePreviewView: CameraSimplePreviewView?
var secondaryPreviewView: CameraSimplePreviewView?
private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
private var lastAdditionalSnapshotTimestamp: Double = CACurrentMediaTime()
private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, front: Bool) {
Queue.concurrentDefaultQueue().async {
var ciImage = CIImage(cvImageBuffer: pixelBuffer)
let size = ciImage.extent.size
if front {
var transform = CGAffineTransformMakeScale(1.0, -1.0)
transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
ciImage = ciImage.transformed(by: transform)
}
self?.queue.async {
self?.processSampleBuffer?(sampleBuffer)
ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
if let cgImage = self.cameraImageContext.createCGImage(ciImage, from: ciImage.extent) {
let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
if front {
CameraSimplePreviewView.saveLastFrontImage(uiImage)
} else {
CameraSimplePreviewView.saveLastBackImage(uiImage)
}
}
}
self.output.processCodes = { [weak self] codes in
self?.detectedCodesPipe.putNext(codes)
}
}
init(queue: Queue, session: CameraSession, configuration: Camera.Configuration, metrics: Camera.Metrics, previewView: CameraSimplePreviewView?, secondaryPreviewView: CameraSimplePreviewView?) {
self.queue = queue
self.session = session
self.initialConfiguration = configuration
self.simplePreviewView = previewView
self.secondaryPreviewView = secondaryPreviewView
self.positionValue = configuration.position
self._positionPromise = ValuePromise<Camera.Position>(configuration.position)
self.setDualCameraEnabled(configuration.isDualEnabled, change: false)
NotificationCenter.default.addObserver(
self,
selector: #selector(self.sessionRuntimeError),
name: .AVCaptureSessionRuntimeError,
object: self.session.session
)
}
private var isSessionRunning = false
func startCapture() {
guard !self.session.isRunning else {
guard !self.session.session.isRunning else {
return
}
self.session.startRunning()
self.session.session.startRunning()
self.isSessionRunning = self.session.session.isRunning
}
func stopCapture(invalidate: Bool = false) {
if invalidate {
self.session.beginConfiguration()
self.input.invalidate(for: self.session)
self.output.invalidate(for: self.session)
self.session.commitConfiguration()
self.mainDeviceContext?.device.resetZoom()
self.configure {
self.mainDeviceContext?.invalidate()
}
}
self.session.stopRunning()
self.session.session.stopRunning()
}
func focus(at point: CGPoint) {
self.device.setFocusPoint(point, focusMode: .continuousAutoFocus, exposureMode: .continuousAutoExposure, monitorSubjectAreaChange: true)
}
func setFPS(_ fps: Float64) {
self.device.fps = fps
}
func togglePosition() {
self.session.beginConfiguration()
self.input.invalidate(for: self.session)
let targetPosition: Camera.Position
if case .back = self.device.position {
targetPosition = .front
func focus(at point: CGPoint, autoFocus: Bool) {
let focusMode: AVCaptureDevice.FocusMode
let exposureMode: AVCaptureDevice.ExposureMode
if autoFocus {
focusMode = .continuousAutoFocus
exposureMode = .continuousAutoExposure
} else {
targetPosition = .back
focusMode = .autoFocus
exposureMode = .autoExpose
}
self.device.configure(for: self.session, position: targetPosition)
self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
self.session.commitConfiguration()
self.mainDeviceContext?.device.setFocusPoint(point, focusMode: focusMode, exposureMode: exposureMode, monitorSubjectAreaChange: true)
}
func setFps(_ fps: Float64) {
self.mainDeviceContext?.device.fps = fps
}
private var modeChange: Camera.ModeChange = .none {
didSet {
if oldValue != self.modeChange {
self.modeChangePromise.set(self.modeChange)
}
}
}
private var _positionPromise: ValuePromise<Camera.Position>
var position: Signal<Camera.Position, NoError> {
return self._positionPromise.get()
}
private var positionValue: Camera.Position = .back
func togglePosition() {
guard let mainDeviceContext = self.mainDeviceContext else {
return
}
if self.isDualCameraEnabled == true {
let targetPosition: Camera.Position
if case .back = self.positionValue {
targetPosition = .front
} else {
targetPosition = .back
}
self.positionValue = targetPosition
self._positionPromise.set(targetPosition)
mainDeviceContext.output.markPositionChange(position: targetPosition)
} else {
self.configure {
self.mainDeviceContext?.invalidate()
let targetPosition: Camera.Position
if case .back = mainDeviceContext.device.position {
targetPosition = .front
} else {
targetPosition = .back
}
self.positionValue = targetPosition
self._positionPromise.set(targetPosition)
self.modeChange = .position
mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.queue.after(0.5) {
self.modeChange = .none
}
}
}
}
public func setPosition(_ position: Camera.Position) {
self.configure {
self.mainDeviceContext?.invalidate()
self._positionPromise.set(position)
self.positionValue = position
self.modeChange = .position
self.mainDeviceContext?.configure(position: position, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
self.queue.after(0.5) {
self.modeChange = .none
}
}
}
// Cached state so repeated calls with the same value are no-ops.
private var isDualCameraEnabled: Bool?

/// Enables or disables dual (front + back) camera capture.
///
/// - Parameters:
///   - enabled: Target state; the call is ignored when unchanged.
///   - change: When true, publishes a `.dualCamera` mode change and clears it
///     once the preview(s) report they are previewing (or after a fixed delay
///     when simple preview views are unavailable / pre-iOS 13).
public func setDualCameraEnabled(_ enabled: Bool, change: Bool = true) {
    guard enabled != self.isDualCameraEnabled else {
        return
    }
    self.isDualCameraEnabled = enabled

    if change {
        self.modeChange = .dualCamera
    }

    if enabled {
        // Rebuild as two non-exclusive contexts: back camera on the main
        // preview, front camera on the secondary preview (front context has
        // no audio and no metadata output).
        self.configure {
            self.mainDeviceContext?.invalidate()

            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false)
            self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)

            self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true)
            self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
        }
        self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
            guard let self, let mainDeviceContext = self.mainDeviceContext else {
                return
            }
            self.previewNode?.enqueue(sampleBuffer)

            // Save a preview snapshot at most once every 2.5 s, never while recording.
            let timestamp = CACurrentMediaTime()
            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
                var front = false
                if #available(iOS 13.0, *) {
                    front = connection.inputPorts.first?.sourceDevicePosition == .front
                }
                self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                self.lastSnapshotTimestamp = timestamp
            }
        }
        self.additionalDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
            guard let self, let additionalDeviceContext = self.additionalDeviceContext else {
                return
            }
            // Same throttled snapshot logic for the secondary (front) feed.
            let timestamp = CACurrentMediaTime()
            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording {
                var front = false
                if #available(iOS 13.0, *) {
                    front = connection.inputPorts.first?.sourceDevicePosition == .front
                }
                self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                self.lastAdditionalSnapshotTimestamp = timestamp
            }
        }
    } else {
        // Collapse back to a single exclusive context at the current position.
        self.configure {
            self.mainDeviceContext?.invalidate()
            self.additionalDeviceContext?.invalidate()
            self.additionalDeviceContext = nil

            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false)
            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
        }
        self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
            guard let self, let mainDeviceContext = self.mainDeviceContext else {
                return
            }
            self.previewNode?.enqueue(sampleBuffer)

            let timestamp = CACurrentMediaTime()
            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
                var front = false
                if #available(iOS 13.0, *) {
                    front = connection.inputPorts.first?.sourceDevicePosition == .front
                }
                self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                self.lastSnapshotTimestamp = timestamp
            }
        }
        // Code (QR etc.) detection is only wired in single-camera mode.
        self.mainDeviceContext?.output.processCodes = { [weak self] codes in
            self?.detectedCodesPipe.putNext(codes)
        }
    }

    if change {
        if #available(iOS 13.0, *), let previewView = self.simplePreviewView {
            if enabled, let secondaryPreviewView = self.secondaryPreviewView {
                // Wait until BOTH previews are live before clearing the mode change;
                // the extra 0.1 s delay smooths the visual transition.
                let _ = (combineLatest(previewView.isPreviewing, secondaryPreviewView.isPreviewing)
                |> map { first, second in
                    return first && second
                }
                |> filter { $0 }
                |> take(1)
                |> delay(0.1, queue: self.queue)
                |> deliverOn(self.queue)).start(next: { [weak self] _ in
                    self?.modeChange = .none
                })
            } else {
                let _ = (previewView.isPreviewing
                |> filter { $0 }
                |> take(1)
                |> deliverOn(self.queue)).start(next: { [weak self] _ in
                    self?.modeChange = .none
                })
            }
        } else {
            // No preview readiness signal available; fall back to a fixed delay.
            self.queue.after(0.4) {
                self.modeChange = .none
            }
        }
    }
}
/// Runs `f` inside a begin/commitConfiguration pair on the underlying
/// AVCaptureSession so multiple session mutations are applied atomically.
private func configure(_ f: () -> Void) {
    self.session.session.beginConfiguration()
    f()
    self.session.session.commitConfiguration()
}
var hasTorch: Signal<Bool, NoError> {
return self.device.isFlashAvailable
return self.mainDeviceContext?.device.isTorchAvailable ?? .never()
}
func setTorchActive(_ active: Bool) {
self.device.setTorchActive(active)
self.mainDeviceContext?.device.setTorchActive(active)
}
/// Whether the flash is currently active, forwarded from the main device
/// output; never fires when no context is configured.
var isFlashActive: Signal<Bool, NoError> {
    return self.mainDeviceContext?.output.isFlashActive ?? .never()
}

// Backing storage for the flash mode; mirrors every change into the promise.
private var _flashMode: Camera.FlashMode = .off {
    didSet {
        self._flashModePromise.set(self._flashMode)
    }
}
private var _flashModePromise = ValuePromise<Camera.FlashMode>(.off)

/// Observable flash mode, driven by `setFlashMode`.
var flashMode: Signal<Camera.FlashMode, NoError> {
    return self._flashModePromise.get()
}

func setFlashMode(_ mode: Camera.FlashMode) {
    self._flashMode = mode
}

/// Sets an absolute zoom level on the main device.
func setZoomLevel(_ zoomLevel: CGFloat) {
    self.mainDeviceContext?.device.setZoomLevel(zoomLevel)
}

/// Applies a multiplicative zoom delta (e.g. from a pinch gesture) on the main device.
func setZoomDelta(_ zoomDelta: CGFloat) {
    self.mainDeviceContext?.device.setZoomDelta(zoomDelta)
}
/// Captures a photo, combining both cameras' results when dual mode is active.
///
/// In dual mode the two captures are combined so that the image for the
/// user-selected position comes first; until both finish, `.began` is emitted.
/// Returns `.complete()` when no device context is configured.
func takePhoto() -> Signal<PhotoCaptureResult, NoError> {
    guard let mainDeviceContext = self.mainDeviceContext else {
        return .complete()
    }
    // Derive capture orientation from the preview connection, defaulting to portrait.
    let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
    if let additionalDeviceContext = self.additionalDeviceContext {
        let dualPosition = self.positionValue
        return combineLatest(
            mainDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode),
            additionalDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode)
        ) |> map { main, additional in
            if case let .finished(mainImage, _, _) = main, case let .finished(additionalImage, _, _) = additional {
                // Order the pair so the selected position's image is primary.
                if dualPosition == .front {
                    return .finished(additionalImage, mainImage, CACurrentMediaTime())
                } else {
                    return .finished(mainImage, additionalImage, CACurrentMediaTime())
                }
            } else {
                return .began
            }
        } |> distinctUntilChanged
    } else {
        return mainDeviceContext.output.takePhoto(orientation: orientation, flashMode: self._flashMode)
    }
}
/// Starts video recording and returns a signal of the recording duration.
///
/// Mirrors the current flash mode onto the torch before recording. In dual
/// mode both outputs record; the returned duration follows the main output,
/// and the additional (front) output always records in portrait.
public func startRecording() -> Signal<Double, NoError> {
    guard let mainDeviceContext = self.mainDeviceContext else {
        return .complete()
    }
    mainDeviceContext.device.setTorchMode(self._flashMode)

    let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
    if let additionalDeviceContext = self.additionalDeviceContext {
        return combineLatest(
            mainDeviceContext.output.startRecording(isDualCamera: true, position: self.positionValue, orientation: orientation),
            additionalDeviceContext.output.startRecording(isDualCamera: true, orientation: .portrait)
        ) |> map { value, _ in
            return value
        }
    } else {
        return mainDeviceContext.output.startRecording(isDualCamera: false, orientation: orientation)
    }
}
/// Stops recording and returns the capture result.
///
/// In dual mode, waits for both outputs and merges them into a single
/// `.finished` result, mirroring the additional transition image; anything
/// other than two finished results completes without a value. In single mode,
/// mirrors the transition image when recording with the front camera.
public func stopRecording() -> Signal<VideoCaptureResult, NoError> {
    guard let mainDeviceContext = self.mainDeviceContext else {
        return .complete()
    }
    if let additionalDeviceContext = self.additionalDeviceContext {
        return combineLatest(
            mainDeviceContext.output.stopRecording(),
            additionalDeviceContext.output.stopRecording()
        ) |> mapToSignal { main, additional in
            if case let .finished(mainResult, _, duration, positionChangeTimestamps, _) = main, case let .finished(additionalResult, _, _, _, _) = additional {
                // Mirror the front camera's transition image to match the preview.
                var additionalTransitionImage = additionalResult.1
                if let cgImage = additionalResult.1.cgImage {
                    additionalTransitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
                }
                return .single(.finished(mainResult, (additionalResult.0, additionalTransitionImage, true, additionalResult.3), duration, positionChangeTimestamps, CACurrentMediaTime()))
            } else {
                return .complete()
            }
        }
    } else {
        let mirror = self.positionValue == .front
        return mainDeviceContext.output.stopRecording()
        |> map { result -> VideoCaptureResult in
            if case let .finished(mainResult, _, duration, positionChangeTimestamps, time) = result {
                var transitionImage = mainResult.1
                if mirror, let cgImage = transitionImage.cgImage {
                    transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .leftMirrored)
                }
                return .finished((mainResult.0, transitionImage, mirror, mainResult.3), nil, duration, positionChangeTimestamps, time)
            } else {
                return result
            }
        }
    }
}
/// Stream of machine-readable codes (e.g. QR) detected by the metadata output.
var detectedCodes: Signal<[CameraCode], NoError> {
    return self.detectedCodesPipe.signal()
}

// Intentionally a no-op: interruption recovery is not handled here.
@objc private func sessionInterruptionEnded(notification: NSNotification) {
}
/// Handles AVCaptureSession runtime errors.
///
/// Logs every error; if media services were reset by the system, restarts the
/// session on the camera queue (only if it was running before the error).
@objc private func sessionRuntimeError(notification: NSNotification) {
    guard let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else {
        return
    }
    let error = AVError(_nsError: errorValue)
    Logger.shared.log("Camera", "Runtime error: \(error)")

    if error.code == .mediaServicesWereReset {
        self.queue.async {
            if self.isSessionRunning {
                self.session.session.startRunning()
                self.isSessionRunning = self.session.session.isRunning
            }
        }
    }
}
}
public final class Camera {
@ -111,25 +540,51 @@ public final class Camera {
public typealias Position = AVCaptureDevice.Position
public typealias FocusMode = AVCaptureDevice.FocusMode
public typealias ExposureMode = AVCaptureDevice.ExposureMode
public typealias FlashMode = AVCaptureDevice.FlashMode
public struct Configuration {
let preset: Preset
let position: Position
let isDualEnabled: Bool
let audio: Bool
let photo: Bool
let metadata: Bool
let preferredFps: Double
public init(preset: Preset, position: Position, audio: Bool) {
public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferredFps: Double) {
self.preset = preset
self.position = position
self.isDualEnabled = isDualEnabled
self.audio = audio
self.photo = photo
self.metadata = metadata
self.preferredFps = preferredFps
}
}
private let queue = Queue()
private var contextRef: Unmanaged<CameraContext>?
private weak var previewView: CameraPreviewView?
public init(configuration: Camera.Configuration = Configuration(preset: .hd1920x1080, position: .back, audio: true)) {
public let metrics: Camera.Metrics
public init(configuration: Camera.Configuration = Configuration(preset: .hd1920x1080, position: .back, audio: true, photo: false, metadata: false, preferredFps: 60.0), previewView: CameraSimplePreviewView? = nil, secondaryPreviewView: CameraSimplePreviewView? = nil) {
self.metrics = Camera.Metrics(model: DeviceModel.current)
let session = CameraSession()
session.session.usesApplicationAudioSession = true
session.session.automaticallyConfiguresApplicationAudioSession = false
session.session.automaticallyConfiguresCaptureDeviceForWideColor = false
if let previewView {
previewView.setSession(session.session, autoConnect: !session.hasMultiCam)
}
if let secondaryPreviewView, session.hasMultiCam {
secondaryPreviewView.setSession(session.session, autoConnect: false)
}
self.queue.async {
let context = CameraContext(queue: self.queue, configuration: configuration)
let context = CameraContext(queue: self.queue, session: session, configuration: configuration, metrics: self.metrics, previewView: previewView, secondaryPreviewView: secondaryPreviewView)
self.contextRef = Unmanaged.passRetained(context)
}
}
@ -142,19 +597,41 @@ public final class Camera {
}
/// Starts the capture session on the camera queue.
/// Compiled out entirely on the simulator, where capture is unavailable.
public func startCapture() {
    #if targetEnvironment(simulator)
    #else
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.startCapture()
        }
    }
    #endif
}

/// Stops the capture session on the camera queue.
/// - Parameter invalidate: When true, the context tears down its device
///   configuration as well (semantics defined by `CameraContext.stopCapture`).
public func stopCapture(invalidate: Bool = false) {
    #if targetEnvironment(simulator)
    #else
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.stopCapture(invalidate: invalidate)
        }
    }
    #endif
}
/// Observable camera position, bridged from the context on the camera queue.
public var position: Signal<Camera.Position, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                // NOTE(review): the closure parameter is misnamed `flashMode`
                // (copy/paste leftover) — it carries the position value.
                disposable.set(context.position.start(next: { flashMode in
                    subscriber.putNext(flashMode)
                }, completed: {
                    subscriber.putCompletion()
                }))
            }
        }
        return disposable
    }
}
public func togglePosition() {
@ -165,22 +642,107 @@ public final class Camera {
}
}
public func takePhoto() -> Signal<Void, NoError> {
return .never()
}
public func focus(at point: CGPoint) {
public func setPosition(_ position: Camera.Position) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.focus(at: point)
context.setPosition(position)
}
}
}
public func setFPS(_ fps: Double) {
public func setDualCameraEnabled(_ enabled: Bool) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setFPS(fps)
context.setDualCameraEnabled(enabled)
}
}
}
/// Captures a photo through the camera context, subscribing on the camera queue.
/// Produces nothing if the context has already been released.
public func takePhoto() -> Signal<PhotoCaptureResult, NoError> {
    return Signal { subscriber in
        let resultDisposable = MetaDisposable()
        self.queue.async {
            guard let context = self.contextRef?.takeUnretainedValue() else {
                return
            }
            let inner = context.takePhoto().start(
                next: { subscriber.putNext($0) },
                completed: { subscriber.putCompletion() }
            )
            resultDisposable.set(inner)
        }
        return resultDisposable
    }
}
/// Starts recording via the context; emits the elapsed recording duration.
public func startRecording() -> Signal<Double, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                disposable.set(context.startRecording().start(next: { value in
                    subscriber.putNext(value)
                }, completed: {
                    subscriber.putCompletion()
                }))
            }
        }
        return disposable
    }
}

/// Stops recording via the context; emits the capture result.
public func stopRecording() -> Signal<VideoCaptureResult, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                disposable.set(context.stopRecording().start(next: { value in
                    subscriber.putNext(value)
                }, completed: {
                    subscriber.putCompletion()
                }))
            }
        }
        return disposable
    }
}
/// Focuses (and exposes) at the given point of interest on the camera queue.
/// - Parameter autoFocus: Passed through to the context; exact focus-mode
///   semantics are defined by `CameraContext.focus`.
public func focus(at point: CGPoint, autoFocus: Bool = true) {
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.focus(at: point, autoFocus: autoFocus)
        }
    }
}

/// Sets the target frame rate on the camera queue.
public func setFps(_ fps: Double) {
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.setFps(fps)
        }
    }
}

/// Sets the flash mode on the camera queue.
public func setFlashMode(_ flashMode: FlashMode) {
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.setFlashMode(flashMode)
        }
    }
}

/// Sets an absolute zoom level on the camera queue.
public func setZoomLevel(_ zoomLevel: CGFloat) {
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.setZoomLevel(zoomLevel)
        }
    }
}

/// Applies a multiplicative zoom delta on the camera queue.
public func setZoomDelta(_ zoomDelta: CGFloat) {
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.setZoomDelta(zoomDelta)
        }
    }
}
@ -200,6 +762,39 @@ public final class Camera {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.hasTorch.start(next: { hasTorch in
subscriber.putNext(hasTorch)
}, completed: {
subscriber.putCompletion()
}))
}
}
return disposable
}
}
/// Whether the flash is currently active, bridged from the context on the camera queue.
public var isFlashActive: Signal<Bool, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                disposable.set(context.isFlashActive.start(next: { isFlashActive in
                    subscriber.putNext(isFlashActive)
                }, completed: {
                    subscriber.putCompletion()
                }))
            }
        }
        return disposable
    }
}
public var flashMode: Signal<Camera.FlashMode, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.flashMode.start(next: { flashMode in
subscriber.putNext(flashMode)
}, completed: {
subscriber.putCompletion()
}))
}
@ -222,10 +817,31 @@ public final class Camera {
}
}
public func setProcessSampleBuffer(_ block: ((CMSampleBuffer) -> Void)?) {
public func attachPreviewView(_ view: CameraPreviewView) {
self.previewView = view
let viewRef: Unmanaged<CameraPreviewView> = Unmanaged.passRetained(view)
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.processSampleBuffer = block
context.previewView = viewRef.takeUnretainedValue()
viewRef.release()
} else {
Queue.mainQueue().async {
viewRef.release()
}
}
}
}
/// Attaches a simple preview view to the camera context.
///
/// The view is manually retained so it survives the hop to the camera queue;
/// it is released once handed to the context, or on the main queue if the
/// context is already gone.
public func attachSimplePreviewView(_ view: CameraSimplePreviewView) {
    let viewRef: Unmanaged<CameraSimplePreviewView> = Unmanaged.passRetained(view)
    self.queue.async {
        if let context = self.contextRef?.takeUnretainedValue() {
            context.simplePreviewView = viewRef.takeUnretainedValue()
            viewRef.release()
        } else {
            Queue.mainQueue().async {
                viewRef.release()
            }
        }
    }
}
@ -243,4 +859,41 @@ public final class Camera {
return disposable
}
}
/// Describes an in-flight camera reconfiguration, used by the UI to show
/// transition states.
public enum ModeChange: Equatable {
    case none
    case position
    case dualCamera
}

/// Observable stream of mode-change states, bridged from the context.
/// Note: unlike the other bridged signals, this one never calls
/// `putCompletion` — the context's promise is long-lived.
public var modeChange: Signal<ModeChange, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        self.queue.async {
            if let context = self.contextRef?.takeUnretainedValue() {
                disposable.set(context.modeChangePromise.get().start(next: { value in
                    subscriber.putNext(value)
                }))
            }
        }
        return disposable
    }
}
/// True when the OS supports multi-cam capture, the hardware reports
/// multi-cam capability, and the device is not an iPad.
public static var isDualCameraSupported: Bool {
    guard #available(iOS 13.0, *) else {
        return false
    }
    return AVCaptureMultiCamSession.isMultiCamSupported && !DeviceModel.current.isIpad
}
}
/// Simple container pairing a `Camera` with its preview view, so both can be
/// passed around (and kept alive) together.
public final class CameraHolder {
    public let camera: Camera
    public let previewView: CameraPreviewView

    public init(camera: Camera, previewView: CameraPreviewView) {
        self.camera = camera
        self.previewView = previewView
    }
}

View File

@ -1,32 +1,139 @@
import Foundation
import AVFoundation
import SwiftSignalKit
import TelegramCore
private let defaultFPS: Double = 30.0
final class CameraDevice {
public private(set) var videoDevice: AVCaptureDevice? = nil
public private(set) var audioDevice: AVCaptureDevice? = nil
private var videoDevicePromise = Promise<AVCaptureDevice>()
init() {
}
var position: Camera.Position = .back
func configure(for session: AVCaptureSession, position: Camera.Position) {
self.position = position
if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
self.videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera, .builtInTelephotoCamera], mediaType: .video, position: position).devices.first
} else {
self.videoDevice = AVCaptureDevice.devices(for: .video).filter { $0.position == position }.first
}
deinit {
if let videoDevice = self.videoDevice {
self.videoDevicePromise.set(.single(videoDevice))
self.unsubscribeFromChanges(videoDevice)
}
}
public private(set) var videoDevice: AVCaptureDevice? = nil {
didSet {
if let previousVideoDevice = oldValue {
self.unsubscribeFromChanges(previousVideoDevice)
}
self.videoDevicePromise.set(.single(self.videoDevice))
if let videoDevice = self.videoDevice {
self.subscribeForChanges(videoDevice)
}
}
}
private var videoDevicePromise = Promise<AVCaptureDevice?>()
public private(set) var audioDevice: AVCaptureDevice? = nil
func configure(for session: CameraSession, position: Camera.Position, dual: Bool) {
self.position = position
var selectedDevice: AVCaptureDevice?
if #available(iOS 13.0, *), position != .front && !dual {
if let device = AVCaptureDevice.default(.builtInTripleCamera, for: .video, position: position) {
selectedDevice = device
} else if let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: position) {
selectedDevice = device
} else if let device = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: position) {
selectedDevice = device
} else if let device = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInTelephotoCamera], mediaType: .video, position: position).devices.first {
selectedDevice = device
}
} else {
if selectedDevice == nil {
selectedDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInTelephotoCamera], mediaType: .video, position: position).devices.first
}
}
if selectedDevice == nil, #available(iOS 13.0, *) {
let allDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInTripleCamera, .builtInTelephotoCamera, .builtInDualWideCamera, .builtInTrueDepthCamera, .builtInWideAngleCamera, .builtInUltraWideCamera], mediaType: .video, position: position).devices
Logger.shared.log("Camera", "No device selected, availabled devices: \(allDevices)")
}
self.videoDevice = selectedDevice
self.videoDevicePromise.set(.single(selectedDevice))
self.audioDevice = AVCaptureDevice.default(for: .audio)
}
/// Selects and applies the best capture format within the given limits.
///
/// Scans the device's video formats for the largest resolution not exceeding
/// `maxDimensions`, restricted to 420v (video-range bi-planar) formats whose
/// frame-rate ranges stay at or below 60 fps. Among same-size candidates,
/// formats exposing secondary native-resolution zoom factors (iOS 16+) are
/// preferred. Finally clamps the active frame duration to `maxFramerate`.
func configureDeviceFormat(maxDimensions: CMVideoDimensions, maxFramerate: Double) {
    guard let device = self.videoDevice else {
        return
    }
    self.transaction(device) { device in
        var maxWidth: Int32 = 0
        var maxHeight: Int32 = 0
        var hasSecondaryZoomLevels = false
        var candidates: [AVCaptureDevice.Format] = []
        outer: for format in device.formats {
            // Skip non-video and photo-only formats (private "isPhotoFormat" key).
            if format.mediaType != .video || format.value(forKey: "isPhotoFormat") as? Bool == true {
                continue
            }
            let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
            if dimensions.width >= maxWidth && dimensions.width <= maxDimensions.width && dimensions.height >= maxHeight && dimensions.height <= maxDimensions.height {
                // A strictly wider format resets the candidate set.
                if dimensions.width > maxWidth {
                    hasSecondaryZoomLevels = false
                    candidates.removeAll()
                }
                let subtype = CMFormatDescriptionGetMediaSubType(format.formatDescription)
                if subtype == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange {
                    // Reject formats that can exceed 60 fps.
                    for range in format.videoSupportedFrameRateRanges {
                        if range.maxFrameRate > 60 {
                            continue outer
                        }
                    }
                    maxWidth = dimensions.width
                    maxHeight = dimensions.height
                    if #available(iOS 16.0, *), !format.secondaryNativeResolutionZoomFactors.isEmpty {
                        hasSecondaryZoomLevels = true
                        candidates.append(format)
                    } else if !hasSecondaryZoomLevels {
                        // Only keep plain formats while no zoom-capable one was found.
                        candidates.append(format)
                    }
                }
            }
        }

        if !candidates.isEmpty {
            var bestFormat: AVCaptureDevice.Format?
            outer: for format in candidates {
                for range in format.videoSupportedFrameRateRanges {
                    if range.maxFrameRate > maxFramerate {
                        continue outer
                    }
                    bestFormat = format
                }
            }
            if bestFormat == nil {
                bestFormat = candidates.last
            }
            // Safe force-unwrap: candidates is non-empty, and the fallback above
            // guarantees bestFormat is set.
            device.activeFormat = bestFormat!
            Logger.shared.log("Camera", "Selected format:")
            Logger.shared.log("Camera", bestFormat!.description)
        } else {
            Logger.shared.log("Camera", "No format selected")
        }

        Logger.shared.log("Camera", "Available formats:")
        for format in device.formats {
            Logger.shared.log("Camera", format.description)
        }

        if let targetFPS = device.actualFPS(maxFramerate) {
            device.activeVideoMinFrameDuration = targetFPS.duration
            device.activeVideoMaxFrameDuration = targetFPS.duration
        }
    }
}
func transaction(_ device: AVCaptureDevice, update: (AVCaptureDevice) -> Void) {
if let _ = try? device.lockForConfiguration() {
update(device)
@ -34,16 +141,16 @@ final class CameraDevice {
}
}
private func subscribeForChanges() {
NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaChanged), name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDevice)
private func subscribeForChanges(_ device: AVCaptureDevice) {
NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaChanged), name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: device)
}
private func unsubscribeFromChanges() {
NotificationCenter.default.removeObserver(self, name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: self.videoDevice)
private func unsubscribeFromChanges(_ device: AVCaptureDevice) {
NotificationCenter.default.removeObserver(self, name: Notification.Name.AVCaptureDeviceSubjectAreaDidChange, object: device)
}
@objc private func subjectAreaChanged() {
self.setFocusPoint(CGPoint(x: 0.5, y: 0.5), focusMode: .continuousAutoFocus, exposureMode: .continuousAutoExposure, monitorSubjectAreaChange: false)
}
var fps: Double = defaultFPS {
@ -61,26 +168,13 @@ final class CameraDevice {
}
}
/*var isFlashActive: Signal<Bool, NoError> {
var isTorchAvailable: Signal<Bool, NoError> {
return self.videoDevicePromise.get()
|> mapToSignal { device -> Signal<Bool, NoError> in
return Signal { subscriber in
subscriber.putNext(device.isFlashActive)
let observer = device.observe(\.isFlashActive, options: [.new], changeHandler: { device, _ in
subscriber.putNext(device.isFlashActive)
})
return ActionDisposable {
observer.invalidate()
guard let device else {
return EmptyDisposable
}
}
|> distinctUntilChanged
}
}*/
var isFlashAvailable: Signal<Bool, NoError> {
return self.videoDevicePromise.get()
|> mapToSignal { device -> Signal<Bool, NoError> in
return Signal { subscriber in
subscriber.putNext(device.isFlashAvailable)
let observer = device.observe(\.isFlashAvailable, options: [.new], changeHandler: { device, _ in
subscriber.putNext(device.isFlashAvailable)
@ -97,6 +191,9 @@ final class CameraDevice {
return self.videoDevicePromise.get()
|> mapToSignal { device -> Signal<Bool, NoError> in
return Signal { subscriber in
guard let device else {
return EmptyDisposable
}
subscriber.putNext(device.isAdjustingFocus)
let observer = device.observe(\.isAdjustingFocus, options: [.new], changeHandler: { device, _ in
subscriber.putNext(device.isAdjustingFocus)
@ -122,6 +219,12 @@ final class CameraDevice {
device.focusPointOfInterest = point
device.focusMode = focusMode
}
device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
if abs(device.exposureTargetBias) > 0.0 {
device.setExposureTargetBias(0.0)
}
}
}
@ -144,4 +247,53 @@ final class CameraDevice {
device.torchMode = active ? .on : .off
}
}
/// Mirrors a flash mode onto the torch (used while recording video),
/// applying it only if the device supports the resulting torch mode.
func setTorchMode(_ flashMode: AVCaptureDevice.FlashMode) {
    guard let device = self.videoDevice else {
        return
    }
    self.transaction(device) { device in
        let torchMode: AVCaptureDevice.TorchMode
        switch flashMode {
        case .on: torchMode = .on
        case .auto: torchMode = .auto
        case .off: torchMode = .off
        @unknown default: torchMode = .off
        }
        guard device.isTorchModeSupported(torchMode) else {
            return
        }
        device.torchMode = torchMode
    }
}
/// Sets zoom relative to the device's neutral factor, clamped to [neutral, 10].
func setZoomLevel(_ zoomLevel: CGFloat) {
    guard let device = self.videoDevice else {
        return
    }
    self.transaction(device) { device in
        device.videoZoomFactor = max(device.neutralZoomFactor, min(10.0, device.neutralZoomFactor + zoomLevel))
    }
}

/// Multiplies the current zoom factor by `zoomDelta`, clamped to [1, 10].
func setZoomDelta(_ zoomDelta: CGFloat) {
    guard let device = self.videoDevice else {
        return
    }
    self.transaction(device) { device in
        device.videoZoomFactor = max(1.0, min(10.0, device.videoZoomFactor * zoomDelta))
    }
}

/// Resets zoom to the neutral factor, or to the device minimum when
/// `neutral` is false.
func resetZoom(neutral: Bool = true) {
    guard let device = self.videoDevice else {
        return
    }
    self.transaction(device) { device in
        device.videoZoomFactor = neutral ? device.neutralZoomFactor : device.minAvailableVideoZoomFactor
    }
}
}

View File

@ -1,10 +1,11 @@
import AVFoundation
import TelegramCore
class CameraInput {
private var videoInput: AVCaptureDeviceInput?
var videoInput: AVCaptureDeviceInput?
private var audioInput: AVCaptureDeviceInput?
func configure(for session: AVCaptureSession, device: CameraDevice, audio: Bool) {
func configure(for session: CameraSession, device: CameraDevice, audio: Bool) {
if let videoDevice = device.videoDevice {
self.configureVideoInput(for: session, device: videoDevice)
}
@ -13,32 +14,42 @@ class CameraInput {
}
}
func invalidate(for session: AVCaptureSession) {
for input in session.inputs {
session.removeInput(input)
func invalidate(for session: CameraSession) {
for input in session.session.inputs {
session.session.removeInput(input)
}
}
private func configureVideoInput(for session: AVCaptureSession, device: AVCaptureDevice) {
private func configureVideoInput(for session: CameraSession, device: AVCaptureDevice) {
if let currentVideoInput = self.videoInput {
session.session.removeInput(currentVideoInput)
self.videoInput = nil
}
if let videoInput = try? AVCaptureDeviceInput(device: device) {
if let currentVideoInput = self.videoInput {
session.removeInput(currentVideoInput)
}
self.videoInput = videoInput
if session.canAddInput(videoInput) {
session.addInput(videoInput)
if session.session.canAddInput(videoInput) {
if session.hasMultiCam {
session.session.addInputWithNoConnections(videoInput)
} else {
session.session.addInput(videoInput)
}
} else {
Logger.shared.log("Camera", "Can't add video input")
}
}
}
private func configureAudioInput(for session: AVCaptureSession, device: AVCaptureDevice) {
guard self.audioInput == nil else {
return
private func configureAudioInput(for session: CameraSession, device: AVCaptureDevice) {
if let currentAudioInput = self.audioInput {
session.session.removeInput(currentAudioInput)
self.audioInput = nil
}
if let audioInput = try? AVCaptureDeviceInput(device: device) {
self.audioInput = audioInput
if session.canAddInput(audioInput) {
session.addInput(audioInput)
if session.session.canAddInput(audioInput) {
session.session.addInput(audioInput)
} else {
Logger.shared.log("Camera", "Can't add audio input")
}
}
}

View File

@ -0,0 +1,390 @@
import Foundation
public extension Camera {
    /// Per-device camera capabilities used by the UI (currently: zoom presets).
    /// Models without an explicit entry fall back to `.unknown`.
    enum Metrics {
        case singleCamera
        case iPhone14
        case iPhone14Plus
        case iPhone14Pro
        case iPhone14ProMax
        case unknown

        init(model: DeviceModel) {
            switch model {
            case .iPodTouch1, .iPodTouch2, .iPodTouch3, .iPodTouch4, .iPodTouch5, .iPodTouch6, .iPodTouch7:
                self = .singleCamera
            case .iPhone14:
                self = .iPhone14
            case .iPhone14Plus:
                self = .iPhone14Plus
            case .iPhone14Pro:
                self = .iPhone14Pro
            case .iPhone14ProMax:
                self = .iPhone14ProMax
            case .unknown:
                self = .unknown
            default:
                self = .unknown
            }
        }

        /// Zoom levels to offer in the camera UI for this device class.
        public var zoomLevels: [Float] {
            switch self {
            case .singleCamera:
                return [1.0]
            case .iPhone14:
                return [0.5, 1.0, 2.0]
            case .iPhone14Plus:
                return [0.5, 1.0, 2.0]
            case .iPhone14Pro:
                return [0.5, 1.0, 2.0, 3.0]
            case .iPhone14ProMax:
                return [0.5, 1.0, 2.0, 3.0]
            case .unknown:
                return [1.0, 2.0]
            }
        }
    }
}
enum DeviceModel: CaseIterable, Equatable {
/// Manual `CaseIterable` conformance (required because `.unknown` carries an
/// associated model identifier and therefore has no canonical instance).
///
/// Fix: the original list omitted three declared cases — `.iPhoneXSMax`,
/// `.iPhoneSE2ndGen` and `.iPhoneSE3rdGen` — so model lookups iterating
/// `allCases` could never match those devices. They are now included.
static var allCases: [DeviceModel] {
    return [
        .iPodTouch1,
        .iPodTouch2,
        .iPodTouch3,
        .iPodTouch4,
        .iPodTouch5,
        .iPodTouch6,
        .iPodTouch7,
        .iPhone,
        .iPhone3G,
        .iPhone3GS,
        .iPhone4,
        .iPhone4S,
        .iPhone5,
        .iPhone5C,
        .iPhone5S,
        .iPhone6,
        .iPhone6Plus,
        .iPhone6S,
        .iPhone6SPlus,
        .iPhoneSE,
        .iPhone7,
        .iPhone7Plus,
        .iPhone8,
        .iPhone8Plus,
        .iPhoneX,
        .iPhoneXS,
        .iPhoneXSMax,
        .iPhoneXR,
        .iPhone11,
        .iPhone11Pro,
        .iPhone11ProMax,
        .iPhoneSE2ndGen,
        .iPhone12,
        .iPhone12Mini,
        .iPhone12Pro,
        .iPhone12ProMax,
        .iPhone13,
        .iPhone13Mini,
        .iPhone13Pro,
        .iPhone13ProMax,
        .iPhoneSE3rdGen,
        .iPhone14,
        .iPhone14Plus,
        .iPhone14Pro,
        .iPhone14ProMax
    ]
}
case iPodTouch1
case iPodTouch2
case iPodTouch3
case iPodTouch4
case iPodTouch5
case iPodTouch6
case iPodTouch7
case iPhone
case iPhone3G
case iPhone3GS
case iPhone4
case iPhone4S
case iPhone5
case iPhone5C
case iPhone5S
case iPhone6
case iPhone6Plus
case iPhone6S
case iPhone6SPlus
case iPhoneSE
case iPhone7
case iPhone7Plus
case iPhone8
case iPhone8Plus
case iPhoneX
case iPhoneXS
case iPhoneXSMax
case iPhoneXR
case iPhone11
case iPhone11Pro
case iPhone11ProMax
case iPhoneSE2ndGen
case iPhone12
case iPhone12Mini
case iPhone12Pro
case iPhone12ProMax
case iPhone13
case iPhone13Mini
case iPhone13Pro
case iPhone13ProMax
case iPhoneSE3rdGen
case iPhone14
case iPhone14Plus
case iPhone14Pro
case iPhone14ProMax
case unknown(String)
/// Hardware identifier strings (e.g. "iPhone15,2") matching this model.
/// Several models map to multiple identifiers (regional/modem variants);
/// `.unknown` echoes back the identifier it was constructed with.
var modelId: [String] {
    switch self {
    case .iPodTouch1:
        return ["iPod1,1"]
    case .iPodTouch2:
        return ["iPod2,1"]
    case .iPodTouch3:
        return ["iPod3,1"]
    case .iPodTouch4:
        return ["iPod4,1"]
    case .iPodTouch5:
        return ["iPod5,1"]
    case .iPodTouch6:
        return ["iPod7,1"]
    case .iPodTouch7:
        return ["iPod9,1"]
    case .iPhone:
        return ["iPhone1,1"]
    case .iPhone3G:
        return ["iPhone1,2"]
    case .iPhone3GS:
        return ["iPhone2,1"]
    case .iPhone4:
        return ["iPhone3,1", "iPhone3,2", "iPhone3,3"]
    case .iPhone4S:
        return ["iPhone4,1", "iPhone4,2", "iPhone4,3"]
    case .iPhone5:
        return ["iPhone5,1", "iPhone5,2"]
    case .iPhone5C:
        return ["iPhone5,3", "iPhone5,4"]
    case .iPhone5S:
        return ["iPhone6,1", "iPhone6,2"]
    case .iPhone6:
        return ["iPhone7,2"]
    case .iPhone6Plus:
        return ["iPhone7,1"]
    case .iPhone6S:
        return ["iPhone8,1"]
    case .iPhone6SPlus:
        return ["iPhone8,2"]
    case .iPhoneSE:
        return ["iPhone8,4"]
    case .iPhone7:
        return ["iPhone9,1", "iPhone9,3"]
    case .iPhone7Plus:
        return ["iPhone9,2", "iPhone9,4"]
    case .iPhone8:
        return ["iPhone10,1", "iPhone10,4"]
    case .iPhone8Plus:
        return ["iPhone10,2", "iPhone10,5"]
    case .iPhoneX:
        return ["iPhone10,3", "iPhone10,6"]
    case .iPhoneXS:
        return ["iPhone11,2"]
    case .iPhoneXSMax:
        return ["iPhone11,4", "iPhone11,6"]
    case .iPhoneXR:
        return ["iPhone11,8"]
    case .iPhone11:
        return ["iPhone12,1"]
    case .iPhone11Pro:
        return ["iPhone12,3"]
    case .iPhone11ProMax:
        return ["iPhone12,5"]
    case .iPhoneSE2ndGen:
        return ["iPhone12,8"]
    case .iPhone12:
        return ["iPhone13,2"]
    case .iPhone12Mini:
        return ["iPhone13,1"]
    case .iPhone12Pro:
        return ["iPhone13,3"]
    case .iPhone12ProMax:
        return ["iPhone13,4"]
    case .iPhone13:
        return ["iPhone14,5"]
    case .iPhone13Mini:
        return ["iPhone14,4"]
    case .iPhone13Pro:
        return ["iPhone14,2"]
    case .iPhone13ProMax:
        return ["iPhone14,3"]
    case .iPhoneSE3rdGen:
        return ["iPhone14,6"]
    case .iPhone14:
        return ["iPhone14,7"]
    case .iPhone14Plus:
        return ["iPhone14,8"]
    case .iPhone14Pro:
        return ["iPhone15,2"]
    case .iPhone14ProMax:
        return ["iPhone15,3"]
    case let .unknown(modelId):
        return [modelId]
    }
}
/// The human-readable marketing name for this model.
/// Unrecognized identifiers map to a generic "Unknown …" name by family prefix.
var modelName: String {
    switch self {
    case .iPodTouch1: return "iPod touch 1G"
    case .iPodTouch2: return "iPod touch 2G"
    case .iPodTouch3: return "iPod touch 3G"
    case .iPodTouch4: return "iPod touch 4G"
    case .iPodTouch5: return "iPod touch 5G"
    case .iPodTouch6: return "iPod touch 6G"
    case .iPodTouch7: return "iPod touch 7G"
    case .iPhone: return "iPhone"
    case .iPhone3G: return "iPhone 3G"
    case .iPhone3GS: return "iPhone 3GS"
    case .iPhone4: return "iPhone 4"
    case .iPhone4S: return "iPhone 4S"
    case .iPhone5: return "iPhone 5"
    case .iPhone5C: return "iPhone 5C"
    case .iPhone5S: return "iPhone 5S"
    case .iPhone6: return "iPhone 6"
    case .iPhone6Plus: return "iPhone 6 Plus"
    case .iPhone6S: return "iPhone 6S"
    case .iPhone6SPlus: return "iPhone 6S Plus"
    case .iPhoneSE: return "iPhone SE"
    case .iPhone7: return "iPhone 7"
    case .iPhone7Plus: return "iPhone 7 Plus"
    case .iPhone8: return "iPhone 8"
    case .iPhone8Plus: return "iPhone 8 Plus"
    case .iPhoneX: return "iPhone X"
    case .iPhoneXS: return "iPhone XS"
    case .iPhoneXSMax: return "iPhone XS Max"
    case .iPhoneXR: return "iPhone XR"
    case .iPhone11: return "iPhone 11"
    case .iPhone11Pro: return "iPhone 11 Pro"
    case .iPhone11ProMax: return "iPhone 11 Pro Max"
    case .iPhoneSE2ndGen: return "iPhone SE (2nd gen)"
    case .iPhone12: return "iPhone 12"
    case .iPhone12Mini: return "iPhone 12 mini"
    case .iPhone12Pro: return "iPhone 12 Pro"
    case .iPhone12ProMax: return "iPhone 12 Pro Max"
    case .iPhone13: return "iPhone 13"
    case .iPhone13Mini: return "iPhone 13 mini"
    case .iPhone13Pro: return "iPhone 13 Pro"
    case .iPhone13ProMax: return "iPhone 13 Pro Max"
    case .iPhoneSE3rdGen: return "iPhone SE (3rd gen)"
    case .iPhone14: return "iPhone 14"
    case .iPhone14Plus: return "iPhone 14 Plus"
    case .iPhone14Pro: return "iPhone 14 Pro"
    case .iPhone14ProMax: return "iPhone 14 Pro Max"
    case let .unknown(modelId):
        // Derive a family-specific fallback from the identifier prefix.
        if modelId.hasPrefix("iPhone") {
            return "Unknown iPhone"
        } else if modelId.hasPrefix("iPod") {
            return "Unknown iPod"
        } else if modelId.hasPrefix("iPad") {
            return "Unknown iPad"
        } else {
            return "Unknown Device"
        }
    }
}
/// Whether this model's hardware identifier belongs to the iPad family.
var isIpad: Bool {
    guard let identifier = self.modelId.first else {
        return false
    }
    return identifier.hasPrefix("iPad")
}
/// The model of the device the process is currently running on.
static let current = DeviceModel()

/// Resolves the kernel's machine identifier (via `uname`) to a known model,
/// falling back to `.unknown(identifier)` for unrecognized hardware.
private init() {
    var systemInfo = utsname()
    uname(&systemInfo)
    let machineCode = withUnsafePointer(to: &systemInfo.machine) {
        $0.withMemoryRebound(to: CChar.self, capacity: 1) { pointer in
            String(validatingUTF8: pointer)
        }
    }
    if let machineCode, let matched = DeviceModel.allCases.first(where: { $0.modelId.contains(machineCode) }) {
        self = matched
    } else {
        // Either uname produced no valid UTF-8 string or the identifier is unlisted.
        self = .unknown(machineCode ?? "")
    }
}
}

View File

@ -1,4 +1,36 @@
import Foundation
import AVFoundation
import UIKit
import SwiftSignalKit
import CoreImage
import Vision
import VideoToolbox
import TelegramCore
/// Outcome of a video capture.
/// `finished` carries (path, thumbnail, mirrored, dimensions) for the main video,
/// an optional secondary video, the duration, camera-position change timestamps,
/// and a completion timestamp.
public enum VideoCaptureResult: Equatable {
    case finished((String, UIImage, Bool, CGSize), (String, UIImage, Bool, CGSize)?, Double, [(Bool, Double)], Double)
    case failed
    
    public static func == (lhs: VideoCaptureResult, rhs: VideoCaptureResult) -> Bool {
        switch (lhs, rhs) {
        case (.failed, .failed):
            return true
        case let (.finished(_, _, lhsDuration, lhsChangeTimestamps, lhsTime), .finished(_, _, rhsDuration, rhsChangeTimestamps, rhsTime)):
            // NOTE(review): equality is intentionally loose here — the video payloads
            // and the contents of the change timestamps are not compared, only the
            // durations, times, and the number of position changes.
            return lhsDuration == rhsDuration && lhsTime == rhsTime && lhsChangeTimestamps.count == rhsChangeTimestamps.count
        default:
            return false
        }
    }
}
public struct CameraCode: Equatable {
public enum CodeType {
@ -39,21 +71,32 @@ public struct CameraCode: Equatable {
}
final class CameraOutput: NSObject {
//private let photoOutput = CameraPhotoOutput()
private let videoOutput = AVCaptureVideoDataOutput()
private let audioOutput = AVCaptureAudioDataOutput()
private let metadataOutput = AVCaptureMetadataOutput()
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
let metadataOutput = AVCaptureMetadataOutput()
let exclusive: Bool
private var photoConnection: AVCaptureConnection?
private var videoConnection: AVCaptureConnection?
private var previewConnection: AVCaptureConnection?
private let queue = DispatchQueue(label: "")
private let metadataQueue = DispatchQueue(label: "")
var processSampleBuffer: ((CMSampleBuffer, AVCaptureConnection) -> Void)?
private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
private var videoRecorder: VideoRecorder?
var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)?
var processCodes: (([CameraCode]) -> Void)?
override init() {
super.init()
init(exclusive: Bool) {
self.exclusive = exclusive
self.videoOutput.alwaysDiscardsLateVideoFrames = true;
super.init()
self.videoOutput.alwaysDiscardsLateVideoFrames = false
self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]
}
@ -62,28 +105,248 @@ final class CameraOutput: NSObject {
self.audioOutput.setSampleBufferDelegate(nil, queue: nil)
}
func configure(for session: AVCaptureSession) {
if session.canAddOutput(self.videoOutput) {
session.addOutput(self.videoOutput)
func configure(for session: CameraSession, device: CameraDevice, input: CameraInput, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool) {
if session.session.canAddOutput(self.videoOutput) {
if session.hasMultiCam {
session.session.addOutputWithNoConnections(self.videoOutput)
} else {
session.session.addOutput(self.videoOutput)
}
self.videoOutput.setSampleBufferDelegate(self, queue: self.queue)
} else {
Logger.shared.log("Camera", "Can't add video output")
}
if session.canAddOutput(self.audioOutput) {
session.addOutput(self.audioOutput)
if audio, session.session.canAddOutput(self.audioOutput) {
session.session.addOutput(self.audioOutput)
self.audioOutput.setSampleBufferDelegate(self, queue: self.queue)
}
if session.canAddOutput(self.metadataOutput) {
session.addOutput(self.metadataOutput)
if photo, session.session.canAddOutput(self.photoOutput) {
if session.hasMultiCam {
session.session.addOutputWithNoConnections(self.photoOutput)
} else {
session.session.addOutput(self.photoOutput)
}
} else {
Logger.shared.log("Camera", "Can't add photo output")
}
if metadata, session.session.canAddOutput(self.metadataOutput) {
session.session.addOutput(self.metadataOutput)
self.metadataOutput.setMetadataObjectsDelegate(self, queue: self.metadataQueue)
if self.metadataOutput.availableMetadataObjectTypes.contains(.qr) {
self.metadataOutput.metadataObjectTypes = [.qr]
}
}
if #available(iOS 13.0, *), session.hasMultiCam {
if let device = device.videoDevice, let ports = input.videoInput?.ports(for: AVMediaType.video, sourceDeviceType: device.deviceType, sourceDevicePosition: device.position) {
if let previewView {
let previewConnection = AVCaptureConnection(inputPort: ports.first!, videoPreviewLayer: previewView.videoPreviewLayer)
if session.session.canAddConnection(previewConnection) {
session.session.addConnection(previewConnection)
self.previewConnection = previewConnection
} else {
Logger.shared.log("Camera", "Can't add preview connection")
}
}
let videoConnection = AVCaptureConnection(inputPorts: ports, output: self.videoOutput)
if session.session.canAddConnection(videoConnection) {
session.session.addConnection(videoConnection)
self.videoConnection = videoConnection
} else {
Logger.shared.log("Camera", "Can't add video connection")
}
if photo {
let photoConnection = AVCaptureConnection(inputPorts: ports, output: self.photoOutput)
if session.session.canAddConnection(photoConnection) {
session.session.addConnection(photoConnection)
self.photoConnection = photoConnection
}
}
} else {
Logger.shared.log("Camera", "Can't get video port")
}
}
}
/// Detaches every connection and output this instance may have attached to the
/// given session. Each removal is guarded, so it is safe to call even when some
/// connections/outputs were never added (e.g. single-cam or no-photo configurations).
func invalidate(for session: CameraSession) {
    if #available(iOS 13.0, *) {
        // Manually created connections only exist in the multi-cam path.
        let dropConnection: (AVCaptureConnection?) -> Void = { connection in
            guard let connection else {
                return
            }
            if session.session.connections.contains(where: { $0 === connection }) {
                session.session.removeConnection(connection)
            }
        }
        dropConnection(self.previewConnection)
        self.previewConnection = nil
        dropConnection(self.videoConnection)
        self.videoConnection = nil
        dropConnection(self.photoConnection)
        self.photoConnection = nil
    }
    let ownedOutputs: [AVCaptureOutput] = [self.videoOutput, self.audioOutput, self.photoOutput, self.metadataOutput]
    for output in ownedOutputs {
        if session.session.outputs.contains(where: { $0 === output }) {
            session.session.removeOutput(output)
        }
    }
}
func invalidate(for session: AVCaptureSession) {
for output in session.outputs {
session.removeOutput(output)
/// Enables standard video stabilization on the video data output's connection,
/// when the hardware supports it.
func configureVideoStabilization() {
    guard let connection = self.videoOutput.connection(with: .video), connection.isVideoStabilizationSupported else {
        return
    }
    // .cinematic / .cinematicExtended were previously considered here; .standard
    // is used deliberately (the smoother modes add latency and crop).
    connection.preferredVideoStabilizationMode = .standard
}
/// A signal of whether the photo output currently detects a flash-worthy scene.
/// Emits the current value immediately, then on every KVO change; duplicate
/// values are filtered out.
var isFlashActive: Signal<Bool, NoError> {
    return Signal { [weak self] subscriber in
        guard let self else {
            return EmptyDisposable
        }
        subscriber.putNext(self.photoOutput.isFlashScene)
        // Read the new value from the observed object passed to the KVO handler
        // instead of capturing `self` strongly inside it — the original handler
        // captured `self`, defeating the `[weak self]` above and keeping this
        // object alive for as long as the observation was active.
        let observer = self.photoOutput.observe(\.isFlashScene, options: [.new], changeHandler: { photoOutput, _ in
            subscriber.putNext(photoOutput.isFlashScene)
        })
        return ActionDisposable {
            observer.invalidate()
        }
    }
    |> distinctUntilChanged
}
/// Captures a single photo with the given orientation and flash mode.
///
/// A per-request `PhotoCaptureContext` acts as the capture delegate and is kept
/// alive in `photoCaptureRequests` (keyed by the settings' unique ID) until the
/// returned signal is disposed.
/// - Parameters:
///   - orientation: Video orientation applied to the photo connection.
///   - flashMode: Flash behavior for this capture.
/// - Returns: A signal delivering the `PhotoCaptureResult` from the context.
func takePhoto(orientation: AVCaptureVideoOrientation, flashMode: AVCaptureDevice.FlashMode) -> Signal<PhotoCaptureResult, NoError> {
    var mirror = false
    if let connection = self.photoOutput.connection(with: .video) {
        connection.videoOrientation = orientation
        if #available(iOS 13.0, *) {
            // Mirror captures that originate from a front-positioned device.
            mirror = connection.inputPorts.first?.sourceDevicePosition == .front
        }
    }
    // Request an uncompressed BGRA buffer for this capture.
    let settings = AVCapturePhotoSettings(format: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)])
    settings.flashMode = flashMode
    if let previewPhotoPixelFormatType = settings.availablePreviewPhotoPixelFormatTypes.first {
        settings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPhotoPixelFormatType]
    }
    if #available(iOS 13.0, *) {
        if self.exclusive {
            // Exclusive (single-camera) use can afford balanced quality, unless
            // the output itself is capped at .speed.
            if self.photoOutput.maxPhotoQualityPrioritization != .speed {
                settings.photoQualityPrioritization = .balanced
            } else {
                settings.photoQualityPrioritization = .speed
            }
        } else {
            // Shared (multi-cam) sessions always prioritize speed.
            settings.photoQualityPrioritization = .speed
        }
    }
    let uniqueId = settings.uniqueID
    let photoCapture = PhotoCaptureContext(settings: settings, orientation: orientation, mirror: mirror)
    // Retain the delegate for the duration of the capture; released below.
    self.photoCaptureRequests[uniqueId] = photoCapture
    self.photoOutput.capturePhoto(with: settings, delegate: photoCapture)
    return photoCapture.signal
    |> afterDisposed { [weak self] in
        self?.photoCaptureRequests.removeValue(forKey: uniqueId)
    }
}
/// Whether a video recording is currently in progress.
var isRecording: Bool {
    self.videoRecorder != nil
}
private var recordingCompletionPipe = ValuePipe<VideoCaptureResult>()
/// Starts recording video to a temporary .mp4 file.
///
/// Chooses HEVC when a hardware encoder is available (H.264 otherwise), derives
/// writer settings from the outputs, and wires the recorder's completion into
/// `recordingCompletionPipe` (consumed by `stopRecording`).
/// - Parameters:
///   - isDualCamera: When true (and `position` is given), the starting camera
///     position is recorded at time zero.
///   - position: Initial camera position for dual-camera recordings.
///   - orientation: Capture orientation; also determines the reported dimensions.
/// - Returns: A signal emitting the elapsed recording duration every 0.1s; completes
///   empty if a recording is already in progress or settings are unavailable.
func startRecording(isDualCamera: Bool, position: Camera.Position? = nil, orientation: AVCaptureVideoOrientation) -> Signal<Double, NoError> {
    guard self.videoRecorder == nil else {
        return .complete()
    }
    let codecType: AVVideoCodecType
    if hasHEVCHardwareEncoder {
        codecType = .hevc
    } else {
        codecType = .h264
    }
    guard let videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
        return .complete()
    }
    let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
    // Portrait by default; both landscape orientations swap to 1920x1080.
    var dimensions: CGSize = CGSize(width: 1080, height: 1920)
    if orientation == .landscapeLeft {
        dimensions = CGSize(width: 1920, height: 1080)
    } else if orientation == .landscapeRight {
        dimensions = CGSize(width: 1920, height: 1080)
    }
    let outputFileName = NSUUID().uuidString
    let outputFilePath = NSTemporaryDirectory() + outputFileName + ".mp4"
    let outputFileURL = URL(fileURLWithPath: outputFilePath)
    let videoRecorder = VideoRecorder(configuration: VideoRecorder.Configuration(videoSettings: videoSettings, audioSettings: audioSettings), orientation: orientation, fileUrl: outputFileURL, completion: { [weak self] result in
        if case let .success(transitionImage, duration, positionChangeTimestamps) = result {
            // Position changes are reported as (isFront, timestamp) pairs.
            self?.recordingCompletionPipe.putNext(.finished((outputFilePath, transitionImage ?? UIImage(), false, dimensions), nil, duration, positionChangeTimestamps.map { ($0 == .front, $1) }, CACurrentMediaTime()))
        } else {
            self?.recordingCompletionPipe.putNext(.failed)
        }
    })
    videoRecorder?.start()
    self.videoRecorder = videoRecorder
    if isDualCamera, let position {
        videoRecorder?.markPositionChange(position: position, time: .zero)
    }
    // Poll the recorder's duration on the main queue for UI progress updates.
    return Signal { subscriber in
        let timer = SwiftSignalKit.Timer(timeout: 0.1, repeat: true, completion: { [weak videoRecorder] in
            subscriber.putNext(videoRecorder?.duration ?? 0.0)
        }, queue: Queue.mainQueue())
        timer.start()
        return ActionDisposable {
            timer.invalidate()
        }
    }
}
/// Stops the active recording and delivers its result.
/// - Returns: A signal emitting one `VideoCaptureResult` from the completion
///   pipe fed by the recorder started in `startRecording`; completes empty when
///   no recording is active.
func stopRecording() -> Signal<VideoCaptureResult, NoError> {
    guard let videoRecorder = self.videoRecorder, videoRecorder.isRecording else {
        return .complete()
    }
    videoRecorder.stop()
    return self.recordingCompletionPipe.signal()
    |> take(1)
    |> afterDisposed {
        // NOTE(review): `self` is captured strongly here, which keeps this output
        // (and the recorder) alive until the result is delivered — presumably
        // intentional; confirm before changing to a weak capture.
        self.videoRecorder = nil
    }
}
/// Records a camera position (front/back) switch in the active recording, if any.
func markPositionChange(position: Camera.Position) {
    self.videoRecorder?.markPositionChange(position: position)
}
}
@ -94,7 +357,13 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
return
}
self.processSampleBuffer?(sampleBuffer, connection)
if let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
self.processSampleBuffer?(sampleBuffer, videoPixelBuffer, connection)
}
if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
videoRecorder.appendSampleBuffer(sampleBuffer)
}
}
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
@ -118,3 +387,14 @@ extension CameraOutput: AVCaptureMetadataOutputObjectsDelegate {
self.processCodes?(codes)
}
}
/// True when the system exposes an HEVC encoder for 1080p video.
/// Evaluated once and used to pick the recording codec.
private let hasHEVCHardwareEncoder: Bool = {
    let specification: [CFString: Any] = [:]
    var encoderID: CFString?
    var supportedProperties: CFDictionary?
    let status = VTCopySupportedPropertyDictionaryForEncoder(
        width: 1920,
        height: 1080,
        codecType: kCMVideoCodecType_HEVC,
        encoderSpecification: specification as CFDictionary,
        encoderIDOut: &encoderID,
        supportedPropertiesOut: &supportedProperties
    )
    // Any non-success status (including kVTCouldNotFindVideoEncoderErr) means no encoder.
    return status == noErr
}()

View File

@ -0,0 +1,655 @@
import Foundation
import UIKit
import Display
import AVFoundation
import SwiftSignalKit
import Metal
import MetalKit
import CoreMedia
import Vision
import ImageBlur
private extension UIInterfaceOrientation {
    /// The capture-video orientation corresponding to this interface orientation;
    /// unknown orientations fall back to portrait.
    var videoOrientation: AVCaptureVideoOrientation {
        switch self {
        case .portrait:
            return .portrait
        case .portraitUpsideDown:
            return .portraitUpsideDown
        case .landscapeLeft:
            return .landscapeLeft
        case .landscapeRight:
            return .landscapeRight
        default:
            return .portrait
        }
    }
}
/// A lightweight camera preview backed directly by `AVCaptureVideoPreviewLayer`,
/// with a cached-snapshot placeholder shown until the session starts previewing.
public class CameraSimplePreviewView: UIView {
    /// Re-applies the current interface orientation to the preview connection.
    func updateOrientation() {
        guard self.videoPreviewLayer.connection?.isVideoOrientationSupported == true else {
            return
        }
        let statusBarOrientation: UIInterfaceOrientation
        if #available(iOS 13.0, *) {
            statusBarOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .portrait
        } else {
            statusBarOrientation = UIApplication.shared.statusBarOrientation
        }
        let videoOrientation = statusBarOrientation.videoOrientation
        self.videoPreviewLayer.connection?.videoOrientation = videoOrientation
        // Drop implicit layer animations so the orientation change is instant.
        self.videoPreviewLayer.removeAllAnimations()
    }
    
    /// Last captured back-camera snapshot from the temporary directory,
    /// or a bundled placeholder if none was saved.
    static func lastBackImage() -> UIImage {
        let imagePath = NSTemporaryDirectory() + "backCameraImage.jpg"
        if let data = try? Data(contentsOf: URL(fileURLWithPath: imagePath)), let image = UIImage(data: data) {
            return image
        } else {
            return UIImage(bundleImageName: "Camera/Placeholder")!
        }
    }
    
    /// Persists a back-camera snapshot (JPEG, quality 0.6) for later placeholder use.
    static func saveLastBackImage(_ image: UIImage) {
        let imagePath = NSTemporaryDirectory() + "backCameraImage.jpg"
        if let data = image.jpegData(compressionQuality: 0.6) {
            try? data.write(to: URL(fileURLWithPath: imagePath))
        }
    }
    
    /// Last captured front-camera snapshot, or a bundled selfie placeholder.
    static func lastFrontImage() -> UIImage {
        let imagePath = NSTemporaryDirectory() + "frontCameraImage.jpg"
        if let data = try? Data(contentsOf: URL(fileURLWithPath: imagePath)), let image = UIImage(data: data) {
            return image
        } else {
            return UIImage(bundleImageName: "Camera/SelfiePlaceholder")!
        }
    }
    
    /// Persists a front-camera snapshot (JPEG, quality 0.6) for later placeholder use.
    static func saveLastFrontImage(_ image: UIImage) {
        let imagePath = NSTemporaryDirectory() + "frontCameraImage.jpg"
        if let data = image.jpegData(compressionQuality: 0.6) {
            try? data.write(to: URL(fileURLWithPath: imagePath))
        }
    }
    
    private var previewingDisposable: Disposable?
    // Shows the last saved snapshot while the live preview is not yet running.
    private let placeholderView = UIImageView()
    
    /// - Parameter main: When true, preview and placeholder fill the view
    ///   (`resizeAspectFill`); otherwise they fit (`resizeAspect`).
    public init(frame: CGRect, main: Bool) {
        super.init(frame: frame)
        
        self.videoPreviewLayer.videoGravity = main ? .resizeAspectFill : .resizeAspect
        
        self.placeholderView.contentMode = main ? .scaleAspectFill : .scaleAspectFit
        self.addSubview(self.placeholderView)
    }
    
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    deinit {
        self.previewingDisposable?.dispose()
    }
    
    public override func layoutSubviews() {
        super.layoutSubviews()
        
        self.updateOrientation()
        // Slight overscan hides edge seams between the placeholder and the preview.
        self.placeholderView.frame = self.bounds.insetBy(dx: -1.0, dy: -1.0)
    }
    
    /// Fades out the placeholder once live preview frames are available.
    public func removePlaceholder(delay: Double = 0.0) {
        UIView.animate(withDuration: 0.3, delay: delay) {
            self.placeholderView.alpha = 0.0
        }
    }
    
    /// Re-shows the placeholder with the last snapshot for the given camera side.
    public func resetPlaceholder(front: Bool) {
        self.placeholderView.image = front ? CameraSimplePreviewView.lastFrontImage() : CameraSimplePreviewView.lastBackImage()
        self.placeholderView.alpha = 1.0
    }
    
    // Cached downcast of `self.layer` (the class's layerClass below guarantees the type).
    private var _videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer {
        if let layer = self._videoPreviewLayer {
            return layer
        }
        guard let layer = self.layer as? AVCaptureVideoPreviewLayer else {
            fatalError()
        }
        self._videoPreviewLayer = layer
        return layer
    }
    
    /// Detaches the preview layer from its session.
    func invalidate() {
        self.videoPreviewLayer.session = nil
    }
    
    /// Attaches a session; `autoConnect: false` is used when connections are
    /// created manually (multi-cam configurations).
    func setSession(_ session: AVCaptureSession, autoConnect: Bool) {
        if autoConnect {
            self.videoPreviewLayer.session = session
        } else {
            self.videoPreviewLayer.setSessionWithNoConnection(session)
        }
    }
    
    // Enables/disables the preview connection without tearing it down.
    public var isEnabled: Bool = true {
        didSet {
            self.videoPreviewLayer.connection?.isEnabled = self.isEnabled
        }
    }
    
    public override class var layerClass: AnyClass {
        return AVCaptureVideoPreviewLayer.self
    }
    
    /// KVO-backed signal of the layer's `isPreviewing` state, deduplicated.
    @available(iOS 13.0, *)
    public var isPreviewing: Signal<Bool, NoError> {
        return Signal { [weak self] subscriber in
            guard let self else {
                return EmptyDisposable
            }
            subscriber.putNext(self.videoPreviewLayer.isPreviewing)
            let observer = self.videoPreviewLayer.observe(\.isPreviewing, options: [.new], changeHandler: { view, _ in
                subscriber.putNext(view.isPreviewing)
            })
            return ActionDisposable {
                observer.invalidate()
            }
        }
        |> distinctUntilChanged
    }
    
    /// Converts a point in view coordinates to normalized capture-device coordinates.
    public func cameraPoint(for location: CGPoint) -> CGPoint {
        return self.videoPreviewLayer.captureDevicePointConverted(fromLayerPoint: location)
    }
}
public class CameraPreviewView: MTKView {
private let queue = DispatchQueue(label: "CameraPreview", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
private let commandQueue: MTLCommandQueue
private var textureCache: CVMetalTextureCache?
private var sampler: MTLSamplerState!
private var renderPipelineState: MTLRenderPipelineState!
private var vertexCoordBuffer: MTLBuffer!
private var texCoordBuffer: MTLBuffer!
private var textureWidth: Int = 0
private var textureHeight: Int = 0
private var textureMirroring = false
private var textureRotation: Rotation = .rotate0Degrees
private var textureTranform: CGAffineTransform?
private var _bounds = CGRectNull
public enum Rotation: Int {
case rotate0Degrees
case rotate90Degrees
case rotate180Degrees
case rotate270Degrees
}
// Mirroring state is double-buffered: the first assignment takes effect
// immediately; later assignments are deferred (`_scheduledMirroring`) and
// applied together with the next pixel buffer to avoid a one-frame flip glitch.
// All `_`-prefixed fields are only touched under `queue`.
private var _mirroring: Bool?
private var _scheduledMirroring: Bool?
public var mirroring = false {
    didSet {
        self.queue.sync {
            if self._mirroring != nil {
                self._scheduledMirroring = self.mirroring
            } else {
                self._mirroring = self.mirroring
            }
        }
    }
}

// Rotation applied when drawing; synchronized onto the render queue.
private var _rotation: Rotation = .rotate0Degrees
public var rotation: Rotation = .rotate0Degrees {
    didSet {
        self.queue.sync {
            self._rotation = rotation
        }
    }
}

// The next frame to render; setting it also flushes any scheduled mirroring.
private var _pixelBuffer: CVPixelBuffer?
var pixelBuffer: CVPixelBuffer? {
    didSet {
        self.queue.sync {
            if let scheduledMirroring = self._scheduledMirroring {
                self._scheduledMirroring = nil
                self._mirroring = scheduledMirroring
            }
            self._pixelBuffer = pixelBuffer
        }
    }
}
/// Builds the Metal rendering pipeline (shaders from CameraBundle, BGRA pixel
/// format, clamped linear sampler) and the texture cache used to wrap camera
/// pixel buffers. Fails if Metal or the shader bundle is unavailable.
public init?(test: Bool) {
    let mainBundle = Bundle(for: CameraPreviewView.self)
    guard let path = mainBundle.path(forResource: "CameraBundle", ofType: "bundle") else {
        return nil
    }
    guard let bundle = Bundle(path: path) else {
        return nil
    }
    guard let device = MTLCreateSystemDefaultDevice() else {
        return nil
    }
    guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: bundle) else {
        return nil
    }
    guard let commandQueue = device.makeCommandQueue() else {
        return nil
    }
    self.commandQueue = commandQueue
    
    super.init(frame: .zero, device: device)
    
    self.colorPixelFormat = .bgra8Unorm
    
    // Simple textured-quad pass-through pipeline.
    let pipelineDescriptor = MTLRenderPipelineDescriptor()
    pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
    pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough")
    pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough")
    
    let samplerDescriptor = MTLSamplerDescriptor()
    samplerDescriptor.sAddressMode = .clampToEdge
    samplerDescriptor.tAddressMode = .clampToEdge
    samplerDescriptor.minFilter = .linear
    samplerDescriptor.magFilter = .linear
    self.sampler = device.makeSamplerState(descriptor: samplerDescriptor)
    
    do {
        self.renderPipelineState = try device.makeRenderPipelineState(descriptor: pipelineDescriptor)
    } catch {
        // Pipeline creation only fails on programmer error (bad shader/format).
        fatalError("\(error)")
    }
    
    self.setupTextureCache()
}
required public init(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
/// Creates the Metal texture cache used to wrap incoming CVPixelBuffers;
/// leaves `textureCache` nil (and asserts in debug) on failure.
private func setupTextureCache() {
    var cache: CVMetalTextureCache?
    guard CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device!, nil, &cache) == kCVReturnSuccess else {
        assertionFailure("Unable to allocate texture cache")
        return
    }
    self.textureCache = cache
}
/// Recomputes the vertex/texture-coordinate buffers and the inverse texture
/// transform for the given texture size, rotation, and mirroring. Called from
/// `draw` whenever any of these (or the view bounds) change.
private func setupTransform(width: Int, height: Int, rotation: Rotation, mirroring: Bool) {
    var scaleX: Float = 1.0
    var scaleY: Float = 1.0
    var resizeAspect: Float = 1.0
    
    self._bounds = self.bounds
    self.textureWidth = width
    self.textureHeight = height
    self.textureMirroring = mirroring
    self.textureRotation = rotation
    
    if self.textureWidth > 0 && self.textureHeight > 0 {
        switch self.textureRotation {
        case .rotate0Degrees, .rotate180Degrees:
            scaleX = Float(self._bounds.width / CGFloat(self.textureWidth))
            scaleY = Float(self._bounds.height / CGFloat(self.textureHeight))
            
        case .rotate90Degrees, .rotate270Degrees:
            // 90/270 rotations swap the texture's effective width and height.
            scaleX = Float(self._bounds.width / CGFloat(self.textureHeight))
            scaleY = Float(self._bounds.height / CGFloat(self.textureWidth))
        }
    }
    // Aspect-fit: normalize so the larger scale becomes 1.0.
    resizeAspect = min(scaleX, scaleY)
    if scaleX < scaleY {
        scaleY = scaleX / scaleY
        scaleX = 1.0
    } else {
        scaleX = scaleY / scaleX
        scaleY = 1.0
    }
    
    if self.textureMirroring {
        scaleX *= -1.0
    }
    
    // Full-screen quad in normalized device coordinates, aspect-corrected.
    let vertexData: [Float] = [
        -scaleX, -scaleY, 0.0, 1.0,
        scaleX, -scaleY, 0.0, 1.0,
        -scaleX, scaleY, 0.0, 1.0,
        scaleX, scaleY, 0.0, 1.0
    ]
    self.vertexCoordBuffer = device!.makeBuffer(bytes: vertexData, length: vertexData.count * MemoryLayout<Float>.size, options: [])
    
    // Texture coordinates permuted per rotation (triangle-strip order).
    var texCoordBufferData: [Float]
    switch self.textureRotation {
    case .rotate0Degrees:
        texCoordBufferData = [
            0.0, 1.0,
            1.0, 1.0,
            0.0, 0.0,
            1.0, 0.0
        ]
        
    case .rotate180Degrees:
        texCoordBufferData = [
            1.0, 0.0,
            0.0, 0.0,
            1.0, 1.0,
            0.0, 1.0
        ]
        
    case .rotate90Degrees:
        texCoordBufferData = [
            1.0, 1.0,
            1.0, 0.0,
            0.0, 1.0,
            0.0, 0.0
        ]
        
    case .rotate270Degrees:
        texCoordBufferData = [
            0.0, 0.0,
            0.0, 1.0,
            1.0, 0.0,
            1.0, 1.0
        ]
    }
    self.texCoordBuffer = device?.makeBuffer(bytes: texCoordBufferData, length: texCoordBufferData.count * MemoryLayout<Float>.size, options: [])
    
    // Build the texture -> view transform (mirror, rotate, scale, center),
    // then store its inverse for mapping view points back into the texture.
    var transform = CGAffineTransform.identity
    if self.textureMirroring {
        transform = transform.concatenating(CGAffineTransform(scaleX: -1, y: 1))
        transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(self.textureWidth), y: 0))
    }
    
    switch self.textureRotation {
    case .rotate0Degrees:
        transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(0)))
        
    case .rotate180Degrees:
        transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(Double.pi)))
        transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(self.textureWidth), y: CGFloat(self.textureHeight)))
        
    case .rotate90Degrees:
        transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(Double.pi) / 2))
        transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(self.textureHeight), y: 0))
        
    case .rotate270Degrees:
        transform = transform.concatenating(CGAffineTransform(rotationAngle: 3 * CGFloat(Double.pi) / 2))
        transform = transform.concatenating(CGAffineTransform(translationX: 0, y: CGFloat(self.textureWidth)))
    }
    
    transform = transform.concatenating(CGAffineTransform(scaleX: CGFloat(resizeAspect), y: CGFloat(resizeAspect)))
    let tranformRect = CGRect(origin: .zero, size: CGSize(width: self.textureWidth, height: self.textureHeight)).applying(transform)
    let xShift = (self._bounds.size.width - tranformRect.size.width) / 2
    let yShift = (self._bounds.size.height - tranformRect.size.height) / 2
    transform = transform.concatenating(CGAffineTransform(translationX: xShift, y: yShift))
    self.textureTranform = transform.inverted()
}
/// Renders the most recent pixel buffer as a textured quad. State is snapshotted
/// under `queue`, the buffer is wrapped in a Metal texture via the texture cache,
/// and the cached transform is rebuilt if size/rotation/mirroring/bounds changed.
public override func draw(_ rect: CGRect) {
    var pixelBuffer: CVPixelBuffer?
    var mirroring = false
    var rotation: Rotation = .rotate0Degrees
    
    // Snapshot shared state so rendering sees a consistent frame + settings.
    self.queue.sync {
        pixelBuffer = self._pixelBuffer
        if let mirroringValue = self._mirroring {
            mirroring = mirroringValue
        }
        rotation = self._rotation
    }
    
    guard let drawable = currentDrawable, let currentRenderPassDescriptor = currentRenderPassDescriptor, let previewPixelBuffer = pixelBuffer else {
        return
    }
    
    let width = CVPixelBufferGetWidth(previewPixelBuffer)
    let height = CVPixelBufferGetHeight(previewPixelBuffer)
    
    if self.textureCache == nil {
        self.setupTextureCache()
    }
    var cvTextureOut: CVMetalTexture?
    CVMetalTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault,
        textureCache!,
        previewPixelBuffer,
        nil,
        .bgra8Unorm,
        width,
        height,
        0,
        &cvTextureOut)
    
    guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else {
        // Flush the cache so a transient failure does not pin stale textures.
        CVMetalTextureCacheFlush(self.textureCache!, 0)
        return
    }
    
    // Rebuild the quad geometry/transform only when an input actually changed.
    if texture.width != self.textureWidth ||
        texture.height != self.textureHeight ||
        self.bounds != self._bounds ||
        rotation != self.textureRotation ||
        mirroring != self.textureMirroring {
        self.setupTransform(width: texture.width, height: texture.height, rotation: rotation, mirroring: mirroring)
    }
    
    guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
        CVMetalTextureCacheFlush(self.textureCache!, 0)
        return
    }
    
    guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
        CVMetalTextureCacheFlush(self.textureCache!, 0)
        return
    }
    
    // Single triangle-strip quad with the camera frame as fragment texture.
    commandEncoder.setRenderPipelineState(self.renderPipelineState!)
    commandEncoder.setVertexBuffer(self.vertexCoordBuffer, offset: 0, index: 0)
    commandEncoder.setVertexBuffer(self.texCoordBuffer, offset: 0, index: 1)
    commandEncoder.setFragmentTexture(texture, index: 0)
    commandEncoder.setFragmentSamplerState(self.sampler, index: 0)
    commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
    commandEncoder.endEncoding()
    
    commandBuffer.present(drawable)
    commandBuffer.commit()
}
// Native resolution of the capture device; the face-detection overlay layers are
// created lazily on the main queue the first time a non-zero size is assigned.
var captureDeviceResolution: CGSize = CGSize() {
    didSet {
        if oldValue.width.isZero, !self.captureDeviceResolution.width.isZero {
            Queue.mainQueue().async {
                self.setupVisionDrawingLayers()
            }
        }
    }
}

// Overlay layers for visualizing Vision face observations.
var detectionOverlayLayer: CALayer?
var detectedFaceRectangleShapeLayer: CAShapeLayer?
var detectedFaceLandmarksShapeLayer: CAShapeLayer?
/// Redraws the face-rectangle and landmark overlay paths for the given Vision
/// observations inside one CATransaction (implicit animations disabled).
/// The overlay is hidden entirely when there are no observations.
func drawFaceObservations(_ faceObservations: [VNFaceObservation]) {
    guard let faceRectangleShapeLayer = self.detectedFaceRectangleShapeLayer,
        let faceLandmarksShapeLayer = self.detectedFaceLandmarksShapeLayer
    else {
        return
    }
    
    CATransaction.begin()
    CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
    
    self.detectionOverlayLayer?.isHidden = faceObservations.isEmpty
    
    let faceRectanglePath = CGMutablePath()
    let faceLandmarksPath = CGMutablePath()
    
    for faceObservation in faceObservations {
        self.addIndicators(to: faceRectanglePath,
                           faceLandmarksPath: faceLandmarksPath,
                           for: faceObservation)
    }
    
    faceRectangleShapeLayer.path = faceRectanglePath
    faceLandmarksShapeLayer.path = faceLandmarksPath
    
    self.updateLayerGeometry()
    
    CATransaction.commit()
}
/// Appends the polyline of a landmark region (in normalized coordinates, mapped
/// through `affineTransform`) to `path`, optionally closing it back to the start.
/// Regions with fewer than two points are ignored.
fileprivate func addPoints(in landmarkRegion: VNFaceLandmarkRegion2D, to path: CGMutablePath, applying affineTransform: CGAffineTransform, closingWhenComplete closePath: Bool) {
    guard landmarkRegion.pointCount > 1 else {
        return
    }
    let normalizedPoints = landmarkRegion.normalizedPoints
    path.move(to: normalizedPoints[0], transform: affineTransform)
    path.addLines(between: normalizedPoints, transform: affineTransform)
    if closePath {
        path.addLine(to: normalizedPoints[0], transform: affineTransform)
        path.closeSubpath()
    }
}
/// Adds the bounding rectangle of a face observation to `faceRectanglePath` and,
/// when landmarks are present, strokes them into `faceLandmarksPath` — open
/// contours (brows, face contour, nose crest, median line) as-is and closed
/// contours (eyes, lips, nose) sealed back to their start point.
fileprivate func addIndicators(to faceRectanglePath: CGMutablePath, faceLandmarksPath: CGMutablePath, for faceObservation: VNFaceObservation) {
    let displaySize = self.captureDeviceResolution
    
    let faceBounds = VNImageRectForNormalizedRect(faceObservation.boundingBox, Int(displaySize.width), Int(displaySize.height))
    faceRectanglePath.addRect(faceBounds)
    
    guard let landmarks = faceObservation.landmarks else {
        return
    }
    // Landmark points are normalized to the face bounds; map them into image space.
    let affineTransform = CGAffineTransform(translationX: faceBounds.origin.x, y: faceBounds.origin.y)
        .scaledBy(x: faceBounds.size.width, y: faceBounds.size.height)
    
    let openRegions = [
        landmarks.leftEyebrow,
        landmarks.rightEyebrow,
        landmarks.faceContour,
        landmarks.noseCrest,
        landmarks.medianLine
    ].compactMap { $0 }
    for region in openRegions {
        self.addPoints(in: region, to: faceLandmarksPath, applying: affineTransform, closingWhenComplete: false)
    }
    
    let closedRegions = [
        landmarks.leftEye,
        landmarks.rightEye,
        landmarks.outerLips,
        landmarks.innerLips,
        landmarks.nose
    ].compactMap { $0 }
    for region in closedRegions {
        self.addPoints(in: region, to: faceLandmarksPath, applying: affineTransform, closingWhenComplete: true)
    }
}
/// Converts degrees to radians.
fileprivate func radiansForDegrees(_ degrees: CGFloat) -> CGFloat {
    // Compute in Double first, then convert — same precision as the original.
    let radians = Double(degrees) * Double.pi / 180.0
    return CGFloat(radians)
}
/// Rotates, scales, and mirrors the detection overlay so that observations in
/// capture-device coordinates align with the on-screen preview for the current
/// device orientation, then re-centers it in the view.
fileprivate func updateLayerGeometry() {
    guard let overlayLayer = self.detectionOverlayLayer else {
        return
    }
    
    CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
    
    let videoPreviewRect = self.bounds
    
    var rotation: CGFloat
    var scaleX: CGFloat
    var scaleY: CGFloat
    
    // Rotate the layer into screen orientation.
    switch UIDevice.current.orientation {
    case .portraitUpsideDown:
        rotation = 180
        scaleX = videoPreviewRect.width / captureDeviceResolution.width
        scaleY = videoPreviewRect.height / captureDeviceResolution.height
        
    case .landscapeLeft:
        rotation = 90
        scaleX = videoPreviewRect.height / captureDeviceResolution.width
        scaleY = scaleX
        
    case .landscapeRight:
        rotation = -90
        scaleX = videoPreviewRect.height / captureDeviceResolution.width
        scaleY = scaleX
        
    default:
        rotation = 0
        scaleX = videoPreviewRect.width / captureDeviceResolution.width
        scaleY = videoPreviewRect.height / captureDeviceResolution.height
    }
    
    // Scale and mirror the image to ensure upright presentation.
    let affineTransform = CGAffineTransform(rotationAngle: radiansForDegrees(rotation))
        .scaledBy(x: scaleX, y: -scaleY)
    overlayLayer.setAffineTransform(affineTransform)
    
    // Cover entire screen UI.
    let rootLayerBounds = self.bounds
    overlayLayer.position = CGPoint(x: rootLayerBounds.midX, y: rootLayerBounds.midY)
}
/// Builds the layer hierarchy used to visualize Vision face-detection output:
/// an overlay container sized to the capture resolution, a green bounding-box
/// layer, and a white landmarks layer on top of it.
fileprivate func setupVisionDrawingLayers() {
    let resolution = self.captureDeviceResolution
    let rootLayer = self.layer
    let deviceBounds = CGRect(x: 0,
                              y: 0,
                              width: resolution.width,
                              height: resolution.height)
    let deviceCenter = CGPoint(x: deviceBounds.midX,
                               y: deviceBounds.midY)
    let unitCenter = CGPoint(x: 0.5, y: 0.5)

    // Container layer in capture-pixel coordinates; updateLayerGeometry()
    // maps it onto the view via an affine transform.
    let overlay = CALayer()
    overlay.name = "DetectionOverlay"
    overlay.masksToBounds = true
    overlay.anchorPoint = unitCenter
    overlay.bounds = deviceBounds
    overlay.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY)

    // Face bounding-box outline (subtle green stroke).
    let rectangleLayer = CAShapeLayer()
    rectangleLayer.name = "RectangleOutlineLayer"
    rectangleLayer.bounds = deviceBounds
    rectangleLayer.anchorPoint = unitCenter
    rectangleLayer.position = deviceCenter
    rectangleLayer.fillColor = nil
    rectangleLayer.strokeColor = UIColor.green.withAlphaComponent(0.2).cgColor
    rectangleLayer.lineWidth = 2

    // Landmark contours (eyes, lips, nose, …) with a soft shadow for contrast.
    let landmarksLayer = CAShapeLayer()
    landmarksLayer.name = "FaceLandmarksLayer"
    landmarksLayer.bounds = deviceBounds
    landmarksLayer.anchorPoint = unitCenter
    landmarksLayer.position = deviceCenter
    landmarksLayer.fillColor = nil
    landmarksLayer.strokeColor = UIColor.white.withAlphaComponent(0.7).cgColor
    landmarksLayer.lineWidth = 2
    landmarksLayer.shadowOpacity = 0.7
    landmarksLayer.shadowRadius = 2

    overlay.addSublayer(rectangleLayer)
    rectangleLayer.addSublayer(landmarksLayer)
    rootLayer.addSublayer(overlay)

    self.detectionOverlayLayer = overlay
    self.detectedFaceRectangleShapeLayer = rectangleLayer
    self.detectedFaceLandmarksShapeLayer = landmarksLayer

    self.updateLayerGeometry()
}
}

View File

@ -1,4 +1,8 @@
import UIKit
import AVFoundation
import Foundation
import Accelerate
import CoreImage
extension AVFrameRateRange {
func clamp(rate: Float64) -> Float64 {
@ -31,7 +35,6 @@ extension AVCaptureDevice {
}
let diff = frameRates.map { abs($0 - fps) }
if let minElement: Float64 = diff.min() {
for i in 0..<diff.count where diff[i] == minElement {
return (frameRates[i], durations[i])
@ -40,4 +43,315 @@ extension AVCaptureDevice {
return nil
}
/// The video zoom factor at which a virtual multi-camera device switches to
/// its wide-angle lens (i.e. the "1x" factor); falls back to 1.0 for plain
/// devices or pre-iOS-13 systems.
var neutralZoomFactor: CGFloat {
    if #available(iOS 13.0, *) {
        let devices = self.constituentDevices
        if let wideIndex = devices.firstIndex(where: { $0.deviceType == .builtInWideAngleCamera }), wideIndex > 0 {
            // Switch-over factors sit between consecutive constituent devices,
            // so the entry preceding the wide camera marks its activation point.
            let factor = self.virtualDeviceSwitchOverVideoZoomFactors[wideIndex - 1]
            return CGFloat(factor.doubleValue)
        }
    }
    return 1.0
}
}
extension CMSampleBuffer {
    /// The sample's presentation timestamp.
    var presentationTimestamp: CMTime {
        return CMSampleBufferGetPresentationTimeStamp(self)
    }

    /// The media type from the buffer's format description.
    /// Falls back to video when no format description is attached.
    var type: CMMediaType {
        guard let formatDescription = CMSampleBufferGetFormatDescription(self) else {
            return kCMMediaType_Video
        }
        return CMFormatDescriptionGetMediaType(formatDescription)
    }
}
extension AVCaptureVideoOrientation {
    /// Maps a UI interface orientation to the matching capture orientation.
    /// Fails (returns nil) for `.unknown`.
    init?(interfaceOrientation: UIInterfaceOrientation) {
        switch interfaceOrientation {
        case .portrait:
            self = .portrait
        case .portraitUpsideDown:
            self = .portraitUpsideDown
        case .landscapeLeft:
            self = .landscapeLeft
        case .landscapeRight:
            self = .landscapeRight
        default:
            return nil
        }
    }
}
extension CameraPreviewView.Rotation {
    /// Computes the rotation needed to display frames captured with the given
    /// video orientation upright under the current interface orientation.
    ///
    /// Front-camera frames are mirrored by the preview, so 90°/270° cases are
    /// swapped relative to the back camera.
    ///
    /// Returns nil for interface orientations with no defined mapping
    /// (`.unknown`), and — fix — also for video orientations added in future
    /// SDKs instead of crashing via `fatalError` (a failable initializer
    /// should fail, not abort the app).
    init?(with interfaceOrientation: UIInterfaceOrientation, videoOrientation: AVCaptureVideoOrientation, cameraPosition: AVCaptureDevice.Position) {
        switch videoOrientation {
        case .portrait:
            switch interfaceOrientation {
            case .landscapeRight:
                if cameraPosition == .front {
                    self = .rotate90Degrees
                } else {
                    self = .rotate270Degrees
                }
            case .landscapeLeft:
                if cameraPosition == .front {
                    self = .rotate270Degrees
                } else {
                    self = .rotate90Degrees
                }
            case .portrait:
                self = .rotate0Degrees
            case .portraitUpsideDown:
                self = .rotate180Degrees
            default: return nil
            }
        case .portraitUpsideDown:
            switch interfaceOrientation {
            case .landscapeRight:
                if cameraPosition == .front {
                    self = .rotate270Degrees
                } else {
                    self = .rotate90Degrees
                }
            case .landscapeLeft:
                if cameraPosition == .front {
                    self = .rotate90Degrees
                } else {
                    self = .rotate270Degrees
                }
            case .portrait:
                self = .rotate180Degrees
            case .portraitUpsideDown:
                self = .rotate0Degrees
            default: return nil
            }
        case .landscapeRight:
            switch interfaceOrientation {
            case .landscapeRight:
                self = .rotate0Degrees
            case .landscapeLeft:
                self = .rotate180Degrees
            case .portrait:
                if cameraPosition == .front {
                    self = .rotate270Degrees
                } else {
                    self = .rotate90Degrees
                }
            case .portraitUpsideDown:
                if cameraPosition == .front {
                    self = .rotate90Degrees
                } else {
                    self = .rotate270Degrees
                }
            default: return nil
            }
        case .landscapeLeft:
            switch interfaceOrientation {
            case .landscapeLeft:
                self = .rotate0Degrees
            case .landscapeRight:
                self = .rotate180Degrees
            case .portrait:
                if cameraPosition == .front {
                    self = .rotate90Degrees
                } else {
                    self = .rotate270Degrees
                }
            case .portraitUpsideDown:
                if cameraPosition == .front {
                    self = .rotate270Degrees
                } else {
                    self = .rotate90Degrees
                }
            default: return nil
            }
        @unknown default:
            // Previously fatalError("Unknown orientation.") — crashing on a new
            // SDK enum case is unacceptable; fail the initializer instead.
            return nil
        }
    }
}
/// EXIF orientation to hand to Vision for frames captured at the given device
/// orientation.
///
/// NOTE(review): every value is a *mirrored* variant, which suggests this
/// assumes front-camera frames — confirm against the capture pipeline before
/// reusing for the back camera.
func exifOrientationForDeviceOrientation(_ deviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation {
    switch deviceOrientation {
    case .portraitUpsideDown:
        return .rightMirrored
    case .landscapeLeft:
        return .downMirrored
    case .landscapeRight:
        return .upMirrored
    default:
        // Portrait and any face-up/unknown orientations.
        return .leftMirrored
    }
}
/// Crops a region out of `srcPixelBuffer` and scales it into `dstPixelBuffer`
/// using vImage.
///
/// - Parameters:
///   - cropX/cropY: Top-left corner of the crop region, in source pixels.
///   - cropWidth/cropHeight: Size of the crop region, in source pixels.
///   - scaleWidth/scaleHeight: Output size; must fit inside the destination buffer.
///
/// NOTE(review): the `cropX*4` offset assumes a 4-bytes-per-pixel packed format
/// (e.g. BGRA), and `vImageScale_ARGB8888` assumes 8-bit 4-channel data —
/// confirm both buffers are created with such a format. Errors are reported via
/// `print` and the function returns without scaling (best-effort).
func resizePixelBuffer(from srcPixelBuffer: CVPixelBuffer,
                       to dstPixelBuffer: CVPixelBuffer,
                       cropX: Int,
                       cropY: Int,
                       cropWidth: Int,
                       cropHeight: Int,
                       scaleWidth: Int,
                       scaleHeight: Int) {
    assert(CVPixelBufferGetWidth(dstPixelBuffer) >= scaleWidth)
    assert(CVPixelBufferGetHeight(dstPixelBuffer) >= scaleHeight)

    // Lock both buffers for CPU access; unlocks are paired via defer.
    let srcFlags = CVPixelBufferLockFlags.readOnly
    let dstFlags = CVPixelBufferLockFlags(rawValue: 0)

    guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(srcPixelBuffer, srcFlags) else {
        print("Error: could not lock source pixel buffer")
        return
    }
    defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, srcFlags) }

    guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(dstPixelBuffer, dstFlags) else {
        print("Error: could not lock destination pixel buffer")
        return
    }
    defer { CVPixelBufferUnlockBaseAddress(dstPixelBuffer, dstFlags) }

    guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer),
          let dstData = CVPixelBufferGetBaseAddress(dstPixelBuffer) else {
        print("Error: could not get pixel buffer base address")
        return
    }

    // Point the source vImage buffer at the crop origin; rowBytes keeps the
    // original stride so row starts remain correct.
    let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
    let offset = cropY*srcBytesPerRow + cropX*4
    var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
                                  height: vImagePixelCount(cropHeight),
                                  width: vImagePixelCount(cropWidth),
                                  rowBytes: srcBytesPerRow)

    let dstBytesPerRow = CVPixelBufferGetBytesPerRow(dstPixelBuffer)
    var dstBuffer = vImage_Buffer(data: dstData,
                                  height: vImagePixelCount(scaleHeight),
                                  width: vImagePixelCount(scaleWidth),
                                  rowBytes: dstBytesPerRow)

    // High-quality resample of the cropped region into the destination.
    let error = vImageScale_ARGB8888(&srcBuffer, &dstBuffer, nil, vImage_Flags(0))
    if error != kvImageNoError {
        print("Error:", error)
    }
}
/// Scales the entire source buffer into the destination buffer (no cropping).
func resizePixelBuffer(from srcPixelBuffer: CVPixelBuffer,
                       to dstPixelBuffer: CVPixelBuffer,
                       width: Int, height: Int) {
    // Delegate to the cropping variant with the full source extent.
    let fullWidth = CVPixelBufferGetWidth(srcPixelBuffer)
    let fullHeight = CVPixelBufferGetHeight(srcPixelBuffer)
    resizePixelBuffer(from: srcPixelBuffer,
                      to: dstPixelBuffer,
                      cropX: 0,
                      cropY: 0,
                      cropWidth: fullWidth,
                      cropHeight: fullHeight,
                      scaleWidth: width,
                      scaleHeight: height)
}
/// Scales `pixelBuffer` to `width` x `height` using Core Image and renders the
/// result into `output`.
func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
                       width: Int, height: Int,
                       output: CVPixelBuffer, context: CIContext) {
    let sourceWidth = CGFloat(CVPixelBufferGetWidth(pixelBuffer))
    let sourceHeight = CGFloat(CVPixelBufferGetHeight(pixelBuffer))
    // Independent X/Y scales: aspect ratio is not preserved by design.
    let scaleTransform = CGAffineTransform(scaleX: CGFloat(width) / sourceWidth,
                                           y: CGFloat(height) / sourceHeight)
    let scaledImage = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: scaleTransform)
    context.render(scaledImage, to: output)
}
/// Wraps a pixel buffer's contents in a `UIImage` with the given orientation.
///
/// Returns nil when a CGContext/CGImage cannot be created from the buffer.
///
/// NOTE(review): `premultipliedFirst` with no byte-order flag assumes the
/// buffer's channel layout matches — confirm against the buffer's pixel format.
func imageFromCVPixelBuffer(_ pixelBuffer: CVPixelBuffer, orientation: UIImage.Orientation) -> UIImage? {
    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    // Single unlock point for every exit path (the original repeated the unlock
    // before each return, which is easy to miss when adding a new path).
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
    let colorSpace = CGColorSpaceCreateDeviceRGB()

    guard let context = CGContext(
        data: baseAddress,
        width: width,
        height: height,
        bitsPerComponent: 8,
        bytesPerRow: bytesPerRow,
        space: colorSpace,
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
    ) else {
        return nil
    }
    guard let cgImage = context.makeImage() else {
        return nil
    }
    return UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
}
extension CVPixelBuffer {
    /// Creates an independent copy of this pixel buffer (same size and format).
    ///
    /// Returns nil if allocation fails or a base address is unavailable.
    ///
    /// NOTE(review): assumes a non-planar (packed) buffer; planar formats would
    /// need per-plane copies.
    func deepCopy() -> CVPixelBuffer? {
        let width = CVPixelBufferGetWidth(self)
        let height = CVPixelBufferGetHeight(self)
        let format = CVPixelBufferGetPixelFormatType(self)
        let attributes: [NSObject: AnyObject] = [
            kCVPixelBufferCGImageCompatibilityKey: true as AnyObject,
            kCVPixelBufferCGBitmapContextCompatibilityKey: true as AnyObject
        ]
        var newPixelBuffer: CVPixelBuffer?
        let status = CVPixelBufferCreate(
            kCFAllocatorDefault,
            width,
            height,
            format,
            attributes as CFDictionary,
            &newPixelBuffer
        )
        guard status == kCVReturnSuccess, let unwrappedPixelBuffer = newPixelBuffer else {
            return nil
        }

        CVPixelBufferLockBaseAddress(self, .readOnly)
        CVPixelBufferLockBaseAddress(unwrappedPixelBuffer, [])
        // Paired unlocks for every exit path below.
        defer {
            CVPixelBufferUnlockBaseAddress(unwrappedPixelBuffer, [])
            CVPixelBufferUnlockBaseAddress(self, .readOnly)
        }

        guard let sourceBaseAddress = CVPixelBufferGetBaseAddress(self),
              let destinationBaseAddress = CVPixelBufferGetBaseAddress(unwrappedPixelBuffer) else {
            return nil
        }

        let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(self)
        let destinationBytesPerRow = CVPixelBufferGetBytesPerRow(unwrappedPixelBuffer)
        if sourceBytesPerRow == destinationBytesPerRow {
            // Identical strides: one contiguous copy is safe.
            memcpy(destinationBaseAddress, sourceBaseAddress, height * sourceBytesPerRow)
        } else {
            // Fix: the original did a single memcpy of height * min(strides),
            // which skews every row when the two buffers have different
            // bytes-per-row. Copy row by row so each row lands at its own stride.
            let rowBytes = min(sourceBytesPerRow, destinationBytesPerRow)
            for row in 0..<height {
                memcpy(destinationBaseAddress.advanced(by: row * destinationBytesPerRow),
                       sourceBaseAddress.advanced(by: row * sourceBytesPerRow),
                       rowBytes)
            }
        }
        return unwrappedPixelBuffer
    }
}

View File

@ -0,0 +1,110 @@
import Foundation
import AVFoundation
import UIKit
import SwiftSignalKit
/// Outcome of a still-photo capture.
/// `.finished` carries the captured image, an optional secondary image, and a
/// capture timestamp.
public enum PhotoCaptureResult: Equatable {
    case began
    case finished(UIImage, UIImage?, Double)
    case failed

    /// Equality compares only the case — and, for `.finished`, the timestamp.
    /// The images themselves are deliberately ignored (UIImage is not Equatable).
    public static func == (lhs: PhotoCaptureResult, rhs: PhotoCaptureResult) -> Bool {
        switch (lhs, rhs) {
        case (.began, .began), (.failed, .failed):
            return true
        case let (.finished(_, _, lhsTime), .finished(_, _, rhsTime)):
            return lhsTime == rhsTime
        default:
            return false
        }
    }
}
/// Delegate object for a single photo capture: forwards capture lifecycle
/// events into a `ValuePipe` observable via `signal`.
final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
    private let pipe = ValuePipe<PhotoCaptureResult>()
    private let orientation: AVCaptureVideoOrientation
    private let mirror: Bool

    init(settings: AVCapturePhotoSettings, orientation: AVCaptureVideoOrientation, mirror: Bool) {
        self.orientation = orientation
        self.mirror = mirror

        super.init()
    }

    func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
        self.pipe.putNext(.began)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let _ = error {
            self.pipe.putNext(.failed)
        } else {
            guard let photoPixelBuffer = photo.pixelBuffer else {
                print("Error occurred while capturing photo: Missing pixel buffer (\(String(describing: error)))")
                // Fix: emit .failed so `signal` completes — the original returned
                // silently here, leaving subscribers waiting forever.
                self.pipe.putNext(.failed)
                return
            }

            // Map the capture orientation to the UIImage orientation needed to
            // display the buffer upright (portrait capture → .right, etc.).
            var orientation: UIImage.Orientation = .right
            if self.orientation == .landscapeLeft {
                orientation = .down
            } else if self.orientation == .landscapeRight {
                orientation = .up
            } else if self.orientation == .portraitUpsideDown {
                orientation = .left
            }

            let finalPixelBuffer = photoPixelBuffer
            let ciContext = CIContext()
            let renderedCIImage = CIImage(cvImageBuffer: finalPixelBuffer)
            if let cgImage = ciContext.createCGImage(renderedCIImage, from: renderedCIImage.extent) {
                var image = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
                if image.imageOrientation != .up {
                    // Bake orientation (and the front-camera mirror) into pixels.
                    UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
                    if self.mirror, let context = UIGraphicsGetCurrentContext() {
                        context.translateBy(x: image.size.width / 2.0, y: image.size.height / 2.0)
                        context.scaleBy(x: -1.0, y: 1.0)
                        context.translateBy(x: -image.size.width / 2.0, y: -image.size.height / 2.0)
                    }
                    image.draw(in: CGRect(origin: .zero, size: image.size))
                    if let currentImage = UIGraphicsGetImageFromCurrentImageContext() {
                        image = currentImage
                    }
                    UIGraphicsEndImageContext()
                }
                self.pipe.putNext(.finished(image, nil, CACurrentMediaTime()))
            } else {
                self.pipe.putNext(.failed)
            }
        }
    }

    /// Emits capture events and completes after the first terminal event
    /// (`.finished` or `.failed`), passing the terminal value through.
    var signal: Signal<PhotoCaptureResult, NoError> {
        return self.pipe.signal()
        |> take(until: { next in
            let complete: Bool
            switch next {
            case .finished, .failed:
                complete = true
            default:
                complete = false
            }
            return SignalTakeAction(passthrough: true, complete: complete)
        })
    }
}

View File

@ -0,0 +1,535 @@
import Foundation
import AVFoundation
import UIKit
import CoreImage
import SwiftSignalKit
import TelegramCore
private extension CMSampleBuffer {
    /// Timestamp at which the sample ends: presentation time plus duration.
    var endTime: CMTime {
        return CMSampleBufferGetPresentationTimeStamp(self) + CMSampleBufferGetDuration(self)
    }
}
/// Serial-queue-confined AVAssetWriter wrapper that muxes incoming video and
/// audio sample buffers into an MP4 file, captures a transition image from the
/// first video frame, and records camera position-change timestamps.
///
/// All mutable state is owned by `queue`; public entry points hop onto it.
private final class VideoRecorderImpl {
    public enum RecorderError: LocalizedError {
        case generic
        case avError(Error)

        public var errorDescription: String? {
            switch self {
            case .generic:
                return "Error"
            case let .avError(error):
                return error.localizedDescription
            }
        }
    }

    private let queue = DispatchQueue(label: "VideoRecorder")
    private var assetWriter: AVAssetWriter

    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?

    private let imageContext = CIContext()
    private var transitionImage: UIImage?
    private var savedTransitionImage = false

    // Audio buffers whose end time is ahead of the last written video frame;
    // they are flushed once video catches up so audio never leads video.
    private var pendingAudioSampleBuffers: [CMSampleBuffer] = []

    private var _duration: CMTime = .zero
    public var duration: CMTime {
        self.queue.sync { _duration }
    }

    private var lastVideoSampleTime: CMTime = .invalid
    private var recordingStartSampleTime: CMTime = .invalid
    private var recordingStopSampleTime: CMTime = .invalid

    private var positionChangeTimestamps: [(Camera.Position, CMTime)] = []

    private let configuration: VideoRecorder.Configuration
    private let orientation: AVCaptureVideoOrientation
    private let videoTransform: CGAffineTransform
    private let url: URL

    fileprivate var completion: (Bool, UIImage?, [(Camera.Position, CMTime)]?) -> Void = { _, _, _ in }

    private let error = Atomic<Error?>(value: nil)

    private var stopped = false
    private var hasAllVideoBuffers = false
    private var hasAllAudioBuffers = false

    public init?(configuration: VideoRecorder.Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL) {
        self.configuration = configuration

        // The writer receives portrait-native frames; the track transform
        // rotates playback to match the capture orientation.
        var transform: CGAffineTransform = CGAffineTransform(rotationAngle: .pi / 2.0)
        if orientation == .landscapeLeft {
            transform = CGAffineTransform(rotationAngle: .pi)
        } else if orientation == .landscapeRight {
            transform = CGAffineTransform(rotationAngle: 0.0)
        } else if orientation == .portraitUpsideDown {
            transform = CGAffineTransform(rotationAngle: -.pi / 2.0)
        }
        self.orientation = orientation
        self.videoTransform = transform
        self.url = fileUrl

        try? FileManager.default.removeItem(at: url)
        guard let assetWriter = try? AVAssetWriter(url: url, fileType: .mp4) else {
            return nil
        }
        self.assetWriter = assetWriter
        self.assetWriter.shouldOptimizeForNetworkUse = false
    }

    private func hasError() -> Error? {
        return self.error.with { $0 }
    }

    /// Marks the nominal recording start; refined to the first video frame's
    /// presentation time once writing actually begins.
    public func start() {
        self.queue.async {
            self.recordingStartSampleTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
        }
    }

    /// Records the moment the camera switched position, either at an explicit
    /// `time` or relative to the recording start.
    public func markPositionChange(position: Camera.Position, time: CMTime? = nil) {
        self.queue.async {
            guard self.recordingStartSampleTime.isValid || time != nil else {
                return
            }
            if let time {
                self.positionChangeTimestamps.append((position, time))
            } else {
                let currentTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
                let delta = currentTime - self.recordingStartSampleTime
                self.positionChangeTimestamps.append((position, delta))
            }
        }
    }

    /// Appends a video sample: lazily creates the video input, starts the
    /// writer session on the first frame, saves a transition image, and flushes
    /// any audio that video has now caught up with.
    public func appendVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        if let _ = self.hasError() {
            return
        }

        guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Video else {
            return
        }

        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        self.queue.async {
            guard !self.stopped && self.error.with({ $0 }) == nil else {
                return
            }

            var failed = false
            if self.videoInput == nil {
                let videoSettings = self.configuration.videoSettings
                if self.assetWriter.canApply(outputSettings: videoSettings, forMediaType: .video) {
                    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings, sourceFormatHint: formatDescription)
                    videoInput.expectsMediaDataInRealTime = true
                    videoInput.transform = self.videoTransform
                    if self.assetWriter.canAdd(videoInput) {
                        self.assetWriter.add(videoInput)
                        self.videoInput = videoInput
                    } else {
                        failed = true
                    }
                } else {
                    failed = true
                }
            }

            if failed {
                print("error")
                return
            }

            if self.assetWriter.status == .unknown {
                // Drop frames captured before start() was called.
                if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
                    return
                }
                if !self.assetWriter.startWriting() {
                    if let error = self.assetWriter.error {
                        self.transitionToFailedStatus(error: .avError(error))
                        return
                    }
                }
                self.assetWriter.startSession(atSourceTime: presentationTime)
                // Re-anchor the start to the actual first written frame.
                self.recordingStartSampleTime = presentationTime
                self.lastVideoSampleTime = presentationTime
            }

            if self.assetWriter.status == .writing {
                if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
                    self.hasAllVideoBuffers = true
                    self.maybeFinish()
                    return
                }
                if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
                    if videoInput.append(sampleBuffer) {
                        self.lastVideoSampleTime = presentationTime
                        let startTime = self.recordingStartSampleTime
                        let duration = presentationTime - startTime
                        self._duration = duration
                    }
                    if !self.savedTransitionImage, let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                        self.savedTransitionImage = true
                        // Decode the transition image off this queue; on failure
                        // re-arm the flag so a later frame is tried.
                        Queue.concurrentBackgroundQueue().async {
                            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
                            if let cgImage = self.imageContext.createCGImage(ciImage, from: ciImage.extent) {
                                var orientation: UIImage.Orientation = .right
                                if self.orientation == .landscapeLeft {
                                    orientation = .down
                                } else if self.orientation == .landscapeRight {
                                    orientation = .up
                                } else if self.orientation == .portraitUpsideDown {
                                    orientation = .left
                                }
                                self.transitionImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: orientation)
                            } else {
                                self.savedTransitionImage = false
                            }
                        }
                    }

                    if !self.tryAppendingPendingAudioBuffers() {
                        self.transitionToFailedStatus(error: .generic)
                    }
                }
            }
        }
    }

    /// Appends an audio sample: lazily creates the audio input (deriving sample
    /// rate / channel layout from the buffer's format), then appends or defers
    /// the buffer so audio never runs ahead of video.
    public func appendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        if let _ = self.hasError() {
            return
        }

        guard let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer), CMFormatDescriptionGetMediaType(formatDescription) == kCMMediaType_Audio else {
            return
        }

        self.queue.async {
            guard !self.stopped && self.error.with({ $0 }) == nil else {
                return
            }

            var failed = false
            if self.audioInput == nil {
                var audioSettings = self.configuration.audioSettings
                if let currentAudioStreamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) {
                    audioSettings[AVSampleRateKey] = currentAudioStreamBasicDescription.pointee.mSampleRate
                    audioSettings[AVNumberOfChannelsKey] = currentAudioStreamBasicDescription.pointee.mChannelsPerFrame
                }

                var audioChannelLayoutSize: Int = 0
                let currentChannelLayout = CMAudioFormatDescriptionGetChannelLayout(formatDescription, sizeOut: &audioChannelLayoutSize)
                let currentChannelLayoutData: Data
                if let currentChannelLayout = currentChannelLayout, audioChannelLayoutSize > 0 {
                    currentChannelLayoutData = Data(bytes: currentChannelLayout, count: audioChannelLayoutSize)
                } else {
                    currentChannelLayoutData = Data()
                }
                audioSettings[AVChannelLayoutKey] = currentChannelLayoutData

                if self.assetWriter.canApply(outputSettings: audioSettings, forMediaType: .audio) {
                    let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings, sourceFormatHint: formatDescription)
                    audioInput.expectsMediaDataInRealTime = true
                    if self.assetWriter.canAdd(audioInput) {
                        self.assetWriter.add(audioInput)
                        self.audioInput = audioInput
                    } else {
                        failed = true
                    }
                } else {
                    failed = true
                }
            }

            if failed {
                print("error")
                return
            }

            if self.assetWriter.status == .writing {
                if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
                    return
                }
                if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
                    self.hasAllAudioBuffers = true
                    self.maybeFinish()
                    return
                }
                var result = false
                if self.tryAppendingPendingAudioBuffers() {
                    if self.tryAppendingAudioSampleBuffer(sampleBuffer) {
                        result = true
                    }
                }
                if !result {
                    self.transitionToFailedStatus(error: .generic)
                }
            }
        }
    }

    /// Cancels the recording, discarding the partially written file.
    public func cancelRecording(completion: @escaping () -> Void) {
        self.queue.async {
            if self.stopped {
                DispatchQueue.main.async {
                    completion()
                }
                return
            }
            self.stopped = true
            self.pendingAudioSampleBuffers = []
            if self.assetWriter.status == .writing {
                self.assetWriter.cancelWriting()
            }
            let fileManager = FileManager()
            try? fileManager.removeItem(at: self.url)
            DispatchQueue.main.async {
                completion()
            }
        }
    }

    public var isRecording: Bool {
        self.queue.sync { !(self.hasAllVideoBuffers && self.hasAllAudioBuffers) }
    }

    /// Requests the recording to stop; buffers past the stop time trigger
    /// `maybeFinish()`. Enforces a minimum recording length of one second.
    public func stopRecording() {
        self.queue.async {
            var stopTime = CMTime(seconds: CACurrentMediaTime(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
            if self.recordingStartSampleTime.isValid {
                if (stopTime - self.recordingStartSampleTime).seconds < 1.0 {
                    stopTime = self.recordingStartSampleTime + CMTime(seconds: 1.0, preferredTimescale: self.recordingStartSampleTime.timescale)
                }
            }
            self.recordingStopSampleTime = stopTime
        }
    }

    /// Finishes once all streams have delivered their final buffers.
    public func maybeFinish() {
        self.queue.async {
            // Fix: the original guard tested `hasAllVideoBuffers` twice
            // (`hasAllVideoBuffers && hasAllVideoBuffers`), so the recording
            // could finish while trailing audio was still pending. Require the
            // audio flag too — but only when audio is actually being recorded,
            // otherwise an audio-less recording would never finish.
            let audioComplete = self.hasAllAudioBuffers || !self.configuration.hasAudio
            guard self.hasAllVideoBuffers && audioComplete else {
                return
            }
            self.stopped = true
            self.finish()
        }
    }

    /// Flushes remaining audio and asks the asset writer to finalize the file,
    /// then reports success/failure through `completion` on the main queue.
    public func finish() {
        self.queue.async {
            let completion = self.completion
            if self.recordingStopSampleTime == .invalid {
                DispatchQueue.main.async {
                    completion(false, nil, nil)
                }
                return
            }

            if let _ = self.error.with({ $0 }) {
                DispatchQueue.main.async {
                    completion(false, nil, nil)
                }
                return
            }

            if !self.tryAppendingPendingAudioBuffers() {
                DispatchQueue.main.async {
                    completion(false, nil, nil)
                }
                return
            }

            if self.assetWriter.status == .writing {
                self.assetWriter.finishWriting {
                    if let _ = self.assetWriter.error {
                        DispatchQueue.main.async {
                            completion(false, nil, nil)
                        }
                    } else {
                        DispatchQueue.main.async {
                            completion(true, self.transitionImage, self.positionChangeTimestamps)
                        }
                    }
                }
            } else if let _ = self.assetWriter.error {
                DispatchQueue.main.async {
                    completion(false, nil, nil)
                }
            } else {
                DispatchQueue.main.async {
                    completion(false, nil, nil)
                }
            }
        }
    }

    /// Appends every deferred audio buffer that video has caught up with;
    /// returns false on writer failure.
    private func tryAppendingPendingAudioBuffers() -> Bool {
        dispatchPrecondition(condition: .onQueue(self.queue))
        guard self.pendingAudioSampleBuffers.count > 0 else {
            return true
        }

        var result = true
        let (sampleBuffersToAppend, pendingSampleBuffers) = self.pendingAudioSampleBuffers.stableGroup(using: { $0.endTime <= self.lastVideoSampleTime })
        for sampleBuffer in sampleBuffersToAppend {
            if !self.internalAppendAudioSampleBuffer(sampleBuffer) {
                result = false
                break
            }
        }
        self.pendingAudioSampleBuffers = pendingSampleBuffers
        return result
    }

    /// Appends the buffer immediately if video has advanced past it, otherwise
    /// defers it; returns false on writer failure.
    private func tryAppendingAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> Bool {
        dispatchPrecondition(condition: .onQueue(self.queue))
        var result = true
        if sampleBuffer.endTime > self.lastVideoSampleTime {
            self.pendingAudioSampleBuffers.append(sampleBuffer)
        } else {
            result = self.internalAppendAudioSampleBuffer(sampleBuffer)
        }
        return result
    }

    /// Low-level append; a buffer is silently dropped when the input is not
    /// ready, and false is returned only on a writer error.
    private func internalAppendAudioSampleBuffer(_ sampleBuffer: CMSampleBuffer) -> Bool {
        if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
            if !audioInput.append(sampleBuffer) {
                if let _ = self.assetWriter.error {
                    return false
                }
            }
        } else {
        }
        return true
    }

    private func transitionToFailedStatus(error: RecorderError) {
        let _ = self.error.modify({ _ in return error })
    }
}
private extension Sequence {
    /// Partitions the sequence into (matching, non-matching) arrays, preserving
    /// the original element order within each group.
    func stableGroup(using predicate: (Element) throws -> Bool) rethrows -> ([Element], [Element]) {
        var matching: [Element] = []
        var remaining: [Element] = []
        for candidate in self {
            if try predicate(candidate) {
                matching.append(candidate)
            } else {
                remaining.append(candidate)
            }
        }
        return (matching, remaining)
    }
}
/// Public facade over `VideoRecorderImpl`: owns the configuration, translates
/// the impl's raw completion callback into a typed `Result`, and routes sample
/// buffers to the correct track.
public final class VideoRecorder {
    // Current recorded duration in seconds (zero until writing starts).
    var duration: Double? {
        return self.impl.duration.seconds
    }

    // Outcome reported exactly once through the `completion` closure.
    enum Result {
        enum Error {
            case generic
        }

        // Transition image, duration (s), and camera position-change times (s).
        case success(UIImage?, Double, [(Camera.Position, Double)])
        case initError(Error)
        case writeError(Error)
        case finishError(Error)
    }

    // Output settings for the asset writer's video and audio inputs.
    struct Configuration {
        var videoSettings: [String: Any]
        var audioSettings: [String: Any]

        init(videoSettings: [String: Any], audioSettings: [String: Any]) {
            self.videoSettings = videoSettings
            self.audioSettings = audioSettings
        }

        // Empty audio settings mean an audio-less (muted) recording.
        var hasAudio: Bool {
            return !self.audioSettings.isEmpty
        }
    }

    private let impl: VideoRecorderImpl
    fileprivate let configuration: Configuration
    fileprivate let fileUrl: URL
    private let completion: (Result) -> Void

    public var isRecording: Bool {
        return self.impl.isRecording
    }

    /// Fails (after reporting `.initError`) when the underlying asset writer
    /// cannot be created for `fileUrl`.
    init?(configuration: Configuration, orientation: AVCaptureVideoOrientation, fileUrl: URL, completion: @escaping (Result) -> Void) {
        self.configuration = configuration
        self.fileUrl = fileUrl
        self.completion = completion

        guard let impl = VideoRecorderImpl(configuration: configuration, orientation: orientation, fileUrl: fileUrl) else {
            // Deliberately invoke the caller's completion before failing init.
            completion(.initError(.generic))
            return nil
        }
        self.impl = impl

        // Bridge the impl's (success, image, timestamps) callback to `Result`;
        // weak self: the recorder may be released before writing finishes.
        impl.completion = { [weak self] result, transitionImage, positionChangeTimestamps in
            if let self {
                let duration = self.duration ?? 0.0
                if result {
                    var timestamps: [(Camera.Position, Double)] = []
                    if let positionChangeTimestamps {
                        for (position, time) in positionChangeTimestamps {
                            timestamps.append((position, time.seconds))
                        }
                    }
                    self.completion(.success(transitionImage, duration, timestamps))
                } else {
                    self.completion(.finishError(.generic))
                }
            }
        }
    }

    func start() {
        self.impl.start()
    }

    func stop() {
        self.impl.stopRecording()
    }

    func markPositionChange(position: Camera.Position, time: CMTime? = nil) {
        self.impl.markPositionChange(position: position, time: time)
    }

    /// Routes a sample buffer to the video or audio input based on its media
    /// type; audio is forwarded only when the configuration includes audio.
    func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
            return
        }
        let type = CMFormatDescriptionGetMediaType(formatDescriptor)
        if type == kCMMediaType_Video {
            self.impl.appendVideoSampleBuffer(sampleBuffer)
        } else if type == kCMMediaType_Audio {
            if self.configuration.hasAudio {
                self.impl.appendAudioSampleBuffer(sampleBuffer)
            }
        }
    }
}

View File

@ -1,11 +0,0 @@
#import <UIKit/UIKit.h>
//! Project version number for Camera.
FOUNDATION_EXPORT double CameraVersionNumber;
//! Project version string for Camera.
FOUNDATION_EXPORT const unsigned char CameraVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <Camera/PublicHeader.h>

View File

@ -1,21 +0,0 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
import SwiftSignalKit
/// Placeholder node for the camera mode selector (photo / video / scan).
final class CameraModeNode: ASDisplayNode {
    // Selectable capture modes.
    enum Mode {
        case photo
        case video
        case scan
    }

    override init() {
        super.init()
    }

    // NOTE(review): not implemented yet — mode updates are currently ignored.
    func update(mode: Mode, transition: ContainedViewLayoutTransition) {
    }
}

View File

@ -1,227 +0,0 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
/// Immutable snapshot of animation state handed to the async draw pass.
private final class ZoomWheelNodeDrawingState: NSObject {
    // Animation progress in [0, 1].
    let transition: CGFloat
    // Whether the transition plays backwards (deactivating).
    let reverse: Bool

    init(transition: CGFloat, reverse: Bool) {
        self.transition = transition
        self.reverse = reverse

        super.init()
    }
}
/// Async-drawn node that renders a microphone-style glyph with an animated
/// diagonal strike-through toggled by its `State.active` flag.
final class ZoomWheelNode: ASDisplayNode {
    // Value-style state object; only `active` participates in equality.
    class State: Equatable {
        let active: Bool

        init(active: Bool) {
            self.active = active
        }

        static func ==(lhs: State, rhs: State) -> Bool {
            if lhs.active != rhs.active {
                return false
            }
            return true
        }
    }

    // Captures an in-flight state change so draws can interpolate.
    private class TransitionContext {
        let startTime: Double
        let duration: Double
        let previousState: State

        init(startTime: Double, duration: Double, previousState: State) {
            self.startTime = startTime
            self.duration = duration
            self.previousState = previousState
        }
    }

    private var animator: ConstantDisplayLinkAnimator?
    private var hasState = false
    private var state: State = State(active: false)
    private var transitionContext: TransitionContext?

    override init() {
        super.init()

        self.isOpaque = false
    }

    /// Applies a new state; the first update is never animated so the node
    /// doesn't animate from its placeholder state.
    func update(state: State, animated: Bool) {
        var animated = animated
        if !self.hasState {
            self.hasState = true
            animated = false
        }

        if self.state != state {
            let previousState = self.state
            self.state = state

            if animated {
                self.transitionContext = TransitionContext(startTime: CACurrentMediaTime(), duration: 0.18, previousState: previousState)
            }

            self.updateAnimations()
            self.setNeedsDisplay()
        }
    }

    // Drives redraws via a display link while a transition is in flight,
    // pausing the link once the transition has completed.
    private func updateAnimations() {
        var animate = false
        let timestamp = CACurrentMediaTime()

        if let transitionContext = self.transitionContext {
            if transitionContext.startTime + transitionContext.duration < timestamp {
                self.transitionContext = nil
            } else {
                animate = true
            }
        }

        if animate {
            let animator: ConstantDisplayLinkAnimator
            if let current = self.animator {
                animator = current
            } else {
                animator = ConstantDisplayLinkAnimator(update: { [weak self] in
                    self?.updateAnimations()
                })
                self.animator = animator
            }
            animator.isPaused = false
        } else {
            self.animator?.isPaused = true
        }

        self.setNeedsDisplay()
    }

    // Snapshots the interpolated transition fraction for the async draw pass.
    override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
        var transitionFraction: CGFloat = self.state.active ? 1.0 : 0.0

        var reverse = false
        if let transitionContext = self.transitionContext {
            let timestamp = CACurrentMediaTime()
            var t = CGFloat((timestamp - transitionContext.startTime) / transitionContext.duration)
            t = min(1.0, max(0.0, t))

            if transitionContext.previousState.active != self.state.active {
                transitionFraction = self.state.active ? t : 1.0 - t

                reverse = transitionContext.previousState.active
            }
        }

        return ZoomWheelNodeDrawingState(transition: transitionFraction, reverse: reverse)
    }

    // Runs on the async display queue; must only read `parameters`.
    @objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
        let context = UIGraphicsGetCurrentContext()!

        if !isRasterizing {
            context.setBlendMode(.copy)
            context.setFillColor(UIColor.clear.cgColor)
            context.fill(bounds)
        }

        guard let parameters = parameters as? ZoomWheelNodeDrawingState else {
            return
        }

        let color = UIColor(rgb: 0xffffff)
        context.setFillColor(color.cgColor)

        let clearLineWidth: CGFloat = 4.0
        let lineWidth: CGFloat = 1.0 + UIScreenPixel

        context.scaleBy(x: 2.5, y: 2.5)
        context.translateBy(x: 4.0, y: 3.0)

        // Microphone glyph: stand/arc, then capsule body.
        let _ = try? drawSvgPath(context, path: "M14,8.335 C14.36727,8.335 14.665,8.632731 14.665,9 C14.665,11.903515 12.48064,14.296846 9.665603,14.626311 L9.665,16 C9.665,16.367269 9.367269,16.665 9,16.665 C8.666119,16.665 8.389708,16.418942 8.34221,16.098269 L8.335,16 L8.3354,14.626428 C5.519879,14.297415 3.335,11.90386 3.335,9 C3.335,8.632731 3.632731,8.335 4,8.335 C4.367269,8.335 4.665,8.632731 4.665,9 C4.665,11.394154 6.605846,13.335 9,13.335 C11.39415,13.335 13.335,11.394154 13.335,9 C13.335,8.632731 13.63273,8.335 14,8.335 Z ")
        let _ = try? drawSvgPath(context, path: "M9,2.5 C10.38071,2.5 11.5,3.61929 11.5,5 L11.5,9 C11.5,10.380712 10.38071,11.5 9,11.5 C7.619288,11.5 6.5,10.380712 6.5,9 L6.5,5 C6.5,3.61929 7.619288,2.5 9,2.5 Z ")

        context.translateBy(x: -4.0, y: -3.0)

        if parameters.transition > 0.0 {
            let startPoint: CGPoint
            let endPoint: CGPoint

            let origin = CGPoint(x: 9.0, y: 10.0 - UIScreenPixel)
            let length: CGFloat = 17.0

            if parameters.reverse {
                startPoint = CGPoint(x: origin.x + length * (1.0 - parameters.transition), y: origin.y + length * (1.0 - parameters.transition))
                endPoint = CGPoint(x: origin.x + length, y: origin.y + length)
            } else {
                startPoint = origin
                endPoint = CGPoint(x: origin.x + length * parameters.transition, y: origin.y + length * parameters.transition)
            }

            // First carve a transparent groove along the strike line so the
            // glyph appears "cut", then stroke the visible line inside it.
            context.setBlendMode(.clear)
            context.setLineWidth(clearLineWidth)

            context.move(to: startPoint)
            context.addLine(to: endPoint)
            context.strokePath()

            context.setBlendMode(.normal)
            context.setStrokeColor(color.cgColor)
            context.setLineWidth(lineWidth)
            context.setLineCap(.round)
            context.setLineJoin(.round)

            context.move(to: startPoint)
            context.addLine(to: endPoint)
            context.strokePath()
        }
    }
}
/// A minimal button used by the camera zoom UI: a background node plus a text
/// label.
///
/// NOTE(review): this control is still a stub — `update()` is empty and no
/// highlight feedback is implemented yet.
private class ButtonNode: HighlightTrackingButtonNode {
    private let backgroundNode: ASDisplayNode
    private let textNode: ImmediateTextNode
    
    init() {
        self.backgroundNode = ASDisplayNode()
        self.textNode = ImmediateTextNode()
        
        super.init()
        
        self.addSubnode(self.backgroundNode)
        self.addSubnode(self.textNode)
        
        // The previous implementation installed a `highligthedChanged` handler
        // that only bound `self` to an unused variable (an unused-variable
        // warning) and did nothing else; it was removed. Reinstate a handler
        // here when highlight feedback is actually implemented.
    }
    
    func update() {
    }
}
/// Container node for the camera zoom wheel.
///
/// NOTE(review): `backgroundNode` is created but never added to the node
/// hierarchy, and `wheelNode` is never assigned a frame here — presumably
/// layout happens elsewhere or this node is unfinished; confirm before relying
/// on it.
final class CameraZoomNode: ASDisplayNode {
    private let wheelNode: ZoomWheelNode
    private let backgroundNode: ASDisplayNode
    override init() {
        self.wheelNode = ZoomWheelNode()
        self.backgroundNode = ASDisplayNode()
        super.init()
        self.addSubnode(self.wheelNode)
    }
}

View File

@ -1,8 +1,8 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "MeshAnimationCache",
module_name = "MeshAnimationCache",
name = "ChatContextQuery",
module_name = "ChatContextQuery",
srcs = glob([
"Sources/**/*.swift",
]),
@ -10,11 +10,10 @@ swift_library(
"-warnings-as-errors",
],
deps = [
"//submodules/LottieMeshSwift:LottieMeshSwift",
"//submodules/Postbox:Postbox",
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/GZip:GZip",
"//submodules/AppBundle:AppBundle",
"//submodules/TelegramCore:TelegramCore",
"//submodules/TextFormat:TextFormat",
"//submodules/AccountContext:AccountContext",
],
visibility = [
"//visibility:public",

View File

@ -0,0 +1,241 @@
import Foundation
import SwiftSignalKit
import TextFormat
import TelegramCore
import AccountContext
/// Bit flags describing which context-query kinds could apply at a given
/// position in the chat text input. Several flags may be set while scanning,
/// and are narrowed down once a control character is identified.
public struct PossibleContextQueryTypes: OptionSet {
    public var rawValue: Int32
    
    /// Creates an empty set (no query types possible).
    public init() {
        self.rawValue = 0
    }
    
    public init(rawValue: Int32) {
        self.rawValue = rawValue
    }
    
    /// The whole input is a single emoji.
    public static let emoji = PossibleContextQueryTypes(rawValue: 1 << 0)
    /// "#tag" hashtag query.
    public static let hashtag = PossibleContextQueryTypes(rawValue: 1 << 1)
    /// "@username" mention query.
    public static let mention = PossibleContextQueryTypes(rawValue: 1 << 2)
    /// "/command" bot command query.
    public static let command = PossibleContextQueryTypes(rawValue: 1 << 3)
    /// "@bot query" inline-bot context request.
    public static let contextRequest = PossibleContextQueryTypes(rawValue: 1 << 4)
    /// ":query" emoji search.
    public static let emojiSearch = PossibleContextQueryTypes(rawValue: 1 << 5)
}
/// Whether a query control character ("#", "@", "/", ":") may start a query
/// when the scalar immediately before it is `c`.
///
/// A `nil` scalar means the control character sits at the very beginning of
/// the text, which is always allowed; otherwise only whitespace-like
/// separators (space, newline, period, comma) qualify.
private func scalarCanPrependQueryControl(_ c: UnicodeScalar?) -> Bool {
    guard let c = c else {
        return true
    }
    return c == " " || c == "\n" || c == "." || c == ","
}
/// Identity helper; despite the name it simply returns the `Character`
/// unchanged.
/// NOTE(review): appears unused in the visible portion of this file —
/// candidate for removal.
private func makeScalar(_ c: Character) -> Character {
    return c
}
// Frequently-compared scalars, hoisted to file scope so the per-character
// backward scan below does not re-construct them.
private let spaceScalar = " " as UnicodeScalar
private let newlineScalar = "\n" as UnicodeScalar
private let hashScalar = "#" as UnicodeScalar
private let atScalar = "@" as UnicodeScalar
private let slashScalar = "/" as UnicodeScalar
private let colonScalar = ":" as UnicodeScalar
// NOTE(review): `alphanumerics` appears unused in the visible portion of this
// file — confirm before removing.
private let alphanumerics = CharacterSet.alphanumerics
/// Convenience overload: extracts the attributed text and selection from
/// `inputState` and forwards to
/// `textInputStateContextQueryRangeAndType(inputText:selectionRange:)`.
public func textInputStateContextQueryRangeAndType(_ inputState: ChatTextInputState) -> [(NSRange, PossibleContextQueryTypes, NSRange?)] {
    return textInputStateContextQueryRangeAndType(inputText: inputState.inputText, selectionRange: inputState.selectionRange)
}
/// Scans `inputText` around the caret described by `selectionRange` and
/// returns the candidate context queries found there.
///
/// Each result is `(query range, possible query types, optional extra range)`.
/// For an inline-bot request ("@botname query") the first range covers the bot
/// address and the extra range covers the query text that follows; for all
/// other types the extra range is `nil`. All ranges are UTF-16 `NSRange`s into
/// `inputText.string`.
public func textInputStateContextQueryRangeAndType(inputText: NSAttributedString, selectionRange: Range<Int>) -> [(NSRange, PossibleContextQueryTypes, NSRange?)] {
    // Only a collapsed caret can host a context query; a non-empty selection never does.
    if selectionRange.count != 0 {
        return []
    }
    let inputString: NSString = inputText.string as NSString
    var results: [(NSRange, PossibleContextQueryTypes, NSRange?)] = []
    let inputLength = inputString.length
    if inputLength != 0 {
        // Inline-bot form: text beginning with "@username " is a context
        // request regardless of where the caret currently is.
        if inputString.hasPrefix("@") && inputLength != 1 {
            let startIndex = 1
            var index = startIndex
            var contextAddressRange: NSRange?
            while true {
                if index == inputLength {
                    break
                }
                if let c = UnicodeScalar(inputString.character(at: index)) {
                    if c == " " {
                        // A space terminates the address; require at least one
                        // address character before accepting it, and skip past
                        // the space so `index` marks the query start.
                        if index != startIndex {
                            contextAddressRange = NSRange(location: startIndex, length: index - startIndex)
                            index += 1
                        }
                        break
                    } else {
                        // Bot usernames may only contain [a-zA-Z0-9_].
                        if !((c >= "a" && c <= "z") || (c >= "A" && c <= "Z") || (c >= "0" && c <= "9") || c == "_") {
                            break
                        }
                    }
                    if index == inputLength {
                        break
                    } else {
                        index += 1
                    }
                } else {
                    // Not a valid scalar at this UTF-16 position: skip it.
                    index += 1
                }
            }
            if let contextAddressRange = contextAddressRange {
                // Everything after the space is the query for the inline bot.
                results.append((contextAddressRange, [.contextRequest], NSRange(location: index, length: inputLength - index)))
            }
        }
        // Backward scan: start just before the caret and walk toward the
        // beginning looking for a query control character.
        let maxIndex = min(selectionRange.lowerBound, inputLength)
        if maxIndex == 0 {
            return results
        }
        var index = maxIndex - 1
        var possibleQueryRange: NSRange?
        let string = (inputString as String)
        let trimmedString = string.trimmingTrailingSpaces()
        // A short input that is a single emoji (ignoring trailing spaces) is an
        // emoji query — unless it is a custom-emoji attachment, which must not
        // trigger sticker suggestions.
        if string.count < 3, trimmedString.isSingleEmoji {
            if inputText.attribute(ChatTextInputAttributes.customEmoji, at: 0, effectiveRange: nil) == nil {
                return [(NSRange(location: 0, length: inputString.length - (string.count - trimmedString.count)), [.emoji], nil)]
            }
        } else {
            /*let activeString = inputText.attributedSubstring(from: NSRange(location: 0, length: inputState.selectionRange.upperBound))
            if let lastCharacter = activeString.string.last, String(lastCharacter).isSingleEmoji {
                let matchLength = (String(lastCharacter) as NSString).length
                if activeString.attribute(ChatTextInputAttributes.customEmoji, at: activeString.length - matchLength, effectiveRange: nil) == nil {
                    return [(NSRange(location: inputState.selectionRange.upperBound - matchLength, length: matchLength), [.emojiSearch], nil)]
                }
            }*/
        }
        // Start with every backward-scannable type and narrow to exactly one
        // once a control character is found.
        var possibleTypes = PossibleContextQueryTypes([.command, .mention, .hashtag, .emojiSearch])
        var definedType = false
        while true {
            var previousC: UnicodeScalar?
            if index != 0 {
                previousC = UnicodeScalar(inputString.character(at: index - 1))
            }
            if let c = UnicodeScalar(inputString.character(at: index)) {
                if c == spaceScalar || c == newlineScalar {
                    // Whitespace between the caret and any control character
                    // further back invalidates the query (the scan continues,
                    // but the empty set rejects the result below).
                    possibleTypes = []
                } else if c == hashScalar {
                    if scalarCanPrependQueryControl(previousC) {
                        possibleTypes = possibleTypes.intersection([.hashtag])
                        definedType = true
                        // The query text starts after the control character.
                        index += 1
                        possibleQueryRange = NSRange(location: index, length: maxIndex - index)
                    }
                    break
                } else if c == atScalar {
                    if scalarCanPrependQueryControl(previousC) {
                        possibleTypes = possibleTypes.intersection([.mention])
                        definedType = true
                        index += 1
                        possibleQueryRange = NSRange(location: index, length: maxIndex - index)
                    }
                    break
                } else if c == slashScalar {
                    if scalarCanPrependQueryControl(previousC) {
                        possibleTypes = possibleTypes.intersection([.command])
                        definedType = true
                        index += 1
                        possibleQueryRange = NSRange(location: index, length: maxIndex - index)
                    }
                    break
                } else if c == colonScalar {
                    if scalarCanPrependQueryControl(previousC) {
                        possibleTypes = possibleTypes.intersection([.emojiSearch])
                        definedType = true
                        index += 1
                        possibleQueryRange = NSRange(location: index, length: maxIndex - index)
                    }
                    break
                }
            }
            if index == 0 {
                break
            } else {
                index -= 1
                possibleQueryRange = NSRange(location: index, length: maxIndex - index)
            }
        }
        // Accept only when a control character fixed the type and the type set
        // was not invalidated by intervening whitespace.
        if let possibleQueryRange = possibleQueryRange, definedType && !possibleTypes.isEmpty {
            results.append((possibleQueryRange, possibleTypes, nil))
        }
    }
    return results
}
/// Discriminator for the different context-query flavors, without their
/// associated payloads. Raw values are explicit to pin the wire/stable order.
public enum ChatPresentationInputQueryKind: Int32 {
    case emoji = 0
    case hashtag = 1
    case mention = 2
    case command = 3
    case contextRequest = 4
    case emojiSearch = 5
}
/// Bit flags selecting which entities a mention ("@") query should match.
public struct ChatInputQueryMentionTypes: OptionSet, Hashable {
    public var rawValue: Int32
    
    public init(rawValue: Int32) {
        self.rawValue = rawValue
    }
    
    /// Inline (context) bots.
    public static let contextBots = ChatInputQueryMentionTypes(rawValue: 1 << 0)
    /// Members of the current chat.
    public static let members = ChatInputQueryMentionTypes(rawValue: 1 << 1)
    /// The user's own account peer.
    public static let accountPeer = ChatInputQueryMentionTypes(rawValue: 1 << 2)
}
/// A concrete context query parsed from the input text, carrying the payload
/// specific to each flavor.
public enum ChatPresentationInputQuery: Hashable, Equatable {
    case emoji(String)
    case hashtag(String)
    case mention(query: String, types: ChatInputQueryMentionTypes)
    case command(String)
    case emojiSearch(query: String, languageCode: String, range: NSRange)
    case contextRequest(addressName: String, query: String)
    
    /// The kind discriminator for this query, with the associated payload
    /// dropped.
    public var kind: ChatPresentationInputQueryKind {
        switch self {
        case .emoji: return .emoji
        case .hashtag: return .hashtag
        case .mention: return .mention
        case .command: return .command
        case .contextRequest: return .contextRequest
        case .emojiSearch: return .emojiSearch
        }
    }
}
/// Errors produced while resolving a context query.
public enum ChatContextQueryError {
    case generic
    /// The inline bot with the given peer id requires the user's location
    /// before it can answer the query.
    case inlineBotLocationRequest(EnginePeer.Id)
}
/// Describes how the currently-presented query results should change.
public enum ChatContextQueryUpdate {
    /// Remove the results for this query kind.
    case remove
    /// Replace the results for the given query; the signal yields a function
    /// mapping the previous result to the next one.
    case update(ChatPresentationInputQuery, Signal<(ChatPresentationInputQueryResult?) -> ChatPresentationInputQueryResult?, ChatContextQueryError>)
}

View File

@ -3,7 +3,6 @@ import AsyncDisplayKit
import Display
import TelegramCore
import SwiftSignalKit
import Postbox
import TelegramPresentationData
import AccountContext
import PresentationDataUtils
@ -17,6 +16,26 @@ import ConfettiEffect
import TelegramUniversalVideoContent
import SolidRoundedButtonNode
/// Returns the size in bytes of the file at `path`, or `nil` if it cannot be
/// determined.
///
/// - Parameter useTotalFileAllocatedSize: when true, first tries to report the
///   total allocated (on-disk) size for regular files via `URLResourceValues`,
///   falling back to the logical size from `stat` if that lookup fails.
private func fileSize(_ path: String, useTotalFileAllocatedSize: Bool = false) -> Int64? {
    if useTotalFileAllocatedSize {
        let fileUrl = URL(fileURLWithPath: path)
        let keys: Set<URLResourceKey> = [.isRegularFileKey, .totalFileAllocatedSizeKey]
        if let values = try? fileUrl.resourceValues(forKeys: keys),
           values.isRegularFile == true,
           let allocatedSize = values.totalFileAllocatedSize {
            return Int64(allocatedSize)
        }
        // Fall through to stat() when the allocated size is unavailable.
    }
    var info = stat()
    guard stat(path, &info) == 0 else {
        return nil
    }
    return Int64(info.st_size)
}
private final class ProgressEstimator {
private var averageProgressPerSecond: Double = 0.0
private var lastMeasurement: (Double, Float)?
@ -91,7 +110,7 @@ private final class ImportManager {
return self.statePromise.get()
}
init(account: Account, peerId: PeerId, mainFile: TempBoxFile, archivePath: String?, entries: [(SSZipEntry, String, TelegramEngine.HistoryImport.MediaType)]) {
init(account: Account, peerId: EnginePeer.Id, mainFile: EngineTempBox.File, archivePath: String?, entries: [(SSZipEntry, String, TelegramEngine.HistoryImport.MediaType)]) {
self.account = account
self.archivePath = archivePath
self.entries = entries
@ -234,8 +253,8 @@ private final class ImportManager {
Logger.shared.log("ChatImportScreen", "updateState take pending entry \(entry.1)")
let unpackedFile = Signal<TempBoxFile, ImportError> { subscriber in
let tempFile = TempBox.shared.tempFile(fileName: entry.0.path)
let unpackedFile = Signal<EngineTempBox.File, ImportError> { subscriber in
let tempFile = EngineTempBox.shared.tempFile(fileName: entry.0.path)
Logger.shared.log("ChatImportScreen", "Extracting \(entry.0.path) to \(tempFile.path)...")
let startTime = CACurrentMediaTime()
if SSZipArchive.extractFileFromArchive(atPath: archivePath, filePath: entry.0.path, toPath: tempFile.path) {
@ -440,9 +459,9 @@ public final class ChatImportActivityScreen: ViewController {
if let path = getAppBundle().path(forResource: "BlankVideo", ofType: "m4v"), let size = fileSize(path) {
let decoration = ChatBubbleVideoDecoration(corners: ImageCorners(), nativeSize: CGSize(width: 100.0, height: 100.0), contentMode: .aspectFit, backgroundColor: .black)
let dummyFile = TelegramMediaFile(fileId: MediaId(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [])])
let dummyFile = TelegramMediaFile(fileId: EngineMedia.Id(namespace: 0, id: 1), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: path, randomId: 12345), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "video/mp4", size: size, attributes: [.Video(duration: 1, size: PixelDimensions(width: 100, height: 100), flags: [], preloadSize: nil)])
let videoContent = NativeVideoContent(id: .message(1, MediaId(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil)
let videoContent = NativeVideoContent(id: .message(1, EngineMedia.Id(namespace: 0, id: 1)), userLocation: .other, fileReference: .standalone(media: dummyFile), streamVideo: .none, loopVideo: true, enableSound: false, fetchAutomatically: true, onlyFullSizeThumbnail: false, continuePlayingWithoutSoundOnLostAudioSession: false, placeholderColor: .black, storeAfterDownload: nil)
let videoNode = UniversalVideoNode(postbox: context.account.postbox, audioSession: context.sharedContext.mediaManager.audioSession, manager: context.sharedContext.mediaManager.universalVideoManager, decoration: decoration, content: videoContent, priority: .embedded)
videoNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 2.0, height: 2.0))
@ -724,9 +743,9 @@ public final class ChatImportActivityScreen: ViewController {
private let context: AccountContext
private var presentationData: PresentationData
fileprivate let cancel: () -> Void
fileprivate var peerId: PeerId
fileprivate var peerId: EnginePeer.Id
private let archivePath: String?
private let mainEntry: TempBoxFile
private let mainEntry: EngineTempBox.File
private let totalBytes: Int64
private let totalMediaBytes: Int64
private let otherEntries: [(SSZipEntry, String, TelegramEngine.HistoryImport.MediaType)]
@ -746,7 +765,7 @@ public final class ChatImportActivityScreen: ViewController {
}
}
public init(context: AccountContext, cancel: @escaping () -> Void, peerId: PeerId, archivePath: String?, mainEntry: TempBoxFile, otherEntries: [(SSZipEntry, String, TelegramEngine.HistoryImport.MediaType)]) {
public init(context: AccountContext, cancel: @escaping () -> Void, peerId: EnginePeer.Id, archivePath: String?, mainEntry: EngineTempBox.File, otherEntries: [(SSZipEntry, String, TelegramEngine.HistoryImport.MediaType)]) {
self.context = context
self.cancel = cancel
self.peerId = peerId
@ -818,7 +837,7 @@ public final class ChatImportActivityScreen: ViewController {
self.progressEstimator = ProgressEstimator()
self.beganCompletion = false
let resolvedPeerId: Signal<PeerId, ImportManager.ImportError>
let resolvedPeerId: Signal<EnginePeer.Id, ImportManager.ImportError>
if self.peerId.namespace == Namespaces.Peer.CloudGroup {
resolvedPeerId = self.context.engine.peers.convertGroupToSupergroup(peerId: self.peerId)
|> mapError { _ -> ImportManager.ImportError in

View File

@ -30,6 +30,7 @@ public enum ChatListSearchItemHeaderType {
case downloading
case recentDownloads
case topics
case text(String, AnyHashable)
fileprivate func title(strings: PresentationStrings) -> String {
switch self {
@ -87,6 +88,8 @@ public enum ChatListSearchItemHeaderType {
return strings.DownloadList_DownloadedHeader
case .topics:
return strings.DialogList_SearchSectionTopics
case let .text(text, _):
return text
}
}
@ -146,11 +149,13 @@ public enum ChatListSearchItemHeaderType {
return .recentDownloads
case .topics:
return .topics
case let .text(_, id):
return .text(id)
}
}
}
private enum ChatListSearchItemHeaderId: Int32 {
private enum ChatListSearchItemHeaderId: Hashable {
case localPeers
case members
case contacts
@ -181,6 +186,7 @@ private enum ChatListSearchItemHeaderId: Int32 {
case downloading
case recentDownloads
case topics
case text(AnyHashable)
}
public final class ChatListSearchItemHeader: ListViewItemHeader {
@ -197,7 +203,7 @@ public final class ChatListSearchItemHeader: ListViewItemHeader {
public init(type: ChatListSearchItemHeaderType, theme: PresentationTheme, strings: PresentationStrings, actionTitle: String? = nil, action: (() -> Void)? = nil) {
self.type = type
self.id = ListViewItemNode.HeaderId(space: 0, id: Int64(self.type.id.rawValue))
self.id = ListViewItemNode.HeaderId(space: 0, id: Int64(self.type.id.hashValue))
self.theme = theme
self.strings = strings
self.actionTitle = actionTitle

View File

@ -10,7 +10,6 @@ import HorizontalPeerItem
import ListSectionHeaderNode
import ContextUI
import AccountContext
import Postbox
private func calculateItemCustomWidth(width: CGFloat) -> CGFloat {
let itemInsets = UIEdgeInsets(top: 0.0, left: 6.0, bottom: 0.0, right: 6.0)
@ -160,17 +159,19 @@ public final class ChatListSearchRecentPeersNode: ASDisplayNode {
return .single(([], [:], [:]))
case let .peers(peers):
return combineLatest(queue: .mainQueue(),
peers.filter {
!$0.isDeleted
}.map {
context.account.postbox.peerView(id: $0.id)
}
peers.filter {
!$0.isDeleted
}.map {
context.account.postbox.peerView(id: $0.id)
}
)
|> mapToSignal { peerViews -> Signal<([EnginePeer], [EnginePeer.Id: (Int32, Bool)], [EnginePeer.Id: EnginePeer.Presence]), NoError> in
return context.account.postbox.unreadMessageCountsView(items: peerViews.map { item -> UnreadMessageCountsItem in
return UnreadMessageCountsItem.peer(id: item.peerId, handleThreads: true)
})
|> map { values in
return context.engine.data.subscribe(
EngineDataMap(peerViews.map { item in
return TelegramEngine.EngineData.Item.Messages.PeerUnreadCount(id: item.peerId)
})
)
|> map { unreadCounts in
var peers: [EnginePeer] = []
var unread: [EnginePeer.Id: (Int32, Bool)] = [:]
var presences: [EnginePeer.Id: EnginePeer.Presence] = [:]
@ -186,9 +187,9 @@ public final class ChatListSearchRecentPeersNode: ASDisplayNode {
}
}
let unreadCount = values.count(for: .peer(id: peerView.peerId, handleThreads: true))
if let unreadCount = unreadCount, unreadCount > 0 {
unread[peerView.peerId] = (unreadCount, isMuted)
let unreadCount = unreadCounts[peerView.peerId]
if let unreadCount, unreadCount > 0 {
unread[peerView.peerId] = (Int32(unreadCount), isMuted)
}
if let presence = peerView.peerPresences[peer.id] {

View File

@ -93,6 +93,12 @@ swift_library(
"//submodules/ItemListUI",
"//submodules/QrCodeUI",
"//submodules/TelegramUI/Components/ActionPanelComponent",
"//submodules/TelegramUI/Components/Stories/StoryContainerScreen",
"//submodules/TelegramUI/Components/Stories/StoryPeerListComponent",
"//submodules/TelegramUI/Components/FullScreenEffectView",
"//submodules/TelegramUI/Components/Stories/AvatarStoryIndicatorComponent",
"//submodules/TelegramUI/Components/PeerInfo/PeerInfoStoryGridScreen",
"//submodules/TelegramUI/Components/Settings/ArchiveInfoScreen",
],
visibility = [
"//visibility:public",

View File

@ -723,6 +723,20 @@ func chatForumTopicMenuItems(context: AccountContext, peerId: PeerId, threadId:
return context.engine.peers.updatePeerDisplayPreviewsSetting(peerId: peerId, threadId: threadId, displayPreviews: displayPreviews) |> deliverOnMainQueue
}
let updatePeerStoriesMuted: (PeerId, PeerStoryNotificationSettings.Mute) -> Signal<Void, NoError> = {
peerId, mute in
return context.engine.peers.updatePeerStoriesMutedSetting(peerId: peerId, mute: mute) |> deliverOnMainQueue
}
let updatePeerStoriesHideSender: (PeerId, PeerStoryNotificationSettings.HideSender) -> Signal<Void, NoError> = {
peerId, hideSender in
return context.engine.peers.updatePeerStoriesHideSenderSetting(peerId: peerId, hideSender: hideSender) |> deliverOnMainQueue
}
let updatePeerStorySound: (PeerId, PeerMessageSound) -> Signal<Void, NoError> = { peerId, sound in
return context.engine.peers.updatePeerStorySoundInteractive(peerId: peerId, sound: sound) |> deliverOnMainQueue
}
let defaultSound: PeerMessageSound
if case .broadcast = channel.info {
@ -733,7 +747,7 @@ func chatForumTopicMenuItems(context: AccountContext, peerId: PeerId, threadId:
let canRemove = false
let exceptionController = notificationPeerExceptionController(context: context, updatedPresentationData: nil, peer: channel, threadId: threadId, canRemove: canRemove, defaultSound: defaultSound, edit: true, updatePeerSound: { peerId, sound in
let exceptionController = notificationPeerExceptionController(context: context, updatedPresentationData: nil, peer: .channel(channel), threadId: threadId, isStories: nil, canRemove: canRemove, defaultSound: defaultSound, defaultStoriesSound: defaultSound, edit: true, updatePeerSound: { peerId, sound in
let _ = (updatePeerSound(peerId, sound)
|> deliverOnMainQueue).start(next: { _ in
})
@ -756,6 +770,15 @@ func chatForumTopicMenuItems(context: AccountContext, peerId: PeerId, threadId:
|> deliverOnMainQueue).start(next: { _ in
})
}, updatePeerStoriesMuted: { peerId, mute in
let _ = (updatePeerStoriesMuted(peerId, mute)
|> deliverOnMainQueue).start()
}, updatePeerStoriesHideSender: { peerId, hideSender in
let _ = (updatePeerStoriesHideSender(peerId, hideSender)
|> deliverOnMainQueue).start()
}, updatePeerStorySound: { peerId, sound in
let _ = (updatePeerStorySound(peerId, sound)
|> deliverOnMainQueue).start()
}, removePeerFromExceptions: {
}, modifiedPeer: {
})

View File

@ -45,8 +45,12 @@ public class ChatListAdditionalCategoryItem: ItemListItem, ListViewItemWithHeade
self.action = action
switch appearance {
case .option:
self.header = ChatListSearchItemHeader(type: .chatTypes, theme: presentationData.theme, strings: presentationData.strings, actionTitle: nil, action: nil)
case let .option(sectionTitle):
if let sectionTitle {
self.header = ChatListSearchItemHeader(type: .text(sectionTitle, AnyHashable(0)), theme: presentationData.theme, strings: presentationData.strings, actionTitle: nil, action: nil)
} else {
self.header = ChatListSearchItemHeader(type: .chatTypes, theme: presentationData.theme, strings: presentationData.strings, actionTitle: nil, action: nil)
}
case .action:
self.header = header
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -9,15 +9,27 @@ import AppBundle
import SolidRoundedButtonNode
import ActivityIndicator
import AccountContext
import TelegramCore
import ComponentFlow
import ArchiveInfoScreen
import ComponentDisplayAdapters
import SwiftSignalKit
import ChatListHeaderComponent
final class ChatListEmptyNode: ASDisplayNode {
enum Subject {
case chats(hasArchive: Bool)
case archive
case filter(showEdit: Bool)
case forum(hasGeneral: Bool)
}
private let action: () -> Void
private let secondaryAction: () -> Void
private let openArchiveSettings: () -> Void
private let context: AccountContext
private var theme: PresentationTheme
private var strings: PresentationStrings
let subject: Subject
private(set) var isLoading: Bool
@ -28,14 +40,25 @@ final class ChatListEmptyNode: ASDisplayNode {
private let secondaryButtonNode: HighlightableButtonNode
private let activityIndicator: ActivityIndicator
private var emptyArchive: ComponentView<Empty>?
private var animationSize: CGSize = CGSize()
private var buttonIsHidden: Bool
private var validLayout: CGSize?
private var validLayout: (size: CGSize, insets: UIEdgeInsets)?
private var scrollingOffset: (navigationHeight: CGFloat, offset: CGFloat)?
init(context: AccountContext, subject: Subject, isLoading: Bool, theme: PresentationTheme, strings: PresentationStrings, action: @escaping () -> Void, secondaryAction: @escaping () -> Void) {
private var globalPrivacySettings: GlobalPrivacySettings = .default
private var archiveSettingsDisposable: Disposable?
init(context: AccountContext, subject: Subject, isLoading: Bool, theme: PresentationTheme, strings: PresentationStrings, action: @escaping () -> Void, secondaryAction: @escaping () -> Void, openArchiveSettings: @escaping () -> Void) {
self.context = context
self.theme = theme
self.strings = strings
self.action = action
self.secondaryAction = secondaryAction
self.openArchiveSettings = openArchiveSettings
self.subject = subject
self.isLoading = isLoading
@ -80,16 +103,20 @@ final class ChatListEmptyNode: ASDisplayNode {
super.init()
self.addSubnode(self.animationNode)
self.addSubnode(self.textNode)
self.addSubnode(self.descriptionNode)
self.addSubnode(self.buttonNode)
self.addSubnode(self.secondaryButtonNode)
self.addSubnode(self.activityIndicator)
self.animationNode.setup(source: AnimatedStickerNodeLocalFileSource(name: animationName), width: 248, height: 248, playbackMode: .once, mode: .direct(cachePathPrefix: nil))
self.animationSize = CGSize(width: 124.0, height: 124.0)
self.animationNode.visibility = true
if case .archive = subject {
} else {
self.addSubnode(self.animationNode)
self.addSubnode(self.textNode)
self.addSubnode(self.descriptionNode)
self.addSubnode(self.buttonNode)
self.addSubnode(self.secondaryButtonNode)
self.addSubnode(self.activityIndicator)
self.animationNode.setup(source: AnimatedStickerNodeLocalFileSource(name: animationName), width: 248, height: 248, playbackMode: .once, mode: .direct(cachePathPrefix: nil))
self.animationNode.visibility = true
}
self.animationNode.isHidden = self.isLoading
self.textNode.isHidden = self.isLoading
@ -107,6 +134,27 @@ final class ChatListEmptyNode: ASDisplayNode {
self.updateThemeAndStrings(theme: theme, strings: strings)
self.animationNode.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.animationTapGesture(_:))))
if case .archive = subject {
let _ = self.context.engine.privacy.updateGlobalPrivacySettings().start()
self.archiveSettingsDisposable = (context.engine.data.subscribe(
TelegramEngine.EngineData.Item.Configuration.GlobalPrivacy()
)
|> deliverOnMainQueue).start(next: { [weak self] settings in
guard let self else {
return
}
self.globalPrivacySettings = settings
if let (size, insets) = self.validLayout {
self.updateLayout(size: size, insets: insets, transition: .immediate)
}
})
}
}
deinit {
self.archiveSettingsDisposable?.dispose()
}
@objc private func buttonPressed() {
@ -130,13 +178,19 @@ final class ChatListEmptyNode: ASDisplayNode {
}
func updateThemeAndStrings(theme: PresentationTheme, strings: PresentationStrings) {
self.theme = theme
self.strings = strings
let text: String
var descriptionText = ""
let buttonText: String
let buttonText: String?
switch self.subject {
case let .chats(hasArchive):
text = hasArchive ? strings.ChatList_EmptyChatListWithArchive : strings.ChatList_EmptyChatList
buttonText = strings.ChatList_EmptyChatListNewMessage
case .archive:
text = strings.ChatList_EmptyChatList
buttonText = nil
case .filter:
text = strings.ChatList_EmptyChatListFilterTitle
descriptionText = strings.ChatList_EmptyChatListFilterText
@ -152,12 +206,21 @@ final class ChatListEmptyNode: ASDisplayNode {
self.textNode.attributedText = string
self.descriptionNode.attributedText = descriptionString
self.buttonNode.title = buttonText
if let buttonText {
self.buttonNode.title = buttonText
self.buttonNode.isHidden = false
} else {
self.buttonNode.isHidden = true
}
self.activityIndicator.type = .custom(theme.list.itemAccentColor, 22.0, 1.0, false)
if let size = self.validLayout {
self.updateLayout(size: size, transition: .immediate)
if let (size, insets) = self.validLayout {
self.updateLayout(size: size, insets: insets, transition: .immediate)
if let scrollingOffset = self.scrollingOffset {
self.updateScrollingOffset(navigationHeight: scrollingOffset.navigationHeight, offset: scrollingOffset.offset, transition: .immediate)
}
}
}
@ -173,17 +236,17 @@ final class ChatListEmptyNode: ASDisplayNode {
self.activityIndicator.isHidden = !self.isLoading
}
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
self.validLayout = size
func updateLayout(size: CGSize, insets: UIEdgeInsets, transition: ContainedViewLayoutTransition) {
self.validLayout = (size, insets)
let indicatorSize = self.activityIndicator.measure(CGSize(width: 100.0, height: 100.0))
transition.updateFrame(node: self.activityIndicator, frame: CGRect(origin: CGPoint(x: floor((size.width - indicatorSize.width) / 2.0), y: floor((size.height - indicatorSize.height - 50.0) / 2.0)), size: indicatorSize))
transition.updateFrame(node: self.activityIndicator, frame: CGRect(origin: CGPoint(x: floor((size.width - indicatorSize.width) / 2.0), y: insets.top + floor((size.height - insets.top - insets.bottom - indicatorSize.height - 50.0) / 2.0)), size: indicatorSize))
let animationSpacing: CGFloat = 24.0
let descriptionSpacing: CGFloat = 8.0
let textSize = self.textNode.updateLayout(CGSize(width: size.width - 40.0, height: size.height))
let descriptionSize = self.descriptionNode.updateLayout(CGSize(width: size.width - 40.0, height: size.height))
let textSize = self.textNode.updateLayout(CGSize(width: size.width - 40.0, height: size.height - insets.top - insets.bottom))
let descriptionSize = self.descriptionNode.updateLayout(CGSize(width: size.width - 40.0, height: size.height - insets.top - insets.bottom))
let buttonSideInset: CGFloat = 32.0
let buttonWidth = min(270.0, size.width - buttonSideInset * 2.0)
@ -199,7 +262,7 @@ final class ChatListEmptyNode: ASDisplayNode {
let contentHeight = self.animationSize.height + animationSpacing + textSize.height + buttonSize.height
var contentOffset: CGFloat = 0.0
if size.height < contentHeight + threshold {
if size.height - insets.top - insets.bottom < contentHeight + threshold {
contentOffset = -self.animationSize.height - animationSpacing + 44.0
transition.updateAlpha(node: self.animationNode, alpha: 0.0)
} else {
@ -207,7 +270,7 @@ final class ChatListEmptyNode: ASDisplayNode {
transition.updateAlpha(node: self.animationNode, alpha: 1.0)
}
let animationFrame = CGRect(origin: CGPoint(x: floor((size.width - self.animationSize.width) / 2.0), y: floor((size.height - contentHeight) / 2.0) + contentOffset), size: self.animationSize)
let animationFrame = CGRect(origin: CGPoint(x: floor((size.width - self.animationSize.width) / 2.0), y: insets.top + floor((size.height - insets.top - insets.bottom - contentHeight) / 2.0) + contentOffset), size: self.animationSize)
let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: animationFrame.maxY + animationSpacing), size: textSize)
let descriptionFrame = CGRect(origin: CGPoint(x: floor((size.width - descriptionSize.width) / 2.0), y: textFrame.maxY + descriptionSpacing), size: descriptionSize)
@ -221,7 +284,7 @@ final class ChatListEmptyNode: ASDisplayNode {
var bottomInset: CGFloat = 16.0
let secondaryButtonFrame = CGRect(origin: CGPoint(x: floor((size.width - secondaryButtonSize.width) / 2.0), y: size.height - secondaryButtonSize.height - bottomInset), size: secondaryButtonSize)
let secondaryButtonFrame = CGRect(origin: CGPoint(x: floor((size.width - secondaryButtonSize.width) / 2.0), y: size.height - insets.bottom - secondaryButtonSize.height - bottomInset), size: secondaryButtonSize)
transition.updateFrame(node: self.secondaryButtonNode, frame: secondaryButtonFrame)
if secondaryButtonSize.height > 0.0 {
@ -232,11 +295,68 @@ final class ChatListEmptyNode: ASDisplayNode {
if case .forum = self.subject {
buttonFrame = CGRect(origin: CGPoint(x: floor((size.width - buttonSize.width) / 2.0), y: descriptionFrame.maxY + 20.0), size: buttonSize)
} else {
buttonFrame = CGRect(origin: CGPoint(x: floor((size.width - buttonSize.width) / 2.0), y: size.height - buttonHeight - bottomInset), size: buttonSize)
buttonFrame = CGRect(origin: CGPoint(x: floor((size.width - buttonSize.width) / 2.0), y: size.height - insets.bottom - buttonHeight - bottomInset), size: buttonSize)
}
transition.updateFrame(node: self.buttonNode, frame: buttonFrame)
}
func updateScrollingOffset(navigationHeight: CGFloat, offset: CGFloat, transition: ContainedViewLayoutTransition) {
self.scrollingOffset = (navigationHeight, offset)
guard let (size, _) = self.validLayout else {
return
}
if case .archive = self.subject {
let emptyArchive: ComponentView<Empty>
if let current = self.emptyArchive {
emptyArchive = current
} else {
emptyArchive = ComponentView()
self.emptyArchive = emptyArchive
}
let emptyArchiveSize = emptyArchive.update(
transition: Transition(transition),
component: AnyComponent(ArchiveInfoContentComponent(
theme: self.theme,
strings: self.strings,
settings: self.globalPrivacySettings,
openSettings: { [weak self] in
guard let self else {
return
}
self.openArchiveSettings()
}
)),
environment: {
},
containerSize: CGSize(width: size.width, height: 10000.0)
)
if let emptyArchiveView = emptyArchive.view {
if emptyArchiveView.superview == nil {
self.view.addSubview(emptyArchiveView)
}
let cancelledOutHeight: CGFloat = max(0.0, ChatListNavigationBar.searchScrollHeight - offset)
let visibleNavigationHeight: CGFloat = navigationHeight - ChatListNavigationBar.searchScrollHeight + cancelledOutHeight
let additionalOffset = min(0.0, -offset + ChatListNavigationBar.searchScrollHeight)
var archiveFrame = CGRect(origin: CGPoint(x: 0.0, y: visibleNavigationHeight + floorToScreenPixels((size.height - visibleNavigationHeight - emptyArchiveSize.height - 50.0) * 0.5)), size: emptyArchiveSize)
archiveFrame.origin.y = max(archiveFrame.origin.y, visibleNavigationHeight + 20.0)
if size.height - visibleNavigationHeight - emptyArchiveSize.height - 20.0 < 0.0 {
archiveFrame.origin.y += additionalOffset
}
transition.updateFrame(view: emptyArchiveView, frame: archiveFrame)
}
} else if let emptyArchive = self.emptyArchive {
self.emptyArchive = nil
emptyArchive.view?.removeFromSuperview()
}
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
if self.buttonNode.frame.contains(point) {
return self.buttonNode.view.hitTest(self.view.convert(point, to: self.buttonNode.view), with: event)
@ -244,6 +364,11 @@ final class ChatListEmptyNode: ASDisplayNode {
if self.secondaryButtonNode.frame.contains(point), !self.secondaryButtonNode.isHidden {
return self.secondaryButtonNode.view.hitTest(self.view.convert(point, to: self.secondaryButtonNode.view), with: event)
}
if let emptyArchiveView = self.emptyArchive?.view {
if let result = emptyArchiveView.hitTest(self.view.convert(point, to: emptyArchiveView), with: event) {
return result
}
}
return nil
}
}

View File

@ -2,7 +2,6 @@ import Foundation
import UIKit
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
import TelegramPresentationData
import PresentationDataUtils
@ -30,8 +29,8 @@ private final class ChatListFilterPresetControllerArguments {
let updateState: ((ChatListFilterPresetControllerState) -> ChatListFilterPresetControllerState) -> Void
let openAddIncludePeer: () -> Void
let openAddExcludePeer: () -> Void
let deleteIncludePeer: (PeerId) -> Void
let deleteExcludePeer: (PeerId) -> Void
let deleteIncludePeer: (EnginePeer.Id) -> Void
let deleteExcludePeer: (EnginePeer.Id) -> Void
let setItemIdWithRevealedOptions: (ChatListFilterRevealedItemId?, ChatListFilterRevealedItemId?) -> Void
let deleteIncludeCategory: (ChatListFilterIncludeCategory) -> Void
let deleteExcludeCategory: (ChatListFilterExcludeCategory) -> Void
@ -49,8 +48,8 @@ private final class ChatListFilterPresetControllerArguments {
updateState: @escaping ((ChatListFilterPresetControllerState) -> ChatListFilterPresetControllerState) -> Void,
openAddIncludePeer: @escaping () -> Void,
openAddExcludePeer: @escaping () -> Void,
deleteIncludePeer: @escaping (PeerId) -> Void,
deleteExcludePeer: @escaping (PeerId) -> Void,
deleteIncludePeer: @escaping (EnginePeer.Id) -> Void,
deleteExcludePeer: @escaping (EnginePeer.Id) -> Void,
setItemIdWithRevealedOptions: @escaping (ChatListFilterRevealedItemId?, ChatListFilterRevealedItemId?) -> Void,
deleteIncludeCategory: @escaping (ChatListFilterIncludeCategory) -> Void,
deleteExcludeCategory: @escaping (ChatListFilterExcludeCategory) -> Void,
@ -93,7 +92,7 @@ private enum ChatListFilterPresetControllerSection: Int32 {
private enum ChatListFilterPresetEntryStableId: Hashable {
case index(Int)
case peer(PeerId)
case peer(EnginePeer.Id)
case includePeerInfo
case excludePeerInfo
case includeCategory(ChatListFilterIncludeCategory)
@ -311,7 +310,7 @@ private extension ChatListFilterCategoryIcon {
}
private enum ChatListFilterRevealedItemId: Equatable {
case peer(PeerId)
case peer(EnginePeer.Id)
case includeCategory(ChatListFilterIncludeCategory)
case excludeCategory(ChatListFilterExcludeCategory)
}
@ -573,8 +572,8 @@ private struct ChatListFilterPresetControllerState: Equatable {
var excludeMuted: Bool
var excludeRead: Bool
var excludeArchived: Bool
var additionallyIncludePeers: [PeerId]
var additionallyExcludePeers: [PeerId]
var additionallyIncludePeers: [EnginePeer.Id]
var additionallyExcludePeers: [EnginePeer.Id]
var revealedItemId: ChatListFilterRevealedItemId?
var expandedSections: Set<FilterSection>
@ -825,7 +824,7 @@ private func internalChatListFilterAddChatsController(context: AccountContext, f
return
}
var includePeers: [PeerId] = []
var includePeers: [EnginePeer.Id] = []
for peerId in peerIds {
switch peerId {
case let .peer(id):
@ -838,7 +837,7 @@ private func internalChatListFilterAddChatsController(context: AccountContext, f
if filter.id > 1, case let .filter(_, _, _, data) = filter, data.hasSharedLinks {
let newPeers = includePeers.filter({ !(filter.data?.includePeers.peers.contains($0) ?? false) })
var removedPeers: [PeerId] = []
var removedPeers: [EnginePeer.Id] = []
if let data = filter.data {
removedPeers = data.includePeers.peers.filter({ !includePeers.contains($0) })
}
@ -951,7 +950,7 @@ private func internalChatListFilterExcludeChatsController(context: AccountContex
return
}
var excludePeers: [PeerId] = []
var excludePeers: [EnginePeer.Id] = []
for peerId in peerIds {
switch peerId {
case let .peer(id):
@ -1144,7 +1143,7 @@ func chatListFilterPresetController(context: AccountContext, currentPreset initi
sharedLinks.set(Signal<[ExportedChatFolderLink]?, NoError>.single(nil) |> then(context.engine.peers.getExportedChatFolderLinks(id: initialPreset.id)))
}
let currentPeers = Atomic<[PeerId: EngineRenderedPeer]>(value: [:])
let currentPeers = Atomic<[EnginePeer.Id: EngineRenderedPeer]>(value: [:])
let stateWithPeers = statePromise.get()
|> mapToSignal { state -> Signal<(ChatListFilterPresetControllerState, [EngineRenderedPeer], [EngineRenderedPeer]), NoError> in
let currentPeersValue = currentPeers.with { $0 }

View File

@ -2,7 +2,6 @@ import Foundation
import UIKit
import Display
import SwiftSignalKit
import Postbox
import TelegramCore
import TelegramPresentationData
import TelegramUIPreferences
@ -39,7 +38,7 @@ private enum ChatListFilterPresetListSection: Int32 {
case list
}
private func stringForUserCount(_ peers: [PeerId: SelectivePrivacyPeer], strings: PresentationStrings) -> String {
private func stringForUserCount(_ peers: [EnginePeer.Id: SelectivePrivacyPeer], strings: PresentationStrings) -> String {
if peers.isEmpty {
return strings.PrivacyLastSeenSettings_EmpryUsersPlaceholder
} else {
@ -197,7 +196,7 @@ private func filtersWithAppliedOrder(filters: [(ChatListFilter, Int)], order: [I
return sortedFilters
}
private func chatListFilterPresetListControllerEntries(presentationData: PresentationData, state: ChatListFilterPresetListControllerState, filters: [(ChatListFilter, Int)], updatedFilterOrder: [Int32]?, suggestedFilters: [ChatListFeaturedFilter], settings: ChatListFilterSettings, isPremium: Bool, limits: EngineConfiguration.UserLimits, premiumLimits: EngineConfiguration.UserLimits) -> [ChatListFilterPresetListEntry] {
private func chatListFilterPresetListControllerEntries(presentationData: PresentationData, state: ChatListFilterPresetListControllerState, filters: [(ChatListFilter, Int)], updatedFilterOrder: [Int32]?, suggestedFilters: [ChatListFeaturedFilter], isPremium: Bool, limits: EngineConfiguration.UserLimits, premiumLimits: EngineConfiguration.UserLimits) -> [ChatListFilterPresetListEntry] {
var entries: [ChatListFilterPresetListEntry] = []
entries.append(.screenHeader(presentationData.strings.ChatListFolderSettings_Info))
@ -522,7 +521,6 @@ public func chatListFilterPresetListController(context: AccountContext, mode: Ch
let limits = allLimits.0
let premiumLimits = allLimits.1
let filterSettings = preferences.values[ApplicationSpecificPreferencesKeys.chatListFilterSettings]?.get(ChatListFilterSettings.self) ?? ChatListFilterSettings.default
let leftNavigationButton: ItemListNavigationButton?
switch mode {
case .default:
@ -590,7 +588,7 @@ public func chatListFilterPresetListController(context: AccountContext, mode: Ch
}
let controllerState = ItemListControllerState(presentationData: ItemListPresentationData(presentationData), title: .text(presentationData.strings.ChatListFolderSettings_Title), leftNavigationButton: leftNavigationButton, rightNavigationButton: rightNavigationButton, backNavigationButton: ItemListBackButton(title: presentationData.strings.Common_Back), animateChanges: false)
let listState = ItemListNodeState(presentationData: ItemListPresentationData(presentationData), entries: chatListFilterPresetListControllerEntries(presentationData: presentationData, state: state, filters: filtersWithCountsValue, updatedFilterOrder: updatedFilterOrderValue, suggestedFilters: suggestedFilters, settings: filterSettings, isPremium: isPremium, limits: limits, premiumLimits: premiumLimits), style: .blocks, animateChanges: true)
let listState = ItemListNodeState(presentationData: ItemListPresentationData(presentationData), entries: chatListFilterPresetListControllerEntries(presentationData: presentationData, state: state, filters: filtersWithCountsValue, updatedFilterOrder: updatedFilterOrderValue, suggestedFilters: suggestedFilters, isPremium: isPremium, limits: limits, premiumLimits: premiumLimits), style: .blocks, animateChanges: true)
return (controllerState, (listState, arguments))
}

View File

@ -3,7 +3,6 @@ import UIKit
import Display
import AsyncDisplayKit
import SwiftSignalKit
import Postbox
import TelegramCore
import TelegramPresentationData
import ItemListUI

View File

@ -2,7 +2,6 @@ import Foundation
import UIKit
import AsyncDisplayKit
import Display
import Postbox
import TelegramCore
import TelegramPresentationData
@ -85,6 +84,7 @@ private final class ItemNode: ASDisplayNode {
private var isDisabled: Bool = false
private var theme: PresentationTheme?
private var currentTitle: (String, String)?
private var pointerInteraction: PointerInteraction?
@ -198,16 +198,34 @@ private final class ItemNode: ASDisplayNode {
self.isEditing = isEditing
self.isDisabled = isDisabled
var themeUpdated = false
if self.theme !== presentationData.theme {
self.theme = presentationData.theme
self.badgeBackgroundActiveNode.image = generateStretchableFilledCircleImage(diameter: 18.0, color: presentationData.theme.chatList.unreadBadgeActiveBackgroundColor)
self.badgeBackgroundInactiveNode.image = generateStretchableFilledCircleImage(diameter: 18.0, color: presentationData.theme.chatList.unreadBadgeInactiveBackgroundColor)
themeUpdated = true
}
var titleUpdated = false
if self.currentTitle?.0 != title || self.currentTitle?.1 != shortTitle {
self.currentTitle = (title, shortTitle)
titleUpdated = true
}
var unreadCountUpdated = false
if self.unreadCount != unreadCount {
unreadCountUpdated = true
self.unreadCount = unreadCount
}
self.buttonNode.accessibilityLabel = title
if unreadCount > 0 {
self.buttonNode.accessibilityValue = strings.VoiceOver_Chat_UnreadMessages(Int32(unreadCount))
if self.buttonNode.accessibilityValue == nil || unreadCountUpdated {
self.buttonNode.accessibilityValue = strings.VoiceOver_Chat_UnreadMessages(Int32(unreadCount))
}
} else {
self.buttonNode.accessibilityValue = ""
}
@ -253,14 +271,19 @@ private final class ItemNode: ASDisplayNode {
transition.updateAlpha(node: self.shortTitleNode, alpha: deselectionAlpha)
transition.updateAlpha(node: self.shortTitleActiveNode, alpha: selectionAlpha)
self.titleNode.attributedText = NSAttributedString(string: title, font: Font.medium(14.0), textColor: presentationData.theme.list.itemSecondaryTextColor)
self.titleActiveNode.attributedText = NSAttributedString(string: title, font: Font.medium(14.0), textColor: presentationData.theme.list.itemAccentColor)
self.shortTitleNode.attributedText = NSAttributedString(string: shortTitle, font: Font.medium(14.0), textColor: presentationData.theme.list.itemSecondaryTextColor)
self.shortTitleActiveNode.attributedText = NSAttributedString(string: shortTitle, font: Font.medium(14.0), textColor: presentationData.theme.list.itemAccentColor)
if themeUpdated || titleUpdated {
self.titleNode.attributedText = NSAttributedString(string: title, font: Font.medium(14.0), textColor: presentationData.theme.list.itemSecondaryTextColor)
self.titleActiveNode.attributedText = NSAttributedString(string: title, font: Font.medium(14.0), textColor: presentationData.theme.list.itemAccentColor)
self.shortTitleNode.attributedText = NSAttributedString(string: shortTitle, font: Font.medium(14.0), textColor: presentationData.theme.list.itemSecondaryTextColor)
self.shortTitleActiveNode.attributedText = NSAttributedString(string: shortTitle, font: Font.medium(14.0), textColor: presentationData.theme.list.itemAccentColor)
}
if unreadCount != 0 {
self.badgeTextNode.attributedText = NSAttributedString(string: "\(unreadCount)", font: Font.regular(14.0), textColor: presentationData.theme.list.itemCheckColors.foregroundColor)
let badgeSelectionFraction: CGFloat = unreadHasUnmuted ? 1.0 : selectionFraction
if themeUpdated || unreadCountUpdated || self.badgeTextNode.attributedText == nil {
self.badgeTextNode.attributedText = NSAttributedString(string: "\(unreadCount)", font: Font.regular(14.0), textColor: presentationData.theme.list.itemCheckColors.foregroundColor)
}
let badgeSelectionFraction: CGFloat = unreadHasUnmuted ? 1.0 : selectionFraction
let badgeSelectionAlpha: CGFloat = badgeSelectionFraction
//let badgeDeselectionAlpha: CGFloat = 1.0 - badgeSelectionFraction

View File

@ -1326,7 +1326,7 @@ public final class ChatListSearchContainerNode: SearchDisplayControllerContentNo
if !entities.isEmpty {
attributes.append(TextEntitiesMessageAttribute(entities: entities))
}
result.append(.message(text: text.string, attributes: attributes, inlineStickers: [:], mediaReference: nil, replyToMessageId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: []))
result.append(.message(text: text.string, attributes: attributes, inlineStickers: [:], mediaReference: nil, replyToMessageId: nil, replyToStoryId: nil, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: []))
}
}
}
@ -1364,12 +1364,12 @@ public final class ChatListSearchContainerNode: SearchDisplayControllerContentNo
|> deliverOnMainQueue).start())
}
})
if let secretPeer = peer as? TelegramSecretChat {
if case let .secretChat(secretPeer) = peer {
if let peer = peerMap[secretPeer.regularPeerId] {
displayPeers.append(EnginePeer(peer))
displayPeers.append(peer)
}
} else {
displayPeers.append(EnginePeer(peer))
displayPeers.append(peer)
}
}

View File

@ -797,7 +797,8 @@ public enum ChatListSearchEntry: Comparable, Identifiable {
hasFailedMessages: false,
forumTopicData: nil,
topForumTopicItems: [],
autoremoveTimeout: nil
autoremoveTimeout: nil,
storyState: nil
)), editing: false, hasActiveRevealControls: false, selected: false, header: tagMask == nil ? header : nil, enableContextActions: false, hiddenOffset: false, interaction: interaction)
}
case let .addContact(phoneNumber, theme, strings):
@ -2068,7 +2069,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
interaction.dismissInput()
}, present: { c, a in
interaction.present(c, a)
}, transitionNode: { messageId, media in
}, transitionNode: { messageId, media, _ in
return transitionNodeImpl?(messageId, EngineMedia(media))
}, addToTransitionSurface: { view in
addToTransitionSurfaceImpl?(view)
@ -2166,6 +2167,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
}, openPremiumIntro: {
}, openChatFolderUpdates: {
}, hideChatFolderUpdates: {
}, openStories: { _, _ in
})
chatListInteraction.isSearchMode = true
@ -2202,12 +2204,12 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
interaction.dismissInput()
}, present: { c, a in
interaction.present(c, a)
}, transitionNode: { messageId, media in
}, transitionNode: { messageId, media, _ in
var transitionNode: (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?
if let strongSelf = self {
strongSelf.listNode.forEachItemNode { itemNode in
if let itemNode = itemNode as? ListMessageNode {
if let result = itemNode.transitionNode(id: messageId, media: media) {
if let result = itemNode.transitionNode(id: messageId, media: media, adjustRect: false) {
transitionNode = result
}
}
@ -3029,7 +3031,7 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
var transitionNode: (ASDisplayNode, CGRect, () -> (UIView?, UIView?))?
self.listNode.forEachItemNode { itemNode in
if let itemNode = itemNode as? ListMessageNode {
if let result = itemNode.transitionNode(id: messageId, media: media._asMedia()) {
if let result = itemNode.transitionNode(id: messageId, media: media._asMedia(), adjustRect: false) {
transitionNode = result
}
}
@ -3242,8 +3244,8 @@ final class ChatListSearchListPaneNode: ASDisplayNode, ChatListSearchPaneNode {
switch item.content {
case let .peer(peerData):
return (selectedItemNode.view, bounds, peerData.messages.last?.id ?? peerData.peer.peerId)
case let .groupReference(groupId, _, _, _, _):
return (selectedItemNode.view, bounds, groupId)
case let .groupReference(groupReference):
return (selectedItemNode.view, bounds, groupReference.groupId)
}
}
return nil
@ -3366,13 +3368,13 @@ private final class ShimmerEffectNode: ASDisplayNode {
}
}
private final class ChatListSearchShimmerNode: ASDisplayNode {
public final class ChatListSearchShimmerNode: ASDisplayNode {
private let backgroundColorNode: ASDisplayNode
private let effectNode: ShimmerEffectNode
private let maskNode: ASImageNode
private var currentParams: (size: CGSize, presentationData: PresentationData, key: ChatListSearchPaneKey)?
init(key: ChatListSearchPaneKey) {
public init(key: ChatListSearchPaneKey) {
self.backgroundColorNode = ASDisplayNode()
self.effectNode = ShimmerEffectNode()
self.maskNode = ASImageNode()
@ -3386,13 +3388,13 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
self.addSubnode(self.maskNode)
}
func update(context: AccountContext, size: CGSize, presentationData: PresentationData, animationCache: AnimationCache, animationRenderer: MultiAnimationRenderer, key: ChatListSearchPaneKey, hasSelection: Bool, transition: ContainedViewLayoutTransition) {
public func update(context: AccountContext, size: CGSize, presentationData: PresentationData, animationCache: AnimationCache, animationRenderer: MultiAnimationRenderer, key: ChatListSearchPaneKey, hasSelection: Bool, transition: ContainedViewLayoutTransition) {
if self.currentParams?.size != size || self.currentParams?.presentationData !== presentationData || self.currentParams?.key != key {
self.currentParams = (size, presentationData, key)
let chatListPresentationData = ChatListPresentationData(theme: presentationData.theme, fontSize: presentationData.chatFontSize, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat, nameSortOrder: presentationData.nameSortOrder, nameDisplayOrder: presentationData.nameDisplayOrder, disableAnimations: true)
let peer1: EnginePeer = .user(TelegramUser(id: EnginePeer.Id(namespace: Namespaces.Peer.CloudUser, id: EnginePeer.Id.Id._internalFromInt64Value(0)), accessHash: nil, firstName: "FirstName", lastName: nil, username: nil, phone: nil, photo: [], botInfo: nil, restrictionInfo: nil, flags: [], emojiStatus: nil, usernames: []))
let peer1: EnginePeer = .user(TelegramUser(id: EnginePeer.Id(namespace: Namespaces.Peer.CloudUser, id: EnginePeer.Id.Id._internalFromInt64Value(0)), accessHash: nil, firstName: "FirstName", lastName: nil, username: nil, phone: nil, photo: [], botInfo: nil, restrictionInfo: nil, flags: [], emojiStatus: nil, usernames: [], storiesHidden: nil))
let timestamp1: Int32 = 100000
var peers: [EnginePeer.Id: EnginePeer] = [:]
peers[peer1.id] = peer1
@ -3400,6 +3402,7 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
}, messageSelected: { _, _, _, _ in}, groupSelected: { _ in }, addContact: { _ in }, setPeerIdWithRevealedOptions: { _, _ in }, setItemPinned: { _, _ in }, setPeerMuted: { _, _ in }, setPeerThreadMuted: { _, _, _ in }, deletePeer: { _, _ in }, deletePeerThread: { _, _ in }, setPeerThreadStopped: { _, _, _ in }, setPeerThreadPinned: { _, _, _ in }, setPeerThreadHidden: { _, _, _ in }, updatePeerGrouping: { _, _ in }, togglePeerMarkedUnread: { _, _ in}, toggleArchivedFolderHiddenByDefault: {}, toggleThreadsSelection: { _, _ in }, hidePsa: { _ in }, activateChatPreview: { _, _, _, gesture, _ in
gesture?.cancel()
}, present: { _ in }, openForumThread: { _, _ in }, openStorageManagement: {}, openPasswordSetup: {}, openPremiumIntro: {}, openChatFolderUpdates: {}, hideChatFolderUpdates: {
}, openStories: { _, _ in
})
var isInlineMode = false
if case .topics = key {
@ -3433,7 +3436,8 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
associatedMessages: [:],
associatedMessageIds: [],
associatedMedia: [:],
associatedThreadInfo: nil
associatedThreadInfo: nil,
associatedStories: [:]
)
let readState = EnginePeerReadCounters()
return ChatListItem(presentationData: chatListPresentationData, context: context, chatListLocation: .chatList(groupId: .root), filterData: nil, index: .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: 0, messageIndex: EngineMessage.Index(id: EngineMessage.Id(peerId: peer1.id, namespace: 0, id: 0), timestamp: timestamp1))), content: .peer(ChatListItemContent.PeerData(
@ -3453,13 +3457,14 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
hasFailedMessages: false,
forumTopicData: nil,
topForumTopicItems: [],
autoremoveTimeout: nil
autoremoveTimeout: nil,
storyState: nil
)), editing: false, hasActiveRevealControls: false, selected: false, header: nil, enableContextActions: false, hiddenOffset: false, interaction: interaction)
case .media:
return nil
case .links:
var media: [EngineMedia] = []
media.append(.webpage(TelegramMediaWebpage(webpageId: EngineMedia.Id(namespace: 0, id: 0), content: .Loaded(TelegramMediaWebpageLoadedContent(url: "https://telegram.org", displayUrl: "https://telegram.org", hash: 0, type: nil, websiteName: "Telegram", title: "Telegram Telegram", text: "Telegram", embedUrl: nil, embedType: nil, embedSize: nil, duration: nil, author: nil, image: nil, file: nil, attributes: [], instantPage: nil)))))
media.append(.webpage(TelegramMediaWebpage(webpageId: EngineMedia.Id(namespace: 0, id: 0), content: .Loaded(TelegramMediaWebpageLoadedContent(url: "https://telegram.org", displayUrl: "https://telegram.org", hash: 0, type: nil, websiteName: "Telegram", title: "Telegram Telegram", text: "Telegram", embedUrl: nil, embedType: nil, embedSize: nil, duration: nil, author: nil, image: nil, file: nil, story: nil, attributes: [], instantPage: nil)))))
let message = EngineMessage(
stableId: 0,
stableVersion: 0,
@ -3482,7 +3487,8 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
associatedMessages: [:],
associatedMessageIds: [],
associatedMedia: [:],
associatedThreadInfo: nil
associatedThreadInfo: nil,
associatedStories: [:]
)
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: true, isGlobalSearchResult: true)
@ -3511,7 +3517,8 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
associatedMessages: [:],
associatedMessageIds: [],
associatedMedia: [:],
associatedThreadInfo: nil
associatedThreadInfo: nil,
associatedStories: [:]
)
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: false, isGlobalSearchResult: true)
@ -3540,7 +3547,8 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
associatedMessages: [:],
associatedMessageIds: [],
associatedMedia: [:],
associatedThreadInfo: nil
associatedThreadInfo: nil,
associatedStories: [:]
)
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: false, isGlobalSearchResult: true)
@ -3569,7 +3577,8 @@ private final class ChatListSearchShimmerNode: ASDisplayNode {
associatedMessages: [:],
associatedMessageIds: [],
associatedMedia: [:],
associatedThreadInfo: nil
associatedThreadInfo: nil,
associatedStories: [:]
)
return ListMessageItem(presentationData: ChatPresentationData(presentationData: presentationData), context: context, chatLocation: .peer(id: peer1.id), interaction: ListMessageItemInteraction.default, message: message._asMessage(), selection: hasSelection ? .selectable(selected: false) : .none, displayHeader: false, customHeader: nil, hintIsLink: false, isGlobalSearchResult: true)

View File

@ -257,7 +257,7 @@ private final class VisualMediaItemNode: ASDisplayNode {
})
if let duration = file.duration {
let durationString = stringForDuration(duration)
let durationString = stringForDuration(Int32(duration))
var badgeContent: ChatMessageInteractiveMediaBadgeContent?
var mediaDownloadState: ChatMessageInteractiveMediaDownloadState?

View File

@ -1,7 +1,6 @@
import Foundation
import UIKit
import SwiftSignalKit
import Postbox
import TelegramCore
import AccountContext
@ -15,7 +14,7 @@ struct ChatListSelectionOptions: Equatable {
let delete: Bool
}
func chatListSelectionOptions(context: AccountContext, peerIds: Set<PeerId>, filterId: Int32?) -> Signal<ChatListSelectionOptions, NoError> {
func chatListSelectionOptions(context: AccountContext, peerIds: Set<EnginePeer.Id>, filterId: Int32?) -> Signal<ChatListSelectionOptions, NoError> {
if peerIds.isEmpty {
if let filterId = filterId {
return chatListFilterItems(context: context)
@ -58,7 +57,7 @@ func chatListSelectionOptions(context: AccountContext, peerIds: Set<PeerId>, fil
}
func forumSelectionOptions(context: AccountContext, peerId: PeerId, threadIds: Set<Int64>) -> Signal<ChatListSelectionOptions, NoError> {
func forumSelectionOptions(context: AccountContext, peerId: EnginePeer.Id, threadIds: Set<Int64>) -> Signal<ChatListSelectionOptions, NoError> {
return context.engine.data.get(
TelegramEngine.EngineData.Item.Peer.Peer(id: peerId),
EngineDataList(threadIds.map { TelegramEngine.EngineData.Item.Peer.ThreadData(id: peerId, threadId: $0) })

View File

@ -1,7 +1,6 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Postbox
import Display
import SwiftSignalKit
import TelegramPresentationData

View File

@ -1,7 +1,6 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Postbox
import Display
import SwiftSignalKit
import TelegramPresentationData

View File

@ -1,7 +1,6 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Postbox
import Display
import SwiftSignalKit
import TelegramPresentationData

View File

@ -1,7 +1,6 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Postbox
import Display
import SwiftSignalKit
import TelegramPresentationData

View File

@ -64,6 +64,19 @@ public enum ChatListItemContent {
}
}
public struct StoryState: Equatable {
public var stats: EngineChatList.StoryStats
public var hasUnseenCloseFriends: Bool
public init(
stats: EngineChatList.StoryStats,
hasUnseenCloseFriends: Bool
) {
self.stats = stats
self.hasUnseenCloseFriends = hasUnseenCloseFriends
}
}
public struct PeerData {
public var messages: [EngineMessage]
public var peer: EngineRenderedPeer
@ -82,6 +95,7 @@ public enum ChatListItemContent {
public var forumTopicData: EngineChatList.ForumTopicData?
public var topForumTopicItems: [EngineChatList.ForumTopicData]
public var autoremoveTimeout: Int32?
public var storyState: StoryState?
public init(
messages: [EngineMessage],
@ -100,7 +114,8 @@ public enum ChatListItemContent {
hasFailedMessages: Bool,
forumTopicData: EngineChatList.ForumTopicData?,
topForumTopicItems: [EngineChatList.ForumTopicData],
autoremoveTimeout: Int32?
autoremoveTimeout: Int32?,
storyState: StoryState?
) {
self.messages = messages
self.peer = peer
@ -119,11 +134,37 @@ public enum ChatListItemContent {
self.forumTopicData = forumTopicData
self.topForumTopicItems = topForumTopicItems
self.autoremoveTimeout = autoremoveTimeout
self.storyState = storyState
}
}
public struct GroupReferenceData {
public var groupId: EngineChatList.Group
public var peers: [EngineChatList.GroupItem.Item]
public var message: EngineMessage?
public var unreadCount: Int
public var hiddenByDefault: Bool
public var storyState: StoryState?
public init(
groupId: EngineChatList.Group,
peers: [EngineChatList.GroupItem.Item],
message: EngineMessage?,
unreadCount: Int,
hiddenByDefault: Bool,
storyState: StoryState?
) {
self.groupId = groupId
self.peers = peers
self.message = message
self.unreadCount = unreadCount
self.hiddenByDefault = hiddenByDefault
self.storyState = storyState
}
}
case peer(PeerData)
case groupReference(groupId: EngineChatList.Group, peers: [EngineChatList.GroupItem.Item], message: EngineMessage?, unreadCount: Int, hiddenByDefault: Bool)
case groupReference(GroupReferenceData)
public var chatLocation: ChatLocation? {
switch self {
@ -250,8 +291,8 @@ public class ChatListItem: ListViewItem, ChatListSearchItemNeighbour {
} else if let peer = peerData.peer.peers[peerData.peer.peerId] {
self.interaction.peerSelected(peer, nil, nil, peerData.promoInfo)
}
case let .groupReference(groupId, _, _, _, _):
self.interaction.groupSelected(groupId)
case let .groupReference(groupReferenceData):
self.interaction.groupSelected(groupReferenceData.groupId)
}
}
@ -899,6 +940,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
var avatarIconView: ComponentHostView<Empty>?
var avatarIconComponent: EmojiStatusComponent?
var avatarVideoNode: AvatarVideoNode?
var avatarTapRecognizer: UITapGestureRecognizer?
private var inlineNavigationMarkLayer: SimpleLayer?
@ -987,9 +1029,9 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
return nil
}
switch item.content {
case let .groupReference(_, _, _, unreadCount, _):
case let .groupReference(groupReferenceData):
var result = item.presentationData.strings.ChatList_ArchivedChatsTitle
let allCount = unreadCount
let allCount = groupReferenceData.unreadCount
if allCount > 0 {
result += "\n\(item.presentationData.strings.VoiceOver_Chat_UnreadMessages(Int32(allCount)))"
}
@ -1019,7 +1061,9 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
return nil
}
switch item.content {
case let .groupReference(_, peers, messageValue, _, _):
case let .groupReference(groupReferenceData):
let peers = groupReferenceData.peers
let messageValue = groupReferenceData.message
if let message = messageValue, let peer = peers.first?.peer {
let messages = [message]
var result = ""
@ -1265,6 +1309,15 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
item.interaction.activateChatPreview(item, threadId, strongSelf.contextContainer, gesture, nil)
}
self.onDidLoad { [weak self] _ in
guard let self else {
return
}
let avatarTapRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.avatarStoryTapGesture(_:)))
self.avatarTapRecognizer = avatarTapRecognizer
self.avatarNode.view.addGestureRecognizer(avatarTapRecognizer)
}
}
deinit {
@ -1282,28 +1335,48 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
let previousItem = self.item
self.item = item
var storyState: ChatListItemContent.StoryState?
if case let .peer(peerData) = item.content {
storyState = peerData.storyState
} else if case let .groupReference(groupReference) = item.content {
storyState = groupReference.storyState
}
var peer: EnginePeer?
var displayAsMessage = false
var enablePreview = true
switch item.content {
case let .peer(peerData):
displayAsMessage = peerData.displayAsMessage
if displayAsMessage, case let .user(author) = peerData.messages.last?.author {
peer = .user(author)
} else {
peer = peerData.peer.chatMainPeer
}
if peerData.peer.peerId.namespace == Namespaces.Peer.SecretChat {
enablePreview = false
}
case let .groupReference(_, _, _, _, hiddenByDefault):
if let previousItem = previousItem, case let .groupReference(_, _, _, _, previousHiddenByDefault) = previousItem.content, hiddenByDefault != previousHiddenByDefault {
UIView.transition(with: self.avatarNode.view, duration: 0.3, options: [.transitionCrossDissolve], animations: {
}, completion: nil)
}
self.avatarNode.setPeer(context: item.context, theme: item.presentationData.theme, peer: peer, overrideImage: .archivedChatsIcon(hiddenByDefault: hiddenByDefault), emptyColor: item.presentationData.theme.list.mediaPlaceholderColor, synchronousLoad: synchronousLoads)
case let .peer(peerData):
displayAsMessage = peerData.displayAsMessage
if displayAsMessage, case let .user(author) = peerData.messages.last?.author {
peer = .user(author)
} else {
peer = peerData.peer.chatMainPeer
}
if peerData.peer.peerId.namespace == Namespaces.Peer.SecretChat {
enablePreview = false
}
case let .groupReference(groupReferenceData):
if let previousItem = previousItem, case let .groupReference(previousGroupReferenceData) = previousItem.content, groupReferenceData.hiddenByDefault != previousGroupReferenceData.hiddenByDefault {
UIView.transition(with: self.avatarNode.view, duration: 0.3, options: [.transitionCrossDissolve], animations: {
}, completion: nil)
}
self.avatarNode.setPeer(context: item.context, theme: item.presentationData.theme, peer: peer, overrideImage: .archivedChatsIcon(hiddenByDefault: groupReferenceData.hiddenByDefault), emptyColor: item.presentationData.theme.list.mediaPlaceholderColor, synchronousLoad: synchronousLoads)
}
self.avatarNode.setStoryStats(storyStats: storyState.flatMap { storyState in
return AvatarNode.StoryStats(
totalCount: storyState.stats.totalCount,
unseenCount: storyState.stats.unseenCount,
hasUnseenCloseFriendsItems: storyState.hasUnseenCloseFriends
)
}, presentationParams: AvatarNode.StoryPresentationParams(
colors: AvatarNode.Colors(theme: item.presentationData.theme),
lineWidth: 2.33,
inactiveLineWidth: 1.33
), transition: .immediate)
self.avatarNode.isUserInteractionEnabled = storyState != nil
if let peer = peer {
var overrideImage: AvatarNodeImageOverride?
if peer.id.isReplies {
@ -1350,7 +1423,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
videoNode = current
} else {
videoNode = AvatarVideoNode(context: item.context)
strongSelf.avatarNode.addSubnode(videoNode)
strongSelf.avatarNode.contentNode.addSubnode(videoNode)
strongSelf.avatarVideoNode = videoNode
}
videoNode.update(peer: peer, photo: photo, size: CGSize(width: 60.0, height: 60.0))
@ -1610,7 +1683,12 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
promoInfo = promoInfoValue
displayAsMessage = displayAsMessageValue
hasFailedMessages = messagesValue.last?.flags.contains(.Failed) ?? false // hasFailedMessagesValue
case let .groupReference(_, peers, messageValue, unreadCountValue, hiddenByDefault):
case let .groupReference(groupReferenceData):
let peers = groupReferenceData.peers
let messageValue = groupReferenceData.message
let unreadCountValue = groupReferenceData.unreadCount
let hiddenByDefault = groupReferenceData.hiddenByDefault
if let _ = messageValue, !peers.isEmpty {
contentPeer = .chat(peers[0].peer)
} else {
@ -1679,6 +1757,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
var currentCredibilityIconContent: EmojiStatusComponent.Content?
var currentSecretIconImage: UIImage?
var currentForwardedIcon: UIImage?
var currentStoryIcon: UIImage?
var selectableControlSizeAndApply: (CGFloat, (CGSize, Bool) -> ItemListSelectableControlNode)?
var reorderControlSizeAndApply: (CGFloat, (CGFloat, Bool, ContainedViewLayoutTransition) -> ItemListEditableReorderControlNode)?
@ -1801,6 +1880,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
var forumThread: (id: Int64, title: String, iconId: Int64?, iconColor: Int32, isUnread: Bool)?
var displayForwardedIcon = false
var displayStoryReplyIcon = false
switch contentData {
case let .chat(itemPeer, _, _, _, text, spoilers, customEmojiRanges):
@ -1979,6 +2059,8 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
if let forwardInfo = message.forwardInfo, !forwardInfo.flags.contains(.isImported) {
displayForwardedIcon = true
} else if let _ = message.attributes.first(where: { $0 is ReplyStoryAttribute }) {
displayStoryReplyIcon = true
}
var displayMediaPreviews = true
@ -2025,6 +2107,20 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
} else if let action = media as? TelegramMediaAction, case let .suggestedProfilePhoto(image) = action.action, let _ = image {
let fitSize = contentImageSize
contentImageSpecs.append((message, .action(action), fitSize))
} else if let storyMedia = media as? TelegramMediaStory, let story = message.associatedStories[storyMedia.storyId], !story.data.isEmpty, case let .item(storyItem) = story.get(Stories.StoredItem.self) {
if let image = storyItem.media as? TelegramMediaImage {
if let _ = largestImageRepresentation(image.representations) {
let fitSize = contentImageSize
contentImageSpecs.append((message, .image(image), fitSize))
}
break inner
} else if let file = storyItem.media as? TelegramMediaFile {
if file.isVideo, !file.isInstantVideo, let _ = file.dimensions {
let fitSize = contentImageSize
contentImageSpecs.append((message, .file(file), fitSize))
}
break inner
}
}
}
}
@ -2059,6 +2155,10 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}
}
if textString.length == 0, case let .groupReference(data) = item.content, let storyState = data.storyState, storyState.stats.totalCount != 0 {
let storyText: String = item.presentationData.strings.ChatList_ArchiveStoryCount(Int32(storyState.stats.totalCount))
textString.append(NSAttributedString(string: storyText, font: textFont, textColor: theme.messageTextColor))
}
attributedText = textString
}
@ -2066,6 +2166,10 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
currentForwardedIcon = PresentationResourcesChatList.forwardedIcon(item.presentationData.theme)
}
if displayStoryReplyIcon {
currentStoryIcon = PresentationResourcesChatList.storyReplyIcon(item.presentationData.theme)
}
if let currentForwardedIcon {
textLeftCutout += currentForwardedIcon.size.width
if !contentImageSpecs.isEmpty {
@ -2075,6 +2179,15 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}
if let currentStoryIcon {
textLeftCutout += currentStoryIcon.size.width
if !contentImageSpecs.isEmpty {
textLeftCutout += forwardedIconSpacing
} else {
textLeftCutout += contentImageTrailingSpace
}
}
for i in 0 ..< contentImageSpecs.count {
if i != 0 {
textLeftCutout += contentImageSpacing
@ -2115,8 +2228,8 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
let dateText: String
var topIndex: MessageIndex?
switch item.content {
case let .groupReference(_, _, message, _, _):
topIndex = message?.index
case let .groupReference(groupReferenceData):
topIndex = groupReferenceData.message?.index
case let .peer(peerData):
topIndex = peerData.messages.first?.index
}
@ -2735,6 +2848,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
let targetAvatarScaleOffset: CGFloat = -(avatarFrame.width - avatarFrame.width * avatarScale) * 0.5
avatarScaleOffset = targetAvatarScaleOffset * inlineNavigationLocation.progress
}
transition.updateFrame(node: strongSelf.avatarContainerNode, frame: avatarFrame)
transition.updatePosition(node: strongSelf.avatarNode, position: avatarFrame.offsetBy(dx: -avatarFrame.minX, dy: -avatarFrame.minY).center.offsetBy(dx: avatarScaleOffset, dy: 0.0))
transition.updateBounds(node: strongSelf.avatarNode, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
@ -3273,15 +3387,24 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
inputActivitiesApply?()
var mediaPreviewOffset = textNodeFrame.origin.offsetBy(dx: 1.0, dy: floor((measureLayout.size.height - contentImageSize.height) / 2.0))
var mediaPreviewOffset = textNodeFrame.origin.offsetBy(dx: 1.0, dy: 1.0 + floor((measureLayout.size.height - contentImageSize.height) / 2.0))
if let currentForwardedIcon = currentForwardedIcon {
strongSelf.forwardedIconNode.image = currentForwardedIcon
var messageTypeIcon: UIImage?
var messageTypeIconOffset = mediaPreviewOffset
if let currentForwardedIcon {
messageTypeIcon = currentForwardedIcon
messageTypeIconOffset.y += 3.0
} else if let currentStoryIcon {
messageTypeIcon = currentStoryIcon
}
if let messageTypeIcon {
strongSelf.forwardedIconNode.image = messageTypeIcon
if strongSelf.forwardedIconNode.supernode == nil {
strongSelf.mainContentContainerNode.addSubnode(strongSelf.forwardedIconNode)
}
transition.updateFrame(node: strongSelf.forwardedIconNode, frame: CGRect(origin: CGPoint(x: mediaPreviewOffset.x, y: mediaPreviewOffset.y + 3.0), size: currentForwardedIcon.size))
mediaPreviewOffset.x += currentForwardedIcon.size.width + forwardedIconSpacing
transition.updateFrame(node: strongSelf.forwardedIconNode, frame: CGRect(origin: messageTypeIconOffset, size: messageTypeIcon.size))
mediaPreviewOffset.x += messageTypeIcon.size.width + forwardedIconSpacing
} else if strongSelf.forwardedIconNode.supernode != nil {
strongSelf.forwardedIconNode.removeFromSupernode()
}
@ -3403,7 +3526,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
let separatorInset: CGFloat
if case let .groupReference(_, _, _, _, hiddenByDefault) = item.content, hiddenByDefault {
if case let .groupReference(groupReferenceData) = item.content, groupReferenceData.hiddenByDefault {
separatorInset = 0.0
} else if (!nextIsPinned && isPinned) || last {
separatorInset = 0.0
@ -3425,7 +3548,7 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
backgroundColor = theme.itemSelectedBackgroundColor
highlightedBackgroundColor = theme.itemHighlightedBackgroundColor
} else if isPinned {
if case let .groupReference(_, _, _, _, hiddenByDefault) = item.content, hiddenByDefault {
if case let .groupReference(groupReferenceData) = item.content, groupReferenceData.hiddenByDefault {
backgroundColor = theme.itemBackgroundColor
highlightedBackgroundColor = theme.itemHighlightedBackgroundColor
} else {
@ -3742,6 +3865,10 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
guard let item = self.item else {
return nil
}
if let compoundTextButtonNode = self.compoundTextButtonNode, let compoundHighlightingNode = self.compoundHighlightingNode, compoundHighlightingNode.alpha != 0.0 {
let localPoint = self.view.convert(point, to: compoundHighlightingNode.view)
var matches = false
@ -3756,6 +3883,29 @@ class ChatListItemNode: ItemListRevealOptionsItemNode {
}
}
if let _ = item.interaction.inlineNavigationLocation {
} else {
if self.avatarNode.storyStats != nil {
if let result = self.avatarNode.view.hitTest(self.view.convert(point, to: self.avatarNode.view), with: event) {
return result
}
}
}
return super.hitTest(point, with: event)
}
@objc private func avatarStoryTapGesture(_ recognizer: UITapGestureRecognizer) {
if case .ended = recognizer.state {
guard let item = self.item else {
return
}
switch item.content {
case let .peer(peerData):
item.interaction.openStories(.peer(peerData.peer.peerId), self)
case .groupReference:
item.interaction.openStories(.archive, self)
}
}
}
}

View File

@ -177,7 +177,7 @@ public func chatListItemStrings(strings: PresentationStrings, nameDisplayOrder:
processed = true
break inner
}
case let .Video(_, _, flags):
case let .Video(_, _, flags, _):
if flags.contains(.instantRoundVideo) {
messageText = strings.Message_VideoMessage
processed = true
@ -295,6 +295,16 @@ public func chatListItemStrings(strings: PresentationStrings, nameDisplayOrder:
messageText = "📊 \(poll.text)"
case let dice as TelegramMediaDice:
messageText = dice.emoji
case let story as TelegramMediaStory:
if story.isMention, let peer {
if message.flags.contains(.Incoming) {
messageText = strings.Conversation_StoryMentionTextIncoming(peer.compactDisplayTitle).string
} else {
messageText = strings.Conversation_StoryMentionTextOutgoing(peer.compactDisplayTitle).string
}
} else {
messageText = strings.Notification_Story
}
default:
break
}

View File

@ -18,10 +18,12 @@ import AnimationCache
import MultiAnimationRenderer
import Postbox
import ChatFolderLinkPreviewScreen
import StoryContainerScreen
import ChatListHeaderComponent
public enum ChatListNodeMode {
case chatList(appendContacts: Bool)
case peers(filter: ChatListNodePeersFilter, isSelecting: Bool, additionalCategories: [ChatListNodeAdditionalCategory], chatListFilters: [ChatListFilter]?, displayAutoremoveTimeout: Bool)
case peers(filter: ChatListNodePeersFilter, isSelecting: Bool, additionalCategories: [ChatListNodeAdditionalCategory], chatListFilters: [ChatListFilter]?, displayAutoremoveTimeout: Bool, displayPresence: Bool)
case peerType(type: [ReplyMarkupButtonRequestPeerType], hasCreate: Bool)
}
@ -98,6 +100,7 @@ public final class ChatListNodeInteraction {
let openPremiumIntro: () -> Void
let openChatFolderUpdates: () -> Void
let hideChatFolderUpdates: () -> Void
let openStories: (ChatListNode.OpenStoriesSubject, ASDisplayNode?) -> Void
public var searchTextHighightState: String?
var highlightedChatLocation: ChatListHighlightedLocation?
@ -144,7 +147,8 @@ public final class ChatListNodeInteraction {
openPasswordSetup: @escaping () -> Void,
openPremiumIntro: @escaping () -> Void,
openChatFolderUpdates: @escaping () -> Void,
hideChatFolderUpdates: @escaping () -> Void
hideChatFolderUpdates: @escaping () -> Void,
openStories: @escaping (ChatListNode.OpenStoriesSubject, ASDisplayNode?) -> Void
) {
self.activateSearch = activateSearch
self.peerSelected = peerSelected
@ -179,6 +183,7 @@ public final class ChatListNodeInteraction {
self.openPremiumIntro = openPremiumIntro
self.openChatFolderUpdates = openChatFolderUpdates
self.hideChatFolderUpdates = hideChatFolderUpdates
self.openStories = openStories
}
}
@ -213,6 +218,16 @@ private func areFoundPeerArraysEqual(_ lhs: [(EnginePeer, EnginePeer?)], _ rhs:
}
public struct ChatListNodeState: Equatable {
public struct StoryState: Equatable {
public var stats: EngineChatList.StoryStats
public var hasUnseenCloseFriends: Bool
public init(stats: EngineChatList.StoryStats, hasUnseenCloseFriends: Bool) {
self.stats = stats
self.hasUnseenCloseFriends = hasUnseenCloseFriends
}
}
public struct ItemId: Hashable {
public var peerId: EnginePeer.Id
public var threadId: Int64?
@ -236,6 +251,7 @@ public struct ChatListNodeState: Equatable {
public var foundPeers: [(EnginePeer, EnginePeer?)]
public var selectedPeerMap: [EnginePeer.Id: EnginePeer]
public var selectedThreadIds: Set<Int64>
public var archiveStoryState: StoryState?
public init(
presentationData: ChatListPresentationData,
@ -250,7 +266,8 @@ public struct ChatListNodeState: Equatable {
pendingClearHistoryPeerIds: Set<ItemId>,
hiddenItemShouldBeTemporaryRevealed: Bool,
hiddenPsaPeerId: EnginePeer.Id?,
selectedThreadIds: Set<Int64>
selectedThreadIds: Set<Int64>,
archiveStoryState: StoryState?
) {
self.presentationData = presentationData
self.editing = editing
@ -265,6 +282,7 @@ public struct ChatListNodeState: Equatable {
self.hiddenItemShouldBeTemporaryRevealed = hiddenItemShouldBeTemporaryRevealed
self.hiddenPsaPeerId = hiddenPsaPeerId
self.selectedThreadIds = selectedThreadIds
self.archiveStoryState = archiveStoryState
}
public static func ==(lhs: ChatListNodeState, rhs: ChatListNodeState) -> Bool {
@ -307,6 +325,9 @@ public struct ChatListNodeState: Equatable {
if lhs.selectedThreadIds != rhs.selectedThreadIds {
return false
}
if lhs.archiveStoryState != rhs.archiveStoryState {
return false
}
return true
}
}
@ -384,7 +405,13 @@ private func mappedInsertEntries(context: AccountContext, nodeInteraction: ChatL
hasFailedMessages: hasFailedMessages,
forumTopicData: forumTopicData,
topForumTopicItems: topForumTopicItems,
autoremoveTimeout: peerEntry.autoremoveTimeout
autoremoveTimeout: peerEntry.autoremoveTimeout,
storyState: peerEntry.storyState.flatMap { storyState in
return ChatListItemContent.StoryState(
stats: storyState.stats,
hasUnseenCloseFriends: storyState.hasUnseenCloseFriends
)
}
)),
editing: editing,
hasActiveRevealControls: hasActiveRevealControls,
@ -394,7 +421,7 @@ private func mappedInsertEntries(context: AccountContext, nodeInteraction: ChatL
hiddenOffset: threadInfo?.isHidden == true && !revealed,
interaction: nodeInteraction
), directionHint: entry.directionHint)
case let .peers(filter, isSelecting, _, filters, displayAutoremoveTimeout):
case let .peers(filter, isSelecting, _, filters, displayAutoremoveTimeout, displayPresence):
let itemPeer = peer.chatMainPeer
var chatPeer: EnginePeer?
if let peer = peer.peers[peer.peerId] {
@ -477,7 +504,7 @@ private func mappedInsertEntries(context: AccountContext, nodeInteraction: ChatL
var header: ChatListSearchItemHeader?
switch mode {
case let .peers(_, _, additionalCategories, _, _):
case let .peers(_, _, additionalCategories, _, _, _):
if !additionalCategories.isEmpty {
let headerType: ChatListSearchItemHeaderType
if case .action = additionalCategories[0].appearance {
@ -494,7 +521,9 @@ private func mappedInsertEntries(context: AccountContext, nodeInteraction: ChatL
var status: ContactsPeerItemStatus = .none
if isSelecting, let itemPeer = itemPeer {
if let (string, multiline, isActive, icon) = statusStringForPeerType(accountPeerId: context.account.peerId, strings: presentationData.strings, peer: itemPeer, isMuted: isRemovedFromTotalUnreadCount, isUnread: combinedReadState?.isUnread ?? false, isContact: isContact, hasUnseenMentions: hasUnseenMentions, chatListFilters: filters, displayAutoremoveTimeout: displayAutoremoveTimeout, autoremoveTimeout: peerEntry.autoremoveTimeout) {
if displayPresence, let presence = presence {
status = .presence(presence, presentationData.dateTimeFormat)
} else if let (string, multiline, isActive, icon) = statusStringForPeerType(accountPeerId: context.account.peerId, strings: presentationData.strings, peer: itemPeer, isMuted: isRemovedFromTotalUnreadCount, isUnread: combinedReadState?.isUnread ?? false, isContact: isContact, hasUnseenMentions: hasUnseenMentions, chatListFilters: filters, displayAutoremoveTimeout: displayAutoremoveTimeout, autoremoveTimeout: peerEntry.autoremoveTimeout) {
status = .custom(string: string, multiline: multiline, isActive: isActive, icon: icon)
} else {
status = .none
@ -594,26 +623,32 @@ private func mappedInsertEntries(context: AccountContext, nodeInteraction: ChatL
}
case let .HoleEntry(_, theme):
return ListViewInsertItem(index: entry.index, previousIndex: entry.previousIndex, item: ChatListHoleItem(theme: theme), directionHint: entry.directionHint)
case let .GroupReferenceEntry(index, presentationData, groupId, peers, message, editing, unreadCount, revealed, hiddenByDefault):
case let .GroupReferenceEntry(groupReferenceEntry):
return ListViewInsertItem(index: entry.index, previousIndex: entry.previousIndex, item: ChatListItem(
presentationData: presentationData,
presentationData: groupReferenceEntry.presentationData,
context: context,
chatListLocation: location,
filterData: filterData,
index: index,
content: .groupReference(
groupId: groupId,
peers: peers,
message: message,
unreadCount: unreadCount,
hiddenByDefault: hiddenByDefault
),
editing: editing,
index: groupReferenceEntry.index,
content: .groupReference(ChatListItemContent.GroupReferenceData(
groupId: groupReferenceEntry.groupId,
peers: groupReferenceEntry.peers,
message: groupReferenceEntry.message,
unreadCount: groupReferenceEntry.unreadCount,
hiddenByDefault: groupReferenceEntry.hiddenByDefault,
storyState: groupReferenceEntry.storyState.flatMap { storyState in
return ChatListItemContent.StoryState(
stats: storyState.stats,
hasUnseenCloseFriends: storyState.hasUnseenCloseFriends
)
}
)),
editing: groupReferenceEntry.editing,
hasActiveRevealControls: false,
selected: false,
header: nil,
enableContextActions: true,
hiddenOffset: hiddenByDefault && !revealed,
hiddenOffset: groupReferenceEntry.hiddenByDefault && !groupReferenceEntry.revealed,
interaction: nodeInteraction
), directionHint: entry.directionHint)
case let .ContactEntry(contactEntry):
@ -665,13 +700,9 @@ private func mappedInsertEntries(context: AccountContext, nodeInteraction: ChatL
nodeInteraction?.openPasswordSetup()
case .premiumUpgrade, .premiumAnnualDiscount, .premiumRestore:
nodeInteraction?.openPremiumIntro()
case .chatFolderUpdates:
nodeInteraction?.openChatFolderUpdates()
}
case .hide:
switch notice {
case .chatFolderUpdates:
nodeInteraction?.hideChatFolderUpdates()
default:
break
}
@ -731,7 +762,13 @@ private func mappedUpdateEntries(context: AccountContext, nodeInteraction: ChatL
hasFailedMessages: hasFailedMessages,
forumTopicData: forumTopicData,
topForumTopicItems: topForumTopicItems,
autoremoveTimeout: peerEntry.autoremoveTimeout
autoremoveTimeout: peerEntry.autoremoveTimeout,
storyState: peerEntry.storyState.flatMap { storyState in
return ChatListItemContent.StoryState(
stats: storyState.stats,
hasUnseenCloseFriends: storyState.hasUnseenCloseFriends
)
}
)),
editing: editing,
hasActiveRevealControls: hasActiveRevealControls,
@ -741,7 +778,7 @@ private func mappedUpdateEntries(context: AccountContext, nodeInteraction: ChatL
hiddenOffset: threadInfo?.isHidden == true && !revealed,
interaction: nodeInteraction
), directionHint: entry.directionHint)
case let .peers(filter, isSelecting, _, filters, displayAutoremoveTimeout):
case let .peers(filter, isSelecting, _, filters, displayAutoremoveTimeout, displayPresence):
let itemPeer = peer.chatMainPeer
var chatPeer: EnginePeer?
if let peer = peer.peers[peer.peerId] {
@ -778,7 +815,7 @@ private func mappedUpdateEntries(context: AccountContext, nodeInteraction: ChatL
var header: ChatListSearchItemHeader?
switch mode {
case let .peers(_, _, additionalCategories, _, _):
case let .peers(_, _, additionalCategories, _, _, _):
if !additionalCategories.isEmpty {
let headerType: ChatListSearchItemHeaderType
if case .action = additionalCategories[0].appearance {
@ -795,7 +832,9 @@ private func mappedUpdateEntries(context: AccountContext, nodeInteraction: ChatL
var status: ContactsPeerItemStatus = .none
if isSelecting, let itemPeer = itemPeer {
if let (string, multiline, isActive, icon) = statusStringForPeerType(accountPeerId: context.account.peerId, strings: presentationData.strings, peer: itemPeer, isMuted: isRemovedFromTotalUnreadCount, isUnread: combinedReadState?.isUnread ?? false, isContact: isContact, hasUnseenMentions: hasUnseenMentions, chatListFilters: filters, displayAutoremoveTimeout: displayAutoremoveTimeout, autoremoveTimeout: peerEntry.autoremoveTimeout) {
if displayPresence, let presence = presence {
status = .presence(presence, presentationData.dateTimeFormat)
} else if let (string, multiline, isActive, icon) = statusStringForPeerType(accountPeerId: context.account.peerId, strings: presentationData.strings, peer: itemPeer, isMuted: isRemovedFromTotalUnreadCount, isUnread: combinedReadState?.isUnread ?? false, isContact: isContact, hasUnseenMentions: hasUnseenMentions, chatListFilters: filters, displayAutoremoveTimeout: displayAutoremoveTimeout, autoremoveTimeout: peerEntry.autoremoveTimeout) {
status = .custom(string: string, multiline: multiline, isActive: isActive, icon: icon)
} else {
status = .none
@ -895,26 +934,32 @@ private func mappedUpdateEntries(context: AccountContext, nodeInteraction: ChatL
}
case let .HoleEntry(_, theme):
return ListViewUpdateItem(index: entry.index, previousIndex: entry.previousIndex, item: ChatListHoleItem(theme: theme), directionHint: entry.directionHint)
case let .GroupReferenceEntry(index, presentationData, groupId, peers, message, editing, unreadCount, revealed, hiddenByDefault):
case let .GroupReferenceEntry(groupReferenceEntry):
return ListViewUpdateItem(index: entry.index, previousIndex: entry.previousIndex, item: ChatListItem(
presentationData: presentationData,
presentationData: groupReferenceEntry.presentationData,
context: context,
chatListLocation: location,
filterData: filterData,
index: index,
content: .groupReference(
groupId: groupId,
peers: peers,
message: message,
unreadCount: unreadCount,
hiddenByDefault: hiddenByDefault
),
editing: editing,
index: groupReferenceEntry.index,
content: .groupReference(ChatListItemContent.GroupReferenceData(
groupId: groupReferenceEntry.groupId,
peers: groupReferenceEntry.peers,
message: groupReferenceEntry.message,
unreadCount: groupReferenceEntry.unreadCount,
hiddenByDefault: groupReferenceEntry.hiddenByDefault,
storyState: groupReferenceEntry.storyState.flatMap { storyState in
return ChatListItemContent.StoryState(
stats: storyState.stats,
hasUnseenCloseFriends: storyState.hasUnseenCloseFriends
)
}
)),
editing: groupReferenceEntry.editing,
hasActiveRevealControls: false,
selected: false,
header: nil,
enableContextActions: true,
hiddenOffset: hiddenByDefault && !revealed,
hiddenOffset: groupReferenceEntry.hiddenByDefault && !groupReferenceEntry.revealed,
interaction: nodeInteraction
), directionHint: entry.directionHint)
case let .ContactEntry(contactEntry):
@ -966,13 +1011,9 @@ private func mappedUpdateEntries(context: AccountContext, nodeInteraction: ChatL
nodeInteraction?.openPasswordSetup()
case .premiumUpgrade, .premiumAnnualDiscount, .premiumRestore:
nodeInteraction?.openPremiumIntro()
case .chatFolderUpdates:
nodeInteraction?.openChatFolderUpdates()
}
case .hide:
switch notice {
case .chatFolderUpdates:
nodeInteraction?.hideChatFolderUpdates()
default:
break
}
@ -1031,7 +1072,7 @@ public enum ChatListGlobalScrollOption {
}
public enum ChatListNodeScrollPosition {
case top
case top(adjustForTempInset: Bool)
}
public enum ChatListNodeEmptyState: Equatable {
@ -1040,6 +1081,11 @@ public enum ChatListNodeEmptyState: Equatable {
}
public final class ChatListNode: ListView {
public enum OpenStoriesSubject {
case peer(EnginePeer.Id)
case archive
}
private let fillPreloadItems: Bool
private let context: AccountContext
private let location: ChatListControllerLocation
@ -1077,6 +1123,7 @@ public final class ChatListNode: ListView {
public var toggleArchivedFolderHiddenByDefault: (() -> Void)?
public var hidePsa: ((EnginePeer.Id) -> Void)?
public var activateChatPreview: ((ChatListItem, Int64?, ASDisplayNode, ContextGesture?, CGPoint?) -> Void)?
public var openStories: ((ChatListNode.OpenStoriesSubject, ASDisplayNode?) -> Void)?
private var theme: PresentationTheme
@ -1141,10 +1188,13 @@ public final class ChatListNode: ListView {
public var contentOffsetChanged: ((ListViewVisibleContentOffset) -> Void)?
public var contentScrollingEnded: ((ListView) -> Bool)?
public var didBeginInteractiveDragging: ((ListView) -> Void)?
public var isEmptyUpdated: ((ChatListNodeEmptyState, Bool, ContainedViewLayoutTransition) -> Void)?
private var currentIsEmptyState: ChatListNodeEmptyState?
public var canExpandHiddenItems: (() -> Bool)?
public var addedVisibleChatsWithPeerIds: (([EnginePeer.Id]) -> Void)?
private let currentRemovingItemId = Atomic<ChatListNodeState.ItemId?>(value: nil)
@ -1175,7 +1225,17 @@ public final class ChatListNode: ListView {
private var pollFilterUpdatesDisposable: Disposable?
private var chatFilterUpdatesDisposable: Disposable?
public init(context: AccountContext, location: ChatListControllerLocation, chatListFilter: ChatListFilter? = nil, previewing: Bool, fillPreloadItems: Bool, mode: ChatListNodeMode, isPeerEnabled: ((EnginePeer) -> Bool)? = nil, theme: PresentationTheme, fontSize: PresentationFontSize, strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, nameSortOrder: PresentationPersonNameOrder, nameDisplayOrder: PresentationPersonNameOrder, animationCache: AnimationCache, animationRenderer: MultiAnimationRenderer, disableAnimations: Bool, isInlineMode: Bool) {
public var scrollHeightTopInset: CGFloat {
didSet {
self.keepMinimalScrollHeightWithTopInset = self.scrollHeightTopInset
}
}
public var startedScrollingAtUpperBound: Bool = false
private let autoSetReady: Bool
public init(context: AccountContext, location: ChatListControllerLocation, chatListFilter: ChatListFilter? = nil, previewing: Bool, fillPreloadItems: Bool, mode: ChatListNodeMode, isPeerEnabled: ((EnginePeer) -> Bool)? = nil, theme: PresentationTheme, fontSize: PresentationFontSize, strings: PresentationStrings, dateTimeFormat: PresentationDateTimeFormat, nameSortOrder: PresentationPersonNameOrder, nameDisplayOrder: PresentationPersonNameOrder, animationCache: AnimationCache, animationRenderer: MultiAnimationRenderer, disableAnimations: Bool, isInlineMode: Bool, autoSetReady: Bool) {
self.context = context
self.location = location
self.chatListFilter = chatListFilter
@ -1184,23 +1244,30 @@ public final class ChatListNode: ListView {
self.mode = mode
self.animationCache = animationCache
self.animationRenderer = animationRenderer
self.autoSetReady = autoSetReady
let isMainTab = chatListFilter == nil && location == .chatList(groupId: .root)
var isSelecting = false
if case .peers(_, true, _, _, _) = mode {
if case .peers(_, true, _, _, _, _) = mode {
isSelecting = true
}
self.currentState = ChatListNodeState(presentationData: ChatListPresentationData(theme: theme, fontSize: fontSize, strings: strings, dateTimeFormat: dateTimeFormat, nameSortOrder: nameSortOrder, nameDisplayOrder: nameDisplayOrder, disableAnimations: disableAnimations), editing: isSelecting, peerIdWithRevealedOptions: nil, selectedPeerIds: Set(), foundPeers: [], selectedPeerMap: [:], selectedAdditionalCategoryIds: Set(), peerInputActivities: nil, pendingRemovalItemIds: Set(), pendingClearHistoryPeerIds: Set(), hiddenItemShouldBeTemporaryRevealed: false, hiddenPsaPeerId: nil, selectedThreadIds: Set())
self.currentState = ChatListNodeState(presentationData: ChatListPresentationData(theme: theme, fontSize: fontSize, strings: strings, dateTimeFormat: dateTimeFormat, nameSortOrder: nameSortOrder, nameDisplayOrder: nameDisplayOrder, disableAnimations: disableAnimations), editing: isSelecting, peerIdWithRevealedOptions: nil, selectedPeerIds: Set(), foundPeers: [], selectedPeerMap: [:], selectedAdditionalCategoryIds: Set(), peerInputActivities: nil, pendingRemovalItemIds: Set(), pendingClearHistoryPeerIds: Set(), hiddenItemShouldBeTemporaryRevealed: false, hiddenPsaPeerId: nil, selectedThreadIds: Set(), archiveStoryState: nil)
self.statePromise = ValuePromise(self.currentState, ignoreRepeated: true)
self.theme = theme
self.scrollHeightTopInset = ChatListNavigationBar.searchScrollHeight
super.init()
//self.useMainQueueTransactions = true
self.verticalScrollIndicatorColor = theme.list.scrollIndicatorColor
self.verticalScrollIndicatorFollowsOverscroll = true
self.keepMinimalScrollHeightWithTopInset = navigationBarSearchContentHeight
self.keepMinimalScrollHeightWithTopInset = self.scrollHeightTopInset
let nodeInteraction = ChatListNodeInteraction(context: context, animationCache: self.animationCache, animationRenderer: self.animationRenderer, activateSearch: { [weak self] in
if let strongSelf = self, let activateSearch = strongSelf.activateSearch {
@ -1525,6 +1592,11 @@ public final class ChatListNode: ListView {
let _ = self.context.engine.peers.hideChatFolderUpdates(folderId: localFilterId).start()
}
})
}, openStories: { [weak self] subject, itemNode in
guard let self else {
return
}
self.openStories?(subject, itemNode)
})
nodeInteraction.isInlineMode = isInlineMode
@ -1545,7 +1617,7 @@ public final class ChatListNode: ListView {
let currentRemovingItemId = self.currentRemovingItemId
let savedMessagesPeer: Signal<EnginePeer?, NoError>
if case let .peers(filter, _, _, _, _) = mode, filter.contains(.onlyWriteable), case .chatList = location, self.chatListFilter == nil {
if case let .peers(filter, _, _, _, _, _) = mode, filter.contains(.onlyWriteable), case .chatList = location, self.chatListFilter == nil {
savedMessagesPeer = context.account.postbox.loadedPeerWithId(context.account.peerId)
|> map(Optional.init)
|> map { peer in
@ -1778,7 +1850,7 @@ public final class ChatListNode: ListView {
})*/
let contacts: Signal<[ChatListContactPeer], NoError>
if case .chatList(groupId: .root) = location, chatListFilter == nil {
if case .chatList(groupId: .root) = location, chatListFilter == nil, case .chatList = mode {
contacts = ApplicationSpecificNotice.displayChatListContacts(accountManager: context.sharedContext.accountManager)
|> distinctUntilChanged
|> mapToSignal { value -> Signal<[ChatListContactPeer], NoError> in
@ -1836,6 +1908,8 @@ public final class ChatListNode: ListView {
contacts = .single([])
}
let accountPeerId = context.account.peerId
let chatListNodeViewTransition = combineLatest(
queue: viewProcessingQueue,
hideArchivedFolderByDefault,
@ -1844,19 +1918,16 @@ public final class ChatListNode: ListView {
suggestedChatListNotice,
savedMessagesPeer,
chatListViewUpdate,
self.chatFolderUpdates.get() |> distinctUntilChanged,
self.statePromise.get(),
contacts
)
|> mapToQueue { (hideArchivedFolderByDefault, displayArchiveIntro, storageInfo, suggestedChatListNotice, savedMessagesPeer, updateAndFilter, chatFolderUpdates, state, contacts) -> Signal<ChatListNodeListViewTransition, NoError> in
|> mapToQueue { (hideArchivedFolderByDefault, displayArchiveIntro, storageInfo, suggestedChatListNotice, savedMessagesPeer, updateAndFilter, state, contacts) -> Signal<ChatListNodeListViewTransition, NoError> in
let (update, filter) = updateAndFilter
let previousHideArchivedFolderByDefaultValue = previousHideArchivedFolderByDefault.swap(hideArchivedFolderByDefault)
let notice: ChatListNotice?
if let chatFolderUpdates, chatFolderUpdates.availableChatsToJoin != 0 {
notice = .chatFolderUpdates(count: chatFolderUpdates.availableChatsToJoin)
} else if let suggestedChatListNotice {
if let suggestedChatListNotice {
notice = suggestedChatListNotice
} else if let storageInfo {
notice = .clearStorage(sizeFraction: storageInfo)
@ -1864,7 +1935,7 @@ public final class ChatListNode: ListView {
notice = nil
}
let (rawEntries, isLoading) = chatListNodeEntriesForView(update.list, state: state, savedMessagesPeer: savedMessagesPeer, foundPeers: state.foundPeers, hideArchivedFolderByDefault: hideArchivedFolderByDefault, displayArchiveIntro: displayArchiveIntro, notice: notice, mode: mode, chatListLocation: location, contacts: contacts)
let (rawEntries, isLoading) = chatListNodeEntriesForView(view: update.list, state: state, savedMessagesPeer: savedMessagesPeer, foundPeers: state.foundPeers, hideArchivedFolderByDefault: hideArchivedFolderByDefault, displayArchiveIntro: displayArchiveIntro, notice: notice, mode: mode, chatListLocation: location, contacts: contacts, accountPeerId: accountPeerId, isMainTab: isMainTab)
var isEmpty = true
var entries = rawEntries.filter { entry in
switch entry {
@ -1875,7 +1946,7 @@ public final class ChatListNode: ListView {
case .chatList:
isEmpty = false
return true
case let .peers(filter, _, _, _, _):
case let .peers(filter, _, _, _, _, _):
guard !filter.contains(.excludeSavedMessages) || peer.peerId != currentPeerId else { return false }
guard !filter.contains(.excludeSavedMessages) || !peer.peerId.isReplies else { return false }
guard !filter.contains(.excludeSecretChats) || peer.peerId.namespace != Namespaces.Peer.SecretChat else { return false }
@ -2210,8 +2281,8 @@ public final class ChatListNode: ListView {
didIncludeRemovingPeerId = true
}
}
} else if case let .GroupReferenceEntry(_, _, _, _, _, _, _, _, hiddenByDefault) = entry {
didIncludeHiddenByDefaultArchive = hiddenByDefault
} else if case let .GroupReferenceEntry(groupReferenceEntry) = entry {
didIncludeHiddenByDefaultArchive = groupReferenceEntry.hiddenByDefault
} else if case .Notice = entry {
didIncludeNotice = true
}
@ -2246,9 +2317,9 @@ public final class ChatListNode: ListView {
doesIncludeRemovingPeerId = true
}
}
} else if case let .GroupReferenceEntry(_, _, _, _, _, _, _, _, hiddenByDefault) = entry {
} else if case let .GroupReferenceEntry(groupReferenceEntry) = entry {
doesIncludeArchive = true
doesIncludeHiddenByDefaultArchive = hiddenByDefault
doesIncludeHiddenByDefaultArchive = groupReferenceEntry.hiddenByDefault
} else if case .Notice = entry {
doesIncludeNotice = true
}
@ -2334,10 +2405,11 @@ public final class ChatListNode: ListView {
strongSelf.enqueueHistoryPreloadUpdate()
}
var refreshStoryPeerIds: [PeerId] = []
var isHiddenItemVisible = false
if let range = range.visibleRange {
let entryCount = chatListView.filteredEntries.count
for i in range.firstIndex ..< range.lastIndex {
for i in max(0, range.firstIndex - 1) ..< range.lastIndex {
if i < 0 || i >= entryCount {
assertionFailure()
continue
@ -2349,6 +2421,11 @@ public final class ChatListNode: ListView {
if let threadInfo, threadInfo.isHidden {
isHiddenItemVisible = true
}
if let peer = peerEntry.peer.chatMainPeer, !peerEntry.isContact, case let .user(user) = peer {
refreshStoryPeerIds.append(user.id)
}
break
case .GroupReferenceEntry:
isHiddenItemVisible = true
@ -2364,6 +2441,9 @@ public final class ChatListNode: ListView {
return state
}
}
if !refreshStoryPeerIds.isEmpty {
strongSelf.context.account.viewTracker.refreshStoryStatsForPeerIds(peerIds: refreshStoryPeerIds)
}
}
}
@ -2695,18 +2775,25 @@ public final class ChatListNode: ListView {
}
}
}
var startedScrollingAtUpperBound = false
var startedScrollingWithCanExpandHiddenItems = false
self.beganInteractiveDragging = { [weak self] _ in
guard let strongSelf = self else {
return
}
switch strongSelf.visibleContentOffset() {
case .none, .unknown:
startedScrollingAtUpperBound = false
case let .known(value):
startedScrollingAtUpperBound = value <= 0.0
case .none, .unknown:
strongSelf.startedScrollingAtUpperBound = false
case let .known(value):
strongSelf.startedScrollingAtUpperBound = value <= 0.001
}
if let canExpandHiddenItems = strongSelf.canExpandHiddenItems {
startedScrollingWithCanExpandHiddenItems = canExpandHiddenItems()
} else {
startedScrollingWithCanExpandHiddenItems = true
}
if strongSelf.currentState.peerIdWithRevealedOptions != nil {
strongSelf.updateState { state in
var state = state
@ -2714,27 +2801,28 @@ public final class ChatListNode: ListView {
return state
}
}
strongSelf.didBeginInteractiveDragging?(strongSelf)
}
self.didEndScrolling = { [weak self] _ in
guard let strongSelf = self else {
return
}
startedScrollingAtUpperBound = false
let _ = strongSelf.contentScrollingEnded?(strongSelf)
let revealHiddenItems: Bool
switch strongSelf.visibleContentOffset() {
case .none, .unknown:
revealHiddenItems = false
case let .known(value):
revealHiddenItems = value <= 54.0
revealHiddenItems = value <= -strongSelf.tempTopInset - 60.0
}
if !revealHiddenItems && strongSelf.currentState.hiddenItemShouldBeTemporaryRevealed {
strongSelf.updateState { state in
/*strongSelf.updateState { state in
var state = state
state.hiddenItemShouldBeTemporaryRevealed = false
return state
}
}*/
}
}
@ -2762,9 +2850,9 @@ public final class ChatListNode: ListView {
case .none, .unknown:
atTop = false
case let .known(value):
atTop = value <= 0.0
if startedScrollingAtUpperBound && strongSelf.isTracking {
revealHiddenItems = value <= -60.0
atTop = value <= -strongSelf.tempTopInset
if strongSelf.startedScrollingAtUpperBound && startedScrollingWithCanExpandHiddenItems && strongSelf.isTracking {
revealHiddenItems = value <= -strongSelf.tempTopInset - 60.0
}
}
strongSelf.scrolledAtTopValue = atTop
@ -2778,8 +2866,8 @@ public final class ChatListNode: ListView {
isHiddenItemVisible = true
}
}
if case let .groupReference(_, _, _, _, hiddenByDefault) = item.content {
if hiddenByDefault {
if case let .groupReference(groupReference) = item.content {
if groupReference.hiddenByDefault {
isHiddenItemVisible = true
}
}
@ -2906,7 +2994,7 @@ public final class ChatListNode: ListView {
if strongSelf.isNodeLoaded, strongSelf.dequeuedInitialTransitionOnLayout {
strongSelf.dequeueTransition()
} else {
if !strongSelf.didSetReady {
if !strongSelf.didSetReady && strongSelf.autoSetReady {
strongSelf.didSetReady = true
strongSelf._ready.set(true)
}
@ -3109,6 +3197,7 @@ public final class ChatListNode: ListView {
}
var options = transition.options
//options.insert(.Synchronous)
if self.view.window != nil {
if !options.contains(.AnimateInsertion) {
options.insert(.PreferSynchronousDrawing)
@ -3129,7 +3218,7 @@ public final class ChatListNode: ListView {
case let .known(value) where abs(value) < .ulpOfOne:
offset = 0.0
default:
offset = -navigationBarSearchContentHeight
offset = -self.scrollHeightTopInset
}
}
scrollToItem = ListViewScrollToItem(index: 0, position: .top(offset), animated: false, curve: .Default(duration: 0.0), directionHint: .Up)
@ -3142,7 +3231,7 @@ public final class ChatListNode: ListView {
var isNavigationHidden: Bool {
switch self.visibleContentOffset() {
case let .known(value) where abs(value) < navigationBarSearchContentHeight - 1.0:
case let .known(value) where abs(value) < self.scrollHeightTopInset - 1.0:
return false
case .none:
return false
@ -3154,17 +3243,18 @@ public final class ChatListNode: ListView {
var isNavigationInAFinalState: Bool {
switch self.visibleContentOffset() {
case let .known(value):
if value < navigationBarSearchContentHeight - 1.0 {
let _ = value
/*if value < self.scrollHeightTopInset - 1.0 {
if abs(value - 0.0) < 1.0 {
return true
}
if abs(value - navigationBarSearchContentHeight) < 1.0 {
if abs(value - self.scrollHeightTopInset) < 1.0 {
return true
}
return false
} else {
} else {*/
return true
}
//}
default:
return true
}
@ -3176,9 +3266,9 @@ public final class ChatListNode: ListView {
}
var scrollToItem: ListViewScrollToItem?
switch self.visibleContentOffset() {
case let .known(value) where abs(value) < navigationBarSearchContentHeight - 1.0:
case let .known(value) where abs(value) < self.scrollHeightTopInset - 1.0:
if isNavigationHidden {
scrollToItem = ListViewScrollToItem(index: 0, position: .top(-navigationBarSearchContentHeight), animated: false, curve: .Default(duration: 0.0), directionHint: .Up)
scrollToItem = ListViewScrollToItem(index: 0, position: .top(-self.scrollHeightTopInset), animated: false, curve: .Default(duration: 0.0), directionHint: .Up)
}
default:
if !isNavigationHidden {
@ -3197,7 +3287,11 @@ public final class ChatListNode: ListView {
self.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous], scrollToItem: scrollToItem, updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })*/
}
public func updateLayout(transition: ContainedViewLayoutTransition, updateSizeAndInsets: ListViewUpdateSizeAndInsets, visibleTopInset: CGFloat, originalTopInset: CGFloat, inlineNavigationLocation: ChatListControllerLocation?, inlineNavigationTransitionFraction: CGFloat) {
public var ignoreStoryInsetAdjustment: Bool = false
private var previousStoriesInset: CGFloat?
public func updateLayout(transition: ContainedViewLayoutTransition, updateSizeAndInsets: ListViewUpdateSizeAndInsets, visibleTopInset: CGFloat, originalTopInset: CGFloat, storiesInset: CGFloat, inlineNavigationLocation: ChatListControllerLocation?, inlineNavigationTransitionFraction: CGFloat) {
//print("inset: \(updateSizeAndInsets.insets.top)")
var highlightedLocation: ChatListHighlightedLocation?
if case let .forum(peerId) = inlineNavigationLocation {
@ -3234,6 +3328,23 @@ public final class ChatListNode: ListView {
var additionalScrollDistance: CGFloat = 0.0
if let previousStoriesInset = self.previousStoriesInset {
if self.ignoreStoryInsetAdjustment {
//additionalScrollDistance += -20.0
switch self.visibleContentOffset() {
case let .known(value):
additionalScrollDistance += min(0.0, value)
default:
break
}
additionalScrollDistance = 0.0
} else {
additionalScrollDistance += previousStoriesInset - storiesInset
}
}
self.previousStoriesInset = storiesInset
//print("storiesInset: \(storiesInset), additionalScrollDistance: \(additionalScrollDistance)")
var options: ListViewDeleteAndInsertOptions = [.Synchronous, .LowLatency]
if navigationLocationUpdated {
options.insert(.ForceUpdate)
@ -3244,7 +3355,9 @@ public final class ChatListNode: ListView {
additionalScrollDistance += insetDelta
}
self.ignoreStopScrolling = true
self.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: options, scrollToItem: nil, additionalScrollDistance: additionalScrollDistance, updateSizeAndInsets: updateSizeAndInsets, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
self.ignoreStopScrolling = false
if !self.dequeuedInitialTransitionOnLayout {
self.dequeuedInitialTransitionOnLayout = true
@ -3253,16 +3366,25 @@ public final class ChatListNode: ListView {
}
}
public func scrollToPosition(_ position: ChatListNodeScrollPosition) {
public func scrollToPosition(_ position: ChatListNodeScrollPosition, animated: Bool = true) {
var additionalDelta: CGFloat = 0.0
switch position {
case let .top(adjustForTempInset):
if adjustForTempInset {
additionalDelta = ChatListNavigationBar.storiesScrollHeight
self.tempTopInset = ChatListNavigationBar.storiesScrollHeight
}
}
if let list = self.chatListView?.originalList {
if !list.hasLater {
self.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous], scrollToItem: ListViewScrollToItem(index: 0, position: .top(0.0), animated: true, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
self.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous], scrollToItem: ListViewScrollToItem(index: 0, position: .top(additionalDelta), animated: animated, curve: .Default(duration: nil), directionHint: .Up), updateSizeAndInsets: nil, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
} else {
let location: ChatListNodeLocation = .scroll(index: .chatList(.absoluteUpperBound), sourceIndex: .chatList(.absoluteLowerBound), scrollPosition: .top(0.0), animated: true, filter: self.chatListFilter)
let location: ChatListNodeLocation = .scroll(index: .chatList(.absoluteUpperBound), sourceIndex: .chatList(.absoluteLowerBound), scrollPosition: .top(additionalDelta), animated: animated, filter: self.chatListFilter)
self.setChatListLocation(location)
}
} else {
let location: ChatListNodeLocation = .scroll(index: .chatList(.absoluteUpperBound), sourceIndex: .chatList(.absoluteLowerBound), scrollPosition: .top(0.0), animated: true, filter: self.chatListFilter)
let location: ChatListNodeLocation = .scroll(index: .chatList(.absoluteUpperBound), sourceIndex: .chatList(.absoluteLowerBound), scrollPosition: .top(additionalDelta), animated: animated, filter: self.chatListFilter)
self.setChatListLocation(location)
}
}
@ -3714,10 +3836,12 @@ private func statusStringForPeerType(accountPeerId: EnginePeer.Id, strings: Pres
} else if case let .user(user) = peer {
if user.botInfo != nil || user.flags.contains(.isSupport) {
return (strings.ChatList_PeerTypeBot, false, false, nil)
} else if isContact {
return (strings.ChatList_PeerTypeContact, false, false, nil)
} else {
return (strings.ChatList_PeerTypeNonContact, false, false, nil)
if isContact {
return (strings.ChatList_PeerTypeContact, false, false, nil)
} else {
return (strings.ChatList_PeerTypeNonContact, false, false, nil)
}
}
} else if case .secretChat = peer {
if isContact {

View File

@ -112,6 +112,7 @@ enum ChatListNodeEntry: Comparable, Identifiable {
var forumTopicData: EngineChatList.ForumTopicData?
var topForumTopicItems: [EngineChatList.ForumTopicData]
var revealed: Bool
var storyState: ChatListNodeState.StoryState?
init(
index: EngineChatList.Item.Index,
@ -135,7 +136,8 @@ enum ChatListNodeEntry: Comparable, Identifiable {
autoremoveTimeout: Int32?,
forumTopicData: EngineChatList.ForumTopicData?,
topForumTopicItems: [EngineChatList.ForumTopicData],
revealed: Bool
revealed: Bool,
storyState: ChatListNodeState.StoryState?
) {
self.index = index
self.presentationData = presentationData
@ -159,6 +161,7 @@ enum ChatListNodeEntry: Comparable, Identifiable {
self.forumTopicData = forumTopicData
self.topForumTopicItems = topForumTopicItems
self.revealed = revealed
self.storyState = storyState
}
static func ==(lhs: PeerEntryData, rhs: PeerEntryData) -> Bool {
@ -268,6 +271,9 @@ enum ChatListNodeEntry: Comparable, Identifiable {
if lhs.revealed != rhs.revealed {
return false
}
if lhs.storyState != rhs.storyState {
return false
}
return true
}
}
@ -297,10 +303,82 @@ enum ChatListNodeEntry: Comparable, Identifiable {
}
}
struct GroupReferenceEntryData: Equatable {
var index: EngineChatList.Item.Index
var presentationData: ChatListPresentationData
var groupId: EngineChatList.Group
var peers: [EngineChatList.GroupItem.Item]
var message: EngineMessage?
var editing: Bool
var unreadCount: Int
var revealed: Bool
var hiddenByDefault: Bool
var storyState: ChatListNodeState.StoryState?
init(
index: EngineChatList.Item.Index,
presentationData: ChatListPresentationData,
groupId: EngineChatList.Group,
peers: [EngineChatList.GroupItem.Item],
message: EngineMessage?,
editing: Bool,
unreadCount: Int,
revealed: Bool,
hiddenByDefault: Bool,
storyState: ChatListNodeState.StoryState?
) {
self.index = index
self.presentationData = presentationData
self.groupId = groupId
self.peers = peers
self.message = message
self.editing = editing
self.unreadCount = unreadCount
self.revealed = revealed
self.hiddenByDefault = hiddenByDefault
self.storyState = storyState
}
static func ==(lhs: GroupReferenceEntryData, rhs: GroupReferenceEntryData) -> Bool {
if lhs.index != rhs.index {
return false
}
if lhs.presentationData !== rhs.presentationData {
return false
}
if lhs.groupId != rhs.groupId {
return false
}
if lhs.peers != rhs.peers {
return false
}
if lhs.message?.stableId != rhs.message?.stableId {
return false
}
if lhs.editing != rhs.editing {
return false
}
if lhs.unreadCount != rhs.unreadCount {
return false
}
if lhs.revealed != rhs.revealed {
return false
}
if lhs.hiddenByDefault != rhs.hiddenByDefault {
return false
}
if lhs.storyState != rhs.storyState {
return false
}
return true
}
}
case HeaderEntry
case PeerEntry(PeerEntryData)
case HoleEntry(EngineMessage.Index, theme: PresentationTheme)
case GroupReferenceEntry(index: EngineChatList.Item.Index, presentationData: ChatListPresentationData, groupId: EngineChatList.Group, peers: [EngineChatList.GroupItem.Item], message: EngineMessage?, editing: Bool, unreadCount: Int, revealed: Bool, hiddenByDefault: Bool)
case GroupReferenceEntry(GroupReferenceEntryData)
case ContactEntry(ContactEntryData)
case ArchiveIntro(presentationData: ChatListPresentationData)
case EmptyIntro(presentationData: ChatListPresentationData)
@ -316,8 +394,8 @@ enum ChatListNodeEntry: Comparable, Identifiable {
return .index(peerEntry.index)
case let .HoleEntry(holeIndex, _):
return .index(.chatList(EngineChatList.Item.Index.ChatList(pinningIndex: nil, messageIndex: holeIndex)))
case let .GroupReferenceEntry(index, _, _, _, _, _, _, _, _):
return .index(index)
case let .GroupReferenceEntry(groupReferenceEntry):
return .index(groupReferenceEntry.index)
case let .ContactEntry(contactEntry):
return .contact(id: contactEntry.peer.id, presence: contactEntry.presence)
case .ArchiveIntro:
@ -346,8 +424,8 @@ enum ChatListNodeEntry: Comparable, Identifiable {
}
case let .HoleEntry(holeIndex, _):
return .Hole(Int64(holeIndex.id.id))
case let .GroupReferenceEntry(_, _, groupId, _, _, _, _, _, _):
return .GroupId(groupId)
case let .GroupReferenceEntry(groupReferenceEntry):
return .GroupId(groupReferenceEntry.groupId)
case let .ContactEntry(contactEntry):
return .ContactId(contactEntry.peer.id)
case .ArchiveIntro:
@ -388,35 +466,8 @@ enum ChatListNodeEntry: Comparable, Identifiable {
default:
return false
}
case let .GroupReferenceEntry(lhsIndex, lhsPresentationData, lhsGroupId, lhsPeers, lhsMessage, lhsEditing, lhsUnreadState, lhsRevealed, lhsHiddenByDefault):
if case let .GroupReferenceEntry(rhsIndex, rhsPresentationData, rhsGroupId, rhsPeers, rhsMessage, rhsEditing, rhsUnreadState, rhsRevealed, rhsHiddenByDefault) = rhs {
if lhsIndex != rhsIndex {
return false
}
if lhsPresentationData !== rhsPresentationData {
return false
}
if lhsGroupId != rhsGroupId {
return false
}
if lhsPeers != rhsPeers {
return false
}
if lhsMessage?.stableId != rhsMessage?.stableId {
return false
}
if lhsEditing != rhsEditing {
return false
}
if lhsUnreadState != rhsUnreadState {
return false
}
if lhsRevealed != rhsRevealed {
return false
}
if lhsHiddenByDefault != rhsHiddenByDefault {
return false
}
case let .GroupReferenceEntry(groupReferenceEntry):
if case .GroupReferenceEntry(groupReferenceEntry) = rhs {
return true
} else {
return false
@ -518,7 +569,17 @@ struct ChatListContactPeer {
}
}
func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState, savedMessagesPeer: EnginePeer?, foundPeers: [(EnginePeer, EnginePeer?)], hideArchivedFolderByDefault: Bool, displayArchiveIntro: Bool, notice: ChatListNotice?, mode: ChatListNodeMode, chatListLocation: ChatListControllerLocation, contacts: [ChatListContactPeer]) -> (entries: [ChatListNodeEntry], loading: Bool) {
func chatListNodeEntriesForView(view: EngineChatList, state: ChatListNodeState, savedMessagesPeer: EnginePeer?, foundPeers: [(EnginePeer, EnginePeer?)], hideArchivedFolderByDefault: Bool, displayArchiveIntro: Bool, notice: ChatListNotice?, mode: ChatListNodeMode, chatListLocation: ChatListControllerLocation, contacts: [ChatListContactPeer], accountPeerId: EnginePeer.Id, isMainTab: Bool) -> (entries: [ChatListNodeEntry], loading: Bool) {
var groupItems = view.groupItems
if isMainTab && state.archiveStoryState != nil && groupItems.isEmpty {
groupItems.append(EngineChatList.GroupItem(
id: .archive,
topMessage: nil,
items: [],
unreadCount: 0
))
}
var result: [ChatListNodeEntry] = []
if !view.hasEarlier {
@ -538,7 +599,7 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
if !view.hasLater, case .chatList = mode {
var groupEntryCount = 0
for _ in view.groupItems {
for _ in groupItems {
groupEntryCount += 1
}
pinnedIndexOffset += UInt16(groupEntryCount)
@ -633,7 +694,13 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
autoremoveTimeout: entry.autoremoveTimeout,
forumTopicData: entry.forumTopicData,
topForumTopicItems: entry.topForumTopicItems,
revealed: threadId == 1 && (state.hiddenItemShouldBeTemporaryRevealed || state.editing)
revealed: threadId == 1 && (state.hiddenItemShouldBeTemporaryRevealed || state.editing),
storyState: entry.renderedPeer.peerId == accountPeerId ? nil : entry.storyStats.flatMap { stats -> ChatListNodeState.StoryState in
return ChatListNodeState.StoryState(
stats: stats,
hasUnseenCloseFriends: stats.hasUnseenCloseFriends
)
}
))
if let threadInfo, threadInfo.isHidden {
@ -682,7 +749,8 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
autoremoveTimeout: nil,
forumTopicData: nil,
topForumTopicItems: [],
revealed: false
revealed: false,
storyState: nil
)))
if foundPinningIndex != 0 {
foundPinningIndex -= 1
@ -712,7 +780,8 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
autoremoveTimeout: nil,
forumTopicData: nil,
topForumTopicItems: [],
revealed: false
revealed: false,
storyState: nil
)))
} else {
if !filteredAdditionalItemEntries.isEmpty {
@ -762,7 +831,8 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
autoremoveTimeout: item.item.autoremoveTimeout,
forumTopicData: item.item.forumTopicData,
topForumTopicItems: item.item.topForumTopicItems,
revealed: state.hiddenItemShouldBeTemporaryRevealed || state.editing
revealed: state.hiddenItemShouldBeTemporaryRevealed || state.editing,
storyState: nil
)))
if pinningIndex != 0 {
pinningIndex -= 1
@ -772,9 +842,13 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
}
if !view.hasLater, case .chatList = mode {
for groupReference in view.groupItems {
for groupReference in groupItems {
let messageIndex = EngineMessage.Index(id: EngineMessage.Id(peerId: EnginePeer.Id(0), namespace: 0, id: 0), timestamp: 1)
result.append(.GroupReferenceEntry(
var mappedStoryState: ChatListNodeState.StoryState?
if let archiveStoryState = state.archiveStoryState {
mappedStoryState = archiveStoryState
}
result.append(.GroupReferenceEntry(ChatListNodeEntry.GroupReferenceEntryData(
index: .chatList(EngineChatList.Item.Index.ChatList(pinningIndex: pinningIndex, messageIndex: messageIndex)),
presentationData: state.presentationData,
groupId: groupReference.id,
@ -783,15 +857,16 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
editing: state.editing,
unreadCount: groupReference.unreadCount,
revealed: state.hiddenItemShouldBeTemporaryRevealed,
hiddenByDefault: hideArchivedFolderByDefault
))
hiddenByDefault: hideArchivedFolderByDefault,
storyState: mappedStoryState
)))
if pinningIndex != 0 {
pinningIndex -= 1
}
}
if displayArchiveIntro {
result.append(.ArchiveIntro(presentationData: state.presentationData))
//result.append(.ArchiveIntro(presentationData: state.presentationData))
} else if !contacts.isEmpty && !result.contains(where: { entry in
if case .PeerEntry = entry {
return true
@ -810,7 +885,7 @@ func chatListNodeEntriesForView(_ view: EngineChatList, state: ChatListNodeState
}
if !view.hasLater {
if case let .peers(_, _, additionalCategories, _, _) = mode {
if case let .peers(_, _, additionalCategories, _, _, _) = mode {
var index = 0
for category in additionalCategories.reversed() {
result.append(.AdditionalCategory(index: index, id: category.id, title: category.title, image: category.icon, appearance: category.appearance, selected: state.selectedAdditionalCategoryIds.contains(category.id), presentationData: state.presentationData))

View File

@ -288,7 +288,8 @@ func chatListViewForLocation(chatListLocation: ChatListControllerLocation, locat
topForumTopicItems: [],
hasFailed: false,
isContact: false,
autoremoveTimeout: nil
autoremoveTimeout: nil,
storyStats: nil
))
}

View File

@ -1,7 +1,6 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Postbox
import Display
import SwiftSignalKit
import TelegramPresentationData
@ -183,15 +182,6 @@ class ChatListStorageInfoItemNode: ItemListRevealOptionsItemNode {
titleString = titleStringValue
textString = NSAttributedString(string: item.strings.ChatList_PremiumRestoreDiscountText, font: textFont, textColor: item.theme.rootController.navigationBar.secondaryTextColor)
case let .chatFolderUpdates(count):
let rawTitleString = item.strings.ChatList_ChatFolderUpdateHintTitle(item.strings.ChatList_ChatFolderUpdateCount(Int32(count)))
let titleStringValue = NSMutableAttributedString(attributedString: NSAttributedString(string: rawTitleString.string, font: titleFont, textColor: item.theme.rootController.navigationBar.primaryTextColor))
if let range = rawTitleString.ranges.first {
titleStringValue.addAttribute(.foregroundColor, value: item.theme.rootController.navigationBar.accentTextColor, range: range.range)
}
titleString = titleStringValue
textString = NSAttributedString(string: item.strings.ChatList_ChatFolderUpdateHintText, font: textFont, textColor: item.theme.rootController.navigationBar.secondaryTextColor)
}
let titleLayout = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleString, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - sideInset - rightInset, height: 100.0)))
@ -230,8 +220,6 @@ class ChatListStorageInfoItemNode: ItemListRevealOptionsItemNode {
strongSelf.contentContainer.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
switch item.notice {
case .chatFolderUpdates:
strongSelf.setRevealOptions((left: [], right: [ItemListRevealOption(key: 0, title: item.strings.ChatList_HideAction, icon: .none, color: item.theme.list.itemDisclosureActions.destructive.fillColor, textColor: item.theme.list.itemDisclosureActions.destructive.foregroundColor)]))
default:
strongSelf.setRevealOptions((left: [], right: []))
}

View File

@ -2,7 +2,6 @@ import Foundation
import UIKit
import AsyncDisplayKit
import Display
import Postbox
import TelegramPresentationData
import WallpaperBackgroundNode
@ -523,7 +522,7 @@ public final class ChatMessageBubbleBackdrop: ASDisplayNode {
self.clipsToBounds = true
}
public func setMaskMode(_ maskMode: Bool, mediaBox: MediaBox) {
public func setMaskMode(_ maskMode: Bool) {
if let currentType = self.currentType, let theme = self.theme, let essentialGraphics = self.essentialGraphics, let backgroundNode = self.backgroundNode {
self.setType(type: currentType, theme: theme, essentialGraphics: essentialGraphics, maskMode: maskMode, backgroundNode: backgroundNode)
}
@ -685,7 +684,7 @@ public final class ChatMessageBubbleBackdrop: ASDisplayNode {
})
}
public func animateFrom(sourceView: UIView, mediaBox: MediaBox, transition: CombinedTransition) {
public func animateFrom(sourceView: UIView, transition: CombinedTransition) {
if transition.isAnimated {
let previousFrame = self.frame
self.updateFrame(CGRect(origin: CGPoint(x: previousFrame.minX, y: sourceView.frame.minY), size: sourceView.frame.size), transition: .immediate)

View File

@ -19,6 +19,7 @@ swift_library(
"//submodules/ChatInterfaceState:ChatInterfaceState",
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/ChatContextQuery",
],
visibility = [
"//visibility:public",

View File

@ -1,6 +1,5 @@
import Foundation
import UIKit
import Postbox
import TelegramCore
public struct MessageMediaEditingOptions: OptionSet {

View File

@ -6,6 +6,7 @@ import TelegramPresentationData
import TelegramUIPreferences
import AccountContext
import ChatInterfaceState
import ChatContextQuery
public extension ChatLocation {
var peerId: PeerId? {
@ -31,53 +32,6 @@ public extension ChatLocation {
}
}
public enum ChatPresentationInputQueryKind: Int32 {
case emoji
case hashtag
case mention
case command
case contextRequest
case emojiSearch
}
public struct ChatInputQueryMentionTypes: OptionSet, Hashable {
public var rawValue: Int32
public init(rawValue: Int32) {
self.rawValue = rawValue
}
public static let contextBots = ChatInputQueryMentionTypes(rawValue: 1 << 0)
public static let members = ChatInputQueryMentionTypes(rawValue: 1 << 1)
public static let accountPeer = ChatInputQueryMentionTypes(rawValue: 1 << 2)
}
/// An in-progress input-field query together with its payload (e.g. the text
/// typed so far, or the context bot being addressed).
public enum ChatPresentationInputQuery: Hashable, Equatable {
    case emoji(String)
    case hashtag(String)
    case mention(query: String, types: ChatInputQueryMentionTypes)
    case command(String)
    case emojiSearch(query: String, languageCode: String, range: NSRange)
    case contextRequest(addressName: String, query: String)

    /// The payload-free category of this query, for use as a grouping key.
    public var kind: ChatPresentationInputQueryKind {
        switch self {
        case .emoji: return .emoji
        case .hashtag: return .hashtag
        case .mention: return .mention
        case .command: return .command
        case .contextRequest: return .contextRequest
        case .emojiSearch: return .emojiSearch
        }
    }
}
public enum ChatMediaInputMode {
case gif
case other

View File

@ -1,6 +1,5 @@
import Foundation
import TextFormat
import Postbox
import TelegramCore
import AccountContext
@ -78,7 +77,7 @@ public func chatTextInputAddLinkAttribute(_ state: ChatTextInputState, selection
}
}
public func chatTextInputAddMentionAttribute(_ state: ChatTextInputState, peer: Peer) -> ChatTextInputState {
public func chatTextInputAddMentionAttribute(_ state: ChatTextInputState, peer: EnginePeer) -> ChatTextInputState {
let inputText = NSMutableAttributedString(attributedString: state.inputText)
let range = NSMakeRange(state.selectionRange.startIndex, state.selectionRange.endIndex - state.selectionRange.startIndex)
@ -91,9 +90,9 @@ public func chatTextInputAddMentionAttribute(_ state: ChatTextInputState, peer:
let selectionPosition = range.lowerBound + (replacementText as NSString).length
return ChatTextInputState(inputText: inputText, selectionRange: selectionPosition ..< selectionPosition)
} else if !EnginePeer(peer).compactDisplayTitle.isEmpty {
} else if !peer.compactDisplayTitle.isEmpty {
let replacementText = NSMutableAttributedString()
replacementText.append(NSAttributedString(string: EnginePeer(peer).compactDisplayTitle, attributes: [ChatTextInputAttributes.textMention: ChatTextInputTextMentionAttribute(peerId: peer.id)]))
replacementText.append(NSAttributedString(string: peer.compactDisplayTitle, attributes: [ChatTextInputAttributes.textMention: ChatTextInputTextMentionAttribute(peerId: peer.id)]))
replacementText.append(NSAttributedString(string: " "))
let updatedRange = NSRange(location: range.location , length: range.length)

View File

@ -3,7 +3,6 @@ import UIKit
import AsyncDisplayKit
import SwiftSignalKit
import Display
import Postbox
import TelegramCore
import TelegramPresentationData
import AccountContext

View File

@ -3,7 +3,6 @@ import UIKit
import SwiftSignalKit
import AsyncDisplayKit
import Display
import Postbox
import TelegramCore
import TelegramPresentationData
import AccountContext

View File

@ -180,7 +180,8 @@ public final class _UpdatedChildComponent {
var _opacity: CGFloat?
var _cornerRadius: CGFloat?
var _clipsToBounds: Bool?
var _shadow: Shadow?
fileprivate var transitionAppear: Transition.Appear?
fileprivate var transitionAppearWithGuide: (Transition.AppearWithGuide, _AnyChildComponent.Id)?
fileprivate var transitionDisappear: Transition.Disappear?
@ -240,7 +241,7 @@ public final class _UpdatedChildComponent {
self._position = position
return self
}
@discardableResult public func scale(_ scale: CGFloat) -> _UpdatedChildComponent {
self._scale = scale
return self
@ -260,6 +261,11 @@ public final class _UpdatedChildComponent {
self._clipsToBounds = clipsToBounds
return self
}
@discardableResult public func shadow(_ shadow: Shadow?) -> _UpdatedChildComponent {
self._shadow = shadow
return self
}
@discardableResult public func gesture(_ gesture: Gesture) -> _UpdatedChildComponent {
self.gestures.append(gesture)
@ -702,9 +708,20 @@ public extension CombinedComponent {
} else {
updatedChild.view.frame = updatedChild.size.centered(around: updatedChild._position ?? CGPoint())
}
updatedChild.view.alpha = updatedChild._opacity ?? 1.0
updatedChild.view.clipsToBounds = updatedChild._clipsToBounds ?? false
updatedChild.view.layer.cornerRadius = updatedChild._cornerRadius ?? 0.0
if let shadow = updatedChild._shadow {
updatedChild.view.layer.shadowColor = shadow.color.withAlphaComponent(1.0).cgColor
updatedChild.view.layer.shadowRadius = shadow.radius
updatedChild.view.layer.shadowOpacity = Float(shadow.color.alpha)
updatedChild.view.layer.shadowOffset = shadow.offset
} else {
updatedChild.view.layer.shadowColor = nil
updatedChild.view.layer.shadowRadius = 0.0
updatedChild.view.layer.shadowOpacity = 0.0
}
updatedChild.view.context(typeErasedComponent: updatedChild.component).erasedState._updated = { [weak viewContext] transition in
guard let viewContext = viewContext else {
return
@ -833,3 +850,19 @@ public extension CombinedComponent {
return ActionSlot<Arguments>()
}
}
/// Describes a drop shadow for a child component's layer.
/// The color's alpha channel is used as the shadow *opacity*, while its opaque
/// RGB part is used as the shadow color (see the shadow application in
/// `CombinedComponent`, which splits the color that way).
public struct Shadow {
    /// Shadow color; the alpha component doubles as the shadow opacity.
    public let color: UIColor
    /// Blur radius, in points.
    public let radius: CGFloat
    /// Offset of the shadow, in points.
    public let offset: CGSize

    public init(
        color: UIColor,
        radius: CGFloat,
        offset: CGSize
    ) {
        self.color = color
        self.radius = radius
        self.offset = offset
    }
}

View File

@ -6,7 +6,7 @@ import Display
@_silgen_name("UIAnimationDragCoefficient") func UIAnimationDragCoefficient() -> Float
#endif
private extension UIView {
public extension UIView {
static var animationDurationFactor: Double {
#if targetEnvironment(simulator)
return Double(UIAnimationDragCoefficient())
@ -73,6 +73,21 @@ public struct Transition {
case easeInOut
case spring
case custom(Float, Float, Float, Float)
/// Evaluates this curve's easing function at `offset` (expected 0.0...1.0)
/// and returns the eased progress value.
public func solve(at offset: CGFloat) -> CGFloat {
    switch self {
    case .easeInOut:
        return listViewAnimationCurveEaseInOut(offset)
    case .spring:
        return listViewAnimationCurveSystem(offset)
    case let .custom(c1x, c1y, c2x, c2y):
        // Cubic bezier with control points (c1x, c1y) and (c2x, c2y).
        return bezierPoint(CGFloat(c1x), CGFloat(c1y), CGFloat(c2x), CGFloat(c2y), offset)
    }
}
/// A bezier curve (fast start, gentle settle) used for slide transitions.
public static var slide: Curve {
    return .custom(0.33, 0.52, 0.25, 0.99)
}
}
case none
@ -197,6 +212,62 @@ public struct Transition {
}
}
/// Applies `frame` to a view whose layer anchor point is (0, 0), animating the
/// size via bounds and the position via an *additive* animation so overlapping
/// transitions compose instead of snapping.
///
/// - Parameters:
///   - view: Target view; its layer's `anchorPoint` must be `.zero` (asserted).
///   - frame: The final frame.
///   - completion: Called once, with `true` only if both the bounds and the
///     position animations completed.
public func setFrameWithAdditivePosition(view: UIView, frame: CGRect, completion: ((Bool) -> Void)? = nil) {
    assert(view.layer.anchorPoint == CGPoint())
    if view.frame == frame {
        completion?(true)
        return
    }

    // Fire `completion` only after both sub-animations have reported back.
    var completedBounds: Bool?
    var completedPosition: Bool?
    let processCompletion: () -> Void = {
        guard let completedBounds, let completedPosition else {
            return
        }
        completion?(completedBounds && completedPosition)
    }

    self.setBounds(view: view, bounds: CGRect(origin: view.bounds.origin, size: frame.size), completion: { value in
        completedBounds = value
        processCompletion()
    })
    // Animate an additive offset from (current - target) back to zero. The
    // delta must be computed from the layer's position *before* the model
    // value is committed below — do not reorder these statements.
    self.animatePosition(view: view, from: CGPoint(x: -frame.minX + view.layer.position.x, y: -frame.minY + view.layer.position.y), to: CGPoint(), additive: true, completion: { value in
        completedPosition = value
        processCompletion()
    })

    view.layer.position = frame.origin
}
/// Layer counterpart of `setFrameWithAdditivePosition(view:frame:completion:)`:
/// applies `frame` to a layer with a (0, 0) anchor point, resizing via bounds
/// and moving via an *additive* position animation so concurrent transitions
/// compose smoothly.
///
/// - Parameters:
///   - layer: Target layer; `anchorPoint` must be `.zero` (asserted).
///   - frame: The final frame.
///   - completion: Called once, with `true` only if both sub-animations completed.
public func setFrameWithAdditivePosition(layer: CALayer, frame: CGRect, completion: ((Bool) -> Void)? = nil) {
    assert(layer.anchorPoint == CGPoint())
    if layer.frame == frame {
        completion?(true)
        return
    }

    // Fire `completion` only after both sub-animations have reported back.
    var completedBounds: Bool?
    var completedPosition: Bool?
    let processCompletion: () -> Void = {
        guard let completedBounds, let completedPosition else {
            return
        }
        completion?(completedBounds && completedPosition)
    }

    self.setBounds(layer: layer, bounds: CGRect(origin: layer.bounds.origin, size: frame.size), completion: { value in
        completedBounds = value
        processCompletion()
    })
    // The additive delta is taken from the position *before* the model value
    // is committed below — statement order is significant.
    self.animatePosition(layer: layer, from: CGPoint(x: -frame.minX + layer.position.x, y: -frame.minY + layer.position.y), to: CGPoint(), additive: true, completion: { value in
        completedPosition = value
        processCompletion()
    })

    layer.position = frame.origin
}
public func setBounds(view: UIView, bounds: CGRect, completion: ((Bool) -> Void)? = nil) {
if view.bounds == bounds {
completion?(true)
@ -351,7 +422,7 @@ public struct Transition {
delay: 0.0,
curve: curve,
removeOnCompletion: true,
additive: true,
additive: false,
completion: completion
)
}
@ -386,8 +457,15 @@ public struct Transition {
let t = layer.presentation()?.transform ?? layer.transform
let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
if currentScale == scale {
completion?(true)
return
if let animation = layer.animation(forKey: "transform.scale") as? CABasicAnimation, let toValue = animation.toValue as? NSNumber {
if toValue.doubleValue == scale {
completion?(true)
return
}
} else {
completion?(true)
return
}
}
switch self.animation {
case .none:
@ -414,9 +492,40 @@ public struct Transition {
self.setTransform(layer: view.layer, transform: transform, completion: completion)
}
/// Convenience overload forwarding to the layer-based keyframe implementation.
public func setTransformAsKeyframes(view: UIView, transform: (CGFloat, Bool) -> CATransform3D, completion: ((Bool) -> Void)? = nil) {
    self.setTransformAsKeyframes(layer: view.layer, transform: transform, completion: completion)
}
public func setTransform(layer: CALayer, transform: CATransform3D, completion: ((Bool) -> Void)? = nil) {
if let animation = layer.animation(forKey: "transform") as? CABasicAnimation, let toValue = animation.toValue as? NSValue {
if CATransform3DEqualToTransform(toValue.caTransform3DValue, transform) {
completion?(true)
return
}
} else if let animation = layer.animation(forKey: "transform") as? CAKeyframeAnimation, let toValue = animation.values?.last as? NSValue {
if CATransform3DEqualToTransform(toValue.caTransform3DValue, transform) {
completion?(true)
return
}
}
if CATransform3DEqualToTransform(layer.transform, transform) {
completion?(true)
return
}
switch self.animation {
case .none:
if layer.animation(forKey: "transform") != nil {
if let animation = layer.animation(forKey: "transform") as? CAKeyframeAnimation, let toValue = animation.values?.last as? NSValue {
if CATransform3DEqualToTransform(toValue.caTransform3DValue, transform) {
completion?(true)
return
}
}
layer.removeAnimation(forKey: "transform")
}
layer.transform = transform
completion?(true)
case let .curve(duration, curve):
@ -426,6 +535,7 @@ public struct Transition {
} else {
previousValue = layer.transform
}
layer.transform = transform
layer.animate(
from: NSValue(caTransform3D: previousValue),
@ -441,6 +551,67 @@ public struct Transition {
}
}
/// Updates `layer.transform`, sampling `transform` into an explicit keyframe
/// animation when this transition is animated.
///
/// - Parameters:
///   - layer: The layer whose transform is updated.
///   - transform: Produces the transform for an eased progress value in 0...1.
///     The second argument is `true` only when requesting the final, committed
///     transform.
///   - completion: Invoked with `true` when no change was needed or when the
///     keyframe animation finishes.
public func setTransformAsKeyframes(layer: CALayer, transform: (CGFloat, Bool) -> CATransform3D, completion: ((Bool) -> Void)? = nil) {
    let finalTransform = transform(1.0, true)

    let t = layer.transform
    do {
        // Early-out if an in-flight basic/keyframe animation already targets
        // the final transform, or the layer is already there.
        if let animation = layer.animation(forKey: "transform") as? CABasicAnimation, let toValue = animation.toValue as? NSValue {
            if CATransform3DEqualToTransform(toValue.caTransform3DValue, finalTransform) {
                completion?(true)
                return
            }
        } else if let animation = layer.animation(forKey: "transform") as? CAKeyframeAnimation, let toValue = animation.values?.last as? NSValue {
            if CATransform3DEqualToTransform(toValue.caTransform3DValue, finalTransform) {
                completion?(true)
                return
            }
        } else if CATransform3DEqualToTransform(t, finalTransform) {
            completion?(true)
            return
        }
    }

    switch self.animation {
    case .none:
        if layer.animation(forKey: "transform") != nil {
            layer.removeAnimation(forKey: "transform")
        }
        layer.transform = transform(1.0, true)
        completion?(true)
    case let .curve(duration, curve):
        let framesPerSecond: CGFloat
        if #available(iOS 15.0, *) {
            framesPerSecond = duration * CGFloat(UIScreen.main.maximumFramesPerSecond)
        } else {
            framesPerSecond = 60.0
        }
        // NOTE(review): on iOS 15+ `framesPerSecond` is already multiplied by
        // `duration`, and it is multiplied by `duration` again here, so the
        // sample count scales with duration² — possibly unintentional; confirm
        // before changing.
        let numValues = Int(framesPerSecond * duration)
        if numValues == 0 {
            // Degenerate duration: commit the final value without animating.
            layer.transform = transform(1.0, true)
            completion?(true)
            return
        }
        // Sample the eased curve into explicit keyframe values.
        var values: [AnyObject] = []
        for i in 0 ... numValues {
            let t = curve.solve(at: CGFloat(i) / CGFloat(numValues))
            values.append(NSValue(caTransform3D: transform(t, false)))
        }
        layer.transform = transform(1.0, true)
        layer.animateKeyframes(
            values: values,
            duration: duration,
            keyPath: "transform",
            removeOnCompletion: true,
            completion: completion
        )
    }
}
/// Convenience overload forwarding to the layer-based implementation.
public func setSublayerTransform(view: UIView, transform: CATransform3D, completion: ((Bool) -> Void)? = nil) {
    self.setSublayerTransform(layer: view.layer, transform: transform, completion: completion)
}
@ -732,6 +903,80 @@ public struct Transition {
}
}
/// Updates `strokeStart` on a shape layer, animating the change when this
/// transition carries an animation curve.
public func setShapeLayerStrokeStart(layer: CAShapeLayer, strokeStart: CGFloat, completion: ((Bool) -> Void)? = nil) {
    switch self.animation {
    case let .curve(duration, curve):
        // Commit the model value first, then animate from the old value.
        let fromValue = layer.strokeStart
        layer.strokeStart = strokeStart
        layer.animate(
            from: fromValue as NSNumber,
            to: strokeStart as NSNumber,
            keyPath: "strokeStart",
            duration: duration,
            delay: 0.0,
            curve: curve,
            removeOnCompletion: true,
            additive: false,
            completion: completion
        )
    case .none:
        layer.strokeStart = strokeStart
        completion?(true)
    }
}
/// Updates `strokeEnd` on a shape layer, animating the change when this
/// transition carries an animation curve.
public func setShapeLayerStrokeEnd(layer: CAShapeLayer, strokeEnd: CGFloat, completion: ((Bool) -> Void)? = nil) {
    switch self.animation {
    case let .curve(duration, curve):
        // Commit the model value first, then animate from the old value.
        let fromValue = layer.strokeEnd
        layer.strokeEnd = strokeEnd
        layer.animate(
            from: fromValue as NSNumber,
            to: strokeEnd as NSNumber,
            keyPath: "strokeEnd",
            duration: duration,
            delay: 0.0,
            curve: curve,
            removeOnCompletion: true,
            additive: false,
            completion: completion
        )
    case .none:
        layer.strokeEnd = strokeEnd
        completion?(true)
    }
}
/// Updates `fillColor` on a shape layer, animating the change when this
/// transition carries an animation curve.
///
/// Fix: the early-exit previously compared `layer.layerTintColor` — a property
/// unrelated to the fill — against the requested color (an apparent copy-paste
/// from a tint setter), so redundant fill updates were detected against the
/// wrong value. It now compares the actual `fillColor`.
public func setShapeLayerFillColor(layer: CAShapeLayer, color: UIColor, completion: ((Bool) -> Void)? = nil) {
    // Nothing to do if the fill already matches the requested color.
    if let current = layer.fillColor, current == color.cgColor {
        completion?(true)
        return
    }

    switch self.animation {
    case .none:
        layer.fillColor = color.cgColor
        completion?(true)
    case let .curve(duration, curve):
        // Animate from the previous fill (treating nil as clear) to the new color.
        let previousColor: CGColor = layer.fillColor ?? UIColor.clear.cgColor
        layer.fillColor = color.cgColor
        layer.animate(
            from: previousColor,
            to: color.cgColor,
            keyPath: "fillColor",
            duration: duration,
            delay: 0.0,
            curve: curve,
            removeOnCompletion: true,
            additive: false,
            completion: completion
        )
    }
}
/// Convenience overload forwarding to the layer-based implementation.
public func setBackgroundColor(view: UIView, color: UIColor, completion: ((Bool) -> Void)? = nil) {
    self.setBackgroundColor(layer: view.layer, color: color, completion: completion)
}
@ -819,4 +1064,18 @@ public struct Transition {
)
}
}
/// Cross-animates a layer's `contents` from one image to another over
/// `duration` using the given easing `curve`.
///
/// Fix: the `curve` parameter was accepted but ignored — a hard-coded
/// `.easeInOut` was passed to the animation. It is now forwarded, so callers
/// get the curve they asked for; existing `.easeInOut` callers are unaffected.
///
/// - Parameters:
///   - layer: The layer whose `contents` is animated.
///   - fromImage: Starting contents.
///   - toImage: Final contents.
///   - duration: Animation duration in seconds.
///   - curve: Easing curve for the animation.
///   - completion: Invoked when the animation finishes.
public func animateContentsImage(layer: CALayer, from fromImage: CGImage, to toImage: CGImage, duration: Double, curve: Transition.Animation.Curve, completion: ((Bool) -> Void)? = nil) {
    layer.animate(
        from: fromImage,
        to: toImage,
        keyPath: "contents",
        duration: duration,
        delay: 0.0,
        curve: curve,
        removeOnCompletion: true,
        additive: false,
        completion: completion
    )
}
}

View File

@ -9,20 +9,24 @@ public final class Button: Component {
public let isEnabled: Bool
public let action: () -> Void
public let holdAction: (() -> Void)?
public let highlightedAction: ActionSlot<Bool>?
convenience public init(
content: AnyComponent<Empty>,
isEnabled: Bool = true,
action: @escaping () -> Void
automaticHighlight: Bool = true,
action: @escaping () -> Void,
highlightedAction: ActionSlot<Bool>? = nil
) {
self.init(
content: content,
minSize: nil,
tag: nil,
automaticHighlight: true,
automaticHighlight: automaticHighlight,
isEnabled: isEnabled,
action: action,
holdAction: nil
holdAction: nil,
highlightedAction: highlightedAction
)
}
@ -33,7 +37,8 @@ public final class Button: Component {
automaticHighlight: Bool = true,
isEnabled: Bool = true,
action: @escaping () -> Void,
holdAction: (() -> Void)?
holdAction: (() -> Void)?,
highlightedAction: ActionSlot<Bool>?
) {
self.content = content
self.minSize = minSize
@ -42,6 +47,7 @@ public final class Button: Component {
self.isEnabled = isEnabled
self.action = action
self.holdAction = holdAction
self.highlightedAction = highlightedAction
}
public func minSize(_ minSize: CGSize?) -> Button {
@ -52,7 +58,8 @@ public final class Button: Component {
automaticHighlight: self.automaticHighlight,
isEnabled: self.isEnabled,
action: self.action,
holdAction: self.holdAction
holdAction: self.holdAction,
highlightedAction: self.highlightedAction
)
}
@ -64,7 +71,8 @@ public final class Button: Component {
automaticHighlight: self.automaticHighlight,
isEnabled: self.isEnabled,
action: self.action,
holdAction: holdAction
holdAction: holdAction,
highlightedAction: self.highlightedAction
)
}
@ -76,7 +84,8 @@ public final class Button: Component {
automaticHighlight: self.automaticHighlight,
isEnabled: self.isEnabled,
action: self.action,
holdAction: self.holdAction
holdAction: self.holdAction,
highlightedAction: self.highlightedAction
)
}
@ -102,14 +111,21 @@ public final class Button: Component {
public final class View: UIButton, ComponentTaggedView {
private let contentView: ComponentHostView<Empty>
public var content: UIView? {
return self.contentView.componentView
}
private var component: Button?
private var currentIsHighlighted: Bool = false {
didSet {
guard let component = self.component, component.automaticHighlight else {
guard let component = self.component else {
return
}
if self.currentIsHighlighted != oldValue {
self.updateAlpha(transition: .immediate)
if component.automaticHighlight {
self.updateAlpha(transition: .immediate)
}
component.highlightedAction?.invoke(self.currentIsHighlighted)
}
}
}
@ -137,9 +153,12 @@ public final class Button: Component {
override init(frame: CGRect) {
self.contentView = ComponentHostView<Empty>()
self.contentView.isUserInteractionEnabled = false
self.contentView.layer.allowsGroupOpacity = true
super.init(frame: frame)
self.isExclusiveTouch = true
self.addSubview(self.contentView)
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)

View File

@ -4,13 +4,19 @@ import UIKit
public final class Image: Component {
public let image: UIImage?
public let tintColor: UIColor?
public let size: CGSize?
public let contentMode: UIImageView.ContentMode
public init(
image: UIImage?,
tintColor: UIColor? = nil
tintColor: UIColor? = nil,
size: CGSize? = nil,
contentMode: UIImageView.ContentMode = .scaleToFill
) {
self.image = image
self.tintColor = tintColor
self.size = size
self.contentMode = contentMode
}
public static func ==(lhs: Image, rhs: Image) -> Bool {
@ -20,6 +26,12 @@ public final class Image: Component {
if lhs.tintColor != rhs.tintColor {
return false
}
if lhs.size != rhs.size {
return false
}
if lhs.contentMode != rhs.contentMode {
return false
}
return true
}
@ -35,8 +47,9 @@ public final class Image: Component {
func update(component: Image, availableSize: CGSize, environment: Environment<Empty>, transition: Transition) -> CGSize {
self.image = component.image
self.tintColor = component.tintColor
self.contentMode = component.contentMode
return availableSize
return component.size ?? availableSize
}
}

View File

@ -58,7 +58,7 @@ public final class RoundedRectangle: Component {
}
self.image = UIGraphicsGetImageFromCurrentImageContext()?.stretchableImage(withLeftCapWidth: Int(component.cornerRadius), topCapHeight: Int(component.cornerRadius))
UIGraphicsEndImageContext()
} else if component.colors.count > 1{
} else if component.colors.count > 1 {
let imageSize = availableSize
UIGraphicsBeginImageContextWithOptions(imageSize, false, 0.0)
if let context = UIGraphicsGetCurrentContext() {

Some files were not shown because too many files have changed in this diff Show More