Merge branch 'master' into temp-10

This commit is contained in:
Ali 2023-06-12 18:22:33 +03:00
commit d1817dca18
26 changed files with 533 additions and 205 deletions

View File

@ -1200,6 +1200,11 @@ private final class DrawingScreenComponent: CombinedComponent {
.opacity(controlsAreVisible ? 1.0 : 0.0)
)
var additionalBottomInset: CGFloat = 0.0
if component.sourceHint == .storyEditor {
additionalBottomInset = max(0.0, previewBottomInset - environment.safeInsets.bottom - 49.0)
}
if let textEntity = state.selectedEntity as? DrawingTextEntity {
let textSettings = textSettings.update(
component: TextSettingsComponent(
@ -1277,7 +1282,7 @@ private final class DrawingScreenComponent: CombinedComponent {
transition: context.transition
)
context.add(textSettings
.position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height - environment.safeInsets.bottom - textSettings.size.height / 2.0 - 89.0))
.position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height - environment.safeInsets.bottom - textSettings.size.height / 2.0 - 89.0 - additionalBottomInset))
.appear(Transition.Appear({ _, view, transition in
if let view = view as? TextSettingsComponent.View, !transition.animation.isImmediate {
view.animateIn()
@ -1293,11 +1298,6 @@ private final class DrawingScreenComponent: CombinedComponent {
)
}
var additionalBottomInset: CGFloat = 0.0
if component.sourceHint == .storyEditor {
additionalBottomInset = max(0.0, previewBottomInset - environment.safeInsets.bottom - 49.0)
}
let rightButtonPosition = rightEdge - 24.0
var offsetX: CGFloat = leftEdge + 24.0
let delta: CGFloat = (rightButtonPosition - offsetX) / 7.0

View File

@ -361,7 +361,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[2107670217] = { return Api.InputPeer.parse_inputPeerSelf($0) }
dict[-571955892] = { return Api.InputPeer.parse_inputPeerUser($0) }
dict[-1468331492] = { return Api.InputPeer.parse_inputPeerUserFromMessage($0) }
dict[-551616469] = { return Api.InputPeerNotifySettings.parse_inputPeerNotifySettings($0) }
dict[-505078139] = { return Api.InputPeerNotifySettings.parse_inputPeerNotifySettings($0) }
dict[506920429] = { return Api.InputPhoneCall.parse_inputPhoneCall($0) }
dict[1001634122] = { return Api.InputPhoto.parse_inputPhoto($0) }
dict[483901197] = { return Api.InputPhoto.parse_inputPhotoEmpty($0) }
@ -528,7 +528,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-156940077] = { return Api.MessageMedia.parse_messageMediaInvoice($0) }
dict[1766936791] = { return Api.MessageMedia.parse_messageMediaPhoto($0) }
dict[1272375192] = { return Api.MessageMedia.parse_messageMediaPoll($0) }
dict[-946147823] = { return Api.MessageMedia.parse_messageMediaStory($0) }
dict[-877523576] = { return Api.MessageMedia.parse_messageMediaStory($0) }
dict[-1618676578] = { return Api.MessageMedia.parse_messageMediaUnsupported($0) }
dict[784356159] = { return Api.MessageMedia.parse_messageMediaVenue($0) }
dict[-1557277184] = { return Api.MessageMedia.parse_messageMediaWebPage($0) }
@ -619,7 +619,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-386039788] = { return Api.PeerBlocked.parse_peerBlocked($0) }
dict[-901375139] = { return Api.PeerLocated.parse_peerLocated($0) }
dict[-118740917] = { return Api.PeerLocated.parse_peerSelfLocated($0) }
dict[-1472527322] = { return Api.PeerNotifySettings.parse_peerNotifySettings($0) }
dict[1826385490] = { return Api.PeerNotifySettings.parse_peerNotifySettings($0) }
dict[-1525149427] = { return Api.PeerSettings.parse_peerSettings($0) }
dict[-1770029977] = { return Api.PhoneCall.parse_phoneCall($0) }
dict[912311057] = { return Api.PhoneCall.parse_phoneCallAccepted($0) }

View File

@ -744,7 +744,7 @@ public extension Api {
case messageMediaInvoice(flags: Int32, title: String, description: String, photo: Api.WebDocument?, receiptMsgId: Int32?, currency: String, totalAmount: Int64, startParam: String, extendedMedia: Api.MessageExtendedMedia?)
case messageMediaPhoto(flags: Int32, photo: Api.Photo?, ttlSeconds: Int32?)
case messageMediaPoll(poll: Api.Poll, results: Api.PollResults)
case messageMediaStory(userId: Int64, id: Int32)
case messageMediaStory(flags: Int32, userId: Int64, id: Int32, story: Api.StoryItem?)
case messageMediaUnsupported
case messageMediaVenue(geo: Api.GeoPoint, title: String, address: String, provider: String, venueId: String, venueType: String)
case messageMediaWebPage(webpage: Api.WebPage)
@ -834,12 +834,14 @@ public extension Api {
poll.serialize(buffer, true)
results.serialize(buffer, true)
break
case .messageMediaStory(let userId, let id):
case .messageMediaStory(let flags, let userId, let id, let story):
if boxed {
buffer.appendInt32(-946147823)
buffer.appendInt32(-877523576)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeInt64(userId, buffer: buffer, boxed: false)
serializeInt32(id, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {story!.serialize(buffer, true)}
break
case .messageMediaUnsupported:
if boxed {
@ -889,8 +891,8 @@ public extension Api {
return ("messageMediaPhoto", [("flags", flags as Any), ("photo", photo as Any), ("ttlSeconds", ttlSeconds as Any)])
case .messageMediaPoll(let poll, let results):
return ("messageMediaPoll", [("poll", poll as Any), ("results", results as Any)])
case .messageMediaStory(let userId, let id):
return ("messageMediaStory", [("userId", userId as Any), ("id", id as Any)])
case .messageMediaStory(let flags, let userId, let id, let story):
return ("messageMediaStory", [("flags", flags as Any), ("userId", userId as Any), ("id", id as Any), ("story", story as Any)])
case .messageMediaUnsupported:
return ("messageMediaUnsupported", [])
case .messageMediaVenue(let geo, let title, let address, let provider, let venueId, let venueType):
@ -1092,14 +1094,22 @@ public extension Api {
}
}
public static func parse_messageMediaStory(_ reader: BufferReader) -> MessageMedia? {
var _1: Int64?
_1 = reader.readInt64()
var _2: Int32?
_2 = reader.readInt32()
var _1: Int32?
_1 = reader.readInt32()
var _2: Int64?
_2 = reader.readInt64()
var _3: Int32?
_3 = reader.readInt32()
var _4: Api.StoryItem?
if Int(_1!) & Int(1 << 0) != 0 {if let signature = reader.readInt32() {
_4 = Api.parse(reader, signature: signature) as? Api.StoryItem
} }
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.MessageMedia.messageMediaStory(userId: _1!, id: _2!)
let _c3 = _3 != nil
let _c4 = (Int(_1!) & Int(1 << 0) == 0) || _4 != nil
if _c1 && _c2 && _c3 && _c4 {
return Api.MessageMedia.messageMediaStory(flags: _1!, userId: _2!, id: _3!, story: _4)
}
else {
return nil

View File

@ -754,13 +754,13 @@ public extension Api {
}
public extension Api {
enum PeerNotifySettings: TypeConstructorDescription {
case peerNotifySettings(flags: Int32, showPreviews: Api.Bool?, silent: Api.Bool?, muteUntil: Int32?, iosSound: Api.NotificationSound?, androidSound: Api.NotificationSound?, otherSound: Api.NotificationSound?)
case peerNotifySettings(flags: Int32, showPreviews: Api.Bool?, silent: Api.Bool?, muteUntil: Int32?, iosSound: Api.NotificationSound?, androidSound: Api.NotificationSound?, otherSound: Api.NotificationSound?, storiesMuted: Api.Bool?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .peerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let iosSound, let androidSound, let otherSound):
case .peerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let iosSound, let androidSound, let otherSound, let storiesMuted):
if boxed {
buffer.appendInt32(-1472527322)
buffer.appendInt32(1826385490)
}
serializeInt32(flags, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {showPreviews!.serialize(buffer, true)}
@ -769,14 +769,15 @@ public extension Api {
if Int(flags) & Int(1 << 3) != 0 {iosSound!.serialize(buffer, true)}
if Int(flags) & Int(1 << 4) != 0 {androidSound!.serialize(buffer, true)}
if Int(flags) & Int(1 << 5) != 0 {otherSound!.serialize(buffer, true)}
if Int(flags) & Int(1 << 6) != 0 {storiesMuted!.serialize(buffer, true)}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .peerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let iosSound, let androidSound, let otherSound):
return ("peerNotifySettings", [("flags", flags as Any), ("showPreviews", showPreviews as Any), ("silent", silent as Any), ("muteUntil", muteUntil as Any), ("iosSound", iosSound as Any), ("androidSound", androidSound as Any), ("otherSound", otherSound as Any)])
case .peerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let iosSound, let androidSound, let otherSound, let storiesMuted):
return ("peerNotifySettings", [("flags", flags as Any), ("showPreviews", showPreviews as Any), ("silent", silent as Any), ("muteUntil", muteUntil as Any), ("iosSound", iosSound as Any), ("androidSound", androidSound as Any), ("otherSound", otherSound as Any), ("storiesMuted", storiesMuted as Any)])
}
}
@ -805,6 +806,10 @@ public extension Api {
if Int(_1!) & Int(1 << 5) != 0 {if let signature = reader.readInt32() {
_7 = Api.parse(reader, signature: signature) as? Api.NotificationSound
} }
var _8: Api.Bool?
if Int(_1!) & Int(1 << 6) != 0 {if let signature = reader.readInt32() {
_8 = Api.parse(reader, signature: signature) as? Api.Bool
} }
let _c1 = _1 != nil
let _c2 = (Int(_1!) & Int(1 << 0) == 0) || _2 != nil
let _c3 = (Int(_1!) & Int(1 << 1) == 0) || _3 != nil
@ -812,8 +817,9 @@ public extension Api {
let _c5 = (Int(_1!) & Int(1 << 3) == 0) || _5 != nil
let _c6 = (Int(_1!) & Int(1 << 4) == 0) || _6 != nil
let _c7 = (Int(_1!) & Int(1 << 5) == 0) || _7 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 {
return Api.PeerNotifySettings.peerNotifySettings(flags: _1!, showPreviews: _2, silent: _3, muteUntil: _4, iosSound: _5, androidSound: _6, otherSound: _7)
let _c8 = (Int(_1!) & Int(1 << 6) == 0) || _8 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 && _c8 {
return Api.PeerNotifySettings.peerNotifySettings(flags: _1!, showPreviews: _2, silent: _3, muteUntil: _4, iosSound: _5, androidSound: _6, otherSound: _7, storiesMuted: _8)
}
else {
return nil

View File

@ -278,27 +278,28 @@ public extension Api {
}
public extension Api {
enum InputPeerNotifySettings: TypeConstructorDescription {
case inputPeerNotifySettings(flags: Int32, showPreviews: Api.Bool?, silent: Api.Bool?, muteUntil: Int32?, sound: Api.NotificationSound?)
case inputPeerNotifySettings(flags: Int32, showPreviews: Api.Bool?, silent: Api.Bool?, muteUntil: Int32?, sound: Api.NotificationSound?, storiesMuted: Api.Bool?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .inputPeerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let sound):
case .inputPeerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let sound, let storiesMuted):
if boxed {
buffer.appendInt32(-551616469)
buffer.appendInt32(-505078139)
}
serializeInt32(flags, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {showPreviews!.serialize(buffer, true)}
if Int(flags) & Int(1 << 1) != 0 {silent!.serialize(buffer, true)}
if Int(flags) & Int(1 << 2) != 0 {serializeInt32(muteUntil!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 3) != 0 {sound!.serialize(buffer, true)}
if Int(flags) & Int(1 << 6) != 0 {storiesMuted!.serialize(buffer, true)}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .inputPeerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let sound):
return ("inputPeerNotifySettings", [("flags", flags as Any), ("showPreviews", showPreviews as Any), ("silent", silent as Any), ("muteUntil", muteUntil as Any), ("sound", sound as Any)])
case .inputPeerNotifySettings(let flags, let showPreviews, let silent, let muteUntil, let sound, let storiesMuted):
return ("inputPeerNotifySettings", [("flags", flags as Any), ("showPreviews", showPreviews as Any), ("silent", silent as Any), ("muteUntil", muteUntil as Any), ("sound", sound as Any), ("storiesMuted", storiesMuted as Any)])
}
}
@ -319,13 +320,18 @@ public extension Api {
if Int(_1!) & Int(1 << 3) != 0 {if let signature = reader.readInt32() {
_5 = Api.parse(reader, signature: signature) as? Api.NotificationSound
} }
var _6: Api.Bool?
if Int(_1!) & Int(1 << 6) != 0 {if let signature = reader.readInt32() {
_6 = Api.parse(reader, signature: signature) as? Api.Bool
} }
let _c1 = _1 != nil
let _c2 = (Int(_1!) & Int(1 << 0) == 0) || _2 != nil
let _c3 = (Int(_1!) & Int(1 << 1) == 0) || _3 != nil
let _c4 = (Int(_1!) & Int(1 << 2) == 0) || _4 != nil
let _c5 = (Int(_1!) & Int(1 << 3) == 0) || _5 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 {
return Api.InputPeerNotifySettings.inputPeerNotifySettings(flags: _1!, showPreviews: _2, silent: _3, muteUntil: _4, sound: _5)
let _c6 = (Int(_1!) & Int(1 << 6) == 0) || _6 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 {
return Api.InputPeerNotifySettings.inputPeerNotifySettings(flags: _1!, showPreviews: _2, silent: _3, muteUntil: _4, sound: _5, storiesMuted: _6)
}
else {
return nil

View File

@ -380,7 +380,7 @@ func textMediaAndExpirationTimerFromApiMedia(_ media: Api.MessageMedia?, _ peerI
}
case let .messageMediaDice(value, emoticon):
return (TelegramMediaDice(emoji: emoticon, value: value), nil, nil, nil)
case let .messageMediaStory(userId, id):
case let .messageMediaStory(_, userId, id, _):
return (TelegramMediaStory(storyId: StoryId(peerId: PeerId(namespace: Namespaces.Peer.CloudUser, id: PeerId.Id._internalFromInt64Value(userId)), id: id)), nil, nil, nil)
}
}

View File

@ -6,7 +6,7 @@ import TelegramApi
extension TelegramPeerNotificationSettings {
convenience init(apiSettings: Api.PeerNotifySettings) {
switch apiSettings {
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound):
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound, storiesMuted):
let sound: Api.NotificationSound?
#if os(iOS)
sound = iosSound
@ -34,7 +34,13 @@ extension TelegramPeerNotificationSettings {
} else {
displayPreviews = .default
}
self.init(muteState: muteState, messageSound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault), displayPreviews: displayPreviews)
var storiesMutedValue: Bool?
if let storiesMuted = storiesMuted {
storiesMutedValue = storiesMuted == .boolTrue
}
self.init(muteState: muteState, messageSound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault), displayPreviews: displayPreviews, storiesMuted: storiesMutedValue)
}
}
}

View File

@ -6,7 +6,7 @@ import TelegramApi
extension MessageNotificationSettings {
init(apiSettings: Api.PeerNotifySettings) {
switch apiSettings {
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound):
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound, storiesMuted):
let sound: Api.NotificationSound?
#if os(iOS)
sound = iosSound
@ -19,7 +19,11 @@ extension MessageNotificationSettings {
} else {
displayPreviews = true
}
self = MessageNotificationSettings(enabled: muteUntil == 0, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault))
var storiesMutedValue: Bool?
if let storiesMuted = storiesMuted {
storiesMutedValue = storiesMuted == .boolTrue
}
self = MessageNotificationSettings(enabled: muteUntil == 0, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault), storiesMuted: storiesMutedValue)
}
}
}

View File

@ -116,7 +116,7 @@ private func fetchedNotificationSettings(network: Network) -> Signal<GlobalNotif
|> map { chats, users, channels, contactsJoinedMuted in
let chatsSettings: MessageNotificationSettings
switch chats {
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound):
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound, storiesMuted):
let sound: Api.NotificationSound?
#if os(iOS)
sound = iosSound
@ -136,12 +136,18 @@ private func fetchedNotificationSettings(network: Network) -> Signal<GlobalNotif
} else {
displayPreviews = true
}
chatsSettings = MessageNotificationSettings(enabled: enabled, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault))
var storiesMutedValue: Bool?
if let storiesMuted = storiesMuted {
storiesMutedValue = storiesMuted == .boolTrue
}
chatsSettings = MessageNotificationSettings(enabled: enabled, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault), storiesMuted: storiesMutedValue)
}
let userSettings: MessageNotificationSettings
switch users {
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound):
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound, storiesMuted):
let sound: Api.NotificationSound?
#if os(iOS)
sound = iosSound
@ -161,12 +167,18 @@ private func fetchedNotificationSettings(network: Network) -> Signal<GlobalNotif
} else {
displayPreviews = true
}
userSettings = MessageNotificationSettings(enabled: enabled, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault))
var storiesMutedValue: Bool?
if let storiesMuted = storiesMuted {
storiesMutedValue = storiesMuted == .boolTrue
}
userSettings = MessageNotificationSettings(enabled: enabled, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault), storiesMuted: storiesMutedValue)
}
let channelSettings: MessageNotificationSettings
switch channels {
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound):
case let .peerNotifySettings(_, showPreviews, _, muteUntil, iosSound, _, desktopSound, storiesMuted):
let sound: Api.NotificationSound?
#if os(iOS)
sound = iosSound
@ -186,7 +198,13 @@ private func fetchedNotificationSettings(network: Network) -> Signal<GlobalNotif
} else {
displayPreviews = true
}
channelSettings = MessageNotificationSettings(enabled: enabled, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault))
var storiesMutedValue: Bool?
if let storiesMuted = storiesMuted {
storiesMutedValue = storiesMuted == .boolTrue
}
channelSettings = MessageNotificationSettings(enabled: enabled, displayPreviews: displayPreviews, sound: PeerMessageSound(apiSound: sound ?? .notificationSoundDefault), storiesMuted: storiesMutedValue)
}
return GlobalNotificationSettingsSet(privateChats: userSettings, groupChats: chatsSettings, channels: channelSettings, contactsJoined: contactsJoinedMuted == .boolFalse)
@ -209,7 +227,14 @@ private func apiInputPeerNotifySettings(_ settings: MessageNotificationSettings)
if sound != nil {
flags |= (1 << 3)
}
return .inputPeerNotifySettings(flags: flags, showPreviews: settings.displayPreviews ? .boolTrue : .boolFalse, silent: nil, muteUntil: muteUntil, sound: sound)
var storiesMuted: Api.Bool?
if let storiesMutedValue = settings.storiesMuted {
flags |= (1 << 6)
storiesMuted = storiesMutedValue ? .boolTrue : .boolFalse
}
return .inputPeerNotifySettings(flags: flags, showPreviews: settings.displayPreviews ? .boolTrue : .boolFalse, silent: nil, muteUntil: muteUntil, sound: sound, storiesMuted: storiesMuted)
}
private func pushedNotificationSettings(network: Network, settings: GlobalNotificationSettingsSet) -> Signal<Void, NoError> {

View File

@ -130,7 +130,13 @@ func pushPeerNotificationSettings(postbox: Postbox, network: Network, peerId: Pe
if sound != nil {
flags |= (1 << 3)
}
let inputSettings = Api.InputPeerNotifySettings.inputPeerNotifySettings(flags: flags, showPreviews: showPreviews, silent: nil, muteUntil: muteUntil, sound: sound)
var storiesMuted: Api.Bool?
if let storiesMutedValue = settings.storiesMuted {
flags |= (1 << 6)
storiesMuted = storiesMutedValue ? .boolTrue : .boolFalse
}
let inputSettings = Api.InputPeerNotifySettings.inputPeerNotifySettings(flags: flags, showPreviews: showPreviews, silent: nil, muteUntil: muteUntil, sound: sound, storiesMuted: storiesMuted)
return network.request(Api.functions.account.updateNotifySettings(peer: .inputNotifyForumTopic(peer: inputPeer, topMsgId: Int32(clamping: threadId)), settings: inputSettings))
|> `catch` { _ -> Signal<Api.Bool, NoError> in
return .single(.boolFalse)
@ -173,7 +179,12 @@ func pushPeerNotificationSettings(postbox: Postbox, network: Network, peerId: Pe
if sound != nil {
flags |= (1 << 3)
}
let inputSettings = Api.InputPeerNotifySettings.inputPeerNotifySettings(flags: flags, showPreviews: showPreviews, silent: nil, muteUntil: muteUntil, sound: sound)
var storiesMuted: Api.Bool?
if let storiesMutedValue = settings.storiesMuted {
flags |= (1 << 6)
storiesMuted = storiesMutedValue ? .boolTrue : .boolFalse
}
let inputSettings = Api.InputPeerNotifySettings.inputPeerNotifySettings(flags: flags, showPreviews: showPreviews, silent: nil, muteUntil: muteUntil, sound: sound, storiesMuted: storiesMuted)
return network.request(Api.functions.account.updateNotifySettings(peer: .inputNotifyPeer(peer: inputPeer), settings: inputSettings))
|> `catch` { _ -> Signal<Api.Bool, NoError> in
return .single(.boolFalse)

View File

@ -4,15 +4,17 @@ public struct MessageNotificationSettings: Codable, Equatable {
public var enabled: Bool
public var displayPreviews: Bool
public var sound: PeerMessageSound
public var storiesMuted: Bool?
public static var defaultSettings: MessageNotificationSettings {
return MessageNotificationSettings(enabled: true, displayPreviews: true, sound: defaultCloudPeerNotificationSound)
return MessageNotificationSettings(enabled: true, displayPreviews: true, sound: defaultCloudPeerNotificationSound, storiesMuted: nil)
}
public init(enabled: Bool, displayPreviews: Bool, sound: PeerMessageSound) {
public init(enabled: Bool, displayPreviews: Bool, sound: PeerMessageSound, storiesMuted: Bool?) {
self.enabled = enabled
self.displayPreviews = displayPreviews
self.sound = sound
self.storiesMuted = storiesMuted
}
public init(from decoder: Decoder) throws {
@ -22,6 +24,8 @@ public struct MessageNotificationSettings: Codable, Equatable {
self.displayPreviews = ((try? container.decode(Int32.self, forKey: "p")) ?? 0) != 0
self.sound = try PeerMessageSound.decodeInline(container)
self.storiesMuted = try? container.decodeIfPresent(Bool.self, forKey: "st")
}
public func encode(to encoder: Encoder) throws {
@ -30,6 +34,7 @@ public struct MessageNotificationSettings: Codable, Equatable {
try container.encode((self.enabled ? 1 : 0) as Int32, forKey: "e")
try container.encode((self.displayPreviews ? 1 : 0) as Int32, forKey: "p")
try self.sound.encodeInline(&container)
try container.encodeIfPresent(self.storiesMuted, forKey: "st")
}
}

View File

@ -392,9 +392,10 @@ public final class TelegramPeerNotificationSettings: PeerNotificationSettings, C
public let muteState: PeerMuteState
public let messageSound: PeerMessageSound
public let displayPreviews: PeerNotificationDisplayPreviews
public let storiesMuted: Bool?
public static var defaultSettings: TelegramPeerNotificationSettings {
return TelegramPeerNotificationSettings(muteState: .unmuted, messageSound: .default, displayPreviews: .default)
return TelegramPeerNotificationSettings(muteState: .unmuted, messageSound: .default, displayPreviews: .default, storiesMuted: nil)
}
public func isRemovedFromTotalUnreadCount(`default`: Bool) -> Bool {
@ -416,16 +417,18 @@ public final class TelegramPeerNotificationSettings: PeerNotificationSettings, C
}
}
public init(muteState: PeerMuteState, messageSound: PeerMessageSound, displayPreviews: PeerNotificationDisplayPreviews) {
public init(muteState: PeerMuteState, messageSound: PeerMessageSound, displayPreviews: PeerNotificationDisplayPreviews, storiesMuted: Bool?) {
self.muteState = muteState
self.messageSound = messageSound
self.displayPreviews = displayPreviews
self.storiesMuted = storiesMuted
}
public init(decoder: PostboxDecoder) {
self.muteState = PeerMuteState.decodeInline(decoder)
self.messageSound = PeerMessageSound.decodeInline(decoder)
self.displayPreviews = PeerNotificationDisplayPreviews.decodeInline(decoder)
self.storiesMuted = decoder.decodeOptionalBoolForKey("stm")
}
public init(from decoder: Decoder) throws {
@ -434,6 +437,7 @@ public final class TelegramPeerNotificationSettings: PeerNotificationSettings, C
self.muteState = try container.decode(PeerMuteState.self, forKey: "muteState")
self.messageSound = try container.decode(PeerMessageSound.self, forKey: "messageSound")
self.displayPreviews = try container.decode(PeerNotificationDisplayPreviews.self, forKey: "displayPreviews")
self.storiesMuted = try? container.decodeIfPresent(Bool.self, forKey: "stm")
}
public func encode(to encoder: Encoder) throws {
@ -442,12 +446,18 @@ public final class TelegramPeerNotificationSettings: PeerNotificationSettings, C
try container.encode(self.muteState, forKey: "muteState")
try container.encode(self.messageSound, forKey: "messageSound")
try container.encode(self.displayPreviews, forKey: "displayPreviews")
try container.encodeIfPresent(self.storiesMuted, forKey: "stm")
}
public func encode(_ encoder: PostboxEncoder) {
self.muteState.encodeInline(encoder)
self.messageSound.encodeInline(encoder)
self.displayPreviews.encodeInline(encoder)
if let storiesMuted = self.storiesMuted {
encoder.encodeBool(storiesMuted, forKey: "stm")
} else {
encoder.encodeNil(forKey: "stm")
}
}
public func isEqual(to: PeerNotificationSettings) -> Bool {
@ -459,18 +469,22 @@ public final class TelegramPeerNotificationSettings: PeerNotificationSettings, C
}
public func withUpdatedMuteState(_ muteState: PeerMuteState) -> TelegramPeerNotificationSettings {
return TelegramPeerNotificationSettings(muteState: muteState, messageSound: self.messageSound, displayPreviews: self.displayPreviews)
return TelegramPeerNotificationSettings(muteState: muteState, messageSound: self.messageSound, displayPreviews: self.displayPreviews, storiesMuted: self.storiesMuted)
}
public func withUpdatedMessageSound(_ messageSound: PeerMessageSound) -> TelegramPeerNotificationSettings {
return TelegramPeerNotificationSettings(muteState: self.muteState, messageSound: messageSound, displayPreviews: self.displayPreviews)
return TelegramPeerNotificationSettings(muteState: self.muteState, messageSound: messageSound, displayPreviews: self.displayPreviews, storiesMuted: self.storiesMuted)
}
public func withUpdatedDisplayPreviews(_ displayPreviews: PeerNotificationDisplayPreviews) -> TelegramPeerNotificationSettings {
return TelegramPeerNotificationSettings(muteState: self.muteState, messageSound: self.messageSound, displayPreviews: displayPreviews)
return TelegramPeerNotificationSettings(muteState: self.muteState, messageSound: self.messageSound, displayPreviews: displayPreviews, storiesMuted: self.storiesMuted)
}
public func withUpdatedStoriesMuted(_ storiesMuted: Bool?) -> TelegramPeerNotificationSettings {
return TelegramPeerNotificationSettings(muteState: self.muteState, messageSound: self.messageSound, displayPreviews: self.displayPreviews, storiesMuted: storiesMuted)
}
public static func ==(lhs: TelegramPeerNotificationSettings, rhs: TelegramPeerNotificationSettings) -> Bool {
return lhs.muteState == rhs.muteState && lhs.messageSound == rhs.messageSound && lhs.displayPreviews == rhs.displayPreviews
return lhs.muteState == rhs.muteState && lhs.messageSound == rhs.messageSound && lhs.displayPreviews == rhs.displayPreviews && lhs.storiesMuted == rhs.storiesMuted
}
}

View File

@ -88,15 +88,18 @@ public enum EnginePeer: Equatable {
public var muteState: MuteState
public var messageSound: MessageSound
public var displayPreviews: DisplayPreviews
public var storiesMuted: Bool?
public init(
muteState: MuteState,
messageSound: MessageSound,
displayPreviews: DisplayPreviews
displayPreviews: DisplayPreviews,
storiesMuted: Bool?
) {
self.muteState = muteState
self.messageSound = messageSound
self.displayPreviews = displayPreviews
self.storiesMuted = storiesMuted
}
}
@ -216,11 +219,13 @@ public struct EngineGlobalNotificationSettings: Equatable {
public var enabled: Bool
public var displayPreviews: Bool
public var sound: EnginePeer.NotificationSettings.MessageSound
public var storiesMuted: Bool
public init(enabled: Bool, displayPreviews: Bool, sound: EnginePeer.NotificationSettings.MessageSound) {
public init(enabled: Bool, displayPreviews: Bool, sound: EnginePeer.NotificationSettings.MessageSound, storiesMuted: Bool) {
self.enabled = enabled
self.displayPreviews = displayPreviews
self.sound = sound
self.storiesMuted = storiesMuted
}
}
@ -327,7 +332,8 @@ public extension EnginePeer.NotificationSettings {
self.init(
muteState: MuteState(notificationSettings.muteState),
messageSound: MessageSound(notificationSettings.messageSound),
displayPreviews: DisplayPreviews(notificationSettings.displayPreviews)
displayPreviews: DisplayPreviews(notificationSettings.displayPreviews),
storiesMuted: notificationSettings.storiesMuted
)
}
@ -335,7 +341,8 @@ public extension EnginePeer.NotificationSettings {
return TelegramPeerNotificationSettings(
muteState: self.muteState._asMuteState(),
messageSound: self.messageSound._asMessageSound(),
displayPreviews: self.displayPreviews._asDisplayPreviews()
displayPreviews: self.displayPreviews._asDisplayPreviews(),
storiesMuted: self.storiesMuted
)
}
}
@ -594,7 +601,8 @@ public extension EngineGlobalNotificationSettings.CategorySettings {
self.init(
enabled: categorySettings.enabled,
displayPreviews: categorySettings.displayPreviews,
sound: EnginePeer.NotificationSettings.MessageSound(categorySettings.sound)
sound: EnginePeer.NotificationSettings.MessageSound(categorySettings.sound),
storiesMuted: categorySettings.storiesMuted ?? false
)
}
@ -602,7 +610,8 @@ public extension EngineGlobalNotificationSettings.CategorySettings {
return MessageNotificationSettings(
enabled: self.enabled,
displayPreviews: self.displayPreviews,
sound: self.sound._asMessageSound()
sound: self.sound._asMessageSound(),
storiesMuted: self.storiesMuted
)
}
}

View File

@ -73,6 +73,7 @@ swift_library(
"//submodules/Components/BundleIconComponent:BundleIconComponent",
"//submodules/TooltipUI",
"//submodules/TelegramUI/Components/MediaEditor",
"//submodules/Components/MetalImageView:MetalImageView",
],
visibility = [
"//visibility:public",

View File

@ -148,6 +148,7 @@ private final class CameraScreenComponent: CombinedComponent {
var cameraState = CameraState(mode: .photo, position: .unspecified, flashMode: .off, flashModeDidChange: false, recording: .none, duration: 0.0)
var swipeHint: CaptureControlsComponent.SwipeHint = .none
var isTransitioning = false
private let hapticFeedback = HapticFeedback()
@ -267,6 +268,12 @@ private final class CameraScreenComponent: CombinedComponent {
self.completion.invoke(.single(.video(path, transitionImage, PixelDimensions(width: 1080, height: 1920))))
}
}))
self.isTransitioning = true
Queue.mainQueue().after(0.8, {
self.isTransitioning = false
self.updated(transition: .immediate)
})
self.updated(transition: .spring(duration: 0.4))
}
@ -290,7 +297,7 @@ private final class CameraScreenComponent: CombinedComponent {
let zoomControl = Child(ZoomComponent.self)
let flashButton = Child(CameraButton.self)
let modeControl = Child(ModeComponent.self)
let hintLabel = Child(MultilineTextComponent.self)
let hintLabel = Child(HintLabelComponent.self)
let timeBackground = Child(RoundedRectangle.self)
let timeLabel = Child(MultilineTextComponent.self)
@ -308,7 +315,7 @@ private final class CameraScreenComponent: CombinedComponent {
state?.updateCameraMode(mode)
})
if case .none = state.cameraState.recording {
if case .none = state.cameraState.recording, !state.isTransitioning {
let cancelButton = cancelButton.update(
component: CameraButton(
content: AnyComponentWithIdentity(
@ -420,17 +427,21 @@ private final class CameraScreenComponent: CombinedComponent {
}
let shutterState: ShutterButtonState
switch state.cameraState.recording {
case .handsFree:
shutterState = .stopRecording
case .holding:
shutterState = .holdRecording(progress: min(1.0, Float(state.cameraState.duration / 60.0)))
case .none:
switch state.cameraState.mode {
case .photo:
shutterState = .generic
case .video:
shutterState = .video
if state.isTransitioning {
shutterState = .transition
} else {
switch state.cameraState.recording {
case .handsFree:
shutterState = .stopRecording
case .holding:
shutterState = .holdRecording(progress: min(1.0, Float(state.cameraState.duration / 60.0)))
case .none:
switch state.cameraState.mode {
case .photo:
shutterState = .generic
case .video:
shutterState = .video
}
}
}
@ -505,7 +516,7 @@ private final class CameraScreenComponent: CombinedComponent {
isVideoRecording = true
}
if isVideoRecording {
if isVideoRecording && !state.isTransitioning {
let duration = Int(state.cameraState.duration)
let durationString = String(format: "%02d:%02d", (duration / 60) % 60, duration % 60)
let timeLabel = timeLabel.update(
@ -541,7 +552,7 @@ private final class CameraScreenComponent: CombinedComponent {
let hintText: String?
switch state.swipeHint {
case .none:
hintText = nil
hintText = " "
case .zoom:
hintText = "Swipe up to zoom"
case .lock:
@ -553,10 +564,7 @@ private final class CameraScreenComponent: CombinedComponent {
}
if let hintText {
let hintLabel = hintLabel.update(
component: MultilineTextComponent(
text: .plain(NSAttributedString(string: hintText.uppercased(), font: Font.with(size: 14.0, design: .camera, weight: .semibold), textColor: .white)),
horizontalAlignment: .center
),
component: HintLabelComponent(text: hintText),
availableSize: availableSize,
transition: .immediate
)
@ -569,7 +577,7 @@ private final class CameraScreenComponent: CombinedComponent {
}
}
if case .none = state.cameraState.recording {
if case .none = state.cameraState.recording, !state.isTransitioning {
let modeControl = modeControl.update(
component: ModeComponent(
availableModes: [.photo, .video],
@ -878,6 +886,7 @@ public class CameraScreen: ViewController {
self.effectivePreviewView.addGestureRecognizer(pinchGestureRecognizer)
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
panGestureRecognizer.maximumNumberOfTouches = 1
self.effectivePreviewView.addGestureRecognizer(panGestureRecognizer)
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.handleTap(_:)))

View File

@ -11,6 +11,7 @@ enum ShutterButtonState: Equatable {
case video
case stopRecording
case holdRecording(progress: Float)
case transition
}
private let maximumShutterSize = CGSize(width: 96.0, height: 96.0)
@ -141,6 +142,12 @@ private final class ShutterButtonContentComponent: Component {
innerCornerRadius = innerSize.height / 2.0
ringSize = CGSize(width: 92.0, height: 92.0)
recordingProgress = progress
case .transition:
innerColor = videoRedColor
innerSize = CGSize(width: 60.0, height: 60.0)
innerCornerRadius = innerSize.height / 2.0
ringSize = CGSize(width: 68.0, height: 68.0)
recordingProgress = 0.0
}
self.ringLayer.fillColor = UIColor.clear.cgColor
@ -573,6 +580,7 @@ final class CaptureControlsComponent: Component {
let buttonSideInset: CGFloat = 28.0
//let buttonMaxOffset: CGFloat = 100.0
var isTransitioning = false
var isRecording = false
var isHolding = false
if case .stopRecording = component.shutterState {
@ -580,6 +588,8 @@ final class CaptureControlsComponent: Component {
} else if case .holdRecording = component.shutterState {
isRecording = true
isHolding = true
} else if case .transition = component.shutterState {
isTransitioning = true
}
let galleryButtonSize = self.galleryButtonView.update(
@ -615,8 +625,8 @@ final class CaptureControlsComponent: Component {
transition.setBounds(view: galleryButtonView, bounds: CGRect(origin: .zero, size: galleryButtonFrame.size))
transition.setPosition(view: galleryButtonView, position: galleryButtonFrame.center)
transition.setScale(view: galleryButtonView, scale: isRecording ? 0.1 : 1.0)
transition.setAlpha(view: galleryButtonView, alpha: isRecording ? 0.0 : 1.0)
transition.setScale(view: galleryButtonView, scale: isRecording || isTransitioning ? 0.1 : 1.0)
transition.setAlpha(view: galleryButtonView, alpha: isRecording || isTransitioning ? 0.0 : 1.0)
}
let _ = self.lockView.update(
@ -678,13 +688,16 @@ final class CaptureControlsComponent: Component {
}
transition.setBounds(view: flipButtonView, bounds: CGRect(origin: .zero, size: flipButtonFrame.size))
transition.setPosition(view: flipButtonView, position: flipButtonFrame.center)
transition.setScale(view: flipButtonView, scale: isTransitioning ? 0.01 : 1.0)
transition.setAlpha(view: flipButtonView, alpha: isTransitioning ? 0.0 : 1.0)
}
var blobState: ShutterBlobView.BlobState
switch component.shutterState {
case .generic:
blobState = .generic
case .video:
case .video, .transition:
blobState = .video
case .stopRecording:
blobState = .stopVideo
@ -732,6 +745,8 @@ final class CaptureControlsComponent: Component {
}
transition.setBounds(view: shutterButtonView, bounds: CGRect(origin: .zero, size: shutterButtonFrame.size))
transition.setPosition(view: shutterButtonView, position: shutterButtonFrame.center)
transition.setScale(view: shutterButtonView, scale: isTransitioning ? 0.01 : 1.0)
transition.setAlpha(view: shutterButtonView, alpha: isTransitioning ? 0.0 : 1.0)
}
let guideSpacing: CGFloat = 9.0

View File

@ -2,6 +2,7 @@ import Foundation
import UIKit
import Display
import ComponentFlow
import MultilineTextComponent
extension CameraMode {
var title: String {
@ -161,3 +162,83 @@ final class ModeComponent: Component {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
/// Displays the camera swipe-hint caption (e.g. "Swipe up to zoom") and
/// animates transitions when the hint text changes: the old label scales/fades
/// out via a snapshot while the new one scales/fades in.
final class HintLabelComponent: Component {
    /// The hint string to display; rendered uppercased by the view.
    let text: String

    init(
        text: String
    ) {
        self.text = text
    }

    static func ==(lhs: HintLabelComponent, rhs: HintLabelComponent) -> Bool {
        if lhs.text != rhs.text {
            return false
        }
        return true
    }

    final class View: UIView {
        // Last component applied; used to detect text changes on update.
        private var component: HintLabelComponent?
        // Hosts the MultilineTextComponent; recreated whenever the text changes
        // so the new label can animate in independently of the outgoing one.
        private var componentView = ComponentView<Empty>()

        init() {
            super.init(frame: CGRect())
        }

        required init?(coder aDecoder: NSCoder) {
            preconditionFailure()
        }

        /// Lays out the hint label, animating out the previous text (if any,
        /// non-empty and different) and animating in the new one.
        /// Returns a size spanning the full available width at the text height.
        func update(component: HintLabelComponent, availableSize: CGSize, transition: Transition) -> CGSize {
            let previousComponent = self.component
            self.component = component

            // Text changed: snapshot the outgoing label, animate the snapshot
            // out (scale down + fade), then discard the old component view so a
            // fresh one is built below. An empty previous text animates nothing.
            if let previousText = previousComponent?.text, !previousText.isEmpty && previousText != component.text {
                if let componentView = self.componentView.view, let snapshotView = componentView.snapshotView(afterScreenUpdates: false) {
                    snapshotView.frame = componentView.frame
                    self.addSubview(snapshotView)
                    snapshotView.layer.animateScale(from: 1.0, to: 0.1, duration: 0.2, removeOnCompletion: false)
                    snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
                        snapshotView?.removeFromSuperview()
                    })
                }
                self.componentView.view?.removeFromSuperview()
                self.componentView = ComponentView<Empty>()
            }

            let textSize = self.componentView.update(
                transition: .immediate,
                component: AnyComponent(
                    MultilineTextComponent(
                        text: .plain(NSAttributedString(string: component.text.uppercased(), font: Font.with(size: 14.0, design: .camera, weight: .semibold), textColor: .white)),
                        horizontalAlignment: .center
                    )
                ),
                environment: {},
                containerSize: availableSize
            )
            if let view = self.componentView.view {
                if view.superview == nil {
                    // Freshly (re)created label: animate in with the mirror of
                    // the outgoing animation above.
                    view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
                    view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
                    self.addSubview(view)
                }
                // Center horizontally on pixel boundaries; top-aligned vertically.
                view.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - textSize.width) / 2.0), y: 0.0), size: textSize)
            }
            return CGSize(width: availableSize.width, height: textSize.height)
        }
    }

    func makeView() -> View {
        return View()
    }

    func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
        return view.update(component: self, availableSize: availableSize, transition: transition)
    }
}

View File

@ -3,6 +3,7 @@ import Metal
import MetalKit
import ComponentFlow
import Display
import MetalImageView
private final class PropertyAnimation<T: Interpolatable> {
let from: T
@ -63,6 +64,7 @@ private final class AnimatableProperty<T: Interpolatable> {
}
func tick(timestamp: Double) -> Bool {
guard let animation = self.animation, case let .curve(duration, curve) = animation.animation else {
return false
}
@ -73,8 +75,7 @@ private final class AnimatableProperty<T: Interpolatable> {
case .easeInOut:
t = listViewAnimationCurveEaseInOut(t)
case .spring:
t = listViewAnimationCurveEaseInOut(t)
//t = listViewAnimationCurveSystem(t)
t = lookupSpringValue(t)
case let .custom(x1, y1, x2, y2):
t = bezierPoint(CGFloat(x1), CGFloat(y1), CGFloat(x2), CGFloat(y2), t)
}
@ -88,7 +89,69 @@ private final class AnimatableProperty<T: Interpolatable> {
}
}
final class ShutterBlobView: MTKView, MTKViewDelegate {
/// Maps linear animation progress `t` in [0, 1] onto a pre-sampled spring
/// curve using linear interpolation between table entries.
///
/// The table holds 17 samples of the spring curve at 1/16 steps (the
/// commented-out generator that produced them via `listViewAnimationCurveSystem`
/// has been removed).
///
/// - Parameter t: Linear progress; values outside [0, 1] are clamped.
/// - Returns: The eased progress in [0, 1].
private func lookupSpringValue(_ t: CGFloat) -> CGFloat {
    let table: [(CGFloat, CGFloat)] = [
        (0.0, 0.0),
        (0.0625, 0.1123005598783493),
        (0.125, 0.31598418951034546),
        (0.1875, 0.5103585720062256),
        (0.25, 0.6650152802467346),
        (0.3125, 0.777747631072998),
        (0.375, 0.8557760119438171),
        (0.4375, 0.9079672694206238),
        (0.5, 0.942038357257843),
        (0.5625, 0.9638798832893372),
        (0.625, 0.9776856303215027),
        (0.6875, 0.9863143563270569),
        (0.75, 0.991658091545105),
        (0.8125, 0.9949421286582947),
        (0.875, 0.9969474077224731),
        (0.9375, 0.9981651306152344),
        (1.0, 1.0)
    ]
    // Clamp so early/late display-link ticks cannot fall outside the curve.
    let t = max(0.0, min(1.0, t))
    // Iterate segment pairs. Bound is `count - 1` (not `count - 2` as before):
    // the previous bound skipped the final segment (0.9375 ... 1.0), making
    // inputs in that range fall through to the fallback instead of interpolating.
    for i in 0 ..< table.count - 1 {
        let lhs = table[i]
        let rhs = table[i + 1]
        if t >= lhs.0 && t <= rhs.0 {
            let fraction = (t - lhs.0) / (rhs.0 - lhs.0)
            let value = lhs.1 + fraction * (rhs.1 - lhs.1)
            return value
        }
    }
    // Unreachable after clamping; kept as a safe fallback.
    return 1.0
}
/// Backing layer for `ShutterBlobView`: a `MetalImageLayer` that publishes
/// each finished render into the layer's `contents`.
private class ShutterBlobLayer: MetalImageLayer {
    override public init() {
        super.init()
        // Push every frame produced by the renderer straight into `contents`.
        self.renderer.imageUpdated = { [weak self] image in
            self?.contents = image
        }
    }

    override public init(layer: Any) {
        super.init()
        // Copy initializer (used by Core Animation for presentation copies):
        // mirror only the current contents of the source layer.
        // NOTE(review): the copy installs no `imageUpdated` handler — presumably
        // intentional for a static presentation copy; confirm.
        if let layer = layer as? ShutterBlobLayer {
            self.contents = layer.contents
        }
    }

    required public init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
final class ShutterBlobView: UIView {
enum BlobState {
case generic
case video
@ -147,7 +210,6 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
private let commandQueue: MTLCommandQueue
private let drawPassthroughPipelineState: MTLRenderPipelineState
private var viewportDimensions = CGSize(width: 1, height: 1)
private var displayLink: SharedDisplayLinkDriver.Link?
@ -162,6 +224,10 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
private(set) var state: BlobState = .generic
static override var layerClass: AnyClass {
return ShutterBlobLayer.self
}
public init?(test: Bool) {
let mainBundle = Bundle(for: ShutterBlobView.self)
@ -207,16 +273,12 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
self.drawPassthroughPipelineState = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor)
super.init(frame: CGRect(), device: device)
super.init(frame: CGRect())
(self.layer as! ShutterBlobLayer).renderer.device = device
self.isOpaque = false
self.backgroundColor = .clear
self.colorPixelFormat = .bgra8Unorm
self.framebufferOnly = true
self.isPaused = true
self.delegate = self
self.displayLink = SharedDisplayLinkDriver.shared.add { [weak self] in
self?.tick()
@ -232,10 +294,6 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
self.displayLink?.invalidate()
}
public func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
self.viewportDimensions = size
}
func updateState(_ state: BlobState, transition: Transition = .immediate) {
guard self.state != state else {
return
@ -297,40 +355,56 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
self.draw()
}
override public func draw(_ rect: CGRect) {
self.redraw(drawable: self.currentDrawable!)
override func layoutSubviews() {
super.layoutSubviews()
self.tick()
}
private func redraw(drawable: MTLDrawable) {
guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
private func getNextDrawable(layer: MetalImageLayer, drawableSize: CGSize) -> MetalImageLayer.Drawable? {
layer.renderer.drawableSize = drawableSize
return layer.renderer.nextDrawable()
}
func draw() {
guard let layer = self.layer as? MetalImageLayer else {
return
}
self.updateAnimations()
let drawableSize = CGSize(width: self.bounds.width * UIScreen.main.scale, height: self.bounds.height * UIScreen.main.scale)
guard let drawable = self.getNextDrawable(layer: layer, drawableSize: drawableSize) else {
return
}
let renderPassDescriptor = self.currentRenderPassDescriptor!
let renderPassDescriptor = MTLRenderPassDescriptor()
renderPassDescriptor.colorAttachments[0].texture = drawable.texture
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0.0)
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0)
guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
return
}
guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
return
}
let viewportDimensions = self.viewportDimensions
renderEncoder.setViewport(MTLViewport(originX: 0.0, originY: 0.0, width: viewportDimensions.width, height: viewportDimensions.height, znear: -1.0, zfar: 1.0))
renderEncoder.setViewport(MTLViewport(originX: 0.0, originY: 0.0, width: drawableSize.width, height: drawableSize.height, znear: -1.0, zfar: 1.0))
renderEncoder.setRenderPipelineState(self.drawPassthroughPipelineState)
let w = Float(1)
let h = Float(1)
var vertices: [Float] = [
w, -h,
-w, -h,
-w, h,
w, -h,
-w, h,
w, h
1, -1,
-1, -1,
-1, 1,
1, -1,
-1, 1,
1, 1
]
renderEncoder.setVertexBytes(&vertices, length: 4 * vertices.count, index: 0)
var resolution = simd_uint2(UInt32(viewportDimensions.width), UInt32(viewportDimensions.height))
var resolution = simd_uint2(UInt32(drawableSize.width), UInt32(drawableSize.height))
renderEncoder.setFragmentBytes(&resolution, length: MemoryLayout<simd_uint2>.size * 2, index: 0)
var primaryParameters = simd_float4(
@ -340,7 +414,7 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
Float(self.primaryCornerRadius.presentationValue)
)
renderEncoder.setFragmentBytes(&primaryParameters, length: MemoryLayout<simd_float4>.size, index: 1)
var secondaryParameters = simd_float3(
Float(self.secondarySize.presentationValue),
Float(self.secondaryOffset.presentationValue),
@ -350,17 +424,17 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
renderEncoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6, instanceCount: 1)
renderEncoder.endEncoding()
commandBuffer.present(drawable)
var storedDrawable: MetalImageLayer.Drawable? = drawable
commandBuffer.addCompletedHandler { _ in
DispatchQueue.main.async {
autoreleasepool {
storedDrawable?.present(completion: {})
storedDrawable = nil
}
}
}
commandBuffer.commit()
}
override func layoutSubviews() {
super.layoutSubviews()
self.tick()
}
func draw(in view: MTKView) {
}
}

View File

@ -4,6 +4,31 @@
using namespace metal;
// Converts one sRGB-encoded (non-linear) channel value in [0, 1] to linear
// light using the standard sRGB EOTF: a linear toe below 0.04045 and a
// power-law segment ((v + 0.055) / 1.055)^2.4 above it.
static inline
float sRGB_nonLinearNormToLinear(float normV)
{
    if (normV <= 0.04045f) {
        // Linear toe region.
        normV *= (1.0f / 12.92f);
    } else {
        const float a = 0.055f;
        const float gamma = 2.4f;
        normV = (normV + a) * (1.0f / (1.0f + a));
        normV = pow(normV, gamma);
    }
    return normV;
}
// Decodes an sRGB-encoded RGBA pixel to linear light, channel by channel.
// Alpha is passed through unchanged.
static inline
float4 sRGB_gamma_decode(const float4 rgba) {
    float4 tmp = rgba;
    tmp.r = sRGB_nonLinearNormToLinear(rgba.r);
    tmp.g = sRGB_nonLinearNormToLinear(rgba.g);
    tmp.b = sRGB_nonLinearNormToLinear(rgba.b);
    return tmp;
}
static inline float4 BT709_decode(const float Y, const float Cb, const float Cr) {
float Yn = Y;
@ -13,8 +38,8 @@ static inline float4 BT709_decode(const float Y, const float Cb, const float Cr)
float3 YCbCr = float3(Yn, Cbn, Crn);
const float3x3 kColorConversion709 = float3x3(float3(1.0, 1.0, 1.0),
float3(0.0f, -0.1873, 1.8556),
float3(1.5748, -0.4681, 0.0));
float3(0.0f, -0.18732, 1.8556),
float3(1.5748, -0.46812, 0.0));
float3 rgb = kColorConversion709 * YCbCr;
@ -23,7 +48,6 @@ static inline float4 BT709_decode(const float Y, const float Cb, const float Cr)
return float4(rgb.r, rgb.g, rgb.b, 1.0f);
}
fragment float4 bt709ToRGBFragmentShader(RasterizerData in [[stage_in]],
texture2d<half, access::sample> inYTexture [[texture(0)]],
texture2d<half, access::sample> inUVTexture [[texture(1)]]
@ -38,5 +62,7 @@ fragment float4 bt709ToRGBFragmentShader(RasterizerData in [[stage_in]],
float Cr = float(uvSamples[1]);
float4 pixel = BT709_decode(Y, Cb, Cr);
pixel = sRGB_gamma_decode(pixel);
pixel.rgb = pow(pixel.rgb, 1.0 / 2.2);
return pixel;
}

View File

@ -305,7 +305,7 @@ public final class MediaEditor {
let playerItem = AVPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)
player.automaticallyWaitsToMinimizeStalling = false
if let transitionImage {
let colors = mediaEditorGetGradientColors(from: transitionImage)
subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
@ -348,6 +348,7 @@ public final class MediaEditor {
if let asset {
let playerItem = AVPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)
player.automaticallyWaitsToMinimizeStalling = false
subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
subscriber.putCompletion()
}
@ -422,7 +423,7 @@ public final class MediaEditor {
self.player?.play()
}
})
self.player?.play()
player.playImmediately(atRate: 1.0)
self.volumeFade = self.player?.fadeVolume(from: 0.0, to: 1.0, duration: 0.4)
}
}

View File

@ -197,19 +197,10 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
tintColor = .white
}
self.disposables.add((self.frameSource.get()
|> take(1)
|> deliverOn(self.queue)).start(next: { [weak self] frameSource in
let processFrame: (Double, Int, (Int) -> AnimatedStickerFrame?) -> Void = { [weak self] duration, frameCount, takeFrame in
guard let strongSelf = self else {
completion(nil)
return
}
guard let frameSource, let duration = strongSelf.totalDuration, let frameCount = strongSelf.frameCount else {
completion(nil)
return
}
let relativeTime = currentTime - floor(currentTime / duration) * duration
var t = relativeTime / duration
t = max(0.0, t)
@ -233,12 +224,8 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
delta = max(1, frameIndex - previousFrameIndex)
}
var frame: AnimatedStickerFrame?
frameSource.syncWith { frameSource in
for i in 0 ..< delta {
frame = frameSource.takeFrame(draw: i == delta - 1)
}
}
let frame = takeFrame(delta)
if let frame {
var imagePixelBuffer: CVPixelBuffer?
if let pixelBuffer = strongSelf.imagePixelBuffer {
@ -273,7 +260,57 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
} else {
completion(strongSelf.image)
}
}))
}
if self.isVideo {
self.disposables.add((self.videoFrameSource.get()
|> take(1)
|> deliverOn(self.queue)).start(next: { [weak self] frameSource in
guard let strongSelf = self else {
completion(nil)
return
}
guard let frameSource, let duration = strongSelf.totalDuration, let frameCount = strongSelf.frameCount else {
completion(nil)
return
}
processFrame(duration, frameCount, { delta in
var frame: AnimatedStickerFrame?
frameSource.syncWith { frameSource in
for i in 0 ..< delta {
frame = frameSource.takeFrame(draw: i == delta - 1)
}
}
return frame
})
}))
} else {
self.disposables.add((self.frameSource.get()
|> take(1)
|> deliverOn(self.queue)).start(next: { [weak self] frameSource in
guard let strongSelf = self else {
completion(nil)
return
}
guard let frameSource, let duration = strongSelf.totalDuration, let frameCount = strongSelf.frameCount else {
completion(nil)
return
}
processFrame(duration, frameCount, { delta in
var frame: AnimatedStickerFrame?
frameSource.syncWith { frameSource in
for i in 0 ..< delta {
frame = frameSource.takeFrame(draw: i == delta - 1)
}
}
return frame
})
}))
}
} else {
var image: CIImage?
if let cachedImage = self.image {

View File

@ -221,7 +221,7 @@ final class MediaEditorRenderer: TextureConsumer {
if let onNextRender = self.onNextRender {
self.onNextRender = nil
Queue.mainQueue().async {
Queue.mainQueue().after(0.016) {
onNextRender()
}
}

View File

@ -29,10 +29,7 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD
private weak var player: AVPlayer?
private weak var playerItem: AVPlayerItem?
private var playerItemOutput: AVPlayerItemVideoOutput?
private var playerItemStatusObservation: NSKeyValueObservation?
private var playerItemObservation: NSKeyValueObservation?
private var displayLink: CADisplayLink?
private let device: MTLDevice?
@ -57,24 +54,12 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD
super.init()
self.playerItemObservation = player.observe(\.currentItem, options: [.initial, .new], changeHandler: { [weak self] (player, change) in
guard let strongSelf = self else {
return
}
strongSelf.updatePlayerItem(player.currentItem)
})
self.updatePlayerItem(player.currentItem)
}
deinit {
self.playerItemObservation?.invalidate()
self.playerItemStatusObservation?.invalidate()
}
func invalidate() {
self.playerItemOutput?.setDelegate(nil, queue: nil)
self.playerItemOutput = nil
self.playerItemObservation?.invalidate()
self.playerItemStatusObservation?.invalidate()
self.displayLink?.invalidate()
self.displayLink = nil
}
@ -88,18 +73,9 @@ final class VideoTextureSource: NSObject, TextureSource, AVPlayerItemOutputPullD
}
}
self.playerItemOutput = nil
self.playerItemStatusObservation?.invalidate()
self.playerItemStatusObservation = nil
self.playerItem = playerItem
self.playerItemStatusObservation = self.playerItem?.observe(\.status, options: [.initial, .new], changeHandler: { [weak self] item, change in
guard let strongSelf = self else {
return
}
if strongSelf.playerItem == item, item.status == .readyToPlay {
strongSelf.handleReadyToPlay()
}
})
self.handleReadyToPlay()
}
private func handleReadyToPlay() {
@ -273,7 +249,7 @@ final class VideoInputPass: DefaultRenderPass {
textureDescriptor.textureType = .type2D
textureDescriptor.width = outputWidth
textureDescriptor.height = outputHeight
textureDescriptor.pixelFormat = .bgra8Unorm
textureDescriptor.pixelFormat = self.pixelFormat
textureDescriptor.storageMode = .private
textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget]
if let texture = device.makeTexture(descriptor: textureDescriptor) {

View File

@ -252,22 +252,22 @@ final class MediaEditorScreenComponent: Component {
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
}
var delay: Double = 0.0
for button in buttons {
if let view = button.view {
view.layer.animatePosition(from: CGPoint(x: 0.0, y: 64.0), to: .zero, duration: 0.3, delay: delay, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: delay)
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2, delay: delay)
delay += 0.05
}
}
if let view = self.doneButton.view {
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
}
if case .camera = source {
var delay: Double = 0.0
for button in buttons {
if let view = button.view {
view.layer.animatePosition(from: CGPoint(x: 0.0, y: 64.0), to: .zero, duration: 0.3, delay: delay, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: delay)
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2, delay: delay)
delay += 0.05
}
}
if let view = self.saveButton.view {
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
view.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
@ -702,7 +702,7 @@ final class MediaEditorScreenComponent: Component {
case 86400:
timeoutValue = "24"
case 172800:
timeoutValue = "2d"
timeoutValue = "48"
default:
timeoutValue = "24"
}
@ -900,10 +900,10 @@ final class MediaEditorScreenComponent: Component {
saveButtonView.layer.shadowOpacity = 0.35
self.addSubview(saveButtonView)
}
let saveButtonAlpha = component.isSavingAvailable ? 1.0 : 0.3
saveButtonView.isUserInteractionEnabled = component.isSavingAvailable
transition.setPosition(view: saveButtonView, position: saveButtonFrame.center)
transition.setBounds(view: saveButtonView, bounds: CGRect(origin: .zero, size: saveButtonFrame.size))
transition.setScale(view: saveButtonView, scale: displayTopButtons ? 1.0 : 0.01)
@ -1640,8 +1640,10 @@ public final class MediaEditorScreen: ViewController {
self.mediaEditor?.onFirstDisplay = { [weak self] in
if let self, let transitionInView = self.transitionInView {
transitionInView.removeFromSuperview()
self.transitionInView = nil
transitionInView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak transitionInView] _ in
transitionInView?.removeFromSuperview()
})
}
}
}
@ -2059,6 +2061,9 @@ public final class MediaEditorScreen: ViewController {
if let self, let file {
let stickerEntity = DrawingStickerEntity(content: .file(file))
self.interaction?.insertEntity(stickerEntity)
self.controller?.isSavingAvailable = true
self.controller?.requestLayout(transition: .immediate)
}
}
self.controller?.present(controller, in: .current)
@ -2066,6 +2071,9 @@ public final class MediaEditorScreen: ViewController {
case .text:
let textEntity = DrawingTextEntity(text: NSAttributedString(), style: .regular, animation: .none, font: .sanFrancisco, alignment: .center, fontSize: 1.0, color: DrawingColor(color: .white))
self.interaction?.insertEntity(textEntity)
self.controller?.isSavingAvailable = true
self.controller?.requestLayout(transition: .immediate)
return
case .drawing:
self.interaction?.deactivate()
@ -2496,7 +2504,7 @@ public final class MediaEditorScreen: ViewController {
updateTimeout(86400, false)
})))
items.append(.action(ContextMenuActionItem(text: "2 Days", icon: { theme in
items.append(.action(ContextMenuActionItem(text: "48 Hours", icon: { theme in
return currentValue == 86400 * 2 ? generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Check"), color: theme.contextMenu.primaryColor) : nil
}, action: { _, a in
a(.default)

View File

@ -132,7 +132,7 @@ final class VideoScrubberComponent: Component {
private var isPanningPositionHandle = false
private var displayLink: SharedDisplayLinkDriver.Link?
private var positionAnimation: (start: Double, from: Double, to: Double)?
private var positionAnimation: (start: Double, from: Double, to: Double, ended: Bool)?
override init(frame: CGRect) {
super.init(frame: frame)
@ -343,12 +343,12 @@ final class VideoScrubberComponent: Component {
let timestamp = CACurrentMediaTime()
let updatedPosition: Double
if let (start, from, to) = self.positionAnimation {
if let (start, from, to, _) = self.positionAnimation {
let duration = to - from
let fraction = duration > 0.0 ? (timestamp - start) / duration : 0.0
updatedPosition = max(component.startPosition, min(component.endPosition, from + (to - from) * fraction))
if fraction >= 1.0 {
self.positionAnimation = (timestamp, component.startPosition, component.endPosition)
self.positionAnimation = (start, from, to, true)
}
} else {
let advance = component.isPlaying ? timestamp - component.generationTimestamp : 0.0
@ -419,8 +419,12 @@ final class VideoScrubberComponent: Component {
self.displayLink?.isPaused = true
transition.setFrame(view: self.cursorView, frame: cursorFrame(size: scrubberSize, position: component.position, duration: component.duration))
} else {
if self.positionAnimation == nil {
self.positionAnimation = (CACurrentMediaTime(), component.position, component.endPosition)
if let (_, _, end, ended) = self.positionAnimation {
if ended, component.position >= component.startPosition && component.position < end - 1.0 {
self.positionAnimation = (CACurrentMediaTime(), component.position, component.endPosition, false)
}
} else {
self.positionAnimation = (CACurrentMediaTime(), component.position, component.endPosition, false)
}
self.displayLink?.isPaused = false
self.updateCursorPosition()

View File

@ -113,7 +113,7 @@ public enum NotificationExceptionMode : Equatable {
case .default:
break
default:
values[peerId] = NotificationExceptionWrapper(settings: TelegramPeerNotificationSettings(muteState: .default, messageSound: sound, displayPreviews: .default), peer: peer, date: Date().timeIntervalSince1970)
values[peerId] = NotificationExceptionWrapper(settings: TelegramPeerNotificationSettings(muteState: .default, messageSound: sound, displayPreviews: .default, storiesMuted: nil), peer: peer, date: Date().timeIntervalSince1970)
}
}
return values
@ -149,7 +149,7 @@ public enum NotificationExceptionMode : Equatable {
case .default:
break
default:
values[peerId] = NotificationExceptionWrapper(settings: TelegramPeerNotificationSettings(muteState: muteState, messageSound: .default, displayPreviews: .default), peer: peer, date: Date().timeIntervalSince1970)
values[peerId] = NotificationExceptionWrapper(settings: TelegramPeerNotificationSettings(muteState: muteState, messageSound: .default, displayPreviews: .default, storiesMuted: nil), peer: peer, date: Date().timeIntervalSince1970)
}
}
return values
@ -201,7 +201,7 @@ public enum NotificationExceptionMode : Equatable {
case .default:
break
default:
values[peerId] = NotificationExceptionWrapper(settings: TelegramPeerNotificationSettings(muteState: .unmuted, messageSound: .default, displayPreviews: displayPreviews), peer: peer, date: Date().timeIntervalSince1970)
values[peerId] = NotificationExceptionWrapper(settings: TelegramPeerNotificationSettings(muteState: .unmuted, messageSound: .default, displayPreviews: displayPreviews, storiesMuted: nil), peer: peer, date: Date().timeIntervalSince1970)
}
}
return values