Combo update
@@ -164,9 +164,14 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
         guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
             return
         }
+        var orientation = CGImagePropertyOrientation.up
+        if #available(iOS 11.0, *) {
+            if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber {
+                orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? .up
+            }
+        }
         if let data = serializePixelBuffer(buffer: pixelBuffer) {
-            self.screencastBufferClientContext?.setCurrentFrame(data: data)
+            self.screencastBufferClientContext?.setCurrentFrame(data: data, orientation: orientation)
         }

         //self.videoCapturer?.injectSampleBuffer(sampleBuffer)
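Note: the orientation lookup added above uses ReplayKit's per-sample attachment. As a minimal standalone sketch (illustrative only — `serializePixelBuffer` and `setCurrentFrame` are internal to this codebase), the same read can be isolated like this:

    import CoreMedia
    import ImageIO
    import ReplayKit

    // Reads the ReplayKit orientation attachment from a screen-capture sample buffer,
    // defaulting to .up when the attachment is absent or unrecognised.
    @available(iOS 11.0, *)
    func sampleBufferOrientation(_ sampleBuffer: CMSampleBuffer) -> CGImagePropertyOrientation {
        guard let attachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber else {
            return .up
        }
        return CGImagePropertyOrientation(rawValue: attachment.uint32Value) ?? .up
    }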
@@ -72,4 +72,44 @@
         <key>UIPrerenderedIcon</key>
         <true/>
     </dict>
+    <key>New1</key>
+    <dict>
+        <key>CFBundleIconFiles</key>
+        <array>
+            <string>New1_20x20</string>
+            <string>New1_29x29</string>
+            <string>New1_40x40</string>
+            <string>New1_58x58</string>
+            <string>New1_60x60</string>
+            <string>New1_76x76</string>
+            <string>New1_80x80</string>
+            <string>New1_87x87</string>
+            <string>New1_120x120</string>
+            <string>New1_152x152</string>
+            <string>New1_167x167</string>
+            <string>New1_180x180</string>
+        </array>
+        <key>UIPrerenderedIcon</key>
+        <true/>
+    </dict>
+    <key>New2</key>
+    <dict>
+        <key>CFBundleIconFiles</key>
+        <array>
+            <string>New2_20x20</string>
+            <string>New2_29x29</string>
+            <string>New2_40x40</string>
+            <string>New2_58x58</string>
+            <string>New2_60x60</string>
+            <string>New2_76x76</string>
+            <string>New2_80x80</string>
+            <string>New2_87x87</string>
+            <string>New2_120x120</string>
+            <string>New2_152x152</string>
+            <string>New2_167x167</string>
+            <string>New2_180x180</string>
+        </array>
+        <key>UIPrerenderedIcon</key>
+        <true/>
+    </dict>
 </dict>
@@ -66,4 +66,44 @@
         <key>UIPrerenderedIcon</key>
         <true/>
     </dict>
+    <key>New1</key>
+    <dict>
+        <key>CFBundleIconFiles</key>
+        <array>
+            <string>New1_20x20</string>
+            <string>New1_29x29</string>
+            <string>New1_40x40</string>
+            <string>New1_58x58</string>
+            <string>New1_60x60</string>
+            <string>New1_76x76</string>
+            <string>New1_80x80</string>
+            <string>New1_87x87</string>
+            <string>New1_120x120</string>
+            <string>New1_152x152</string>
+            <string>New1_167x167</string>
+            <string>New1_180x180</string>
+        </array>
+        <key>UIPrerenderedIcon</key>
+        <true/>
+    </dict>
+    <key>New2</key>
+    <dict>
+        <key>CFBundleIconFiles</key>
+        <array>
+            <string>New2_20x20</string>
+            <string>New2_29x29</string>
+            <string>New2_40x40</string>
+            <string>New2_58x58</string>
+            <string>New2_60x60</string>
+            <string>New2_76x76</string>
+            <string>New2_80x80</string>
+            <string>New2_87x87</string>
+            <string>New2_120x120</string>
+            <string>New2_152x152</string>
+            <string>New2_167x167</string>
+            <string>New2_180x180</string>
+        </array>
+        <key>UIPrerenderedIcon</key>
+        <true/>
+    </dict>
 </dict>
BIN  Telegram/Telegram-iOS/New1_120x120.png  (new file, 8.5 KiB)
BIN  Telegram/Telegram-iOS/New1_152x152.png  (new file, 12 KiB)
BIN  Telegram/Telegram-iOS/New1_167x167.png  (new file, 14 KiB)
BIN  Telegram/Telegram-iOS/New1_180x180.png  (new file, 16 KiB)
BIN  Telegram/Telegram-iOS/New1_20x20.png    (new file, 889 B)
BIN  Telegram/Telegram-iOS/New1_29x29.png    (new file, 1.4 KiB)
BIN  Telegram/Telegram-iOS/New1_40x40.png    (new file, 2.0 KiB)
BIN  Telegram/Telegram-iOS/New1_58x58.png    (new file, 3.2 KiB)
BIN  Telegram/Telegram-iOS/New1_60x60.png    (new file, 3.3 KiB)
BIN  Telegram/Telegram-iOS/New1_76x76.png    (new file, 4.6 KiB)
BIN  Telegram/Telegram-iOS/New1_80x80.png    (new file, 4.9 KiB)
BIN  Telegram/Telegram-iOS/New1_87x87.png    (new file, 5.5 KiB)
BIN  Telegram/Telegram-iOS/New2_120x120.png  (new file, 8.9 KiB)
BIN  Telegram/Telegram-iOS/New2_152x152.png  (new file, 13 KiB)
BIN  Telegram/Telegram-iOS/New2_167x167.png  (new file, 15 KiB)
BIN  Telegram/Telegram-iOS/New2_180x180.png  (new file, 17 KiB)
BIN  Telegram/Telegram-iOS/New2_20x20.png    (new file, 917 B)
BIN  Telegram/Telegram-iOS/New2_29x29.png    (new file, 1.4 KiB)
BIN  Telegram/Telegram-iOS/New2_40x40.png    (new file, 2.0 KiB)
BIN  Telegram/Telegram-iOS/New2_58x58.png    (new file, 3.3 KiB)
BIN  Telegram/Telegram-iOS/New2_60x60.png    (new file, 3.5 KiB)
BIN  Telegram/Telegram-iOS/New2_76x76.png    (new file, 4.7 KiB)
BIN  Telegram/Telegram-iOS/New2_80x80.png    (new file, 5.1 KiB)
BIN  Telegram/Telegram-iOS/New2_87x87.png    (new file, 5.8 KiB)
@@ -4428,6 +4428,8 @@ Sorry for the inconvenience.";
 "Appearance.AppIconClassicX" = "Classic X";
 "Appearance.AppIconFilled" = "Filled";
 "Appearance.AppIconFilledX" = "Filled X";
+"Appearance.AppIconNew1" = "New 1";
+"Appearance.AppIconNew2" = "New 2";

 "Appearance.ThemeCarouselClassic" = "Classic";
 "Appearance.ThemeCarouselDay" = "Day";
@@ -6479,3 +6481,15 @@ Sorry for the inconvenience.";
 "VoiceChat.UnmuteSuggestion" = "You are on mute. Tap here to speak.";

 "VoiceChat.ContextAudio" = "Audio";
+
+"VoiceChat.VideoPaused" = "Video is paused";
+"VoiceChat.YouAreSharingScreen" = "You are sharing your screen";
+"VoiceChat.StopScreenSharingShort" = "Stop Sharing";
+
+"VoiceChat.OpenGroup" = "Open Group";
+
+"VoiceChat.NoiseSuppression" = "Noise Suppression";
+"VoiceChat.NoiseSuppressionEnabled" = "Enabled";
+"VoiceChat.NoiseSuppressionDisabled" = "Disabled";
+
+"VoiceChat.Unpin" = "Unpin";
@@ -8,4 +8,5 @@ exports_files([
     "WatchApp.mobileprovision",
     "WatchExtension.mobileprovision",
     "Widget.mobileprovision",
+    "BroadcastUpload.mobileprovision",
 ])
@@ -321,57 +321,51 @@ public struct PresentationGroupCallRequestedVideo {
         case full
     }

+    public struct SsrcGroup {
+        public var semantics: String
+        public var ssrcs: [UInt32]
+    }
+
     public var audioSsrc: UInt32
     public var endpointId: String
-    public var videoInformation: String
-    public var quality: Quality
+    public var ssrcGroups: [SsrcGroup]
+    public var minQuality: Quality
+    public var maxQuality: Quality
 }

 public extension GroupCallParticipantsContext.Participant {
     var videoEndpointId: String? {
-        if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
-            if let endpoint = json["endpoint"] as? String {
-                return endpoint
-            }
-        }
-        return nil
+        return self.videoDescription?.endpointId
     }

     var presentationEndpointId: String? {
-        if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
-            if let endpoint = json["endpoint"] as? String {
-                return endpoint
-            }
-        }
-        return nil
+        return self.presentationDescription?.endpointId
     }
 }

 public extension GroupCallParticipantsContext.Participant {
-    func requestedVideoChannel(quality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
+    func requestedVideoChannel(minQuality: PresentationGroupCallRequestedVideo.Quality, maxQuality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
         guard let audioSsrc = self.ssrc else {
             return nil
         }
-        guard let videoInformation = self.videoJsonDescription else {
+        guard let videoDescription = self.videoDescription else {
             return nil
         }
-        guard let videoEndpointId = self.videoEndpointId else {
-            return nil
-        }
-        return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: videoEndpointId, videoInformation: videoInformation, quality: quality)
+        return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: videoDescription.endpointId, ssrcGroups: videoDescription.ssrcGroups.map { group in
+            PresentationGroupCallRequestedVideo.SsrcGroup(semantics: group.semantics, ssrcs: group.ssrcs)
+        }, minQuality: minQuality, maxQuality: maxQuality)
     }

-    func requestedPresentationVideoChannel(quality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
+    func requestedPresentationVideoChannel(minQuality: PresentationGroupCallRequestedVideo.Quality, maxQuality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
         guard let audioSsrc = self.ssrc else {
             return nil
         }
-        guard let videoInformation = self.presentationJsonDescription else {
+        guard let presentationDescription = self.presentationDescription else {
             return nil
         }
-        guard let presentationEndpointId = self.presentationEndpointId else {
-            return nil
-        }
-        return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: presentationEndpointId, videoInformation: videoInformation, quality: quality)
+        return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: presentationDescription.endpointId, ssrcGroups: presentationDescription.ssrcGroups.map { group in
+            PresentationGroupCallRequestedVideo.SsrcGroup(semantics: group.semantics, ssrcs: group.ssrcs)
+        }, minQuality: minQuality, maxQuality: maxQuality)
     }
 }

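Note: the request now carries structured ssrc groups and a min/max quality range instead of a raw JSON string and a single quality. A simplified, hypothetical stand-in (not the repository's types; the Quality case names other than `full` are assumptions) of the resulting shape:

    // Pared-down model mirroring the fields introduced in the hunk above.
    struct RequestedVideo {
        enum Quality { case thumbnail, medium, full } // only `full` is confirmed by the diff
        struct SsrcGroup {
            var semantics: String
            var ssrcs: [UInt32]
        }
        var audioSsrc: UInt32
        var endpointId: String
        var ssrcGroups: [SsrcGroup]
        var minQuality: Quality
        var maxQuality: Quality
    }

    // The removed JSON parsing of "endpoint" is replaced by reading a structured
    // description; a caller would assemble the request roughly like this.
    func makeRequest(audioSsrc: UInt32, endpointId: String, groups: [(semantics: String, ssrcs: [UInt32])]) -> RequestedVideo {
        return RequestedVideo(
            audioSsrc: audioSsrc,
            endpointId: endpointId,
            ssrcGroups: groups.map { RequestedVideo.SsrcGroup(semantics: $0.semantics, ssrcs: $0.ssrcs) },
            minQuality: .thumbnail,
            maxQuality: .full
        )
    }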
@@ -438,7 +432,7 @@ public protocol PresentationGroupCall: class {
     var inviteLinks: Signal<GroupCallInviteLinks?, NoError> { get }

     func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void)
-    func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void)
+    func makeOutgoingVideoView(requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void)

     func loadMoreMembers(token: String)
 }
@@ -192,6 +192,7 @@ final class BlobView: UIView {

     var level: CGFloat = 0 {
         didSet {
+            if abs(self.level - oldValue) > 0.01 {
             CATransaction.begin()
             CATransaction.setDisableActions(true)
             let lv = self.minScale + (self.maxScale - self.minScale) * self.level
@@ -199,6 +200,7 @@ final class BlobView: UIView {
             CATransaction.commit()
         }
     }
+    }

     private var speedLevel: CGFloat = 0
     private var scaleLevel: CGFloat = 0
@@ -121,7 +121,7 @@ public func peerAvatarCompleteImage(account: Account, peer: Peer, size: CGSize,
         context.clear(CGRect(origin: CGPoint(), size: size))
         drawPeerAvatarLetters(context: context, size: CGSize(width: size.width, height: size.height), round: round, font: font, letters: displayLetters, peerId: peerId)
         if blurred {
-            context.setFillColor(UIColor(rgb: 0x000000, alpha: 0.45).cgColor)
+            context.setFillColor(UIColor(rgb: 0x000000, alpha: 0.5).cgColor)
             context.fill(CGRect(origin: CGPoint(), size: size))
         }
     })?.withRenderingMode(.alwaysOriginal)
@@ -100,7 +100,7 @@ class ChatPlayingActivityContentNode: ChatTitleActivityContentNode {
         self.addSubnode(self.indicatorNode)
     }

-    override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
+    override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
         let size = self.textNode.updateLayout(constrainedSize)
         let indicatorSize = CGSize(width: 24.0, height: 16.0)
         let originX: CGFloat
@@ -72,7 +72,7 @@ class ChatRecordingVideoActivityContentNode: ChatTitleActivityContentNode {
         self.addSubnode(self.indicatorNode)
     }

-    override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
+    override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
         let size = self.textNode.updateLayout(constrainedSize)
         let indicatorSize = CGSize(width: 24.0, height: 16.0)
         let originX: CGFloat
@@ -90,7 +90,7 @@ class ChatRecordingVoiceActivityContentNode: ChatTitleActivityContentNode {
         self.addSubnode(self.indicatorNode)
     }

-    override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
+    override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
         let size = self.textNode.updateLayout(constrainedSize)
         let indicatorSize = CGSize(width: 24.0, height: 16.0)
         let originX: CGFloat
@@ -99,7 +99,7 @@ class ChatRecordingVoiceActivityContentNode: ChatTitleActivityContentNode {
         } else {
             originX = indicatorSize.width
         }
-        self.textNode.frame = CGRect(origin: CGPoint(x: originX, y: 0.0), size: size)
+        self.textNode.frame = CGRect(origin: CGPoint(x: originX, y: offset), size: size)
         self.indicatorNode.frame = CGRect(origin: CGPoint(x: self.textNode.frame.minX - indicatorSize.width, y: 0.0), size: indicatorSize)
         return CGSize(width: size.width + indicatorSize.width, height: size.height)
     }
@@ -122,13 +122,13 @@ public class ChatTitleActivityContentNode: ASDisplayNode {
         }
     }

-    public func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
+    public func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
         let size = self.textNode.updateLayout(constrainedSize)
         self.textNode.bounds = CGRect(origin: CGPoint(), size: size)
         if case .center = alignment {
-            self.textNode.position = CGPoint(x: 0.0, y: size.height / 2.0)
+            self.textNode.position = CGPoint(x: 0.0, y: size.height / 2.0 + offset)
         } else {
-            self.textNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
+            self.textNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0 + offset)
         }
         return size
     }
@@ -123,7 +123,7 @@ public class ChatTitleActivityNode: ASDisplayNode {
         }
     }

-    public func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
-        return CGSize(width: 0.0, height: self.contentNode?.updateLayout(constrainedSize, alignment: alignment).height ?? 0.0)
+    public func updateLayout(_ constrainedSize: CGSize, offset: CGFloat = 0.0, alignment: NSTextAlignment) -> CGSize {
+        return CGSize(width: 0.0, height: self.contentNode?.updateLayout(constrainedSize, offset: offset, alignment: alignment).height ?? 0.0)
     }
 }
@@ -108,7 +108,7 @@ class ChatTypingActivityContentNode: ChatTitleActivityContentNode {
         self.addSubnode(self.indicatorNode)
     }

-    override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
+    override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
         let indicatorSize = CGSize(width: 24.0, height: 16.0)
         let size = self.textNode.updateLayout(CGSize(width: constrainedSize.width - indicatorSize.width, height: constrainedSize.height))
         var originX: CGFloat
@@ -80,7 +80,7 @@ class ChatUploadingActivityContentNode: ChatTitleActivityContentNode {
         self.addSubnode(self.indicatorNode)
     }

-    override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
+    override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
         let size = self.textNode.updateLayout(constrainedSize)
         let indicatorSize = CGSize(width: 24.0, height: 16.0)
         let originX: CGFloat
@@ -497,7 +497,8 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi

         if let takenViewInfo = takenViewInfo, let parentSupernode = takenViewInfo.contentContainingNode.supernode {
             self.contentContainerNode.contentNode = .extracted(node: takenViewInfo.contentContainingNode, keepInPlace: source.keepInPlace)
-            if source.keepInPlace {
+            if source.keepInPlace || takenViewInfo.maskView != nil {
+                self.clippingNode.view.mask = takenViewInfo.maskView
                 self.clippingNode.addSubnode(self.contentContainerNode)
             } else {
                 self.scrollNode.addSubnode(self.contentContainerNode)
@@ -687,7 +688,9 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi

             self.actionsContainerNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: localSourceFrame.center.x - self.actionsContainerNode.position.x, y: localSourceFrame.center.y - self.actionsContainerNode.position.y + actionsOffset)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: actionsDuration, initialVelocity: 0.0, damping: springDamping, additive: true)
             let contentContainerOffset = CGPoint(x: localContentSourceFrame.center.x - self.contentContainerNode.frame.center.x - contentParentNode.contentRect.minX, y: localContentSourceFrame.center.y - self.contentContainerNode.frame.center.y - contentParentNode.contentRect.minY)
-            self.contentContainerNode.layer.animateSpring(from: NSValue(cgPoint: contentContainerOffset), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: contentDuration, initialVelocity: 0.0, damping: springDamping, additive: true)
+            self.contentContainerNode.layer.animateSpring(from: NSValue(cgPoint: contentContainerOffset), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: contentDuration, initialVelocity: 0.0, damping: springDamping, additive: true, completion: { [weak self] _ in
+                self?.clippingNode.view.mask = nil
+            })
             contentParentNode.applyAbsoluteOffsetSpring?(-contentContainerOffset.y, springDuration, springDamping)
         }

@@ -849,6 +852,7 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
             updatedContentAreaInScreenSpace.origin.x = 0.0
             updatedContentAreaInScreenSpace.size.width = self.bounds.width

+            self.clippingNode.view.mask = putBackInfo.maskView
             self.clippingNode.layer.animateFrame(from: self.clippingNode.frame, to: updatedContentAreaInScreenSpace, duration: transitionDuration * animationDurationFactor, timingFunction: transitionCurve.timingFunction, removeOnCompletion: false)
             self.clippingNode.layer.animateBoundsOriginYAdditive(from: 0.0, to: updatedContentAreaInScreenSpace.minY, duration: transitionDuration * animationDurationFactor, timingFunction: transitionCurve.timingFunction, removeOnCompletion: false)
         }
@@ -1726,18 +1730,22 @@ public protocol ContextReferenceContentSource: class {
 public final class ContextControllerTakeViewInfo {
     public let contentContainingNode: ContextExtractedContentContainingNode
     public let contentAreaInScreenSpace: CGRect
+    public let maskView: UIView?

-    public init(contentContainingNode: ContextExtractedContentContainingNode, contentAreaInScreenSpace: CGRect) {
+    public init(contentContainingNode: ContextExtractedContentContainingNode, contentAreaInScreenSpace: CGRect, maskView: UIView? = nil) {
         self.contentContainingNode = contentContainingNode
         self.contentAreaInScreenSpace = contentAreaInScreenSpace
+        self.maskView = maskView
     }
 }

 public final class ContextControllerPutBackViewInfo {
     public let contentAreaInScreenSpace: CGRect
+    public let maskView: UIView?

-    public init(contentAreaInScreenSpace: CGRect) {
+    public init(contentAreaInScreenSpace: CGRect, maskView: UIView? = nil) {
         self.contentAreaInScreenSpace = contentAreaInScreenSpace
+        self.maskView = maskView
     }
 }

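Note: the optional mask view is applied while the extracted content animates and dropped once the spring completes. A minimal standalone sketch of the same idea in plain UIKit (assumed shape, not the ContextController implementation):

    import UIKit

    // Masks a container only for the duration of its entrance animation, mirroring how
    // clippingNode.view.mask is set from takenViewInfo.maskView and cleared on completion.
    func animateIn(container: UIView, mask: UIView?, from offset: CGPoint) {
        container.mask = mask
        container.transform = CGAffineTransform(translationX: offset.x, y: offset.y)
        UIView.animate(withDuration: 0.4, delay: 0.0, usingSpringWithDamping: 0.8, initialSpringVelocity: 0.0, options: [], animations: {
            container.transform = .identity
        }, completion: { _ in
            // Remove the mask so later layout and scrolling are not clipped.
            container.mask = nil
        })
    }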
@@ -250,11 +250,12 @@ public final class DeviceAccess {
             }
         }

-    public static func authorizeAccess(to subject: DeviceAccessSubject, registerForNotifications: ((@escaping (Bool) -> Void) -> Void)? = nil, requestSiriAuthorization: ((@escaping (Bool) -> Void) -> Void)? = nil, locationManager: LocationManager? = nil, presentationData: PresentationData? = nil, present: @escaping (ViewController, Any?) -> Void = { _, _ in }, openSettings: @escaping () -> Void = { }, displayNotificationFromBackground: @escaping (String) -> Void = { _ in }, _ completion: @escaping (Bool) -> Void = { _ in }) {
+    public static func authorizeAccess(to subject: DeviceAccessSubject, onlyCheck: Bool = false, registerForNotifications: ((@escaping (Bool) -> Void) -> Void)? = nil, requestSiriAuthorization: ((@escaping (Bool) -> Void) -> Void)? = nil, locationManager: LocationManager? = nil, presentationData: PresentationData? = nil, present: @escaping (ViewController, Any?) -> Void = { _, _ in }, openSettings: @escaping () -> Void = { }, displayNotificationFromBackground: @escaping (String) -> Void = { _ in }, _ completion: @escaping (Bool) -> Void = { _ in }) {
         switch subject {
             case let .camera(cameraSubject):
                 let status = PGCamera.cameraAuthorizationStatus()
                 if status == PGCameraAuthorizationStatusNotDetermined {
+                    if !onlyCheck {
                     AVCaptureDevice.requestAccess(for: AVMediaType.video) { response in
                         Queue.mainQueue().async {
                             completion(response)
@@ -272,6 +273,9 @@ public final class DeviceAccess {
                             }
                         }
                     }
+                    } else {
+                        completion(true)
+                    }
                 } else if status == PGCameraAuthorizationStatusRestricted || status == PGCameraAuthorizationStatusDenied, let presentationData = presentationData {
                     let text: String
                     if status == PGCameraAuthorizationStatusRestricted {
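Note: the new `onlyCheck` flag lets callers query the camera state without triggering the system permission prompt. A standalone sketch of that behaviour (illustrative only, not the DeviceAccess implementation):

    import AVFoundation

    // Reports camera availability; only shows the system prompt when onlyCheck is false.
    func cameraAccess(onlyCheck: Bool, completion: @escaping (Bool) -> Void) {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            completion(true)
        case .notDetermined:
            if onlyCheck {
                // Not yet asked: report success without prompting, as the diff does.
                completion(true)
            } else {
                AVCaptureDevice.requestAccess(for: .video) { granted in
                    DispatchQueue.main.async {
                        completion(granted)
                    }
                }
            }
        default:
            // Restricted or denied.
            completion(false)
        }
    }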
@@ -333,8 +333,8 @@ public extension ContainedViewLayoutTransition {
         }
     }

-    func updatePosition(layer: CALayer, position: CGPoint, completion: ((Bool) -> Void)? = nil) {
-        if layer.position.equalTo(position) {
+    func updatePosition(layer: CALayer, position: CGPoint, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
+        if layer.position.equalTo(position) && !force {
             completion?(true)
         } else {
             switch self {
@@ -546,6 +546,15 @@ public extension ContainedViewLayoutTransition {
         }
     }

+    func animateContentsRectPositionAdditive(layer: CALayer, offset: CGPoint, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
+        switch self {
+        case .immediate:
+            completion?(true)
+        case let .animated(duration, curve):
+            layer.animate(from: NSValue(cgPoint: offset), to: NSValue(cgPoint: CGPoint()), keyPath: "contentsRect.origin", timingFunction: curve.timingFunction, duration: duration, delay: 0.0, mediaTimingFunction: curve.mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: true, completion: completion)
+        }
+    }
+
     func updateFrame(view: UIView, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
         if frame.origin.x.isNaN {
             return
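Note: the new helper plays an additive animation of the layer's `contentsRect` origin from an offset back to zero, on top of the already-committed model value. A rough plain-Core-Animation equivalent (a sketch; it assumes the "contentsRect.origin" key path behaves like other CGRect component key paths, as the repository's `animate` helper also assumes):

    import QuartzCore

    func animateContentsRectOffsetAdditive(layer: CALayer, offset: CGPoint, duration: CFTimeInterval) {
        // The model value is already final; the additive animation replays the offset and decays to zero.
        let animation = CABasicAnimation(keyPath: "contentsRect.origin")
        animation.fromValue = NSValue(cgPoint: offset)
        animation.toValue = NSValue(cgPoint: .zero)
        animation.duration = duration
        animation.isAdditive = true
        layer.add(animation, forKey: "contentsRectOffset")
    }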
@@ -1284,9 +1293,6 @@ public struct CombinedTransition {
             completeKey(.positionY, result)
         })

-        //self.horizontal.animateHorizontalOffsetAdditive(layer: layer, offset: (fromFrame.width - toFrame.width) / 4.0)
-        //self.vertical.animateOffsetAdditive(layer: layer, offset: (fromFrame.height - toFrame.height) / 2.0)
-
         self.horizontal.animateWidthAdditive(layer: layer, value: fromFrame.width - toFrame.width, completion: { result in
             completeKey(.sizeWidth, result)
         })
@@ -1301,6 +1307,12 @@ public struct CombinedTransition {
         self.animateFrame(layer: layer, from: fromFrame, completion: completion)
     }

+    public func updateFrame(node: ASDisplayNode, frame: CGRect, completion: ((Bool) -> Void)? = nil) {
+        let fromFrame = node.frame
+        node.frame = frame
+        self.animateFrame(layer: node.layer, from: fromFrame, completion: completion)
+    }
+
     public func updatePosition(layer: CALayer, position: CGPoint, completion: ((Bool) -> Void)? = nil) {
         let fromPosition = layer.position
         layer.position = position
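Note: `updateFrame(node:frame:)` follows the usual Core Animation pattern of committing the final model value first and then animating from the previous one. A plain-CALayer sketch of that pattern, independent of AsyncDisplayKit (illustrative, not the repository helper):

    import QuartzCore

    func updateFrameAnimated(layer: CALayer, frame: CGRect, duration: CFTimeInterval) {
        let fromBounds = layer.bounds
        let fromPosition = layer.position

        // Commit the final model values before adding the animations.
        layer.frame = frame

        let bounds = CABasicAnimation(keyPath: "bounds")
        bounds.fromValue = NSValue(cgRect: fromBounds)
        bounds.duration = duration

        let position = CABasicAnimation(keyPath: "position")
        position.fromValue = NSValue(cgPoint: fromPosition)
        position.duration = duration

        layer.add(bounds, forKey: "bounds")
        layer.add(position, forKey: "position")
    }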
@@ -58,7 +58,13 @@ public struct Font {

     public static func with(size: CGFloat, design: Design = .regular, weight: Weight = .regular, traits: Traits = []) -> UIFont {
         if #available(iOS 13.0, *) {
-            let descriptor = UIFont.systemFont(ofSize: size).fontDescriptor
+            let descriptor: UIFontDescriptor
+            if #available(iOS 14.0, *) {
+                descriptor = UIFont.systemFont(ofSize: size).fontDescriptor
+            } else {
+                descriptor = UIFont.systemFont(ofSize: size, weight: weight.weight).fontDescriptor
+            }
+
             var symbolicTraits = descriptor.symbolicTraits
             if traits.contains(.italic) {
                 symbolicTraits.insert(.traitItalic)
@@ -83,11 +89,13 @@ public struct Font {
             default:
                 updatedDescriptor = updatedDescriptor?.withDesign(.default)
             }
+            if #available(iOS 14.0, *) {
             if weight != .regular {
                 updatedDescriptor = updatedDescriptor?.addingAttributes([
                     UIFontDescriptor.AttributeName.traits: [UIFontDescriptor.TraitKey.weight: weight.weight]
                 ])
             }
+            }

             if let updatedDescriptor = updatedDescriptor {
                 return UIFont(descriptor: updatedDescriptor, size: size)
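Note: the two hunks above split the weight handling by OS version — on iOS 14+ the weight is applied as a descriptor trait, while on iOS 13 it is baked into the base system font before the design is applied. A compact standalone sketch of the same approach (assumed simplification of the Font.with helper, with a fixed rounded design for brevity):

    import UIKit

    @available(iOS 13.0, *)
    func roundedFont(size: CGFloat, weight: UIFont.Weight) -> UIFont {
        let base: UIFontDescriptor
        if #available(iOS 14.0, *) {
            base = UIFont.systemFont(ofSize: size).fontDescriptor
        } else {
            // iOS 13: carry the weight in the base font instead of descriptor traits.
            base = UIFont.systemFont(ofSize: size, weight: weight).fontDescriptor
        }
        var descriptor = base.withDesign(.rounded) ?? base
        if #available(iOS 14.0, *), weight != .regular {
            descriptor = descriptor.addingAttributes([
                .traits: [UIFontDescriptor.TraitKey.weight: weight.rawValue]
            ])
        }
        return UIFont(descriptor: descriptor, size: size)
    }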
@@ -157,7 +157,7 @@ private func generateRectsImage(color: UIColor, rects: [CGRect], inset: CGFloat,

 public final class LinkHighlightingNode: ASDisplayNode {
     private var rects: [CGRect] = []
-    private let imageNode: ASImageNode
+    public let imageNode: ASImageNode

     public var innerRadius: CGFloat = 4.0
     public var outerRadius: CGFloat = 4.0
@@ -196,7 +196,7 @@ public final class LinkHighlightingNode: ASDisplayNode {
     }

     private func updateImage() {
-        if rects.isEmpty {
+        if self.rects.isEmpty {
             self.imageNode.image = nil
         }
         let (offset, image) = generateRectsImage(color: self.color, rects: self.rects, inset: self.inset, outerRadius: self.outerRadius, innerRadius: self.innerRadius)
@@ -207,6 +207,19 @@ public final class LinkHighlightingNode: ASDisplayNode {
         }
     }

+    public static func generateImage(color: UIColor, inset: CGFloat, innerRadius: CGFloat, outerRadius: CGFloat, rects: [CGRect]) -> (CGPoint, UIImage)? {
+        if rects.isEmpty {
+            return nil
+        }
+        let (offset, image) = generateRectsImage(color: color, rects: rects, inset: inset, outerRadius: outerRadius, innerRadius: innerRadius)
+
+        if let image = image {
+            return (offset, image)
+        } else {
+            return nil
+        }
+    }
+
     public func asyncLayout() -> (UIColor, [CGRect], CGFloat, CGFloat, CGFloat) -> () -> Void {
         let currentRects = self.rects
         let currentColor = self._color
@@ -117,6 +117,8 @@ enum NavigationPreviousAction: Equatable {
     }
 }

+private var sharedIsReduceTransparencyEnabled = UIAccessibility.isReduceTransparencyEnabled
+
 public final class NavigationBackgroundNode: ASDisplayNode {
     private var _color: UIColor
     public var color: UIColor {
@@ -148,14 +150,9 @@ public final class NavigationBackgroundNode: ASDisplayNode {
     }

     private func updateBackgroundBlur(forceKeepBlur: Bool) {
-        if self.enableBlur && ((self.color.alpha > 0.1 && self.color.alpha < 0.95) || forceKeepBlur) {
+        if self.enableBlur && !sharedIsReduceTransparencyEnabled && ((self.color.alpha > .ulpOfOne && self.color.alpha < 0.95) || forceKeepBlur) {
             if self.effectView == nil {
-                let effectView: UIVisualEffectView
-                if self.color.lightness > 0.6 {
-                    effectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
-                } else {
-                    effectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
-                }
+                let effectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))

                 for subview in effectView.subviews {
                     if subview.description.contains("VisualEffectSubview") {
@@ -164,6 +161,8 @@ public final class NavigationBackgroundNode: ASDisplayNode {
                 }

                 if let sublayer = effectView.layer.sublayers?[0], let filters = sublayer.filters {
+                    sublayer.backgroundColor = nil
+                    sublayer.isOpaque = false
                     let allowedKeys: [String] = [
                         "colorSaturate",
                         "gaussianBlur"
@@ -176,11 +175,6 @@ public final class NavigationBackgroundNode: ASDisplayNode {
                         if !allowedKeys.contains(filterName) {
                             return false
                         }
-                        /*if filterName == "colorSaturate" {
-                            filter.setValue(2.8 as NSNumber, forKey: "inputAmount")
-                        } else if filterName == "gaussianBlur" {
-                            filter.setValue(5.0 as NSNumber, forKey: "inputRadius")
-                        }*/
                         return true
                     }
                 }
@@ -205,7 +199,11 @@ public final class NavigationBackgroundNode: ASDisplayNode {
         }
         self._color = color

+        if sharedIsReduceTransparencyEnabled {
+            transition.updateBackgroundColor(node: self.backgroundNode, color: self.color.withAlphaComponent(1.0))
+        } else {
             transition.updateBackgroundColor(node: self.backgroundNode, color: self.color)
+        }

         self.updateBackgroundBlur(forceKeepBlur: forceKeepBlur)
     }
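Note: with the Reduce Transparency accessibility setting enabled, the node now skips the blur entirely and draws a fully opaque colour. A simplified standalone sketch of that decision (illustrative, not the NavigationBackgroundNode implementation):

    import UIKit

    // Chooses between a blurred, translucent background and an opaque fallback.
    func backgroundConfiguration(for color: UIColor, reduceTransparency: Bool = UIAccessibility.isReduceTransparencyEnabled) -> (backgroundColor: UIColor, blurEffect: UIBlurEffect?) {
        if reduceTransparency {
            return (color.withAlphaComponent(1.0), nil)
        } else {
            return (color, UIBlurEffect(style: .light))
        }
    }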
@@ -133,7 +133,7 @@ public extension UIColor {
         }
     }

-    var hsb: (CGFloat, CGFloat, CGFloat) {
+    var hsb: (h: CGFloat, s: CGFloat, b: CGFloat) {
         var hue: CGFloat = 0.0
         var saturation: CGFloat = 0.0
         var brightness: CGFloat = 0.0
@@ -284,6 +284,27 @@ public extension UIColor {
         let b = e1.b - e2.b
         return ((512 + rMean) * r * r) >> 8 + 4 * g * g + ((767 - rMean) * b * b) >> 8
     }
+
+    static func average(of colors: [UIColor]) -> UIColor {
+        var sr: CGFloat = 0.0
+        var sg: CGFloat = 0.0
+        var sb: CGFloat = 0.0
+        var sa: CGFloat = 0.0
+
+        for color in colors {
+            var r: CGFloat = 0.0
+            var g: CGFloat = 0.0
+            var b: CGFloat = 0.0
+            var a: CGFloat = 0.0
+            color.getRed(&r, green: &g, blue: &b, alpha: &a)
+            sr += r
+            sg += g
+            sb += b
+            sa += a
+        }
+
+        return UIColor(red: sr / CGFloat(colors.count), green: sg / CGFloat(colors.count), blue: sb / CGFloat(colors.count), alpha: sa / CGFloat(colors.count))
+    }
 }

 public extension CGSize {
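Note: the new `average(of:)` helper simply averages the RGBA components of the input colours. A usage sketch (requires the extension added in the hunk above; the expected result is approximate and shown only for illustration):

    import UIKit

    let mixed = UIColor.average(of: [UIColor.red, UIColor.blue])
    // mixed ≈ UIColor(red: 0.5, green: 0.0, blue: 0.5, alpha: 1.0)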
@@ -386,6 +386,8 @@ public class GalleryController: ViewController, StandalonePresentableController

     public var centralItemUpdated: ((MessageId) -> Void)?

+    private var initialOrientation: UIInterfaceOrientation?
+
     public init(context: AccountContext, source: GalleryControllerItemSource, invertItemOrder: Bool = false, streamSingleVideo: Bool = false, fromPlayingVideo: Bool = false, landscape: Bool = false, timecode: Double? = nil, synchronousLoad: Bool = false, replaceRootController: @escaping (ViewController, Promise<Bool>?) -> Void, baseNavigationController: NavigationController?, actionInteraction: GalleryControllerActionInteraction? = nil) {
         self.context = context
         self.source = source
@@ -897,6 +899,10 @@ public class GalleryController: ViewController, StandalonePresentableController
     }

     deinit {
+        if let initialOrientation = self.initialOrientation {
+            self.context.sharedContext.applicationBindings.forceOrientation(initialOrientation)
+        }
+
         self.accountInUseDisposable.dispose()
         self.disposable.dispose()
         self.centralItemAttributesDisposable.dispose()
@@ -1019,6 +1025,17 @@ public class GalleryController: ViewController, StandalonePresentableController
             self?.galleryNode.pager.centralItemNode()?.controlsVisibilityUpdated(isVisible: visible)
         }

+        self.galleryNode.updateOrientation = { [weak self] orientation in
+            if let strongSelf = self {
+                if strongSelf.initialOrientation == nil {
+                    strongSelf.initialOrientation = orientation == .portrait ? .landscapeRight : .portrait
+                } else if strongSelf.initialOrientation == orientation {
+                    strongSelf.initialOrientation = nil
+                }
+                strongSelf.context.sharedContext.applicationBindings.forceOrientation(orientation)
+            }
+        }
+
         let baseNavigationController = self.baseNavigationController
         self.galleryNode.baseNavigationController = { [weak baseNavigationController] in
             return baseNavigationController
@@ -30,6 +30,8 @@ open class GalleryControllerNode: ASDisplayNode, UIScrollViewDelegate, UIGesture
     public var areControlsHidden = false
     public var controlsVisibilityChanged: ((Bool) -> Void)?

+    public var updateOrientation: ((UIInterfaceOrientation) -> Void)?
+
     public var isBackgroundExtendedOverNavigationBar = true {
         didSet {
             if let (navigationBarHeight, layout) = self.containerLayout {
@@ -69,6 +71,12 @@ open class GalleryControllerNode: ASDisplayNode, UIScrollViewDelegate, UIGesture
             }
         }

+        self.pager.updateOrientation = { [weak self] orientation in
+            if let strongSelf = self {
+                strongSelf.updateOrientation?(orientation)
+            }
+        }
+
         self.pager.dismiss = { [weak self] in
             if let strongSelf = self {
                 var interfaceAnimationCompleted = false
@@ -22,6 +22,7 @@ open class GalleryItemNode: ASDisplayNode {

     public var toggleControlsVisibility: () -> Void = { }
     public var updateControlsVisibility: (Bool) -> Void = { _ in }
+    public var updateOrientation: (UIInterfaceOrientation) -> Void = { _ in }
     public var dismiss: () -> Void = { }
     public var beginCustomDismiss: () -> Void = { }
     public var completeCustomDismiss: () -> Void = { }
@@ -107,6 +107,7 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest
     public var centralItemIndexOffsetUpdated: (([GalleryItem]?, Int, CGFloat)?) -> Void = { _ in }
     public var toggleControlsVisibility: () -> Void = { }
     public var updateControlsVisibility: (Bool) -> Void = { _ in }
+    public var updateOrientation: (UIInterfaceOrientation) -> Void = { _ in }
     public var dismiss: () -> Void = { }
     public var beginCustomDismiss: () -> Void = { }
     public var completeCustomDismiss: () -> Void = { }
@@ -474,6 +475,7 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest
         let node = self.items[index].node(synchronous: synchronous)
         node.toggleControlsVisibility = self.toggleControlsVisibility
         node.updateControlsVisibility = self.updateControlsVisibility
+        node.updateOrientation = self.updateOrientation
         node.dismiss = self.dismiss
         node.beginCustomDismiss = self.beginCustomDismiss
         node.completeCustomDismiss = self.completeCustomDismiss
@@ -188,7 +188,7 @@ private final class UniversalVideoGalleryItemOverlayNode: GalleryOverlayContentN
         self.addSubnode(self.wrapperNode)
         self.wrapperNode.addSubnode(self.fullscreenNode)

-        self.fullscreenNode.addTarget(self, action: #selector(self.soundButtonPressed), forControlEvents: .touchUpInside)
+        self.fullscreenNode.addTarget(self, action: #selector(self.toggleFullscreenPressed), forControlEvents: .touchUpInside)
     }

     override func updateLayout(size: CGSize, metrics: LayoutMetrics, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, transition: ContainedViewLayoutTransition) {
@@ -227,7 +227,7 @@ private final class UniversalVideoGalleryItemOverlayNode: GalleryOverlayContentN
         }
     }

-    @objc func soundButtonPressed() {
+    @objc func toggleFullscreenPressed() {
         var toLandscape = false
         if let (size, _, _, _ ,_) = self.validLayout, size.width < size.height {
             toLandscape = true
@@ -337,7 +337,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {

         self.overlayContentNode.action = { [weak self] toLandscape in
             self?.updateControlsVisibility(!toLandscape)
-            context.sharedContext.applicationBindings.forceOrientation(toLandscape ? .landscapeRight : .portrait)
+            self?.updateOrientation(toLandscape ? .landscapeRight : .portrait)
         }

         self.scrubberView.seek = { [weak self] timecode in
@@ -3,6 +3,7 @@ import UIKit
 import Display
 import AsyncDisplayKit
 import SwiftSignalKit
+import Accelerate

 private func shiftArray(array: [CGPoint], offset: Int) -> [CGPoint] {
     var newArray = array
@@ -31,7 +32,7 @@ private func interpolatePoints(_ point1: CGPoint, _ point2: CGPoint, at factor:
     return CGPoint(x: interpolateFloat(point1.x, point2.x, at: factor), y: interpolateFloat(point1.y, point2.y, at: factor))
 }

-private func generateGradient(size: CGSize, colors: [UIColor], positions: [CGPoint]) -> UIImage {
+private func generateGradient(size: CGSize, colors: [UIColor], positions: [CGPoint], adjustSaturation: CGFloat = 1.0) -> UIImage {
     let width = Int(size.width)
     let height = Int(size.height)

@@ -114,6 +115,43 @@ private func generateGradient(size: CGSize, colors: [UIColor], positions: [CGPoi
         }
     }

+    if abs(adjustSaturation - 1.0) > .ulpOfOne {
+        var buffer = vImage_Buffer()
+        buffer.data = context.bytes
+        buffer.width = UInt(width)
+        buffer.height = UInt(height)
+        buffer.rowBytes = context.bytesPerRow
+
+        let divisor: Int32 = 0x1000
+
+        let rwgt: CGFloat = 0.3086
+        let gwgt: CGFloat = 0.6094
+        let bwgt: CGFloat = 0.0820
+
+        let a = (1.0 - adjustSaturation) * rwgt + adjustSaturation
+        let b = (1.0 - adjustSaturation) * rwgt
+        let c = (1.0 - adjustSaturation) * rwgt
+        let d = (1.0 - adjustSaturation) * gwgt
+        let e = (1.0 - adjustSaturation) * gwgt + adjustSaturation
+        let f = (1.0 - adjustSaturation) * gwgt
+        let g = (1.0 - adjustSaturation) * bwgt
+        let h = (1.0 - adjustSaturation) * bwgt
+        let i = (1.0 - adjustSaturation) * bwgt + adjustSaturation
+
+        let satMatrix: [CGFloat] = [
+            a, b, c, 0,
+            d, e, f, 0,
+            g, h, i, 0,
+            0, 0, 0, 1
+        ]
+
+        var matrix: [Int16] = satMatrix.map { value in
+            return Int16(value * CGFloat(divisor))
+        }
+
+        vImageMatrixMultiply_ARGB8888(&buffer, &buffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
+    }
+
     return context.generateImage()!
 }

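Note: the saturation adjustment above uses the classic luminance-weighted matrix (weights 0.3086 / 0.6094 / 0.0820) in fixed-point form and applies it in place with vImage. A standalone sketch of the same technique against a plain CGBitmapContext (assumptions: the context holds 8-bit-per-channel RGBA/ARGB pixels and exposes its backing store via `data`):

    import Accelerate
    import CoreGraphics

    func adjustSaturation(of context: CGContext, width: Int, height: Int, saturation: CGFloat) {
        guard let data = context.data else {
            return
        }
        var buffer = vImage_Buffer(data: data, height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: context.bytesPerRow)

        let divisor: Int32 = 0x1000
        let rwgt: CGFloat = 0.3086, gwgt: CGFloat = 0.6094, bwgt: CGFloat = 0.0820
        let s = saturation
        // Each colour channel is blended toward luminance by (1 - s) and keeps s of its own value.
        let floatMatrix: [CGFloat] = [
            (1 - s) * rwgt + s, (1 - s) * rwgt,     (1 - s) * rwgt,     0,
            (1 - s) * gwgt,     (1 - s) * gwgt + s, (1 - s) * gwgt,     0,
            (1 - s) * bwgt,     (1 - s) * bwgt,     (1 - s) * bwgt + s, 0,
            0,                  0,                  0,                  1
        ]
        var matrix = floatMatrix.map { Int16($0 * CGFloat(divisor)) }
        _ = vImageMatrixMultiply_ARGB8888(&buffer, &buffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
    }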
@@ -128,7 +166,7 @@ public final class GradientBackgroundNode: ASDisplayNode {
             super.init()

             self.index = parentNode.cloneNodes.add(Weak<CloneNode>(self))
-            self.image = parentNode.contentView.image
+            self.image = parentNode.dimmedImage
         }

         deinit {
@@ -160,6 +198,19 @@ public final class GradientBackgroundNode: ASDisplayNode {
     private var validPhase: Int?
     private var invalidated: Bool = false

+    private var dimmedImageParams: (size: CGSize, colors: [UIColor], positions: [CGPoint])?
+    private var _dimmedImage: UIImage?
+    private var dimmedImage: UIImage? {
+        if let current = self._dimmedImage {
+            return current
+        } else if let (size, colors, positions) = self.dimmedImageParams {
+            self._dimmedImage = generateGradient(size: size, colors: colors, positions: positions, adjustSaturation: 1.7)
+            return self._dimmedImage
+        } else {
+            return nil
+        }
+    }
+
     private var validLayout: CGSize?

     private var colors: [UIColor] = [
@@ -201,7 +252,7 @@ public final class GradientBackgroundNode: ASDisplayNode {
     deinit {
     }

-    public func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
+    public func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition, extendAnimation: Bool = false) {
         let sizeUpdated = self.validLayout != size
         self.validLayout = size

@@ -214,54 +265,127 @@ public final class GradientBackgroundNode: ASDisplayNode {
                 self.validPhase = self.phase
                 self.invalidated = false
 
-                let previousPositions = gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: validPhase % 8))
+                var steps: [[CGPoint]] = []
+                if extendAnimation {
+                    let phaseCount = 4
+                    var stepPhase = (self.phase + phaseCount) % 8
+                    for _ in 0 ... phaseCount {
+                        steps.append(gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: stepPhase)))
+                        stepPhase = stepPhase - 1
+                        if stepPhase < 0 {
+                            stepPhase = 7
+                        }
+                    }
+                } else {
+                    steps.append(gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: validPhase % 8)))
+                    steps.append(positions)
+                }
 
                 if case let .animated(duration, curve) = transition, duration > 0.001 {
                     var images: [UIImage] = []
 
-                    let maxFrame = Int(duration * 30)
-                    for i in 0 ..< maxFrame {
-                        let t = curve.solve(at: CGFloat(i) / CGFloat(maxFrame - 1))
+                    var dimmedImages: [UIImage] = []
+                    let needDimmedImages = !self.cloneNodes.isEmpty
 
-                        let morphedPositions = Array(zip(previousPositions, positions).map { previous, current -> CGPoint in
-                            return interpolatePoints(previous, current, at: t)
-                        })
+                    let stepCount = steps.count - 1
 
-                        images.append(generateGradient(size: imageSize, colors: self.colors, positions: morphedPositions))
+                    let fps: Double = extendAnimation ? 60 : 30
+                    let maxFrame = Int(duration * fps)
+                    let framesPerAnyStep = maxFrame / stepCount
+
+                    for frameIndex in 0 ..< maxFrame {
+                        let t = curve.solve(at: CGFloat(frameIndex) / CGFloat(maxFrame - 1))
+                        let globalStep = Int(t * CGFloat(maxFrame))
+                        let stepIndex = min(stepCount - 1, globalStep / framesPerAnyStep)
+
+                        let stepFrameIndex = globalStep - stepIndex * framesPerAnyStep
+                        let stepFrames: Int
+                        if stepIndex == stepCount - 1 {
+                            stepFrames = maxFrame - framesPerAnyStep * (stepCount - 1)
+                        } else {
+                            stepFrames = framesPerAnyStep
+                        }
+                        let stepT = CGFloat(stepFrameIndex) / CGFloat(stepFrames - 1)
+
+                        var morphedPositions: [CGPoint] = []
+                        for i in 0 ..< steps[0].count {
+                            morphedPositions.append(interpolatePoints(steps[stepIndex][i], steps[stepIndex + 1][i], at: stepT))
                         }
+
+                        images.append(generateGradient(size: imageSize, colors: self.colors, positions: morphedPositions))
+                        if needDimmedImages {
+                            dimmedImages.append(generateGradient(size: imageSize, colors: self.colors, positions: morphedPositions, adjustSaturation: 1.7))
+                        }
+                    }
+
+                    self.dimmedImageParams = (imageSize, self.colors, gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: self.phase % 8)))
 
                     self.contentView.image = images.last
 
                     let animation = CAKeyframeAnimation(keyPath: "contents")
                     animation.values = images.map { $0.cgImage! }
                     animation.duration = duration * UIView.animationDurationFactor()
+                    if extendAnimation {
+                        animation.calculationMode = .discrete
+                    } else {
                         animation.calculationMode = .linear
+                    }
                     animation.isRemovedOnCompletion = true
+                    if extendAnimation {
+                        animation.fillMode = .backwards
+                        animation.beginTime = self.contentView.layer.convertTime(CACurrentMediaTime(), from: nil) + 0.25
+                    }
 
                     self.contentView.layer.removeAnimation(forKey: "contents")
                     self.contentView.layer.add(animation, forKey: "contents")
 
+                    if !self.cloneNodes.isEmpty {
+                        let animation = CAKeyframeAnimation(keyPath: "contents")
+                        animation.values = dimmedImages.map { $0.cgImage! }
+                        animation.duration = duration * UIView.animationDurationFactor()
+                        if extendAnimation {
+                            animation.calculationMode = .discrete
+                        } else {
+                            animation.calculationMode = .linear
+                        }
+                        animation.isRemovedOnCompletion = true
+                        if extendAnimation {
+                            animation.fillMode = .backwards
+                            animation.beginTime = self.contentView.layer.convertTime(CACurrentMediaTime(), from: nil) + 0.25
+                        }
+
+                        self._dimmedImage = dimmedImages.last
+
                         for cloneNode in self.cloneNodes {
                             if let value = cloneNode.value {
-                                value.image = images.last
+                                value.image = dimmedImages.last
                                 value.layer.removeAnimation(forKey: "contents")
-                                value.layer.add(animation.copy() as! CAAnimation, forKey: "contents")
+                                value.layer.add(animation, forKey: "contents")
+                            }
                         }
                     }
                 } else {
-                    let image = generateGradient(size: imageSize, colors: colors, positions: positions)
+                    let image = generateGradient(size: imageSize, colors: self.colors, positions: positions)
                     self.contentView.image = image
+
+                    let dimmedImage = generateGradient(size: imageSize, colors: self.colors, positions: positions, adjustSaturation: 1.7)
+                    self._dimmedImage = dimmedImage
+                    self.dimmedImageParams = (imageSize, self.colors, positions)
 
                     for cloneNode in self.cloneNodes {
-                        cloneNode.value?.image = image
+                        cloneNode.value?.image = dimmedImage
                     }
                 }
             }
         } else if sizeUpdated {
-            let image = generateGradient(size: imageSize, colors: colors, positions: positions)
+            let image = generateGradient(size: imageSize, colors: self.colors, positions: positions)
             self.contentView.image = image
+
+            let dimmedImage = generateGradient(size: imageSize, colors: self.colors, positions: positions, adjustSaturation: 1.7)
+            self.dimmedImageParams = (imageSize, self.colors, positions)
 
             for cloneNode in self.cloneNodes {
-                cloneNode.value?.image = image
+                cloneNode.value?.image = dimmedImage
            }
 
            self.validPhase = self.phase
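On the animation loop above: the gradient now animates through a list of position steps (five phases when extendAnimation is set, otherwise just previous to current), and each rendered frame is assigned a step index plus a local progress value inside that step. A standalone Swift sketch of that index math, assuming a linear timing curve in place of curve.solve and a maxFrame comfortably larger than stepCount:

    import CoreGraphics

    // Maps an output frame to (step index, progress within that step), like the loop above.
    func stepProgress(frameIndex: Int, maxFrame: Int, stepCount: Int) -> (step: Int, t: CGFloat) {
        let framesPerStep = maxFrame / stepCount
        let t = CGFloat(frameIndex) / CGFloat(maxFrame - 1)      // linear stand-in for curve.solve
        let globalStep = Int(t * CGFloat(maxFrame))
        let step = min(stepCount - 1, globalStep / framesPerStep)
        let stepFrameIndex = globalStep - step * framesPerStep
        // The last step absorbs the integer-division remainder so the sweep still covers all positions.
        let stepFrames = step == stepCount - 1 ? maxFrame - framesPerStep * (stepCount - 1) : framesPerStep
        return (step, CGFloat(stepFrameIndex) / CGFloat(stepFrames - 1))
    }

    // Example: 60 frames over 4 steps -> frame 0 maps to (0, 0.0) and frame 30 to the start of step 2.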
@@ -278,21 +402,25 @@ public final class GradientBackgroundNode: ASDisplayNode {
         }
     }
 
-    public func animateEvent(transition: ContainedViewLayoutTransition) {
+    public func animateEvent(transition: ContainedViewLayoutTransition, extendAnimation: Bool = false) {
         guard case let .animated(duration, _) = transition, duration > 0.001 else {
             return
         }
 
+        if extendAnimation {
+            self.invalidated = true
+        } else {
             if self.phase == 0 {
                 self.phase = 7
             } else {
                 self.phase = self.phase - 1
             }
+        }
         if self.useSharedAnimationPhase {
             GradientBackgroundNode.sharedPhase = self.phase
         }
         if let size = self.validLayout {
-            self.updateLayout(size: size, transition: transition)
+            self.updateLayout(size: size, transition: transition, extendAnimation: extendAnimation)
         }
     }
 }
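Taken together with the updateLayout changes, extendAnimation switches animateEvent from "advance one phase" to "replay a longer multi-step sweep without consuming a phase", using a discrete keyframe calculation mode and a 0.25 s delayed begin time. A hedged usage sketch; only the two animateEvent calls are taken from this update (they mirror playPressed and animateWallpaperAppeared further down), the wrapper function is illustrative:

    // Sketch only: shows how callers in this update drive the node.
    func driveGradient(_ background: GradientBackgroundNode) {
        // Regular event (play button with three or more colors): advance one phase.
        background.animateEvent(transition: .animated(duration: 0.5, curve: .spring))

        // On appearance: replay the extended multi-step sweep, leaving the shared phase untouched.
        background.animateEvent(transition: .animated(duration: 2.0, curve: .spring), extendAnimation: true)
    }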
@@ -32,6 +32,7 @@ swift_library(
         "//submodules/ContextUI:ContextUI",
         "//submodules/FileMediaResourceStatus:FileMediaResourceStatus",
         "//submodules/ManagedAnimationNode:ManagedAnimationNode",
+        "//submodules/WallpaperResources:WallpaperResources",
     ],
     visibility = [
         "//visibility:public",
@@ -16,6 +16,7 @@ import UrlHandling
 import UrlWhitelist
 import AccountContext
 import TelegramStringFormatting
+import WallpaperResources
 
 private let iconFont = Font.with(size: 30.0, design: .round, weight: .bold)
 
@@ -254,6 +255,9 @@ public final class ListMessageSnippetItemNode: ListMessageNode {
 
             var isInstantView = false
 
+            var previewWallpaper: TelegramWallpaper?
+            var previewWallpaperFileReference: FileMediaReference?
+
             var selectedMedia: TelegramMediaWebpage?
             var processed = false
             for media in item.message.media {
@@ -283,6 +287,17 @@ public final class ListMessageSnippetItemNode: ListMessageNode {
                             iconImageReferenceAndRepresentation = (.message(message: MessageReference(item.message), media: image), representation)
                         }
                     } else if let file = content.file {
+                        if content.type == "telegram_background" {
+                            if let wallpaper = parseWallpaperUrl(content.url) {
+                                switch wallpaper {
+                                case let .slug(slug, _, colors, intensity, angle):
+                                    previewWallpaperFileReference = .message(message: MessageReference(item.message), media: file)
+                                    previewWallpaper = .file(id: file.fileId.id, accessHash: 0, isCreator: false, isDefault: false, isPattern: true, isDark: false, slug: slug, file: file, settings: WallpaperSettings(blur: false, motion: false, colors: colors, intensity: intensity, rotation: angle))
+                                default:
+                                    break
+                                }
+                            }
+                        }
                         if let representation = smallestImageRepresentation(file.previewRepresentations) {
                             iconImageReferenceAndRepresentation = (.message(message: MessageReference(item.message), media: file), representation)
                         }
@@ -508,7 +523,9 @@ public final class ListMessageSnippetItemNode: ListMessageNode {
             }
 
             if currentIconImageRepresentation != iconImageReferenceAndRepresentation?.1 {
-                if let iconImageReferenceAndRepresentation = iconImageReferenceAndRepresentation {
+                if let previewWallpaper = previewWallpaper, let fileReference = previewWallpaperFileReference {
+                    updateIconImageSignal = wallpaperThumbnail(account: item.context.account, accountManager: item.context.sharedContext.accountManager, fileReference: fileReference, wallpaper: previewWallpaper, synchronousLoad: false)
+                } else if let iconImageReferenceAndRepresentation = iconImageReferenceAndRepresentation {
                     if let imageReference = iconImageReferenceAndRepresentation.0.concrete(TelegramMediaImage.self) {
                         updateIconImageSignal = chatWebpageSnippetPhoto(account: item.context.account, photoReference: imageReference)
                     } else if let fileReference = iconImageReferenceAndRepresentation.0.concrete(TelegramMediaFile.self) {
@@ -2112,6 +2112,24 @@ static NSString *dumpHexString(NSData *data, int maxLength) {
     }
 }
 
+static bool isDataEqualToDataConstTime(NSData *data1, NSData *data2) {
+    if (data1.length != data2.length) {
+        return false;
+    }
+
+    uint8_t const *bytes1 = data1.bytes;
+    uint8_t const *bytes2 = data2.bytes;
+
+    int result = 0;
+    for (int i = 0; i < data1.length; i++) {
+        if (bytes1[i] != bytes2[i]) {
+            result |= i + 1;
+        }
+    }
+
+    return result == 0;
+}
+
 - (NSData *)_decryptIncomingTransportData:(NSData *)transportData address:(MTDatacenterAddress *)address authKey:(MTDatacenterAuthKey *)authKey
 {
     MTDatacenterAuthKey *effectiveAuthKey = authKey;
@@ -2146,7 +2164,7 @@ static NSString *dumpHexString(NSData *data, int maxLength) {
     NSData *msgKeyLarge = MTSha256(msgKeyLargeData);
     NSData *messageKey = [msgKeyLarge subdataWithRange:NSMakeRange(8, 16)];
 
-    if (![messageKey isEqualToData:embeddedMessageKey]) {
+    if (!isDataEqualToDataConstTime(messageKey, embeddedMessageKey)) {
         return nil;
     }
 
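The helper added in the hunk above replaces -isEqualToData: so that comparing the computed message key with the embedded one no longer exits at the first mismatching byte. For reference, the same idea is more commonly written with a branch-free XOR accumulator; a small Swift sketch of that variant (illustrative only, not what the patch ships):

    // Constant-time equality over byte buffers: accumulate differences instead of
    // returning early, so timing does not reveal how many leading bytes matched.
    func constantTimeEquals(_ lhs: [UInt8], _ rhs: [UInt8]) -> Bool {
        guard lhs.count == rhs.count else {
            return false
        }
        var difference: UInt8 = 0
        for i in 0 ..< lhs.count {
            difference |= lhs[i] ^ rhs[i]
        }
        return difference == 0
    }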
@@ -1107,6 +1107,8 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
         }
     }
 
+    public var updateCustomItemsOnlySynchronously = false
+
     private func updateItems(size: CGSize, update: Bool = false, transition: ContainedViewLayoutTransition, stripTransition: ContainedViewLayoutTransition, synchronous: Bool = false) {
         var validIds: [WrappedMediaResourceId] = []
         var addedItemNodesForAdditiveTransition: [PeerInfoAvatarListItemNode] = []
@@ -1121,6 +1123,10 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
             if let current = self.itemNodes[self.items[i].id] {
                 itemNode = current
                 if update {
+                    var synchronous = synchronous && i == self.currentIndex
+                    if case .custom = self.items[i], self.updateCustomItemsOnlySynchronously {
+                        synchronous = true
+                    }
                     current.setup(item: self.items[i], synchronous: synchronous && i == self.currentIndex, fullSizeOnly: self.firstFullSizeOnly && i == 0)
                 }
             } else if let peer = self.peer {
@@ -34,7 +34,7 @@ final class GroupInfoSearchNavigationContentNode: NavigationBarContentNode, Item
 
         self.cancel = cancel
 
-        self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: theme, hasSeparator: false), strings: strings, fieldStyle: .modern)
+        self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: theme, hasSeparator: false), strings: strings, fieldStyle: .modern, displayBackground: false)
 
         super.init()
 
@@ -151,7 +151,7 @@ public struct PeerId: Hashable, CustomStringConvertible, Comparable, Codable {
         self.namespace = Namespace(rawValue: UInt32(namespaceBits))
 
         let idHighBits = (data >> (32 + 3)) & 0xffffffff
-        assert(idHighBits == 0)
+        //assert(idHighBits == 0)
 
         self.id = Id(rawValue: Int32(bitPattern: UInt32(clamping: idLowBits)))
     }
@@ -807,14 +807,14 @@ public class SearchBarNode: ASDisplayNode, UITextFieldDelegate {
     private var strings: PresentationStrings?
     private let cancelText: String?
 
-    public init(theme: SearchBarNodeTheme, strings: PresentationStrings, fieldStyle: SearchBarStyle = .legacy, forceSeparator: Bool = false, cancelText: String? = nil) {
+    public init(theme: SearchBarNodeTheme, strings: PresentationStrings, fieldStyle: SearchBarStyle = .legacy, forceSeparator: Bool = false, displayBackground: Bool = true, cancelText: String? = nil) {
         self.fieldStyle = fieldStyle
         self.forceSeparator = forceSeparator
         self.cancelText = cancelText
 
         self.backgroundNode = NavigationBackgroundNode(color: theme.background)
         self.backgroundNode.isUserInteractionEnabled = false
-        //self.backgroundNode.isHidden = true
+        self.backgroundNode.isHidden = !displayBackground
 
         self.separatorNode = ASDisplayNode()
         self.separatorNode.isLayerBacked = true
@@ -38,7 +38,7 @@ public final class SearchDisplayController {
     private var isSearchingDisposable: Disposable?
 
     public init(presentationData: PresentationData, mode: SearchDisplayControllerMode = .navigation, placeholder: String? = nil, hasBackground: Bool = false, hasSeparator: Bool = false, contentNode: SearchDisplayControllerContentNode, cancel: @escaping () -> Void) {
-        self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: presentationData.theme, hasBackground: hasBackground, hasSeparator: hasSeparator), strings: presentationData.strings, fieldStyle: .modern, forceSeparator: hasSeparator)
+        self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: presentationData.theme, hasBackground: hasBackground, hasSeparator: hasSeparator), strings: presentationData.strings, fieldStyle: .modern, forceSeparator: hasSeparator, displayBackground: hasBackground)
         self.backgroundNode = BackgroundNode()
         self.backgroundNode.backgroundColor = presentationData.theme.chatList.backgroundColor
         self.backgroundNode.allowsGroupOpacity = true
@@ -169,7 +169,8 @@ final class SettingsThemeWallpaperNode: ASDisplayNode {
                     self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: UIColor(white: 0.0, alpha: 1.0 + patternIntensity))
                 } else {
                     self.imageNode.alpha = CGFloat(file.settings.intensity ?? 50) / 100.0
-                    self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: UIColor(white: 0.0, alpha: 1.0))
+                    let isLight = UIColor.average(of: file.settings.colors.map(UIColor.init(rgb:))).hsb.b > 0.3
+                    self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: isLight ? .black : .white)
                 }
                 imageSignal = patternWallpaperImage(account: context.account, accountManager: context.sharedContext.accountManager, representations: convertedRepresentations, mode: .thumbnail, autoFetchFullSize: true)
             } else {
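Here the pattern color for the wallpaper thumbnail is derived from the average brightness of the background colors: light backgrounds get a black pattern, dark ones a white pattern. UIColor.average(of:) and .hsb are helpers from this codebase; a self-contained Swift sketch of the same decision using plain RGB averaging and the HSV brightness (largest component), purely for illustration:

    import CoreGraphics

    // Illustrative re-statement of the isLight check above, without the project's UIColor helpers.
    // Colors are 0xRRGGBB values, as stored in WallpaperSettings.colors.
    func prefersBlackPattern(backgroundColors: [UInt32]) -> Bool {
        guard !backgroundColors.isEmpty else {
            return false
        }
        var r: CGFloat = 0.0, g: CGFloat = 0.0, b: CGFloat = 0.0
        for color in backgroundColors {
            r += CGFloat((color >> 16) & 0xff) / 255.0
            g += CGFloat((color >> 8) & 0xff) / 255.0
            b += CGFloat(color & 0xff) / 255.0
        }
        let count = CGFloat(backgroundColors.count)
        // HSV "brightness" of the averaged color is its largest component.
        let brightness = max(r / count, g / count, b / count)
        return brightness > 0.3   // same 0.3 threshold as the patch
    }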
@@ -131,6 +131,12 @@ final class ThemeAccentColorController: ViewController {
         self.applyDisposable.dispose()
     }
 
+    override func viewDidAppear(_ animated: Bool) {
+        super.viewDidAppear(animated)
+
+        self.controllerNode.animateWallpaperAppeared()
+    }
+
     override func loadDisplayNode() {
         super.loadDisplayNode()
 
@@ -213,7 +219,7 @@ final class ThemeAccentColorController: ViewController {
                 }
 
                 if let themeReference = generalThemeReference {
-                    updatedTheme = makePresentationTheme(mediaBox: context.sharedContext.accountManager.mediaBox, themeReference: themeReference, accentColor: state.accentColor, backgroundColors: state.backgroundColors, bubbleColors: state.messagesColors, wallpaper: state.initialWallpaper ?? coloredWallpaper, serviceBackgroundColor: serviceBackgroundColor) ?? defaultPresentationTheme
+                    updatedTheme = makePresentationTheme(mediaBox: context.sharedContext.accountManager.mediaBox, themeReference: themeReference, accentColor: state.accentColor, backgroundColors: state.backgroundColors, bubbleColors: state.messagesColors, wallpaper: coloredWallpaper ?? state.initialWallpaper, serviceBackgroundColor: serviceBackgroundColor) ?? defaultPresentationTheme
                 } else {
                     updatedTheme = customizePresentationTheme(theme, editing: false, accentColor: state.accentColor, backgroundColors: state.backgroundColors, bubbleColors: state.messagesColors, wallpaper: state.initialWallpaper ?? coloredWallpaper)
                 }
@@ -156,7 +156,15 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
     private let scrollNode: ASScrollNode
     private let pageControlBackgroundNode: ASDisplayNode
     private let pageControlNode: PageControlNode
 
     private var patternButtonNode: WallpaperOptionButtonNode
+    private var colorsButtonNode: WallpaperOptionButtonNode
+
+    private var playButtonNode: HighlightableButtonNode
+    private let playButtonBackgroundNode: NavigationBackgroundNode
+    private let playButtonPlayImage: UIImage?
+    private let playButtonRotateImage: UIImage?
 
     private let chatListBackgroundNode: ASDisplayNode
     private var chatNodes: [ListViewItemNode]?
     private let maskNode: ASImageNode
@@ -231,6 +239,40 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         self.pageControlNode = PageControlNode(dotSpacing: 7.0, dotColor: .white, inactiveDotColor: UIColor.white.withAlphaComponent(0.4))
 
         self.patternButtonNode = WallpaperOptionButtonNode(title: self.presentationData.strings.WallpaperPreview_Pattern, value: .check(false))
+        self.colorsButtonNode = WallpaperOptionButtonNode(title: self.presentationData.strings.WallpaperPreview_WallpaperColors, value: .colors(false, []))
+
+        self.playButtonBackgroundNode = NavigationBackgroundNode(color: UIColor(white: 0.0, alpha: 0.3))
+        self.playButtonNode = HighlightableButtonNode()
+        self.playButtonNode.insertSubnode(self.playButtonBackgroundNode, at: 0)
+
+        self.playButtonPlayImage = generateImage(CGSize(width: 48.0, height: 48.0), rotatedContext: { size, context in
+            context.clear(CGRect(origin: CGPoint(), size: size))
+            context.setFillColor(UIColor.white.cgColor)
+
+            let diameter = size.width
+
+            let factor = diameter / 50.0
+
+            let size = CGSize(width: 15.0, height: 18.0)
+            context.translateBy(x: (diameter - size.width) / 2.0 + 1.5, y: (diameter - size.height) / 2.0)
+            if (diameter < 40.0) {
+                context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
+                context.scaleBy(x: factor, y: factor)
+                context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
+            }
+            let _ = try? drawSvgPath(context, path: "M1.71891969,0.209353049 C0.769586558,-0.350676705 0,0.0908839327 0,1.18800046 L0,16.8564753 C0,17.9569971 0.750549162,18.357187 1.67393713,17.7519379 L14.1073836,9.60224049 C15.0318735,8.99626906 15.0094718,8.04970371 14.062401,7.49100858 L1.71891969,0.209353049 ")
+            context.fillPath()
+            if (diameter < 40.0) {
+                context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
+                context.scaleBy(x: 1.0 / 0.8, y: 1.0 / 0.8)
+                context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
+            }
+            context.translateBy(x: -(diameter - size.width) / 2.0 - 1.5, y: -(diameter - size.height) / 2.0)
+        })
+
+        self.playButtonRotateImage = generateTintedImage(image: UIImage(bundleImageName: "Settings/ThemeColorRotateIcon"), color: .white)
+
+        self.playButtonNode.setImage(self.playButtonPlayImage, for: [])
+
         self.chatListBackgroundNode = ASDisplayNode()
 
@@ -276,6 +318,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         self.addSubnode(self.pageControlBackgroundNode)
         self.addSubnode(self.pageControlNode)
         self.addSubnode(self.patternButtonNode)
+        self.addSubnode(self.colorsButtonNode)
+        self.addSubnode(self.playButtonNode)
         self.addSubnode(self.colorPanelNode)
         self.addSubnode(self.patternPanelNode)
         self.addSubnode(self.toolbarNode)
@@ -288,6 +332,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         self.backgroundWrapperNode.addSubnode(self.backgroundNode)
 
         self.patternButtonNode.addTarget(self, action: #selector(self.togglePattern), forControlEvents: .touchUpInside)
+        self.colorsButtonNode.addTarget(self, action: #selector(self.toggleColors), forControlEvents: .touchUpInside)
+        self.playButtonNode.addTarget(self, action: #selector(self.playPressed), forControlEvents: .touchUpInside)
 
         self.colorPanelNode.colorsChanged = { [weak self] colors, ended in
             if let strongSelf = self, let section = strongSelf.state.section {
@@ -389,7 +435,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         |> mapToThrottled { next -> Signal<ThemeColorState, NoError> in
             return .single(next) |> then(.complete() |> delay(0.0166667, queue: self.queue))
         }
-        |> map { state -> (PresentationTheme?, TelegramWallpaper, UIColor, [UInt32], PatternWallpaperArguments, Bool) in
+        |> map { state -> (PresentationTheme?, TelegramWallpaper, UIColor, [UInt32], Int32, PatternWallpaperArguments, Bool) in
             let accentColor = state.accentColor
             var backgroundColors = state.backgroundColors
             let messagesColors = state.messagesColors
@@ -455,9 +501,9 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
 
             let patternArguments = PatternWallpaperArguments(colors: calcPatternColors(for: state), rotation: wallpaper.settings?.rotation ?? 0, preview: state.preview)
 
-            return (updatedTheme, wallpaper, serviceBackgroundColor, backgroundColors, patternArguments, state.preview)
+            return (updatedTheme, wallpaper, serviceBackgroundColor, backgroundColors, state.rotation, patternArguments, state.preview)
         }
-        |> deliverOnMainQueue).start(next: { [weak self] theme, wallpaper, serviceBackgroundColor, backgroundColors, patternArguments, preview in
+        |> deliverOnMainQueue).start(next: { [weak self] theme, wallpaper, serviceBackgroundColor, backgroundColors, rotation, patternArguments, preview in
             guard let strongSelf = self else {
                 return
             }
@@ -485,6 +531,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
             strongSelf.wallpaper = wallpaper
             strongSelf.patternArguments = patternArguments
 
+            strongSelf.colorsButtonNode.colors = backgroundColors.map(UIColor.init(rgb:))
+
             if !preview {
                 if !backgroundColors.isEmpty {
                     strongSelf.currentBackgroundColors = (backgroundColors, strongSelf.state.rotation, strongSelf.state.patternIntensity)
@@ -524,6 +572,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
                 strongSelf.patternPanelNode.serviceBackgroundColor = color
                 strongSelf.pageControlBackgroundNode.backgroundColor = color
                 strongSelf.patternButtonNode.buttonColor = color
+                strongSelf.colorsButtonNode.buttonColor = color
+                strongSelf.playButtonBackgroundNode.color = color
             }
         })
     }
@@ -688,6 +738,20 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
             needsLayout = true
         }
 
+        if (previousState.backgroundColors.count >= 2) != (self.state.backgroundColors.count >= 2) {
+            needsLayout = true
+        }
+
+        if previousState.backgroundColors.count != self.state.backgroundColors.count {
+            if self.state.backgroundColors.count <= 2 {
+                self.playButtonNode.setImage(self.playButtonRotateImage, for: [])
+            } else {
+                self.playButtonNode.setImage(self.playButtonPlayImage, for: [])
+            }
+        }
+
+        self.colorsButtonNode.isSelected = !self.state.colorPanelCollapsed
+
         if needsLayout, let (layout, navigationBarHeight, _) = self.validLayout {
             self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: animated ? .animated(duration: animationDuration, curve: animationCurve) : .immediate)
         }
@@ -701,6 +765,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
             }
             updated.section = section
             updated.displayPatternPanel = false
+            updated.colorPanelCollapsed = false
             return updated
         }, animated: true)
     }
@@ -825,13 +890,24 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
 
             items = sampleMessages.reversed().map { message in
                 let item = self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.theme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: { [weak self] message in
-                    if message.flags.contains(.Incoming) {
+                    guard let strongSelf = self else {
+                        return
+                    }
+                    strongSelf.updateState({ state in
+                        var state = state
+                        if state.section == .background {
+                            state.colorPanelCollapsed = true
+                            state.displayPatternPanel = false
+                        }
+                        return state
+                    }, animated: true)
+                    /*if message.flags.contains(.Incoming) {
                         self?.updateSection(.accent)
                         self?.requestSectionUpdate?(.accent)
                     } else {
                         self?.updateSection(.messages)
                         self?.requestSectionUpdate?(.messages)
-                    }
+                    }*/
                 }, clickThroughMessage: { [weak self] in
                     self?.updateSection(.background)
                     self?.requestSectionUpdate?(.background)
@@ -929,7 +1005,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
 
         var colorPanelOffset: CGFloat = 0.0
         if self.state.colorPanelCollapsed {
-            colorPanelOffset = colorPanelHeight - inputFieldPanelHeight
+            colorPanelOffset = colorPanelHeight
         }
         let colorPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomInset - colorPanelHeight + colorPanelOffset), size: CGSize(width: layout.size.width, height: colorPanelHeight))
         bottomInset += (colorPanelHeight - colorPanelOffset)
@@ -967,7 +1043,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         var messagesBottomInset: CGFloat = bottomInset
 
         if displayOptionButtons {
-            messagesBottomInset += 46.0
+            messagesBottomInset += 56.0
         } else if chatListPreviewAvailable {
             messagesBottomInset += 37.0
         }
@@ -987,19 +1063,42 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         transition.updateFrame(node: self.maskNode, frame: CGRect(x: 0.0, y: layout.size.height - bottomInset - 80.0, width: bounds.width, height: 80.0))
 
         let patternButtonSize = self.patternButtonNode.measure(layout.size)
-        let maxButtonWidth = patternButtonSize.width
+        let colorsButtonSize = self.colorsButtonNode.measure(layout.size)
+        let maxButtonWidth = max(patternButtonSize.width, colorsButtonSize.width)
         let buttonSize = CGSize(width: maxButtonWidth, height: 30.0)
 
-        let leftButtonFrame = CGRect(origin: CGPoint(x: floor(layout.size.width / 2.0 - buttonSize.width - 10.0), y: layout.size.height - bottomInset - 44.0), size: buttonSize)
-        let centerButtonFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - buttonSize.width) / 2.0), y: layout.size.height - bottomInset - 44.0), size: buttonSize)
-        let rightButtonFrame = CGRect(origin: CGPoint(x: ceil(layout.size.width / 2.0 + 10.0), y: layout.size.height - bottomInset - 44.0), size: buttonSize)
-
-        var patternAlpha: CGFloat = displayOptionButtons ? 1.0 : 0.0
-
-        var patternFrame = centerButtonFrame
+        let patternAlpha: CGFloat = displayOptionButtons ? 1.0 : 0.0
+        let colorsAlpha: CGFloat = displayOptionButtons ? 1.0 : 0.0
+
+        let patternFrame: CGRect
+        let colorsFrame: CGRect
+
+        let playButtonSize = CGSize(width: 48.0, height: 48.0)
+        var centerDistance: CGFloat = 40.0
+        let buttonsVerticalOffset: CGFloat = 5.0
+
+        let playFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - playButtonSize.width) / 2.0), y: layout.size.height - bottomInset - 44.0 - buttonsVerticalOffset + floor((buttonSize.height - playButtonSize.height) / 2.0)), size: playButtonSize)
+
+        let playAlpha: CGFloat
+        if self.state.backgroundColors.count >= 2 {
+            playAlpha = displayOptionButtons ? 1.0 : 0.0
+            centerDistance += playButtonSize.width
+        } else {
+            playAlpha = 0.0
+        }
+
+        patternFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - buttonSize.width * 2.0 - centerDistance) / 2.0), y: layout.size.height - bottomInset - 44.0 - buttonsVerticalOffset), size: buttonSize)
+        colorsFrame = CGRect(origin: CGPoint(x: patternFrame.maxX + centerDistance, y: layout.size.height - bottomInset - 44.0 - buttonsVerticalOffset), size: buttonSize)
+
         transition.updateFrame(node: self.patternButtonNode, frame: patternFrame)
         transition.updateAlpha(node: self.patternButtonNode, alpha: patternAlpha)
+        transition.updateFrame(node: self.colorsButtonNode, frame: colorsFrame)
+        transition.updateAlpha(node: self.colorsButtonNode, alpha: colorsAlpha)
+
+        transition.updateFrame(node: self.playButtonNode, frame: playFrame)
+        transition.updateFrame(node: self.playButtonBackgroundNode, frame: CGRect(origin: CGPoint(), size: playFrame.size))
+        self.playButtonBackgroundNode.update(size: playFrame.size, cornerRadius: playFrame.size.height / 2.0, transition: transition)
+        transition.updateAlpha(node: self.playButtonNode, alpha: playAlpha)
     }
 
     @objc private func togglePattern() {
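The layout block above centers the pattern and colors buttons as a pair and, when the play/rotate button is visible between them (two or more background colors), widens the gap by the play button's width so the group stays visually centered. A small sketch of just that horizontal math, mirroring the frame expressions in the patch (function name is illustrative):

    import CoreGraphics

    // Returns the x origins for the pattern, play and colors controls, as laid out above.
    func optionButtonOrigins(containerWidth: CGFloat, buttonWidth: CGFloat, playWidth: CGFloat, showPlay: Bool) -> (pattern: CGFloat, play: CGFloat, colors: CGFloat) {
        var centerDistance: CGFloat = 40.0
        if showPlay {
            centerDistance += playWidth
        }
        let patternX = floor((containerWidth - buttonWidth * 2.0 - centerDistance) / 2.0)
        let colorsX = patternX + buttonWidth + centerDistance
        let playX = floor((containerWidth - playWidth) / 2.0)   // play button is always screen-centered
        return (patternX, playX, colorsX)
    }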
@@ -1011,6 +1110,22 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
         var appeared = false
         self.updateState({ current in
             var updated = current
+            if !updated.displayPatternPanel {
+                updated.colorPanelCollapsed = false
+                updated.displayPatternPanel = true
+                if current.patternWallpaper == nil, let wallpaper = wallpaper {
+                    updated.patternWallpaper = wallpaper
+                    if updated.backgroundColors.isEmpty {
+                        if let backgroundColors = backgroundColors {
+                            updated.backgroundColors = backgroundColors.0
+                        } else {
+                            updated.backgroundColors = []
+                        }
+                    }
+                    appeared = true
+                }
+            } else {
+                updated.colorPanelCollapsed = true
                 if updated.patternWallpaper != nil {
                     updated.previousPatternWallpaper = updated.patternWallpaper
                     updated.patternWallpaper = nil
@@ -1030,6 +1145,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
                     appeared = true
                 }
             }
+            }
             return updated
         }, animated: true)
 
@@ -1037,4 +1153,38 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
             self.patternPanelNode.didAppear(initialWallpaper: wallpaper, intensity: self.state.patternIntensity)
         }
     }
 
+    @objc private func toggleColors() {
+        self.updateState({ current in
+            var updated = current
+            if updated.displayPatternPanel {
+                updated.displayPatternPanel = false
+                updated.colorPanelCollapsed = false
+            } else {
+                if updated.colorPanelCollapsed {
+                    updated.colorPanelCollapsed = false
+                } else {
+                    updated.colorPanelCollapsed = true
+                }
+            }
+            updated.displayPatternPanel = false
+            return updated
+        }, animated: true)
+    }
+
+    @objc private func playPressed() {
+        if self.state.backgroundColors.count >= 3 {
+            self.backgroundNode.animateEvent(transition: .animated(duration: 0.5, curve: .spring))
+        } else {
+            self.updateState({ state in
+                var state = state
+                state.rotation = (state.rotation + 90) % 360
+                return state
+            }, animated: true)
+        }
+    }
+
+    func animateWallpaperAppeared() {
+        self.backgroundNode.animateEvent(transition: .animated(duration: 2.0, curve: .spring), extendAnimation: true)
+    }
 }
@@ -319,6 +319,10 @@ class ThemeSettingsAppIconItemNode: ListViewItemNode, ItemListItemNode {
                     bordered = false
                 case "WhiteFilled":
                     name = "⍺ White"
+                case "New1":
+                    name = item.strings.Appearance_AppIconNew1
+                case "New2":
+                    name = item.strings.Appearance_AppIconNew2
                 default:
                     break
             }
@@ -528,8 +528,10 @@ final class WallpaperColorPanelNode: ASDisplayNode {
 
         if updateLayout, let size = self.validLayout {
             if let index = self.state.selection {
+                if self.state.colors.count > index {
                     self.colorPickerNode.color = UIColor(rgb: self.state.colors[index])
                 }
+            }
 
             self.updateLayout(size: size, transition: animated ? .animated(duration: 0.3, curve: .easeInOut) : .immediate)
         }
@@ -200,6 +200,9 @@ public class WallpaperGalleryController: ViewController {
     private var patternPanelEnabled = false
     private var colorsPanelEnabled = false
 
+    private var savedPatternWallpaper: TelegramWallpaper?
+    private var savedPatternIntensity: Int32?
+
     public init(context: AccountContext, source: WallpaperListSource) {
         self.context = context
         self.source = source
@@ -600,6 +603,10 @@ public class WallpaperGalleryController: ViewController {
 
         self.galleryNode.modalAnimateIn()
         self.bindCentralItemNode(animated: false, updated: false)
+
+        if let centralItemNode = self.galleryNode.pager.centralItemNode() as? WallpaperGalleryItemNode {
+            centralItemNode.animateWallpaperAppeared()
+        }
     }
 
     private func bindCentralItemNode(animated: Bool, updated: Bool) {
@@ -615,6 +622,11 @@ public class WallpaperGalleryController: ViewController {
                 strongSelf.colorsPanelEnabled = false
                 strongSelf.colorsPanelNode?.view.endEditing(true)
 
+                if !enabled {
+                    strongSelf.savedPatternWallpaper = initialWallpaper
+                    strongSelf.savedPatternIntensity = initialWallpaper.settings?.intensity
+                }
+
                 strongSelf.patternInitialWallpaper = enabled ? initialWallpaper : nil
                 switch initialWallpaper {
                     case let .color(color):
@@ -631,7 +643,7 @@ public class WallpaperGalleryController: ViewController {
                 strongSelf.galleryNode.scrollView.isScrollEnabled = !enabled
                 if enabled {
                     strongSelf.patternPanelNode?.updateWallpapers()
-                    strongSelf.patternPanelNode?.didAppear()
+                    strongSelf.patternPanelNode?.didAppear(initialWallpaper: strongSelf.savedPatternWallpaper, intensity: strongSelf.savedPatternIntensity)
                 } else {
                     switch initialWallpaper {
                         case .color, .gradient:
@@ -657,6 +669,9 @@ public class WallpaperGalleryController: ViewController {
                 strongSelf.patternPanelEnabled = false
                 strongSelf.colorsPanelEnabled = !strongSelf.colorsPanelEnabled
                 strongSelf.galleryNode.scrollView.isScrollEnabled = !strongSelf.colorsPanelEnabled
+                if !strongSelf.colorsPanelEnabled {
+                    strongSelf.colorsPanelNode?.view.endEditing(true)
+                }
 
                 if strongSelf.colorsPanelEnabled {
                     strongSelf.colorsPanelNode?.updateState({ _ in
@@ -700,6 +715,7 @@ public class WallpaperGalleryController: ViewController {
         if updated {
             if self.colorsPanelEnabled || self.patternPanelEnabled {
                 self.colorsPanelEnabled = false
+                self.colorsPanelNode?.view.endEditing(true)
                 self.patternPanelEnabled = false
 
                 if let (layout, _) = self.validLayout {
@@ -838,6 +854,10 @@ public class WallpaperGalleryController: ViewController {
                     if let pattern = pattern, case let .file(file) = pattern {
                         let newSettings = WallpaperSettings(blur: file.settings.blur, motion: file.settings.motion, colors: colors, intensity: intensity)
                         let newWallpaper = TelegramWallpaper.file(id: file.id, accessHash: file.accessHash, isCreator: file.isCreator, isDefault: file.isDefault, isPattern: pattern.isPattern, isDark: file.isDark, slug: file.slug, file: file.file, settings: newSettings)
+
+                        strongSelf.savedPatternWallpaper = newWallpaper
+                        strongSelf.savedPatternIntensity = intensity
+
                         strongSelf.updateEntries(wallpaper: newWallpaper, preview: preview)
                     }
                 default:
@@ -127,6 +127,8 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
     private let playButtonPlayImage: UIImage?
     private let playButtonRotateImage: UIImage?
 
+    private var isReadyDisposable: Disposable?
+
     init(context: AccountContext) {
         self.context = context
         self.presentationData = context.sharedContext.currentPresentationData.with { $0 }
@@ -154,6 +156,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
         self.patternButtonNode.setEnabled(false)
 
         self.colorsButtonNode = WallpaperOptionButtonNode(title: self.presentationData.strings.WallpaperPreview_WallpaperColors, value: .colors(false, [.clear]))
 
         self.playButtonBackgroundNode = NavigationBackgroundNode(color: UIColor(white: 0.0, alpha: 0.3))
         self.playButtonNode = HighlightableButtonNode()
         self.playButtonNode.insertSubnode(self.playButtonBackgroundNode, at: 0)
@@ -192,15 +195,23 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
         self.clipsToBounds = true
         self.backgroundColor = .black
 
-        self.imageNode.imageUpdated = { [weak self] _ in
+        self.imageNode.imageUpdated = { [weak self] image in
+            if image != nil {
                 self?._ready.set(.single(Void()))
             }
+        }
+        self.isReadyDisposable = (self.nativeNode.isReady
+        |> filter { $0 }
+        |> take(1)
+        |> deliverOnMainQueue).start(next: { [weak self] _ in
+            self?._ready.set(.single(Void()))
+        })
 
         self.imageNode.view.contentMode = .scaleAspectFill
         self.imageNode.clipsToBounds = true
 
         self.addSubnode(self.wrapperNode)
-        self.addSubnode(self.statusNode)
+        //self.addSubnode(self.statusNode)
         self.addSubnode(self.messagesContainerNode)
 
         self.addSubnode(self.blurButtonNode)
@@ -220,6 +231,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
         self.fetchDisposable.dispose()
         self.statusDisposable.dispose()
         self.colorDisposable.dispose()
+        self.isReadyDisposable?.dispose()
     }
 
     var cropRect: CGRect? {
@@ -1159,4 +1171,8 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
     override func visibilityUpdated(isVisible: Bool) {
         super.visibilityUpdated(isVisible: isVisible)
     }
+
+    func animateWallpaperAppeared() {
+        self.nativeNode.animateEvent(transition: .animated(duration: 2.0, curve: .spring), extendAnimation: true)
+    }
 }
@ -13,6 +13,29 @@ import MergeLists
|
|||||||
private let itemSize = CGSize(width: 88.0, height: 88.0)
|
private let itemSize = CGSize(width: 88.0, height: 88.0)
|
||||||
private let inset: CGFloat = 12.0
|
private let inset: CGFloat = 12.0
|
||||||
|
|
||||||
|
private func intensityToSliderValue(_ value: Int32, allowDark: Bool) -> CGFloat {
|
||||||
|
if allowDark {
|
||||||
|
if value < 0 {
|
||||||
|
return max(0.0, min(100.0, CGFloat(abs(value))))
|
||||||
|
} else {
|
||||||
|
return 100.0 + max(0.0, min(100.0, CGFloat(value)))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return CGFloat(max(value, 0)) * 2.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private func sliderValueToIntensity(_ value: CGFloat, allowDark: Bool) -> Int32 {
|
||||||
|
if allowDark {
|
||||||
|
if value < 100.0 {
|
||||||
|
return -Int32(value)
|
||||||
|
} else {
|
||||||
|
return Int32(value - 100.0)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Int32(value / 2.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private struct WallpaperPatternEntry: Comparable, Identifiable {
|
private struct WallpaperPatternEntry: Comparable, Identifiable {
|
||||||
let index: Int
|
let index: Int
|
||||||
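
Note (illustrative example, not part of the diff): with allowDark the 0...200 slider is split into a dark half (0...100, mapping to intensities -100...0) and a light half (100...200, mapping to 0...100); without it the 0...100 intensity range is simply stretched over the slider. A quick round trip through the two helpers above:

    let slider = intensityToSliderValue(-40, allowDark: true)        // 40.0 (dark half)
    let intensity = sliderValueToIntensity(slider, allowDark: true)  // -40
    let defaultValue = intensityToSliderValue(50, allowDark: true)   // 150.0, replacing the old hard-coded sliderView.value = 150.0
    let lightOnly = intensityToSliderValue(50, allowDark: false)     // 100.0 on the stretched scale
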
@@ -214,9 +237,12 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
 
 var patternChanged: ((TelegramWallpaper?, Int32?, Bool) -> Void)?
 
+private let allowDark: Bool
+
 init(context: AccountContext, theme: PresentationTheme, strings: PresentationStrings) {
 self.context = context
 self.theme = theme
+self.allowDark = theme.overallDarkAppearance
 
 self.backgroundNode = NavigationBackgroundNode(color: theme.chat.inputPanel.panelBackgroundColor)
 
@@ -275,16 +301,18 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
 sliderView.disableSnapToPositions = true
 sliderView.trackCornerRadius = 1.0
 sliderView.lineSize = 2.0
-sliderView.minimumValue = 0.0
 sliderView.startValue = 0.0
+sliderView.minimumValue = 0.0
 sliderView.maximumValue = 200.0
+if self.allowDark {
 sliderView.positionsCount = 3
+}
 sliderView.useLinesForPositions = true
-sliderView.value = 150.0
+sliderView.value = intensityToSliderValue(50, allowDark: self.allowDark)
 sliderView.disablesInteractiveTransitionGestureRecognizer = true
 sliderView.backgroundColor = .clear
 sliderView.backColor = self.theme.list.disclosureArrowColor
-sliderView.trackColor = self.theme.list.itemAccentColor
+sliderView.trackColor = sliderView.backColor//self.theme.list.itemAccentColor
 
 self.view.addSubview(sliderView)
 sliderView.addTarget(self, action: #selector(self.sliderValueChanged), for: .valueChanged)
@@ -335,7 +363,7 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
 if let strongSelf = self {
 strongSelf.currentWallpaper = updatedWallpaper
 if let sliderView = strongSelf.sliderView {
-strongSelf.patternChanged?(updatedWallpaper, Int32(sliderView.value - 100.0), false)
+strongSelf.patternChanged?(updatedWallpaper, sliderValueToIntensity(sliderView.value, allowDark: strongSelf.allowDark), false)
 }
 if let subnodes = strongSelf.scrollNode.subnodes {
 for case let subnode as SettingsThemeWallpaperNode in subnodes {
@@ -377,12 +405,19 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
 }
 
 if let wallpaper = self.currentWallpaper {
-self.patternChanged?(wallpaper, Int32(sliderView.value - 100.0), sliderView.isTracking)
+self.patternChanged?(wallpaper, sliderValueToIntensity(sliderView.value, allowDark: self.allowDark), sliderView.isTracking)
 }
 }
 
 func didAppear(initialWallpaper: TelegramWallpaper? = nil, intensity: Int32? = nil) {
-var wallpaper = initialWallpaper ?? self.wallpapers.first
+let wallpaper: TelegramWallpaper?
+
+switch initialWallpaper {
+case let .file(id, accessHash, isCreator, isDefault, isPattern, isDark, slug, file, _):
+wallpaper = .file(id: id, accessHash: accessHash, isCreator: isCreator, isDefault: isDefault, isPattern: isPattern, isDark: isDark, slug: slug, file: file, settings: self.wallpapers[0].settings ?? WallpaperSettings())
+default:
+wallpaper = self.wallpapers.first
+}
 
 if let wallpaper = wallpaper {
 var selectedFileId: Int64?
@@ -391,7 +426,7 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
 }
 
 self.currentWallpaper = wallpaper
-self.sliderView?.value = CGFloat(intensity.flatMap { $0 + 100 } ?? 150)
+self.sliderView?.value = intensity.flatMap { intensityToSliderValue($0, allowDark: self.allowDark) } ?? intensityToSliderValue(50, allowDark: self.allowDark)
 
 self.scrollNode.view.contentOffset = CGPoint()
 
@@ -407,8 +442,8 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
 }
 }
 
-if initialWallpaper == nil, let wallpaper = self.currentWallpaper, let sliderView = self.sliderView {
-self.patternChanged?(wallpaper, Int32(sliderView.value - 100.0), false)
+if let wallpaper = self.currentWallpaper, let sliderView = self.sliderView {
+self.patternChanged?(wallpaper, sliderValueToIntensity(sliderView.value, allowDark: self.allowDark), false)
 }
 
 if let selectedNode = selectedNode {

@@ -211,8 +211,12 @@ final class StickerPackPreviewControllerNode: ViewControllerTracingNode, UIScrol
 if let stickerPack = strongSelf.stickerPack, case let .result(info, _, _) = stickerPack, info.id.namespace == Namespaces.ItemCollection.CloudStickerPacks {
 if strongSelf.sendSticker != nil {
 menuItems.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.StickerPack_Send, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Resend"), color: theme.contextMenu.primaryColor) }, action: { [weak self] _, f in
-if let strongSelf = self, let peekController = strongSelf.peekController, let animationNode = (peekController.contentNode as? StickerPreviewPeekContentNode)?.animationNode {
+if let strongSelf = self, let peekController = strongSelf.peekController {
+if let animationNode = (peekController.contentNode as? StickerPreviewPeekContentNode)?.animationNode {
 let _ = strongSelf.sendSticker?(.standalone(media: item.file), animationNode, animationNode.bounds)
+} else if let imageNode = (peekController.contentNode as? StickerPreviewPeekContentNode)?.imageNode {
+let _ = strongSelf.sendSticker?(.standalone(media: item.file), imageNode, imageNode.bounds)
+}
 }
 f(.default)
 })))

@@ -70,7 +70,7 @@ public final class StickerPreviewPeekContentNode: ASDisplayNode, PeekControllerC
 private let item: StickerPreviewPeekItem
 
 private var textNode: ASTextNode
-private var imageNode: TransformImageNode
+public var imageNode: TransformImageNode
 public var animationNode: AnimatedStickerNode?
 
 private var containerLayout: (ContainerViewLayout, CGFloat)?

@@ -859,3 +859,59 @@ public final class EmptyMediaResource: TelegramMediaResource {
 return to is EmptyMediaResource
 }
 }
+
+public struct WallpaperDataResourceId: MediaResourceId {
+public var uniqueId: String {
+return "wallpaper-\(self.slug)"
+}
+
+public var hashValue: Int {
+return self.slug.hashValue
+}
+
+public var slug: String
+
+public init(slug: String) {
+self.slug = slug
+}
+
+public func isEqual(to: MediaResourceId) -> Bool {
+guard let to = to as? WallpaperDataResourceId else {
+return false
+}
+if self.slug != to.slug {
+return false
+}
+return true
+}
+}
+
+public final class WallpaperDataResource: TelegramMediaResource {
+public let slug: String
+
+public init(slug: String) {
+self.slug = slug
+}
+
+public init(decoder: PostboxDecoder) {
+self.slug = decoder.decodeStringForKey("s", orElse: "")
+}
+
+public func encode(_ encoder: PostboxEncoder) {
+encoder.encodeString(self.slug, forKey: "s")
+}
+
+public var id: MediaResourceId {
+return WallpaperDataResourceId(slug: self.slug)
+}
+
+public func isEqual(to: MediaResource) -> Bool {
+guard let to = to as? WallpaperDataResource else {
+return false
+}
+if self.slug != to.slug {
+return false
+}
+return true
+}
+}
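
Note (usage sketch, not part of the diff; the slug value is made up): the new resource is keyed purely by its slug, so two instances built from the same slug compare equal and resolve to the same cache identity.

    let a = WallpaperDataResource(slug: "some-pattern")
    let b = WallpaperDataResource(slug: "some-pattern")
    // a.id.uniqueId == "wallpaper-some-pattern"
    // a.isEqual(to: b) == true, so both refer to the same stored wallpaper data
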

@@ -249,7 +249,7 @@ public struct SecretChatLayerNegotiationState: PostboxCoding, Equatable {
 }
 
 public init(decoder: PostboxDecoder) {
-self.activeLayer = SecretChatSequenceBasedLayer(rawValue: decoder.decodeInt32ForKey("a", orElse: 0)) ?? .layer46
+self.activeLayer = SecretChatSequenceBasedLayer(rawValue: decoder.decodeInt32ForKey("a", orElse: 0)) ?? .layer73
 self.locallyRequestedLayer = decoder.decodeOptionalInt32ForKey("lr")
 self.remotelyRequestedLayer = decoder.decodeOptionalInt32ForKey("rr")
 }

@@ -142,7 +142,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
 dict[767652808] = { return Api.InputEncryptedFile.parse_inputEncryptedFileBigUploaded($0) }
 dict[1304052993] = { return Api.account.Takeout.parse_takeout($0) }
 dict[-1456996667] = { return Api.messages.InactiveChats.parse_inactiveChats($0) }
-dict[-1464184409] = { return Api.GroupCallParticipant.parse_groupCallParticipant($0) }
+dict[-341428482] = { return Api.GroupCallParticipant.parse_groupCallParticipant($0) }
 dict[1443858741] = { return Api.messages.SentEncryptedMessage.parse_sentEncryptedMessage($0) }
 dict[-1802240206] = { return Api.messages.SentEncryptedMessage.parse_sentEncryptedFile($0) }
 dict[289586518] = { return Api.SavedContact.parse_savedPhoneContact($0) }
@@ -283,6 +283,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
 dict[192428418] = { return Api.Update.parse_updateGroupCallConnection($0) }
 dict[136574537] = { return Api.messages.VotesList.parse_votesList($0) }
 dict[1558266229] = { return Api.PopularContact.parse_popularContact($0) }
+dict[-592373577] = { return Api.GroupCallParticipantVideoSourceGroup.parse_groupCallParticipantVideoSourceGroup($0) }
 dict[-373643672] = { return Api.FolderPeer.parse_folderPeer($0) }
 dict[367766557] = { return Api.ChannelParticipant.parse_channelParticipant($0) }
 dict[-1557620115] = { return Api.ChannelParticipant.parse_channelParticipantSelf($0) }
@@ -579,11 +580,12 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
 dict[-1160215659] = { return Api.InputMessage.parse_inputMessageReplyTo($0) }
 dict[-2037963464] = { return Api.InputMessage.parse_inputMessagePinned($0) }
 dict[-1392895362] = { return Api.InputMessage.parse_inputMessageCallbackQuery($0) }
+dict[2028213859] = { return Api.GroupCallParticipantVideo.parse_groupCallParticipantVideo($0) }
 dict[-58224696] = { return Api.PhoneCallProtocol.parse_phoneCallProtocol($0) }
 dict[-1237848657] = { return Api.StatsDateRangeDays.parse_statsDateRangeDays($0) }
 dict[-275956116] = { return Api.messages.AffectedFoundMessages.parse_affectedFoundMessages($0) }
 dict[-1539849235] = { return Api.WallPaper.parse_wallPaper($0) }
-dict[-1963717851] = { return Api.WallPaper.parse_wallPaperNoFile($0) }
+dict[-528465642] = { return Api.WallPaper.parse_wallPaperNoFile($0) }
 dict[-1938715001] = { return Api.messages.Messages.parse_messages($0) }
 dict[978610270] = { return Api.messages.Messages.parse_messagesSlice($0) }
 dict[1682413576] = { return Api.messages.Messages.parse_channelMessages($0) }
@@ -690,7 +692,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
 dict[2104790276] = { return Api.DataJSON.parse_dataJSON($0) }
 dict[-433014407] = { return Api.InputWallPaper.parse_inputWallPaper($0) }
 dict[1913199744] = { return Api.InputWallPaper.parse_inputWallPaperSlug($0) }
-dict[-2077770836] = { return Api.InputWallPaper.parse_inputWallPaperNoFile($0) }
+dict[-1770371538] = { return Api.InputWallPaper.parse_inputWallPaperNoFile($0) }
 dict[-1118798639] = { return Api.InputThemeSettings.parse_inputThemeSettings($0) }
 dict[1251549527] = { return Api.InputStickeredMedia.parse_inputStickeredMediaPhoto($0) }
 dict[70813275] = { return Api.InputStickeredMedia.parse_inputStickeredMediaDocument($0) }
@@ -1085,6 +1087,8 @@ public struct Api {
 _1.serialize(buffer, boxed)
 case let _1 as Api.PopularContact:
 _1.serialize(buffer, boxed)
+case let _1 as Api.GroupCallParticipantVideoSourceGroup:
+_1.serialize(buffer, boxed)
 case let _1 as Api.FolderPeer:
 _1.serialize(buffer, boxed)
 case let _1 as Api.ChannelParticipant:
@@ -1333,6 +1337,8 @@ public struct Api {
 _1.serialize(buffer, boxed)
 case let _1 as Api.InputMessage:
 _1.serialize(buffer, boxed)
+case let _1 as Api.GroupCallParticipantVideo:
+_1.serialize(buffer, boxed)
 case let _1 as Api.PhoneCallProtocol:
 _1.serialize(buffer, boxed)
 case let _1 as Api.StatsDateRangeDays:

@@ -3604,13 +3604,13 @@ public extension Api {
 
 }
 public enum GroupCallParticipant: TypeConstructorDescription {
-case groupCallParticipant(flags: Int32, peer: Api.Peer, date: Int32, activeDate: Int32?, source: Int32, volume: Int32?, about: String?, raiseHandRating: Int64?, video: Api.DataJSON?, presentation: Api.DataJSON?)
+case groupCallParticipant(flags: Int32, peer: Api.Peer, date: Int32, activeDate: Int32?, source: Int32, volume: Int32?, about: String?, raiseHandRating: Int64?, video: Api.GroupCallParticipantVideo?, presentation: Api.GroupCallParticipantVideo?)
 
 public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
 switch self {
 case .groupCallParticipant(let flags, let peer, let date, let activeDate, let source, let volume, let about, let raiseHandRating, let video, let presentation):
 if boxed {
-buffer.appendInt32(-1464184409)
+buffer.appendInt32(-341428482)
 }
 serializeInt32(flags, buffer: buffer, boxed: false)
 peer.serialize(buffer, true)
@@ -3652,13 +3652,13 @@ public extension Api {
 if Int(_1!) & Int(1 << 11) != 0 {_7 = parseString(reader) }
 var _8: Int64?
 if Int(_1!) & Int(1 << 13) != 0 {_8 = reader.readInt64() }
-var _9: Api.DataJSON?
+var _9: Api.GroupCallParticipantVideo?
 if Int(_1!) & Int(1 << 6) != 0 {if let signature = reader.readInt32() {
-_9 = Api.parse(reader, signature: signature) as? Api.DataJSON
+_9 = Api.parse(reader, signature: signature) as? Api.GroupCallParticipantVideo
 } }
-var _10: Api.DataJSON?
+var _10: Api.GroupCallParticipantVideo?
 if Int(_1!) & Int(1 << 14) != 0 {if let signature = reader.readInt32() {
-_10 = Api.parse(reader, signature: signature) as? Api.DataJSON
+_10 = Api.parse(reader, signature: signature) as? Api.GroupCallParticipantVideo
 } }
 let _c1 = _1 != nil
 let _c2 = _2 != nil
@@ -7358,6 +7358,50 @@ public extension Api {
 }
 }
 
+}
+public enum GroupCallParticipantVideoSourceGroup: TypeConstructorDescription {
+case groupCallParticipantVideoSourceGroup(semantics: String, sources: [Int32])
+
+public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
+switch self {
+case .groupCallParticipantVideoSourceGroup(let semantics, let sources):
+if boxed {
+buffer.appendInt32(-592373577)
+}
+serializeString(semantics, buffer: buffer, boxed: false)
+buffer.appendInt32(481674261)
+buffer.appendInt32(Int32(sources.count))
+for item in sources {
+serializeInt32(item, buffer: buffer, boxed: false)
+}
+break
+}
+}
+
+public func descriptionFields() -> (String, [(String, Any)]) {
+switch self {
+case .groupCallParticipantVideoSourceGroup(let semantics, let sources):
+return ("groupCallParticipantVideoSourceGroup", [("semantics", semantics), ("sources", sources)])
+}
+}
+
+public static func parse_groupCallParticipantVideoSourceGroup(_ reader: BufferReader) -> GroupCallParticipantVideoSourceGroup? {
+var _1: String?
+_1 = parseString(reader)
+var _2: [Int32]?
+if let _ = reader.readInt32() {
+_2 = Api.parseVector(reader, elementSignature: -1471112230, elementType: Int32.self)
+}
+let _c1 = _1 != nil
+let _c2 = _2 != nil
+if _c1 && _c2 {
+return Api.GroupCallParticipantVideoSourceGroup.groupCallParticipantVideoSourceGroup(semantics: _1!, sources: _2!)
+}
+else {
+return nil
+}
+}
+
 }
 public enum FolderPeer: TypeConstructorDescription {
 case folderPeer(peer: Api.Peer, folderId: Int32)
@@ -14888,6 +14932,54 @@ public extension Api {
 }
 }
 
+}
+public enum GroupCallParticipantVideo: TypeConstructorDescription {
+case groupCallParticipantVideo(flags: Int32, endpoint: String, sourceGroups: [Api.GroupCallParticipantVideoSourceGroup])
+
+public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
+switch self {
+case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups):
+if boxed {
+buffer.appendInt32(2028213859)
+}
+serializeInt32(flags, buffer: buffer, boxed: false)
+serializeString(endpoint, buffer: buffer, boxed: false)
+buffer.appendInt32(481674261)
+buffer.appendInt32(Int32(sourceGroups.count))
+for item in sourceGroups {
+item.serialize(buffer, true)
+}
+break
+}
+}
+
+public func descriptionFields() -> (String, [(String, Any)]) {
+switch self {
+case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups):
+return ("groupCallParticipantVideo", [("flags", flags), ("endpoint", endpoint), ("sourceGroups", sourceGroups)])
+}
+}
+
+public static func parse_groupCallParticipantVideo(_ reader: BufferReader) -> GroupCallParticipantVideo? {
+var _1: Int32?
+_1 = reader.readInt32()
+var _2: String?
+_2 = parseString(reader)
+var _3: [Api.GroupCallParticipantVideoSourceGroup]?
+if let _ = reader.readInt32() {
+_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.GroupCallParticipantVideoSourceGroup.self)
+}
+let _c1 = _1 != nil
+let _c2 = _2 != nil
+let _c3 = _3 != nil
+if _c1 && _c2 && _c3 {
+return Api.GroupCallParticipantVideo.groupCallParticipantVideo(flags: _1!, endpoint: _2!, sourceGroups: _3!)
+}
+else {
+return nil
+}
+}
+
 }
 public enum PhoneCallProtocol: TypeConstructorDescription {
 case phoneCallProtocol(flags: Int32, minLayer: Int32, maxLayer: Int32, libraryVersions: [String])
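
Note (illustration; the helper name is invented, Buffer and serializeInt32 are the existing MTProto primitives used above): both new constructors write their arrays the standard TL way - the bare vector constructor 0x1cb5c415 (481674261), a 32-bit element count, then the elements - which is the pattern repeated in the generated serializers above.

    func serializeInt32Vector(_ items: [Int32], into buffer: Buffer) {
        buffer.appendInt32(481674261)          // vector#1cb5c415
        buffer.appendInt32(Int32(items.count)) // element count
        for item in items {
            serializeInt32(item, buffer: buffer, boxed: false) // bare int32 payload
        }
    }
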
@@ -14981,7 +15073,7 @@ public extension Api {
 }
 public enum WallPaper: TypeConstructorDescription {
 case wallPaper(id: Int64, flags: Int32, accessHash: Int64, slug: String, document: Api.Document, settings: Api.WallPaperSettings?)
-case wallPaperNoFile(flags: Int32, settings: Api.WallPaperSettings?)
+case wallPaperNoFile(id: Int64, flags: Int32, settings: Api.WallPaperSettings?)
 
 public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
 switch self {
@@ -14996,10 +15088,11 @@ public extension Api {
 document.serialize(buffer, true)
 if Int(flags) & Int(1 << 2) != 0 {settings!.serialize(buffer, true)}
 break
-case .wallPaperNoFile(let flags, let settings):
+case .wallPaperNoFile(let id, let flags, let settings):
 if boxed {
-buffer.appendInt32(-1963717851)
+buffer.appendInt32(-528465642)
 }
+serializeInt64(id, buffer: buffer, boxed: false)
 serializeInt32(flags, buffer: buffer, boxed: false)
 if Int(flags) & Int(1 << 2) != 0 {settings!.serialize(buffer, true)}
 break
@@ -15010,8 +15103,8 @@ public extension Api {
 switch self {
 case .wallPaper(let id, let flags, let accessHash, let slug, let document, let settings):
 return ("wallPaper", [("id", id), ("flags", flags), ("accessHash", accessHash), ("slug", slug), ("document", document), ("settings", settings)])
-case .wallPaperNoFile(let flags, let settings):
-return ("wallPaperNoFile", [("flags", flags), ("settings", settings)])
+case .wallPaperNoFile(let id, let flags, let settings):
+return ("wallPaperNoFile", [("id", id), ("flags", flags), ("settings", settings)])
 }
 }
 
@@ -15046,16 +15139,19 @@ public extension Api {
 }
 }
 public static func parse_wallPaperNoFile(_ reader: BufferReader) -> WallPaper? {
-var _1: Int32?
-_1 = reader.readInt32()
-var _2: Api.WallPaperSettings?
-if Int(_1!) & Int(1 << 2) != 0 {if let signature = reader.readInt32() {
-_2 = Api.parse(reader, signature: signature) as? Api.WallPaperSettings
+var _1: Int64?
+_1 = reader.readInt64()
+var _2: Int32?
+_2 = reader.readInt32()
+var _3: Api.WallPaperSettings?
+if Int(_2!) & Int(1 << 2) != 0 {if let signature = reader.readInt32() {
+_3 = Api.parse(reader, signature: signature) as? Api.WallPaperSettings
 } }
 let _c1 = _1 != nil
-let _c2 = (Int(_1!) & Int(1 << 2) == 0) || _2 != nil
-if _c1 && _c2 {
-return Api.WallPaper.wallPaperNoFile(flags: _1!, settings: _2)
+let _c2 = _2 != nil
+let _c3 = (Int(_2!) & Int(1 << 2) == 0) || _3 != nil
+if _c1 && _c2 && _c3 {
+return Api.WallPaper.wallPaperNoFile(id: _1!, flags: _2!, settings: _3)
 }
 else {
 return nil
@@ -18028,7 +18124,7 @@ public extension Api {
 public enum InputWallPaper: TypeConstructorDescription {
 case inputWallPaper(id: Int64, accessHash: Int64)
 case inputWallPaperSlug(slug: String)
-case inputWallPaperNoFile
+case inputWallPaperNoFile(id: Int64)
 
 public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
 switch self {
@@ -18045,11 +18141,11 @@ public extension Api {
 }
 serializeString(slug, buffer: buffer, boxed: false)
 break
-case .inputWallPaperNoFile:
+case .inputWallPaperNoFile(let id):
 if boxed {
-buffer.appendInt32(-2077770836)
+buffer.appendInt32(-1770371538)
 }
+serializeInt64(id, buffer: buffer, boxed: false)
 break
 }
 }
@@ -18060,8 +18156,8 @@ public extension Api {
 return ("inputWallPaper", [("id", id), ("accessHash", accessHash)])
 case .inputWallPaperSlug(let slug):
 return ("inputWallPaperSlug", [("slug", slug)])
-case .inputWallPaperNoFile:
-return ("inputWallPaperNoFile", [])
+case .inputWallPaperNoFile(let id):
+return ("inputWallPaperNoFile", [("id", id)])
 }
 }
 
@@ -18091,7 +18187,15 @@ public extension Api {
 }
 }
 public static func parse_inputWallPaperNoFile(_ reader: BufferReader) -> InputWallPaper? {
-return Api.InputWallPaper.inputWallPaperNoFile
+var _1: Int64?
+_1 = reader.readInt64()
+let _c1 = _1 != nil
+if _c1 {
+return Api.InputWallPaper.inputWallPaperNoFile(id: _1!)
+}
+else {
+return nil
+}
 }
 
 }

@@ -7872,17 +7872,19 @@ public extension Api {
 })
 }
 
-public static func editGroupCallParticipant(flags: Int32, call: Api.InputGroupCall, participant: Api.InputPeer, muted: Api.Bool?, volume: Int32?, raiseHand: Api.Bool?, videoMuted: Api.Bool?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
+public static func editGroupCallParticipant(flags: Int32, call: Api.InputGroupCall, participant: Api.InputPeer, muted: Api.Bool?, volume: Int32?, raiseHand: Api.Bool?, videoStopped: Api.Bool?, videoPaused: Api.Bool?, presentationPaused: Api.Bool?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
 let buffer = Buffer()
-buffer.appendInt32(-1362751260)
+buffer.appendInt32(-1524155713)
 serializeInt32(flags, buffer: buffer, boxed: false)
 call.serialize(buffer, true)
 participant.serialize(buffer, true)
 if Int(flags) & Int(1 << 0) != 0 {muted!.serialize(buffer, true)}
 if Int(flags) & Int(1 << 1) != 0 {serializeInt32(volume!, buffer: buffer, boxed: false)}
 if Int(flags) & Int(1 << 2) != 0 {raiseHand!.serialize(buffer, true)}
-if Int(flags) & Int(1 << 3) != 0 {videoMuted!.serialize(buffer, true)}
-return (FunctionDescription(name: "phone.editGroupCallParticipant", parameters: [("flags", flags), ("call", call), ("participant", participant), ("muted", muted), ("volume", volume), ("raiseHand", raiseHand), ("videoMuted", videoMuted)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
+if Int(flags) & Int(1 << 3) != 0 {videoStopped!.serialize(buffer, true)}
+if Int(flags) & Int(1 << 4) != 0 {videoPaused!.serialize(buffer, true)}
+if Int(flags) & Int(1 << 5) != 0 {presentationPaused!.serialize(buffer, true)}
+return (FunctionDescription(name: "phone.editGroupCallParticipant", parameters: [("flags", flags), ("call", call), ("participant", participant), ("muted", muted), ("volume", volume), ("raiseHand", raiseHand), ("videoStopped", videoStopped), ("videoPaused", videoPaused), ("presentationPaused", presentationPaused)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
 let reader = BufferReader(buffer)
 var result: Api.Updates?
 if let signature = reader.readInt32() {
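
Note (sketch under assumptions, not from the diff; the wants... booleans and the Api.Bool case names are stand-ins): a caller of the widened phone.editGroupCallParticipant is expected to set one flag bit per optional argument it passes, matching the bits checked by the serializer above (muted 1 << 0, volume 1 << 1, raiseHand 1 << 2, videoStopped 1 << 3, videoPaused 1 << 4, presentationPaused 1 << 5).

    var flags: Int32 = 0
    var videoPaused: Api.Bool? = nil
    if wantsVideoPaused {            // hypothetical condition
        flags |= 1 << 4
        videoPaused = .boolTrue      // assumed Api.Bool case
    }
    var presentationPaused: Api.Bool? = nil
    if wantsPresentationPaused {     // hypothetical condition
        flags |= 1 << 5
        presentationPaused = .boolTrue
    }
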

@@ -47,6 +47,7 @@ swift_library(
 "//submodules/MapResourceToAvatarSizes:MapResourceToAvatarSizes",
 "//submodules/TextFormat:TextFormat",
 "//submodules/Markdown:Markdown",
+"//submodules/ChatTitleActivityNode:ChatTitleActivityNode",
 ],
 visibility = [
 "//visibility:public",

@@ -11,6 +11,7 @@ enum CallControllerButtonsSpeakerMode: Equatable {
 case generic
 case airpods
 case airpodsPro
+case airpodsMax
 }
 
 case none
@@ -51,6 +52,7 @@ private enum ButtonDescription: Equatable {
 case bluetooth
 case airpods
 case airpodsPro
+case airpodsMax
 case headphones
 }
 
@@ -215,6 +217,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
 soundOutput = .airpods
 case .airpodsPro:
 soundOutput = .airpodsPro
+case .airpodsMax:
+soundOutput = .airpodsMax
 }
 }
 
@@ -306,6 +310,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
 soundOutput = .airpods
 case .airpodsPro:
 soundOutput = .airpodsPro
+case .airpodsMax:
+soundOutput = .airpodsMax
 }
 }
 
@@ -362,6 +368,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
 soundOutput = .airpods
 case .airpodsPro:
 soundOutput = .airpodsPro
+case .airpodsMax:
+soundOutput = .airpodsMax
 }
 }
 
@@ -468,6 +476,9 @@ final class CallControllerButtonsNode: ASDisplayNode {
 case .airpodsPro:
 image = .airpodsPro
 title = strings.Call_Audio
+case .airpodsMax:
+image = .airpodsMax
+title = strings.Call_Audio
 case .headphones:
 image = .headphones
 title = strings.Call_Audio

@@ -16,6 +16,7 @@ import CallsEmoji
 import TooltipUI
 import AlertUI
 import PresentationDataUtils
+import DeviceAccess
 
 private func interpolateFrame(from fromValue: CGRect, to toValue: CGRect, t: CGFloat) -> CGRect {
 return CGRect(x: floorToScreenPixels(toValue.origin.x * t + fromValue.origin.x * (1.0 - t)), y: floorToScreenPixels(toValue.origin.y * t + fromValue.origin.y * (1.0 - t)), width: floorToScreenPixels(toValue.size.width * t + fromValue.size.width * (1.0 - t)), height: floorToScreenPixels(toValue.size.height * t + fromValue.size.height * (1.0 - t)))
@@ -559,6 +560,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
 switch callState.state {
 case .active:
 if strongSelf.outgoingVideoNodeValue == nil {
+DeviceAccess.authorizeAccess(to: .camera(.videoCall), onlyCheck: true, presentationData: strongSelf.presentationData, present: { [weak self] c, a in
+if let strongSelf = self {
+strongSelf.present?(c)
+}
+}, openSettings: { [weak self] in
+self?.sharedContext.applicationBindings.openSettings()
+}, _: { [weak self] ready in
+guard let strongSelf = self, ready else {
+return
+}
 let proceed = {
 strongSelf.displayedCameraConfirmation = true
 switch callState.videoState {
@@ -578,6 +589,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
 proceed()
 })]))
 }
+})
 } else {
 strongSelf.call.disableVideo()
 strongSelf.cancelScheduledUIHiding()
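
Note (sketch, not part of the diff; present, openAppSettings and enableOutgoingVideo are hypothetical stand-ins for the controller's own hooks): the added DeviceAccess call gates outgoing video on camera permission - the unnamed trailing closure reports whether access is available and the flow only continues when it is.

    DeviceAccess.authorizeAccess(to: .camera(.videoCall), onlyCheck: true, presentationData: presentationData, present: { c, _ in
        present(c)              // show the permission alert
    }, openSettings: {
        openAppSettings()       // jump to the app's Settings page
    }, _: { ready in
        guard ready else { return }
        enableOutgoingVideo()   // mirrors the `proceed` path above
    })
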

@@ -40,7 +40,7 @@ final class GroupVideoNode: ASDisplayNode {
 return self.readyPromise.get()
 }
 
-init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?) {
+init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?, disabledText: String? = nil) {
 self.sourceContainerNode = PinchSourceContainerNode()
 self.containerNode = ASDisplayNode()
 self.videoViewContainer = UIView()
@@ -139,17 +139,33 @@ final class GroupVideoNode: ASDisplayNode {
 if withBackground {
 self.backgroundColor = .black
 }
+var snapshotView: UIView?
+if let snapshot = self.videoView.view.snapshotView(afterScreenUpdates: false) {
+snapshotView = snapshot
+snapshot.transform = self.videoView.view.transform
+snapshot.frame = self.videoView.view.frame
+self.videoView.view.superview?.insertSubview(snapshot, aboveSubview: self.videoView.view)
+}
 UIView.transition(with: withBackground ? self.videoViewContainer : self.view, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: {
 UIView.performWithoutAnimation {
 self.updateIsBlurred(isBlurred: true, light: false, animated: false)
 }
 }) { finished in
 self.backgroundColor = nil
+if let snapshotView = snapshotView {
+Queue.mainQueue().after(0.3) {
+snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
+snapshotView?.removeFromSuperview()
+})
+self.updateIsBlurred(isBlurred: false)
+}
+} else {
 Queue.mainQueue().after(0.4) {
 self.updateIsBlurred(isBlurred: false)
 }
 }
 }
+}
 
 @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
 if case .ended = recognizer.state {
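
Note (illustration of the technique, not the node's actual code; videoView here is just any UIView): the flip hunk above covers the video with a static snapshot, runs the flip underneath it, and only fades the snapshot out once the new content is in place, hiding half-rendered frames during the transition.

    if let snapshot = videoView.snapshotView(afterScreenUpdates: false) {
        snapshot.frame = videoView.frame
        videoView.superview?.insertSubview(snapshot, aboveSubview: videoView)
        UIView.transition(with: videoView, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: {}, completion: { _ in
            UIView.animate(withDuration: 0.2, delay: 0.3, options: [], animations: {
                snapshot.alpha = 0.0
            }, completion: { _ in
                snapshot.removeFromSuperview()
            })
        })
    }
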
@@ -260,10 +276,11 @@ final class GroupVideoNode: ASDisplayNode {
 let transformScale: CGFloat = rotatedVideoFrame.width / normalizedVideoSize.width
 transition.updateTransformScale(layer: self.videoViewContainer.layer, scale: transformScale)
 
-
 if let backdropVideoView = self.backdropVideoView {
 backdropVideoView.view.alpha = 0.995
 
+let topFrame = rotatedVideoFrame
+
 rotatedVideoSize = filledSize
 var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize)
 rotatedVideoFrame.origin.x = floor(rotatedVideoFrame.origin.x)
@@ -271,11 +288,28 @@ final class GroupVideoNode: ASDisplayNode {
 rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width)
 rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height)
+
+let isEnabled = !topFrame.contains(rotatedVideoFrame)
+
 let normalizedVideoSize = rotatedVideoFrame.size.aspectFilled(CGSize(width: 1080.0, height: 1080.0))
-transition.updatePosition(layer: backdropVideoView.view.layer, position: rotatedVideoFrame.center)
+if isEnabled {
+self.backdropVideoView?.updateIsEnabled(true)
+self.backdropVideoView?.view.isHidden = false
+self.backdropEffectView?.isHidden = false
+}
+transition.updatePosition(layer: backdropVideoView.view.layer, position: rotatedVideoFrame.center, force: true, completion: { [weak self] value in
+guard let strongSelf = self, value else {
+return
+}
+if !isEnabled {
+strongSelf.backdropVideoView?.updateIsEnabled(false)
+strongSelf.backdropVideoView?.view.isHidden = true
+strongSelf.backdropEffectView?.isHidden = false
+}
+})
 transition.updateBounds(layer: backdropVideoView.view.layer, bounds: CGRect(origin: CGPoint(), size: normalizedVideoSize))
 
 let transformScale: CGFloat = rotatedVideoFrame.width / normalizedVideoSize.width
 
 transition.updateTransformScale(layer: self.backdropVideoViewContainer.layer, scale: transformScale)
 
 let transition: ContainedViewLayoutTransition = .immediate
@@ -287,18 +321,9 @@ final class GroupVideoNode: ASDisplayNode {
 let squareBounds = CGRect(x: (bounds.width - maxSide) / 2.0, y: (bounds.height - maxSide) / 2.0, width: maxSide, height: maxSide)
 
 if case let .animated(duration, .spring) = transition {
-if false, #available(iOS 10.0, *) {
-let timing = UISpringTimingParameters(mass: 3.0, stiffness: 1000.0, damping: 500.0, initialVelocity: CGVector(dx: 0.0, dy: 0.0))
-let animator = UIViewPropertyAnimator(duration: 0.34, timingParameters: timing)
-animator.addAnimations {
-backdropEffectView.frame = squareBounds
-}
-animator.startAnimation()
-} else {
 UIView.animate(withDuration: duration, delay: 0.0, usingSpringWithDamping: 500.0, initialSpringVelocity: 0.0, options: .layoutSubviews, animations: {
 backdropEffectView.frame = squareBounds
 })
-}
 } else {
 transition.animateView {
 backdropEffectView.frame = squareBounds
@@ -306,11 +331,19 @@ final class GroupVideoNode: ASDisplayNode {
 }
 }
 
+if let effectView = self.effectView {
+if case let .animated(duration, .spring) = transition {
+UIView.animate(withDuration: duration, delay: 0.0, usingSpringWithDamping: 500.0, initialSpringVelocity: 0.0, options: .layoutSubviews, animations: {
+effectView.frame = bounds
+})
+} else {
+transition.animateView {
+effectView.frame = bounds
+}
+}
+}
+
 let transition: ContainedViewLayoutTransition = .immediate
 transition.updateTransformRotation(view: self.videoView.view, angle: angle)
-
-if let effectView = self.effectView {
-transition.updateFrame(view: effectView, frame: bounds)
-}
 }
 }

@@ -1017,7 +1017,7 @@ public final class PresentationCallImpl: PresentationCall {
 self.videoCapturer = videoCapturer
 }
 
-self.videoCapturer?.makeOutgoingVideoView(completion: { view in
+self.videoCapturer?.makeOutgoingVideoView(requestClone: false, completion: { view, _ in
 if let view = view {
 let setOnFirstFrameReceived = view.setOnFirstFrameReceived
 let setOnOrientationUpdated = view.setOnOrientationUpdated

@@ -24,29 +24,17 @@ private extension GroupCallParticipantsContext.Participant {
 if let ssrc = self.ssrc {
 participantSsrcs.insert(ssrc)
 }
-if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
-if let groups = json["ssrc-groups"] as? [Any] {
-for group in groups {
-if let group = group as? [String: Any] {
-if let groupSources = group["sources"] as? [UInt32] {
-for source in groupSources {
-participantSsrcs.insert(source)
-}
-}
-}
-}
-}
-}
-if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
-if let groups = json["ssrc-groups"] as? [Any] {
-for group in groups {
-if let group = group as? [String: Any] {
-if let groupSources = group["sources"] as? [UInt32] {
-for source in groupSources {
-participantSsrcs.insert(source)
+if let videoDescription = self.videoDescription {
+for group in videoDescription.ssrcGroups {
+for ssrc in group.ssrcs {
+participantSsrcs.insert(ssrc)
 }
 }
 }
+if let presentationDescription = self.presentationDescription {
+for group in presentationDescription.ssrcGroups {
+for ssrc in group.ssrcs {
+participantSsrcs.insert(ssrc)
 }
 }
 }
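
Note (sketch, not part of the diff; participant stands for any GroupCallParticipantsContext.Participant value): with the structured descriptions, collecting every ssrc a participant announces becomes a pair of nested loops instead of JSON parsing.

    var allSsrcs = Set<UInt32>()
    for group in participant.videoDescription?.ssrcGroups ?? [] {
        allSsrcs.formUnion(group.ssrcs)
    }
    for group in participant.presentationDescription?.ssrcGroups ?? [] {
        allSsrcs.formUnion(group.ssrcs)
    }
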
@ -55,16 +43,10 @@ private extension GroupCallParticipantsContext.Participant {
|
|||||||
|
|
||||||
var videoSsrcs: Set<UInt32> {
|
var videoSsrcs: Set<UInt32> {
|
||||||
var participantSsrcs = Set<UInt32>()
|
var participantSsrcs = Set<UInt32>()
|
||||||
if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
if let videoDescription = self.videoDescription {
|
||||||
if let groups = json["ssrc-groups"] as? [Any] {
|
for group in videoDescription.ssrcGroups {
|
||||||
for group in groups {
|
for ssrc in group.ssrcs {
|
||||||
if let group = group as? [String: Any] {
|
participantSsrcs.insert(ssrc)
|
||||||
if let groupSources = group["sources"] as? [UInt32] {
|
|
||||||
for source in groupSources {
|
|
||||||
participantSsrcs.insert(source)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -73,16 +55,10 @@ private extension GroupCallParticipantsContext.Participant {
|
|||||||
|
|
||||||
var presentationSsrcs: Set<UInt32> {
|
var presentationSsrcs: Set<UInt32> {
|
||||||
var participantSsrcs = Set<UInt32>()
|
var participantSsrcs = Set<UInt32>()
|
||||||
if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
if let presentationDescription = self.presentationDescription {
|
||||||
if let groups = json["ssrc-groups"] as? [Any] {
|
for group in presentationDescription.ssrcGroups {
|
||||||
for group in groups {
|
for ssrc in group.ssrcs {
|
||||||
if let group = group as? [String: Any] {
|
participantSsrcs.insert(ssrc)
|
||||||
if let groupSources = group["sources"] as? [UInt32] {
|
|
||||||
for source in groupSources {
|
|
||||||
participantSsrcs.insert(source)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -428,6 +404,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 private var internalState: InternalState = .requesting
 private let internalStatePromise = Promise<InternalState>(.requesting)
 private var currentLocalSsrc: UInt32?
+private var currentLocalEndpointId: String?
 
 private var genericCallContext: OngoingGroupCallContext?
 private var currentConnectionMode: OngoingGroupCallContext.ConnectionMode = .none
@@ -478,6 +455,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 }
 private let isNoiseSuppressionEnabledDisposable = MetaDisposable()
 
+private var isVideoMuted: Bool = false
+private let isVideoMutedDisposable = MetaDisposable()
+
 private let audioOutputStatePromise = Promise<([AudioSessionOutput], AudioSessionOutput?)>(([], nil))
 private var audioOutputStateDisposable: Disposable?
 private var actualAudioOutputState: ([AudioSessionOutput], AudioSessionOutput?)?
@@ -771,7 +751,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 return
 }
 if case let .established(callInfo, _, _, _, _) = strongSelf.internalState {
-var addedParticipants: [(UInt32, String?, String?)] = []
 var removedSsrc: [UInt32] = []
 for (callId, update) in updates {
 if callId == callInfo.id {
@@ -806,11 +785,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 }
 }
 } else if case .joined = participantUpdate.participationStatusChange {
-if let ssrc = participantUpdate.ssrc {
-addedParticipants.append((ssrc, participantUpdate.videoJsonDescription, participantUpdate.presentationJsonDescription))
-}
 } else if let ssrc = participantUpdate.ssrc, strongSelf.ssrcMapping[ssrc] == nil {
-addedParticipants.append((ssrc, participantUpdate.videoJsonDescription, participantUpdate.presentationJsonDescription))
 }
 }
 case let .call(isTerminated, _, _, _, _, _):
@@ -903,7 +878,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 guard let screencastCapturer = screencastCapturer else {
 return
 }
-screencastCapturer.injectPixelBuffer(screencastFrame)
+screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1)
 })
 self.screencastStateDisposable = (screencastBufferServerContext.isActive
 |> distinctUntilChanged
@@ -942,6 +917,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 self.leaveDisposable.dispose()
 self.isMutedDisposable.dispose()
 self.isNoiseSuppressionEnabledDisposable.dispose()
+self.isVideoMutedDisposable.dispose()
 self.memberStatesDisposable.dispose()
 self.networkStateDisposable.dispose()
 self.checkCallDisposable?.dispose()
@@ -1031,8 +1007,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 participants.append(GroupCallParticipantsContext.Participant(
 peer: myPeer,
 ssrc: nil,
-videoJsonDescription: nil,
-presentationJsonDescription: nil,
+videoDescription: nil,
+presentationDescription: nil,
 joinTimestamp: strongSelf.temporaryJoinTimestamp,
 raiseHandRating: strongSelf.temporaryRaiseHandRating,
 hasRaiseHand: strongSelf.temporaryHasRaiseHand,
@@ -1112,8 +1088,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 participants.append(GroupCallParticipantsContext.Participant(
 peer: myPeer,
 ssrc: nil,
-videoJsonDescription: nil,
-presentationJsonDescription: nil,
+videoDescription: nil,
+presentationDescription: nil,
 joinTimestamp: strongSelf.temporaryJoinTimestamp,
 raiseHandRating: strongSelf.temporaryRaiseHandRating,
 hasRaiseHand: strongSelf.temporaryHasRaiseHand,
@@ -1276,8 +1252,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 participants.append(GroupCallParticipantsContext.Participant(
 peer: myPeer,
 ssrc: nil,
-videoJsonDescription: nil,
-presentationJsonDescription: nil,
+videoDescription: nil,
+presentationDescription: nil,
 joinTimestamp: strongSelf.temporaryJoinTimestamp,
 raiseHandRating: strongSelf.temporaryRaiseHandRating,
 hasRaiseHand: strongSelf.temporaryHasRaiseHand,
@@ -1488,6 +1464,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 return
 }
 let clientParams = joinCallResult.jsonParams
+if let data = clientParams.data(using: .utf8), let dict = (try? JSONSerialization.jsonObject(with: data, options: [])) as? [String: Any] {
+if let video = dict["video"] as? [String: Any] {
+if let endpointId = video["endpoint"] as? String {
+strongSelf.currentLocalEndpointId = endpointId
+}
+}
+}
 
 strongSelf.ssrcMapping.removeAll()
 for participant in joinCallResult.state.participants {
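// A runnable sketch of the "video.endpoint" extraction added above, applied to a plain
// JSON string; everything about the join-params payload beyond this one key path is an
// assumption.
import Foundation

func localVideoEndpoint(fromJoinParams jsonParams: String) -> String? {
    guard let data = jsonParams.data(using: .utf8),
          let dict = (try? JSONSerialization.jsonObject(with: data, options: [])) as? [String: Any],
          let video = dict["video"] as? [String: Any] else {
        return nil
    }
    // Same key path the hunk above reads from joinCallResult.jsonParams.
    return video["endpoint"] as? String
}

let exampleEndpoint = localVideoEndpoint(fromJoinParams: "{\"video\":{\"endpoint\":\"local-endpoint-id\"}}")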
@@ -1630,11 +1613,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 if let peerId = peerId {
 if case .local = ssrcKey {
 orignalMyLevelHasVoice = hasVoice
-if !strongSelf.isMutedValue.isEffectivelyMuted {
 myLevel = level
 myLevelHasVoice = hasVoice
 }
-}
 result.append((peerId, ssrcValue, level, hasVoice))
 } else if ssrcValue != 0 {
 missingSsrcs.insert(ssrcValue)
@@ -1838,8 +1819,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 participants.append(GroupCallParticipantsContext.Participant(
 peer: myPeer,
 ssrc: nil,
-videoJsonDescription: nil,
-presentationJsonDescription: nil,
+videoDescription: nil,
+presentationDescription: nil,
 joinTimestamp: strongSelf.temporaryJoinTimestamp,
 raiseHandRating: strongSelf.temporaryRaiseHandRating,
 hasRaiseHand: strongSelf.temporaryHasRaiseHand,
@@ -2055,31 +2036,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 videoDescription: nil
 ))
 }
 
-if let videoDescription = participant.videoJsonDescription, !videoDescription.isEmpty {
-let videoSsrcs = participant.videoSsrcs
-if !videoSsrcs.intersection(remainingSsrcs).isEmpty {
-remainingSsrcs.subtract(videoSsrcs)
-
-result.append(OngoingGroupCallContext.MediaChannelDescription(
-kind: .video,
-audioSsrc: audioSsrc,
-videoDescription: videoDescription
-))
-}
-}
-if let videoDescription = participant.presentationJsonDescription, !videoDescription.isEmpty {
-let videoSsrcs = participant.presentationSsrcs
-if !videoSsrcs.intersection(remainingSsrcs).isEmpty {
-remainingSsrcs.subtract(videoSsrcs)
-
-result.append(OngoingGroupCallContext.MediaChannelDescription(
-kind: .video,
-audioSsrc: audioSsrc,
-videoDescription: videoDescription
-))
-}
-}
 }
 }
 
@@ -2242,6 +2198,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 strongSelf.reconnectedAsEventsPipe.putNext(myPeer)
 strongSelf.switchToTemporaryScheduledParticipantsContext()
 } else {
+strongSelf.disableVideo()
+strongSelf.isMutedValue = .muted(isPushToTalkActive: false)
+strongSelf.isMutedPromise.set(strongSelf.isMutedValue)
+
 strongSelf.reconnectingAsPeer = myPeer
 
 if let participantsContext = strongSelf.participantsContext, let immediateState = participantsContext.immediateState {
@@ -2429,28 +2389,32 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 self.participantsContext?.lowerHand()
 }
 
-public func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void) {
+public func makeOutgoingVideoView(requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void) {
 if self.videoCapturer == nil {
 let videoCapturer = OngoingCallVideoCapturer()
 self.videoCapturer = videoCapturer
 }
 
-self.videoCapturer?.makeOutgoingVideoView(completion: { view in
-if let view = view {
-let setOnFirstFrameReceived = view.setOnFirstFrameReceived
-let setOnOrientationUpdated = view.setOnOrientationUpdated
-let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
-let updateIsEnabled = view.updateIsEnabled
-completion(PresentationCallVideoView(
-holder: view,
-view: view.view,
+guard let videoCapturer = self.videoCapturer else {
+completion(nil, nil)
+return
+}
+videoCapturer.makeOutgoingVideoView(requestClone: requestClone, completion: { mainView, cloneView in
+if let mainView = mainView {
+let setOnFirstFrameReceived = mainView.setOnFirstFrameReceived
+let setOnOrientationUpdated = mainView.setOnOrientationUpdated
+let setOnIsMirroredUpdated = mainView.setOnIsMirroredUpdated
+let updateIsEnabled = mainView.updateIsEnabled
+let mainVideoView = PresentationCallVideoView(
+holder: mainView,
+view: mainView.view,
 setOnFirstFrameReceived: { f in
 setOnFirstFrameReceived(f)
 },
-getOrientation: { [weak view] in
-if let view = view {
+getOrientation: { [weak mainView] in
+if let mainView = mainView {
 let mappedValue: PresentationCallVideoView.Orientation
-switch view.getOrientation() {
+switch mainView.getOrientation() {
 case .rotation0:
 mappedValue = .rotation0
 case .rotation90:
@@ -2465,9 +2429,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 return .rotation0
 }
 },
-getAspect: { [weak view] in
-if let view = view {
-return view.getAspect()
+getAspect: { [weak mainView] in
+if let mainView = mainView {
+return mainView.getAspect()
 } else {
 return 0.0
 }
@@ -2496,9 +2460,73 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 updateIsEnabled: { value in
 updateIsEnabled(value)
 }
-))
+)
+var cloneVideoView: PresentationCallVideoView?
+if let cloneView = cloneView {
+let setOnFirstFrameReceived = cloneView.setOnFirstFrameReceived
+let setOnOrientationUpdated = cloneView.setOnOrientationUpdated
+let setOnIsMirroredUpdated = cloneView.setOnIsMirroredUpdated
+let updateIsEnabled = cloneView.updateIsEnabled
+cloneVideoView = PresentationCallVideoView(
+holder: cloneView,
+view: cloneView.view,
+setOnFirstFrameReceived: { f in
+setOnFirstFrameReceived(f)
+},
+getOrientation: { [weak cloneView] in
+if let cloneView = cloneView {
+let mappedValue: PresentationCallVideoView.Orientation
+switch cloneView.getOrientation() {
+case .rotation0:
+mappedValue = .rotation0
+case .rotation90:
+mappedValue = .rotation90
+case .rotation180:
+mappedValue = .rotation180
+case .rotation270:
+mappedValue = .rotation270
+}
+return mappedValue
 } else {
-completion(nil)
+return .rotation0
+}
+},
+getAspect: { [weak cloneView] in
+if let cloneView = cloneView {
+return cloneView.getAspect()
+} else {
+return 0.0
+}
+},
+setOnOrientationUpdated: { f in
+setOnOrientationUpdated { value, aspect in
+let mappedValue: PresentationCallVideoView.Orientation
+switch value {
+case .rotation0:
+mappedValue = .rotation0
+case .rotation90:
+mappedValue = .rotation90
+case .rotation180:
+mappedValue = .rotation180
+case .rotation270:
+mappedValue = .rotation270
+}
+f?(mappedValue, aspect)
+}
+},
+setOnIsMirroredUpdated: { f in
+setOnIsMirroredUpdated { value in
+f?(value)
+}
+},
+updateIsEnabled: { value in
+updateIsEnabled(value)
+}
+)
+}
+completion(mainVideoView, cloneVideoView)
+} else {
+completion(nil, nil)
 }
 })
 }
@@ -2511,8 +2539,18 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 self.hasVideo = true
 if let videoCapturer = self.videoCapturer {
 self.genericCallContext?.requestVideo(videoCapturer)
+self.isVideoMuted = false
+self.isVideoMutedDisposable.set((videoCapturer.isActive
+|> distinctUntilChanged
+|> deliverOnMainQueue).start(next: { [weak self] value in
+guard let strongSelf = self else {
+return
+}
+strongSelf.isVideoMuted = !value
+strongSelf.updateLocalVideoState()
+}))
 
-self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: false)
+self.updateLocalVideoState()
 }
 }
 
@@ -2520,12 +2558,18 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 self.hasVideo = false
 if let _ = self.videoCapturer {
 self.videoCapturer = nil
+self.isVideoMutedDisposable.set(nil)
 self.genericCallContext?.disableVideo()
+self.isVideoMuted = true
 
-self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: true)
+self.updateLocalVideoState()
 }
 }
 
+private func updateLocalVideoState() {
+self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: self.videoCapturer == nil, isVideoPaused: self.isVideoMuted)
+}
+
 public func switchVideoCamera() {
 self.useFrontCamera = !self.useFrontCamera
 self.videoCapturer?.switchVideoInput(isFront: self.useFrontCamera)
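// A sketch of the local video state model the hunks above introduce, with stand-in types:
// "muted" means no capturer exists at all, "paused" means the capturer exists but its
// isActive signal currently reports false.
final class LocalVideoStateSketch {
    private(set) var hasCapturer = false
    private(set) var capturerIsActive = false

    // Mirrors the arguments passed to updateVideoState(peerId:isVideoMuted:isVideoPaused:).
    var reportedState: (isVideoMuted: Bool, isVideoPaused: Bool) {
        return (isVideoMuted: !hasCapturer, isVideoPaused: hasCapturer && !capturerIsActive)
    }

    func enableVideo() {
        hasCapturer = true
        capturerIsActive = true
    }

    // Fed from the capturer's isActive signal, as in enableVideo() above.
    func capturerReportedActive(_ value: Bool) {
        capturerIsActive = value
    }

    func disableVideo() {
        hasCapturer = false
        capturerIsActive = false
    }
}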
@@ -2616,19 +2660,32 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 
 public func setRequestedVideoList(items: [PresentationGroupCallRequestedVideo]) {
 self.genericCallContext?.setRequestedVideoChannels(items.compactMap { item -> OngoingGroupCallContext.VideoChannel in
-let mappedQuality: OngoingGroupCallContext.VideoChannel.Quality
-switch item.quality {
+let mappedMinQuality: OngoingGroupCallContext.VideoChannel.Quality
+let mappedMaxQuality: OngoingGroupCallContext.VideoChannel.Quality
+switch item.minQuality {
 case .thumbnail:
-mappedQuality = .thumbnail
+mappedMinQuality = .thumbnail
 case .medium:
-mappedQuality = .medium
+mappedMinQuality = .medium
 case .full:
-mappedQuality = .full
+mappedMinQuality = .full
+}
+switch item.maxQuality {
+case .thumbnail:
+mappedMaxQuality = .thumbnail
+case .medium:
+mappedMaxQuality = .medium
+case .full:
+mappedMaxQuality = .full
 }
 return OngoingGroupCallContext.VideoChannel(
 audioSsrc: item.audioSsrc,
-videoDescription: item.videoInformation,
-quality: mappedQuality
+endpointId: item.endpointId,
+ssrcGroups: item.ssrcGroups.map { group in
+return OngoingGroupCallContext.VideoChannel.SsrcGroup(semantics: group.semantics, ssrcs: group.ssrcs)
+},
+minQuality: mappedMinQuality,
+maxQuality: mappedMaxQuality
 )
 })
 }
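// The two switches above apply the same mapping to min and max; a helper along these
// lines could map a single value instead (sketch only — it assumes both Quality enums
// keep exactly the three cases shown in the hunk).
enum RequestedQualitySketch { case thumbnail, medium, full }
enum ContextQualitySketch { case thumbnail, medium, full }

func mapQuality(_ quality: RequestedQualitySketch) -> ContextQualitySketch {
    switch quality {
    case .thumbnail:
        return .thumbnail
    case .medium:
        return .medium
    case .full:
        return .full
    }
}

// Usage would then read: minQuality: mapQuality(item.minQuality), maxQuality: mapQuality(item.maxQuality)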
@@ -2931,6 +2988,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
 }
 
 public func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void) {
+if endpointId == self.currentLocalEndpointId {
+self.makeOutgoingVideoView(requestClone: requestClone, completion: completion)
+return
+}
+
 self.genericCallContext?.makeIncomingVideoView(endpointId: endpointId, requestClone: requestClone, completion: { mainView, cloneView in
 if let mainView = mainView {
 let setOnFirstFrameReceived = mainView.setOnFirstFrameReceived
@@ -1374,6 +1374,7 @@ final class BlobView: UIView {
 
 var level: CGFloat = 0 {
 didSet {
+if abs(self.level - oldValue) > 0.01 {
 CATransaction.begin()
 CATransaction.setDisableActions(true)
 let lv = self.minScale + (self.maxScale - self.minScale) * self.level
@@ -1382,6 +1383,7 @@ final class BlobView: UIView {
 CATransaction.commit()
 }
 }
+}
 
 private var speedLevel: CGFloat = 0
 private var lastSpeedLevel: CGFloat = 0
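// A sketch of the threshold guard added to BlobView.level above: CATransaction work is
// skipped whenever the level moves by 0.01 or less.
import CoreGraphics

func blobNeedsUpdate(level: CGFloat, oldValue: CGFloat) -> Bool {
    return abs(level - oldValue) > 0.01
}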
@@ -426,11 +426,17 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
 self.containerLayout = (layout, navigationBarHeight)
 
 let isLandscape: Bool
-if layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
+if layout.size.width > layout.size.height {
 isLandscape = true
 } else {
 isLandscape = false
 }
+let isTablet: Bool
+if case .regular = layout.metrics.widthClass {
+isTablet = true
+} else {
+isTablet = false
+}
 
 var insets = layout.insets(options: [.statusBar, .input])
 let cleanInsets = layout.insets(options: [.statusBar])
@@ -440,28 +446,44 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
 if let _ = self.broadcastPickerView {
 buttonOffset *= 2.0
 }
-let bottomInset: CGFloat = 10.0 + cleanInsets.bottom
+let bottomInset: CGFloat = isTablet ? 31.0 : 10.0 + cleanInsets.bottom
 let titleHeight: CGFloat = 54.0
 var contentHeight = titleHeight + bottomInset + 52.0 + 17.0
 let innerContentHeight: CGFloat = layout.size.height - contentHeight - 160.0
 var width = horizontalContainerFillingSizeForLayout(layout: layout, sideInset: layout.safeInsets.left)
 if isLandscape {
+if isTablet {
+width = 870.0
+contentHeight = 690.0
+} else {
 contentHeight = layout.size.height
 width = layout.size.width
+}
+} else {
+if isTablet {
+width = 600.0
+contentHeight = 960.0
 } else {
 contentHeight = titleHeight + bottomInset + 52.0 + 17.0 + innerContentHeight + buttonOffset
 }
+}
 
 let previewInset: CGFloat = 16.0
 let sideInset = floor((layout.size.width - width) / 2.0)
-let contentContainerFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentHeight), size: CGSize(width: width, height: contentHeight))
-let contentFrame = contentContainerFrame
-var backgroundFrame = CGRect(origin: CGPoint(x: contentFrame.minX, y: contentFrame.minY), size: CGSize(width: contentFrame.width, height: contentFrame.height + 2000.0))
+let contentFrame: CGRect
+if isTablet {
+contentFrame = CGRect(origin: CGPoint(x: sideInset, y: floor((layout.size.height - contentHeight) / 2.0)), size: CGSize(width: width, height: contentHeight))
+} else {
+contentFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentHeight), size: CGSize(width: width, height: contentHeight))
+}
+var backgroundFrame = CGRect(origin: CGPoint(x: contentFrame.minX, y: contentFrame.minY), size: CGSize(width: contentFrame.width, height: contentFrame.height))
+if !isTablet {
+backgroundFrame.size.height += 2000.0
+}
 if backgroundFrame.minY < contentFrame.minY {
 backgroundFrame.origin.y = contentFrame.minY
 }
-transition.updateAlpha(node: self.titleNode, alpha: isLandscape ? 0.0 : 1.0)
+transition.updateAlpha(node: self.titleNode, alpha: isLandscape && !isTablet ? 0.0 : 1.0)
 transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
 transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
 transition.updateFrame(node: self.contentBackgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
@@ -472,14 +494,24 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
 let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 18.0), size: titleSize)
 transition.updateFrame(node: self.titleNode, frame: titleFrame)
 
-let previewSize: CGSize
-let previewFrame: CGRect
+var previewSize: CGSize
+var previewFrame: CGRect
 if isLandscape {
-let previewHeight = contentHeight - layout.intrinsicInsets.bottom - 52.0 - 10.0
+let previewHeight = contentHeight - 21.0 - 52.0 - 10.0
 previewSize = CGSize(width: min(contentFrame.width - layout.safeInsets.left - layout.safeInsets.right, previewHeight * 1.7778), height: previewHeight)
+if isTablet {
+previewSize.width -= previewInset * 2.0
+previewSize.height -= 46.0
+}
 previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((contentFrame.width - previewSize.width) / 2.0), y: 0.0), size: previewSize)
+if isTablet {
+previewFrame.origin.y += 56.0
+}
 } else {
 previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 243.0 - bottomInset + (120.0 - buttonOffset))
+if isTablet {
+previewSize.height += 17.0
+}
 previewFrame = CGRect(origin: CGPoint(x: previewInset, y: 56.0), size: previewSize)
 }
 transition.updateFrame(node: self.previewContainerNode, frame: previewFrame)
@@ -508,40 +540,49 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
 } else {
 self.screenButton.isHidden = true
 }
-let buttonInset: CGFloat = 6.0
 
-let buttonWidth = floorToScreenPixels((contentFrame.width - layout.safeInsets.left - layout.safeInsets.right - CGFloat(buttonsCount + 1) * buttonInset) / CGFloat(buttonsCount))
+let buttonInset: CGFloat = 6.0
+var leftButtonInset = buttonInset
+let availableWidth: CGFloat
+if isTablet {
+availableWidth = contentFrame.width - layout.safeInsets.left - layout.safeInsets.right - previewInset * 2.0
+leftButtonInset += previewInset
+} else {
+availableWidth = contentFrame.width - layout.safeInsets.left - layout.safeInsets.right
+}
+let buttonWidth = floorToScreenPixels((availableWidth - CGFloat(buttonsCount + 1) * buttonInset) / CGFloat(buttonsCount))
 
 let cameraButtonHeight = self.cameraButton.updateLayout(width: buttonWidth, transition: transition)
 let screenButtonHeight = self.screenButton.updateLayout(width: buttonWidth, transition: transition)
 let cancelButtonHeight = self.cancelButton.updateLayout(width: buttonWidth, transition: transition)
 
-transition.updateFrame(node: self.cancelButton, frame: CGRect(x: layout.safeInsets.left + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cancelButtonHeight))
+transition.updateFrame(node: self.cancelButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cancelButtonHeight))
 if let broadcastPickerView = self.broadcastPickerView {
-transition.updateFrame(node: self.screenButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight))
-broadcastPickerView.frame = CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight)
-transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
+transition.updateFrame(node: self.screenButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight))
+broadcastPickerView.frame = CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight)
+transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
 } else {
-transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
+transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
 }
 
 } else {
+let bottomInset = isTablet ? 21.0 : insets.bottom + 16.0
 let buttonInset: CGFloat = 16.0
 let cameraButtonHeight = self.cameraButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
-transition.updateFrame(node: self.cameraButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - insets.bottom - 16.0 - buttonOffset, width: contentFrame.width, height: cameraButtonHeight))
+transition.updateFrame(node: self.cameraButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - bottomInset - buttonOffset, width: contentFrame.width, height: cameraButtonHeight))
 
 let screenButtonHeight = self.screenButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
-transition.updateFrame(node: self.screenButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - insets.bottom - 16.0, width: contentFrame.width, height: screenButtonHeight))
+transition.updateFrame(node: self.screenButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - bottomInset, width: contentFrame.width, height: screenButtonHeight))
 if let broadcastPickerView = self.broadcastPickerView {
-broadcastPickerView.frame = CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - insets.bottom - 16.0, width: contentFrame.width + 1000.0, height: screenButtonHeight)
+broadcastPickerView.frame = CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - bottomInset, width: contentFrame.width + 1000.0, height: screenButtonHeight)
 } else {
 self.screenButton.isHidden = true
 }
 
 let cancelButtonHeight = self.cancelButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
-transition.updateFrame(node: self.cancelButton, frame: CGRect(x: buttonInset, y: contentHeight - cancelButtonHeight - insets.bottom - 16.0, width: contentFrame.width, height: cancelButtonHeight))
+transition.updateFrame(node: self.cancelButton, frame: CGRect(x: buttonInset, y: contentHeight - cancelButtonHeight - bottomInset, width: contentFrame.width, height: cancelButtonHeight))
 }
 
-transition.updateFrame(node: self.contentContainerNode, frame: contentContainerFrame)
+transition.updateFrame(node: self.contentContainerNode, frame: contentFrame)
 }
 }
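// A sketch of the sheet sizing rules the camera-preview layout hunks above encode; the
// tablet constants (870x690 landscape, 600x960 portrait) come from the patch, while the
// phone values are whatever the existing layout code already computed.
import CoreGraphics

func previewSheetSize(isLandscape: Bool, isTablet: Bool, layoutSize: CGSize, phoneWidth: CGFloat, phoneContentHeight: CGFloat) -> CGSize {
    if isLandscape {
        return isTablet ? CGSize(width: 870.0, height: 690.0) : layoutSize
    } else {
        return isTablet ? CGSize(width: 600.0, height: 960.0) : CGSize(width: phoneWidth, height: phoneContentHeight)
    }
}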
@@ -68,12 +68,14 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
 let context: AccountContext
 let peer: Peer
 let videoEndpointId: String?
+let isPaused: Bool
 let icon: Icon
 let text: VoiceChatParticipantItem.ParticipantText
 let textColor: Color
 let color: Color
 let isLandscape: Bool
 let active: Bool
+let showVideoWhenActive: Bool
 let getAudioLevel: (() -> Signal<Float, NoError>)?
 let getVideo: () -> GroupVideoNode?
 let action: ((ASDisplayNode?) -> Void)?
@@ -82,18 +84,20 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
 
 public let selectable: Bool = true
 
-public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, videoEndpointId: String?, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, textColor: Color, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
+public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, videoEndpointId: String?, isPaused: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, textColor: Color, color: Color, isLandscape: Bool, active: Bool, showVideoWhenActive: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
 self.presentationData = presentationData
 self.nameDisplayOrder = nameDisplayOrder
 self.context = context
 self.peer = peer
 self.videoEndpointId = videoEndpointId
+self.isPaused = isPaused
 self.icon = icon
 self.text = text
 self.textColor = textColor
 self.color = color
 self.isLandscape = isLandscape
 self.active = active
+self.showVideoWhenActive = showVideoWhenActive
 self.getAudioLevel = getAudioLevel
 self.getVideo = getVideo
 self.action = action
@@ -269,10 +273,8 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 gesture.cancel()
 return
 }
-if item.peer.smallProfileImage != nil {
 contextAction(strongSelf.contextSourceNode, gesture)
 }
-}
 self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
 guard let strongSelf = self, let _ = strongSelf.item else {
 return
@@ -289,138 +291,53 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 
 override func selected() {
 super.selected()
+if self.animatingSelection {
+return
+}
 self.layoutParams?.0.action?(self.contextSourceNode)
 }
 
-func animateTransitionIn(from sourceNode: ASDisplayNode?, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
+func transitionIn(from sourceNode: ASDisplayNode?) {
 guard let item = self.item else {
 return
 }
-var duration: Double = 0.2
-var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
-if case let .animated(transitionDuration, curve) = transition {
-duration = transitionDuration
-timingFunction = curve.timingFunction
-}
+let active = item.active && !item.showVideoWhenActive
 
-let initialAnimate = animate
+var videoNode: GroupVideoNode?
 if let sourceNode = sourceNode as? VoiceChatTileItemNode {
-var startContainerPosition = sourceNode.view.convert(sourceNode.bounds, to: containerNode.view).center
-var animate = initialAnimate
-if startContainerPosition.y < -tileHeight || startContainerPosition.y > containerNode.frame.height + tileHeight {
-animate = false
+if let sourceVideoNode = sourceNode.videoNode {
+sourceNode.videoNode = nil
+videoNode = sourceVideoNode
+}
 }
 
-if let videoNode = sourceNode.videoNode {
-if item.active {
+if videoNode == nil {
+videoNode = item.getVideo()
+}
+
+if let videoNode = videoNode {
+if active {
 self.avatarNode.alpha = 1.0
 videoNode.alpha = 0.0
-startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0)
 } else {
 self.avatarNode.alpha = 0.0
+videoNode.alpha = 1.0
 }
 
-sourceNode.videoNode = nil
 self.videoNode = videoNode
 self.videoContainerNode.insertSubnode(videoNode, at: 0)
 
-if animate {
-videoNode.updateLayout(size: videoSize, layoutMode: .fillOrFitToSquare, transition: transition)
-
-let scale = sourceNode.bounds.width / videoSize.width
-self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: duration, timingFunction: timingFunction)
-self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: timingFunction, duration: duration, removeOnCompletion: false, completion: { _ in
-})
-
-self.videoFadeNode.alpha = 1.0
-self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
-} else {
 videoNode.updateLayout(size: videoSize, layoutMode: .fillOrFitToSquare, transition: .immediate)
-self.videoFadeNode.alpha = 1.0
+videoNode.frame = CGRect(origin: CGPoint(), size: videoSize)
 }
 }
-
-if animate {
-let initialPosition = self.contextSourceNode.position
-let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center
-
-self.contextSourceNode.position = targetContainerPosition
-containerNode.addSubnode(self.contextSourceNode)
-
-self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
-if let strongSelf = self {
-strongSelf.contextSourceNode.position = initialPosition
-strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
-}
-})
-
-if item.active {
-self.highlightNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-self.highlightNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
-}
-
-self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-} else if !initialAnimate {
-if transition.isAnimated {
-self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-}
-}
-} else if let sourceNode = sourceNode as? VoiceChatParticipantItemNode, let _ = sourceNode.item {
-var startContainerPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
-var animate = true
-if startContainerPosition.y < -tileHeight || startContainerPosition.y > containerNode.frame.height + tileHeight {
-animate = false
-}
-startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0)
-
-if animate {
-sourceNode.avatarNode.alpha = 0.0
-sourceNode.audioLevelView?.alpha = 0.0
-
-let initialPosition = self.contextSourceNode.position
-let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center
-
-self.contextSourceNode.position = targetContainerPosition
-containerNode.addSubnode(self.contextSourceNode)
-
-self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in
-if let strongSelf = self, let sourceNode = sourceNode {
-sourceNode.avatarNode.alpha = 1.0
-sourceNode.audioLevelView?.alpha = 1.0
-strongSelf.contextSourceNode.position = initialPosition
-strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
-}
-})
-
-if item.active {
-self.highlightNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
-self.highlightNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
-}
-
-self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: duration, timingFunction: timingFunction)
-
-self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-}
-} else {
-if transition.isAnimated {
-self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
-}
-}
-}
 private func updateIsExtracted(_ isExtracted: Bool, transition: ContainedViewLayoutTransition) {
 guard self.isExtracted != isExtracted, let extractedRect = self.extractedRect, let nonExtractedRect = self.nonExtractedRect, let item = self.item else {
 return
 }
 self.isExtracted = isExtracted
 
+if item.peer.smallProfileImage != nil {
 if isExtracted {
 let profileNode = VoiceChatPeerProfileNode(context: item.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
 self?.contextSourceNode.requestDismiss?()
@@ -439,11 +356,14 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 }
 return nil
 }
+self.highlightNode.isHidden = true
 } else if let profileNode = self.profileNode {
 self.profileNode = nil
 profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
 
 self.contextSourceNode.contentNode.customHitTest = nil
+self.highlightNode.isHidden = !item.active
+}
 }
 }
 
@@ -452,14 +372,19 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 let makeStatusLayout = self.statusNode.asyncLayout()
 
 let currentItem = self.layoutParams?.0
-let hasVideo = self.videoNode != nil
+var hasVideo = self.videoNode != nil
 
 return { item, params, first, last in
 let titleFont = Font.semibold(13.0)
 var titleAttributedString: NSAttributedString?
 
+if !hasVideo && item.videoEndpointId != nil {
+hasVideo = true
+}
+let active = item.active && !item.showVideoWhenActive
+
 var titleColor = item.presentationData.theme.list.itemPrimaryTextColor
-if !hasVideo || item.active {
+if !hasVideo || active {
 switch item.textColor {
 case .generic:
 titleColor = item.presentationData.theme.list.itemPrimaryTextColor
@@ -569,14 +494,24 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 if let audioLevelView = strongSelf.audioLevelView {
 apperanceTransition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0)
 }
+} else {
+if currentItem?.peer.id == item.peer.id {
+currentVideoNode.layer.animateScale(from: 1.0, to: 0.0, duration: appearanceDuration, completion: { [weak self, weak currentVideoNode] _ in
+if currentVideoNode !== self?.videoNode {
+currentVideoNode?.removeFromSupernode()
+}
+})
 } else {
 currentVideoNode.removeFromSupernode()
 }
 }
+}
 
 let videoNodeUpdated = strongSelf.videoNode !== videoNode
 strongSelf.videoNode = videoNode
 
+videoNode?.updateIsBlurred(isBlurred: item.isPaused, light: true)
+
 let nonExtractedRect: CGRect
 let avatarFrame: CGRect
 let titleFrame: CGRect
@@ -594,7 +529,10 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 titleFrame = CGRect(origin: CGPoint(x: 8.0, y: 63.0), size: titleLayout.size)
 
 let extractedWidth = availableWidth
-let extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
+var extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
+if item.peer.smallProfileImage == nil {
+extractedRect = nonExtractedRect
+}
 strongSelf.extractedRect = extractedRect
 strongSelf.nonExtractedRect = nonExtractedRect
 
@@ -706,7 +644,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 strongSelf.audioLevelView = audioLevelView
 strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0)
 
-if let item = strongSelf.item, strongSelf.videoNode != nil && !item.active {
+if let item = strongSelf.item, strongSelf.videoNode != nil && !active {
 audioLevelView.alpha = 0.0
 }
 }
@@ -777,7 +715,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 nodeToAnimateIn = animationNode
 }
 var color = color
-if (hasVideo && !item.active) || color.rgb == 0x979797 {
+if (hasVideo && !active) || color.rgb == 0x979797 {
 color = UIColor(rgb: 0xffffff)
 }
 animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: color), animated: true)
@@ -868,16 +806,16 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0)
 strongSelf.videoContainerNode.cornerRadius = videoCornerRadius
 strongSelf.videoContainerNode.transform = CATransform3DMakeScale(videoContainerScale, videoContainerScale, 1.0)
-}
 
 strongSelf.highlightNode.isHidden = !item.active
+}
 
 let canUpdateAvatarVisibility = !strongSelf.isExtracted && !strongSelf.animatingExtraction
 
 if let videoNode = videoNode {
 if !strongSelf.isExtracted && !strongSelf.animatingExtraction {
 if currentItem != nil {
-if item.active {
+if active {
 if strongSelf.avatarNode.alpha.isZero {
 strongSelf.animatingSelection = true
 strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: appearanceDuration)
@@ -913,7 +851,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
 }
 }
 } else {
-if item.active {
+if active {
 videoNode.alpha = 0.0
 if canUpdateAvatarVisibility {
 strongSelf.avatarNode.alpha = 1.0
@ -937,19 +875,23 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if let _ = currentItem, videoNodeUpdated {
|
if let _ = currentItem, videoNodeUpdated {
|
||||||
if item.active {
|
if active {
|
||||||
if canUpdateAvatarVisibility {
|
if canUpdateAvatarVisibility {
|
||||||
strongSelf.avatarNode.alpha = 1.0
|
strongSelf.avatarNode.alpha = 1.0
|
||||||
}
|
}
|
||||||
videoNode.alpha = 0.0
|
videoNode.alpha = 0.0
|
||||||
} else {
|
} else {
|
||||||
|
strongSelf.animatingSelection = true
|
||||||
|
let previousAvatarNodeAlpha = strongSelf.avatarNode.alpha
|
||||||
strongSelf.avatarNode.alpha = 0.0
|
strongSelf.avatarNode.alpha = 0.0
|
||||||
strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: appearanceDuration)
|
strongSelf.avatarNode.layer.animateAlpha(from: previousAvatarNodeAlpha, to: 0.0, duration: appearanceDuration)
|
||||||
videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: appearanceDuration)
|
videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: appearanceDuration, completion: { [weak self] _ in
|
||||||
|
self?.animatingSelection = false
|
||||||
|
})
|
||||||
videoNode.alpha = 1.0
|
videoNode.alpha = 1.0
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if item.active {
|
if active {
|
||||||
if canUpdateAvatarVisibility {
|
if canUpdateAvatarVisibility {
|
||||||
strongSelf.avatarNode.alpha = 1.0
|
strongSelf.avatarNode.alpha = 1.0
|
||||||
}
|
}
|
||||||
@ -968,7 +910,6 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
|||||||
strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0)
|
strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0)
|
||||||
|
|
||||||
strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0)
|
strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0)
|
||||||
// strongSelf.actionButtonNode.frame = animationFrame
|
|
||||||
transition.updateFrame(node: strongSelf.actionButtonNode, frame: animationFrame)
|
transition.updateFrame(node: strongSelf.actionButtonNode, frame: animationFrame)
|
||||||
|
|
||||||
strongSelf.updateIsHighlighted(transition: transition)
|
strongSelf.updateIsHighlighted(transition: transition)
|
||||||
@@ -17,20 +17,6 @@ func optionsBackgroundImage(dark: Bool) -> UIImage? {
})?.stretchableImage(withLeftCapWidth: 14, topCapHeight: 14)
}

-func optionsButtonImage(dark: Bool) -> UIImage? {
-return generateImage(CGSize(width: 28.0, height: 28.0), contextGenerator: { size, context in
-context.clear(CGRect(origin: CGPoint(), size: size))
-
-context.setFillColor(UIColor(rgb: dark ? 0x1c1c1e : 0x2c2c2e).cgColor)
-context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
-
-context.setFillColor(UIColor.white.cgColor)
-context.fillEllipse(in: CGRect(x: 6.0, y: 12.0, width: 4.0, height: 4.0))
-context.fillEllipse(in: CGRect(x: 12.0, y: 12.0, width: 4.0, height: 4.0))
-context.fillEllipse(in: CGRect(x: 18.0, y: 12.0, width: 4.0, height: 4.0))
-})
-}
-
func optionsCircleImage(dark: Bool) -> UIImage? {
return generateImage(CGSize(width: 28.0, height: 28.0), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
@@ -40,6 +26,28 @@ func optionsCircleImage(dark: Bool) -> UIImage? {
})
}

+func panelButtonImage(dark: Bool) -> UIImage? {
+return generateImage(CGSize(width: 38.0, height: 28.0), contextGenerator: { size, context in
+context.clear(CGRect(origin: CGPoint(), size: size))
+
+context.addPath(UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: size), cornerRadius: 14.0).cgPath)
+context.setFillColor(UIColor(rgb: dark ? 0x1c1c1e : 0x2c2c2e).cgColor)
+context.fillPath()
+
+context.setFillColor(UIColor.white.cgColor)
+
+if let image = UIImage(bundleImageName: "Call/PanelIcon") {
+let imageSize = image.size
+let imageRect = CGRect(origin: CGPoint(), size: imageSize)
+context.saveGState()
+context.translateBy(x: 7.0, y: 2.0)
+context.clip(to: imageRect, mask: image.cgImage!)
+context.fill(imageRect)
+context.restoreGState()
+}
+})
+}
+
func closeButtonImage(dark: Bool) -> UIImage? {
return generateImage(CGSize(width: 28.0, height: 28.0), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
@@ -76,9 +84,12 @@ final class VoiceChatHeaderButton: HighlightableButtonNode {

var contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?

-init(context: AccountContext) {
+private let wide: Bool
+
+init(context: AccountContext, wide: Bool = false) {
self.context = context
self.theme = context.sharedContext.currentPresentationData.with { $0 }.theme
+self.wide = wide

self.referenceNode = ContextReferenceContentNode()
self.containerNode = ContextControllerSourceNode()
@@ -111,9 +122,9 @@ final class VoiceChatHeaderButton: HighlightableButtonNode {
strongSelf.contextAction?(strongSelf.containerNode, gesture)
}

-self.iconNode.image = optionsButtonImage(dark: false)
+self.iconNode.image = optionsCircleImage(dark: false)

-self.containerNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 28.0, height: 28.0))
+self.containerNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: wide ? 38.0 : 28.0, height: 28.0))
self.referenceNode.frame = self.containerNode.bounds
self.iconNode.frame = self.containerNode.bounds
self.avatarNode.frame = self.containerNode.bounds
@@ -182,7 +193,7 @@ final class VoiceChatHeaderButton: HighlightableButtonNode {
}

override func calculateSizeThatFits(_ constrainedSize: CGSize) -> CGSize {
-return CGSize(width: 28.0, height: 28.0)
+return CGSize(width: wide ? 38.0 : 28.0, height: 28.0)
}

func onLayout() {
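Note (illustrative, not part of the patch): the new panelButtonImage(dark:) above draws a 38x28 rounded pill and stamps the "Call/PanelIcon" asset in white at an offset of (7, 2). A rough, self-contained sketch of the same drawing approach in plain UIKit; the function name, the tint-and-draw shortcut and everything else not quoted from the diff are assumptions.

import UIKit

// Sketch only: same pill shape and icon placement as panelButtonImage(dark:),
// built with UIGraphicsImageRenderer instead of the repository's generateImage helper.
@available(iOS 13.0, *)
func pillButtonImageSketch(dark: Bool, icon: UIImage? = nil) -> UIImage {
    let size = CGSize(width: 38.0, height: 28.0)
    return UIGraphicsImageRenderer(size: size).image { _ in
        // Rounded-rect background; 0x1c1c1e / 0x2c2c2e are the fills used in the diff.
        let fill = dark ? UIColor(red: 0x1c / 255.0, green: 0x1c / 255.0, blue: 0x1e / 255.0, alpha: 1.0)
                        : UIColor(red: 0x2c / 255.0, green: 0x2c / 255.0, blue: 0x2e / 255.0, alpha: 1.0)
        fill.setFill()
        UIBezierPath(roundedRect: CGRect(origin: .zero, size: size), cornerRadius: 14.0).fill()

        // The real helper clips to the bundle icon via a CG mask; tinting the image
        // white and drawing it at the same offset is an equivalent shortcut here.
        if let icon = icon {
            icon.withTintColor(.white, renderingMode: .alwaysOriginal)
                .draw(in: CGRect(origin: CGPoint(x: 7.0, y: 2.0), size: icon.size))
        }
    }
}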
@@ -29,7 +29,7 @@ private let destructiveColor: UIColor = UIColor(rgb: 0xff3b30)
final class VoiceChatMainStageNode: ASDisplayNode {
private let context: AccountContext
private let call: PresentationGroupCall
-private var currentPeer: (PeerId, String?)?
+private var currentPeer: (PeerId, String?, Bool, Bool, Bool)?
private var currentPeerEntry: VoiceChatPeerEntry?

var callState: PresentationGroupCallState?
@@ -51,10 +51,15 @@ final class VoiceChatMainStageNode: ASDisplayNode {
private let speakingPeerDisposable = MetaDisposable()
private let speakingAudioLevelDisposable = MetaDisposable()
private var backdropAvatarNode: ImageNode
-private var backdropEffectView: UIVisualEffectView?
private var avatarNode: ImageNode
private let titleNode: ImmediateTextNode
private let microphoneNode: VoiceChatMicrophoneNode
+private let placeholderTextNode: ImmediateTextNode
+private let placeholderIconNode: ASImageNode
+private let placeholderButton: HighlightTrackingButtonNode
+private var placeholderButtonEffectView: UIVisualEffectView?
+private let placeholderButtonHighlightNode: ASDisplayNode
+private let placeholderButtonTextNode: ImmediateTextNode

private let speakingContainerNode: ASDisplayNode
private var speakingEffectView: UIVisualEffectView?
@@ -62,12 +67,13 @@ final class VoiceChatMainStageNode: ASDisplayNode {
private let speakingTitleNode: ImmediateTextNode
private var speakingAudioLevelView: VoiceBlobView?

-private var validLayout: (CGSize, CGFloat, CGFloat, Bool)?
+private var validLayout: (CGSize, CGFloat, CGFloat, Bool, Bool)?

var tapped: (() -> Void)?
var back: (() -> Void)?
var togglePin: (() -> Void)?
var switchTo: ((PeerId) -> Void)?
+var stopScreencast: (() -> Void)?

var controlsHidden: ((Bool) -> Void)?

@@ -110,6 +116,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
}) {
self.bottomFadeNode.backgroundColor = UIColor(patternImage: image)
+self.bottomFadeNode.view.layer.rasterizationScale = UIScreen.main.scale
+self.bottomFadeNode.view.layer.shouldRasterize = true
}

self.bottomFillNode = ASDisplayNode()
@@ -124,19 +132,20 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.backButtonArrowNode.image = NavigationBarTheme.generateBackArrowImage(color: .white)
self.backButtonNode = HighlightableButtonNode()

+let presentationData = context.sharedContext.currentPresentationData.with { $0 }
+
self.pinButtonIconNode = ASImageNode()
self.pinButtonIconNode.displayWithoutProcessing = true
self.pinButtonIconNode.displaysAsynchronously = false
self.pinButtonIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pin"), color: .white)
self.pinButtonTitleNode = ImmediateTextNode()
self.pinButtonTitleNode.isHidden = true
-self.pinButtonTitleNode.attributedText = NSAttributedString(string: "Unpin", font: Font.regular(17.0), textColor: .white)
+self.pinButtonTitleNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_Unpin, font: Font.regular(17.0), textColor: .white)
self.pinButtonNode = HighlightableButtonNode()

self.backdropAvatarNode = ImageNode()
self.backdropAvatarNode.contentMode = .scaleAspectFill
self.backdropAvatarNode.displaysAsynchronously = false
-self.backdropAvatarNode.isHidden = true

self.audioLevelNode = VoiceChatBlobNode(size: CGSize(width: 300.0, height: 300.0))

@@ -159,34 +168,74 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.speakingTitleNode = ImmediateTextNode()
self.speakingTitleNode.displaysAsynchronously = false

+self.placeholderTextNode = ImmediateTextNode()
+self.placeholderTextNode.alpha = 0.0
+self.placeholderTextNode.maximumNumberOfLines = 2
+self.placeholderTextNode.textAlignment = .center
+
+self.placeholderIconNode = ASImageNode()
+self.placeholderIconNode.alpha = 0.0
+self.placeholderIconNode.contentMode = .scaleAspectFit
+self.placeholderIconNode.displaysAsynchronously = false
+
+self.placeholderButton = HighlightTrackingButtonNode()
+self.placeholderButton.alpha = 0.0
+self.placeholderButton.clipsToBounds = true
+self.placeholderButton.cornerRadius = backgroundCornerRadius
+
+self.placeholderButtonHighlightNode = ASDisplayNode()
+self.placeholderButtonHighlightNode.alpha = 0.0
+self.placeholderButtonHighlightNode.backgroundColor = UIColor(white: 1.0, alpha: 0.4)
+self.placeholderButtonHighlightNode.isUserInteractionEnabled = false
+
+self.placeholderButtonTextNode = ImmediateTextNode()
+self.placeholderButtonTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_StopScreenSharingShort, font: Font.semibold(17.0), textColor: .white)
+self.placeholderButtonTextNode.isUserInteractionEnabled = false

super.init()

self.clipsToBounds = true
self.cornerRadius = backgroundCornerRadius

self.addSubnode(self.backgroundNode)
+self.addSubnode(self.backdropAvatarNode)
self.addSubnode(self.topFadeNode)
self.addSubnode(self.bottomFadeNode)
self.addSubnode(self.bottomFillNode)
-self.addSubnode(self.backdropAvatarNode)
self.addSubnode(self.audioLevelNode)
self.addSubnode(self.avatarNode)
self.addSubnode(self.titleNode)
self.addSubnode(self.microphoneNode)
self.addSubnode(self.headerNode)

self.headerNode.addSubnode(self.backButtonNode)
self.headerNode.addSubnode(self.backButtonArrowNode)
self.headerNode.addSubnode(self.pinButtonIconNode)
self.headerNode.addSubnode(self.pinButtonTitleNode)
self.headerNode.addSubnode(self.pinButtonNode)

-self.addSubnode(self.speakingContainerNode)
+self.addSubnode(self.placeholderIconNode)
+self.addSubnode(self.placeholderTextNode)
+
+self.addSubnode(self.placeholderButton)
+self.placeholderButton.addSubnode(self.placeholderButtonHighlightNode)
+self.placeholderButton.addSubnode(self.placeholderButtonTextNode)
+self.placeholderButton.highligthedChanged = { [weak self] highlighted in
+if let strongSelf = self {
+if highlighted {
+strongSelf.placeholderButtonHighlightNode.layer.removeAnimation(forKey: "opacity")
+strongSelf.placeholderButtonHighlightNode.alpha = 1.0
+} else {
+strongSelf.placeholderButtonHighlightNode.alpha = 0.0
+strongSelf.placeholderButtonHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
+}
+}
+}
+self.placeholderButton.addTarget(self, action: #selector(self.stopSharingPressed), forControlEvents: .touchUpInside)
+
+self.addSubnode(self.speakingContainerNode)
self.speakingContainerNode.addSubnode(self.speakingAvatarNode)
self.speakingContainerNode.addSubnode(self.speakingTitleNode)

-let presentationData = context.sharedContext.currentPresentationData.with { $0 }
self.backButtonNode.setTitle(presentationData.strings.Common_Back, with: Font.regular(17.0), with: .white, for: [])
self.backButtonNode.hitTestSlop = UIEdgeInsets(top: -8.0, left: -20.0, bottom: -8.0, right: -8.0)
self.backButtonNode.highligthedChanged = { [weak self] highlighted in
@@ -248,19 +297,12 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.speakingContainerNode.view.insertSubview(speakingEffectView, at: 0)
self.speakingEffectView = speakingEffectView

-let effect: UIVisualEffect
-if #available(iOS 13.0, *) {
-effect = UIBlurEffect(style: .systemMaterialDark)
-} else {
-effect = UIBlurEffect(style: .dark)
-}
-let backdropEffectView = UIVisualEffectView(effect: effect)
-backdropEffectView.isHidden = true
-self.view.insertSubview(backdropEffectView, aboveSubview: self.backdropAvatarNode.view)
-self.backdropEffectView = backdropEffectView
+let placeholderButtonEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
+placeholderButtonEffectView.isUserInteractionEnabled = false
+self.placeholderButton.view.insertSubview(placeholderButtonEffectView, at: 0)
+self.placeholderButtonEffectView = placeholderButtonEffectView

self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap)))

self.speakingContainerNode.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.speakingTap)))
}

@@ -283,6 +325,10 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.togglePin?()
}

+@objc private func stopSharingPressed() {
+self.stopScreencast?()
+}
+
var animating: Bool {
return self.animatingIn || self.animatingOut
}
@@ -291,7 +337,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
private var appeared = false

func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
-guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, isLandscape) = self.validLayout else {
+guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, isLandscape, isTablet) = self.validLayout else {
return
}
self.appeared = true
@@ -302,12 +348,24 @@ final class VoiceChatMainStageNode: ASDisplayNode {
self.microphoneNode.alpha = 0.0
self.headerNode.alpha = 0.0

+let hasPlaceholder = !self.placeholderIconNode.alpha.isZero
+
let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.topFadeNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.titleNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.microphoneNode, alpha: 1.0)
alphaTransition.updateAlpha(node: self.headerNode, alpha: 1.0)
+if hasPlaceholder {
+self.placeholderIconNode.alpha = 0.0
+self.placeholderTextNode.alpha = 0.0
+alphaTransition.updateAlpha(node: self.placeholderTextNode, alpha: 1.0)
+
+if !self.placeholderButton.alpha.isZero {
+self.placeholderButton.alpha = 0.0
+alphaTransition.updateAlpha(node: self.placeholderButton, alpha: 1.0)
+}
+}

let targetFrame = self.frame

@@ -324,23 +382,44 @@ final class VoiceChatMainStageNode: ASDisplayNode {

self.animatingIn = true
let startLocalFrame = sourceNode.view.convert(sourceNode.bounds, to: self.supernode?.view)
-self.update(size: startLocalFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: .immediate)
+self.update(size: startLocalFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, force: true, transition: .immediate)
self.frame = startLocalFrame
-self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: transition)
+self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, force: true, transition: transition)
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
sourceNode.alpha = 1.0
self?.animatingIn = false
completion()
})
+
+if hasPlaceholder, let iconSnapshotView = sourceNode.placeholderIconNode.view.snapshotView(afterScreenUpdates: false), let textSnapshotView = sourceNode.placeholderTextNode.view.snapshotView(afterScreenUpdates: false) {
+iconSnapshotView.frame = sourceNode.placeholderIconNode.frame
+self.view.addSubview(iconSnapshotView)
+textSnapshotView.frame = sourceNode.placeholderTextNode.frame
+self.view.addSubview(textSnapshotView)
+transition.updatePosition(layer: iconSnapshotView.layer, position: self.placeholderIconNode.position, completion: { [weak self, weak iconSnapshotView] _ in
+iconSnapshotView?.removeFromSuperview()
+self?.placeholderIconNode.alpha = 1.0
+})
+transition.updateTransformScale(layer: iconSnapshotView.layer, scale: 2.0)
+textSnapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak textSnapshotView] _ in
+textSnapshotView?.removeFromSuperview()
+})
+let textPosition = self.placeholderTextNode.position
+self.placeholderTextNode.position = textSnapshotView.center
+transition.updatePosition(layer: textSnapshotView.layer, position: textPosition)
+transition.updatePosition(node: self.placeholderTextNode, position: textPosition)
+}
}

func animateTransitionOut(to targetNode: ASDisplayNode?, offset: CGFloat, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
-guard let (_, sideInset, bottomInset, isLandscape) = self.validLayout else {
+guard let (_, sideInset, bottomInset, isLandscape, isTablet) = self.validLayout else {
return
}

self.appeared = false

+let hasPlaceholder = !self.placeholderIconNode.alpha.isZero
+
let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
if offset.isZero {
alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 0.0)
@@ -357,11 +436,40 @@ final class VoiceChatMainStageNode: ASDisplayNode {
alphaTransition.updateAlpha(node: self.microphoneNode, alpha: 0.0)
alphaTransition.updateAlpha(node: self.headerNode, alpha: 0.0)
alphaTransition.updateAlpha(node: self.bottomFadeNode, alpha: 1.0)
+if hasPlaceholder {
+alphaTransition.updateAlpha(node: self.placeholderTextNode, alpha: 0.0)
+if !self.placeholderButton.alpha.isZero {
+self.placeholderButton.alpha = 0.0
+self.placeholderButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
+}
+}
+
+let originalFrame = self.frame
+let initialFrame = originalFrame.offsetBy(dx: 0.0, dy: offset)
guard let targetNode = targetNode as? VoiceChatTileItemNode, let _ = targetNode.item else {
+guard let supernode = self.supernode else {
completion()
return
}
+self.animatingOut = true
+self.frame = initialFrame
+if offset < 0.0 {
+let targetFrame = CGRect(origin: CGPoint(x: 0.0, y: -originalFrame.size.height), size: originalFrame.size)
+transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
+self?.frame = originalFrame
+completion()
+self?.animatingOut = false
+})
+} else {
+let targetFrame = CGRect(origin: CGPoint(x: 0.0, y: supernode.frame.height), size: originalFrame.size)
+transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
+self?.frame = originalFrame
+completion()
+self?.animatingOut = false
+})
+}
+return
+}

targetNode.isHidden = false
if offset.isZero {
@@ -369,8 +477,6 @@ final class VoiceChatMainStageNode: ASDisplayNode {
}

self.animatingOut = true
-let originalFrame = self.frame
-let initialFrame = originalFrame.offsetBy(dx: 0.0, dy: offset)
let targetFrame = targetNode.view.convert(targetNode.bounds, to: self.supernode?.view)

self.currentVideoNode?.keepBackdropSize = true
@@ -389,27 +495,62 @@ final class VoiceChatMainStageNode: ASDisplayNode {
targetNode.alpha = 0.0

self.frame = initialFrame
-self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: transition)
+let textPosition = self.placeholderTextNode.position
+var textTargetPosition = textPosition
+var textView: UIView?
+if hasPlaceholder, let iconSnapshotView = targetNode.placeholderIconNode.view.snapshotView(afterScreenUpdates: false), let textSnapshotView = targetNode.placeholderTextNode.view.snapshotView(afterScreenUpdates: false) {
+self.view.addSubview(iconSnapshotView)
+self.view.addSubview(textSnapshotView)
+iconSnapshotView.transform = CGAffineTransform(scaleX: 2.0, y: 2.0)
+iconSnapshotView.center = self.placeholderIconNode.position
+textSnapshotView.center = textPosition
+textTargetPosition = targetNode.placeholderTextNode.position
+
+self.placeholderIconNode.alpha = 0.0
+transition.updatePosition(layer: iconSnapshotView.layer, position: targetNode.placeholderIconNode.position, completion: { [weak self, weak iconSnapshotView] _ in
+iconSnapshotView?.removeFromSuperview()
+self?.placeholderIconNode.alpha = 1.0
+})
+transition.updateTransformScale(layer: iconSnapshotView.layer, scale: 1.0)
+
+textView = textSnapshotView
+textSnapshotView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, removeOnCompletion: false)
+}
+
+self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, force: true, transition: transition)
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
if let strongSelf = self {
completion()

infoView?.removeFromSuperview()
+textView?.removeFromSuperview()
targetNode.alpha = 1.0
targetNode.highlightNode.layer.animateAlpha(from: 0.0, to: targetNode.highlightNode.alpha, duration: 0.2)
strongSelf.animatingOut = false
strongSelf.frame = originalFrame
-strongSelf.update(size: initialFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
+strongSelf.update(size: initialFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
}
})
+
+if hasPlaceholder {
+self.placeholderTextNode.position = textPosition
+if let textSnapshotView = textView {
+transition.updatePosition(layer: textSnapshotView.layer, position: textTargetPosition)
+}
+transition.updatePosition(node: self.placeholderTextNode, position: textTargetPosition)
+}
+
self.update(speakingPeerId: nil)
}

private var effectiveSpeakingPeerId: PeerId?
private func updateSpeakingPeer() {
+guard let (_, _, _, _, isTablet) = self.validLayout else {
+return
+}
var effectiveSpeakingPeerId = self.speakingPeerId
-if let peerId = effectiveSpeakingPeerId, self.visiblePeerIds.contains(peerId) || self.currentPeer?.0 == peerId || self.callState?.myPeerId == peerId {
+if let peerId = effectiveSpeakingPeerId, self.visiblePeerIds.contains(peerId) || self.currentPeer?.0 == peerId || self.callState?.myPeerId == peerId || isTablet {
effectiveSpeakingPeerId = nil
}
guard self.effectiveSpeakingPeerId != effectiveSpeakingPeerId else {
@@ -439,8 +580,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {

strongSelf.speakingContainerNode.alpha = 0.0

-if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
-strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
+if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
+strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
}

strongSelf.speakingContainerNode.alpha = 1.0
@@ -525,7 +666,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
let peer = peerEntry.peer
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
if !arePeersEqual(previousPeerEntry?.peer, peerEntry.peer) {
-self.backdropAvatarNode.setSignal(peerAvatarCompleteImage(account: self.context.account, peer: peer, size: CGSize(width: 180.0, height: 180.0), round: false, font: avatarPlaceholderFont(size: 78.0), drawLetters: false))
+self.backdropAvatarNode.setSignal(peerAvatarCompleteImage(account: self.context.account, peer: peer, size: CGSize(width: 180.0, height: 180.0), round: false, font: avatarPlaceholderFont(size: 78.0), drawLetters: false, blurred: true))
self.avatarNode.setSignal(peerAvatarCompleteImage(account: self.context.account, peer: peer, size: CGSize(width: 180.0, height: 180.0), font: avatarPlaceholderFont(size: 78.0), fullSize: true))
}

@@ -573,8 +714,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
titleAttributedString = updatedString
}
self.titleNode.attributedText = titleAttributedString
-if let (size, sideInset, bottomInset, isLandscape) = self.validLayout {
-self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
+if let (size, sideInset, bottomInset, isLandscape, isTablet) = self.validLayout {
+self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
}

self.pinButtonTitleNode.isHidden = !pinned
@@ -617,15 +758,16 @@ final class VoiceChatMainStageNode: ASDisplayNode {
}

private func setAvatarHidden(_ hidden: Bool) {
-self.backdropAvatarNode.isHidden = hidden
-self.backdropEffectView?.isHidden = hidden
+self.topFadeNode.isHidden = !hidden
+self.bottomFadeNode.isHidden = !hidden
+self.bottomFillNode.isHidden = !hidden
self.avatarNode.isHidden = hidden
self.audioLevelNode.isHidden = hidden
}

-func update(peer: (peer: PeerId, endpointId: String?)?, waitForFullSize: Bool, completion: (() -> Void)? = nil) {
+func update(peer: (peer: PeerId, endpointId: String?, isMyPeer: Bool, isPresentation: Bool, isPaused: Bool)?, isReady: Bool = true, waitForFullSize: Bool, completion: (() -> Void)? = nil) {
let previousPeer = self.currentPeer
-if previousPeer?.0 == peer?.0 && previousPeer?.1 == peer?.1 {
+if previousPeer?.0 == peer?.0 && previousPeer?.1 == peer?.1 && previousPeer?.2 == peer?.2 && previousPeer?.3 == peer?.3 && previousPeer?.4 == peer?.4 {
completion?()
return
}
@@ -633,24 +775,50 @@ final class VoiceChatMainStageNode: ASDisplayNode {

self.updateSpeakingPeer()

-if let (_, endpointId) = peer {
+var isTablet = false
+if let (_, _, _, _, isTabletValue) = self.validLayout {
+isTablet = isTabletValue
+}
+
+if let (_, endpointId, isMyPeer, isPresentation, isPaused) = peer {
+let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
+
+var showPlaceholder = false
+if isMyPeer && isPresentation {
+self.placeholderTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_YouAreSharingScreen, font: Font.semibold(15.0), textColor: .white)
+self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: isTablet ? "Call/ScreenShareTablet" : "Call/ScreenSharePhone"), color: .white)
+showPlaceholder = true
+} else if isPaused {
+self.placeholderTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_VideoPaused, font: Font.semibold(14.0), textColor: .white)
+self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pause"), color: .white)
+showPlaceholder = true
+}
+
+let updatePlaceholderVisibility = {
+let peerChanged = previousPeer?.0 != peer?.0
+let transition: ContainedViewLayoutTransition = self.appeared && !peerChanged ? .animated(duration: 0.2, curve: .easeInOut) : .immediate
+transition.updateAlpha(node: self.placeholderTextNode, alpha: showPlaceholder ? 1.0 : 0.0)
+transition.updateAlpha(node: self.placeholderIconNode, alpha: showPlaceholder ? 1.0 : 0.0)
+transition.updateAlpha(node: self.placeholderButton, alpha: showPlaceholder && !isPaused ? 1.0 : 0.0)
+}
+
if endpointId != previousPeer?.1 {
+updatePlaceholderVisibility()
if let endpointId = endpointId {
var delayTransition = false
-if previousPeer?.0 == peer?.0 && self.appeared {
+if previousPeer?.0 == peer?.0 && previousPeer?.1 == nil && self.appeared {
delayTransition = true
}
if !delayTransition {
self.setAvatarHidden(true)
}

self.call.makeIncomingVideoView(endpointId: endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
return
}

-let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
+let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView, disabledText: presentationData.strings.VoiceChat_VideoPaused)
videoNode.tapped = { [weak self] in
guard let strongSelf = self else {
return
@@ -675,12 +843,18 @@ final class VoiceChatMainStageNode: ASDisplayNode {
strongSelf.controlsHidden?(false)
strongSelf.setControlsHidden(false, animated: true)
}
+videoNode.updateIsBlurred(isBlurred: isPaused, light: true, animated: false)
videoNode.isUserInteractionEnabled = true
let previousVideoNode = strongSelf.currentVideoNode
strongSelf.currentVideoNode = videoNode
-strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backgroundNode)
+strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backdropAvatarNode)

-if delayTransition {
+if !isReady {
+videoNode.alpha = 0.0
+strongSelf.topFadeNode.isHidden = true
+strongSelf.bottomFadeNode.isHidden = true
+strongSelf.bottomFillNode.isHidden = true
+} else if delayTransition {
videoNode.alpha = 0.0
}
if waitForFullSize {
@@ -694,17 +868,22 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
Queue.mainQueue().after(0.1) {
-if let strongSelf = self {
-if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
-strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
+guard let strongSelf = self else {
+return
}

+if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
+strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
}

Queue.mainQueue().after(0.02) {
completion?()
}

-if delayTransition {
+if videoNode.alpha.isZero {
+strongSelf.topFadeNode.isHidden = true
+strongSelf.bottomFadeNode.isHidden = true
+strongSelf.bottomFillNode.isHidden = true
if let videoNode = strongSelf.currentVideoNode {
videoNode.alpha = 1.0
videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
@@ -726,8 +905,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
}
}))
} else {
-if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
-strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
+if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
+strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
}
if let previousVideoNode = previousVideoNode {
previousVideoNode.removeFromSupernode()
@@ -738,28 +917,55 @@ final class VoiceChatMainStageNode: ASDisplayNode {
}
})
} else {
-self.setAvatarHidden(false)
-if self.appeared {
if let currentVideoNode = self.currentVideoNode {
-currentVideoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak currentVideoNode] _ in
-currentVideoNode?.removeFromSupernode()
+currentVideoNode.removeFromSupernode()
+self.currentVideoNode = nil
+}
+self.setAvatarHidden(false)
+completion?()
+}
+} else {
+self.setAvatarHidden(endpointId != nil)
+if waitForFullSize && !isReady && !isPaused, let videoNode = self.currentVideoNode {
+self.videoReadyDisposable.set((videoNode.ready
+|> filter { $0 }
+|> take(1)
+|> deliverOnMainQueue).start(next: { [weak self] _ in
+Queue.mainQueue().after(0.1) {
+guard let strongSelf = self else {
+return
+}
+
+if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
+strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
+}
+
+Queue.mainQueue().after(0.02) {
+completion?()
+}
+
+updatePlaceholderVisibility()
+if videoNode.alpha.isZero {
+videoNode.updateIsBlurred(isBlurred: isPaused, light: true, animated: false)
+strongSelf.topFadeNode.isHidden = true
+strongSelf.bottomFadeNode.isHidden = true
+strongSelf.bottomFillNode.isHidden = true
+if let videoNode = strongSelf.currentVideoNode {
+videoNode.alpha = 1.0
+videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
+if let strongSelf = self {
+strongSelf.setAvatarHidden(true)
+}
})
}
+}
+}
+}))
} else {
-if let currentVideoNode = self.currentVideoNode {
-currentVideoNode.removeFromSupernode()
-self.currentVideoNode = nil
-}
-}
+updatePlaceholderVisibility()
+self.currentVideoNode?.updateIsBlurred(isBlurred: isPaused, light: true, animated: true)
completion?()
}
-} else {
-if let currentVideoNode = self.currentVideoNode {
-currentVideoNode.removeFromSupernode()
-self.currentVideoNode = nil
-}
-self.setAvatarHidden(endpointId != nil)
-completion?()
}
} else {
self.videoReadyDisposable.set(nil)
||||||
@ -782,8 +988,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
|||||||
transition.updateAlpha(node: self.bottomFillNode, alpha: hidden ? 0.0 : 1.0, delay: delay)
|
transition.updateAlpha(node: self.bottomFillNode, alpha: hidden ? 0.0 : 1.0, delay: delay)
|
||||||
}
|
}
|
||||||
|
|
||||||
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, isLandscape: Bool, force: Bool = false, transition: ContainedViewLayoutTransition) {
|
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, isLandscape: Bool, isTablet: Bool, force: Bool = false, transition: ContainedViewLayoutTransition) {
|
||||||
self.validLayout = (size, sideInset, bottomInset, isLandscape)
|
self.validLayout = (size, sideInset, bottomInset, isLandscape, isTablet)
|
||||||
|
|
||||||
if self.animating && !force {
|
if self.animating && !force {
|
||||||
return
|
return
|
||||||
@ -813,9 +1019,6 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
|||||||
|
|
||||||
transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size))
|
transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||||
transition.updateFrame(node: self.backdropAvatarNode, frame: CGRect(origin: CGPoint(), size: size))
|
transition.updateFrame(node: self.backdropAvatarNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||||
if let backdropEffectView = self.backdropEffectView {
|
|
||||||
transition.updateFrame(view: backdropEffectView, frame: CGRect(origin: CGPoint(), size: size))
|
|
||||||
}
|
|
||||||
|
|
||||||
let avatarSize = CGSize(width: 180.0, height: 180.0)
|
let avatarSize = CGSize(width: 180.0, height: 180.0)
|
||||||
let avatarFrame = CGRect(origin: CGPoint(x: (size.width - avatarSize.width) / 2.0, y: (size.height - avatarSize.height) / 2.0), size: avatarSize)
|
let avatarFrame = CGRect(origin: CGPoint(x: (size.width - avatarSize.width) / 2.0, y: (size.height - avatarSize.height) / 2.0), size: avatarSize)
|
||||||
@ -823,7 +1026,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
|||||||
transition.updateFrame(node: self.audioLevelNode, frame: avatarFrame.insetBy(dx: -60.0, dy: -60.0))
|
transition.updateFrame(node: self.audioLevelNode, frame: avatarFrame.insetBy(dx: -60.0, dy: -60.0))
|
||||||
|
|
||||||
let animationSize = CGSize(width: 36.0, height: 36.0)
|
let animationSize = CGSize(width: 36.0, height: 36.0)
|
||||||
let titleSize = self.titleNode.updateLayout(size)
|
let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - sideInset * 2.0 - 24.0 - animationSize.width, height: size.height))
|
||||||
transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: sideInset + 12.0 + animationSize.width, y: size.height - bottomInset - titleSize.height - 16.0), size: titleSize))
|
transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: sideInset + 12.0 + animationSize.width, y: size.height - bottomInset - titleSize.height - 16.0), size: titleSize))
|
||||||
|
|
||||||
transition.updateFrame(node: self.microphoneNode, frame: CGRect(origin: CGPoint(x: sideInset + 7.0, y: size.height - bottomInset - animationSize.height - 6.0), size: animationSize))
|
transition.updateFrame(node: self.microphoneNode, frame: CGRect(origin: CGPoint(x: sideInset + 7.0, y: size.height - bottomInset - animationSize.height - 6.0), size: animationSize))
|
||||||
@ -860,6 +1063,23 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
|||||||
self.speakingAvatarNode.frame = CGRect(origin: CGPoint(x: 4.0, y: 4.0), size: speakingAvatarSize)
|
self.speakingAvatarNode.frame = CGRect(origin: CGPoint(x: 4.0, y: 4.0), size: speakingAvatarSize)
|
||||||
self.speakingTitleNode.frame = CGRect(origin: CGPoint(x: 4.0 + speakingAvatarSize.width + 14.0, y: floorToScreenPixels((38.0 - speakingTitleSize.height) / 2.0)), size: speakingTitleSize)
|
self.speakingTitleNode.frame = CGRect(origin: CGPoint(x: 4.0 + speakingAvatarSize.width + 14.0, y: floorToScreenPixels((38.0 - speakingTitleSize.height) / 2.0)), size: speakingTitleSize)
|
||||||
transition.updateFrame(node: self.speakingContainerNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - speakingContainerSize.width) / 2.0), y: 46.0), size: speakingContainerSize))
|
transition.updateFrame(node: self.speakingContainerNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - speakingContainerSize.width) / 2.0), y: 46.0), size: speakingContainerSize))
|
||||||
|
|
||||||
|
let placeholderTextSize = self.placeholderTextNode.updateLayout(CGSize(width: size.width - 100.0, height: 100.0))
|
||||||
|
transition.updateFrame(node: self.placeholderTextNode, frame: CGRect(origin: CGPoint(x: floor((size.width - placeholderTextSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) + 10.0), size: placeholderTextSize))
|
||||||
|
if let imageSize = self.placeholderIconNode.image?.size {
|
||||||
|
transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) - imageSize.height - 8.0), size: imageSize))
|
||||||
|
}
|
||||||
|
|
||||||
|
let placeholderButtonTextSize = self.placeholderButtonTextNode.updateLayout(CGSize(width: 240.0, height: 100.0))
|
||||||
|
let placeholderButtonSize = CGSize(width: placeholderButtonTextSize.width + 60.0, height: 52.0)
|
||||||
|
transition.updateFrame(node: self.placeholderButton, frame: CGRect(origin: CGPoint(x: floor((size.width - placeholderButtonSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) + 10.0 + placeholderTextSize.height + 30.0), size: placeholderButtonSize))
|
||||||
|
self.placeholderButtonEffectView?.frame = CGRect(origin: CGPoint(), size: placeholderButtonSize)
|
||||||
|
self.placeholderButtonHighlightNode.frame = CGRect(origin: CGPoint(), size: placeholderButtonSize)
|
||||||
|
self.placeholderButtonTextNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((placeholderButtonSize.width - placeholderButtonTextSize.width) / 2.0), y: floorToScreenPixels((placeholderButtonSize.height - placeholderButtonTextSize.height) / 2.0)), size: placeholderButtonTextSize)
|
||||||
|
|
||||||
|
if let imageSize = self.placeholderIconNode.image?.size {
|
||||||
|
transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) - imageSize.height - 8.0), size: imageSize))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func flipVideoIfNeeded() {
|
func flipVideoIfNeeded() {
|
||||||
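Aside (illustrative, not part of the patch): the placeholder handling added to update(peer:isReady:waitForFullSize:) above reduces to a small decision over the new tuple fields. A hedged restatement as a standalone function; the type and function names below are invented for illustration only.

// Standalone restatement of the placeholder rules from the diff above.
struct MainStagePlaceholderState {
    let showTextAndIcon: Bool       // VoiceChat_YouAreSharingScreen / VoiceChat_VideoPaused
    let showStopSharingButton: Bool // VoiceChat_StopScreenSharingShort
}

func placeholderState(isMyPeer: Bool, isPresentation: Bool, isPaused: Bool) -> MainStagePlaceholderState {
    // The placeholder appears for the local screen share, or whenever the video is paused.
    let show = (isMyPeer && isPresentation) || isPaused
    // The stop-sharing button is only offered while the video is not paused.
    return MainStagePlaceholderState(showTextAndIcon: show, showStopSharingButton: show && !isPaused)
}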
@@ -210,7 +210,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {

transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: 0.0)

-let initialRect = sourceNode.frame
+let initialRect = sourceRect
let initialScale: CGFloat = sourceRect.width / targetRect.width

let targetSize = CGSize(width: targetRect.size.width, height: targetRect.size.width)
@@ -254,6 +254,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
if let strongSelf = self {
+strongSelf.avatarListNode.updateCustomItemsOnlySynchronously = false
strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
}
})
@@ -268,6 +269,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
self.avatarListNode.shadowNode.frame = CGRect(x: 0.0, y: 0.0, width: targetRect.width, height: 44.0)

+self.avatarListNode.updateCustomItemsOnlySynchronously = true
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: self.customNode, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)

let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
@@ -334,6 +336,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
|
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
|
||||||
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
|
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
|
||||||
if let strongSelf = self {
|
if let strongSelf = self {
|
||||||
|
strongSelf.avatarListNode.updateCustomItemsOnlySynchronously = false
|
||||||
// strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
|
// strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
@ -348,6 +351,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
|||||||
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
|
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
|
||||||
self.avatarListNode.shadowNode.frame = CGRect(x: 0.0, y: 0.0, width: targetRect.width, height: 44.0)
|
self.avatarListNode.shadowNode.frame = CGRect(x: 0.0, y: 0.0, width: targetRect.width, height: 44.0)
|
||||||
|
|
||||||
|
self.avatarListNode.updateCustomItemsOnlySynchronously = true
|
||||||
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: nil, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)
|
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: nil, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)
|
||||||
|
|
||||||
let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
|
let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
|
||||||
@ -362,7 +366,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
|||||||
self.appeared = true
|
self.appeared = true
|
||||||
}
|
}
|
||||||
|
|
||||||
func animateOut(to targetNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition) {
|
func animateOut(to targetNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void = {}) {
|
||||||
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
|
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
|
||||||
let springDuration: Double = 0.3
|
let springDuration: Double = 0.3
|
||||||
let springDamping: CGFloat = 1000.0
|
let springDamping: CGFloat = 1000.0
|
||||||
@ -383,12 +387,13 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
|||||||
if let targetNode = targetNode {
|
if let targetNode = targetNode {
|
||||||
targetNode.contentNode.insertSubnode(targetNode.videoContainerNode, aboveSubnode: targetNode.backgroundNode)
|
targetNode.contentNode.insertSubnode(targetNode.videoContainerNode, aboveSubnode: targetNode.backgroundNode)
|
||||||
}
|
}
|
||||||
|
completion()
|
||||||
self?.removeFromSupernode()
|
self?.removeFromSupernode()
|
||||||
})
|
})
|
||||||
|
|
||||||
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: backgroundCornerRadius)
|
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: backgroundCornerRadius)
|
||||||
|
|
||||||
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
|
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: true) {
|
||||||
self.view.insertSubview(snapshotView, aboveSubview: targetNode.videoContainerNode.view)
|
self.view.insertSubview(snapshotView, aboveSubview: targetNode.videoContainerNode.view)
|
||||||
let snapshotFrame = snapshotView.frame
|
let snapshotFrame = snapshotView.frame
|
||||||
snapshotView.frame = CGRect(origin: CGPoint(x: 0.0, y: initialSize.width - snapshotView.frame.size.height), size: snapshotView.frame.size)
|
snapshotView.frame = CGRect(origin: CGPoint(x: 0.0, y: initialSize.width - snapshotView.frame.size.height), size: snapshotView.frame.size)
|
||||||
@ -439,6 +444,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
|||||||
if let targetNode = targetNode {
|
if let targetNode = targetNode {
|
||||||
targetNode.offsetContainerNode.insertSubnode(targetNode.videoContainerNode, at: 0)
|
targetNode.offsetContainerNode.insertSubnode(targetNode.videoContainerNode, at: 0)
|
||||||
}
|
}
|
||||||
|
completion()
|
||||||
self?.removeFromSupernode()
|
self?.removeFromSupernode()
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@@ -8,6 +8,12 @@ import AccountContext
private let tileSpacing: CGFloat = 4.0
let tileHeight: CGFloat = 180.0

+enum VoiceChatTileLayoutMode {
+case pairs
+case rows
+case grid
+}

final class VoiceChatTileGridNode: ASDisplayNode {
private let context: AccountContext

@@ -17,6 +23,10 @@ final class VoiceChatTileGridNode: ASDisplayNode {

private var absoluteLocation: (CGRect, CGSize)?

+var tileNodes: [VoiceChatTileItemNode] {
+return Array(self.itemNodes.values)
+}

init(context: AccountContext) {
self.context = context

@@ -25,6 +35,14 @@ final class VoiceChatTileGridNode: ASDisplayNode {
self.clipsToBounds = true
}

+var visiblity = true {
+didSet {
+for (_, tileNode) in self.itemNodes {
+tileNode.visiblity = self.visiblity
+}
+}
+}

func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) {
self.absoluteLocation = (rect, containerSize)
for itemNode in self.itemNodes.values {
@@ -35,32 +53,68 @@ final class VoiceChatTileGridNode: ASDisplayNode {
}
}

-func update(size: CGSize, items: [VoiceChatTileItem], transition: ContainedViewLayoutTransition) -> CGSize {
+func update(size: CGSize, layoutMode: VoiceChatTileLayoutMode, items: [VoiceChatTileItem], transition: ContainedViewLayoutTransition) -> CGSize {
self.items = items

var validIds: [String] = []

-let halfWidth = floorToScreenPixels((size.width - tileSpacing) / 2.0)
+let colsCount: CGFloat
-let lastItemIsWide = items.count % 2 != 0
+if case .grid = layoutMode {
+if items.count < 3 {
+colsCount = 1
+} else if items.count < 5 {
+colsCount = 2
+} else {
+colsCount = 3
+}
+} else {
+colsCount = 2
+}
+let rowsCount = ceil(CGFloat(items.count) / colsCount)

+let genericItemWidth = floorToScreenPixels((size.width - tileSpacing * (colsCount - 1)) / colsCount)
+let lastRowItemsAreWide: Bool
+let lastRowItemWidth: CGFloat
+if case .grid = layoutMode {
+lastRowItemsAreWide = [1, 2].contains(items.count) || items.count % Int(colsCount) != 0
+var lastRowItemsCount = CGFloat(items.count % Int(colsCount))
+if lastRowItemsCount.isZero {
+lastRowItemsCount = colsCount
+}
+lastRowItemWidth = floorToScreenPixels((size.width - tileSpacing * (lastRowItemsCount - 1)) / lastRowItemsCount)
+} else {
+lastRowItemsAreWide = items.count == 1 || items.count % Int(colsCount) != 0
+lastRowItemWidth = size.width
+}

let isFirstTime = self.isFirstTime
if isFirstTime {
self.isFirstTime = false
}

-let availableWidth = min(size.width, size.height)
+var availableWidth = min(size.width, size.height)
+var itemHeight = tileHeight
+if case .grid = layoutMode {
+itemHeight = size.height / rowsCount - (tileSpacing * (rowsCount - 1))
+}

for i in 0 ..< self.items.count {
let item = self.items[i]
-let isLast = i == self.items.count - 1
+let col = CGFloat(i % Int(colsCount))
+let row = floor(CGFloat(i) / colsCount)
+let isLastRow = row == (rowsCount - 1)

+let rowItemWidth = isLastRow && lastRowItemsAreWide ? lastRowItemWidth : genericItemWidth
let itemSize = CGSize(
-width: isLast && lastItemIsWide ? size.width : halfWidth,
+width: rowItemWidth,
-height: tileHeight
+height: itemHeight
)
-let col = CGFloat(i % 2)
-let row = floor(CGFloat(i) / 2.0)
+if case .grid = layoutMode {
-let itemFrame = CGRect(origin: CGPoint(x: col * (halfWidth + tileSpacing), y: row * (tileHeight + tileSpacing)), size: itemSize)
+availableWidth = rowItemWidth
+}

+let itemFrame = CGRect(origin: CGPoint(x: col * (rowItemWidth + tileSpacing), y: row * (itemHeight + tileSpacing)), size: itemSize)

validIds.append(item.id)
var itemNode: VoiceChatTileItemNode?
@@ -77,6 +131,7 @@ final class VoiceChatTileGridNode: ASDisplayNode {
self.addSubnode(addedItemNode)
}
if let itemNode = itemNode {
+itemNode.visiblity = self.visiblity
if wasAdded {
itemNode.frame = itemFrame
if !isFirstTime {
@@ -112,18 +167,20 @@ final class VoiceChatTileGridNode: ASDisplayNode {
}

let rowCount = ceil(CGFloat(self.items.count) / 2.0)
-return CGSize(width: size.width, height: rowCount * (tileHeight + tileSpacing))
+return CGSize(width: size.width, height: rowCount * (itemHeight + tileSpacing))
}
}

final class VoiceChatTilesGridItem: ListViewItem {
let context: AccountContext
let tiles: [VoiceChatTileItem]
+let layoutMode: VoiceChatTileLayoutMode
let getIsExpanded: () -> Bool

-init(context: AccountContext, tiles: [VoiceChatTileItem], getIsExpanded: @escaping () -> Bool) {
+init(context: AccountContext, tiles: [VoiceChatTileItem], layoutMode: VoiceChatTileLayoutMode, getIsExpanded: @escaping () -> Bool) {
self.context = context
self.tiles = tiles
+self.layoutMode = layoutMode
self.getIsExpanded = getIsExpanded
}

@@ -227,6 +284,7 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
strongSelf.cornersNode.image = decorationCornersImage(top: true, bottom: false, dark: item.getIsExpanded())

tileGridNode = VoiceChatTileGridNode(context: item.context)
+tileGridNode.visiblity = strongSelf.gridVisiblity
strongSelf.addSubnode(tileGridNode)
strongSelf.tileGridNode = tileGridNode
}
@@ -237,7 +295,7 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
}

let transition: ContainedViewLayoutTransition = currentItem == nil ? .immediate : .animated(duration: 0.3, curve: .easeInOut)
-let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: params.availableHeight), items: item.tiles, transition: transition)
+let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: params.availableHeight), layoutMode: item.layoutMode, items: item.tiles, transition: transition)
if currentItem == nil {
tileGridNode.frame = CGRect(x: params.leftInset, y: 0.0, width: tileGridSize.width, height: tileGridSize.height)
strongSelf.backgroundNode.frame = tileGridNode.frame
@@ -256,4 +314,16 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
self.absoluteLocation = (rect, containerSize)
self.tileGridNode?.updateAbsoluteRect(rect, within: containerSize)
}

+var gridVisiblity: Bool = true {
+didSet {
+self.tileGridNode?.visiblity = self.gridVisiblity
+}
+}

+func snapshotForDismissal() {
+if let snapshotView = self.tileGridNode?.view.snapshotView(afterScreenUpdates: false) {
+self.tileGridNode?.view.addSubview(snapshotView)
+}
+}
}
@@ -27,32 +27,42 @@ final class VoiceChatTileItem: Equatable {
let peer: Peer
let videoEndpointId: String
let videoReady: Bool
+let videoTimeouted: Bool
+let isPaused: Bool
+let isOwnScreencast: Bool
let strings: PresentationStrings
let nameDisplayOrder: PresentationPersonNameOrder
let icon: Icon
let text: VoiceChatParticipantItem.ParticipantText
let additionalText: VoiceChatParticipantItem.ParticipantText?
let speaking: Bool
+let secondary: Bool
+let isTablet: Bool
let action: () -> Void
let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
-let getVideo: () -> GroupVideoNode?
+let getVideo: (GroupVideoNode.Position) -> GroupVideoNode?
let getAudioLevel: (() -> Signal<Float, NoError>)?

var id: String {
return self.videoEndpointId
}

-init(account: Account, peer: Peer, videoEndpointId: String, videoReady: Bool, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, additionalText: VoiceChatParticipantItem.ParticipantText?, action: @escaping () -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
+init(account: Account, peer: Peer, videoEndpointId: String, videoReady: Bool, videoTimeouted: Bool, isPaused: Bool, isOwnScreencast: Bool, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, secondary: Bool, isTablet: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, additionalText: VoiceChatParticipantItem.ParticipantText?, action: @escaping () -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?, getVideo: @escaping (GroupVideoNode.Position) -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
self.account = account
self.peer = peer
self.videoEndpointId = videoEndpointId
self.videoReady = videoReady
+self.videoTimeouted = videoTimeouted
+self.isPaused = isPaused
+self.isOwnScreencast = isOwnScreencast
self.strings = strings
self.nameDisplayOrder = nameDisplayOrder
self.icon = icon
self.text = text
self.additionalText = additionalText
self.speaking = speaking
+self.secondary = secondary
+self.isTablet = isTablet
self.action = action
self.contextAction = contextAction
self.getVideo = getVideo
@@ -69,6 +79,15 @@ final class VoiceChatTileItem: Equatable {
if lhs.videoReady != rhs.videoReady {
return false
}
+if lhs.videoTimeouted != rhs.videoTimeouted {
+return false
+}
+if lhs.isPaused != rhs.isPaused {
+return false
+}
+if lhs.isOwnScreencast != rhs.isOwnScreencast {
+return false
+}
if lhs.icon != rhs.icon {
return false
}
@@ -81,6 +100,9 @@ final class VoiceChatTileItem: Equatable {
if lhs.speaking != rhs.speaking {
return false
}
+if lhs.secondary != rhs.secondary {
+return false
+}
if lhs.icon != rhs.icon {
return false
}
@@ -121,6 +143,9 @@ final class VoiceChatTileItemNode: ASDisplayNode {
var highlightNode: VoiceChatTileHighlightNode
private let statusNode: VoiceChatParticipantStatusNode

+let placeholderTextNode: ImmediateTextNode
+let placeholderIconNode: ASImageNode

private var profileNode: VoiceChatPeerProfileNode?
private var extractedRect: CGRect?
private var nonExtractedRect: CGRect?
@@ -164,9 +189,17 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.highlightNode.alpha = 0.0
self.highlightNode.updateGlowAndGradientAnimations(type: .speaking)

-super.init()
+self.placeholderTextNode = ImmediateTextNode()
+self.placeholderTextNode.alpha = 0.0
+self.placeholderTextNode.maximumNumberOfLines = 2
+self.placeholderTextNode.textAlignment = .center

-self.clipsToBounds = true
+self.placeholderIconNode = ASImageNode()
+self.placeholderIconNode.alpha = 0.0
+self.placeholderIconNode.contentMode = .scaleAspectFit
+self.placeholderIconNode.displaysAsynchronously = false

+super.init()

self.containerNode.addSubnode(self.contextSourceNode)
self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
@@ -178,6 +211,8 @@ final class VoiceChatTileItemNode: ASDisplayNode {
self.contentNode.addSubnode(self.fadeNode)
self.contentNode.addSubnode(self.infoNode)
self.infoNode.addSubnode(self.titleNode)
+self.contentNode.addSubnode(self.placeholderTextNode)
+self.contentNode.addSubnode(self.placeholderIconNode)
self.contentNode.addSubnode(self.highlightNode)

self.containerNode.shouldBegin = { [weak self] location in
@@ -227,15 +262,22 @@ final class VoiceChatTileItemNode: ASDisplayNode {
}
self.isExtracted = isExtracted

+let springDuration: Double = 0.42
+let springDamping: CGFloat = 124.0
if isExtracted {
let profileNode = VoiceChatPeerProfileNode(context: self.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
self?.contextSourceNode.requestDismiss?()
})
-profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
+profileNode.frame = CGRect(origin: CGPoint(), size: self.bounds.size)
self.profileNode = profileNode
self.contextSourceNode.contentNode.addSubnode(profileNode)

profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
+var appearenceTransition = transition
+if transition.isAnimated {
+appearenceTransition = .animated(duration: springDuration, curve: .customSpring(damping: springDamping, initialVelocity: 0.0))
+}
+appearenceTransition.updateFrame(node: profileNode, frame: extractedRect)

self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
if let strongSelf = self, let profileNode = strongSelf.profileNode {
@@ -245,9 +287,28 @@ final class VoiceChatTileItemNode: ASDisplayNode {
}
return nil
}

+self.backgroundNode.isHidden = true
+self.fadeNode.isHidden = true
+self.infoNode.isHidden = true
+self.highlightNode.isHidden = true
} else if let profileNode = self.profileNode {
self.profileNode = nil
-profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
+self.infoNode.isHidden = false
+profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition, completion: { [weak self] in
+if let strongSelf = self {
+strongSelf.backgroundNode.isHidden = false
+strongSelf.fadeNode.isHidden = false
+strongSelf.highlightNode.isHidden = false
+}
+})

+var appearenceTransition = transition
+if transition.isAnimated {
+appearenceTransition = .animated(duration: 0.2, curve: .easeInOut)
+}
+appearenceTransition.updateFrame(node: profileNode, frame: nonExtractedRect)

self.contextSourceNode.contentNode.customHitTest = nil
}
@@ -259,8 +320,23 @@ final class VoiceChatTileItemNode: ASDisplayNode {
if let shimmerNode = self.shimmerNode {
shimmerNode.updateAbsoluteRect(rect, within: containerSize)
}
-let isVisible = rect.maxY >= 0.0 && rect.minY <= containerSize.height
+self.updateIsEnabled()
-self.videoNode?.updateIsEnabled(isVisible)
+}

+var visiblity = true {
+didSet {
+self.updateIsEnabled()
+}
+}

+func updateIsEnabled() {
+guard let (rect, containerSize) = self.absoluteLocation else {
+return
+}
+let isVisibleInContainer = rect.maxY >= 0.0 && rect.minY <= containerSize.height
+if let videoNode = self.videoNode, videoNode.supernode === self.videoContainerNode {
+videoNode.updateIsEnabled(self.visiblity && isVisibleInContainer)
+}
}

func update(size: CGSize, availableWidth: CGFloat, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) {
@@ -270,10 +346,12 @@ final class VoiceChatTileItemNode: ASDisplayNode {

self.validLayout = (size, availableWidth)

-if !item.videoReady {
+if !item.videoReady || item.isOwnScreencast {
let shimmerNode: VoiceChatTileShimmeringNode
+let shimmerTransition: ContainedViewLayoutTransition
if let current = self.shimmerNode {
shimmerNode = current
+shimmerTransition = transition
} else {
shimmerNode = VoiceChatTileShimmeringNode(account: item.account, peer: item.peer)
self.contentNode.insertSubnode(shimmerNode, aboveSubnode: self.fadeNode)
@@ -282,9 +360,10 @@ final class VoiceChatTileItemNode: ASDisplayNode {
if let (rect, containerSize) = self.absoluteLocation {
shimmerNode.updateAbsoluteRect(rect, within: containerSize)
}
+shimmerTransition = .immediate
}
-transition.updateFrame(node: shimmerNode, frame: CGRect(origin: CGPoint(), size: size))
+shimmerTransition.updateFrame(node: shimmerNode, frame: CGRect(origin: CGPoint(), size: size))
-shimmerNode.update(shimmeringColor: UIColor.white, size: size, transition: transition)
+shimmerNode.update(shimmeringColor: UIColor.white, shimmering: !item.isOwnScreencast && !item.videoTimeouted && !item.isPaused, size: size, transition: shimmerTransition)
} else if let shimmerNode = self.shimmerNode {
self.shimmerNode = nil
shimmerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak shimmerNode] _ in
@@ -292,6 +371,9 @@ final class VoiceChatTileItemNode: ASDisplayNode {
})
}

+var nodeToAnimateIn: ASDisplayNode?
+var placeholderAppeared = false

var itemTransition = transition
if self.item != item {
let previousItem = self.item
@@ -316,13 +398,31 @@ final class VoiceChatTileItemNode: ASDisplayNode {
current.removeFromSupernode()
}

-if let videoNode = item.getVideo() {
+if let videoNode = item.getVideo(item.secondary ? .list : .tile) {
itemTransition = .immediate
self.videoNode = videoNode
self.videoContainerNode.addSubnode(videoNode)
+self.updateIsEnabled()
}
}

+self.videoNode?.updateIsBlurred(isBlurred: item.isPaused, light: true)

+var showPlaceholder = false
+if item.isOwnScreencast {
+self.placeholderTextNode.attributedText = NSAttributedString(string: item.strings.VoiceChat_YouAreSharingScreen, font: Font.semibold(13.0), textColor: .white)
+self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: item.isTablet ? "Call/ScreenShareTablet" : "Call/ScreenSharePhone"), color: .white)
+showPlaceholder = true
+} else if item.isPaused {
+self.placeholderTextNode.attributedText = NSAttributedString(string: item.strings.VoiceChat_VideoPaused, font: Font.semibold(13.0), textColor: .white)
+self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pause"), color: .white)
+showPlaceholder = true
+}

+placeholderAppeared = self.placeholderTextNode.alpha.isZero && showPlaceholder
+transition.updateAlpha(node: self.placeholderTextNode, alpha: showPlaceholder ? 1.0 : 0.0)
+transition.updateAlpha(node: self.placeholderIconNode, alpha: showPlaceholder ? 1.0 : 0.0)

let titleFont = Font.semibold(13.0)
let titleColor = UIColor.white
var titleAttributedString: NSAttributedString?
@@ -361,25 +461,8 @@ final class VoiceChatTileItemNode: ASDisplayNode {
}
self.titleNode.attributedText = titleAttributedString

-if case let .microphone(muted) = item.icon {
-let animationNode: VoiceChatMicrophoneNode
-if let current = self.animationNode {
-animationNode = current
-} else {
-animationNode = VoiceChatMicrophoneNode()
-self.animationNode = animationNode
-self.infoNode.addSubnode(animationNode)
-}
-animationNode.alpha = 1.0
-animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true)
-} else if let animationNode = self.animationNode {
-self.animationNode = nil
-animationNode.removeFromSupernode()
-}

var hadMicrophoneNode = false
var hadIconNode = false
-var nodeToAnimateIn: ASDisplayNode?

if case let .microphone(muted) = item.icon {
let animationNode: VoiceChatMicrophoneNode
@@ -389,13 +472,18 @@ final class VoiceChatTileItemNode: ASDisplayNode {
animationNode = VoiceChatMicrophoneNode()
self.animationNode = animationNode
self.infoNode.addSubnode(animationNode)

+nodeToAnimateIn = animationNode
}
animationNode.alpha = 1.0
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true)
} else if let animationNode = self.animationNode {
hadMicrophoneNode = true
self.animationNode = nil
-animationNode.removeFromSupernode()
+animationNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
+animationNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak animationNode] _ in
+animationNode?.removeFromSupernode()
+})
}

if case .presentation = item.icon {
@@ -449,8 +537,10 @@ final class VoiceChatTileItemNode: ASDisplayNode {
if self.videoContainerNode.supernode === self.contentNode {
if let videoNode = self.videoNode {
itemTransition.updateFrame(node: videoNode, frame: bounds)
+if videoNode.supernode === self.videoContainerNode {
videoNode.updateLayout(size: size, layoutMode: .fillOrFitToSquare, transition: itemTransition)
}
+}
transition.updateFrame(node: self.videoContainerNode, frame: bounds)
}

@@ -463,6 +553,11 @@ final class VoiceChatTileItemNode: ASDisplayNode {
let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height))
self.titleNode.frame = CGRect(origin: CGPoint(x: 30.0, y: size.height - titleSize.height - 8.0), size: titleSize)

+var transition = transition
+if nodeToAnimateIn != nil || placeholderAppeared {
+transition = .immediate
+}

if let iconNode = self.iconNode, let image = iconNode.image {
transition.updateFrame(node: iconNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels(16.0 - image.size.width / 2.0), y: floorToScreenPixels(size.height - 15.0 - image.size.height / 2.0)), size: image.size))
}
@@ -473,70 +568,40 @@ final class VoiceChatTileItemNode: ASDisplayNode {
animationNode.transform = CATransform3DMakeScale(0.66667, 0.66667, 1.0)
transition.updatePosition(node: animationNode, position: CGPoint(x: 16.0, y: size.height - 15.0))
}

+let placeholderTextSize = self.placeholderTextNode.updateLayout(CGSize(width: size.width - 30.0, height: 100.0))
+transition.updateFrame(node: self.placeholderTextNode, frame: CGRect(origin: CGPoint(x: floor((size.width - placeholderTextSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) + 10.0), size: placeholderTextSize))
+if let image = self.placeholderIconNode.image {
+let imageSize = CGSize(width: image.size.width * 0.5, height: image.size.height * 0.5)
+transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) - imageSize.height - 4.0), size: imageSize))
+}
}

-func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
+func transitionIn(from sourceNode: ASDisplayNode?) {
-guard let _ = self.item else {
+guard let item = self.item else {
return
}
-var duration: Double = 0.2
+var videoNode: GroupVideoNode?
-var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
+if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item {
-if case let .animated(transitionDuration, curve) = transition {
+if let sourceVideoNode = sourceNode.videoNode {
-duration = transitionDuration + 0.05
+sourceNode.videoNode = nil
-timingFunction = curve.timingFunction
+videoNode = sourceVideoNode
+}
}

-if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item {
+if videoNode == nil {
-let initialAnimate = animate
+videoNode = item.getVideo(item.secondary ? .list : .tile)
+}

-var startContainerPosition = sourceNode.view.convert(sourceNode.bounds, to: containerNode.view).center
+if let videoNode = videoNode {
-var animate = initialAnimate
-// if startContainerPosition.y > containerNode.frame.height - 238.0 {
-// animate = false
-// }

-if let videoNode = sourceNode.videoNode {
-sourceNode.videoNode = nil
videoNode.alpha = 1.0
self.videoNode = videoNode
self.videoContainerNode.addSubnode(videoNode)
-}

-if animate {
+videoNode.updateLayout(size: self.bounds.size, layoutMode: .fillOrFitToSquare, transition: .immediate)
-sourceNode.isHidden = true
+videoNode.frame = self.bounds
-Queue.mainQueue().after(0.7) {
-sourceNode.isHidden = false
-}

-let initialPosition = self.contextSourceNode.position
+self.updateIsEnabled()
-let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center

-self.contextSourceNode.position = targetContainerPosition
-containerNode.addSubnode(self.contextSourceNode)

-self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: duration, timingFunction: timingFunction)
-self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
-if let strongSelf = self {
-strongSelf.contextSourceNode.position = initialPosition
-strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
-}
-})

-self.videoNode?.updateLayout(size: self.bounds.size, layoutMode: .fillOrFitToSquare, transition: transition)
-self.videoNode?.frame = self.bounds
-} else if !initialAnimate {
-self.videoNode?.updateLayout(size: self.bounds.size, layoutMode: .fillOrFitToSquare, transition: .immediate)
-self.videoNode?.frame = self.bounds

-sourceNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, timingFunction: timingFunction, removeOnCompletion: false, completion: { [weak sourceNode] _ in
-sourceNode?.layer.removeAllAnimations()
-})
-sourceNode.layer.animateScale(from: 1.0, to: 0.0, duration: duration, timingFunction: timingFunction)
-}

-if transition.isAnimated {
-self.fadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
-}
}
}
}
@@ -822,6 +887,7 @@ private class VoiceChatTileShimmeringNode: ASDisplayNode {
private let borderEffectNode: ShimmerEffectForegroundNode

private var currentShimmeringColor: UIColor?
+private var currentShimmering: Bool?
private var currentSize: CGSize?

public init(account: Account, peer: Peer) {
@@ -844,12 +910,13 @@ private class VoiceChatTileShimmeringNode: ASDisplayNode {
self.addSubnode(self.borderNode)
self.borderNode.addSubnode(self.borderEffectNode)

-self.backgroundNode.setSignal(peerAvatarCompleteImage(account: account, peer: peer, size: CGSize(width: 180.0, height: 180.0), round: false, font: Font.regular(16.0), drawLetters: false, fullSize: false, blurred: true))
+self.backgroundNode.setSignal(peerAvatarCompleteImage(account: account, peer: peer, size: CGSize(width: 250.0, height: 250.0), round: false, font: Font.regular(16.0), drawLetters: false, fullSize: false, blurred: true))
}

public override func didLoad() {
super.didLoad()

+if self.effectNode.supernode != nil {
self.effectNode.layer.compositingFilter = "screenBlendMode"
self.borderEffectNode.layer.compositingFilter = "screenBlendMode"

@@ -862,35 +929,43 @@ private class VoiceChatTileShimmeringNode: ASDisplayNode {
if let size = self.currentSize {
borderMaskView.frame = CGRect(origin: CGPoint(), size: size)
}

self.borderNode.view.mask = borderMaskView

if #available(iOS 13.0, *) {
-self.layer.cornerCurve = .continuous
borderMaskView.layer.cornerCurve = .continuous
}
}
+if #available(iOS 13.0, *) {
+self.layer.cornerCurve = .continuous
+}
+}

public func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) {
self.effectNode.updateAbsoluteRect(rect, within: containerSize)
self.borderEffectNode.updateAbsoluteRect(rect, within: containerSize)
}

-public func update(shimmeringColor: UIColor, size: CGSize, transition: ContainedViewLayoutTransition) {
+public func update(shimmeringColor: UIColor, shimmering: Bool, size: CGSize, transition: ContainedViewLayoutTransition) {
-if let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor) && self.currentSize == size {
+if let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor) && self.currentSize == size && self.currentShimmering == shimmering {
return
}

+let firstTime = self.currentShimmering == nil
self.currentShimmeringColor = shimmeringColor
+self.currentShimmering = shimmering
self.currentSize = size

+let transition: ContainedViewLayoutTransition = firstTime ? .immediate : (transition.isAnimated ? transition : .animated(duration: 0.45, curve: .easeInOut))
+transition.updateAlpha(node: self.effectNode, alpha: shimmering ? 1.0 : 0.0)
+transition.updateAlpha(node: self.borderNode, alpha: shimmering ? 1.0 : 0.0)

let bounds = CGRect(origin: CGPoint(), size: size)

self.effectNode.update(foregroundColor: shimmeringColor.withAlphaComponent(0.3))
-self.effectNode.frame = bounds
+transition.updateFrame(node: self.effectNode, frame: bounds)

self.borderEffectNode.update(foregroundColor: shimmeringColor.withAlphaComponent(0.45))
-self.borderEffectNode.frame = bounds
+transition.updateFrame(node: self.borderEffectNode, frame: bounds)

transition.updateFrame(node: self.backgroundNode, frame: bounds)
transition.updateFrame(node: self.borderNode, frame: bounds)
@@ -3,12 +3,15 @@ import UIKit
import AsyncDisplayKit
import Display
import TelegramPresentationData
+import ChatTitleActivityNode

+private let constructiveColor: UIColor = UIColor(rgb: 0x34c759)

final class VoiceChatTitleNode: ASDisplayNode {
private var theme: PresentationTheme

private let titleNode: ASTextNode
-private let infoNode: ASTextNode
+private let infoNode: ChatTitleActivityNode
let recordingIconNode: VoiceChatRecordingIconNode

public var isRecording: Bool = false {
@@ -28,11 +31,7 @@ final class VoiceChatTitleNode: ASDisplayNode {
self.titleNode.truncationMode = .byTruncatingTail
self.titleNode.isOpaque = false

-self.infoNode = ASTextNode()
+self.infoNode = ChatTitleActivityNode()
-self.infoNode.displaysAsynchronously = false
-self.infoNode.maximumNumberOfLines = 1
-self.infoNode.truncationMode = .byTruncatingTail
-self.infoNode.isOpaque = false

self.recordingIconNode = VoiceChatRecordingIconNode(hasBackground: false)

@@ -65,7 +64,7 @@ final class VoiceChatTitleNode: ASDisplayNode {
self.tapped?()
}

-func update(size: CGSize, title: String, subtitle: String, slide: Bool, transition: ContainedViewLayoutTransition) {
+func update(size: CGSize, title: String, subtitle: String, speaking: Bool, slide: Bool, transition: ContainedViewLayoutTransition) {
guard !size.width.isZero else {
return
}
@@ -94,11 +93,18 @@ final class VoiceChatTitleNode: ASDisplayNode {
}

self.titleNode.attributedText = NSAttributedString(string: title, font: Font.medium(17.0), textColor: UIColor(rgb: 0xffffff))
-self.infoNode.attributedText = NSAttributedString(string: subtitle, font: Font.regular(13.0), textColor: UIColor(rgb: 0xffffff, alpha: 0.5))
+var state = ChatTitleActivityNodeState.none
+if speaking {
+state = .recordingVoice(NSAttributedString(string: subtitle, font: Font.regular(13.0), textColor: constructiveColor), constructiveColor)
+} else {
+state = .info(NSAttributedString(string: subtitle, font: Font.regular(13.0), textColor: UIColor(rgb: 0xffffff, alpha: 0.5)), .generic)
+}
+let _ = self.infoNode.transitionToState(state, animation: .slide)

let constrainedSize = CGSize(width: size.width - 140.0, height: size.height)
let titleSize = self.titleNode.measure(constrainedSize)
-let infoSize = self.infoNode.measure(constrainedSize)
+let infoSize = self.infoNode.updateLayout(constrainedSize, offset: 1.0, alignment: .center)
let titleInfoSpacing: CGFloat = 0.0

let combinedHeight = titleSize.height + infoSize.height + titleInfoSpacing
|
@ -1259,9 +1259,9 @@ public func setupAccount(_ account: Account, fetchCachedResourceRepresentation:
|
|||||||
account.postbox.mediaBox.preFetchedResourcePath = preFetchedResourcePath
|
account.postbox.mediaBox.preFetchedResourcePath = preFetchedResourcePath
|
||||||
account.postbox.mediaBox.fetchResource = { [weak account] resource, intervals, parameters -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
|
account.postbox.mediaBox.fetchResource = { [weak account] resource, intervals, parameters -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
|
||||||
if let strongAccount = account {
|
if let strongAccount = account {
|
||||||
if let result = fetchResource(account: strongAccount, resource: resource, intervals: intervals, parameters: parameters) {
|
if let result = strongAccount.auxiliaryMethods.fetchResource(strongAccount, resource, intervals, parameters) {
|
||||||
return result
|
return result
|
||||||
} else if let result = strongAccount.auxiliaryMethods.fetchResource(strongAccount, resource, intervals, parameters) {
|
} else if let result = fetchResource(account: strongAccount, resource: resource, intervals: intervals, parameters: parameters) {
|
||||||
return result
|
return result
|
||||||
} else {
|
} else {
|
||||||
return .never()
|
return .never()
|
||||||
|
@ -175,6 +175,7 @@ private var declaredEncodables: Void = {
|
|||||||
declareEncodable(ExportedInvitation.self, f: { ExportedInvitation(decoder: $0) })
|
declareEncodable(ExportedInvitation.self, f: { ExportedInvitation(decoder: $0) })
|
||||||
declareEncodable(CachedDisplayAsPeers.self, f: { CachedDisplayAsPeers(decoder: $0) })
|
declareEncodable(CachedDisplayAsPeers.self, f: { CachedDisplayAsPeers(decoder: $0) })
|
||||||
declareEncodable(WallpapersState.self, f: { WallpapersState(decoder: $0) })
|
declareEncodable(WallpapersState.self, f: { WallpapersState(decoder: $0) })
|
||||||
|
declareEncodable(WallpaperDataResource.self, f: { WallpaperDataResource(decoder: $0) })
|
||||||
|
|
||||||
return
|
return
|
||||||
}()
|
}()
|
||||||
@@ -190,6 +191,24 @@ public func rootPathForBasePath(_ appGroupPath: String) -> String {
 public func performAppGroupUpgrades(appGroupPath: String, rootPath: String) {
     let _ = try? FileManager.default.createDirectory(at: URL(fileURLWithPath: rootPath), withIntermediateDirectories: true, attributes: nil)
 
+    if let items = FileManager.default.enumerator(at: URL(fileURLWithPath: appGroupPath), includingPropertiesForKeys: [.isDirectoryKey], options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants], errorHandler: nil) {
+        let allowedDirectories: [String] = [
+            "telegram-data",
+            "Library"
+        ]
+
+        for url in items {
+            guard let url = url as? URL else {
+                continue
+            }
+            if let isDirectory = try? url.resourceValues(forKeys: [.isDirectoryKey]).isDirectory, isDirectory {
+                if !allowedDirectories.contains(url.lastPathComponent) {
+                    let _ = try? FileManager.default.removeItem(at: url)
+                }
+            }
+        }
+    }
+
     do {
         var resourceValues = URLResourceValues()
         resourceValues.isExcludedFromBackup = true
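Note on the hunk above (not part of the patch): a minimal standalone sketch of the cleanup rule it introduces, assuming the same allow-list semantics, where any top-level directory in the app group container other than "telegram-data" and "Library" is treated as stale and removed. The helper name shouldRemoveAppGroupItem is hypothetical.

import Foundation

// Hypothetical helper mirroring the allow-list check added to performAppGroupUpgrades.
func shouldRemoveAppGroupItem(at url: URL, allowedDirectories: [String] = ["telegram-data", "Library"]) -> Bool {
    // Only directories are candidates for removal; plain files are left untouched.
    guard let isDirectory = try? url.resourceValues(forKeys: [.isDirectoryKey]).isDirectory, isDirectory else {
        return false
    }
    return !allowedDirectories.contains(url.lastPathComponent)
}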
@@ -77,7 +77,7 @@ extension TelegramWallpaper {
     //assertionFailure()
     self = .color(0xffffff)
 }
-case let .wallPaperNoFile(_, settings):
+case let .wallPaperNoFile(_, _, settings):
     if let settings = settings, case let .wallPaperSettings(_, backgroundColor, secondBackgroundColor, thirdBackgroundColor, fourthBackgroundColor, _, rotation) = settings {
         let colors: [UInt32] = ([backgroundColor, secondBackgroundColor, thirdBackgroundColor, fourthBackgroundColor] as [Int32?]).compactMap({ color -> UInt32? in
             return color.flatMap(UInt32.init(bitPattern:))
@@ -103,9 +103,9 @@ extension TelegramWallpaper {
 case let .file(_, _, _, _, _, _, slug, _, settings):
     return (.inputWallPaperSlug(slug: slug), apiWallpaperSettings(settings))
 case let .color(color):
-    return (.inputWallPaperNoFile, apiWallpaperSettings(WallpaperSettings(colors: [color])))
+    return (.inputWallPaperNoFile(id: 0), apiWallpaperSettings(WallpaperSettings(colors: [color])))
 case let .gradient(colors, settings):
-    return (.inputWallPaperNoFile, apiWallpaperSettings(WallpaperSettings(colors: colors, rotation: settings.rotation)))
+    return (.inputWallPaperNoFile(id: 0), apiWallpaperSettings(WallpaperSettings(colors: colors, rotation: settings.rotation)))
 default:
     return nil
 }
@@ -142,29 +142,17 @@ public func getCurrentGroupCall(account: Account, callId: Int64, accessHash: Int
 } else if mutedByYou {
     muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: false, mutedByYou: mutedByYou)
 }
-var videoJsonDescription: String? = nil
-var presentationJsonDescription: String? = nil
-if let video = video {
-    switch video {
-    case let .dataJSON(data):
-        videoJsonDescription = data
-    }
-}
-if let presentation = presentation {
-    switch presentation {
-    case let .dataJSON(data):
-        presentationJsonDescription = data
-    }
-}
+var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
+var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
 if muteState?.canUnmute == false {
-    videoJsonDescription = nil
-    presentationJsonDescription = nil
+    videoDescription = nil
+    presentationDescription = nil
 }
 parsedParticipants.append(GroupCallParticipantsContext.Participant(
     peer: peer,
     ssrc: ssrc,
-    videoJsonDescription: videoJsonDescription,
-    presentationJsonDescription: presentationJsonDescription,
+    videoDescription: videoDescription,
+    presentationDescription: presentationDescription,
     joinTimestamp: date,
     raiseHandRating: raiseHandRating,
     hasRaiseHand: raiseHandRating != nil,
@@ -471,29 +459,17 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
 } else if mutedByYou {
     muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: false, mutedByYou: mutedByYou)
 }
-var videoJsonDescription: String? = nil
-var presentationJsonDescription: String? = nil
-if let video = video {
-    switch video {
-    case let .dataJSON(data):
-        videoJsonDescription = data
-    }
-}
-if let presentation = presentation {
-    switch presentation {
-    case let .dataJSON(data):
-        presentationJsonDescription = data
-    }
-}
+var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
+var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
 if muteState?.canUnmute == false {
-    videoJsonDescription = nil
-    presentationJsonDescription = nil
+    videoDescription = nil
+    presentationDescription = nil
 }
 parsedParticipants.append(GroupCallParticipantsContext.Participant(
     peer: peer,
     ssrc: ssrc,
-    videoJsonDescription: videoJsonDescription,
-    presentationJsonDescription: presentationJsonDescription,
+    videoDescription: videoDescription,
+    presentationDescription: presentationDescription,
     joinTimestamp: date,
     raiseHandRating: raiseHandRating,
     hasRaiseHand: raiseHandRating != nil,
@@ -735,30 +711,18 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal
 } else if mutedByYou {
     muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: false, mutedByYou: mutedByYou)
 }
-var videoJsonDescription: String? = nil
-var presentationJsonDescription: String? = nil
-if let video = video {
-    switch video {
-    case let .dataJSON(data):
-        videoJsonDescription = data
-    }
-}
-if let presentation = presentation {
-    switch presentation {
-    case let .dataJSON(data):
-        presentationJsonDescription = data
-    }
-}
+var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
+var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
 if muteState?.canUnmute == false {
-    videoJsonDescription = nil
-    presentationJsonDescription = nil
+    videoDescription = nil
+    presentationDescription = nil
 }
 if !state.participants.contains(where: { $0.peer.id == peer.id }) {
     state.participants.append(GroupCallParticipantsContext.Participant(
         peer: peer,
         ssrc: ssrc,
-        videoJsonDescription: videoJsonDescription,
-        presentationJsonDescription: presentationJsonDescription,
+        videoDescription: videoDescription,
+        presentationDescription: presentationDescription,
         joinTimestamp: date,
         raiseHandRating: raiseHandRating,
         hasRaiseHand: raiseHandRating != nil,
@@ -961,10 +925,21 @@ public final class GroupCallParticipantsContext {
         }
     }
 
+    public struct VideoDescription: Equatable {
+        public struct SsrcGroup: Equatable {
+            public var semantics: String
+            public var ssrcs: [UInt32]
+        }
+
+        public var endpointId: String
+        public var ssrcGroups: [SsrcGroup]
+        public var isPaused: Bool
+    }
+
     public var peer: Peer
     public var ssrc: UInt32?
-    public var videoJsonDescription: String?
-    public var presentationJsonDescription: String?
+    public var videoDescription: VideoDescription?
+    public var presentationDescription: VideoDescription?
     public var joinTimestamp: Int32
     public var raiseHandRating: Int64?
     public var hasRaiseHand: Bool
@@ -977,8 +952,8 @@ public final class GroupCallParticipantsContext {
     public init(
         peer: Peer,
         ssrc: UInt32?,
-        videoJsonDescription: String?,
-        presentationJsonDescription: String?,
+        videoDescription: VideoDescription?,
+        presentationDescription: VideoDescription?,
         joinTimestamp: Int32,
         raiseHandRating: Int64?,
         hasRaiseHand: Bool,
@@ -990,8 +965,8 @@ public final class GroupCallParticipantsContext {
     ) {
         self.peer = peer
         self.ssrc = ssrc
-        self.videoJsonDescription = videoJsonDescription
-        self.presentationJsonDescription = presentationJsonDescription
+        self.videoDescription = videoDescription
+        self.presentationDescription = presentationDescription
         self.joinTimestamp = joinTimestamp
         self.raiseHandRating = raiseHandRating
         self.hasRaiseHand = hasRaiseHand
@@ -1020,10 +995,10 @@ public final class GroupCallParticipantsContext {
     if lhs.ssrc != rhs.ssrc {
         return false
     }
-    if lhs.videoJsonDescription != rhs.videoJsonDescription {
+    if lhs.videoDescription != rhs.videoDescription {
         return false
     }
-    if lhs.presentationJsonDescription != rhs.presentationJsonDescription {
+    if lhs.presentationDescription != rhs.presentationDescription {
         return false
     }
     if lhs.joinTimestamp != rhs.joinTimestamp {
@@ -1225,8 +1200,8 @@ public final class GroupCallParticipantsContext {
 
     public var peerId: PeerId
     public var ssrc: UInt32?
-    public var videoJsonDescription: String?
-    public var presentationJsonDescription: String?
+    public var videoDescription: GroupCallParticipantsContext.Participant.VideoDescription?
+    public var presentationDescription: GroupCallParticipantsContext.Participant.VideoDescription?
     public var joinTimestamp: Int32
     public var activityTimestamp: Double?
     public var raiseHandRating: Int64?
@@ -1239,8 +1214,8 @@ public final class GroupCallParticipantsContext {
     init(
         peerId: PeerId,
         ssrc: UInt32?,
-        videoJsonDescription: String?,
-        presentationJsonDescription: String?,
+        videoDescription: GroupCallParticipantsContext.Participant.VideoDescription?,
+        presentationDescription: GroupCallParticipantsContext.Participant.VideoDescription?,
         joinTimestamp: Int32,
         activityTimestamp: Double?,
         raiseHandRating: Int64?,
@@ -1252,8 +1227,8 @@ public final class GroupCallParticipantsContext {
     ) {
         self.peerId = peerId
         self.ssrc = ssrc
-        self.videoJsonDescription = videoJsonDescription
-        self.presentationJsonDescription = presentationJsonDescription
+        self.videoDescription = videoDescription
+        self.presentationDescription = presentationDescription
         self.joinTimestamp = joinTimestamp
         self.activityTimestamp = activityTimestamp
         self.raiseHandRating = raiseHandRating
@@ -1381,6 +1356,9 @@ public final class GroupCallParticipantsContext {
     private let resetInviteLinksDisposable = MetaDisposable()
     private let updateShouldBeRecordingDisposable = MetaDisposable()
 
+    private var localVideoIsMuted: Bool = true
+    private var localIsVideoPaused: Bool = true
+
     public struct ServiceState {
         fileprivate var nextActivityRank: Int = 0
     }
@@ -1791,8 +1769,8 @@ public final class GroupCallParticipantsContext {
 let participant = Participant(
     peer: peer,
     ssrc: participantUpdate.ssrc,
-    videoJsonDescription: participantUpdate.videoJsonDescription,
-    presentationJsonDescription: participantUpdate.presentationJsonDescription,
+    videoDescription: participantUpdate.videoDescription,
+    presentationDescription: participantUpdate.presentationDescription,
     joinTimestamp: previousJoinTimestamp ?? participantUpdate.joinTimestamp,
     raiseHandRating: participantUpdate.raiseHandRating,
     hasRaiseHand: participantUpdate.raiseHandRating != nil,
@@ -1938,7 +1916,7 @@ public final class GroupCallParticipantsContext {
     raiseHandApi = nil
 }
 
-return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: muted, volume: volume, raiseHand: raiseHandApi, videoMuted: nil))
+return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: muted, volume: volume, raiseHand: raiseHandApi, videoStopped: nil, videoPaused: nil, presentationPaused: nil))
 |> map(Optional.init)
 |> `catch` { _ -> Signal<Api.Updates?, NoError> in
     return .single(nil)
@@ -1978,7 +1956,13 @@ public final class GroupCallParticipantsContext {
     }))
 }
 
-public func updateVideoState(peerId: PeerId, isVideoMuted: Bool) {
+public func updateVideoState(peerId: PeerId, isVideoMuted: Bool, isVideoPaused: Bool) {
+    if self.localVideoIsMuted == isVideoMuted && self.localIsVideoPaused == isVideoPaused {
+        return
+    }
+    self.localVideoIsMuted = isVideoMuted
+    self.localIsVideoPaused = isVideoPaused
+
     let disposable = MetaDisposable()
 
     let account = self.account
@@ -1998,7 +1982,13 @@ public final class GroupCallParticipantsContext {
     videoMuted = isVideoMuted ? .boolTrue : .boolFalse
     flags |= 1 << 3
 
-    return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: nil, volume: nil, raiseHand: nil, videoMuted: videoMuted))
+    var videoPaused: Api.Bool?
+    if !isVideoMuted {
+        videoPaused = isVideoPaused ? .boolTrue : .boolFalse
+        flags |= 1 << 4
+    }
+
+    return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: nil, volume: nil, raiseHand: nil, videoStopped: videoMuted, videoPaused: videoPaused, presentationPaused: nil))
     |> map(Optional.init)
     |> `catch` { _ -> Signal<Api.Updates?, NoError> in
         return .single(nil)
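Note on the hunk above (not part of the patch): a self-contained sketch of how the request flags appear to be assembled after this change, with bit 3 accompanying videoStopped and bit 4 accompanying videoPaused, and videoPaused only attached while video is not stopped. Api.Bool is replaced by a plain Bool? stand-in so the sketch compiles on its own; the function name videoFlags is hypothetical.

// Sketch only: mirrors the flag arithmetic visible in the hunk, not the real request builder.
func videoFlags(isVideoMuted: Bool, isVideoPaused: Bool) -> (flags: Int32, videoStopped: Bool?, videoPaused: Bool?) {
    var flags: Int32 = 0
    // Bit 3 marks videoStopped as present in the request.
    let videoStopped: Bool? = isVideoMuted
    flags |= 1 << 3
    // Bit 4 marks videoPaused as present; it is only sent while video is active.
    var videoPaused: Bool?
    if !isVideoMuted {
        videoPaused = isVideoPaused
        flags |= 1 << 4
    }
    return (flags, videoStopped, videoPaused)
}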
@@ -2148,29 +2138,17 @@ extension GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate {
     participationStatusChange = .none
 }
 
-var videoJsonDescription: String? = nil
-var presentationJsonDescription: String? = nil
-if let video = video {
-    switch video {
-    case let .dataJSON(data):
-        videoJsonDescription = data
-    }
-}
-if let presentation = presentation {
-    switch presentation {
-    case let .dataJSON(data):
-        presentationJsonDescription = data
-    }
-}
+var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
+var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
 if muteState?.canUnmute == false {
-    videoJsonDescription = nil
-    presentationJsonDescription = nil
+    videoDescription = nil
+    presentationDescription = nil
 }
 self.init(
     peerId: peerId,
     ssrc: ssrc,
-    videoJsonDescription: videoJsonDescription,
-    presentationJsonDescription: presentationJsonDescription,
+    videoDescription: videoDescription,
+    presentationDescription: presentationDescription,
     joinTimestamp: date,
     activityTimestamp: activeDate.flatMap(Double.init),
     raiseHandRating: raiseHandRating,
@@ -2214,29 +2192,17 @@ extension GroupCallParticipantsContext.Update.StateUpdate {
     participationStatusChange = .none
 }
 
-var videoJsonDescription: String? = nil
-var presentationJsonDescription: String? = nil
-if let video = video {
-    switch video {
-    case let .dataJSON(data):
-        videoJsonDescription = data
-    }
-}
-if let presentation = presentation {
-    switch presentation {
-    case let .dataJSON(data):
-        presentationJsonDescription = data
-    }
-}
+var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
+var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
 if muteState?.canUnmute == false {
-    videoJsonDescription = nil
-    presentationJsonDescription = nil
+    videoDescription = nil
+    presentationDescription = nil
 }
 participantUpdates.append(GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate(
     peerId: peerId,
     ssrc: ssrc,
-    videoJsonDescription: videoJsonDescription,
-    presentationJsonDescription: presentationJsonDescription,
+    videoDescription: videoDescription,
+    presentationDescription: presentationDescription,
     joinTimestamp: date,
     activityTimestamp: activeDate.flatMap(Double.init),
     raiseHandRating: raiseHandRating,
@@ -2584,3 +2550,20 @@ public func getAudioBroadcastPart(dataSource: AudioBroadcastDataSource, callId:
         }
     }
 }
+
+private extension GroupCallParticipantsContext.Participant.VideoDescription {
+    init(_ apiVideo: Api.GroupCallParticipantVideo) {
+        switch apiVideo {
+        case let .groupCallParticipantVideo(flags, endpoint, sourceGroups):
+            var parsedSsrcGroups: [SsrcGroup] = []
+            for group in sourceGroups {
+                switch group {
+                case let .groupCallParticipantVideoSourceGroup(semantics, sources):
+                    parsedSsrcGroups.append(SsrcGroup(semantics: semantics, ssrcs: sources.map(UInt32.init(bitPattern:))))
+                }
+            }
+            let isPaused = (flags & (1 << 0)) != 0
+            self.init(endpointId: endpoint, ssrcGroups: parsedSsrcGroups, isPaused: isPaused)
+        }
+    }
+}
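Note on the hunk above (not part of the patch): a self-contained sketch of the two conversions the new VideoDescription initializer relies on, reinterpreting Int32 SSRC values as UInt32 via init(bitPattern:) and reading isPaused from bit 0 of the flags field. The concrete values are illustrative only.

// Illustrative values; in the patch they come from Api.GroupCallParticipantVideoSourceGroup.
let apiSources: [Int32] = [123456789, -987654321]              // negative values are valid wire encodings
let ssrcs: [UInt32] = apiSources.map(UInt32.init(bitPattern:)) // reinterprets the bits, no clamping
let flags: Int32 = 1                                           // bit 0 set
let isPaused = (flags & (1 << 0)) != 0                         // same check as in the initializer above
print(ssrcs, isPaused)                                         // [123456789, 3307312975] true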
@@ -50,7 +50,7 @@ func updateSecretChat(encryptionProvider: EncryptionProvider, accountPeerId: Pee
 
 var updatedState = currentState
 updatedState = updatedState.withUpdatedKeychain(SecretChatKeychain(keys: [SecretChatKey(fingerprint: keyFingerprint, key: MemoryBuffer(data: key), validity: .indefinite, useCount: 0)]))
-updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer46, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
+updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer73, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
 
 updatedState = updatedState.withUpdatedKeyFingerprint(SecretChatKeyFingerprint(sha1: SecretChatKeySha1Fingerprint(digest: sha1Digest(key)), sha256: SecretChatKeySha256Fingerprint(digest: sha256Digest(key))))
 
@@ -63,6 +63,21 @@ func fetchResource(account: Account, resource: MediaResource, intervals: Signal<
 } else if let httpReference = resource as? HttpReferenceMediaResource {
     return .single(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: false))
     |> then(fetchHttpResource(url: httpReference.url))
+} else if let wallpaperResource = resource as? WallpaperDataResource {
+    return getWallpaper(network: account.network, slug: wallpaperResource.slug)
+    |> mapError { _ -> MediaResourceDataFetchError in
+        return .generic
+    }
+    |> mapToSignal { wallpaper -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
+        guard case let .file(file) = wallpaper else {
+            return .fail(.generic)
+        }
+        guard let cloudResource = file.file.resource as? TelegramMultipartFetchableResource else {
+            return .fail(.generic)
+        }
+        return .single(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: false))
+        |> then(fetchCloudMediaLocation(account: account, resource: cloudResource, datacenterId: cloudResource.datacenterId, size: resource.size == 0 ? nil : resource.size, intervals: intervals, parameters: MediaResourceFetchParameters(tag: nil, info: TelegramCloudMediaResourceFetchInfo(reference: .standalone(resource: file.file.resource), preferBackgroundReferenceRevalidation: false, continueInBackground: false), isRandomAccessAllowed: true)))
+    }
 }
 return nil
 }
|
@ -245,7 +245,7 @@ private func initialHandshakeAccept(postbox: Postbox, network: Network, peerId:
|
|||||||
if let state = transaction.getPeerChatState(peerId) as? SecretChatState {
|
if let state = transaction.getPeerChatState(peerId) as? SecretChatState {
|
||||||
var updatedState = state
|
var updatedState = state
|
||||||
updatedState = updatedState.withUpdatedKeychain(SecretChatKeychain(keys: [SecretChatKey(fingerprint: keyFingerprint, key: MemoryBuffer(data: key), validity: .indefinite, useCount: 0)]))
|
updatedState = updatedState.withUpdatedKeychain(SecretChatKeychain(keys: [SecretChatKey(fingerprint: keyFingerprint, key: MemoryBuffer(data: key), validity: .indefinite, useCount: 0)]))
|
||||||
updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer46, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
|
updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer73, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
|
||||||
updatedState = updatedState.withUpdatedKeyFingerprint(SecretChatKeyFingerprint(sha1: SecretChatKeySha1Fingerprint(digest: sha1Digest(key)), sha256: SecretChatKeySha256Fingerprint(digest: sha256Digest(key))))
|
updatedState = updatedState.withUpdatedKeyFingerprint(SecretChatKeyFingerprint(sha1: SecretChatKeySha1Fingerprint(digest: sha1Digest(key)), sha256: SecretChatKeySha256Fingerprint(digest: sha256Digest(key))))
|
||||||
|
|
||||||
var layer: SecretChatLayer?
|
var layer: SecretChatLayer?
|
||||||
|