Combo update
@ -164,9 +164,14 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
|
||||
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
|
||||
return
|
||||
}
|
||||
|
||||
var orientation = CGImagePropertyOrientation.up
|
||||
if #available(iOS 11.0, *) {
|
||||
if let orientationAttachment = CMGetAttachment(sampleBuffer, key: RPVideoSampleOrientationKey as CFString, attachmentModeOut: nil) as? NSNumber {
|
||||
orientation = CGImagePropertyOrientation(rawValue: orientationAttachment.uint32Value) ?? .up
|
||||
}
|
||||
}
|
||||
if let data = serializePixelBuffer(buffer: pixelBuffer) {
|
||||
self.screencastBufferClientContext?.setCurrentFrame(data: data)
|
||||
self.screencastBufferClientContext?.setCurrentFrame(data: data, orientation: orientation)
|
||||
}
|
||||
|
||||
//self.videoCapturer?.injectSampleBuffer(sampleBuffer)
|
||||
|
@ -72,4 +72,44 @@
|
||||
<key>UIPrerenderedIcon</key>
|
||||
<true/>
|
||||
</dict>
|
||||
<key>New1</key>
|
||||
<dict>
|
||||
<key>CFBundleIconFiles</key>
|
||||
<array>
|
||||
<string>New1_20x20</string>
|
||||
<string>New1_29x29</string>
|
||||
<string>New1_40x40</string>
|
||||
<string>New1_58x58</string>
|
||||
<string>New1_60x60</string>
|
||||
<string>New1_76x76</string>
|
||||
<string>New1_80x80</string>
|
||||
<string>New1_87x87</string>
|
||||
<string>New1_120x120</string>
|
||||
<string>New1_152x152</string>
|
||||
<string>New1_167x167</string>
|
||||
<string>New1_180x180</string>
|
||||
</array>
|
||||
<key>UIPrerenderedIcon</key>
|
||||
<true/>
|
||||
</dict>
|
||||
<key>New2</key>
|
||||
<dict>
|
||||
<key>CFBundleIconFiles</key>
|
||||
<array>
|
||||
<string>New2_20x20</string>
|
||||
<string>New2_29x29</string>
|
||||
<string>New2_40x40</string>
|
||||
<string>New2_58x58</string>
|
||||
<string>New2_60x60</string>
|
||||
<string>New2_76x76</string>
|
||||
<string>New2_80x80</string>
|
||||
<string>New2_87x87</string>
|
||||
<string>New2_120x120</string>
|
||||
<string>New2_152x152</string>
|
||||
<string>New2_167x167</string>
|
||||
<string>New2_180x180</string>
|
||||
</array>
|
||||
<key>UIPrerenderedIcon</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</dict>
|
@ -66,4 +66,44 @@
|
||||
<key>UIPrerenderedIcon</key>
|
||||
<true/>
|
||||
</dict>
|
||||
<key>New1</key>
|
||||
<dict>
|
||||
<key>CFBundleIconFiles</key>
|
||||
<array>
|
||||
<string>New1_20x20</string>
|
||||
<string>New1_29x29</string>
|
||||
<string>New1_40x40</string>
|
||||
<string>New1_58x58</string>
|
||||
<string>New1_60x60</string>
|
||||
<string>New1_76x76</string>
|
||||
<string>New1_80x80</string>
|
||||
<string>New1_87x87</string>
|
||||
<string>New1_120x120</string>
|
||||
<string>New1_152x152</string>
|
||||
<string>New1_167x167</string>
|
||||
<string>New1_180x180</string>
|
||||
</array>
|
||||
<key>UIPrerenderedIcon</key>
|
||||
<true/>
|
||||
</dict>
|
||||
<key>New2</key>
|
||||
<dict>
|
||||
<key>CFBundleIconFiles</key>
|
||||
<array>
|
||||
<string>New2_20x20</string>
|
||||
<string>New2_29x29</string>
|
||||
<string>New2_40x40</string>
|
||||
<string>New2_58x58</string>
|
||||
<string>New2_60x60</string>
|
||||
<string>New2_76x76</string>
|
||||
<string>New2_80x80</string>
|
||||
<string>New2_87x87</string>
|
||||
<string>New2_120x120</string>
|
||||
<string>New2_152x152</string>
|
||||
<string>New2_167x167</string>
|
||||
<string>New2_180x180</string>
|
||||
</array>
|
||||
<key>UIPrerenderedIcon</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</dict>
|
BIN
Telegram/Telegram-iOS/New1_120x120.png
Normal file
After Width: | Height: | Size: 8.5 KiB |
BIN
Telegram/Telegram-iOS/New1_152x152.png
Normal file
After Width: | Height: | Size: 12 KiB |
BIN
Telegram/Telegram-iOS/New1_167x167.png
Normal file
After Width: | Height: | Size: 14 KiB |
BIN
Telegram/Telegram-iOS/New1_180x180.png
Normal file
After Width: | Height: | Size: 16 KiB |
BIN
Telegram/Telegram-iOS/New1_20x20.png
Normal file
After Width: | Height: | Size: 889 B |
BIN
Telegram/Telegram-iOS/New1_29x29.png
Normal file
After Width: | Height: | Size: 1.4 KiB |
BIN
Telegram/Telegram-iOS/New1_40x40.png
Normal file
After Width: | Height: | Size: 2.0 KiB |
BIN
Telegram/Telegram-iOS/New1_58x58.png
Normal file
After Width: | Height: | Size: 3.2 KiB |
BIN
Telegram/Telegram-iOS/New1_60x60.png
Normal file
After Width: | Height: | Size: 3.3 KiB |
BIN
Telegram/Telegram-iOS/New1_76x76.png
Normal file
After Width: | Height: | Size: 4.6 KiB |
BIN
Telegram/Telegram-iOS/New1_80x80.png
Normal file
After Width: | Height: | Size: 4.9 KiB |
BIN
Telegram/Telegram-iOS/New1_87x87.png
Normal file
After Width: | Height: | Size: 5.5 KiB |
BIN
Telegram/Telegram-iOS/New2_120x120.png
Normal file
After Width: | Height: | Size: 8.9 KiB |
BIN
Telegram/Telegram-iOS/New2_152x152.png
Normal file
After Width: | Height: | Size: 13 KiB |
BIN
Telegram/Telegram-iOS/New2_167x167.png
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
Telegram/Telegram-iOS/New2_180x180.png
Normal file
After Width: | Height: | Size: 17 KiB |
BIN
Telegram/Telegram-iOS/New2_20x20.png
Normal file
After Width: | Height: | Size: 917 B |
BIN
Telegram/Telegram-iOS/New2_29x29.png
Normal file
After Width: | Height: | Size: 1.4 KiB |
BIN
Telegram/Telegram-iOS/New2_40x40.png
Normal file
After Width: | Height: | Size: 2.0 KiB |
BIN
Telegram/Telegram-iOS/New2_58x58.png
Normal file
After Width: | Height: | Size: 3.3 KiB |
BIN
Telegram/Telegram-iOS/New2_60x60.png
Normal file
After Width: | Height: | Size: 3.5 KiB |
BIN
Telegram/Telegram-iOS/New2_76x76.png
Normal file
After Width: | Height: | Size: 4.7 KiB |
BIN
Telegram/Telegram-iOS/New2_80x80.png
Normal file
After Width: | Height: | Size: 5.1 KiB |
BIN
Telegram/Telegram-iOS/New2_87x87.png
Normal file
After Width: | Height: | Size: 5.8 KiB |
@ -4428,6 +4428,8 @@ Sorry for the inconvenience.";
|
||||
"Appearance.AppIconClassicX" = "Classic X";
|
||||
"Appearance.AppIconFilled" = "Filled";
|
||||
"Appearance.AppIconFilledX" = "Filled X";
|
||||
"Appearance.AppIconNew1" = "New 1";
|
||||
"Appearance.AppIconNew2" = "New 2";
|
||||
|
||||
"Appearance.ThemeCarouselClassic" = "Classic";
|
||||
"Appearance.ThemeCarouselDay" = "Day";
|
||||
@ -6479,3 +6481,15 @@ Sorry for the inconvenience.";
|
||||
"VoiceChat.UnmuteSuggestion" = "You are on mute. Tap here to speak.";
|
||||
|
||||
"VoiceChat.ContextAudio" = "Audio";
|
||||
|
||||
"VoiceChat.VideoPaused" = "Video is paused";
|
||||
"VoiceChat.YouAreSharingScreen" = "You are sharing your screen";
|
||||
"VoiceChat.StopScreenSharingShort" = "Stop Sharing";
|
||||
|
||||
"VoiceChat.OpenGroup" = "Open Group";
|
||||
|
||||
"VoiceChat.NoiseSuppression" = "Noise Suppression";
|
||||
"VoiceChat.NoiseSuppressionEnabled" = "Enabled";
|
||||
"VoiceChat.NoiseSuppressionDisabled" = "Disabled";
|
||||
|
||||
"VoiceChat.Unpin" = "Unpin";
|
||||
|
@ -8,4 +8,5 @@ exports_files([
|
||||
"WatchApp.mobileprovision",
|
||||
"WatchExtension.mobileprovision",
|
||||
"Widget.mobileprovision",
|
||||
"BroadcastUpload.mobileprovision",
|
||||
])
|
||||
|
@ -321,57 +321,51 @@ public struct PresentationGroupCallRequestedVideo {
|
||||
case full
|
||||
}
|
||||
|
||||
public struct SsrcGroup {
|
||||
public var semantics: String
|
||||
public var ssrcs: [UInt32]
|
||||
}
|
||||
|
||||
public var audioSsrc: UInt32
|
||||
public var endpointId: String
|
||||
public var videoInformation: String
|
||||
public var quality: Quality
|
||||
public var ssrcGroups: [SsrcGroup]
|
||||
public var minQuality: Quality
|
||||
public var maxQuality: Quality
|
||||
}
|
||||
|
||||
public extension GroupCallParticipantsContext.Participant {
|
||||
var videoEndpointId: String? {
|
||||
if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
||||
if let endpoint = json["endpoint"] as? String {
|
||||
return endpoint
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return self.videoDescription?.endpointId
|
||||
}
|
||||
|
||||
var presentationEndpointId: String? {
|
||||
if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
||||
if let endpoint = json["endpoint"] as? String {
|
||||
return endpoint
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return self.presentationDescription?.endpointId
|
||||
}
|
||||
}
|
||||
|
||||
public extension GroupCallParticipantsContext.Participant {
|
||||
func requestedVideoChannel(quality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
|
||||
func requestedVideoChannel(minQuality: PresentationGroupCallRequestedVideo.Quality, maxQuality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
|
||||
guard let audioSsrc = self.ssrc else {
|
||||
return nil
|
||||
}
|
||||
guard let videoInformation = self.videoJsonDescription else {
|
||||
guard let videoDescription = self.videoDescription else {
|
||||
return nil
|
||||
}
|
||||
guard let videoEndpointId = self.videoEndpointId else {
|
||||
return nil
|
||||
}
|
||||
return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: videoEndpointId, videoInformation: videoInformation, quality: quality)
|
||||
return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: videoDescription.endpointId, ssrcGroups: videoDescription.ssrcGroups.map { group in
|
||||
PresentationGroupCallRequestedVideo.SsrcGroup(semantics: group.semantics, ssrcs: group.ssrcs)
|
||||
}, minQuality: minQuality, maxQuality: maxQuality)
|
||||
}
|
||||
|
||||
func requestedPresentationVideoChannel(quality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
|
||||
func requestedPresentationVideoChannel(minQuality: PresentationGroupCallRequestedVideo.Quality, maxQuality: PresentationGroupCallRequestedVideo.Quality) -> PresentationGroupCallRequestedVideo? {
|
||||
guard let audioSsrc = self.ssrc else {
|
||||
return nil
|
||||
}
|
||||
guard let videoInformation = self.presentationJsonDescription else {
|
||||
guard let presentationDescription = self.presentationDescription else {
|
||||
return nil
|
||||
}
|
||||
guard let presentationEndpointId = self.presentationEndpointId else {
|
||||
return nil
|
||||
}
|
||||
return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: presentationEndpointId, videoInformation: videoInformation, quality: quality)
|
||||
return PresentationGroupCallRequestedVideo(audioSsrc: audioSsrc, endpointId: presentationDescription.endpointId, ssrcGroups: presentationDescription.ssrcGroups.map { group in
|
||||
PresentationGroupCallRequestedVideo.SsrcGroup(semantics: group.semantics, ssrcs: group.ssrcs)
|
||||
}, minQuality: minQuality, maxQuality: maxQuality)
|
||||
}
|
||||
}
|
||||
|
||||
@ -438,7 +432,7 @@ public protocol PresentationGroupCall: class {
|
||||
var inviteLinks: Signal<GroupCallInviteLinks?, NoError> { get }
|
||||
|
||||
func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void)
|
||||
func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void)
|
||||
func makeOutgoingVideoView(requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void)
|
||||
|
||||
func loadMoreMembers(token: String)
|
||||
}
|
||||
|
@ -192,11 +192,13 @@ final class BlobView: UIView {
|
||||
|
||||
var level: CGFloat = 0 {
|
||||
didSet {
|
||||
CATransaction.begin()
|
||||
CATransaction.setDisableActions(true)
|
||||
let lv = self.minScale + (self.maxScale - self.minScale) * self.level
|
||||
self.shapeLayer.transform = CATransform3DMakeScale(lv, lv, 1)
|
||||
CATransaction.commit()
|
||||
if abs(self.level - oldValue) > 0.01 {
|
||||
CATransaction.begin()
|
||||
CATransaction.setDisableActions(true)
|
||||
let lv = self.minScale + (self.maxScale - self.minScale) * self.level
|
||||
self.shapeLayer.transform = CATransform3DMakeScale(lv, lv, 1)
|
||||
CATransaction.commit()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -121,7 +121,7 @@ public func peerAvatarCompleteImage(account: Account, peer: Peer, size: CGSize,
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
drawPeerAvatarLetters(context: context, size: CGSize(width: size.width, height: size.height), round: round, font: font, letters: displayLetters, peerId: peerId)
|
||||
if blurred {
|
||||
context.setFillColor(UIColor(rgb: 0x000000, alpha: 0.45).cgColor)
|
||||
context.setFillColor(UIColor(rgb: 0x000000, alpha: 0.5).cgColor)
|
||||
context.fill(CGRect(origin: CGPoint(), size: size))
|
||||
}
|
||||
})?.withRenderingMode(.alwaysOriginal)
|
||||
|
@ -100,7 +100,7 @@ class ChatPlayingActivityContentNode: ChatTitleActivityContentNode {
|
||||
self.addSubnode(self.indicatorNode)
|
||||
}
|
||||
|
||||
override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
|
||||
let size = self.textNode.updateLayout(constrainedSize)
|
||||
let indicatorSize = CGSize(width: 24.0, height: 16.0)
|
||||
let originX: CGFloat
|
||||
|
@ -72,7 +72,7 @@ class ChatRecordingVideoActivityContentNode: ChatTitleActivityContentNode {
|
||||
self.addSubnode(self.indicatorNode)
|
||||
}
|
||||
|
||||
override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
|
||||
let size = self.textNode.updateLayout(constrainedSize)
|
||||
let indicatorSize = CGSize(width: 24.0, height: 16.0)
|
||||
let originX: CGFloat
|
||||
|
@ -90,7 +90,7 @@ class ChatRecordingVoiceActivityContentNode: ChatTitleActivityContentNode {
|
||||
self.addSubnode(self.indicatorNode)
|
||||
}
|
||||
|
||||
override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
|
||||
let size = self.textNode.updateLayout(constrainedSize)
|
||||
let indicatorSize = CGSize(width: 24.0, height: 16.0)
|
||||
let originX: CGFloat
|
||||
@ -99,7 +99,7 @@ class ChatRecordingVoiceActivityContentNode: ChatTitleActivityContentNode {
|
||||
} else {
|
||||
originX = indicatorSize.width
|
||||
}
|
||||
self.textNode.frame = CGRect(origin: CGPoint(x: originX, y: 0.0), size: size)
|
||||
self.textNode.frame = CGRect(origin: CGPoint(x: originX, y: offset), size: size)
|
||||
self.indicatorNode.frame = CGRect(origin: CGPoint(x: self.textNode.frame.minX - indicatorSize.width, y: 0.0), size: indicatorSize)
|
||||
return CGSize(width: size.width + indicatorSize.width, height: size.height)
|
||||
}
|
||||
|
@ -122,13 +122,13 @@ public class ChatTitleActivityContentNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
public func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
public func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
|
||||
let size = self.textNode.updateLayout(constrainedSize)
|
||||
self.textNode.bounds = CGRect(origin: CGPoint(), size: size)
|
||||
if case .center = alignment {
|
||||
self.textNode.position = CGPoint(x: 0.0, y: size.height / 2.0)
|
||||
self.textNode.position = CGPoint(x: 0.0, y: size.height / 2.0 + offset)
|
||||
} else {
|
||||
self.textNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
|
||||
self.textNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0 + offset)
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
@ -123,7 +123,7 @@ public class ChatTitleActivityNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
public func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
return CGSize(width: 0.0, height: self.contentNode?.updateLayout(constrainedSize, alignment: alignment).height ?? 0.0)
|
||||
public func updateLayout(_ constrainedSize: CGSize, offset: CGFloat = 0.0, alignment: NSTextAlignment) -> CGSize {
|
||||
return CGSize(width: 0.0, height: self.contentNode?.updateLayout(constrainedSize, offset: offset, alignment: alignment).height ?? 0.0)
|
||||
}
|
||||
}
|
||||
|
@ -108,7 +108,7 @@ class ChatTypingActivityContentNode: ChatTitleActivityContentNode {
|
||||
self.addSubnode(self.indicatorNode)
|
||||
}
|
||||
|
||||
override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
|
||||
let indicatorSize = CGSize(width: 24.0, height: 16.0)
|
||||
let size = self.textNode.updateLayout(CGSize(width: constrainedSize.width - indicatorSize.width, height: constrainedSize.height))
|
||||
var originX: CGFloat
|
||||
|
@ -80,7 +80,7 @@ class ChatUploadingActivityContentNode: ChatTitleActivityContentNode {
|
||||
self.addSubnode(self.indicatorNode)
|
||||
}
|
||||
|
||||
override func updateLayout(_ constrainedSize: CGSize, alignment: NSTextAlignment) -> CGSize {
|
||||
override func updateLayout(_ constrainedSize: CGSize, offset: CGFloat, alignment: NSTextAlignment) -> CGSize {
|
||||
let size = self.textNode.updateLayout(constrainedSize)
|
||||
let indicatorSize = CGSize(width: 24.0, height: 16.0)
|
||||
let originX: CGFloat
|
||||
|
@ -497,7 +497,8 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
|
||||
|
||||
if let takenViewInfo = takenViewInfo, let parentSupernode = takenViewInfo.contentContainingNode.supernode {
|
||||
self.contentContainerNode.contentNode = .extracted(node: takenViewInfo.contentContainingNode, keepInPlace: source.keepInPlace)
|
||||
if source.keepInPlace {
|
||||
if source.keepInPlace || takenViewInfo.maskView != nil {
|
||||
self.clippingNode.view.mask = takenViewInfo.maskView
|
||||
self.clippingNode.addSubnode(self.contentContainerNode)
|
||||
} else {
|
||||
self.scrollNode.addSubnode(self.contentContainerNode)
|
||||
@ -687,7 +688,9 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
|
||||
|
||||
self.actionsContainerNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: localSourceFrame.center.x - self.actionsContainerNode.position.x, y: localSourceFrame.center.y - self.actionsContainerNode.position.y + actionsOffset)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: actionsDuration, initialVelocity: 0.0, damping: springDamping, additive: true)
|
||||
let contentContainerOffset = CGPoint(x: localContentSourceFrame.center.x - self.contentContainerNode.frame.center.x - contentParentNode.contentRect.minX, y: localContentSourceFrame.center.y - self.contentContainerNode.frame.center.y - contentParentNode.contentRect.minY)
|
||||
self.contentContainerNode.layer.animateSpring(from: NSValue(cgPoint: contentContainerOffset), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: contentDuration, initialVelocity: 0.0, damping: springDamping, additive: true)
|
||||
self.contentContainerNode.layer.animateSpring(from: NSValue(cgPoint: contentContainerOffset), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: contentDuration, initialVelocity: 0.0, damping: springDamping, additive: true, completion: { [weak self] _ in
|
||||
self?.clippingNode.view.mask = nil
|
||||
})
|
||||
contentParentNode.applyAbsoluteOffsetSpring?(-contentContainerOffset.y, springDuration, springDamping)
|
||||
}
|
||||
|
||||
@ -849,6 +852,7 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
|
||||
updatedContentAreaInScreenSpace.origin.x = 0.0
|
||||
updatedContentAreaInScreenSpace.size.width = self.bounds.width
|
||||
|
||||
self.clippingNode.view.mask = putBackInfo.maskView
|
||||
self.clippingNode.layer.animateFrame(from: self.clippingNode.frame, to: updatedContentAreaInScreenSpace, duration: transitionDuration * animationDurationFactor, timingFunction: transitionCurve.timingFunction, removeOnCompletion: false)
|
||||
self.clippingNode.layer.animateBoundsOriginYAdditive(from: 0.0, to: updatedContentAreaInScreenSpace.minY, duration: transitionDuration * animationDurationFactor, timingFunction: transitionCurve.timingFunction, removeOnCompletion: false)
|
||||
}
|
||||
@ -1726,18 +1730,22 @@ public protocol ContextReferenceContentSource: class {
|
||||
public final class ContextControllerTakeViewInfo {
|
||||
public let contentContainingNode: ContextExtractedContentContainingNode
|
||||
public let contentAreaInScreenSpace: CGRect
|
||||
public let maskView: UIView?
|
||||
|
||||
public init(contentContainingNode: ContextExtractedContentContainingNode, contentAreaInScreenSpace: CGRect) {
|
||||
public init(contentContainingNode: ContextExtractedContentContainingNode, contentAreaInScreenSpace: CGRect, maskView: UIView? = nil) {
|
||||
self.contentContainingNode = contentContainingNode
|
||||
self.contentAreaInScreenSpace = contentAreaInScreenSpace
|
||||
self.maskView = maskView
|
||||
}
|
||||
}
|
||||
|
||||
public final class ContextControllerPutBackViewInfo {
|
||||
public let contentAreaInScreenSpace: CGRect
|
||||
public let maskView: UIView?
|
||||
|
||||
public init(contentAreaInScreenSpace: CGRect) {
|
||||
public init(contentAreaInScreenSpace: CGRect, maskView: UIView? = nil) {
|
||||
self.contentAreaInScreenSpace = contentAreaInScreenSpace
|
||||
self.maskView = maskView
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -250,27 +250,31 @@ public final class DeviceAccess {
|
||||
}
|
||||
}
|
||||
|
||||
public static func authorizeAccess(to subject: DeviceAccessSubject, registerForNotifications: ((@escaping (Bool) -> Void) -> Void)? = nil, requestSiriAuthorization: ((@escaping (Bool) -> Void) -> Void)? = nil, locationManager: LocationManager? = nil, presentationData: PresentationData? = nil, present: @escaping (ViewController, Any?) -> Void = { _, _ in }, openSettings: @escaping () -> Void = { }, displayNotificationFromBackground: @escaping (String) -> Void = { _ in }, _ completion: @escaping (Bool) -> Void = { _ in }) {
|
||||
public static func authorizeAccess(to subject: DeviceAccessSubject, onlyCheck: Bool = false, registerForNotifications: ((@escaping (Bool) -> Void) -> Void)? = nil, requestSiriAuthorization: ((@escaping (Bool) -> Void) -> Void)? = nil, locationManager: LocationManager? = nil, presentationData: PresentationData? = nil, present: @escaping (ViewController, Any?) -> Void = { _, _ in }, openSettings: @escaping () -> Void = { }, displayNotificationFromBackground: @escaping (String) -> Void = { _ in }, _ completion: @escaping (Bool) -> Void = { _ in }) {
|
||||
switch subject {
|
||||
case let .camera(cameraSubject):
|
||||
let status = PGCamera.cameraAuthorizationStatus()
|
||||
if status == PGCameraAuthorizationStatusNotDetermined {
|
||||
AVCaptureDevice.requestAccess(for: AVMediaType.video) { response in
|
||||
Queue.mainQueue().async {
|
||||
completion(response)
|
||||
if !response, let presentationData = presentationData {
|
||||
let text: String
|
||||
switch cameraSubject {
|
||||
case .video:
|
||||
text = presentationData.strings.AccessDenied_Camera
|
||||
case .videoCall:
|
||||
text = presentationData.strings.AccessDenied_VideoCallCamera
|
||||
if !onlyCheck {
|
||||
AVCaptureDevice.requestAccess(for: AVMediaType.video) { response in
|
||||
Queue.mainQueue().async {
|
||||
completion(response)
|
||||
if !response, let presentationData = presentationData {
|
||||
let text: String
|
||||
switch cameraSubject {
|
||||
case .video:
|
||||
text = presentationData.strings.AccessDenied_Camera
|
||||
case .videoCall:
|
||||
text = presentationData.strings.AccessDenied_VideoCallCamera
|
||||
}
|
||||
present(standardTextAlertController(theme: AlertControllerTheme(presentationData: presentationData), title: presentationData.strings.AccessDenied_Title, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_NotNow, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.AccessDenied_Settings, action: {
|
||||
openSettings()
|
||||
})]), nil)
|
||||
}
|
||||
present(standardTextAlertController(theme: AlertControllerTheme(presentationData: presentationData), title: presentationData.strings.AccessDenied_Title, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_NotNow, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.AccessDenied_Settings, action: {
|
||||
openSettings()
|
||||
})]), nil)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
completion(true)
|
||||
}
|
||||
} else if status == PGCameraAuthorizationStatusRestricted || status == PGCameraAuthorizationStatusDenied, let presentationData = presentationData {
|
||||
let text: String
|
||||
|
@ -333,8 +333,8 @@ public extension ContainedViewLayoutTransition {
|
||||
}
|
||||
}
|
||||
|
||||
func updatePosition(layer: CALayer, position: CGPoint, completion: ((Bool) -> Void)? = nil) {
|
||||
if layer.position.equalTo(position) {
|
||||
func updatePosition(layer: CALayer, position: CGPoint, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
|
||||
if layer.position.equalTo(position) && !force {
|
||||
completion?(true)
|
||||
} else {
|
||||
switch self {
|
||||
@ -545,6 +545,15 @@ public extension ContainedViewLayoutTransition {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func animateContentsRectPositionAdditive(layer: CALayer, offset: CGPoint, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
|
||||
switch self {
|
||||
case .immediate:
|
||||
completion?(true)
|
||||
case let .animated(duration, curve):
|
||||
layer.animate(from: NSValue(cgPoint: offset), to: NSValue(cgPoint: CGPoint()), keyPath: "contentsRect.origin", timingFunction: curve.timingFunction, duration: duration, delay: 0.0, mediaTimingFunction: curve.mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: true, completion: completion)
|
||||
}
|
||||
}
|
||||
|
||||
func updateFrame(view: UIView, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
|
||||
if frame.origin.x.isNaN {
|
||||
@ -1284,9 +1293,6 @@ public struct CombinedTransition {
|
||||
completeKey(.positionY, result)
|
||||
})
|
||||
|
||||
//self.horizontal.animateHorizontalOffsetAdditive(layer: layer, offset: (fromFrame.width - toFrame.width) / 4.0)
|
||||
//self.vertical.animateOffsetAdditive(layer: layer, offset: (fromFrame.height - toFrame.height) / 2.0)
|
||||
|
||||
self.horizontal.animateWidthAdditive(layer: layer, value: fromFrame.width - toFrame.width, completion: { result in
|
||||
completeKey(.sizeWidth, result)
|
||||
})
|
||||
@ -1301,6 +1307,12 @@ public struct CombinedTransition {
|
||||
self.animateFrame(layer: layer, from: fromFrame, completion: completion)
|
||||
}
|
||||
|
||||
public func updateFrame(node: ASDisplayNode, frame: CGRect, completion: ((Bool) -> Void)? = nil) {
|
||||
let fromFrame = node.frame
|
||||
node.frame = frame
|
||||
self.animateFrame(layer: node.layer, from: fromFrame, completion: completion)
|
||||
}
|
||||
|
||||
public func updatePosition(layer: CALayer, position: CGPoint, completion: ((Bool) -> Void)? = nil) {
|
||||
let fromPosition = layer.position
|
||||
layer.position = position
|
||||
|
@ -58,7 +58,13 @@ public struct Font {
|
||||
|
||||
public static func with(size: CGFloat, design: Design = .regular, weight: Weight = .regular, traits: Traits = []) -> UIFont {
|
||||
if #available(iOS 13.0, *) {
|
||||
let descriptor = UIFont.systemFont(ofSize: size).fontDescriptor
|
||||
let descriptor: UIFontDescriptor
|
||||
if #available(iOS 14.0, *) {
|
||||
descriptor = UIFont.systemFont(ofSize: size).fontDescriptor
|
||||
} else {
|
||||
descriptor = UIFont.systemFont(ofSize: size, weight: weight.weight).fontDescriptor
|
||||
}
|
||||
|
||||
var symbolicTraits = descriptor.symbolicTraits
|
||||
if traits.contains(.italic) {
|
||||
symbolicTraits.insert(.traitItalic)
|
||||
@ -83,10 +89,12 @@ public struct Font {
|
||||
default:
|
||||
updatedDescriptor = updatedDescriptor?.withDesign(.default)
|
||||
}
|
||||
if weight != .regular {
|
||||
updatedDescriptor = updatedDescriptor?.addingAttributes([
|
||||
UIFontDescriptor.AttributeName.traits: [UIFontDescriptor.TraitKey.weight: weight.weight]
|
||||
])
|
||||
if #available(iOS 14.0, *) {
|
||||
if weight != .regular {
|
||||
updatedDescriptor = updatedDescriptor?.addingAttributes([
|
||||
UIFontDescriptor.AttributeName.traits: [UIFontDescriptor.TraitKey.weight: weight.weight]
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
if let updatedDescriptor = updatedDescriptor {
|
||||
|
@ -157,7 +157,7 @@ private func generateRectsImage(color: UIColor, rects: [CGRect], inset: CGFloat,
|
||||
|
||||
public final class LinkHighlightingNode: ASDisplayNode {
|
||||
private var rects: [CGRect] = []
|
||||
private let imageNode: ASImageNode
|
||||
public let imageNode: ASImageNode
|
||||
|
||||
public var innerRadius: CGFloat = 4.0
|
||||
public var outerRadius: CGFloat = 4.0
|
||||
@ -196,7 +196,7 @@ public final class LinkHighlightingNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
private func updateImage() {
|
||||
if rects.isEmpty {
|
||||
if self.rects.isEmpty {
|
||||
self.imageNode.image = nil
|
||||
}
|
||||
let (offset, image) = generateRectsImage(color: self.color, rects: self.rects, inset: self.inset, outerRadius: self.outerRadius, innerRadius: self.innerRadius)
|
||||
@ -206,6 +206,19 @@ public final class LinkHighlightingNode: ASDisplayNode {
|
||||
self.imageNode.frame = CGRect(origin: offset, size: image.size)
|
||||
}
|
||||
}
|
||||
|
||||
public static func generateImage(color: UIColor, inset: CGFloat, innerRadius: CGFloat, outerRadius: CGFloat, rects: [CGRect]) -> (CGPoint, UIImage)? {
|
||||
if rects.isEmpty {
|
||||
return nil
|
||||
}
|
||||
let (offset, image) = generateRectsImage(color: color, rects: rects, inset: inset, outerRadius: outerRadius, innerRadius: innerRadius)
|
||||
|
||||
if let image = image {
|
||||
return (offset, image)
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
public func asyncLayout() -> (UIColor, [CGRect], CGFloat, CGFloat, CGFloat) -> () -> Void {
|
||||
let currentRects = self.rects
|
||||
|
@ -117,6 +117,8 @@ enum NavigationPreviousAction: Equatable {
|
||||
}
|
||||
}
|
||||
|
||||
private var sharedIsReduceTransparencyEnabled = UIAccessibility.isReduceTransparencyEnabled
|
||||
|
||||
public final class NavigationBackgroundNode: ASDisplayNode {
|
||||
private var _color: UIColor
|
||||
public var color: UIColor {
|
||||
@ -148,14 +150,9 @@ public final class NavigationBackgroundNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
private func updateBackgroundBlur(forceKeepBlur: Bool) {
|
||||
if self.enableBlur && ((self.color.alpha > 0.1 && self.color.alpha < 0.95) || forceKeepBlur) {
|
||||
if self.enableBlur && !sharedIsReduceTransparencyEnabled && ((self.color.alpha > .ulpOfOne && self.color.alpha < 0.95) || forceKeepBlur) {
|
||||
if self.effectView == nil {
|
||||
let effectView: UIVisualEffectView
|
||||
if self.color.lightness > 0.6 {
|
||||
effectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
|
||||
} else {
|
||||
effectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
|
||||
}
|
||||
let effectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
|
||||
|
||||
for subview in effectView.subviews {
|
||||
if subview.description.contains("VisualEffectSubview") {
|
||||
@ -164,6 +161,8 @@ public final class NavigationBackgroundNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
if let sublayer = effectView.layer.sublayers?[0], let filters = sublayer.filters {
|
||||
sublayer.backgroundColor = nil
|
||||
sublayer.isOpaque = false
|
||||
let allowedKeys: [String] = [
|
||||
"colorSaturate",
|
||||
"gaussianBlur"
|
||||
@ -176,11 +175,6 @@ public final class NavigationBackgroundNode: ASDisplayNode {
|
||||
if !allowedKeys.contains(filterName) {
|
||||
return false
|
||||
}
|
||||
/*if filterName == "colorSaturate" {
|
||||
filter.setValue(2.8 as NSNumber, forKey: "inputAmount")
|
||||
} else if filterName == "gaussianBlur" {
|
||||
filter.setValue(5.0 as NSNumber, forKey: "inputRadius")
|
||||
}*/
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -205,7 +199,11 @@ public final class NavigationBackgroundNode: ASDisplayNode {
|
||||
}
|
||||
self._color = color
|
||||
|
||||
transition.updateBackgroundColor(node: self.backgroundNode, color: self.color)
|
||||
if sharedIsReduceTransparencyEnabled {
|
||||
transition.updateBackgroundColor(node: self.backgroundNode, color: self.color.withAlphaComponent(1.0))
|
||||
} else {
|
||||
transition.updateBackgroundColor(node: self.backgroundNode, color: self.color)
|
||||
}
|
||||
|
||||
self.updateBackgroundBlur(forceKeepBlur: forceKeepBlur)
|
||||
}
|
||||
|
@ -133,7 +133,7 @@ public extension UIColor {
|
||||
}
|
||||
}
|
||||
|
||||
var hsb: (CGFloat, CGFloat, CGFloat) {
|
||||
var hsb: (h: CGFloat, s: CGFloat, b: CGFloat) {
|
||||
var hue: CGFloat = 0.0
|
||||
var saturation: CGFloat = 0.0
|
||||
var brightness: CGFloat = 0.0
|
||||
@ -284,6 +284,27 @@ public extension UIColor {
|
||||
let b = e1.b - e2.b
|
||||
return ((512 + rMean) * r * r) >> 8 + 4 * g * g + ((767 - rMean) * b * b) >> 8
|
||||
}
|
||||
|
||||
static func average(of colors: [UIColor]) -> UIColor {
|
||||
var sr: CGFloat = 0.0
|
||||
var sg: CGFloat = 0.0
|
||||
var sb: CGFloat = 0.0
|
||||
var sa: CGFloat = 0.0
|
||||
|
||||
for color in colors {
|
||||
var r: CGFloat = 0.0
|
||||
var g: CGFloat = 0.0
|
||||
var b: CGFloat = 0.0
|
||||
var a: CGFloat = 0.0
|
||||
color.getRed(&r, green: &g, blue: &b, alpha: &a)
|
||||
sr += r
|
||||
sg += g
|
||||
sb += b
|
||||
sa += a
|
||||
}
|
||||
|
||||
return UIColor(red: sr / CGFloat(colors.count), green: sg / CGFloat(colors.count), blue: sb / CGFloat(colors.count), alpha: sa / CGFloat(colors.count))
|
||||
}
|
||||
}
|
||||
|
||||
public extension CGSize {
|
||||
|
@ -386,6 +386,8 @@ public class GalleryController: ViewController, StandalonePresentableController
|
||||
|
||||
public var centralItemUpdated: ((MessageId) -> Void)?
|
||||
|
||||
private var initialOrientation: UIInterfaceOrientation?
|
||||
|
||||
public init(context: AccountContext, source: GalleryControllerItemSource, invertItemOrder: Bool = false, streamSingleVideo: Bool = false, fromPlayingVideo: Bool = false, landscape: Bool = false, timecode: Double? = nil, synchronousLoad: Bool = false, replaceRootController: @escaping (ViewController, Promise<Bool>?) -> Void, baseNavigationController: NavigationController?, actionInteraction: GalleryControllerActionInteraction? = nil) {
|
||||
self.context = context
|
||||
self.source = source
|
||||
@ -897,6 +899,10 @@ public class GalleryController: ViewController, StandalonePresentableController
|
||||
}
|
||||
|
||||
deinit {
|
||||
if let initialOrientation = self.initialOrientation {
|
||||
self.context.sharedContext.applicationBindings.forceOrientation(initialOrientation)
|
||||
}
|
||||
|
||||
self.accountInUseDisposable.dispose()
|
||||
self.disposable.dispose()
|
||||
self.centralItemAttributesDisposable.dispose()
|
||||
@ -1019,6 +1025,17 @@ public class GalleryController: ViewController, StandalonePresentableController
|
||||
self?.galleryNode.pager.centralItemNode()?.controlsVisibilityUpdated(isVisible: visible)
|
||||
}
|
||||
|
||||
self.galleryNode.updateOrientation = { [weak self] orientation in
|
||||
if let strongSelf = self {
|
||||
if strongSelf.initialOrientation == nil {
|
||||
strongSelf.initialOrientation = orientation == .portrait ? .landscapeRight : .portrait
|
||||
} else if strongSelf.initialOrientation == orientation {
|
||||
strongSelf.initialOrientation = nil
|
||||
}
|
||||
strongSelf.context.sharedContext.applicationBindings.forceOrientation(orientation)
|
||||
}
|
||||
}
|
||||
|
||||
let baseNavigationController = self.baseNavigationController
|
||||
self.galleryNode.baseNavigationController = { [weak baseNavigationController] in
|
||||
return baseNavigationController
|
||||
|
@ -30,6 +30,8 @@ open class GalleryControllerNode: ASDisplayNode, UIScrollViewDelegate, UIGesture
|
||||
public var areControlsHidden = false
|
||||
public var controlsVisibilityChanged: ((Bool) -> Void)?
|
||||
|
||||
public var updateOrientation: ((UIInterfaceOrientation) -> Void)?
|
||||
|
||||
public var isBackgroundExtendedOverNavigationBar = true {
|
||||
didSet {
|
||||
if let (navigationBarHeight, layout) = self.containerLayout {
|
||||
@ -69,6 +71,12 @@ open class GalleryControllerNode: ASDisplayNode, UIScrollViewDelegate, UIGesture
|
||||
}
|
||||
}
|
||||
|
||||
self.pager.updateOrientation = { [weak self] orientation in
|
||||
if let strongSelf = self {
|
||||
strongSelf.updateOrientation?(orientation)
|
||||
}
|
||||
}
|
||||
|
||||
self.pager.dismiss = { [weak self] in
|
||||
if let strongSelf = self {
|
||||
var interfaceAnimationCompleted = false
|
||||
|
@ -22,6 +22,7 @@ open class GalleryItemNode: ASDisplayNode {
|
||||
|
||||
public var toggleControlsVisibility: () -> Void = { }
|
||||
public var updateControlsVisibility: (Bool) -> Void = { _ in }
|
||||
public var updateOrientation: (UIInterfaceOrientation) -> Void = { _ in }
|
||||
public var dismiss: () -> Void = { }
|
||||
public var beginCustomDismiss: () -> Void = { }
|
||||
public var completeCustomDismiss: () -> Void = { }
|
||||
|
@ -107,6 +107,7 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest
|
||||
public var centralItemIndexOffsetUpdated: (([GalleryItem]?, Int, CGFloat)?) -> Void = { _ in }
|
||||
public var toggleControlsVisibility: () -> Void = { }
|
||||
public var updateControlsVisibility: (Bool) -> Void = { _ in }
|
||||
public var updateOrientation: (UIInterfaceOrientation) -> Void = { _ in }
|
||||
public var dismiss: () -> Void = { }
|
||||
public var beginCustomDismiss: () -> Void = { }
|
||||
public var completeCustomDismiss: () -> Void = { }
|
||||
@ -474,6 +475,7 @@ public final class GalleryPagerNode: ASDisplayNode, UIScrollViewDelegate, UIGest
|
||||
let node = self.items[index].node(synchronous: synchronous)
|
||||
node.toggleControlsVisibility = self.toggleControlsVisibility
|
||||
node.updateControlsVisibility = self.updateControlsVisibility
|
||||
node.updateOrientation = self.updateOrientation
|
||||
node.dismiss = self.dismiss
|
||||
node.beginCustomDismiss = self.beginCustomDismiss
|
||||
node.completeCustomDismiss = self.completeCustomDismiss
|
||||
|
@ -188,7 +188,7 @@ private final class UniversalVideoGalleryItemOverlayNode: GalleryOverlayContentN
|
||||
self.addSubnode(self.wrapperNode)
|
||||
self.wrapperNode.addSubnode(self.fullscreenNode)
|
||||
|
||||
self.fullscreenNode.addTarget(self, action: #selector(self.soundButtonPressed), forControlEvents: .touchUpInside)
|
||||
self.fullscreenNode.addTarget(self, action: #selector(self.toggleFullscreenPressed), forControlEvents: .touchUpInside)
|
||||
}
|
||||
|
||||
override func updateLayout(size: CGSize, metrics: LayoutMetrics, leftInset: CGFloat, rightInset: CGFloat, bottomInset: CGFloat, transition: ContainedViewLayoutTransition) {
|
||||
@ -227,7 +227,7 @@ private final class UniversalVideoGalleryItemOverlayNode: GalleryOverlayContentN
|
||||
}
|
||||
}
|
||||
|
||||
@objc func soundButtonPressed() {
|
||||
@objc func toggleFullscreenPressed() {
|
||||
var toLandscape = false
|
||||
if let (size, _, _, _ ,_) = self.validLayout, size.width < size.height {
|
||||
toLandscape = true
|
||||
@ -337,7 +337,7 @@ final class UniversalVideoGalleryItemNode: ZoomableContentGalleryItemNode {
|
||||
|
||||
self.overlayContentNode.action = { [weak self] toLandscape in
|
||||
self?.updateControlsVisibility(!toLandscape)
|
||||
context.sharedContext.applicationBindings.forceOrientation(toLandscape ? .landscapeRight : .portrait)
|
||||
self?.updateOrientation(toLandscape ? .landscapeRight : .portrait)
|
||||
}
|
||||
|
||||
self.scrubberView.seek = { [weak self] timecode in
|
||||
|
@ -3,6 +3,7 @@ import UIKit
|
||||
import Display
|
||||
import AsyncDisplayKit
|
||||
import SwiftSignalKit
|
||||
import Accelerate
|
||||
|
||||
private func shiftArray(array: [CGPoint], offset: Int) -> [CGPoint] {
|
||||
var newArray = array
|
||||
@ -31,7 +32,7 @@ private func interpolatePoints(_ point1: CGPoint, _ point2: CGPoint, at factor:
|
||||
return CGPoint(x: interpolateFloat(point1.x, point2.x, at: factor), y: interpolateFloat(point1.y, point2.y, at: factor))
|
||||
}
|
||||
|
||||
private func generateGradient(size: CGSize, colors: [UIColor], positions: [CGPoint]) -> UIImage {
|
||||
private func generateGradient(size: CGSize, colors: [UIColor], positions: [CGPoint], adjustSaturation: CGFloat = 1.0) -> UIImage {
|
||||
let width = Int(size.width)
|
||||
let height = Int(size.height)
|
||||
|
||||
@ -114,6 +115,43 @@ private func generateGradient(size: CGSize, colors: [UIColor], positions: [CGPoi
|
||||
}
|
||||
}
|
||||
|
||||
if abs(adjustSaturation - 1.0) > .ulpOfOne {
|
||||
var buffer = vImage_Buffer()
|
||||
buffer.data = context.bytes
|
||||
buffer.width = UInt(width)
|
||||
buffer.height = UInt(height)
|
||||
buffer.rowBytes = context.bytesPerRow
|
||||
|
||||
let divisor: Int32 = 0x1000
|
||||
|
||||
let rwgt: CGFloat = 0.3086
|
||||
let gwgt: CGFloat = 0.6094
|
||||
let bwgt: CGFloat = 0.0820
|
||||
|
||||
let a = (1.0 - adjustSaturation) * rwgt + adjustSaturation
|
||||
let b = (1.0 - adjustSaturation) * rwgt
|
||||
let c = (1.0 - adjustSaturation) * rwgt
|
||||
let d = (1.0 - adjustSaturation) * gwgt
|
||||
let e = (1.0 - adjustSaturation) * gwgt + adjustSaturation
|
||||
let f = (1.0 - adjustSaturation) * gwgt
|
||||
let g = (1.0 - adjustSaturation) * bwgt
|
||||
let h = (1.0 - adjustSaturation) * bwgt
|
||||
let i = (1.0 - adjustSaturation) * bwgt + adjustSaturation
|
||||
|
||||
let satMatrix: [CGFloat] = [
|
||||
a, b, c, 0,
|
||||
d, e, f, 0,
|
||||
g, h, i, 0,
|
||||
0, 0, 0, 1
|
||||
]
|
||||
|
||||
var matrix: [Int16] = satMatrix.map { value in
|
||||
return Int16(value * CGFloat(divisor))
|
||||
}
|
||||
|
||||
vImageMatrixMultiply_ARGB8888(&buffer, &buffer, &matrix, divisor, nil, nil, vImage_Flags(kvImageDoNotTile))
|
||||
}
|
||||
|
||||
return context.generateImage()!
|
||||
}
|
||||
|
||||
@ -128,7 +166,7 @@ public final class GradientBackgroundNode: ASDisplayNode {
|
||||
super.init()
|
||||
|
||||
self.index = parentNode.cloneNodes.add(Weak<CloneNode>(self))
|
||||
self.image = parentNode.contentView.image
|
||||
self.image = parentNode.dimmedImage
|
||||
}
|
||||
|
||||
deinit {
|
||||
@ -160,6 +198,19 @@ public final class GradientBackgroundNode: ASDisplayNode {
|
||||
private var validPhase: Int?
|
||||
private var invalidated: Bool = false
|
||||
|
||||
private var dimmedImageParams: (size: CGSize, colors: [UIColor], positions: [CGPoint])?
|
||||
private var _dimmedImage: UIImage?
|
||||
// Lazily built saturation-boosted (1.7x) variant of the gradient image, served to
// clone nodes. Cached in `_dimmedImage`; rebuilt on demand from the last recorded
// `dimmedImageParams` when the cache is empty.
private var dimmedImage: UIImage? {
    if let current = self._dimmedImage {
        // Fast path: reuse the previously generated image.
        return current
    } else if let (size, colors, positions) = self.dimmedImageParams {
        // Regenerate from the stored parameters and cache the result so repeated
        // reads do not re-render the gradient (generation is comparatively expensive).
        self._dimmedImage = generateGradient(size: size, colors: colors, positions: positions, adjustSaturation: 1.7)
        return self._dimmedImage
    } else {
        // No layout has recorded parameters yet — nothing can be rendered.
        return nil
    }
}
|
||||
|
||||
private var validLayout: CGSize?
|
||||
|
||||
private var colors: [UIColor] = [
|
||||
@ -201,7 +252,7 @@ public final class GradientBackgroundNode: ASDisplayNode {
|
||||
deinit {
|
||||
}
|
||||
|
||||
public func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
|
||||
public func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition, extendAnimation: Bool = false) {
|
||||
let sizeUpdated = self.validLayout != size
|
||||
self.validLayout = size
|
||||
|
||||
@ -214,54 +265,127 @@ public final class GradientBackgroundNode: ASDisplayNode {
|
||||
self.validPhase = self.phase
|
||||
self.invalidated = false
|
||||
|
||||
let previousPositions = gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: validPhase % 8))
|
||||
var steps: [[CGPoint]] = []
|
||||
if extendAnimation {
|
||||
let phaseCount = 4
|
||||
var stepPhase = (self.phase + phaseCount) % 8
|
||||
for _ in 0 ... phaseCount {
|
||||
steps.append(gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: stepPhase)))
|
||||
stepPhase = stepPhase - 1
|
||||
if stepPhase < 0 {
|
||||
stepPhase = 7
|
||||
}
|
||||
}
|
||||
} else {
|
||||
steps.append(gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: validPhase % 8)))
|
||||
steps.append(positions)
|
||||
}
|
||||
|
||||
if case let .animated(duration, curve) = transition, duration > 0.001 {
|
||||
var images: [UIImage] = []
|
||||
|
||||
let maxFrame = Int(duration * 30)
|
||||
for i in 0 ..< maxFrame {
|
||||
let t = curve.solve(at: CGFloat(i) / CGFloat(maxFrame - 1))
|
||||
var dimmedImages: [UIImage] = []
|
||||
let needDimmedImages = !self.cloneNodes.isEmpty
|
||||
|
||||
let morphedPositions = Array(zip(previousPositions, positions).map { previous, current -> CGPoint in
|
||||
return interpolatePoints(previous, current, at: t)
|
||||
})
|
||||
let stepCount = steps.count - 1
|
||||
|
||||
let fps: Double = extendAnimation ? 60 : 30
|
||||
let maxFrame = Int(duration * fps)
|
||||
let framesPerAnyStep = maxFrame / stepCount
|
||||
|
||||
for frameIndex in 0 ..< maxFrame {
|
||||
let t = curve.solve(at: CGFloat(frameIndex) / CGFloat(maxFrame - 1))
|
||||
let globalStep = Int(t * CGFloat(maxFrame))
|
||||
let stepIndex = min(stepCount - 1, globalStep / framesPerAnyStep)
|
||||
|
||||
let stepFrameIndex = globalStep - stepIndex * framesPerAnyStep
|
||||
let stepFrames: Int
|
||||
if stepIndex == stepCount - 1 {
|
||||
stepFrames = maxFrame - framesPerAnyStep * (stepCount - 1)
|
||||
} else {
|
||||
stepFrames = framesPerAnyStep
|
||||
}
|
||||
let stepT = CGFloat(stepFrameIndex) / CGFloat(stepFrames - 1)
|
||||
|
||||
var morphedPositions: [CGPoint] = []
|
||||
for i in 0 ..< steps[0].count {
|
||||
morphedPositions.append(interpolatePoints(steps[stepIndex][i], steps[stepIndex + 1][i], at: stepT))
|
||||
}
|
||||
|
||||
images.append(generateGradient(size: imageSize, colors: self.colors, positions: morphedPositions))
|
||||
}
|
||||
|
||||
self.contentView.image = images.last
|
||||
let animation = CAKeyframeAnimation(keyPath: "contents")
|
||||
animation.values = images.map { $0.cgImage! }
|
||||
animation.duration = duration * UIView.animationDurationFactor()
|
||||
animation.calculationMode = .linear
|
||||
animation.isRemovedOnCompletion = true
|
||||
self.contentView.layer.removeAnimation(forKey: "contents")
|
||||
self.contentView.layer.add(animation, forKey: "contents")
|
||||
|
||||
for cloneNode in self.cloneNodes {
|
||||
if let value = cloneNode.value {
|
||||
value.image = images.last
|
||||
value.layer.removeAnimation(forKey: "contents")
|
||||
value.layer.add(animation.copy() as! CAAnimation, forKey: "contents")
|
||||
if needDimmedImages {
|
||||
dimmedImages.append(generateGradient(size: imageSize, colors: self.colors, positions: morphedPositions, adjustSaturation: 1.7))
|
||||
}
|
||||
}
|
||||
|
||||
self.dimmedImageParams = (imageSize, self.colors, gatherPositions(shiftArray(array: GradientBackgroundNode.basePositions, offset: self.phase % 8)))
|
||||
|
||||
self.contentView.image = images.last
|
||||
|
||||
let animation = CAKeyframeAnimation(keyPath: "contents")
|
||||
animation.values = images.map { $0.cgImage! }
|
||||
animation.duration = duration * UIView.animationDurationFactor()
|
||||
if extendAnimation {
|
||||
animation.calculationMode = .discrete
|
||||
} else {
|
||||
animation.calculationMode = .linear
|
||||
}
|
||||
animation.isRemovedOnCompletion = true
|
||||
if extendAnimation {
|
||||
animation.fillMode = .backwards
|
||||
animation.beginTime = self.contentView.layer.convertTime(CACurrentMediaTime(), from: nil) + 0.25
|
||||
}
|
||||
|
||||
self.contentView.layer.removeAnimation(forKey: "contents")
|
||||
self.contentView.layer.add(animation, forKey: "contents")
|
||||
|
||||
if !self.cloneNodes.isEmpty {
|
||||
let animation = CAKeyframeAnimation(keyPath: "contents")
|
||||
animation.values = dimmedImages.map { $0.cgImage! }
|
||||
animation.duration = duration * UIView.animationDurationFactor()
|
||||
if extendAnimation {
|
||||
animation.calculationMode = .discrete
|
||||
} else {
|
||||
animation.calculationMode = .linear
|
||||
}
|
||||
animation.isRemovedOnCompletion = true
|
||||
if extendAnimation {
|
||||
animation.fillMode = .backwards
|
||||
animation.beginTime = self.contentView.layer.convertTime(CACurrentMediaTime(), from: nil) + 0.25
|
||||
}
|
||||
|
||||
self._dimmedImage = dimmedImages.last
|
||||
|
||||
for cloneNode in self.cloneNodes {
|
||||
if let value = cloneNode.value {
|
||||
value.image = dimmedImages.last
|
||||
value.layer.removeAnimation(forKey: "contents")
|
||||
value.layer.add(animation, forKey: "contents")
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let image = generateGradient(size: imageSize, colors: colors, positions: positions)
|
||||
let image = generateGradient(size: imageSize, colors: self.colors, positions: positions)
|
||||
self.contentView.image = image
|
||||
|
||||
let dimmedImage = generateGradient(size: imageSize, colors: self.colors, positions: positions, adjustSaturation: 1.7)
|
||||
self._dimmedImage = dimmedImage
|
||||
self.dimmedImageParams = (imageSize, self.colors, positions)
|
||||
|
||||
for cloneNode in self.cloneNodes {
|
||||
cloneNode.value?.image = image
|
||||
cloneNode.value?.image = dimmedImage
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if sizeUpdated {
|
||||
let image = generateGradient(size: imageSize, colors: colors, positions: positions)
|
||||
let image = generateGradient(size: imageSize, colors: self.colors, positions: positions)
|
||||
self.contentView.image = image
|
||||
|
||||
let dimmedImage = generateGradient(size: imageSize, colors: self.colors, positions: positions, adjustSaturation: 1.7)
|
||||
self.dimmedImageParams = (imageSize, self.colors, positions)
|
||||
|
||||
for cloneNode in self.cloneNodes {
|
||||
cloneNode.value?.image = image
|
||||
cloneNode.value?.image = dimmedImage
|
||||
}
|
||||
|
||||
self.validPhase = self.phase
|
||||
@ -278,21 +402,25 @@ public final class GradientBackgroundNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
public func animateEvent(transition: ContainedViewLayoutTransition) {
|
||||
public func animateEvent(transition: ContainedViewLayoutTransition, extendAnimation: Bool = false) {
|
||||
guard case let .animated(duration, _) = transition, duration > 0.001 else {
|
||||
return
|
||||
}
|
||||
|
||||
if self.phase == 0 {
|
||||
self.phase = 7
|
||||
|
||||
if extendAnimation {
|
||||
self.invalidated = true
|
||||
} else {
|
||||
self.phase = self.phase - 1
|
||||
if self.phase == 0 {
|
||||
self.phase = 7
|
||||
} else {
|
||||
self.phase = self.phase - 1
|
||||
}
|
||||
}
|
||||
if self.useSharedAnimationPhase {
|
||||
GradientBackgroundNode.sharedPhase = self.phase
|
||||
}
|
||||
if let size = self.validLayout {
|
||||
self.updateLayout(size: size, transition: transition)
|
||||
self.updateLayout(size: size, transition: transition, extendAnimation: extendAnimation)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -32,6 +32,7 @@ swift_library(
|
||||
"//submodules/ContextUI:ContextUI",
|
||||
"//submodules/FileMediaResourceStatus:FileMediaResourceStatus",
|
||||
"//submodules/ManagedAnimationNode:ManagedAnimationNode",
|
||||
"//submodules/WallpaperResources:WallpaperResources",
|
||||
],
|
||||
visibility = [
|
||||
"//visibility:public",
|
||||
|
@ -16,6 +16,7 @@ import UrlHandling
|
||||
import UrlWhitelist
|
||||
import AccountContext
|
||||
import TelegramStringFormatting
|
||||
import WallpaperResources
|
||||
|
||||
private let iconFont = Font.with(size: 30.0, design: .round, weight: .bold)
|
||||
|
||||
@ -253,6 +254,9 @@ public final class ListMessageSnippetItemNode: ListMessageNode {
|
||||
var primaryUrl: String?
|
||||
|
||||
var isInstantView = false
|
||||
|
||||
var previewWallpaper: TelegramWallpaper?
|
||||
var previewWallpaperFileReference: FileMediaReference?
|
||||
|
||||
var selectedMedia: TelegramMediaWebpage?
|
||||
var processed = false
|
||||
@ -283,6 +287,17 @@ public final class ListMessageSnippetItemNode: ListMessageNode {
|
||||
iconImageReferenceAndRepresentation = (.message(message: MessageReference(item.message), media: image), representation)
|
||||
}
|
||||
} else if let file = content.file {
|
||||
if content.type == "telegram_background" {
|
||||
if let wallpaper = parseWallpaperUrl(content.url) {
|
||||
switch wallpaper {
|
||||
case let .slug(slug, _, colors, intensity, angle):
|
||||
previewWallpaperFileReference = .message(message: MessageReference(item.message), media: file)
|
||||
previewWallpaper = .file(id: file.fileId.id, accessHash: 0, isCreator: false, isDefault: false, isPattern: true, isDark: false, slug: slug, file: file, settings: WallpaperSettings(blur: false, motion: false, colors: colors, intensity: intensity, rotation: angle))
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if let representation = smallestImageRepresentation(file.previewRepresentations) {
|
||||
iconImageReferenceAndRepresentation = (.message(message: MessageReference(item.message), media: file), representation)
|
||||
}
|
||||
@ -508,7 +523,9 @@ public final class ListMessageSnippetItemNode: ListMessageNode {
|
||||
}
|
||||
|
||||
if currentIconImageRepresentation != iconImageReferenceAndRepresentation?.1 {
|
||||
if let iconImageReferenceAndRepresentation = iconImageReferenceAndRepresentation {
|
||||
if let previewWallpaper = previewWallpaper, let fileReference = previewWallpaperFileReference {
|
||||
updateIconImageSignal = wallpaperThumbnail(account: item.context.account, accountManager: item.context.sharedContext.accountManager, fileReference: fileReference, wallpaper: previewWallpaper, synchronousLoad: false)
|
||||
} else if let iconImageReferenceAndRepresentation = iconImageReferenceAndRepresentation {
|
||||
if let imageReference = iconImageReferenceAndRepresentation.0.concrete(TelegramMediaImage.self) {
|
||||
updateIconImageSignal = chatWebpageSnippetPhoto(account: item.context.account, photoReference: imageReference)
|
||||
} else if let fileReference = iconImageReferenceAndRepresentation.0.concrete(TelegramMediaFile.self) {
|
||||
|
@ -2112,6 +2112,24 @@ static NSString *dumpHexString(NSData *data, int maxLength) {
|
||||
}
|
||||
}
|
||||
|
||||
/*
 * Compares two NSData buffers without a data-dependent early exit, for use when
 * comparing secret-derived values (here: embedded message keys) so that timing
 * does not reveal the position of the first mismatching byte.
 *
 * The length check may return early: only the contents comparison needs to be
 * timing-safe, and at the call site both operands have a fixed public length
 * (16-byte message keys).
 */
static bool isDataEqualToDataConstTime(NSData *data1, NSData *data2) {
    if (data1.length != data2.length) {
        return false;
    }

    uint8_t const *bytes1 = data1.bytes;
    uint8_t const *bytes2 = data2.bytes;

    /* Accumulate XOR differences instead of branching per byte: the original
     * `if (bytes1[i] != bytes2[i])` introduced a data-dependent branch, which
     * defeats the constant-time intent. This loop's timing is independent of
     * where (or whether) the buffers differ. NSUInteger index also avoids the
     * signed/unsigned mismatch against data1.length. */
    uint8_t result = 0;
    for (NSUInteger i = 0; i < data1.length; i++) {
        result |= (uint8_t)(bytes1[i] ^ bytes2[i]);
    }

    return result == 0;
}
|
||||
|
||||
- (NSData *)_decryptIncomingTransportData:(NSData *)transportData address:(MTDatacenterAddress *)address authKey:(MTDatacenterAuthKey *)authKey
|
||||
{
|
||||
MTDatacenterAuthKey *effectiveAuthKey = authKey;
|
||||
@ -2146,7 +2164,7 @@ static NSString *dumpHexString(NSData *data, int maxLength) {
|
||||
NSData *msgKeyLarge = MTSha256(msgKeyLargeData);
|
||||
NSData *messageKey = [msgKeyLarge subdataWithRange:NSMakeRange(8, 16)];
|
||||
|
||||
if (![messageKey isEqualToData:embeddedMessageKey]) {
|
||||
if (!isDataEqualToDataConstTime(messageKey, embeddedMessageKey)) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
|
@ -1107,6 +1107,8 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
public var updateCustomItemsOnlySynchronously = false
|
||||
|
||||
private func updateItems(size: CGSize, update: Bool = false, transition: ContainedViewLayoutTransition, stripTransition: ContainedViewLayoutTransition, synchronous: Bool = false) {
|
||||
var validIds: [WrappedMediaResourceId] = []
|
||||
var addedItemNodesForAdditiveTransition: [PeerInfoAvatarListItemNode] = []
|
||||
@ -1121,6 +1123,10 @@ public final class PeerInfoAvatarListContainerNode: ASDisplayNode {
|
||||
if let current = self.itemNodes[self.items[i].id] {
|
||||
itemNode = current
|
||||
if update {
|
||||
var synchronous = synchronous && i == self.currentIndex
|
||||
if case .custom = self.items[i], self.updateCustomItemsOnlySynchronously {
|
||||
synchronous = true
|
||||
}
|
||||
current.setup(item: self.items[i], synchronous: synchronous && i == self.currentIndex, fullSizeOnly: self.firstFullSizeOnly && i == 0)
|
||||
}
|
||||
} else if let peer = self.peer {
|
||||
|
@ -34,7 +34,7 @@ final class GroupInfoSearchNavigationContentNode: NavigationBarContentNode, Item
|
||||
|
||||
self.cancel = cancel
|
||||
|
||||
self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: theme, hasSeparator: false), strings: strings, fieldStyle: .modern)
|
||||
self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: theme, hasSeparator: false), strings: strings, fieldStyle: .modern, displayBackground: false)
|
||||
|
||||
super.init()
|
||||
|
||||
|
@ -151,7 +151,7 @@ public struct PeerId: Hashable, CustomStringConvertible, Comparable, Codable {
|
||||
self.namespace = Namespace(rawValue: UInt32(namespaceBits))
|
||||
|
||||
let idHighBits = (data >> (32 + 3)) & 0xffffffff
|
||||
assert(idHighBits == 0)
|
||||
//assert(idHighBits == 0)
|
||||
|
||||
self.id = Id(rawValue: Int32(bitPattern: UInt32(clamping: idLowBits)))
|
||||
}
|
||||
|
@ -807,14 +807,14 @@ public class SearchBarNode: ASDisplayNode, UITextFieldDelegate {
|
||||
private var strings: PresentationStrings?
|
||||
private let cancelText: String?
|
||||
|
||||
public init(theme: SearchBarNodeTheme, strings: PresentationStrings, fieldStyle: SearchBarStyle = .legacy, forceSeparator: Bool = false, cancelText: String? = nil) {
|
||||
public init(theme: SearchBarNodeTheme, strings: PresentationStrings, fieldStyle: SearchBarStyle = .legacy, forceSeparator: Bool = false, displayBackground: Bool = true, cancelText: String? = nil) {
|
||||
self.fieldStyle = fieldStyle
|
||||
self.forceSeparator = forceSeparator
|
||||
self.cancelText = cancelText
|
||||
|
||||
self.backgroundNode = NavigationBackgroundNode(color: theme.background)
|
||||
self.backgroundNode.isUserInteractionEnabled = false
|
||||
//self.backgroundNode.isHidden = true
|
||||
self.backgroundNode.isHidden = !displayBackground
|
||||
|
||||
self.separatorNode = ASDisplayNode()
|
||||
self.separatorNode.isLayerBacked = true
|
||||
|
@ -38,7 +38,7 @@ public final class SearchDisplayController {
|
||||
private var isSearchingDisposable: Disposable?
|
||||
|
||||
public init(presentationData: PresentationData, mode: SearchDisplayControllerMode = .navigation, placeholder: String? = nil, hasBackground: Bool = false, hasSeparator: Bool = false, contentNode: SearchDisplayControllerContentNode, cancel: @escaping () -> Void) {
|
||||
self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: presentationData.theme, hasBackground: hasBackground, hasSeparator: hasSeparator), strings: presentationData.strings, fieldStyle: .modern, forceSeparator: hasSeparator)
|
||||
self.searchBar = SearchBarNode(theme: SearchBarNodeTheme(theme: presentationData.theme, hasBackground: hasBackground, hasSeparator: hasSeparator), strings: presentationData.strings, fieldStyle: .modern, forceSeparator: hasSeparator, displayBackground: hasBackground)
|
||||
self.backgroundNode = BackgroundNode()
|
||||
self.backgroundNode.backgroundColor = presentationData.theme.chatList.backgroundColor
|
||||
self.backgroundNode.allowsGroupOpacity = true
|
||||
|
@ -169,7 +169,8 @@ final class SettingsThemeWallpaperNode: ASDisplayNode {
|
||||
self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: UIColor(white: 0.0, alpha: 1.0 + patternIntensity))
|
||||
} else {
|
||||
self.imageNode.alpha = CGFloat(file.settings.intensity ?? 50) / 100.0
|
||||
self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: UIColor(white: 0.0, alpha: 1.0))
|
||||
let isLight = UIColor.average(of: file.settings.colors.map(UIColor.init(rgb:))).hsb.b > 0.3
|
||||
self.arguments = PatternWallpaperArguments(colors: [.clear], rotation: nil, customPatternColor: isLight ? .black : .white)
|
||||
}
|
||||
imageSignal = patternWallpaperImage(account: context.account, accountManager: context.sharedContext.accountManager, representations: convertedRepresentations, mode: .thumbnail, autoFetchFullSize: true)
|
||||
} else {
|
||||
|
@ -130,6 +130,12 @@ final class ThemeAccentColorController: ViewController {
|
||||
deinit {
|
||||
self.applyDisposable.dispose()
|
||||
}
|
||||
|
||||
override func viewDidAppear(_ animated: Bool) {
|
||||
super.viewDidAppear(animated)
|
||||
|
||||
self.controllerNode.animateWallpaperAppeared()
|
||||
}
|
||||
|
||||
override func loadDisplayNode() {
|
||||
super.loadDisplayNode()
|
||||
@ -213,7 +219,7 @@ final class ThemeAccentColorController: ViewController {
|
||||
}
|
||||
|
||||
if let themeReference = generalThemeReference {
|
||||
updatedTheme = makePresentationTheme(mediaBox: context.sharedContext.accountManager.mediaBox, themeReference: themeReference, accentColor: state.accentColor, backgroundColors: state.backgroundColors, bubbleColors: state.messagesColors, wallpaper: state.initialWallpaper ?? coloredWallpaper, serviceBackgroundColor: serviceBackgroundColor) ?? defaultPresentationTheme
|
||||
updatedTheme = makePresentationTheme(mediaBox: context.sharedContext.accountManager.mediaBox, themeReference: themeReference, accentColor: state.accentColor, backgroundColors: state.backgroundColors, bubbleColors: state.messagesColors, wallpaper: coloredWallpaper ?? state.initialWallpaper, serviceBackgroundColor: serviceBackgroundColor) ?? defaultPresentationTheme
|
||||
} else {
|
||||
updatedTheme = customizePresentationTheme(theme, editing: false, accentColor: state.accentColor, backgroundColors: state.backgroundColors, bubbleColors: state.messagesColors, wallpaper: state.initialWallpaper ?? coloredWallpaper)
|
||||
}
|
||||
|
@ -156,7 +156,15 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
private let scrollNode: ASScrollNode
|
||||
private let pageControlBackgroundNode: ASDisplayNode
|
||||
private let pageControlNode: PageControlNode
|
||||
|
||||
private var patternButtonNode: WallpaperOptionButtonNode
|
||||
private var colorsButtonNode: WallpaperOptionButtonNode
|
||||
|
||||
private var playButtonNode: HighlightableButtonNode
|
||||
private let playButtonBackgroundNode: NavigationBackgroundNode
|
||||
private let playButtonPlayImage: UIImage?
|
||||
private let playButtonRotateImage: UIImage?
|
||||
|
||||
private let chatListBackgroundNode: ASDisplayNode
|
||||
private var chatNodes: [ListViewItemNode]?
|
||||
private let maskNode: ASImageNode
|
||||
@ -231,6 +239,40 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
self.pageControlNode = PageControlNode(dotSpacing: 7.0, dotColor: .white, inactiveDotColor: UIColor.white.withAlphaComponent(0.4))
|
||||
|
||||
self.patternButtonNode = WallpaperOptionButtonNode(title: self.presentationData.strings.WallpaperPreview_Pattern, value: .check(false))
|
||||
self.colorsButtonNode = WallpaperOptionButtonNode(title: self.presentationData.strings.WallpaperPreview_WallpaperColors, value: .colors(false, []))
|
||||
|
||||
self.playButtonBackgroundNode = NavigationBackgroundNode(color: UIColor(white: 0.0, alpha: 0.3))
|
||||
self.playButtonNode = HighlightableButtonNode()
|
||||
self.playButtonNode.insertSubnode(self.playButtonBackgroundNode, at: 0)
|
||||
|
||||
self.playButtonPlayImage = generateImage(CGSize(width: 48.0, height: 48.0), rotatedContext: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
context.setFillColor(UIColor.white.cgColor)
|
||||
|
||||
let diameter = size.width
|
||||
|
||||
let factor = diameter / 50.0
|
||||
|
||||
let size = CGSize(width: 15.0, height: 18.0)
|
||||
context.translateBy(x: (diameter - size.width) / 2.0 + 1.5, y: (diameter - size.height) / 2.0)
|
||||
if (diameter < 40.0) {
|
||||
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
|
||||
context.scaleBy(x: factor, y: factor)
|
||||
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
|
||||
}
|
||||
let _ = try? drawSvgPath(context, path: "M1.71891969,0.209353049 C0.769586558,-0.350676705 0,0.0908839327 0,1.18800046 L0,16.8564753 C0,17.9569971 0.750549162,18.357187 1.67393713,17.7519379 L14.1073836,9.60224049 C15.0318735,8.99626906 15.0094718,8.04970371 14.062401,7.49100858 L1.71891969,0.209353049 ")
|
||||
context.fillPath()
|
||||
if (diameter < 40.0) {
|
||||
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
|
||||
context.scaleBy(x: 1.0 / 0.8, y: 1.0 / 0.8)
|
||||
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
|
||||
}
|
||||
context.translateBy(x: -(diameter - size.width) / 2.0 - 1.5, y: -(diameter - size.height) / 2.0)
|
||||
})
|
||||
|
||||
self.playButtonRotateImage = generateTintedImage(image: UIImage(bundleImageName: "Settings/ThemeColorRotateIcon"), color: .white)
|
||||
|
||||
self.playButtonNode.setImage(self.playButtonPlayImage, for: [])
|
||||
|
||||
self.chatListBackgroundNode = ASDisplayNode()
|
||||
|
||||
@ -276,6 +318,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
self.addSubnode(self.pageControlBackgroundNode)
|
||||
self.addSubnode(self.pageControlNode)
|
||||
self.addSubnode(self.patternButtonNode)
|
||||
self.addSubnode(self.colorsButtonNode)
|
||||
self.addSubnode(self.playButtonNode)
|
||||
self.addSubnode(self.colorPanelNode)
|
||||
self.addSubnode(self.patternPanelNode)
|
||||
self.addSubnode(self.toolbarNode)
|
||||
@ -288,6 +332,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
self.backgroundWrapperNode.addSubnode(self.backgroundNode)
|
||||
|
||||
self.patternButtonNode.addTarget(self, action: #selector(self.togglePattern), forControlEvents: .touchUpInside)
|
||||
self.colorsButtonNode.addTarget(self, action: #selector(self.toggleColors), forControlEvents: .touchUpInside)
|
||||
self.playButtonNode.addTarget(self, action: #selector(self.playPressed), forControlEvents: .touchUpInside)
|
||||
|
||||
self.colorPanelNode.colorsChanged = { [weak self] colors, ended in
|
||||
if let strongSelf = self, let section = strongSelf.state.section {
|
||||
@ -389,7 +435,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
|> mapToThrottled { next -> Signal<ThemeColorState, NoError> in
|
||||
return .single(next) |> then(.complete() |> delay(0.0166667, queue: self.queue))
|
||||
}
|
||||
|> map { state -> (PresentationTheme?, TelegramWallpaper, UIColor, [UInt32], PatternWallpaperArguments, Bool) in
|
||||
|> map { state -> (PresentationTheme?, TelegramWallpaper, UIColor, [UInt32], Int32, PatternWallpaperArguments, Bool) in
|
||||
let accentColor = state.accentColor
|
||||
var backgroundColors = state.backgroundColors
|
||||
let messagesColors = state.messagesColors
|
||||
@ -455,9 +501,9 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
|
||||
let patternArguments = PatternWallpaperArguments(colors: calcPatternColors(for: state), rotation: wallpaper.settings?.rotation ?? 0, preview: state.preview)
|
||||
|
||||
return (updatedTheme, wallpaper, serviceBackgroundColor, backgroundColors, patternArguments, state.preview)
|
||||
return (updatedTheme, wallpaper, serviceBackgroundColor, backgroundColors, state.rotation, patternArguments, state.preview)
|
||||
}
|
||||
|> deliverOnMainQueue).start(next: { [weak self] theme, wallpaper, serviceBackgroundColor, backgroundColors, patternArguments, preview in
|
||||
|> deliverOnMainQueue).start(next: { [weak self] theme, wallpaper, serviceBackgroundColor, backgroundColors, rotation, patternArguments, preview in
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
@ -485,6 +531,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
strongSelf.wallpaper = wallpaper
|
||||
strongSelf.patternArguments = patternArguments
|
||||
|
||||
strongSelf.colorsButtonNode.colors = backgroundColors.map(UIColor.init(rgb:))
|
||||
|
||||
if !preview {
|
||||
if !backgroundColors.isEmpty {
|
||||
strongSelf.currentBackgroundColors = (backgroundColors, strongSelf.state.rotation, strongSelf.state.patternIntensity)
|
||||
@ -524,6 +572,8 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
strongSelf.patternPanelNode.serviceBackgroundColor = color
|
||||
strongSelf.pageControlBackgroundNode.backgroundColor = color
|
||||
strongSelf.patternButtonNode.buttonColor = color
|
||||
strongSelf.colorsButtonNode.buttonColor = color
|
||||
strongSelf.playButtonBackgroundNode.color = color
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -687,6 +737,20 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
if (previousState.patternWallpaper == nil) != (self.state.patternWallpaper == nil) {
|
||||
needsLayout = true
|
||||
}
|
||||
|
||||
if (previousState.backgroundColors.count >= 2) != (self.state.backgroundColors.count >= 2) {
|
||||
needsLayout = true
|
||||
}
|
||||
|
||||
if previousState.backgroundColors.count != self.state.backgroundColors.count {
|
||||
if self.state.backgroundColors.count <= 2 {
|
||||
self.playButtonNode.setImage(self.playButtonRotateImage, for: [])
|
||||
} else {
|
||||
self.playButtonNode.setImage(self.playButtonPlayImage, for: [])
|
||||
}
|
||||
}
|
||||
|
||||
self.colorsButtonNode.isSelected = !self.state.colorPanelCollapsed
|
||||
|
||||
if needsLayout, let (layout, navigationBarHeight, _) = self.validLayout {
|
||||
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: animated ? .animated(duration: animationDuration, curve: animationCurve) : .immediate)
|
||||
@ -701,6 +765,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
}
|
||||
updated.section = section
|
||||
updated.displayPatternPanel = false
|
||||
updated.colorPanelCollapsed = false
|
||||
return updated
|
||||
}, animated: true)
|
||||
}
|
||||
@ -825,13 +890,24 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
|
||||
items = sampleMessages.reversed().map { message in
|
||||
let item = self.context.sharedContext.makeChatMessagePreviewItem(context: self.context, messages: [message], theme: self.theme, strings: self.presentationData.strings, wallpaper: self.wallpaper, fontSize: self.presentationData.chatFontSize, chatBubbleCorners: self.presentationData.chatBubbleCorners, dateTimeFormat: self.presentationData.dateTimeFormat, nameOrder: self.presentationData.nameDisplayOrder, forcedResourceStatus: !message.media.isEmpty ? FileMediaResourceStatus(mediaStatus: .playbackStatus(.paused), fetchStatus: .Local) : nil, tapMessage: { [weak self] message in
|
||||
if message.flags.contains(.Incoming) {
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
strongSelf.updateState({ state in
|
||||
var state = state
|
||||
if state.section == .background {
|
||||
state.colorPanelCollapsed = true
|
||||
state.displayPatternPanel = false
|
||||
}
|
||||
return state
|
||||
}, animated: true)
|
||||
/*if message.flags.contains(.Incoming) {
|
||||
self?.updateSection(.accent)
|
||||
self?.requestSectionUpdate?(.accent)
|
||||
} else {
|
||||
self?.updateSection(.messages)
|
||||
self?.requestSectionUpdate?(.messages)
|
||||
}
|
||||
}*/
|
||||
}, clickThroughMessage: { [weak self] in
|
||||
self?.updateSection(.background)
|
||||
self?.requestSectionUpdate?(.background)
|
||||
@ -929,7 +1005,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
|
||||
var colorPanelOffset: CGFloat = 0.0
|
||||
if self.state.colorPanelCollapsed {
|
||||
colorPanelOffset = colorPanelHeight - inputFieldPanelHeight
|
||||
colorPanelOffset = colorPanelHeight
|
||||
}
|
||||
let colorPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomInset - colorPanelHeight + colorPanelOffset), size: CGSize(width: layout.size.width, height: colorPanelHeight))
|
||||
bottomInset += (colorPanelHeight - colorPanelOffset)
|
||||
@ -967,7 +1043,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
var messagesBottomInset: CGFloat = bottomInset
|
||||
|
||||
if displayOptionButtons {
|
||||
messagesBottomInset += 46.0
|
||||
messagesBottomInset += 56.0
|
||||
} else if chatListPreviewAvailable {
|
||||
messagesBottomInset += 37.0
|
||||
}
|
||||
@ -987,19 +1063,42 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
transition.updateFrame(node: self.maskNode, frame: CGRect(x: 0.0, y: layout.size.height - bottomInset - 80.0, width: bounds.width, height: 80.0))
|
||||
|
||||
let patternButtonSize = self.patternButtonNode.measure(layout.size)
|
||||
let maxButtonWidth = patternButtonSize.width
|
||||
let colorsButtonSize = self.colorsButtonNode.measure(layout.size)
|
||||
let maxButtonWidth = max(patternButtonSize.width, colorsButtonSize.width)
|
||||
let buttonSize = CGSize(width: maxButtonWidth, height: 30.0)
|
||||
|
||||
let patternAlpha: CGFloat = displayOptionButtons ? 1.0 : 0.0
|
||||
let colorsAlpha: CGFloat = displayOptionButtons ? 1.0 : 0.0
|
||||
|
||||
let leftButtonFrame = CGRect(origin: CGPoint(x: floor(layout.size.width / 2.0 - buttonSize.width - 10.0), y: layout.size.height - bottomInset - 44.0), size: buttonSize)
|
||||
let centerButtonFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - buttonSize.width) / 2.0), y: layout.size.height - bottomInset - 44.0), size: buttonSize)
|
||||
let rightButtonFrame = CGRect(origin: CGPoint(x: ceil(layout.size.width / 2.0 + 10.0), y: layout.size.height - bottomInset - 44.0), size: buttonSize)
|
||||
|
||||
var patternAlpha: CGFloat = displayOptionButtons ? 1.0 : 0.0
|
||||
|
||||
var patternFrame = centerButtonFrame
|
||||
let patternFrame: CGRect
|
||||
let colorsFrame: CGRect
|
||||
|
||||
let playButtonSize = CGSize(width: 48.0, height: 48.0)
|
||||
var centerDistance: CGFloat = 40.0
|
||||
let buttonsVerticalOffset: CGFloat = 5.0
|
||||
|
||||
let playFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - playButtonSize.width) / 2.0), y: layout.size.height - bottomInset - 44.0 - buttonsVerticalOffset + floor((buttonSize.height - playButtonSize.height) / 2.0)), size: playButtonSize)
|
||||
|
||||
let playAlpha: CGFloat
|
||||
if self.state.backgroundColors.count >= 2 {
|
||||
playAlpha = displayOptionButtons ? 1.0 : 0.0
|
||||
centerDistance += playButtonSize.width
|
||||
} else {
|
||||
playAlpha = 0.0
|
||||
}
|
||||
|
||||
patternFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - buttonSize.width * 2.0 - centerDistance) / 2.0), y: layout.size.height - bottomInset - 44.0 - buttonsVerticalOffset), size: buttonSize)
|
||||
colorsFrame = CGRect(origin: CGPoint(x: patternFrame.maxX + centerDistance, y: layout.size.height - bottomInset - 44.0 - buttonsVerticalOffset), size: buttonSize)
|
||||
|
||||
transition.updateFrame(node: self.patternButtonNode, frame: patternFrame)
|
||||
transition.updateAlpha(node: self.patternButtonNode, alpha: patternAlpha)
|
||||
transition.updateFrame(node: self.colorsButtonNode, frame: colorsFrame)
|
||||
transition.updateAlpha(node: self.colorsButtonNode, alpha: colorsAlpha)
|
||||
|
||||
transition.updateFrame(node: self.playButtonNode, frame: playFrame)
|
||||
transition.updateFrame(node: self.playButtonBackgroundNode, frame: CGRect(origin: CGPoint(), size: playFrame.size))
|
||||
self.playButtonBackgroundNode.update(size: playFrame.size, cornerRadius: playFrame.size.height / 2.0, transition: transition)
|
||||
transition.updateAlpha(node: self.playButtonNode, alpha: playAlpha)
|
||||
}
|
||||
|
||||
@objc private func togglePattern() {
|
||||
@ -1011,11 +1110,7 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
var appeared = false
|
||||
self.updateState({ current in
|
||||
var updated = current
|
||||
if updated.patternWallpaper != nil {
|
||||
updated.previousPatternWallpaper = updated.patternWallpaper
|
||||
updated.patternWallpaper = nil
|
||||
updated.displayPatternPanel = false
|
||||
} else {
|
||||
if !updated.displayPatternPanel {
|
||||
updated.colorPanelCollapsed = false
|
||||
updated.displayPatternPanel = true
|
||||
if current.patternWallpaper == nil, let wallpaper = wallpaper {
|
||||
@ -1029,6 +1124,27 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
}
|
||||
appeared = true
|
||||
}
|
||||
} else {
|
||||
updated.colorPanelCollapsed = true
|
||||
if updated.patternWallpaper != nil {
|
||||
updated.previousPatternWallpaper = updated.patternWallpaper
|
||||
updated.patternWallpaper = nil
|
||||
updated.displayPatternPanel = false
|
||||
} else {
|
||||
updated.colorPanelCollapsed = false
|
||||
updated.displayPatternPanel = true
|
||||
if current.patternWallpaper == nil, let wallpaper = wallpaper {
|
||||
updated.patternWallpaper = wallpaper
|
||||
if updated.backgroundColors.isEmpty {
|
||||
if let backgroundColors = backgroundColors {
|
||||
updated.backgroundColors = backgroundColors.0
|
||||
} else {
|
||||
updated.backgroundColors = []
|
||||
}
|
||||
}
|
||||
appeared = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return updated
|
||||
}, animated: true)
|
||||
@ -1037,4 +1153,38 @@ final class ThemeAccentColorControllerNode: ASDisplayNode, UIScrollViewDelegate
|
||||
self.patternPanelNode.didAppear(initialWallpaper: wallpaper, intensity: self.state.patternIntensity)
|
||||
}
|
||||
}
|
||||
|
||||
@objc private func toggleColors() {
|
||||
self.updateState({ current in
|
||||
var updated = current
|
||||
if updated.displayPatternPanel {
|
||||
updated.displayPatternPanel = false
|
||||
updated.colorPanelCollapsed = false
|
||||
} else {
|
||||
if updated.colorPanelCollapsed {
|
||||
updated.colorPanelCollapsed = false
|
||||
} else {
|
||||
updated.colorPanelCollapsed = true
|
||||
}
|
||||
}
|
||||
updated.displayPatternPanel = false
|
||||
return updated
|
||||
}, animated: true)
|
||||
}
|
||||
|
||||
@objc private func playPressed() {
|
||||
if self.state.backgroundColors.count >= 3 {
|
||||
self.backgroundNode.animateEvent(transition: .animated(duration: 0.5, curve: .spring))
|
||||
} else {
|
||||
self.updateState({ state in
|
||||
var state = state
|
||||
state.rotation = (state.rotation + 90) % 360
|
||||
return state
|
||||
}, animated: true)
|
||||
}
|
||||
}
|
||||
|
||||
func animateWallpaperAppeared() {
|
||||
self.backgroundNode.animateEvent(transition: .animated(duration: 2.0, curve: .spring), extendAnimation: true)
|
||||
}
|
||||
}
|
||||
|
@ -319,6 +319,10 @@ class ThemeSettingsAppIconItemNode: ListViewItemNode, ItemListItemNode {
|
||||
bordered = false
|
||||
case "WhiteFilled":
|
||||
name = "⍺ White"
|
||||
case "New1":
|
||||
name = item.strings.Appearance_AppIconNew1
|
||||
case "New2":
|
||||
name = item.strings.Appearance_AppIconNew2
|
||||
default:
|
||||
break
|
||||
}
|
||||
|
@ -528,7 +528,9 @@ final class WallpaperColorPanelNode: ASDisplayNode {
|
||||
|
||||
if updateLayout, let size = self.validLayout {
|
||||
if let index = self.state.selection {
|
||||
self.colorPickerNode.color = UIColor(rgb: self.state.colors[index])
|
||||
if self.state.colors.count > index {
|
||||
self.colorPickerNode.color = UIColor(rgb: self.state.colors[index])
|
||||
}
|
||||
}
|
||||
|
||||
self.updateLayout(size: size, transition: animated ? .animated(duration: 0.3, curve: .easeInOut) : .immediate)
|
||||
|
@ -199,6 +199,9 @@ public class WallpaperGalleryController: ViewController {
|
||||
private var patternInitialWallpaper: TelegramWallpaper?
|
||||
private var patternPanelEnabled = false
|
||||
private var colorsPanelEnabled = false
|
||||
|
||||
private var savedPatternWallpaper: TelegramWallpaper?
|
||||
private var savedPatternIntensity: Int32?
|
||||
|
||||
public init(context: AccountContext, source: WallpaperListSource) {
|
||||
self.context = context
|
||||
@ -600,6 +603,10 @@ public class WallpaperGalleryController: ViewController {
|
||||
|
||||
self.galleryNode.modalAnimateIn()
|
||||
self.bindCentralItemNode(animated: false, updated: false)
|
||||
|
||||
if let centralItemNode = self.galleryNode.pager.centralItemNode() as? WallpaperGalleryItemNode {
|
||||
centralItemNode.animateWallpaperAppeared()
|
||||
}
|
||||
}
|
||||
|
||||
private func bindCentralItemNode(animated: Bool, updated: Bool) {
|
||||
@ -615,6 +622,11 @@ public class WallpaperGalleryController: ViewController {
|
||||
strongSelf.colorsPanelEnabled = false
|
||||
strongSelf.colorsPanelNode?.view.endEditing(true)
|
||||
|
||||
if !enabled {
|
||||
strongSelf.savedPatternWallpaper = initialWallpaper
|
||||
strongSelf.savedPatternIntensity = initialWallpaper.settings?.intensity
|
||||
}
|
||||
|
||||
strongSelf.patternInitialWallpaper = enabled ? initialWallpaper : nil
|
||||
switch initialWallpaper {
|
||||
case let .color(color):
|
||||
@ -631,7 +643,7 @@ public class WallpaperGalleryController: ViewController {
|
||||
strongSelf.galleryNode.scrollView.isScrollEnabled = !enabled
|
||||
if enabled {
|
||||
strongSelf.patternPanelNode?.updateWallpapers()
|
||||
strongSelf.patternPanelNode?.didAppear()
|
||||
strongSelf.patternPanelNode?.didAppear(initialWallpaper: strongSelf.savedPatternWallpaper, intensity: strongSelf.savedPatternIntensity)
|
||||
} else {
|
||||
switch initialWallpaper {
|
||||
case .color, .gradient:
|
||||
@ -657,6 +669,9 @@ public class WallpaperGalleryController: ViewController {
|
||||
strongSelf.patternPanelEnabled = false
|
||||
strongSelf.colorsPanelEnabled = !strongSelf.colorsPanelEnabled
|
||||
strongSelf.galleryNode.scrollView.isScrollEnabled = !strongSelf.colorsPanelEnabled
|
||||
if !strongSelf.colorsPanelEnabled {
|
||||
strongSelf.colorsPanelNode?.view.endEditing(true)
|
||||
}
|
||||
|
||||
if strongSelf.colorsPanelEnabled {
|
||||
strongSelf.colorsPanelNode?.updateState({ _ in
|
||||
@ -700,6 +715,7 @@ public class WallpaperGalleryController: ViewController {
|
||||
if updated {
|
||||
if self.colorsPanelEnabled || self.patternPanelEnabled {
|
||||
self.colorsPanelEnabled = false
|
||||
self.colorsPanelNode?.view.endEditing(true)
|
||||
self.patternPanelEnabled = false
|
||||
|
||||
if let (layout, _) = self.validLayout {
|
||||
@ -838,6 +854,10 @@ public class WallpaperGalleryController: ViewController {
|
||||
if let pattern = pattern, case let .file(file) = pattern {
|
||||
let newSettings = WallpaperSettings(blur: file.settings.blur, motion: file.settings.motion, colors: colors, intensity: intensity)
|
||||
let newWallpaper = TelegramWallpaper.file(id: file.id, accessHash: file.accessHash, isCreator: file.isCreator, isDefault: file.isDefault, isPattern: pattern.isPattern, isDark: file.isDark, slug: file.slug, file: file.file, settings: newSettings)
|
||||
|
||||
strongSelf.savedPatternWallpaper = newWallpaper
|
||||
strongSelf.savedPatternIntensity = intensity
|
||||
|
||||
strongSelf.updateEntries(wallpaper: newWallpaper, preview: preview)
|
||||
}
|
||||
default:
|
||||
|
@ -126,6 +126,8 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
|
||||
|
||||
private let playButtonPlayImage: UIImage?
|
||||
private let playButtonRotateImage: UIImage?
|
||||
|
||||
private var isReadyDisposable: Disposable?
|
||||
|
||||
init(context: AccountContext) {
|
||||
self.context = context
|
||||
@ -154,6 +156,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
|
||||
self.patternButtonNode.setEnabled(false)
|
||||
|
||||
self.colorsButtonNode = WallpaperOptionButtonNode(title: self.presentationData.strings.WallpaperPreview_WallpaperColors, value: .colors(false, [.clear]))
|
||||
|
||||
self.playButtonBackgroundNode = NavigationBackgroundNode(color: UIColor(white: 0.0, alpha: 0.3))
|
||||
self.playButtonNode = HighlightableButtonNode()
|
||||
self.playButtonNode.insertSubnode(self.playButtonBackgroundNode, at: 0)
|
||||
@ -192,15 +195,23 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
|
||||
self.clipsToBounds = true
|
||||
self.backgroundColor = .black
|
||||
|
||||
self.imageNode.imageUpdated = { [weak self] _ in
|
||||
self?._ready.set(.single(Void()))
|
||||
self.imageNode.imageUpdated = { [weak self] image in
|
||||
if image != nil {
|
||||
self?._ready.set(.single(Void()))
|
||||
}
|
||||
}
|
||||
self.isReadyDisposable = (self.nativeNode.isReady
|
||||
|> filter { $0 }
|
||||
|> take(1)
|
||||
|> deliverOnMainQueue).start(next: { [weak self] _ in
|
||||
self?._ready.set(.single(Void()))
|
||||
})
|
||||
|
||||
self.imageNode.view.contentMode = .scaleAspectFill
|
||||
self.imageNode.clipsToBounds = true
|
||||
|
||||
self.addSubnode(self.wrapperNode)
|
||||
self.addSubnode(self.statusNode)
|
||||
//self.addSubnode(self.statusNode)
|
||||
self.addSubnode(self.messagesContainerNode)
|
||||
|
||||
self.addSubnode(self.blurButtonNode)
|
||||
@ -220,6 +231,7 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
|
||||
self.fetchDisposable.dispose()
|
||||
self.statusDisposable.dispose()
|
||||
self.colorDisposable.dispose()
|
||||
self.isReadyDisposable?.dispose()
|
||||
}
|
||||
|
||||
var cropRect: CGRect? {
|
||||
@ -1159,4 +1171,8 @@ final class WallpaperGalleryItemNode: GalleryItemNode {
|
||||
override func visibilityUpdated(isVisible: Bool) {
|
||||
super.visibilityUpdated(isVisible: isVisible)
|
||||
}
|
||||
|
||||
func animateWallpaperAppeared() {
|
||||
self.nativeNode.animateEvent(transition: .animated(duration: 2.0, curve: .spring), extendAnimation: true)
|
||||
}
|
||||
}
|
||||
|
@ -13,6 +13,29 @@ import MergeLists
|
||||
private let itemSize = CGSize(width: 88.0, height: 88.0)
|
||||
private let inset: CGFloat = 12.0
|
||||
|
||||
private func intensityToSliderValue(_ value: Int32, allowDark: Bool) -> CGFloat {
|
||||
if allowDark {
|
||||
if value < 0 {
|
||||
return max(0.0, min(100.0, CGFloat(abs(value))))
|
||||
} else {
|
||||
return 100.0 + max(0.0, min(100.0, CGFloat(value)))
|
||||
}
|
||||
} else {
|
||||
return CGFloat(max(value, 0)) * 2.0
|
||||
}
|
||||
}
|
||||
|
||||
private func sliderValueToIntensity(_ value: CGFloat, allowDark: Bool) -> Int32 {
|
||||
if allowDark {
|
||||
if value < 100.0 {
|
||||
return -Int32(value)
|
||||
} else {
|
||||
return Int32(value - 100.0)
|
||||
}
|
||||
} else {
|
||||
return Int32(value / 2.0)
|
||||
}
|
||||
}
|
||||
|
||||
private struct WallpaperPatternEntry: Comparable, Identifiable {
|
||||
let index: Int
|
||||
@ -214,9 +237,12 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
|
||||
|
||||
var patternChanged: ((TelegramWallpaper?, Int32?, Bool) -> Void)?
|
||||
|
||||
private let allowDark: Bool
|
||||
|
||||
init(context: AccountContext, theme: PresentationTheme, strings: PresentationStrings) {
|
||||
self.context = context
|
||||
self.theme = theme
|
||||
self.allowDark = theme.overallDarkAppearance
|
||||
|
||||
self.backgroundNode = NavigationBackgroundNode(color: theme.chat.inputPanel.panelBackgroundColor)
|
||||
|
||||
@ -275,16 +301,18 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
|
||||
sliderView.disableSnapToPositions = true
|
||||
sliderView.trackCornerRadius = 1.0
|
||||
sliderView.lineSize = 2.0
|
||||
sliderView.minimumValue = 0.0
|
||||
sliderView.startValue = 0.0
|
||||
sliderView.minimumValue = 0.0
|
||||
sliderView.maximumValue = 200.0
|
||||
sliderView.positionsCount = 3
|
||||
if self.allowDark {
|
||||
sliderView.positionsCount = 3
|
||||
}
|
||||
sliderView.useLinesForPositions = true
|
||||
sliderView.value = 150.0
|
||||
sliderView.value = intensityToSliderValue(50, allowDark: self.allowDark)
|
||||
sliderView.disablesInteractiveTransitionGestureRecognizer = true
|
||||
sliderView.backgroundColor = .clear
|
||||
sliderView.backColor = self.theme.list.disclosureArrowColor
|
||||
sliderView.trackColor = self.theme.list.itemAccentColor
|
||||
sliderView.trackColor = sliderView.backColor//self.theme.list.itemAccentColor
|
||||
|
||||
self.view.addSubview(sliderView)
|
||||
sliderView.addTarget(self, action: #selector(self.sliderValueChanged), for: .valueChanged)
|
||||
@ -335,7 +363,7 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
|
||||
if let strongSelf = self {
|
||||
strongSelf.currentWallpaper = updatedWallpaper
|
||||
if let sliderView = strongSelf.sliderView {
|
||||
strongSelf.patternChanged?(updatedWallpaper, Int32(sliderView.value - 100.0), false)
|
||||
strongSelf.patternChanged?(updatedWallpaper, sliderValueToIntensity(sliderView.value, allowDark: strongSelf.allowDark), false)
|
||||
}
|
||||
if let subnodes = strongSelf.scrollNode.subnodes {
|
||||
for case let subnode as SettingsThemeWallpaperNode in subnodes {
|
||||
@ -377,12 +405,19 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
if let wallpaper = self.currentWallpaper {
|
||||
self.patternChanged?(wallpaper, Int32(sliderView.value - 100.0), sliderView.isTracking)
|
||||
self.patternChanged?(wallpaper, sliderValueToIntensity(sliderView.value, allowDark: self.allowDark), sliderView.isTracking)
|
||||
}
|
||||
}
|
||||
|
||||
func didAppear(initialWallpaper: TelegramWallpaper? = nil, intensity: Int32? = nil) {
|
||||
var wallpaper = initialWallpaper ?? self.wallpapers.first
|
||||
let wallpaper: TelegramWallpaper?
|
||||
|
||||
switch initialWallpaper {
|
||||
case let .file(id, accessHash, isCreator, isDefault, isPattern, isDark, slug, file, _):
|
||||
wallpaper = .file(id: id, accessHash: accessHash, isCreator: isCreator, isDefault: isDefault, isPattern: isPattern, isDark: isDark, slug: slug, file: file, settings: self.wallpapers[0].settings ?? WallpaperSettings())
|
||||
default:
|
||||
wallpaper = self.wallpapers.first
|
||||
}
|
||||
|
||||
if let wallpaper = wallpaper {
|
||||
var selectedFileId: Int64?
|
||||
@ -391,7 +426,7 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
self.currentWallpaper = wallpaper
|
||||
self.sliderView?.value = CGFloat(intensity.flatMap { $0 + 100 } ?? 150)
|
||||
self.sliderView?.value = intensity.flatMap { intensityToSliderValue($0, allowDark: self.allowDark) } ?? intensityToSliderValue(50, allowDark: self.allowDark)
|
||||
|
||||
self.scrollNode.view.contentOffset = CGPoint()
|
||||
|
||||
@ -407,8 +442,8 @@ final class WallpaperPatternPanelNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
if initialWallpaper == nil, let wallpaper = self.currentWallpaper, let sliderView = self.sliderView {
|
||||
self.patternChanged?(wallpaper, Int32(sliderView.value - 100.0), false)
|
||||
if let wallpaper = self.currentWallpaper, let sliderView = self.sliderView {
|
||||
self.patternChanged?(wallpaper, sliderValueToIntensity(sliderView.value, allowDark: self.allowDark), false)
|
||||
}
|
||||
|
||||
if let selectedNode = selectedNode {
|
||||
|
@ -211,8 +211,12 @@ final class StickerPackPreviewControllerNode: ViewControllerTracingNode, UIScrol
|
||||
if let stickerPack = strongSelf.stickerPack, case let .result(info, _, _) = stickerPack, info.id.namespace == Namespaces.ItemCollection.CloudStickerPacks {
|
||||
if strongSelf.sendSticker != nil {
|
||||
menuItems.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.StickerPack_Send, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Resend"), color: theme.contextMenu.primaryColor) }, action: { [weak self] _, f in
|
||||
if let strongSelf = self, let peekController = strongSelf.peekController, let animationNode = (peekController.contentNode as? StickerPreviewPeekContentNode)?.animationNode {
|
||||
let _ = strongSelf.sendSticker?(.standalone(media: item.file), animationNode, animationNode.bounds)
|
||||
if let strongSelf = self, let peekController = strongSelf.peekController {
|
||||
if let animationNode = (peekController.contentNode as? StickerPreviewPeekContentNode)?.animationNode {
|
||||
let _ = strongSelf.sendSticker?(.standalone(media: item.file), animationNode, animationNode.bounds)
|
||||
} else if let imageNode = (peekController.contentNode as? StickerPreviewPeekContentNode)?.imageNode {
|
||||
let _ = strongSelf.sendSticker?(.standalone(media: item.file), imageNode, imageNode.bounds)
|
||||
}
|
||||
}
|
||||
f(.default)
|
||||
})))
|
||||
|
@ -70,7 +70,7 @@ public final class StickerPreviewPeekContentNode: ASDisplayNode, PeekControllerC
|
||||
private let item: StickerPreviewPeekItem
|
||||
|
||||
private var textNode: ASTextNode
|
||||
private var imageNode: TransformImageNode
|
||||
public var imageNode: TransformImageNode
|
||||
public var animationNode: AnimatedStickerNode?
|
||||
|
||||
private var containerLayout: (ContainerViewLayout, CGFloat)?
|
||||
|
@ -859,3 +859,59 @@ public final class EmptyMediaResource: TelegramMediaResource {
|
||||
return to is EmptyMediaResource
|
||||
}
|
||||
}
|
||||
|
||||
public struct WallpaperDataResourceId: MediaResourceId {
|
||||
public var uniqueId: String {
|
||||
return "wallpaper-\(self.slug)"
|
||||
}
|
||||
|
||||
public var hashValue: Int {
|
||||
return self.slug.hashValue
|
||||
}
|
||||
|
||||
public var slug: String
|
||||
|
||||
public init(slug: String) {
|
||||
self.slug = slug
|
||||
}
|
||||
|
||||
public func isEqual(to: MediaResourceId) -> Bool {
|
||||
guard let to = to as? WallpaperDataResourceId else {
|
||||
return false
|
||||
}
|
||||
if self.slug != to.slug {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
public final class WallpaperDataResource: TelegramMediaResource {
|
||||
public let slug: String
|
||||
|
||||
public init(slug: String) {
|
||||
self.slug = slug
|
||||
}
|
||||
|
||||
public init(decoder: PostboxDecoder) {
|
||||
self.slug = decoder.decodeStringForKey("s", orElse: "")
|
||||
}
|
||||
|
||||
public func encode(_ encoder: PostboxEncoder) {
|
||||
encoder.encodeString(self.slug, forKey: "s")
|
||||
}
|
||||
|
||||
public var id: MediaResourceId {
|
||||
return WallpaperDataResourceId(slug: self.slug)
|
||||
}
|
||||
|
||||
public func isEqual(to: MediaResource) -> Bool {
|
||||
guard let to = to as? WallpaperDataResource else {
|
||||
return false
|
||||
}
|
||||
if self.slug != to.slug {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
@ -249,7 +249,7 @@ public struct SecretChatLayerNegotiationState: PostboxCoding, Equatable {
|
||||
}
|
||||
|
||||
public init(decoder: PostboxDecoder) {
|
||||
self.activeLayer = SecretChatSequenceBasedLayer(rawValue: decoder.decodeInt32ForKey("a", orElse: 0)) ?? .layer46
|
||||
self.activeLayer = SecretChatSequenceBasedLayer(rawValue: decoder.decodeInt32ForKey("a", orElse: 0)) ?? .layer73
|
||||
self.locallyRequestedLayer = decoder.decodeOptionalInt32ForKey("lr")
|
||||
self.remotelyRequestedLayer = decoder.decodeOptionalInt32ForKey("rr")
|
||||
}
|
||||
|
@ -142,7 +142,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
|
||||
dict[767652808] = { return Api.InputEncryptedFile.parse_inputEncryptedFileBigUploaded($0) }
|
||||
dict[1304052993] = { return Api.account.Takeout.parse_takeout($0) }
|
||||
dict[-1456996667] = { return Api.messages.InactiveChats.parse_inactiveChats($0) }
|
||||
dict[-1464184409] = { return Api.GroupCallParticipant.parse_groupCallParticipant($0) }
|
||||
dict[-341428482] = { return Api.GroupCallParticipant.parse_groupCallParticipant($0) }
|
||||
dict[1443858741] = { return Api.messages.SentEncryptedMessage.parse_sentEncryptedMessage($0) }
|
||||
dict[-1802240206] = { return Api.messages.SentEncryptedMessage.parse_sentEncryptedFile($0) }
|
||||
dict[289586518] = { return Api.SavedContact.parse_savedPhoneContact($0) }
|
||||
@ -283,6 +283,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
|
||||
dict[192428418] = { return Api.Update.parse_updateGroupCallConnection($0) }
|
||||
dict[136574537] = { return Api.messages.VotesList.parse_votesList($0) }
|
||||
dict[1558266229] = { return Api.PopularContact.parse_popularContact($0) }
|
||||
dict[-592373577] = { return Api.GroupCallParticipantVideoSourceGroup.parse_groupCallParticipantVideoSourceGroup($0) }
|
||||
dict[-373643672] = { return Api.FolderPeer.parse_folderPeer($0) }
|
||||
dict[367766557] = { return Api.ChannelParticipant.parse_channelParticipant($0) }
|
||||
dict[-1557620115] = { return Api.ChannelParticipant.parse_channelParticipantSelf($0) }
|
||||
@ -579,11 +580,12 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
|
||||
dict[-1160215659] = { return Api.InputMessage.parse_inputMessageReplyTo($0) }
|
||||
dict[-2037963464] = { return Api.InputMessage.parse_inputMessagePinned($0) }
|
||||
dict[-1392895362] = { return Api.InputMessage.parse_inputMessageCallbackQuery($0) }
|
||||
dict[2028213859] = { return Api.GroupCallParticipantVideo.parse_groupCallParticipantVideo($0) }
|
||||
dict[-58224696] = { return Api.PhoneCallProtocol.parse_phoneCallProtocol($0) }
|
||||
dict[-1237848657] = { return Api.StatsDateRangeDays.parse_statsDateRangeDays($0) }
|
||||
dict[-275956116] = { return Api.messages.AffectedFoundMessages.parse_affectedFoundMessages($0) }
|
||||
dict[-1539849235] = { return Api.WallPaper.parse_wallPaper($0) }
|
||||
dict[-1963717851] = { return Api.WallPaper.parse_wallPaperNoFile($0) }
|
||||
dict[-528465642] = { return Api.WallPaper.parse_wallPaperNoFile($0) }
|
||||
dict[-1938715001] = { return Api.messages.Messages.parse_messages($0) }
|
||||
dict[978610270] = { return Api.messages.Messages.parse_messagesSlice($0) }
|
||||
dict[1682413576] = { return Api.messages.Messages.parse_channelMessages($0) }
|
||||
@ -690,7 +692,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
|
||||
dict[2104790276] = { return Api.DataJSON.parse_dataJSON($0) }
|
||||
dict[-433014407] = { return Api.InputWallPaper.parse_inputWallPaper($0) }
|
||||
dict[1913199744] = { return Api.InputWallPaper.parse_inputWallPaperSlug($0) }
|
||||
dict[-2077770836] = { return Api.InputWallPaper.parse_inputWallPaperNoFile($0) }
|
||||
dict[-1770371538] = { return Api.InputWallPaper.parse_inputWallPaperNoFile($0) }
|
||||
dict[-1118798639] = { return Api.InputThemeSettings.parse_inputThemeSettings($0) }
|
||||
dict[1251549527] = { return Api.InputStickeredMedia.parse_inputStickeredMediaPhoto($0) }
|
||||
dict[70813275] = { return Api.InputStickeredMedia.parse_inputStickeredMediaDocument($0) }
|
||||
@ -1085,6 +1087,8 @@ public struct Api {
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.PopularContact:
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.GroupCallParticipantVideoSourceGroup:
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.FolderPeer:
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.ChannelParticipant:
|
||||
@ -1333,6 +1337,8 @@ public struct Api {
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.InputMessage:
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.GroupCallParticipantVideo:
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.PhoneCallProtocol:
|
||||
_1.serialize(buffer, boxed)
|
||||
case let _1 as Api.StatsDateRangeDays:
|
||||
|
@ -3604,13 +3604,13 @@ public extension Api {
|
||||
|
||||
}
|
||||
public enum GroupCallParticipant: TypeConstructorDescription {
|
||||
case groupCallParticipant(flags: Int32, peer: Api.Peer, date: Int32, activeDate: Int32?, source: Int32, volume: Int32?, about: String?, raiseHandRating: Int64?, video: Api.DataJSON?, presentation: Api.DataJSON?)
|
||||
case groupCallParticipant(flags: Int32, peer: Api.Peer, date: Int32, activeDate: Int32?, source: Int32, volume: Int32?, about: String?, raiseHandRating: Int64?, video: Api.GroupCallParticipantVideo?, presentation: Api.GroupCallParticipantVideo?)
|
||||
|
||||
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
|
||||
switch self {
|
||||
case .groupCallParticipant(let flags, let peer, let date, let activeDate, let source, let volume, let about, let raiseHandRating, let video, let presentation):
|
||||
if boxed {
|
||||
buffer.appendInt32(-1464184409)
|
||||
buffer.appendInt32(-341428482)
|
||||
}
|
||||
serializeInt32(flags, buffer: buffer, boxed: false)
|
||||
peer.serialize(buffer, true)
|
||||
@ -3652,13 +3652,13 @@ public extension Api {
|
||||
if Int(_1!) & Int(1 << 11) != 0 {_7 = parseString(reader) }
|
||||
var _8: Int64?
|
||||
if Int(_1!) & Int(1 << 13) != 0 {_8 = reader.readInt64() }
|
||||
var _9: Api.DataJSON?
|
||||
var _9: Api.GroupCallParticipantVideo?
|
||||
if Int(_1!) & Int(1 << 6) != 0 {if let signature = reader.readInt32() {
|
||||
_9 = Api.parse(reader, signature: signature) as? Api.DataJSON
|
||||
_9 = Api.parse(reader, signature: signature) as? Api.GroupCallParticipantVideo
|
||||
} }
|
||||
var _10: Api.DataJSON?
|
||||
var _10: Api.GroupCallParticipantVideo?
|
||||
if Int(_1!) & Int(1 << 14) != 0 {if let signature = reader.readInt32() {
|
||||
_10 = Api.parse(reader, signature: signature) as? Api.DataJSON
|
||||
_10 = Api.parse(reader, signature: signature) as? Api.GroupCallParticipantVideo
|
||||
} }
|
||||
let _c1 = _1 != nil
|
||||
let _c2 = _2 != nil
|
||||
@ -7358,6 +7358,50 @@ public extension Api {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
public enum GroupCallParticipantVideoSourceGroup: TypeConstructorDescription {
|
||||
case groupCallParticipantVideoSourceGroup(semantics: String, sources: [Int32])
|
||||
|
||||
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
|
||||
switch self {
|
||||
case .groupCallParticipantVideoSourceGroup(let semantics, let sources):
|
||||
if boxed {
|
||||
buffer.appendInt32(-592373577)
|
||||
}
|
||||
serializeString(semantics, buffer: buffer, boxed: false)
|
||||
buffer.appendInt32(481674261)
|
||||
buffer.appendInt32(Int32(sources.count))
|
||||
for item in sources {
|
||||
serializeInt32(item, buffer: buffer, boxed: false)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
public func descriptionFields() -> (String, [(String, Any)]) {
|
||||
switch self {
|
||||
case .groupCallParticipantVideoSourceGroup(let semantics, let sources):
|
||||
return ("groupCallParticipantVideoSourceGroup", [("semantics", semantics), ("sources", sources)])
|
||||
}
|
||||
}
|
||||
|
||||
public static func parse_groupCallParticipantVideoSourceGroup(_ reader: BufferReader) -> GroupCallParticipantVideoSourceGroup? {
|
||||
var _1: String?
|
||||
_1 = parseString(reader)
|
||||
var _2: [Int32]?
|
||||
if let _ = reader.readInt32() {
|
||||
_2 = Api.parseVector(reader, elementSignature: -1471112230, elementType: Int32.self)
|
||||
}
|
||||
let _c1 = _1 != nil
|
||||
let _c2 = _2 != nil
|
||||
if _c1 && _c2 {
|
||||
return Api.GroupCallParticipantVideoSourceGroup.groupCallParticipantVideoSourceGroup(semantics: _1!, sources: _2!)
|
||||
}
|
||||
else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
public enum FolderPeer: TypeConstructorDescription {
|
||||
case folderPeer(peer: Api.Peer, folderId: Int32)
|
||||
@ -14888,6 +14932,54 @@ public extension Api {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
public enum GroupCallParticipantVideo: TypeConstructorDescription {
|
||||
case groupCallParticipantVideo(flags: Int32, endpoint: String, sourceGroups: [Api.GroupCallParticipantVideoSourceGroup])
|
||||
|
||||
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
|
||||
switch self {
|
||||
case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups):
|
||||
if boxed {
|
||||
buffer.appendInt32(2028213859)
|
||||
}
|
||||
serializeInt32(flags, buffer: buffer, boxed: false)
|
||||
serializeString(endpoint, buffer: buffer, boxed: false)
|
||||
buffer.appendInt32(481674261)
|
||||
buffer.appendInt32(Int32(sourceGroups.count))
|
||||
for item in sourceGroups {
|
||||
item.serialize(buffer, true)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
public func descriptionFields() -> (String, [(String, Any)]) {
|
||||
switch self {
|
||||
case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups):
|
||||
return ("groupCallParticipantVideo", [("flags", flags), ("endpoint", endpoint), ("sourceGroups", sourceGroups)])
|
||||
}
|
||||
}
|
||||
|
||||
public static func parse_groupCallParticipantVideo(_ reader: BufferReader) -> GroupCallParticipantVideo? {
|
||||
var _1: Int32?
|
||||
_1 = reader.readInt32()
|
||||
var _2: String?
|
||||
_2 = parseString(reader)
|
||||
var _3: [Api.GroupCallParticipantVideoSourceGroup]?
|
||||
if let _ = reader.readInt32() {
|
||||
_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.GroupCallParticipantVideoSourceGroup.self)
|
||||
}
|
||||
let _c1 = _1 != nil
|
||||
let _c2 = _2 != nil
|
||||
let _c3 = _3 != nil
|
||||
if _c1 && _c2 && _c3 {
|
||||
return Api.GroupCallParticipantVideo.groupCallParticipantVideo(flags: _1!, endpoint: _2!, sourceGroups: _3!)
|
||||
}
|
||||
else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
public enum PhoneCallProtocol: TypeConstructorDescription {
|
||||
case phoneCallProtocol(flags: Int32, minLayer: Int32, maxLayer: Int32, libraryVersions: [String])
|
||||
@ -14981,7 +15073,7 @@ public extension Api {
|
||||
}
|
||||
public enum WallPaper: TypeConstructorDescription {
|
||||
case wallPaper(id: Int64, flags: Int32, accessHash: Int64, slug: String, document: Api.Document, settings: Api.WallPaperSettings?)
|
||||
case wallPaperNoFile(flags: Int32, settings: Api.WallPaperSettings?)
|
||||
case wallPaperNoFile(id: Int64, flags: Int32, settings: Api.WallPaperSettings?)
|
||||
|
||||
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
|
||||
switch self {
|
||||
@ -14996,10 +15088,11 @@ public extension Api {
|
||||
document.serialize(buffer, true)
|
||||
if Int(flags) & Int(1 << 2) != 0 {settings!.serialize(buffer, true)}
|
||||
break
|
||||
case .wallPaperNoFile(let flags, let settings):
|
||||
case .wallPaperNoFile(let id, let flags, let settings):
|
||||
if boxed {
|
||||
buffer.appendInt32(-1963717851)
|
||||
buffer.appendInt32(-528465642)
|
||||
}
|
||||
serializeInt64(id, buffer: buffer, boxed: false)
|
||||
serializeInt32(flags, buffer: buffer, boxed: false)
|
||||
if Int(flags) & Int(1 << 2) != 0 {settings!.serialize(buffer, true)}
|
||||
break
|
||||
@ -15010,8 +15103,8 @@ public extension Api {
|
||||
switch self {
|
||||
case .wallPaper(let id, let flags, let accessHash, let slug, let document, let settings):
|
||||
return ("wallPaper", [("id", id), ("flags", flags), ("accessHash", accessHash), ("slug", slug), ("document", document), ("settings", settings)])
|
||||
case .wallPaperNoFile(let flags, let settings):
|
||||
return ("wallPaperNoFile", [("flags", flags), ("settings", settings)])
|
||||
case .wallPaperNoFile(let id, let flags, let settings):
|
||||
return ("wallPaperNoFile", [("id", id), ("flags", flags), ("settings", settings)])
|
||||
}
|
||||
}
|
||||
|
||||
@ -15046,16 +15139,19 @@ public extension Api {
|
||||
}
|
||||
}
|
||||
public static func parse_wallPaperNoFile(_ reader: BufferReader) -> WallPaper? {
|
||||
var _1: Int32?
|
||||
_1 = reader.readInt32()
|
||||
var _2: Api.WallPaperSettings?
|
||||
if Int(_1!) & Int(1 << 2) != 0 {if let signature = reader.readInt32() {
|
||||
_2 = Api.parse(reader, signature: signature) as? Api.WallPaperSettings
|
||||
var _1: Int64?
|
||||
_1 = reader.readInt64()
|
||||
var _2: Int32?
|
||||
_2 = reader.readInt32()
|
||||
var _3: Api.WallPaperSettings?
|
||||
if Int(_2!) & Int(1 << 2) != 0 {if let signature = reader.readInt32() {
|
||||
_3 = Api.parse(reader, signature: signature) as? Api.WallPaperSettings
|
||||
} }
|
||||
let _c1 = _1 != nil
|
||||
let _c2 = (Int(_1!) & Int(1 << 2) == 0) || _2 != nil
|
||||
if _c1 && _c2 {
|
||||
return Api.WallPaper.wallPaperNoFile(flags: _1!, settings: _2)
|
||||
let _c2 = _2 != nil
|
||||
let _c3 = (Int(_2!) & Int(1 << 2) == 0) || _3 != nil
|
||||
if _c1 && _c2 && _c3 {
|
||||
return Api.WallPaper.wallPaperNoFile(id: _1!, flags: _2!, settings: _3)
|
||||
}
|
||||
else {
|
||||
return nil
|
||||
@ -18028,7 +18124,7 @@ public extension Api {
|
||||
public enum InputWallPaper: TypeConstructorDescription {
|
||||
case inputWallPaper(id: Int64, accessHash: Int64)
|
||||
case inputWallPaperSlug(slug: String)
|
||||
case inputWallPaperNoFile
|
||||
case inputWallPaperNoFile(id: Int64)
|
||||
|
||||
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
|
||||
switch self {
|
||||
@ -18045,11 +18141,11 @@ public extension Api {
|
||||
}
|
||||
serializeString(slug, buffer: buffer, boxed: false)
|
||||
break
|
||||
case .inputWallPaperNoFile:
|
||||
case .inputWallPaperNoFile(let id):
|
||||
if boxed {
|
||||
buffer.appendInt32(-2077770836)
|
||||
buffer.appendInt32(-1770371538)
|
||||
}
|
||||
|
||||
serializeInt64(id, buffer: buffer, boxed: false)
|
||||
break
|
||||
}
|
||||
}
|
||||
@ -18060,8 +18156,8 @@ public extension Api {
|
||||
return ("inputWallPaper", [("id", id), ("accessHash", accessHash)])
|
||||
case .inputWallPaperSlug(let slug):
|
||||
return ("inputWallPaperSlug", [("slug", slug)])
|
||||
case .inputWallPaperNoFile:
|
||||
return ("inputWallPaperNoFile", [])
|
||||
case .inputWallPaperNoFile(let id):
|
||||
return ("inputWallPaperNoFile", [("id", id)])
|
||||
}
|
||||
}
|
||||
|
||||
@ -18091,7 +18187,15 @@ public extension Api {
|
||||
}
|
||||
}
|
||||
public static func parse_inputWallPaperNoFile(_ reader: BufferReader) -> InputWallPaper? {
|
||||
return Api.InputWallPaper.inputWallPaperNoFile
|
||||
var _1: Int64?
|
||||
_1 = reader.readInt64()
|
||||
let _c1 = _1 != nil
|
||||
if _c1 {
|
||||
return Api.InputWallPaper.inputWallPaperNoFile(id: _1!)
|
||||
}
|
||||
else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -7872,17 +7872,19 @@ public extension Api {
|
||||
})
|
||||
}
|
||||
|
||||
public static func editGroupCallParticipant(flags: Int32, call: Api.InputGroupCall, participant: Api.InputPeer, muted: Api.Bool?, volume: Int32?, raiseHand: Api.Bool?, videoMuted: Api.Bool?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
|
||||
public static func editGroupCallParticipant(flags: Int32, call: Api.InputGroupCall, participant: Api.InputPeer, muted: Api.Bool?, volume: Int32?, raiseHand: Api.Bool?, videoStopped: Api.Bool?, videoPaused: Api.Bool?, presentationPaused: Api.Bool?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
|
||||
let buffer = Buffer()
|
||||
buffer.appendInt32(-1362751260)
|
||||
buffer.appendInt32(-1524155713)
|
||||
serializeInt32(flags, buffer: buffer, boxed: false)
|
||||
call.serialize(buffer, true)
|
||||
participant.serialize(buffer, true)
|
||||
if Int(flags) & Int(1 << 0) != 0 {muted!.serialize(buffer, true)}
|
||||
if Int(flags) & Int(1 << 1) != 0 {serializeInt32(volume!, buffer: buffer, boxed: false)}
|
||||
if Int(flags) & Int(1 << 2) != 0 {raiseHand!.serialize(buffer, true)}
|
||||
if Int(flags) & Int(1 << 3) != 0 {videoMuted!.serialize(buffer, true)}
|
||||
return (FunctionDescription(name: "phone.editGroupCallParticipant", parameters: [("flags", flags), ("call", call), ("participant", participant), ("muted", muted), ("volume", volume), ("raiseHand", raiseHand), ("videoMuted", videoMuted)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
|
||||
if Int(flags) & Int(1 << 3) != 0 {videoStopped!.serialize(buffer, true)}
|
||||
if Int(flags) & Int(1 << 4) != 0 {videoPaused!.serialize(buffer, true)}
|
||||
if Int(flags) & Int(1 << 5) != 0 {presentationPaused!.serialize(buffer, true)}
|
||||
return (FunctionDescription(name: "phone.editGroupCallParticipant", parameters: [("flags", flags), ("call", call), ("participant", participant), ("muted", muted), ("volume", volume), ("raiseHand", raiseHand), ("videoStopped", videoStopped), ("videoPaused", videoPaused), ("presentationPaused", presentationPaused)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
|
||||
let reader = BufferReader(buffer)
|
||||
var result: Api.Updates?
|
||||
if let signature = reader.readInt32() {
|
||||
|
@ -47,6 +47,7 @@ swift_library(
|
||||
"//submodules/MapResourceToAvatarSizes:MapResourceToAvatarSizes",
|
||||
"//submodules/TextFormat:TextFormat",
|
||||
"//submodules/Markdown:Markdown",
|
||||
"//submodules/ChatTitleActivityNode:ChatTitleActivityNode",
|
||||
],
|
||||
visibility = [
|
||||
"//visibility:public",
|
||||
|
@ -11,6 +11,7 @@ enum CallControllerButtonsSpeakerMode: Equatable {
|
||||
case generic
|
||||
case airpods
|
||||
case airpodsPro
|
||||
case airpodsMax
|
||||
}
|
||||
|
||||
case none
|
||||
@ -51,6 +52,7 @@ private enum ButtonDescription: Equatable {
|
||||
case bluetooth
|
||||
case airpods
|
||||
case airpodsPro
|
||||
case airpodsMax
|
||||
case headphones
|
||||
}
|
||||
|
||||
@ -215,6 +217,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
|
||||
soundOutput = .airpods
|
||||
case .airpodsPro:
|
||||
soundOutput = .airpodsPro
|
||||
case .airpodsMax:
|
||||
soundOutput = .airpodsMax
|
||||
}
|
||||
}
|
||||
|
||||
@ -306,6 +310,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
|
||||
soundOutput = .airpods
|
||||
case .airpodsPro:
|
||||
soundOutput = .airpodsPro
|
||||
case .airpodsMax:
|
||||
soundOutput = .airpodsMax
|
||||
}
|
||||
}
|
||||
|
||||
@ -362,6 +368,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
|
||||
soundOutput = .airpods
|
||||
case .airpodsPro:
|
||||
soundOutput = .airpodsPro
|
||||
case .airpodsMax:
|
||||
soundOutput = .airpodsMax
|
||||
}
|
||||
}
|
||||
|
||||
@ -468,6 +476,9 @@ final class CallControllerButtonsNode: ASDisplayNode {
|
||||
case .airpodsPro:
|
||||
image = .airpodsPro
|
||||
title = strings.Call_Audio
|
||||
case .airpodsMax:
|
||||
image = .airpodsMax
|
||||
title = strings.Call_Audio
|
||||
case .headphones:
|
||||
image = .headphones
|
||||
title = strings.Call_Audio
|
||||
|
@ -16,6 +16,7 @@ import CallsEmoji
|
||||
import TooltipUI
|
||||
import AlertUI
|
||||
import PresentationDataUtils
|
||||
import DeviceAccess
|
||||
|
||||
private func interpolateFrame(from fromValue: CGRect, to toValue: CGRect, t: CGFloat) -> CGRect {
|
||||
return CGRect(x: floorToScreenPixels(toValue.origin.x * t + fromValue.origin.x * (1.0 - t)), y: floorToScreenPixels(toValue.origin.y * t + fromValue.origin.y * (1.0 - t)), width: floorToScreenPixels(toValue.size.width * t + fromValue.size.width * (1.0 - t)), height: floorToScreenPixels(toValue.size.height * t + fromValue.size.height * (1.0 - t)))
|
||||
@ -559,25 +560,36 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
|
||||
switch callState.state {
|
||||
case .active:
|
||||
if strongSelf.outgoingVideoNodeValue == nil {
|
||||
let proceed = {
|
||||
strongSelf.displayedCameraConfirmation = true
|
||||
switch callState.videoState {
|
||||
case .inactive:
|
||||
strongSelf.isRequestingVideo = true
|
||||
strongSelf.updateButtonsMode()
|
||||
default:
|
||||
break
|
||||
DeviceAccess.authorizeAccess(to: .camera(.videoCall), onlyCheck: true, presentationData: strongSelf.presentationData, present: { [weak self] c, a in
|
||||
if let strongSelf = self {
|
||||
strongSelf.present?(c)
|
||||
}
|
||||
strongSelf.call.requestVideo()
|
||||
}
|
||||
|
||||
if strongSelf.displayedCameraConfirmation {
|
||||
proceed()
|
||||
} else {
|
||||
strongSelf.present?(textAlertController(sharedContext: strongSelf.sharedContext, title: nil, text: strongSelf.presentationData.strings.Call_CameraConfirmationText, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Call_CameraConfirmationConfirm, action: {
|
||||
}, openSettings: { [weak self] in
|
||||
self?.sharedContext.applicationBindings.openSettings()
|
||||
}, _: { [weak self] ready in
|
||||
guard let strongSelf = self, ready else {
|
||||
return
|
||||
}
|
||||
let proceed = {
|
||||
strongSelf.displayedCameraConfirmation = true
|
||||
switch callState.videoState {
|
||||
case .inactive:
|
||||
strongSelf.isRequestingVideo = true
|
||||
strongSelf.updateButtonsMode()
|
||||
default:
|
||||
break
|
||||
}
|
||||
strongSelf.call.requestVideo()
|
||||
}
|
||||
|
||||
if strongSelf.displayedCameraConfirmation {
|
||||
proceed()
|
||||
})]))
|
||||
}
|
||||
} else {
|
||||
strongSelf.present?(textAlertController(sharedContext: strongSelf.sharedContext, title: nil, text: strongSelf.presentationData.strings.Call_CameraConfirmationText, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Call_CameraConfirmationConfirm, action: {
|
||||
proceed()
|
||||
})]))
|
||||
}
|
||||
})
|
||||
} else {
|
||||
strongSelf.call.disableVideo()
|
||||
strongSelf.cancelScheduledUIHiding()
|
||||
|
@ -30,7 +30,7 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
|
||||
private var effectView: UIVisualEffectView?
|
||||
private var isBlurred: Bool = false
|
||||
|
||||
|
||||
private var validLayout: (CGSize, LayoutMode)?
|
||||
|
||||
var tapped: (() -> Void)?
|
||||
@ -40,7 +40,7 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
return self.readyPromise.get()
|
||||
}
|
||||
|
||||
init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?) {
|
||||
init(videoView: PresentationCallVideoView, backdropVideoView: PresentationCallVideoView?, disabledText: String? = nil) {
|
||||
self.sourceContainerNode = PinchSourceContainerNode()
|
||||
self.containerNode = ASDisplayNode()
|
||||
self.videoViewContainer = UIView()
|
||||
@ -50,7 +50,7 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
self.backdropVideoViewContainer = UIView()
|
||||
self.backdropVideoViewContainer.isUserInteractionEnabled = false
|
||||
self.backdropVideoView = backdropVideoView
|
||||
|
||||
|
||||
super.init()
|
||||
|
||||
if let backdropVideoView = backdropVideoView {
|
||||
@ -72,7 +72,7 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
self.addSubnode(self.sourceContainerNode)
|
||||
self.containerNode.view.addSubview(self.videoViewContainer)
|
||||
self.sourceContainerNode.contentNode.addSubnode(self.containerNode)
|
||||
|
||||
|
||||
self.clipsToBounds = true
|
||||
|
||||
videoView.setOnFirstFrameReceived({ [weak self] _ in
|
||||
@ -139,14 +139,30 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
if withBackground {
|
||||
self.backgroundColor = .black
|
||||
}
|
||||
var snapshotView: UIView?
|
||||
if let snapshot = self.videoView.view.snapshotView(afterScreenUpdates: false) {
|
||||
snapshotView = snapshot
|
||||
snapshot.transform = self.videoView.view.transform
|
||||
snapshot.frame = self.videoView.view.frame
|
||||
self.videoView.view.superview?.insertSubview(snapshot, aboveSubview: self.videoView.view)
|
||||
}
|
||||
UIView.transition(with: withBackground ? self.videoViewContainer : self.view, duration: 0.4, options: [.transitionFlipFromLeft, .curveEaseOut], animations: {
|
||||
UIView.performWithoutAnimation {
|
||||
self.updateIsBlurred(isBlurred: true, light: false, animated: false)
|
||||
}
|
||||
}) { finished in
|
||||
self.backgroundColor = nil
|
||||
Queue.mainQueue().after(0.4) {
|
||||
self.updateIsBlurred(isBlurred: false)
|
||||
if let snapshotView = snapshotView {
|
||||
Queue.mainQueue().after(0.3) {
|
||||
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
|
||||
snapshotView?.removeFromSuperview()
|
||||
})
|
||||
self.updateIsBlurred(isBlurred: false)
|
||||
}
|
||||
} else {
|
||||
Queue.mainQueue().after(0.4) {
|
||||
self.updateIsBlurred(isBlurred: false)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -260,9 +276,10 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
let transformScale: CGFloat = rotatedVideoFrame.width / normalizedVideoSize.width
|
||||
transition.updateTransformScale(layer: self.videoViewContainer.layer, scale: transformScale)
|
||||
|
||||
|
||||
if let backdropVideoView = self.backdropVideoView {
|
||||
backdropVideoView.view.alpha = 0.995
|
||||
|
||||
let topFrame = rotatedVideoFrame
|
||||
|
||||
rotatedVideoSize = filledSize
|
||||
var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize)
|
||||
@ -270,12 +287,29 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y)
|
||||
rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width)
|
||||
rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height)
|
||||
|
||||
let isEnabled = !topFrame.contains(rotatedVideoFrame)
|
||||
|
||||
let normalizedVideoSize = rotatedVideoFrame.size.aspectFilled(CGSize(width: 1080.0, height: 1080.0))
|
||||
transition.updatePosition(layer: backdropVideoView.view.layer, position: rotatedVideoFrame.center)
|
||||
if isEnabled {
|
||||
self.backdropVideoView?.updateIsEnabled(true)
|
||||
self.backdropVideoView?.view.isHidden = false
|
||||
self.backdropEffectView?.isHidden = false
|
||||
}
|
||||
transition.updatePosition(layer: backdropVideoView.view.layer, position: rotatedVideoFrame.center, force: true, completion: { [weak self] value in
|
||||
guard let strongSelf = self, value else {
|
||||
return
|
||||
}
|
||||
if !isEnabled {
|
||||
strongSelf.backdropVideoView?.updateIsEnabled(false)
|
||||
strongSelf.backdropVideoView?.view.isHidden = true
|
||||
strongSelf.backdropEffectView?.isHidden = false
|
||||
}
|
||||
})
|
||||
transition.updateBounds(layer: backdropVideoView.view.layer, bounds: CGRect(origin: CGPoint(), size: normalizedVideoSize))
|
||||
|
||||
let transformScale: CGFloat = rotatedVideoFrame.width / normalizedVideoSize.width
|
||||
|
||||
transition.updateTransformScale(layer: self.backdropVideoViewContainer.layer, scale: transformScale)
|
||||
|
||||
let transition: ContainedViewLayoutTransition = .immediate
|
||||
@ -287,18 +321,9 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
let squareBounds = CGRect(x: (bounds.width - maxSide) / 2.0, y: (bounds.height - maxSide) / 2.0, width: maxSide, height: maxSide)
|
||||
|
||||
if case let .animated(duration, .spring) = transition {
|
||||
if false, #available(iOS 10.0, *) {
|
||||
let timing = UISpringTimingParameters(mass: 3.0, stiffness: 1000.0, damping: 500.0, initialVelocity: CGVector(dx: 0.0, dy: 0.0))
|
||||
let animator = UIViewPropertyAnimator(duration: 0.34, timingParameters: timing)
|
||||
animator.addAnimations {
|
||||
backdropEffectView.frame = squareBounds
|
||||
}
|
||||
animator.startAnimation()
|
||||
} else {
|
||||
UIView.animate(withDuration: duration, delay: 0.0, usingSpringWithDamping: 500.0, initialSpringVelocity: 0.0, options: .layoutSubviews, animations: {
|
||||
backdropEffectView.frame = squareBounds
|
||||
})
|
||||
}
|
||||
UIView.animate(withDuration: duration, delay: 0.0, usingSpringWithDamping: 500.0, initialSpringVelocity: 0.0, options: .layoutSubviews, animations: {
|
||||
backdropEffectView.frame = squareBounds
|
||||
})
|
||||
} else {
|
||||
transition.animateView {
|
||||
backdropEffectView.frame = squareBounds
|
||||
@ -306,11 +331,19 @@ final class GroupVideoNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
if let effectView = self.effectView {
|
||||
if case let .animated(duration, .spring) = transition {
|
||||
UIView.animate(withDuration: duration, delay: 0.0, usingSpringWithDamping: 500.0, initialSpringVelocity: 0.0, options: .layoutSubviews, animations: {
|
||||
effectView.frame = bounds
|
||||
})
|
||||
} else {
|
||||
transition.animateView {
|
||||
effectView.frame = bounds
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let transition: ContainedViewLayoutTransition = .immediate
|
||||
transition.updateTransformRotation(view: self.videoView.view, angle: angle)
|
||||
|
||||
if let effectView = self.effectView {
|
||||
transition.updateFrame(view: effectView, frame: bounds)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1017,7 +1017,7 @@ public final class PresentationCallImpl: PresentationCall {
|
||||
self.videoCapturer = videoCapturer
|
||||
}
|
||||
|
||||
self.videoCapturer?.makeOutgoingVideoView(completion: { view in
|
||||
self.videoCapturer?.makeOutgoingVideoView(requestClone: false, completion: { view, _ in
|
||||
if let view = view {
|
||||
let setOnFirstFrameReceived = view.setOnFirstFrameReceived
|
||||
let setOnOrientationUpdated = view.setOnOrientationUpdated
|
||||
|
@ -24,29 +24,17 @@ private extension GroupCallParticipantsContext.Participant {
|
||||
if let ssrc = self.ssrc {
|
||||
participantSsrcs.insert(ssrc)
|
||||
}
|
||||
if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
||||
if let groups = json["ssrc-groups"] as? [Any] {
|
||||
for group in groups {
|
||||
if let group = group as? [String: Any] {
|
||||
if let groupSources = group["sources"] as? [UInt32] {
|
||||
for source in groupSources {
|
||||
participantSsrcs.insert(source)
|
||||
}
|
||||
}
|
||||
}
|
||||
if let videoDescription = self.videoDescription {
|
||||
for group in videoDescription.ssrcGroups {
|
||||
for ssrc in group.ssrcs {
|
||||
participantSsrcs.insert(ssrc)
|
||||
}
|
||||
}
|
||||
}
|
||||
if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
||||
if let groups = json["ssrc-groups"] as? [Any] {
|
||||
for group in groups {
|
||||
if let group = group as? [String: Any] {
|
||||
if let groupSources = group["sources"] as? [UInt32] {
|
||||
for source in groupSources {
|
||||
participantSsrcs.insert(source)
|
||||
}
|
||||
}
|
||||
}
|
||||
if let presentationDescription = self.presentationDescription {
|
||||
for group in presentationDescription.ssrcGroups {
|
||||
for ssrc in group.ssrcs {
|
||||
participantSsrcs.insert(ssrc)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -55,16 +43,10 @@ private extension GroupCallParticipantsContext.Participant {
|
||||
|
||||
var videoSsrcs: Set<UInt32> {
|
||||
var participantSsrcs = Set<UInt32>()
|
||||
if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
||||
if let groups = json["ssrc-groups"] as? [Any] {
|
||||
for group in groups {
|
||||
if let group = group as? [String: Any] {
|
||||
if let groupSources = group["sources"] as? [UInt32] {
|
||||
for source in groupSources {
|
||||
participantSsrcs.insert(source)
|
||||
}
|
||||
}
|
||||
}
|
||||
if let videoDescription = self.videoDescription {
|
||||
for group in videoDescription.ssrcGroups {
|
||||
for ssrc in group.ssrcs {
|
||||
participantSsrcs.insert(ssrc)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -73,16 +55,10 @@ private extension GroupCallParticipantsContext.Participant {
|
||||
|
||||
var presentationSsrcs: Set<UInt32> {
|
||||
var participantSsrcs = Set<UInt32>()
|
||||
if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
|
||||
if let groups = json["ssrc-groups"] as? [Any] {
|
||||
for group in groups {
|
||||
if let group = group as? [String: Any] {
|
||||
if let groupSources = group["sources"] as? [UInt32] {
|
||||
for source in groupSources {
|
||||
participantSsrcs.insert(source)
|
||||
}
|
||||
}
|
||||
}
|
||||
if let presentationDescription = self.presentationDescription {
|
||||
for group in presentationDescription.ssrcGroups {
|
||||
for ssrc in group.ssrcs {
|
||||
participantSsrcs.insert(ssrc)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -428,6 +404,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
private var internalState: InternalState = .requesting
|
||||
private let internalStatePromise = Promise<InternalState>(.requesting)
|
||||
private var currentLocalSsrc: UInt32?
|
||||
private var currentLocalEndpointId: String?
|
||||
|
||||
private var genericCallContext: OngoingGroupCallContext?
|
||||
private var currentConnectionMode: OngoingGroupCallContext.ConnectionMode = .none
|
||||
@ -477,6 +454,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
return self.isNoiseSuppressionEnabledPromise.get()
|
||||
}
|
||||
private let isNoiseSuppressionEnabledDisposable = MetaDisposable()
|
||||
|
||||
private var isVideoMuted: Bool = false
|
||||
private let isVideoMutedDisposable = MetaDisposable()
|
||||
|
||||
private let audioOutputStatePromise = Promise<([AudioSessionOutput], AudioSessionOutput?)>(([], nil))
|
||||
private var audioOutputStateDisposable: Disposable?
|
||||
@ -771,7 +751,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
return
|
||||
}
|
||||
if case let .established(callInfo, _, _, _, _) = strongSelf.internalState {
|
||||
var addedParticipants: [(UInt32, String?, String?)] = []
|
||||
var removedSsrc: [UInt32] = []
|
||||
for (callId, update) in updates {
|
||||
if callId == callInfo.id {
|
||||
@ -806,11 +785,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
}
|
||||
}
|
||||
} else if case .joined = participantUpdate.participationStatusChange {
|
||||
if let ssrc = participantUpdate.ssrc {
|
||||
addedParticipants.append((ssrc, participantUpdate.videoJsonDescription, participantUpdate.presentationJsonDescription))
|
||||
}
|
||||
} else if let ssrc = participantUpdate.ssrc, strongSelf.ssrcMapping[ssrc] == nil {
|
||||
addedParticipants.append((ssrc, participantUpdate.videoJsonDescription, participantUpdate.presentationJsonDescription))
|
||||
}
|
||||
}
|
||||
case let .call(isTerminated, _, _, _, _, _):
|
||||
@ -903,7 +878,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
guard let screencastCapturer = screencastCapturer else {
|
||||
return
|
||||
}
|
||||
screencastCapturer.injectPixelBuffer(screencastFrame)
|
||||
screencastCapturer.injectPixelBuffer(screencastFrame.0, rotation: screencastFrame.1)
|
||||
})
|
||||
self.screencastStateDisposable = (screencastBufferServerContext.isActive
|
||||
|> distinctUntilChanged
|
||||
@ -942,6 +917,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
self.leaveDisposable.dispose()
|
||||
self.isMutedDisposable.dispose()
|
||||
self.isNoiseSuppressionEnabledDisposable.dispose()
|
||||
self.isVideoMutedDisposable.dispose()
|
||||
self.memberStatesDisposable.dispose()
|
||||
self.networkStateDisposable.dispose()
|
||||
self.checkCallDisposable?.dispose()
|
||||
@ -1031,8 +1007,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
participants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: myPeer,
|
||||
ssrc: nil,
|
||||
videoJsonDescription: nil,
|
||||
presentationJsonDescription: nil,
|
||||
videoDescription: nil,
|
||||
presentationDescription: nil,
|
||||
joinTimestamp: strongSelf.temporaryJoinTimestamp,
|
||||
raiseHandRating: strongSelf.temporaryRaiseHandRating,
|
||||
hasRaiseHand: strongSelf.temporaryHasRaiseHand,
|
||||
@ -1112,8 +1088,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
participants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: myPeer,
|
||||
ssrc: nil,
|
||||
videoJsonDescription: nil,
|
||||
presentationJsonDescription: nil,
|
||||
videoDescription: nil,
|
||||
presentationDescription: nil,
|
||||
joinTimestamp: strongSelf.temporaryJoinTimestamp,
|
||||
raiseHandRating: strongSelf.temporaryRaiseHandRating,
|
||||
hasRaiseHand: strongSelf.temporaryHasRaiseHand,
|
||||
@ -1276,8 +1252,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
participants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: myPeer,
|
||||
ssrc: nil,
|
||||
videoJsonDescription: nil,
|
||||
presentationJsonDescription: nil,
|
||||
videoDescription: nil,
|
||||
presentationDescription: nil,
|
||||
joinTimestamp: strongSelf.temporaryJoinTimestamp,
|
||||
raiseHandRating: strongSelf.temporaryRaiseHandRating,
|
||||
hasRaiseHand: strongSelf.temporaryHasRaiseHand,
|
||||
@ -1488,6 +1464,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
return
|
||||
}
|
||||
let clientParams = joinCallResult.jsonParams
|
||||
if let data = clientParams.data(using: .utf8), let dict = (try? JSONSerialization.jsonObject(with: data, options: [])) as? [String: Any] {
|
||||
if let video = dict["video"] as? [String: Any] {
|
||||
if let endpointId = video["endpoint"] as? String {
|
||||
strongSelf.currentLocalEndpointId = endpointId
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
strongSelf.ssrcMapping.removeAll()
|
||||
for participant in joinCallResult.state.participants {
|
||||
@ -1630,10 +1613,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
if let peerId = peerId {
|
||||
if case .local = ssrcKey {
|
||||
orignalMyLevelHasVoice = hasVoice
|
||||
if !strongSelf.isMutedValue.isEffectivelyMuted {
|
||||
myLevel = level
|
||||
myLevelHasVoice = hasVoice
|
||||
}
|
||||
myLevel = level
|
||||
myLevelHasVoice = hasVoice
|
||||
}
|
||||
result.append((peerId, ssrcValue, level, hasVoice))
|
||||
} else if ssrcValue != 0 {
|
||||
@ -1838,8 +1819,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
participants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: myPeer,
|
||||
ssrc: nil,
|
||||
videoJsonDescription: nil,
|
||||
presentationJsonDescription: nil,
|
||||
videoDescription: nil,
|
||||
presentationDescription: nil,
|
||||
joinTimestamp: strongSelf.temporaryJoinTimestamp,
|
||||
raiseHandRating: strongSelf.temporaryRaiseHandRating,
|
||||
hasRaiseHand: strongSelf.temporaryHasRaiseHand,
|
||||
@ -2055,31 +2036,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
videoDescription: nil
|
||||
))
|
||||
}
|
||||
|
||||
if let videoDescription = participant.videoJsonDescription, !videoDescription.isEmpty {
|
||||
let videoSsrcs = participant.videoSsrcs
|
||||
if !videoSsrcs.intersection(remainingSsrcs).isEmpty {
|
||||
remainingSsrcs.subtract(videoSsrcs)
|
||||
|
||||
result.append(OngoingGroupCallContext.MediaChannelDescription(
|
||||
kind: .video,
|
||||
audioSsrc: audioSsrc,
|
||||
videoDescription: videoDescription
|
||||
))
|
||||
}
|
||||
}
|
||||
if let videoDescription = participant.presentationJsonDescription, !videoDescription.isEmpty {
|
||||
let videoSsrcs = participant.presentationSsrcs
|
||||
if !videoSsrcs.intersection(remainingSsrcs).isEmpty {
|
||||
remainingSsrcs.subtract(videoSsrcs)
|
||||
|
||||
result.append(OngoingGroupCallContext.MediaChannelDescription(
|
||||
kind: .video,
|
||||
audioSsrc: audioSsrc,
|
||||
videoDescription: videoDescription
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2242,6 +2198,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
strongSelf.reconnectedAsEventsPipe.putNext(myPeer)
|
||||
strongSelf.switchToTemporaryScheduledParticipantsContext()
|
||||
} else {
|
||||
strongSelf.disableVideo()
|
||||
strongSelf.isMutedValue = .muted(isPushToTalkActive: false)
|
||||
strongSelf.isMutedPromise.set(strongSelf.isMutedValue)
|
||||
|
||||
strongSelf.reconnectingAsPeer = myPeer
|
||||
|
||||
if let participantsContext = strongSelf.participantsContext, let immediateState = participantsContext.immediateState {
|
||||
@ -2429,28 +2389,32 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
self.participantsContext?.lowerHand()
|
||||
}
|
||||
|
||||
public func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void) {
|
||||
public func makeOutgoingVideoView(requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void) {
|
||||
if self.videoCapturer == nil {
|
||||
let videoCapturer = OngoingCallVideoCapturer()
|
||||
self.videoCapturer = videoCapturer
|
||||
}
|
||||
|
||||
self.videoCapturer?.makeOutgoingVideoView(completion: { view in
|
||||
if let view = view {
|
||||
let setOnFirstFrameReceived = view.setOnFirstFrameReceived
|
||||
let setOnOrientationUpdated = view.setOnOrientationUpdated
|
||||
let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
|
||||
let updateIsEnabled = view.updateIsEnabled
|
||||
completion(PresentationCallVideoView(
|
||||
holder: view,
|
||||
view: view.view,
|
||||
|
||||
guard let videoCapturer = self.videoCapturer else {
|
||||
completion(nil, nil)
|
||||
return
|
||||
}
|
||||
videoCapturer.makeOutgoingVideoView(requestClone: requestClone, completion: { mainView, cloneView in
|
||||
if let mainView = mainView {
|
||||
let setOnFirstFrameReceived = mainView.setOnFirstFrameReceived
|
||||
let setOnOrientationUpdated = mainView.setOnOrientationUpdated
|
||||
let setOnIsMirroredUpdated = mainView.setOnIsMirroredUpdated
|
||||
let updateIsEnabled = mainView.updateIsEnabled
|
||||
let mainVideoView = PresentationCallVideoView(
|
||||
holder: mainView,
|
||||
view: mainView.view,
|
||||
setOnFirstFrameReceived: { f in
|
||||
setOnFirstFrameReceived(f)
|
||||
},
|
||||
getOrientation: { [weak view] in
|
||||
if let view = view {
|
||||
getOrientation: { [weak mainView] in
|
||||
if let mainView = mainView {
|
||||
let mappedValue: PresentationCallVideoView.Orientation
|
||||
switch view.getOrientation() {
|
||||
switch mainView.getOrientation() {
|
||||
case .rotation0:
|
||||
mappedValue = .rotation0
|
||||
case .rotation90:
|
||||
@ -2465,9 +2429,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
return .rotation0
|
||||
}
|
||||
},
|
||||
getAspect: { [weak view] in
|
||||
if let view = view {
|
||||
return view.getAspect()
|
||||
getAspect: { [weak mainView] in
|
||||
if let mainView = mainView {
|
||||
return mainView.getAspect()
|
||||
} else {
|
||||
return 0.0
|
||||
}
|
||||
@ -2496,9 +2460,73 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
updateIsEnabled: { value in
|
||||
updateIsEnabled(value)
|
||||
}
|
||||
))
|
||||
)
|
||||
var cloneVideoView: PresentationCallVideoView?
|
||||
if let cloneView = cloneView {
|
||||
let setOnFirstFrameReceived = cloneView.setOnFirstFrameReceived
|
||||
let setOnOrientationUpdated = cloneView.setOnOrientationUpdated
|
||||
let setOnIsMirroredUpdated = cloneView.setOnIsMirroredUpdated
|
||||
let updateIsEnabled = cloneView.updateIsEnabled
|
||||
cloneVideoView = PresentationCallVideoView(
|
||||
holder: cloneView,
|
||||
view: cloneView.view,
|
||||
setOnFirstFrameReceived: { f in
|
||||
setOnFirstFrameReceived(f)
|
||||
},
|
||||
getOrientation: { [weak cloneView] in
|
||||
if let cloneView = cloneView {
|
||||
let mappedValue: PresentationCallVideoView.Orientation
|
||||
switch cloneView.getOrientation() {
|
||||
case .rotation0:
|
||||
mappedValue = .rotation0
|
||||
case .rotation90:
|
||||
mappedValue = .rotation90
|
||||
case .rotation180:
|
||||
mappedValue = .rotation180
|
||||
case .rotation270:
|
||||
mappedValue = .rotation270
|
||||
}
|
||||
return mappedValue
|
||||
} else {
|
||||
return .rotation0
|
||||
}
|
||||
},
|
||||
getAspect: { [weak cloneView] in
|
||||
if let cloneView = cloneView {
|
||||
return cloneView.getAspect()
|
||||
} else {
|
||||
return 0.0
|
||||
}
|
||||
},
|
||||
setOnOrientationUpdated: { f in
|
||||
setOnOrientationUpdated { value, aspect in
|
||||
let mappedValue: PresentationCallVideoView.Orientation
|
||||
switch value {
|
||||
case .rotation0:
|
||||
mappedValue = .rotation0
|
||||
case .rotation90:
|
||||
mappedValue = .rotation90
|
||||
case .rotation180:
|
||||
mappedValue = .rotation180
|
||||
case .rotation270:
|
||||
mappedValue = .rotation270
|
||||
}
|
||||
f?(mappedValue, aspect)
|
||||
}
|
||||
},
|
||||
setOnIsMirroredUpdated: { f in
|
||||
setOnIsMirroredUpdated { value in
|
||||
f?(value)
|
||||
}
|
||||
},
|
||||
updateIsEnabled: { value in
|
||||
updateIsEnabled(value)
|
||||
}
|
||||
)
|
||||
}
|
||||
completion(mainVideoView, cloneVideoView)
|
||||
} else {
|
||||
completion(nil)
|
||||
completion(nil, nil)
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -2511,8 +2539,18 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
self.hasVideo = true
|
||||
if let videoCapturer = self.videoCapturer {
|
||||
self.genericCallContext?.requestVideo(videoCapturer)
|
||||
self.isVideoMuted = false
|
||||
self.isVideoMutedDisposable.set((videoCapturer.isActive
|
||||
|> distinctUntilChanged
|
||||
|> deliverOnMainQueue).start(next: { [weak self] value in
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
strongSelf.isVideoMuted = !value
|
||||
strongSelf.updateLocalVideoState()
|
||||
}))
|
||||
|
||||
self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: false)
|
||||
self.updateLocalVideoState()
|
||||
}
|
||||
}
|
||||
|
||||
@ -2520,11 +2558,17 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
self.hasVideo = false
|
||||
if let _ = self.videoCapturer {
|
||||
self.videoCapturer = nil
|
||||
self.isVideoMutedDisposable.set(nil)
|
||||
self.genericCallContext?.disableVideo()
|
||||
self.isVideoMuted = true
|
||||
|
||||
self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: true)
|
||||
self.updateLocalVideoState()
|
||||
}
|
||||
}
|
||||
|
||||
private func updateLocalVideoState() {
|
||||
self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: self.videoCapturer == nil, isVideoPaused: self.isVideoMuted)
|
||||
}
|
||||
|
||||
public func switchVideoCamera() {
|
||||
self.useFrontCamera = !self.useFrontCamera
|
||||
@ -2616,19 +2660,32 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
|
||||
public func setRequestedVideoList(items: [PresentationGroupCallRequestedVideo]) {
|
||||
self.genericCallContext?.setRequestedVideoChannels(items.compactMap { item -> OngoingGroupCallContext.VideoChannel in
|
||||
let mappedQuality: OngoingGroupCallContext.VideoChannel.Quality
|
||||
switch item.quality {
|
||||
let mappedMinQuality: OngoingGroupCallContext.VideoChannel.Quality
|
||||
let mappedMaxQuality: OngoingGroupCallContext.VideoChannel.Quality
|
||||
switch item.minQuality {
|
||||
case .thumbnail:
|
||||
mappedQuality = .thumbnail
|
||||
mappedMinQuality = .thumbnail
|
||||
case .medium:
|
||||
mappedQuality = .medium
|
||||
mappedMinQuality = .medium
|
||||
case .full:
|
||||
mappedQuality = .full
|
||||
mappedMinQuality = .full
|
||||
}
|
||||
switch item.maxQuality {
|
||||
case .thumbnail:
|
||||
mappedMaxQuality = .thumbnail
|
||||
case .medium:
|
||||
mappedMaxQuality = .medium
|
||||
case .full:
|
||||
mappedMaxQuality = .full
|
||||
}
|
||||
return OngoingGroupCallContext.VideoChannel(
|
||||
audioSsrc: item.audioSsrc,
|
||||
videoDescription: item.videoInformation,
|
||||
quality: mappedQuality
|
||||
endpointId: item.endpointId,
|
||||
ssrcGroups: item.ssrcGroups.map { group in
|
||||
return OngoingGroupCallContext.VideoChannel.SsrcGroup(semantics: group.semantics, ssrcs: group.ssrcs)
|
||||
},
|
||||
minQuality: mappedMinQuality,
|
||||
maxQuality: mappedMaxQuality
|
||||
)
|
||||
})
|
||||
}
|
||||
@ -2931,6 +2988,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
|
||||
}
|
||||
|
||||
public func makeIncomingVideoView(endpointId: String, requestClone: Bool, completion: @escaping (PresentationCallVideoView?, PresentationCallVideoView?) -> Void) {
|
||||
if endpointId == self.currentLocalEndpointId {
|
||||
self.makeOutgoingVideoView(requestClone: requestClone, completion: completion)
|
||||
return
|
||||
}
|
||||
|
||||
self.genericCallContext?.makeIncomingVideoView(endpointId: endpointId, requestClone: requestClone, completion: { mainView, cloneView in
|
||||
if let mainView = mainView {
|
||||
let setOnFirstFrameReceived = mainView.setOnFirstFrameReceived
|
||||
|
@ -1374,12 +1374,14 @@ final class BlobView: UIView {
|
||||
|
||||
var level: CGFloat = 0 {
|
||||
didSet {
|
||||
CATransaction.begin()
|
||||
CATransaction.setDisableActions(true)
|
||||
let lv = self.minScale + (self.maxScale - self.minScale) * self.level
|
||||
self.shapeLayer.transform = CATransform3DMakeScale(lv, lv, 1)
|
||||
self.scaleUpdated?(self.level)
|
||||
CATransaction.commit()
|
||||
if abs(self.level - oldValue) > 0.01 {
|
||||
CATransaction.begin()
|
||||
CATransaction.setDisableActions(true)
|
||||
let lv = self.minScale + (self.maxScale - self.minScale) * self.level
|
||||
self.shapeLayer.transform = CATransform3DMakeScale(lv, lv, 1)
|
||||
self.scaleUpdated?(self.level)
|
||||
CATransaction.commit()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -426,11 +426,17 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
|
||||
self.containerLayout = (layout, navigationBarHeight)
|
||||
|
||||
let isLandscape: Bool
|
||||
if layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
|
||||
if layout.size.width > layout.size.height {
|
||||
isLandscape = true
|
||||
} else {
|
||||
isLandscape = false
|
||||
}
|
||||
let isTablet: Bool
|
||||
if case .regular = layout.metrics.widthClass {
|
||||
isTablet = true
|
||||
} else {
|
||||
isTablet = false
|
||||
}
|
||||
|
||||
var insets = layout.insets(options: [.statusBar, .input])
|
||||
let cleanInsets = layout.insets(options: [.statusBar])
|
||||
@ -440,28 +446,44 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
|
||||
if let _ = self.broadcastPickerView {
|
||||
buttonOffset *= 2.0
|
||||
}
|
||||
let bottomInset: CGFloat = 10.0 + cleanInsets.bottom
|
||||
let bottomInset: CGFloat = isTablet ? 31.0 : 10.0 + cleanInsets.bottom
|
||||
let titleHeight: CGFloat = 54.0
|
||||
var contentHeight = titleHeight + bottomInset + 52.0 + 17.0
|
||||
let innerContentHeight: CGFloat = layout.size.height - contentHeight - 160.0
|
||||
var width = horizontalContainerFillingSizeForLayout(layout: layout, sideInset: layout.safeInsets.left)
|
||||
if isLandscape {
|
||||
contentHeight = layout.size.height
|
||||
width = layout.size.width
|
||||
if isTablet {
|
||||
width = 870.0
|
||||
contentHeight = 690.0
|
||||
} else {
|
||||
contentHeight = layout.size.height
|
||||
width = layout.size.width
|
||||
}
|
||||
} else {
|
||||
contentHeight = titleHeight + bottomInset + 52.0 + 17.0 + innerContentHeight + buttonOffset
|
||||
if isTablet {
|
||||
width = 600.0
|
||||
contentHeight = 960.0
|
||||
} else {
|
||||
contentHeight = titleHeight + bottomInset + 52.0 + 17.0 + innerContentHeight + buttonOffset
|
||||
}
|
||||
}
|
||||
|
||||
let previewInset: CGFloat = 16.0
|
||||
let sideInset = floor((layout.size.width - width) / 2.0)
|
||||
let contentContainerFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentHeight), size: CGSize(width: width, height: contentHeight))
|
||||
let contentFrame = contentContainerFrame
|
||||
|
||||
var backgroundFrame = CGRect(origin: CGPoint(x: contentFrame.minX, y: contentFrame.minY), size: CGSize(width: contentFrame.width, height: contentFrame.height + 2000.0))
|
||||
let contentFrame: CGRect
|
||||
if isTablet {
|
||||
contentFrame = CGRect(origin: CGPoint(x: sideInset, y: floor((layout.size.height - contentHeight) / 2.0)), size: CGSize(width: width, height: contentHeight))
|
||||
} else {
|
||||
contentFrame = CGRect(origin: CGPoint(x: sideInset, y: layout.size.height - contentHeight), size: CGSize(width: width, height: contentHeight))
|
||||
}
|
||||
var backgroundFrame = CGRect(origin: CGPoint(x: contentFrame.minX, y: contentFrame.minY), size: CGSize(width: contentFrame.width, height: contentFrame.height))
|
||||
if !isTablet {
|
||||
backgroundFrame.size.height += 2000.0
|
||||
}
|
||||
if backgroundFrame.minY < contentFrame.minY {
|
||||
backgroundFrame.origin.y = contentFrame.minY
|
||||
}
|
||||
transition.updateAlpha(node: self.titleNode, alpha: isLandscape ? 0.0 : 1.0)
|
||||
transition.updateAlpha(node: self.titleNode, alpha: isLandscape && !isTablet ? 0.0 : 1.0)
|
||||
transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
|
||||
transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
|
||||
transition.updateFrame(node: self.contentBackgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
|
||||
@ -472,14 +494,24 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
|
||||
let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 18.0), size: titleSize)
|
||||
transition.updateFrame(node: self.titleNode, frame: titleFrame)
|
||||
|
||||
let previewSize: CGSize
|
||||
let previewFrame: CGRect
|
||||
var previewSize: CGSize
|
||||
var previewFrame: CGRect
|
||||
if isLandscape {
|
||||
let previewHeight = contentHeight - layout.intrinsicInsets.bottom - 52.0 - 10.0
|
||||
let previewHeight = contentHeight - 21.0 - 52.0 - 10.0
|
||||
previewSize = CGSize(width: min(contentFrame.width - layout.safeInsets.left - layout.safeInsets.right, previewHeight * 1.7778), height: previewHeight)
|
||||
if isTablet {
|
||||
previewSize.width -= previewInset * 2.0
|
||||
previewSize.height -= 46.0
|
||||
}
|
||||
previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((contentFrame.width - previewSize.width) / 2.0), y: 0.0), size: previewSize)
|
||||
if isTablet {
|
||||
previewFrame.origin.y += 56.0
|
||||
}
|
||||
} else {
|
||||
previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 243.0 - bottomInset + (120.0 - buttonOffset))
|
||||
if isTablet {
|
||||
previewSize.height += 17.0
|
||||
}
|
||||
previewFrame = CGRect(origin: CGPoint(x: previewInset, y: 56.0), size: previewSize)
|
||||
}
|
||||
transition.updateFrame(node: self.previewContainerNode, frame: previewFrame)
|
||||
@ -508,40 +540,49 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
|
||||
} else {
|
||||
self.screenButton.isHidden = true
|
||||
}
|
||||
let buttonInset: CGFloat = 6.0
|
||||
|
||||
let buttonWidth = floorToScreenPixels((contentFrame.width - layout.safeInsets.left - layout.safeInsets.right - CGFloat(buttonsCount + 1) * buttonInset) / CGFloat(buttonsCount))
|
||||
let buttonInset: CGFloat = 6.0
|
||||
var leftButtonInset = buttonInset
|
||||
let availableWidth: CGFloat
|
||||
if isTablet {
|
||||
availableWidth = contentFrame.width - layout.safeInsets.left - layout.safeInsets.right - previewInset * 2.0
|
||||
leftButtonInset += previewInset
|
||||
} else {
|
||||
availableWidth = contentFrame.width - layout.safeInsets.left - layout.safeInsets.right
|
||||
}
|
||||
let buttonWidth = floorToScreenPixels((availableWidth - CGFloat(buttonsCount + 1) * buttonInset) / CGFloat(buttonsCount))
|
||||
|
||||
let cameraButtonHeight = self.cameraButton.updateLayout(width: buttonWidth, transition: transition)
|
||||
let screenButtonHeight = self.screenButton.updateLayout(width: buttonWidth, transition: transition)
|
||||
let cancelButtonHeight = self.cancelButton.updateLayout(width: buttonWidth, transition: transition)
|
||||
|
||||
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: layout.safeInsets.left + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cancelButtonHeight))
|
||||
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cancelButtonHeight))
|
||||
if let broadcastPickerView = self.broadcastPickerView {
|
||||
transition.updateFrame(node: self.screenButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight))
|
||||
broadcastPickerView.frame = CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight)
|
||||
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
|
||||
transition.updateFrame(node: self.screenButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight))
|
||||
broadcastPickerView.frame = CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight)
|
||||
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
|
||||
} else {
|
||||
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
|
||||
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + leftButtonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
|
||||
}
|
||||
|
||||
} else {
|
||||
let bottomInset = isTablet ? 21.0 : insets.bottom + 16.0
|
||||
let buttonInset: CGFloat = 16.0
|
||||
let cameraButtonHeight = self.cameraButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
|
||||
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - insets.bottom - 16.0 - buttonOffset, width: contentFrame.width, height: cameraButtonHeight))
|
||||
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - bottomInset - buttonOffset, width: contentFrame.width, height: cameraButtonHeight))
|
||||
|
||||
let screenButtonHeight = self.screenButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
|
||||
transition.updateFrame(node: self.screenButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - insets.bottom - 16.0, width: contentFrame.width, height: screenButtonHeight))
|
||||
transition.updateFrame(node: self.screenButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - bottomInset, width: contentFrame.width, height: screenButtonHeight))
|
||||
if let broadcastPickerView = self.broadcastPickerView {
|
||||
broadcastPickerView.frame = CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - insets.bottom - 16.0, width: contentFrame.width + 1000.0, height: screenButtonHeight)
|
||||
broadcastPickerView.frame = CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - 8.0 - screenButtonHeight - bottomInset, width: contentFrame.width + 1000.0, height: screenButtonHeight)
|
||||
} else {
|
||||
self.screenButton.isHidden = true
|
||||
}
|
||||
|
||||
let cancelButtonHeight = self.cancelButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
|
||||
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: buttonInset, y: contentHeight - cancelButtonHeight - insets.bottom - 16.0, width: contentFrame.width, height: cancelButtonHeight))
|
||||
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: buttonInset, y: contentHeight - cancelButtonHeight - bottomInset, width: contentFrame.width, height: cancelButtonHeight))
|
||||
}
|
||||
|
||||
transition.updateFrame(node: self.contentContainerNode, frame: contentContainerFrame)
|
||||
transition.updateFrame(node: self.contentContainerNode, frame: contentFrame)
|
||||
}
|
||||
}
|
||||
|
@ -68,12 +68,14 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
|
||||
let context: AccountContext
|
||||
let peer: Peer
|
||||
let videoEndpointId: String?
|
||||
let isPaused: Bool
|
||||
let icon: Icon
|
||||
let text: VoiceChatParticipantItem.ParticipantText
|
||||
let textColor: Color
|
||||
let color: Color
|
||||
let isLandscape: Bool
|
||||
let active: Bool
|
||||
let showVideoWhenActive: Bool
|
||||
let getAudioLevel: (() -> Signal<Float, NoError>)?
|
||||
let getVideo: () -> GroupVideoNode?
|
||||
let action: ((ASDisplayNode?) -> Void)?
|
||||
@ -82,18 +84,20 @@ final class VoiceChatFullscreenParticipantItem: ListViewItem {
|
||||
|
||||
public let selectable: Bool = true
|
||||
|
||||
public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, videoEndpointId: String?, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, textColor: Color, color: Color, isLandscape: Bool, active: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
|
||||
public init(presentationData: ItemListPresentationData, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, videoEndpointId: String?, isPaused: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, textColor: Color, color: Color, isLandscape: Bool, active: Bool, showVideoWhenActive: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, action: ((ASDisplayNode?) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
|
||||
self.presentationData = presentationData
|
||||
self.nameDisplayOrder = nameDisplayOrder
|
||||
self.context = context
|
||||
self.peer = peer
|
||||
self.videoEndpointId = videoEndpointId
|
||||
self.isPaused = isPaused
|
||||
self.icon = icon
|
||||
self.text = text
|
||||
self.textColor = textColor
|
||||
self.color = color
|
||||
self.isLandscape = isLandscape
|
||||
self.active = active
|
||||
self.showVideoWhenActive = showVideoWhenActive
|
||||
self.getAudioLevel = getAudioLevel
|
||||
self.getVideo = getVideo
|
||||
self.action = action
|
||||
@ -269,9 +273,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
gesture.cancel()
|
||||
return
|
||||
}
|
||||
if item.peer.smallProfileImage != nil {
|
||||
contextAction(strongSelf.contextSourceNode, gesture)
|
||||
}
|
||||
contextAction(strongSelf.contextSourceNode, gesture)
|
||||
}
|
||||
self.contextSourceNode.willUpdateIsExtractedToContextPreview = { [weak self] isExtracted, transition in
|
||||
guard let strongSelf = self, let _ = strongSelf.item else {
|
||||
@ -289,161 +291,79 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
|
||||
override func selected() {
|
||||
super.selected()
|
||||
if self.animatingSelection {
|
||||
return
|
||||
}
|
||||
self.layoutParams?.0.action?(self.contextSourceNode)
|
||||
}
|
||||
|
||||
func animateTransitionIn(from sourceNode: ASDisplayNode?, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
|
||||
func transitionIn(from sourceNode: ASDisplayNode?) {
|
||||
guard let item = self.item else {
|
||||
return
|
||||
}
|
||||
var duration: Double = 0.2
|
||||
var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
|
||||
if case let .animated(transitionDuration, curve) = transition {
|
||||
duration = transitionDuration
|
||||
timingFunction = curve.timingFunction
|
||||
let active = item.active && !item.showVideoWhenActive
|
||||
|
||||
var videoNode: GroupVideoNode?
|
||||
if let sourceNode = sourceNode as? VoiceChatTileItemNode {
|
||||
if let sourceVideoNode = sourceNode.videoNode {
|
||||
sourceNode.videoNode = nil
|
||||
videoNode = sourceVideoNode
|
||||
}
|
||||
}
|
||||
|
||||
let initialAnimate = animate
|
||||
if let sourceNode = sourceNode as? VoiceChatTileItemNode {
|
||||
var startContainerPosition = sourceNode.view.convert(sourceNode.bounds, to: containerNode.view).center
|
||||
var animate = initialAnimate
|
||||
if startContainerPosition.y < -tileHeight || startContainerPosition.y > containerNode.frame.height + tileHeight {
|
||||
animate = false
|
||||
if videoNode == nil {
|
||||
videoNode = item.getVideo()
|
||||
}
|
||||
|
||||
if let videoNode = videoNode {
|
||||
if active {
|
||||
self.avatarNode.alpha = 1.0
|
||||
videoNode.alpha = 0.0
|
||||
} else {
|
||||
self.avatarNode.alpha = 0.0
|
||||
videoNode.alpha = 1.0
|
||||
}
|
||||
self.videoNode = videoNode
|
||||
self.videoContainerNode.insertSubnode(videoNode, at: 0)
|
||||
|
||||
if let videoNode = sourceNode.videoNode {
|
||||
if item.active {
|
||||
self.avatarNode.alpha = 1.0
|
||||
videoNode.alpha = 0.0
|
||||
startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0)
|
||||
} else {
|
||||
self.avatarNode.alpha = 0.0
|
||||
}
|
||||
|
||||
sourceNode.videoNode = nil
|
||||
self.videoNode = videoNode
|
||||
self.videoContainerNode.insertSubnode(videoNode, at: 0)
|
||||
|
||||
if animate {
|
||||
videoNode.updateLayout(size: videoSize, layoutMode: .fillOrFitToSquare, transition: transition)
|
||||
|
||||
let scale = sourceNode.bounds.width / videoSize.width
|
||||
self.videoContainerNode.layer.animateScale(from: sourceNode.bounds.width / videoSize.width, to: tileSize.width / videoSize.width, duration: duration, timingFunction: timingFunction)
|
||||
self.videoContainerNode.layer.animate(from: backgroundCornerRadius * (1.0 / scale) as NSNumber, to: videoCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: timingFunction, duration: duration, removeOnCompletion: false, completion: { _ in
|
||||
})
|
||||
|
||||
self.videoFadeNode.alpha = 1.0
|
||||
self.videoFadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
|
||||
} else {
|
||||
videoNode.updateLayout(size: videoSize, layoutMode: .fillOrFitToSquare, transition: .immediate)
|
||||
self.videoFadeNode.alpha = 1.0
|
||||
}
|
||||
}
|
||||
|
||||
if animate {
|
||||
let initialPosition = self.contextSourceNode.position
|
||||
let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center
|
||||
|
||||
self.contextSourceNode.position = targetContainerPosition
|
||||
containerNode.addSubnode(self.contextSourceNode)
|
||||
|
||||
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
|
||||
if let strongSelf = self {
|
||||
strongSelf.contextSourceNode.position = initialPosition
|
||||
strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
|
||||
}
|
||||
})
|
||||
|
||||
if item.active {
|
||||
self.highlightNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
|
||||
self.highlightNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
|
||||
}
|
||||
|
||||
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
} else if !initialAnimate {
|
||||
if transition.isAnimated {
|
||||
self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
}
|
||||
}
|
||||
} else if let sourceNode = sourceNode as? VoiceChatParticipantItemNode, let _ = sourceNode.item {
|
||||
var startContainerPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
|
||||
var animate = true
|
||||
if startContainerPosition.y < -tileHeight || startContainerPosition.y > containerNode.frame.height + tileHeight {
|
||||
animate = false
|
||||
}
|
||||
startContainerPosition = startContainerPosition.offsetBy(dx: 0.0, dy: 9.0)
|
||||
|
||||
if animate {
|
||||
sourceNode.avatarNode.alpha = 0.0
|
||||
sourceNode.audioLevelView?.alpha = 0.0
|
||||
|
||||
let initialPosition = self.contextSourceNode.position
|
||||
let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center
|
||||
|
||||
self.contextSourceNode.position = targetContainerPosition
|
||||
containerNode.addSubnode(self.contextSourceNode)
|
||||
|
||||
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self, weak sourceNode] _ in
|
||||
if let strongSelf = self, let sourceNode = sourceNode {
|
||||
sourceNode.avatarNode.alpha = 1.0
|
||||
sourceNode.audioLevelView?.alpha = 1.0
|
||||
strongSelf.contextSourceNode.position = initialPosition
|
||||
strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
|
||||
}
|
||||
})
|
||||
|
||||
if item.active {
|
||||
self.highlightNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
|
||||
self.highlightNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2, timingFunction: timingFunction)
|
||||
}
|
||||
|
||||
self.avatarNode.layer.animateScale(from: 0.8, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
|
||||
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
}
|
||||
} else {
|
||||
if transition.isAnimated {
|
||||
self.contextSourceNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contextSourceNode.layer.animateScale(from: 0.0, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
}
|
||||
videoNode.updateLayout(size: videoSize, layoutMode: .fillOrFitToSquare, transition: .immediate)
|
||||
videoNode.frame = CGRect(origin: CGPoint(), size: videoSize)
|
||||
}
|
||||
}
|
||||
|
||||
private func updateIsExtracted(_ isExtracted: Bool, transition: ContainedViewLayoutTransition) {
|
||||
guard self.isExtracted != isExtracted, let extractedRect = self.extractedRect, let nonExtractedRect = self.nonExtractedRect, let item = self.item else {
|
||||
return
|
||||
}
|
||||
self.isExtracted = isExtracted
|
||||
|
||||
if isExtracted {
|
||||
let profileNode = VoiceChatPeerProfileNode(context: item.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
|
||||
self?.contextSourceNode.requestDismiss?()
|
||||
})
|
||||
profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
|
||||
self.profileNode = profileNode
|
||||
self.contextSourceNode.contentNode.addSubnode(profileNode)
|
||||
if item.peer.smallProfileImage != nil {
|
||||
if isExtracted {
|
||||
let profileNode = VoiceChatPeerProfileNode(context: item.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
|
||||
self?.contextSourceNode.requestDismiss?()
|
||||
})
|
||||
profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
|
||||
self.profileNode = profileNode
|
||||
self.contextSourceNode.contentNode.addSubnode(profileNode)
|
||||
|
||||
profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
|
||||
|
||||
self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
|
||||
if let strongSelf = self, let profileNode = strongSelf.profileNode {
|
||||
if profileNode.avatarListWrapperNode.frame.contains(point) {
|
||||
return profileNode.avatarListNode.view
|
||||
profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
|
||||
|
||||
self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
|
||||
if let strongSelf = self, let profileNode = strongSelf.profileNode {
|
||||
if profileNode.avatarListWrapperNode.frame.contains(point) {
|
||||
return profileNode.avatarListNode.view
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return nil
|
||||
self.highlightNode.isHidden = true
|
||||
} else if let profileNode = self.profileNode {
|
||||
self.profileNode = nil
|
||||
profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
|
||||
|
||||
self.contextSourceNode.contentNode.customHitTest = nil
|
||||
self.highlightNode.isHidden = !item.active
|
||||
}
|
||||
} else if let profileNode = self.profileNode {
|
||||
self.profileNode = nil
|
||||
profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
|
||||
|
||||
self.contextSourceNode.contentNode.customHitTest = nil
|
||||
}
|
||||
}
|
||||
|
||||
@ -452,14 +372,19 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
let makeStatusLayout = self.statusNode.asyncLayout()
|
||||
|
||||
let currentItem = self.layoutParams?.0
|
||||
let hasVideo = self.videoNode != nil
|
||||
var hasVideo = self.videoNode != nil
|
||||
|
||||
return { item, params, first, last in
|
||||
let titleFont = Font.semibold(13.0)
|
||||
var titleAttributedString: NSAttributedString?
|
||||
|
||||
if !hasVideo && item.videoEndpointId != nil {
|
||||
hasVideo = true
|
||||
}
|
||||
let active = item.active && !item.showVideoWhenActive
|
||||
|
||||
var titleColor = item.presentationData.theme.list.itemPrimaryTextColor
|
||||
if !hasVideo || item.active {
|
||||
if !hasVideo || active {
|
||||
switch item.textColor {
|
||||
case .generic:
|
||||
titleColor = item.presentationData.theme.list.itemPrimaryTextColor
|
||||
@ -570,13 +495,23 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
apperanceTransition.updateAlpha(layer: audioLevelView.layer, alpha: 1.0)
|
||||
}
|
||||
} else {
|
||||
currentVideoNode.removeFromSupernode()
|
||||
if currentItem?.peer.id == item.peer.id {
|
||||
currentVideoNode.layer.animateScale(from: 1.0, to: 0.0, duration: appearanceDuration, completion: { [weak self, weak currentVideoNode] _ in
|
||||
if currentVideoNode !== self?.videoNode {
|
||||
currentVideoNode?.removeFromSupernode()
|
||||
}
|
||||
})
|
||||
} else {
|
||||
currentVideoNode.removeFromSupernode()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let videoNodeUpdated = strongSelf.videoNode !== videoNode
|
||||
strongSelf.videoNode = videoNode
|
||||
|
||||
videoNode?.updateIsBlurred(isBlurred: item.isPaused, light: true)
|
||||
|
||||
let nonExtractedRect: CGRect
|
||||
let avatarFrame: CGRect
|
||||
let titleFrame: CGRect
|
||||
@ -594,7 +529,10 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
titleFrame = CGRect(origin: CGPoint(x: 8.0, y: 63.0), size: titleLayout.size)
|
||||
|
||||
let extractedWidth = availableWidth
|
||||
let extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
|
||||
var extractedRect = CGRect(x: 0.0, y: 0.0, width: extractedWidth, height: extractedWidth + statusLayout.height + 39.0)
|
||||
if item.peer.smallProfileImage == nil {
|
||||
extractedRect = nonExtractedRect
|
||||
}
|
||||
strongSelf.extractedRect = extractedRect
|
||||
strongSelf.nonExtractedRect = nonExtractedRect
|
||||
|
||||
@ -706,7 +644,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
strongSelf.audioLevelView = audioLevelView
|
||||
strongSelf.offsetContainerNode.view.insertSubview(audioLevelView, at: 0)
|
||||
|
||||
if let item = strongSelf.item, strongSelf.videoNode != nil && !item.active {
|
||||
if let item = strongSelf.item, strongSelf.videoNode != nil && !active {
|
||||
audioLevelView.alpha = 0.0
|
||||
}
|
||||
}
|
||||
@ -777,7 +715,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
nodeToAnimateIn = animationNode
|
||||
}
|
||||
var color = color
|
||||
if (hasVideo && !item.active) || color.rgb == 0x979797 {
|
||||
if (hasVideo && !active) || color.rgb == 0x979797 {
|
||||
color = UIColor(rgb: 0xffffff)
|
||||
}
|
||||
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: color), animated: true)
|
||||
@ -868,16 +806,16 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
strongSelf.videoContainerNode.position = CGPoint(x: tileSize.width / 2.0, y: tileSize.height / 2.0)
|
||||
strongSelf.videoContainerNode.cornerRadius = videoCornerRadius
|
||||
strongSelf.videoContainerNode.transform = CATransform3DMakeScale(videoContainerScale, videoContainerScale, 1.0)
|
||||
}
|
||||
|
||||
strongSelf.highlightNode.isHidden = !item.active
|
||||
strongSelf.highlightNode.isHidden = !item.active
|
||||
}
|
||||
|
||||
let canUpdateAvatarVisibility = !strongSelf.isExtracted && !strongSelf.animatingExtraction
|
||||
|
||||
if let videoNode = videoNode {
|
||||
if !strongSelf.isExtracted && !strongSelf.animatingExtraction {
|
||||
if currentItem != nil {
|
||||
if item.active {
|
||||
if active {
|
||||
if strongSelf.avatarNode.alpha.isZero {
|
||||
strongSelf.animatingSelection = true
|
||||
strongSelf.videoContainerNode.layer.animateScale(from: videoContainerScale, to: 0.001, duration: appearanceDuration)
|
||||
@ -913,7 +851,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if item.active {
|
||||
if active {
|
||||
videoNode.alpha = 0.0
|
||||
if canUpdateAvatarVisibility {
|
||||
strongSelf.avatarNode.alpha = 1.0
|
||||
@ -937,19 +875,23 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
}
|
||||
|
||||
if let _ = currentItem, videoNodeUpdated {
|
||||
if item.active {
|
||||
if active {
|
||||
if canUpdateAvatarVisibility {
|
||||
strongSelf.avatarNode.alpha = 1.0
|
||||
}
|
||||
videoNode.alpha = 0.0
|
||||
} else {
|
||||
strongSelf.animatingSelection = true
|
||||
let previousAvatarNodeAlpha = strongSelf.avatarNode.alpha
|
||||
strongSelf.avatarNode.alpha = 0.0
|
||||
strongSelf.avatarNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: appearanceDuration)
|
||||
videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: appearanceDuration)
|
||||
strongSelf.avatarNode.layer.animateAlpha(from: previousAvatarNodeAlpha, to: 0.0, duration: appearanceDuration)
|
||||
videoNode.layer.animateScale(from: 0.01, to: 1.0, duration: appearanceDuration, completion: { [weak self] _ in
|
||||
self?.animatingSelection = false
|
||||
})
|
||||
videoNode.alpha = 1.0
|
||||
}
|
||||
} else {
|
||||
if item.active {
|
||||
if active {
|
||||
if canUpdateAvatarVisibility {
|
||||
strongSelf.avatarNode.alpha = 1.0
|
||||
}
|
||||
@ -968,7 +910,6 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
|
||||
strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0)
|
||||
|
||||
strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0)
|
||||
// strongSelf.actionButtonNode.frame = animationFrame
|
||||
transition.updateFrame(node: strongSelf.actionButtonNode, frame: animationFrame)
|
||||
|
||||
strongSelf.updateIsHighlighted(transition: transition)
|
||||
|
@ -17,20 +17,6 @@ func optionsBackgroundImage(dark: Bool) -> UIImage? {
|
||||
})?.stretchableImage(withLeftCapWidth: 14, topCapHeight: 14)
|
||||
}
|
||||
|
||||
func optionsButtonImage(dark: Bool) -> UIImage? {
|
||||
return generateImage(CGSize(width: 28.0, height: 28.0), contextGenerator: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
|
||||
context.setFillColor(UIColor(rgb: dark ? 0x1c1c1e : 0x2c2c2e).cgColor)
|
||||
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
|
||||
|
||||
context.setFillColor(UIColor.white.cgColor)
|
||||
context.fillEllipse(in: CGRect(x: 6.0, y: 12.0, width: 4.0, height: 4.0))
|
||||
context.fillEllipse(in: CGRect(x: 12.0, y: 12.0, width: 4.0, height: 4.0))
|
||||
context.fillEllipse(in: CGRect(x: 18.0, y: 12.0, width: 4.0, height: 4.0))
|
||||
})
|
||||
}
|
||||
|
||||
func optionsCircleImage(dark: Bool) -> UIImage? {
|
||||
return generateImage(CGSize(width: 28.0, height: 28.0), contextGenerator: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
@ -40,6 +26,28 @@ func optionsCircleImage(dark: Bool) -> UIImage? {
|
||||
})
|
||||
}
|
||||
|
||||
func panelButtonImage(dark: Bool) -> UIImage? {
|
||||
return generateImage(CGSize(width: 38.0, height: 28.0), contextGenerator: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
|
||||
context.addPath(UIBezierPath(roundedRect: CGRect(origin: CGPoint(), size: size), cornerRadius: 14.0).cgPath)
|
||||
context.setFillColor(UIColor(rgb: dark ? 0x1c1c1e : 0x2c2c2e).cgColor)
|
||||
context.fillPath()
|
||||
|
||||
context.setFillColor(UIColor.white.cgColor)
|
||||
|
||||
if let image = UIImage(bundleImageName: "Call/PanelIcon") {
|
||||
let imageSize = image.size
|
||||
let imageRect = CGRect(origin: CGPoint(), size: imageSize)
|
||||
context.saveGState()
|
||||
context.translateBy(x: 7.0, y: 2.0)
|
||||
context.clip(to: imageRect, mask: image.cgImage!)
|
||||
context.fill(imageRect)
|
||||
context.restoreGState()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func closeButtonImage(dark: Bool) -> UIImage? {
|
||||
return generateImage(CGSize(width: 28.0, height: 28.0), contextGenerator: { size, context in
|
||||
context.clear(CGRect(origin: CGPoint(), size: size))
|
||||
@ -76,9 +84,12 @@ final class VoiceChatHeaderButton: HighlightableButtonNode {
|
||||
|
||||
var contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
|
||||
|
||||
init(context: AccountContext) {
|
||||
private let wide: Bool
|
||||
|
||||
init(context: AccountContext, wide: Bool = false) {
|
||||
self.context = context
|
||||
self.theme = context.sharedContext.currentPresentationData.with { $0 }.theme
|
||||
self.wide = wide
|
||||
|
||||
self.referenceNode = ContextReferenceContentNode()
|
||||
self.containerNode = ContextControllerSourceNode()
|
||||
@ -111,9 +122,9 @@ final class VoiceChatHeaderButton: HighlightableButtonNode {
|
||||
strongSelf.contextAction?(strongSelf.containerNode, gesture)
|
||||
}
|
||||
|
||||
self.iconNode.image = optionsButtonImage(dark: false)
|
||||
self.iconNode.image = optionsCircleImage(dark: false)
|
||||
|
||||
self.containerNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 28.0, height: 28.0))
|
||||
self.containerNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: wide ? 38.0 : 28.0, height: 28.0))
|
||||
self.referenceNode.frame = self.containerNode.bounds
|
||||
self.iconNode.frame = self.containerNode.bounds
|
||||
self.avatarNode.frame = self.containerNode.bounds
|
||||
@ -182,7 +193,7 @@ final class VoiceChatHeaderButton: HighlightableButtonNode {
|
||||
}
|
||||
|
||||
override func calculateSizeThatFits(_ constrainedSize: CGSize) -> CGSize {
|
||||
return CGSize(width: 28.0, height: 28.0)
|
||||
return CGSize(width: wide ? 38.0 : 28.0, height: 28.0)
|
||||
}
|
||||
|
||||
func onLayout() {
|
@ -29,7 +29,7 @@ private let destructiveColor: UIColor = UIColor(rgb: 0xff3b30)
|
||||
final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
private let context: AccountContext
|
||||
private let call: PresentationGroupCall
|
||||
private var currentPeer: (PeerId, String?)?
|
||||
private var currentPeer: (PeerId, String?, Bool, Bool, Bool)?
|
||||
private var currentPeerEntry: VoiceChatPeerEntry?
|
||||
|
||||
var callState: PresentationGroupCallState?
|
||||
@ -51,23 +51,29 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
private let speakingPeerDisposable = MetaDisposable()
|
||||
private let speakingAudioLevelDisposable = MetaDisposable()
|
||||
private var backdropAvatarNode: ImageNode
|
||||
private var backdropEffectView: UIVisualEffectView?
|
||||
private var avatarNode: ImageNode
|
||||
private let titleNode: ImmediateTextNode
|
||||
private let microphoneNode: VoiceChatMicrophoneNode
|
||||
|
||||
private let placeholderTextNode: ImmediateTextNode
|
||||
private let placeholderIconNode: ASImageNode
|
||||
private let placeholderButton: HighlightTrackingButtonNode
|
||||
private var placeholderButtonEffectView: UIVisualEffectView?
|
||||
private let placeholderButtonHighlightNode: ASDisplayNode
|
||||
private let placeholderButtonTextNode: ImmediateTextNode
|
||||
|
||||
private let speakingContainerNode: ASDisplayNode
|
||||
private var speakingEffectView: UIVisualEffectView?
|
||||
private let speakingAvatarNode: AvatarNode
|
||||
private let speakingTitleNode: ImmediateTextNode
|
||||
private var speakingAudioLevelView: VoiceBlobView?
|
||||
|
||||
private var validLayout: (CGSize, CGFloat, CGFloat, Bool)?
|
||||
private var validLayout: (CGSize, CGFloat, CGFloat, Bool, Bool)?
|
||||
|
||||
var tapped: (() -> Void)?
|
||||
var back: (() -> Void)?
|
||||
var togglePin: (() -> Void)?
|
||||
var switchTo: ((PeerId) -> Void)?
|
||||
var stopScreencast: (() -> Void)?
|
||||
|
||||
var controlsHidden: ((Bool) -> Void)?
|
||||
|
||||
@ -110,6 +116,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
context.drawLinearGradient(gradient, start: CGPoint(), end: CGPoint(x: 0.0, y: size.height), options: CGGradientDrawingOptions())
|
||||
}) {
|
||||
self.bottomFadeNode.backgroundColor = UIColor(patternImage: image)
|
||||
self.bottomFadeNode.view.layer.rasterizationScale = UIScreen.main.scale
|
||||
self.bottomFadeNode.view.layer.shouldRasterize = true
|
||||
}
|
||||
|
||||
self.bottomFillNode = ASDisplayNode()
|
||||
@ -124,19 +132,20 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
self.backButtonArrowNode.image = NavigationBarTheme.generateBackArrowImage(color: .white)
|
||||
self.backButtonNode = HighlightableButtonNode()
|
||||
|
||||
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
|
||||
|
||||
self.pinButtonIconNode = ASImageNode()
|
||||
self.pinButtonIconNode.displayWithoutProcessing = true
|
||||
self.pinButtonIconNode.displaysAsynchronously = false
|
||||
self.pinButtonIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pin"), color: .white)
|
||||
self.pinButtonTitleNode = ImmediateTextNode()
|
||||
self.pinButtonTitleNode.isHidden = true
|
||||
self.pinButtonTitleNode.attributedText = NSAttributedString(string: "Unpin", font: Font.regular(17.0), textColor: .white)
|
||||
self.pinButtonTitleNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_Unpin, font: Font.regular(17.0), textColor: .white)
|
||||
self.pinButtonNode = HighlightableButtonNode()
|
||||
|
||||
self.backdropAvatarNode = ImageNode()
|
||||
self.backdropAvatarNode.contentMode = .scaleAspectFill
|
||||
self.backdropAvatarNode.displaysAsynchronously = false
|
||||
self.backdropAvatarNode.isHidden = true
|
||||
|
||||
self.audioLevelNode = VoiceChatBlobNode(size: CGSize(width: 300.0, height: 300.0))
|
||||
|
||||
@ -159,34 +168,74 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
self.speakingTitleNode = ImmediateTextNode()
|
||||
self.speakingTitleNode.displaysAsynchronously = false
|
||||
|
||||
self.placeholderTextNode = ImmediateTextNode()
|
||||
self.placeholderTextNode.alpha = 0.0
|
||||
self.placeholderTextNode.maximumNumberOfLines = 2
|
||||
self.placeholderTextNode.textAlignment = .center
|
||||
|
||||
self.placeholderIconNode = ASImageNode()
|
||||
self.placeholderIconNode.alpha = 0.0
|
||||
self.placeholderIconNode.contentMode = .scaleAspectFit
|
||||
self.placeholderIconNode.displaysAsynchronously = false
|
||||
|
||||
self.placeholderButton = HighlightTrackingButtonNode()
|
||||
self.placeholderButton.alpha = 0.0
|
||||
self.placeholderButton.clipsToBounds = true
|
||||
self.placeholderButton.cornerRadius = backgroundCornerRadius
|
||||
|
||||
self.placeholderButtonHighlightNode = ASDisplayNode()
|
||||
self.placeholderButtonHighlightNode.alpha = 0.0
|
||||
self.placeholderButtonHighlightNode.backgroundColor = UIColor(white: 1.0, alpha: 0.4)
|
||||
self.placeholderButtonHighlightNode.isUserInteractionEnabled = false
|
||||
|
||||
self.placeholderButtonTextNode = ImmediateTextNode()
|
||||
self.placeholderButtonTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_StopScreenSharingShort, font: Font.semibold(17.0), textColor: .white)
|
||||
self.placeholderButtonTextNode.isUserInteractionEnabled = false
|
||||
|
||||
super.init()
|
||||
|
||||
self.clipsToBounds = true
|
||||
self.cornerRadius = backgroundCornerRadius
|
||||
|
||||
self.addSubnode(self.backgroundNode)
|
||||
self.addSubnode(self.backdropAvatarNode)
|
||||
self.addSubnode(self.topFadeNode)
|
||||
self.addSubnode(self.bottomFadeNode)
|
||||
self.addSubnode(self.bottomFillNode)
|
||||
self.addSubnode(self.backdropAvatarNode)
|
||||
self.addSubnode(self.audioLevelNode)
|
||||
self.addSubnode(self.avatarNode)
|
||||
self.addSubnode(self.titleNode)
|
||||
self.addSubnode(self.microphoneNode)
|
||||
self.addSubnode(self.headerNode)
|
||||
|
||||
self.headerNode.addSubnode(self.backButtonNode)
|
||||
self.headerNode.addSubnode(self.backButtonArrowNode)
|
||||
self.headerNode.addSubnode(self.pinButtonIconNode)
|
||||
self.headerNode.addSubnode(self.pinButtonTitleNode)
|
||||
self.headerNode.addSubnode(self.pinButtonNode)
|
||||
|
||||
self.addSubnode(self.speakingContainerNode)
|
||||
self.addSubnode(self.placeholderIconNode)
|
||||
self.addSubnode(self.placeholderTextNode)
|
||||
|
||||
self.addSubnode(self.placeholderButton)
|
||||
self.placeholderButton.addSubnode(self.placeholderButtonHighlightNode)
|
||||
self.placeholderButton.addSubnode(self.placeholderButtonTextNode)
|
||||
self.placeholderButton.highligthedChanged = { [weak self] highlighted in
|
||||
if let strongSelf = self {
|
||||
if highlighted {
|
||||
strongSelf.placeholderButtonHighlightNode.layer.removeAnimation(forKey: "opacity")
|
||||
strongSelf.placeholderButtonHighlightNode.alpha = 1.0
|
||||
} else {
|
||||
strongSelf.placeholderButtonHighlightNode.alpha = 0.0
|
||||
strongSelf.placeholderButtonHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
|
||||
}
|
||||
}
|
||||
}
|
||||
self.placeholderButton.addTarget(self, action: #selector(self.stopSharingPressed), forControlEvents: .touchUpInside)
|
||||
|
||||
self.addSubnode(self.speakingContainerNode)
|
||||
self.speakingContainerNode.addSubnode(self.speakingAvatarNode)
|
||||
self.speakingContainerNode.addSubnode(self.speakingTitleNode)
|
||||
|
||||
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
|
||||
self.backButtonNode.setTitle(presentationData.strings.Common_Back, with: Font.regular(17.0), with: .white, for: [])
|
||||
self.backButtonNode.hitTestSlop = UIEdgeInsets(top: -8.0, left: -20.0, bottom: -8.0, right: -8.0)
|
||||
self.backButtonNode.highligthedChanged = { [weak self] highlighted in
|
||||
@ -248,19 +297,12 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
self.speakingContainerNode.view.insertSubview(speakingEffectView, at: 0)
|
||||
self.speakingEffectView = speakingEffectView
|
||||
|
||||
let effect: UIVisualEffect
|
||||
if #available(iOS 13.0, *) {
|
||||
effect = UIBlurEffect(style: .systemMaterialDark)
|
||||
} else {
|
||||
effect = UIBlurEffect(style: .dark)
|
||||
}
|
||||
let backdropEffectView = UIVisualEffectView(effect: effect)
|
||||
backdropEffectView.isHidden = true
|
||||
self.view.insertSubview(backdropEffectView, aboveSubview: self.backdropAvatarNode.view)
|
||||
self.backdropEffectView = backdropEffectView
|
||||
let placeholderButtonEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .light))
|
||||
placeholderButtonEffectView.isUserInteractionEnabled = false
|
||||
self.placeholderButton.view.insertSubview(placeholderButtonEffectView, at: 0)
|
||||
self.placeholderButtonEffectView = placeholderButtonEffectView
|
||||
|
||||
self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap)))
|
||||
|
||||
self.speakingContainerNode.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.speakingTap)))
|
||||
}
|
||||
|
||||
@ -283,6 +325,10 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
self.togglePin?()
|
||||
}
|
||||
|
||||
@objc private func stopSharingPressed() {
|
||||
self.stopScreencast?()
|
||||
}
|
||||
|
||||
var animating: Bool {
|
||||
return self.animatingIn || self.animatingOut
|
||||
}
|
||||
@ -291,7 +337,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
private var appeared = false
|
||||
|
||||
func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
|
||||
guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, isLandscape) = self.validLayout else {
|
||||
guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, isLandscape, isTablet) = self.validLayout else {
|
||||
return
|
||||
}
|
||||
self.appeared = true
|
||||
@ -301,6 +347,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
self.titleNode.alpha = 0.0
|
||||
self.microphoneNode.alpha = 0.0
|
||||
self.headerNode.alpha = 0.0
|
||||
|
||||
let hasPlaceholder = !self.placeholderIconNode.alpha.isZero
|
||||
|
||||
let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
|
||||
alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 1.0)
|
||||
@ -308,6 +356,16 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
alphaTransition.updateAlpha(node: self.titleNode, alpha: 1.0)
|
||||
alphaTransition.updateAlpha(node: self.microphoneNode, alpha: 1.0)
|
||||
alphaTransition.updateAlpha(node: self.headerNode, alpha: 1.0)
|
||||
if hasPlaceholder {
|
||||
self.placeholderIconNode.alpha = 0.0
|
||||
self.placeholderTextNode.alpha = 0.0
|
||||
alphaTransition.updateAlpha(node: self.placeholderTextNode, alpha: 1.0)
|
||||
|
||||
if !self.placeholderButton.alpha.isZero {
|
||||
self.placeholderButton.alpha = 0.0
|
||||
alphaTransition.updateAlpha(node: self.placeholderButton, alpha: 1.0)
|
||||
}
|
||||
}
|
||||
|
||||
let targetFrame = self.frame
|
||||
|
||||
@ -321,26 +379,47 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
infoFrame.origin.y = targetFrame.height - infoFrame.height - (sideInset.isZero ? bottomInset : 14.0)
|
||||
transition.updateFrame(view: snapshotView, frame: infoFrame)
|
||||
}
|
||||
|
||||
|
||||
self.animatingIn = true
|
||||
let startLocalFrame = sourceNode.view.convert(sourceNode.bounds, to: self.supernode?.view)
|
||||
self.update(size: startLocalFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: .immediate)
|
||||
self.update(size: startLocalFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, force: true, transition: .immediate)
|
||||
self.frame = startLocalFrame
|
||||
self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: transition)
|
||||
self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, force: true, transition: transition)
|
||||
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
|
||||
sourceNode.alpha = 1.0
|
||||
self?.animatingIn = false
|
||||
completion()
|
||||
})
|
||||
|
||||
if hasPlaceholder, let iconSnapshotView = sourceNode.placeholderIconNode.view.snapshotView(afterScreenUpdates: false), let textSnapshotView = sourceNode.placeholderTextNode.view.snapshotView(afterScreenUpdates: false) {
|
||||
iconSnapshotView.frame = sourceNode.placeholderIconNode.frame
|
||||
self.view.addSubview(iconSnapshotView)
|
||||
textSnapshotView.frame = sourceNode.placeholderTextNode.frame
|
||||
self.view.addSubview(textSnapshotView)
|
||||
transition.updatePosition(layer: iconSnapshotView.layer, position: self.placeholderIconNode.position, completion: { [weak self, weak iconSnapshotView] _ in
|
||||
iconSnapshotView?.removeFromSuperview()
|
||||
self?.placeholderIconNode.alpha = 1.0
|
||||
})
|
||||
transition.updateTransformScale(layer: iconSnapshotView.layer, scale: 2.0)
|
||||
textSnapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak textSnapshotView] _ in
|
||||
textSnapshotView?.removeFromSuperview()
|
||||
})
|
||||
let textPosition = self.placeholderTextNode.position
|
||||
self.placeholderTextNode.position = textSnapshotView.center
|
||||
transition.updatePosition(layer: textSnapshotView.layer, position: textPosition)
|
||||
transition.updatePosition(node: self.placeholderTextNode, position: textPosition)
|
||||
}
|
||||
}
|
||||
|
||||
func animateTransitionOut(to targetNode: ASDisplayNode?, offset: CGFloat, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
|
||||
guard let (_, sideInset, bottomInset, isLandscape) = self.validLayout else {
|
||||
guard let (_, sideInset, bottomInset, isLandscape, isTablet) = self.validLayout else {
|
||||
return
|
||||
}
|
||||
|
||||
self.appeared = false
|
||||
|
||||
let hasPlaceholder = !self.placeholderIconNode.alpha.isZero
|
||||
|
||||
let alphaTransition = ContainedViewLayoutTransition.animated(duration: 0.3, curve: .easeInOut)
|
||||
if offset.isZero {
|
||||
alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 0.0)
|
||||
@ -357,9 +436,38 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
alphaTransition.updateAlpha(node: self.microphoneNode, alpha: 0.0)
|
||||
alphaTransition.updateAlpha(node: self.headerNode, alpha: 0.0)
|
||||
alphaTransition.updateAlpha(node: self.bottomFadeNode, alpha: 1.0)
|
||||
if hasPlaceholder {
|
||||
alphaTransition.updateAlpha(node: self.placeholderTextNode, alpha: 0.0)
|
||||
if !self.placeholderButton.alpha.isZero {
|
||||
self.placeholderButton.alpha = 0.0
|
||||
self.placeholderButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
|
||||
}
|
||||
}
|
||||
|
||||
let originalFrame = self.frame
|
||||
let initialFrame = originalFrame.offsetBy(dx: 0.0, dy: offset)
|
||||
guard let targetNode = targetNode as? VoiceChatTileItemNode, let _ = targetNode.item else {
|
||||
completion()
|
||||
guard let supernode = self.supernode else {
|
||||
completion()
|
||||
return
|
||||
}
|
||||
self.animatingOut = true
|
||||
self.frame = initialFrame
|
||||
if offset < 0.0 {
|
||||
let targetFrame = CGRect(origin: CGPoint(x: 0.0, y: -originalFrame.size.height), size: originalFrame.size)
|
||||
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
|
||||
self?.frame = originalFrame
|
||||
completion()
|
||||
self?.animatingOut = false
|
||||
})
|
||||
} else {
|
||||
let targetFrame = CGRect(origin: CGPoint(x: 0.0, y: supernode.frame.height), size: originalFrame.size)
|
||||
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
|
||||
self?.frame = originalFrame
|
||||
completion()
|
||||
self?.animatingOut = false
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@ -367,10 +475,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
if offset.isZero {
|
||||
targetNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.1)
|
||||
}
|
||||
|
||||
|
||||
self.animatingOut = true
|
||||
let originalFrame = self.frame
|
||||
let initialFrame = originalFrame.offsetBy(dx: 0.0, dy: offset)
|
||||
let targetFrame = targetNode.view.convert(targetNode.bounds, to: self.supernode?.view)
|
||||
|
||||
self.currentVideoNode?.keepBackdropSize = true
|
||||
@ -385,31 +491,66 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
snapshotView.frame = infoFrame
|
||||
transition.updateFrame(view: snapshotView, frame: CGRect(origin: CGPoint(), size: targetFrame.size))
|
||||
}
|
||||
|
||||
|
||||
targetNode.alpha = 0.0
|
||||
|
||||
self.frame = initialFrame
|
||||
self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, force: true, transition: transition)
|
||||
|
||||
let textPosition = self.placeholderTextNode.position
|
||||
var textTargetPosition = textPosition
|
||||
var textView: UIView?
|
||||
if hasPlaceholder, let iconSnapshotView = targetNode.placeholderIconNode.view.snapshotView(afterScreenUpdates: false), let textSnapshotView = targetNode.placeholderTextNode.view.snapshotView(afterScreenUpdates: false) {
|
||||
self.view.addSubview(iconSnapshotView)
|
||||
self.view.addSubview(textSnapshotView)
|
||||
iconSnapshotView.transform = CGAffineTransform(scaleX: 2.0, y: 2.0)
|
||||
iconSnapshotView.center = self.placeholderIconNode.position
|
||||
textSnapshotView.center = textPosition
|
||||
textTargetPosition = targetNode.placeholderTextNode.position
|
||||
|
||||
self.placeholderIconNode.alpha = 0.0
|
||||
transition.updatePosition(layer: iconSnapshotView.layer, position: targetNode.placeholderIconNode.position, completion: { [weak self, weak iconSnapshotView] _ in
|
||||
iconSnapshotView?.removeFromSuperview()
|
||||
self?.placeholderIconNode.alpha = 1.0
|
||||
})
|
||||
transition.updateTransformScale(layer: iconSnapshotView.layer, scale: 1.0)
|
||||
|
||||
textView = textSnapshotView
|
||||
textSnapshotView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, removeOnCompletion: false)
|
||||
}
|
||||
|
||||
self.update(size: targetFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, force: true, transition: transition)
|
||||
transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
|
||||
if let strongSelf = self {
|
||||
completion()
|
||||
|
||||
infoView?.removeFromSuperview()
|
||||
textView?.removeFromSuperview()
|
||||
targetNode.alpha = 1.0
|
||||
targetNode.highlightNode.layer.animateAlpha(from: 0.0, to: targetNode.highlightNode.alpha, duration: 0.2)
|
||||
strongSelf.animatingOut = false
|
||||
strongSelf.frame = originalFrame
|
||||
strongSelf.update(size: initialFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
|
||||
strongSelf.update(size: initialFrame.size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
|
||||
}
|
||||
})
|
||||
|
||||
if hasPlaceholder {
|
||||
self.placeholderTextNode.position = textPosition
|
||||
if let textSnapshotView = textView {
|
||||
transition.updatePosition(layer: textSnapshotView.layer, position: textTargetPosition)
|
||||
}
|
||||
transition.updatePosition(node: self.placeholderTextNode, position: textTargetPosition)
|
||||
}
|
||||
|
||||
self.update(speakingPeerId: nil)
|
||||
}
|
||||
|
||||
private var effectiveSpeakingPeerId: PeerId?
|
||||
private func updateSpeakingPeer() {
|
||||
guard let (_, _, _, _, isTablet) = self.validLayout else {
|
||||
return
|
||||
}
|
||||
var effectiveSpeakingPeerId = self.speakingPeerId
|
||||
if let peerId = effectiveSpeakingPeerId, self.visiblePeerIds.contains(peerId) || self.currentPeer?.0 == peerId || self.callState?.myPeerId == peerId {
|
||||
if let peerId = effectiveSpeakingPeerId, self.visiblePeerIds.contains(peerId) || self.currentPeer?.0 == peerId || self.callState?.myPeerId == peerId || isTablet {
|
||||
effectiveSpeakingPeerId = nil
|
||||
}
|
||||
guard self.effectiveSpeakingPeerId != effectiveSpeakingPeerId else {
|
||||
@ -439,8 +580,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
|
||||
strongSelf.speakingContainerNode.alpha = 0.0
|
||||
|
||||
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
|
||||
if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
|
||||
}
|
||||
|
||||
strongSelf.speakingContainerNode.alpha = 1.0
|
||||
@ -521,11 +662,11 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
func update(peerEntry: VoiceChatPeerEntry, pinned: Bool) {
|
||||
let previousPeerEntry = self.currentPeerEntry
|
||||
self.currentPeerEntry = peerEntry
|
||||
|
||||
|
||||
let peer = peerEntry.peer
|
||||
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
|
||||
if !arePeersEqual(previousPeerEntry?.peer, peerEntry.peer) {
|
||||
self.backdropAvatarNode.setSignal(peerAvatarCompleteImage(account: self.context.account, peer: peer, size: CGSize(width: 180.0, height: 180.0), round: false, font: avatarPlaceholderFont(size: 78.0), drawLetters: false))
|
||||
self.backdropAvatarNode.setSignal(peerAvatarCompleteImage(account: self.context.account, peer: peer, size: CGSize(width: 180.0, height: 180.0), round: false, font: avatarPlaceholderFont(size: 78.0), drawLetters: false, blurred: true))
|
||||
self.avatarNode.setSignal(peerAvatarCompleteImage(account: self.context.account, peer: peer, size: CGSize(width: 180.0, height: 180.0), font: avatarPlaceholderFont(size: 78.0), fullSize: true))
|
||||
}
|
||||
|
||||
@ -573,8 +714,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
titleAttributedString = updatedString
|
||||
}
|
||||
self.titleNode.attributedText = titleAttributedString
|
||||
if let (size, sideInset, bottomInset, isLandscape) = self.validLayout {
|
||||
self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
|
||||
if let (size, sideInset, bottomInset, isLandscape, isTablet) = self.validLayout {
|
||||
self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
|
||||
}
|
||||
|
||||
self.pinButtonTitleNode.isHidden = !pinned
|
||||
@ -617,15 +758,16 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
private func setAvatarHidden(_ hidden: Bool) {
|
||||
self.backdropAvatarNode.isHidden = hidden
|
||||
self.backdropEffectView?.isHidden = hidden
|
||||
self.topFadeNode.isHidden = !hidden
|
||||
self.bottomFadeNode.isHidden = !hidden
|
||||
self.bottomFillNode.isHidden = !hidden
|
||||
self.avatarNode.isHidden = hidden
|
||||
self.audioLevelNode.isHidden = hidden
|
||||
}
|
||||
|
||||
func update(peer: (peer: PeerId, endpointId: String?)?, waitForFullSize: Bool, completion: (() -> Void)? = nil) {
|
||||
func update(peer: (peer: PeerId, endpointId: String?, isMyPeer: Bool, isPresentation: Bool, isPaused: Bool)?, isReady: Bool = true, waitForFullSize: Bool, completion: (() -> Void)? = nil) {
|
||||
let previousPeer = self.currentPeer
|
||||
if previousPeer?.0 == peer?.0 && previousPeer?.1 == peer?.1 {
|
||||
if previousPeer?.0 == peer?.0 && previousPeer?.1 == peer?.1 && previousPeer?.2 == peer?.2 && previousPeer?.3 == peer?.3 && previousPeer?.4 == peer?.4 {
|
||||
completion?()
|
||||
return
|
||||
}
|
||||
@ -633,24 +775,50 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
|
||||
self.updateSpeakingPeer()
|
||||
|
||||
if let (_, endpointId) = peer {
|
||||
var isTablet = false
|
||||
if let (_, _, _, _, isTabletValue) = self.validLayout {
|
||||
isTablet = isTabletValue
|
||||
}
|
||||
|
||||
if let (_, endpointId, isMyPeer, isPresentation, isPaused) = peer {
|
||||
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
|
||||
|
||||
var showPlaceholder = false
|
||||
if isMyPeer && isPresentation {
|
||||
self.placeholderTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_YouAreSharingScreen, font: Font.semibold(15.0), textColor: .white)
|
||||
self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: isTablet ? "Call/ScreenShareTablet" : "Call/ScreenSharePhone"), color: .white)
|
||||
showPlaceholder = true
|
||||
} else if isPaused {
|
||||
self.placeholderTextNode.attributedText = NSAttributedString(string: presentationData.strings.VoiceChat_VideoPaused, font: Font.semibold(14.0), textColor: .white)
|
||||
self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pause"), color: .white)
|
||||
showPlaceholder = true
|
||||
}
|
||||
|
||||
let updatePlaceholderVisibility = {
|
||||
let peerChanged = previousPeer?.0 != peer?.0
|
||||
let transition: ContainedViewLayoutTransition = self.appeared && !peerChanged ? .animated(duration: 0.2, curve: .easeInOut) : .immediate
|
||||
transition.updateAlpha(node: self.placeholderTextNode, alpha: showPlaceholder ? 1.0 : 0.0)
|
||||
transition.updateAlpha(node: self.placeholderIconNode, alpha: showPlaceholder ? 1.0 : 0.0)
|
||||
transition.updateAlpha(node: self.placeholderButton, alpha: showPlaceholder && !isPaused ? 1.0 : 0.0)
|
||||
}
|
||||
|
||||
if endpointId != previousPeer?.1 {
|
||||
updatePlaceholderVisibility()
|
||||
if let endpointId = endpointId {
|
||||
var delayTransition = false
|
||||
if previousPeer?.0 == peer?.0 && self.appeared {
|
||||
if previousPeer?.0 == peer?.0 && previousPeer?.1 == nil && self.appeared {
|
||||
delayTransition = true
|
||||
}
|
||||
if !delayTransition {
|
||||
self.setAvatarHidden(true)
|
||||
}
|
||||
|
||||
self.call.makeIncomingVideoView(endpointId: endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
|
||||
Queue.mainQueue().async {
|
||||
guard let strongSelf = self, let videoView = videoView else {
|
||||
return
|
||||
}
|
||||
|
||||
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView)
|
||||
let videoNode = GroupVideoNode(videoView: videoView, backdropVideoView: backdropVideoView, disabledText: presentationData.strings.VoiceChat_VideoPaused)
|
||||
videoNode.tapped = { [weak self] in
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
@ -675,12 +843,18 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
strongSelf.controlsHidden?(false)
|
||||
strongSelf.setControlsHidden(false, animated: true)
|
||||
}
|
||||
videoNode.updateIsBlurred(isBlurred: isPaused, light: true, animated: false)
|
||||
videoNode.isUserInteractionEnabled = true
|
||||
let previousVideoNode = strongSelf.currentVideoNode
|
||||
strongSelf.currentVideoNode = videoNode
|
||||
strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backgroundNode)
|
||||
strongSelf.insertSubnode(videoNode, aboveSubnode: strongSelf.backdropAvatarNode)
|
||||
|
||||
if delayTransition {
|
||||
if !isReady {
|
||||
videoNode.alpha = 0.0
|
||||
strongSelf.topFadeNode.isHidden = true
|
||||
strongSelf.bottomFadeNode.isHidden = true
|
||||
strongSelf.bottomFillNode.isHidden = true
|
||||
} else if delayTransition {
|
||||
videoNode.alpha = 0.0
|
||||
}
|
||||
if waitForFullSize {
|
||||
@ -694,17 +868,22 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
|> take(1)
|
||||
|> deliverOnMainQueue).start(next: { [weak self] _ in
|
||||
Queue.mainQueue().after(0.1) {
|
||||
if let strongSelf = self {
|
||||
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
|
||||
}
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
|
||||
if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
|
||||
}
|
||||
|
||||
Queue.mainQueue().after(0.02) {
|
||||
completion?()
|
||||
}
|
||||
|
||||
if delayTransition {
|
||||
if videoNode.alpha.isZero {
|
||||
strongSelf.topFadeNode.isHidden = true
|
||||
strongSelf.bottomFadeNode.isHidden = true
|
||||
strongSelf.bottomFillNode.isHidden = true
|
||||
if let videoNode = strongSelf.currentVideoNode {
|
||||
videoNode.alpha = 1.0
|
||||
videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
|
||||
@ -726,8 +905,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
|
||||
if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
|
||||
}
|
||||
if let previousVideoNode = previousVideoNode {
|
||||
previousVideoNode.removeFromSupernode()
|
||||
@ -738,28 +917,55 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
}
|
||||
})
|
||||
} else {
|
||||
self.setAvatarHidden(false)
|
||||
if self.appeared {
|
||||
if let currentVideoNode = self.currentVideoNode {
|
||||
currentVideoNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak currentVideoNode] _ in
|
||||
currentVideoNode?.removeFromSupernode()
|
||||
})
|
||||
}
|
||||
} else {
|
||||
if let currentVideoNode = self.currentVideoNode {
|
||||
currentVideoNode.removeFromSupernode()
|
||||
self.currentVideoNode = nil
|
||||
}
|
||||
if let currentVideoNode = self.currentVideoNode {
|
||||
currentVideoNode.removeFromSupernode()
|
||||
self.currentVideoNode = nil
|
||||
}
|
||||
self.setAvatarHidden(false)
|
||||
completion?()
|
||||
}
|
||||
} else {
|
||||
if let currentVideoNode = self.currentVideoNode {
|
||||
currentVideoNode.removeFromSupernode()
|
||||
self.currentVideoNode = nil
|
||||
}
|
||||
self.setAvatarHidden(endpointId != nil)
|
||||
completion?()
|
||||
if waitForFullSize && !isReady && !isPaused, let videoNode = self.currentVideoNode {
|
||||
self.videoReadyDisposable.set((videoNode.ready
|
||||
|> filter { $0 }
|
||||
|> take(1)
|
||||
|> deliverOnMainQueue).start(next: { [weak self] _ in
|
||||
Queue.mainQueue().after(0.1) {
|
||||
guard let strongSelf = self else {
|
||||
return
|
||||
}
|
||||
|
||||
if let (size, sideInset, bottomInset, isLandscape, isTablet) = strongSelf.validLayout {
|
||||
strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, isTablet: isTablet, transition: .immediate)
|
||||
}
|
||||
|
||||
Queue.mainQueue().after(0.02) {
|
||||
completion?()
|
||||
}
|
||||
|
||||
updatePlaceholderVisibility()
|
||||
if videoNode.alpha.isZero {
|
||||
videoNode.updateIsBlurred(isBlurred: isPaused, light: true, animated: false)
|
||||
strongSelf.topFadeNode.isHidden = true
|
||||
strongSelf.bottomFadeNode.isHidden = true
|
||||
strongSelf.bottomFillNode.isHidden = true
|
||||
if let videoNode = strongSelf.currentVideoNode {
|
||||
videoNode.alpha = 1.0
|
||||
videoNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, completion: { [weak self] _ in
|
||||
if let strongSelf = self {
|
||||
strongSelf.setAvatarHidden(true)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}))
|
||||
} else {
|
||||
updatePlaceholderVisibility()
|
||||
self.currentVideoNode?.updateIsBlurred(isBlurred: isPaused, light: true, animated: true)
|
||||
completion?()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.videoReadyDisposable.set(nil)
|
||||
@ -782,8 +988,8 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
transition.updateAlpha(node: self.bottomFillNode, alpha: hidden ? 0.0 : 1.0, delay: delay)
|
||||
}
|
||||
|
||||
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, isLandscape: Bool, force: Bool = false, transition: ContainedViewLayoutTransition) {
|
||||
self.validLayout = (size, sideInset, bottomInset, isLandscape)
|
||||
func update(size: CGSize, sideInset: CGFloat, bottomInset: CGFloat, isLandscape: Bool, isTablet: Bool, force: Bool = false, transition: ContainedViewLayoutTransition) {
|
||||
self.validLayout = (size, sideInset, bottomInset, isLandscape, isTablet)
|
||||
|
||||
if self.animating && !force {
|
||||
return
|
||||
@ -813,9 +1019,6 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
|
||||
transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||
transition.updateFrame(node: self.backdropAvatarNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||
if let backdropEffectView = self.backdropEffectView {
|
||||
transition.updateFrame(view: backdropEffectView, frame: CGRect(origin: CGPoint(), size: size))
|
||||
}
|
||||
|
||||
let avatarSize = CGSize(width: 180.0, height: 180.0)
|
||||
let avatarFrame = CGRect(origin: CGPoint(x: (size.width - avatarSize.width) / 2.0, y: (size.height - avatarSize.height) / 2.0), size: avatarSize)
|
||||
@ -823,7 +1026,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
transition.updateFrame(node: self.audioLevelNode, frame: avatarFrame.insetBy(dx: -60.0, dy: -60.0))
|
||||
|
||||
let animationSize = CGSize(width: 36.0, height: 36.0)
|
||||
let titleSize = self.titleNode.updateLayout(size)
|
||||
let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - sideInset * 2.0 - 24.0 - animationSize.width, height: size.height))
|
||||
transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: sideInset + 12.0 + animationSize.width, y: size.height - bottomInset - titleSize.height - 16.0), size: titleSize))
|
||||
|
||||
transition.updateFrame(node: self.microphoneNode, frame: CGRect(origin: CGPoint(x: sideInset + 7.0, y: size.height - bottomInset - animationSize.height - 6.0), size: animationSize))
|
||||
@ -860,6 +1063,23 @@ final class VoiceChatMainStageNode: ASDisplayNode {
|
||||
self.speakingAvatarNode.frame = CGRect(origin: CGPoint(x: 4.0, y: 4.0), size: speakingAvatarSize)
|
||||
self.speakingTitleNode.frame = CGRect(origin: CGPoint(x: 4.0 + speakingAvatarSize.width + 14.0, y: floorToScreenPixels((38.0 - speakingTitleSize.height) / 2.0)), size: speakingTitleSize)
|
||||
transition.updateFrame(node: self.speakingContainerNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - speakingContainerSize.width) / 2.0), y: 46.0), size: speakingContainerSize))
|
||||
|
||||
let placeholderTextSize = self.placeholderTextNode.updateLayout(CGSize(width: size.width - 100.0, height: 100.0))
|
||||
transition.updateFrame(node: self.placeholderTextNode, frame: CGRect(origin: CGPoint(x: floor((size.width - placeholderTextSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) + 10.0), size: placeholderTextSize))
|
||||
if let imageSize = self.placeholderIconNode.image?.size {
|
||||
transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) - imageSize.height - 8.0), size: imageSize))
|
||||
}
|
||||
|
||||
let placeholderButtonTextSize = self.placeholderButtonTextNode.updateLayout(CGSize(width: 240.0, height: 100.0))
|
||||
let placeholderButtonSize = CGSize(width: placeholderButtonTextSize.width + 60.0, height: 52.0)
|
||||
transition.updateFrame(node: self.placeholderButton, frame: CGRect(origin: CGPoint(x: floor((size.width - placeholderButtonSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) + 10.0 + placeholderTextSize.height + 30.0), size: placeholderButtonSize))
|
||||
self.placeholderButtonEffectView?.frame = CGRect(origin: CGPoint(), size: placeholderButtonSize)
|
||||
self.placeholderButtonHighlightNode.frame = CGRect(origin: CGPoint(), size: placeholderButtonSize)
|
||||
self.placeholderButtonTextNode.frame = CGRect(origin: CGPoint(x: floorToScreenPixels((placeholderButtonSize.width - placeholderButtonTextSize.width) / 2.0), y: floorToScreenPixels((placeholderButtonSize.height - placeholderButtonTextSize.height) / 2.0)), size: placeholderButtonTextSize)
|
||||
|
||||
if let imageSize = self.placeholderIconNode.image?.size {
|
||||
transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) - imageSize.height - 8.0), size: imageSize))
|
||||
}
|
||||
}
|
||||
|
||||
func flipVideoIfNeeded() {
|
||||
|
@ -210,7 +210,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
|
||||
transition.updateCornerRadius(node: self.backgroundImageNode, cornerRadius: 0.0)
|
||||
|
||||
let initialRect = sourceNode.frame
|
||||
let initialRect = sourceRect
|
||||
let initialScale: CGFloat = sourceRect.width / targetRect.width
|
||||
|
||||
let targetSize = CGSize(width: targetRect.size.width, height: targetRect.size.width)
|
||||
@ -254,6 +254,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
|
||||
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
|
||||
if let strongSelf = self {
|
||||
strongSelf.avatarListNode.updateCustomItemsOnlySynchronously = false
|
||||
strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
|
||||
}
|
||||
})
|
||||
@ -268,6 +269,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
|
||||
self.avatarListNode.shadowNode.frame = CGRect(x: 0.0, y: 0.0, width: targetRect.width, height: 44.0)
|
||||
|
||||
self.avatarListNode.updateCustomItemsOnlySynchronously = true
|
||||
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: self.customNode, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)
|
||||
|
||||
let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
|
||||
@ -334,6 +336,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
self.avatarListWrapperNode.layer.animateSpring(from: initialScale as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
|
||||
self.avatarListWrapperNode.layer.animateSpring(from: NSValue(cgPoint: initialRect.center), to: NSValue(cgPoint: self.avatarListWrapperNode.position), keyPath: "position", duration: springDuration, initialVelocity: 0.0, damping: springDamping, completion: { [weak self] _ in
|
||||
if let strongSelf = self {
|
||||
strongSelf.avatarListNode.updateCustomItemsOnlySynchronously = false
|
||||
// strongSelf.avatarListNode.currentItemNode?.addSubnode(sourceNode.videoContainerNode)
|
||||
}
|
||||
})
|
||||
@ -348,6 +351,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
self.avatarListNode.stripContainerNode.frame = CGRect(x: 0.0, y: 13.0, width: targetRect.width, height: 2.0)
|
||||
self.avatarListNode.shadowNode.frame = CGRect(x: 0.0, y: 0.0, width: targetRect.width, height: 44.0)
|
||||
|
||||
self.avatarListNode.updateCustomItemsOnlySynchronously = true
|
||||
self.avatarListNode.update(size: targetSize, peer: self.peer, customNode: nil, additionalEntry: self.additionalEntry, isExpanded: true, transition: .immediate)
|
||||
|
||||
let backgroundTargetRect = CGRect(x: 0.0, y: targetSize.height - backgroundCornerRadius * 2.0, width: targetRect.width, height: targetRect.height - targetSize.height + backgroundCornerRadius * 2.0)
|
||||
@ -362,7 +366,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
self.appeared = true
|
||||
}
|
||||
|
||||
func animateOut(to targetNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition) {
|
||||
func animateOut(to targetNode: ASDisplayNode, targetRect: CGRect, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void = {}) {
|
||||
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
|
||||
let springDuration: Double = 0.3
|
||||
let springDamping: CGFloat = 1000.0
|
||||
@ -383,12 +387,13 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
if let targetNode = targetNode {
|
||||
targetNode.contentNode.insertSubnode(targetNode.videoContainerNode, aboveSubnode: targetNode.backgroundNode)
|
||||
}
|
||||
completion()
|
||||
self?.removeFromSupernode()
|
||||
})
|
||||
|
||||
radiusTransition.updateCornerRadius(node: self.avatarListContainerNode, cornerRadius: backgroundCornerRadius)
|
||||
|
||||
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: false) {
|
||||
if let snapshotView = targetNode.infoNode.view.snapshotView(afterScreenUpdates: true) {
|
||||
self.view.insertSubview(snapshotView, aboveSubview: targetNode.videoContainerNode.view)
|
||||
let snapshotFrame = snapshotView.frame
|
||||
snapshotView.frame = CGRect(origin: CGPoint(x: 0.0, y: initialSize.width - snapshotView.frame.size.height), size: snapshotView.frame.size)
|
||||
@ -439,6 +444,7 @@ final class VoiceChatPeerProfileNode: ASDisplayNode {
|
||||
if let targetNode = targetNode {
|
||||
targetNode.offsetContainerNode.insertSubnode(targetNode.videoContainerNode, at: 0)
|
||||
}
|
||||
completion()
|
||||
self?.removeFromSupernode()
|
||||
})
|
||||
|
||||
|
@ -8,6 +8,12 @@ import AccountContext
|
||||
private let tileSpacing: CGFloat = 4.0
|
||||
let tileHeight: CGFloat = 180.0
|
||||
|
||||
enum VoiceChatTileLayoutMode {
|
||||
case pairs
|
||||
case rows
|
||||
case grid
|
||||
}
|
||||
|
||||
final class VoiceChatTileGridNode: ASDisplayNode {
|
||||
private let context: AccountContext
|
||||
|
||||
@ -17,6 +23,10 @@ final class VoiceChatTileGridNode: ASDisplayNode {
|
||||
|
||||
private var absoluteLocation: (CGRect, CGSize)?
|
||||
|
||||
var tileNodes: [VoiceChatTileItemNode] {
|
||||
return Array(self.itemNodes.values)
|
||||
}
|
||||
|
||||
init(context: AccountContext) {
|
||||
self.context = context
|
||||
|
||||
@ -25,6 +35,14 @@ final class VoiceChatTileGridNode: ASDisplayNode {
|
||||
self.clipsToBounds = true
|
||||
}
|
||||
|
||||
var visiblity = true {
|
||||
didSet {
|
||||
for (_, tileNode) in self.itemNodes {
|
||||
tileNode.visiblity = self.visiblity
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func updateAbsoluteRect(_ rect: CGRect, within containerSize: CGSize) {
|
||||
self.absoluteLocation = (rect, containerSize)
|
||||
for itemNode in self.itemNodes.values {
|
||||
@ -35,32 +53,68 @@ final class VoiceChatTileGridNode: ASDisplayNode {
|
||||
}
|
||||
}
|
||||
|
||||
func update(size: CGSize, items: [VoiceChatTileItem], transition: ContainedViewLayoutTransition) -> CGSize {
|
||||
func update(size: CGSize, layoutMode: VoiceChatTileLayoutMode, items: [VoiceChatTileItem], transition: ContainedViewLayoutTransition) -> CGSize {
|
||||
self.items = items
|
||||
|
||||
var validIds: [String] = []
|
||||
|
||||
let halfWidth = floorToScreenPixels((size.width - tileSpacing) / 2.0)
|
||||
let lastItemIsWide = items.count % 2 != 0
|
||||
|
||||
let colsCount: CGFloat
|
||||
if case .grid = layoutMode {
|
||||
if items.count < 3 {
|
||||
colsCount = 1
|
||||
} else if items.count < 5 {
|
||||
colsCount = 2
|
||||
} else {
|
||||
colsCount = 3
|
||||
}
|
||||
} else {
|
||||
colsCount = 2
|
||||
}
|
||||
let rowsCount = ceil(CGFloat(items.count) / colsCount)
|
||||
|
||||
let genericItemWidth = floorToScreenPixels((size.width - tileSpacing * (colsCount - 1)) / colsCount)
|
||||
let lastRowItemsAreWide: Bool
|
||||
let lastRowItemWidth: CGFloat
|
||||
if case .grid = layoutMode {
|
||||
lastRowItemsAreWide = [1, 2].contains(items.count) || items.count % Int(colsCount) != 0
|
||||
var lastRowItemsCount = CGFloat(items.count % Int(colsCount))
|
||||
if lastRowItemsCount.isZero {
|
||||
lastRowItemsCount = colsCount
|
||||
}
|
||||
lastRowItemWidth = floorToScreenPixels((size.width - tileSpacing * (lastRowItemsCount - 1)) / lastRowItemsCount)
|
||||
} else {
|
||||
lastRowItemsAreWide = items.count == 1 || items.count % Int(colsCount) != 0
|
||||
lastRowItemWidth = size.width
|
||||
}
|
||||
|
||||
let isFirstTime = self.isFirstTime
|
||||
if isFirstTime {
|
||||
self.isFirstTime = false
|
||||
}
|
||||
|
||||
let availableWidth = min(size.width, size.height)
|
||||
var availableWidth = min(size.width, size.height)
|
||||
var itemHeight = tileHeight
|
||||
if case .grid = layoutMode {
|
||||
itemHeight = size.height / rowsCount - (tileSpacing * (rowsCount - 1))
|
||||
}
|
||||
|
||||
for i in 0 ..< self.items.count {
|
||||
let item = self.items[i]
|
||||
let isLast = i == self.items.count - 1
|
||||
let col = CGFloat(i % Int(colsCount))
|
||||
let row = floor(CGFloat(i) / colsCount)
|
||||
let isLastRow = row == (rowsCount - 1)
|
||||
|
||||
let rowItemWidth = isLastRow && lastRowItemsAreWide ? lastRowItemWidth : genericItemWidth
|
||||
let itemSize = CGSize(
|
||||
width: isLast && lastItemIsWide ? size.width : halfWidth,
|
||||
height: tileHeight
|
||||
width: rowItemWidth,
|
||||
height: itemHeight
|
||||
)
|
||||
let col = CGFloat(i % 2)
|
||||
let row = floor(CGFloat(i) / 2.0)
|
||||
let itemFrame = CGRect(origin: CGPoint(x: col * (halfWidth + tileSpacing), y: row * (tileHeight + tileSpacing)), size: itemSize)
|
||||
|
||||
if case .grid = layoutMode {
|
||||
availableWidth = rowItemWidth
|
||||
}
|
||||
|
||||
let itemFrame = CGRect(origin: CGPoint(x: col * (rowItemWidth + tileSpacing), y: row * (itemHeight + tileSpacing)), size: itemSize)
|
||||
|
||||
validIds.append(item.id)
|
||||
var itemNode: VoiceChatTileItemNode?
|
||||
@ -77,6 +131,7 @@ final class VoiceChatTileGridNode: ASDisplayNode {
|
||||
self.addSubnode(addedItemNode)
|
||||
}
|
||||
if let itemNode = itemNode {
|
||||
itemNode.visiblity = self.visiblity
|
||||
if wasAdded {
|
||||
itemNode.frame = itemFrame
|
||||
if !isFirstTime {
|
||||
@ -112,18 +167,20 @@ final class VoiceChatTileGridNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
let rowCount = ceil(CGFloat(self.items.count) / 2.0)
|
||||
return CGSize(width: size.width, height: rowCount * (tileHeight + tileSpacing))
|
||||
return CGSize(width: size.width, height: rowCount * (itemHeight + tileSpacing))
|
||||
}
|
||||
}
|
||||
|
||||
final class VoiceChatTilesGridItem: ListViewItem {
|
||||
let context: AccountContext
|
||||
let tiles: [VoiceChatTileItem]
|
||||
let layoutMode: VoiceChatTileLayoutMode
|
||||
let getIsExpanded: () -> Bool
|
||||
|
||||
init(context: AccountContext, tiles: [VoiceChatTileItem], getIsExpanded: @escaping () -> Bool) {
|
||||
init(context: AccountContext, tiles: [VoiceChatTileItem], layoutMode: VoiceChatTileLayoutMode, getIsExpanded: @escaping () -> Bool) {
|
||||
self.context = context
|
||||
self.tiles = tiles
|
||||
self.layoutMode = layoutMode
|
||||
self.getIsExpanded = getIsExpanded
|
||||
}
|
||||
|
||||
@ -227,6 +284,7 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
|
||||
strongSelf.cornersNode.image = decorationCornersImage(top: true, bottom: false, dark: item.getIsExpanded())
|
||||
|
||||
tileGridNode = VoiceChatTileGridNode(context: item.context)
|
||||
tileGridNode.visiblity = strongSelf.gridVisiblity
|
||||
strongSelf.addSubnode(tileGridNode)
|
||||
strongSelf.tileGridNode = tileGridNode
|
||||
}
|
||||
@ -237,7 +295,7 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
|
||||
}
|
||||
|
||||
let transition: ContainedViewLayoutTransition = currentItem == nil ? .immediate : .animated(duration: 0.3, curve: .easeInOut)
|
||||
let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: params.availableHeight), items: item.tiles, transition: transition)
|
||||
let tileGridSize = tileGridNode.update(size: CGSize(width: params.width - params.leftInset - params.rightInset, height: params.availableHeight), layoutMode: item.layoutMode, items: item.tiles, transition: transition)
|
||||
if currentItem == nil {
|
||||
tileGridNode.frame = CGRect(x: params.leftInset, y: 0.0, width: tileGridSize.width, height: tileGridSize.height)
|
||||
strongSelf.backgroundNode.frame = tileGridNode.frame
|
||||
@ -256,4 +314,16 @@ final class VoiceChatTilesGridItemNode: ListViewItemNode {
|
||||
self.absoluteLocation = (rect, containerSize)
|
||||
self.tileGridNode?.updateAbsoluteRect(rect, within: containerSize)
|
||||
}
|
||||
|
||||
var gridVisiblity: Bool = true {
|
||||
didSet {
|
||||
self.tileGridNode?.visiblity = self.gridVisiblity
|
||||
}
|
||||
}
|
||||
|
||||
func snapshotForDismissal() {
|
||||
if let snapshotView = self.tileGridNode?.view.snapshotView(afterScreenUpdates: false) {
|
||||
self.tileGridNode?.view.addSubview(snapshotView)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -27,32 +27,42 @@ final class VoiceChatTileItem: Equatable {
|
||||
let peer: Peer
|
||||
let videoEndpointId: String
|
||||
let videoReady: Bool
|
||||
let videoTimeouted: Bool
|
||||
let isPaused: Bool
|
||||
let isOwnScreencast: Bool
|
||||
let strings: PresentationStrings
|
||||
let nameDisplayOrder: PresentationPersonNameOrder
|
||||
let icon: Icon
|
||||
let text: VoiceChatParticipantItem.ParticipantText
|
||||
let additionalText: VoiceChatParticipantItem.ParticipantText?
|
||||
let speaking: Bool
|
||||
let secondary: Bool
|
||||
let isTablet: Bool
|
||||
let action: () -> Void
|
||||
let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
|
||||
let getVideo: () -> GroupVideoNode?
|
||||
let getVideo: (GroupVideoNode.Position) -> GroupVideoNode?
|
||||
let getAudioLevel: (() -> Signal<Float, NoError>)?
|
||||
|
||||
var id: String {
|
||||
return self.videoEndpointId
|
||||
}
|
||||
|
||||
init(account: Account, peer: Peer, videoEndpointId: String, videoReady: Bool, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, additionalText: VoiceChatParticipantItem.ParticipantText?, action: @escaping () -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?, getVideo: @escaping () -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
|
||||
init(account: Account, peer: Peer, videoEndpointId: String, videoReady: Bool, videoTimeouted: Bool, isPaused: Bool, isOwnScreencast: Bool, strings: PresentationStrings, nameDisplayOrder: PresentationPersonNameOrder, speaking: Bool, secondary: Bool, isTablet: Bool, icon: Icon, text: VoiceChatParticipantItem.ParticipantText, additionalText: VoiceChatParticipantItem.ParticipantText?, action: @escaping () -> Void, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?, getVideo: @escaping (GroupVideoNode.Position) -> GroupVideoNode?, getAudioLevel: (() -> Signal<Float, NoError>)?) {
|
||||
self.account = account
|
||||
self.peer = peer
|
||||
self.videoEndpointId = videoEndpointId
|
||||
self.videoReady = videoReady
|
||||
self.videoTimeouted = videoTimeouted
|
||||
self.isPaused = isPaused
|
||||
self.isOwnScreencast = isOwnScreencast
|
||||
self.strings = strings
|
||||
self.nameDisplayOrder = nameDisplayOrder
|
||||
self.icon = icon
|
||||
self.text = text
|
||||
self.additionalText = additionalText
|
||||
self.speaking = speaking
|
||||
self.secondary = secondary
|
||||
self.isTablet = isTablet
|
||||
self.action = action
|
||||
self.contextAction = contextAction
|
||||
self.getVideo = getVideo
|
||||
@ -69,6 +79,15 @@ final class VoiceChatTileItem: Equatable {
|
||||
if lhs.videoReady != rhs.videoReady {
|
||||
return false
|
||||
}
|
||||
if lhs.videoTimeouted != rhs.videoTimeouted {
|
||||
return false
|
||||
}
|
||||
if lhs.isPaused != rhs.isPaused {
|
||||
return false
|
||||
}
|
||||
if lhs.isOwnScreencast != rhs.isOwnScreencast {
|
||||
return false
|
||||
}
|
||||
if lhs.icon != rhs.icon {
|
||||
return false
|
||||
}
|
||||
@ -81,6 +100,9 @@ final class VoiceChatTileItem: Equatable {
|
||||
if lhs.speaking != rhs.speaking {
|
||||
return false
|
||||
}
|
||||
if lhs.secondary != rhs.secondary {
|
||||
return false
|
||||
}
|
||||
if lhs.icon != rhs.icon {
|
||||
return false
|
||||
}
|
||||
@ -121,6 +143,9 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
var highlightNode: VoiceChatTileHighlightNode
|
||||
private let statusNode: VoiceChatParticipantStatusNode
|
||||
|
||||
let placeholderTextNode: ImmediateTextNode
|
||||
let placeholderIconNode: ASImageNode
|
||||
|
||||
private var profileNode: VoiceChatPeerProfileNode?
|
||||
private var extractedRect: CGRect?
|
||||
private var nonExtractedRect: CGRect?
|
||||
@ -164,9 +189,17 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
self.highlightNode.alpha = 0.0
|
||||
self.highlightNode.updateGlowAndGradientAnimations(type: .speaking)
|
||||
|
||||
super.init()
|
||||
self.placeholderTextNode = ImmediateTextNode()
|
||||
self.placeholderTextNode.alpha = 0.0
|
||||
self.placeholderTextNode.maximumNumberOfLines = 2
|
||||
self.placeholderTextNode.textAlignment = .center
|
||||
|
||||
self.clipsToBounds = true
|
||||
self.placeholderIconNode = ASImageNode()
|
||||
self.placeholderIconNode.alpha = 0.0
|
||||
self.placeholderIconNode.contentMode = .scaleAspectFit
|
||||
self.placeholderIconNode.displaysAsynchronously = false
|
||||
|
||||
super.init()
|
||||
|
||||
self.containerNode.addSubnode(self.contextSourceNode)
|
||||
self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
|
||||
@ -178,6 +211,8 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
self.contentNode.addSubnode(self.fadeNode)
|
||||
self.contentNode.addSubnode(self.infoNode)
|
||||
self.infoNode.addSubnode(self.titleNode)
|
||||
self.contentNode.addSubnode(self.placeholderTextNode)
|
||||
self.contentNode.addSubnode(self.placeholderIconNode)
|
||||
self.contentNode.addSubnode(self.highlightNode)
|
||||
|
||||
self.containerNode.shouldBegin = { [weak self] location in
|
||||
@ -227,15 +262,22 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
}
|
||||
self.isExtracted = isExtracted
|
||||
|
||||
let springDuration: Double = 0.42
|
||||
let springDamping: CGFloat = 124.0
|
||||
if isExtracted {
|
||||
let profileNode = VoiceChatPeerProfileNode(context: self.context, size: extractedRect.size, peer: item.peer, text: item.text, customNode: self.videoContainerNode, additionalEntry: .single(nil), requestDismiss: { [weak self] in
|
||||
self?.contextSourceNode.requestDismiss?()
|
||||
})
|
||||
profileNode.frame = CGRect(origin: CGPoint(), size: extractedRect.size)
|
||||
profileNode.frame = CGRect(origin: CGPoint(), size: self.bounds.size)
|
||||
self.profileNode = profileNode
|
||||
self.contextSourceNode.contentNode.addSubnode(profileNode)
|
||||
|
||||
profileNode.animateIn(from: self, targetRect: extractedRect, transition: transition)
|
||||
var appearenceTransition = transition
|
||||
if transition.isAnimated {
|
||||
appearenceTransition = .animated(duration: springDuration, curve: .customSpring(damping: springDamping, initialVelocity: 0.0))
|
||||
}
|
||||
appearenceTransition.updateFrame(node: profileNode, frame: extractedRect)
|
||||
|
||||
self.contextSourceNode.contentNode.customHitTest = { [weak self] point in
|
||||
if let strongSelf = self, let profileNode = strongSelf.profileNode {
|
||||
@ -245,9 +287,28 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
self.backgroundNode.isHidden = true
|
||||
self.fadeNode.isHidden = true
|
||||
self.infoNode.isHidden = true
|
||||
self.highlightNode.isHidden = true
|
||||
} else if let profileNode = self.profileNode {
|
||||
self.profileNode = nil
|
||||
profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition)
|
||||
|
||||
self.infoNode.isHidden = false
|
||||
profileNode.animateOut(to: self, targetRect: nonExtractedRect, transition: transition, completion: { [weak self] in
|
||||
if let strongSelf = self {
|
||||
strongSelf.backgroundNode.isHidden = false
|
||||
strongSelf.fadeNode.isHidden = false
|
||||
strongSelf.highlightNode.isHidden = false
|
||||
}
|
||||
})
|
||||
|
||||
var appearenceTransition = transition
|
||||
if transition.isAnimated {
|
||||
appearenceTransition = .animated(duration: 0.2, curve: .easeInOut)
|
||||
}
|
||||
appearenceTransition.updateFrame(node: profileNode, frame: nonExtractedRect)
|
||||
|
||||
self.contextSourceNode.contentNode.customHitTest = nil
|
||||
}
|
||||
@ -259,8 +320,23 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
if let shimmerNode = self.shimmerNode {
|
||||
shimmerNode.updateAbsoluteRect(rect, within: containerSize)
|
||||
}
|
||||
let isVisible = rect.maxY >= 0.0 && rect.minY <= containerSize.height
|
||||
self.videoNode?.updateIsEnabled(isVisible)
|
||||
self.updateIsEnabled()
|
||||
}
|
||||
|
||||
var visiblity = true {
|
||||
didSet {
|
||||
self.updateIsEnabled()
|
||||
}
|
||||
}
|
||||
|
||||
func updateIsEnabled() {
|
||||
guard let (rect, containerSize) = self.absoluteLocation else {
|
||||
return
|
||||
}
|
||||
let isVisibleInContainer = rect.maxY >= 0.0 && rect.minY <= containerSize.height
|
||||
if let videoNode = self.videoNode, videoNode.supernode === self.videoContainerNode {
|
||||
videoNode.updateIsEnabled(self.visiblity && isVisibleInContainer)
|
||||
}
|
||||
}
|
||||
|
||||
func update(size: CGSize, availableWidth: CGFloat, item: VoiceChatTileItem, transition: ContainedViewLayoutTransition) {
|
||||
@ -270,10 +346,12 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
|
||||
self.validLayout = (size, availableWidth)
|
||||
|
||||
if !item.videoReady {
|
||||
if !item.videoReady || item.isOwnScreencast {
|
||||
let shimmerNode: VoiceChatTileShimmeringNode
|
||||
let shimmerTransition: ContainedViewLayoutTransition
|
||||
if let current = self.shimmerNode {
|
||||
shimmerNode = current
|
||||
shimmerTransition = transition
|
||||
} else {
|
||||
shimmerNode = VoiceChatTileShimmeringNode(account: item.account, peer: item.peer)
|
||||
self.contentNode.insertSubnode(shimmerNode, aboveSubnode: self.fadeNode)
|
||||
@ -282,9 +360,10 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
if let (rect, containerSize) = self.absoluteLocation {
|
||||
shimmerNode.updateAbsoluteRect(rect, within: containerSize)
|
||||
}
|
||||
shimmerTransition = .immediate
|
||||
}
|
||||
transition.updateFrame(node: shimmerNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||
shimmerNode.update(shimmeringColor: UIColor.white, size: size, transition: transition)
|
||||
shimmerTransition.updateFrame(node: shimmerNode, frame: CGRect(origin: CGPoint(), size: size))
|
||||
shimmerNode.update(shimmeringColor: UIColor.white, shimmering: !item.isOwnScreencast && !item.videoTimeouted && !item.isPaused, size: size, transition: shimmerTransition)
|
||||
} else if let shimmerNode = self.shimmerNode {
|
||||
self.shimmerNode = nil
|
||||
shimmerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak shimmerNode] _ in
|
||||
@ -292,11 +371,14 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
})
|
||||
}
|
||||
|
||||
var nodeToAnimateIn: ASDisplayNode?
|
||||
var placeholderAppeared = false
|
||||
|
||||
var itemTransition = transition
|
||||
if self.item != item {
|
||||
let previousItem = self.item
|
||||
self.item = item
|
||||
|
||||
|
||||
if let getAudioLevel = item.getAudioLevel {
|
||||
self.audioLevelDisposable.set((getAudioLevel()
|
||||
|> deliverOnMainQueue).start(next: { [weak self] value in
|
||||
@ -316,13 +398,31 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
current.removeFromSupernode()
|
||||
}
|
||||
|
||||
if let videoNode = item.getVideo() {
|
||||
if let videoNode = item.getVideo(item.secondary ? .list : .tile) {
|
||||
itemTransition = .immediate
|
||||
self.videoNode = videoNode
|
||||
self.videoContainerNode.addSubnode(videoNode)
|
||||
self.updateIsEnabled()
|
||||
}
|
||||
}
|
||||
|
||||
self.videoNode?.updateIsBlurred(isBlurred: item.isPaused, light: true)
|
||||
|
||||
var showPlaceholder = false
|
||||
if item.isOwnScreencast {
|
||||
self.placeholderTextNode.attributedText = NSAttributedString(string: item.strings.VoiceChat_YouAreSharingScreen, font: Font.semibold(13.0), textColor: .white)
|
||||
self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: item.isTablet ? "Call/ScreenShareTablet" : "Call/ScreenSharePhone"), color: .white)
|
||||
showPlaceholder = true
|
||||
} else if item.isPaused {
|
||||
self.placeholderTextNode.attributedText = NSAttributedString(string: item.strings.VoiceChat_VideoPaused, font: Font.semibold(13.0), textColor: .white)
|
||||
self.placeholderIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/Pause"), color: .white)
|
||||
showPlaceholder = true
|
||||
}
|
||||
|
||||
placeholderAppeared = self.placeholderTextNode.alpha.isZero && showPlaceholder
|
||||
transition.updateAlpha(node: self.placeholderTextNode, alpha: showPlaceholder ? 1.0 : 0.0)
|
||||
transition.updateAlpha(node: self.placeholderIconNode, alpha: showPlaceholder ? 1.0 : 0.0)
|
||||
|
||||
let titleFont = Font.semibold(13.0)
|
||||
let titleColor = UIColor.white
|
||||
var titleAttributedString: NSAttributedString?
|
||||
@ -361,25 +461,8 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
}
|
||||
self.titleNode.attributedText = titleAttributedString
|
||||
|
||||
if case let .microphone(muted) = item.icon {
|
||||
let animationNode: VoiceChatMicrophoneNode
|
||||
if let current = self.animationNode {
|
||||
animationNode = current
|
||||
} else {
|
||||
animationNode = VoiceChatMicrophoneNode()
|
||||
self.animationNode = animationNode
|
||||
self.infoNode.addSubnode(animationNode)
|
||||
}
|
||||
animationNode.alpha = 1.0
|
||||
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true)
|
||||
} else if let animationNode = self.animationNode {
|
||||
self.animationNode = nil
|
||||
animationNode.removeFromSupernode()
|
||||
}
|
||||
|
||||
var hadMicrophoneNode = false
|
||||
var hadIconNode = false
|
||||
var nodeToAnimateIn: ASDisplayNode?
|
||||
|
||||
if case let .microphone(muted) = item.icon {
|
||||
let animationNode: VoiceChatMicrophoneNode
|
||||
@ -389,13 +472,18 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
animationNode = VoiceChatMicrophoneNode()
|
||||
self.animationNode = animationNode
|
||||
self.infoNode.addSubnode(animationNode)
|
||||
|
||||
nodeToAnimateIn = animationNode
|
||||
}
|
||||
animationNode.alpha = 1.0
|
||||
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true)
|
||||
} else if let animationNode = self.animationNode {
|
||||
hadMicrophoneNode = true
|
||||
self.animationNode = nil
|
||||
animationNode.removeFromSupernode()
|
||||
animationNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
|
||||
animationNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak animationNode] _ in
|
||||
animationNode?.removeFromSupernode()
|
||||
})
|
||||
}
|
||||
|
||||
if case .presentation = item.icon {
|
||||
@ -449,7 +537,9 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
if self.videoContainerNode.supernode === self.contentNode {
|
||||
if let videoNode = self.videoNode {
|
||||
itemTransition.updateFrame(node: videoNode, frame: bounds)
|
||||
videoNode.updateLayout(size: size, layoutMode: .fillOrFitToSquare, transition: itemTransition)
|
||||
if videoNode.supernode === self.videoContainerNode {
|
||||
videoNode.updateLayout(size: size, layoutMode: .fillOrFitToSquare, transition: itemTransition)
|
||||
}
|
||||
}
|
||||
transition.updateFrame(node: self.videoContainerNode, frame: bounds)
|
||||
}
|
||||
@ -463,6 +553,11 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height))
|
||||
self.titleNode.frame = CGRect(origin: CGPoint(x: 30.0, y: size.height - titleSize.height - 8.0), size: titleSize)
|
||||
|
||||
var transition = transition
|
||||
if nodeToAnimateIn != nil || placeholderAppeared {
|
||||
transition = .immediate
|
||||
}
|
||||
|
||||
if let iconNode = self.iconNode, let image = iconNode.image {
|
||||
transition.updateFrame(node: iconNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels(16.0 - image.size.width / 2.0), y: floorToScreenPixels(size.height - 15.0 - image.size.height / 2.0)), size: image.size))
|
||||
}
|
||||
@ -473,70 +568,40 @@ final class VoiceChatTileItemNode: ASDisplayNode {
|
||||
animationNode.transform = CATransform3DMakeScale(0.66667, 0.66667, 1.0)
|
||||
transition.updatePosition(node: animationNode, position: CGPoint(x: 16.0, y: size.height - 15.0))
|
||||
}
|
||||
|
||||
let placeholderTextSize = self.placeholderTextNode.updateLayout(CGSize(width: size.width - 30.0, height: 100.0))
|
||||
transition.updateFrame(node: self.placeholderTextNode, frame: CGRect(origin: CGPoint(x: floor((size.width - placeholderTextSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) + 10.0), size: placeholderTextSize))
|
||||
if let image = self.placeholderIconNode.image {
|
||||
let imageSize = CGSize(width: image.size.width * 0.5, height: image.size.height * 0.5)
|
||||
transition.updateFrame(node: self.placeholderIconNode, frame: CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floorToScreenPixels(size.height / 2.0) - imageSize.height - 4.0), size: imageSize))
|
||||
}
|
||||
}
|
||||
|
||||
func animateTransitionIn(from sourceNode: ASDisplayNode, containerNode: ASDisplayNode, transition: ContainedViewLayoutTransition, animate: Bool = true) {
|
||||
guard let _ = self.item else {
|
||||
func transitionIn(from sourceNode: ASDisplayNode?) {
|
||||
guard let item = self.item else {
|
||||
return
|
||||
}
|
||||
var duration: Double = 0.2
|
||||
var timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue
|
||||
if case let .animated(transitionDuration, curve) = transition {
|
||||
duration = transitionDuration + 0.05
|
||||
timingFunction = curve.timingFunction
|
||||
var videoNode: GroupVideoNode?
|
||||
if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item {
|
||||
if let sourceVideoNode = sourceNode.videoNode {
|
||||
sourceNode.videoNode = nil
|
||||
videoNode = sourceVideoNode
|
||||
}
|
||||
}
|
||||
|
||||
if let sourceNode = sourceNode as? VoiceChatFullscreenParticipantItemNode, let _ = sourceNode.item {
|
||||
let initialAnimate = animate
|
||||
if videoNode == nil {
|
||||
videoNode = item.getVideo(item.secondary ? .list : .tile)
|
||||
}
|
||||
|
||||
var startContainerPosition = sourceNode.view.convert(sourceNode.bounds, to: containerNode.view).center
|
||||
var animate = initialAnimate
|
||||
// if startContainerPosition.y > containerNode.frame.height - 238.0 {
|
||||
// animate = false
|
||||
// }
|
||||
if let videoNode = videoNode {
|
||||
videoNode.alpha = 1.0
|
||||
self.videoNode = videoNode
|
||||
self.videoContainerNode.addSubnode(videoNode)
|
||||
|
||||
if let videoNode = sourceNode.videoNode {
|
||||
sourceNode.videoNode = nil
|
||||
videoNode.alpha = 1.0
|
||||
self.videoNode = videoNode
|
||||
self.videoContainerNode.addSubnode(videoNode)
|
||||
}
|
||||
|
||||
if animate {
|
||||
sourceNode.isHidden = true
|
||||
Queue.mainQueue().after(0.7) {
|
||||
sourceNode.isHidden = false
|
||||
}
|
||||
|
||||
let initialPosition = self.contextSourceNode.position
|
||||
let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center
|
||||
|
||||
self.contextSourceNode.position = targetContainerPosition
|
||||
containerNode.addSubnode(self.contextSourceNode)
|
||||
|
||||
self.contextSourceNode.layer.animateScale(from: 0.467, to: 1.0, duration: duration, timingFunction: timingFunction)
|
||||
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: duration, timingFunction: timingFunction, completion: { [weak self] _ in
|
||||
if let strongSelf = self {
|
||||
strongSelf.contextSourceNode.position = initialPosition
|
||||
strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
|
||||
}
|
||||
})
|
||||
|
||||
self.videoNode?.updateLayout(size: self.bounds.size, layoutMode: .fillOrFitToSquare, transition: transition)
|
||||
self.videoNode?.frame = self.bounds
|
||||
} else if !initialAnimate {
|
||||
self.videoNode?.updateLayout(size: self.bounds.size, layoutMode: .fillOrFitToSquare, transition: .immediate)
|
||||
self.videoNode?.frame = self.bounds
|
||||
|
||||
sourceNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: duration, timingFunction: timingFunction, removeOnCompletion: false, completion: { [weak sourceNode] _ in
|
||||
sourceNode?.layer.removeAllAnimations()
|
||||
})
|
||||
sourceNode.layer.animateScale(from: 1.0, to: 0.0, duration: duration, timingFunction: timingFunction)
|
||||
}
|
||||
videoNode.updateLayout(size: self.bounds.size, layoutMode: .fillOrFitToSquare, transition: .immediate)
|
||||
videoNode.frame = self.bounds
|
||||
|
||||
if transition.isAnimated {
|
||||
self.fadeNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
|
||||
}
|
||||
self.updateIsEnabled()
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -822,6 +887,7 @@ private class VoiceChatTileShimmeringNode: ASDisplayNode {
|
||||
private let borderEffectNode: ShimmerEffectForegroundNode
|
||||
|
||||
private var currentShimmeringColor: UIColor?
|
||||
private var currentShimmering: Bool?
|
||||
private var currentSize: CGSize?
|
||||
|
||||
public init(account: Account, peer: Peer) {
|
||||
@ -844,30 +910,33 @@ private class VoiceChatTileShimmeringNode: ASDisplayNode {
|
||||
self.addSubnode(self.borderNode)
|
||||
self.borderNode.addSubnode(self.borderEffectNode)
|
||||
|
||||
self.backgroundNode.setSignal(peerAvatarCompleteImage(account: account, peer: peer, size: CGSize(width: 180.0, height: 180.0), round: false, font: Font.regular(16.0), drawLetters: false, fullSize: false, blurred: true))
|
||||
self.backgroundNode.setSignal(peerAvatarCompleteImage(account: account, peer: peer, size: CGSize(width: 250.0, height: 250.0), round: false, font: Font.regular(16.0), drawLetters: false, fullSize: false, blurred: true))
|
||||
}
|
||||
|
||||
public override func didLoad() {
|
||||
super.didLoad()
|
||||
|
||||
self.effectNode.layer.compositingFilter = "screenBlendMode"
|
||||
self.borderEffectNode.layer.compositingFilter = "screenBlendMode"
|
||||
|
||||
let borderMaskView = UIView()
|
||||
borderMaskView.layer.borderWidth = 1.0
|
||||
borderMaskView.layer.borderColor = UIColor.white.cgColor
|
||||
borderMaskView.layer.cornerRadius = backgroundCornerRadius
|
||||
self.borderMaskView = borderMaskView
|
||||
|
||||
if let size = self.currentSize {
|
||||
borderMaskView.frame = CGRect(origin: CGPoint(), size: size)
|
||||
if self.effectNode.supernode != nil {
|
||||
self.effectNode.layer.compositingFilter = "screenBlendMode"
|
||||
self.borderEffectNode.layer.compositingFilter = "screenBlendMode"
|
||||
|
||||
let borderMaskView = UIView()
|
||||
borderMaskView.layer.borderWidth = 1.0
|
||||
borderMaskView.layer.borderColor = UIColor.white.cgColor
|
||||
borderMaskView.layer.cornerRadius = backgroundCornerRadius
|
||||
self.borderMaskView = borderMaskView
|
||||
|
||||
if let size = self.currentSize {
|
||||
borderMaskView.frame = CGRect(origin: CGPoint(), size: size)
|
||||
}
|
||||
self.borderNode.view.mask = borderMaskView
|
||||
|
||||
if #available(iOS 13.0, *) {
|
||||
borderMaskView.layer.cornerCurve = .continuous
|
||||
}
|
||||
}
|
||||
|
||||
self.borderNode.view.mask = borderMaskView
|
||||
|
||||
if #available(iOS 13.0, *) {
|
||||
self.layer.cornerCurve = .continuous
|
||||
borderMaskView.layer.cornerCurve = .continuous
|
||||
}
|
||||
}
|
||||
|
||||
@ -876,21 +945,27 @@ private class VoiceChatTileShimmeringNode: ASDisplayNode {
|
||||
self.borderEffectNode.updateAbsoluteRect(rect, within: containerSize)
|
||||
}
|
||||
|
||||
public func update(shimmeringColor: UIColor, size: CGSize, transition: ContainedViewLayoutTransition) {
|
||||
if let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor) && self.currentSize == size {
|
||||
public func update(shimmeringColor: UIColor, shimmering: Bool, size: CGSize, transition: ContainedViewLayoutTransition) {
|
||||
if let currentShimmeringColor = self.currentShimmeringColor, currentShimmeringColor.isEqual(shimmeringColor) && self.currentSize == size && self.currentShimmering == shimmering {
|
||||
return
|
||||
}
|
||||
|
||||
let firstTime = self.currentShimmering == nil
|
||||
self.currentShimmeringColor = shimmeringColor
|
||||
self.currentShimmering = shimmering
|
||||
self.currentSize = size
|
||||
|
||||
let transition: ContainedViewLayoutTransition = firstTime ? .immediate : (transition.isAnimated ? transition : .animated(duration: 0.45, curve: .easeInOut))
|
||||
transition.updateAlpha(node: self.effectNode, alpha: shimmering ? 1.0 : 0.0)
|
||||
transition.updateAlpha(node: self.borderNode, alpha: shimmering ? 1.0 : 0.0)
|
||||
|
||||
let bounds = CGRect(origin: CGPoint(), size: size)
|
||||
|
||||
self.effectNode.update(foregroundColor: shimmeringColor.withAlphaComponent(0.3))
|
||||
self.effectNode.frame = bounds
|
||||
transition.updateFrame(node: self.effectNode, frame: bounds)
|
||||
|
||||
self.borderEffectNode.update(foregroundColor: shimmeringColor.withAlphaComponent(0.45))
|
||||
self.borderEffectNode.frame = bounds
|
||||
transition.updateFrame(node: self.borderEffectNode, frame: bounds)
|
||||
|
||||
transition.updateFrame(node: self.backgroundNode, frame: bounds)
|
||||
transition.updateFrame(node: self.borderNode, frame: bounds)
|
||||
|
@ -3,12 +3,15 @@ import UIKit
|
||||
import AsyncDisplayKit
|
||||
import Display
|
||||
import TelegramPresentationData
|
||||
import ChatTitleActivityNode
|
||||
|
||||
private let constructiveColor: UIColor = UIColor(rgb: 0x34c759)
|
||||
|
||||
final class VoiceChatTitleNode: ASDisplayNode {
|
||||
private var theme: PresentationTheme
|
||||
|
||||
private let titleNode: ASTextNode
|
||||
private let infoNode: ASTextNode
|
||||
private let infoNode: ChatTitleActivityNode
|
||||
let recordingIconNode: VoiceChatRecordingIconNode
|
||||
|
||||
public var isRecording: Bool = false {
|
||||
@ -28,11 +31,7 @@ final class VoiceChatTitleNode: ASDisplayNode {
|
||||
self.titleNode.truncationMode = .byTruncatingTail
|
||||
self.titleNode.isOpaque = false
|
||||
|
||||
self.infoNode = ASTextNode()
|
||||
self.infoNode.displaysAsynchronously = false
|
||||
self.infoNode.maximumNumberOfLines = 1
|
||||
self.infoNode.truncationMode = .byTruncatingTail
|
||||
self.infoNode.isOpaque = false
|
||||
self.infoNode = ChatTitleActivityNode()
|
||||
|
||||
self.recordingIconNode = VoiceChatRecordingIconNode(hasBackground: false)
|
||||
|
||||
@ -65,7 +64,7 @@ final class VoiceChatTitleNode: ASDisplayNode {
|
||||
self.tapped?()
|
||||
}
|
||||
|
||||
func update(size: CGSize, title: String, subtitle: String, slide: Bool, transition: ContainedViewLayoutTransition) {
|
||||
func update(size: CGSize, title: String, subtitle: String, speaking: Bool, slide: Bool, transition: ContainedViewLayoutTransition) {
|
||||
guard !size.width.isZero else {
|
||||
return
|
||||
}
|
||||
@ -94,11 +93,18 @@ final class VoiceChatTitleNode: ASDisplayNode {
|
||||
}
|
||||
|
||||
self.titleNode.attributedText = NSAttributedString(string: title, font: Font.medium(17.0), textColor: UIColor(rgb: 0xffffff))
|
||||
self.infoNode.attributedText = NSAttributedString(string: subtitle, font: Font.regular(13.0), textColor: UIColor(rgb: 0xffffff, alpha: 0.5))
|
||||
|
||||
var state = ChatTitleActivityNodeState.none
|
||||
if speaking {
|
||||
state = .recordingVoice(NSAttributedString(string: subtitle, font: Font.regular(13.0), textColor: constructiveColor), constructiveColor)
|
||||
} else {
|
||||
state = .info(NSAttributedString(string: subtitle, font: Font.regular(13.0), textColor: UIColor(rgb: 0xffffff, alpha: 0.5)), .generic)
|
||||
}
|
||||
let _ = self.infoNode.transitionToState(state, animation: .slide)
|
||||
|
||||
let constrainedSize = CGSize(width: size.width - 140.0, height: size.height)
|
||||
let titleSize = self.titleNode.measure(constrainedSize)
|
||||
let infoSize = self.infoNode.measure(constrainedSize)
|
||||
let infoSize = self.infoNode.updateLayout(constrainedSize, offset: 1.0, alignment: .center)
|
||||
let titleInfoSpacing: CGFloat = 0.0
|
||||
|
||||
let combinedHeight = titleSize.height + infoSize.height + titleInfoSpacing
|
||||
|
@ -1259,9 +1259,9 @@ public func setupAccount(_ account: Account, fetchCachedResourceRepresentation:
|
||||
account.postbox.mediaBox.preFetchedResourcePath = preFetchedResourcePath
|
||||
account.postbox.mediaBox.fetchResource = { [weak account] resource, intervals, parameters -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
|
||||
if let strongAccount = account {
|
||||
if let result = fetchResource(account: strongAccount, resource: resource, intervals: intervals, parameters: parameters) {
|
||||
if let result = strongAccount.auxiliaryMethods.fetchResource(strongAccount, resource, intervals, parameters) {
|
||||
return result
|
||||
} else if let result = strongAccount.auxiliaryMethods.fetchResource(strongAccount, resource, intervals, parameters) {
|
||||
} else if let result = fetchResource(account: strongAccount, resource: resource, intervals: intervals, parameters: parameters) {
|
||||
return result
|
||||
} else {
|
||||
return .never()
|
||||
|
@ -175,6 +175,7 @@ private var declaredEncodables: Void = {
|
||||
declareEncodable(ExportedInvitation.self, f: { ExportedInvitation(decoder: $0) })
|
||||
declareEncodable(CachedDisplayAsPeers.self, f: { CachedDisplayAsPeers(decoder: $0) })
|
||||
declareEncodable(WallpapersState.self, f: { WallpapersState(decoder: $0) })
|
||||
declareEncodable(WallpaperDataResource.self, f: { WallpaperDataResource(decoder: $0) })
|
||||
|
||||
return
|
||||
}()
|
||||
@ -189,6 +190,24 @@ public func rootPathForBasePath(_ appGroupPath: String) -> String {
|
||||
|
||||
public func performAppGroupUpgrades(appGroupPath: String, rootPath: String) {
|
||||
let _ = try? FileManager.default.createDirectory(at: URL(fileURLWithPath: rootPath), withIntermediateDirectories: true, attributes: nil)
|
||||
|
||||
if let items = FileManager.default.enumerator(at: URL(fileURLWithPath: appGroupPath), includingPropertiesForKeys: [.isDirectoryKey], options: [.skipsHiddenFiles, .skipsSubdirectoryDescendants], errorHandler: nil) {
|
||||
let allowedDirectories: [String] = [
|
||||
"telegram-data",
|
||||
"Library"
|
||||
]
|
||||
|
||||
for url in items {
|
||||
guard let url = url as? URL else {
|
||||
continue
|
||||
}
|
||||
if let isDirectory = try? url.resourceValues(forKeys: [.isDirectoryKey]).isDirectory, isDirectory {
|
||||
if !allowedDirectories.contains(url.lastPathComponent) {
|
||||
let _ = try? FileManager.default.removeItem(at: url)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
do {
|
||||
var resourceValues = URLResourceValues()
|
||||
|
@ -77,7 +77,7 @@ extension TelegramWallpaper {
|
||||
//assertionFailure()
|
||||
self = .color(0xffffff)
|
||||
}
|
||||
case let .wallPaperNoFile(_, settings):
|
||||
case let .wallPaperNoFile(_, _, settings):
|
||||
if let settings = settings, case let .wallPaperSettings(_, backgroundColor, secondBackgroundColor, thirdBackgroundColor, fourthBackgroundColor, _, rotation) = settings {
|
||||
let colors: [UInt32] = ([backgroundColor, secondBackgroundColor, thirdBackgroundColor, fourthBackgroundColor] as [Int32?]).compactMap({ color -> UInt32? in
|
||||
return color.flatMap(UInt32.init(bitPattern:))
|
||||
@ -103,9 +103,9 @@ extension TelegramWallpaper {
|
||||
case let .file(_, _, _, _, _, _, slug, _, settings):
|
||||
return (.inputWallPaperSlug(slug: slug), apiWallpaperSettings(settings))
|
||||
case let .color(color):
|
||||
return (.inputWallPaperNoFile, apiWallpaperSettings(WallpaperSettings(colors: [color])))
|
||||
return (.inputWallPaperNoFile(id: 0), apiWallpaperSettings(WallpaperSettings(colors: [color])))
|
||||
case let .gradient(colors, settings):
|
||||
return (.inputWallPaperNoFile, apiWallpaperSettings(WallpaperSettings(colors: colors, rotation: settings.rotation)))
|
||||
return (.inputWallPaperNoFile(id: 0), apiWallpaperSettings(WallpaperSettings(colors: colors, rotation: settings.rotation)))
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
@ -142,29 +142,17 @@ public func getCurrentGroupCall(account: Account, callId: Int64, accessHash: Int
|
||||
} else if mutedByYou {
|
||||
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: false, mutedByYou: mutedByYou)
|
||||
}
|
||||
var videoJsonDescription: String? = nil
|
||||
var presentationJsonDescription: String? = nil
|
||||
if let video = video {
|
||||
switch video {
|
||||
case let .dataJSON(data):
|
||||
videoJsonDescription = data
|
||||
}
|
||||
}
|
||||
if let presentation = presentation {
|
||||
switch presentation {
|
||||
case let .dataJSON(data):
|
||||
presentationJsonDescription = data
|
||||
}
|
||||
}
|
||||
var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
if muteState?.canUnmute == false {
|
||||
videoJsonDescription = nil
|
||||
presentationJsonDescription = nil
|
||||
videoDescription = nil
|
||||
presentationDescription = nil
|
||||
}
|
||||
parsedParticipants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: peer,
|
||||
ssrc: ssrc,
|
||||
videoJsonDescription: videoJsonDescription,
|
||||
presentationJsonDescription: presentationJsonDescription,
|
||||
videoDescription: videoDescription,
|
||||
presentationDescription: presentationDescription,
|
||||
joinTimestamp: date,
|
||||
raiseHandRating: raiseHandRating,
|
||||
hasRaiseHand: raiseHandRating != nil,
|
||||
@ -471,29 +459,17 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
|
||||
} else if mutedByYou {
|
||||
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: false, mutedByYou: mutedByYou)
|
||||
}
|
||||
var videoJsonDescription: String? = nil
|
||||
var presentationJsonDescription: String? = nil
|
||||
if let video = video {
|
||||
switch video {
|
||||
case let .dataJSON(data):
|
||||
videoJsonDescription = data
|
||||
}
|
||||
}
|
||||
if let presentation = presentation {
|
||||
switch presentation {
|
||||
case let .dataJSON(data):
|
||||
presentationJsonDescription = data
|
||||
}
|
||||
}
|
||||
var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
if muteState?.canUnmute == false {
|
||||
videoJsonDescription = nil
|
||||
presentationJsonDescription = nil
|
||||
videoDescription = nil
|
||||
presentationDescription = nil
|
||||
}
|
||||
parsedParticipants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: peer,
|
||||
ssrc: ssrc,
|
||||
videoJsonDescription: videoJsonDescription,
|
||||
presentationJsonDescription: presentationJsonDescription,
|
||||
videoDescription: videoDescription,
|
||||
presentationDescription: presentationDescription,
|
||||
joinTimestamp: date,
|
||||
raiseHandRating: raiseHandRating,
|
||||
hasRaiseHand: raiseHandRating != nil,
|
||||
@ -735,30 +711,18 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal
|
||||
} else if mutedByYou {
|
||||
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: false, mutedByYou: mutedByYou)
|
||||
}
|
||||
var videoJsonDescription: String? = nil
|
||||
var presentationJsonDescription: String? = nil
|
||||
if let video = video {
|
||||
switch video {
|
||||
case let .dataJSON(data):
|
||||
videoJsonDescription = data
|
||||
}
|
||||
}
|
||||
if let presentation = presentation {
|
||||
switch presentation {
|
||||
case let .dataJSON(data):
|
||||
presentationJsonDescription = data
|
||||
}
|
||||
}
|
||||
var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
if muteState?.canUnmute == false {
|
||||
videoJsonDescription = nil
|
||||
presentationJsonDescription = nil
|
||||
videoDescription = nil
|
||||
presentationDescription = nil
|
||||
}
|
||||
if !state.participants.contains(where: { $0.peer.id == peer.id }) {
|
||||
state.participants.append(GroupCallParticipantsContext.Participant(
|
||||
peer: peer,
|
||||
ssrc: ssrc,
|
||||
videoJsonDescription: videoJsonDescription,
|
||||
presentationJsonDescription: presentationJsonDescription,
|
||||
videoDescription: videoDescription,
|
||||
presentationDescription: presentationDescription,
|
||||
joinTimestamp: date,
|
||||
raiseHandRating: raiseHandRating,
|
||||
hasRaiseHand: raiseHandRating != nil,
|
||||
@ -960,11 +924,22 @@ public final class GroupCallParticipantsContext {
|
||||
self.mutedByYou = mutedByYou
|
||||
}
|
||||
}
|
||||
|
||||
public struct VideoDescription: Equatable {
|
||||
public struct SsrcGroup: Equatable {
|
||||
public var semantics: String
|
||||
public var ssrcs: [UInt32]
|
||||
}
|
||||
|
||||
public var endpointId: String
|
||||
public var ssrcGroups: [SsrcGroup]
|
||||
public var isPaused: Bool
|
||||
}
|
||||
|
||||
public var peer: Peer
|
||||
public var ssrc: UInt32?
|
||||
public var videoJsonDescription: String?
|
||||
public var presentationJsonDescription: String?
|
||||
public var videoDescription: VideoDescription?
|
||||
public var presentationDescription: VideoDescription?
|
||||
public var joinTimestamp: Int32
|
||||
public var raiseHandRating: Int64?
|
||||
public var hasRaiseHand: Bool
|
||||
@ -977,8 +952,8 @@ public final class GroupCallParticipantsContext {
|
||||
public init(
|
||||
peer: Peer,
|
||||
ssrc: UInt32?,
|
||||
videoJsonDescription: String?,
|
||||
presentationJsonDescription: String?,
|
||||
videoDescription: VideoDescription?,
|
||||
presentationDescription: VideoDescription?,
|
||||
joinTimestamp: Int32,
|
||||
raiseHandRating: Int64?,
|
||||
hasRaiseHand: Bool,
|
||||
@ -990,8 +965,8 @@ public final class GroupCallParticipantsContext {
|
||||
) {
|
||||
self.peer = peer
|
||||
self.ssrc = ssrc
|
||||
self.videoJsonDescription = videoJsonDescription
|
||||
self.presentationJsonDescription = presentationJsonDescription
|
||||
self.videoDescription = videoDescription
|
||||
self.presentationDescription = presentationDescription
|
||||
self.joinTimestamp = joinTimestamp
|
||||
self.raiseHandRating = raiseHandRating
|
||||
self.hasRaiseHand = hasRaiseHand
|
||||
@ -1020,10 +995,10 @@ public final class GroupCallParticipantsContext {
|
||||
if lhs.ssrc != rhs.ssrc {
|
||||
return false
|
||||
}
|
||||
if lhs.videoJsonDescription != rhs.videoJsonDescription {
|
||||
if lhs.videoDescription != rhs.videoDescription {
|
||||
return false
|
||||
}
|
||||
if lhs.presentationJsonDescription != rhs.presentationJsonDescription {
|
||||
if lhs.presentationDescription != rhs.presentationDescription {
|
||||
return false
|
||||
}
|
||||
if lhs.joinTimestamp != rhs.joinTimestamp {
|
||||
@ -1225,8 +1200,8 @@ public final class GroupCallParticipantsContext {
|
||||
|
||||
public var peerId: PeerId
|
||||
public var ssrc: UInt32?
|
||||
public var videoJsonDescription: String?
|
||||
public var presentationJsonDescription: String?
|
||||
public var videoDescription: GroupCallParticipantsContext.Participant.VideoDescription?
|
||||
public var presentationDescription: GroupCallParticipantsContext.Participant.VideoDescription?
|
||||
public var joinTimestamp: Int32
|
||||
public var activityTimestamp: Double?
|
||||
public var raiseHandRating: Int64?
|
||||
@ -1239,8 +1214,8 @@ public final class GroupCallParticipantsContext {
|
||||
init(
|
||||
peerId: PeerId,
|
||||
ssrc: UInt32?,
|
||||
videoJsonDescription: String?,
|
||||
presentationJsonDescription: String?,
|
||||
videoDescription: GroupCallParticipantsContext.Participant.VideoDescription?,
|
||||
presentationDescription: GroupCallParticipantsContext.Participant.VideoDescription?,
|
||||
joinTimestamp: Int32,
|
||||
activityTimestamp: Double?,
|
||||
raiseHandRating: Int64?,
|
||||
@ -1252,8 +1227,8 @@ public final class GroupCallParticipantsContext {
|
||||
) {
|
||||
self.peerId = peerId
|
||||
self.ssrc = ssrc
|
||||
self.videoJsonDescription = videoJsonDescription
|
||||
self.presentationJsonDescription = presentationJsonDescription
|
||||
self.videoDescription = videoDescription
|
||||
self.presentationDescription = presentationDescription
|
||||
self.joinTimestamp = joinTimestamp
|
||||
self.activityTimestamp = activityTimestamp
|
||||
self.raiseHandRating = raiseHandRating
|
||||
@ -1381,6 +1356,9 @@ public final class GroupCallParticipantsContext {
|
||||
private let resetInviteLinksDisposable = MetaDisposable()
|
||||
private let updateShouldBeRecordingDisposable = MetaDisposable()
|
||||
|
||||
private var localVideoIsMuted: Bool = true
|
||||
private var localIsVideoPaused: Bool = true
|
||||
|
||||
public struct ServiceState {
|
||||
fileprivate var nextActivityRank: Int = 0
|
||||
}
|
||||
@ -1791,8 +1769,8 @@ public final class GroupCallParticipantsContext {
|
||||
let participant = Participant(
|
||||
peer: peer,
|
||||
ssrc: participantUpdate.ssrc,
|
||||
videoJsonDescription: participantUpdate.videoJsonDescription,
|
||||
presentationJsonDescription: participantUpdate.presentationJsonDescription,
|
||||
videoDescription: participantUpdate.videoDescription,
|
||||
presentationDescription: participantUpdate.presentationDescription,
|
||||
joinTimestamp: previousJoinTimestamp ?? participantUpdate.joinTimestamp,
|
||||
raiseHandRating: participantUpdate.raiseHandRating,
|
||||
hasRaiseHand: participantUpdate.raiseHandRating != nil,
|
||||
@ -1938,7 +1916,7 @@ public final class GroupCallParticipantsContext {
|
||||
raiseHandApi = nil
|
||||
}
|
||||
|
||||
return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: muted, volume: volume, raiseHand: raiseHandApi, videoMuted: nil))
|
||||
return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: muted, volume: volume, raiseHand: raiseHandApi, videoStopped: nil, videoPaused: nil, presentationPaused: nil))
|
||||
|> map(Optional.init)
|
||||
|> `catch` { _ -> Signal<Api.Updates?, NoError> in
|
||||
return .single(nil)
|
||||
@ -1978,7 +1956,13 @@ public final class GroupCallParticipantsContext {
|
||||
}))
|
||||
}
|
||||
|
||||
public func updateVideoState(peerId: PeerId, isVideoMuted: Bool) {
|
||||
public func updateVideoState(peerId: PeerId, isVideoMuted: Bool, isVideoPaused: Bool) {
|
||||
if self.localVideoIsMuted == isVideoMuted && self.localIsVideoPaused == isVideoPaused {
|
||||
return
|
||||
}
|
||||
self.localVideoIsMuted = isVideoMuted
|
||||
self.localIsVideoPaused = isVideoPaused
|
||||
|
||||
let disposable = MetaDisposable()
|
||||
|
||||
let account = self.account
|
||||
@ -1998,7 +1982,13 @@ public final class GroupCallParticipantsContext {
|
||||
videoMuted = isVideoMuted ? .boolTrue : .boolFalse
|
||||
flags |= 1 << 3
|
||||
|
||||
return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: nil, volume: nil, raiseHand: nil, videoMuted: videoMuted))
|
||||
var videoPaused: Api.Bool?
|
||||
if !isVideoMuted {
|
||||
videoPaused = isVideoPaused ? .boolTrue : .boolFalse
|
||||
flags |= 1 << 4
|
||||
}
|
||||
|
||||
return account.network.request(Api.functions.phone.editGroupCallParticipant(flags: flags, call: .inputGroupCall(id: id, accessHash: accessHash), participant: inputPeer, muted: nil, volume: nil, raiseHand: nil, videoStopped: videoMuted, videoPaused: videoPaused, presentationPaused: nil))
|
||||
|> map(Optional.init)
|
||||
|> `catch` { _ -> Signal<Api.Updates?, NoError> in
|
||||
return .single(nil)
|
||||
@ -2148,29 +2138,17 @@ extension GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate {
|
||||
participationStatusChange = .none
|
||||
}
|
||||
|
||||
var videoJsonDescription: String? = nil
|
||||
var presentationJsonDescription: String? = nil
|
||||
if let video = video {
|
||||
switch video {
|
||||
case let .dataJSON(data):
|
||||
videoJsonDescription = data
|
||||
}
|
||||
}
|
||||
if let presentation = presentation {
|
||||
switch presentation {
|
||||
case let .dataJSON(data):
|
||||
presentationJsonDescription = data
|
||||
}
|
||||
}
|
||||
var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
if muteState?.canUnmute == false {
|
||||
videoJsonDescription = nil
|
||||
presentationJsonDescription = nil
|
||||
videoDescription = nil
|
||||
presentationDescription = nil
|
||||
}
|
||||
self.init(
|
||||
peerId: peerId,
|
||||
ssrc: ssrc,
|
||||
videoJsonDescription: videoJsonDescription,
|
||||
presentationJsonDescription: presentationJsonDescription,
|
||||
videoDescription: videoDescription,
|
||||
presentationDescription: presentationDescription,
|
||||
joinTimestamp: date,
|
||||
activityTimestamp: activeDate.flatMap(Double.init),
|
||||
raiseHandRating: raiseHandRating,
|
||||
@ -2214,29 +2192,17 @@ extension GroupCallParticipantsContext.Update.StateUpdate {
|
||||
participationStatusChange = .none
|
||||
}
|
||||
|
||||
var videoJsonDescription: String? = nil
|
||||
var presentationJsonDescription: String? = nil
|
||||
if let video = video {
|
||||
switch video {
|
||||
case let .dataJSON(data):
|
||||
videoJsonDescription = data
|
||||
}
|
||||
}
|
||||
if let presentation = presentation {
|
||||
switch presentation {
|
||||
case let .dataJSON(data):
|
||||
presentationJsonDescription = data
|
||||
}
|
||||
}
|
||||
var videoDescription = video.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
var presentationDescription = presentation.flatMap(GroupCallParticipantsContext.Participant.VideoDescription.init)
|
||||
if muteState?.canUnmute == false {
|
||||
videoJsonDescription = nil
|
||||
presentationJsonDescription = nil
|
||||
videoDescription = nil
|
||||
presentationDescription = nil
|
||||
}
|
||||
participantUpdates.append(GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate(
|
||||
peerId: peerId,
|
||||
ssrc: ssrc,
|
||||
videoJsonDescription: videoJsonDescription,
|
||||
presentationJsonDescription: presentationJsonDescription,
|
||||
videoDescription: videoDescription,
|
||||
presentationDescription: presentationDescription,
|
||||
joinTimestamp: date,
|
||||
activityTimestamp: activeDate.flatMap(Double.init),
|
||||
raiseHandRating: raiseHandRating,
|
||||
@ -2584,3 +2550,20 @@ public func getAudioBroadcastPart(dataSource: AudioBroadcastDataSource, callId:
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extension GroupCallParticipantsContext.Participant.VideoDescription {
|
||||
init(_ apiVideo: Api.GroupCallParticipantVideo) {
|
||||
switch apiVideo {
|
||||
case let .groupCallParticipantVideo(flags, endpoint, sourceGroups):
|
||||
var parsedSsrcGroups: [SsrcGroup] = []
|
||||
for group in sourceGroups {
|
||||
switch group {
|
||||
case let .groupCallParticipantVideoSourceGroup(semantics, sources):
|
||||
parsedSsrcGroups.append(SsrcGroup(semantics: semantics, ssrcs: sources.map(UInt32.init(bitPattern:))))
|
||||
}
|
||||
}
|
||||
let isPaused = (flags & (1 << 0)) != 0
|
||||
self.init(endpointId: endpoint, ssrcGroups: parsedSsrcGroups, isPaused: isPaused)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -50,7 +50,7 @@ func updateSecretChat(encryptionProvider: EncryptionProvider, accountPeerId: Pee
|
||||
|
||||
var updatedState = currentState
|
||||
updatedState = updatedState.withUpdatedKeychain(SecretChatKeychain(keys: [SecretChatKey(fingerprint: keyFingerprint, key: MemoryBuffer(data: key), validity: .indefinite, useCount: 0)]))
|
||||
updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer46, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
|
||||
updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer73, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: currentPeer.id, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
|
||||
|
||||
updatedState = updatedState.withUpdatedKeyFingerprint(SecretChatKeyFingerprint(sha1: SecretChatKeySha1Fingerprint(digest: sha1Digest(key)), sha256: SecretChatKeySha256Fingerprint(digest: sha256Digest(key))))
|
||||
|
||||
|
@ -63,6 +63,21 @@ func fetchResource(account: Account, resource: MediaResource, intervals: Signal<
|
||||
} else if let httpReference = resource as? HttpReferenceMediaResource {
|
||||
return .single(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: false))
|
||||
|> then(fetchHttpResource(url: httpReference.url))
|
||||
} else if let wallpaperResource = resource as? WallpaperDataResource {
|
||||
return getWallpaper(network: account.network, slug: wallpaperResource.slug)
|
||||
|> mapError { _ -> MediaResourceDataFetchError in
|
||||
return .generic
|
||||
}
|
||||
|> mapToSignal { wallpaper -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
|
||||
guard case let .file(file) = wallpaper else {
|
||||
return .fail(.generic)
|
||||
}
|
||||
guard let cloudResource = file.file.resource as? TelegramMultipartFetchableResource else {
|
||||
return .fail(.generic)
|
||||
}
|
||||
return .single(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: false))
|
||||
|> then(fetchCloudMediaLocation(account: account, resource: cloudResource, datacenterId: cloudResource.datacenterId, size: resource.size == 0 ? nil : resource.size, intervals: intervals, parameters: MediaResourceFetchParameters(tag: nil, info: TelegramCloudMediaResourceFetchInfo(reference: .standalone(resource: file.file.resource), preferBackgroundReferenceRevalidation: false, continueInBackground: false), isRandomAccessAllowed: true)))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@ -245,7 +245,7 @@ private func initialHandshakeAccept(postbox: Postbox, network: Network, peerId:
|
||||
if let state = transaction.getPeerChatState(peerId) as? SecretChatState {
|
||||
var updatedState = state
|
||||
updatedState = updatedState.withUpdatedKeychain(SecretChatKeychain(keys: [SecretChatKey(fingerprint: keyFingerprint, key: MemoryBuffer(data: key), validity: .indefinite, useCount: 0)]))
|
||||
updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer46, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
|
||||
updatedState = updatedState.withUpdatedEmbeddedState(.sequenceBasedLayer(SecretChatSequenceBasedLayerState(layerNegotiationState: SecretChatLayerNegotiationState(activeLayer: .layer73, locallyRequestedLayer: nil, remotelyRequestedLayer: nil), rekeyState: nil, baseIncomingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretIncomingDecrypted), baseOutgoingOperationIndex: transaction.operationLogGetNextEntryLocalIndex(peerId: peerId, tag: OperationLogTags.SecretOutgoing), topProcessedCanonicalIncomingOperationIndex: nil)))
|
||||
updatedState = updatedState.withUpdatedKeyFingerprint(SecretChatKeyFingerprint(sha1: SecretChatKeySha1Fingerprint(digest: sha1Digest(key)), sha256: SecretChatKeySha256Fingerprint(digest: sha256Digest(key))))
|
||||
|
||||
var layer: SecretChatLayer?
|
||||
|