Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

commit 31a37eed51
Author: Ilya Laktyushin
Date:   2020-11-24 18:26:43 +04:00
59 changed files with 6116 additions and 5633 deletions

View File

@ -23,8 +23,9 @@ internal:
environment:
name: internal
artifacts:
when: on_failure
paths:
- build/artifacts/Telegram.DSYMs.zip
- build/artifacts
expire_in: 1 week
experimental:

View File

@ -5923,13 +5923,15 @@ Sorry for the inconvenience.";
"VoiceChat.Share" = "Share Invite Link";
"VoiceChat.EndVoiceChat" = "End Voice Chat";
"VoiceChat.CopyInviteLink" = "Copy Invite Link";
"VoiceChat.UnmutePeer" = "Allow to Speak";
"VoiceChat.MutePeer" = "Mute";
"VoiceChat.RemovePeer" = "Remove";
"VoiceChat.RemovePeerConfirmation" = "Are you sure you want to remove %@ from the group chat?";
"VoiceChat.RemovePeerRemove" = "Remove";
"VoiceChat.UserInvited" = "You invited %@ to the voice chat";
"VoiceChat.UserInvited" = "You invited **%@** to the voice chat";
"Notification.VoiceChatInvitation" = "%1$@ invited %2$@ to the voice chat";
"Notification.VoiceChatInvitationByYou" = "You invited %1$@ to the voice chat";

View File

@ -198,6 +198,8 @@ public protocol PresentationGroupCall: class {
var canBeRemoved: Signal<Bool, NoError> { get }
var state: Signal<PresentationGroupCallState, NoError> { get }
var members: Signal<[PeerId: PresentationGroupCallMemberState], NoError> { get }
var audioLevels: Signal<[(PeerId, Float)], NoError> { get }
var myAudioLevel: Signal<Float, NoError> { get }
func leave() -> Signal<Bool, NoError>
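As an illustrative sketch (not part of this commit), a consumer of the protocol could observe the two new signals using the SwiftSignalKit pattern seen elsewhere in this diff; `call` and the module imports are assumed to come from the surrounding app code:

import SwiftSignalKit

func observeAudioLevels(call: PresentationGroupCall) -> Disposable {
    // audioLevels delivers batches of (PeerId, Float) pairs for speaking participants
    return (call.audioLevels
    |> deliverOnMainQueue).start(next: { levels in
        for (peerId, level) in levels {
            // drive a per-participant speaking indicator here
            print("peer \(peerId) level \(level)")
        }
    })
}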

View File

@ -0,0 +1,17 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "AudioBlob",
module_name = "AudioBlob",
srcs = glob([
"Sources/**/*.swift",
]),
deps = [
"//submodules/AsyncDisplayKit:AsyncDisplayKit",
"//submodules/Display:Display",
"//submodules/LegacyComponents:LegacyComponents",
],
visibility = [
"//visibility:public",
],
)

View File

@ -3,8 +3,7 @@ import UIKit
import Display
import LegacyComponents
final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration {
private let smallBlob: BlobView
private let mediumBlob: BlobView
private let bigBlob: BlobView
@ -18,9 +17,9 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
private(set) var isAnimating = false
typealias BlobRange = (min: CGFloat, max: CGFloat)
public typealias BlobRange = (min: CGFloat, max: CGFloat)
init(
public init(
frame: CGRect,
maxLevel: CGFloat,
smallBlobRange: BlobRange,
@ -84,13 +83,13 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
fatalError("init(coder:) has not been implemented")
}
func setColor(_ color: UIColor) {
public func setColor(_ color: UIColor) {
smallBlob.setColor(color)
mediumBlob.setColor(color.withAlphaComponent(0.3))
bigBlob.setColor(color.withAlphaComponent(0.15))
}
func updateLevel(_ level: CGFloat) {
public func updateLevel(_ level: CGFloat) {
let normalizedLevel = min(1, max(level / maxLevel, 0))
smallBlob.updateSpeedLevel(to: normalizedLevel)
@ -100,7 +99,7 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
audioLevel = normalizedLevel
}
func startAnimating() {
public func startAnimating() {
guard !isAnimating else { return }
isAnimating = true
@ -112,7 +111,7 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
displayLinkAnimator?.isPaused = false
}
func stopAnimating() {
public func stopAnimating() {
guard isAnimating else { return }
isAnimating = false
@ -138,7 +137,7 @@ final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDecoration
}
}
override func layoutSubviews() {
override public func layoutSubviews() {
super.layoutSubviews()
smallBlob.frame = bounds
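With the class and these members now public, other modules can drive the blob directly; a minimal sketch, assuming a `VoiceBlobView` was already constructed elsewhere with suitable blob ranges:

import UIKit
import AudioBlob

func updateBlob(_ blobView: VoiceBlobView, level: CGFloat, color: UIColor) {
    blobView.setColor(color)      // medium/big blobs get reduced alpha internally
    blobView.startAnimating()     // guarded, so repeated calls are harmless
    blobView.updateLevel(level)   // normalized against maxLevel inside the view
}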

View File

@ -160,47 +160,6 @@ func contactContextMenuItems(context: AccountContext, peerId: PeerId, contactsCo
f(.default)
})))
}
if canVideoCall {
items.append(.action(ContextMenuActionItem(text: strings.ContactList_Context_VideoCall, icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Call"), color: theme.contextMenu.primaryColor) }, action: { _, f in
if let contactsController = contactsController {
let callResult = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: true, endCurrentIfAny: false)
if let callResult = callResult, case let .alreadyInProgress(currentPeerId) = callResult {
if currentPeerId == peerId {
context.sharedContext.navigateToCurrentCall()
} else {
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { [weak contactsController] peer, current in
if let contactsController = contactsController, let peer = peer {
if let current = current {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: true, endCurrentIfAny: true)
})]), in: .window(.root))
} else {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
}
})
/*let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), transaction.getPeer(currentPeerId))
}
|> deliverOnMainQueue).start(next: { [weak contactsController] peer, current in
if let contactsController = contactsController, let peer = peer, let current = current {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: true, endCurrentIfAny: true)
})]), in: .window(.root))
}
})*/
}
}
}
f(.default)
})))
}
return items
}
}

View File

@ -12,10 +12,12 @@ public enum DeviceMetrics: CaseIterable, Equatable {
case iPhone6Plus
case iPhoneX
case iPhoneXSMax
case iPhoneXr
case iPhone12Mini
case iPhone12
case iPhone12ProMax
case iPad
case iPad102Inch
case iPadPro10Inch
case iPadPro11Inch
case iPadPro
@ -30,10 +32,12 @@ public enum DeviceMetrics: CaseIterable, Equatable {
.iPhone6Plus,
.iPhoneX,
.iPhoneXSMax,
.iPhoneXr,
.iPhone12Mini,
.iPhone12,
.iPhone12ProMax,
.iPad,
.iPad102Inch,
.iPadPro10Inch,
.iPadPro11Inch,
.iPadPro,
@ -41,7 +45,7 @@ public enum DeviceMetrics: CaseIterable, Equatable {
]
}
public init(screenSize: CGSize, statusBarHeight: CGFloat, onScreenNavigationHeight: CGFloat?) {
public init(screenSize: CGSize, scale: CGFloat, statusBarHeight: CGFloat, onScreenNavigationHeight: CGFloat?) {
var screenSize = screenSize
if screenSize.width > screenSize.height {
screenSize = CGSize(width: screenSize.height, height: screenSize.width)
@ -63,7 +67,11 @@ public enum DeviceMetrics: CaseIterable, Equatable {
let width = device.screenSize.width
let height = device.screenSize.height
if ((screenSize.width.isEqual(to: width) && screenSize.height.isEqual(to: height)) || (additionalSize.width.isEqual(to: width) && additionalSize.height.isEqual(to: height))) {
self = device
if case .iPhoneXSMax = device, scale == 2.0 {
self = .iPhoneXr
} else {
self = device
}
return
}
}
@ -72,7 +80,7 @@ public enum DeviceMetrics: CaseIterable, Equatable {
public var type: DeviceType {
switch self {
case .iPad, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
case .iPad, .iPad102Inch, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
return .tablet
case let .unknown(screenSize, _, _) where screenSize.width >= 768.0 && screenSize.height >= 1024.0:
return .tablet
@ -93,7 +101,7 @@ public enum DeviceMetrics: CaseIterable, Equatable {
return CGSize(width: 414.0, height: 736.0)
case .iPhoneX:
return CGSize(width: 375.0, height: 812.0)
case .iPhoneXSMax:
case .iPhoneXSMax, .iPhoneXr:
return CGSize(width: 414.0, height: 896.0)
case .iPhone12Mini:
return CGSize(width: 360.0, height: 780.0)
@ -103,6 +111,8 @@ public enum DeviceMetrics: CaseIterable, Equatable {
return CGSize(width: 428.0, height: 926.0)
case .iPad:
return CGSize(width: 768.0, height: 1024.0)
case .iPad102Inch:
return CGSize(width: 810.0, height: 1080.0)
case .iPadPro10Inch:
return CGSize(width: 834.0, height: 1112.0)
case .iPadPro11Inch:
@ -114,9 +124,32 @@ public enum DeviceMetrics: CaseIterable, Equatable {
}
}
public var screenCornerRadius: CGFloat {
switch self {
case .iPhoneX, .iPhoneXSMax:
return 39.0
case .iPhoneXr:
return 41.0 + UIScreenPixel
case .iPhone12Mini:
return 44.0
case .iPhone12:
return 47.0 + UIScreenPixel
case .iPhone12ProMax:
return 53.0 + UIScreenPixel
case let .unknown(_, _, onScreenNavigationHeight):
if let _ = onScreenNavigationHeight {
return 39.0
} else {
return 0.0
}
default:
return 0.0
}
}
func safeInsets(inLandscape: Bool) -> UIEdgeInsets {
switch self {
case .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return inLandscape ? UIEdgeInsets(top: 0.0, left: 44.0, bottom: 0.0, right: 44.0) : UIEdgeInsets(top: 44.0, left: 0.0, bottom: 0.0, right: 0.0)
default:
return UIEdgeInsets.zero
@ -125,7 +158,7 @@ public enum DeviceMetrics: CaseIterable, Equatable {
func onScreenNavigationHeight(inLandscape: Bool, systemOnScreenNavigationHeight: CGFloat?) -> CGFloat? {
switch self {
case .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return inLandscape ? 21.0 : 34.0
case .iPadPro3rdGen, .iPadPro11Inch:
return 21.0
@ -144,10 +177,9 @@ public enum DeviceMetrics: CaseIterable, Equatable {
func statusBarHeight(for size: CGSize) -> CGFloat? {
let value = self.statusBarHeight
switch self {
case .iPad, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
if self.type == .tablet {
return value
default:
} else {
if size.width < size.height {
return value
} else {
@ -158,7 +190,7 @@ public enum DeviceMetrics: CaseIterable, Equatable {
var statusBarHeight: CGFloat {
switch self {
case .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return 44.0
case .iPadPro11Inch, .iPadPro3rdGen:
return 24.0
@ -176,9 +208,9 @@ public enum DeviceMetrics: CaseIterable, Equatable {
return 162.0
case .iPhone6, .iPhone6Plus:
return 163.0
case .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return 172.0
case .iPad, .iPadPro10Inch:
case .iPad, .iPad102Inch, .iPadPro10Inch:
return 348.0
case .iPadPro11Inch:
return 368.0
@ -197,9 +229,9 @@ public enum DeviceMetrics: CaseIterable, Equatable {
return 226.0
case .iPhoneX, .iPhone12Mini, .iPhone12:
return 291.0
case .iPhoneXSMax, .iPhone12ProMax:
case .iPhoneXSMax, .iPhoneXr, .iPhone12ProMax:
return 302.0
case .iPad, .iPadPro10Inch:
case .iPad, .iPad102Inch, .iPadPro10Inch:
return 263.0
case .iPadPro11Inch:
return 283.0
@ -216,9 +248,9 @@ public enum DeviceMetrics: CaseIterable, Equatable {
func predictiveInputHeight(inLandscape: Bool) -> CGFloat {
if inLandscape {
switch self {
case .iPhone4, .iPhone5, .iPhone6, .iPhone6Plus, .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhone4, .iPhone5, .iPhone6, .iPhone6Plus, .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return 37.0
case .iPad, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
case .iPad, .iPad102Inch, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
return 50.0
case .unknown:
return 37.0
@ -227,11 +259,11 @@ public enum DeviceMetrics: CaseIterable, Equatable {
switch self {
case .iPhone4, .iPhone5:
return 37.0
case .iPhone6, .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhone6, .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return 44.0
case .iPhone6Plus:
return 45.0
case .iPad, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
case .iPad, .iPad102Inch, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
return 50.0
case .unknown:
return 44.0
@ -241,7 +273,7 @@ public enum DeviceMetrics: CaseIterable, Equatable {
public var hasTopNotch: Bool {
switch self {
case .iPhoneX, .iPhoneXSMax, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
case .iPhoneX, .iPhoneXSMax, .iPhoneXr, .iPhone12Mini, .iPhone12, .iPhone12ProMax:
return true
default:
return false
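An illustrative sketch (not part of the commit) of the ambiguity the new `scale` parameter resolves: the iPhone XR and XS Max report the same 414x896 pt screen, so point size alone cannot distinguish them:

let metrics = DeviceMetrics(
    screenSize: CGSize(width: 414.0, height: 896.0),
    scale: 2.0,                       // 2x on the XR, 3x on the XS Max
    statusBarHeight: 44.0,
    onScreenNavigationHeight: 34.0
)
// metrics == .iPhoneXr here; with scale: 3.0 the initializer keeps .iPhoneXSMax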

View File

@ -291,7 +291,7 @@ public class Window1 {
self.systemUserInterfaceStyle = hostView.systemUserInterfaceStyle
let boundsSize = self.hostView.eventView.bounds.size
self.deviceMetrics = DeviceMetrics(screenSize: UIScreen.main.bounds.size, statusBarHeight: statusBarHost?.statusBarFrame.height ?? defaultStatusBarHeight, onScreenNavigationHeight: self.hostView.onScreenNavigationHeight)
self.deviceMetrics = DeviceMetrics(screenSize: UIScreen.main.bounds.size, scale: UIScreen.main.scale, statusBarHeight: statusBarHost?.statusBarFrame.height ?? defaultStatusBarHeight, onScreenNavigationHeight: self.hostView.onScreenNavigationHeight)
self.statusBarHost = statusBarHost
let statusBarHeight: CGFloat
@ -981,7 +981,7 @@ public class Window1 {
}
if self.deviceMetrics.type == .tablet, let onScreenNavigationHeight = self.hostView.onScreenNavigationHeight, onScreenNavigationHeight != self.deviceMetrics.onScreenNavigationHeight(inLandscape: false, systemOnScreenNavigationHeight: self.hostView.onScreenNavigationHeight) {
self.deviceMetrics = DeviceMetrics(screenSize: UIScreen.main.bounds.size, statusBarHeight: statusBarHeight ?? defaultStatusBarHeight, onScreenNavigationHeight: onScreenNavigationHeight)
self.deviceMetrics = DeviceMetrics(screenSize: UIScreen.main.bounds.size, scale: UIScreen.main.scale, statusBarHeight: statusBarHeight ?? defaultStatusBarHeight, onScreenNavigationHeight: onScreenNavigationHeight)
}
let statusBarWasHidden = self.statusBarHidden

View File

@ -469,7 +469,7 @@ public class ItemListPeerItemNode: ItemListRevealOptionsItemNode, ItemListItemNo
private var editableControlNode: ItemListEditableControlNode?
private var reorderControlNode: ItemListEditableReorderControlNode?
override public var canBeSelected: Bool {
if self.editableControlNode != nil || self.disabledOverlayNode != nil {
return false
@ -499,7 +499,7 @@ public class ItemListPeerItemNode: ItemListRevealOptionsItemNode, ItemListItemNo
self.containerNode = ContextControllerSourceNode()
self.avatarNode = AvatarNode(font: avatarFont)
self.avatarNode.isLayerBacked = !smartInvertColorsEnabled()
//self.avatarNode.isLayerBacked = !smartInvertColorsEnabled()
self.titleNode = TextNode()
self.titleNode.isUserInteractionEnabled = false
@ -1103,7 +1103,8 @@ public class ItemListPeerItemNode: ItemListRevealOptionsItemNode, ItemListItemNo
strongSelf.labelBadgeNode.frame = CGRect(origin: CGPoint(x: revealOffset + params.width - rightLabelInset - badgeWidth, y: labelFrame.minY - 1.0), size: CGSize(width: badgeWidth, height: badgeDiameter))
transition.updateFrame(node: strongSelf.avatarNode, frame: CGRect(origin: CGPoint(x: params.leftInset + revealOffset + editingOffset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)))
let avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + revealOffset + editingOffset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize))
transition.updateFrame(node: strongSelf.avatarNode, frame: avatarFrame)
if item.peer.id == item.context.account.peerId, case .threatSelfAsSaved = item.aliasHandling {
strongSelf.avatarNode.setPeer(context: item.context, theme: item.presentationData.theme, peer: item.peer, overrideImage: .savedMessagesIcon, emptyColor: item.presentationData.theme.list.mediaPlaceholderColor, synchronousLoad: synchronousLoad)

View File

@ -14,6 +14,4 @@
- (TGMediaAsset *)assetAtIndex:(NSUInteger)index;
- (NSUInteger)indexOfAsset:(TGMediaAsset *)asset;
- (NSSet *)itemsIdentifiers;
@end

View File

@ -64,20 +64,4 @@
return index;
}
- (NSSet *)itemsIdentifiers
{
NSMutableSet *itemsIds = [[NSMutableSet alloc] init];
if (_concreteFetchResult != nil)
{
for (PHAsset *asset in _concreteFetchResult)
[itemsIds addObject:asset.localIdentifier];
}
else if (_assets.count > 0)
{
for (TGMediaAsset *asset in _assets)
[itemsIds addObject:asset.uniqueIdentifier];
}
return itemsIds;
}
@end

View File

@ -1251,8 +1251,8 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
- (void)createNewTextLabel
{
TGPaintSwatch *currentSwatch = _portraitSettingsView.swatch;
TGPaintSwatch *whiteSwatch = [TGPaintSwatch swatchWithColor:[UIColor whiteColor] colorLocation:1.0f brushWeight:currentSwatch.brushWeight];
TGPaintSwatch *blackSwatch = [TGPaintSwatch swatchWithColor:[UIColor blackColor] colorLocation:0.85f brushWeight:currentSwatch.brushWeight];
TGPaintSwatch *whiteSwatch = [TGPaintSwatch swatchWithColor:UIColorRGB(0xffffff) colorLocation:1.0f brushWeight:currentSwatch.brushWeight];
TGPaintSwatch *blackSwatch = [TGPaintSwatch swatchWithColor:UIColorRGB(0x000000) colorLocation:0.85f brushWeight:currentSwatch.brushWeight];
[self setCurrentSwatch:_selectedTextStyle == TGPhotoPaintTextEntityStyleOutlined ? blackSwatch : whiteSwatch sender:nil];
CGFloat maxWidth = [self fittedContentSize].width - 26.0f;
@ -1608,16 +1608,16 @@ const CGFloat TGPhotoPaintStickerKeyboardSize = 260.0f;
strongSelf->_selectedTextStyle = style;
if (style == TGPhotoPaintTextEntityStyleOutlined && [strongSelf->_portraitSettingsView.swatch.color isEqual:[UIColor whiteColor]])
if (style == TGPhotoPaintTextEntityStyleOutlined && [strongSelf->_portraitSettingsView.swatch.color isEqual:UIColorRGB(0xffffff)])
{
TGPaintSwatch *currentSwatch = strongSelf->_portraitSettingsView.swatch;
TGPaintSwatch *blackSwatch = [TGPaintSwatch swatchWithColor:[UIColor blackColor] colorLocation:0.85f brushWeight:currentSwatch.brushWeight];
TGPaintSwatch *blackSwatch = [TGPaintSwatch swatchWithColor:UIColorRGB(0x000000) colorLocation:0.85f brushWeight:currentSwatch.brushWeight];
[strongSelf setCurrentSwatch:blackSwatch sender:nil];
}
else if (style != TGPhotoPaintTextEntityStyleOutlined && [strongSelf->_portraitSettingsView.swatch.color isEqual:UIColorRGB(0x000000)])
{
TGPaintSwatch *currentSwatch = strongSelf->_portraitSettingsView.swatch;
TGPaintSwatch *whiteSwatch = [TGPaintSwatch swatchWithColor:[UIColor whiteColor] colorLocation:1.0f brushWeight:currentSwatch.brushWeight];
TGPaintSwatch *whiteSwatch = [TGPaintSwatch swatchWithColor:UIColorRGB(0xffffff) colorLocation:1.0f brushWeight:currentSwatch.brushWeight];
[strongSelf setCurrentSwatch:whiteSwatch sender:nil];
}

View File

@ -179,10 +179,10 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
}
}
if let fetchedData = fetchedData {
precondition(fetchedData.count <= readCount)
assert(fetchedData.count <= readCount)
fetchedData.withUnsafeBytes { bytes -> Void in
precondition(bytes.baseAddress != nil)
memcpy(buffer, bytes.baseAddress, fetchedData.count)
memcpy(buffer, bytes.baseAddress, min(fetchedData.count, readCount))
}
fetchedCount = Int32(fetchedData.count)
context.readingOffset += Int(fetchedCount)

View File

@ -78,7 +78,7 @@ struct PasscodeKeyboardLayout {
self.topOffset = 294.0
self.biometricsOffset = 30.0
self.deleteOffset = 20.0
case .iPhoneXSMax, .iPhone12ProMax:
case .iPhoneXSMax, .iPhoneXr, .iPhone12ProMax:
self.buttonSize = 85.0
self.horizontalSecond = 115.0
self.horizontalThird = 230.0
@ -89,7 +89,7 @@ struct PasscodeKeyboardLayout {
self.topOffset = 329.0
self.biometricsOffset = 30.0
self.deleteOffset = 20.0
case .iPad, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
case .iPad, .iPad102Inch, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
self.buttonSize = 81.0
self.horizontalSecond = 106.0
self.horizontalThird = 212.0
@ -155,11 +155,11 @@ public struct PasscodeLayout {
self.titleOffset = 162.0
self.subtitleOffset = 0.0
self.inputFieldOffset = 206.0
case .iPhoneXSMax, .iPhone12ProMax:
case .iPhoneXSMax, .iPhoneXr, .iPhone12ProMax:
self.titleOffset = 180.0
self.subtitleOffset = 0.0
self.inputFieldOffset = 226.0
case .iPad, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
case .iPad, .iPad102Inch, .iPadPro10Inch, .iPadPro11Inch, .iPadPro, .iPadPro3rdGen:
self.titleOffset = self.keyboard.topOffset - 120.0
self.subtitleOffset = -2.0
self.inputFieldOffset = self.keyboard.topOffset - 76.0

View File

@ -813,10 +813,10 @@ public final class SemanticStatusNode: ASControlNode {
var animate = false
let timestamp = CACurrentMediaTime()
if let transtionContext = self.transitionContext {
if transtionContext.startTime + transtionContext.duration < timestamp {
if let transitionContext = self.transitionContext {
if transitionContext.startTime + transitionContext.duration < timestamp {
self.transitionContext = nil
transtionContext.completion()
transitionContext.completion()
} else {
animate = true
}

View File

@ -358,7 +358,6 @@ private enum ThemeSettingsControllerEntry: ItemListNodeEntry {
}
if name == .night {
colors = colors.filter { $0 != .gray }
defaultColor = PresentationThemeAccentColor(baseColor: .white)
} else {
colors = colors.filter { $0 != .white }
}

View File

@ -303,11 +303,11 @@ public final class ShareController: ViewController {
public var dismissed: ((Bool) -> Void)?
public convenience init(context: AccountContext, subject: ShareControllerSubject, presetText: String? = nil, preferredAction: ShareControllerPreferredAction = .default, showInChat: ((Message) -> Void)? = nil, openStats: (() -> Void)? = nil, shares: Int? = nil, externalShare: Bool = true, immediateExternalShare: Bool = false, switchableAccounts: [AccountWithInfo] = [], immediatePeerId: PeerId? = nil, forcedTheme: PresentationTheme? = nil) {
self.init(sharedContext: context.sharedContext, currentContext: context, subject: subject, presetText: presetText, preferredAction: preferredAction, showInChat: showInChat, openStats: openStats, shares: shares, externalShare: externalShare, immediateExternalShare: immediateExternalShare, switchableAccounts: switchableAccounts, immediatePeerId: immediatePeerId, forcedTheme: forcedTheme)
public convenience init(context: AccountContext, subject: ShareControllerSubject, presetText: String? = nil, preferredAction: ShareControllerPreferredAction = .default, showInChat: ((Message) -> Void)? = nil, openStats: (() -> Void)? = nil, shares: Int? = nil, externalShare: Bool = true, immediateExternalShare: Bool = false, switchableAccounts: [AccountWithInfo] = [], immediatePeerId: PeerId? = nil, forcedTheme: PresentationTheme? = nil, forcedActionTitle: String? = nil) {
self.init(sharedContext: context.sharedContext, currentContext: context, subject: subject, presetText: presetText, preferredAction: preferredAction, showInChat: showInChat, openStats: openStats, shares: shares, externalShare: externalShare, immediateExternalShare: immediateExternalShare, switchableAccounts: switchableAccounts, immediatePeerId: immediatePeerId, forcedTheme: forcedTheme, forcedActionTitle: forcedActionTitle)
}
public init(sharedContext: SharedAccountContext, currentContext: AccountContext, subject: ShareControllerSubject, presetText: String? = nil, preferredAction: ShareControllerPreferredAction = .default, showInChat: ((Message) -> Void)? = nil, openStats: (() -> Void)? = nil, shares: Int? = nil, externalShare: Bool = true, immediateExternalShare: Bool = false, switchableAccounts: [AccountWithInfo] = [], immediatePeerId: PeerId? = nil, forcedTheme: PresentationTheme? = nil) {
public init(sharedContext: SharedAccountContext, currentContext: AccountContext, subject: ShareControllerSubject, presetText: String? = nil, preferredAction: ShareControllerPreferredAction = .default, showInChat: ((Message) -> Void)? = nil, openStats: (() -> Void)? = nil, shares: Int? = nil, externalShare: Bool = true, immediateExternalShare: Bool = false, switchableAccounts: [AccountWithInfo] = [], immediatePeerId: PeerId? = nil, forcedTheme: PresentationTheme? = nil, forcedActionTitle: String? = nil) {
self.sharedContext = sharedContext
self.currentContext = currentContext
self.currentAccount = currentContext.account
@ -332,7 +332,7 @@ public final class ShareController: ViewController {
switch subject {
case let .url(text):
self.defaultAction = ShareControllerAction(title: self.presentationData.strings.ShareMenu_CopyShareLink, action: { [weak self] in
self.defaultAction = ShareControllerAction(title: forcedActionTitle ?? self.presentationData.strings.ShareMenu_CopyShareLink, action: { [weak self] in
UIPasteboard.general.string = text
self?.controllerNode.cancel?()
})
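The new `forcedActionTitle` parameter is exercised later in this commit by the voice chat screen; a condensed sketch of that call site, where `context`, `darkTheme`, `inviteLink`, `presentationData`, and `controller` are assumed locals:

let shareController = ShareController(
    context: context,
    subject: .url(inviteLink),
    forcedTheme: darkTheme,
    forcedActionTitle: presentationData.strings.VoiceChat_CopyInviteLink
)
controller.present(shareController, in: .window(.root))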

View File

@ -47,6 +47,7 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
case phoneNumberRequest
case geoProximityReached(from: PeerId, to: PeerId, distance: Int32)
case groupPhoneCall(callId: Int64, accessHash: Int64, duration: Int32?)
case inviteToGroupPhoneCall(callId: Int64, accessHash: Int64, peerId: PeerId)
public init(decoder: PostboxDecoder) {
let rawValue: Int32 = decoder.decodeInt32ForKey("_rawValue", orElse: 0)
@ -101,6 +102,8 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
self = .geoProximityReached(from: PeerId(decoder.decodeInt64ForKey("fromId", orElse: 0)), to: PeerId(decoder.decodeInt64ForKey("toId", orElse: 0)), distance: (decoder.decodeInt32ForKey("dst", orElse: 0)))
case 22:
self = .groupPhoneCall(callId: decoder.decodeInt64ForKey("callId", orElse: 0), accessHash: decoder.decodeInt64ForKey("accessHash", orElse: 0), duration: decoder.decodeOptionalInt32ForKey("duration"))
case 23:
self = .inviteToGroupPhoneCall(callId: decoder.decodeInt64ForKey("callId", orElse: 0), accessHash: decoder.decodeInt64ForKey("accessHash", orElse: 0), peerId: PeerId(decoder.decodeInt64ForKey("peerId", orElse: 0)))
default:
self = .unknown
}
@ -200,6 +203,11 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
} else {
encoder.encodeNil(forKey: "duration")
}
case let .inviteToGroupPhoneCall(callId, accessHash, peerId):
encoder.encodeInt32(23, forKey: "_rawValue")
encoder.encodeInt64(callId, forKey: "callId")
encoder.encodeInt64(accessHash, forKey: "accessHash")
encoder.encodeInt64(peerId.toInt64(), forKey: "peerId")
}
}
@ -217,6 +225,8 @@ public enum TelegramMediaActionType: PostboxCoding, Equatable {
return [channelId]
case let .geoProximityReached(from, to, _):
return [from, to]
case let .inviteToGroupPhoneCall(_, _, peerId):
return [peerId]
default:
return []
}
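A sketch of how rendering code might branch on the new action case; the switch shape and the `handleInvite` helper are assumptions, not taken from this commit:

switch action {
case let .inviteToGroupPhoneCall(callId, accessHash, peerId):
    // callId/accessHash identify the group call; peerId is the invited user
    handleInvite(callId: callId, accessHash: accessHash, invited: peerId)
default:
    break
}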

View File

@ -7,7 +7,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1255641564] = { return parseString($0) }
dict[-1240849242] = { return Api.messages.StickerSet.parse_stickerSet($0) }
dict[1829443076] = { return Api.GroupCall.parse_groupCallPrivate($0) }
dict[1441699306] = { return Api.GroupCall.parse_groupCall($0) }
dict[2083222527] = { return Api.GroupCall.parse_groupCall($0) }
dict[2004925620] = { return Api.GroupCall.parse_groupCallDiscarded($0) }
dict[-457104426] = { return Api.InputGeoPoint.parse_inputGeoPointEmpty($0) }
dict[1210199983] = { return Api.InputGeoPoint.parse_inputGeoPoint($0) }
@ -503,7 +503,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1495959709] = { return Api.MessageReplyHeader.parse_messageReplyHeader($0) }
dict[411017418] = { return Api.SecureValue.parse_secureValue($0) }
dict[-316748368] = { return Api.SecureValueHash.parse_secureValueHash($0) }
dict[1118525718] = { return Api.phone.GroupCall.parse_groupCall($0) }
dict[1447862232] = { return Api.phone.GroupCall.parse_groupCall($0) }
dict[-398136321] = { return Api.messages.SearchCounter.parse_searchCounter($0) }
dict[-2128698738] = { return Api.auth.CheckedPhone.parse_checkedPhone($0) }
dict[-1188055347] = { return Api.PageListItem.parse_pageListItemText($0) }
@ -787,6 +787,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-202219658] = { return Api.MessageAction.parse_messageActionContactSignUp($0) }
dict[-1730095465] = { return Api.MessageAction.parse_messageActionGeoProximityReached($0) }
dict[2047704898] = { return Api.MessageAction.parse_messageActionGroupCall($0) }
dict[254144570] = { return Api.MessageAction.parse_messageActionInviteToGroupCall($0) }
dict[1399245077] = { return Api.PhoneCall.parse_phoneCallEmpty($0) }
dict[462375633] = { return Api.PhoneCall.parse_phoneCallWaiting($0) }
dict[-2014659757] = { return Api.PhoneCall.parse_phoneCallRequested($0) }

View File

@ -1911,7 +1911,7 @@ public struct messages {
public extension Api {
public enum GroupCall: TypeConstructorDescription {
case groupCallPrivate(flags: Int32, id: Int64, accessHash: Int64, channelId: Int32?, participantsCount: Int32, adminId: Int32)
case groupCall(flags: Int32, id: Int64, accessHash: Int64, channelId: Int32?, adminId: Int32, reflectorId: Int64, params: Api.DataJSON?, version: Int32)
case groupCall(flags: Int32, id: Int64, accessHash: Int64, adminId: Int32, reflectorId: Int64, params: Api.DataJSON?, version: Int32)
case groupCallDiscarded(id: Int64, accessHash: Int64, duration: Int32)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
@ -1927,17 +1927,16 @@ public extension Api {
serializeInt32(participantsCount, buffer: buffer, boxed: false)
serializeInt32(adminId, buffer: buffer, boxed: false)
break
case .groupCall(let flags, let id, let accessHash, let channelId, let adminId, let reflectorId, let params, let version):
case .groupCall(let flags, let id, let accessHash, let adminId, let reflectorId, let params, let version):
if boxed {
buffer.appendInt32(1441699306)
buffer.appendInt32(2083222527)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeInt64(id, buffer: buffer, boxed: false)
serializeInt64(accessHash, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 0) != 0 {serializeInt32(channelId!, buffer: buffer, boxed: false)}
serializeInt32(adminId, buffer: buffer, boxed: false)
serializeInt64(reflectorId, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 1) != 0 {params!.serialize(buffer, true)}
if Int(flags) & Int(1 << 0) != 0 {params!.serialize(buffer, true)}
serializeInt32(version, buffer: buffer, boxed: false)
break
case .groupCallDiscarded(let id, let accessHash, let duration):
@ -1955,8 +1954,8 @@ public extension Api {
switch self {
case .groupCallPrivate(let flags, let id, let accessHash, let channelId, let participantsCount, let adminId):
return ("groupCallPrivate", [("flags", flags), ("id", id), ("accessHash", accessHash), ("channelId", channelId), ("participantsCount", participantsCount), ("adminId", adminId)])
case .groupCall(let flags, let id, let accessHash, let channelId, let adminId, let reflectorId, let params, let version):
return ("groupCall", [("flags", flags), ("id", id), ("accessHash", accessHash), ("channelId", channelId), ("adminId", adminId), ("reflectorId", reflectorId), ("params", params), ("version", version)])
case .groupCall(let flags, let id, let accessHash, let adminId, let reflectorId, let params, let version):
return ("groupCall", [("flags", flags), ("id", id), ("accessHash", accessHash), ("adminId", adminId), ("reflectorId", reflectorId), ("params", params), ("version", version)])
case .groupCallDiscarded(let id, let accessHash, let duration):
return ("groupCallDiscarded", [("id", id), ("accessHash", accessHash), ("duration", duration)])
}
@ -1996,27 +1995,24 @@ public extension Api {
var _3: Int64?
_3 = reader.readInt64()
var _4: Int32?
if Int(_1!) & Int(1 << 0) != 0 {_4 = reader.readInt32() }
var _5: Int32?
_5 = reader.readInt32()
var _6: Int64?
_6 = reader.readInt64()
var _7: Api.DataJSON?
if Int(_1!) & Int(1 << 1) != 0 {if let signature = reader.readInt32() {
_7 = Api.parse(reader, signature: signature) as? Api.DataJSON
_4 = reader.readInt32()
var _5: Int64?
_5 = reader.readInt64()
var _6: Api.DataJSON?
if Int(_1!) & Int(1 << 0) != 0 {if let signature = reader.readInt32() {
_6 = Api.parse(reader, signature: signature) as? Api.DataJSON
} }
var _8: Int32?
_8 = reader.readInt32()
var _7: Int32?
_7 = reader.readInt32()
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = (Int(_1!) & Int(1 << 0) == 0) || _4 != nil
let _c4 = _4 != nil
let _c5 = _5 != nil
let _c6 = _6 != nil
let _c7 = (Int(_1!) & Int(1 << 1) == 0) || _7 != nil
let _c8 = _8 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 && _c8 {
return Api.GroupCall.groupCall(flags: _1!, id: _2!, accessHash: _3!, channelId: _4, adminId: _5!, reflectorId: _6!, params: _7, version: _8!)
let _c6 = (Int(_1!) & Int(1 << 0) == 0) || _6 != nil
let _c7 = _7 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 {
return Api.GroupCall.groupCall(flags: _1!, id: _2!, accessHash: _3!, adminId: _4!, reflectorId: _5!, params: _6, version: _7!)
}
else {
return nil
@ -21479,6 +21475,7 @@ public extension Api {
case messageActionContactSignUp
case messageActionGeoProximityReached(fromId: Api.Peer, toId: Api.Peer, distance: Int32)
case messageActionGroupCall(flags: Int32, call: Api.InputGroupCall, duration: Int32?)
case messageActionInviteToGroupCall(call: Api.InputGroupCall, userId: Int32)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
@ -21666,6 +21663,13 @@ public extension Api {
call.serialize(buffer, true)
if Int(flags) & Int(1 << 0) != 0 {serializeInt32(duration!, buffer: buffer, boxed: false)}
break
case .messageActionInviteToGroupCall(let call, let userId):
if boxed {
buffer.appendInt32(254144570)
}
call.serialize(buffer, true)
serializeInt32(userId, buffer: buffer, boxed: false)
break
}
}
@ -21721,6 +21725,8 @@ public extension Api {
return ("messageActionGeoProximityReached", [("fromId", fromId), ("toId", toId), ("distance", distance)])
case .messageActionGroupCall(let flags, let call, let duration):
return ("messageActionGroupCall", [("flags", flags), ("call", call), ("duration", duration)])
case .messageActionInviteToGroupCall(let call, let userId):
return ("messageActionInviteToGroupCall", [("call", call), ("userId", userId)])
}
}
@ -22029,6 +22035,22 @@ public extension Api {
return nil
}
}
public static func parse_messageActionInviteToGroupCall(_ reader: BufferReader) -> MessageAction? {
var _1: Api.InputGroupCall?
if let signature = reader.readInt32() {
_1 = Api.parse(reader, signature: signature) as? Api.InputGroupCall
}
var _2: Int32?
_2 = reader.readInt32()
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.MessageAction.messageActionInviteToGroupCall(call: _1!, userId: _2!)
}
else {
return nil
}
}
}
public enum PhoneCall: TypeConstructorDescription {

View File

@ -1649,13 +1649,13 @@ public struct photos {
public extension Api {
public struct phone {
public enum GroupCall: TypeConstructorDescription {
case groupCall(call: Api.GroupCall, sources: [Int32], participants: [Api.GroupCallParticipant], chats: [Api.Chat], users: [Api.User])
case groupCall(call: Api.GroupCall, sources: [Int32], participants: [Api.GroupCallParticipant], users: [Api.User])
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .groupCall(let call, let sources, let participants, let chats, let users):
case .groupCall(let call, let sources, let participants, let users):
if boxed {
buffer.appendInt32(1118525718)
buffer.appendInt32(1447862232)
}
call.serialize(buffer, true)
buffer.appendInt32(481674261)
@ -1669,11 +1669,6 @@ public struct phone {
item.serialize(buffer, true)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(chats.count))
for item in chats {
item.serialize(buffer, true)
}
buffer.appendInt32(481674261)
buffer.appendInt32(Int32(users.count))
for item in users {
item.serialize(buffer, true)
@ -1684,8 +1679,8 @@ public struct phone {
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .groupCall(let call, let sources, let participants, let chats, let users):
return ("groupCall", [("call", call), ("sources", sources), ("participants", participants), ("chats", chats), ("users", users)])
case .groupCall(let call, let sources, let participants, let users):
return ("groupCall", [("call", call), ("sources", sources), ("participants", participants), ("users", users)])
}
}
@ -1702,21 +1697,16 @@ public struct phone {
if let _ = reader.readInt32() {
_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.GroupCallParticipant.self)
}
var _4: [Api.Chat]?
var _4: [Api.User]?
if let _ = reader.readInt32() {
_4 = Api.parseVector(reader, elementSignature: 0, elementType: Api.Chat.self)
}
var _5: [Api.User]?
if let _ = reader.readInt32() {
_5 = Api.parseVector(reader, elementSignature: 0, elementType: Api.User.self)
_4 = Api.parseVector(reader, elementSignature: 0, elementType: Api.User.self)
}
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = _4 != nil
let _c5 = _5 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 {
return Api.phone.GroupCall.groupCall(call: _1!, sources: _2!, participants: _3!, chats: _4!, users: _5!)
if _c1 && _c2 && _c3 && _c4 {
return Api.phone.GroupCall.groupCall(call: _1!, sources: _2!, participants: _3!, users: _4!)
}
else {
return nil
@ -7249,13 +7239,12 @@ public extension Api {
})
}
public static func createGroupCall(flags: Int32, channel: Api.InputChannel, randomId: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
public static func createGroupCall(channel: Api.InputChannel, randomId: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
let buffer = Buffer()
buffer.appendInt32(-1542553507)
serializeInt32(flags, buffer: buffer, boxed: false)
buffer.appendInt32(-467076606)
channel.serialize(buffer, true)
serializeInt32(randomId, buffer: buffer, boxed: false)
return (FunctionDescription(name: "phone.createGroupCall", parameters: [("flags", flags), ("channel", channel), ("randomId", randomId)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
return (FunctionDescription(name: "phone.createGroupCall", parameters: [("channel", channel), ("randomId", randomId)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
let reader = BufferReader(buffer)
var result: Api.Updates?
if let signature = reader.readInt32() {
@ -7310,6 +7299,21 @@ public extension Api {
})
}
public static func inviteToGroupCall(call: Api.InputGroupCall, userId: Api.InputUser) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
let buffer = Buffer()
buffer.appendInt32(-580284540)
call.serialize(buffer, true)
userId.serialize(buffer, true)
return (FunctionDescription(name: "phone.inviteToGroupCall", parameters: [("call", call), ("userId", userId)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
let reader = BufferReader(buffer)
var result: Api.Updates?
if let signature = reader.readInt32() {
result = Api.parse(reader, signature: signature) as? Api.Updates
}
return result
})
}
public static func discardGroupCall(call: Api.InputGroupCall) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
let buffer = Buffer()
buffer.appendInt32(2054648117)

View File

@ -32,6 +32,8 @@ swift_library(
"//submodules/LegacyComponents:LegacyComponents",
"//submodules/DeleteChatPeerActionSheetItem:DeleteChatPeerActionSheetItem",
"//submodules/AnimationUI:AnimationUI",
"//submodules/UndoUI:UndoUI",
"//submodules/AudioBlob:AudioBlob",
],
visibility = [
"//visibility:public",

View File

@ -11,12 +11,10 @@ private let labelFont = Font.regular(13.0)
final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
struct Content: Equatable {
enum Appearance: Equatable {
enum Color {
enum Color: Equatable {
case red
case green
case redDimmed
case greenDimmed
case grayDimmed
case custom(UInt32)
}
case blurred(isFilled: Bool)
@ -198,12 +196,8 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
fillColor = UIColor(rgb: 0xd92326)
case .green:
fillColor = UIColor(rgb: 0x74db58)
case .redDimmed:
fillColor = UIColor(rgb: 0x4d120e)
case .greenDimmed:
fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.3)
case .grayDimmed:
fillColor = UIColor(rgb: 0x1C1C1E)
case let .custom(color):
fillColor = UIColor(rgb: color)
}
}
@ -296,12 +290,8 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
case .green:
fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
case .redDimmed:
fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.4).withAlphaComponent(0.2)
case .greenDimmed:
fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.4).withAlphaComponent(0.2)
case .grayDimmed:
fillColor = UIColor(rgb: 0x1C1C1E).withAlphaComponent(0.2)
case let .custom(color):
fillColor = UIColor(rgb: color).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
}
}

View File

@ -2022,7 +2022,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
private final class CallPanGestureRecognizer: UIPanGestureRecognizer {
final class CallPanGestureRecognizer: UIPanGestureRecognizer {
private(set) var firstLocation: CGPoint?
public var shouldBegin: ((CGPoint) -> Bool)?

View File

@ -27,7 +27,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private enum InternalState {
case requesting
case active(GroupCallInfo)
case estabilished(GroupCallInfo, String, [UInt32], [UInt32: PeerId])
case estabilished(GroupCallInfo, String, UInt32, [UInt32], [UInt32: PeerId])
var callInfo: GroupCallInfo? {
switch self {
@ -35,7 +35,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return nil
case let .active(info):
return info
case let .estabilished(info, _, _, _):
case let .estabilished(info, _, _, _, _):
return info
}
}
@ -75,6 +75,18 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return self.audioOutputStatePromise.get()
}
private let audioLevelsPipe = ValuePipe<[(PeerId, Float)]>()
public var audioLevels: Signal<[(PeerId, Float)], NoError> {
return self.audioLevelsPipe.signal()
}
private var audioLevelsDisposable = MetaDisposable()
private let myAudioLevelPipe = ValuePipe<Float>()
public var myAudioLevel: Signal<Float, NoError> {
return self.myAudioLevelPipe.signal()
}
private var myAudioLevelDisposable = MetaDisposable()
private var audioSessionControl: ManagedAudioSessionControl?
private var audioSessionDisposable: Disposable?
private let audioSessionShouldBeActive = ValuePromise<Bool>(false, ignoreRepeated: true)
@ -120,6 +132,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private let memberStatesDisposable = MetaDisposable()
private let leaveDisposable = MetaDisposable()
private var checkCallDisposable: Disposable?
private var isCurrentlyConnecting: Bool?
init(
accountContext: AccountContext,
audioSession: ManagedAudioSession,
@ -217,14 +232,12 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
})
self.requestCall()
self.groupCallParticipantUpdatesDisposable = (self.account.stateManager.groupCallParticipantUpdates
|> deliverOnMainQueue).start(next: { [weak self] updates in
guard let strongSelf = self else {
return
}
if case let .estabilished(callInfo, _, _, _) = strongSelf.internalState {
if case let .estabilished(callInfo, _, _, _, _) = strongSelf.internalState {
var addedSsrc: [UInt32] = []
var removedSsrc: [UInt32] = []
for (callId, peerId, ssrc, isAdded) in updates {
@ -246,6 +259,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
})
self.requestCall()
}
deinit {
@ -259,6 +274,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.isMutedDisposable.dispose()
self.memberStatesDisposable.dispose()
self.networkStateDisposable.dispose()
self.checkCallDisposable?.dispose()
self.audioLevelsDisposable.dispose()
self.myAudioLevelDisposable.dispose()
}
private func updateSessionState(internalState: InternalState, audioSessionControl: ManagedAudioSessionControl?) {
@ -275,6 +293,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.audioSessionShouldBeActive.set(true)
switch previousInternalState {
case .requesting:
break
default:
if case .requesting = internalState {
self.isCurrentlyConnecting = nil
}
}
switch previousInternalState {
case .active:
break
@ -284,7 +311,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.callContext = callContext
self.requestDisposable.set((callContext.joinPayload
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] joinPayload in
|> deliverOnMainQueue).start(next: { [weak self] joinPayload, ssrc in
guard let strongSelf = self else {
return
}
@ -299,7 +326,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return
}
if let clientParams = joinCallResult.callInfo.clientParams {
strongSelf.updateSessionState(internalState: .estabilished(joinCallResult.callInfo, clientParams, joinCallResult.ssrcs, joinCallResult.ssrcMapping), audioSessionControl: strongSelf.audioSessionControl)
strongSelf.updateSessionState(internalState: .estabilished(joinCallResult.callInfo, clientParams, ssrc, joinCallResult.ssrcs, joinCallResult.ssrcMapping), audioSessionControl: strongSelf.audioSessionControl)
}
}))
}))
@ -324,7 +351,21 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
case .connected:
mappedState = .connected
}
strongSelf.stateValue.networkState = mappedState
if strongSelf.stateValue.networkState != mappedState {
strongSelf.stateValue.networkState = mappedState
}
let isConnecting = mappedState == .connecting
if strongSelf.isCurrentlyConnecting != isConnecting {
strongSelf.isCurrentlyConnecting = isConnecting
if isConnecting {
strongSelf.startCheckingCallIfNeeded()
} else {
strongSelf.checkCallDisposable?.dispose()
strongSelf.checkCallDisposable = nil
}
}
}))
self.memberStatesDisposable.set((callContext.memberStates
@ -343,6 +384,30 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
strongSelf.membersValue = result
}))
self.audioLevelsDisposable.set((callContext.audioLevels
|> deliverOnMainQueue).start(next: { [weak self] levels in
guard let strongSelf = self else {
return
}
var result: [(PeerId, Float)] = []
for (ssrc, level) in levels {
if let peerId = strongSelf.ssrcMapping[ssrc] {
result.append((peerId, level))
}
}
if !result.isEmpty {
strongSelf.audioLevelsPipe.putNext(result)
}
}))
self.myAudioLevelDisposable.set((callContext.myAudioLevel
|> deliverOnMainQueue).start(next: { [weak self] level in
guard let strongSelf = self else {
return
}
strongSelf.myAudioLevelPipe.putNext(level)
}))
}
}
@ -350,13 +415,47 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
case .estabilished:
break
default:
if case let .estabilished(_, clientParams, ssrcs, ssrcMapping) = internalState {
if case let .estabilished(_, clientParams, _, ssrcs, ssrcMapping) = internalState {
self.ssrcMapping = ssrcMapping
self.callContext?.setJoinResponse(payload: clientParams, ssrcs: ssrcs)
if let isCurrentlyConnecting = self.isCurrentlyConnecting, isCurrentlyConnecting {
self.startCheckingCallIfNeeded()
}
}
}
}
private func startCheckingCallIfNeeded() {
if self.checkCallDisposable != nil {
return
}
if case let .estabilished(callInfo, _, ssrc, _, _) = self.internalState {
let checkSignal = checkGroupCall(account: self.account, callId: callInfo.id, accessHash: callInfo.accessHash, ssrc: Int32(bitPattern: ssrc))
self.checkCallDisposable = ((
checkSignal
|> castError(Bool.self)
|> delay(4.0, queue: .mainQueue())
|> mapToSignal { result -> Signal<Bool, Bool> in
if case .success = result {
return .fail(true)
} else {
return .single(true)
}
}
)
|> restartIfError
|> take(1)
|> deliverOnMainQueue).start(completed: { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.checkCallDisposable = nil
strongSelf.requestCall()
})
}
}
private func updateIsAudioSessionActive(_ value: Bool) {
if self.isAudioSessionActive != value {
self.isAudioSessionActive = value
@ -364,7 +463,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
public func leave() -> Signal<Bool, NoError> {
if case let .estabilished(callInfo, _, _, _) = self.internalState {
if case let .estabilished(callInfo, _, _, _, _) = self.internalState {
self.leaveDisposable.set((leaveGroupCall(account: self.account, callId: callInfo.id, accessHash: callInfo.accessHash)
|> deliverOnMainQueue).start(completed: { [weak self] in
self?._canBeRemoved.set(.single(true))
@ -402,7 +501,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
private func requestCall() {
self.callContext?.stop()
self.callContext = nil
self.internalState = .requesting
self.isCurrentlyConnecting = nil
enum CallError {
case generic

File diff suppressed because it is too large

View File

@ -17,6 +17,7 @@ import AppBundle
import ContextUI
import ShareController
import DeleteChatPeerActionSheetItem
import UndoUI
private final class VoiceChatControllerTitleView: UIView {
private var theme: PresentationTheme
@ -90,14 +91,37 @@ public final class VoiceChatController: ViewController {
let isLoading: Bool
let isEmpty: Bool
let crossFade: Bool
let count: Int
}
private final class Interaction {
let invitePeer: (Peer) -> Void
let peerContextAction: (Peer, ASDisplayNode, ContextGesture?) -> Void
init(peerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) -> Void) {
private var audioLevels: [PeerId: ValuePipe<Float>] = [:]
init(invitePeer: @escaping (Peer) -> Void, peerContextAction: @escaping (Peer, ASDisplayNode, ContextGesture?) -> Void) {
self.invitePeer = invitePeer
self.peerContextAction = peerContextAction
}
func getAudioLevel(_ peerId: PeerId) -> Signal<Float, NoError>? {
if let current = self.audioLevels[peerId] {
return current.signal()
} else {
let value = ValuePipe<Float>()
self.audioLevels[peerId] = value
return value.signal()
}
}
func updateAudioLevels(_ levels: [(PeerId, Float)]) {
for (peerId, level) in levels {
if let pipe = self.audioLevels[peerId] {
pipe.putNext(level)
}
}
}
}
private struct PeerEntry: Comparable, Identifiable {
@ -126,16 +150,22 @@ public final class VoiceChatController: ViewController {
let peer = self.participant.peer
let text: VoiceChatParticipantItem.ParticipantText
let icon: VoiceChatParticipantItem.Icon
switch self.state {
case .inactive:
text = .presence
icon = .invite
case .listening:
text = .text(presentationData.strings.VoiceChat_StatusListening, .accent)
icon = .microphone(true, UIColor(rgb: 0x979797))
case .speaking:
text = .text(presentationData.strings.VoiceChat_StatusSpeaking, .constructive)
icon = .microphone(false, UIColor(rgb: 0x34c759))
}
return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, presence: self.participant.presences[self.participant.peer.id], text: text, enabled: true, action: nil, contextAction: { node, gesture in
return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, presence: self.participant.presences[self.participant.peer.id], text: text, icon: icon, enabled: true, audioLevel: interaction.getAudioLevel(peer.id), action: {
interaction.invitePeer(peer)
}, contextAction: { node, gesture in
interaction.peerContextAction(peer, node, gesture)
})
}
@ -148,7 +178,7 @@ public final class VoiceChatController: ViewController {
let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) }
let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) }
return ListTransition(deletions: deletions, insertions: insertions, updates: updates, isLoading: isLoading, isEmpty: isEmpty, crossFade: crossFade)
return ListTransition(deletions: deletions, insertions: insertions, updates: updates, isLoading: isLoading, isEmpty: isEmpty, crossFade: crossFade, count: toEntries.count)
}
private weak var controller: VoiceChatController?
@ -164,9 +194,9 @@ public final class VoiceChatController: ViewController {
private let audioOutputNode: CallControllerButtonItemNode
private let leaveNode: CallControllerButtonItemNode
private let actionButton: VoiceChatActionButton
private let statusLabel: ImmediateTextNode
private var enqueuedTransitions: [ListTransition] = []
private var maxListHeight: CGFloat?
private var validLayout: (ContainerViewLayout, CGFloat)?
private var didSetContentsReady: Bool = false
@ -184,11 +214,15 @@ public final class VoiceChatController: ViewController {
private var isMutedDisposable: Disposable?
private var callStateDisposable: Disposable?
private var pushingToTalk = false
private var callState: PresentationGroupCallState?
private var audioOutputStateDisposable: Disposable?
private var audioOutputState: ([AudioSessionOutput], AudioSessionOutput?)?
private var audioLevelsDisposable: Disposable?
private var myAudioLevelDisposable: Disposable?
private var memberStatesDisposable: Disposable?
private var itemInteraction: Interaction?
@ -205,6 +239,7 @@ public final class VoiceChatController: ViewController {
self.optionsButton = VoiceChatOptionsButton()
self.contentContainer = ASDisplayNode()
self.contentContainer.backgroundColor = .black
self.listNode = ListView()
self.listNode.backgroundColor = self.darkTheme.list.itemBlocksBackgroundColor
@ -214,19 +249,38 @@ public final class VoiceChatController: ViewController {
self.audioOutputNode = CallControllerButtonItemNode()
self.leaveNode = CallControllerButtonItemNode()
self.actionButton = VoiceChatActionButton(size: CGSize(width: 244.0, height: 244.0))
self.statusLabel = ImmediateTextNode()
self.actionButton = VoiceChatActionButton()
super.init()
self.itemInteraction = Interaction(peerContextAction: { [weak self] peer, sourceNode, gesture in
self.itemInteraction = Interaction(invitePeer: { [weak self] peer in
guard let strongSelf = self else {
return
}
strongSelf.controller?.present(
UndoOverlayController(
presentationData: strongSelf.presentationData,
content: .invitedToVoiceChat(
context: strongSelf.context,
peer: peer,
text: strongSelf.presentationData.strings.VoiceChat_UserInvited(peer.compactDisplayTitle).0
),
elevatedLayout: false,
action: { action in
return true
}
),
in: .current
)
}, peerContextAction: { [weak self] peer, sourceNode, gesture in
guard let strongSelf = self, let controller = strongSelf.controller, let sourceNode = sourceNode as? ContextExtractedContentContainingNode else {
return
}
var items: [ContextMenuItem] = []
items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_MutePeer, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Call"), color: theme.actionSheet.primaryTextColor)
return generateTintedImage(image: UIImage(bundleImageName: "Call/Context Menu/Mute"), color: theme.actionSheet.primaryTextColor)
}, action: { _, f in
f(.dismissWithoutContent)
@ -260,17 +314,14 @@ public final class VoiceChatController: ViewController {
strongSelf.controller?.present(actionSheet, in: .window(.root))
})))
let contextController = ContextController(account: strongSelf.context.account, presentationData: strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme), source: .extracted(VoiceChatContextExtractedContentSource(controller: controller, sourceNode: sourceNode)), items: .single(items), reactionItems: [], gesture: gesture)
let contextController = ContextController(account: strongSelf.context.account, presentationData: strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme), source: .extracted(VoiceChatContextExtractedContentSource(controller: controller, sourceNode: sourceNode, keepInPlace: false)), items: .single(items), reactionItems: [], gesture: gesture)
strongSelf.controller?.presentInGlobalOverlay(contextController)
})
self.backgroundColor = .black
self.contentContainer.addSubnode(self.actionButton)
self.contentContainer.addSubnode(self.listNode)
self.contentContainer.addSubnode(self.audioOutputNode)
self.contentContainer.addSubnode(self.leaveNode)
self.contentContainer.addSubnode(self.statusLabel)
self.contentContainer.addSubnode(self.actionButton)
self.addSubnode(self.contentContainer)
@ -353,12 +404,34 @@ public final class VoiceChatController: ViewController {
self.audioOutputStateDisposable = (call.audioOutputState
|> deliverOnMainQueue).start(next: { [weak self] state in
if let strongSelf = self {
strongSelf.audioOutputState = state
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
}
guard let strongSelf = self else {
return
}
strongSelf.audioOutputState = state
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
}
})
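// Per-peer audio levels from the call are forwarded to the participant list items.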
self.audioLevelsDisposable = (call.audioLevels
|> deliverOnMainQueue).start(next: { [weak self] levels in
guard let strongSelf = self else {
return
}
strongSelf.itemInteraction?.updateAudioLevels(levels)
})
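// Our own level is reported separately: it is zeroed while muted and also drives the central action button.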
self.myAudioLevelDisposable = (call.myAudioLevel
|> deliverOnMainQueue).start(next: { [weak self] level in
guard let strongSelf = self else {
return
}
var effectiveLevel: Float = 0.0
if let state = strongSelf.callState, !state.isMuted {
effectiveLevel = level
}
strongSelf.itemInteraction?.updateAudioLevels([(strongSelf.context.account.peerId, effectiveLevel)])
strongSelf.actionButton.updateLevel(CGFloat(effectiveLevel))
})
self.leaveNode.addTarget(self, action: #selector(self.leavePressed), forControlEvents: .touchUpInside)
@ -390,7 +463,7 @@ public final class VoiceChatController: ViewController {
f(.dismissWithoutContent)
if let strongSelf = self {
let shareController = ShareController(context: strongSelf.context, subject: .url("url"), forcedTheme: strongSelf.darkTheme)
let shareController = ShareController(context: strongSelf.context, subject: .url("url"), forcedTheme: strongSelf.darkTheme, forcedActionTitle: strongSelf.presentationData.strings.VoiceChat_CopyInviteLink)
strongSelf.controller?.present(shareController, in: .window(.root))
}
})))
@ -401,7 +474,7 @@ public final class VoiceChatController: ViewController {
})))
let contextController = ContextController(account: strongSelf.context.account, presentationData: strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme), source: .extracted(VoiceChatContextExtractedContentSource(controller: controller, sourceNode: strongOptionsButton.extractedContainerNode)), items: .single(items), reactionItems: [], gesture: gesture)
let contextController = ContextController(account: strongSelf.context.account, presentationData: strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme), source: .extracted(VoiceChatContextExtractedContentSource(controller: controller, sourceNode: strongOptionsButton.extractedContainerNode, keepInPlace: true)), items: .single(items), reactionItems: [], gesture: gesture)
strongSelf.controller?.presentInGlobalOverlay(contextController)
}
let optionsButtonItem = UIBarButtonItem(customDisplayNode: self.optionsButton)!
@ -418,6 +491,25 @@ public final class VoiceChatController: ViewController {
self.callStateDisposable?.dispose()
self.audioOutputStateDisposable?.dispose()
self.memberStatesDisposable?.dispose()
self.audioLevelsDisposable?.dispose()
self.myAudioLevelDisposable?.dispose()
}
override func didLoad() {
super.didLoad()
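// A long press on the action button drives push-to-talk; a pan on the whole view allows interactive dismissal.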
let longTapRecognizer = UILongPressGestureRecognizer(target: self, action: #selector(self.actionButtonPressGesture(_:)))
longTapRecognizer.minimumPressDuration = 0.3
self.actionButton.view.addGestureRecognizer(longTapRecognizer)
let panRecognizer = CallPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
panRecognizer.shouldBegin = { [weak self] _ in
guard let strongSelf = self else {
return false
}
return true
}
self.view.addGestureRecognizer(panRecognizer)
}
@objc private func rightNavigationButtonAction() {
@ -431,6 +523,21 @@ public final class VoiceChatController: ViewController {
}))
}
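// Push-to-talk: unmute while the button is held, mute again when the press ends or is cancelled.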
@objc private func actionButtonPressGesture(_ gestureRecognizer: UILongPressGestureRecognizer) {
switch gestureRecognizer.state {
case .began:
self.pushingToTalk = true
self.actionButton.pressing = true
self.call.setIsMuted(false)
case .ended, .cancelled:
self.pushingToTalk = false
self.actionButton.pressing = false
self.call.setIsMuted(true)
default:
break
}
}
@objc private func actionButtonPressed() {
self.call.toggleIsMuted()
}
@ -506,9 +613,13 @@ public final class VoiceChatController: ViewController {
let bottomAreaHeight: CGFloat = 333.0
let listOrigin = CGPoint(x: 16.0, y: navigationHeight + 10.0)
// let listFrame = CGRect(origin: listOrigin, size: CGSize(width: layout.size.width - 16.0 * 2.0, height: max(1.0, layout.size.height - bottomAreaHeight - listOrigin.y)))
let listFrame = CGRect(origin: listOrigin, size: CGSize(width: layout.size.width - 16.0 * 2.0, height: 168.0))
var listHeight: CGFloat = 56.0
if let maxListHeight = self.maxListHeight {
listHeight = min(max(1.0, layout.size.height - bottomAreaHeight - listOrigin.y), maxListHeight)
}
let listFrame = CGRect(origin: listOrigin, size: CGSize(width: layout.size.width - 16.0 * 2.0, height: listHeight))
transition.updateFrame(node: self.listNode, frame: listFrame)
let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition)
@ -520,6 +631,45 @@ public final class VoiceChatController: ViewController {
let centralButtonSize = CGSize(width: 244.0, height: 244.0)
let sideButtonInset: CGFloat = 27.0
let actionButtonFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - centralButtonSize.width) / 2.0), y: layout.size.height - bottomAreaHeight + floor((bottomAreaHeight - centralButtonSize.height) / 2.0)), size: centralButtonSize)
var isMicOn = false
let actionButtonState: VoiceChatActionButtonState
let actionButtonTitle: String
let actionButtonSubtitle: String
let audioButtonAppearance: CallControllerButtonItemNode.Content.Appearance
if let callState = callState {
isMicOn = !callState.isMuted
switch callState.networkState {
case .connecting:
actionButtonState = .connecting
actionButtonTitle = self.presentationData.strings.VoiceChat_Connecting
actionButtonSubtitle = ""
audioButtonAppearance = .color(.custom(0x1c1c1e))
case .connected:
actionButtonState = .active(state: isMicOn ? .on : .muted)
if isMicOn {
actionButtonTitle = self.pushingToTalk ? self.presentationData.strings.VoiceChat_Live : self.presentationData.strings.VoiceChat_Mute
actionButtonSubtitle = ""
audioButtonAppearance = .color(.custom(0x005720))
} else {
actionButtonTitle = self.presentationData.strings.VoiceChat_Unmute
actionButtonSubtitle = self.presentationData.strings.VoiceChat_UnmuteHelp
audioButtonAppearance = .color(.custom(0x00274d))
}
}
} else {
actionButtonState = .connecting
actionButtonTitle = self.presentationData.strings.VoiceChat_Connecting
actionButtonSubtitle = ""
audioButtonAppearance = .color(.custom(0x1c1c1e))
}
self.actionButton.update(size: centralButtonSize, buttonSize: CGSize(width: 144.0, height: 144.0), state: actionButtonState, title: actionButtonTitle, subtitle: actionButtonSubtitle, animated: true)
transition.updateFrame(node: self.actionButton, frame: actionButtonFrame)
var audioMode: CallControllerButtonsSpeakerMode = .none
//var hasAudioRouteMenu: Bool = false
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
@ -547,13 +697,13 @@ public final class VoiceChatController: ViewController {
}
let soundImage: CallControllerButtonItemNode.Content.Image
var soundAppearance: CallControllerButtonItemNode.Content.Appearance = .color(.grayDimmed)
var soundAppearance: CallControllerButtonItemNode.Content.Appearance = audioButtonAppearance
switch audioMode {
case .none, .builtin:
soundImage = .speaker
case .speaker:
soundImage = .speaker
soundAppearance = .blurred(isFilled: false)
// soundAppearance = .blurred(isFilled: false)
case .headphones:
soundImage = .bluetooth
case let .bluetooth(type):
@ -567,47 +717,13 @@ public final class VoiceChatController: ViewController {
}
}
self.audioOutputNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage), text: self.presentationData.strings.VoiceChat_Audio, transition: .immediate)
self.audioOutputNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage), text: self.presentationData.strings.VoiceChat_Audio, transition: .animated(duration: 0.4, curve: .linear))
self.leaveNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.redDimmed), image: .end), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate)
self.leaveNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.custom(0x4d120e)), image: .end), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate)
transition.updateFrame(node: self.audioOutputNode, frame: CGRect(origin: CGPoint(x: sideButtonInset, y: layout.size.height - bottomAreaHeight + floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
transition.updateFrame(node: self.leaveNode, frame: CGRect(origin: CGPoint(x: layout.size.width - sideButtonInset - sideButtonSize.width, y: layout.size.height - bottomAreaHeight + floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
let actionButtonFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - centralButtonSize.width) / 2.0), y: layout.size.height - bottomAreaHeight + floor((bottomAreaHeight - centralButtonSize.height) / 2.0)), size: centralButtonSize)
var isMicOn = false
let actionButtonState: VoiceChatActionButton.State
let actionButtonTitle: String
let actionButtonSubtitle: String
if let callState = callState {
isMicOn = !callState.isMuted
// switch callState.networkState {
// case .connecting:
// actionButtonState = .connecting
// actionButtonTitle = "Connecting..."
// actionButtonSubtitle = ""
// case .connected:
actionButtonState = .active(state: isMicOn ? .on : .muted)
if isMicOn {
actionButtonTitle = self.presentationData.strings.VoiceChat_Live
actionButtonSubtitle = ""
} else {
actionButtonTitle = self.presentationData.strings.VoiceChat_Unmute
actionButtonSubtitle = self.presentationData.strings.VoiceChat_UnmuteHelp
}
// }
} else {
actionButtonState = .connecting
actionButtonTitle = self.presentationData.strings.VoiceChat_Connecting
actionButtonSubtitle = ""
}
self.actionButton.update(size: centralButtonSize, state: actionButtonState, title: actionButtonTitle, subtitle: actionButtonSubtitle, animated: true)
transition.updateFrame(node: self.actionButton, frame: actionButtonFrame)
if isFirstTime {
while !self.enqueuedTransitions.isEmpty {
self.dequeueTransition()
@ -620,9 +736,7 @@ public final class VoiceChatController: ViewController {
self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.listNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
self.actionButton.startAnimating()
self.actionButton.layer.animateScale(from: 0.1, to: 1.0, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
self.audioOutputNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.5, timingFunction: kCAMediaTimingFunctionSpring)
@ -674,6 +788,19 @@ public final class VoiceChatController: ViewController {
strongSelf.didSetContentsReady = true
strongSelf.controller?.contentsReady.set(true)
}
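// When members are inserted or removed, recompute the maximum list height from a visible item's height and relayout with a spring animation.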
if !transition.deletions.isEmpty || !transition.insertions.isEmpty {
var itemHeight: CGFloat = 56.0
strongSelf.listNode.forEachVisibleItemNode { node in
if node.frame.height > 0 {
itemHeight = node.frame.height
}
}
strongSelf.maxListHeight = CGFloat(transition.count) * itemHeight
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .spring))
}
}
})
}
@ -706,6 +833,10 @@ public final class VoiceChatController: ViewController {
var index: Int32 = 0
for member in members {
if let user = member.peer as? TelegramUser, user.botInfo != nil || user.isDeleted {
continue
}
let memberState: PeerEntry.State
if member.peer.id == self.context.account.peerId {
if !isMuted {
@ -733,6 +864,55 @@ public final class VoiceChatController: ViewController {
let transition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, crossFade: false, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!)
self.enqueueTransition(transition)
}
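// Interactive dismissal: the pan offsets the container's bounds; a fast fling animates it off-screen and dismisses the controller, otherwise it springs back into place.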
@objc private func panGesture(_ recognizer: CallPanGestureRecognizer) {
switch recognizer.state {
case .began:
guard let (layout, _) = self.validLayout else {
return
}
self.contentContainer.clipsToBounds = true
self.contentContainer.cornerRadius = layout.deviceMetrics.screenCornerRadius
case .changed:
let offset = recognizer.translation(in: self.view).y
var bounds = self.bounds
bounds.origin.y = -offset
let transition = offset / bounds.height
if transition > 0.02 {
self.controller?.statusBar.statusBarStyle = .Ignore
} else {
self.controller?.statusBar.statusBarStyle = .White
}
self.bounds = bounds
case .cancelled, .ended:
let velocity = recognizer.velocity(in: self.view).y
if abs(velocity) < 200.0 {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, completion: { _ in
self.contentContainer.cornerRadius = 0.0
})
self.controller?.statusBar.statusBarStyle = .White
} else {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height : bounds.height)
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in
self?.controller?.dismissInteractively()
var initialBounds = bounds
initialBounds.origin = CGPoint()
self?.bounds = initialBounds
self?.controller?.statusBar.statusBarStyle = .White
})
}
default:
break
}
}
}
private let sharedContext: SharedAccountContext
@ -758,7 +938,7 @@ public final class VoiceChatController: ViewController {
self.call = call
self.presentationData = sharedContext.currentPresentationData.with { $0 }
let darkNavigationTheme = NavigationBarTheme(buttonColor: .white, disabledButtonColor: UIColor(rgb: 0x525252), primaryTextColor: .white, backgroundColor: UIColor(white: 0.0, alpha: 0.6), separatorColor: UIColor(white: 0.0, alpha: 0.8), badgeBackgroundColor: .clear, badgeStrokeColor: .clear, badgeTextColor: .clear)
let darkNavigationTheme = NavigationBarTheme(buttonColor: .white, disabledButtonColor: UIColor(rgb: 0x525252), primaryTextColor: .white, backgroundColor: .clear, separatorColor: UIColor(white: 0.0, alpha: 0.8), badgeBackgroundColor: .clear, badgeStrokeColor: .clear, badgeTextColor: .clear)
super.init(navigationBarPresentationData: NavigationBarPresentationData(theme: darkNavigationTheme, strings: NavigationBarStrings(presentationStrings: self.presentationData.strings)))
@ -810,6 +990,16 @@ public final class VoiceChatController: ViewController {
}
}
func dismissInteractively(completion: (() -> Void)? = nil) {
if !self.isDismissed {
self.isDismissed = true
self.didAppearOnce = false
completion?()
self.presentingViewController?.dismiss(animated: false)
}
}
override public func dismiss(completion: (() -> Void)? = nil) {
if !self.isDismissed {
self.isDismissed = true
@ -830,15 +1020,16 @@ public final class VoiceChatController: ViewController {
}
private final class VoiceChatContextExtractedContentSource: ContextExtractedContentSource {
let keepInPlace: Bool = true
var keepInPlace: Bool
let ignoreContentTouches: Bool = true
private let controller: ViewController
private let sourceNode: ContextExtractedContentContainingNode
init(controller: ViewController, sourceNode: ContextExtractedContentContainingNode) {
init(controller: ViewController, sourceNode: ContextExtractedContentContainingNode, keepInPlace: Bool) {
self.controller = controller
self.sourceNode = sourceNode
self.keepInPlace = keepInPlace
}
func takeView() -> ContextControllerTakeViewInfo? {

View File

@ -0,0 +1,200 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
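// A microphone icon that can animate between the muted and unmuted states by drawing a diagonal strike-through.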
private final class VoiceChatMicrophoneNodeDrawingState: NSObject {
let color: UIColor
let transition: CGFloat
let reverse: Bool
init(color: UIColor, transition: CGFloat, reverse: Bool) {
self.color = color
self.transition = transition
self.reverse = reverse
super.init()
}
}
final class VoiceChatMicrophoneNode: ASDisplayNode {
class State: Equatable {
let muted: Bool
let color: UIColor
init(muted: Bool, color: UIColor) {
self.muted = muted
self.color = color
}
static func ==(lhs: State, rhs: State) -> Bool {
if lhs.muted != rhs.muted {
return false
}
if lhs.color.argb != rhs.color.argb {
return false
}
return true
}
}
private class TransitionContext {
let startTime: Double
let duration: Double
let previousState: State
init(startTime: Double, duration: Double, previousState: State) {
self.startTime = startTime
self.duration = duration
self.previousState = previousState
}
}
private var animator: ConstantDisplayLinkAnimator?
private var hasState = false
private var state: State = State(muted: false, color: .black)
private var transitionContext: TransitionContext?
override init() {
super.init()
self.isOpaque = false
}
func update(state: State, animated: Bool) {
var animated = animated
if !self.hasState {
self.hasState = true
animated = false
}
if self.state != state {
let previousState = self.state
self.state = state
if animated {
self.transitionContext = TransitionContext(startTime: CACurrentMediaTime(), duration: 0.18, previousState: previousState)
}
self.updateAnimations()
self.setNeedsDisplay()
}
}
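// Runs a ConstantDisplayLinkAnimator while a transition is in flight so the icon is redrawn on every frame.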
private func updateAnimations() {
var animate = false
let timestamp = CACurrentMediaTime()
if let transitionContext = self.transitionContext {
if transitionContext.startTime + transitionContext.duration < timestamp {
self.transitionContext = nil
} else {
animate = true
}
}
if animate {
let animator: ConstantDisplayLinkAnimator
if let current = self.animator {
animator = current
} else {
animator = ConstantDisplayLinkAnimator(update: { [weak self] in
self?.updateAnimations()
})
self.animator = animator
}
animator.isPaused = false
} else {
self.animator?.isPaused = true
}
self.setNeedsDisplay()
}
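// Capture the interpolated color and the mute transition (with its direction) for the async draw pass.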
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
var transitionFraction: CGFloat = self.state.muted ? 1.0 : 0.0
var color = self.state.color
var reverse = false
if let transitionContext = self.transitionContext {
let timestamp = CACurrentMediaTime()
var t = CGFloat((timestamp - transitionContext.startTime) / transitionContext.duration)
t = min(1.0, max(0.0, t))
if transitionContext.previousState.muted != self.state.muted {
transitionFraction = self.state.muted ? t : 1.0 - t
reverse = transitionContext.previousState.muted
}
if transitionContext.previousState.color.rgb != color.rgb {
color = transitionContext.previousState.color.interpolateTo(color, fraction: t)!
}
}
return VoiceChatMicrophoneNodeDrawingState(color: color, transition: transitionFraction, reverse: reverse)
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
let context = UIGraphicsGetCurrentContext()!
if !isRasterizing {
context.setBlendMode(.copy)
context.setFillColor(UIColor.clear.cgColor)
context.fill(bounds)
}
guard let parameters = parameters as? VoiceChatMicrophoneNodeDrawingState else {
return
}
context.setFillColor(parameters.color.cgColor)
if bounds.size.width > 36.0 {
context.scaleBy(x: 2.5, y: 2.5)
}
context.translateBy(x: 18.0, y: 18.0)
let _ = try? drawSvgPath(context, path: "M-0.004000000189989805,-9.86400032043457 C2.2960000038146973,-9.86400032043457 4.165999889373779,-8.053999900817871 4.25600004196167,-5.77400016784668 C4.25600004196167,-5.77400016784668 4.265999794006348,-5.604000091552734 4.265999794006348,-5.604000091552734 C4.265999794006348,-5.604000091552734 4.265999794006348,-0.8040000200271606 4.265999794006348,-0.8040000200271606 C4.265999794006348,1.555999994277954 2.3559999465942383,3.4660000801086426 -0.004000000189989805,3.4660000801086426 C-2.2939999103546143,3.4660000801086426 -4.164000034332275,1.6460000276565552 -4.263999938964844,-0.6240000128746033 C-4.263999938964844,-0.6240000128746033 -4.263999938964844,-0.8040000200271606 -4.263999938964844,-0.8040000200271606 C-4.263999938964844,-0.8040000200271606 -4.263999938964844,-5.604000091552734 -4.263999938964844,-5.604000091552734 C-4.263999938964844,-7.953999996185303 -2.3540000915527344,-9.86400032043457 -0.004000000189989805,-9.86400032043457 Z ")
context.setBlendMode(.clear)
let _ = try? drawSvgPath(context, path: "M0.004000000189989805,-8.53600025177002 C-1.565999984741211,-8.53600025177002 -2.8459999561309814,-7.306000232696533 -2.936000108718872,-5.75600004196167 C-2.936000108718872,-5.75600004196167 -2.936000108718872,-5.5960001945495605 -2.936000108718872,-5.5960001945495605 C-2.936000108718872,-5.5960001945495605 -2.936000108718872,-0.7960000038146973 -2.936000108718872,-0.7960000038146973 C-2.936000108718872,0.8240000009536743 -1.6260000467300415,2.134000062942505 0.004000000189989805,2.134000062942505 C1.5740000009536743,2.134000062942505 2.8540000915527344,0.9039999842643738 2.934000015258789,-0.6460000276565552 C2.934000015258789,-0.6460000276565552 2.934000015258789,-0.7960000038146973 2.934000015258789,-0.7960000038146973 C2.934000015258789,-0.7960000038146973 2.934000015258789,-5.5960001945495605 2.934000015258789,-5.5960001945495605 C2.934000015258789,-7.22599983215332 1.6239999532699585,-8.53600025177002 0.004000000189989805,-8.53600025177002 Z ")
context.setBlendMode(.normal)
let _ = try? drawSvgPath(context, path: "M6.796000003814697,-1.4639999866485596 C7.165999889373779,-1.4639999866485596 7.466000080108643,-1.1640000343322754 7.466000080108643,-0.8040000200271606 C7.466000080108643,3.0959999561309814 4.47599983215332,6.296000003814697 0.6660000085830688,6.636000156402588 C0.6660000085830688,6.636000156402588 0.6660000085830688,9.196000099182129 0.6660000085830688,9.196000099182129 C0.6660000085830688,9.565999984741211 0.3659999966621399,9.866000175476074 -0.004000000189989805,9.866000175476074 C-0.33399999141693115,9.866000175476074 -0.6140000224113464,9.605999946594238 -0.6539999842643738,9.28600025177002 C-0.6539999842643738,9.28600025177002 -0.6639999747276306,9.196000099182129 -0.6639999747276306,9.196000099182129 C-0.6639999747276306,9.196000099182129 -0.6639999747276306,6.636000156402588 -0.6639999747276306,6.636000156402588 C-4.473999977111816,6.296000003814697 -7.464000225067139,3.0959999561309814 -7.464000225067139,-0.8040000200271606 C-7.464000225067139,-1.1640000343322754 -7.164000034332275,-1.4639999866485596 -6.803999900817871,-1.4639999866485596 C-6.434000015258789,-1.4639999866485596 -6.133999824523926,-1.1640000343322754 -6.133999824523926,-0.8040000200271606 C-6.133999824523926,2.5859999656677246 -3.384000062942505,5.335999965667725 -0.004000000189989805,5.335999965667725 C3.385999917984009,5.335999965667725 6.136000156402588,2.5859999656677246 6.136000156402588,-0.8040000200271606 C6.136000156402588,-1.1640000343322754 6.435999870300293,-1.4639999866485596 6.796000003814697,-1.4639999866485596 Z ")
context.translateBy(x: -18.0, y: -18.0)
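// Animated strike-through for the muted state: first clear a wider gap along the diagonal, then stroke the line; its length follows the transition fraction.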
if parameters.transition > 0.0 {
let startPoint: CGPoint
let endPoint: CGPoint
if parameters.reverse {
startPoint = CGPoint(x: 9.0 + 17.0 * (1.0 - parameters.transition), y: 10.0 - UIScreenPixel + 17.0 * (1.0 - parameters.transition))
endPoint = CGPoint(x: 26.0, y: 27.0 - UIScreenPixel)
} else {
startPoint = CGPoint(x: 9.0, y: 10.0 - UIScreenPixel)
endPoint = CGPoint(x: 9.0 + 17.0 * parameters.transition, y: 10.0 - UIScreenPixel + 17.0 * parameters.transition)
}
context.setBlendMode(.clear)
context.setLineWidth(4.0)
context.move(to: startPoint)
context.addLine(to: endPoint)
context.strokePath()
context.setBlendMode(.normal)
context.setStrokeColor(parameters.color.cgColor)
context.setLineWidth(1.0 + UIScreenPixel)
context.setLineCap(.round)
context.setLineJoin(.round)
context.move(to: startPoint)
context.addLine(to: endPoint)
context.strokePath()
}
}
}

View File

@ -16,7 +16,7 @@ import PeerPresenceStatusManager
import ContextUI
import AccountContext
import LegacyComponents
import AnimationUI
import AudioBlob
public final class VoiceChatParticipantItem: ListViewItem {
public enum ParticipantText {
@ -31,6 +31,12 @@ public final class VoiceChatParticipantItem: ListViewItem {
case none
}
public enum Icon {
case none
case microphone(Bool, UIColor)
case invite
}
let presentationData: ItemListPresentationData
let dateTimeFormat: PresentationDateTimeFormat
let nameDisplayOrder: PresentationPersonNameOrder
@ -38,11 +44,13 @@ public final class VoiceChatParticipantItem: ListViewItem {
let peer: Peer
let presence: PeerPresence?
let text: ParticipantText
let icon: Icon
let enabled: Bool
let audioLevel: Signal<Float, NoError>?
let action: (() -> Void)?
let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, presence: PeerPresence?, text: ParticipantText, enabled: Bool, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, presence: PeerPresence?, text: ParticipantText, icon: Icon, enabled: Bool, audioLevel: Signal<Float, NoError>?, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
self.presentationData = presentationData
self.dateTimeFormat = dateTimeFormat
self.nameDisplayOrder = nameDisplayOrder
@ -50,7 +58,9 @@ public final class VoiceChatParticipantItem: ListViewItem {
self.peer = peer
self.presence = presence
self.text = text
self.icon = icon
self.enabled = enabled
self.audioLevel = audioLevel
self.action = action
self.contextAction = contextAction
}
@ -117,12 +127,17 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
private var extractedRect: CGRect?
private var nonExtractedRect: CGRect?
private var audioLevelView: VoiceBlobView?
fileprivate let avatarNode: AvatarNode
private let titleNode: TextNode
private let statusNode: TextNode
private let animationNode: AnimationNode
private let actionContainerNode: ASDisplayNode
private var animationNode: VoiceChatMicrophoneNode?
private var actionButtonNode: HighlightableButtonNode?
private var audioLevelView: VoiceBlobView?
private let audioLevelDisposable = MetaDisposable()
private var absoluteLocation: (CGRect, CGSize)?
@ -130,11 +145,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
private var layoutParams: (VoiceChatParticipantItem, ListViewItemLayoutParams, Bool, Bool)?
override public var canBeSelected: Bool {
if let item = self.layoutParams?.0, item.action != nil {
return true
} else {
return false
}
return false
}
public init() {
@ -169,11 +180,11 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
self.statusNode.contentMode = .left
self.statusNode.contentsScale = UIScreen.main.scale
self.actionContainerNode = ASDisplayNode()
self.highlightedBackgroundNode = ASDisplayNode()
self.highlightedBackgroundNode.isLayerBacked = true
self.animationNode = AnimationNode(animation: "anim_voicemute", colors: [:], scale: 0.3333)
super.init(layerBacked: false, dynamicBounce: false, rotated: false, seeThrough: false)
self.isAccessibilityElement = true
@ -187,7 +198,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
self.offsetContainerNode.addSubnode(self.avatarNode)
self.offsetContainerNode.addSubnode(self.titleNode)
self.offsetContainerNode.addSubnode(self.statusNode)
self.offsetContainerNode.addSubnode(self.animationNode)
self.offsetContainerNode.addSubnode(self.actionContainerNode)
self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
self.peerPresenceManager = PeerPresenceStatusManager(update: { [weak self] in
@ -197,6 +208,15 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
}
})
self.containerNode.shouldBegin = { [weak self] location in
guard let strongSelf = self else {
return false
}
if let actionButtonNode = strongSelf.actionButtonNode, actionButtonNode.frame.contains(location) {
return false
}
return true
}
self.containerNode.activated = { [weak self] gesture, _ in
guard let strongSelf = self, let item = strongSelf.layoutParams?.0, let contextAction = item.contextAction else {
gesture.cancel()
@ -221,7 +241,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
transition.updateSublayerTransformOffset(layer: strongSelf.offsetContainerNode.layer, offset: CGPoint(x: isExtracted ? 12.0 : 0.0, y: 0.0))
transition.updateSublayerTransformOffset(layer: strongSelf.animationNode.layer, offset: CGPoint(x: isExtracted ? -24.0 : 0.0, y: 0.0))
transition.updateSublayerTransformOffset(layer: strongSelf.actionContainerNode.layer, offset: CGPoint(x: isExtracted ? -24.0 : 0.0, y: 0.0))
transition.updateAlpha(node: strongSelf.extractedBackgroundImageNode, alpha: isExtracted ? 1.0 : 0.0, completion: { _ in
if !isExtracted {
@ -230,6 +250,10 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
})
}
}
deinit {
self.audioLevelDisposable.dispose()
}
public func asyncLayout() -> (_ item: VoiceChatParticipantItem, _ params: ListViewItemLayoutParams, _ first: Bool, _ last: Bool) -> (ListViewItemNodeLayout, (Bool, Bool) -> Void) {
let makeTitleLayout = TextNode.asyncLayout(self.titleNode)
@ -296,8 +320,8 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
statusAttributedString = NSAttributedString(string: botStatus, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
} else if let presence = item.presence as? TelegramUserPresence {
let timestamp = CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970
let (string, activity) = stringAndActivityForUserPresence(strings: item.presentationData.strings, dateTimeFormat: item.dateTimeFormat, presence: presence, relativeTo: Int32(timestamp))
statusAttributedString = NSAttributedString(string: string, font: statusFont, textColor: activity ? item.presentationData.theme.list.itemAccentColor : item.presentationData.theme.list.itemSecondaryTextColor)
let (string, _) = stringAndActivityForUserPresence(strings: item.presentationData.strings, dateTimeFormat: item.dateTimeFormat, presence: presence, relativeTo: Int32(timestamp))
statusAttributedString = NSAttributedString(string: string, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
} else {
statusAttributedString = NSAttributedString(string: item.presentationData.strings.LastSeen_Offline, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
}
@ -309,7 +333,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
case .accent:
textColorValue = item.presentationData.theme.list.itemAccentColor
case .constructive:
textColorValue = item.presentationData.theme.list.itemDisclosureActions.constructive.fillColor
textColorValue = UIColor(rgb: 0x34c759)
}
statusAttributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue)
case .none:
@ -346,12 +370,14 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
currentDisabledOverlayNode = nil
}
var animateStatusTransition = false
var animateStatusTransitionFromUp: Bool?
if let currentItem = currentItem {
if case .presence = currentItem.text, case .text = item.text {
animateStatusTransition = true
if case .presence = currentItem.text, case let .text(_, newColor) = item.text {
animateStatusTransitionFromUp = newColor == .constructive
} else if case let .text(_, currentColor) = currentItem.text, case let .text(_, newColor) = item.text, currentColor != newColor {
animateStatusTransition = true
animateStatusTransitionFromUp = newColor == .constructive
} else if case .text = currentItem.text, case .presence = item.text {
animateStatusTransitionFromUp = false
}
}
@ -376,6 +402,7 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
strongSelf.offsetContainerNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.contextSourceNode.contentNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.containerNode.isGestureEnabled = item.contextAction != nil
strongSelf.actionContainerNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.accessibilityLabel = titleAttributedString?.string
var combinedValueString = ""
@ -416,18 +443,19 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
strongSelf.disabledOverlayNode = nil
}
if animateStatusTransition {
if let animateStatusTransitionFromUp = animateStatusTransitionFromUp {
let offset: CGFloat = animateStatusTransitionFromUp ? -7.0 : 7.0
if let snapshotView = strongSelf.statusNode.view.snapshotContentTree() {
strongSelf.statusNode.view.insertSubview(snapshotView, belowSubview: strongSelf.statusNode.view)
strongSelf.statusNode.view.superview?.insertSubview(snapshotView, belowSubview: strongSelf.statusNode.view)
snapshotView.frame = strongSelf.statusNode.frame
snapshotView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak snapshotView] _ in
snapshotView?.removeFromSuperview()
})
snapshotView.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -7.0), duration: 0.2, removeOnCompletion: false, additive: true)
snapshotView.layer.animatePosition(from: CGPoint(), to: CGPoint(x: 0.0, y: -offset), duration: 0.2, removeOnCompletion: false, additive: true)
strongSelf.statusNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
strongSelf.statusNode.layer.animatePosition(from: CGPoint(x: 0.0, y: 7.0), to: CGPoint(), duration: 0.2, additive: true)
strongSelf.statusNode.layer.animatePosition(from: CGPoint(x: 0.0, y: offset), to: CGPoint(), duration: 0.2, additive: true)
}
}
@ -454,7 +482,55 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
transition.updateFrame(node: strongSelf.titleNode, frame: CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size))
transition.updateFrame(node: strongSelf.statusNode, frame: CGRect(origin: CGPoint(x: leftInset, y: strongSelf.titleNode.frame.maxY + titleSpacing), size: statusLayout.size))
transition.updateFrame(node: strongSelf.avatarNode, frame: CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)))
let avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize))
transition.updateFrame(node: strongSelf.avatarNode, frame: avatarFrame)
let blobFrame = avatarFrame.insetBy(dx: -12.0, dy: -12.0)
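// Subscribe to this participant's audio level and lazily create a VoiceBlobView behind the avatar that animates while the level is non-zero.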
if let audioLevel = item.audioLevel {
strongSelf.audioLevelView?.frame = blobFrame
strongSelf.audioLevelDisposable.set((audioLevel
|> deliverOnMainQueue).start(next: { value in
guard let strongSelf = self else {
return
}
if strongSelf.audioLevelView == nil {
let audioLevelView = VoiceBlobView(
frame: blobFrame,
maxLevel: 0.3,
smallBlobRange: (0, 0),
mediumBlobRange: (0.7, 0.8),
bigBlobRange: (0.8, 0.9)
)
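// An even-odd mask cuts the circular avatar area out of the blob so only the ring around the avatar remains visible.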
let maskRect = CGRect(origin: .zero, size: blobFrame.size)
let playbackMaskLayer = CAShapeLayer()
playbackMaskLayer.frame = maskRect
playbackMaskLayer.fillRule = .evenOdd
let maskPath = UIBezierPath()
maskPath.append(UIBezierPath(roundedRect: maskRect.insetBy(dx: 12, dy: 12), cornerRadius: 22))
maskPath.append(UIBezierPath(rect: maskRect))
playbackMaskLayer.path = maskPath.cgPath
audioLevelView.layer.mask = playbackMaskLayer
audioLevelView.setColor(.green)
strongSelf.audioLevelView = audioLevelView
strongSelf.containerNode.view.insertSubview(audioLevelView, at: 0)
}
strongSelf.audioLevelView?.updateLevel(CGFloat(value) * 2.0)
if value > 0.0 {
strongSelf.audioLevelView?.startAnimating()
} else {
strongSelf.audioLevelView?.stopAnimating()
}
}))
} else if let audioLevelView = strongSelf.audioLevelView {
strongSelf.audioLevelView = nil
audioLevelView.removeFromSuperview()
strongSelf.audioLevelDisposable.set(nil)
}
var overrideImage: AvatarNodeImageOverride?
if item.peer.isDeleted {
@ -464,12 +540,47 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
strongSelf.highlightedBackgroundNode.frame = CGRect(origin: CGPoint(x: 0.0, y: -UIScreenPixel), size: CGSize(width: params.width, height: layout.contentSize.height + UIScreenPixel + UIScreenPixel))
if var size = strongSelf.animationNode.preferredSize() {
size = CGSize(width: ceil(size.width), height: ceil(size.height))
strongSelf.animationNode.frame = CGRect(x: params.width - size.width - 12.0, y: floor((layout.contentSize.height - size.height) / 2.0) + 1.0, width: size.width, height: size.height)
// animationNode.play()
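// The trailing accessory is either an animated microphone reflecting the mute state or an invite button, each created and removed on demand.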
if case let .microphone(muted, color) = item.icon {
let animationNode: VoiceChatMicrophoneNode
if let current = strongSelf.animationNode {
animationNode = current
} else {
animationNode = VoiceChatMicrophoneNode()
strongSelf.animationNode = animationNode
strongSelf.actionContainerNode.addSubnode(animationNode)
}
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, color: color), animated: true)
} else if let animationNode = strongSelf.animationNode {
strongSelf.animationNode = nil
animationNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak animationNode] _ in
animationNode?.removeFromSupernode()
})
}
if case .invite = item.icon {
let actionButtonNode: HighlightableButtonNode
if let current = strongSelf.actionButtonNode {
actionButtonNode = current
} else {
actionButtonNode = HighlightableButtonNode()
actionButtonNode.setImage(generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/AddUser"), color: item.presentationData.theme.list.itemAccentColor), for: .normal)
actionButtonNode.addTarget(strongSelf, action: #selector(strongSelf.actionButtonPressed), forControlEvents: .touchUpInside)
strongSelf.actionButtonNode = actionButtonNode
strongSelf.actionContainerNode.addSubnode(actionButtonNode)
}
} else if let actionButtonNode = strongSelf.actionButtonNode {
strongSelf.actionButtonNode = nil
actionButtonNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak actionButtonNode] _ in
actionButtonNode?.removeFromSupernode()
})
}
let animationSize = CGSize(width: 36.0, height: 36.0)
strongSelf.animationNode?.frame = CGRect(x: params.width - animationSize.width - 6.0, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
strongSelf.actionButtonNode?.frame = CGRect(x: params.width - animationSize.width - 6.0, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
if let presence = item.presence as? TelegramUserPresence {
strongSelf.peerPresenceManager?.reset(presence: presence)
}
@ -549,456 +660,10 @@ public class VoiceChatParticipantItemNode: ListViewItemNode {
rect.origin.y += self.insets.top
self.absoluteLocation = (rect, containerSize)
}
}
private class VoiceBlobView: UIView {
private let smallBlob: BlobView
private let mediumBlob: BlobView
private let bigBlob: BlobView
private let maxLevel: CGFloat
private var displayLinkAnimator: ConstantDisplayLinkAnimator?
private var audioLevel: CGFloat = 0
private var presentationAudioLevel: CGFloat = 0
private(set) var isAnimating = false
typealias BlobRange = (min: CGFloat, max: CGFloat)
init(
frame: CGRect,
maxLevel: CGFloat,
smallBlobRange: BlobRange,
mediumBlobRange: BlobRange,
bigBlobRange: BlobRange
) {
self.maxLevel = maxLevel
self.smallBlob = BlobView(
pointsCount: 8,
minRandomness: 0.1,
maxRandomness: 0.5,
minSpeed: 0.2,
maxSpeed: 0.6,
minScale: smallBlobRange.min,
maxScale: smallBlobRange.max,
scaleSpeed: 0.2,
isCircle: true
)
self.mediumBlob = BlobView(
pointsCount: 8,
minRandomness: 1,
maxRandomness: 1,
minSpeed: 1.5,
maxSpeed: 7,
minScale: mediumBlobRange.min,
maxScale: mediumBlobRange.max,
scaleSpeed: 0.2,
isCircle: false
)
self.bigBlob = BlobView(
pointsCount: 8,
minRandomness: 1,
maxRandomness: 1,
minSpeed: 1.5,
maxSpeed: 7,
minScale: bigBlobRange.min,
maxScale: bigBlobRange.max,
scaleSpeed: 0.2,
isCircle: false
)
super.init(frame: frame)
addSubview(bigBlob)
addSubview(mediumBlob)
addSubview(smallBlob)
displayLinkAnimator = ConstantDisplayLinkAnimator() { [weak self] in
guard let strongSelf = self else { return }
strongSelf.presentationAudioLevel = strongSelf.presentationAudioLevel * 0.9 + strongSelf.audioLevel * 0.1
strongSelf.smallBlob.level = strongSelf.presentationAudioLevel
strongSelf.mediumBlob.level = strongSelf.presentationAudioLevel
strongSelf.bigBlob.level = strongSelf.presentationAudioLevel
}
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setColor(_ color: UIColor) {
smallBlob.setColor(color)
mediumBlob.setColor(color.withAlphaComponent(0.3))
bigBlob.setColor(color.withAlphaComponent(0.15))
}
func updateLevel(_ level: CGFloat) {
let normalizedLevel = min(1, max(level / maxLevel, 0))
smallBlob.updateSpeedLevel(to: normalizedLevel)
mediumBlob.updateSpeedLevel(to: normalizedLevel)
bigBlob.updateSpeedLevel(to: normalizedLevel)
audioLevel = normalizedLevel
}
func startAnimating() {
guard !isAnimating else { return }
isAnimating = true
mediumBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
bigBlob.layer.animateScale(from: 0.5, to: 1, duration: 0.15, removeOnCompletion: false)
updateBlobsState()
displayLinkAnimator?.isPaused = false
}
func stopAnimating() {
guard isAnimating else { return }
isAnimating = false
mediumBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.15, removeOnCompletion: false)
bigBlob.layer.animateScale(from: 1.0, to: 0.5, duration: 0.15, removeOnCompletion: false)
updateBlobsState()
displayLinkAnimator?.isPaused = true
}
private func updateBlobsState() {
if isAnimating {
if smallBlob.frame.size != .zero {
smallBlob.startAnimating()
mediumBlob.startAnimating()
bigBlob.startAnimating()
}
} else {
smallBlob.stopAnimating()
mediumBlob.stopAnimating()
bigBlob.stopAnimating()
}
}
override func layoutSubviews() {
super.layoutSubviews()
smallBlob.frame = bounds
mediumBlob.frame = bounds
bigBlob.frame = bounds
updateBlobsState()
}
}
private class BlobView: UIView {
let pointsCount: Int
let smoothness: CGFloat
let minRandomness: CGFloat
let maxRandomness: CGFloat
let minSpeed: CGFloat
let maxSpeed: CGFloat
let minScale: CGFloat
let maxScale: CGFloat
let scaleSpeed: CGFloat
var scaleLevelsToBalance = [CGFloat]()
// If true ignores randomness and pointsCount
let isCircle: Bool
var level: CGFloat = 0 {
didSet {
CATransaction.begin()
CATransaction.setDisableActions(true)
let lv = minScale + (maxScale - minScale) * level
shapeLayer.transform = CATransform3DMakeScale(lv, lv, 1)
CATransaction.commit()
}
}
private var speedLevel: CGFloat = 0
private var scaleLevel: CGFloat = 0
private var lastSpeedLevel: CGFloat = 0
private var lastScaleLevel: CGFloat = 0
private let shapeLayer: CAShapeLayer = {
let layer = CAShapeLayer()
layer.strokeColor = nil
return layer
}()
private var transition: CGFloat = 0 {
didSet {
guard let currentPoints = currentPoints else { return }
shapeLayer.path = UIBezierPath.smoothCurve(through: currentPoints, length: bounds.width, smoothness: smoothness).cgPath
}
}
private var fromPoints: [CGPoint]?
private var toPoints: [CGPoint]?
private var currentPoints: [CGPoint]? {
guard let fromPoints = fromPoints, let toPoints = toPoints else { return nil }
return fromPoints.enumerated().map { offset, fromPoint in
let toPoint = toPoints[offset]
return CGPoint(
x: fromPoint.x + (toPoint.x - fromPoint.x) * transition,
y: fromPoint.y + (toPoint.y - fromPoint.y) * transition
)
}
}
init(
pointsCount: Int,
minRandomness: CGFloat,
maxRandomness: CGFloat,
minSpeed: CGFloat,
maxSpeed: CGFloat,
minScale: CGFloat,
maxScale: CGFloat,
scaleSpeed: CGFloat,
isCircle: Bool
) {
self.pointsCount = pointsCount
self.minRandomness = minRandomness
self.maxRandomness = maxRandomness
self.minSpeed = minSpeed
self.maxSpeed = maxSpeed
self.minScale = minScale
self.maxScale = maxScale
self.scaleSpeed = scaleSpeed
self.isCircle = isCircle
let angle = (CGFloat.pi * 2) / CGFloat(pointsCount)
self.smoothness = ((4 / 3) * tan(angle / 4)) / sin(angle / 2) / 2
super.init(frame: .zero)
layer.addSublayer(shapeLayer)
shapeLayer.transform = CATransform3DMakeScale(minScale, minScale, 1)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func setColor(_ color: UIColor) {
shapeLayer.fillColor = color.cgColor
}
func updateSpeedLevel(to newSpeedLevel: CGFloat) {
speedLevel = max(speedLevel, newSpeedLevel)
if abs(lastSpeedLevel - newSpeedLevel) > 0.5 {
animateToNewShape()
}
}
func startAnimating() {
animateToNewShape()
}
func stopAnimating() {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "blob")
}
private func animateToNewShape() {
guard !isCircle else { return }
if pop_animation(forKey: "blob") != nil {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "blob")
}
if fromPoints == nil {
fromPoints = generateNextBlob(for: bounds.size)
}
if toPoints == nil {
toPoints = generateNextBlob(for: bounds.size)
}
let animation = POPBasicAnimation()
animation.property = POPAnimatableProperty.property(withName: "blob.transition", initializer: { property in
property?.readBlock = { blobView, values in
guard let blobView = blobView as? BlobView, let values = values else { return }
values.pointee = blobView.transition
}
property?.writeBlock = { blobView, values in
guard let blobView = blobView as? BlobView, let values = values else { return }
blobView.transition = values.pointee
}
}) as? POPAnimatableProperty
animation.completionBlock = { [weak self] animation, finished in
if finished {
self?.fromPoints = self?.currentPoints
self?.toPoints = nil
self?.animateToNewShape()
}
}
animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .linear)
animation.fromValue = 0
animation.toValue = 1
pop_add(animation, forKey: "blob")
lastSpeedLevel = speedLevel
speedLevel = 0
}
// MARK: Helpers
private func generateNextBlob(for size: CGSize) -> [CGPoint] {
let randomness = minRandomness + (maxRandomness - minRandomness) * speedLevel
return blob(pointsCount: pointsCount, randomness: randomness)
.map {
return CGPoint(
x: $0.x * CGFloat(size.width),
y: $0.y * CGFloat(size.height)
)
}
}
func blob(pointsCount: Int, randomness: CGFloat) -> [CGPoint] {
let angle = (CGFloat.pi * 2) / CGFloat(pointsCount)
let rgen = { () -> CGFloat in
let accuracy: UInt32 = 1000
let random = arc4random_uniform(accuracy)
return CGFloat(random) / CGFloat(accuracy)
}
let rangeStart: CGFloat = 1 / (1 + randomness / 10)
let startAngle = angle * CGFloat(arc4random_uniform(100)) / CGFloat(100)
let points = (0 ..< pointsCount).map { i -> CGPoint in
let randPointOffset = (rangeStart + CGFloat(rgen()) * (1 - rangeStart)) / 2
let angleRandomness: CGFloat = angle * 0.1
let randAngle = angle + angle * ((angleRandomness * CGFloat(arc4random_uniform(100)) / CGFloat(100)) - angleRandomness * 0.5)
let pointX = sin(startAngle + CGFloat(i) * randAngle)
let pointY = cos(startAngle + CGFloat(i) * randAngle)
return CGPoint(
x: pointX * randPointOffset,
y: pointY * randPointOffset
)
}
return points
}
override func layoutSubviews() {
super.layoutSubviews()
CATransaction.begin()
CATransaction.setDisableActions(true)
shapeLayer.position = CGPoint(x: bounds.midX, y: bounds.midY)
if isCircle {
let halfWidth = bounds.width * 0.5
shapeLayer.path = UIBezierPath(
roundedRect: bounds.offsetBy(dx: -halfWidth, dy: -halfWidth),
cornerRadius: halfWidth
).cgPath
}
CATransaction.commit()
}
}
private extension UIBezierPath {
static func smoothCurve(
through points: [CGPoint],
length: CGFloat,
smoothness: CGFloat
) -> UIBezierPath {
var smoothPoints = [SmoothPoint]()
for index in (0 ..< points.count) {
let prevIndex = index - 1
let prev = points[prevIndex >= 0 ? prevIndex : points.count + prevIndex]
let curr = points[index]
let next = points[(index + 1) % points.count]
let angle: CGFloat = {
let dx = next.x - prev.x
let dy = -next.y + prev.y
let angle = atan2(dy, dx)
if angle < 0 {
return abs(angle)
} else {
return 2 * .pi - angle
}
}()
smoothPoints.append(
SmoothPoint(
point: curr,
inAngle: angle + .pi,
inLength: smoothness * distance(from: curr, to: prev),
outAngle: angle,
outLength: smoothness * distance(from: curr, to: next)
)
)
}
let resultPath = UIBezierPath()
resultPath.move(to: smoothPoints[0].point)
for index in (0 ..< smoothPoints.count) {
let curr = smoothPoints[index]
let next = smoothPoints[(index + 1) % points.count]
let currSmoothOut = curr.smoothOut()
let nextSmoothIn = next.smoothIn()
resultPath.addCurve(to: next.point, controlPoint1: currSmoothOut, controlPoint2: nextSmoothIn)
}
resultPath.close()
return resultPath
}
static private func distance(from fromPoint: CGPoint, to toPoint: CGPoint) -> CGFloat {
return sqrt((fromPoint.x - toPoint.x) * (fromPoint.x - toPoint.x) + (fromPoint.y - toPoint.y) * (fromPoint.y - toPoint.y))
}
struct SmoothPoint {
let point: CGPoint
let inAngle: CGFloat
let inLength: CGFloat
let outAngle: CGFloat
let outLength: CGFloat
func smoothIn() -> CGPoint {
return smooth(angle: inAngle, length: inLength)
}
func smoothOut() -> CGPoint {
return smooth(angle: outAngle, length: outLength)
}
private func smooth(angle: CGFloat, length: CGFloat) -> CGPoint {
return CGPoint(
x: point.x + length * cos(angle),
y: point.y + length * sin(angle)
)
@objc private func actionButtonPressed() {
if let item = self.layoutParams?.0 {
item.action?()
}
}
}

View File

@ -23,7 +23,7 @@ private extension GroupCallInfo {
clientParams: nil,
version: nil
)
case let .groupCall(_, id, accessHash, channelId, _, _, params, version):
case let .groupCall(_, id, accessHash, _, _, params, version):
var clientParams: String?
if let params = params {
switch params {
@ -34,7 +34,7 @@ private extension GroupCallInfo {
self.init(
id: id,
accessHash: accessHash,
peerId: channelId.flatMap { PeerId(namespace: Namespaces.Peer.CloudChannel, id: $0) },
peerId: nil,
clientParams: clientParams,
version: version
)
@ -84,7 +84,7 @@ public func getCurrentGroupCall(account: Account, peerId: PeerId) -> Signal<Grou
}
|> mapToSignal { result -> Signal<GroupCallInfo?, GetCurrentGroupCallError> in
switch result {
case let .groupCall(call, sources, participants, chats, users):
case let .groupCall(call, sources, participants, users):
return account.postbox.transaction { transaction -> GroupCallInfo? in
return GroupCallInfo(call)
}
@ -110,7 +110,7 @@ public func createGroupCall(account: Account, peerId: PeerId) -> Signal<GroupCal
return .fail(.generic)
}
return account.network.request(Api.functions.phone.createGroupCall(flags: 0, channel: inputPeer, randomId: Int32.random(in: Int32.min ... Int32.max)))
return account.network.request(Api.functions.phone.createGroupCall(channel: inputPeer, randomId: Int32.random(in: Int32.min ... Int32.max)))
|> mapError { _ -> CreateGroupCallError in
return .generic
}
@ -137,6 +137,44 @@ public func createGroupCall(account: Account, peerId: PeerId) -> Signal<GroupCal
}
}
public struct GetGroupCallParticipantsResult {
public var ssrcMapping: [UInt32: PeerId]
}
public enum GetGroupCallParticipantsError {
case generic
}
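// Fetches up to the requested limit of participants and builds a mapping from each participant's audio SSRC to their PeerId.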
public func getGroupCallParticipants(account: Account, callId: Int64, accessHash: Int64, maxDate: Int32, limit: Int32) -> Signal<GetGroupCallParticipantsResult, GetGroupCallParticipantsError> {
return account.network.request(Api.functions.phone.getGroupParticipants(call: .inputGroupCall(id: callId, accessHash: accessHash), maxDate: maxDate, limit: limit))
|> mapError { _ -> GetGroupCallParticipantsError in
return .generic
}
|> map { result -> GetGroupCallParticipantsResult in
var ssrcMapping: [UInt32: PeerId] = [:]
switch result {
case let .groupParticipants(count, participants, users):
for participant in participants {
var peerId: PeerId?
var ssrc: UInt32?
switch participant {
case let .groupCallParticipant(flags, userId, date, source):
peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
ssrc = UInt32(bitPattern: source)
}
if let peerId = peerId, let ssrc = ssrc {
ssrcMapping[ssrc] = peerId
}
}
}
return GetGroupCallParticipantsResult(
ssrcMapping: ssrcMapping
)
}
}
public enum JoinGroupCallError {
case generic
}
@ -153,11 +191,17 @@ public func joinGroupCall(account: Account, callId: Int64, accessHash: Int64, jo
return .generic
}
|> mapToSignal { updates -> Signal<JoinGroupCallResult, JoinGroupCallError> in
return account.network.request(Api.functions.phone.getGroupCall(call: .inputGroupCall(id: callId, accessHash: accessHash)))
|> mapError { _ -> JoinGroupCallError in
return .generic
}
|> mapToSignal { result -> Signal<JoinGroupCallResult, JoinGroupCallError> in
return combineLatest(
account.network.request(Api.functions.phone.getGroupCall(call: .inputGroupCall(id: callId, accessHash: accessHash)))
|> mapError { _ -> JoinGroupCallError in
return .generic
},
getGroupCallParticipants(account: account, callId: callId, accessHash: accessHash, maxDate: 0, limit: 100)
|> mapError { _ -> JoinGroupCallError in
return .generic
}
)
|> mapToSignal { result, participantsResult -> Signal<JoinGroupCallResult, JoinGroupCallError> in
account.stateManager.addUpdates(updates)
var maybeParsedCall: GroupCallInfo?
@ -176,11 +220,11 @@ public func joinGroupCall(account: Account, callId: Int64, accessHash: Int64, jo
}
switch result {
case let .groupCall(call, sources, participants, chats, users):
case let .groupCall(call, sources, participants, users):
guard let _ = GroupCallInfo(call) else {
return .fail(.generic)
}
var ssrcMapping: [UInt32: PeerId] = [:]
var ssrcMapping: [UInt32: PeerId] = participantsResult.ssrcMapping
for participant in participants {
var peerId: PeerId?
var ssrc: UInt32?
@ -233,3 +277,52 @@ public func stopGroupCall(account: Account, callId: Int64, accessHash: Int64) ->
return .complete()
}
}
public enum CheckGroupCallResult {
case success
case restart
}
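// Verifies that the given audio source is still registered in the call; a false result tells the client to rejoin.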
public func checkGroupCall(account: Account, callId: Int64, accessHash: Int64, ssrc: Int32) -> Signal<CheckGroupCallResult, NoError> {
return account.network.request(Api.functions.phone.checkGroupCall(call: .inputGroupCall(id: callId, accessHash: accessHash), source: ssrc))
|> `catch` { _ -> Signal<Api.Bool, NoError> in
return .single(.boolFalse)
}
|> map { result -> CheckGroupCallResult in
switch result {
case .boolTrue:
return .success
case .boolFalse:
return .restart
}
}
}
public enum InviteToGroupCallError {
case generic
}
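// Resolves the target user and invokes phone.inviteToGroupCall, applying the returned updates to the state manager.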
public func inviteToGroupCall(account: Account, callId: Int64, accessHash: Int64, peerId: PeerId) -> Signal<Never, InviteToGroupCallError> {
return account.postbox.transaction { transaction -> Peer? in
return transaction.getPeer(peerId)
}
|> castError(InviteToGroupCallError.self)
|> mapToSignal { user -> Signal<Never, InviteToGroupCallError> in
guard let user = user else {
return .fail(.generic)
}
guard let apiUser = apiInputUser(user) else {
return .fail(.generic)
}
return account.network.request(Api.functions.phone.inviteToGroupCall(call: .inputGroupCall(id: callId, accessHash: accessHash), userId: apiUser))
|> mapError { _ -> InviteToGroupCallError in
return .generic
}
|> mapToSignal { result -> Signal<Never, InviteToGroupCallError> in
account.stateManager.addUpdates(result)
return .complete()
}
}
}

View File

@ -188,7 +188,7 @@ func apiMessagePeerIds(_ message: Api.Message) -> [PeerId] {
}
switch action {
case .messageActionChannelCreate, .messageActionChatDeletePhoto, .messageActionChatEditPhoto, .messageActionChatEditTitle, .messageActionEmpty, .messageActionPinMessage, .messageActionHistoryClear, .messageActionGameScore, .messageActionPaymentSent, .messageActionPaymentSentMe, .messageActionPhoneCall, .messageActionScreenshotTaken, .messageActionCustomAction, .messageActionBotAllowed, .messageActionSecureValuesSent, .messageActionSecureValuesSentMe, .messageActionContactSignUp, .messageActionGroupCall:
case .messageActionChannelCreate, .messageActionChatDeletePhoto, .messageActionChatEditPhoto, .messageActionChatEditTitle, .messageActionEmpty, .messageActionPinMessage, .messageActionHistoryClear, .messageActionGameScore, .messageActionPaymentSent, .messageActionPaymentSentMe, .messageActionPhoneCall, .messageActionScreenshotTaken, .messageActionCustomAction, .messageActionBotAllowed, .messageActionSecureValuesSent, .messageActionSecureValuesSentMe, .messageActionContactSignUp, .messageActionGroupCall, .messageActionInviteToGroupCall:
break
case let .messageActionChannelMigrateFrom(_, chatId):
result.append(PeerId(namespace: Namespaces.Peer.CloudGroup, id: chatId))

View File

@ -64,6 +64,11 @@ func telegramMediaActionFromApiAction(_ action: Api.MessageAction) -> TelegramMe
case let .inputGroupCall(id, accessHash):
return TelegramMediaAction(action: .groupPhoneCall(callId: id, accessHash: accessHash, duration: duration))
}
case let .messageActionInviteToGroupCall(call, userId):
switch call {
case let .inputGroupCall(id, accessHash):
return TelegramMediaAction(action: .inviteToGroupPhoneCall(callId: id, accessHash: accessHash, peerId: PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)))
}
}
}

View File

@ -460,6 +460,14 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
} else {
attributedString = addAttributesToStringWithRanges(strings.Notification_ProximityReached(message.peers[fromId]?.displayTitle(strings: strings, displayOrder: nameDisplayOrder) ?? "", distanceString, message.peers[toId]?.displayTitle(strings: strings, displayOrder: nameDisplayOrder) ?? ""), body: bodyAttributes, argumentAttributes: peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, fromId), (2, toId)]))
}
case let .inviteToGroupPhoneCall(_, _, userId):
if message.author?.id == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_VoiceChatInvitationByYou(message.peers[userId]?.displayTitle(strings: strings, displayOrder: nameDisplayOrder) ?? ""), body: bodyAttributes, argumentAttributes: peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, userId)]))
} else if userId == accountPeerId {
attributedString = addAttributesToStringWithRanges(strings.Notification_VoiceChatInvitationForYou(authorName), body: bodyAttributes, argumentAttributes: peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id)]))
} else {
attributedString = addAttributesToStringWithRanges(strings.Notification_VoiceChatInvitation(authorName, message.peers[userId]?.displayTitle(strings: strings, displayOrder: nameDisplayOrder) ?? ""), body: bodyAttributes, argumentAttributes: peerMentionsAttributes(primaryTextColor: primaryTextColor, peerIds: [(0, message.author?.id), (1, userId)]))
}
case .unknown:
attributedString = nil
}

View File

@ -210,6 +210,7 @@ swift_library(
"//submodules/AnimatedAvatarSetNode:AnimatedAvatarSetNode",
"//submodules/SlotMachineAnimationNode:SlotMachineAnimationNode",
"//submodules/AnimatedNavigationStripeNode:AnimatedNavigationStripeNode",
"//submodules/AudioBlob:AudioBlob",
],
visibility = [
"//visibility:public",

View File

@ -0,0 +1,9 @@
{
"info" : {
"author" : "xcode",
"version" : 1
},
"properties" : {
"provides-namespace" : true
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_invited.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -1,7 +1,7 @@
{
"images" : [
{
"filename" : "Voice.pdf",
"filename" : "ic_mute.pdf",
"idiom" : "universal"
}
],

View File

@ -1,7 +1,7 @@
{
"images" : [
{
"filename" : "Voice.pdf",
"filename" : "ic_unmute.pdf",
"idiom" : "universal"
}
],

View File

@ -157,6 +157,13 @@ final class ChatMediaInputMetaSectionItemNode: ListViewItemNode {
case .trendingGifs:
self.imageNode.image = PresentationResourcesChat.chatInputMediaPanelTrendingGifsIcon(theme)
case let .gifEmoji(emoji):
var emoji = emoji
if emoji == "🥳" {
if #available(iOSApplicationExtension 12.1, iOS 12.1, *) {
} else {
emoji = "🎉"
}
}
self.imageNode.image = nil
self.textNode.attributedText = NSAttributedString(string: emoji, font: Font.regular(27.0), textColor: .black)
let textSize = self.textNode.updateLayout(CGSize(width: 100.0, height: 100.0))

View File

@ -772,7 +772,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
}
}
let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .minimal, reactionCount: dateReactionCount)
let dateText = stringForMessageTimestampStatus(accountPeerId: item.context.account.peerId, message: item.message, dateTimeFormat: item.presentationData.dateTimeFormat, nameDisplayOrder: item.presentationData.nameDisplayOrder, strings: item.presentationData.strings, format: .regular, reactionCount: dateReactionCount)
var isReplyThread = false
if case .replyThread = item.chatLocation {

View File

@ -16,6 +16,7 @@ import SemanticStatusNode
import FileMediaResourceStatus
import CheckNode
import MusicAlbumArtResources
import AudioBlob
private struct FetchControls {
let fetch: () -> Void

View File

@ -431,7 +431,7 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
}
}
if item.message.id.peerId != item.context.account.peerId && !item.message.id.peerId.isReplies{
if item.message.id.peerId != item.context.account.peerId && !item.message.id.peerId.isReplies {
for attribute in item.message.attributes {
if let attribute = attribute as? SourceReferenceMessageAttribute {
if let sourcePeer = item.message.peers[attribute.messageId.peerId] {
@ -457,7 +457,7 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
var updatedShareButtonNode: ChatMessageShareButton?
if needShareButton {
if currentShareButtonNode != nil {
if let currentShareButtonNode = currentShareButtonNode {
updatedShareButtonNode = currentShareButtonNode
} else {
let buttonNode = ChatMessageShareButton()

View File

@ -9,6 +9,7 @@ import TelegramPresentationData
import LegacyComponents
import AccountContext
import ChatInterfaceState
import AudioBlob
private let offsetThreshold: CGFloat = 10.0
private let dismissOffsetThreshold: CGFloat = 70.0

View File

@ -88,9 +88,6 @@ private final class PeerInfoScreenSwitchItemNode: PeerInfoScreenItemNode {
self.textNode.attributedText = NSAttributedString(string: item.text, font: Font.regular(17.0), textColor: textColorValue)
let textSize = self.textNode.updateLayout(CGSize(width: width - sideInset * 2.0 - 56.0, height: .greatestFiniteMagnitude))
let arrowInset: CGFloat = 18.0
let textFrame = CGRect(origin: CGPoint(x: sideInset, y: 12.0), size: textSize)
let height = textSize.height + 24.0
@ -103,7 +100,7 @@ private final class PeerInfoScreenSwitchItemNode: PeerInfoScreenItemNode {
}
let switchSize = switchView.bounds.size
self.switchNode.frame = CGRect(origin: CGPoint(x: width - switchSize.width - 15.0, y: floor((height - switchSize.height) / 2.0)), size: switchSize)
self.switchNode.frame = CGRect(origin: CGPoint(x: width - switchSize.width - 15.0 - safeInsets.right, y: floor((height - switchSize.height) / 2.0)), size: switchSize)
if switchView.isOn != item.value {
switchView.setOn(item.value, animated: !firstTime)
}

View File

@ -2010,7 +2010,6 @@ final class PeerInfoHeaderNavigationButtonContainerNode: ASDisplayNode {
transition.updateFrameAdditiveToCenter(node: buttonNode, frame: buttonFrame)
let alphaFactor: CGFloat = spec.isForExpandedView ? expandFraction : (1.0 - expandFraction)
var buttonTransition = transition
if case let .animated(duration, curve) = buttonTransition, alphaFactor == 0.0 {
buttonTransition = .animated(duration: duration * 0.25, curve: curve)
@ -3131,7 +3130,6 @@ final class PeerInfoHeaderNode: ASDisplayNode {
transition.updateSublayerTransformScale(node: self.titleNodeContainer, scale: titleScale)
transition.updateSublayerTransformScale(node: self.subtitleNodeContainer, scale: subtitleScale)
transition.updateSublayerTransformScale(node: self.usernameNodeContainer, scale: subtitleScale)
} else {
let titleScale: CGFloat
let subtitleScale: CGFloat

View File

@ -5266,7 +5266,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
bottomInset = max(bottomInset, selectionPanelNode.bounds.height)
}
let navigationBarHeight: CGFloat = layout.isModalOverlay ? 56.0 : 44.0
let navigationBarHeight: CGFloat = !self.isSettings && layout.isModalOverlay ? 56.0 : 44.0
self.paneContainerNode.update(size: self.paneContainerNode.bounds.size, sideInset: layout.safeInsets.left, bottomInset: bottomInset, visibleHeight: visibleHeight, expansionFraction: effectiveAreaExpansionFraction, presentationData: self.presentationData, data: self.data, transition: transition)
self.headerNode.navigationButtonContainer.frame = CGRect(origin: CGPoint(x: layout.safeInsets.left, y: layout.statusBarHeight ?? 0.0), size: CGSize(width: layout.size.width - layout.safeInsets.left * 2.0, height: navigationBarHeight))
self.headerNode.navigationButtonContainer.isWhite = self.headerNode.isAvatarExpanded

View File

@ -44,17 +44,19 @@ public final class OngoingGroupCallContext {
var mainStreamAudioSsrc: UInt32?
var otherSsrcs: [UInt32] = []
let joinPayload = Promise<String>()
let joinPayload = Promise<(String, UInt32)>()
let networkState = ValuePromise<NetworkState>(.connecting, ignoreRepeated: true)
let isMuted = ValuePromise<Bool>(true, ignoreRepeated: true)
let memberStates = ValuePromise<[UInt32: MemberState]>([:], ignoreRepeated: true)
let audioLevels = ValuePipe<[(UInt32, Float)]>()
let myAudioLevel = ValuePipe<Float>()
init(queue: Queue) {
self.queue = queue
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
var myAudioLevelUpdatedImpl: ((Float) -> Void)?
self.context = GroupCallThreadLocalContext(
queue: ContextQueueImpl(queue: queue),
@ -63,6 +65,9 @@ public final class OngoingGroupCallContext {
},
audioLevelsUpdated: { levels in
audioLevelsUpdatedImpl?(levels)
},
myAudioLevelUpdated: { level in
myAudioLevelUpdatedImpl?(level)
}
)
@ -99,13 +104,20 @@ public final class OngoingGroupCallContext {
}
}
let myAudioLevel = self.myAudioLevel
myAudioLevelUpdatedImpl = { level in
queue.async {
myAudioLevel.putNext(level)
}
}
self.context.emitJoinPayload({ [weak self] payload, ssrc in
queue.async {
guard let strongSelf = self else {
return
}
strongSelf.mainStreamAudioSsrc = ssrc
strongSelf.joinPayload.set(.single(payload))
strongSelf.joinPayload.set(.single((payload, ssrc)))
}
})
}
@ -170,6 +182,10 @@ public final class OngoingGroupCallContext {
}
}
func stop() {
self.context.stop()
}
func setIsMuted(_ isMuted: Bool) {
self.isMuted.set(isMuted)
self.context.setIsMuted(isMuted)
@ -179,7 +195,7 @@ public final class OngoingGroupCallContext {
private let queue = Queue()
private let impl: QueueLocalObject<Impl>
public var joinPayload: Signal<String, NoError> {
public var joinPayload: Signal<(String, UInt32), NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.impl.with { impl in
@ -227,6 +243,18 @@ public final class OngoingGroupCallContext {
}
}
public var myAudioLevel: Signal<Float, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.impl.with { impl in
disposable.set(impl.myAudioLevel.signal().start(next: { value in
subscriber.putNext(value)
}))
}
return disposable
}
}
public var isMuted: Signal<Bool, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
@ -269,4 +297,10 @@ public final class OngoingGroupCallContext {
impl.removeSsrcs(ssrcs: ssrcs)
}
}
public func stop() {
self.impl.with { impl in
impl.stop()
}
}
}
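A rough sketch of how the widened OngoingGroupCallContext surface could be consumed (callContext, updateLocalLevelIndicator and sendJoinRequest are assumptions for illustration, not part of this diff):

// callContext: OngoingGroupCallContext created elsewhere by the call controller.
let myLevelDisposable = callContext.myAudioLevel.start(next: { level in
    // Local capture level forwarded from GroupCallThreadLocalContext's myAudioLevelUpdated block.
    updateLocalLevelIndicator(level)
})

// joinPayload now carries the payload together with the local audio ssrc.
let joinDisposable = callContext.joinPayload.start(next: { payloadAndSsrc in
    let (payload, ssrc) = payloadAndSsrc
    sendJoinRequest(payload, ssrc)
})

// Tearing the call down goes through the new stop() entry point.
callContext.stop()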

View File

@ -158,7 +158,9 @@ typedef NS_ENUM(int32_t, GroupCallNetworkState) {
@interface GroupCallThreadLocalContext : NSObject
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated myAudioLevelUpdated:(void (^ _Nonnull)(float))myAudioLevelUpdated;
- (void)stop;
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload;

View File

@ -808,7 +808,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
@implementation GroupCallThreadLocalContext
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated myAudioLevelUpdated:(void (^ _Nonnull)(float))myAudioLevelUpdated {
self = [super init];
if (self != nil) {
_queue = queue;
@ -826,18 +826,29 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
networkStateUpdated(isConnected ? GroupCallNetworkStateConnected : GroupCallNetworkStateConnecting);
}];
},
.audioLevelsUpdated = [weakSelf, queue, audioLevelsUpdated](std::vector<std::pair<uint32_t, float>> const &levels) {
.audioLevelsUpdated = [audioLevelsUpdated](std::vector<std::pair<uint32_t, float>> const &levels) {
NSMutableArray *result = [[NSMutableArray alloc] init];
for (auto &it : levels) {
[result addObject:@(it.first)];
[result addObject:@(it.second)];
}
audioLevelsUpdated(result);
},
.myAudioLevelUpdated = [myAudioLevelUpdated](float level) {
myAudioLevelUpdated(level);
}
}));
}
return self;
}
- (void)stop {
if (_instance) {
_instance->stop();
_instance.reset();
}
}
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
if (_instance) {
_instance->emitJoinPayload([completion](tgcalls::GroupJoinPayload payload) {

@ -1 +1 @@
Subproject commit f679bba27327cfe0327a89816d4ba56355b9c9c7
Subproject commit c0a1340e81d4c4ee27307164afc2f43b4183851a

View File

@ -23,6 +23,8 @@ swift_library(
"//submodules/StickerResources:StickerResources",
"//submodules/TelegramAnimatedStickerNode:TelegramAnimatedStickerNode",
"//submodules/SlotMachineAnimationNode:SlotMachineAnimationNode",
"//submodules/AvatarNode:AvatarNode",
"//submodules/AccountContext:AccountContext",
],
visibility = [
"//visibility:public",

View File

@ -5,6 +5,7 @@ import TelegramPresentationData
import SyncCore
import Postbox
import TelegramCore
import AccountContext
public enum UndoOverlayContent {
case removedChat(text: String)
@ -22,6 +23,7 @@ public enum UndoOverlayContent {
case chatRemovedFromFolder(chatTitle: String, folderTitle: String)
case messagesUnpinned(title: String, text: String, undo: Bool, isHidden: Bool)
case setProximityAlert(title: String, text: String, cancelled: Bool)
case invitedToVoiceChat(context: AccountContext, peer: Peer, text: String)
}
public enum UndoOverlayAction {
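A hedged example of constructing the new content case (accountContext, invitedPeer and invitationText are placeholders; how the content is subsequently presented through UndoOverlayController is assumed rather than shown by this diff):

// invitationText is expected to be a markdown-formatted string, e.g. with the peer name in bold.
let content: UndoOverlayContent = .invitedToVoiceChat(
    context: accountContext,
    peer: invitedPeer,
    text: invitationText
)
// The content value would then be handed to an UndoOverlayController instance for display,
// the same way the other UndoOverlayContent cases are presented.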

View File

@ -3,6 +3,9 @@ import UIKit
import AsyncDisplayKit
import Display
import SwiftSignalKit
import Postbox
import SyncCore
import TelegramCore
import TelegramPresentationData
import TextFormat
import Markdown
@ -12,15 +15,14 @@ import AnimatedStickerNode
import TelegramAnimatedStickerNode
import SlotMachineAnimationNode
import AnimationUI
import SyncCore
import Postbox
import TelegramCore
import StickerResources
import AvatarNode
final class UndoOverlayControllerNode: ViewControllerTracingNode {
private let elevatedLayout: Bool
private var statusNode: RadialStatusNode?
private let timerTextNode: ImmediateTextNode
private let avatarNode: AvatarNode?
private let iconNode: ASImageNode?
private let iconCheckNode: RadialStatusNode?
private let animationNode: AnimationNode?
@ -85,6 +87,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
switch content {
case let .removedChat(text):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = nil
@ -94,6 +97,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
self.originalRemainingSeconds = 5
self.statusNode = RadialStatusNode(backgroundNodeColor: .clear)
case let .archivedChat(_, title, text, undo):
self.avatarNode = nil
if undo {
self.iconNode = ASImageNode()
self.iconNode?.displayWithoutProcessing = true
@ -113,6 +117,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = undo
self.originalRemainingSeconds = 5
case let .hidArchive(title, text, undo):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_archiveswipe", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -122,6 +127,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = undo
self.originalRemainingSeconds = 3
case let .revealedArchive(title, text, undo):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_infotip", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -131,6 +137,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = undo
self.originalRemainingSeconds = 3
case let .succeed(text):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_success", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -144,6 +151,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = false
self.originalRemainingSeconds = 3
case let .info(text):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_infotip", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -157,6 +165,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = false
self.originalRemainingSeconds = max(5, min(8, text.count / 14))
case let .actionSucceeded(title, text, cancel):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_success", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -174,6 +183,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
undoText = cancel
self.originalRemainingSeconds = 5
case let .chatAddedToFolder(chatTitle, folderTitle):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_success", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -190,6 +200,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = false
self.originalRemainingSeconds = 5
case let .chatRemovedFromFolder(chatTitle, folderTitle):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_success", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -206,6 +217,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = false
self.originalRemainingSeconds = 5
case let .messagesUnpinned(title, text, undo, isHidden):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: isHidden ? "anim_message_hidepin" : "anim_message_unpin", colors: ["info1.info1.stroke": self.animationBackgroundColor, "info2.info2.Fill": self.animationBackgroundColor], scale: 1.0)
@ -223,6 +235,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = undo
self.originalRemainingSeconds = 5
case let .emoji(path, text):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = nil
@ -238,6 +251,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = false
self.originalRemainingSeconds = 5
case let .swipeToReply(title, text):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: "anim_swipereply", colors: [:], scale: 1.0)
@ -248,6 +262,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
displayUndo = false
self.originalRemainingSeconds = 5
case let .stickersModified(title, text, undo, info, topItem, account):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = nil
@ -335,6 +350,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
}
}
case let .dice(dice, account, text, action):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = nil
@ -389,6 +405,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
})
}
case let .setProximityAlert(title, text, cancelled):
self.avatarNode = nil
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = AnimationNode(animation: cancelled ? "anim_proximity_cancelled" : "anim_proximity_set", colors: [:], scale: 0.45)
@ -403,6 +420,23 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
self.textNode.attributedText = attributedText
}
displayUndo = false
self.originalRemainingSeconds = 3
case let .invitedToVoiceChat(context, peer, text):
self.avatarNode = AvatarNode(font: avatarPlaceholderFont(size: 16.0))
self.iconNode = nil
self.iconCheckNode = nil
self.animationNode = nil
self.animatedStickerNode = nil
let body = MarkdownAttributeSet(font: Font.regular(14.0), textColor: .white)
let bold = MarkdownAttributeSet(font: Font.semibold(14.0), textColor: .white)
let link = MarkdownAttributeSet(font: Font.regular(14.0), textColor: undoTextColor)
let attributedText = parseMarkdownIntoAttributedString(text, attributes: MarkdownAttributes(body: body, bold: bold, link: link, linkAttribute: { _ in return nil }), textAlignment: .natural)
self.textNode.attributedText = attributedText
self.avatarNode?.setPeer(context: context, theme: presentationData.theme, peer: peer, overrideImage: nil, emptyColor: presentationData.theme.list.mediaPlaceholderColor, synchronousLoad: true)
displayUndo = false
self.originalRemainingSeconds = 3
}
@ -433,7 +467,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
switch content {
case .removedChat:
self.panelWrapperNode.addSubnode(self.timerTextNode)
case .archivedChat, .hidArchive, .revealedArchive, .succeed, .emoji, .swipeToReply, .actionSucceeded, .stickersModified, .chatAddedToFolder, .chatRemovedFromFolder, .messagesUnpinned, .setProximityAlert:
case .archivedChat, .hidArchive, .revealedArchive, .succeed, .emoji, .swipeToReply, .actionSucceeded, .stickersModified, .chatAddedToFolder, .chatRemovedFromFolder, .messagesUnpinned, .setProximityAlert, .invitedToVoiceChat:
break
case .dice:
self.panelWrapperNode.clipsToBounds = true
@ -447,6 +481,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
self.stillStickerNode.flatMap(self.panelWrapperNode.addSubnode)
self.animatedStickerNode.flatMap(self.panelWrapperNode.addSubnode)
self.slotMachineNode.flatMap(self.panelWrapperNode.addSubnode)
self.avatarNode.flatMap(self.panelWrapperNode.addSubnode)
self.panelWrapperNode.addSubnode(self.titleNode)
self.panelWrapperNode.addSubnode(self.textNode)
self.panelWrapperNode.addSubnode(self.buttonNode)
@ -605,9 +640,7 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
}
let textContentOrigin = floor((contentHeight - textContentHeight) / 2.0)
transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: leftInset, y: textContentOrigin), size: titleSize))
transition.updateFrame(node: self.textNode, frame: CGRect(origin: CGPoint(x: leftInset, y: textContentOrigin + textOffset), size: textSize))
if let iconNode = self.iconNode, let iconSize = iconNode.image?.size {
@ -660,16 +693,22 @@ final class UndoOverlayControllerNode: ViewControllerTracingNode {
let iconFrame = CGRect(origin: CGPoint(x: floor((leftInset - iconSize.width) / 2.0), y: floor((contentHeight - iconSize.height) / 2.0)), size: iconSize)
transition.updateFrame(node: slotMachineNode, frame: iconFrame)
}
let timerTextSize = self.timerTextNode.updateLayout(CGSize(width: 100.0, height: 100.0))
transition.updateFrame(node: self.timerTextNode, frame: CGRect(origin: CGPoint(x: floor((leftInset - timerTextSize.width) / 2.0), y: floor((contentHeight - timerTextSize.height) / 2.0)), size: timerTextSize))
let statusSize: CGFloat = 30.0
if let statusNode = self.statusNode {
let statusSize: CGFloat = 30.0
transition.updateFrame(node: statusNode, frame: CGRect(origin: CGPoint(x: floor((leftInset - statusSize) / 2.0), y: floor((contentHeight - statusSize) / 2.0)), size: CGSize(width: statusSize, height: statusSize)))
if firstLayout {
statusNode.transitionToState(.secretTimeout(color: .white, icon: nil, beginTime: CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970, timeout: Double(self.remainingSeconds), sparks: false), completion: {})
}
}
if let avatarNode = self.avatarNode {
let avatarSize: CGFloat = 30.0
transition.updateFrame(node: avatarNode, frame: CGRect(origin: CGPoint(x: floor((leftInset - avatarSize) / 2.0), y: floor((contentHeight - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)))
}
}
func animateIn(asReplacement: Bool) {