From b1d1d52fd3550e4cadaafaabd11410be8475b5a1 Mon Sep 17 00:00:00 2001 From: Ilya Laktyushin Date: Sat, 3 Apr 2021 01:17:45 +0300 Subject: [PATCH] Video Chats Improvements --- .../Sources/PresentationCallManager.swift | 1 + .../Sources/ShareControllerNode.swift | 7 + submodules/TelegramApi/Sources/Api0.swift | 5 +- submodules/TelegramApi/Sources/Api2.swift | 78 ++- submodules/TelegramApi/Sources/Api4.swift | 38 +- .../Sources/PresentationGroupCall.swift | 7 +- .../Sources/VoiceChatActionButton.swift | 32 +- .../Sources/VoiceChatController.swift | 507 ++++++++++++++---- .../Sources/VoiceChatParticipantItem.swift | 337 ++++++++---- .../ApiUtils/TelegramMediaAction.swift | 4 +- .../TelegramCore/Sources/BotPaymentForm.swift | 8 +- .../TelegramCore/Sources/GroupCalls.swift | 7 +- .../State/AccountStateManagementUtils.swift | 2 +- .../Sources/StoreMessage_Telegram.swift | 2 +- .../Call/Speaking.imageset/Contents.json | 12 + .../Call/Speaking.imageset/ic_vc_volume.pdf | Bin 0 -> 4977 bytes .../Call/Video.imageset/Contents.json | 12 + .../Call/Video.imageset/ic_vc_camera.pdf | Bin 0 -> 4792 bytes 18 files changed, 809 insertions(+), 250 deletions(-) create mode 100644 submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/Contents.json create mode 100644 submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/ic_vc_volume.pdf create mode 100644 submodules/TelegramUI/Images.xcassets/Call/Video.imageset/Contents.json create mode 100644 submodules/TelegramUI/Images.xcassets/Call/Video.imageset/ic_vc_camera.pdf diff --git a/submodules/AccountContext/Sources/PresentationCallManager.swift b/submodules/AccountContext/Sources/PresentationCallManager.swift index f27b559e4f..1d2f24abee 100644 --- a/submodules/AccountContext/Sources/PresentationCallManager.swift +++ b/submodules/AccountContext/Sources/PresentationCallManager.swift @@ -324,6 +324,7 @@ public protocol PresentationGroupCall: class { func lowerHand() func requestVideo() func disableVideo() + func switchVideoCamera() func updateDefaultParticipantsAreMuted(isMuted: Bool) func setVolume(peerId: PeerId, volume: Int32, sync: Bool) func setFullSizeVideo(peerId: PeerId?) diff --git a/submodules/ShareController/Sources/ShareControllerNode.swift b/submodules/ShareController/Sources/ShareControllerNode.swift index 0afd735957..45d79ed873 100644 --- a/submodules/ShareController/Sources/ShareControllerNode.swift +++ b/submodules/ShareController/Sources/ShareControllerNode.swift @@ -595,6 +595,13 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate self.animateOut(shared: true, completion: { }) self.completed?(peerIds) + + Queue.mainQueue().after(0.1) { + if self.hapticFeedback == nil { + self.hapticFeedback = HapticFeedback() + } + self.hapticFeedback?.success() + } } let fromForeignApp = self.fromForeignApp self.shareDisposable.set((signal diff --git a/submodules/TelegramApi/Sources/Api0.swift b/submodules/TelegramApi/Sources/Api0.swift index 6afbcfc5cc..0f27c3a3f9 100644 --- a/submodules/TelegramApi/Sources/Api0.swift +++ b/submodules/TelegramApi/Sources/Api0.swift @@ -7,7 +7,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] 
= { dict[-1255641564] = { return parseString($0) } dict[-1240849242] = { return Api.messages.StickerSet.parse_stickerSet($0) } dict[2004925620] = { return Api.GroupCall.parse_groupCallDiscarded($0) } - dict[-1061026514] = { return Api.GroupCall.parse_groupCall($0) } + dict[-916691372] = { return Api.GroupCall.parse_groupCall($0) } dict[-457104426] = { return Api.InputGeoPoint.parse_inputGeoPointEmpty($0) } dict[1210199983] = { return Api.InputGeoPoint.parse_inputGeoPoint($0) } dict[-784000893] = { return Api.payments.ValidatedRequestedInfo.parse_validatedRequestedInfo($0) } @@ -587,7 +587,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = { dict[978610270] = { return Api.messages.Messages.parse_messagesSlice($0) } dict[1682413576] = { return Api.messages.Messages.parse_channelMessages($0) } dict[1951620897] = { return Api.messages.Messages.parse_messagesNotModified($0) } - dict[615970509] = { return Api.Invoice.parse_invoice($0) } + dict[215516896] = { return Api.Invoice.parse_invoice($0) } dict[1933519201] = { return Api.PeerSettings.parse_peerSettings($0) } dict[1577067778] = { return Api.auth.SentCode.parse_sentCode($0) } dict[480546647] = { return Api.InputChatPhoto.parse_inputChatPhotoEmpty($0) } @@ -825,6 +825,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = { dict[2047704898] = { return Api.MessageAction.parse_messageActionGroupCall($0) } dict[1991897370] = { return Api.MessageAction.parse_messageActionInviteToGroupCall($0) } dict[-1441072131] = { return Api.MessageAction.parse_messageActionSetMessagesTTL($0) } + dict[-1281329567] = { return Api.MessageAction.parse_messageActionGroupCallScheduled($0) } dict[1399245077] = { return Api.PhoneCall.parse_phoneCallEmpty($0) } dict[462375633] = { return Api.PhoneCall.parse_phoneCallWaiting($0) } dict[-2014659757] = { return Api.PhoneCall.parse_phoneCallRequested($0) } diff --git a/submodules/TelegramApi/Sources/Api2.swift b/submodules/TelegramApi/Sources/Api2.swift index 29739c8953..1e7382eb4f 100644 --- a/submodules/TelegramApi/Sources/Api2.swift +++ b/submodules/TelegramApi/Sources/Api2.swift @@ -1,7 +1,7 @@ public extension Api { public enum GroupCall: TypeConstructorDescription { case groupCallDiscarded(id: Int64, accessHash: Int64, duration: Int32) - case groupCall(flags: Int32, id: Int64, accessHash: Int64, participantsCount: Int32, params: Api.DataJSON?, title: String?, streamDcId: Int32?, recordStartDate: Int32?, version: Int32) + case groupCall(flags: Int32, id: Int64, accessHash: Int64, participantsCount: Int32, params: Api.DataJSON?, title: String?, streamDcId: Int32?, recordStartDate: Int32?, scheduleDate: Int32?, version: Int32) public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { @@ -13,9 +13,9 @@ public extension Api { serializeInt64(accessHash, buffer: buffer, boxed: false) serializeInt32(duration, buffer: buffer, boxed: false) break - case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let version): + case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let scheduleDate, let version): if boxed { - buffer.appendInt32(-1061026514) + buffer.appendInt32(-916691372) } serializeInt32(flags, buffer: buffer, boxed: false) serializeInt64(id, buffer: buffer, boxed: false) @@ -25,6 +25,7 @@ public extension Api { if Int(flags) & Int(1 << 3) != 0 {serializeString(title!, buffer: buffer, boxed: false)} if Int(flags) & 
Int(1 << 4) != 0 {serializeInt32(streamDcId!, buffer: buffer, boxed: false)} if Int(flags) & Int(1 << 5) != 0 {serializeInt32(recordStartDate!, buffer: buffer, boxed: false)} + if Int(flags) & Int(1 << 7) != 0 {serializeInt32(scheduleDate!, buffer: buffer, boxed: false)} serializeInt32(version, buffer: buffer, boxed: false) break } @@ -34,8 +35,8 @@ public extension Api { switch self { case .groupCallDiscarded(let id, let accessHash, let duration): return ("groupCallDiscarded", [("id", id), ("accessHash", accessHash), ("duration", duration)]) - case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let version): - return ("groupCall", [("flags", flags), ("id", id), ("accessHash", accessHash), ("participantsCount", participantsCount), ("params", params), ("title", title), ("streamDcId", streamDcId), ("recordStartDate", recordStartDate), ("version", version)]) + case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let scheduleDate, let version): + return ("groupCall", [("flags", flags), ("id", id), ("accessHash", accessHash), ("participantsCount", participantsCount), ("params", params), ("title", title), ("streamDcId", streamDcId), ("recordStartDate", recordStartDate), ("scheduleDate", scheduleDate), ("version", version)]) } } @@ -76,7 +77,9 @@ public extension Api { var _8: Int32? if Int(_1!) & Int(1 << 5) != 0 {_8 = reader.readInt32() } var _9: Int32? - _9 = reader.readInt32() + if Int(_1!) & Int(1 << 7) != 0 {_9 = reader.readInt32() } + var _10: Int32? + _10 = reader.readInt32() let _c1 = _1 != nil let _c2 = _2 != nil let _c3 = _3 != nil @@ -85,9 +88,10 @@ public extension Api { let _c6 = (Int(_1!) & Int(1 << 3) == 0) || _6 != nil let _c7 = (Int(_1!) & Int(1 << 4) == 0) || _7 != nil let _c8 = (Int(_1!) & Int(1 << 5) == 0) || _8 != nil - let _c9 = _9 != nil - if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 && _c8 && _c9 { - return Api.GroupCall.groupCall(flags: _1!, id: _2!, accessHash: _3!, participantsCount: _4!, params: _5, title: _6, streamDcId: _7, recordStartDate: _8, version: _9!) + let _c9 = (Int(_1!) & Int(1 << 7) == 0) || _9 != nil + let _c10 = _10 != nil + if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 && _c8 && _c9 && _c10 { + return Api.GroupCall.groupCall(flags: _1!, id: _2!, accessHash: _3!, participantsCount: _4!, params: _5, title: _6, streamDcId: _7, recordStartDate: _8, scheduleDate: _9, version: _10!) } else { return nil @@ -15026,13 +15030,13 @@ public extension Api { } public enum Invoice: TypeConstructorDescription { - case invoice(flags: Int32, currency: String, prices: [Api.LabeledPrice], minTipAmount: Int64?, maxTipAmount: Int64?, defaultTipAmount: Int64?) + case invoice(flags: Int32, currency: String, prices: [Api.LabeledPrice], maxTipAmount: Int64?, suggestedTipAmounts: [Int64]?) 
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { - case .invoice(let flags, let currency, let prices, let minTipAmount, let maxTipAmount, let defaultTipAmount): + case .invoice(let flags, let currency, let prices, let maxTipAmount, let suggestedTipAmounts): if boxed { - buffer.appendInt32(615970509) + buffer.appendInt32(215516896) } serializeInt32(flags, buffer: buffer, boxed: false) serializeString(currency, buffer: buffer, boxed: false) @@ -15041,17 +15045,20 @@ public extension Api { for item in prices { item.serialize(buffer, true) } - if Int(flags) & Int(1 << 8) != 0 {serializeInt64(minTipAmount!, buffer: buffer, boxed: false)} if Int(flags) & Int(1 << 8) != 0 {serializeInt64(maxTipAmount!, buffer: buffer, boxed: false)} - if Int(flags) & Int(1 << 8) != 0 {serializeInt64(defaultTipAmount!, buffer: buffer, boxed: false)} + if Int(flags) & Int(1 << 8) != 0 {buffer.appendInt32(481674261) + buffer.appendInt32(Int32(suggestedTipAmounts!.count)) + for item in suggestedTipAmounts! { + serializeInt64(item, buffer: buffer, boxed: false) + }} break } } public func descriptionFields() -> (String, [(String, Any)]) { switch self { - case .invoice(let flags, let currency, let prices, let minTipAmount, let maxTipAmount, let defaultTipAmount): - return ("invoice", [("flags", flags), ("currency", currency), ("prices", prices), ("minTipAmount", minTipAmount), ("maxTipAmount", maxTipAmount), ("defaultTipAmount", defaultTipAmount)]) + case .invoice(let flags, let currency, let prices, let maxTipAmount, let suggestedTipAmounts): + return ("invoice", [("flags", flags), ("currency", currency), ("prices", prices), ("maxTipAmount", maxTipAmount), ("suggestedTipAmounts", suggestedTipAmounts)]) } } @@ -15066,18 +15073,17 @@ public extension Api { } var _4: Int64? if Int(_1!) & Int(1 << 8) != 0 {_4 = reader.readInt64() } - var _5: Int64? - if Int(_1!) & Int(1 << 8) != 0 {_5 = reader.readInt64() } - var _6: Int64? - if Int(_1!) & Int(1 << 8) != 0 {_6 = reader.readInt64() } + var _5: [Int64]? + if Int(_1!) & Int(1 << 8) != 0 {if let _ = reader.readInt32() { + _5 = Api.parseVector(reader, elementSignature: 570911930, elementType: Int64.self) + } } let _c1 = _1 != nil let _c2 = _2 != nil let _c3 = _3 != nil let _c4 = (Int(_1!) & Int(1 << 8) == 0) || _4 != nil let _c5 = (Int(_1!) & Int(1 << 8) == 0) || _5 != nil - let _c6 = (Int(_1!) & Int(1 << 8) == 0) || _6 != nil - if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 { - return Api.Invoice.invoice(flags: _1!, currency: _2!, prices: _3!, minTipAmount: _4, maxTipAmount: _5, defaultTipAmount: _6) + if _c1 && _c2 && _c3 && _c4 && _c5 { + return Api.Invoice.invoice(flags: _1!, currency: _2!, prices: _3!, maxTipAmount: _4, suggestedTipAmounts: _5) } else { return nil @@ -20433,6 +20439,7 @@ public extension Api { case messageActionGroupCall(flags: Int32, call: Api.InputGroupCall, duration: Int32?) 
case messageActionInviteToGroupCall(call: Api.InputGroupCall, users: [Int32]) case messageActionSetMessagesTTL(period: Int32) + case messageActionGroupCallScheduled(call: Api.InputGroupCall, scheduleDate: Int32) public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) { switch self { @@ -20637,6 +20644,13 @@ public extension Api { } serializeInt32(period, buffer: buffer, boxed: false) break + case .messageActionGroupCallScheduled(let call, let scheduleDate): + if boxed { + buffer.appendInt32(-1281329567) + } + call.serialize(buffer, true) + serializeInt32(scheduleDate, buffer: buffer, boxed: false) + break } } @@ -20696,6 +20710,8 @@ public extension Api { return ("messageActionInviteToGroupCall", [("call", call), ("users", users)]) case .messageActionSetMessagesTTL(let period): return ("messageActionSetMessagesTTL", [("period", period)]) + case .messageActionGroupCallScheduled(let call, let scheduleDate): + return ("messageActionGroupCallScheduled", [("call", call), ("scheduleDate", scheduleDate)]) } } @@ -21033,6 +21049,22 @@ public extension Api { return nil } } + public static func parse_messageActionGroupCallScheduled(_ reader: BufferReader) -> MessageAction? { + var _1: Api.InputGroupCall? + if let signature = reader.readInt32() { + _1 = Api.parse(reader, signature: signature) as? Api.InputGroupCall + } + var _2: Int32? + _2 = reader.readInt32() + let _c1 = _1 != nil + let _c2 = _2 != nil + if _c1 && _c2 { + return Api.MessageAction.messageActionGroupCallScheduled(call: _1!, scheduleDate: _2!) + } + else { + return nil + } + } } public enum PhoneCall: TypeConstructorDescription { diff --git a/submodules/TelegramApi/Sources/Api4.swift b/submodules/TelegramApi/Sources/Api4.swift index 04c9628008..8728232aab 100644 --- a/submodules/TelegramApi/Sources/Api4.swift +++ b/submodules/TelegramApi/Sources/Api4.swift @@ -7697,12 +7697,15 @@ public extension Api { }) } - public static func createGroupCall(peer: Api.InputPeer, randomId: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse) { + public static func createGroupCall(flags: Int32, peer: Api.InputPeer, randomId: Int32, title: String?, scheduleDate: Int32?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse) { let buffer = Buffer() - buffer.appendInt32(-1120031776) + buffer.appendInt32(1221445336) + serializeInt32(flags, buffer: buffer, boxed: false) peer.serialize(buffer, true) serializeInt32(randomId, buffer: buffer, boxed: false) - return (FunctionDescription(name: "phone.createGroupCall", parameters: [("peer", peer), ("randomId", randomId)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in + if Int(flags) & Int(1 << 0) != 0 {serializeString(title!, buffer: buffer, boxed: false)} + if Int(flags) & Int(1 << 1) != 0 {serializeInt32(scheduleDate!, buffer: buffer, boxed: false)} + return (FunctionDescription(name: "phone.createGroupCall", parameters: [("flags", flags), ("peer", peer), ("randomId", randomId), ("title", title), ("scheduleDate", scheduleDate)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in let reader = BufferReader(buffer) var result: Api.Updates? 
if let signature = reader.readInt32() { @@ -7926,6 +7929,35 @@ public extension Api { return result }) } + + public static func toggleGroupCallStartSubscription(call: Api.InputGroupCall, subscribed: Api.Bool) -> (FunctionDescription, Buffer, DeserializeFunctionResponse) { + let buffer = Buffer() + buffer.appendInt32(563885286) + call.serialize(buffer, true) + subscribed.serialize(buffer, true) + return (FunctionDescription(name: "phone.toggleGroupCallStartSubscription", parameters: [("call", call), ("subscribed", subscribed)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in + let reader = BufferReader(buffer) + var result: Api.Updates? + if let signature = reader.readInt32() { + result = Api.parse(reader, signature: signature) as? Api.Updates + } + return result + }) + } + + public static func startScheduledGroupCall(call: Api.InputGroupCall) -> (FunctionDescription, Buffer, DeserializeFunctionResponse) { + let buffer = Buffer() + buffer.appendInt32(1451287362) + call.serialize(buffer, true) + return (FunctionDescription(name: "phone.startScheduledGroupCall", parameters: [("call", call)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in + let reader = BufferReader(buffer) + var result: Api.Updates? + if let signature = reader.readInt32() { + result = Api.parse(reader, signature: signature) as? Api.Updates + } + return result + }) + } } } } diff --git a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift index 7e259cedc8..c0c561af3b 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationGroupCall.swift @@ -538,7 +538,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { private var toneRenderer: PresentationCallToneRenderer? private var videoCapturer: OngoingCallVideoCapturer? - + private var useFrontCamera: Bool = true private let incomingVideoSourcePromise = Promise<[PeerId: UInt32]>([:]) public var incomingVideoSources: Signal<[PeerId: UInt32], NoError> { return self.incomingVideoSourcePromise.get() @@ -2008,6 +2008,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall { } } + public func switchVideoCamera() { + self.useFrontCamera = !self.useFrontCamera + self.videoCapturer?.switchVideoInput(isFront: self.useFrontCamera) + } + public func setVolume(peerId: PeerId, volume: Int32, sync: Bool) { for (ssrc, id) in self.ssrcMapping { if id == peerId { diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift b/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift index 96e5adc46f..886f8a3807 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatActionButton.swift @@ -57,9 +57,9 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { private var currentParams: (size: CGSize, buttonSize: CGSize, state: VoiceChatActionButton.State, dark: Bool, small: Bool, title: String, subtitle: String, snap: Bool)? 
private var activePromise = ValuePromise(false) - private var outerColorPromise = ValuePromise(nil) - var outerColor: Signal { - return outerColorPromise.get() + private var outerColorPromise = Promise<(UIColor?, UIColor?)>((nil, nil)) + var outerColor: Signal<(UIColor?, UIColor?), NoError> { + return self.outerColorPromise.get() } var connectingColor: UIColor = UIColor(rgb: 0xb6b6bb) { @@ -167,8 +167,8 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { self?.activePromise.set(active) } - self.backgroundNode.updatedOuterColor = { [weak self] color in - self?.outerColorPromise.set(color) + self.backgroundNode.updatedColors = { [weak self] outerColor, activeColor in + self?.outerColorPromise.set(.single((outerColor, activeColor))) } } @@ -245,16 +245,17 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 0.0) } else { let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate - if small { transition.updateTransformScale(node: self.backgroundNode, scale: self.pressing ? smallScale * 0.9 : smallScale, delay: 0.05) transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? smallIconScale * 0.9 : smallIconScale, delay: 0.05) + transition.updateAlpha(node: self.titleLabel, alpha: 0.0) + transition.updateAlpha(node: self.subtitleLabel, alpha: 0.0) } else { transition.updateTransformScale(node: self.backgroundNode, scale: 1.0, delay: 0.05) transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? 0.9 : 1.0, delay: 0.05) + transition.updateAlpha(node: self.titleLabel, alpha: 1.0, delay: 0.05) + transition.updateAlpha(node: self.subtitleLabel, alpha: 1.0, delay: 0.05) } - transition.updateAlpha(node: self.titleLabel, alpha: 1.0, delay: 0.05) - transition.updateAlpha(node: self.subtitleLabel, alpha: 1.0, delay: 0.05) transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 1.0) } @@ -265,7 +266,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { private var previousIcon: VoiceChatActionButtonIconAnimationState? private func applyIconParams() { - guard let (_, _, state, _, _, _, _, snap) = self.currentParams else { + guard let (_, _, state, _, _, _, _, _) = self.currentParams else { return } @@ -290,7 +291,6 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { self.previousIcon = icon self.iconNode.enqueueState(icon) -// self.iconNode.update(state: VoiceChatMicrophoneNode.State(muted: iconMuted, filled: true, color: iconColor), animated: true) } func update(snap: Bool, animated: Bool) { @@ -298,7 +298,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { self.currentParams = (previous.size, previous.buttonSize, previous.state, previous.dark, previous.small, previous.title, previous.subtitle, snap) self.backgroundNode.isSnap = snap - self.backgroundNode.glowHidden = snap + self.backgroundNode.glowHidden = snap || previous.small self.backgroundNode.updateColors() self.applyParams(animated: animated) self.applyIconParams() @@ -328,6 +328,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode { } self.applyIconParams() + self.backgroundNode.glowHidden = (self.currentParams?.snap ?? false) || small self.backgroundNode.isDark = dark self.backgroundNode.update(state: backgroundState, animated: true) @@ -468,7 +469,7 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode { } var updatedActive: ((Bool) -> Void)? 
- var updatedOuterColor: ((UIColor?) -> Void)? + var updatedColors: ((UIColor?, UIColor?) -> Void)? private let backgroundCircleLayer = CAShapeLayer() private let foregroundCircleLayer = CAShapeLayer() @@ -706,6 +707,7 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode { let initialColors = self.foregroundGradientLayer.colors let outerColor: UIColor? + let activeColor: UIColor? let targetColors: [CGColor] let targetScale: CGFloat switch type { @@ -713,20 +715,24 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode { targetColors = [activeBlue.cgColor, green.cgColor, green.cgColor] targetScale = 0.89 outerColor = UIColor(rgb: 0x21674f) + activeColor = green case .active: targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor] targetScale = 0.85 outerColor = UIColor(rgb: 0x1d588d) + activeColor = blue case .connecting: targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor] targetScale = 0.3 outerColor = nil + activeColor = blue case .muted: targetColors = [pink.cgColor, purple.cgColor, purple.cgColor] targetScale = 0.85 outerColor = UIColor(rgb: 0x3b3474) + activeColor = purple } - self.updatedOuterColor?(outerColor) + self.updatedColors?(outerColor, activeColor) self.maskGradientLayer.transform = CATransform3DMakeScale(targetScale, targetScale, 1.0) if let _ = previousType { diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift index 340a86e015..3de1fe6716 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatController.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatController.swift @@ -36,6 +36,8 @@ private let fullscreenBackgroundColor = UIColor(rgb: 0x000000) private let dimColor = UIColor(white: 0.0, alpha: 0.5) private let smallButtonSize = CGSize(width: 36.0, height: 36.0) private let sideButtonSize = CGSize(width: 56.0, height: 56.0) +private let mainVideoHeight: CGFloat = 240.0 +private let topPanelHeight: CGFloat = 63.0 private let bottomAreaHeight: CGFloat = 205.0 private let fullscreenBottomAreaHeight: CGFloat = 80.0 @@ -168,7 +170,7 @@ final class GroupVideoNode: ASDisplayNode { private let videoViewContainer: UIView private let videoView: PresentationCallVideoView - private var validLayout: CGSize? + private var validLayout: (CGSize, Bool)? var tapped: (() -> Void)? 
@@ -178,6 +180,8 @@ final class GroupVideoNode: ASDisplayNode { super.init() + self.isUserInteractionEnabled = false + self.videoViewContainer.addSubview(self.videoView.view) self.view.addSubview(self.videoViewContainer) @@ -188,8 +192,8 @@ final class GroupVideoNode: ASDisplayNode { guard let strongSelf = self else { return } - if let size = strongSelf.validLayout { - strongSelf.updateLayout(size: size, transition: .immediate) + if let (size, isLandscape) = strongSelf.validLayout { + strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate) } } }) @@ -199,8 +203,8 @@ final class GroupVideoNode: ASDisplayNode { guard let strongSelf = self else { return } - if let size = strongSelf.validLayout { - strongSelf.updateLayout(size: size, transition: .immediate) + if let (size, isLandscape) = strongSelf.validLayout { + strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate) } } }) @@ -214,9 +218,9 @@ final class GroupVideoNode: ASDisplayNode { } } - func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) { - self.validLayout = size - self.videoViewContainer.frame = CGRect(origin: CGPoint(), size: size) + func updateLayout(size: CGSize, isLandscape: Bool, transition: ContainedViewLayoutTransition) { + self.validLayout = (size, isLandscape) + transition.updateFrame(view: self.videoViewContainer, frame: CGRect(origin: CGPoint(), size: size)) let orientation = self.videoView.getOrientation() var aspect = self.videoView.getAspect() @@ -248,10 +252,10 @@ final class GroupVideoNode: ASDisplayNode { var rotatedVideoSize = CGSize(width: 100.0, height: rotatedAspect * 100.0) - if size.width < 100.0 || true { - rotatedVideoSize = rotatedVideoSize.aspectFilled(size) - } else { + if isLandscape { rotatedVideoSize = rotatedVideoSize.aspectFitted(size) + } else { + rotatedVideoSize = rotatedVideoSize.aspectFilled(size) } if switchOrientation { @@ -262,8 +266,9 @@ final class GroupVideoNode: ASDisplayNode { rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y) rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width) rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height) - self.videoView.view.center = rotatedVideoFrame.center - self.videoView.view.bounds = CGRect(origin: CGPoint(), size: rotatedVideoFrame.size) + + transition.updatePosition(layer: self.videoView.view.layer, position: rotatedVideoFrame.center) + transition.updateBounds(layer: self.videoView.view.layer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size)) let transition: ContainedViewLayoutTransition = .immediate transition.updateTransformRotation(view: self.videoView.view, angle: angle) @@ -276,17 +281,48 @@ private final class MainVideoContainerNode: ASDisplayNode { private var currentVideoNode: GroupVideoNode? private var candidateVideoNode: GroupVideoNode? + private let topCornersNode: ASImageNode + private let bottomCornersNode: ASImageNode + private let bottomEdgeNode: ASDisplayNode private var currentPeer: (PeerId, UInt32)? - private var validLayout: CGSize? + private var validLayout: (CGSize, CGFloat, Bool)? + + var tapped: (() -> Void)? 
init(context: AccountContext, call: PresentationGroupCall) { self.context = context self.call = call + self.topCornersNode = ASImageNode() + self.topCornersNode.displaysAsynchronously = false + self.topCornersNode.image = cornersImage(top: true, bottom: false, dark: true) + + self.bottomCornersNode = ASImageNode() + self.bottomCornersNode.displaysAsynchronously = false + self.bottomCornersNode.image = cornersImage(top: false, bottom: true, dark: true) + + self.bottomEdgeNode = ASDisplayNode() + self.bottomEdgeNode.backgroundColor = UIColor(rgb: 0x000000) + super.init() - self.backgroundColor = .black + self.clipsToBounds = true + self.backgroundColor = UIColor(rgb: 0x1c1c1e) + + self.addSubnode(self.topCornersNode) + self.addSubnode(self.bottomCornersNode) + self.addSubnode(self.bottomEdgeNode) + } + + override func didLoad() { + super.didLoad() + + self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap))) + } + + @objc private func tap() { + self.tapped?() } func updatePeer(peer: (peerId: PeerId, source: UInt32)?, waitForFullSize: Bool) { @@ -315,9 +351,9 @@ private final class MainVideoContainerNode: ASDisplayNode { strongSelf.currentVideoNode = nil } strongSelf.currentVideoNode = videoNode - strongSelf.addSubnode(videoNode) - if let size = strongSelf.validLayout { - strongSelf.update(size: size, transition: .immediate) + strongSelf.insertSubnode(videoNode, belowSubnode: strongSelf.topCornersNode) + if let (size, sideInset, isLandscape) = strongSelf.validLayout { + strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate) } }) } else { @@ -330,9 +366,9 @@ private final class MainVideoContainerNode: ASDisplayNode { strongSelf.currentVideoNode = nil } strongSelf.currentVideoNode = videoNode - strongSelf.addSubnode(videoNode) - if let size = strongSelf.validLayout { - strongSelf.update(size: size, transition: .immediate) + strongSelf.insertSubnode(videoNode, belowSubnode: strongSelf.topCornersNode) + if let (size, sideInset, isLandscape) = strongSelf.validLayout { + strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate) } } } @@ -345,13 +381,17 @@ private final class MainVideoContainerNode: ASDisplayNode { } } - func update(size: CGSize, transition: ContainedViewLayoutTransition) { - self.validLayout = size + func update(size: CGSize, sideInset: CGFloat, isLandscape: Bool, transition: ContainedViewLayoutTransition) { + self.validLayout = (size, sideInset, isLandscape) if let currentVideoNode = self.currentVideoNode { transition.updateFrame(node: currentVideoNode, frame: CGRect(origin: CGPoint(), size: size)) - currentVideoNode.updateLayout(size: size, transition: .immediate) + currentVideoNode.updateLayout(size: size, isLandscape: isLandscape, transition: transition) } + + transition.updateFrame(node: self.topCornersNode, frame: CGRect(x: sideInset, y: 0.0, width: size.width - sideInset * 2.0, height: 50.0)) + transition.updateFrame(node: self.bottomCornersNode, frame: CGRect(x: sideInset, y: size.height - 6.0 - 50.0, width: size.width - sideInset * 2.0, height: 50.0)) + transition.updateFrame(node: self.bottomEdgeNode, frame: CGRect(x: sideInset, y: size.height - 6.0, width: size.width - sideInset * 2.0, height: 6.0)) } } @@ -379,7 +419,7 @@ public final class VoiceChatController: ViewController { let openInvite: () -> Void let peerContextAction: (PeerEntry, ASDisplayNode, ContextGesture?) -> Void let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) 
-> Void - let getPeerVideo: (UInt32) -> GroupVideoNode? + let getPeerVideo: (UInt32, Bool) -> GroupVideoNode? var isExpanded: Bool = false private var audioLevels: [PeerId: ValuePipe] = [:] @@ -392,7 +432,7 @@ public final class VoiceChatController: ViewController { openInvite: @escaping () -> Void, peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, - getPeerVideo: @escaping (UInt32) -> GroupVideoNode? + getPeerVideo: @escaping (UInt32, Bool) -> GroupVideoNode? ) { self.updateIsMuted = updateIsMuted self.openPeer = openPeer @@ -589,7 +629,7 @@ public final class VoiceChatController: ViewController { } } - func item(context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListViewItem { + func item(context: AccountContext, presentationData: PresentationData, interaction: Interaction, style: VoiceChatParticipantItem.LayoutStyle) -> ListViewItem { switch self { case let .invite(_, _, text): return VoiceChatActionItem(presentationData: ItemListPresentationData(presentationData), title: text, icon: .generic(UIImage(bundleImageName: "Chat/Context Menu/AddUser")!), action: { @@ -668,9 +708,9 @@ public final class VoiceChatController: ViewController { let revealOptions: [VoiceChatParticipantItem.RevealOption] = [] - return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: peerEntry.presence, text: text, expandedText: expandedText, icon: icon, enabled: true, transparent: false, selectable: true, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: { + return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: peerEntry.presence, text: text, expandedText: expandedText, icon: icon, style: style, enabled: true, transparent: false, selectable: true, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: { if let ssrc = peerEntry.ssrc { - return interaction.getPeerVideo(ssrc) + return interaction.getPeerVideo(ssrc, style == .tile) } else { return nil } @@ -687,12 +727,12 @@ public final class VoiceChatController: ViewController { } } - private func preparedTransition(from fromEntries: [ListEntry], to toEntries: [ListEntry], isLoading: Bool, isEmpty: Bool, canInvite: Bool, crossFade: Bool, animated: Bool, context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListTransition { + private func preparedTransition(from fromEntries: [ListEntry], to toEntries: [ListEntry], isLoading: Bool, isEmpty: Bool, canInvite: Bool, crossFade: Bool, animated: Bool, context: AccountContext, presentationData: PresentationData, interaction: Interaction, style: VoiceChatParticipantItem.LayoutStyle) -> ListTransition { let (deleteIndices, indicesAndItems, updateIndices) = mergeListsStableWithUpdates(leftList: fromEntries, rightList: toEntries) let deletions = deleteIndices.map { ListViewDeleteItem(index: $0, directionHint: nil) } - let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) 
} - let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) } + let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction, style: style), directionHint: nil) } + let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction, style: style), directionHint: nil) } return ListTransition(deletions: deletions, insertions: insertions, updates: updates, isLoading: isLoading, isEmpty: isEmpty, canInvite: canInvite, crossFade: crossFade, count: toEntries.count, animated: animated) } @@ -710,8 +750,10 @@ public final class VoiceChatController: ViewController { private let dimNode: ASDisplayNode private let contentContainer: ASDisplayNode private let backgroundNode: ASDisplayNode - private var mainVideoContainer: MainVideoContainerNode? + private let mainVideoClippingNode: ASDisplayNode + private var mainVideoContainerNode: MainVideoContainerNode? private let listNode: ListView + private let horizontalListNode: ListView private let topPanelNode: ASDisplayNode private let topPanelEdgeNode: ASDisplayNode private let topPanelBackgroundNode: ASDisplayNode @@ -735,6 +777,8 @@ public final class VoiceChatController: ViewController { private var enqueuedTransitions: [ListTransition] = [] private var floatingHeaderOffset: CGFloat? + private var enqueuedHorizontalTransitions: [ListTransition] = [] + private var validLayout: (ContainerViewLayout, CGFloat)? private var didSetContentsReady: Bool = false private var didSetDataReady: Bool = false @@ -748,7 +792,8 @@ public final class VoiceChatController: ViewController { private var currentSpeakingPeers: Set? private var currentContentOffset: CGFloat? private var ignoreScrolling = false - private var currentAudioButtonColor: UIColor? + private var currentNormalButtonColor: UIColor? + private var currentActiveButtonColor: UIColor? 
private var currentEntries: [ListEntry] = [] @@ -839,8 +884,11 @@ public final class VoiceChatController: ViewController { self.backgroundNode.backgroundColor = secondaryPanelBackgroundColor self.backgroundNode.clipsToBounds = false + self.mainVideoClippingNode = ASDisplayNode() + self.mainVideoClippingNode.clipsToBounds = true + if sharedContext.immediateExperimentalUISettings.demoVideoChats { - self.mainVideoContainer = MainVideoContainerNode(context: call.accountContext, call: call) + self.mainVideoContainerNode = MainVideoContainerNode(context: call.accountContext, call: call) } self.listNode = ListView() @@ -851,6 +899,14 @@ public final class VoiceChatController: ViewController { return presentationData.strings.VoiceOver_ScrollStatus(row, count).0 } + self.horizontalListNode = ListView() + self.horizontalListNode.transform = CATransform3DMakeRotation(-CGFloat(CGFloat.pi / 2.0), 0.0, 0.0, 1.0) + self.horizontalListNode.clipsToBounds = true + self.horizontalListNode.isHidden = true + self.horizontalListNode.accessibilityPageScrolledString = { row, count in + return presentationData.strings.VoiceOver_ScrollStatus(row, count).0 + } + self.topPanelNode = ASDisplayNode() self.topPanelNode.clipsToBounds = false @@ -954,11 +1010,11 @@ public final class VoiceChatController: ViewController { if strongSelf.currentDominantSpeakerWithVideo?.0 != peerId || strongSelf.currentDominantSpeakerWithVideo?.1 != source { strongSelf.currentDominantSpeakerWithVideo = (peerId, source) strongSelf.call.setFullSizeVideo(peerId: peerId) - strongSelf.mainVideoContainer?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: false) + strongSelf.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: false) } else { strongSelf.currentDominantSpeakerWithVideo = nil strongSelf.call.setFullSizeVideo(peerId: nil) - strongSelf.mainVideoContainer?.updatePeer(peer: nil, waitForFullSize: false) + strongSelf.mainVideoContainerNode?.updatePeer(peer: nil, waitForFullSize: false) } } } @@ -1521,10 +1577,19 @@ public final class VoiceChatController: ViewController { updated.revealedPeerId = peerId return updated } - }, getPeerVideo: { [weak self] ssrc in + }, getPeerVideo: { [weak self] ssrc, tile in guard let strongSelf = self else { return nil } + var skip = false + if case .fullscreen = strongSelf.displayMode { + skip = !tile + } else { + skip = tile + } + if skip { + return nil + } for (_, listSsrc, videoNode) in strongSelf.videoNodes { if listSsrc == ssrc { return videoNode @@ -1544,7 +1609,7 @@ public final class VoiceChatController: ViewController { self.bottomPanelNode.addSubnode(self.bottomCornersNode) self.bottomPanelNode.addSubnode(self.bottomPanelBackgroundNode) self.bottomPanelNode.addSubnode(self.audioButton) - if let _ = self.mainVideoContainer { + if let _ = self.mainVideoContainerNode { self.bottomPanelNode.addSubnode(self.cameraButton) self.bottomPanelNode.addSubnode(self.switchCameraButton) } @@ -1555,15 +1620,18 @@ public final class VoiceChatController: ViewController { self.addSubnode(self.contentContainer) self.contentContainer.addSubnode(self.backgroundNode) - self.contentContainer.addSubnode(self.listNode) - if let mainVideoContainer = self.mainVideoContainer { - self.contentContainer.addSubnode(mainVideoContainer) + if let mainVideoContainer = self.mainVideoContainerNode { + self.contentContainer.addSubnode(self.mainVideoClippingNode) + self.mainVideoClippingNode.addSubnode(mainVideoContainer) } + self.contentContainer.addSubnode(self.listNode) 
self.contentContainer.addSubnode(self.topPanelNode) self.contentContainer.addSubnode(self.leftBorderNode) self.contentContainer.addSubnode(self.rightBorderNode) self.contentContainer.addSubnode(self.bottomPanelNode) + self.contentContainer.addSubnode(self.horizontalListNode) + let invitedPeers: Signal<[Peer], NoError> = self.call.invitedPeers |> mapToSignal { ids -> Signal<[Peer], NoError> in return context.account.postbox.transaction { transaction -> [Peer] in @@ -1622,7 +1690,6 @@ public final class VoiceChatController: ViewController { if let callState = strongSelf.callState, callState.canManageCall { strongSelf.optionsButtonIsAvatar = false strongSelf.optionsButton.isUserInteractionEnabled = true - strongSelf.optionsButton.alpha = 1.0 } else if displayAsPeers.count > 1 { strongSelf.optionsButtonIsAvatar = true for peer in displayAsPeers { @@ -1631,11 +1698,9 @@ public final class VoiceChatController: ViewController { } } strongSelf.optionsButton.isUserInteractionEnabled = true - strongSelf.optionsButton.alpha = 1.0 } else { strongSelf.optionsButtonIsAvatar = false strongSelf.optionsButton.isUserInteractionEnabled = false - strongSelf.optionsButton.alpha = 0.0 } if let (layout, navigationHeight) = strongSelf.validLayout { @@ -1712,7 +1777,7 @@ public final class VoiceChatController: ViewController { if strongSelf.currentDominantSpeakerWithVideo?.0 != peerId || strongSelf.currentDominantSpeakerWithVideo?.1 != source { strongSelf.currentDominantSpeakerWithVideo = (peerId, source) strongSelf.call.setFullSizeVideo(peerId: peerId) - strongSelf.mainVideoContainer?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: true) + strongSelf.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: true) } } @@ -1745,10 +1810,11 @@ public final class VoiceChatController: ViewController { self.closeButton.addTarget(self, action: #selector(self.closePressed), forControlEvents: .touchUpInside) self.actionButtonColorDisposable = (self.actionButton.outerColor - |> deliverOnMainQueue).start(next: { [weak self] color in + |> deliverOnMainQueue).start(next: { [weak self] normalColor, activeColor in if let strongSelf = self { - let animated = strongSelf.currentAudioButtonColor != nil - strongSelf.currentAudioButtonColor = color + let animated = strongSelf.currentNormalButtonColor != nil + strongSelf.currentNormalButtonColor = normalColor + strongSelf.currentActiveButtonColor = activeColor strongSelf.updateButtons(animated: animated) } }) @@ -1756,7 +1822,7 @@ public final class VoiceChatController: ViewController { self.listNode.updateFloatingHeaderOffset = { [weak self] offset, transition in if let strongSelf = self { strongSelf.currentContentOffset = offset - if strongSelf.animation == nil && !strongSelf.animatingExpansion { + if strongSelf.expandAnimation == nil && !strongSelf.animatingExpansion { strongSelf.updateFloatingHeaderOffset(offset: offset, transition: transition) } } @@ -1820,7 +1886,8 @@ public final class VoiceChatController: ViewController { case let .peer(peerEntry): if peerEntry.ssrc == source { let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme) - strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) + 
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .list), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) + strongSelf.horizontalListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .tile), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) break loop } default: @@ -1845,7 +1912,8 @@ public final class VoiceChatController: ViewController { case let .peer(peerEntry): if peerEntry.ssrc == ssrc { let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme) - strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) + strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .list), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) + strongSelf.horizontalListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .tile), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil) break loop } default: @@ -1862,7 +1930,7 @@ public final class VoiceChatController: ViewController { if !validSources.contains(source) { strongSelf.currentDominantSpeakerWithVideo = nil strongSelf.call.setFullSizeVideo(peerId: nil) - strongSelf.mainVideoContainer?.updatePeer(peer: nil, waitForFullSize: false) + strongSelf.mainVideoContainerNode?.updatePeer(peer: nil, waitForFullSize: false) } } @@ -1890,6 +1958,68 @@ public final class VoiceChatController: ViewController { } } } + + self.mainVideoContainerNode?.tapped = { [weak self] in + if let strongSelf = self { + var effectiveDisplayMode = strongSelf.displayMode + var isLandscape = false + if let (layout, _) = strongSelf.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass { + isLandscape = true + if case .fullscreen = effectiveDisplayMode { + } else { + effectiveDisplayMode = .fullscreen(controlsHidden: false) + } + } + + switch effectiveDisplayMode { + case .default: + strongSelf.displayMode = .fullscreen(controlsHidden: false) + case let .fullscreen(controlsHidden): + if true { + strongSelf.displayMode = .fullscreen(controlsHidden: !controlsHidden) + } + else if controlsHidden && !isLandscape { + strongSelf.displayMode = .default + } else { + strongSelf.displayMode = .fullscreen(controlsHidden: true) + } + } + + if case .default = effectiveDisplayMode, case .fullscreen = strongSelf.displayMode { + strongSelf.horizontalListNode.isHidden = false + + var verticalItemNodes: 
[PeerId: VoiceChatParticipantItemNode] = [:] + strongSelf.listNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item { + verticalItemNodes[item.peer.id] = itemNode + } + } + + strongSelf.horizontalListNode.forEachVisibleItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = verticalItemNodes[item.peer.id] { + itemNode.transitionIn(from: otherItemNode, containerNode: strongSelf) + } + } + } else if case .fullscreen = effectiveDisplayMode, case .default = strongSelf.displayMode { + var horizontalItemNodes: [PeerId: VoiceChatParticipantItemNode] = [:] + strongSelf.horizontalListNode.forEachItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item { + horizontalItemNodes[item.peer.id] = itemNode + } + } + + strongSelf.listNode.forEachVisibleItemNode { itemNode in + if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = horizontalItemNodes[item.peer.id] { + itemNode.transitionIn(from: otherItemNode, containerNode: strongSelf) + } + } + } + + if let (layout, navigationHeight) = strongSelf.validLayout { + strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + } + } + } } deinit { @@ -2614,7 +2744,7 @@ public final class VoiceChatController: ViewController { } @objc private func switchCameraPressed() { - + self.call.switchVideoCamera() } private var effectiveBottomAreaHeight: CGFloat { @@ -2626,13 +2756,13 @@ public final class VoiceChatController: ViewController { } } + private var bringVideoToBackOnCompletion = false private func updateFloatingHeaderOffset(offset: CGFloat, transition: ContainedViewLayoutTransition, completion: (() -> Void)? = nil) { guard let (layout, _) = self.validLayout else { return } let layoutTopInset: CGFloat = max(layout.statusBarHeight ?? 0.0, layout.safeInsets.top) - let topPanelHeight: CGFloat = 63.0 let listTopInset = layoutTopInset + topPanelHeight let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom @@ -2641,6 +2771,16 @@ public final class VoiceChatController: ViewController { size.width = floor(min(size.width, size.height) * 0.5) } + var isLandscape = false + var effectiveDisplayMode = self.displayMode + if case .compact = layout.metrics.widthClass, layout.size.width > layout.size.height { + isLandscape = true + if case .fullscreen = effectiveDisplayMode { + } else { + effectiveDisplayMode = .fullscreen(controlsHidden: false) + } + } + let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight) let topInset: CGFloat if let (panInitialTopInset, panOffset) = self.panGestureArguments { @@ -2649,7 +2789,7 @@ public final class VoiceChatController: ViewController { } else { topInset = max(0.0, panInitialTopInset + min(0.0, panOffset)) } - } else if let _ = self.animation { + } else if let _ = self.expandAnimation { topInset = self.listNode.frame.minY - listTopInset } else if let currentTopInset = self.topInset { topInset = self.isExpanded ? 
0.0 : currentTopInset @@ -2662,18 +2802,60 @@ public final class VoiceChatController: ViewController { let rawPanelOffset = offset + listTopInset - topPanelHeight let panelOffset = max(layoutTopInset, rawPanelOffset) - let topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: panelOffset), size: CGSize(width: size.width, height: topPanelHeight)) + let topPanelFrame: CGRect + if isLandscape { + topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: size.width, height: 0.0)) + } else { + topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: panelOffset), size: CGSize(width: size.width, height: topPanelHeight)) + } + let sideInset: CGFloat = 16.0 - if let mainVideoContainer = self.mainVideoContainer { - let videoContainerFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY), size: CGSize(width: layout.size.width, height: min(300.0, layout.size.width))) - transition.updateFrameAdditive(node: mainVideoContainer, frame: videoContainerFrame) - mainVideoContainer.update(size: videoContainerFrame.size, transition: transition) + if let mainVideoContainer = self.mainVideoContainerNode { + let videoClippingFrame: CGRect + let videoContainerFrame: CGRect + let videoInset: CGFloat + if isLandscape { + videoInset = 0.0 + videoClippingFrame = CGRect(x: layout.safeInsets.left, y: 0.0, width: layout.size.width - layout.safeInsets.left - layout.safeInsets.right - fullscreenBottomAreaHeight, height: layout.size.height + 6.0) + videoContainerFrame = CGRect(origin: CGPoint(), size: videoClippingFrame.size) + } else { + let videoHeight: CGFloat + let videoY: CGFloat + switch effectiveDisplayMode { + case .default: + videoInset = sideInset + videoHeight = min(mainVideoHeight, layout.size.width) + videoY = topPanelFrame.maxY + case .fullscreen: + videoInset = 0.0 + videoHeight = layout.size.height - (layout.statusBarHeight ?? 0.0) - layout.intrinsicInsets.bottom - fullscreenBottomAreaHeight - 6.0 + videoY = layout.statusBarHeight ?? 20.0 + + } + videoClippingFrame = CGRect(origin: CGPoint(x: videoInset, y: videoY), size: CGSize(width: layout.size.width - videoInset * 2.0, height: self.isFullscreen ? 
videoHeight : 0.0)) + videoContainerFrame = CGRect(origin: CGPoint(x: -videoInset, y: 0.0), size: CGSize(width: layout.size.width, height: videoHeight)) + } + transition.updateFrame(node: self.mainVideoClippingNode, frame: videoClippingFrame) + transition.updateFrame(node: mainVideoContainer, frame: videoContainerFrame, completion: { [weak self] _ in + if let strongSelf = self, strongSelf.bringVideoToBackOnCompletion { + strongSelf.bringVideoToBackOnCompletion = false + strongSelf.contentContainer.insertSubnode(strongSelf.mainVideoClippingNode, belowSubnode: strongSelf.horizontalListNode) + } + }) + mainVideoContainer.update(size: videoContainerFrame.size, sideInset: videoInset, isLandscape: isLandscape, transition: transition) } let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY), size: CGSize(width: size.width, height: layout.size.height)) - let sideInset: CGFloat = 16.0 - let leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height)) - let rightBorderFrame = CGRect(origin: CGPoint(x: size.width - sideInset, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height)) + + let leftBorderFrame: CGRect + let rightBorderFrame: CGRect + if isLandscape { + leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: layout.safeInsets.left, height: layout.size.height)) + rightBorderFrame = CGRect(origin: CGPoint(x: size.width - layout.safeInsets.right, y: 0.0), size: CGSize(width: layout.safeInsets.right, height: layout.size.height)) + } else { + leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height)) + rightBorderFrame = CGRect(origin: CGPoint(x: size.width - sideInset, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height)) + } let previousTopPanelFrame = self.topPanelNode.frame let previousBackgroundFrame = self.backgroundNode.frame @@ -2699,7 +2881,7 @@ public final class VoiceChatController: ViewController { } else { completion?() } - self.topPanelBackgroundNode.frame = CGRect(x: 0.0, y: topPanelHeight - 24.0, width: size.width, height: 24.0) + self.topPanelBackgroundNode.frame = CGRect(x: 0.0, y: topPanelHeight - 24.0, width: size.width, height: min(topPanelFrame.height, 24.0)) var bottomEdge: CGFloat = 0.0 self.listNode.forEachItemNode { itemNode in @@ -2747,7 +2929,6 @@ public final class VoiceChatController: ViewController { size.width = floor(min(size.width, size.height) * 0.5) } - let topPanelHeight: CGFloat = 63.0 let topEdgeFrame: CGRect if isFullscreen { let offset: CGFloat @@ -2770,7 +2951,6 @@ public final class VoiceChatController: ViewController { transition.updateBackgroundColor(node: self.bottomPanelBackgroundNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor) transition.updateBackgroundColor(node: self.leftBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor) transition.updateBackgroundColor(node: self.rightBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor) - transition.updateBackgroundColor(node: self.rightBorderNode, color: isFullscreen ? 
fullscreenBackgroundColor : panelBackgroundColor) if let snapshotView = self.topCornersNode.view.snapshotContentTree() { snapshotView.frame = self.topCornersNode.frame @@ -2850,22 +3030,28 @@ public final class VoiceChatController: ViewController { } } - let coloredButtonAppearance: CallControllerButtonItemNode.Content.Appearance - if let color = self.currentAudioButtonColor { - coloredButtonAppearance = .color(.custom(color.rgb, 1.0)) + let normalButtonAppearance: CallControllerButtonItemNode.Content.Appearance + let activeButtonAppearance: CallControllerButtonItemNode.Content.Appearance + if let color = self.currentNormalButtonColor { + normalButtonAppearance = .color(.custom(color.rgb, 1.0)) } else { - coloredButtonAppearance = .color(.custom(self.isFullscreen ? 0x1c1c1e : 0x2c2c2e, 1.0)) + normalButtonAppearance = .color(.custom(self.isFullscreen ? 0x1c1c1e : 0x2c2c2e, 1.0)) + } + if let color = self.currentActiveButtonColor { + activeButtonAppearance = .color(.custom(color.rgb, 1.0)) + } else { + activeButtonAppearance = .color(.custom(self.isFullscreen ? 0x1c1c1e : 0x2c2c2e, 1.0)) } let soundImage: CallControllerButtonItemNode.Content.Image - var soundAppearance: CallControllerButtonItemNode.Content.Appearance = coloredButtonAppearance + var soundAppearance: CallControllerButtonItemNode.Content.Appearance = normalButtonAppearance var soundTitle: String = self.presentationData.strings.Call_Speaker switch audioMode { case .none, .builtin: soundImage = .speaker case .speaker: soundImage = .speaker - soundAppearance = .blurred(isFilled: true) + soundAppearance = activeButtonAppearance case .headphones: soundImage = .headphones soundTitle = self.presentationData.strings.Call_Audio @@ -2883,7 +3069,15 @@ public final class VoiceChatController: ViewController { let videoButtonSize: CGSize var buttonsTitleAlpha: CGFloat - switch self.displayMode { + var effectiveDisplayMode = self.displayMode + if let (layout, _) = self.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass { + if case .fullscreen = effectiveDisplayMode { + } else { + effectiveDisplayMode = .fullscreen(controlsHidden: false) + } + } + + switch effectiveDisplayMode { case .default: videoButtonSize = smallButtonSize buttonsTitleAlpha = 1.0 @@ -2893,9 +3087,9 @@ public final class VoiceChatController: ViewController { } let transition: ContainedViewLayoutTransition = animated ? 
.animated(duration: 0.3, curve: .linear) : .immediate - self.cameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: coloredButtonAppearance, image: .camera), text: self.presentationData.strings.VoiceChat_Video, transition: transition) + self.cameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .camera), text: self.presentationData.strings.VoiceChat_Video, transition: transition) - self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: coloredButtonAppearance, image: .flipCamera), text: "", transition: transition) + self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition) self.audioButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage), text: soundTitle, transition: transition) @@ -2916,24 +3110,51 @@ public final class VoiceChatController: ViewController { size.width = floor(min(size.width, size.height) * 0.5) } + var isLandscape = false + var effectiveDisplayMode = self.displayMode + if case .compact = layout.metrics.widthClass, layout.size.width > layout.size.height { + isLandscape = true + + if !self.isFullscreen { + self.isExpanded = true + self.updateIsFullscreen(true) + } + if case .fullscreen = effectiveDisplayMode { + } else { + effectiveDisplayMode = .fullscreen(controlsHidden: false) + } + } + + if let videoIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.mainVideoClippingNode }), let listIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.listNode }) { + switch effectiveDisplayMode { + case .default: + if listIndex < videoIndex { + self.bringVideoToBackOnCompletion = true + } + case .fullscreen: + if listIndex > videoIndex { + self.contentContainer.insertSubnode(self.mainVideoClippingNode, belowSubnode: self.horizontalListNode) + } + } + } + self.updateTitle(transition: transition) transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: 0.0, y: 10.0), size: CGSize(width: size.width, height: 44.0))) transition.updateFrame(node: self.optionsButton, frame: CGRect(origin: CGPoint(x: 20.0, y: 18.0), size: CGSize(width: 28.0, height: 28.0))) transition.updateFrame(node: self.closeButton, frame: CGRect(origin: CGPoint(x: size.width - 20.0 - 28.0, y: 18.0), size: CGSize(width: 28.0, height: 28.0))) transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size)) - transition.updateFrame(node: self.contentContainer, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - size.width) / 2.0), y: 0.0), size: size)) let layoutTopInset: CGFloat = max(layout.statusBarHeight ?? 
0.0, layout.safeInsets.top) - let sideInset: CGFloat = 16.0 var insets = UIEdgeInsets() insets.left = layout.safeInsets.left + sideInset insets.right = layout.safeInsets.right + sideInset - let topPanelHeight: CGFloat = 63.0 - if let _ = self.panGestureArguments { + if isLandscape { + transition.updateFrame(node: self.topPanelEdgeNode, frame: CGRect(x: 0.0, y: 0.0, width: size.width, height: 0.0)) + } else if let _ = self.panGestureArguments { } else { let topEdgeFrame: CGRect if self.isFullscreen { @@ -2952,8 +3173,14 @@ public final class VoiceChatController: ViewController { let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom var listTopInset = layoutTopInset + topPanelHeight - if self.mainVideoContainer != nil { - listTopInset += min(300.0, layout.size.width) + var topCornersY = topPanelHeight + if isLandscape { + listTopInset = 0.0 + topCornersY = -50.0 + } else if self.mainVideoContainerNode != nil && self.isFullscreen { + let videoContainerHeight = min(mainVideoHeight, layout.size.width) + listTopInset += videoContainerHeight + topCornersY += videoContainerHeight } let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight) @@ -2970,18 +3197,33 @@ public final class VoiceChatController: ViewController { topInset = listSize.height } - if self.animation == nil { + if self.expandAnimation == nil { transition.updateFrame(node: self.listNode, frame: CGRect(origin: CGPoint(x: 0.0, y: listTopInset + topInset), size: listSize)) } let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition) - let updateSizeAndInsets = ListViewUpdateSizeAndInsets(size: listSize, insets: insets, duration: duration, curve: curve) + self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: ListViewUpdateSizeAndInsets(size: listSize, insets: insets, duration: duration, curve: curve), stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) - self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: updateSizeAndInsets, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) + let horizontalListHeight: CGFloat = 84.0 + self.horizontalListNode.bounds = CGRect(x: 0.0, y: 0.0, width: horizontalListHeight, height: layout.size.width - layout.safeInsets.left - layout.safeInsets.right) - transition.updateFrame(node: self.topCornersNode, frame: CGRect(origin: CGPoint(x: sideInset, y: 63.0), size: CGSize(width: size.width - sideInset * 2.0, height: 50.0))) + let horizontalListY = isLandscape ? 
layout.size.height - layout.intrinsicInsets.bottom - 42.0 : layout.size.height - min(bottomPanelHeight, fullscreenBottomAreaHeight + layout.intrinsicInsets.bottom) - 42.0 + transition.updatePosition(node: self.horizontalListNode, position: CGPoint(x: layout.safeInsets.left + layout.size.width / 2.0, y: horizontalListY)) + self.horizontalListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: ListViewUpdateSizeAndInsets(size: CGSize(width: horizontalListHeight, height: layout.size.width), insets: UIEdgeInsets(top: 16.0, left: 0.0, bottom: 16.0, right: 0.0), duration: duration, curve: curve), stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in }) - let bottomPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelHeight), size: CGSize(width: size.width, height: bottomPanelHeight)) + transition.updateFrame(node: self.topCornersNode, frame: CGRect(origin: CGPoint(x: sideInset, y: topCornersY), size: CGSize(width: size.width - sideInset * 2.0, height: 50.0))) + + var bottomPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelHeight), size: CGSize(width: size.width, height: bottomPanelHeight)) + if isLandscape { + transition.updateAlpha(node: self.closeButton, alpha: 0.0) + transition.updateAlpha(node: self.optionsButton, alpha: 0.0) + transition.updateAlpha(node: self.titleNode, alpha: 0.0) + bottomPanelFrame = CGRect(origin: CGPoint(x: layout.size.width - fullscreenBottomAreaHeight - layout.safeInsets.right, y: 0.0), size: CGSize(width: fullscreenBottomAreaHeight + layout.safeInsets.right, height: layout.size.height)) + } else { + transition.updateAlpha(node: self.closeButton, alpha: 1.0) + transition.updateAlpha(node: self.optionsButton, alpha: self.optionsButton.isUserInteractionEnabled ? 
1.0 : 0.0) + transition.updateAlpha(node: self.titleNode, alpha: 1.0) + } transition.updateFrame(node: self.bottomPanelNode, frame: bottomPanelFrame) let centralButtonSize = CGSize(width: 300.0, height: 300.0) @@ -2997,15 +3239,15 @@ public final class VoiceChatController: ViewController { let forthButtonFrame: CGRect let leftButtonFrame: CGRect - if self.mainVideoContainer == nil { + if self.mainVideoContainerNode == nil { leftButtonFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) } else { leftButtonFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height - upperButtonDistance - cameraButtonSize.height) / 2.0) + upperButtonDistance + cameraButtonSize.height), size: sideButtonSize) } let rightButtonFrame = CGRect(origin: CGPoint(x: size.width - sideButtonOrigin - sideButtonSize.width, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) - + let smallButtons: Bool - switch self.displayMode { + switch effectiveDisplayMode { case .default: smallButtons = false firstButtonFrame = CGRect(origin: CGPoint(x: floor(leftButtonFrame.midX - cameraButtonSize.width / 2.0), y: leftButtonFrame.minY - upperButtonDistance - cameraButtonSize.height), size: cameraButtonSize) @@ -3015,13 +3257,23 @@ public final class VoiceChatController: ViewController { case let .fullscreen(controlsHidden): smallButtons = true let sideInset: CGFloat = 26.0 - let spacing = floor((layout.size.width - sideInset * 2.0 - sideButtonSize.width * 4.0) / 3.0) - - firstButtonFrame = CGRect(origin: CGPoint(x: sideInset, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) - secondButtonFrame = CGRect(origin: CGPoint(x: sideInset + sideButtonSize.width + spacing, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) - let thirdButtonPreFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width - spacing - sideButtonSize.width, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) - thirdButtonFrame = CGRect(origin: CGPoint(x: floor(thirdButtonPreFrame.midX - centralButtonSize.width / 2.0), y: floor(thirdButtonPreFrame.midY - centralButtonSize.height / 2.0)), size: centralButtonSize) - forthButtonFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize) + if isLandscape { + let spacing = floor((layout.size.height - sideInset * 2.0 - sideButtonSize.height * 4.0) / 3.0) + let x = controlsHidden ? 
fullscreenBottomAreaHeight + layout.safeInsets.right + 30.0: floor((fullscreenBottomAreaHeight - sideButtonSize.width) / 2.0) + forthButtonFrame = CGRect(origin: CGPoint(x: x, y: sideInset), size: sideButtonSize) + let thirdButtonPreFrame = CGRect(origin: CGPoint(x: x, y: sideInset + sideButtonSize.height + spacing), size: sideButtonSize) + thirdButtonFrame = CGRect(origin: CGPoint(x: floor(thirdButtonPreFrame.midX - centralButtonSize.width / 2.0), y: floor(thirdButtonPreFrame.midY - centralButtonSize.height / 2.0)), size: centralButtonSize) + secondButtonFrame = CGRect(origin: CGPoint(x: x, y: layout.size.height - sideInset - sideButtonSize.height - spacing - sideButtonSize.height), size: sideButtonSize) + firstButtonFrame = CGRect(origin: CGPoint(x: x, y: layout.size.height - sideInset - sideButtonSize.height), size: sideButtonSize) + } else { + let spacing = floor((layout.size.width - sideInset * 2.0 - sideButtonSize.width * 4.0) / 3.0) + let y = controlsHidden ? self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom + 30.0: floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0) + firstButtonFrame = CGRect(origin: CGPoint(x: sideInset, y: y), size: sideButtonSize) + secondButtonFrame = CGRect(origin: CGPoint(x: sideInset + sideButtonSize.width + spacing, y: y), size: sideButtonSize) + let thirdButtonPreFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width - spacing - sideButtonSize.width, y: y), size: sideButtonSize) + thirdButtonFrame = CGRect(origin: CGPoint(x: floor(thirdButtonPreFrame.midX - centralButtonSize.width / 2.0), y: floor(thirdButtonPreFrame.midY - centralButtonSize.height / 2.0)), size: centralButtonSize) + forthButtonFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width, y: y), size: sideButtonSize) + } } let actionButtonState: VoiceChatActionButton.State @@ -3103,6 +3355,9 @@ public final class VoiceChatController: ViewController { while !self.enqueuedTransitions.isEmpty { self.dequeueTransition() } + while !self.enqueuedHorizontalTransitions.isEmpty { + self.dequeueTransition() + } } } @@ -3179,6 +3434,16 @@ public final class VoiceChatController: ViewController { } } + private func enqueueHorizontalTransition(_ transition: ListTransition) { + self.enqueuedHorizontalTransitions.append(transition) + + if let _ = self.validLayout { + while !self.enqueuedHorizontalTransitions.isEmpty { + self.dequeueHorizontalTransition() + } + } + } + private var topInset: CGFloat? 
private var isFirstTime = true private func dequeueTransition() { @@ -3195,7 +3460,7 @@ public final class VoiceChatController: ViewController { if transition.crossFade { options.insert(.AnimateCrossfade) } - if transition.animated && self.animation == nil { + if transition.animated && self.expandAnimation == nil { options.insert(.AnimateInsertion) } } @@ -3223,7 +3488,7 @@ public final class VoiceChatController: ViewController { } let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom - let listTopInset = layoutTopInset + 63.0 + let listTopInset = layoutTopInset + topPanelHeight let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight) self.topInset = max(0.0, max(listSize.height - itemsHeight, listSize.height - 46.0 - floor(56.0 * 3.5))) @@ -3236,7 +3501,7 @@ public final class VoiceChatController: ViewController { self.listNode.frame = frame } else if !self.isExpanded { if self.listNode.frame.minY != targetY && !self.animatingExpansion && self.panGestureArguments == nil { - self.animation = ListViewAnimation(from: self.listNode.frame.minY, to: targetY, duration: 0.4, curve: listViewAnimationCurveSystem, beginAt: CACurrentMediaTime(), update: { [weak self] _, currentValue in + self.expandAnimation = ListViewAnimation(from: self.listNode.frame.minY, to: targetY, duration: 0.4, curve: listViewAnimationCurveSystem, beginAt: CACurrentMediaTime(), update: { [weak self] _, currentValue in if let strongSelf = self { var frame = strongSelf.listNode.frame frame.origin.y = currentValue @@ -3260,17 +3525,40 @@ public final class VoiceChatController: ViewController { } + private func dequeueHorizontalTransition() { + guard let _ = self.validLayout, let transition = self.enqueuedHorizontalTransitions.first else { + return + } + self.enqueuedHorizontalTransitions.remove(at: 0) + + var options = ListViewDeleteAndInsertOptions() + let isFirstTime = self.isFirstTime + if !isFirstTime { + if transition.crossFade { + options.insert(.AnimateCrossfade) + } + if transition.animated { + options.insert(.AnimateInsertion) + } + } + options.insert(.LowLatency) + options.insert(.PreferSynchronousResourceLoading) + + self.horizontalListNode.transaction(deleteIndices: transition.deletions, insertIndicesAndItems: transition.insertions, updateIndicesAndItems: transition.updates, options: options, scrollToItem: nil, updateSizeAndInsets: nil, updateOpaqueState: nil, completion: { _ in + }) + } + private var animator: ConstantDisplayLinkAnimator? - private var animation: ListViewAnimation? + private var expandAnimation: ListViewAnimation? 
private func updateAnimation() { var animate = false let timestamp = CACurrentMediaTime() - if let animation = self.animation { + if let animation = self.expandAnimation { animation.applyAt(timestamp) if animation.completeAt(timestamp) { - self.animation = nil + self.expandAnimation = nil } else { animate = true } @@ -3318,7 +3606,7 @@ public final class VoiceChatController: ViewController { } } } - if canInvite { + if false, canInvite { entries.append(.invite(self.presentationData.theme, self.presentationData.strings, self.presentationData.strings.VoiceChat_InviteMember)) } @@ -3439,12 +3727,21 @@ public final class VoiceChatController: ViewController { } let presentationData = self.presentationData.withUpdated(theme: self.darkTheme) - let transition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!) + let transition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!, style: .list) self.enqueueTransition(transition) + + let horizontalTransition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!, style: .tile) + self.enqueueHorizontalTransition(horizontalTransition) } override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool { if gestureRecognizer is DirectionalPanGestureRecognizer { + if let (layout, _) = self.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass { + return false + } + if case .fullscreen = self.displayMode { + return false + } let location = gestureRecognizer.location(in: self.bottomPanelNode.view) if self.audioButton.frame.contains(location) || (!self.cameraButton.isHidden && self.cameraButton.frame.contains(location)) || self.leaveButton.frame.contains(location) { return false diff --git a/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift b/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift index 75c16b2e30..08a584e141 100644 --- a/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift +++ b/submodules/TelegramCallsUI/Sources/VoiceChatParticipantItem.swift @@ -20,6 +20,11 @@ import AudioBlob import PeerInfoAvatarListNode final class VoiceChatParticipantItem: ListViewItem { + enum LayoutStyle { + case list + case tile + } + enum ParticipantText { public enum TextColor { case generic @@ -69,6 +74,7 @@ final class VoiceChatParticipantItem: ListViewItem { let text: ParticipantText let expandedText: ParticipantText? 
let icon: Icon + let style: LayoutStyle let enabled: Bool let transparent: Bool public let selectable: Bool @@ -82,7 +88,7 @@ final class VoiceChatParticipantItem: ListViewItem { let getIsExpanded: () -> Bool let getUpdatingAvatar: () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError> - public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, ssrc: UInt32?, presence: PeerPresence?, text: ParticipantText, expandedText: ParticipantText?, icon: Icon, enabled: Bool, transparent: Bool, selectable: Bool, getAudioLevel: (() -> Signal)?, getVideo: @escaping () -> GroupVideoNode?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: ((ASDisplayNode) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getIsExpanded: @escaping () -> Bool, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) { + public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, ssrc: UInt32?, presence: PeerPresence?, text: ParticipantText, expandedText: ParticipantText?, icon: Icon, style: LayoutStyle, enabled: Bool, transparent: Bool, selectable: Bool, getAudioLevel: (() -> Signal)?, getVideo: @escaping () -> GroupVideoNode?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: ((ASDisplayNode) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getIsExpanded: @escaping () -> Bool, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) { self.presentationData = presentationData self.dateTimeFormat = dateTimeFormat self.nameDisplayOrder = nameDisplayOrder @@ -93,6 +99,7 @@ final class VoiceChatParticipantItem: ListViewItem { self.text = text self.expandedText = expandedText self.icon = icon + self.style = style self.enabled = enabled self.transparent = transparent self.selectable = selectable @@ -151,6 +158,8 @@ final class VoiceChatParticipantItem: ListViewItem { } private let avatarFont = avatarPlaceholderFont(size: floor(40.0 * 16.0 / 37.0)) +private let tileSize = CGSize(width: 84.0, height: 84.0) +private let backgroundCornerRadius: CGFloat = 14.0 class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { private let topStripeNode: ASDisplayNode @@ -169,7 +178,9 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { private var extractedVerticalOffset: CGFloat? fileprivate let avatarNode: AvatarNode + private let contentWrapperNode: ASDisplayNode private let titleNode: TextNode + private let statusIconNode: ASImageNode private let statusNode: TextNode private let expandedStatusNode: TextNode private var credibilityIconNode: ASImageNode? @@ -196,6 +207,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { private var isExtracted = false private var wavesColor: UIColor? + private var videoContainerNode: ASDisplayNode private var videoNode: GroupVideoNode? private var raiseHandTimer: SwiftSignalKit.Timer? 
@@ -229,11 +241,19 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { self.avatarNode = AvatarNode(font: avatarFont) self.avatarNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 40.0)) + self.contentWrapperNode = ASDisplayNode() + + self.videoContainerNode = ASDisplayNode() + self.videoContainerNode.clipsToBounds = true + self.titleNode = TextNode() self.titleNode.isUserInteractionEnabled = false self.titleNode.contentMode = .left self.titleNode.contentsScale = UIScreen.main.scale + self.statusIconNode = ASImageNode() + self.statusIconNode.displaysAsynchronously = false + self.statusNode = TextNode() self.statusNode.isUserInteractionEnabled = false self.statusNode.contentMode = .left @@ -262,12 +282,15 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { self.contextSourceNode.contentNode.addSubnode(self.backgroundImageNode) self.backgroundImageNode.addSubnode(self.extractedBackgroundImageNode) self.contextSourceNode.contentNode.addSubnode(self.offsetContainerNode) + self.offsetContainerNode.addSubnode(self.videoContainerNode) + self.offsetContainerNode.addSubnode(self.contentWrapperNode) + self.contentWrapperNode.addSubnode(self.titleNode) + self.contentWrapperNode.addSubnode(self.statusIconNode) + self.contentWrapperNode.addSubnode(self.statusNode) + self.contentWrapperNode.addSubnode(self.expandedStatusNode) + self.contentWrapperNode.addSubnode(self.actionContainerNode) + self.contentWrapperNode.addSubnode(self.actionButtonNode) self.offsetContainerNode.addSubnode(self.avatarNode) - self.offsetContainerNode.addSubnode(self.titleNode) - self.offsetContainerNode.addSubnode(self.statusNode) - self.offsetContainerNode.addSubnode(self.expandedStatusNode) - self.offsetContainerNode.addSubnode(self.actionContainerNode) - self.actionContainerNode.addSubnode(self.actionButtonNode) self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode self.actionButtonNode.addTarget(self, action: #selector(self.actionButtonPressed), forControlEvents: .touchUpInside) @@ -304,7 +327,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.isExtracted = isExtracted let inset: CGFloat = 12.0 - let cornerRadius: CGFloat = 14.0 if isExtracted { strongSelf.contextSourceNode.contentNode.customHitTest = { [weak self] point in if let strongSelf = self { @@ -339,23 +361,23 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { if !extractedVerticalOffset.isZero { let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut) if isExtracted { - strongSelf.backgroundImageNode.image = generateImage(CGSize(width: cornerRadius * 2.0, height: cornerRadius * 2.0), rotatedContext: { (size, context) in + strongSelf.backgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in let bounds = CGRect(origin: CGPoint(), size: size) context.clear(bounds) context.setFillColor(itemBackgroundColor.cgColor) context.fillEllipse(in: bounds) context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0)) - })?.stretchableImage(withLeftCapWidth: Int(cornerRadius), topCapHeight: Int(cornerRadius)) - strongSelf.extractedBackgroundImageNode.image = generateImage(CGSize(width: cornerRadius * 2.0, height: cornerRadius * 2.0), rotatedContext: { (size, context) in + })?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: 
Int(backgroundCornerRadius)) + strongSelf.extractedBackgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in let bounds = CGRect(origin: CGPoint(), size: size) context.clear(bounds) context.setFillColor(item.presentationData.theme.list.itemBlocksBackgroundColor.cgColor) context.fillEllipse(in: bounds) context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0)) - })?.stretchableImage(withLeftCapWidth: Int(cornerRadius), topCapHeight: Int(cornerRadius)) - strongSelf.backgroundImageNode.cornerRadius = cornerRadius + })?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius)) + strongSelf.backgroundImageNode.cornerRadius = backgroundCornerRadius strongSelf.avatarNode.transform = CATransform3DIdentity var avatarInitialRect = strongSelf.avatarNode.view.convert(strongSelf.avatarNode.bounds, to: strongSelf.offsetContainerNode.supernode?.view) @@ -364,12 +386,12 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { let targetRect = CGRect(x: extractedRect.minX, y: extractedRect.minY, width: extractedRect.width, height: extractedRect.width) let initialScale = avatarInitialRect.width / targetRect.width - avatarInitialRect.origin.y += cornerRadius / 2.0 * initialScale + avatarInitialRect.origin.y += backgroundCornerRadius / 2.0 * initialScale let avatarListWrapperNode = ASDisplayNode() avatarListWrapperNode.clipsToBounds = true - avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.height + cornerRadius) - avatarListWrapperNode.cornerRadius = cornerRadius + avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.height + backgroundCornerRadius) + avatarListWrapperNode.cornerRadius = backgroundCornerRadius let transitionNode = ASImageNode() transitionNode.clipsToBounds = true @@ -422,11 +444,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.avatarListNode = avatarListNode } } else if let transitionNode = strongSelf.avatarTransitionNode, let avatarListWrapperNode = strongSelf.avatarListWrapperNode, let avatarListContainerNode = strongSelf.avatarListContainerNode { - transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: cornerRadius) + transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: backgroundCornerRadius) var avatarInitialRect = CGRect(origin: strongSelf.avatarNode.frame.origin, size: strongSelf.avatarNode.frame.size) let targetScale = avatarInitialRect.width / avatarListContainerNode.frame.width - avatarInitialRect.origin.y += cornerRadius / 2.0 * targetScale + avatarInitialRect.origin.y += backgroundCornerRadius / 2.0 * targetScale strongSelf.avatarTransitionNode = nil strongSelf.avatarListWrapperNode = nil @@ -493,17 +515,21 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { } else { strongSelf.extractedBackgroundImageNode.alpha = 0.0 strongSelf.extractedBackgroundImageNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, delay: 0.0, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { [weak self] _ in - self?.backgroundImageNode.image = nil - self?.extractedBackgroundImageNode.image = nil - self?.extractedBackgroundImageNode.layer.removeAllAnimations() + if let strongSelf = self { + if 
strongSelf.item?.style == .list { + strongSelf.backgroundImageNode.image = nil + } + strongSelf.extractedBackgroundImageNode.image = nil + strongSelf.extractedBackgroundImageNode.layer.removeAllAnimations() + } }) } } else { if isExtracted { strongSelf.backgroundImageNode.alpha = 0.0 strongSelf.extractedBackgroundImageNode.alpha = 1.0 - strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: cornerRadius * 2.0, color: itemBackgroundColor) - strongSelf.extractedBackgroundImageNode.image = generateStretchableFilledCircleImage(diameter: cornerRadius * 2.0, color: item.presentationData.theme.list.itemBlocksBackgroundColor) + strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: itemBackgroundColor) + strongSelf.extractedBackgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: item.presentationData.theme.list.itemBlocksBackgroundColor) } transition.updateFrame(node: strongSelf.backgroundImageNode, frame: rect) @@ -531,15 +557,69 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { self.audioLevelDisposable.dispose() self.raiseHandTimer?.invalidate() } - - @objc private func handleTap() { - print("tap") - } override func selected() { super.selected() self.layoutParams?.0.action?(self.contextSourceNode) } + + func transitionIn(from otherNode: VoiceChatParticipantItemNode, containerNode: ASDisplayNode) { + guard let otherItem = otherNode.item, otherItem.style != self.item?.style else { + return + } + + switch otherItem.style { + case .list: + otherNode.avatarNode.alpha = 0.0 + + let startContainerPosition = otherNode.avatarNode.view.convert(otherNode.avatarNode.bounds, to: containerNode.view).center.offsetBy(dx: 0.0, dy: 9.0) + + let initialPosition = self.contextSourceNode.position + let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center + + self.contextSourceNode.position = targetContainerPosition + containerNode.addSubnode(self.contextSourceNode) + + self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in + if let strongSelf = self { + strongSelf.contextSourceNode.position = initialPosition + strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode) + } + }) + + if let videoNode = otherNode.videoNode { + self.avatarNode.alpha = 0.0 + + otherNode.videoNode = nil + self.videoNode = videoNode + + let initialPosition = videoNode.position + videoNode.position = CGPoint(x: self.videoContainerNode.frame.width / 2.0, y: self.videoContainerNode.frame.width / 2.0) + videoNode.layer.animatePosition(from: initialPosition, to: videoNode.position, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.videoContainerNode.addSubnode(videoNode) + + self.videoContainerNode.layer.animateFrame(from: self.avatarNode.frame, to: self.videoContainerNode.frame, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.videoContainerNode.layer.animate(from: (self.avatarNode.frame.width / 2.0) as NSNumber, to: backgroundCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak self] value in + }) + } + + 
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue) + case .tile: + if let otherVideoNode = otherNode.videoNode { + otherNode.videoNode = nil + self.videoNode = otherVideoNode + + let initialPosition = otherVideoNode.position + otherVideoNode.position = CGPoint(x: self.videoContainerNode.frame.width / 2.0, y: self.videoContainerNode.frame.width / 2.0) + self.videoContainerNode.addSubnode(otherVideoNode) + } else { + self.avatarNode.alpha = 1.0 + } + } + } func asyncLayout() -> (_ item: VoiceChatParticipantItem, _ params: ListViewItemLayoutParams, _ first: Bool, _ last: Bool) -> (ListViewItemNodeLayout, (Bool, Bool) -> Void) { let makeTitleLayout = TextNode.asyncLayout(self.titleNode) @@ -555,7 +635,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { updatedTheme = item.presentationData.theme } - let titleFont = Font.regular(17.0) + let titleFont = item.style == .tile ? Font.regular(12.0) : Font.regular(17.0) let statusFont = Font.regular(14.0) var titleAttributedString: NSAttributedString? @@ -569,18 +649,29 @@ if let user = item.peer as? TelegramUser { if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty { - let string = NSMutableAttributedString() - switch item.nameDisplayOrder { - case .firstLast: - string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) - string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) - string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor)) - case .lastFirst: - string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor)) - string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) - string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + if item.style == .tile { + let textColor: UIColor + switch item.icon { + case .wantsToSpeak: + textColor = item.presentationData.theme.list.itemAccentColor + default: + textColor = titleColor + } + titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: textColor) + } else { + let string = NSMutableAttributedString() + switch item.nameDisplayOrder { + case .firstLast: + string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor)) + case .lastFirst: + string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor)) + string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor)) + string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor)) + } + titleAttributedString = string } - titleAttributedString = string } else if
let firstName = user.firstName, !firstName.isEmpty { titleAttributedString = NSAttributedString(string: firstName, font: currentBoldFont, textColor: titleColor) } else if let lastName = user.lastName, !lastName.isEmpty { @@ -596,39 +687,39 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { var wavesColor = UIColor(rgb: 0x34c759) switch item.text { - case .presence: - if let user = item.peer as? TelegramUser, let botInfo = user.botInfo { - let botStatus: String - if botInfo.flags.contains(.hasAccessToChatHistory) { - botStatus = item.presentationData.strings.Bot_GroupStatusReadsHistory + case .presence: + if let user = item.peer as? TelegramUser, let botInfo = user.botInfo { + let botStatus: String + if botInfo.flags.contains(.hasAccessToChatHistory) { + botStatus = item.presentationData.strings.Bot_GroupStatusReadsHistory + } else { + botStatus = item.presentationData.strings.Bot_GroupStatusDoesNotReadHistory + } + statusAttributedString = NSAttributedString(string: botStatus, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor) + } else if let presence = item.presence as? TelegramUserPresence { + let timestamp = CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970 + let (string, _) = stringAndActivityForUserPresence(strings: item.presentationData.strings, dateTimeFormat: item.dateTimeFormat, presence: presence, relativeTo: Int32(timestamp)) + statusAttributedString = NSAttributedString(string: string, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor) } else { - botStatus = item.presentationData.strings.Bot_GroupStatusDoesNotReadHistory + statusAttributedString = NSAttributedString(string: item.presentationData.strings.LastSeen_Offline, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor) } - statusAttributedString = NSAttributedString(string: botStatus, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor) - } else if let presence = item.presence as? 
TelegramUserPresence { - let timestamp = CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970 - let (string, _) = stringAndActivityForUserPresence(strings: item.presentationData.strings, dateTimeFormat: item.dateTimeFormat, presence: presence, relativeTo: Int32(timestamp)) - statusAttributedString = NSAttributedString(string: string, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor) - } else { - statusAttributedString = NSAttributedString(string: item.presentationData.strings.LastSeen_Offline, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor) - } - case let .text(text, textColor): - let textColorValue: UIColor - switch textColor { - case .generic: - textColorValue = item.presentationData.theme.list.itemSecondaryTextColor - case .accent: - textColorValue = item.presentationData.theme.list.itemAccentColor - wavesColor = textColorValue - case .constructive: - textColorValue = UIColor(rgb: 0x34c759) - case .destructive: - textColorValue = UIColor(rgb: 0xff3b30) - wavesColor = textColorValue - } - statusAttributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue) - case .none: - break + case let .text(text, textColor): + let textColorValue: UIColor + switch textColor { + case .generic: + textColorValue = item.presentationData.theme.list.itemSecondaryTextColor + case .accent: + textColorValue = item.presentationData.theme.list.itemAccentColor + wavesColor = textColorValue + case .constructive: + textColorValue = UIColor(rgb: 0x34c759) + case .destructive: + textColorValue = UIColor(rgb: 0xff3b30) + wavesColor = textColorValue + } + statusAttributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue) + case .none: + break } if let expandedText = item.expandedText, case let .text(text, textColor) = expandedText { @@ -676,18 +767,34 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { expandedRightInset = 0.0 } - let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - 12.0 - rightInset - 30.0 - titleIconsWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) + let constrainedWidth: CGFloat + switch item.style { + case .list: + constrainedWidth = params.width - leftInset - 12.0 - rightInset - 30.0 - titleIconsWidth + case .tile: + constrainedWidth = params.width - 24.0 - 10.0 + } + let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) let (statusLayout, statusApply) = makeStatusLayout(TextNodeLayoutArguments(attributedString: statusAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - 8.0 - rightInset - 30.0, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(TextNodeLayoutArguments(attributedString: expandedStatusAttributedString, backgroundColor: nil, maximumNumberOfLines: 6, truncationType: .end, constrainedSize: 
CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets())) - let insets = UIEdgeInsets() - + let titleSpacing: CGFloat = statusLayout.size.height == 0.0 ? 0.0 : 1.0 let minHeight: CGFloat = titleLayout.size.height + verticalInset * 2.0 let rawHeight: CGFloat = verticalInset * 2.0 + titleLayout.size.height + titleSpacing + statusLayout.size.height - let contentSize = CGSize(width: params.width, height: max(minHeight, rawHeight)) + let contentSize: CGSize + let insets: UIEdgeInsets + switch item.style { + case .list: + contentSize = CGSize(width: params.width, height: max(minHeight, rawHeight)) + insets = UIEdgeInsets() + case .tile: + contentSize = tileSize + insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: !last ? 6.0 : 0.0, right: 0.0) + } + let separatorHeight = UIScreenPixel let layout = ListViewItemNodeLayout(contentSize: contentSize, insets: insets) @@ -743,7 +850,36 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.layoutParams = (item, params, first, last) strongSelf.wavesColor = wavesColor - let nonExtractedRect = CGRect(origin: CGPoint(x: 16.0, y: 0.0), size: CGSize(width: layout.contentSize.width - 32.0, height: layout.contentSize.height)) + + let nonExtractedRect: CGRect + let avatarFrame: CGRect + let titleFrame: CGRect + let animationSize: CGSize + let animationFrame: CGRect + let animationScale: CGFloat + + switch item.style { + case .list: + nonExtractedRect = CGRect(origin: CGPoint(x: 16.0, y: 0.0), size: CGSize(width: layout.contentSize.width - 32.0, height: layout.contentSize.height)) + avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)) + animationSize = CGSize(width: 36.0, height: 36.0) + animationScale = 1.0 + animationFrame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height) + titleFrame = CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size) + case .tile: + nonExtractedRect = CGRect(origin: CGPoint(), size: layout.contentSize) + strongSelf.containerNode.transform = CATransform3DMakeRotation(CGFloat.pi / 2.0, 0.0, 0.0, 1.0) + strongSelf.statusNode.isHidden = true + strongSelf.expandedStatusNode.isHidden = true + avatarFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - avatarSize) / 2.0), y: 13.0), size: CGSize(width: avatarSize, height: avatarSize)) + + let textWidth: CGFloat = 24.0 + titleLayout.size.width + let textOrigin: CGFloat = floor((layout.size.width - textWidth) / 2.0) - 4.0 + animationSize = CGSize(width: 36.0, height: 36.0) + animationScale = 0.66667 + animationFrame = CGRect(x: textOrigin, y: 53.0, width: 24.0, height: 24.0) + titleFrame = CGRect(origin: CGPoint(x: textOrigin + 24.0, y: 61.0), size: titleLayout.size) + } var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0) var extractedHeight = extractedRect.height + expandedStatusLayout.size.height - statusLayout.size.height @@ -768,11 +904,16 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { } else { strongSelf.backgroundImageNode.frame = nonExtractedRect } + if case .tile = item.style, strongSelf.backgroundImageNode.image == 
nil { + strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: UIColor(rgb: 0x1c1c1e)) + strongSelf.backgroundImageNode.alpha = 1.0 + } strongSelf.extractedBackgroundImageNode.frame = strongSelf.backgroundImageNode.bounds strongSelf.contextSourceNode.contentRect = extractedRect strongSelf.containerNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize) strongSelf.contextSourceNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize) + strongSelf.contentWrapperNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize) strongSelf.offsetContainerNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize) strongSelf.contextSourceNode.contentNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize) strongSelf.containerNode.isGestureEnabled = item.contextAction != nil @@ -843,13 +984,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { strongSelf.insertSubnode(strongSelf.bottomStripeNode, at: 1) } - strongSelf.topStripeNode.isHidden = first - strongSelf.bottomStripeNode.isHidden = last + strongSelf.topStripeNode.isHidden = first || item.style == .tile + strongSelf.bottomStripeNode.isHidden = last || item.style == .tile transition.updateFrame(node: strongSelf.topStripeNode, frame: CGRect(origin: CGPoint(x: leftInset, y: -min(insets.top, separatorHeight)), size: CGSize(width: layoutSize.width, height: separatorHeight))) transition.updateFrame(node: strongSelf.bottomStripeNode, frame: CGRect(origin: CGPoint(x: leftInset, y: contentSize.height + -separatorHeight), size: CGSize(width: layoutSize.width - leftInset, height: separatorHeight))) - transition.updateFrame(node: strongSelf.titleNode, frame: CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size)) + transition.updateFrame(node: strongSelf.titleNode, frame: titleFrame) transition.updateFrame(node: strongSelf.statusNode, frame: CGRect(origin: CGPoint(x: leftInset, y: strongSelf.titleNode.frame.maxY + titleSpacing), size: statusLayout.size)) transition.updateFrame(node: strongSelf.expandedStatusNode, frame: CGRect(origin: CGPoint(x: leftInset, y: strongSelf.titleNode.frame.maxY + titleSpacing), size: expandedStatusLayout.size)) @@ -872,7 +1013,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { credibilityIconNode.removeFromSupernode() } - let avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize)) transition.updateFrameAsPositionAndBounds(node: strongSelf.avatarNode, frame: avatarFrame) let blobFrame = avatarFrame.insetBy(dx: -14.0, dy: -14.0) @@ -964,6 +1104,10 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { nodeToAnimateIn = animationNode } + var color = color + if color.rgb == 0x979797 && item.style == .tile { + color = UIColor(rgb: 0xffffff) + } animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: false, color: color), animated: true) strongSelf.actionButtonNode.isUserInteractionEnabled = false } else if let animationNode = strongSelf.animationNode { @@ -1040,31 +1184,42 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode { node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2) } - let videoSize = CGSize(width: avatarSize, height: avatarSize) + let videoSize = tileSize let videoNode = item.getVideo() if let current = strongSelf.videoNode, current !== 
videoNode { current.removeFromSupernode() } - let actionOffset: CGFloat = 0.0 + strongSelf.videoNode = videoNode - if let videoNode = videoNode { - videoNode.updateLayout(size: videoSize, transition: .immediate) - if videoNode.supernode !== strongSelf.avatarNode { - videoNode.clipsToBounds = true - videoNode.cornerRadius = avatarSize / 2.0 - strongSelf.avatarNode.addSubnode(videoNode) - } - - videoNode.frame = CGRect(origin: CGPoint(), size: videoSize) + + switch item.style { + case .list: + strongSelf.videoContainerNode.frame = strongSelf.avatarNode.frame + strongSelf.videoContainerNode.cornerRadius = avatarSize / 2.0 + case .tile: + strongSelf.videoContainerNode.frame = CGRect(origin: CGPoint(), size: tileSize) + strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius + } + + if let videoNode = videoNode { + strongSelf.avatarNode.alpha = 0.0 + videoNode.updateLayout(size: videoSize, isLandscape: false, transition: .immediate) + if videoNode.supernode !== strongSelf.avatarNode { + videoNode.clipsToBounds = true + strongSelf.videoContainerNode.addSubnode(videoNode) + } + + videoNode.position = CGPoint(x: strongSelf.videoContainerNode.frame.width / 2.0, y: strongSelf.videoContainerNode.frame.height / 2.0) + videoNode.bounds = CGRect(origin: CGPoint(), size: videoSize) } - let animationSize = CGSize(width: 36.0, height: 36.0) strongSelf.iconNode?.frame = CGRect(origin: CGPoint(), size: animationSize) strongSelf.animationNode?.frame = CGRect(origin: CGPoint(), size: animationSize) strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0) - strongSelf.actionButtonNode.frame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset + actionOffset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height) + strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0) + strongSelf.actionButtonNode.frame = animationFrame if let presence = item.presence as? 
TelegramUserPresence { strongSelf.peerPresenceManager?.reset(presence: presence) diff --git a/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaAction.swift b/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaAction.swift index af8314b29f..d21d94f107 100644 --- a/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaAction.swift +++ b/submodules/TelegramCore/Sources/ApiUtils/TelegramMediaAction.swift @@ -73,11 +73,11 @@ func telegramMediaActionFromApiAction(_ action: Api.MessageAction) -> TelegramMe } case let .messageActionSetMessagesTTL(period): return TelegramMediaAction(action: .messageAutoremoveTimeoutUpdated(period)) - /*case let .messageActionGroupCallScheduled(call, scheduleDate): + case let .messageActionGroupCallScheduled(call, scheduleDate): switch call { case let .inputGroupCall(id, accessHash): return TelegramMediaAction(action: .groupPhoneCall(callId: id, accessHash: accessHash, scheduleDate: scheduleDate, duration: nil)) - }*/ + } } } diff --git a/submodules/TelegramCore/Sources/BotPaymentForm.swift b/submodules/TelegramCore/Sources/BotPaymentForm.swift index 7b346c7cae..772fbe852a 100644 --- a/submodules/TelegramCore/Sources/BotPaymentForm.swift +++ b/submodules/TelegramCore/Sources/BotPaymentForm.swift @@ -121,7 +121,7 @@ public enum BotPaymentFormRequestError { extension BotPaymentInvoice { init(apiInvoice: Api.Invoice) { switch apiInvoice { - case let .invoice(flags, currency, prices, minTipAmount, maxTipAmount, defaultTipAmount): + case let .invoice(flags, currency, prices, maxTipAmount, suggestedTipAmounts): var fields = BotPaymentInvoiceFields() if (flags & (1 << 1)) != 0 { fields.insert(.name) @@ -145,9 +145,9 @@ extension BotPaymentInvoice { fields.insert(.emailAvailableToProvider) } var parsedTip: BotPaymentInvoice.Tip? - if let minTipAmount = minTipAmount, let maxTipAmount = maxTipAmount, let defaultTipAmount = defaultTipAmount { - parsedTip = BotPaymentInvoice.Tip(min: minTipAmount, max: maxTipAmount, default: defaultTipAmount) - } +// if let minTipAmount = minTipAmount, let maxTipAmount = maxTipAmount, let defaultTipAmount = defaultTipAmount { +// parsedTip = BotPaymentInvoice.Tip(min: minTipAmount, max: maxTipAmount, default: defaultTipAmount) +// } self.init(isTest: (flags & (1 << 0)) != 0, requestedFields: fields, currency: currency, prices: prices.map { switch $0 { case let .labeledPrice(label, amount): diff --git a/submodules/TelegramCore/Sources/GroupCalls.swift b/submodules/TelegramCore/Sources/GroupCalls.swift index 07c1717ffc..93374cf163 100644 --- a/submodules/TelegramCore/Sources/GroupCalls.swift +++ b/submodules/TelegramCore/Sources/GroupCalls.swift @@ -43,7 +43,7 @@ public struct GroupCallSummary: Equatable { extension GroupCallInfo { init?(_ call: Api.GroupCall) { switch call { - case let .groupCall(flags, id, accessHash, participantCount, params, title, streamDcId, recordStartDate/*, scheduleDate*/, _): + case let .groupCall(flags, id, accessHash, participantCount, params, title, streamDcId, recordStartDate, scheduleDate, _): var clientParams: String? 
if let params = params { switch params { @@ -177,8 +177,7 @@ public func createGroupCall(account: Account, peerId: PeerId) -> Signal mapError { error -> CreateGroupCallError in if error.errorDescription == "ANONYMOUS_CALLS_DISABLED" { return .anonymousNotAllowed @@ -466,7 +465,7 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal maybeParsedCall = GroupCallInfo(call) switch call { - case let .groupCall(flags, _, _, _, _, title, _, recordStartDate/*, scheduleDate*/, _): + case let .groupCall(flags, _, _, _, _, title, _, recordStartDate, scheduleDate, _): let isMuted = (flags & (1 << 1)) != 0 let canChange = (flags & (1 << 2)) != 0 state.defaultParticipantsAreMuted = GroupCallParticipantsContext.State.DefaultParticipantsAreMuted(isMuted: isMuted, canChange: canChange) diff --git a/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift b/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift index 0180fd7ac5..96684a8838 100644 --- a/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift +++ b/submodules/TelegramCore/Sources/State/AccountStateManagementUtils.swift @@ -2991,7 +2991,7 @@ func replayFinalState(accountManager: AccountManager, postbox: Postbox, accountP }) switch call { - case let .groupCall(flags, _, _, _, _, title, streamDcId, recordStartDate, _): + case let .groupCall(flags, _, _, _, _, title, streamDcId, recordStartDate, _, _): let isMuted = (flags & (1 << 1)) != 0 let canChange = (flags & (1 << 2)) != 0 let defaultParticipantsAreMuted = GroupCallParticipantsContext.State.DefaultParticipantsAreMuted(isMuted: isMuted, canChange: canChange) diff --git a/submodules/TelegramCore/Sources/StoreMessage_Telegram.swift b/submodules/TelegramCore/Sources/StoreMessage_Telegram.swift index 1c6cd9c452..19efa3f4c5 100644 --- a/submodules/TelegramCore/Sources/StoreMessage_Telegram.swift +++ b/submodules/TelegramCore/Sources/StoreMessage_Telegram.swift @@ -196,7 +196,7 @@ func apiMessagePeerIds(_ message: Api.Message) -> [PeerId] { } switch action { - case .messageActionChannelCreate, .messageActionChatDeletePhoto, .messageActionChatEditPhoto, .messageActionChatEditTitle, .messageActionEmpty, .messageActionPinMessage, .messageActionHistoryClear, .messageActionGameScore, .messageActionPaymentSent, .messageActionPaymentSentMe, .messageActionPhoneCall, .messageActionScreenshotTaken, .messageActionCustomAction, .messageActionBotAllowed, .messageActionSecureValuesSent, .messageActionSecureValuesSentMe, .messageActionContactSignUp, .messageActionGroupCall, .messageActionSetMessagesTTL/*, .messageActionGroupCallScheduled*/: + case .messageActionChannelCreate, .messageActionChatDeletePhoto, .messageActionChatEditPhoto, .messageActionChatEditTitle, .messageActionEmpty, .messageActionPinMessage, .messageActionHistoryClear, .messageActionGameScore, .messageActionPaymentSent, .messageActionPaymentSentMe, .messageActionPhoneCall, .messageActionScreenshotTaken, .messageActionCustomAction, .messageActionBotAllowed, .messageActionSecureValuesSent, .messageActionSecureValuesSentMe, .messageActionContactSignUp, .messageActionGroupCall, .messageActionSetMessagesTTL, .messageActionGroupCallScheduled: break case let .messageActionChannelMigrateFrom(_, chatId): result.append(PeerId(namespace: Namespaces.Peer.CloudGroup, id: PeerId.Id._internalFromInt32Value(chatId))) diff --git a/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/Contents.json new 
diff --git a/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/Contents.json
new file mode 100644
index 0000000000..d18942689f
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/Contents.json
@@ -0,0 +1,12 @@
+{
+  "images" : [
+    {
+      "filename" : "ic_vc_volume.pdf",
+      "idiom" : "universal"
+    }
+  ],
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}
diff --git a/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/ic_vc_volume.pdf b/submodules/TelegramUI/Images.xcassets/Call/Speaking.imageset/ic_vc_volume.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..bbebd6cba6fd26eea27788ecf7ab085fe9a96ad0
GIT binary patch
literal 4977
[base85-encoded PDF payload omitted]

literal 0
HcmV?d00001

diff --git a/submodules/TelegramUI/Images.xcassets/Call/Video.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/Video.imageset/Contents.json
new file mode 100644
index 0000000000..c01e0f97c1
--- /dev/null
+++ b/submodules/TelegramUI/Images.xcassets/Call/Video.imageset/Contents.json
@@ -0,0 +1,12 @@
+{
+  "images" : [
+    {
+      "filename" : "ic_vc_camera.pdf",
+      "idiom" : "universal"
+    }
+  ],
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}
diff --git a/submodules/TelegramUI/Images.xcassets/Call/Video.imageset/ic_vc_camera.pdf b/submodules/TelegramUI/Images.xcassets/Call/Video.imageset/ic_vc_camera.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..0fdb8285cc73793a331f71b00b6bf46cb574f84a
GIT binary patch
literal 4792
[base85-encoded PDF payload omitted]

literal 0
HcmV?d00001
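The two new image sets register ic_vc_volume.pdf and ic_vc_camera.pdf under the asset names Call/Speaking and Call/Video. A minimal usage sketch; the bundle hosting the catalog is an assumption here (the project normally resolves these icons through its own UIImage(bundleImageName:) helper rather than UIKit's named lookup):

import UIKit

// Load the new voice-chat icons from the asset catalog; nil if the asset is missing.
let speakingIcon = UIImage(named: "Call/Speaking", in: Bundle.main, compatibleWith: nil)
let videoIcon = UIImage(named: "Call/Video", in: Bundle.main, compatibleWith: nil)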