Video Chats Improvements

Ilya Laktyushin 2021-04-03 01:17:45 +03:00
parent 7d8fbdd3bc
commit b1d1d52fd3
18 changed files with 809 additions and 250 deletions

View File

@ -324,6 +324,7 @@ public protocol PresentationGroupCall: class {
func lowerHand()
func requestVideo()
func disableVideo()
func switchVideoCamera()
func updateDefaultParticipantsAreMuted(isMuted: Bool)
func setVolume(peerId: PeerId, volume: Int32, sync: Bool)
func setFullSizeVideo(peerId: PeerId?)

View File

@ -595,6 +595,13 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
self.animateOut(shared: true, completion: {
})
self.completed?(peerIds)
Queue.mainQueue().after(0.1) {
if self.hapticFeedback == nil {
self.hapticFeedback = HapticFeedback()
}
self.hapticFeedback?.success()
}
}
let fromForeignApp = self.fromForeignApp
self.shareDisposable.set((signal
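
The hunk above defers a success haptic until just after the share sheet's dismiss animation starts, creating the feedback object lazily on first use. A minimal self-contained sketch of the same pattern, using UINotificationFeedbackGenerator and DispatchQueue as stand-ins for the project's HapticFeedback wrapper and Queue.mainQueue():

import UIKit

final class ShareSuccessFeedback {
    // Created lazily so the Taptic Engine is only engaged when actually needed.
    private var generator: UINotificationFeedbackGenerator?

    func notifySuccess(afterDelay delay: TimeInterval = 0.1) {
        // Small delay so the haptic lands together with the dismiss animation.
        DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
            if self.generator == nil {
                self.generator = UINotificationFeedbackGenerator()
            }
            self.generator?.notificationOccurred(.success)
        }
    }
}
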

View File

@ -7,7 +7,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1255641564] = { return parseString($0) }
dict[-1240849242] = { return Api.messages.StickerSet.parse_stickerSet($0) }
dict[2004925620] = { return Api.GroupCall.parse_groupCallDiscarded($0) }
dict[-1061026514] = { return Api.GroupCall.parse_groupCall($0) }
dict[-916691372] = { return Api.GroupCall.parse_groupCall($0) }
dict[-457104426] = { return Api.InputGeoPoint.parse_inputGeoPointEmpty($0) }
dict[1210199983] = { return Api.InputGeoPoint.parse_inputGeoPoint($0) }
dict[-784000893] = { return Api.payments.ValidatedRequestedInfo.parse_validatedRequestedInfo($0) }
@ -587,7 +587,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[978610270] = { return Api.messages.Messages.parse_messagesSlice($0) }
dict[1682413576] = { return Api.messages.Messages.parse_channelMessages($0) }
dict[1951620897] = { return Api.messages.Messages.parse_messagesNotModified($0) }
dict[615970509] = { return Api.Invoice.parse_invoice($0) }
dict[215516896] = { return Api.Invoice.parse_invoice($0) }
dict[1933519201] = { return Api.PeerSettings.parse_peerSettings($0) }
dict[1577067778] = { return Api.auth.SentCode.parse_sentCode($0) }
dict[480546647] = { return Api.InputChatPhoto.parse_inputChatPhotoEmpty($0) }
@ -825,6 +825,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[2047704898] = { return Api.MessageAction.parse_messageActionGroupCall($0) }
dict[1991897370] = { return Api.MessageAction.parse_messageActionInviteToGroupCall($0) }
dict[-1441072131] = { return Api.MessageAction.parse_messageActionSetMessagesTTL($0) }
dict[-1281329567] = { return Api.MessageAction.parse_messageActionGroupCallScheduled($0) }
dict[1399245077] = { return Api.PhoneCall.parse_phoneCallEmpty($0) }
dict[462375633] = { return Api.PhoneCall.parse_phoneCallWaiting($0) }
dict[-2014659757] = { return Api.PhoneCall.parse_phoneCallRequested($0) }
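
These dictionaries map TL constructor IDs (signed Int32 values) to deserializer closures; groupCall and invoice get new IDs here because their layouts changed, and messageActionGroupCallScheduled is registered as a brand-new constructor. A simplified, self-contained sketch of the dispatch that consumes such a table — ToyReader is an illustrative stand-in for BufferReader, and the real table is code-generated from the TL schema:

import Foundation

final class ToyReader {
    private let words: [Int32]
    private var index = 0
    init(_ words: [Int32]) { self.words = words }
    func readInt32() -> Int32? {
        guard index < words.count else { return nil }
        defer { index += 1 }
        return words[index]
    }
}

// Constructor IDs taken from the updated table above.
let toyParsers: [Int32: (ToyReader) -> Any?] = [
    -916691372: { _ in "Api.GroupCall.groupCall" as Any },
    215516896: { _ in "Api.Invoice.invoice" as Any },
    -1281329567: { _ in "Api.MessageAction.messageActionGroupCallScheduled" as Any }
]

// Every boxed TL object is prefixed by its constructor ID; an ID missing from
// the table makes the parse fail, which is why the table must be regenerated
// whenever the schema layer changes.
func parseBoxed(_ reader: ToyReader) -> Any? {
    guard let signature = reader.readInt32(), let parser = toyParsers[signature] else {
        return nil
    }
    return parser(reader)
}
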

View File

@ -1,7 +1,7 @@
public extension Api {
public enum GroupCall: TypeConstructorDescription {
case groupCallDiscarded(id: Int64, accessHash: Int64, duration: Int32)
case groupCall(flags: Int32, id: Int64, accessHash: Int64, participantsCount: Int32, params: Api.DataJSON?, title: String?, streamDcId: Int32?, recordStartDate: Int32?, version: Int32)
case groupCall(flags: Int32, id: Int64, accessHash: Int64, participantsCount: Int32, params: Api.DataJSON?, title: String?, streamDcId: Int32?, recordStartDate: Int32?, scheduleDate: Int32?, version: Int32)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
@ -13,9 +13,9 @@ public extension Api {
serializeInt64(accessHash, buffer: buffer, boxed: false)
serializeInt32(duration, buffer: buffer, boxed: false)
break
case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let version):
case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let scheduleDate, let version):
if boxed {
buffer.appendInt32(-1061026514)
buffer.appendInt32(-916691372)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeInt64(id, buffer: buffer, boxed: false)
@ -25,6 +25,7 @@ public extension Api {
if Int(flags) & Int(1 << 3) != 0 {serializeString(title!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 4) != 0 {serializeInt32(streamDcId!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 5) != 0 {serializeInt32(recordStartDate!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 7) != 0 {serializeInt32(scheduleDate!, buffer: buffer, boxed: false)}
serializeInt32(version, buffer: buffer, boxed: false)
break
}
@ -34,8 +35,8 @@ public extension Api {
switch self {
case .groupCallDiscarded(let id, let accessHash, let duration):
return ("groupCallDiscarded", [("id", id), ("accessHash", accessHash), ("duration", duration)])
case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let version):
return ("groupCall", [("flags", flags), ("id", id), ("accessHash", accessHash), ("participantsCount", participantsCount), ("params", params), ("title", title), ("streamDcId", streamDcId), ("recordStartDate", recordStartDate), ("version", version)])
case .groupCall(let flags, let id, let accessHash, let participantsCount, let params, let title, let streamDcId, let recordStartDate, let scheduleDate, let version):
return ("groupCall", [("flags", flags), ("id", id), ("accessHash", accessHash), ("participantsCount", participantsCount), ("params", params), ("title", title), ("streamDcId", streamDcId), ("recordStartDate", recordStartDate), ("scheduleDate", scheduleDate), ("version", version)])
}
}
@ -76,7 +77,9 @@ public extension Api {
var _8: Int32?
if Int(_1!) & Int(1 << 5) != 0 {_8 = reader.readInt32() }
var _9: Int32?
_9 = reader.readInt32()
if Int(_1!) & Int(1 << 7) != 0 {_9 = reader.readInt32() }
var _10: Int32?
_10 = reader.readInt32()
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
@ -85,9 +88,10 @@ public extension Api {
let _c6 = (Int(_1!) & Int(1 << 3) == 0) || _6 != nil
let _c7 = (Int(_1!) & Int(1 << 4) == 0) || _7 != nil
let _c8 = (Int(_1!) & Int(1 << 5) == 0) || _8 != nil
let _c9 = _9 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 && _c8 && _c9 {
return Api.GroupCall.groupCall(flags: _1!, id: _2!, accessHash: _3!, participantsCount: _4!, params: _5, title: _6, streamDcId: _7, recordStartDate: _8, version: _9!)
let _c9 = (Int(_1!) & Int(1 << 7) == 0) || _9 != nil
let _c10 = _10 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 && _c7 && _c8 && _c9 && _c10 {
return Api.GroupCall.groupCall(flags: _1!, id: _2!, accessHash: _3!, participantsCount: _4!, params: _5, title: _6, streamDcId: _7, recordStartDate: _8, scheduleDate: _9, version: _10!)
}
else {
return nil
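
The groupCall constructor gains an optional scheduleDate guarded by bit 7 of flags: the field is written and read only when that bit is set, and the _c9 check above verifies that a field advertised by the flags was actually decoded. A self-contained sketch of this flag-gated pattern — ToyBuffer and the helper below are simplified stand-ins for the generated Buffer/serializeInt32 code:

import Foundation

struct ToyBuffer {
    private(set) var bytes: [UInt8] = []
    mutating func appendInt32(_ value: Int32) {
        withUnsafeBytes(of: value.littleEndian) { bytes.append(contentsOf: $0) }
    }
}

// Writes the tail of groupCall: recordStartDate (bit 5) and the new
// scheduleDate (bit 7) only hit the wire when their flag bits are set.
func serializeGroupCallTail(flags: Int32, recordStartDate: Int32?, scheduleDate: Int32?, version: Int32, into buffer: inout ToyBuffer) {
    if flags & (1 << 5) != 0 { buffer.appendInt32(recordStartDate!) }
    if flags & (1 << 7) != 0 { buffer.appendInt32(scheduleDate!) }
    buffer.appendInt32(version)
}

// Mirror of the _c9 consistency check on the read side: either the bit is
// clear, or a value must have been read.
func scheduleDateIsConsistent(flags: Int32, scheduleDate: Int32?) -> Bool {
    return (flags & (1 << 7) == 0) || scheduleDate != nil
}
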
@ -15026,13 +15030,13 @@ public extension Api {
}
public enum Invoice: TypeConstructorDescription {
case invoice(flags: Int32, currency: String, prices: [Api.LabeledPrice], minTipAmount: Int64?, maxTipAmount: Int64?, defaultTipAmount: Int64?)
case invoice(flags: Int32, currency: String, prices: [Api.LabeledPrice], maxTipAmount: Int64?, suggestedTipAmounts: [Int64]?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .invoice(let flags, let currency, let prices, let minTipAmount, let maxTipAmount, let defaultTipAmount):
case .invoice(let flags, let currency, let prices, let maxTipAmount, let suggestedTipAmounts):
if boxed {
buffer.appendInt32(615970509)
buffer.appendInt32(215516896)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeString(currency, buffer: buffer, boxed: false)
@ -15041,17 +15045,20 @@ public extension Api {
for item in prices {
item.serialize(buffer, true)
}
if Int(flags) & Int(1 << 8) != 0 {serializeInt64(minTipAmount!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 8) != 0 {serializeInt64(maxTipAmount!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 8) != 0 {serializeInt64(defaultTipAmount!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 8) != 0 {buffer.appendInt32(481674261)
buffer.appendInt32(Int32(suggestedTipAmounts!.count))
for item in suggestedTipAmounts! {
serializeInt64(item, buffer: buffer, boxed: false)
}}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .invoice(let flags, let currency, let prices, let minTipAmount, let maxTipAmount, let defaultTipAmount):
return ("invoice", [("flags", flags), ("currency", currency), ("prices", prices), ("minTipAmount", minTipAmount), ("maxTipAmount", maxTipAmount), ("defaultTipAmount", defaultTipAmount)])
case .invoice(let flags, let currency, let prices, let maxTipAmount, let suggestedTipAmounts):
return ("invoice", [("flags", flags), ("currency", currency), ("prices", prices), ("maxTipAmount", maxTipAmount), ("suggestedTipAmounts", suggestedTipAmounts)])
}
}
@ -15066,18 +15073,17 @@ public extension Api {
}
var _4: Int64?
if Int(_1!) & Int(1 << 8) != 0 {_4 = reader.readInt64() }
var _5: Int64?
if Int(_1!) & Int(1 << 8) != 0 {_5 = reader.readInt64() }
var _6: Int64?
if Int(_1!) & Int(1 << 8) != 0 {_6 = reader.readInt64() }
var _5: [Int64]?
if Int(_1!) & Int(1 << 8) != 0 {if let _ = reader.readInt32() {
_5 = Api.parseVector(reader, elementSignature: 570911930, elementType: Int64.self)
} }
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = (Int(_1!) & Int(1 << 8) == 0) || _4 != nil
let _c5 = (Int(_1!) & Int(1 << 8) == 0) || _5 != nil
let _c6 = (Int(_1!) & Int(1 << 8) == 0) || _6 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 {
return Api.Invoice.invoice(flags: _1!, currency: _2!, prices: _3!, minTipAmount: _4, maxTipAmount: _5, defaultTipAmount: _6)
if _c1 && _c2 && _c3 && _c4 && _c5 {
return Api.Invoice.invoice(flags: _1!, currency: _2!, prices: _3!, maxTipAmount: _4, suggestedTipAmounts: _5)
}
else {
return nil
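
invoice drops minTipAmount/defaultTipAmount in favour of maxTipAmount plus a suggestedTipAmounts vector, still gated by bit 8 of flags. On the wire the vector is framed by the generic Vector<> constructor (481674261, i.e. 0x1cb5c415), an element count, and the bare Int64 elements — exactly what the serializer above writes. A self-contained sketch, with WireBuffer standing in for the generated Buffer helpers:

import Foundation

struct WireBuffer {
    private(set) var bytes: [UInt8] = []
    mutating func appendInt32(_ value: Int32) {
        withUnsafeBytes(of: value.littleEndian) { bytes.append(contentsOf: $0) }
    }
    mutating func appendInt64(_ value: Int64) {
        withUnsafeBytes(of: value.littleEndian) { bytes.append(contentsOf: $0) }
    }
}

func serializeSuggestedTipAmounts(flags: Int32, amounts: [Int64]?, into buffer: inout WireBuffer) {
    guard flags & (1 << 8) != 0 else { return }   // optional field, gated by bit 8
    buffer.appendInt32(481674261)                 // generic Vector<> constructor
    buffer.appendInt32(Int32(amounts!.count))     // element count
    for amount in amounts! {
        buffer.appendInt64(amount)                // bare (unboxed) long elements
    }
}
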
@ -20433,6 +20439,7 @@ public extension Api {
case messageActionGroupCall(flags: Int32, call: Api.InputGroupCall, duration: Int32?)
case messageActionInviteToGroupCall(call: Api.InputGroupCall, users: [Int32])
case messageActionSetMessagesTTL(period: Int32)
case messageActionGroupCallScheduled(call: Api.InputGroupCall, scheduleDate: Int32)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
@ -20637,6 +20644,13 @@ public extension Api {
}
serializeInt32(period, buffer: buffer, boxed: false)
break
case .messageActionGroupCallScheduled(let call, let scheduleDate):
if boxed {
buffer.appendInt32(-1281329567)
}
call.serialize(buffer, true)
serializeInt32(scheduleDate, buffer: buffer, boxed: false)
break
}
}
@ -20696,6 +20710,8 @@ public extension Api {
return ("messageActionInviteToGroupCall", [("call", call), ("users", users)])
case .messageActionSetMessagesTTL(let period):
return ("messageActionSetMessagesTTL", [("period", period)])
case .messageActionGroupCallScheduled(let call, let scheduleDate):
return ("messageActionGroupCallScheduled", [("call", call), ("scheduleDate", scheduleDate)])
}
}
@ -21033,6 +21049,22 @@ public extension Api {
return nil
}
}
public static func parse_messageActionGroupCallScheduled(_ reader: BufferReader) -> MessageAction? {
var _1: Api.InputGroupCall?
if let signature = reader.readInt32() {
_1 = Api.parse(reader, signature: signature) as? Api.InputGroupCall
}
var _2: Int32?
_2 = reader.readInt32()
let _c1 = _1 != nil
let _c2 = _2 != nil
if _c1 && _c2 {
return Api.MessageAction.messageActionGroupCallScheduled(call: _1!, scheduleDate: _2!)
}
else {
return nil
}
}
}
public enum PhoneCall: TypeConstructorDescription {

View File

@ -7697,12 +7697,15 @@ public extension Api {
})
}
public static func createGroupCall(peer: Api.InputPeer, randomId: Int32) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
public static func createGroupCall(flags: Int32, peer: Api.InputPeer, randomId: Int32, title: String?, scheduleDate: Int32?) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
let buffer = Buffer()
buffer.appendInt32(-1120031776)
buffer.appendInt32(1221445336)
serializeInt32(flags, buffer: buffer, boxed: false)
peer.serialize(buffer, true)
serializeInt32(randomId, buffer: buffer, boxed: false)
return (FunctionDescription(name: "phone.createGroupCall", parameters: [("peer", peer), ("randomId", randomId)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
if Int(flags) & Int(1 << 0) != 0 {serializeString(title!, buffer: buffer, boxed: false)}
if Int(flags) & Int(1 << 1) != 0 {serializeInt32(scheduleDate!, buffer: buffer, boxed: false)}
return (FunctionDescription(name: "phone.createGroupCall", parameters: [("flags", flags), ("peer", peer), ("randomId", randomId), ("title", title), ("scheduleDate", scheduleDate)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
let reader = BufferReader(buffer)
var result: Api.Updates?
if let signature = reader.readInt32() {
@ -7926,6 +7929,35 @@ public extension Api {
return result
})
}
public static func toggleGroupCallStartSubscription(call: Api.InputGroupCall, subscribed: Api.Bool) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
let buffer = Buffer()
buffer.appendInt32(563885286)
call.serialize(buffer, true)
subscribed.serialize(buffer, true)
return (FunctionDescription(name: "phone.toggleGroupCallStartSubscription", parameters: [("call", call), ("subscribed", subscribed)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
let reader = BufferReader(buffer)
var result: Api.Updates?
if let signature = reader.readInt32() {
result = Api.parse(reader, signature: signature) as? Api.Updates
}
return result
})
}
public static func startScheduledGroupCall(call: Api.InputGroupCall) -> (FunctionDescription, Buffer, DeserializeFunctionResponse<Api.Updates>) {
let buffer = Buffer()
buffer.appendInt32(1451287362)
call.serialize(buffer, true)
return (FunctionDescription(name: "phone.startScheduledGroupCall", parameters: [("call", call)]), buffer, DeserializeFunctionResponse { (buffer: Buffer) -> Api.Updates? in
let reader = BufferReader(buffer)
var result: Api.Updates?
if let signature = reader.readInt32() {
result = Api.parse(reader, signature: signature) as? Api.Updates
}
return result
})
}
}
}
}
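
phone.createGroupCall now takes flags, an optional title (bit 0) and an optional scheduleDate (bit 1), and two new requests are added for scheduled calls: phone.toggleGroupCallStartSubscription and phone.startScheduledGroupCall. A hedged sketch of how a caller could derive the flags value from the optional arguments before invoking the generated function — makeCreateGroupCallArgs is a hypothetical helper, not project API:

// Hypothetical helper: sets exactly the bits the serializer above checks.
func makeCreateGroupCallArgs(title: String?, scheduleDate: Int32?) -> (flags: Int32, title: String?, scheduleDate: Int32?) {
    var flags: Int32 = 0
    if title != nil { flags |= 1 << 0 }          // bit 0 gates `title`
    if scheduleDate != nil { flags |= 1 << 1 }   // bit 1 gates `scheduleDate`
    return (flags, title, scheduleDate)
}

// Example: scheduling a call for a given unix timestamp.
let args = makeCreateGroupCallArgs(title: "Weekly sync", scheduleDate: 1_617_400_800)
// createGroupCall(flags: args.flags, peer: inputPeer, randomId: randomId,
//                 title: args.title, scheduleDate: args.scheduleDate)
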

View File

@ -538,7 +538,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var toneRenderer: PresentationCallToneRenderer?
private var videoCapturer: OngoingCallVideoCapturer?
private var useFrontCamera: Bool = true
private let incomingVideoSourcePromise = Promise<[PeerId: UInt32]>([:])
public var incomingVideoSources: Signal<[PeerId: UInt32], NoError> {
return self.incomingVideoSourcePromise.get()
@ -2008,6 +2008,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
public func switchVideoCamera() {
self.useFrontCamera = !self.useFrontCamera
self.videoCapturer?.switchVideoInput(isFront: self.useFrontCamera)
}
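
switchVideoCamera() simply flips the stored useFrontCamera flag and forwards it to the capturer. A minimal stand-alone sketch of the same toggle, with a protocol standing in for OngoingCallVideoCapturer (an assumption for illustration):

protocol VideoInputSwitching: AnyObject {
    func switchVideoInput(isFront: Bool)
}

final class CameraSwitcher {
    private weak var capturer: VideoInputSwitching?
    private var useFrontCamera = true   // mirrors the new default in PresentationGroupCallImpl

    init(capturer: VideoInputSwitching) { self.capturer = capturer }

    func switchVideoCamera() {
        self.useFrontCamera = !self.useFrontCamera
        self.capturer?.switchVideoInput(isFront: self.useFrontCamera)
    }
}
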
public func setVolume(peerId: PeerId, volume: Int32, sync: Bool) {
for (ssrc, id) in self.ssrcMapping {
if id == peerId {

View File

@ -57,9 +57,9 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
private var currentParams: (size: CGSize, buttonSize: CGSize, state: VoiceChatActionButton.State, dark: Bool, small: Bool, title: String, subtitle: String, snap: Bool)?
private var activePromise = ValuePromise<Bool>(false)
private var outerColorPromise = ValuePromise<UIColor?>(nil)
var outerColor: Signal<UIColor?, NoError> {
return outerColorPromise.get()
private var outerColorPromise = Promise<(UIColor?, UIColor?)>((nil, nil))
var outerColor: Signal<(UIColor?, UIColor?), NoError> {
return self.outerColorPromise.get()
}
var connectingColor: UIColor = UIColor(rgb: 0xb6b6bb) {
@ -167,8 +167,8 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self?.activePromise.set(active)
}
self.backgroundNode.updatedOuterColor = { [weak self] color in
self?.outerColorPromise.set(color)
self.backgroundNode.updatedColors = { [weak self] outerColor, activeColor in
self?.outerColorPromise.set(.single((outerColor, activeColor)))
}
}
@ -245,16 +245,17 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 0.0)
} else {
let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.2, curve: .easeInOut) : .immediate
if small {
transition.updateTransformScale(node: self.backgroundNode, scale: self.pressing ? smallScale * 0.9 : smallScale, delay: 0.05)
transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? smallIconScale * 0.9 : smallIconScale, delay: 0.05)
transition.updateAlpha(node: self.titleLabel, alpha: 0.0)
transition.updateAlpha(node: self.subtitleLabel, alpha: 0.0)
} else {
transition.updateTransformScale(node: self.backgroundNode, scale: 1.0, delay: 0.05)
transition.updateTransformScale(node: self.iconNode, scale: self.pressing ? 0.9 : 1.0, delay: 0.05)
transition.updateAlpha(node: self.titleLabel, alpha: 1.0, delay: 0.05)
transition.updateAlpha(node: self.subtitleLabel, alpha: 1.0, delay: 0.05)
}
transition.updateAlpha(node: self.titleLabel, alpha: 1.0, delay: 0.05)
transition.updateAlpha(node: self.subtitleLabel, alpha: 1.0, delay: 0.05)
transition.updateAlpha(layer: self.backgroundNode.maskProgressLayer, alpha: 1.0)
}
@ -265,7 +266,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
private var previousIcon: VoiceChatActionButtonIconAnimationState?
private func applyIconParams() {
guard let (_, _, state, _, _, _, _, snap) = self.currentParams else {
guard let (_, _, state, _, _, _, _, _) = self.currentParams else {
return
}
@ -290,7 +291,6 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.previousIcon = icon
self.iconNode.enqueueState(icon)
// self.iconNode.update(state: VoiceChatMicrophoneNode.State(muted: iconMuted, filled: true, color: iconColor), animated: true)
}
func update(snap: Bool, animated: Bool) {
@ -298,7 +298,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.currentParams = (previous.size, previous.buttonSize, previous.state, previous.dark, previous.small, previous.title, previous.subtitle, snap)
self.backgroundNode.isSnap = snap
self.backgroundNode.glowHidden = snap
self.backgroundNode.glowHidden = snap || previous.small
self.backgroundNode.updateColors()
self.applyParams(animated: animated)
self.applyIconParams()
@ -328,6 +328,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
}
self.applyIconParams()
self.backgroundNode.glowHidden = (self.currentParams?.snap ?? false) || small
self.backgroundNode.isDark = dark
self.backgroundNode.update(state: backgroundState, animated: true)
@ -468,7 +469,7 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
}
var updatedActive: ((Bool) -> Void)?
var updatedOuterColor: ((UIColor?) -> Void)?
var updatedColors: ((UIColor?, UIColor?) -> Void)?
private let backgroundCircleLayer = CAShapeLayer()
private let foregroundCircleLayer = CAShapeLayer()
@ -706,6 +707,7 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
let initialColors = self.foregroundGradientLayer.colors
let outerColor: UIColor?
let activeColor: UIColor?
let targetColors: [CGColor]
let targetScale: CGFloat
switch type {
@ -713,20 +715,24 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
targetColors = [activeBlue.cgColor, green.cgColor, green.cgColor]
targetScale = 0.89
outerColor = UIColor(rgb: 0x21674f)
activeColor = green
case .active:
targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
targetScale = 0.85
outerColor = UIColor(rgb: 0x1d588d)
activeColor = blue
case .connecting:
targetColors = [lightBlue.cgColor, blue.cgColor, blue.cgColor]
targetScale = 0.3
outerColor = nil
activeColor = blue
case .muted:
targetColors = [pink.cgColor, purple.cgColor, purple.cgColor]
targetScale = 0.85
outerColor = UIColor(rgb: 0x3b3474)
activeColor = purple
}
self.updatedOuterColor?(outerColor)
self.updatedColors?(outerColor, activeColor)
self.maskGradientLayer.transform = CATransform3DMakeScale(targetScale, targetScale, 1.0)
if let _ = previousType {
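
The action button's colour reporting widens from a single outer glow colour to an (outer, active) pair, moving from ValuePromise<UIColor?> to Promise<(UIColor?, UIColor?)> — presumably because ValuePromise needs an Equatable value, which a tuple is not. A sketch that mirrors the SwiftSignalKit usage in this diff; the class name is illustrative only:

import SwiftSignalKit
import UIKit

final class ButtonColorReporter {
    private let colorsPromise = Promise<(UIColor?, UIColor?)>((nil, nil))

    var colors: Signal<(UIColor?, UIColor?), NoError> {
        return self.colorsPromise.get()
    }

    func report(outerColor: UIColor?, activeColor: UIColor?) {
        self.colorsPromise.set(.single((outerColor, activeColor)))
    }
}

// Subscription, mirroring actionButtonColorDisposable in the controller:
// let disposable = (reporter.colors
// |> deliverOnMainQueue).start(next: { normalColor, activeColor in
//     // recolour the audio/camera buttons here
// })
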

View File

@ -36,6 +36,8 @@ private let fullscreenBackgroundColor = UIColor(rgb: 0x000000)
private let dimColor = UIColor(white: 0.0, alpha: 0.5)
private let smallButtonSize = CGSize(width: 36.0, height: 36.0)
private let sideButtonSize = CGSize(width: 56.0, height: 56.0)
private let mainVideoHeight: CGFloat = 240.0
private let topPanelHeight: CGFloat = 63.0
private let bottomAreaHeight: CGFloat = 205.0
private let fullscreenBottomAreaHeight: CGFloat = 80.0
@ -168,7 +170,7 @@ final class GroupVideoNode: ASDisplayNode {
private let videoViewContainer: UIView
private let videoView: PresentationCallVideoView
private var validLayout: CGSize?
private var validLayout: (CGSize, Bool)?
var tapped: (() -> Void)?
@ -178,6 +180,8 @@ final class GroupVideoNode: ASDisplayNode {
super.init()
self.isUserInteractionEnabled = false
self.videoViewContainer.addSubview(self.videoView.view)
self.view.addSubview(self.videoViewContainer)
@ -188,8 +192,8 @@ final class GroupVideoNode: ASDisplayNode {
guard let strongSelf = self else {
return
}
if let size = strongSelf.validLayout {
strongSelf.updateLayout(size: size, transition: .immediate)
if let (size, isLandscape) = strongSelf.validLayout {
strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate)
}
}
})
@ -199,8 +203,8 @@ final class GroupVideoNode: ASDisplayNode {
guard let strongSelf = self else {
return
}
if let size = strongSelf.validLayout {
strongSelf.updateLayout(size: size, transition: .immediate)
if let (size, isLandscape) = strongSelf.validLayout {
strongSelf.updateLayout(size: size, isLandscape: isLandscape, transition: .immediate)
}
}
})
@ -214,9 +218,9 @@ final class GroupVideoNode: ASDisplayNode {
}
}
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
self.validLayout = size
self.videoViewContainer.frame = CGRect(origin: CGPoint(), size: size)
func updateLayout(size: CGSize, isLandscape: Bool, transition: ContainedViewLayoutTransition) {
self.validLayout = (size, isLandscape)
transition.updateFrame(view: self.videoViewContainer, frame: CGRect(origin: CGPoint(), size: size))
let orientation = self.videoView.getOrientation()
var aspect = self.videoView.getAspect()
@ -248,10 +252,10 @@ final class GroupVideoNode: ASDisplayNode {
var rotatedVideoSize = CGSize(width: 100.0, height: rotatedAspect * 100.0)
if size.width < 100.0 || true {
rotatedVideoSize = rotatedVideoSize.aspectFilled(size)
} else {
if isLandscape {
rotatedVideoSize = rotatedVideoSize.aspectFitted(size)
} else {
rotatedVideoSize = rotatedVideoSize.aspectFilled(size)
}
if switchOrientation {
@ -262,8 +266,9 @@ final class GroupVideoNode: ASDisplayNode {
rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y)
rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width)
rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height)
self.videoView.view.center = rotatedVideoFrame.center
self.videoView.view.bounds = CGRect(origin: CGPoint(), size: rotatedVideoFrame.size)
transition.updatePosition(layer: self.videoView.view.layer, position: rotatedVideoFrame.center)
transition.updateBounds(layer: self.videoView.view.layer, bounds: CGRect(origin: CGPoint(), size: rotatedVideoFrame.size))
let transition: ContainedViewLayoutTransition = .immediate
transition.updateTransformRotation(view: self.videoView.view, angle: angle)
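
updateLayout now takes an isLandscape flag: in landscape the video is letterboxed inside its container (aspect fit), otherwise it fills the container and is cropped (aspect fill). An illustrative reimplementation of that decision — the two helpers are assumed to behave like the project's CGSize extensions:

import CoreGraphics

extension CGSize {
    func sketchAspectFitted(_ container: CGSize) -> CGSize {
        let scale = min(container.width / max(width, 1.0), container.height / max(height, 1.0))
        return CGSize(width: width * scale, height: height * scale)
    }
    func sketchAspectFilled(_ container: CGSize) -> CGSize {
        let scale = max(container.width / max(width, 1.0), container.height / max(height, 1.0))
        return CGSize(width: width * scale, height: height * scale)
    }
}

// Landscape letterboxes the video (fit); portrait crops it to fill.
func videoSize(rotatedAspect: CGFloat, container: CGSize, isLandscape: Bool) -> CGSize {
    let base = CGSize(width: 100.0, height: rotatedAspect * 100.0)
    return isLandscape ? base.sketchAspectFitted(container) : base.sketchAspectFilled(container)
}
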
@ -276,17 +281,48 @@ private final class MainVideoContainerNode: ASDisplayNode {
private var currentVideoNode: GroupVideoNode?
private var candidateVideoNode: GroupVideoNode?
private let topCornersNode: ASImageNode
private let bottomCornersNode: ASImageNode
private let bottomEdgeNode: ASDisplayNode
private var currentPeer: (PeerId, UInt32)?
private var validLayout: CGSize?
private var validLayout: (CGSize, CGFloat, Bool)?
var tapped: (() -> Void)?
init(context: AccountContext, call: PresentationGroupCall) {
self.context = context
self.call = call
self.topCornersNode = ASImageNode()
self.topCornersNode.displaysAsynchronously = false
self.topCornersNode.image = cornersImage(top: true, bottom: false, dark: true)
self.bottomCornersNode = ASImageNode()
self.bottomCornersNode.displaysAsynchronously = false
self.bottomCornersNode.image = cornersImage(top: false, bottom: true, dark: true)
self.bottomEdgeNode = ASDisplayNode()
self.bottomEdgeNode.backgroundColor = UIColor(rgb: 0x000000)
super.init()
self.backgroundColor = .black
self.clipsToBounds = true
self.backgroundColor = UIColor(rgb: 0x1c1c1e)
self.addSubnode(self.topCornersNode)
self.addSubnode(self.bottomCornersNode)
self.addSubnode(self.bottomEdgeNode)
}
override func didLoad() {
super.didLoad()
self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tap)))
}
@objc private func tap() {
self.tapped?()
}
func updatePeer(peer: (peerId: PeerId, source: UInt32)?, waitForFullSize: Bool) {
@ -315,9 +351,9 @@ private final class MainVideoContainerNode: ASDisplayNode {
strongSelf.currentVideoNode = nil
}
strongSelf.currentVideoNode = videoNode
strongSelf.addSubnode(videoNode)
if let size = strongSelf.validLayout {
strongSelf.update(size: size, transition: .immediate)
strongSelf.insertSubnode(videoNode, belowSubnode: strongSelf.topCornersNode)
if let (size, sideInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate)
}
})
} else {
@ -330,9 +366,9 @@ private final class MainVideoContainerNode: ASDisplayNode {
strongSelf.currentVideoNode = nil
}
strongSelf.currentVideoNode = videoNode
strongSelf.addSubnode(videoNode)
if let size = strongSelf.validLayout {
strongSelf.update(size: size, transition: .immediate)
strongSelf.insertSubnode(videoNode, belowSubnode: strongSelf.topCornersNode)
if let (size, sideInset, isLandscape) = strongSelf.validLayout {
strongSelf.update(size: size, sideInset: sideInset, isLandscape: isLandscape, transition: .immediate)
}
}
}
@ -345,13 +381,17 @@ private final class MainVideoContainerNode: ASDisplayNode {
}
}
func update(size: CGSize, transition: ContainedViewLayoutTransition) {
self.validLayout = size
func update(size: CGSize, sideInset: CGFloat, isLandscape: Bool, transition: ContainedViewLayoutTransition) {
self.validLayout = (size, sideInset, isLandscape)
if let currentVideoNode = self.currentVideoNode {
transition.updateFrame(node: currentVideoNode, frame: CGRect(origin: CGPoint(), size: size))
currentVideoNode.updateLayout(size: size, transition: .immediate)
currentVideoNode.updateLayout(size: size, isLandscape: isLandscape, transition: transition)
}
transition.updateFrame(node: self.topCornersNode, frame: CGRect(x: sideInset, y: 0.0, width: size.width - sideInset * 2.0, height: 50.0))
transition.updateFrame(node: self.bottomCornersNode, frame: CGRect(x: sideInset, y: size.height - 6.0 - 50.0, width: size.width - sideInset * 2.0, height: 50.0))
transition.updateFrame(node: self.bottomEdgeNode, frame: CGRect(x: sideInset, y: size.height - 6.0, width: size.width - sideInset * 2.0, height: 6.0))
}
}
@ -379,7 +419,7 @@ public final class VoiceChatController: ViewController {
let openInvite: () -> Void
let peerContextAction: (PeerEntry, ASDisplayNode, ContextGesture?) -> Void
let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void
let getPeerVideo: (UInt32) -> GroupVideoNode?
let getPeerVideo: (UInt32, Bool) -> GroupVideoNode?
var isExpanded: Bool = false
private var audioLevels: [PeerId: ValuePipe<Float>] = [:]
@ -392,7 +432,7 @@ public final class VoiceChatController: ViewController {
openInvite: @escaping () -> Void,
peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void,
setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void,
getPeerVideo: @escaping (UInt32) -> GroupVideoNode?
getPeerVideo: @escaping (UInt32, Bool) -> GroupVideoNode?
) {
self.updateIsMuted = updateIsMuted
self.openPeer = openPeer
@ -589,7 +629,7 @@ public final class VoiceChatController: ViewController {
}
}
func item(context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListViewItem {
func item(context: AccountContext, presentationData: PresentationData, interaction: Interaction, style: VoiceChatParticipantItem.LayoutStyle) -> ListViewItem {
switch self {
case let .invite(_, _, text):
return VoiceChatActionItem(presentationData: ItemListPresentationData(presentationData), title: text, icon: .generic(UIImage(bundleImageName: "Chat/Context Menu/AddUser")!), action: {
@ -668,9 +708,9 @@ public final class VoiceChatController: ViewController {
let revealOptions: [VoiceChatParticipantItem.RevealOption] = []
return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: peerEntry.presence, text: text, expandedText: expandedText, icon: icon, enabled: true, transparent: false, selectable: true, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: {
return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: peerEntry.presence, text: text, expandedText: expandedText, icon: icon, style: style, enabled: true, transparent: false, selectable: true, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: {
if let ssrc = peerEntry.ssrc {
return interaction.getPeerVideo(ssrc)
return interaction.getPeerVideo(ssrc, style == .tile)
} else {
return nil
}
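
item() now carries a layout style and getPeerVideo receives a second Bool saying whether the request comes from a tile in the horizontal strip; the controller only hands out a video node when that matches the current display mode, so a GroupVideoNode is never parented by two cells at once. A compact sketch of that gate — the DisplayMode enum is a stand-in for the controller's private display mode, which is not shown in this diff:

enum DisplayMode {
    case `default`
    case fullscreen(controlsHidden: Bool)
}

// True only when the requesting cell style matches the display mode.
func shouldProvideVideo(forTile tile: Bool, displayMode: DisplayMode) -> Bool {
    switch displayMode {
    case .fullscreen:
        return tile       // fullscreen uses the horizontal tile strip
    case .default:
        return !tile      // default mode uses the vertical list cells
    }
}
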
@ -687,12 +727,12 @@ public final class VoiceChatController: ViewController {
}
}
private func preparedTransition(from fromEntries: [ListEntry], to toEntries: [ListEntry], isLoading: Bool, isEmpty: Bool, canInvite: Bool, crossFade: Bool, animated: Bool, context: AccountContext, presentationData: PresentationData, interaction: Interaction) -> ListTransition {
private func preparedTransition(from fromEntries: [ListEntry], to toEntries: [ListEntry], isLoading: Bool, isEmpty: Bool, canInvite: Bool, crossFade: Bool, animated: Bool, context: AccountContext, presentationData: PresentationData, interaction: Interaction, style: VoiceChatParticipantItem.LayoutStyle) -> ListTransition {
let (deleteIndices, indicesAndItems, updateIndices) = mergeListsStableWithUpdates(leftList: fromEntries, rightList: toEntries)
let deletions = deleteIndices.map { ListViewDeleteItem(index: $0, directionHint: nil) }
let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) }
let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction), directionHint: nil) }
let insertions = indicesAndItems.map { ListViewInsertItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction, style: style), directionHint: nil) }
let updates = updateIndices.map { ListViewUpdateItem(index: $0.0, previousIndex: $0.2, item: $0.1.item(context: context, presentationData: presentationData, interaction: interaction, style: style), directionHint: nil) }
return ListTransition(deletions: deletions, insertions: insertions, updates: updates, isLoading: isLoading, isEmpty: isEmpty, canInvite: canInvite, crossFade: crossFade, count: toEntries.count, animated: animated)
}
@ -710,8 +750,10 @@ public final class VoiceChatController: ViewController {
private let dimNode: ASDisplayNode
private let contentContainer: ASDisplayNode
private let backgroundNode: ASDisplayNode
private var mainVideoContainer: MainVideoContainerNode?
private let mainVideoClippingNode: ASDisplayNode
private var mainVideoContainerNode: MainVideoContainerNode?
private let listNode: ListView
private let horizontalListNode: ListView
private let topPanelNode: ASDisplayNode
private let topPanelEdgeNode: ASDisplayNode
private let topPanelBackgroundNode: ASDisplayNode
@ -735,6 +777,8 @@ public final class VoiceChatController: ViewController {
private var enqueuedTransitions: [ListTransition] = []
private var floatingHeaderOffset: CGFloat?
private var enqueuedHorizontalTransitions: [ListTransition] = []
private var validLayout: (ContainerViewLayout, CGFloat)?
private var didSetContentsReady: Bool = false
private var didSetDataReady: Bool = false
@ -748,7 +792,8 @@ public final class VoiceChatController: ViewController {
private var currentSpeakingPeers: Set<PeerId>?
private var currentContentOffset: CGFloat?
private var ignoreScrolling = false
private var currentAudioButtonColor: UIColor?
private var currentNormalButtonColor: UIColor?
private var currentActiveButtonColor: UIColor?
private var currentEntries: [ListEntry] = []
@ -839,8 +884,11 @@ public final class VoiceChatController: ViewController {
self.backgroundNode.backgroundColor = secondaryPanelBackgroundColor
self.backgroundNode.clipsToBounds = false
self.mainVideoClippingNode = ASDisplayNode()
self.mainVideoClippingNode.clipsToBounds = true
if sharedContext.immediateExperimentalUISettings.demoVideoChats {
self.mainVideoContainer = MainVideoContainerNode(context: call.accountContext, call: call)
self.mainVideoContainerNode = MainVideoContainerNode(context: call.accountContext, call: call)
}
self.listNode = ListView()
@ -851,6 +899,14 @@ public final class VoiceChatController: ViewController {
return presentationData.strings.VoiceOver_ScrollStatus(row, count).0
}
self.horizontalListNode = ListView()
self.horizontalListNode.transform = CATransform3DMakeRotation(-CGFloat(CGFloat.pi / 2.0), 0.0, 0.0, 1.0)
self.horizontalListNode.clipsToBounds = true
self.horizontalListNode.isHidden = true
self.horizontalListNode.accessibilityPageScrolledString = { row, count in
return presentationData.strings.VoiceOver_ScrollStatus(row, count).0
}
self.topPanelNode = ASDisplayNode()
self.topPanelNode.clipsToBounds = false
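
horizontalListNode is an ordinary vertically scrolling ListView rotated by -90°, which is why containerLayoutUpdated later sets its bounds with width and height swapped. A self-contained sketch of the same trick, with a plain UIScrollView standing in for ListView:

import UIKit

func makeHorizontalStrip(screenWidth: CGFloat, rowHeight: CGFloat) -> UIScrollView {
    let scrollView = UIScrollView()
    // After rotation the list's "height" runs along the screen's width,
    // so the bounds are created with the two dimensions swapped.
    scrollView.bounds = CGRect(x: 0.0, y: 0.0, width: rowHeight, height: screenWidth)
    scrollView.transform = CGAffineTransform(rotationAngle: -CGFloat.pi / 2.0)
    return scrollView
}
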
@ -954,11 +1010,11 @@ public final class VoiceChatController: ViewController {
if strongSelf.currentDominantSpeakerWithVideo?.0 != peerId || strongSelf.currentDominantSpeakerWithVideo?.1 != source {
strongSelf.currentDominantSpeakerWithVideo = (peerId, source)
strongSelf.call.setFullSizeVideo(peerId: peerId)
strongSelf.mainVideoContainer?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: false)
strongSelf.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: false)
} else {
strongSelf.currentDominantSpeakerWithVideo = nil
strongSelf.call.setFullSizeVideo(peerId: nil)
strongSelf.mainVideoContainer?.updatePeer(peer: nil, waitForFullSize: false)
strongSelf.mainVideoContainerNode?.updatePeer(peer: nil, waitForFullSize: false)
}
}
}
@ -1521,10 +1577,19 @@ public final class VoiceChatController: ViewController {
updated.revealedPeerId = peerId
return updated
}
}, getPeerVideo: { [weak self] ssrc in
}, getPeerVideo: { [weak self] ssrc, tile in
guard let strongSelf = self else {
return nil
}
var skip = false
if case .fullscreen = strongSelf.displayMode {
skip = !tile
} else {
skip = tile
}
if skip {
return nil
}
for (_, listSsrc, videoNode) in strongSelf.videoNodes {
if listSsrc == ssrc {
return videoNode
@ -1544,7 +1609,7 @@ public final class VoiceChatController: ViewController {
self.bottomPanelNode.addSubnode(self.bottomCornersNode)
self.bottomPanelNode.addSubnode(self.bottomPanelBackgroundNode)
self.bottomPanelNode.addSubnode(self.audioButton)
if let _ = self.mainVideoContainer {
if let _ = self.mainVideoContainerNode {
self.bottomPanelNode.addSubnode(self.cameraButton)
self.bottomPanelNode.addSubnode(self.switchCameraButton)
}
@ -1555,15 +1620,18 @@ public final class VoiceChatController: ViewController {
self.addSubnode(self.contentContainer)
self.contentContainer.addSubnode(self.backgroundNode)
self.contentContainer.addSubnode(self.listNode)
if let mainVideoContainer = self.mainVideoContainer {
self.contentContainer.addSubnode(mainVideoContainer)
if let mainVideoContainer = self.mainVideoContainerNode {
self.contentContainer.addSubnode(self.mainVideoClippingNode)
self.mainVideoClippingNode.addSubnode(mainVideoContainer)
}
self.contentContainer.addSubnode(self.listNode)
self.contentContainer.addSubnode(self.topPanelNode)
self.contentContainer.addSubnode(self.leftBorderNode)
self.contentContainer.addSubnode(self.rightBorderNode)
self.contentContainer.addSubnode(self.bottomPanelNode)
self.contentContainer.addSubnode(self.horizontalListNode)
let invitedPeers: Signal<[Peer], NoError> = self.call.invitedPeers
|> mapToSignal { ids -> Signal<[Peer], NoError> in
return context.account.postbox.transaction { transaction -> [Peer] in
@ -1622,7 +1690,6 @@ public final class VoiceChatController: ViewController {
if let callState = strongSelf.callState, callState.canManageCall {
strongSelf.optionsButtonIsAvatar = false
strongSelf.optionsButton.isUserInteractionEnabled = true
strongSelf.optionsButton.alpha = 1.0
} else if displayAsPeers.count > 1 {
strongSelf.optionsButtonIsAvatar = true
for peer in displayAsPeers {
@ -1631,11 +1698,9 @@ public final class VoiceChatController: ViewController {
}
}
strongSelf.optionsButton.isUserInteractionEnabled = true
strongSelf.optionsButton.alpha = 1.0
} else {
strongSelf.optionsButtonIsAvatar = false
strongSelf.optionsButton.isUserInteractionEnabled = false
strongSelf.optionsButton.alpha = 0.0
}
if let (layout, navigationHeight) = strongSelf.validLayout {
@ -1712,7 +1777,7 @@ public final class VoiceChatController: ViewController {
if strongSelf.currentDominantSpeakerWithVideo?.0 != peerId || strongSelf.currentDominantSpeakerWithVideo?.1 != source {
strongSelf.currentDominantSpeakerWithVideo = (peerId, source)
strongSelf.call.setFullSizeVideo(peerId: peerId)
strongSelf.mainVideoContainer?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: true)
strongSelf.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: true)
}
}
@ -1745,10 +1810,11 @@ public final class VoiceChatController: ViewController {
self.closeButton.addTarget(self, action: #selector(self.closePressed), forControlEvents: .touchUpInside)
self.actionButtonColorDisposable = (self.actionButton.outerColor
|> deliverOnMainQueue).start(next: { [weak self] color in
|> deliverOnMainQueue).start(next: { [weak self] normalColor, activeColor in
if let strongSelf = self {
let animated = strongSelf.currentAudioButtonColor != nil
strongSelf.currentAudioButtonColor = color
let animated = strongSelf.currentNormalButtonColor != nil
strongSelf.currentNormalButtonColor = normalColor
strongSelf.currentActiveButtonColor = activeColor
strongSelf.updateButtons(animated: animated)
}
})
@ -1756,7 +1822,7 @@ public final class VoiceChatController: ViewController {
self.listNode.updateFloatingHeaderOffset = { [weak self] offset, transition in
if let strongSelf = self {
strongSelf.currentContentOffset = offset
if strongSelf.animation == nil && !strongSelf.animatingExpansion {
if strongSelf.expandAnimation == nil && !strongSelf.animatingExpansion {
strongSelf.updateFloatingHeaderOffset(offset: offset, transition: transition)
}
}
@ -1820,7 +1886,8 @@ public final class VoiceChatController: ViewController {
case let .peer(peerEntry):
if peerEntry.ssrc == source {
let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme)
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .list), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
strongSelf.horizontalListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .tile), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
break loop
}
default:
@ -1845,7 +1912,8 @@ public final class VoiceChatController: ViewController {
case let .peer(peerEntry):
if peerEntry.ssrc == ssrc {
let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme)
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .list), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
strongSelf.horizontalListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, style: .tile), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
break loop
}
default:
@ -1862,7 +1930,7 @@ public final class VoiceChatController: ViewController {
if !validSources.contains(source) {
strongSelf.currentDominantSpeakerWithVideo = nil
strongSelf.call.setFullSizeVideo(peerId: nil)
strongSelf.mainVideoContainer?.updatePeer(peer: nil, waitForFullSize: false)
strongSelf.mainVideoContainerNode?.updatePeer(peer: nil, waitForFullSize: false)
}
}
@ -1890,6 +1958,68 @@ public final class VoiceChatController: ViewController {
}
}
}
self.mainVideoContainerNode?.tapped = { [weak self] in
if let strongSelf = self {
var effectiveDisplayMode = strongSelf.displayMode
var isLandscape = false
if let (layout, _) = strongSelf.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
isLandscape = true
if case .fullscreen = effectiveDisplayMode {
} else {
effectiveDisplayMode = .fullscreen(controlsHidden: false)
}
}
switch effectiveDisplayMode {
case .default:
strongSelf.displayMode = .fullscreen(controlsHidden: false)
case let .fullscreen(controlsHidden):
if true {
strongSelf.displayMode = .fullscreen(controlsHidden: !controlsHidden)
}
else if controlsHidden && !isLandscape {
strongSelf.displayMode = .default
} else {
strongSelf.displayMode = .fullscreen(controlsHidden: true)
}
}
if case .default = effectiveDisplayMode, case .fullscreen = strongSelf.displayMode {
strongSelf.horizontalListNode.isHidden = false
var verticalItemNodes: [PeerId: VoiceChatParticipantItemNode] = [:]
strongSelf.listNode.forEachItemNode { itemNode in
if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item {
verticalItemNodes[item.peer.id] = itemNode
}
}
strongSelf.horizontalListNode.forEachVisibleItemNode { itemNode in
if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = verticalItemNodes[item.peer.id] {
itemNode.transitionIn(from: otherItemNode, containerNode: strongSelf)
}
}
} else if case .fullscreen = effectiveDisplayMode, case .default = strongSelf.displayMode {
var horizontalItemNodes: [PeerId: VoiceChatParticipantItemNode] = [:]
strongSelf.horizontalListNode.forEachItemNode { itemNode in
if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item {
horizontalItemNodes[item.peer.id] = itemNode
}
}
strongSelf.listNode.forEachVisibleItemNode { itemNode in
if let itemNode = itemNode as? VoiceChatParticipantItemNode, let item = itemNode.item, let otherItemNode = horizontalItemNodes[item.peer.id] {
itemNode.transitionIn(from: otherItemNode, containerNode: strongSelf)
}
}
}
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
}
}
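
When a tap on the main video switches between the default list and fullscreen mode, visible participant cells in the target list animate in from their counterparts in the source list, matched by peer id. A simplified sketch of that matching step — ParticipantCell is an illustrative stand-in for VoiceChatParticipantItemNode:

struct ParticipantCell {
    let peerId: Int64
    func transitionIn(from source: ParticipantCell) {
        // Animate frame/avatar from `source` into this cell (omitted in the sketch).
    }
}

func crossTransition(fromVisible sourceCells: [ParticipantCell], toVisible targetCells: [ParticipantCell]) {
    var sourceByPeer: [Int64: ParticipantCell] = [:]
    for cell in sourceCells {
        sourceByPeer[cell.peerId] = cell
    }
    for cell in targetCells {
        if let source = sourceByPeer[cell.peerId] {
            cell.transitionIn(from: source)
        }
    }
}
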
deinit {
@ -2614,7 +2744,7 @@ public final class VoiceChatController: ViewController {
}
@objc private func switchCameraPressed() {
self.call.switchVideoCamera()
}
private var effectiveBottomAreaHeight: CGFloat {
@ -2626,13 +2756,13 @@ public final class VoiceChatController: ViewController {
}
}
private var bringVideoToBackOnCompletion = false
private func updateFloatingHeaderOffset(offset: CGFloat, transition: ContainedViewLayoutTransition, completion: (() -> Void)? = nil) {
guard let (layout, _) = self.validLayout else {
return
}
let layoutTopInset: CGFloat = max(layout.statusBarHeight ?? 0.0, layout.safeInsets.top)
let topPanelHeight: CGFloat = 63.0
let listTopInset = layoutTopInset + topPanelHeight
let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom
@ -2641,6 +2771,16 @@ public final class VoiceChatController: ViewController {
size.width = floor(min(size.width, size.height) * 0.5)
}
var isLandscape = false
var effectiveDisplayMode = self.displayMode
if case .compact = layout.metrics.widthClass, layout.size.width > layout.size.height {
isLandscape = true
if case .fullscreen = effectiveDisplayMode {
} else {
effectiveDisplayMode = .fullscreen(controlsHidden: false)
}
}
let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight)
let topInset: CGFloat
if let (panInitialTopInset, panOffset) = self.panGestureArguments {
@ -2649,7 +2789,7 @@ public final class VoiceChatController: ViewController {
} else {
topInset = max(0.0, panInitialTopInset + min(0.0, panOffset))
}
} else if let _ = self.animation {
} else if let _ = self.expandAnimation {
topInset = self.listNode.frame.minY - listTopInset
} else if let currentTopInset = self.topInset {
topInset = self.isExpanded ? 0.0 : currentTopInset
@ -2662,18 +2802,60 @@ public final class VoiceChatController: ViewController {
let rawPanelOffset = offset + listTopInset - topPanelHeight
let panelOffset = max(layoutTopInset, rawPanelOffset)
let topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: panelOffset), size: CGSize(width: size.width, height: topPanelHeight))
let topPanelFrame: CGRect
if isLandscape {
topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: size.width, height: 0.0))
} else {
topPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: panelOffset), size: CGSize(width: size.width, height: topPanelHeight))
}
let sideInset: CGFloat = 16.0
if let mainVideoContainer = self.mainVideoContainer {
let videoContainerFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY), size: CGSize(width: layout.size.width, height: min(300.0, layout.size.width)))
transition.updateFrameAdditive(node: mainVideoContainer, frame: videoContainerFrame)
mainVideoContainer.update(size: videoContainerFrame.size, transition: transition)
if let mainVideoContainer = self.mainVideoContainerNode {
let videoClippingFrame: CGRect
let videoContainerFrame: CGRect
let videoInset: CGFloat
if isLandscape {
videoInset = 0.0
videoClippingFrame = CGRect(x: layout.safeInsets.left, y: 0.0, width: layout.size.width - layout.safeInsets.left - layout.safeInsets.right - fullscreenBottomAreaHeight, height: layout.size.height + 6.0)
videoContainerFrame = CGRect(origin: CGPoint(), size: videoClippingFrame.size)
} else {
let videoHeight: CGFloat
let videoY: CGFloat
switch effectiveDisplayMode {
case .default:
videoInset = sideInset
videoHeight = min(mainVideoHeight, layout.size.width)
videoY = topPanelFrame.maxY
case .fullscreen:
videoInset = 0.0
videoHeight = layout.size.height - (layout.statusBarHeight ?? 0.0) - layout.intrinsicInsets.bottom - fullscreenBottomAreaHeight - 6.0
videoY = layout.statusBarHeight ?? 20.0
}
videoClippingFrame = CGRect(origin: CGPoint(x: videoInset, y: videoY), size: CGSize(width: layout.size.width - videoInset * 2.0, height: self.isFullscreen ? videoHeight : 0.0))
videoContainerFrame = CGRect(origin: CGPoint(x: -videoInset, y: 0.0), size: CGSize(width: layout.size.width, height: videoHeight))
}
transition.updateFrame(node: self.mainVideoClippingNode, frame: videoClippingFrame)
transition.updateFrame(node: mainVideoContainer, frame: videoContainerFrame, completion: { [weak self] _ in
if let strongSelf = self, strongSelf.bringVideoToBackOnCompletion {
strongSelf.bringVideoToBackOnCompletion = false
strongSelf.contentContainer.insertSubnode(strongSelf.mainVideoClippingNode, belowSubnode: strongSelf.horizontalListNode)
}
})
mainVideoContainer.update(size: videoContainerFrame.size, sideInset: videoInset, isLandscape: isLandscape, transition: transition)
}
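
The main video now lives inside mainVideoClippingNode: the clipping frame's height collapses to 0 when the UI is not fullscreen while the video container keeps its full height, so expanding looks like the video sliding out from under the top panel rather than being rescaled. A minimal UIKit sketch of this clip-to-reveal approach, with generic views in place of the display nodes:

import UIKit

func setVideoRevealed(_ revealed: Bool, clippingView: UIView, videoView: UIView, videoHeight: CGFloat, width: CGFloat) {
    clippingView.clipsToBounds = true
    // The video keeps its full size; only the clipping container's height animates.
    videoView.frame = CGRect(x: 0.0, y: 0.0, width: width, height: videoHeight)
    UIView.animate(withDuration: 0.3, delay: 0.0, options: [.curveEaseInOut], animations: {
        clippingView.frame.size.height = revealed ? videoHeight : 0.0
    })
}
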
let backgroundFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY), size: CGSize(width: size.width, height: layout.size.height))
let sideInset: CGFloat = 16.0
let leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height))
let rightBorderFrame = CGRect(origin: CGPoint(x: size.width - sideInset, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height))
let leftBorderFrame: CGRect
let rightBorderFrame: CGRect
if isLandscape {
leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: layout.safeInsets.left, height: layout.size.height))
rightBorderFrame = CGRect(origin: CGPoint(x: size.width - layout.safeInsets.right, y: 0.0), size: CGSize(width: layout.safeInsets.right, height: layout.size.height))
} else {
leftBorderFrame = CGRect(origin: CGPoint(x: 0.0, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height))
rightBorderFrame = CGRect(origin: CGPoint(x: size.width - sideInset, y: topPanelFrame.maxY - 16.0), size: CGSize(width: sideInset, height: layout.size.height))
}
let previousTopPanelFrame = self.topPanelNode.frame
let previousBackgroundFrame = self.backgroundNode.frame
@ -2699,7 +2881,7 @@ public final class VoiceChatController: ViewController {
} else {
completion?()
}
self.topPanelBackgroundNode.frame = CGRect(x: 0.0, y: topPanelHeight - 24.0, width: size.width, height: 24.0)
self.topPanelBackgroundNode.frame = CGRect(x: 0.0, y: topPanelHeight - 24.0, width: size.width, height: min(topPanelFrame.height, 24.0))
var bottomEdge: CGFloat = 0.0
self.listNode.forEachItemNode { itemNode in
@ -2747,7 +2929,6 @@ public final class VoiceChatController: ViewController {
size.width = floor(min(size.width, size.height) * 0.5)
}
let topPanelHeight: CGFloat = 63.0
let topEdgeFrame: CGRect
if isFullscreen {
let offset: CGFloat
@ -2770,7 +2951,6 @@ public final class VoiceChatController: ViewController {
transition.updateBackgroundColor(node: self.bottomPanelBackgroundNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor)
transition.updateBackgroundColor(node: self.leftBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor)
transition.updateBackgroundColor(node: self.rightBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor)
transition.updateBackgroundColor(node: self.rightBorderNode, color: isFullscreen ? fullscreenBackgroundColor : panelBackgroundColor)
if let snapshotView = self.topCornersNode.view.snapshotContentTree() {
snapshotView.frame = self.topCornersNode.frame
@ -2850,22 +3030,28 @@ public final class VoiceChatController: ViewController {
}
}
let coloredButtonAppearance: CallControllerButtonItemNode.Content.Appearance
if let color = self.currentAudioButtonColor {
coloredButtonAppearance = .color(.custom(color.rgb, 1.0))
let normalButtonAppearance: CallControllerButtonItemNode.Content.Appearance
let activeButtonAppearance: CallControllerButtonItemNode.Content.Appearance
if let color = self.currentNormalButtonColor {
normalButtonAppearance = .color(.custom(color.rgb, 1.0))
} else {
coloredButtonAppearance = .color(.custom(self.isFullscreen ? 0x1c1c1e : 0x2c2c2e, 1.0))
normalButtonAppearance = .color(.custom(self.isFullscreen ? 0x1c1c1e : 0x2c2c2e, 1.0))
}
if let color = self.currentActiveButtonColor {
activeButtonAppearance = .color(.custom(color.rgb, 1.0))
} else {
activeButtonAppearance = .color(.custom(self.isFullscreen ? 0x1c1c1e : 0x2c2c2e, 1.0))
}
let soundImage: CallControllerButtonItemNode.Content.Image
var soundAppearance: CallControllerButtonItemNode.Content.Appearance = coloredButtonAppearance
var soundAppearance: CallControllerButtonItemNode.Content.Appearance = normalButtonAppearance
var soundTitle: String = self.presentationData.strings.Call_Speaker
switch audioMode {
case .none, .builtin:
soundImage = .speaker
case .speaker:
soundImage = .speaker
soundAppearance = .blurred(isFilled: true)
soundAppearance = activeButtonAppearance
case .headphones:
soundImage = .headphones
soundTitle = self.presentationData.strings.Call_Audio
@ -2883,7 +3069,15 @@ public final class VoiceChatController: ViewController {
let videoButtonSize: CGSize
var buttonsTitleAlpha: CGFloat
switch self.displayMode {
var effectiveDisplayMode = self.displayMode
if let (layout, _) = self.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
if case .fullscreen = effectiveDisplayMode {
} else {
effectiveDisplayMode = .fullscreen(controlsHidden: false)
}
}
switch effectiveDisplayMode {
case .default:
videoButtonSize = smallButtonSize
buttonsTitleAlpha = 1.0
@ -2893,9 +3087,9 @@ public final class VoiceChatController: ViewController {
}
let transition: ContainedViewLayoutTransition = animated ? .animated(duration: 0.3, curve: .linear) : .immediate
self.cameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: coloredButtonAppearance, image: .camera), text: self.presentationData.strings.VoiceChat_Video, transition: transition)
self.cameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .camera), text: self.presentationData.strings.VoiceChat_Video, transition: transition)
self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: coloredButtonAppearance, image: .flipCamera), text: "", transition: transition)
self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition)
self.audioButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage), text: soundTitle, transition: transition)
@ -2916,24 +3110,51 @@ public final class VoiceChatController: ViewController {
size.width = floor(min(size.width, size.height) * 0.5)
}
var isLandscape = false
var effectiveDisplayMode = self.displayMode
if case .compact = layout.metrics.widthClass, layout.size.width > layout.size.height {
isLandscape = true
if !self.isFullscreen {
self.isExpanded = true
self.updateIsFullscreen(true)
}
if case .fullscreen = effectiveDisplayMode {
} else {
effectiveDisplayMode = .fullscreen(controlsHidden: false)
}
}
if let videoIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.mainVideoClippingNode }), let listIndex = self.contentContainer.subnodes?.firstIndex(where: { $0 === self.listNode }) {
switch effectiveDisplayMode {
case .default:
if listIndex < videoIndex {
self.bringVideoToBackOnCompletion = true
}
case .fullscreen:
if listIndex > videoIndex {
self.contentContainer.insertSubnode(self.mainVideoClippingNode, belowSubnode: self.horizontalListNode)
}
}
}
self.updateTitle(transition: transition)
transition.updateFrame(node: self.titleNode, frame: CGRect(origin: CGPoint(x: 0.0, y: 10.0), size: CGSize(width: size.width, height: 44.0)))
transition.updateFrame(node: self.optionsButton, frame: CGRect(origin: CGPoint(x: 20.0, y: 18.0), size: CGSize(width: 28.0, height: 28.0)))
transition.updateFrame(node: self.closeButton, frame: CGRect(origin: CGPoint(x: size.width - 20.0 - 28.0, y: 18.0), size: CGSize(width: 28.0, height: 28.0)))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.contentContainer, frame: CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - size.width) / 2.0), y: 0.0), size: size))
let layoutTopInset: CGFloat = max(layout.statusBarHeight ?? 0.0, layout.safeInsets.top)
let sideInset: CGFloat = 16.0
var insets = UIEdgeInsets()
insets.left = layout.safeInsets.left + sideInset
insets.right = layout.safeInsets.right + sideInset
let topPanelHeight: CGFloat = 63.0
if let _ = self.panGestureArguments {
if isLandscape {
transition.updateFrame(node: self.topPanelEdgeNode, frame: CGRect(x: 0.0, y: 0.0, width: size.width, height: 0.0))
} else if let _ = self.panGestureArguments {
} else {
let topEdgeFrame: CGRect
if self.isFullscreen {
@ -2952,8 +3173,14 @@ public final class VoiceChatController: ViewController {
let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom
var listTopInset = layoutTopInset + topPanelHeight
if self.mainVideoContainer != nil {
listTopInset += min(300.0, layout.size.width)
var topCornersY = topPanelHeight
if isLandscape {
listTopInset = 0.0
topCornersY = -50.0
} else if self.mainVideoContainerNode != nil && self.isFullscreen {
let videoContainerHeight = min(mainVideoHeight, layout.size.width)
listTopInset += videoContainerHeight
topCornersY += videoContainerHeight
}
let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight)
@ -2970,18 +3197,33 @@ public final class VoiceChatController: ViewController {
topInset = listSize.height
}
if self.animation == nil {
if self.expandAnimation == nil {
transition.updateFrame(node: self.listNode, frame: CGRect(origin: CGPoint(x: 0.0, y: listTopInset + topInset), size: listSize))
}
let (duration, curve) = listViewAnimationDurationAndCurve(transition: transition)
let updateSizeAndInsets = ListViewUpdateSizeAndInsets(size: listSize, insets: insets, duration: duration, curve: curve)
self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: ListViewUpdateSizeAndInsets(size: listSize, insets: insets, duration: duration, curve: curve), stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
self.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: updateSizeAndInsets, stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
let horizontalListHeight: CGFloat = 84.0
self.horizontalListNode.bounds = CGRect(x: 0.0, y: 0.0, width: horizontalListHeight, height: layout.size.width - layout.safeInsets.left - layout.safeInsets.right)
transition.updateFrame(node: self.topCornersNode, frame: CGRect(origin: CGPoint(x: sideInset, y: 63.0), size: CGSize(width: size.width - sideInset * 2.0, height: 50.0)))
let horizontalListY = isLandscape ? layout.size.height - layout.intrinsicInsets.bottom - 42.0 : layout.size.height - min(bottomPanelHeight, fullscreenBottomAreaHeight + layout.intrinsicInsets.bottom) - 42.0
transition.updatePosition(node: self.horizontalListNode, position: CGPoint(x: layout.safeInsets.left + layout.size.width / 2.0, y: horizontalListY))
self.horizontalListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [], options: [.Synchronous, .LowLatency], scrollToItem: nil, updateSizeAndInsets: ListViewUpdateSizeAndInsets(size: CGSize(width: horizontalListHeight, height: layout.size.width), insets: UIEdgeInsets(top: 16.0, left: 0.0, bottom: 16.0, right: 0.0), duration: duration, curve: curve), stationaryItemRange: nil, updateOpaqueState: nil, completion: { _ in })
let bottomPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelHeight), size: CGSize(width: size.width, height: bottomPanelHeight))
transition.updateFrame(node: self.topCornersNode, frame: CGRect(origin: CGPoint(x: sideInset, y: topCornersY), size: CGSize(width: size.width - sideInset * 2.0, height: 50.0)))
var bottomPanelFrame = CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - bottomPanelHeight), size: CGSize(width: size.width, height: bottomPanelHeight))
if isLandscape {
transition.updateAlpha(node: self.closeButton, alpha: 0.0)
transition.updateAlpha(node: self.optionsButton, alpha: 0.0)
transition.updateAlpha(node: self.titleNode, alpha: 0.0)
bottomPanelFrame = CGRect(origin: CGPoint(x: layout.size.width - fullscreenBottomAreaHeight - layout.safeInsets.right, y: 0.0), size: CGSize(width: fullscreenBottomAreaHeight + layout.safeInsets.right, height: layout.size.height))
} else {
transition.updateAlpha(node: self.closeButton, alpha: 1.0)
transition.updateAlpha(node: self.optionsButton, alpha: self.optionsButton.isUserInteractionEnabled ? 1.0 : 0.0)
transition.updateAlpha(node: self.titleNode, alpha: 1.0)
}
transition.updateFrame(node: self.bottomPanelNode, frame: bottomPanelFrame)
let centralButtonSize = CGSize(width: 300.0, height: 300.0)
@ -2997,15 +3239,15 @@ public final class VoiceChatController: ViewController {
let forthButtonFrame: CGRect
let leftButtonFrame: CGRect
if self.mainVideoContainer == nil {
if self.mainVideoContainerNode == nil {
leftButtonFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize)
} else {
leftButtonFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height - upperButtonDistance - cameraButtonSize.height) / 2.0) + upperButtonDistance + cameraButtonSize.height), size: sideButtonSize)
}
let rightButtonFrame = CGRect(origin: CGPoint(x: size.width - sideButtonOrigin - sideButtonSize.width, y: floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize)
let smallButtons: Bool
switch self.displayMode {
switch effectiveDisplayMode {
case .default:
smallButtons = false
firstButtonFrame = CGRect(origin: CGPoint(x: floor(leftButtonFrame.midX - cameraButtonSize.width / 2.0), y: leftButtonFrame.minY - upperButtonDistance - cameraButtonSize.height), size: cameraButtonSize)
@ -3015,13 +3257,23 @@ public final class VoiceChatController: ViewController {
case let .fullscreen(controlsHidden):
smallButtons = true
let sideInset: CGFloat = 26.0
let spacing = floor((layout.size.width - sideInset * 2.0 - sideButtonSize.width * 4.0) / 3.0)
firstButtonFrame = CGRect(origin: CGPoint(x: sideInset, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize)
secondButtonFrame = CGRect(origin: CGPoint(x: sideInset + sideButtonSize.width + spacing, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize)
let thirdButtonPreFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width - spacing - sideButtonSize.width, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize)
thirdButtonFrame = CGRect(origin: CGPoint(x: floor(thirdButtonPreFrame.midX - centralButtonSize.width / 2.0), y: floor(thirdButtonPreFrame.midY - centralButtonSize.height / 2.0)), size: centralButtonSize)
forthButtonFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width, y: controlsHidden ? layout.size.height : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize)
if isLandscape {
let spacing = floor((layout.size.height - sideInset * 2.0 - sideButtonSize.height * 4.0) / 3.0)
let x = controlsHidden ? fullscreenBottomAreaHeight + layout.safeInsets.right + 30.0 : floor((fullscreenBottomAreaHeight - sideButtonSize.width) / 2.0)
forthButtonFrame = CGRect(origin: CGPoint(x: x, y: sideInset), size: sideButtonSize)
let thirdButtonPreFrame = CGRect(origin: CGPoint(x: x, y: sideInset + sideButtonSize.height + spacing), size: sideButtonSize)
thirdButtonFrame = CGRect(origin: CGPoint(x: floor(thirdButtonPreFrame.midX - centralButtonSize.width / 2.0), y: floor(thirdButtonPreFrame.midY - centralButtonSize.height / 2.0)), size: centralButtonSize)
secondButtonFrame = CGRect(origin: CGPoint(x: x, y: layout.size.height - sideInset - sideButtonSize.height - spacing - sideButtonSize.height), size: sideButtonSize)
firstButtonFrame = CGRect(origin: CGPoint(x: x, y: layout.size.height - sideInset - sideButtonSize.height), size: sideButtonSize)
} else {
let spacing = floor((layout.size.width - sideInset * 2.0 - sideButtonSize.width * 4.0) / 3.0)
let y = controlsHidden ? self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom + 30.0 : floor((self.effectiveBottomAreaHeight - sideButtonSize.height) / 2.0)
firstButtonFrame = CGRect(origin: CGPoint(x: sideInset, y: y), size: sideButtonSize)
secondButtonFrame = CGRect(origin: CGPoint(x: sideInset + sideButtonSize.width + spacing, y: y), size: sideButtonSize)
let thirdButtonPreFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width - spacing - sideButtonSize.width, y: y), size: sideButtonSize)
thirdButtonFrame = CGRect(origin: CGPoint(x: floor(thirdButtonPreFrame.midX - centralButtonSize.width / 2.0), y: floor(thirdButtonPreFrame.midY - centralButtonSize.height / 2.0)), size: centralButtonSize)
forthButtonFrame = CGRect(origin: CGPoint(x: layout.size.width - sideInset - sideButtonSize.width, y: y), size: sideButtonSize)
}
}
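In the fullscreen branch the four call buttons are distributed along the trailing side panel in landscape and along the bottom panel in portrait, and pushed off-screen when controlsHidden is set. A rough, self-contained sketch of that frame math (hypothetical names, evenly spaced buttons, plain UIKit types, not the controller's real layout code):

import UIKit

// `panelExtent` stands in for fullscreenBottomAreaHeight / effectiveBottomAreaHeight.
func fullscreenButtonFrames(containerSize: CGSize, buttonSize: CGSize, sideInset: CGFloat, panelExtent: CGFloat, isLandscape: Bool, controlsHidden: Bool) -> [CGRect] {
    if isLandscape {
        // Stack the buttons top to bottom in a panel on the trailing edge.
        let spacing = floor((containerSize.height - sideInset * 2.0 - buttonSize.height * 4.0) / 3.0)
        let x = controlsHidden ? panelExtent + 30.0 : floor((panelExtent - buttonSize.width) / 2.0)
        return (0 ..< 4).map { index in
            CGRect(origin: CGPoint(x: x, y: sideInset + CGFloat(index) * (buttonSize.height + spacing)), size: buttonSize)
        }
    } else {
        // Spread the buttons left to right in the bottom panel.
        let spacing = floor((containerSize.width - sideInset * 2.0 - buttonSize.width * 4.0) / 3.0)
        let y = controlsHidden ? panelExtent + 30.0 : floor((panelExtent - buttonSize.height) / 2.0)
        return (0 ..< 4).map { index in
            CGRect(origin: CGPoint(x: sideInset + CGFloat(index) * (buttonSize.width + spacing), y: y), size: buttonSize)
        }
    }
}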
let actionButtonState: VoiceChatActionButton.State
@ -3103,6 +3355,9 @@ public final class VoiceChatController: ViewController {
while !self.enqueuedTransitions.isEmpty {
self.dequeueTransition()
}
while !self.enqueuedHorizontalTransitions.isEmpty {
self.dequeueHorizontalTransition()
}
}
}
@ -3179,6 +3434,16 @@ public final class VoiceChatController: ViewController {
}
}
private func enqueueHorizontalTransition(_ transition: ListTransition) {
self.enqueuedHorizontalTransitions.append(transition)
if let _ = self.validLayout {
while !self.enqueuedHorizontalTransitions.isEmpty {
self.dequeueHorizontalTransition()
}
}
}
private var topInset: CGFloat?
private var isFirstTime = true
private func dequeueTransition() {
@ -3195,7 +3460,7 @@ public final class VoiceChatController: ViewController {
if transition.crossFade {
options.insert(.AnimateCrossfade)
}
if transition.animated && self.animation == nil {
if transition.animated && self.expandAnimation == nil {
options.insert(.AnimateInsertion)
}
}
@ -3223,7 +3488,7 @@ public final class VoiceChatController: ViewController {
}
let bottomPanelHeight = self.effectiveBottomAreaHeight + layout.intrinsicInsets.bottom
let listTopInset = layoutTopInset + 63.0
let listTopInset = layoutTopInset + topPanelHeight
let listSize = CGSize(width: size.width, height: layout.size.height - listTopInset - bottomPanelHeight)
self.topInset = max(0.0, max(listSize.height - itemsHeight, listSize.height - 46.0 - floor(56.0 * 3.5)))
@ -3236,7 +3501,7 @@ public final class VoiceChatController: ViewController {
self.listNode.frame = frame
} else if !self.isExpanded {
if self.listNode.frame.minY != targetY && !self.animatingExpansion && self.panGestureArguments == nil {
self.animation = ListViewAnimation(from: self.listNode.frame.minY, to: targetY, duration: 0.4, curve: listViewAnimationCurveSystem, beginAt: CACurrentMediaTime(), update: { [weak self] _, currentValue in
self.expandAnimation = ListViewAnimation(from: self.listNode.frame.minY, to: targetY, duration: 0.4, curve: listViewAnimationCurveSystem, beginAt: CACurrentMediaTime(), update: { [weak self] _, currentValue in
if let strongSelf = self {
var frame = strongSelf.listNode.frame
frame.origin.y = currentValue
@ -3260,17 +3525,40 @@ public final class VoiceChatController: ViewController {
}
private func dequeueHorizontalTransition() {
guard let _ = self.validLayout, let transition = self.enqueuedHorizontalTransitions.first else {
return
}
self.enqueuedHorizontalTransitions.remove(at: 0)
var options = ListViewDeleteAndInsertOptions()
let isFirstTime = self.isFirstTime
if !isFirstTime {
if transition.crossFade {
options.insert(.AnimateCrossfade)
}
if transition.animated {
options.insert(.AnimateInsertion)
}
}
options.insert(.LowLatency)
options.insert(.PreferSynchronousResourceLoading)
self.horizontalListNode.transaction(deleteIndices: transition.deletions, insertIndicesAndItems: transition.insertions, updateIndicesAndItems: transition.updates, options: options, scrollToItem: nil, updateSizeAndInsets: nil, updateOpaqueState: nil, completion: { _ in
})
}
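enqueueHorizontalTransition/dequeueHorizontalTransition follow the same pattern as the main list: transitions are queued until a valid layout exists, then drained in order. A minimal generic sketch of that pattern, with made-up names and no dependency on ListView:

// Queue items until the layout is known, then apply them in FIFO order.
final class TransitionQueue<Transition> {
    private var pending: [Transition] = []
    private var hasLayout = false
    private let apply: (Transition) -> Void

    init(apply: @escaping (Transition) -> Void) {
        self.apply = apply
    }

    func enqueue(_ transition: Transition) {
        pending.append(transition)
        drainIfPossible()
    }

    func layoutBecameValid() {
        hasLayout = true
        drainIfPossible()
    }

    private func drainIfPossible() {
        guard hasLayout else { return }
        while !pending.isEmpty {
            apply(pending.removeFirst())
        }
    }
}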
private var animator: ConstantDisplayLinkAnimator?
private var animation: ListViewAnimation?
private var expandAnimation: ListViewAnimation?
private func updateAnimation() {
var animate = false
let timestamp = CACurrentMediaTime()
if let animation = self.animation {
if let animation = self.expandAnimation {
animation.applyAt(timestamp)
if animation.completeAt(timestamp) {
self.animation = nil
self.expandAnimation = nil
} else {
animate = true
}
@ -3318,7 +3606,7 @@ public final class VoiceChatController: ViewController {
}
}
}
if canInvite {
if false, canInvite {
entries.append(.invite(self.presentationData.theme, self.presentationData.strings, self.presentationData.strings.VoiceChat_InviteMember))
}
@ -3439,12 +3727,21 @@ public final class VoiceChatController: ViewController {
}
let presentationData = self.presentationData.withUpdated(theme: self.darkTheme)
let transition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!)
let transition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!, style: .list)
self.enqueueTransition(transition)
let horizontalTransition = preparedTransition(from: previousEntries, to: entries, isLoading: false, isEmpty: false, canInvite: canInvite, crossFade: false, animated: !disableAnimation, context: self.context, presentationData: presentationData, interaction: self.itemInteraction!, style: .tile)
self.enqueueHorizontalTransition(horizontalTransition)
}
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
if gestureRecognizer is DirectionalPanGestureRecognizer {
if let (layout, _) = self.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
return false
}
if case .fullscreen = self.displayMode {
return false
}
let location = gestureRecognizer.location(in: self.bottomPanelNode.view)
if self.audioButton.frame.contains(location) || (!self.cameraButton.isHidden && self.cameraButton.frame.contains(location)) || self.leaveButton.frame.contains(location) {
return false
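The guard above disables the pan gesture in phone landscape, using the same "compact width class and width greater than height" test applied earlier when forcing fullscreen. A tiny standalone helper expressing that check, with UIKit's UIUserInterfaceSizeClass standing in for the layout's own metrics type:

import UIKit

func isCompactLandscape(size: CGSize, widthClass: UIUserInterfaceSizeClass) -> Bool {
    return widthClass == .compact && size.width > size.height
}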

View File

@ -20,6 +20,11 @@ import AudioBlob
import PeerInfoAvatarListNode
final class VoiceChatParticipantItem: ListViewItem {
enum LayoutStyle {
case list
case tile
}
enum ParticipantText {
public enum TextColor {
case generic
@ -69,6 +74,7 @@ final class VoiceChatParticipantItem: ListViewItem {
let text: ParticipantText
let expandedText: ParticipantText?
let icon: Icon
let style: LayoutStyle
let enabled: Bool
let transparent: Bool
public let selectable: Bool
@ -82,7 +88,7 @@ final class VoiceChatParticipantItem: ListViewItem {
let getIsExpanded: () -> Bool
let getUpdatingAvatar: () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>
public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, ssrc: UInt32?, presence: PeerPresence?, text: ParticipantText, expandedText: ParticipantText?, icon: Icon, enabled: Bool, transparent: Bool, selectable: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: ((ASDisplayNode) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getIsExpanded: @escaping () -> Bool, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, ssrc: UInt32?, presence: PeerPresence?, text: ParticipantText, expandedText: ParticipantText?, icon: Icon, style: LayoutStyle, enabled: Bool, transparent: Bool, selectable: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: ((ASDisplayNode) -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil, getIsExpanded: @escaping () -> Bool, getUpdatingAvatar: @escaping () -> Signal<(TelegramMediaImageRepresentation, Float)?, NoError>) {
self.presentationData = presentationData
self.dateTimeFormat = dateTimeFormat
self.nameDisplayOrder = nameDisplayOrder
@ -93,6 +99,7 @@ final class VoiceChatParticipantItem: ListViewItem {
self.text = text
self.expandedText = expandedText
self.icon = icon
self.style = style
self.enabled = enabled
self.transparent = transparent
self.selectable = selectable
@ -151,6 +158,8 @@ final class VoiceChatParticipantItem: ListViewItem {
}
private let avatarFont = avatarPlaceholderFont(size: floor(40.0 * 16.0 / 37.0))
private let tileSize = CGSize(width: 84.0, height: 84.0)
private let backgroundCornerRadius: CGFloat = 14.0
class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
private let topStripeNode: ASDisplayNode
@ -169,7 +178,9 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
private var extractedVerticalOffset: CGFloat?
fileprivate let avatarNode: AvatarNode
private let contentWrapperNode: ASDisplayNode
private let titleNode: TextNode
private let statusIconNode: ASImageNode
private let statusNode: TextNode
private let expandedStatusNode: TextNode
private var credibilityIconNode: ASImageNode?
@ -196,6 +207,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
private var isExtracted = false
private var wavesColor: UIColor?
private var videoContainerNode: ASDisplayNode
private var videoNode: GroupVideoNode?
private var raiseHandTimer: SwiftSignalKit.Timer?
@ -229,11 +241,19 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.avatarNode = AvatarNode(font: avatarFont)
self.avatarNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 40.0))
self.contentWrapperNode = ASDisplayNode()
self.videoContainerNode = ASDisplayNode()
self.videoContainerNode.clipsToBounds = true
self.titleNode = TextNode()
self.titleNode.isUserInteractionEnabled = false
self.titleNode.contentMode = .left
self.titleNode.contentsScale = UIScreen.main.scale
self.statusIconNode = ASImageNode()
self.statusIconNode.displaysAsynchronously = false
self.statusNode = TextNode()
self.statusNode.isUserInteractionEnabled = false
self.statusNode.contentMode = .left
@ -262,12 +282,15 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.contextSourceNode.contentNode.addSubnode(self.backgroundImageNode)
self.backgroundImageNode.addSubnode(self.extractedBackgroundImageNode)
self.contextSourceNode.contentNode.addSubnode(self.offsetContainerNode)
self.offsetContainerNode.addSubnode(self.videoContainerNode)
self.offsetContainerNode.addSubnode(self.contentWrapperNode)
self.contentWrapperNode.addSubnode(self.titleNode)
self.contentWrapperNode.addSubnode(self.statusIconNode)
self.contentWrapperNode.addSubnode(self.statusNode)
self.contentWrapperNode.addSubnode(self.expandedStatusNode)
self.contentWrapperNode.addSubnode(self.actionContainerNode)
self.contentWrapperNode.addSubnode(self.actionButtonNode)
self.offsetContainerNode.addSubnode(self.avatarNode)
self.offsetContainerNode.addSubnode(self.titleNode)
self.offsetContainerNode.addSubnode(self.statusNode)
self.offsetContainerNode.addSubnode(self.expandedStatusNode)
self.offsetContainerNode.addSubnode(self.actionContainerNode)
self.actionContainerNode.addSubnode(self.actionButtonNode)
self.containerNode.targetNodeForActivationProgress = self.contextSourceNode.contentNode
self.actionButtonNode.addTarget(self, action: #selector(self.actionButtonPressed), forControlEvents: .touchUpInside)
@ -304,7 +327,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.isExtracted = isExtracted
let inset: CGFloat = 12.0
let cornerRadius: CGFloat = 14.0
if isExtracted {
strongSelf.contextSourceNode.contentNode.customHitTest = { [weak self] point in
if let strongSelf = self {
@ -339,23 +361,23 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
if !extractedVerticalOffset.isZero {
let radiusTransition = ContainedViewLayoutTransition.animated(duration: 0.2, curve: .easeInOut)
if isExtracted {
strongSelf.backgroundImageNode.image = generateImage(CGSize(width: cornerRadius * 2.0, height: cornerRadius * 2.0), rotatedContext: { (size, context) in
strongSelf.backgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in
let bounds = CGRect(origin: CGPoint(), size: size)
context.clear(bounds)
context.setFillColor(itemBackgroundColor.cgColor)
context.fillEllipse(in: bounds)
context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0))
})?.stretchableImage(withLeftCapWidth: Int(cornerRadius), topCapHeight: Int(cornerRadius))
strongSelf.extractedBackgroundImageNode.image = generateImage(CGSize(width: cornerRadius * 2.0, height: cornerRadius * 2.0), rotatedContext: { (size, context) in
})?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius))
strongSelf.extractedBackgroundImageNode.image = generateImage(CGSize(width: backgroundCornerRadius * 2.0, height: backgroundCornerRadius * 2.0), rotatedContext: { (size, context) in
let bounds = CGRect(origin: CGPoint(), size: size)
context.clear(bounds)
context.setFillColor(item.presentationData.theme.list.itemBlocksBackgroundColor.cgColor)
context.fillEllipse(in: bounds)
context.fill(CGRect(x: 0.0, y: 0.0, width: size.width, height: size.height / 2.0))
})?.stretchableImage(withLeftCapWidth: Int(cornerRadius), topCapHeight: Int(cornerRadius))
strongSelf.backgroundImageNode.cornerRadius = cornerRadius
})?.stretchableImage(withLeftCapWidth: Int(backgroundCornerRadius), topCapHeight: Int(backgroundCornerRadius))
strongSelf.backgroundImageNode.cornerRadius = backgroundCornerRadius
strongSelf.avatarNode.transform = CATransform3DIdentity
var avatarInitialRect = strongSelf.avatarNode.view.convert(strongSelf.avatarNode.bounds, to: strongSelf.offsetContainerNode.supernode?.view)
@ -364,12 +386,12 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
let targetRect = CGRect(x: extractedRect.minX, y: extractedRect.minY, width: extractedRect.width, height: extractedRect.width)
let initialScale = avatarInitialRect.width / targetRect.width
avatarInitialRect.origin.y += cornerRadius / 2.0 * initialScale
avatarInitialRect.origin.y += backgroundCornerRadius / 2.0 * initialScale
let avatarListWrapperNode = ASDisplayNode()
avatarListWrapperNode.clipsToBounds = true
avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.height + cornerRadius)
avatarListWrapperNode.cornerRadius = cornerRadius
avatarListWrapperNode.frame = CGRect(x: targetRect.minX, y: targetRect.minY, width: targetRect.width, height: targetRect.height + backgroundCornerRadius)
avatarListWrapperNode.cornerRadius = backgroundCornerRadius
let transitionNode = ASImageNode()
transitionNode.clipsToBounds = true
@ -422,11 +444,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.avatarListNode = avatarListNode
}
} else if let transitionNode = strongSelf.avatarTransitionNode, let avatarListWrapperNode = strongSelf.avatarListWrapperNode, let avatarListContainerNode = strongSelf.avatarListContainerNode {
transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: cornerRadius)
transition.updateCornerRadius(node: strongSelf.backgroundImageNode, cornerRadius: backgroundCornerRadius)
var avatarInitialRect = CGRect(origin: strongSelf.avatarNode.frame.origin, size: strongSelf.avatarNode.frame.size)
let targetScale = avatarInitialRect.width / avatarListContainerNode.frame.width
avatarInitialRect.origin.y += cornerRadius / 2.0 * targetScale
avatarInitialRect.origin.y += backgroundCornerRadius / 2.0 * targetScale
strongSelf.avatarTransitionNode = nil
strongSelf.avatarListWrapperNode = nil
@ -493,17 +515,21 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
} else {
strongSelf.extractedBackgroundImageNode.alpha = 0.0
strongSelf.extractedBackgroundImageNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, delay: 0.0, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: false, completion: { [weak self] _ in
self?.backgroundImageNode.image = nil
self?.extractedBackgroundImageNode.image = nil
self?.extractedBackgroundImageNode.layer.removeAllAnimations()
if let strongSelf = self {
if strongSelf.item?.style == .list {
strongSelf.backgroundImageNode.image = nil
}
strongSelf.extractedBackgroundImageNode.image = nil
strongSelf.extractedBackgroundImageNode.layer.removeAllAnimations()
}
})
}
} else {
if isExtracted {
strongSelf.backgroundImageNode.alpha = 0.0
strongSelf.extractedBackgroundImageNode.alpha = 1.0
strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: cornerRadius * 2.0, color: itemBackgroundColor)
strongSelf.extractedBackgroundImageNode.image = generateStretchableFilledCircleImage(diameter: cornerRadius * 2.0, color: item.presentationData.theme.list.itemBlocksBackgroundColor)
strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: itemBackgroundColor)
strongSelf.extractedBackgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: item.presentationData.theme.list.itemBlocksBackgroundColor)
}
transition.updateFrame(node: strongSelf.backgroundImageNode, frame: rect)
@ -531,15 +557,69 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
self.audioLevelDisposable.dispose()
self.raiseHandTimer?.invalidate()
}
@objc private func handleTap() {
print("tap")
}
override func selected() {
super.selected()
self.layoutParams?.0.action?(self.contextSourceNode)
}
func transitionIn(from otherNode: VoiceChatParticipantItemNode, containerNode: ASDisplayNode) {
guard let otherItem = otherNode.item, otherItem.style != self.item?.style else {
return
}
switch otherItem.style {
case .list:
otherNode.avatarNode.alpha = 0.0
let startContainerPosition = otherNode.avatarNode.view.convert(otherNode.avatarNode.bounds, to: containerNode.view).center.offsetBy(dx: 0.0, dy: 9.0)
let initialPosition = self.contextSourceNode.position
let targetContainerPosition = self.contextSourceNode.view.convert(self.contextSourceNode.bounds, to: containerNode.view).center
self.contextSourceNode.position = targetContainerPosition
containerNode.addSubnode(self.contextSourceNode)
self.contextSourceNode.layer.animatePosition(from: startContainerPosition, to: targetContainerPosition, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.contextSourceNode.position = initialPosition
strongSelf.containerNode.addSubnode(strongSelf.contextSourceNode)
}
})
if let videoNode = otherNode.videoNode {
self.avatarNode.alpha = 0.0
otherNode.videoNode = nil
self.videoNode = videoNode
let initialPosition = videoNode.position
videoNode.position = CGPoint(x: self.videoContainerNode.frame.width / 2.0, y: self.videoContainerNode.frame.width / 2.0)
videoNode.layer.animatePosition(from: initialPosition, to: videoNode.position, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.videoContainerNode.addSubnode(videoNode)
self.videoContainerNode.layer.animateFrame(from: self.avatarNode.frame, to: self.videoContainerNode.frame, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.videoContainerNode.layer.animate(from: (self.avatarNode.frame.width / 2.0) as NSNumber, to: backgroundCornerRadius as NSNumber, keyPath: "cornerRadius", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2, removeOnCompletion: false, completion: { [weak self] value in
})
}
self.backgroundImageNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.backgroundImageNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.contentWrapperNode.layer.animateScale(from: 0.001, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.contentWrapperNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
case .tile:
if let otherVideoNode = otherNode.videoNode {
otherNode.videoNode = nil
self.videoNode = otherVideoNode
let initialPosition = otherVideoNode.position
otherVideoNode.position = CGPoint(x: self.videoContainerNode.frame.width / 2.0, y: self.videoContainerNode.frame.width / 2.0)
otherVideoNode.layer.animatePosition(from: initialPosition, to: otherVideoNode.position, duration: 0.2, timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue)
self.videoContainerNode.addSubnode(otherVideoNode)
} else {
self.avatarNode.alpha = 1.0
}
}
}
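transitionIn reparents the video node and the context source node into a shared container and animates from the coordinates they had in their old parent, so the switch between list and tile cells looks continuous. A generic, hedged version of that reparent-and-animate trick in plain UIKit (illustrative names, not the item node's actual code):

import UIKit

// Move `view` into `newParent`, starting the animation from wherever it currently
// appears on screen and ending at `targetFrame` in the new parent's coordinates.
func reparentAnimated(_ view: UIView, to newParent: UIView, targetFrame: CGRect, duration: TimeInterval = 0.3) {
    let startFrame = view.superview?.convert(view.frame, to: newParent) ?? targetFrame
    newParent.addSubview(view)
    view.frame = startFrame
    UIView.animate(withDuration: duration) {
        view.frame = targetFrame
    }
}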
func asyncLayout() -> (_ item: VoiceChatParticipantItem, _ params: ListViewItemLayoutParams, _ first: Bool, _ last: Bool) -> (ListViewItemNodeLayout, (Bool, Bool) -> Void) {
let makeTitleLayout = TextNode.asyncLayout(self.titleNode)
@ -555,7 +635,7 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
updatedTheme = item.presentationData.theme
}
let titleFont = Font.regular(17.0)
let titleFont = item.style == .tile ? Font.regular(12.0) : Font.regular(17.0)
let statusFont = Font.regular(14.0)
var titleAttributedString: NSAttributedString?
@ -569,18 +649,29 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
if let user = item.peer as? TelegramUser {
if let firstName = user.firstName, let lastName = user.lastName, !firstName.isEmpty, !lastName.isEmpty {
let string = NSMutableAttributedString()
switch item.nameDisplayOrder {
case .firstLast:
string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor))
case .lastFirst:
string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor))
string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
if item.style == .tile {
let textColor: UIColor
switch item.icon {
case .wantsToSpeak:
textColor = item.presentationData.theme.list.itemAccentColor
default:
textColor = titleColor
}
titleAttributedString = NSAttributedString(string: firstName, font: titleFont, textColor: textColor)
} else {
let string = NSMutableAttributedString()
switch item.nameDisplayOrder {
case .firstLast:
string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor))
case .lastFirst:
string.append(NSAttributedString(string: lastName, font: currentBoldFont, textColor: titleColor))
string.append(NSAttributedString(string: " ", font: titleFont, textColor: titleColor))
string.append(NSAttributedString(string: firstName, font: titleFont, textColor: titleColor))
}
titleAttributedString = string
}
titleAttributedString = string
} else if let firstName = user.firstName, !firstName.isEmpty {
titleAttributedString = NSAttributedString(string: firstName, font: currentBoldFont, textColor: titleColor)
} else if let lastName = user.lastName, !lastName.isEmpty {
@ -596,39 +687,39 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
var wavesColor = UIColor(rgb: 0x34c759)
switch item.text {
case .presence:
if let user = item.peer as? TelegramUser, let botInfo = user.botInfo {
let botStatus: String
if botInfo.flags.contains(.hasAccessToChatHistory) {
botStatus = item.presentationData.strings.Bot_GroupStatusReadsHistory
case .presence:
if let user = item.peer as? TelegramUser, let botInfo = user.botInfo {
let botStatus: String
if botInfo.flags.contains(.hasAccessToChatHistory) {
botStatus = item.presentationData.strings.Bot_GroupStatusReadsHistory
} else {
botStatus = item.presentationData.strings.Bot_GroupStatusDoesNotReadHistory
}
statusAttributedString = NSAttributedString(string: botStatus, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
} else if let presence = item.presence as? TelegramUserPresence {
let timestamp = CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970
let (string, _) = stringAndActivityForUserPresence(strings: item.presentationData.strings, dateTimeFormat: item.dateTimeFormat, presence: presence, relativeTo: Int32(timestamp))
statusAttributedString = NSAttributedString(string: string, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
} else {
botStatus = item.presentationData.strings.Bot_GroupStatusDoesNotReadHistory
statusAttributedString = NSAttributedString(string: item.presentationData.strings.LastSeen_Offline, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
}
statusAttributedString = NSAttributedString(string: botStatus, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
} else if let presence = item.presence as? TelegramUserPresence {
let timestamp = CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970
let (string, _) = stringAndActivityForUserPresence(strings: item.presentationData.strings, dateTimeFormat: item.dateTimeFormat, presence: presence, relativeTo: Int32(timestamp))
statusAttributedString = NSAttributedString(string: string, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
} else {
statusAttributedString = NSAttributedString(string: item.presentationData.strings.LastSeen_Offline, font: statusFont, textColor: item.presentationData.theme.list.itemSecondaryTextColor)
}
case let .text(text, textColor):
let textColorValue: UIColor
switch textColor {
case .generic:
textColorValue = item.presentationData.theme.list.itemSecondaryTextColor
case .accent:
textColorValue = item.presentationData.theme.list.itemAccentColor
wavesColor = textColorValue
case .constructive:
textColorValue = UIColor(rgb: 0x34c759)
case .destructive:
textColorValue = UIColor(rgb: 0xff3b30)
wavesColor = textColorValue
}
statusAttributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue)
case .none:
break
case let .text(text, textColor):
let textColorValue: UIColor
switch textColor {
case .generic:
textColorValue = item.presentationData.theme.list.itemSecondaryTextColor
case .accent:
textColorValue = item.presentationData.theme.list.itemAccentColor
wavesColor = textColorValue
case .constructive:
textColorValue = UIColor(rgb: 0x34c759)
case .destructive:
textColorValue = UIColor(rgb: 0xff3b30)
wavesColor = textColorValue
}
statusAttributedString = NSAttributedString(string: text, font: statusFont, textColor: textColorValue)
case .none:
break
}
if let expandedText = item.expandedText, case let .text(text, textColor) = expandedText {
@ -676,18 +767,34 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
expandedRightInset = 0.0
}
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - 12.0 - rightInset - 30.0 - titleIconsWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let constrainedWidth: CGFloat
switch item.style {
case .list:
constrainedWidth = params.width - leftInset - 12.0 - rightInset - 30.0 - titleIconsWidth
case .tile:
constrainedWidth = params.width - 24.0 - 10.0
}
let (titleLayout, titleApply) = makeTitleLayout(TextNodeLayoutArguments(attributedString: titleAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: constrainedWidth, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (statusLayout, statusApply) = makeStatusLayout(TextNodeLayoutArguments(attributedString: statusAttributedString, backgroundColor: nil, maximumNumberOfLines: 1, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - 8.0 - rightInset - 30.0, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let (expandedStatusLayout, expandedStatusApply) = makeExpandedStatusLayout(TextNodeLayoutArguments(attributedString: expandedStatusAttributedString, backgroundColor: nil, maximumNumberOfLines: 6, truncationType: .end, constrainedSize: CGSize(width: params.width - leftInset - 8.0 - rightInset - expandedRightInset, height: CGFloat.greatestFiniteMagnitude), alignment: .natural, cutout: nil, insets: UIEdgeInsets()))
let insets = UIEdgeInsets()
let titleSpacing: CGFloat = statusLayout.size.height == 0.0 ? 0.0 : 1.0
let minHeight: CGFloat = titleLayout.size.height + verticalInset * 2.0
let rawHeight: CGFloat = verticalInset * 2.0 + titleLayout.size.height + titleSpacing + statusLayout.size.height
let contentSize = CGSize(width: params.width, height: max(minHeight, rawHeight))
let contentSize: CGSize
let insets: UIEdgeInsets
switch item.style {
case .list:
contentSize = CGSize(width: params.width, height: max(minHeight, rawHeight))
insets = UIEdgeInsets()
case .tile:
contentSize = tileSize
insets = UIEdgeInsets(top: 0.0, left: 0.0, bottom: !last ? 6.0 : 0.0, right: 0.0)
}
let separatorHeight = UIScreenPixel
let layout = ListViewItemNodeLayout(contentSize: contentSize, insets: insets)
@ -743,7 +850,36 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.layoutParams = (item, params, first, last)
strongSelf.wavesColor = wavesColor
let nonExtractedRect = CGRect(origin: CGPoint(x: 16.0, y: 0.0), size: CGSize(width: layout.contentSize.width - 32.0, height: layout.contentSize.height))
let nonExtractedRect: CGRect
let avatarFrame: CGRect
let titleFrame: CGRect
let animationSize: CGSize
let animationFrame: CGRect
let animationScale: CGFloat
switch item.style {
case .list:
nonExtractedRect = CGRect(origin: CGPoint(x: 16.0, y: 0.0), size: CGSize(width: layout.contentSize.width - 32.0, height: layout.contentSize.height))
avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize))
animationSize = CGSize(width: 36.0, height: 36.0)
animationScale = 1.0
animationFrame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
titleFrame = CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size)
case .tile:
nonExtractedRect = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.containerNode.transform = CATransform3DMakeRotation(CGFloat.pi / 2.0, 0.0, 0.0, 1.0)
strongSelf.statusNode.isHidden = true
strongSelf.expandedStatusNode.isHidden = true
avatarFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - avatarSize) / 2.0), y: 13.0), size: CGSize(width: avatarSize, height: avatarSize))
let textWidth: CGFloat = 24.0 + titleLayout.size.width
let textOrigin: CGFloat = floor((layout.size.width - textWidth) / 2.0) - 4.0
animationSize = CGSize(width: 36.0, height: 36.0)
animationScale = 0.66667
animationFrame = CGRect(x: textOrigin, y: 53.0, width: 24.0, height: 24.0)
titleFrame = CGRect(origin: CGPoint(x: textOrigin + 24.0, y: 61.0), size: titleLayout.size)
}
var extractedRect = CGRect(origin: CGPoint(), size: layout.contentSize).insetBy(dx: 16.0 + params.leftInset, dy: 0.0)
var extractedHeight = extractedRect.height + expandedStatusLayout.size.height - statusLayout.size.height
@ -768,11 +904,16 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
} else {
strongSelf.backgroundImageNode.frame = nonExtractedRect
}
if case .tile = item.style, strongSelf.backgroundImageNode.image == nil {
strongSelf.backgroundImageNode.image = generateStretchableFilledCircleImage(diameter: backgroundCornerRadius * 2.0, color: UIColor(rgb: 0x1c1c1e))
strongSelf.backgroundImageNode.alpha = 1.0
}
strongSelf.extractedBackgroundImageNode.frame = strongSelf.backgroundImageNode.bounds
strongSelf.contextSourceNode.contentRect = extractedRect
strongSelf.containerNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.contextSourceNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.contentWrapperNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.offsetContainerNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.contextSourceNode.contentNode.frame = CGRect(origin: CGPoint(), size: layout.contentSize)
strongSelf.containerNode.isGestureEnabled = item.contextAction != nil
@ -843,13 +984,13 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
strongSelf.insertSubnode(strongSelf.bottomStripeNode, at: 1)
}
strongSelf.topStripeNode.isHidden = first
strongSelf.bottomStripeNode.isHidden = last
strongSelf.topStripeNode.isHidden = first || item.style == .tile
strongSelf.bottomStripeNode.isHidden = last || item.style == .tile
transition.updateFrame(node: strongSelf.topStripeNode, frame: CGRect(origin: CGPoint(x: leftInset, y: -min(insets.top, separatorHeight)), size: CGSize(width: layoutSize.width, height: separatorHeight)))
transition.updateFrame(node: strongSelf.bottomStripeNode, frame: CGRect(origin: CGPoint(x: leftInset, y: contentSize.height + -separatorHeight), size: CGSize(width: layoutSize.width - leftInset, height: separatorHeight)))
transition.updateFrame(node: strongSelf.titleNode, frame: CGRect(origin: CGPoint(x: leftInset, y: verticalInset + verticalOffset), size: titleLayout.size))
transition.updateFrame(node: strongSelf.titleNode, frame: titleFrame)
transition.updateFrame(node: strongSelf.statusNode, frame: CGRect(origin: CGPoint(x: leftInset, y: strongSelf.titleNode.frame.maxY + titleSpacing), size: statusLayout.size))
transition.updateFrame(node: strongSelf.expandedStatusNode, frame: CGRect(origin: CGPoint(x: leftInset, y: strongSelf.titleNode.frame.maxY + titleSpacing), size: expandedStatusLayout.size))
@ -872,7 +1013,6 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
credibilityIconNode.removeFromSupernode()
}
let avatarFrame = CGRect(origin: CGPoint(x: params.leftInset + 15.0, y: floorToScreenPixels((layout.contentSize.height - avatarSize) / 2.0)), size: CGSize(width: avatarSize, height: avatarSize))
transition.updateFrameAsPositionAndBounds(node: strongSelf.avatarNode, frame: avatarFrame)
let blobFrame = avatarFrame.insetBy(dx: -14.0, dy: -14.0)
@ -964,6 +1104,10 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
nodeToAnimateIn = animationNode
}
var color = color
if color.rgb == 0x979797 && item.style == .tile {
color = UIColor(rgb: 0xffffff)
}
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: false, color: color), animated: true)
strongSelf.actionButtonNode.isUserInteractionEnabled = false
} else if let animationNode = strongSelf.animationNode {
@ -1040,31 +1184,42 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
}
let videoSize = CGSize(width: avatarSize, height: avatarSize)
let videoSize = tileSize
let videoNode = item.getVideo()
if let current = strongSelf.videoNode, current !== videoNode {
current.removeFromSupernode()
}
let actionOffset: CGFloat = 0.0
strongSelf.videoNode = videoNode
if let videoNode = videoNode {
videoNode.updateLayout(size: videoSize, transition: .immediate)
if videoNode.supernode !== strongSelf.avatarNode {
videoNode.clipsToBounds = true
videoNode.cornerRadius = avatarSize / 2.0
strongSelf.avatarNode.addSubnode(videoNode)
}
videoNode.frame = CGRect(origin: CGPoint(), size: videoSize)
switch item.style {
case .list:
strongSelf.videoContainerNode.frame = strongSelf.avatarNode.frame
strongSelf.videoContainerNode.cornerRadius = avatarSize / 2.0
case .tile:
strongSelf.videoContainerNode.frame = CGRect(origin: CGPoint(), size: tileSize)
strongSelf.videoContainerNode.cornerRadius = backgroundCornerRadius
}
if let videoNode = videoNode {
strongSelf.avatarNode.alpha = 0.0
videoNode.updateLayout(size: videoSize, isLandscape: false, transition: .immediate)
if videoNode.supernode !== strongSelf.avatarNode {
videoNode.clipsToBounds = true
strongSelf.videoContainerNode.addSubnode(videoNode)
}
videoNode.position = CGPoint(x: strongSelf.videoContainerNode.frame.width / 2.0, y: strongSelf.videoContainerNode.frame.height / 2.0)
videoNode.bounds = CGRect(origin: CGPoint(), size: videoSize)
}
let animationSize = CGSize(width: 36.0, height: 36.0)
strongSelf.iconNode?.frame = CGRect(origin: CGPoint(), size: animationSize)
strongSelf.animationNode?.frame = CGRect(origin: CGPoint(), size: animationSize)
strongSelf.raiseHandNode?.frame = CGRect(origin: CGPoint(), size: animationSize).insetBy(dx: -6.0, dy: -6.0).offsetBy(dx: -2.0, dy: 0.0)
strongSelf.actionButtonNode.frame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset + actionOffset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
strongSelf.actionButtonNode.transform = CATransform3DMakeScale(animationScale, animationScale, 1.0)
strongSelf.actionButtonNode.frame = animationFrame
if let presence = item.presence as? TelegramUserPresence {
strongSelf.peerPresenceManager?.reset(presence: presence)
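The participant item now lays itself out in one of two styles: the familiar list row, and an 84x84 tile for the horizontal fullscreen strip (rotated container, 12pt title, centered avatar, 14pt rounded background). A condensed sketch of such a style switch, using only the sizes visible in the diff and otherwise hypothetical names:

import UIKit

enum ParticipantLayoutStyle {
    case list
    case tile
}

struct ParticipantLayout {
    let contentSize: CGSize
    let avatarFrame: CGRect
    let titleFont: UIFont
    let cornerRadius: CGFloat
}

func makeParticipantLayout(style: ParticipantLayoutStyle, listWidth: CGFloat, rowHeight: CGFloat, avatarSize: CGFloat) -> ParticipantLayout {
    switch style {
    case .list:
        return ParticipantLayout(
            contentSize: CGSize(width: listWidth, height: rowHeight),
            avatarFrame: CGRect(x: 15.0, y: floor((rowHeight - avatarSize) / 2.0), width: avatarSize, height: avatarSize),
            titleFont: .systemFont(ofSize: 17.0),
            cornerRadius: 0.0)
    case .tile:
        let tileSize = CGSize(width: 84.0, height: 84.0)
        return ParticipantLayout(
            contentSize: tileSize,
            avatarFrame: CGRect(x: floor((tileSize.width - avatarSize) / 2.0), y: 13.0, width: avatarSize, height: avatarSize),
            titleFont: .systemFont(ofSize: 12.0),
            cornerRadius: 14.0)
    }
}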

View File

@ -73,11 +73,11 @@ func telegramMediaActionFromApiAction(_ action: Api.MessageAction) -> TelegramMe
}
case let .messageActionSetMessagesTTL(period):
return TelegramMediaAction(action: .messageAutoremoveTimeoutUpdated(period))
/*case let .messageActionGroupCallScheduled(call, scheduleDate):
case let .messageActionGroupCallScheduled(call, scheduleDate):
switch call {
case let .inputGroupCall(id, accessHash):
return TelegramMediaAction(action: .groupPhoneCall(callId: id, accessHash: accessHash, scheduleDate: scheduleDate, duration: nil))
}*/
}
}
}

View File

@ -121,7 +121,7 @@ public enum BotPaymentFormRequestError {
extension BotPaymentInvoice {
init(apiInvoice: Api.Invoice) {
switch apiInvoice {
case let .invoice(flags, currency, prices, minTipAmount, maxTipAmount, defaultTipAmount):
case let .invoice(flags, currency, prices, maxTipAmount, suggestedTipAmounts):
var fields = BotPaymentInvoiceFields()
if (flags & (1 << 1)) != 0 {
fields.insert(.name)
@ -145,9 +145,9 @@ extension BotPaymentInvoice {
fields.insert(.emailAvailableToProvider)
}
var parsedTip: BotPaymentInvoice.Tip?
if let minTipAmount = minTipAmount, let maxTipAmount = maxTipAmount, let defaultTipAmount = defaultTipAmount {
parsedTip = BotPaymentInvoice.Tip(min: minTipAmount, max: maxTipAmount, default: defaultTipAmount)
}
// if let minTipAmount = minTipAmount, let maxTipAmount = maxTipAmount, let defaultTipAmount = defaultTipAmount {
// parsedTip = BotPaymentInvoice.Tip(min: minTipAmount, max: maxTipAmount, default: defaultTipAmount)
// }
self.init(isTest: (flags & (1 << 0)) != 0, requestedFields: fields, currency: currency, prices: prices.map {
switch $0 {
case let .labeledPrice(label, amount):
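The invoice parser above tests individual bits of the TL flags field to decide which requested fields are present, e.g. (flags & (1 << 1)) != 0 for the name field and bit 0 for test invoices. A minimal sketch of that decoding; only those two bits are taken from the diff, and the option set itself is a simplified stand-in for BotPaymentInvoiceFields:

struct InvoiceFields: OptionSet {
    let rawValue: Int32
    static let name = InvoiceFields(rawValue: 1 << 0)
}

func parseInvoiceFlags(_ flags: Int32) -> (isTest: Bool, fields: InvoiceFields) {
    var fields: InvoiceFields = []
    if (flags & (1 << 1)) != 0 {
        fields.insert(.name)
    }
    // Further wire bits map to phone, email, shipping address, etc. in the same way.
    return ((flags & (1 << 0)) != 0, fields)
}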

View File

@ -43,7 +43,7 @@ public struct GroupCallSummary: Equatable {
extension GroupCallInfo {
init?(_ call: Api.GroupCall) {
switch call {
case let .groupCall(flags, id, accessHash, participantCount, params, title, streamDcId, recordStartDate/*, scheduleDate*/, _):
case let .groupCall(flags, id, accessHash, participantCount, params, title, streamDcId, recordStartDate, scheduleDate, _):
var clientParams: String?
if let params = params {
switch params {
@ -177,8 +177,7 @@ public func createGroupCall(account: Account, peerId: PeerId) -> Signal<GroupCal
return .fail(.generic)
}
//return account.network.request(Api.functions.phone.createGroupCall(flags: 0, peer: inputPeer, randomId: Int32.random(in: Int32.min ... Int32.max), title: nil, scheduleDate: nil))
return account.network.request(Api.functions.phone.createGroupCall(peer: inputPeer, randomId: Int32.random(in: Int32.min ... Int32.max)))
return account.network.request(Api.functions.phone.createGroupCall(flags: 0, peer: inputPeer, randomId: Int32.random(in: Int32.min ... Int32.max), title: nil, scheduleDate: nil))
|> mapError { error -> CreateGroupCallError in
if error.errorDescription == "ANONYMOUS_CALLS_DISABLED" {
return .anonymousNotAllowed
@ -466,7 +465,7 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal
maybeParsedCall = GroupCallInfo(call)
switch call {
case let .groupCall(flags, _, _, _, _, title, _, recordStartDate/*, scheduleDate*/, _):
case let .groupCall(flags, _, _, _, _, title, _, recordStartDate, scheduleDate, _):
let isMuted = (flags & (1 << 1)) != 0
let canChange = (flags & (1 << 2)) != 0
state.defaultParticipantsAreMuted = GroupCallParticipantsContext.State.DefaultParticipantsAreMuted(isMuted: isMuted, canChange: canChange)
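The groupCall constructor gains a scheduleDate argument, following the usual TL convention that flag-gated optional fields are only present on the wire when their bit is set. A toy illustration of that parsing pattern; ToyReader stands in for the project's BufferReader, and the bit position used here is hypothetical:

struct ToyReader {
    private var values: [Int32]
    init(_ values: [Int32]) { self.values = values }
    mutating func readInt32() -> Int32? {
        return values.isEmpty ? nil : values.removeFirst()
    }
}

func parseScheduledCall(_ reader: inout ToyReader) -> (participantsCount: Int32, scheduleDate: Int32?)? {
    guard let flags = reader.readInt32(), let participantsCount = reader.readInt32() else {
        return nil
    }
    var scheduleDate: Int32?
    if (flags & (1 << 7)) != 0 { // hypothetical bit; the real constructor defines its own
        scheduleDate = reader.readInt32()
    }
    return (participantsCount, scheduleDate)
}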

View File

@ -2991,7 +2991,7 @@ func replayFinalState(accountManager: AccountManager, postbox: Postbox, accountP
})
switch call {
case let .groupCall(flags, _, _, _, _, title, streamDcId, recordStartDate, _):
case let .groupCall(flags, _, _, _, _, title, streamDcId, recordStartDate, _, _):
let isMuted = (flags & (1 << 1)) != 0
let canChange = (flags & (1 << 2)) != 0
let defaultParticipantsAreMuted = GroupCallParticipantsContext.State.DefaultParticipantsAreMuted(isMuted: isMuted, canChange: canChange)

View File

@ -196,7 +196,7 @@ func apiMessagePeerIds(_ message: Api.Message) -> [PeerId] {
}
switch action {
case .messageActionChannelCreate, .messageActionChatDeletePhoto, .messageActionChatEditPhoto, .messageActionChatEditTitle, .messageActionEmpty, .messageActionPinMessage, .messageActionHistoryClear, .messageActionGameScore, .messageActionPaymentSent, .messageActionPaymentSentMe, .messageActionPhoneCall, .messageActionScreenshotTaken, .messageActionCustomAction, .messageActionBotAllowed, .messageActionSecureValuesSent, .messageActionSecureValuesSentMe, .messageActionContactSignUp, .messageActionGroupCall, .messageActionSetMessagesTTL/*, .messageActionGroupCallScheduled*/:
case .messageActionChannelCreate, .messageActionChatDeletePhoto, .messageActionChatEditPhoto, .messageActionChatEditTitle, .messageActionEmpty, .messageActionPinMessage, .messageActionHistoryClear, .messageActionGameScore, .messageActionPaymentSent, .messageActionPaymentSentMe, .messageActionPhoneCall, .messageActionScreenshotTaken, .messageActionCustomAction, .messageActionBotAllowed, .messageActionSecureValuesSent, .messageActionSecureValuesSentMe, .messageActionContactSignUp, .messageActionGroupCall, .messageActionSetMessagesTTL, .messageActionGroupCallScheduled:
break
case let .messageActionChannelMigrateFrom(_, chatId):
result.append(PeerId(namespace: Namespaces.Peer.CloudGroup, id: PeerId.Id._internalFromInt32Value(chatId)))

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_vc_volume.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_vc_camera.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}