Video Chat Improvements

Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Commit 29df2fd229 (parent 21632c6dc0)
@@ -94,6 +94,10 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
     }
     
     public func updateLevel(_ level: CGFloat) {
+        self.updateLevel(level, immediately: false)
+    }
+    
+    public func updateLevel(_ level: CGFloat, immediately: Bool = false) {
         let normalizedLevel = min(1, max(level / maxLevel, 0))
         
         smallBlob.updateSpeedLevel(to: normalizedLevel)
@@ -101,6 +105,9 @@ public final class VoiceBlobView: UIView, TGModernConversationInputMicButtonDeco
         bigBlob.updateSpeedLevel(to: normalizedLevel)
         
         audioLevel = normalizedLevel
+        if immediately {
+            presentationAudioLevel = normalizedLevel
+        }
     }
     
     public func startAnimating() {
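The two hunks above carry the central pattern of the commit: updateLevel(_:) keeps its old signature but forwards to a new updateLevel(_:immediately:) overload, and when immediately is true the presentation value is snapped to the target instead of being left for the display-link interpolation to catch up. Below is a minimal standalone sketch of that pattern, with simplified names; the real VoiceBlobView drives several blob layers from a CADisplayLink, none of which is reproduced here.

import UIKit

// Sketch of the updateLevel(_:immediately:) pattern introduced above.
final class LevelSmoothingView: UIView {
    private let maxLevel: CGFloat = 1.5

    private var audioLevel: CGFloat = 0.0              // target level
    private var presentationAudioLevel: CGFloat = 0.0  // level currently rendered

    func updateLevel(_ level: CGFloat) {
        self.updateLevel(level, immediately: false)
    }

    func updateLevel(_ level: CGFloat, immediately: Bool = false) {
        let normalizedLevel = min(1, max(level / maxLevel, 0))
        self.audioLevel = normalizedLevel
        if immediately {
            // Snap the rendered value so the next frame starts at the new level
            // instead of animating up from the previous one.
            self.presentationAudioLevel = normalizedLevel
        }
    }

    // In the real view this runs once per frame from a CADisplayLink.
    func tick() {
        self.presentationAudioLevel += (self.audioLevel - self.presentationAudioLevel) * 0.2
    }
}

The same flag is threaded through VoiceChatActionButton, the private VoiceBlobView copy, VoiceChatBlobNode and VoiceChatMainStageNode in the hunks below, which use updateLevel(0.0, immediately: true) to reset the blob when the displayed peer changes.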
@@ -206,7 +206,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode
         self.activeDisposable.dispose()
     }
     
-    func updateLevel(_ level: CGFloat) {
+    func updateLevel(_ level: CGFloat, immediately: Bool = false) {
         self.backgroundNode.audioLevel = level
     }
     
@@ -545,10 +545,12 @@ private final class VoiceChatActionButtonBackgroundNode: ASDisplayNode
     
     var audioLevel: CGFloat = 0.0 {
         didSet {
-            self.maskBlobView.updateLevel(audioLevel)
+            self.maskBlobView.updateLevel(self.audioLevel, immediately: false)
         }
     }
     
+    
+    
     var updatedActive: ((Bool) -> Void)?
     var updatedColors: ((UIColor?, UIColor?) -> Void)?
     
@@ -1299,13 +1301,16 @@ private final class VoiceBlobView: UIView
         bigBlob.setColor(color.withAlphaComponent(0.21))
     }
     
-    public func updateLevel(_ level: CGFloat) {
+    public func updateLevel(_ level: CGFloat, immediately: Bool) {
         let normalizedLevel = min(1, max(level / maxLevel, 0))
         
         mediumBlob.updateSpeedLevel(to: normalizedLevel)
         bigBlob.updateSpeedLevel(to: normalizedLevel)
         
         audioLevel = normalizedLevel
+        if immediately {
+            presentationAudioLevel = normalizedLevel
+        }
     }
     
     public func startAnimating() {
@@ -1450,7 +1455,7 @@ final class BlobView: UIView
         let animation = CABasicAnimation(keyPath: "path")
         let previousPath = self.shapeLayer.path
         self.shapeLayer.path = nextPath
-        animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
+        animation.duration = CFTimeInterval(1.0 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
         animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
         animation.fromValue = previousPath
         animation.toValue = nextPath
@@ -13,6 +13,8 @@ import PresentationDataUtils
 import UIKitRuntimeUtils
 import ReplayKit
 
+private let accentColor: UIColor = UIColor(rgb: 0x007aff)
+
 final class VoiceChatCameraPreviewController: ViewController {
     private var controllerNode: VoiceChatCameraPreviewControllerNode {
         return self.displayNode as! VoiceChatCameraPreviewControllerNode
@@ -183,7 +185,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
         self.titleNode = ASTextNode()
         self.titleNode.attributedText = NSAttributedString(string: title, font: Font.bold(17.0), textColor: textColor)
         
-        self.cameraButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(theme: self.presentationData.theme), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false)
+        self.cameraButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: accentColor, foregroundColor: .white), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false)
         self.cameraButton.title = self.presentationData.strings.VoiceChat_VideoPreviewShareCamera
         
         self.screenButton = SolidRoundedButtonNode(theme: SolidRoundedButtonTheme(backgroundColor: buttonColor, foregroundColor: buttonTextColor), font: .bold, height: 52.0, cornerRadius: 11.0, gloss: false)
@@ -87,6 +87,8 @@ func decorationTopCornersImage(dark: Bool) -> UIImage?
     })?.stretchableImage(withLeftCapWidth: 25, topCapHeight: 32)
 }
 
+
+
 func decorationBottomCornersImage(dark: Bool) -> UIImage? {
     return generateImage(CGSize(width: 50.0, height: 110.0), rotatedContext: { (size, context) in
         let bounds = CGRect(origin: CGPoint(), size: size)
@@ -414,7 +416,7 @@ public final class VoiceChatController: ViewController
             }
         }
         
-        func tileItem(context: AccountContext, presentationData: PresentationData, interaction: Interaction, videoEndpointId: String, videoReady: Bool) -> VoiceChatTileItem? {
+        func tileItem(context: AccountContext, presentationData: PresentationData, interaction: Interaction, videoEndpointId: String, videoReady: Bool, showAsPresentation: Bool) -> VoiceChatTileItem? {
             guard case let .peer(peerEntry, _) = self else {
                 return nil
             }
@@ -485,7 +487,7 @@ public final class VoiceChatController: ViewController
                 text = .text(about, textIcon, .generic)
             }
            
-            return VoiceChatTileItem(account: context.account, peer: peerEntry.peer, videoEndpointId: videoEndpointId, videoReady: videoReady, strings: presentationData.strings, nameDisplayOrder: presentationData.nameDisplayOrder, speaking: speaking, icon: icon, text: text, additionalText: additionalText, action: {
+            return VoiceChatTileItem(account: context.account, peer: peerEntry.peer, videoEndpointId: videoEndpointId, videoReady: videoReady, strings: presentationData.strings, nameDisplayOrder: presentationData.nameDisplayOrder, speaking: speaking, icon: showAsPresentation ? .presentation : icon, text: text, additionalText: additionalText, action: {
                 interaction.switchToPeer(peer.id, videoEndpointId, true)
             }, contextAction: { node, gesture in
                 interaction.peerContextAction(peerEntry, node, gesture)
@@ -786,6 +788,7 @@ public final class VoiceChatController: ViewController
         private var animatingExpansion = false
         private var animatingAppearance = false
         private var animatingButtonsSwap = false
+        private var animatingMainStage = false
         private var panGestureArguments: (topInset: CGFloat, offset: CGFloat)?
         private var isPanning = false
        
@@ -865,8 +868,7 @@ public final class VoiceChatController: ViewController
         private var timeoutedEndpointIds = Set<String>()
         private var readyVideoDisposables = DisposableDict<String>()
        
-        private var endpointToPeerId: [String: PeerId] = [:]
-        private var peerIdToEndpoint: [PeerId: String] = [:]
+        private var peerIdToEndpointId: [PeerId: String] = [:]
        
         private var currentSpeakers: [PeerId] = []
         private var currentDominantSpeaker: (PeerId, String?, Double)?
@@ -1114,7 +1116,7 @@ public final class VoiceChatController: ViewController
         }, switchToPeer: { [weak self] peerId, videoEndpointId, expand in
             if let strongSelf = self {
                 if expand, let videoEndpointId = videoEndpointId {
-                    strongSelf.currentDominantSpeaker = (peerId, videoEndpointId, CACurrentMediaTime())
+                    strongSelf.currentDominantSpeaker = (peerId, videoEndpointId, CACurrentMediaTime() + 3.0)
                     strongSelf.updateDisplayMode(.fullscreen(controlsHidden: false))
                 } else {
                     strongSelf.currentForcedSpeaker = nil
@@ -1883,7 +1885,7 @@ public final class VoiceChatController: ViewController
                
                 var maxLevelWithVideo: (PeerId, Float)?
                 for (peerId, source, level, hasSpeech) in levels {
-                    let hasVideo = strongSelf.peerIdToEndpoint[peerId] != nil
+                    let hasVideo = strongSelf.peerIdToEndpointId[peerId] != nil
                     if hasSpeech && source != 0 && hasVideo {
                         if let (_, currentLevel) = maxLevelWithVideo {
                             if currentLevel < level {
@@ -1898,7 +1900,7 @@ public final class VoiceChatController: ViewController
                 if maxLevelWithVideo == nil {
                     if let (peerId, _, _) = strongSelf.currentDominantSpeaker {
                         maxLevelWithVideo = (peerId, 0.0)
-                    } else if strongSelf.peerIdToEndpoint.count > 0 {
+                    } else if strongSelf.peerIdToEndpointId.count > 0 {
                         for entry in strongSelf.currentFullscreenEntries {
                             if case let .peer(peerEntry, _) = entry {
                                 if let _ = peerEntry.effectiveVideoEndpointId {
@@ -2111,7 +2113,7 @@ public final class VoiceChatController: ViewController
             }
            
             self.mainStageNode.back = { [weak self] in
-                if let strongSelf = self {
+                if let strongSelf = self, !strongSelf.isPanning && !strongSelf.animatingExpansion && !strongSelf.mainStageNode.animating {
                     strongSelf.currentForcedSpeaker = nil
                     strongSelf.updateDisplayMode(.modal(isExpanded: true, isFilled: true), fromPan: true)
                     strongSelf.effectiveSpeaker = nil
@@ -3489,7 +3491,7 @@ public final class VoiceChatController: ViewController
             let listMaxY = listTopInset + listSize.height
             let bottomOffset = min(0.0, bottomEdge - listMaxY) + layout.size.height - bottomPanelHeight
            
-            let bottomCornersFrame = CGRect(origin: CGPoint(x: sideInset + floorToScreenPixels((size.width - contentWidth) / 2.0), y: -50.0 + bottomOffset + bottomGradientHeight), size: CGSize(width: contentWidth - sideInset * 2.0, height: 50.0 + 40.0))
+            let bottomCornersFrame = CGRect(origin: CGPoint(x: sideInset + floorToScreenPixels((size.width - contentWidth) / 2.0), y: -50.0 + bottomOffset + bottomGradientHeight), size: CGSize(width: contentWidth - sideInset * 2.0, height: 50.0 + 60.0))
             let previousBottomCornersFrame = self.bottomCornersNode.frame
             if !bottomCornersFrame.equalTo(previousBottomCornersFrame) {
                 self.bottomCornersNode.frame = bottomCornersFrame
@@ -4362,11 +4364,11 @@ public final class VoiceChatController: ViewController
             })
         }
        
-        private func updateMembers(maybeUpdateVideo: Bool = true) {
-            self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? Set(), maybeUpdateVideo: maybeUpdateVideo)
+        private func updateMembers(maybeUpdateVideo: Bool = true, force: Bool = false) {
+            self.updateMembers(muteState: self.effectiveMuteState, callMembers: self.currentCallMembers ?? ([], nil), invitedPeers: self.currentInvitedPeers ?? [], speakingPeers: self.currentSpeakingPeers ?? Set(), maybeUpdateVideo: maybeUpdateVideo, force: force)
         }
        
-        private func updateMembers(muteState: GroupCallParticipantsContext.Participant.MuteState?, callMembers: ([GroupCallParticipantsContext.Participant], String?), invitedPeers: [Peer], speakingPeers: Set<PeerId>, maybeUpdateVideo: Bool = true) {
+        private func updateMembers(muteState: GroupCallParticipantsContext.Participant.MuteState?, callMembers: ([GroupCallParticipantsContext.Participant], String?), invitedPeers: [Peer], speakingPeers: Set<PeerId>, maybeUpdateVideo: Bool = true, force: Bool = false) {
             var disableAnimation = false
             if self.currentCallMembers?.1 != callMembers.1 {
                 disableAnimation = true
@@ -4384,7 +4386,7 @@ public final class VoiceChatController: ViewController
             var processedPeerIds = Set<PeerId>()
             var processedFullscreenPeerIds = Set<PeerId>()
            
-            var endpointIdToPeerId: [String: PeerId] = [:]
+            var peerIdToCameraEndpointId: [PeerId: String] = [:]
             var peerIdToEndpointId: [PeerId: String] = [:]
            
             var requestedVideoChannels: [PresentationGroupCallRequestedVideo] = []
@@ -4446,10 +4448,7 @@ public final class VoiceChatController: ViewController
                 }
                
                 if let videoEndpointId = member.videoEndpointId {
-                    endpointIdToPeerId[videoEndpointId] = member.peer.id
-                }
-                if let presentationEndpointId = member.presentationEndpointId {
-                    endpointIdToPeerId[presentationEndpointId] = member.peer.id
+                    peerIdToCameraEndpointId[member.peer.id] = videoEndpointId
                 }
                 if let anyEndpointId = member.presentationEndpointId ?? member.videoEndpointId {
                     peerIdToEndpointId[member.peer.id] = anyEndpointId
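The hunk above replaces the old endpoint-to-peer bookkeeping: instead of endpointIdToPeerId, updateMembers now fills a peerIdToCameraEndpointId map (queried further down as the showAsPresentation flag when building tiles) alongside the existing peerIdToEndpointId map. A small self-contained sketch of how the two dictionaries are derived from the member list; the Member type here is a simplified placeholder, not the real GroupCallParticipantsContext participant type.

struct Member {
    let peerId: Int64
    let videoEndpointId: String?
    let presentationEndpointId: String?
}

// Rebuilds the two lookups used by updateMembers(...):
// - camera: peers that currently expose a camera endpoint
// - any: whichever endpoint represents the peer, preferring the screen share
func buildEndpointMaps(_ members: [Member]) -> (camera: [Int64: String], any: [Int64: String]) {
    var peerIdToCameraEndpointId: [Int64: String] = [:]
    var peerIdToEndpointId: [Int64: String] = [:]
    for member in members {
        if let videoEndpointId = member.videoEndpointId {
            peerIdToCameraEndpointId[member.peerId] = videoEndpointId
        }
        if let anyEndpointId = member.presentationEndpointId ?? member.videoEndpointId {
            peerIdToEndpointId[member.peerId] = anyEndpointId
        }
    }
    return (peerIdToCameraEndpointId, peerIdToEndpointId)
}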
@@ -4485,7 +4484,7 @@ public final class VoiceChatController: ViewController
                             self.videoOrder.append(videoEndpointId)
                         }
                     }
-                    if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId)) {
+                    if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId), showAsPresentation: peerIdToCameraEndpointId[peerEntry.peer.id] != nil) {
                         isTile = true
                         tileByVideoEndpoint[videoEndpointId] = tileItem
                     }
@@ -4501,7 +4500,7 @@ public final class VoiceChatController: ViewController
                             self.videoOrder.append(videoEndpointId)
                         }
                     }
-                    if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId)) {
+                    if let tileItem = ListEntry.peer(peerEntry, 0).tileItem(context: self.context, presentationData: self.presentationData, interaction: interaction, videoEndpointId: videoEndpointId, videoReady: self.readyVideoEndpointIds.contains(videoEndpointId), showAsPresentation: false) {
                         isTile = true
                         tileByVideoEndpoint[videoEndpointId] = tileItem
                     }
@@ -4597,7 +4596,7 @@ public final class VoiceChatController: ViewController
            
             self.requestedVideoChannels = requestedVideoChannels
            
-            guard self.didSetDataReady && !self.isPanning && !self.animatingExpansion else {
+            guard self.didSetDataReady && (force || (!self.isPanning && !self.animatingExpansion && !self.animatingMainStage)) else {
                 return
             }
            
@@ -4619,8 +4618,7 @@ public final class VoiceChatController: ViewController
            
             self.updateRequestedVideoChannels()
            
-            self.endpointToPeerId = endpointIdToPeerId
-            self.peerIdToEndpoint = peerIdToEndpointId
+            self.peerIdToEndpointId = peerIdToEndpointId
            
             if !tileItems.isEmpty {
                 entries.insert(.tiles(tileItems), at: 0)
@@ -4835,7 +4833,7 @@ public final class VoiceChatController: ViewController
            
             self.effectiveSpeaker = effectiveSpeaker
             if updateMembers {
-                self.updateMembers(maybeUpdateVideo: false)
+                self.updateMembers(maybeUpdateVideo: false, force: force)
             }
             self.mainStageNode.update(peer: effectiveSpeaker, waitForFullSize: waitForFullSize, completion: {
                 completion?()
@@ -5010,6 +5008,7 @@ public final class VoiceChatController: ViewController
                     self.panGestureArguments = nil
                     self.fullscreenListContainer.subnodeTransform = CATransform3DIdentity
                     if abs(translation.y) > 100.0 || abs(velocity.y) > 300.0 {
+                        self.mainStageBackgroundNode.layer.removeAllAnimations()
                         self.currentForcedSpeaker = nil
                         self.updateDisplayMode(.modal(isExpanded: true, isFilled: true), fromPan: true)
                         self.effectiveSpeaker = nil
@@ -5551,7 +5550,7 @@ public final class VoiceChatController: ViewController
         }
        
         private func updateDisplayMode(_ displayMode: DisplayMode, fromPan: Bool = false) {
-            guard !self.animatingExpansion && !self.mainStageNode.animating else {
+            guard !self.animatingExpansion && !self.animatingMainStage && !self.mainStageNode.animating else {
                 return
             }
             self.updateMembers()
@@ -5562,6 +5561,11 @@ public final class VoiceChatController: ViewController
                 isFullscreen = true
             }
            
+            if case .fullscreen = previousDisplayMode, case .fullscreen = displayMode {
+            } else {
+                self.animatingMainStage = true
+            }
+            
             let completion = {
                 self.displayMode = displayMode
                 self.updateDecorationsColors()
@@ -5585,8 +5589,6 @@ public final class VoiceChatController: ViewController
                     }
                 }
                
-                self.animatingExpansion = true
-                
                 let completion = {
                     let effectiveSpeakerPeerId = self.effectiveSpeaker?.0
                    
@@ -5606,7 +5608,9 @@ public final class VoiceChatController: ViewController
                         let transitionStartPosition = otherItemNode.view.convert(CGPoint(x: otherItemNode.frame.width / 2.0, y: otherItemNode.frame.height), to: self.fullscreenListContainer.view.superview)
                         self.fullscreenListContainer.layer.animatePosition(from: transitionStartPosition, to: self.fullscreenListContainer.position, duration: 0.55, timingFunction: kCAMediaTimingFunctionSpring)
                        
-                        self.mainStageNode.animateTransitionIn(from: otherItemNode, transition: transition)
+                        self.mainStageNode.animateTransitionIn(from: otherItemNode, transition: transition, completion: { [weak self] in
+                            self?.animatingMainStage = false
+                        })
                         self.mainStageNode.alpha = 1.0
                        
                         self.mainStageBackgroundNode.alpha = 1.0
@@ -5669,9 +5673,7 @@ public final class VoiceChatController: ViewController
                         fullscreenItemNodes[String(item.peer.id.toInt64()) + "_" + (item.videoEndpointId ?? "")] = itemNode
                     }
                 }
                
-                self.animatingExpansion = true
-                
                 let completion = {
                     let effectiveSpeakerPeerId = self.effectiveSpeaker?.0
                     var targetTileNode: VoiceChatTileItemNode?
@@ -5727,6 +5729,7 @@ public final class VoiceChatController: ViewController
                             strongSelf.contentContainer.insertSubnode(strongSelf.mainStageContainerNode, belowSubnode: strongSelf.transitionContainerNode)
                            
                             strongSelf.isPanning = false
+                            strongSelf.animatingMainStage = false
                         })
                        
                         self.listContainer.layer.animateScale(from: 0.86, to: 1.0, duration: 0.55, timingFunction: kCAMediaTimingFunctionSpring)
@@ -5757,8 +5760,6 @@ public final class VoiceChatController: ViewController
                     completion()
                 }
             } else if case .fullscreen = self.displayMode {
-                self.animatingExpansion = true
-                
                 if let (layout, navigationHeight) = self.validLayout {
                     let transition: ContainedViewLayoutTransition = .animated(duration: 0.4, curve: .spring)
                     self.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: transition)
@@ -290,12 +290,13 @@ final class VoiceChatMainStageNode: ASDisplayNode
     private var animatingOut = false
     private var appeared = false
    
-    func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition) {
+    func animateTransitionIn(from sourceNode: ASDisplayNode, transition: ContainedViewLayoutTransition, completion: @escaping () -> Void) {
         guard let sourceNode = sourceNode as? VoiceChatTileItemNode, let _ = sourceNode.item, let (_, sideInset, bottomInset, isLandscape) = self.validLayout else {
             return
         }
         self.appeared = true
        
+        self.backgroundNode.alpha = 0.0
         self.topFadeNode.alpha = 0.0
         self.titleNode.alpha = 0.0
         self.microphoneNode.alpha = 0.0
@@ -331,6 +332,7 @@ final class VoiceChatMainStageNode: ASDisplayNode
         transition.updateFrame(node: self, frame: targetFrame, completion: { [weak self] _ in
             sourceNode.alpha = 1.0
             self?.animatingIn = false
+            completion()
         })
     }
    
@@ -346,6 +348,11 @@ final class VoiceChatMainStageNode: ASDisplayNode
             alphaTransition.updateAlpha(node: self.backgroundNode, alpha: 0.0)
         } else {
             self.backgroundNode.alpha = 0.0
+            
+            self.microphoneNode.alpha = 1.0
+            self.titleNode.alpha = 1.0
+            self.bottomFadeNode.alpha = 1.0
+            self.bottomFillNode.alpha = 1.0
         }
         alphaTransition.updateAlpha(node: self.topFadeNode, alpha: 0.0)
         alphaTransition.updateAlpha(node: self.titleNode, alpha: 0.0)
@@ -572,14 +579,17 @@ final class VoiceChatMainStageNode: ASDisplayNode
             self.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
         }
        
-        self.audioLevelNode.updateGlowAndGradientAnimations(type: gradient, animated: true)
-        
         self.pinButtonTitleNode.isHidden = !pinned
         self.pinButtonIconNode.image = !pinned ? generateTintedImage(image: UIImage(bundleImageName: "Call/Pin"), color: .white) : generateTintedImage(image: UIImage(bundleImageName: "Call/Unpin"), color: .white)
        
         self.audioLevelNode.startAnimating(immediately: true)
        
         if let getAudioLevel = self.getAudioLevel, previousPeerEntry?.peer.id != peerEntry.peer.id {
+            self.avatarNode.layer.removeAllAnimations()
+            self.avatarNode.transform = CATransform3DIdentity
+            self.audioLevelNode.updateGlowAndGradientAnimations(type: .active, animated: false)
+            self.audioLevelNode.updateLevel(0.0, immediately: true)
+            
             self.audioLevelNode.isHidden = self.currentPeer?.1 != nil
             self.audioLevelDisposable.set((getAudioLevel(peerEntry.peer.id)
             |> deliverOnMainQueue).start(next: { [weak self] value in
@@ -589,7 +599,7 @@ final class VoiceChatMainStageNode: ASDisplayNode
                
                 let level = min(1.5, max(0.0, CGFloat(value)))
                
-                strongSelf.audioLevelNode.updateLevel(CGFloat(value))
+                strongSelf.audioLevelNode.updateLevel(CGFloat(value), immediately: false)
                
                 let avatarScale: CGFloat
                 if value > 0.02 {
@@ -603,6 +613,8 @@ final class VoiceChatMainStageNode: ASDisplayNode
             }))
         }
        
+        self.audioLevelNode.updateGlowAndGradientAnimations(type: gradient, animated: true)
+        
         self.microphoneNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true)
     }
    
@@ -932,8 +944,8 @@ class VoiceChatBlobNode: ASDisplayNode
         self.blobView.startAnimating(immediately: true)
     }
    
-    func updateLevel(_ level: CGFloat) {
-        self.blobView.updateLevel(level)
+    func updateLevel(_ level: CGFloat, immediately: Bool) {
+        self.blobView.updateLevel(level, immediately: immediately)
     }
    
     func startAnimating(immediately: Bool) {
@@ -996,9 +1008,14 @@ class VoiceChatBlobNode: ASDisplayNode
         case .muted:
             targetColors = [pink.cgColor, purple.cgColor, purple.cgColor]
         }
-        self.foregroundGradientLayer.colors = targetColors
         if animated {
+            self.foregroundGradientLayer.colors = targetColors
             self.foregroundGradientLayer.animate(from: initialColors as AnyObject, to: targetColors as AnyObject, keyPath: "colors", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
+        } else {
+            CATransaction.begin()
+            CATransaction.setDisableActions(true)
+            self.foregroundGradientLayer.colors = targetColors
+            CATransaction.commit()
         }
     }
    
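In the gradient hunk above, the colors of foregroundGradientLayer are no longer assigned unconditionally: in the animated branch the assignment is paired with the project's animate(from:to:keyPath:) helper, and in the new non-animated branch it is wrapped in a CATransaction with actions disabled, which suppresses the implicit animation CALayer would otherwise attach to a colors change on a standalone layer. A standalone sketch of the same technique using plain Core Animation instead of the project's helper; the layer and colors here are placeholders.

import UIKit

/// Applies new gradient colors either with an explicit 0.3s animation or
/// instantly, suppressing the implicit CALayer action in the second case.
func setGradientColors(_ colors: [CGColor], on layer: CAGradientLayer, animated: Bool) {
    if animated {
        let previousColors = layer.colors
        layer.colors = colors
        let animation = CABasicAnimation(keyPath: "colors")
        animation.fromValue = previousColors
        animation.toValue = colors
        animation.duration = 0.3
        animation.timingFunction = CAMediaTimingFunction(name: .linear)
        layer.add(animation, forKey: "colors")
    } else {
        // Without this transaction the property change would still produce an
        // implicit cross-fade (roughly 0.25s by default).
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        layer.colors = colors
        CATransaction.commit()
    }
}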
@@ -69,7 +69,7 @@ final class VoiceChatTileItem: Equatable
         if lhs.videoReady != rhs.videoReady {
             return false
         }
-        if lhs.speaking != rhs.speaking {
+        if lhs.icon != rhs.icon {
             return false
         }
         if lhs.text != rhs.text {
@@ -78,6 +78,9 @@ final class VoiceChatTileItem: Equatable
         if lhs.additionalText != rhs.additionalText {
             return false
         }
+        if lhs.speaking != rhs.speaking {
+            return false
+        }
         if lhs.icon != rhs.icon {
             return false
         }
@@ -113,7 +116,7 @@ final class VoiceChatTileItemNode: ASDisplayNode
     let fadeNode: ASDisplayNode
     private var shimmerNode: VoiceChatTileShimmeringNode?
     private let titleNode: ImmediateTextNode
-    private let iconNode: ASImageNode
+    private var iconNode: ASImageNode?
     private var animationNode: VoiceChatMicrophoneNode?
     var highlightNode: VoiceChatTileHighlightNode
     private let statusNode: VoiceChatParticipantStatusNode
@@ -157,10 +160,6 @@ final class VoiceChatTileItemNode: ASDisplayNode
        
         self.statusNode = VoiceChatParticipantStatusNode()
        
-        self.iconNode = ASImageNode()
-        self.iconNode.displaysAsynchronously = false
-        self.iconNode.displayWithoutProcessing = true
-        
         self.highlightNode = VoiceChatTileHighlightNode()
         self.highlightNode.alpha = 0.0
         self.highlightNode.updateGlowAndGradientAnimations(type: .speaking)
@@ -179,11 +178,10 @@ final class VoiceChatTileItemNode: ASDisplayNode
         self.contentNode.addSubnode(self.fadeNode)
         self.contentNode.addSubnode(self.infoNode)
         self.infoNode.addSubnode(self.titleNode)
-        self.infoNode.addSubnode(self.iconNode)
         self.contentNode.addSubnode(self.highlightNode)
        
         self.containerNode.shouldBegin = { [weak self] location in
-            guard let _ = self else {
+            guard let strongSelf = self, let item = strongSelf.item, item.videoReady else {
                 return false
             }
             return true
@@ -378,6 +376,56 @@ final class VoiceChatTileItemNode: ASDisplayNode
                 self.animationNode = nil
                 animationNode.removeFromSupernode()
             }
+            
+            var hadMicrophoneNode = false
+            var hadIconNode = false
+            var nodeToAnimateIn: ASDisplayNode?
+            
+            if case let .microphone(muted) = item.icon {
+                let animationNode: VoiceChatMicrophoneNode
+                if let current = self.animationNode {
+                    animationNode = current
+                } else {
+                    animationNode = VoiceChatMicrophoneNode()
+                    self.animationNode = animationNode
+                    self.infoNode.addSubnode(animationNode)
+                }
+                animationNode.alpha = 1.0
+                animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: microphoneColor), animated: true)
+            } else if let animationNode = self.animationNode {
+                hadMicrophoneNode = true
+                self.animationNode = nil
+                animationNode.removeFromSupernode()
+            }
+            
+            if case .presentation = item.icon {
+                let iconNode: ASImageNode
+                if let current = self.iconNode {
+                    iconNode = current
+                } else {
+                    iconNode = ASImageNode()
+                    iconNode.displaysAsynchronously = false
+                    iconNode.contentMode = .center
+                    self.iconNode = iconNode
+                    self.infoNode.addSubnode(iconNode)
+                    
+                    nodeToAnimateIn = iconNode
+                }
+                
+                iconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Call/StatusScreen"), color: .white)
+            } else if let iconNode = self.iconNode {
+                hadIconNode = true
+                self.iconNode = nil
+                iconNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false)
+                iconNode.layer.animateScale(from: 1.0, to: 0.001, duration: 0.2, removeOnCompletion: false, completion: { [weak iconNode] _ in
+                    iconNode?.removeFromSupernode()
+                })
+            }
+            
+            if let node = nodeToAnimateIn, hadMicrophoneNode || hadIconNode {
+                node.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+                node.layer.animateScale(from: 0.001, to: 1.0, duration: 0.2)
+            }
         }
        
         let bounds = CGRect(origin: CGPoint(), size: size)
@@ -415,6 +463,10 @@ final class VoiceChatTileItemNode: ASDisplayNode
         let titleSize = self.titleNode.updateLayout(CGSize(width: size.width - 50.0, height: size.height))
         self.titleNode.frame = CGRect(origin: CGPoint(x: 30.0, y: size.height - titleSize.height - 8.0), size: titleSize)
        
+        if let iconNode = self.iconNode, let image = iconNode.image {
+            transition.updateFrame(node: iconNode, frame: CGRect(origin: CGPoint(x: floorToScreenPixels(16.0 - image.size.width / 2.0), y: floorToScreenPixels(size.height - 15.0 - image.size.height / 2.0)), size: image.size))
+        }
+        
         if let animationNode = self.animationNode {
             let animationSize = CGSize(width: 36.0, height: 36.0)
             animationNode.bounds = CGRect(origin: CGPoint(), size: animationSize)