Voice Chat UI fixes

Ilya Laktyushin 2020-11-28 18:24:49 +04:00
parent 86259ac889
commit 7d45ba44b9
3 changed files with 62 additions and 29 deletions


@@ -389,11 +389,13 @@ private final class VoiceChatActionButtonBackgroundNodeTransition {
private class VoiceChatActionButtonBackgroundNodeDrawingState: NSObject {
let timestamp: Double
let state: VoiceChatActionButtonBackgroundNodeState
let simplified: Bool
let transition: VoiceChatActionButtonBackgroundNodeTransition?
init(timestamp: Double, state: VoiceChatActionButtonBackgroundNodeState, transition: VoiceChatActionButtonBackgroundNodeTransition?) {
init(timestamp: Double, state: VoiceChatActionButtonBackgroundNodeState, simplified: Bool, transition: VoiceChatActionButtonBackgroundNodeTransition?) {
self.timestamp = timestamp
self.state = state
self.simplified = simplified
self.transition = transition
}
}
@@ -402,6 +404,7 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
private var state: VoiceChatActionButtonBackgroundNodeState
private var hasState = false
private var transition: VoiceChatActionButtonBackgroundNodeTransition?
private var simplified = false
var audioLevel: CGFloat = 0.0 {
didSet {
@@ -426,7 +429,7 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
self.displaysAsynchronously = true
}
func update(state: VoiceChatActionButtonBackgroundNodeState, animated: Bool) {
func update(state: VoiceChatActionButtonBackgroundNodeState, simplified: Bool, animated: Bool) {
var animated = animated
var hadState = true
if !self.hasState {
@@ -435,6 +438,8 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
animated = false
}
self.simplified = simplified
if state.type != self.state.type || !hadState {
if animated {
self.transition = VoiceChatActionButtonBackgroundNodeTransition(startTime: CACurrentMediaTime(), duration: 0.3, previousState: self.state)
@@ -491,13 +496,13 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
}
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
return VoiceChatActionButtonBackgroundNodeDrawingState(timestamp: CACurrentMediaTime(), state: self.state, transition: self.transition)
return VoiceChatActionButtonBackgroundNodeDrawingState(timestamp: CACurrentMediaTime(), state: self.state, simplified: self.simplified, transition: self.transition)
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
let context = UIGraphicsGetCurrentContext()!
let drawStart = CACurrentMediaTime()
// let drawStart = CACurrentMediaTime()
if !isRasterizing {
context.setBlendMode(.copy)
@@ -517,6 +522,7 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
var gradientTransition: CGFloat = 0.0
var gradientImage: UIImage? = parameters.state.blueGradient
var simpleColor: UIColor = blue
let gradientSize: CGFloat = bounds.width * 2.0
context.interpolationQuality = .low
@@ -537,19 +543,23 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
}
glowScale += gradientTransition * 0.3
gradientImage = gradientTransition.isZero ? blobsState.blueGradient : blobsState.greenGradient
if gradientTransition > 0.0 && gradientTransition < 1.0 {
gradientImage = generateImage(CGSize(width: 100.0, height: 100.0), contextGenerator: { size, context in
context.interpolationQuality = .low
if let image = blobsState.blueGradient?.cgImage {
context.draw(image, in: CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 100.0)))
}
context.setAlpha(gradientTransition)
if let image = blobsState.greenGradient?.cgImage {
context.draw(image, in: CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 100.0)))
}
}, opaque: true, scale: deviceScale)!
simpleColor = blue.interpolateTo(green, fraction: gradientTransition)!
if !parameters.simplified {
gradientImage = gradientTransition.isZero ? blobsState.blueGradient : blobsState.greenGradient
if gradientTransition > 0.0 && gradientTransition < 1.0 {
gradientImage = generateImage(CGSize(width: 100.0, height: 100.0), contextGenerator: { size, context in
context.interpolationQuality = .low
if let image = blobsState.blueGradient?.cgImage {
context.draw(image, in: CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 100.0)))
}
context.setAlpha(gradientTransition)
if let image = blobsState.greenGradient?.cgImage {
context.draw(image, in: CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 100.0)))
}
}, opaque: true, scale: deviceScale)!
}
}
context.saveGState()
@@ -559,7 +569,10 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
context.clip(to: maskBounds, mask: radialMaskImage.cgImage!)
if let gradient = gradientImage?.cgImage {
if parameters.simplified {
context.setFillColor(simpleColor.cgColor)
context.fill(bounds)
} else if let gradient = gradientImage?.cgImage {
context.draw(gradient, in: CGRect(origin: CGPoint(x: gradientCenter.x - gradientSize / 2.0, y: gradientCenter.y - gradientSize / 2.0), size: CGSize(width: gradientSize, height: gradientSize)))
}
context.restoreGState()
@@ -583,7 +596,10 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
context.setAlpha(blob.alpha)
if let gradient = gradientImage?.cgImage {
if parameters.simplified {
context.setFillColor(simpleColor.cgColor)
context.fill(bounds)
} else if let gradient = gradientImage?.cgImage {
context.draw(gradient, in: CGRect(origin: CGPoint(x: gradientCenter.x - gradientSize / 2.0, y: gradientCenter.y - gradientSize / 2.0), size: CGSize(width: gradientSize, height: gradientSize)))
}
}
@@ -660,8 +676,13 @@ private class VoiceChatActionButtonBackgroundNode: ASDisplayNode {
drawGradient = true
}
if drawGradient, let gradient = gradientImage?.cgImage {
context.draw(gradient, in: CGRect(origin: CGPoint(x: gradientCenter.x - gradientSize / 2.0, y: gradientCenter.y - gradientSize / 2.0), size: CGSize(width: gradientSize, height: gradientSize)))
if drawGradient {
if parameters.simplified {
context.setFillColor(simpleColor.cgColor)
context.fill(bounds)
} else if let gradient = gradientImage?.cgImage {
context.draw(gradient, in: CGRect(origin: CGPoint(x: gradientCenter.x - gradientSize / 2.0, y: gradientCenter.y - gradientSize / 2.0), size: CGSize(width: gradientSize, height: gradientSize)))
}
}
if let clearInside = clearInside {
@@ -764,7 +785,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.backgroundNode.audioLevel = normalizedLevel
}
func update(size: CGSize, buttonSize: CGSize, state: VoiceChatActionButtonState, title: String, subtitle: String, animated: Bool = false) {
func update(size: CGSize, buttonSize: CGSize, state: VoiceChatActionButtonState, title: String, subtitle: String, simplified: Bool, animated: Bool = false) {
let updatedTitle = self.currentParams?.title != title
let updatedSubtitle = self.currentParams?.subtitle != subtitle
@@ -793,7 +814,7 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
case .connecting:
backgroundState = VoiceChatActionButtonBackgroundNodeConnectingState(blueGradient: self.blueGradient)
}
self.backgroundNode.update(state: backgroundState, animated: true)
self.backgroundNode.update(state: backgroundState, simplified: simplified, animated: true)
if animated {
if let snapshotView = self.titleLabel.view.snapshotContentTree(), updatedTitle {
@@ -822,7 +843,15 @@ final class VoiceChatActionButton: HighlightTrackingButtonNode {
self.subtitleLabel.frame = CGRect(origin: CGPoint(x: floor((size.width - subtitleSize.width) / 2.0), y: self.titleLabel.frame.maxY + 1.0), size: subtitleSize)
self.containerNode.frame = CGRect(origin: CGPoint(), size: size)
self.backgroundNode.frame = CGRect(origin: CGPoint(), size: size)
self.backgroundNode.bounds = CGRect(origin: CGPoint(), size: size)
self.backgroundNode.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
if simplified {
self.backgroundNode.transform = CATransform3DMakeScale(0.85, 0.85, 1.0)
} else {
self.backgroundNode.transform = CATransform3DIdentity
}
let iconSize = CGSize(width: 90.0, height: 90.0)
self.iconNode.frame = CGRect(origin: CGPoint(x: floor((size.width - iconSize.width) / 2.0), y: floor((size.height - iconSize.height) / 2.0)), size: iconSize)
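
The drawing changes above add a simplified mode that skips the blob gradient images on constrained layouts and replaces each of the three touched context.draw(gradient, ...) call sites with a solid fill in an interpolated color. A minimal sketch of that fill, assuming nothing beyond UIKit; blue.interpolateTo(green, fraction:) is a Telegram helper used in the diff, so a plain component-wise lerp stands in for it here:

import UIKit

// Sketch of the simplified branch: instead of drawing the 2x-sized gradient
// image, fill the already-clipped region with a single color interpolated
// between the blue and green gradient endpoints.
func drawSimplifiedBackground(in context: CGContext, bounds: CGRect,
                              blue: UIColor, green: UIColor, transition: CGFloat) {
    var (r1, g1, b1, a1): (CGFloat, CGFloat, CGFloat, CGFloat) = (0, 0, 0, 0)
    var (r2, g2, b2, a2): (CGFloat, CGFloat, CGFloat, CGFloat) = (0, 0, 0, 0)
    _ = blue.getRed(&r1, green: &g1, blue: &b1, alpha: &a1)
    _ = green.getRed(&r2, green: &g2, blue: &b2, alpha: &a2)

    // Component-wise stand-in for UIColor.interpolateTo(_:fraction:).
    func lerp(_ a: CGFloat, _ b: CGFloat) -> CGFloat { a + (b - a) * transition }
    let simpleColor = UIColor(red: lerp(r1, r2), green: lerp(g1, g2),
                              blue: lerp(b1, b2), alpha: lerp(a1, a2))

    context.setFillColor(simpleColor.cgColor)
    context.fill(bounds)
}

Note that the background node is additionally scaled to 0.85 in this mode (the CATransform3DMakeScale change above), presumably so the 300pt button area still fits comfortably on narrow screens.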


@@ -839,8 +839,7 @@ public final class VoiceChatController: ViewController {
let sideButtonSize = CGSize(width: 60.0, height: 60.0)
let centralButtonSize = CGSize(width: 300.0, height: 300.0)
let sideButtonInset: CGFloat = 27.0
let actionButtonFrame = CGRect(origin: CGPoint(x: floor((layout.size.width - centralButtonSize.width) / 2.0), y: layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom + floor((bottomAreaHeight - centralButtonSize.height) / 2.0)), size: centralButtonSize)
let actionButtonState: VoiceChatActionButtonState
@@ -888,7 +887,7 @@ public final class VoiceChatController: ViewController {
}
self.actionButton.isUserInteractionEnabled = actionButtonEnabled
self.actionButton.update(size: centralButtonSize, buttonSize: CGSize(width: 144.0, height: 144.0), state: actionButtonState, title: actionButtonTitle, subtitle: actionButtonSubtitle, animated: true)
self.actionButton.update(size: centralButtonSize, buttonSize: CGSize(width: 144.0, height: 144.0), state: actionButtonState, title: actionButtonTitle, subtitle: actionButtonSubtitle, simplified: layout.size.width < 330.0, animated: true)
transition.updateFrame(node: self.actionButton, frame: actionButtonFrame)
var audioMode: CallControllerButtonsSpeakerMode = .none
@@ -942,8 +941,12 @@ public final class VoiceChatController: ViewController {
self.leaveNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.custom(0x4d120e)), image: .end), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate)
transition.updateFrame(node: self.audioOutputNode, frame: CGRect(origin: CGPoint(x: sideButtonInset, y: layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom + floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
transition.updateFrame(node: self.leaveNode, frame: CGRect(origin: CGPoint(x: layout.size.width - sideButtonInset - sideButtonSize.width, y: layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom + floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
let sideButtonMinimalInset: CGFloat = 16.0
let sideButtonOffset = min(36.0, floor((((layout.size.width - 144.0) / 2.0) - sideButtonSize.width) / 2.0))
let sideButtonOrigin = max(sideButtonMinimalInset, floor((layout.size.width - 144.0) / 2.0) - sideButtonOffset - sideButtonSize.width)
transition.updateFrame(node: self.audioOutputNode, frame: CGRect(origin: CGPoint(x: sideButtonOrigin, y: layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom + floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
transition.updateFrame(node: self.leaveNode, frame: CGRect(origin: CGPoint(x: layout.size.width - sideButtonOrigin - sideButtonSize.width, y: layout.size.height - bottomAreaHeight - layout.intrinsicInsets.bottom + floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
if isFirstTime {
while !self.enqueuedTransitions.isEmpty {
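
In the controller layout above, the audio-output and leave buttons are no longer pinned at a fixed 27pt inset; they now track the 144pt action-button area, with the gap capped at 36pt and the edge inset never dropping below 16pt, and the action button switches to the simplified rendering whenever layout.size.width < 330.0 (in practice, 320pt-wide devices). A small worked sketch of the placement math, using an assumed standalone helper rather than the controller code itself:

import UIKit

// Hypothetical helper mirroring the new side-button math: returns the x origin
// of the left (audio output) button; the leave button mirrors it on the right
// as layoutWidth - x - sideButtonWidth.
func sideButtonX(layoutWidth: CGFloat, sideButtonWidth: CGFloat = 60.0) -> CGFloat {
    let sideButtonMinimalInset: CGFloat = 16.0
    let sideButtonOffset = min(36.0, floor((((layoutWidth - 144.0) / 2.0) - sideButtonWidth) / 2.0))
    return max(sideButtonMinimalInset, floor((layoutWidth - 144.0) / 2.0) - sideButtonOffset - sideButtonWidth)
}

// sideButtonX(layoutWidth: 320.0) == 16.0  (raw origin of 14 clamps up to the 16pt minimal inset)
// sideButtonX(layoutWidth: 375.0) == 28.0
// sideButtonX(layoutWidth: 414.0) == 39.0  (offset capped at 36)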


@@ -57,6 +57,7 @@ final class VoiceChatOptionsButton: HighlightableButtonNode {
self.containerNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: 28.0, height: 28.0))
self.extractedContainerNode.frame = self.containerNode.bounds
self.extractedContainerNode.contentRect = self.containerNode.bounds
self.iconNode.frame = self.containerNode.bounds
}