Video Chat Improvements

This commit is contained in:
Ilya Laktyushin 2021-05-25 22:09:48 +04:00
parent 54173d76e9
commit d3558a9c09
15 changed files with 2182 additions and 2127 deletions

View File

@ -6475,3 +6475,5 @@ Sorry for the inconvenience.";
"VoiceChat.ParticipantIsSpeaking" = "%1$@ is speaking";
"WallpaperPreview.WallpaperColors" = "Colors";
"VoiceChat.UnmuteSuggestion" = "You are on mute. Tap here to speak.";

View File

@ -385,6 +385,7 @@ public protocol PresentationGroupCall: class {
var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> { get }
var isSpeaking: Signal<Bool, NoError> { get }
var canBeRemoved: Signal<Bool, NoError> { get }
var state: Signal<PresentationGroupCallState, NoError> { get }
var stateVersion: Signal<Int, NoError> { get }

View File

@ -256,7 +256,7 @@ final class BlobView: UIView {
layer.addSublayer(shapeLayer)
shapeLayer.transform = CATransform3DMakeScale(minScale, minScale, 1)
self.shapeLayer.transform = CATransform3DMakeScale(minScale, minScale, 1)
}
required init?(coder: NSCoder) {
@ -264,75 +264,58 @@ final class BlobView: UIView {
}
func setColor(_ color: UIColor, animated: Bool) {
let previousColor = shapeLayer.fillColor
shapeLayer.fillColor = color.cgColor
let previousColor = self.shapeLayer.fillColor
self.shapeLayer.fillColor = color.cgColor
if animated, let previousColor = previousColor {
shapeLayer.animate(from: previousColor, to: color.cgColor, keyPath: "fillColor", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
self.shapeLayer.animate(from: previousColor, to: color.cgColor, keyPath: "fillColor", timingFunction: CAMediaTimingFunctionName.linear.rawValue, duration: 0.3)
}
}
func updateSpeedLevel(to newSpeedLevel: CGFloat) {
speedLevel = max(speedLevel, newSpeedLevel)
self.speedLevel = max(self.speedLevel, newSpeedLevel)
if abs(lastSpeedLevel - newSpeedLevel) > 0.5 {
animateToNewShape()
}
// if abs(lastSpeedLevel - newSpeedLevel) > 0.5 {
// animateToNewShape()
// }
}
func startAnimating() {
animateToNewShape()
self.animateToNewShape()
}
func stopAnimating() {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "blob")
self.shapeLayer.removeAnimation(forKey: "path")
}
private func animateToNewShape() {
guard !isCircle else { return }
if pop_animation(forKey: "blob") != nil {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "blob")
if self.shapeLayer.path == nil {
let points = generateNextBlob(for: self.bounds.size)
self.shapeLayer.path = UIBezierPath.smoothCurve(through: points, length: bounds.width, smoothness: smoothness).cgPath
}
if fromPoints == nil {
fromPoints = generateNextBlob(for: bounds.size)
}
if toPoints == nil {
toPoints = generateNextBlob(for: bounds.size)
}
let nextPoints = generateNextBlob(for: self.bounds.size)
let nextPath = UIBezierPath.smoothCurve(through: nextPoints, length: bounds.width, smoothness: smoothness).cgPath
let animation = POPBasicAnimation()
animation.property = POPAnimatableProperty.property(withName: "blob.transition", initializer: { property in
property?.readBlock = { blobView, values in
guard let blobView = blobView as? BlobView, let values = values else { return }
values.pointee = blobView.transition
}
property?.writeBlock = { blobView, values in
guard let blobView = blobView as? BlobView, let values = values else { return }
blobView.transition = values.pointee
}
}) as? POPAnimatableProperty
animation.completionBlock = { [weak self] animation, finished in
let animation = CABasicAnimation(keyPath: "path")
let previousPath = self.shapeLayer.path
self.shapeLayer.path = nextPath
animation.duration = CFTimeInterval(1 / (self.minSpeed + (self.maxSpeed - self.minSpeed) * self.speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
animation.fromValue = previousPath
animation.toValue = nextPath
animation.isRemovedOnCompletion = false
animation.fillMode = .forwards
animation.completion = { [weak self] finished in
if finished {
self?.fromPoints = self?.currentPoints
self?.toPoints = nil
self?.animateToNewShape()
}
}
animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .linear)
animation.fromValue = 0
animation.toValue = 1
pop_add(animation, forKey: "blob")
self.shapeLayer.add(animation, forKey: "path")
lastSpeedLevel = speedLevel
speedLevel = 0
self.lastSpeedLevel = self.speedLevel
self.speedLevel = 0
}
// MARK: Helpers

View File

@ -653,7 +653,10 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
}
case let .extracted(extracted, keepInPlace):
let springDuration: Double = 0.42 * animationDurationFactor
let springDamping: CGFloat = 104.0
var springDamping: CGFloat = 104.0
if case let .extracted(source) = self.source, source.centerVertically {
springDamping = 124.0
}
self.actionsContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2 * animationDurationFactor)
self.actionsContainerNode.layer.animateSpring(from: 0.1 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: springDuration, initialVelocity: 0.0, damping: springDamping)
@ -662,13 +665,11 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
let contentParentNode = extracted
let localSourceFrame = self.view.convert(originalProjectedContentViewFrame.1, to: self.scrollNode.view)
var actionsSpringDamping = springDamping
var actionsDuration = springDuration
var actionsOffset: CGFloat = 0.0
var contentDuration = springDuration
if case let .extracted(source) = self.source, source.centerVertically {
actionsOffset = -(originalProjectedContentViewFrame.1.height - originalProjectedContentViewFrame.0.height) * 0.57
actionsSpringDamping *= 1.2
actionsDuration *= 1.0
contentDuration *= 0.9
}
@ -684,7 +685,7 @@ private final class ContextControllerNode: ViewControllerTracingNode, UIScrollVi
reactionContextNode.animateIn(from: CGRect(origin: CGPoint(x: originalProjectedContentViewFrame.1.minX, y: originalProjectedContentViewFrame.1.minY), size: contentParentNode.contentRect.size))
}
self.actionsContainerNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: localSourceFrame.center.x - self.actionsContainerNode.position.x, y: localSourceFrame.center.y - self.actionsContainerNode.position.y + actionsOffset)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: actionsDuration, initialVelocity: 0.0, damping: actionsSpringDamping, additive: true)
self.actionsContainerNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: localSourceFrame.center.x - self.actionsContainerNode.position.x, y: localSourceFrame.center.y - self.actionsContainerNode.position.y + actionsOffset)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: actionsDuration, initialVelocity: 0.0, damping: springDamping, additive: true)
let contentContainerOffset = CGPoint(x: localContentSourceFrame.center.x - self.contentContainerNode.frame.center.x - contentParentNode.contentRect.minX, y: localContentSourceFrame.center.y - self.contentContainerNode.frame.center.y - contentParentNode.contentRect.minY)
self.contentContainerNode.layer.animateSpring(from: NSValue(cgPoint: contentContainerOffset), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: contentDuration, initialVelocity: 0.0, damping: springDamping, additive: true)
contentParentNode.applyAbsoluteOffsetSpring?(-contentContainerOffset.y, springDuration, springDamping)

View File

@ -8,7 +8,6 @@ import TelegramCore
import TelegramPresentationData
import TelegramUIPreferences
import AccountContext
import LegacyComponents
import AnimatedCountLabelNode
private let blue = UIColor(rgb: 0x007fff)
@ -681,8 +680,8 @@ final class CurveView: UIView {
}
CATransaction.begin()
CATransaction.setDisableActions(true)
let lv = minOffset + (maxOffset - minOffset) * level
shapeLayer.transform = CATransform3DMakeTranslation(0.0, lv * 16.0, 0.0)
let lv = self.minOffset + (self.maxOffset - self.minOffset) * self.level
self.shapeLayer.transform = CATransform3DMakeTranslation(0.0, lv * 16.0, 0.0)
CATransaction.commit()
}
}
@ -696,39 +695,16 @@ final class CurveView: UIView {
return layer
}()
private var transition: CGFloat = 0 {
didSet {
guard let currentPoints = currentPoints else { return }
shapeLayer.path = UIBezierPath.smoothCurve(through: currentPoints, length: bounds.width, smoothness: smoothness, curve: true).cgPath
}
}
override var frame: CGRect {
didSet {
if self.frame.size != oldValue.size {
self.fromPoints = nil
self.toPoints = nil
self.shapeLayer.path = nil
self.animateToNewShape()
}
}
}
private var fromPoints: [CGPoint]?
private var toPoints: [CGPoint]?
private var currentPoints: [CGPoint]? {
guard let fromPoints = fromPoints, let toPoints = toPoints else { return nil }
return fromPoints.enumerated().map { offset, fromPoint in
let toPoint = toPoints[offset]
return CGPoint(
x: fromPoint.x + (toPoint.x - fromPoint.x) * transition,
y: fromPoint.y + (toPoint.y - fromPoint.y) * transition
)
}
}
init(
pointsCount: Int,
minRandomness: CGFloat,
@ -750,7 +726,7 @@ final class CurveView: UIView {
super.init(frame: .zero)
layer.addSublayer(shapeLayer)
self.layer.addSublayer(self.shapeLayer)
}
required init?(coder: NSCoder) {
@ -758,7 +734,7 @@ final class CurveView: UIView {
}
func setColor(_ color: UIColor) {
shapeLayer.fillColor = color.cgColor
self.shapeLayer.fillColor = color.cgColor
}
func updateSpeedLevel(to newSpeedLevel: CGFloat) {
@ -770,57 +746,40 @@ final class CurveView: UIView {
}
func startAnimating() {
animateToNewShape()
self.animateToNewShape()
}
func stopAnimating() {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "curve")
self.shapeLayer.removeAnimation(forKey: "path")
}
private func animateToNewShape() {
if pop_animation(forKey: "curve") != nil {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "curve")
if self.shapeLayer.path == nil {
let points = self.generateNextCurve(for: self.bounds.size)
self.shapeLayer.path = UIBezierPath.smoothCurve(through: points, length: bounds.width, smoothness: self.smoothness).cgPath
}
if fromPoints == nil {
fromPoints = generateNextCurve(for: bounds.size)
}
if toPoints == nil {
toPoints = generateNextCurve(for: bounds.size)
}
let nextPoints = self.generateNextCurve(for: self.bounds.size)
let nextPath = UIBezierPath.smoothCurve(through: nextPoints, length: bounds.width, smoothness: self.smoothness).cgPath
let animation = POPBasicAnimation()
animation.property = POPAnimatableProperty.property(withName: "curve.transition", initializer: { property in
property?.readBlock = { curveView, values in
guard let curveView = curveView as? CurveView, let values = values else { return }
values.pointee = curveView.transition
}
property?.writeBlock = { curveView, values in
guard let curveView = curveView as? CurveView, let values = values else { return }
curveView.transition = values.pointee
}
}) as? POPAnimatableProperty
animation.completionBlock = { [weak self] animation, finished in
let animation = CABasicAnimation(keyPath: "path")
let previousPath = self.shapeLayer.path
self.shapeLayer.path = nextPath
animation.duration = CFTimeInterval(1 / (self.minSpeed + (self.maxSpeed - self.minSpeed) * self.speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
animation.fromValue = previousPath
animation.toValue = nextPath
animation.isRemovedOnCompletion = false
animation.fillMode = .forwards
animation.completion = { [weak self] finished in
if finished {
self?.fromPoints = self?.currentPoints
self?.toPoints = nil
self?.animateToNewShape()
}
}
animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
animation.fromValue = 0
animation.toValue = 1
pop_add(animation, forKey: "curve")
self.shapeLayer.add(animation, forKey: "path")
lastSpeedLevel = speedLevel
speedLevel = 0
self.lastSpeedLevel = self.speedLevel
self.speedLevel = 0
}
private func generateNextCurve(for size: CGSize) -> [CGPoint] {
@ -872,8 +831,8 @@ final class CurveView: UIView {
CATransaction.begin()
CATransaction.setDisableActions(true)
shapeLayer.position = CGPoint(x: self.bounds.width / 2.0, y: self.bounds.height / 2.0)
shapeLayer.bounds = self.bounds
self.shapeLayer.position = CGPoint(x: self.bounds.width / 2.0, y: self.bounds.height / 2.0)
self.shapeLayer.bounds = self.bounds
CATransaction.commit()
}
}

View File

@ -625,6 +625,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var isScheduled = false
private var isScheduledStarted = false
private let isSpeakingPromise = ValuePromise<Bool>(false, ignoreRepeated: true)
public var isSpeaking: Signal<Bool, NoError> {
return self.isSpeakingPromise.get()
}
private var screencastFramesDisposable: Disposable?
private var screencastStateDisposable: Disposable?
@ -1612,6 +1617,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
var result: [(PeerId, UInt32, Float, Bool)] = []
var myLevel: Float = 0.0
var myLevelHasVoice: Bool = false
var orignalMyLevelHasVoice: Bool = false
var missingSsrcs = Set<UInt32>()
for (ssrcKey, level, hasVoice) in levels {
var peerId: PeerId?
@ -1626,6 +1632,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
if let peerId = peerId {
if case .local = ssrcKey {
orignalMyLevelHasVoice = hasVoice
if !strongSelf.isMutedValue.isEffectivelyMuted {
myLevel = level
myLevelHasVoice = hasVoice
@ -1642,6 +1649,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
let mappedLevel = myLevel * 1.5
strongSelf.myAudioLevelPipe.putNext(mappedLevel)
strongSelf.processMyAudioLevel(level: mappedLevel, hasVoice: myLevelHasVoice)
strongSelf.isSpeakingPromise.set(orignalMyLevelHasVoice)
if !missingSsrcs.isEmpty {
strongSelf.participantsContext?.ensureHaveParticipants(ssrcs: missingSsrcs)

View File

@ -3,7 +3,6 @@ import UIKit
import AsyncDisplayKit
import Display
import SwiftSignalKit
import LegacyComponents
import AnimationUI
import AppBundle
import ManagedAnimationNode
@ -1433,9 +1432,9 @@ final class BlobView: UIView {
super.init(frame: .zero)
layer.addSublayer(shapeLayer)
self.layer.addSublayer(self.shapeLayer)
shapeLayer.transform = CATransform3DMakeScale(minScale, minScale, 1)
self.shapeLayer.transform = CATransform3DMakeScale(minScale, minScale, 1)
}
required init?(coder: NSCoder) {
@ -1443,11 +1442,11 @@ final class BlobView: UIView {
}
func setColor(_ color: UIColor) {
shapeLayer.fillColor = color.cgColor
self.shapeLayer.fillColor = color.cgColor
}
func updateSpeedLevel(to newSpeedLevel: CGFloat) {
speedLevel = max(speedLevel, newSpeedLevel)
self.speedLevel = max(self.speedLevel, newSpeedLevel)
// if abs(lastSpeedLevel - newSpeedLevel) > 0.45 {
// animateToNewShape()
@ -1455,54 +1454,37 @@ final class BlobView: UIView {
}
func startAnimating() {
animateToNewShape()
self.animateToNewShape()
}
func stopAnimating() {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "blob")
self.shapeLayer.removeAnimation(forKey: "path")
}
private func animateToNewShape() {
if pop_animation(forKey: "blob") != nil {
fromPoints = currentPoints
toPoints = nil
pop_removeAnimation(forKey: "blob")
if self.shapeLayer.path == nil {
let points = generateNextBlob(for: self.bounds.size)
self.shapeLayer.path = UIBezierPath.smoothCurve(through: points, length: bounds.width, smoothness: smoothness).cgPath
}
if fromPoints == nil {
fromPoints = generateNextBlob(for: bounds.size)
}
if toPoints == nil {
toPoints = generateNextBlob(for: bounds.size)
}
let nextPoints = generateNextBlob(for: self.bounds.size)
let nextPath = UIBezierPath.smoothCurve(through: nextPoints, length: bounds.width, smoothness: smoothness).cgPath
let animation = POPBasicAnimation()
animation.property = POPAnimatableProperty.property(withName: "blob.transition", initializer: { property in
property?.readBlock = { blobView, values in
guard let blobView = blobView as? BlobView, let values = values else { return }
values.pointee = blobView.transition
}
property?.writeBlock = { blobView, values in
guard let blobView = blobView as? BlobView, let values = values else { return }
blobView.transition = values.pointee
}
}) as? POPAnimatableProperty
animation.completionBlock = { [weak self] animation, finished in
let animation = CABasicAnimation(keyPath: "path")
let previousPath = self.shapeLayer.path
self.shapeLayer.path = nextPath
animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
animation.fromValue = previousPath
animation.toValue = nextPath
animation.isRemovedOnCompletion = false
animation.fillMode = .forwards
animation.completion = { [weak self] finished in
if finished {
self?.fromPoints = self?.currentPoints
self?.toPoints = nil
self?.animateToNewShape()
}
}
animation.duration = CFTimeInterval(1 / (minSpeed + (maxSpeed - minSpeed) * speedLevel))
animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
animation.fromValue = 0
animation.toValue = 1
pop_add(animation, forKey: "blob")
self.shapeLayer.add(animation, forKey: "path")
lastSpeedLevel = speedLevel
speedLevel = 0

View File

@ -116,6 +116,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
private let contentBackgroundNode: ASDisplayNode
private let titleNode: ASTextNode
private let previewContainerNode: ASDisplayNode
private let shimmerNode: ShimmerEffectForegroundNode
private let cameraButton: SolidRoundedButtonNode
private let screenButton: SolidRoundedButtonNode
private var broadcastPickerView: UIView?
@ -135,6 +136,8 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
private let hapticFeedback = HapticFeedback()
private let readyDisposable = MetaDisposable()
var shareCamera: ((Bool) -> Void)?
var switchCamera: (() -> Void)?
var dismiss: (() -> Void)?
@ -200,7 +203,10 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
self.previewContainerNode = ASDisplayNode()
self.previewContainerNode.clipsToBounds = true
self.previewContainerNode.cornerRadius = 11.0
self.previewContainerNode.backgroundColor = .black
self.previewContainerNode.backgroundColor = UIColor(rgb: 0x2b2b2f)
self.shimmerNode = ShimmerEffectForegroundNode(size: 200.0)
self.previewContainerNode.addSubnode(self.shimmerNode)
self.microphoneButton = HighlightTrackingButtonNode()
self.microphoneButton.isSelected = true
@ -293,9 +299,19 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
}
}
}
self.readyDisposable.set(self.cameraNode.ready.start(next: { [weak self] ready in
if let strongSelf = self {
Queue.mainQueue().after(0.07) {
strongSelf.shimmerNode.alpha = 0.0
strongSelf.shimmerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
}
}))
}
deinit {
self.readyDisposable.dispose()
self.applicationStateDisposable?.dispose()
}
@ -407,6 +423,13 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
self.containerLayout = (layout, navigationBarHeight)
let isLandscape: Bool
if layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
isLandscape = true
} else {
isLandscape = false
}
var insets = layout.insets(options: [.statusBar, .input])
let cleanInsets = layout.insets(options: [.statusBar])
insets.top = max(10.0, insets.top)
@ -419,9 +442,14 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
let titleHeight: CGFloat = 54.0
var contentHeight = titleHeight + bottomInset + 52.0 + 17.0
let innerContentHeight: CGFloat = layout.size.height - contentHeight - 160.0
var width = horizontalContainerFillingSizeForLayout(layout: layout, sideInset: layout.safeInsets.left)
if isLandscape {
contentHeight = layout.size.height
width = layout.size.width
} else {
contentHeight = titleHeight + bottomInset + 52.0 + 17.0 + innerContentHeight + buttonOffset
}
let width = horizontalContainerFillingSizeForLayout(layout: layout, sideInset: layout.safeInsets.left)
let previewInset: CGFloat = 16.0
let sideInset = floor((layout.size.width - width) / 2.0)
@ -432,6 +460,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
if backgroundFrame.minY < contentFrame.minY {
backgroundFrame.origin.y = contentFrame.minY
}
transition.updateAlpha(node: self.titleNode, alpha: isLandscape ? 0.0 : 1.0)
transition.updateFrame(node: self.backgroundNode, frame: backgroundFrame)
transition.updateFrame(node: self.effectNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
transition.updateFrame(node: self.contentBackgroundNode, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
@ -442,11 +471,23 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
let titleFrame = CGRect(origin: CGPoint(x: floor((contentFrame.width - titleSize.width) / 2.0), y: 18.0), size: titleSize)
transition.updateFrame(node: self.titleNode, frame: titleFrame)
let previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 243.0 - bottomInset)
transition.updateFrame(node: self.previewContainerNode, frame: CGRect(origin: CGPoint(x: previewInset, y: 56.0), size: previewSize))
let previewSize: CGSize
let previewFrame: CGRect
if isLandscape {
let previewHeight = contentHeight - layout.intrinsicInsets.bottom - 52.0 - 10.0
previewSize = CGSize(width: min(contentFrame.width - layout.safeInsets.left - layout.safeInsets.right, previewHeight * 1.7778), height: previewHeight)
previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((contentFrame.width - previewSize.width) / 2.0), y: 0.0), size: previewSize)
} else {
previewSize = CGSize(width: contentFrame.width - previewInset * 2.0, height: contentHeight - 243.0 - bottomInset)
previewFrame = CGRect(origin: CGPoint(x: previewInset, y: 56.0), size: previewSize)
}
transition.updateFrame(node: self.previewContainerNode, frame: previewFrame)
transition.updateFrame(node: self.shimmerNode, frame: CGRect(origin: CGPoint(), size: previewFrame.size))
self.shimmerNode.update(foregroundColor: UIColor(rgb: 0xffffff, alpha: 0.07))
self.shimmerNode.updateAbsoluteRect(previewFrame, within: layout.size)
self.cameraNode.frame = CGRect(origin: CGPoint(), size: previewSize)
self.cameraNode.updateLayout(size: previewSize, layoutMode: .fillVertical, transition: .immediate)
self.cameraNode.updateLayout(size: previewSize, layoutMode: isLandscape ? .fillHorizontal : .fillVertical, transition: .immediate)
let microphoneFrame = CGRect(x: 16.0, y: previewSize.height - 48.0 - 16.0, width: 48.0, height: 48.0)
transition.updateFrame(node: self.microphoneButton, frame: microphoneFrame)
@ -459,6 +500,31 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
transition.updateFrame(view: self.switchCameraEffectView, frame: CGRect(origin: CGPoint(), size: switchCameraFrame.size))
transition.updateFrame(node: self.switchCameraIconNode, frame: CGRect(origin: CGPoint(), size: switchCameraFrame.size))
if isLandscape {
var buttonsCount: Int = 2
if let _ = self.broadcastPickerView {
buttonsCount += 1
} else {
self.screenButton.isHidden = true
}
let buttonInset: CGFloat = 6.0
let buttonWidth = floorToScreenPixels((contentFrame.width - layout.safeInsets.left - layout.safeInsets.right - CGFloat(buttonsCount + 1) * buttonInset) / CGFloat(buttonsCount))
let cameraButtonHeight = self.cameraButton.updateLayout(width: buttonWidth, transition: transition)
let screenButtonHeight = self.screenButton.updateLayout(width: buttonWidth, transition: transition)
let cancelButtonHeight = self.cancelButton.updateLayout(width: buttonWidth, transition: transition)
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: layout.safeInsets.left + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cancelButtonHeight))
if let broadcastPickerView = self.broadcastPickerView {
transition.updateFrame(node: self.screenButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight))
broadcastPickerView.frame = CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: screenButtonHeight)
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
} else {
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: layout.safeInsets.left + buttonInset + buttonWidth + buttonInset, y: previewFrame.maxY + 10.0, width: buttonWidth, height: cameraButtonHeight))
}
} else {
let buttonInset: CGFloat = 16.0
let cameraButtonHeight = self.cameraButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
transition.updateFrame(node: self.cameraButton, frame: CGRect(x: buttonInset, y: contentHeight - cameraButtonHeight - insets.bottom - 16.0 - buttonOffset, width: contentFrame.width, height: cameraButtonHeight))
@ -473,6 +539,7 @@ private class VoiceChatCameraPreviewControllerNode: ViewControllerTracingNode, U
let cancelButtonHeight = self.cancelButton.updateLayout(width: contentFrame.width - buttonInset * 2.0, transition: transition)
transition.updateFrame(node: self.cancelButton, frame: CGRect(x: buttonInset, y: contentHeight - cancelButtonHeight - insets.bottom - 16.0, width: contentFrame.width, height: cancelButtonHeight))
}
transition.updateFrame(node: self.contentContainerNode, frame: contentContainerFrame)
}

View File

@ -782,6 +782,7 @@ public final class VoiceChatController: ViewController {
private var audioLevelsDisposable: Disposable?
private var myAudioLevelDisposable: Disposable?
private var isSpeakingDisposable: Disposable?
private var memberStatesDisposable: Disposable?
private var actionButtonColorDisposable: Disposable?
@ -828,6 +829,9 @@ public final class VoiceChatController: ViewController {
private var ignoreConnecting = false
private var ignoreConnectingTimer: SwiftSignalKit.Timer?
private var displayUnmuteTooltipTimer: SwiftSignalKit.Timer?
private var lastUnmuteTooltipDisplayTimestamp: Double?
private var displayMode: DisplayMode = .modal(isExpanded: false, isFilled: false) {
didSet {
if case let .modal(isExpanded, _) = self.displayMode {
@ -1858,7 +1862,7 @@ public final class VoiceChatController: ViewController {
if let (currentPeerId, _, timestamp) = strongSelf.currentDominantSpeaker {
if CACurrentMediaTime() - timestamp > 2.5 && peerId != currentPeerId {
strongSelf.currentDominantSpeaker = (peerId, nil, CACurrentMediaTime())
strongSelf.updateMainVideo(waitForFullSize: false)
strongSelf.updateMainVideo(waitForFullSize: true)
}
}
}
@ -1879,6 +1883,40 @@ public final class VoiceChatController: ViewController {
strongSelf.actionButton.updateLevel(CGFloat(effectiveLevel))
})
self.isSpeakingDisposable = (self.call.isSpeaking
|> deliverOnMainQueue).start(next: { [weak self] isSpeaking in
guard let strongSelf = self else {
return
}
if let state = strongSelf.callState, state.muteState == nil || strongSelf.pushingToTalk {
strongSelf.displayUnmuteTooltipTimer?.invalidate()
} else {
if isSpeaking {
var shouldDisplayTooltip = false
if let previousTimstamp = strongSelf.lastUnmuteTooltipDisplayTimestamp, CACurrentMediaTime() < previousTimstamp + 60.0 {
shouldDisplayTooltip = true
} else if strongSelf.lastUnmuteTooltipDisplayTimestamp == nil {
shouldDisplayTooltip = true
}
if shouldDisplayTooltip {
let timer = SwiftSignalKit.Timer(timeout: 2.0, repeat: false, completion: { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.lastUnmuteTooltipDisplayTimestamp = CACurrentMediaTime()
strongSelf.displayUnmuteTooltip()
strongSelf.displayUnmuteTooltipTimer?.invalidate()
strongSelf.displayUnmuteTooltipTimer = nil
}, queue: Queue.mainQueue())
timer.start()
strongSelf.displayUnmuteTooltipTimer = timer
}
} else {
strongSelf.displayUnmuteTooltipTimer?.invalidate()
}
}
})
self.leaveButton.addTarget(self, action: #selector(self.leavePressed), forControlEvents: .touchUpInside)
self.actionButton.addTarget(self, action: #selector(self.actionPressed), forControlEvents: .touchUpInside)
self.audioButton.addTarget(self, action: #selector(self.audioPressed), forControlEvents: .touchUpInside)
@ -2063,6 +2101,7 @@ public final class VoiceChatController: ViewController {
self.memberStatesDisposable?.dispose()
self.audioLevelsDisposable?.dispose()
self.myAudioLevelDisposable?.dispose()
self.isSpeakingDisposable?.dispose()
self.inviteDisposable.dispose()
self.memberEventsDisposable.dispose()
self.reconnectedAsEventsDisposable.dispose()
@ -3533,7 +3572,7 @@ public final class VoiceChatController: ViewController {
}
}
let videoButtonSize: CGSize
let audioButtonSize: CGSize
var buttonsTitleAlpha: CGFloat
var effectiveDisplayMode = self.displayMode
if let (layout, _) = self.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
@ -3543,34 +3582,37 @@ public final class VoiceChatController: ViewController {
}
}
let hasCameraButton = self.cameraButton.isUserInteractionEnabled
let hasVideo = self.call.hasVideo
switch effectiveDisplayMode {
case .modal:
videoButtonSize = smallButtonSize
audioButtonSize = hasCameraButton ? smallButtonSize : sideButtonSize
buttonsTitleAlpha = 1.0
case .fullscreen:
videoButtonSize = sideButtonSize
audioButtonSize = sideButtonSize
buttonsTitleAlpha = 0.0
}
let hasVideo = self.call.hasVideo
self.cameraButton.update(size: hasVideo ? sideButtonSize : videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: hasVideo ? activeButtonAppearance : normalButtonAppearance, image: hasVideo ? .cameraOn : .cameraOff), text: self.presentationData.strings.VoiceChat_Video, transition: transition)
self.cameraButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: hasVideo ? activeButtonAppearance : normalButtonAppearance, image: hasVideo ? .cameraOn : .cameraOff), text: self.presentationData.strings.VoiceChat_Video, transition: transition)
self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition)
self.switchCameraButton.update(size: audioButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition)
transition.updateAlpha(node: self.switchCameraButton, alpha: hasVideo ? 1.0 : 0.0)
transition.updateTransformScale(node: self.switchCameraButton, scale: hasVideo ? 1.0 : 0.0)
transition.updateTransformScale(node: self.cameraButton, scale: hasCameraButton ? 1.0 : 0.0)
transition.updateAlpha(node: self.audioButton, alpha: hasVideo ? 0.0 : 1.0)
transition.updateTransformScale(node: self.audioButton, scale: hasVideo ? 0.0 : 1.0)
self.audioButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage, isEnabled: isSoundEnabled), text: soundTitle, transition: transition)
self.audioButton.update(size: audioButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage, isEnabled: isSoundEnabled), text: soundTitle, transition: transition)
self.audioButton.isUserInteractionEnabled = isSoundEnabled
self.leaveButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.custom(0xff3b30, 0.3)), image: .cancel), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate)
transition.updateAlpha(node: self.cameraButton.textNode, alpha: hasVideo ? buttonsTitleAlpha : 0.0)
transition.updateAlpha(node: self.cameraButton.textNode, alpha: hasCameraButton ? buttonsTitleAlpha : 0.0)
transition.updateAlpha(node: self.switchCameraButton.textNode, alpha: buttonsTitleAlpha)
transition.updateAlpha(node: self.audioButton.textNode, alpha: buttonsTitleAlpha)
transition.updateAlpha(node: self.audioButton.textNode, alpha: hasCameraButton ? 0.0 : buttonsTitleAlpha)
transition.updateAlpha(node: self.leaveButton.textNode, alpha: buttonsTitleAlpha)
}
@ -3963,6 +4005,8 @@ public final class VoiceChatController: ViewController {
} : nil)
}
self.cameraButton.isUserInteractionEnabled = hasCameraButton
var buttonsTransition: ContainedViewLayoutTransition = .immediate
if !isFirstTime {
if case .animated(_, .spring) = transition {
@ -3973,22 +4017,21 @@ public final class VoiceChatController: ViewController {
}
self.updateButtons(transition: buttonsTransition)
self.cameraButton.isUserInteractionEnabled = hasCameraButton
if self.audioButton.supernode === self.bottomPanelNode {
transition.updateAlpha(node: self.cameraButton, alpha: hasCameraButton ? 1.0 : 0.0)
transition.updateFrameAsPositionAndBounds(node: self.switchCameraButton, frame: firstButtonFrame)
if !self.animatingButtonsSwap || transition.isAnimated {
if self.call.hasVideo {
transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: secondButtonFrame, completion: { [weak self] _ in
if hasCameraButton {
transition.updateFrameAsPositionAndBounds(node: self.audioButton, frame: firstButtonFrame, completion: { [weak self] _ in
self?.animatingButtonsSwap = false
})
} else {
transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: firstButtonFrame, completion: { [weak self] _ in
transition.updateFrameAsPositionAndBounds(node: self.audioButton, frame: secondButtonFrame, completion: { [weak self] _ in
self?.animatingButtonsSwap = false
})
}
transition.updateFrameAsPositionAndBounds(node: self.audioButton, frame: secondButtonFrame)
transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: secondButtonFrame)
}
transition.updateFrameAsPositionAndBounds(node: self.leaveButton, frame: forthButtonFrame)
}
@ -4515,8 +4558,10 @@ public final class VoiceChatController: ViewController {
self.requestedVideoSources.insert(channel.endpointId)
self.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] videoView in
Queue.mainQueue().async {
print("create main video \(channel.endpointId)")
self?.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] backdropVideoView in
Queue.mainQueue().async {
print("create blur video \(channel.endpointId)")
guard let strongSelf = self, let videoView = videoView else {
return
}
@ -4527,6 +4572,7 @@ public final class VoiceChatController: ViewController {
|> deliverOnMainQueue
).start(next: { [weak self, weak videoNode] _ in
if let strongSelf = self, let videoNode = videoNode {
print("video ready \(channel.endpointId)")
Queue.mainQueue().after(0.1) {
strongSelf.readyVideoNodes.insert(channel.endpointId)
if videoNode.aspectRatio <= 0.77 {
@ -5264,6 +5310,13 @@ public final class VoiceChatController: ViewController {
}))
}
/// Presents a temporary "You are on mute" tooltip pointing at the main action button.
private func displayUnmuteTooltip() {
    // Translate the action button's center into this node's view coordinate space.
    let anchorCenter = self.actionButton.view.convert(self.actionButton.bounds, to: self.view).center
    // Build a 10x10 anchor rect centered on that point, raised 68pt so the
    // tooltip (anchored at the rect's bottom edge) appears above the button.
    let anchorRect = CGRect(
        origin: CGPoint(x: anchorCenter.x - 5.0, y: anchorCenter.y - 5.0 - 68.0),
        size: CGSize(width: 10.0, height: 10.0)
    )
    let tooltip = TooltipScreen(
        text: self.presentationData.strings.VoiceChat_UnmuteSuggestion,
        style: .gradient(UIColor(rgb: 0x1d446c), UIColor(rgb: 0x193e63)),
        icon: nil,
        location: .point(anchorRect, .bottom),
        displayDuration: .custom(3.0),
        shouldDismissOnTouch: { _ in
            // Dismiss on any touch, but let the touch pass through to the UI below.
            return .dismiss(consume: false)
        }
    )
    self.controller?.present(tooltip, in: .window(.root))
}
private func displayToggleVideoSourceTooltip(screencast: Bool) {
// guard let videoContainerNode = self.mainStageVideoContainerNode else {
@ -5346,7 +5399,7 @@ public final class VoiceChatController: ViewController {
self.mainStageContainerNode.isHidden = false
self.mainStageContainerNode.isUserInteractionEnabled = isFullscreen
let transition: ContainedViewLayoutTransition = .animated(duration: 0.4, curve: .spring)
let transition: ContainedViewLayoutTransition = .animated(duration: 0.55, curve: .spring)
if case .modal = previousDisplayMode, case .fullscreen = self.displayMode {
self.fullscreenListNode.isHidden = false
@ -5409,7 +5462,7 @@ public final class VoiceChatController: ViewController {
self.transitionMaskTopFillLayer.opacity = 1.0
}
self.transitionMaskBottomFillLayer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, removeOnCompletion: false, completion: { [weak self] _ in
Queue.mainQueue().after(0.2) {
Queue.mainQueue().after(0.3) {
self?.transitionMaskTopFillLayer.opacity = 0.0
self?.transitionMaskBottomFillLayer.removeAllAnimations()
}

View File

@ -773,7 +773,7 @@ class VoiceChatFullscreenParticipantItemNode: ItemListRevealOptionsItemNode {
nodeToAnimateIn = animationNode
}
var color = color
if color.rgb == 0x979797 {
if hasVideo || color.rgb == 0x979797 {
color = UIColor(rgb: 0xffffff)
}
animationNode.update(state: VoiceChatMicrophoneNode.State(muted: muted, filled: true, color: color), animated: true)

View File

@ -430,7 +430,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
strongSelf.speakingContainerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
strongSelf.speakingContainerNode.layer.animateScale(from: 0.01, to: 1.0, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
let blobFrame = strongSelf.speakingAvatarNode.frame.insetBy(dx: -10.0, dy: -10.0)
let blobFrame = strongSelf.speakingAvatarNode.frame.insetBy(dx: -14.0, dy: -14.0)
strongSelf.speakingAudioLevelDisposable.set((getAudioLevel(peerId)
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let strongSelf = self else {
@ -614,8 +614,6 @@ final class VoiceChatMainStageNode: ASDisplayNode {
if previousPeer?.0 == peer?.0 && self.appeared {
delayTransition = true
}
let appeared = self.appeared
if !delayTransition {
self.setAvatarHidden(true)
}
@ -832,7 +830,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
let speakingInset: CGFloat = 16.0
let speakingAvatarSize = CGSize(width: 30.0, height: 30.0)
let speakingTitleSize = self.speakingTitleNode.updateLayout(CGSize(width: 220.0, height: CGFloat.greatestFiniteMagnitude))
let speakingTitleSize = self.speakingTitleNode.updateLayout(CGSize(width: size.width - 100.0, height: CGFloat.greatestFiniteMagnitude))
let speakingContainerSize = CGSize(width: speakingTitleSize.width + speakingInset * 2.0 + speakingAvatarSize.width, height: 38.0)
self.speakingEffectView?.frame = CGRect(origin: CGPoint(), size: speakingContainerSize)
self.speakingAvatarNode.frame = CGRect(origin: CGPoint(x: 4.0, y: 4.0), size: speakingAvatarSize)

View File

@ -683,11 +683,11 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
let startContainerAvatarPosition = sourceNode.avatarNode.view.convert(sourceNode.avatarNode.bounds, to: containerNode.view).center
var animate = true
if containerNode.frame.width > containerNode.frame.height {
if startContainerAvatarPosition.y < -tileSize.height || startContainerAvatarPosition.y > containerNode.frame.height + tileSize.height {
if startContainerAvatarPosition.y < -tileSize.height * 2.0 || startContainerAvatarPosition.y > containerNode.frame.height + tileSize.height * 2.0 {
animate = false
}
} else {
if startContainerAvatarPosition.x < -tileSize.width || startContainerAvatarPosition.x > containerNode.frame.width + tileSize.width {
if startContainerAvatarPosition.x < -tileSize.width * 4.0 || startContainerAvatarPosition.x > containerNode.frame.width + tileSize.width * 4.0 {
animate = false
}
}

View File

@ -449,7 +449,7 @@ final class VoiceChatTileItemNode: ASDisplayNode {
if animate {
sourceNode.isHidden = true
Queue.mainQueue().after(0.6) {
Queue.mainQueue().after(0.7) {
sourceNode.isHidden = false
}
@ -640,7 +640,7 @@ class VoiceChatTileHighlightNode: ASDisplayNode {
}
}
private final class ShimmerEffectForegroundNode: ASDisplayNode {
final class ShimmerEffectForegroundNode: ASDisplayNode {
private var currentForegroundColor: UIColor?
private let imageNodeContainer: ASDisplayNode
private let imageNode: ASImageNode