mirror of
https://github.com/Swiftgram/Telegram-iOS.git
synced 2025-10-09 03:20:48 +00:00
[WIP] Call UI
This commit is contained in:
parent d2ffa8e19e
commit 7a12540f66
@@ -5,11 +5,28 @@ import Display
import CallScreen
import ComponentFlow

private extension UIScreen {
    private static let cornerRadiusKey: String = {
        let components = ["Radius", "Corner", "display", "_"]
        return components.reversed().joined()
    }()

    var displayCornerRadius: CGFloat {
        guard let cornerRadius = self.value(forKey: Self.cornerRadiusKey) as? CGFloat else {
            assertionFailure("Failed to detect screen corner radius")
            return 0
        }

        return cornerRadius
    }
}
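Note on the UIScreen extension above: reversing and joining ["Radius", "Corner", "display", "_"] produces the private key "_displayCornerRadius", which is read through key-value coding so the literal never appears in the source. A minimal standalone sketch of the same lookup (the fallback value and the typical 40 to 55 point range on notched devices are illustrative, not part of the commit):

import UIKit

// Rebuilds "_displayCornerRadius" without embedding the private key as a string literal.
let cornerRadiusKey = ["Radius", "Corner", "display", "_"].reversed().joined()

// UIKit exposes the physical corner radius of the screen under that key; the guard
// in the extension falls back to 0 if the key ever stops resolving.
let radius = (UIScreen.main.value(forKey: cornerRadiusKey) as? CGFloat) ?? 0.0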
public final class ViewController: UIViewController {
    private var callScreenView: PrivateCallScreen?
    private var callState: PrivateCallScreen.State = PrivateCallScreen.State(
        lifecycleState: .connecting,
        name: "Emma Walters",
        shortName: "Emma",
        avatarImage: UIImage(named: "test"),
        audioOutput: .internalSpeaker,
        isMicrophoneMuted: false,
@@ -20,6 +37,8 @@ public final class ViewController: UIViewController {
    private var currentLayout: (size: CGSize, insets: UIEdgeInsets)?
    private var viewLayoutTransition: Transition?

    private var audioLevelTimer: Foundation.Timer?

    override public func viewDidLoad() {
        super.viewDidLoad()

@@ -61,6 +80,29 @@ public final class ViewController: UIViewController {
            ))
        }

        switch self.callState.lifecycleState {
        case .terminated:
            if let audioLevelTimer = self.audioLevelTimer {
                self.audioLevelTimer = nil
                audioLevelTimer.invalidate()
            }
        default:
            if self.audioLevelTimer == nil {
                let startTime = CFAbsoluteTimeGetCurrent()
                self.audioLevelTimer = Foundation.Timer.scheduledTimer(withTimeInterval: 1.0 / 60.0, repeats: true, block: { [weak self] _ in
                    guard let self, let callScreenView = self.callScreenView else {
                        return
                    }
                    let timestamp = CFAbsoluteTimeGetCurrent() - startTime
                    let stream1 = sin(timestamp * Double.pi * 2.0)
                    let stream2 = sin(2.0 * timestamp * Double.pi * 2.0)
                    let stream3 = sin(3.0 * timestamp * Double.pi * 2.0)
                    let result = stream1 + stream2 + stream3
                    callScreenView.addIncomingAudioLevel(value: abs(Float(result)))
                })
            }
        }

        self.update(transition: .spring(duration: 0.4))
    }
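Note on the timer above: the demo controller has no real audio pipeline, so it synthesizes a level at 60 Hz by summing three sine waves at 1, 2 and 3 Hz and feeding the absolute value to addIncomingAudioLevel. The same signal as a standalone helper (the function name is illustrative):

import Foundation

// |sin(2πt) + sin(2π·2t) + sin(2π·3t)|: the sum stays within [-3, 3], so the level is in [0, 3].
func syntheticAudioLevel(at timestamp: Double) -> Float {
    let stream1 = sin(timestamp * Double.pi * 2.0)
    let stream2 = sin(2.0 * timestamp * Double.pi * 2.0)
    let stream3 = sin(3.0 * timestamp * Double.pi * 2.0)
    return abs(Float(stream1 + stream2 + stream3))
}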
        callScreenView.flipCameraAction = { [weak self] in
@@ -115,7 +157,7 @@ public final class ViewController: UIViewController {
        }

        transition.setFrame(view: callScreenView, frame: CGRect(origin: CGPoint(), size: size))
        callScreenView.update(size: size, insets: insets, screenCornerRadius: 55.0, state: self.callState, transition: transition)
        callScreenView.update(size: size, insets: insets, screenCornerRadius: UIScreen.main.displayCornerRadius, state: self.callState, transition: transition)
    }

    override public func viewWillLayoutSubviews() {
@@ -481,8 +481,14 @@ public struct Transition {
    }

    public func setScale(layer: CALayer, scale: CGFloat, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
        let t = layer.presentation()?.transform ?? layer.transform
        let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
        let currentTransform: CATransform3D
        if layer.animation(forKey: "transform") != nil || layer.animation(forKey: "transform.scale") != nil {
            currentTransform = layer.presentation()?.transform ?? layer.transform
        } else {
            currentTransform = layer.transform
        }

        let currentScale = sqrt((currentTransform.m11 * currentTransform.m11) + (currentTransform.m12 * currentTransform.m12) + (currentTransform.m13 * currentTransform.m13))
        if currentScale == scale {
            if let animation = layer.animation(forKey: "transform.scale") as? CABasicAnimation, let toValue = animation.toValue as? NSNumber {
                if toValue.doubleValue == scale {
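On the scale extraction in setScale above: for a layer transform that combines a uniform XY scale s with a rotation about Z, the first row of the matrix has length s, so sqrt(m11² + m12² + m13²) recovers the current scale, and the presentation layer is now consulted only when a transform animation is actually attached. A small self-contained check, illustrative only:

import QuartzCore

// Recovers the X-axis scale of a CATransform3D from the length of its first row.
func extractedScale(_ t: CATransform3D) -> CGFloat {
    return sqrt(t.m11 * t.m11 + t.m12 * t.m12 + t.m13 * t.m13)
}

let scaled = CATransform3DMakeScale(0.5, 0.5, 1.0)
let rotatedAndScaled = CATransform3DRotate(scaled, .pi / 4, 0.0, 0.0, 1.0)
// Both print 0.5: rotating about Z does not change the row length.
print(extractedScale(scaled), extractedScale(rotatedAndScaled))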
@@ -119,6 +119,28 @@ open class MetalEngineSubjectLayer: SimpleLayer {
    fileprivate var surfaceChangeFrameCount: Int = 0
    #endif

    public var cloneLayers: [CALayer] = []

    override open var contents: Any? {
        didSet {
            if !self.cloneLayers.isEmpty {
                for cloneLayer in self.cloneLayers {
                    cloneLayer.contents = self.contents
                }
            }
        }
    }

    override open var contentsRect: CGRect {
        didSet {
            if !self.cloneLayers.isEmpty {
                for cloneLayer in self.cloneLayers {
                    cloneLayer.contentsRect = self.contentsRect
                }
            }
        }
    }
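The cloneLayers hook above lets other CALayers mirror whatever the engine renders into a subject layer: every assignment to contents or contentsRect is forwarded to the registered clones, so the same backing surface shows up in a different part of the layer tree without a second render pass. This is what CallBackgroundLayer uses below to drive its new externalBlurredLayer. A minimal sketch of the idea with a plain CALayer subclass (MirroringLayer is a hypothetical stand-in, not the engine class):

import QuartzCore

final class MirroringLayer: CALayer {
    var cloneLayers: [CALayer] = []

    override var contents: Any? {
        didSet {
            // Assigning `contents` shares the backing object; no pixels are copied.
            for cloneLayer in cloneLayers {
                cloneLayer.contents = contents
            }
        }
    }
}

let source = MirroringLayer()
let clone = CALayer()
source.cloneLayers = [clone]
// Anything rendered into `source.contents` now also appears in `clone`.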
    public override init() {
        super.init()

@@ -533,10 +555,13 @@ public final class MetalEngine {
        let renderingRect: CGRect
        let contentsRect: CGRect

        init(baseRect: CGRect, surfaceWidth: Int, surfaceHeight: Int) {
        init(baseRect: CGRect, edgeSize: CGFloat, surfaceWidth: Int, surfaceHeight: Int) {
            self.subRect = CGRect(origin: CGPoint(x: baseRect.minX, y: baseRect.minY), size: CGSize(width: baseRect.width, height: baseRect.height))
            self.renderingRect = CGRect(origin: CGPoint(x: self.subRect.minX / CGFloat(surfaceWidth), y: self.subRect.minY / CGFloat(surfaceHeight)), size: CGSize(width: self.subRect.width / CGFloat(surfaceWidth), height: self.subRect.height / CGFloat(surfaceHeight)))
            self.contentsRect = CGRect(origin: CGPoint(x: self.subRect.minX / CGFloat(surfaceWidth), y: 1.0 - self.subRect.minY / CGFloat(surfaceHeight) - self.subRect.height / CGFloat(surfaceHeight)), size: CGSize(width: self.subRect.width / CGFloat(surfaceWidth), height: self.subRect.height / CGFloat(surfaceHeight)))

            let subRectWithInset = self.subRect.insetBy(dx: edgeSize, dy: edgeSize)

            self.contentsRect = CGRect(origin: CGPoint(x: subRectWithInset.minX / CGFloat(surfaceWidth), y: 1.0 - subRectWithInset.minY / CGFloat(surfaceHeight) - subRectWithInset.height / CGFloat(surfaceHeight)), size: CGSize(width: subRectWithInset.width / CGFloat(surfaceWidth), height: subRectWithInset.height / CGFloat(surfaceHeight)))
        }
    }

@@ -550,11 +575,13 @@ public final class MetalEngine {
        if item0.itemId != -1 && item1.itemId != -1 {
            let layout0 = AllocationLayout(
                baseRect: CGRect(origin: CGPoint(x: CGFloat(item0.x), y: CGFloat(item0.y)), size: CGSize(width: CGFloat(item0.width), height: CGFloat(item0.height))),
                edgeSize: CGFloat(renderingParameters.edgeInset),
                surfaceWidth: self.width,
                surfaceHeight: self.height
            )
            let layout1 = AllocationLayout(
                baseRect: CGRect(origin: CGPoint(x: CGFloat(item1.x), y: CGFloat(item1.y)), size: CGSize(width: CGFloat(item1.width), height: CGFloat(item1.height))),
                edgeSize: CGFloat(renderingParameters.edgeInset),
                surfaceWidth: self.width,
                surfaceHeight: self.height
            )
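What the edgeSize parameter changes: each item keeps its full allocation on the shared surface, but contentsRect now crops edgeSize points off every side before the layer samples it, so blurred or filtered edges cannot bleed into a neighbouring allocation. A worked example with assumed numbers (not taken from the engine):

import CoreGraphics

// A 100x100 item allocated at the surface origin, 512x512 surface, 4 pt guard band.
let baseRect = CGRect(x: 0.0, y: 0.0, width: 100.0, height: 100.0)
let surfaceWidth: CGFloat = 512.0
let surfaceHeight: CGFloat = 512.0
let edgeSize: CGFloat = 4.0

let subRectWithInset = baseRect.insetBy(dx: edgeSize, dy: edgeSize)
// subRectWithInset == (4, 4, 92, 92): the sampled region excludes the guard band.

// Same unit-space conversion and vertical flip as AllocationLayout.contentsRect above.
let contentsRect = CGRect(
    x: subRectWithInset.minX / surfaceWidth,
    y: 1.0 - subRectWithInset.minY / surfaceHeight - subRectWithInset.height / surfaceHeight,
    width: subRectWithInset.width / surfaceWidth,
    height: subRectWithInset.height / surfaceHeight
)
// contentsRect ≈ (0.0078, 0.8125, 0.1797, 0.1797)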
@@ -232,7 +232,7 @@ vertex BlobVertexOut callBlobVertex(
fragment half4 callBlobFragment(
    BlobVertexOut in [[stage_in]]
) {
    half alpha = 0.15;
    half alpha = 0.35;
    return half4(1.0 * alpha, 1.0 * alpha, 1.0 * alpha, alpha);
}

@@ -345,3 +345,28 @@ kernel void gaussianBlurVertical(
) {
    gaussianBlur(inTexture, outTexture, float2(0, 1), gid);
}

vertex QuadVertexOut edgeTestVertex(
    const device Rectangle &rect [[ buffer(0) ]],
    unsigned int vid [[ vertex_id ]]
) {
    float2 quadVertex = quadVertices[vid];

    QuadVertexOut out;

    out.position = float4(rect.origin.x + quadVertex.x * rect.size.x, rect.origin.y + quadVertex.y * rect.size.y, 0.0, 1.0);
    out.position.x = -1.0 + out.position.x * 2.0;
    out.position.y = -1.0 + out.position.y * 2.0;

    out.uv = quadVertex;

    return out;
}

fragment half4 edgeTestFragment(
    QuadVertexOut in [[stage_in]],
    const device float4 &colorIn
) {
    half4 color = half4(colorIn);
    return color;
}
@@ -80,6 +80,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
    ]

    let blurredLayer: MetalEngineSubjectLayer
    let externalBlurredLayer: MetalEngineSubjectLayer

    private var phase: Float = 0.0

@@ -100,6 +101,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {

    override init() {
        self.blurredLayer = MetalEngineSubjectLayer()
        self.externalBlurredLayer = MetalEngineSubjectLayer()

        self.colorSets = [
            ColorSet(colors: [
@@ -125,6 +127,8 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {

        super.init()

        self.blurredLayer.cloneLayers.append(self.externalBlurredLayer)

        self.didEnterHierarchy = { [weak self] in
            guard let self else {
                return
@@ -154,6 +158,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {

    override init(layer: Any) {
        self.blurredLayer = MetalEngineSubjectLayer()
        self.externalBlurredLayer = MetalEngineSubjectLayer()
        self.colorSets = []
        self.colorTransition = AnimatedProperty<ColorSet>(ColorSet(colors: []))

@@ -187,7 +192,8 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject {
        for i in 0 ..< 2 {
            let isBlur = i == 1
            context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: i == 0 ? self : self.blurredLayer, commands: { encoder, placement in
                let effectiveRect = placement.effectiveRect
                var effectiveRect = placement.effectiveRect
                effectiveRect = effectiveRect.insetBy(dx: -effectiveRect.width * 0.1, dy: -effectiveRect.height * 0.1)

                var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
                encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
@@ -36,18 +36,6 @@ final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
    }

    final class RenderState: RenderToLayerState {
        final class Input {
            let rect: CGRect
            let blobs: [Blob]
            let phase: Float

            init(rect: CGRect, blobs: [Blob], phase: Float) {
                self.rect = rect
                self.blobs = blobs
                self.phase = phase
            }
        }

        let pipelineState: MTLRenderPipelineState

        required init?(device: MTLDevice) {
@@ -133,7 +121,7 @@ final class CallBlobsLayer: MetalEngineSubjectLayer, MetalEngineSubject {
        let phase = self.phase
        let blobs = self.blobs

        context.renderToLayer(spec: RenderLayerSpec(size: RenderSize(width: Int(self.bounds.width * 3.0), height: Int(self.bounds.height * 3.0))), state: RenderState.self, layer: self, commands: { encoder, placement in
        context.renderToLayer(spec: RenderLayerSpec(size: RenderSize(width: Int(self.bounds.width * 3.0), height: Int(self.bounds.height * 3.0)), edgeInset: 4), state: RenderState.self, layer: self, commands: { encoder, placement in
            let rect = placement.effectiveRect

            for i in 0 ..< blobs.count {
@@ -6,11 +6,9 @@ import ComponentFlow
final class EmojiExpandedInfoView: OverlayMaskContainerView {
    private struct Params: Equatable {
        var constrainedWidth: CGFloat
        var sideInset: CGFloat

        init(constrainedWidth: CGFloat, sideInset: CGFloat) {
        init(constrainedWidth: CGFloat) {
            self.constrainedWidth = constrainedWidth
            self.sideInset = sideInset
        }
    }

@@ -28,6 +26,8 @@ final class EmojiExpandedInfoView: OverlayMaskContainerView {
    private let text: String

    private let backgroundView: UIImageView
    private let separatorLayer: SimpleLayer

    private let titleView: TextView
    private let textView: TextView

@@ -56,6 +56,8 @@ final class EmojiExpandedInfoView: OverlayMaskContainerView {
            context.fill(CGRect(origin: CGPoint(x: 0.0, y: size.height - buttonHeight), size: CGSize(width: size.width, height: UIScreenPixel)))
        })?.stretchableImage(withLeftCapWidth: Int(cornerRadius) + 5, topCapHeight: Int(cornerRadius) + 5)

        self.separatorLayer = SimpleLayer()

        self.titleView = TextView()
        self.textView = TextView()

@@ -67,6 +69,8 @@ final class EmojiExpandedInfoView: OverlayMaskContainerView {

        self.maskContents.addSubview(self.backgroundView)

        self.layer.addSublayer(self.separatorLayer)

        self.addSubview(self.titleView)
        self.addSubview(self.textView)

@@ -100,6 +104,8 @@ final class EmojiExpandedInfoView: OverlayMaskContainerView {
            }
        }
        self.actionButton.addTarget(self, action: #selector(self.actionButtonPressed), for: .touchUpInside)

        self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
    }

    required init?(coder: NSCoder) {
@@ -110,8 +116,21 @@ final class EmojiExpandedInfoView: OverlayMaskContainerView {
        self.closeAction?()
    }

    func update(constrainedWidth: CGFloat, sideInset: CGFloat, transition: Transition) -> CGSize {
        let params = Params(constrainedWidth: constrainedWidth, sideInset: sideInset)
    @objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
        if case .ended = recognizer.state {
            self.closeAction?()
        }
    }

    override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
        if let result = self.actionButton.hitTest(self.convert(point, to: self.actionButton), with: event) {
            return result
        }
        return nil
    }

    func update(constrainedWidth: CGFloat, transition: Transition) -> CGSize {
        let params = Params(constrainedWidth: constrainedWidth)
        if let currentLayout = self.currentLayout, currentLayout.params == params {
            return currentLayout.size
        }
@@ -121,22 +140,32 @@ final class EmojiExpandedInfoView: OverlayMaskContainerView {
    }

    private func update(params: Params, transition: Transition) -> CGSize {
        let size = CGSize(width: 304.0, height: 227.0)
        let buttonHeight: CGFloat = 56.0

        var constrainedWidth = params.constrainedWidth
        constrainedWidth = min(constrainedWidth, 300.0)

        let titleSize = self.titleView.update(string: self.title, fontSize: 16.0, fontWeight: 0.3, alignment: .center, color: .white, constrainedWidth: constrainedWidth - 16.0 * 2.0, transition: transition)
        let textSize = self.textView.update(string: self.text, fontSize: 16.0, fontWeight: 0.0, alignment: .center, color: .white, constrainedWidth: constrainedWidth - 16.0 * 2.0, transition: transition)

        let contentWidth: CGFloat = max(titleSize.width, textSize.width) + 26.0 * 2.0
        let contentHeight = 78.0 + titleSize.height + 10.0 + textSize.height + 22.0 + buttonHeight

        let size = CGSize(width: contentWidth, height: contentHeight)

        transition.setFrame(view: self.backgroundView, frame: CGRect(origin: CGPoint(), size: size))

        let titleSize = self.titleView.update(string: self.title, fontSize: 16.0, fontWeight: 0.3, alignment: .center, color: .white, constrainedWidth: params.constrainedWidth - params.sideInset * 2.0 - 16.0 * 2.0, transition: transition)
        let titleFrame = CGRect(origin: CGPoint(x: floor((size.width - titleSize.width) * 0.5), y: 78.0), size: titleSize)
        transition.setFrame(view: self.titleView, frame: titleFrame)

        let textSize = self.textView.update(string: self.text, fontSize: 16.0, fontWeight: 0.0, alignment: .center, color: .white, constrainedWidth: params.constrainedWidth - params.sideInset * 2.0 - 16.0 * 2.0, transition: transition)
        let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) * 0.5), y: titleFrame.maxY + 10.0), size: textSize)
        transition.setFrame(view: self.textView, frame: textFrame)

        let buttonHeight: CGFloat = 56.0
        let buttonFrame = CGRect(origin: CGPoint(x: 0.0, y: size.height - buttonHeight), size: CGSize(width: size.width, height: buttonHeight))
        transition.setFrame(view: self.actionButton, frame: buttonFrame)

        transition.setFrame(layer: self.separatorLayer, frame: CGRect(origin: CGPoint(x: 0.0, y: size.height - buttonHeight), size: CGSize(width: size.width, height: UIScreenPixel)))

        let actionTitleSize = self.actionTitleView.update(string: "OK", fontSize: 19.0, fontWeight: 0.3, color: .white, constrainedWidth: size.width, transition: transition)
        let actionTitleFrame = CGRect(origin: CGPoint(x: floor((buttonFrame.width - actionTitleSize.width) * 0.5), y: floor((buttonFrame.height - actionTitleSize.height) * 0.5)), size: actionTitleSize)
        transition.setFrame(view: self.actionTitleView, frame: actionTitleFrame)
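The new sizing path above replaces the hard-coded 304x227 panel: the width becomes the wider of the measured title and text plus 26 pt side margins (with the measuring width capped at 300 pt), and the height stacks a 78 pt header, the title, a 10 pt gap, the text, a 22 pt gap and the 56 pt button. A quick worked example with assumed text measurements:

import CoreGraphics

// Assumed measured sizes, purely to illustrate the arithmetic.
let titleSize = CGSize(width: 240.0, height: 20.0)
let textSize = CGSize(width: 252.0, height: 40.0)
let buttonHeight: CGFloat = 56.0

let contentWidth = max(titleSize.width, textSize.width) + 26.0 * 2.0                        // 304
let contentHeight = 78.0 + titleSize.height + 10.0 + textSize.height + 22.0 + buttonHeight  // 226

// The panel now hugs localized strings instead of assuming they fit a fixed 304x227 frame.
let size = CGSize(width: contentWidth, height: contentHeight)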
@@ -29,6 +29,10 @@ final class KeyEmojiView: HighlightTrackingButton {

    private var currentLayout: Layout?

    var isExpanded: Bool? {
        return self.currentLayout?.params.isExpanded
    }

    init(emoji: [String]) {
        self.emoji = emoji
        self.emojiViews = emoji.map { _ in
@@ -126,3 +130,50 @@ final class KeyEmojiView: HighlightTrackingButton {
        return CGSize(width: nextX, height: height)
    }
}

func generateParabollicMotionKeyframes(from sourcePoint: CGPoint, to targetPosition: CGPoint, elevation: CGFloat, duration: Double, curve: Transition.Animation.Curve, reverse: Bool) -> [CGPoint] {
    let midPoint = CGPoint(x: (sourcePoint.x + targetPosition.x) / 2.0, y: sourcePoint.y - elevation)

    let x1 = sourcePoint.x
    let y1 = sourcePoint.y
    let x2 = midPoint.x
    let y2 = midPoint.y
    let x3 = targetPosition.x
    let y3 = targetPosition.y

    let numPoints: Int = Int(ceil(Double(UIScreen.main.maximumFramesPerSecond) * duration))

    var keyframes: [CGPoint] = []
    if abs(y1 - y3) < 5.0 && abs(x1 - x3) < 5.0 {
        for rawI in 0 ..< numPoints {
            let i = reverse ? (numPoints - 1 - rawI) : rawI
            let ks = CGFloat(i) / CGFloat(numPoints - 1)
            var k = curve.solve(at: reverse ? (1.0 - ks) : ks)
            if reverse {
                k = 1.0 - k
            }
            let x = sourcePoint.x * (1.0 - k) + targetPosition.x * k
            let y = sourcePoint.y * (1.0 - k) + targetPosition.y * k
            keyframes.append(CGPoint(x: x, y: y))
        }
    } else {
        let a = (x3 * (y2 - y1) + x2 * (y1 - y3) + x1 * (y3 - y2)) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
        let b = (x1 * x1 * (y2 - y3) + x3 * x3 * (y1 - y2) + x2 * x2 * (y3 - y1)) / ((x1 - x2) * (x1 - x3) * (x2 - x3))
        let c = (x2 * x2 * (x3 * y1 - x1 * y3) + x2 * (x1 * x1 * y3 - x3 * x3 * y1) + x1 * x3 * (x3 - x1) * y2) / ((x1 - x2) * (x1 - x3) * (x2 - x3))

        for rawI in 0 ..< numPoints {
            let i = reverse ? (numPoints - 1 - rawI) : rawI

            let ks = CGFloat(i) / CGFloat(numPoints - 1)
            var k = curve.solve(at: reverse ? (1.0 - ks) : ks)
            if reverse {
                k = 1.0 - k
            }
            let x = sourcePoint.x * (1.0 - k) + targetPosition.x * k
            let y = a * x * x + b * x + c
            keyframes.append(CGPoint(x: x, y: y))
        }
    }

    return keyframes
}
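For reference, the a, b and c used above are the closed form of the unique parabola y = ax² + bx + c through the three points (x1, y1), (x2, y2), (x3, y3): the source, the raised midpoint, and the target. Only y follows the arc; x is still interpolated linearly through the timing curve. In LaTeX form:

\[
a = \frac{x_3(y_2 - y_1) + x_2(y_1 - y_3) + x_1(y_3 - y_2)}{(x_1 - x_2)(x_1 - x_3)(x_2 - x_3)}, \qquad
b = \frac{x_1^2(y_2 - y_3) + x_3^2(y_1 - y_2) + x_2^2(y_3 - y_1)}{(x_1 - x_2)(x_1 - x_3)(x_2 - x_3)},
\]
\[
c = \frac{x_2^2(x_3 y_1 - x_1 y_3) + x_2(x_1^2 y_3 - x_3^2 y_1) + x_1 x_3(x_3 - x_1) y_2}{(x_1 - x_2)(x_1 - x_3)(x_2 - x_3)}
\]

With x2 = (x1 + x3) / 2 and y2 = y1 - elevation, the arc peaks roughly elevation points above the source; the near-degenerate case (source and target within 5 pt of each other on both axes) falls back to straight linear interpolation, avoiding division by a vanishing (x1 - x2)(x1 - x3)(x2 - x3).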
@@ -26,7 +26,6 @@ final class TextView: UIView {

        self.isOpaque = false
        self.backgroundColor = nil
        self.contentMode = .center
    }

    required init?(coder: NSCoder) {
@@ -630,7 +630,7 @@ final class VideoContainerView: HighlightTrackingButton {
            }
        })

        self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(videoLayout.rotatedVideoResolution.width), height: Int(videoLayout.rotatedVideoResolution.height)))
        self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(videoLayout.rotatedVideoResolution.width), height: Int(videoLayout.rotatedVideoResolution.height)), edgeInset: 2)
    } else {
        var rotatedResolution = videoMetrics.resolution
        var videoIsRotated = false
@@ -719,7 +719,7 @@ final class VideoContainerView: HighlightTrackingButton {
        videoTransition.setTransform(layer: self.videoLayer.blurredLayer, transform: CATransform3DMakeRotation(CGFloat(videoMetrics.rotationAngle), 0.0, 0.0, 1.0))

        if !params.isAnimatedOut {
            self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)))
            self.videoLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(rotatedVideoResolution.width), height: Int(rotatedVideoResolution.height)), edgeInset: 2)
        }
    }
@@ -5,6 +5,53 @@ import MetalEngine
import ComponentFlow
import SwiftSignalKit

/*private final class EdgeTestLayer: MetalEngineSubjectLayer, MetalEngineSubject {
    final class RenderState: RenderToLayerState {
        let pipelineState: MTLRenderPipelineState

        required init?(device: MTLDevice) {
            guard let library = metalLibrary(device: device) else {
                return nil
            }
            guard let vertexFunction = library.makeFunction(name: "edgeTestVertex"), let fragmentFunction = library.makeFunction(name: "edgeTestFragment") else {
                return nil
            }

            let pipelineDescriptor = MTLRenderPipelineDescriptor()
            pipelineDescriptor.vertexFunction = vertexFunction
            pipelineDescriptor.fragmentFunction = fragmentFunction
            pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
            pipelineDescriptor.colorAttachments[0].isBlendingEnabled = true
            pipelineDescriptor.colorAttachments[0].rgbBlendOperation = .add
            pipelineDescriptor.colorAttachments[0].alphaBlendOperation = .add
            pipelineDescriptor.colorAttachments[0].sourceRGBBlendFactor = .one
            pipelineDescriptor.colorAttachments[0].sourceAlphaBlendFactor = .one
            pipelineDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha
            pipelineDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .one

            guard let pipelineState = try? device.makeRenderPipelineState(descriptor: pipelineDescriptor) else {
                return nil
            }
            self.pipelineState = pipelineState
        }
    }

    var internalData: MetalEngineSubjectInternalData?

    func update(context: MetalEngineSubjectContext) {
        context.renderToLayer(spec: RenderLayerSpec(size: RenderSize(width: 300, height: 300), edgeInset: 100), state: RenderState.self, layer: self, commands: { encoder, placement in
            let effectiveRect = placement.effectiveRect

            var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width * 0.5), Float(effectiveRect.height))
            encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)

            var color = SIMD4<Float>(1.0, 0.0, 0.0, 1.0)
            encoder.setFragmentBytes(&color, length: 4 * 4, index: 0)
            encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
        })
    }
}*/
public final class PrivateCallScreen: OverlayMaskContainerView {
    public struct State: Equatable {
        public struct SignalInfo: Equatable {
@@ -50,6 +97,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {

        public var lifecycleState: LifecycleState
        public var name: String
        public var shortName: String
        public var avatarImage: UIImage?
        public var audioOutput: AudioOutput
        public var isMicrophoneMuted: Bool
@@ -59,6 +107,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        public init(
            lifecycleState: LifecycleState,
            name: String,
            shortName: String,
            avatarImage: UIImage?,
            audioOutput: AudioOutput,
            isMicrophoneMuted: Bool,
@@ -67,6 +116,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        ) {
            self.lifecycleState = lifecycleState
            self.name = name
            self.shortName = shortName
            self.avatarImage = avatarImage
            self.audioOutput = audioOutput
            self.isMicrophoneMuted = isMicrophoneMuted
@@ -81,6 +131,9 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
            if lhs.name != rhs.name {
                return false
            }
            if lhs.shortName != rhs.shortName {
                return false
            }
            if lhs.avatarImage != rhs.avatarImage {
                return false
            }
@@ -119,7 +172,10 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
    private let backgroundLayer: CallBackgroundLayer
    private let overlayContentsView: UIView
    private let buttonGroupView: ButtonGroupView
    private let blobTransformLayer: SimpleLayer
    private let blobBackgroundLayer: CALayer
    private let blobLayer: CallBlobsLayer
    private let avatarTransformLayer: SimpleLayer
    private let avatarLayer: AvatarLayer
    private let titleView: TextView

@@ -140,6 +196,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
    private var activeLocalVideoSource: VideoSource?
    private var waitingForFirstLocalVideoFrameDisposable: Disposable?

    private var canAnimateAudioLevel: Bool = false
    private var isEmojiKeyExpanded: Bool = false
    private var areControlsHidden: Bool = false
    private var swapLocalAndRemoteVideo: Bool = false
@@ -147,6 +204,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
    private var processedInitialAudioLevelBump: Bool = false
    private var audioLevelBump: Float = 0.0

    private var currentAvatarAudioScale: CGFloat = 1.0
    private var targetAudioLevel: Float = 0.0
    private var audioLevel: Float = 0.0
    private var audioLevelUpdateSubscription: SharedDisplayLinkDriver.Link?
@@ -165,7 +223,12 @@ public final class PrivateCallScreen: OverlayMaskContainerView {

        self.buttonGroupView = ButtonGroupView()

        self.blobTransformLayer = SimpleLayer()
        self.blobBackgroundLayer = self.backgroundLayer.externalBlurredLayer
        self.blobLayer = CallBlobsLayer()
        self.blobBackgroundLayer.mask = self.blobTransformLayer

        self.avatarTransformLayer = SimpleLayer()
        self.avatarLayer = AvatarLayer()

        self.videoContainerBackgroundView = RoundedCornersView(color: .black)
@@ -176,13 +239,22 @@ public final class PrivateCallScreen: OverlayMaskContainerView {

        super.init(frame: frame)

        self.clipsToBounds = true

        self.layer.addSublayer(self.backgroundLayer)
        self.overlayContentsView.layer.addSublayer(self.backgroundLayer.blurredLayer)

        self.overlayContentsView.addSubview(self.overlayContentsVideoContainerBackgroundView)

        self.layer.addSublayer(self.blobLayer)
        self.layer.addSublayer(self.avatarLayer)
        self.layer.addSublayer(self.blobBackgroundLayer)
        self.blobTransformLayer.addSublayer(self.blobLayer)

        self.avatarTransformLayer.addSublayer(self.avatarLayer)
        self.layer.addSublayer(self.avatarTransformLayer)

        /*let edgeTestLayer = EdgeTestLayer()
        edgeTestLayer.frame = CGRect(origin: CGPoint(x: 20.0, y: 100.0), size: CGSize(width: 100.0, height: 100.0))
        self.layer.addSublayer(edgeTestLayer)*/

        self.addSubview(self.videoContainerBackgroundView)
@@ -233,11 +305,21 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
            return nil
        }

        if let emojiExpandedInfoView = self.emojiExpandedInfoView, self.isEmojiKeyExpanded {
            if !result.isDescendant(of: emojiExpandedInfoView) {
                return emojiExpandedInfoView
            }
        }

        return result
    }

    public func addIncomingAudioLevel(value: Float) {
        self.targetAudioLevel = value
        if self.canAnimateAudioLevel {
            self.targetAudioLevel = value
        } else {
            self.targetAudioLevel = 0.0
        }
    }

    private func attenuateAudioLevelStep() {
@@ -249,14 +331,15 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
    }

    private func updateAudioLevel() {
        if self.activeRemoteVideoSource == nil && self.activeLocalVideoSource == nil {
        if self.canAnimateAudioLevel {
            let additionalAvatarScale = CGFloat(max(0.0, min(self.audioLevel, 5.0)) * 0.05)
            self.avatarLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale, 1.0 + additionalAvatarScale, 1.0)
            self.currentAvatarAudioScale = 1.0 + additionalAvatarScale
            self.avatarTransformLayer.transform = CATransform3DMakeScale(self.currentAvatarAudioScale, self.currentAvatarAudioScale, 1.0)

            if let params = self.params, case .terminated = params.state.lifecycleState {
            } else {
                let blobAmplificationFactor: CGFloat = 2.0
                self.blobLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0)
                self.blobTransformLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0)
            }
        }
    }
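On the mapping in updateAudioLevel above: the smoothed level is clamped to [0, 5] and scaled by 0.05, so the avatar grows by at most 25 percent, while the surrounding blob is amplified twice as much; both now go through dedicated transform layers so the base layers keep plain geometry. The rule in isolation (factors copied from the code, inputs illustrative):

import CoreGraphics

// Clamp the level to [0, 5] and derive the avatar / blob scale factors used by the call screen.
func avatarAndBlobScales(for audioLevel: Float) -> (avatar: CGFloat, blob: CGFloat) {
    let additionalAvatarScale = CGFloat(max(0.0, min(audioLevel, 5.0)) * 0.05)   // 0.0 ... 0.25
    let blobAmplificationFactor: CGFloat = 2.0
    return (1.0 + additionalAvatarScale, 1.0 + additionalAvatarScale * blobAmplificationFactor)
}

let quiet = avatarAndBlobScales(for: 0.0)   // (1.0, 1.0)
let loud = avatarAndBlobScales(for: 5.0)    // (1.25, 1.5)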
@@ -396,11 +479,11 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        let backgroundAspect: CGFloat = params.size.width / params.size.height
        let backgroundSizeNorm: CGFloat = 64.0
        let backgroundRenderingSize = CGSize(width: floor(backgroundSizeNorm * backgroundAspect), height: backgroundSizeNorm)
        let backgroundEdgeSize: Int = 2
        let visualBackgroundFrame = backgroundFrame.insetBy(dx: -CGFloat(backgroundEdgeSize) / backgroundRenderingSize.width * backgroundFrame.width, dy: -CGFloat(backgroundEdgeSize) / backgroundRenderingSize.height * backgroundFrame.height)
        self.backgroundLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(backgroundRenderingSize.width) + backgroundEdgeSize * 2, height: Int(backgroundRenderingSize.height) + backgroundEdgeSize * 2))
        let visualBackgroundFrame = backgroundFrame
        self.backgroundLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(backgroundRenderingSize.width), height: Int(backgroundRenderingSize.height)), edgeInset: 8)
        transition.setFrame(layer: self.backgroundLayer, frame: visualBackgroundFrame)
        transition.setFrame(layer: self.backgroundLayer.blurredLayer, frame: visualBackgroundFrame)
        transition.setFrame(layer: self.blobBackgroundLayer, frame: visualBackgroundFrame)

        let backgroundStateIndex: Int
        switch params.state.lifecycleState {
@@ -460,19 +543,27 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        }
        let contentBottomInset = self.buttonGroupView.update(size: params.size, insets: params.insets, controlsHidden: currentAreControlsHidden, buttons: buttons, transition: transition)

        var expandedEmojiKeyRect: CGRect?
        if self.isEmojiKeyExpanded {
            let emojiExpandedInfoView: EmojiExpandedInfoView
            var emojiExpandedInfoTransition = transition
            var animateIn = false
            let alphaTransition: Transition
            if let current = self.emojiExpandedInfoView {
                emojiExpandedInfoView = current
                alphaTransition = genericAlphaTransition
            } else {
                emojiExpandedInfoTransition = emojiExpandedInfoTransition.withAnimation(.none)
                animateIn = true
                if !genericAlphaTransition.animation.isImmediate {
                    alphaTransition = genericAlphaTransition.withAnimation(.curve(duration: 0.1, curve: .easeInOut))
                } else {
                    alphaTransition = genericAlphaTransition
                }

                emojiExpandedInfoView = EmojiExpandedInfoView(title: "This call is end-to-end encrypted", text: "If the emoji on Emma's screen are the same, this call is 100% secure.")
                emojiExpandedInfoView = EmojiExpandedInfoView(title: "This call is end-to-end encrypted", text: "If the emoji on \(params.state.shortName)'s screen are the same, this call is 100% secure.")
                self.emojiExpandedInfoView = emojiExpandedInfoView
                emojiExpandedInfoView.layer.anchorPoint = CGPoint(x: 1.0, y: 0.0)
                emojiExpandedInfoView.alpha = 0.0
                Transition.immediate.setScale(view: emojiExpandedInfoView, scale: 0.5)
                emojiExpandedInfoView.layer.anchorPoint = CGPoint(x: 0.5, y: 0.1)
                if let emojiView = self.emojiView {
                    self.insertSubview(emojiExpandedInfoView, belowSubview: emojiView)
                } else {
@@ -488,22 +579,30 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
                }
            }

            let emojiExpandedInfoSize = emojiExpandedInfoView.update(constrainedWidth: params.size.width, sideInset: params.insets.left + 44.0, transition: emojiExpandedInfoTransition)
            let emojiExpandedInfoSize = emojiExpandedInfoView.update(constrainedWidth: params.size.width - (params.insets.left + 16.0) * 2.0, transition: emojiExpandedInfoTransition)
            let emojiExpandedInfoFrame = CGRect(origin: CGPoint(x: floor((params.size.width - emojiExpandedInfoSize.width) * 0.5), y: params.insets.top + 73.0), size: emojiExpandedInfoSize)
            emojiExpandedInfoTransition.setPosition(view: emojiExpandedInfoView, position: CGPoint(x: emojiExpandedInfoFrame.maxX, y: emojiExpandedInfoFrame.minY))
            emojiExpandedInfoTransition.setPosition(view: emojiExpandedInfoView, position: CGPoint(x: emojiExpandedInfoFrame.minX + emojiExpandedInfoView.layer.anchorPoint.x * emojiExpandedInfoFrame.width, y: emojiExpandedInfoFrame.minY + emojiExpandedInfoView.layer.anchorPoint.y * emojiExpandedInfoFrame.height))
            emojiExpandedInfoTransition.setBounds(view: emojiExpandedInfoView, bounds: CGRect(origin: CGPoint(), size: emojiExpandedInfoFrame.size))

            if animateIn {
                transition.animateAlpha(view: emojiExpandedInfoView, from: 0.0, to: 1.0)
                transition.animateScale(view: emojiExpandedInfoView, from: 0.001, to: 1.0)
            }
            alphaTransition.setAlpha(view: emojiExpandedInfoView, alpha: 1.0)
            transition.setScale(view: emojiExpandedInfoView, scale: 1.0)

            expandedEmojiKeyRect = emojiExpandedInfoFrame
        } else {
            if let emojiExpandedInfoView = self.emojiExpandedInfoView {
                self.emojiExpandedInfoView = nil
                transition.setAlpha(view: emojiExpandedInfoView, alpha: 0.0, completion: { [weak emojiExpandedInfoView] _ in

                let alphaTransition: Transition
                if !genericAlphaTransition.animation.isImmediate {
                    alphaTransition = genericAlphaTransition.withAnimation(.curve(duration: 0.1, curve: .easeInOut))
                } else {
                    alphaTransition = genericAlphaTransition
                }

                alphaTransition.setAlpha(view: emojiExpandedInfoView, alpha: 0.0, completion: { [weak emojiExpandedInfoView] _ in
                    emojiExpandedInfoView?.removeFromSuperview()
                })
                transition.setScale(view: emojiExpandedInfoView, scale: 0.001)
                transition.setScale(view: emojiExpandedInfoView, scale: 0.5)
            }
        }
@@ -536,11 +635,21 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
            }
            emojiView.isUserInteractionEnabled = !self.isEmojiKeyExpanded

            let emojiViewWasExpanded = emojiView.isExpanded
            let emojiViewSize = emojiView.update(isExpanded: self.isEmojiKeyExpanded, transition: emojiTransition)

            if self.isEmojiKeyExpanded {
                let emojiViewFrame = CGRect(origin: CGPoint(x: floor((params.size.width - emojiViewSize.width) * 0.5), y: params.insets.top + 93.0), size: emojiViewSize)
                emojiTransition.setFrame(view: emojiView, frame: emojiViewFrame)

                if case let .curve(duration, curve) = transition.animation, let emojiViewWasExpanded, !emojiViewWasExpanded {
                    let distance = CGPoint(x: emojiViewFrame.midX - emojiView.center.x, y: emojiViewFrame.midY - emojiView.center.y)
                    let positionKeyframes = generateParabollicMotionKeyframes(from: emojiView.center, to: emojiViewFrame.center, elevation: -distance.y * 0.8, duration: duration, curve: curve, reverse: false)
                    emojiView.center = emojiViewFrame.center
                    emojiView.layer.animateKeyframes(values: positionKeyframes.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position", additive: false)
                } else {
                    emojiTransition.setPosition(view: emojiView, position: emojiViewFrame.center)
                }
                emojiTransition.setBounds(view: emojiView, bounds: CGRect(origin: CGPoint(), size: emojiViewFrame.size))
            } else {
                let emojiY: CGFloat
                if currentAreControlsHidden {
@@ -548,7 +657,17 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
                } else {
                    emojiY = params.insets.top + 12.0
                }
                emojiTransition.setFrame(view: emojiView, frame: CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 12.0 - emojiViewSize.width, y: emojiY), size: emojiViewSize))
                let emojiViewFrame = CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 12.0 - emojiViewSize.width, y: emojiY), size: emojiViewSize)

                if case let .curve(duration, curve) = transition.animation, let emojiViewWasExpanded, emojiViewWasExpanded {
                    let distance = CGPoint(x: emojiViewFrame.midX - emojiView.center.x, y: emojiViewFrame.midY - emojiView.center.y)
                    let positionKeyframes = generateParabollicMotionKeyframes(from: emojiViewFrame.center, to: emojiView.center, elevation: distance.y * 0.8, duration: duration, curve: curve, reverse: true)
                    emojiView.center = emojiViewFrame.center
                    emojiView.layer.animateKeyframes(values: positionKeyframes.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position", additive: false)
                } else {
                    emojiTransition.setPosition(view: emojiView, position: emojiViewFrame.center)
                }
                emojiTransition.setBounds(view: emojiView, bounds: CGRect(origin: CGPoint(), size: emojiViewFrame.size))
                emojiAlphaTransition.setAlpha(view: emojiView, alpha: currentAreControlsHidden ? 0.0 : 1.0)
            }
@@ -565,7 +684,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        let collapsedAvatarSize: CGFloat = 136.0
        let blobSize: CGFloat = collapsedAvatarSize + 40.0

        let collapsedAvatarFrame = CGRect(origin: CGPoint(x: floor((params.size.width - collapsedAvatarSize) * 0.5), y: 222.0), size: CGSize(width: collapsedAvatarSize, height: collapsedAvatarSize))
        let collapsedAvatarFrame = CGRect(origin: CGPoint(x: floor((params.size.width - collapsedAvatarSize) * 0.5), y: max(params.insets.top + 8.0, floor(params.size.height * 0.49) - 39.0 - collapsedAvatarSize)), size: CGSize(width: collapsedAvatarSize, height: collapsedAvatarSize))
        let expandedAvatarFrame = CGRect(origin: CGPoint(), size: params.size)
        let expandedVideoFrame = CGRect(origin: CGPoint(), size: params.size)
        let avatarFrame = havePrimaryVideo ? expandedAvatarFrame : collapsedAvatarFrame
@@ -625,13 +744,15 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
            let videoContainerTransition = transition
            if animateIn {
                if i == 0 && self.videoContainerViews.count == 1 {
                    videoContainerView.layer.position = self.avatarLayer.position
                    videoContainerView.layer.bounds = self.avatarLayer.bounds
                    videoContainerView.layer.position = self.avatarTransformLayer.position
                    videoContainerView.layer.bounds = self.avatarTransformLayer.bounds
                    videoContainerView.alpha = 0.0
                    videoContainerView.blurredContainerLayer.position = self.avatarLayer.position
                    videoContainerView.blurredContainerLayer.bounds = self.avatarLayer.bounds
                    videoContainerView.blurredContainerLayer.position = self.avatarTransformLayer.position
                    videoContainerView.blurredContainerLayer.bounds = self.avatarTransformLayer.bounds
                    videoContainerView.blurredContainerLayer.opacity = 0.0
                    videoContainerView.update(size: self.avatarLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 0.0, controlsHidden: currentAreControlsHidden, isMinimized: false, isAnimatedOut: true, transition: .immediate)
                    videoContainerView.update(size: self.avatarTransformLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 0.0, controlsHidden: currentAreControlsHidden, isMinimized: false, isAnimatedOut: true, transition: .immediate)
                    Transition.immediate.setScale(view: videoContainerView, scale: self.currentAvatarAudioScale)
                    Transition.immediate.setScale(view: self.videoContainerBackgroundView, scale: self.currentAvatarAudioScale)
                } else {
                    videoContainerView.layer.position = expandedVideoFrame.center
                    videoContainerView.layer.bounds = CGRect(origin: CGPoint(), size: expandedVideoFrame.size)
@@ -639,14 +760,16 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
                    videoContainerView.blurredContainerLayer.position = expandedVideoFrame.center
                    videoContainerView.blurredContainerLayer.bounds = CGRect(origin: CGPoint(), size: expandedVideoFrame.size)
                    videoContainerView.blurredContainerLayer.opacity = 0.0
                    videoContainerView.update(size: self.avatarLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: i != 0, isAnimatedOut: i != 0, transition: .immediate)
                    videoContainerView.update(size: self.avatarTransformLayer.bounds.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: i != 0, isAnimatedOut: i != 0, transition: .immediate)
                }
            }

            videoContainerTransition.setPosition(view: videoContainerView, position: expandedVideoFrame.center)
            videoContainerTransition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
            videoContainerTransition.setScale(view: videoContainerView, scale: 1.0)
            videoContainerTransition.setPosition(layer: videoContainerView.blurredContainerLayer, position: expandedVideoFrame.center)
            videoContainerTransition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: expandedVideoFrame.size))
            videoContainerTransition.setScale(layer: videoContainerView.blurredContainerLayer, scale: 1.0)
            videoContainerView.update(size: expandedVideoFrame.size, insets: minimizedVideoInsets, cornerRadius: params.screenCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: i != 0, isAnimatedOut: false, transition: videoContainerTransition)

            let alphaTransition: Transition
@@ -679,7 +802,7 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
            if !validVideoContainerKeys.contains(videoContainerView.key) {
                removedVideoContainerIndices.append(i)

                if self.videoContainerViews.count == 1 {
                if self.videoContainerViews.count == 1 || (i == 0 && !havePrimaryVideo) {
                    let alphaTransition: Transition = genericAlphaTransition

                    videoContainerView.update(size: avatarFrame.size, insets: minimizedVideoInsets, cornerRadius: avatarCornerRadius, controlsHidden: currentAreControlsHidden, isMinimized: false, isAnimatedOut: true, transition: transition)
@@ -732,13 +855,48 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        if self.avatarLayer.image !== params.state.avatarImage {
            self.avatarLayer.image = params.state.avatarImage
        }
        transition.setPosition(layer: self.avatarLayer, position: avatarFrame.center)
        transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))

        transition.setPosition(layer: self.avatarTransformLayer, position: avatarFrame.center)
        transition.setBounds(layer: self.avatarTransformLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
        transition.setPosition(layer: self.avatarLayer, position: CGPoint(x: avatarFrame.width * 0.5, y: avatarFrame.height * 0.5))

        if havePrimaryVideo != self.avatarLayer.params?.isExpanded {
            if havePrimaryVideo {
                self.canAnimateAudioLevel = false
                self.audioLevel = 0.0
                self.currentAvatarAudioScale = 1.0
                transition.setScale(layer: self.avatarTransformLayer, scale: 1.0)
                transition.setScale(layer: self.blobTransformLayer, scale: 1.0)
            }
            transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size), completion: { [weak self] completed in
                guard let self, let params = self.params, completed else {
                    return
                }
                if !havePrimaryVideo {
                    switch params.state.lifecycleState {
                    case .terminated:
                        break
                    default:
                        self.canAnimateAudioLevel = true
                    }
                }
            })
        } else {
            transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
        }

        var expandedEmojiKeyOverlapsAvatar = false
        if let expandedEmojiKeyRect, collapsedAvatarFrame.insetBy(dx: -40.0, dy: -40.0).intersects(expandedEmojiKeyRect) {
            expandedEmojiKeyOverlapsAvatar = true
        }

        self.avatarLayer.update(size: collapsedAvatarFrame.size, isExpanded: havePrimaryVideo, cornerRadius: avatarCornerRadius, transition: transition)
        transition.setAlpha(layer: self.avatarLayer, alpha: (self.isEmojiKeyExpanded && !havePrimaryVideo) ? 0.0 : 1.0)
        transition.setAlpha(layer: self.avatarLayer, alpha: (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo) ? 0.0 : 1.0)
        transition.setScale(layer: self.avatarLayer, scale: expandedEmojiKeyOverlapsAvatar ? 0.001 : 1.0)

        transition.setPosition(view: self.videoContainerBackgroundView, position: avatarFrame.center)
        transition.setBounds(view: self.videoContainerBackgroundView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size))
        transition.setScale(view: self.videoContainerBackgroundView, scale: 1.0)
        transition.setAlpha(view: self.videoContainerBackgroundView, alpha: havePrimaryVideo ? 1.0 : 0.0)
        self.videoContainerBackgroundView.update(cornerRadius: havePrimaryVideo ? params.screenCornerRadius : avatarCornerRadius, transition: transition)
@@ -748,22 +906,28 @@ public final class PrivateCallScreen: OverlayMaskContainerView {
        self.overlayContentsVideoContainerBackgroundView.update(cornerRadius: havePrimaryVideo ? params.screenCornerRadius : avatarCornerRadius, transition: transition)

        let blobFrame = CGRect(origin: CGPoint(x: floor(avatarFrame.midX - blobSize * 0.5), y: floor(avatarFrame.midY - blobSize * 0.5)), size: CGSize(width: blobSize, height: blobSize))
        transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY))
        transition.setPosition(layer: self.blobTransformLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY))
        transition.setBounds(layer: self.blobTransformLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size))
        transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.width * 0.5, y: blobFrame.height * 0.5))
        transition.setBounds(layer: self.blobLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size))

        let titleString: String
        switch params.state.lifecycleState {
        case .terminated:
            self.titleView.contentMode = .center
            titleString = "Call Ended"
            if !transition.animation.isImmediate {
                transition.withAnimation(.curve(duration: 0.3, curve: .easeInOut)).setScale(layer: self.blobLayer, scale: 0.3)
            } else {
                transition.setScale(layer: self.blobLayer, scale: 0.3)
            }
            transition.setAlpha(layer: self.blobLayer, alpha: 0.0)
            genericAlphaTransition.setScale(layer: self.blobLayer, scale: 0.3)
            genericAlphaTransition.setAlpha(layer: self.blobLayer, alpha: 0.0)
            self.canAnimateAudioLevel = false
            self.audioLevel = 0.0
            self.currentAvatarAudioScale = 1.0
            transition.setScale(layer: self.avatarTransformLayer, scale: 1.0)
            transition.setScale(layer: self.blobTransformLayer, scale: 1.0)
        default:
            self.titleView.contentMode = .scaleToFill
            titleString = params.state.name
            transition.setAlpha(layer: self.blobLayer, alpha: (self.isEmojiKeyExpanded && !havePrimaryVideo) ? 0.0 : 1.0)
            genericAlphaTransition.setAlpha(layer: self.blobLayer, alpha: (expandedEmojiKeyOverlapsAvatar && !havePrimaryVideo) ? 0.0 : 1.0)
            transition.setScale(layer: self.blobLayer, scale: expandedEmojiKeyOverlapsAvatar ? 0.001 : 1.0)
        }

        let titleSize = self.titleView.update(