diff --git a/Tests/CallUITest/Sources/ViewController.swift b/Tests/CallUITest/Sources/ViewController.swift index f61cb24fd5..da7ac61a97 100644 --- a/Tests/CallUITest/Sources/ViewController.swift +++ b/Tests/CallUITest/Sources/ViewController.swift @@ -10,7 +10,11 @@ public final class ViewController: UIViewController { private var callState: PrivateCallScreen.State = PrivateCallScreen.State( lifecycleState: .connecting, name: "Emma Walters", - avatarImage: UIImage(named: "test") + avatarImage: UIImage(named: "test"), + audioOutput: .internalSpeaker, + isMicrophoneMuted: false, + localVideo: nil, + remoteVideo: nil ) override public func viewDidLoad() { @@ -27,7 +31,59 @@ public final class ViewController: UIViewController { self.callScreenView = callScreenView self.view.addSubview(callScreenView) - self.update(size: self.view.bounds.size, transition: .immediate) + callScreenView.speakerAction = { [weak self] in + guard let self else { + return + } + + switch self.callState.lifecycleState { + case .connecting: + self.callState.lifecycleState = .ringing + case .ringing: + self.callState.lifecycleState = .exchangingKeys + case .exchangingKeys: + self.callState.lifecycleState = .active(PrivateCallScreen.State.ActiveState( + startTime: Date().timeIntervalSince1970, + signalInfo: PrivateCallScreen.State.SignalInfo(quality: 1.0), + emojiKey: ["A", "B", "C", "D"] + )) + case var .active(activeState): + activeState.signalInfo.quality = activeState.signalInfo.quality == 1.0 ? 0.1 : 1.0 + self.callState.lifecycleState = .active(activeState) + case .terminated: + break + } + + self.update(transition: .spring(duration: 0.4)) + } + callScreenView.videoAction = { [weak self] in + guard let self else { + return + } + if self.callState.remoteVideo == nil { + self.callState.remoteVideo = FileVideoSource(device: MetalEngine.shared.device, url: Bundle.main.url(forResource: "test2", withExtension: "mp4")!) 
+ } else { + self.callState.remoteVideo = nil + } + self.update(transition: .spring(duration: 0.4)) + } + callScreenView.microhoneMuteAction = { [weak self] in + self?.callState.isMicrophoneMuted.toggle() + self?.update(transition: .spring(duration: 0.4)) + } + callScreenView.endCallAction = { [weak self] in + guard let self else { + return + } + self.callState.lifecycleState = .terminated(PrivateCallScreen.State.TerminatedState(duration: 82.0)) + self.update(transition: .spring(duration: 0.4)) + } + + self.update(transition: .immediate) + } + + private func update(transition: Transition) { + self.update(size: self.view.bounds.size, transition: transition) } private func update(size: CGSize, transition: Transition) { diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift index 823ecf3866..61ccb20fb9 100644 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ButtonGroupView.swift @@ -4,7 +4,7 @@ import Display import ComponentFlow import AppBundle -final class ButtonGroupView: UIView, ContentOverlayView { +final class ButtonGroupView: OverlayMaskContainerView { final class Button { enum Content: Equatable { enum Key: Hashable { @@ -42,14 +42,10 @@ final class ButtonGroupView: UIView, ContentOverlayView { } } - let overlayMaskLayer: CALayer - private var buttons: [Button]? private var buttonViews: [Button.Content.Key: ContentOverlayButton] = [:] override init(frame: CGRect) { - self.overlayMaskLayer = SimpleLayer() - super.init(frame: frame) } @@ -57,46 +53,6 @@ final class ButtonGroupView: UIView, ContentOverlayView { fatalError("init(coder:) has not been implemented") } - override func addSubview(_ view: UIView) { - super.addSubview(view) - - if let view = view as? 
ContentOverlayView { - self.overlayMaskLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func insertSubview(_ view: UIView, at index: Int) { - super.insertSubview(view, at: index) - - if let view = view as? ContentOverlayView { - self.overlayMaskLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func insertSubview(_ view: UIView, aboveSubview siblingSubview: UIView) { - super.insertSubview(view, aboveSubview: siblingSubview) - - if let view = view as? ContentOverlayView { - self.overlayMaskLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func insertSubview(_ view: UIView, belowSubview siblingSubview: UIView) { - super.insertSubview(view, belowSubview: siblingSubview) - - if let view = view as? ContentOverlayView { - self.overlayMaskLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func willRemoveSubview(_ subview: UIView) { - super.willRemoveSubview(subview) - - if let view = subview as? ContentOverlayView { - view.overlayMaskLayer.removeFromSuperlayer() - } - } - func update(size: CGSize, buttons: [Button], transition: Transition) { self.buttons = buttons diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift index e2b8eeb758..465b0f2c69 100644 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/CallBackgroundLayer.swift @@ -78,7 +78,8 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject { SIMD2(x: 0.75, y: 0.40) ] - private var isBlur: Bool = false + let blurredLayer: MetalEngineSubjectLayer + private var phase: Float = 0.0 private var displayLinkSubscription: SharedDisplayLink.Subscription? 
@@ -96,8 +97,8 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject { private var stateIndex: Int = 0 private let phaseAcceleration = AnimatedProperty(0.0) - init(isBlur: Bool) { - self.isBlur = isBlur + override init() { + self.blurredLayer = MetalEngineSubjectLayer() self.colorSets = [ ColorSet(colors: [ @@ -151,6 +152,7 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject { } override init(layer: Any) { + self.blurredLayer = MetalEngineSubjectLayer() self.colorSets = [] self.colorTransition = AnimatedProperty(ColorSet(colors: [])) @@ -179,35 +181,37 @@ final class CallBackgroundLayer: MetalEngineSubjectLayer, MetalEngineSubject { return } - let isBlur = self.isBlur let phase = self.phase - context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, commands: { encoder, placement in - let effectiveRect = placement.effectiveRect - - var rect = SIMD4(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height)) - encoder.setVertexBytes(&rect, length: 4 * 4, index: 0) - - let baseStep = floor(phase) - let nextStepInterpolation = phase - floor(phase) - - let positions0 = gatherPositions(shiftArray(array: CallBackgroundLayer.basePositions, offset: Int(baseStep))) - let positions1 = gatherPositions(shiftArray(array: CallBackgroundLayer.basePositions, offset: Int(baseStep) + 1)) - var positions = Array>(repeating: SIMD2(), count: 4) - for i in 0 ..< 4 { - positions[i] = interpolatePoints(positions0[i], positions1[i], at: nextStepInterpolation) - } - encoder.setFragmentBytes(&positions, length: 4 * MemoryLayout>.size, index: 0) - - var colors: [SIMD4] = self.colorTransition.value.colors - - encoder.setFragmentBytes(&colors, length: 4 * MemoryLayout>.size, index: 1) - var brightness: Float = isBlur ? 1.1 : 1.0 - var saturation: Float = isBlur ? 
1.2 : 1.0 - encoder.setFragmentBytes(&brightness, length: 4, index: 2) - encoder.setFragmentBytes(&saturation, length: 4, index: 3) - - encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6) - }) + for i in 0 ..< 2 { + let isBlur = i == 1 + context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: i == 0 ? self : self.blurredLayer, commands: { encoder, placement in + let effectiveRect = placement.effectiveRect + + var rect = SIMD4(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height)) + encoder.setVertexBytes(&rect, length: 4 * 4, index: 0) + + let baseStep = floor(phase) + let nextStepInterpolation = phase - floor(phase) + + let positions0 = gatherPositions(shiftArray(array: CallBackgroundLayer.basePositions, offset: Int(baseStep))) + let positions1 = gatherPositions(shiftArray(array: CallBackgroundLayer.basePositions, offset: Int(baseStep) + 1)) + var positions = Array>(repeating: SIMD2(), count: 4) + for i in 0 ..< 4 { + positions[i] = interpolatePoints(positions0[i], positions1[i], at: nextStepInterpolation) + } + encoder.setFragmentBytes(&positions, length: 4 * MemoryLayout>.size, index: 0) + + var colors: [SIMD4] = self.colorTransition.value.colors + + encoder.setFragmentBytes(&colors, length: 4 * MemoryLayout>.size, index: 1) + var brightness: Float = isBlur ? 1.1 : 1.0 + var saturation: Float = isBlur ? 
1.2 : 1.0 + encoder.setFragmentBytes(&brightness, length: 4, index: 2) + encoder.setFragmentBytes(&saturation, length: 4, index: 3) + + encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6) + }) + } } } diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift index 25126882a1..be9a3504ee 100644 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentOverlayButton.swift @@ -3,7 +3,7 @@ import UIKit import Display import ComponentFlow -final class ContentOverlayButton: HighlightTrackingButton, ContentOverlayView { +final class ContentOverlayButton: HighlightTrackingButton, OverlayMaskContainerViewProtocol { private struct ContentParams: Equatable { var size: CGSize var image: UIImage? @@ -18,9 +18,7 @@ final class ContentOverlayButton: HighlightTrackingButton, ContentOverlayView { } } - var overlayMaskLayer: CALayer { - return self.overlayBackgroundLayer - } + let maskContents: UIView override static var layerClass: AnyClass { return MirroringLayer.self @@ -28,8 +26,6 @@ final class ContentOverlayButton: HighlightTrackingButton, ContentOverlayView { var action: (() -> Void)? - private let overlayBackgroundLayer: SimpleLayer - private let contentView: UIImageView private var currentContentViewIsSelected: Bool? @@ -38,7 +34,8 @@ final class ContentOverlayButton: HighlightTrackingButton, ContentOverlayView { private var contentParams: ContentParams? 
override init(frame: CGRect) { - self.overlayBackgroundLayer = SimpleLayer() + self.maskContents = UIView() + self.contentView = UIImageView() self.textView = TextView() @@ -48,14 +45,14 @@ final class ContentOverlayButton: HighlightTrackingButton, ContentOverlayView { let size: CGFloat = 56.0 let renderer = UIGraphicsImageRenderer(bounds: CGRect(origin: CGPoint(), size: CGSize(width: size, height: size))) - self.overlayBackgroundLayer.contents = renderer.image { context in + self.maskContents.layer.contents = renderer.image { context in UIGraphicsPushContext(context.cgContext) context.cgContext.setFillColor(UIColor.white.cgColor) context.cgContext.fillEllipse(in: CGRect(origin: CGPoint(), size: CGSize(width: size, height: size))) UIGraphicsPopContext() }.cgImage - (self.layer as? MirroringLayer)?.targetLayer = self.overlayBackgroundLayer + (self.layer as? MirroringLayer)?.targetLayer = self.maskContents.layer self.addSubview(self.contentView) self.addSubview(self.textView) diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentView.swift index 5fab3b34ed..8b13789179 100644 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentView.swift +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/ContentView.swift @@ -1,375 +1 @@ -import Foundation -import UIKit -import Display -import MetalEngine -import ComponentFlow -import SwiftSignalKit -final class ContentView: UIView { - private struct Params: Equatable { - var size: CGSize - var insets: UIEdgeInsets - var screenCornerRadius: CGFloat - var state: PrivateCallScreen.State - - init(size: CGSize, insets: UIEdgeInsets, screenCornerRadius: CGFloat, state: PrivateCallScreen.State) { - self.size = size - self.insets = insets - self.screenCornerRadius = screenCornerRadius - self.state = state - } - - static func ==(lhs: Params, rhs: Params) -> Bool { - 
if lhs.size != rhs.size { - return false - } - if lhs.insets != rhs.insets { - return false - } - if lhs.screenCornerRadius != rhs.screenCornerRadius { - return false - } - if lhs.state != rhs.state { - return false - } - return true - } - } - - private let blobLayer: CallBlobsLayer - private let avatarLayer: AvatarLayer - private let titleView: TextView - - private var statusView: StatusView - - private var emojiView: KeyEmojiView? - - let blurContentsLayer: SimpleLayer - - private var videoContainerView: VideoContainerView? - - private var params: Params? - - private var activeRemoteVideoSource: VideoSource? - private var waitingForFirstVideoFrameDisposable: Disposable? - - private var processedInitialAudioLevelBump: Bool = false - private var audioLevelBump: Float = 0.0 - - private var targetAudioLevel: Float = 0.0 - private var audioLevel: Float = 0.0 - private var audioLevelUpdateSubscription: SharedDisplayLinkDriver.Link? - - override init(frame: CGRect) { - self.blobLayer = CallBlobsLayer() - self.avatarLayer = AvatarLayer() - - self.titleView = TextView() - self.statusView = StatusView() - - self.blurContentsLayer = SimpleLayer() - - super.init(frame: frame) - - self.layer.addSublayer(self.blobLayer) - self.layer.addSublayer(self.avatarLayer) - - self.addSubview(self.titleView) - - self.addSubview(self.statusView) - self.statusView.requestLayout = { [weak self] in - self?.update(transition: .immediate) - } - - self.audioLevelUpdateSubscription = SharedDisplayLinkDriver.shared.add(needsHighestFramerate: false, { [weak self] in - guard let self else { - return - } - self.attenuateAudioLevelStep() - }) - } - - required init?(coder: NSCoder) { - fatalError("init(coder:) has not been implemented") - } - - deinit { - self.waitingForFirstVideoFrameDisposable?.dispose() - } - - func addIncomingAudioLevel(value: Float) { - self.targetAudioLevel = value - } - - private func attenuateAudioLevelStep() { - self.audioLevel = self.audioLevel * 0.8 + (self.targetAudioLevel 
+ self.audioLevelBump) * 0.2 - if self.audioLevel <= 0.01 { - self.audioLevel = 0.0 - } - self.updateAudioLevel() - } - - private func updateAudioLevel() { - if self.activeRemoteVideoSource == nil { - let additionalAvatarScale = CGFloat(max(0.0, min(self.audioLevel, 5.0)) * 0.05) - self.avatarLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale, 1.0 + additionalAvatarScale, 1.0) - - if let params = self.params, case .terminated = params.state.lifecycleState { - } else { - let blobAmplificationFactor: CGFloat = 2.0 - self.blobLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0) - } - } - } - - func update( - size: CGSize, - insets: UIEdgeInsets, - screenCornerRadius: CGFloat, - state: PrivateCallScreen.State, - transition: Transition - ) { - let params = Params(size: size, insets: insets, screenCornerRadius: screenCornerRadius, state: state) - if self.params == params { - return - } - - if self.params?.state.remoteVideo !== params.state.remoteVideo { - self.waitingForFirstVideoFrameDisposable?.dispose() - - if let remoteVideo = params.state.remoteVideo { - if remoteVideo.currentOutput != nil { - self.activeRemoteVideoSource = remoteVideo - } else { - let firstVideoFrameSignal = Signal { subscriber in - remoteVideo.updated = { [weak remoteVideo] in - guard let remoteVideo else { - subscriber.putCompletion() - return - } - if remoteVideo.currentOutput != nil { - subscriber.putCompletion() - } - } - - return EmptyDisposable - } - var shouldUpdate = false - self.waitingForFirstVideoFrameDisposable = (firstVideoFrameSignal - |> timeout(4.0, queue: .mainQueue(), alternate: .complete()) - |> deliverOnMainQueue).startStrict(completed: { [weak self] in - guard let self else { - return - } - self.activeRemoteVideoSource = remoteVideo - if shouldUpdate { - self.update(transition: .spring(duration: 0.3)) - } - }) - shouldUpdate = true - } - } else { - 
self.activeRemoteVideoSource = nil - } - } - - self.params = params - self.updateInternal(params: params, transition: transition) - } - - private func update(transition: Transition) { - guard let params = self.params else { - return - } - self.updateInternal(params: params, transition: transition) - } - - private func updateInternal(params: Params, transition: Transition) { - if case let .active(activeState) = params.state.lifecycleState { - let emojiView: KeyEmojiView - var emojiTransition = transition - if let current = self.emojiView { - emojiView = current - } else { - emojiTransition = transition.withAnimation(.none) - emojiView = KeyEmojiView(emoji: activeState.emojiKey) - self.emojiView = emojiView - } - if emojiView.superview == nil { - self.addSubview(emojiView) - if !transition.animation.isImmediate { - emojiView.animateIn() - } - } - emojiTransition.setFrame(view: emojiView, frame: CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 12.0 - emojiView.size.width, y: params.insets.top + 27.0), size: emojiView.size)) - } else { - if let emojiView = self.emojiView { - self.emojiView = nil - emojiView.removeFromSuperview() - } - } - - let collapsedAvatarSize: CGFloat = 136.0 - let blobSize: CGFloat = collapsedAvatarSize + 40.0 - - let collapsedAvatarFrame = CGRect(origin: CGPoint(x: floor((params.size.width - collapsedAvatarSize) * 0.5), y: 222.0), size: CGSize(width: collapsedAvatarSize, height: collapsedAvatarSize)) - let expandedAvatarFrame = CGRect(origin: CGPoint(), size: params.size) - let avatarFrame = self.activeRemoteVideoSource != nil ? expandedAvatarFrame : collapsedAvatarFrame - let avatarCornerRadius = self.activeRemoteVideoSource != nil ? 
params.screenCornerRadius : collapsedAvatarSize * 0.5 - - if let activeRemoteVideoSource = self.activeRemoteVideoSource { - let videoContainerView: VideoContainerView - if let current = self.videoContainerView { - videoContainerView = current - } else { - videoContainerView = VideoContainerView(frame: CGRect()) - self.videoContainerView = videoContainerView - self.insertSubview(videoContainerView, belowSubview: self.titleView) - self.blurContentsLayer.addSublayer(videoContainerView.blurredContainerLayer) - - videoContainerView.layer.position = self.avatarLayer.position - videoContainerView.layer.bounds = self.avatarLayer.bounds - videoContainerView.alpha = 0.0 - videoContainerView.blurredContainerLayer.position = self.avatarLayer.position - videoContainerView.blurredContainerLayer.bounds = self.avatarLayer.bounds - videoContainerView.blurredContainerLayer.opacity = 0.0 - videoContainerView.update(size: self.avatarLayer.bounds.size, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 0.0, isExpanded: false, transition: .immediate) - } - - if videoContainerView.video !== activeRemoteVideoSource { - videoContainerView.video = activeRemoteVideoSource - } - - transition.setPosition(view: videoContainerView, position: avatarFrame.center) - transition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) - transition.setAlpha(view: videoContainerView, alpha: 1.0) - transition.setPosition(layer: videoContainerView.blurredContainerLayer, position: avatarFrame.center) - transition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) - transition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 1.0) - videoContainerView.update(size: avatarFrame.size, cornerRadius: avatarCornerRadius, isExpanded: self.activeRemoteVideoSource != nil, transition: transition) - } else { - if let videoContainerView = self.videoContainerView { - 
videoContainerView.update(size: avatarFrame.size, cornerRadius: avatarCornerRadius, isExpanded: self.activeRemoteVideoSource != nil, transition: transition) - transition.setPosition(layer: videoContainerView.blurredContainerLayer, position: avatarFrame.center) - transition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) - transition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 0.0) - transition.setPosition(view: videoContainerView, position: avatarFrame.center) - transition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) - if videoContainerView.alpha != 0.0 { - transition.setAlpha(view: videoContainerView, alpha: 0.0, completion: { [weak self, weak videoContainerView] completed in - guard let self, let videoContainerView, completed else { - return - } - videoContainerView.removeFromSuperview() - videoContainerView.blurredContainerLayer.removeFromSuperlayer() - if self.videoContainerView === videoContainerView { - self.videoContainerView = nil - } - }) - } - } - } - - if self.avatarLayer.image !== params.state.avatarImage { - self.avatarLayer.image = params.state.avatarImage - } - transition.setPosition(layer: self.avatarLayer, position: avatarFrame.center) - transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) - self.avatarLayer.update(size: collapsedAvatarFrame.size, isExpanded: self.activeRemoteVideoSource != nil, cornerRadius: avatarCornerRadius, transition: transition) - - let blobFrame = CGRect(origin: CGPoint(x: floor(avatarFrame.midX - blobSize * 0.5), y: floor(avatarFrame.midY - blobSize * 0.5)), size: CGSize(width: blobSize, height: blobSize)) - transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY)) - transition.setBounds(layer: self.blobLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size)) - - let titleString: 
String - switch params.state.lifecycleState { - case .terminated: - titleString = "Call Ended" - transition.setScale(layer: self.blobLayer, scale: 0.001) - transition.setAlpha(layer: self.blobLayer, alpha: 0.0) - default: - titleString = params.state.name - } - - let titleSize = self.titleView.update( - string: titleString, - fontSize: self.activeRemoteVideoSource == nil ? 28.0 : 17.0, - fontWeight: self.activeRemoteVideoSource == nil ? 0.0 : 0.25, - color: .white, - constrainedWidth: params.size.width - 16.0 * 2.0, - transition: transition - ) - let titleFrame = CGRect( - origin: CGPoint( - x: (params.size.width - titleSize.width) * 0.5, - y: self.activeRemoteVideoSource == nil ? collapsedAvatarFrame.maxY + 39.0 : params.insets.top + 17.0 - ), - size: titleSize - ) - transition.setFrame(view: self.titleView, frame: titleFrame) - - let statusState: StatusView.State - switch params.state.lifecycleState { - case .connecting: - statusState = .waiting(.requesting) - case .ringing: - statusState = .waiting(.ringing) - case .exchangingKeys: - statusState = .waiting(.generatingKeys) - case let .active(activeState): - statusState = .active(StatusView.ActiveState(startTimestamp: activeState.startTime, signalStrength: activeState.signalInfo.quality)) - - if !self.processedInitialAudioLevelBump { - self.processedInitialAudioLevelBump = true - self.audioLevelBump = 2.0 - DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.2, execute: { [weak self] in - guard let self else { - return - } - self.audioLevelBump = 0.0 - }) - } - case let .terminated(terminatedState): - statusState = .terminated(StatusView.TerminatedState(duration: terminatedState.duration)) - } - - if let previousState = self.statusView.state, previousState.key != statusState.key { - let previousStatusView = self.statusView - if !transition.animation.isImmediate { - transition.setPosition(view: previousStatusView, position: CGPoint(x: previousStatusView.center.x, y: previousStatusView.center.y - 5.0)) - 
transition.setScale(view: previousStatusView, scale: 0.5) - Transition.easeInOut(duration: 0.1).setAlpha(view: previousStatusView, alpha: 0.0, completion: { [weak previousStatusView] _ in - previousStatusView?.removeFromSuperview() - }) - } else { - previousStatusView.removeFromSuperview() - } - - self.statusView = StatusView() - self.insertSubview(self.statusView, aboveSubview: previousStatusView) - self.statusView.requestLayout = { [weak self] in - self?.update(transition: .immediate) - } - } - - let statusSize = self.statusView.update(state: statusState, transition: .immediate) - let statusFrame = CGRect( - origin: CGPoint( - x: (params.size.width - statusSize.width) * 0.5, - y: titleFrame.maxY + (self.activeRemoteVideoSource != nil ? 0.0 : 4.0) - ), - size: statusSize - ) - if self.statusView.bounds.isEmpty { - self.statusView.frame = statusFrame - - if !transition.animation.isImmediate { - transition.animatePosition(view: self.statusView, from: CGPoint(x: 0.0, y: 5.0), to: CGPoint(), additive: true) - transition.animateScale(view: self.statusView, from: 0.5, to: 1.0) - Transition.easeInOut(duration: 0.15).animateAlpha(view: self.statusView, from: 0.0, to: 1.0) - } - } else { - transition.setFrame(view: self.statusView, frame: statusFrame) - } - } -} diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoContainerLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/TitleView copy.swift similarity index 100% rename from submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/VideoContainerLayer.swift rename to submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/TitleView copy.swift diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/WeakSignalView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/WeakSignalView.swift new file mode 100644 index 0000000000..6fea71c89a --- /dev/null +++ 
b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/Components/WeakSignalView.swift @@ -0,0 +1,68 @@ +import Foundation +import UIKit +import Display + +final class WeakSignalView: OverlayMaskContainerView { + private struct Params: Equatable { + var constrainedSize: CGSize + + init(constrainedSize: CGSize) { + self.constrainedSize = constrainedSize + } + } + private struct Layout { + var params: Params + var size: CGSize + + init(params: Params, size: CGSize) { + self.params = params + self.size = size + } + } + + private let titleView: TextView + private let overlayBackgroundView: UIImageView + private let backgroundView: UIImageView + + private var currentLayout: Layout? + + override init(frame: CGRect) { + self.titleView = TextView() + self.overlayBackgroundView = UIImageView() + self.backgroundView = UIImageView() + + super.init(frame: frame) + + self.maskContents.addSubview(self.overlayBackgroundView) + self.addSubview(self.backgroundView) + self.addSubview(self.titleView) + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + func update(constrainedSize: CGSize) -> CGSize { + let params = Params(constrainedSize: constrainedSize) + if let currentLayout = self.currentLayout, currentLayout.params == params { + return currentLayout.size + } + + let sideInset: CGFloat = 8.0 + let height: CGFloat = 30.0 + + let titleSize = self.titleView.update(string: "Weak network signal", fontSize: 16.0, fontWeight: 0.0, color: .white, constrainedWidth: constrainedSize.width - sideInset * 2.0, transition: .immediate) + let size = CGSize(width: titleSize.width + sideInset * 2.0, height: height) + self.titleView.frame = CGRect(origin: CGPoint(x: sideInset, y: floor((size.height - titleSize.height) * 0.5)), size: titleSize) + + if self.overlayBackgroundView.image?.size.height != height { + self.overlayBackgroundView.image = generateStretchableFilledCircleImage(diameter: height, color: .white) + self.backgroundView.image = 
generateStretchableFilledCircleImage(diameter: height, color: UIColor(white: 1.0, alpha: 0.2)) + } + self.overlayBackgroundView.frame = CGRect(origin: CGPoint(), size: size) + self.backgroundView.frame = CGRect(origin: CGPoint(), size: size) + + self.currentLayout = Layout(params: params, size: size) + return size + } +} diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/ContentOverlayContainer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/ContentOverlayContainer.swift deleted file mode 100644 index 460f003478..0000000000 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/ContentOverlayContainer.swift +++ /dev/null @@ -1,60 +0,0 @@ -import Foundation -import UIKit - -protocol ContentOverlayView: UIView { - var overlayMaskLayer: CALayer { get } -} - -final class ContentOverlayContainer: UIView { - private let overlayLayer: ContentOverlayLayer - - init(overlayLayer: ContentOverlayLayer) { - self.overlayLayer = overlayLayer - - super.init(frame: CGRect()) - } - - required init?(coder: NSCoder) { - fatalError("init(coder:) has not been implemented") - } - - override func addSubview(_ view: UIView) { - super.addSubview(view) - - if let view = view as? ContentOverlayView { - self.overlayLayer.maskContentLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func insertSubview(_ view: UIView, at index: Int) { - super.insertSubview(view, at: index) - - if let view = view as? ContentOverlayView { - self.overlayLayer.maskContentLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func insertSubview(_ view: UIView, aboveSubview siblingSubview: UIView) { - super.insertSubview(view, aboveSubview: siblingSubview) - - if let view = view as? ContentOverlayView { - self.overlayLayer.maskContentLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func insertSubview(_ view: UIView, belowSubview siblingSubview: UIView) { - super.insertSubview(view, belowSubview: siblingSubview) - - if let view = view as? 
ContentOverlayView { - self.overlayLayer.maskContentLayer.addSublayer(view.overlayMaskLayer) - } - } - - override func willRemoveSubview(_ subview: UIView) { - super.willRemoveSubview(subview) - - if let view = subview as? ContentOverlayView { - view.overlayMaskLayer.removeFromSuperlayer() - } - } -} diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift index 801d6c06e0..8506d05445 100644 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/MirroringLayer.swift @@ -3,7 +3,7 @@ import UIKit import Display final class MirroringLayer: SimpleLayer { - var targetLayer: SimpleLayer? + var targetLayer: CALayer? override init() { super.init() diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift new file mode 100644 index 0000000000..447704d0dc --- /dev/null +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/OverlayMaskContainerView.swift @@ -0,0 +1,68 @@ +import Foundation +import UIKit + +public protocol OverlayMaskContainerViewProtocol: UIView { + var maskContents: UIView { get } +} + +public class OverlayMaskContainerView: UIView, OverlayMaskContainerViewProtocol { + override public static var layerClass: AnyClass { + return MirroringLayer.self + } + + public let maskContents: UIView + + override init(frame: CGRect) { + self.maskContents = UIView() + + super.init(frame: frame) + + (self.layer as? MirroringLayer)?.targetLayer = self.maskContents.layer + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override public func addSubview(_ view: UIView) { + super.addSubview(view) + + if let view = view as? 
OverlayMaskContainerViewProtocol { + self.maskContents.addSubview(view.maskContents) + } + } + + override public func insertSubview(_ view: UIView, at index: Int) { + super.insertSubview(view, at: index) + + if let view = view as? OverlayMaskContainerViewProtocol { + self.maskContents.addSubview(view.maskContents) + } + } + + override public func insertSubview(_ view: UIView, aboveSubview siblingSubview: UIView) { + super.insertSubview(view, aboveSubview: siblingSubview) + + if let view = view as? OverlayMaskContainerViewProtocol { + self.maskContents.addSubview(view.maskContents) + } + } + + override public func insertSubview(_ view: UIView, belowSubview siblingSubview: UIView) { + super.insertSubview(view, belowSubview: siblingSubview) + + if let view = view as? OverlayMaskContainerViewProtocol { + self.maskContents.addSubview(view.maskContents) + } + } + + override public func willRemoveSubview(_ subview: UIView) { + super.willRemoveSubview(subview) + + if let view = subview as? OverlayMaskContainerViewProtocol { + if view.maskContents.superview === self { + view.maskContents.removeFromSuperview() + } + } + } +} diff --git a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift index 134f5ebd57..3926c1faa9 100644 --- a/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift +++ b/submodules/TelegramUI/Components/Calls/CallScreen/Sources/PrivateCallScreen.swift @@ -3,8 +3,9 @@ import UIKit import Display import MetalEngine import ComponentFlow +import SwiftSignalKit -public final class PrivateCallScreen: UIView { +public final class PrivateCallScreen: OverlayMaskContainerView { public struct State: Equatable { public struct SignalInfo: Equatable { public var quality: Double @@ -52,6 +53,7 @@ public final class PrivateCallScreen: UIView { public var avatarImage: UIImage? 
public var audioOutput: AudioOutput public var isMicrophoneMuted: Bool + public var localVideo: VideoSource? public var remoteVideo: VideoSource? public init( @@ -60,6 +62,7 @@ public final class PrivateCallScreen: UIView { avatarImage: UIImage?, audioOutput: AudioOutput, isMicrophoneMuted: Bool, + localVideo: VideoSource?, remoteVideo: VideoSource? ) { self.lifecycleState = lifecycleState @@ -67,6 +70,7 @@ public final class PrivateCallScreen: UIView { self.avatarImage = avatarImage self.audioOutput = audioOutput self.isMicrophoneMuted = isMicrophoneMuted + self.localVideo = localVideo self.remoteVideo = remoteVideo } @@ -86,6 +90,9 @@ public final class PrivateCallScreen: UIView { if lhs.isMicrophoneMuted != rhs.isMicrophoneMuted { return false } + if lhs.localVideo !== rhs.localVideo { + return false + } if lhs.remoteVideo !== rhs.remoteVideo { return false } @@ -107,20 +114,31 @@ public final class PrivateCallScreen: UIView { } } - private let backgroundLayer: CallBackgroundLayer - private let contentOverlayLayer: ContentOverlayLayer - private let contentOverlayContainer: ContentOverlayContainer - - private let blurContentsLayer: SimpleLayer - private let blurBackgroundLayer: CallBackgroundLayer - - private let contentView: ContentView - - private let buttonGroupView: ButtonGroupView - private var params: Params? - private var isVideoOn: Bool = false + private let backgroundLayer: CallBackgroundLayer + private let overlayContentsView: UIView + private let buttonGroupView: ButtonGroupView + private let blobLayer: CallBlobsLayer + private let avatarLayer: AvatarLayer + private let titleView: TextView + + private var statusView: StatusView + private var weakSignalView: WeakSignalView? + + private var emojiView: KeyEmojiView? + + private var videoContainerView: VideoContainerView? + + private var activeRemoteVideoSource: VideoSource? + private var waitingForFirstVideoFrameDisposable: Disposable? 
+ + private var processedInitialAudioLevelBump: Bool = false + private var audioLevelBump: Float = 0.0 + + private var targetAudioLevel: Float = 0.0 + private var audioLevel: Float = 0.0 + private var audioLevelUpdateSubscription: SharedDisplayLinkDriver.Link? public var speakerAction: (() -> Void)? public var videoAction: (() -> Void)? @@ -128,89 +146,55 @@ public final class PrivateCallScreen: UIView { public var endCallAction: (() -> Void)? public override init(frame: CGRect) { - self.blurContentsLayer = SimpleLayer() + self.overlayContentsView = UIView() + self.overlayContentsView.isUserInteractionEnabled = false - self.backgroundLayer = CallBackgroundLayer(isBlur: false) - - self.contentOverlayLayer = ContentOverlayLayer() - self.contentOverlayContainer = ContentOverlayContainer(overlayLayer: self.contentOverlayLayer) - - self.blurBackgroundLayer = CallBackgroundLayer(isBlur: true) - - self.contentView = ContentView(frame: CGRect()) + self.backgroundLayer = CallBackgroundLayer() self.buttonGroupView = ButtonGroupView() + self.blobLayer = CallBlobsLayer() + self.avatarLayer = AvatarLayer() + + self.titleView = TextView() + self.statusView = StatusView() + super.init(frame: frame) - self.contentOverlayLayer.contentsLayer = self.blurContentsLayer - self.layer.addSublayer(self.backgroundLayer) + self.overlayContentsView.layer.addSublayer(self.backgroundLayer.blurredLayer) - self.blurContentsLayer.addSublayer(self.blurBackgroundLayer) + self.overlayContentsView.mask = self.maskContents + self.addSubview(self.overlayContentsView) - self.addSubview(self.contentView) - self.blurContentsLayer.addSublayer(self.contentView.blurContentsLayer) + self.addSubview(self.buttonGroupView) - self.layer.addSublayer(self.contentOverlayLayer) + self.layer.addSublayer(self.blobLayer) + self.layer.addSublayer(self.avatarLayer) - self.addSubview(self.contentOverlayContainer) + self.addSubview(self.titleView) - self.contentOverlayContainer.addSubview(self.buttonGroupView) - - 
/*self.buttonGroupView.audioPressed = { [weak self] in - guard let self, var params = self.params else { - return - } - - self.isSpeakerOn = !self.isSpeakerOn - - switch params.state.lifecycleState { - case .connecting: - params.state.lifecycleState = .ringing - case .ringing: - params.state.lifecycleState = .exchangingKeys - case .exchangingKeys: - params.state.lifecycleState = .active(State.ActiveState( - startTime: Date().timeIntervalSince1970, - signalInfo: State.SignalInfo(quality: 1.0), - emojiKey: ["🐱", "🚂", "❄️", "🎨"] - )) - case var .active(activeState): - if activeState.signalInfo.quality == 1.0 { - activeState.signalInfo.quality = 0.1 - } else { - activeState.signalInfo.quality = 1.0 - } - params.state.lifecycleState = .active(activeState) - } - - self.params = params - self.update(transition: .spring(duration: 0.3)) + self.addSubview(self.statusView) + self.statusView.requestLayout = { [weak self] in + self?.update(transition: .immediate) } - self.buttonGroupView.toggleVideo = { [weak self] in + self.audioLevelUpdateSubscription = SharedDisplayLinkDriver.shared.add(needsHighestFramerate: false, { [weak self] in guard let self else { return } - if self.remoteVideo == nil { - if let url = Bundle.main.url(forResource: "test2", withExtension: "mp4") { - self.remoteVideo = FileVideoSource(device: MetalEngine.shared.device, url: url) - } - } else { - self.remoteVideo = nil - } - - self.isVideoOn = !self.isVideoOn - - self.update(transition: .spring(duration: 0.3)) - }*/ + self.attenuateAudioLevelStep() + }) } public required init?(coder: NSCoder) { fatalError() } + deinit { + self.waitingForFirstVideoFrameDisposable?.dispose() + } + override public func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? 
{ guard let result = super.hitTest(point, with: event) else { return nil @@ -220,7 +204,28 @@ public final class PrivateCallScreen: UIView { } public func addIncomingAudioLevel(value: Float) { - self.contentView.addIncomingAudioLevel(value: value) + self.targetAudioLevel = value + } + + private func attenuateAudioLevelStep() { + self.audioLevel = self.audioLevel * 0.8 + (self.targetAudioLevel + self.audioLevelBump) * 0.2 + if self.audioLevel <= 0.01 { + self.audioLevel = 0.0 + } + self.updateAudioLevel() + } + + private func updateAudioLevel() { + if self.activeRemoteVideoSource == nil { + let additionalAvatarScale = CGFloat(max(0.0, min(self.audioLevel, 5.0)) * 0.05) + self.avatarLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale, 1.0 + additionalAvatarScale, 1.0) + + if let params = self.params, case .terminated = params.state.lifecycleState { + } else { + let blobAmplificationFactor: CGFloat = 2.0 + self.blobLayer.transform = CATransform3DMakeScale(1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0 + additionalAvatarScale * blobAmplificationFactor, 1.0) + } + } } public func update(size: CGSize, insets: UIEdgeInsets, screenCornerRadius: CGFloat, state: State, transition: Transition) { @@ -228,6 +233,46 @@ public final class PrivateCallScreen: UIView { if self.params == params { return } + + if self.params?.state.remoteVideo !== params.state.remoteVideo { + self.waitingForFirstVideoFrameDisposable?.dispose() + + if let remoteVideo = params.state.remoteVideo { + if remoteVideo.currentOutput != nil { + self.activeRemoteVideoSource = remoteVideo + } else { + let firstVideoFrameSignal = Signal { subscriber in + remoteVideo.updated = { [weak remoteVideo] in + guard let remoteVideo else { + subscriber.putCompletion() + return + } + if remoteVideo.currentOutput != nil { + subscriber.putCompletion() + } + } + + return EmptyDisposable + } + var shouldUpdate = false + self.waitingForFirstVideoFrameDisposable = (firstVideoFrameSignal + |> 
timeout(4.0, queue: .mainQueue(), alternate: .complete()) + |> deliverOnMainQueue).startStrict(completed: { [weak self] in + guard let self else { + return + } + self.activeRemoteVideoSource = remoteVideo + if shouldUpdate { + self.update(transition: .spring(duration: 0.3)) + } + }) + shouldUpdate = true + } + } else { + self.activeRemoteVideoSource = nil + } + } + self.params = params self.updateInternal(params: params, transition: transition) } @@ -251,6 +296,7 @@ public final class PrivateCallScreen: UIView { self.backgroundLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(renderingSize.width) + edgeSize * 2, height: Int(renderingSize.height) + edgeSize * 2)) transition.setFrame(layer: self.backgroundLayer, frame: visualBackgroundFrame) + transition.setFrame(layer: self.backgroundLayer.blurredLayer, frame: visualBackgroundFrame) let backgroundStateIndex: Int switch params.state.lifecycleState { @@ -271,16 +317,7 @@ public final class PrivateCallScreen: UIView { } self.backgroundLayer.update(stateIndex: backgroundStateIndex, transition: transition) - self.contentOverlayLayer.frame = CGRect(origin: CGPoint(), size: params.size) - self.contentOverlayLayer.update(size: params.size, contentInsets: UIEdgeInsets()) - - self.contentOverlayContainer.frame = CGRect(origin: CGPoint(), size: params.size) - - self.blurBackgroundLayer.renderSpec = RenderLayerSpec(size: RenderSize(width: Int(renderingSize.width) + edgeSize * 2, height: Int(renderingSize.height) + edgeSize * 2)) - self.blurBackgroundLayer.update(stateIndex: backgroundStateIndex, transition: transition) - transition.setFrame(layer: self.blurBackgroundLayer, frame: visualBackgroundFrame) - - self.buttonGroupView.frame = CGRect(origin: CGPoint(), size: params.size) + transition.setFrame(view: self.buttonGroupView, frame: CGRect(origin: CGPoint(), size: params.size)) let buttons: [ButtonGroupView.Button] = [ ButtonGroupView.Button(content: .speaker(isActive: params.state.audioOutput != 
.internalSpeaker), action: { [weak self] in @@ -288,48 +325,12 @@ public final class PrivateCallScreen: UIView { return } self.speakerAction?() - - /*self.isSpeakerOn = !self.isSpeakerOn - - switch params.state.lifecycleState { - case .connecting: - params.state.lifecycleState = .ringing - case .ringing: - params.state.lifecycleState = .exchangingKeys - case .exchangingKeys: - params.state.lifecycleState = .active(State.ActiveState( - startTime: Date().timeIntervalSince1970, - signalInfo: State.SignalInfo(quality: 1.0), - emojiKey: ["🐱", "🚂", "❄️", "🎨"] - )) - case var .active(activeState): - if activeState.signalInfo.quality == 1.0 { - activeState.signalInfo.quality = 0.1 - } else { - activeState.signalInfo.quality = 1.0 - } - params.state.lifecycleState = .active(activeState) - } - - self.params = params - self.update(transition: .spring(duration: 0.3))*/ }), - ButtonGroupView.Button(content: .video(isActive: self.isVideoOn), action: { [weak self] in + ButtonGroupView.Button(content: .video(isActive: params.state.localVideo != nil), action: { [weak self] in guard let self else { return } self.videoAction?() - /*if self.remoteVideo == nil { - if let url = Bundle.main.url(forResource: "test2", withExtension: "mp4") { - self.remoteVideo = FileVideoSource(device: MetalEngine.shared.device, url: url) - } - } else { - self.remoteVideo = nil - } - - self.isVideoOn = !self.isVideoOn - - self.update(transition: .spring(duration: 0.3))*/ }), ButtonGroupView.Button(content: .microphone(isMuted: params.state.isMicrophoneMuted), action: { [weak self] in guard let self else { @@ -346,13 +347,223 @@ public final class PrivateCallScreen: UIView { ] self.buttonGroupView.update(size: params.size, buttons: buttons, transition: transition) - self.contentView.frame = CGRect(origin: CGPoint(), size: params.size) - self.contentView.update( - size: params.size, - insets: params.insets, - screenCornerRadius: params.screenCornerRadius, - state: params.state, + if case let 
.active(activeState) = params.state.lifecycleState { + let emojiView: KeyEmojiView + var emojiTransition = transition + if let current = self.emojiView { + emojiView = current + } else { + emojiTransition = transition.withAnimation(.none) + emojiView = KeyEmojiView(emoji: activeState.emojiKey) + self.emojiView = emojiView + } + if emojiView.superview == nil { + self.addSubview(emojiView) + if !transition.animation.isImmediate { + emojiView.animateIn() + } + } + emojiTransition.setFrame(view: emojiView, frame: CGRect(origin: CGPoint(x: params.size.width - params.insets.right - 12.0 - emojiView.size.width, y: params.insets.top + 27.0), size: emojiView.size)) + } else { + if let emojiView = self.emojiView { + self.emojiView = nil + emojiView.removeFromSuperview() + } + } + + let collapsedAvatarSize: CGFloat = 136.0 + let blobSize: CGFloat = collapsedAvatarSize + 40.0 + + let collapsedAvatarFrame = CGRect(origin: CGPoint(x: floor((params.size.width - collapsedAvatarSize) * 0.5), y: 222.0), size: CGSize(width: collapsedAvatarSize, height: collapsedAvatarSize)) + let expandedAvatarFrame = CGRect(origin: CGPoint(), size: params.size) + let avatarFrame = self.activeRemoteVideoSource != nil ? expandedAvatarFrame : collapsedAvatarFrame + let avatarCornerRadius = self.activeRemoteVideoSource != nil ? 
params.screenCornerRadius : collapsedAvatarSize * 0.5 + + if let activeRemoteVideoSource = self.activeRemoteVideoSource { + let videoContainerView: VideoContainerView + if let current = self.videoContainerView { + videoContainerView = current + } else { + videoContainerView = VideoContainerView(frame: CGRect()) + self.videoContainerView = videoContainerView + self.insertSubview(videoContainerView, belowSubview: self.titleView) + self.overlayContentsView.layer.addSublayer(videoContainerView.blurredContainerLayer) + + videoContainerView.layer.position = self.avatarLayer.position + videoContainerView.layer.bounds = self.avatarLayer.bounds + videoContainerView.alpha = 0.0 + videoContainerView.blurredContainerLayer.position = self.avatarLayer.position + videoContainerView.blurredContainerLayer.bounds = self.avatarLayer.bounds + videoContainerView.blurredContainerLayer.opacity = 0.0 + videoContainerView.update(size: self.avatarLayer.bounds.size, cornerRadius: self.avatarLayer.params?.cornerRadius ?? 
0.0, isExpanded: false, transition: .immediate) + } + + if videoContainerView.video !== activeRemoteVideoSource { + videoContainerView.video = activeRemoteVideoSource + } + + transition.setPosition(view: videoContainerView, position: avatarFrame.center) + transition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) + transition.setAlpha(view: videoContainerView, alpha: 1.0) + transition.setPosition(layer: videoContainerView.blurredContainerLayer, position: avatarFrame.center) + transition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) + transition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 1.0) + videoContainerView.update(size: avatarFrame.size, cornerRadius: avatarCornerRadius, isExpanded: self.activeRemoteVideoSource != nil, transition: transition) + } else { + if let videoContainerView = self.videoContainerView { + videoContainerView.update(size: avatarFrame.size, cornerRadius: avatarCornerRadius, isExpanded: self.activeRemoteVideoSource != nil, transition: transition) + transition.setPosition(layer: videoContainerView.blurredContainerLayer, position: avatarFrame.center) + transition.setBounds(layer: videoContainerView.blurredContainerLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) + transition.setAlpha(layer: videoContainerView.blurredContainerLayer, alpha: 0.0) + transition.setPosition(view: videoContainerView, position: avatarFrame.center) + transition.setBounds(view: videoContainerView, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) + if videoContainerView.alpha != 0.0 { + transition.setAlpha(view: videoContainerView, alpha: 0.0, completion: { [weak self, weak videoContainerView] completed in + guard let self, let videoContainerView, completed else { + return + } + videoContainerView.removeFromSuperview() + videoContainerView.blurredContainerLayer.removeFromSuperlayer() + if 
self.videoContainerView === videoContainerView { + self.videoContainerView = nil + } + }) + } + } + } + + if self.avatarLayer.image !== params.state.avatarImage { + self.avatarLayer.image = params.state.avatarImage + } + transition.setPosition(layer: self.avatarLayer, position: avatarFrame.center) + transition.setBounds(layer: self.avatarLayer, bounds: CGRect(origin: CGPoint(), size: avatarFrame.size)) + self.avatarLayer.update(size: collapsedAvatarFrame.size, isExpanded: self.activeRemoteVideoSource != nil, cornerRadius: avatarCornerRadius, transition: transition) + + let blobFrame = CGRect(origin: CGPoint(x: floor(avatarFrame.midX - blobSize * 0.5), y: floor(avatarFrame.midY - blobSize * 0.5)), size: CGSize(width: blobSize, height: blobSize)) + transition.setPosition(layer: self.blobLayer, position: CGPoint(x: blobFrame.midX, y: blobFrame.midY)) + transition.setBounds(layer: self.blobLayer, bounds: CGRect(origin: CGPoint(), size: blobFrame.size)) + + let titleString: String + switch params.state.lifecycleState { + case .terminated: + titleString = "Call Ended" + transition.setScale(layer: self.blobLayer, scale: 0.001) + transition.setAlpha(layer: self.blobLayer, alpha: 0.0) + default: + titleString = params.state.name + } + + let titleSize = self.titleView.update( + string: titleString, + fontSize: self.activeRemoteVideoSource == nil ? 28.0 : 17.0, + fontWeight: self.activeRemoteVideoSource == nil ? 0.0 : 0.25, + color: .white, + constrainedWidth: params.size.width - 16.0 * 2.0, transition: transition ) + let titleFrame = CGRect( + origin: CGPoint( + x: (params.size.width - titleSize.width) * 0.5, + y: self.activeRemoteVideoSource == nil ? 
collapsedAvatarFrame.maxY + 39.0 : params.insets.top + 17.0 + ), + size: titleSize + ) + transition.setFrame(view: self.titleView, frame: titleFrame) + + let statusState: StatusView.State + switch params.state.lifecycleState { + case .connecting: + statusState = .waiting(.requesting) + case .ringing: + statusState = .waiting(.ringing) + case .exchangingKeys: + statusState = .waiting(.generatingKeys) + case let .active(activeState): + statusState = .active(StatusView.ActiveState(startTimestamp: activeState.startTime, signalStrength: activeState.signalInfo.quality)) + + if !self.processedInitialAudioLevelBump { + self.processedInitialAudioLevelBump = true + self.audioLevelBump = 2.0 + DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.2, execute: { [weak self] in + guard let self else { + return + } + self.audioLevelBump = 0.0 + }) + } + case let .terminated(terminatedState): + statusState = .terminated(StatusView.TerminatedState(duration: terminatedState.duration)) + } + + if let previousState = self.statusView.state, previousState.key != statusState.key { + let previousStatusView = self.statusView + if !transition.animation.isImmediate { + transition.setPosition(view: previousStatusView, position: CGPoint(x: previousStatusView.center.x, y: previousStatusView.center.y - 5.0)) + transition.setScale(view: previousStatusView, scale: 0.5) + Transition.easeInOut(duration: 0.1).setAlpha(view: previousStatusView, alpha: 0.0, completion: { [weak previousStatusView] _ in + previousStatusView?.removeFromSuperview() + }) + } else { + previousStatusView.removeFromSuperview() + } + + self.statusView = StatusView() + self.insertSubview(self.statusView, aboveSubview: previousStatusView) + self.statusView.requestLayout = { [weak self] in + self?.update(transition: .immediate) + } + } + + let statusSize = self.statusView.update(state: statusState, transition: .immediate) + let statusFrame = CGRect( + origin: CGPoint( + x: (params.size.width - statusSize.width) * 0.5, + 
y: titleFrame.maxY + (self.activeRemoteVideoSource != nil ? 0.0 : 4.0) + ), + size: statusSize + ) + if self.statusView.bounds.isEmpty { + self.statusView.frame = statusFrame + + if !transition.animation.isImmediate { + transition.animatePosition(view: self.statusView, from: CGPoint(x: 0.0, y: 5.0), to: CGPoint(), additive: true) + transition.animateScale(view: self.statusView, from: 0.5, to: 1.0) + Transition.easeInOut(duration: 0.15).animateAlpha(view: self.statusView, from: 0.0, to: 1.0) + } + } else { + transition.setFrame(view: self.statusView, frame: statusFrame) + } + + if case let .active(activeState) = params.state.lifecycleState, activeState.signalInfo.quality <= 0.2 { + let weakSignalView: WeakSignalView + if let current = self.weakSignalView { + weakSignalView = current + } else { + weakSignalView = WeakSignalView() + self.weakSignalView = weakSignalView + self.addSubview(weakSignalView) + } + let weakSignalSize = weakSignalView.update(constrainedSize: CGSize(width: params.size.width - 32.0, height: 100.0)) + let weakSignalFrame = CGRect(origin: CGPoint(x: floor((params.size.width - weakSignalSize.width) * 0.5), y: statusFrame.maxY + (self.activeRemoteVideoSource != nil ? 4.0 : 4.0)), size: weakSignalSize) + if weakSignalView.bounds.isEmpty { + weakSignalView.frame = weakSignalFrame + if !transition.animation.isImmediate { + Transition.immediate.setScale(view: weakSignalView, scale: 0.001) + weakSignalView.alpha = 0.0 + transition.setScaleWithSpring(view: weakSignalView, scale: 1.0) + transition.setAlpha(view: weakSignalView, alpha: 1.0) + } + } else { + transition.setFrame(view: weakSignalView, frame: weakSignalFrame) + } + } else { + if let weakSignalView = self.weakSignalView { + self.weakSignalView = nil + transition.setScale(view: weakSignalView, scale: 0.001) + transition.setAlpha(view: weakSignalView, alpha: 0.0, completion: { [weak weakSignalView] _ in + weakSignalView?.removeFromSuperview() + }) + } + } + } }