Call experiments

This commit is contained in:
Ali 2020-07-31 22:26:33 +04:00
parent 8d1f30a0b0
commit 3a79be5bef
13 changed files with 325 additions and 85 deletions

View File

@@ -48,7 +48,7 @@ public struct PresentationCallState: Equatable {
case notAvailable
case possible
case outgoingRequested
case incomingRequested
case incomingRequested(sendsVideo: Bool)
case active
}

View File

@@ -4,11 +4,21 @@ import AsyncDisplayKit
import Display
public enum SemanticStatusNodeState: Equatable {
public struct ProgressAppearance: Equatable {
public var inset: CGFloat
public var lineWidth: CGFloat
public init(inset: CGFloat, lineWidth: CGFloat) {
self.inset = inset
self.lineWidth = lineWidth
}
}
case none
case download
case play
case pause
case progress(value: CGFloat?, cancelEnabled: Bool)
case progress(value: CGFloat?, cancelEnabled: Bool, appearance: ProgressAppearance?)
case customIcon(UIImage)
}
@@ -224,12 +234,14 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
let transitionFraction: CGFloat
let value: CGFloat?
let displayCancel: Bool
let appearance: SemanticStatusNodeState.ProgressAppearance?
let timestamp: Double
init(transitionFraction: CGFloat, value: CGFloat?, displayCancel: Bool, timestamp: Double) {
init(transitionFraction: CGFloat, value: CGFloat?, displayCancel: Bool, appearance: SemanticStatusNodeState.ProgressAppearance?, timestamp: Double) {
self.transitionFraction = transitionFraction
self.value = value
self.displayCancel = displayCancel
self.appearance = appearance
self.timestamp = timestamp
super.init()
@@ -252,22 +264,49 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
context.setStrokeColor(foregroundColor.withAlphaComponent(foregroundColor.alpha * self.transitionFraction).cgColor)
}
var progress = self.value ?? 0.1
var startAngle = -CGFloat.pi / 2.0
var endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
var progress: CGFloat
var startAngle: CGFloat
var endAngle: CGFloat
if let value = self.value {
progress = value
startAngle = -CGFloat.pi / 2.0
endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
if progress > 1.0 {
progress = 2.0 - progress
let tmp = startAngle
startAngle = endAngle
endAngle = tmp
if progress > 1.0 {
progress = 2.0 - progress
let tmp = startAngle
startAngle = endAngle
endAngle = tmp
}
progress = min(1.0, progress)
} else {
progress = CGFloat(1.0 + self.timestamp.remainder(dividingBy: 2.0))
startAngle = -CGFloat.pi / 2.0
endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
if progress > 1.0 {
progress = 2.0 - progress
let tmp = startAngle
startAngle = endAngle
endAngle = tmp
}
progress = min(1.0, progress)
}
progress = min(1.0, progress)
let lineWidth: CGFloat = max(1.6, 2.25 * factor)
let lineWidth: CGFloat
if let appearance = self.appearance {
lineWidth = appearance.lineWidth
} else {
lineWidth = max(1.6, 2.25 * factor)
}
let pathDiameter: CGFloat
pathDiameter = diameter - lineWidth - 2.5 * 2.0
if let appearance = self.appearance {
pathDiameter = diameter - lineWidth - appearance.inset * 2.0
} else {
pathDiameter = diameter - lineWidth - 2.5 * 2.0
}
var angle = self.timestamp.truncatingRemainder(dividingBy: Double.pi * 2.0)
angle *= 4.0
@@ -317,15 +356,17 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
var value: CGFloat?
let displayCancel: Bool
let appearance: SemanticStatusNodeState.ProgressAppearance?
var transition: SemanticStatusNodeProgressTransition?
var isAnimating: Bool {
return true
}
init(value: CGFloat?, displayCancel: Bool) {
init(value: CGFloat?, displayCancel: Bool, appearance: SemanticStatusNodeState.ProgressAppearance?) {
self.value = value
self.displayCancel = displayCancel
self.appearance = appearance
}
func drawingState(transitionFraction: CGFloat) -> SemanticStatusNodeStateDrawingState {
@@ -341,7 +382,7 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
} else {
resolvedValue = nil
}
return DrawingState(transitionFraction: transitionFraction, value: resolvedValue, displayCancel: self.displayCancel, timestamp: timestamp)
return DrawingState(transitionFraction: transitionFraction, value: resolvedValue, displayCancel: self.displayCancel, appearance: self.appearance, timestamp: timestamp)
}
func updateValue(value: CGFloat?) {
@@ -386,12 +427,12 @@ private extension SemanticStatusNodeState {
} else {
return SemanticStatusNodeIconContext(icon: icon)
}
case let .progress(value, cancelEnabled):
case let .progress(value, cancelEnabled, appearance):
if let current = current as? SemanticStatusNodeProgressContext, current.displayCancel == cancelEnabled {
current.updateValue(value: value)
return current
} else {
return SemanticStatusNodeProgressContext(value: value, displayCancel: cancelEnabled)
return SemanticStatusNodeProgressContext(value: value, displayCancel: cancelEnabled, appearance: appearance)
}
}
}

View File

@@ -7,7 +7,7 @@ static_library(
]),
deps = [
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit#shared",
"//submodules/Display:Display#shared",
"//submodules/Display:Display#shared",
"//submodules/TelegramPresentationData:TelegramPresentationData",
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/PhotoResources:PhotoResources",
@@ -21,6 +21,7 @@ static_library(
"//submodules/AppBundle:AppBundle",
"//submodules/PresentationDataUtils:PresentationDataUtils",
"//submodules/TelegramCallsUI/CallsEmoji:CallsEmoji",
"//submodules/SemanticStatusNode:SemanticStatusNode",
],
frameworks = [
"$SDKROOT/System/Library/Frameworks/Foundation.framework",

View File

@@ -22,6 +22,7 @@ swift_library(
"//submodules/AppBundle:AppBundle",
"//submodules/PresentationDataUtils:PresentationDataUtils",
"//submodules/TelegramCallsUI/CallsEmoji:CallsEmoji",
"//submodules/SemanticStatusNode:SemanticStatusNode",
],
visibility = [
"//visibility:public",

View File

@@ -4,6 +4,7 @@ import Display
import AsyncDisplayKit
import SwiftSignalKit
import AppBundle
import SemanticStatusNode
private let labelFont = Font.regular(13.0)
@@ -32,18 +33,22 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
var appearance: Appearance
var image: Image
var isEnabled: Bool
var hasProgress: Bool
init(appearance: Appearance, image: Image, isEnabled: Bool = true) {
init(appearance: Appearance, image: Image, isEnabled: Bool = true, hasProgress: Bool = false) {
self.appearance = appearance
self.image = image
self.isEnabled = isEnabled
self.hasProgress = hasProgress
}
}
private let contentContainer: ASDisplayNode
private let effectView: UIVisualEffectView
private let contentBackgroundNode: ASImageNode
private let contentNode: ASImageNode
private let overlayHighlightNode: ASImageNode
private var statusNode: SemanticStatusNode?
private let textNode: ImmediateTextNode
private let largeButtonSize: CGFloat = 72.0
@@ -60,6 +65,9 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
self.effectView.clipsToBounds = true
self.effectView.isUserInteractionEnabled = false
self.contentBackgroundNode = ASImageNode()
self.contentBackgroundNode.isUserInteractionEnabled = false
self.contentNode = ASImageNode()
self.contentNode.isUserInteractionEnabled = false
@@ -79,6 +87,7 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
self.addSubnode(self.textNode)
self.contentContainer.view.addSubview(self.effectView)
self.contentContainer.addSubnode(self.contentBackgroundNode)
self.contentContainer.addSubnode(self.contentNode)
self.contentContainer.addSubnode(self.overlayHighlightNode)
@@ -88,9 +97,13 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
}
if highlighted {
strongSelf.overlayHighlightNode.alpha = 1.0
let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)
transition.updateSublayerTransformScale(node: strongSelf, scale: 0.9)
} else {
strongSelf.overlayHighlightNode.alpha = 0.0
strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
let transition: ContainedViewLayoutTransition = .animated(duration: 0.5, curve: .spring)
transition.updateSublayerTransformScale(node: strongSelf, scale: 1.0)
}
}
}
@@ -101,12 +114,34 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
let isSmall = self.largeButtonSize > size.width
self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.contentBackgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
if self.currentContent != content {
self.currentContent = content
if content.hasProgress {
if self.statusNode == nil {
let statusNode = SemanticStatusNode(backgroundNodeColor: .white, foregroundNodeColor: .clear)
self.statusNode = statusNode
self.contentContainer.insertSubnode(statusNode, belowSubnode: self.contentNode)
statusNode.transitionToState(.progress(value: nil, cancelEnabled: false, appearance: SemanticStatusNodeState.ProgressAppearance(inset: 4.0, lineWidth: 3.0)), animated: false, completion: {})
}
if let statusNode = self.statusNode {
statusNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
if transition.isAnimated {
statusNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
statusNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
} else if let statusNode = self.statusNode {
self.statusNode = nil
transition.updateAlpha(node: statusNode, alpha: 0.0, completion: { [weak statusNode] _ in
statusNode?.removeFromSupernode()
})
}
switch content.appearance {
case .blurred:
self.effectView.isHidden = false
@@ -117,19 +152,29 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
self.alpha = content.isEnabled ? 1.0 : 0.7
self.isUserInteractionEnabled = content.isEnabled
let contentBackgroundImage: UIImage? = nil
let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
var fillColor: UIColor = .clear
var imageColor: UIColor = .white
var drawOverMask = false
context.setBlendMode(.normal)
var imageScale: CGFloat = 1.0
switch content.appearance {
case let .blurred(isFilled):
if isFilled {
fillColor = .white
drawOverMask = true
if content.hasProgress {
fillColor = .clear
imageColor = .black
drawOverMask = false
context.setBlendMode(.copy)
} else {
if isFilled {
fillColor = .white
drawOverMask = true
context.setBlendMode(.copy)
}
}
let smallButtonSize: CGFloat = 60.0
imageScale = self.largeButtonSize / smallButtonSize
@@ -149,19 +194,19 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
switch content.image {
case .camera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: imageColor)
case .mute:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: imageColor)
case .flipCamera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: imageColor)
case .bluetooth:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: imageColor)
case .speaker:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: imageColor)
case .accept:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: imageColor)
case .end:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white)
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: imageColor)
}
if let image = image {
@@ -180,6 +225,14 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
}
}
})
if transition.isAnimated, let contentBackgroundImage = contentBackgroundImage, let previousContent = self.contentBackgroundNode.image {
self.contentBackgroundNode.image = contentBackgroundImage
self.contentBackgroundNode.layer.animate(from: previousContent.cgImage!, to: contentBackgroundImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
} else {
self.contentBackgroundNode.image = contentBackgroundImage
}
if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image {
self.contentNode.image = contentImage
self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)

View File

@@ -17,9 +17,9 @@ enum CallControllerButtonsSpeakerMode {
enum CallControllerButtonsMode: Equatable {
enum VideoState: Equatable {
case notAvailable
case possible(Bool)
case outgoingRequested
case incomingRequested
case possible(isEnabled: Bool, isInitializing: Bool)
case outgoingRequested(isInitializing: Bool)
case incomingRequested(sendsVideo: Bool)
case active
}
@@ -52,7 +52,7 @@ private enum ButtonDescription: Equatable {
case accept
case end(EndType)
case enableCamera(Bool, Bool)
case enableCamera(Bool, Bool, Bool)
case switchCamera
case soundOutput(SoundOutput)
case mute(Bool)
@@ -110,6 +110,10 @@ final class CallControllerButtonsNode: ASDisplayNode {
private var appliedMode: CallControllerButtonsMode?
func videoButtonFrame() -> CGRect? {
return self.buttonNodes[.enableCamera]?.frame
}
private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, bottomInset: CGFloat, animated: Bool) -> CGFloat {
let transition: ContainedViewLayoutTransition
if animated {
@@ -171,12 +175,12 @@ final class CallControllerButtonsNode: ASDisplayNode {
mappedState = .outgoingRinging
case let .active(_, videoStateValue):
switch videoStateValue {
case .incomingRequested:
mappedState = .incomingRinging
videoState = .outgoingRequested
case .outgoingRequested:
mappedState = .outgoingRinging
videoState = .outgoingRequested
case let .incomingRequested(sendsVideo):
mappedState = .active
videoState = .incomingRequested(sendsVideo: sendsVideo)
case let .outgoingRequested(isInitializing):
mappedState = .active
videoState = .outgoingRequested(isInitializing: isInitializing)
case .active, .possible, .notAvailable:
mappedState = .active
}
@@ -204,14 +208,17 @@ final class CallControllerButtonsNode: ASDisplayNode {
case .active, .possible, .incomingRequested, .outgoingRequested:
let isCameraActive: Bool
let isCameraEnabled: Bool
if case let .possible(value) = videoState {
let isCameraInitializing: Bool
if case let .possible(value, isInitializing) = videoState {
isCameraActive = false
isCameraEnabled = value
isCameraInitializing = isInitializing
} else {
isCameraActive = !self.isCameraPaused
isCameraEnabled = true
isCameraInitializing = false
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled))
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
topButtons.append(.mute(self.isMuted))
if case .possible = videoState {
topButtons.append(.soundOutput(soundOutput))
@@ -256,12 +263,19 @@ final class CallControllerButtonsNode: ASDisplayNode {
case .active, .incomingRequested, .outgoingRequested:
let isCameraActive: Bool
let isCameraEnabled: Bool
if case let .possible(value) = videoState {
var isCameraInitializing: Bool
if case .incomingRequested = videoState {
isCameraActive = false
isCameraEnabled = true
isCameraInitializing = false
} else if case let .possible(value, isInitializing) = videoState {
isCameraActive = false
isCameraEnabled = value
isCameraInitializing = isInitializing
} else {
isCameraActive = !self.isCameraPaused
isCameraEnabled = true
isCameraInitializing = false
}
var topButtons: [ButtonDescription] = []
@@ -278,7 +292,11 @@ final class CallControllerButtonsNode: ASDisplayNode {
soundOutput = .bluetooth
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled))
if case let .outgoingRequested(isInitializing) = videoState {
isCameraInitializing = isInitializing
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
topButtons.append(.end(.end))
@@ -298,6 +316,19 @@ final class CallControllerButtonsNode: ASDisplayNode {
var topButtons: [ButtonDescription] = []
var bottomButtons: [ButtonDescription] = []
let isCameraActive: Bool
let isCameraEnabled: Bool
var isCameraInitializing: Bool
if case let .possible(value, isInitializing) = videoState {
isCameraActive = false
isCameraEnabled = value
isCameraInitializing = isInitializing
} else {
isCameraActive = false
isCameraEnabled = true
isCameraInitializing = false
}
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
@@ -310,7 +341,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
soundOutput = .bluetooth
}
topButtons.append(.enableCamera(false, true))
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
@@ -379,11 +410,12 @@ final class CallControllerButtonsNode: ASDisplayNode {
case .end:
buttonText = strings.Call_End
}
case let .enableCamera(isActivated, isEnabled):
case let .enableCamera(isActivated, isEnabled, isInitializing):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isActivated),
image: .camera,
isEnabled: isEnabled
isEnabled: isEnabled,
hasProgress: isInitializing
)
buttonText = strings.Call_Camera
case .switchCamera:

View File

@@ -93,8 +93,43 @@ private final class CallVideoNode: ASDisplayNode {
self.isReadyTimer?.invalidate()
}
func updateLayout(size: CGSize, cornerRadius: CGFloat, transition: ContainedViewLayoutTransition) {
func animateRadialMask(from fromRect: CGRect, to toRect: CGRect) {
let maskLayer = CAShapeLayer()
maskLayer.frame = fromRect
let path = CGMutablePath()
path.addEllipse(in: CGRect(origin: CGPoint(), size: fromRect.size))
maskLayer.path = path
self.layer.mask = maskLayer
let topLeft = CGPoint(x: 0.0, y: 0.0)
let topRight = CGPoint(x: self.bounds.width, y: 0.0)
let bottomLeft = CGPoint(x: 0.0, y: self.bounds.height)
let bottomRight = CGPoint(x: self.bounds.width, y: self.bounds.height)
func distance(_ v1: CGPoint, _ v2: CGPoint) -> CGFloat {
let dx = v1.x - v2.x
let dy = v1.y - v2.y
return sqrt(dx * dx + dy * dy)
}
var maxRadius = distance(toRect.center, topLeft)
maxRadius = max(maxRadius, distance(toRect.center, topRight))
maxRadius = max(maxRadius, distance(toRect.center, bottomLeft))
maxRadius = max(maxRadius, distance(toRect.center, bottomRight))
maxRadius = ceil(maxRadius)
let targetFrame = CGRect(origin: CGPoint(x: toRect.center.x - maxRadius, y: toRect.center.y - maxRadius), size: CGSize(width: maxRadius * 2.0, height: maxRadius * 2.0))
let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)
transition.updatePosition(layer: maskLayer, position: targetFrame.center)
transition.updateTransformScale(layer: maskLayer, scale: maxRadius * 2.0 / fromRect.width, completion: { [weak self] _ in
self?.layer.mask = nil
})
}
func updateLayout(size: CGSize, cornerRadius: CGFloat, transition: ContainedViewLayoutTransition) {
self.currentCornerRadius = cornerRadius
var rotationAngle: CGFloat
@@ -227,9 +262,13 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private var incomingVideoNodeValue: CallVideoNode?
private var incomingVideoViewRequested: Bool = false
private var candidateOutgoingVideoNodeValue: CallVideoNode?
private var outgoingVideoNodeValue: CallVideoNode?
private var outgoingVideoViewRequested: Bool = false
private var isRequestingVideo: Bool = false
private var animateRequestedVideoOnce: Bool = false
private var expandedVideoNode: CallVideoNode?
private var minimizedVideoNode: CallVideoNode?
private var disableAnimationForExpandedVideoOnce: Bool = false
@@ -396,7 +435,19 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
switch callState.state {
case .active:
if strongSelf.outgoingVideoNodeValue == nil {
strongSelf.call.requestVideo()
switch callState.videoState {
case .possible:
strongSelf.isRequestingVideo = true
strongSelf.updateButtonsMode()
default:
break
}
switch callState.videoState {
case .incomingRequested:
strongSelf.call.acceptVideo()
default:
strongSelf.call.requestVideo()
}
} else {
strongSelf.isVideoPaused = !strongSelf.isVideoPaused
strongSelf.outgoingVideoNodeValue?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
@@ -417,7 +468,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
return
}
strongSelf.call.switchVideoCamera()
if let outgoingVideoNode = strongSelf.outgoingVideoNodeValue {
if let _ = strongSelf.outgoingVideoNodeValue {
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
@@ -487,7 +538,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
private func setupAudioOutputs() {
if self.outgoingVideoNodeValue != nil {
if self.outgoingVideoNodeValue != nil || self.candidateOutgoingVideoNodeValue != nil {
if let audioOutputState = self.audioOutputState, let currentOutput = audioOutputState.currentOutput {
switch currentOutput {
case .headphones:
@@ -508,7 +559,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var statusReception: Int32?
switch callState.videoState {
case .active:
case .active, .incomingRequested(true):
if !self.incomingVideoViewRequested {
self.incomingVideoViewRequested = true
self.call.makeIncomingVideoView(completion: { [weak self] incomingVideoView in
@@ -552,17 +603,47 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
switch callState.videoState {
case .active, .outgoingRequested, .incomingRequested:
case .active, .outgoingRequested, .incomingRequested(false):
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
let delayUntilInitialized = self.isRequestingVideo
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
guard let strongSelf = self else {
return
}
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.view.backgroundColor = .black
outgoingVideoView.view.clipsToBounds = true
let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {}, orientationUpdated: {
let applyNode: () -> Void = {
guard let strongSelf = self, let outgoingVideoNode = strongSelf.candidateOutgoingVideoNodeValue else {
return
}
strongSelf.candidateOutgoingVideoNodeValue = nil
if strongSelf.isRequestingVideo {
strongSelf.isRequestingVideo = false
strongSelf.animateRequestedVideoOnce = true
}
strongSelf.outgoingVideoNodeValue = outgoingVideoNode
strongSelf.minimizedVideoNode = outgoingVideoNode
if let expandedVideoNode = strongSelf.expandedVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
} else {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
}
let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {
if delayUntilInitialized {
Queue.mainQueue().after(0.4, {
applyNode()
})
}
}, orientationUpdated: {
guard let strongSelf = self else {
return
}
@@ -577,17 +658,13 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
})
strongSelf.outgoingVideoNodeValue = outgoingVideoNode
strongSelf.minimizedVideoNode = outgoingVideoNode
if let expandedVideoNode = strongSelf.expandedVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
} else {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
}
strongSelf.candidateOutgoingVideoNodeValue = outgoingVideoNode
strongSelf.setupAudioOutputs()
if !delayUntilInitialized {
applyNode()
}
}
})
}
@@ -679,7 +756,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
switch callState.videoState {
case .notAvailable, .active, .possible:
case .notAvailable, .active, .possible, .outgoingRequested:
statusValue = .timer({ value in
if isReconnecting {
return strings.Call_StatusConnecting
@@ -695,8 +772,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
text += "\n\(self.statusNode.subtitle)"
}
statusValue = .text(string: text, displayLogo: true)
case .outgoingRequested:
statusValue = .text(string: self.presentationData.strings.Call_StatusRequesting, displayLogo: false)
/*case .outgoingRequested:
statusValue = .text(string: self.presentationData.strings.Call_StatusRequesting, displayLogo: false)*/
}
}
if self.shouldStayHiddenUntilConnection {
@@ -732,7 +809,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private var buttonsTerminationMode: CallControllerButtonsMode?
private func updateButtonsMode() {
private func updateButtonsMode(transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)) {
guard let callState = self.callState else {
return
}
@@ -765,11 +842,15 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
default:
break
}
mappedVideoState = .possible(isEnabled)
mappedVideoState = .possible(isEnabled: isEnabled, isInitializing: false)
case .outgoingRequested:
mappedVideoState = .outgoingRequested
case .incomingRequested:
mappedVideoState = .incomingRequested
if self.outgoingVideoNodeValue != nil {
mappedVideoState = .outgoingRequested(isInitializing: self.isRequestingVideo)
} else {
mappedVideoState = .possible(isEnabled: true, isInitializing: self.isRequestingVideo)
}
case let .incomingRequested(sendsVideo):
mappedVideoState = .incomingRequested(sendsVideo: sendsVideo)
case .active:
mappedVideoState = .active
}
@@ -793,7 +874,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
if let (layout, navigationHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .spring))
self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: transition)
}
}
@@ -919,6 +1000,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var uiDisplayTransition: CGFloat = self.isUIHidden ? 0.0 : 1.0
uiDisplayTransition *= 1.0 - self.pictureInPictureTransitionFraction
let previousVideoButtonFrame = self.buttonsNode.videoButtonFrame().flatMap { frame -> CGRect in
return self.buttonsNode.view.convert(frame, to: self.view)
}
let buttonsHeight: CGFloat
if let buttonsMode = self.buttonsMode {
buttonsHeight = self.buttonsNode.updateLayout(strings: self.presentationData.strings, mode: buttonsMode, constrainedWidth: layout.size.width, bottomInset: layout.intrinsicInsets.bottom, transition: transition)
@@ -1014,8 +1099,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
if let minimizedVideoNode = self.minimizedVideoNode {
var minimizedVideoTransition = transition
var didAppear = false
if minimizedVideoNode.frame.isEmpty {
minimizedVideoTransition = .immediate
didAppear = true
}
if let expandedVideoNode = self.expandedVideoNode, expandedVideoNode.isReady {
if self.minimizedVideoDraggingPosition == nil {
@@ -1031,10 +1118,23 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
minimizedVideoTransition.updateFrame(node: minimizedVideoNode, frame: previewVideoFrame)
minimizedVideoNode.updateLayout(size: minimizedVideoNode.frame.size, cornerRadius: interpolate(from: 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: minimizedVideoTransition)
if transition.isAnimated && didAppear {
minimizedVideoNode.layer.animateSpring(from: 0.1 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5)
}
}
} else {
minimizedVideoNode.frame = fullscreenVideoFrame
minimizedVideoNode.updateLayout(size: layout.size, cornerRadius: 0.0, transition: minimizedVideoTransition)
if self.animateRequestedVideoOnce {
self.animateRequestedVideoOnce = false
let videoButtonFrame = self.buttonsNode.videoButtonFrame().flatMap { frame -> CGRect in
return self.buttonsNode.view.convert(frame, to: self.view)
}
if let previousVideoButtonFrame = previousVideoButtonFrame, let videoButtonFrame = videoButtonFrame {
minimizedVideoNode.animateRadialMask(from: previousVideoButtonFrame, to: videoButtonFrame)
}
}
}
self.animationForExpandedVideoSnapshotView = nil
}

View File

@@ -463,8 +463,8 @@ public final class PresentationCallImpl: PresentationCall {
mappedVideoState = .possible
case .outgoingRequested:
mappedVideoState = .outgoingRequested
case .incomingRequested:
mappedVideoState = .incomingRequested
case let .incomingRequested(sendsVideo):
mappedVideoState = .incomingRequested(sendsVideo: sendsVideo)
case .active:
mappedVideoState = .active
self.videoWasActive = true

View File

@@ -820,7 +820,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
switch fetchStatus {
case let .Fetching(_, progress):
let adjustedProgress = max(progress, 0.027)
state = .progress(value: CGFloat(adjustedProgress), cancelEnabled: true)
state = .progress(value: CGFloat(adjustedProgress), cancelEnabled: true, appearance: nil)
case .Local:
if isAudio {
state = .play

View File

@@ -105,7 +105,7 @@ public struct OngoingCallContextState: Equatable {
case notAvailable
case possible
case outgoingRequested
case incomingRequested
case incomingRequested(sendsVideo: Bool)
case active
}
@@ -561,9 +561,14 @@ public final class OngoingCallContext {
))
}
}
let screenSize = UIScreen.main.bounds.size
let portraitSize = CGSize(width: min(screenSize.width, screenSize.height), height: max(screenSize.width, screenSize.height))
let preferredAspectRatio = portraitSize.width / portraitSize.height
let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
callSessionManager?.sendSignalingData(internalId: internalId, data: data)
}, videoCapturer: video?.impl)
}, videoCapturer: video?.impl, preferredAspectRatio: Float(preferredAspectRatio))
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState in
@@ -577,7 +582,9 @@ public final class OngoingCallContext {
case .possible:
mappedVideoState = .possible
case .incomingRequested:
mappedVideoState = .incomingRequested
mappedVideoState = .incomingRequested(sendsVideo: false)
case .incomingRequestedAndActive:
mappedVideoState = .incomingRequested(sendsVideo: true)
case .outgoingRequested:
mappedVideoState = .outgoingRequested
case .active:

View File

@@ -33,6 +33,7 @@ typedef NS_ENUM(int32_t, OngoingCallVideoStateWebrtc) {
OngoingCallVideoStatePossible,
OngoingCallVideoStateOutgoingRequested,
OngoingCallVideoStateIncomingRequested,
OngoingCallVideoStateIncomingRequestedAndActive,
OngoingCallVideoStateActive
};
@@ -122,7 +123,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio;
- (void)beginTermination;
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;

View File

@@ -294,7 +294,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio {
self = [super init];
if (self != nil) {
_version = version;
@@ -381,7 +381,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
.enableAGC = true,
.enableCallUpgrade = false,
.logPath = logPath.length == 0 ? "" : std::string(logPath.UTF8String),
.maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer]
.maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer],
.preferredAspectRatio = preferredAspectRatio
};
auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
@@ -419,6 +420,9 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
case tgcalls::VideoState::IncomingRequested:
mappedVideoState = OngoingCallVideoStateIncomingRequested;
break;
case tgcalls::VideoState::IncomingRequestedAndActive:
mappedVideoState = OngoingCallVideoStateIncomingRequestedAndActive;
break;
case tgcalls::VideoState::Active:
mappedVideoState = OngoingCallVideoStateActive;
break;

@@ -1 +1 @@
Subproject commit c3345bb26aba541c99ff3c7075bda8024c7a8202
Subproject commit 88f5dde08ba8bac5f014c4d1753fda890722b0ed