Call experiments

Ali 2020-07-31 22:26:33 +04:00
parent 8d1f30a0b0
commit 3a79be5bef
13 changed files with 325 additions and 85 deletions


@@ -48,7 +48,7 @@ public struct PresentationCallState: Equatable {
     case notAvailable
     case possible
     case outgoingRequested
-    case incomingRequested
+    case incomingRequested(sendsVideo: Bool)
     case active
 }


@@ -4,11 +4,21 @@ import AsyncDisplayKit
 import Display
 
 public enum SemanticStatusNodeState: Equatable {
+    public struct ProgressAppearance: Equatable {
+        public var inset: CGFloat
+        public var lineWidth: CGFloat
+        
+        public init(inset: CGFloat, lineWidth: CGFloat) {
+            self.inset = inset
+            self.lineWidth = lineWidth
+        }
+    }
+    
     case none
     case download
     case play
     case pause
-    case progress(value: CGFloat?, cancelEnabled: Bool)
+    case progress(value: CGFloat?, cancelEnabled: Bool, appearance: ProgressAppearance?)
     case customIcon(UIImage)
 }
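
As a usage sketch (not part of the diff): existing call sites keep their behaviour by passing appearance: nil, while new callers can size the progress ring explicitly:

    import SemanticStatusNode

    // Indeterminate spinner with an explicit ring geometry (new in this commit).
    let spinner: SemanticStatusNodeState = .progress(
        value: nil,
        cancelEnabled: false,
        appearance: SemanticStatusNodeState.ProgressAppearance(inset: 4.0, lineWidth: 3.0)
    )

    // Determinate progress with the default geometry (values here are illustrative).
    let download: SemanticStatusNodeState = .progress(value: 0.35, cancelEnabled: true, appearance: nil)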
@@ -224,12 +234,14 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
         let transitionFraction: CGFloat
         let value: CGFloat?
         let displayCancel: Bool
+        let appearance: SemanticStatusNodeState.ProgressAppearance?
         let timestamp: Double
         
-        init(transitionFraction: CGFloat, value: CGFloat?, displayCancel: Bool, timestamp: Double) {
+        init(transitionFraction: CGFloat, value: CGFloat?, displayCancel: Bool, appearance: SemanticStatusNodeState.ProgressAppearance?, timestamp: Double) {
             self.transitionFraction = transitionFraction
             self.value = value
             self.displayCancel = displayCancel
+            self.appearance = appearance
             self.timestamp = timestamp
             
             super.init()
@@ -252,22 +264,49 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
             context.setStrokeColor(foregroundColor.withAlphaComponent(foregroundColor.alpha * self.transitionFraction).cgColor)
         }
         
-        var progress = self.value ?? 0.1
-        var startAngle = -CGFloat.pi / 2.0
-        var endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
-        if progress > 1.0 {
-            progress = 2.0 - progress
-            let tmp = startAngle
-            startAngle = endAngle
-            endAngle = tmp
-        }
-        progress = min(1.0, progress)
+        var progress: CGFloat
+        var startAngle: CGFloat
+        var endAngle: CGFloat
+        if let value = self.value {
+            progress = value
+            startAngle = -CGFloat.pi / 2.0
+            endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
+            
+            if progress > 1.0 {
+                progress = 2.0 - progress
+                let tmp = startAngle
+                startAngle = endAngle
+                endAngle = tmp
+            }
+            progress = min(1.0, progress)
+        } else {
+            progress = CGFloat(1.0 + self.timestamp.remainder(dividingBy: 2.0))
+            startAngle = -CGFloat.pi / 2.0
+            endAngle = CGFloat(progress) * 2.0 * CGFloat.pi + startAngle
+            
+            if progress > 1.0 {
+                progress = 2.0 - progress
+                let tmp = startAngle
+                startAngle = endAngle
+                endAngle = tmp
+            }
+            progress = min(1.0, progress)
+        }
         
-        let lineWidth: CGFloat = max(1.6, 2.25 * factor)
+        let lineWidth: CGFloat
+        if let appearance = self.appearance {
+            lineWidth = appearance.lineWidth
+        } else {
+            lineWidth = max(1.6, 2.25 * factor)
+        }
         
         let pathDiameter: CGFloat
-        pathDiameter = diameter - lineWidth - 2.5 * 2.0
+        if let appearance = self.appearance {
+            pathDiameter = diameter - lineWidth - appearance.inset * 2.0
+        } else {
+            pathDiameter = diameter - lineWidth - 2.5 * 2.0
+        }
         
         var angle = self.timestamp.truncatingRemainder(dividingBy: Double.pi * 2.0)
         angle *= 4.0
@@ -317,15 +356,17 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
     var value: CGFloat?
     let displayCancel: Bool
+    let appearance: SemanticStatusNodeState.ProgressAppearance?
     var transition: SemanticStatusNodeProgressTransition?
     
     var isAnimating: Bool {
         return true
     }
     
-    init(value: CGFloat?, displayCancel: Bool) {
+    init(value: CGFloat?, displayCancel: Bool, appearance: SemanticStatusNodeState.ProgressAppearance?) {
         self.value = value
         self.displayCancel = displayCancel
+        self.appearance = appearance
     }
     
     func drawingState(transitionFraction: CGFloat) -> SemanticStatusNodeStateDrawingState {
@@ -341,7 +382,7 @@ private final class SemanticStatusNodeProgressContext: SemanticStatusNodeStateCo
         } else {
             resolvedValue = nil
         }
-        return DrawingState(transitionFraction: transitionFraction, value: resolvedValue, displayCancel: self.displayCancel, timestamp: timestamp)
+        return DrawingState(transitionFraction: transitionFraction, value: resolvedValue, displayCancel: self.displayCancel, appearance: self.appearance, timestamp: timestamp)
     }
     
     func updateValue(value: CGFloat?) {
@@ -386,12 +427,12 @@ private extension SemanticStatusNodeState {
             } else {
                 return SemanticStatusNodeIconContext(icon: icon)
             }
-        case let .progress(value, cancelEnabled):
+        case let .progress(value, cancelEnabled, appearance):
             if let current = current as? SemanticStatusNodeProgressContext, current.displayCancel == cancelEnabled {
                 current.updateValue(value: value)
                 return current
             } else {
-                return SemanticStatusNodeProgressContext(value: value, displayCancel: cancelEnabled)
+                return SemanticStatusNodeProgressContext(value: value, displayCancel: cancelEnabled, appearance: appearance)
             }
         }
     }
 }


@@ -7,7 +7,7 @@ static_library(
     ]),
     deps = [
         "//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit#shared",
        "//submodules/Display:Display#shared",
        "//submodules/TelegramPresentationData:TelegramPresentationData",
        "//submodules/TelegramUIPreferences:TelegramUIPreferences",
        "//submodules/PhotoResources:PhotoResources",
@@ -21,6 +21,7 @@ static_library(
        "//submodules/AppBundle:AppBundle",
        "//submodules/PresentationDataUtils:PresentationDataUtils",
        "//submodules/TelegramCallsUI/CallsEmoji:CallsEmoji",
+        "//submodules/SemanticStatusNode:SemanticStatusNode",
     ],
     frameworks = [
        "$SDKROOT/System/Library/Frameworks/Foundation.framework",


@@ -22,6 +22,7 @@ swift_library(
        "//submodules/AppBundle:AppBundle",
        "//submodules/PresentationDataUtils:PresentationDataUtils",
        "//submodules/TelegramCallsUI/CallsEmoji:CallsEmoji",
+        "//submodules/SemanticStatusNode:SemanticStatusNode",
     ],
     visibility = [
        "//visibility:public",


@@ -4,6 +4,7 @@ import Display
 import AsyncDisplayKit
 import SwiftSignalKit
 import AppBundle
+import SemanticStatusNode
 
 private let labelFont = Font.regular(13.0)
@@ -32,18 +33,22 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
         var appearance: Appearance
         var image: Image
         var isEnabled: Bool
+        var hasProgress: Bool
         
-        init(appearance: Appearance, image: Image, isEnabled: Bool = true) {
+        init(appearance: Appearance, image: Image, isEnabled: Bool = true, hasProgress: Bool = false) {
             self.appearance = appearance
             self.image = image
             self.isEnabled = isEnabled
+            self.hasProgress = hasProgress
         }
     }
     
     private let contentContainer: ASDisplayNode
     private let effectView: UIVisualEffectView
+    private let contentBackgroundNode: ASImageNode
     private let contentNode: ASImageNode
     private let overlayHighlightNode: ASImageNode
+    private var statusNode: SemanticStatusNode?
     private let textNode: ImmediateTextNode
     
     private let largeButtonSize: CGFloat = 72.0
@@ -60,6 +65,9 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
         self.effectView.clipsToBounds = true
         self.effectView.isUserInteractionEnabled = false
         
+        self.contentBackgroundNode = ASImageNode()
+        self.contentBackgroundNode.isUserInteractionEnabled = false
+        
         self.contentNode = ASImageNode()
         self.contentNode.isUserInteractionEnabled = false
@@ -79,6 +87,7 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
         self.addSubnode(self.textNode)
         
         self.contentContainer.view.addSubview(self.effectView)
+        self.contentContainer.addSubnode(self.contentBackgroundNode)
         self.contentContainer.addSubnode(self.contentNode)
         self.contentContainer.addSubnode(self.overlayHighlightNode)
@@ -88,9 +97,13 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
             }
             if highlighted {
                 strongSelf.overlayHighlightNode.alpha = 1.0
+                let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)
+                transition.updateSublayerTransformScale(node: strongSelf, scale: 0.9)
             } else {
                 strongSelf.overlayHighlightNode.alpha = 0.0
                 strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
+                let transition: ContainedViewLayoutTransition = .animated(duration: 0.5, curve: .spring)
+                transition.updateSublayerTransformScale(node: strongSelf, scale: 1.0)
             }
         }
     }
@@ -101,12 +114,34 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
         let isSmall = self.largeButtonSize > size.width
         
         self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
+        self.contentBackgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
         self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
         self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
         
         if self.currentContent != content {
             self.currentContent = content
             
+            if content.hasProgress {
+                if self.statusNode == nil {
+                    let statusNode = SemanticStatusNode(backgroundNodeColor: .white, foregroundNodeColor: .clear)
+                    self.statusNode = statusNode
+                    self.contentContainer.insertSubnode(statusNode, belowSubnode: self.contentNode)
+                    statusNode.transitionToState(.progress(value: nil, cancelEnabled: false, appearance: SemanticStatusNodeState.ProgressAppearance(inset: 4.0, lineWidth: 3.0)), animated: false, completion: {})
+                }
+                if let statusNode = self.statusNode {
+                    statusNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
+                    if transition.isAnimated {
+                        statusNode.layer.animateScale(from: 0.1, to: 1.0, duration: 0.2)
+                        statusNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+                    }
+                }
+            } else if let statusNode = self.statusNode {
+                self.statusNode = nil
+                transition.updateAlpha(node: statusNode, alpha: 0.0, completion: { [weak statusNode] _ in
+                    statusNode?.removeFromSupernode()
+                })
+            }
+            
             switch content.appearance {
             case .blurred:
                 self.effectView.isHidden = false
@@ -117,19 +152,29 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
             self.alpha = content.isEnabled ? 1.0 : 0.7
             self.isUserInteractionEnabled = content.isEnabled
             
+            let contentBackgroundImage: UIImage? = nil
+            
             let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
                 context.clear(CGRect(origin: CGPoint(), size: size))
                 
                 var fillColor: UIColor = .clear
+                var imageColor: UIColor = .white
                 var drawOverMask = false
                 context.setBlendMode(.normal)
                 var imageScale: CGFloat = 1.0
                 
                 switch content.appearance {
                 case let .blurred(isFilled):
-                    if isFilled {
-                        fillColor = .white
-                        drawOverMask = true
-                        context.setBlendMode(.copy)
-                    }
+                    if content.hasProgress {
+                        fillColor = .clear
+                        imageColor = .black
+                        drawOverMask = false
+                        context.setBlendMode(.copy)
+                    } else {
+                        if isFilled {
+                            fillColor = .white
+                            drawOverMask = true
+                            context.setBlendMode(.copy)
+                        }
+                    }
                     let smallButtonSize: CGFloat = 60.0
                     imageScale = self.largeButtonSize / smallButtonSize
@@ -149,19 +194,19 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
                 switch content.image {
                 case .camera:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: imageColor)
                 case .mute:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: imageColor)
                 case .flipCamera:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: imageColor)
                 case .bluetooth:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: imageColor)
                 case .speaker:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: imageColor)
                 case .accept:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: imageColor)
                 case .end:
-                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white)
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: imageColor)
                 }
                 
                 if let image = image {
@@ -180,6 +225,14 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
                     }
                 }
             })
+            
+            if transition.isAnimated, let contentBackgroundImage = contentBackgroundImage, let previousContent = self.contentBackgroundNode.image {
+                self.contentBackgroundNode.image = contentBackgroundImage
+                self.contentBackgroundNode.layer.animate(from: previousContent.cgImage!, to: contentBackgroundImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
+            } else {
+                self.contentBackgroundNode.image = contentBackgroundImage
+            }
+            
             if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image {
                 self.contentNode.image = contentImage
                 self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)


@@ -17,9 +17,9 @@ enum CallControllerButtonsSpeakerMode {
 enum CallControllerButtonsMode: Equatable {
     enum VideoState: Equatable {
         case notAvailable
-        case possible(Bool)
-        case outgoingRequested
-        case incomingRequested
+        case possible(isEnabled: Bool, isInitializing: Bool)
+        case outgoingRequested(isInitializing: Bool)
+        case incomingRequested(sendsVideo: Bool)
         case active
     }
@@ -52,7 +52,7 @@ private enum ButtonDescription: Equatable {
     case accept
     case end(EndType)
-    case enableCamera(Bool, Bool)
+    case enableCamera(Bool, Bool, Bool)
     case switchCamera
     case soundOutput(SoundOutput)
     case mute(Bool)
@@ -110,6 +110,10 @@ final class CallControllerButtonsNode: ASDisplayNode {
     private var appliedMode: CallControllerButtonsMode?
     
+    func videoButtonFrame() -> CGRect? {
+        return self.buttonNodes[.enableCamera]?.frame
+    }
+    
     private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, bottomInset: CGFloat, animated: Bool) -> CGFloat {
         let transition: ContainedViewLayoutTransition
         if animated {
@@ -171,12 +175,12 @@ final class CallControllerButtonsNode: ASDisplayNode {
             mappedState = .outgoingRinging
         case let .active(_, videoStateValue):
             switch videoStateValue {
-            case .incomingRequested:
-                mappedState = .incomingRinging
-                videoState = .outgoingRequested
-            case .outgoingRequested:
-                mappedState = .outgoingRinging
-                videoState = .outgoingRequested
+            case let .incomingRequested(sendsVideo):
+                mappedState = .active
+                videoState = .incomingRequested(sendsVideo: sendsVideo)
+            case let .outgoingRequested(isInitializing):
+                mappedState = .active
+                videoState = .outgoingRequested(isInitializing: isInitializing)
             case .active, .possible, .notAvailable:
                 mappedState = .active
             }
@@ -204,14 +208,17 @@ final class CallControllerButtonsNode: ASDisplayNode {
         case .active, .possible, .incomingRequested, .outgoingRequested:
             let isCameraActive: Bool
             let isCameraEnabled: Bool
-            if case let .possible(value) = videoState {
+            let isCameraInitializing: Bool
+            if case let .possible(value, isInitializing) = videoState {
                 isCameraActive = false
                 isCameraEnabled = value
+                isCameraInitializing = isInitializing
             } else {
                 isCameraActive = !self.isCameraPaused
                 isCameraEnabled = true
+                isCameraInitializing = false
             }
-            topButtons.append(.enableCamera(isCameraActive, isCameraEnabled))
+            topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
             topButtons.append(.mute(self.isMuted))
             if case .possible = videoState {
                 topButtons.append(.soundOutput(soundOutput))
@@ -256,12 +263,19 @@ final class CallControllerButtonsNode: ASDisplayNode {
         case .active, .incomingRequested, .outgoingRequested:
             let isCameraActive: Bool
             let isCameraEnabled: Bool
-            if case let .possible(value) = videoState {
+            var isCameraInitializing: Bool
+            if case .incomingRequested = videoState {
+                isCameraActive = false
+                isCameraEnabled = true
+                isCameraInitializing = false
+            } else if case let .possible(value, isInitializing) = videoState {
                 isCameraActive = false
                 isCameraEnabled = value
+                isCameraInitializing = isInitializing
             } else {
                 isCameraActive = !self.isCameraPaused
                 isCameraEnabled = true
+                isCameraInitializing = false
             }
             
             var topButtons: [ButtonDescription] = []
@@ -278,7 +292,11 @@ final class CallControllerButtonsNode: ASDisplayNode {
                 soundOutput = .bluetooth
             }
             
-            topButtons.append(.enableCamera(isCameraActive, isCameraEnabled))
+            if case let .outgoingRequested(isInitializing) = videoState {
+                isCameraInitializing = isInitializing
+            }
+            
+            topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
             topButtons.append(.mute(isMuted))
             topButtons.append(.switchCamera)
             topButtons.append(.end(.end))
@@ -298,6 +316,19 @@ final class CallControllerButtonsNode: ASDisplayNode {
             var topButtons: [ButtonDescription] = []
             var bottomButtons: [ButtonDescription] = []
             
+            let isCameraActive: Bool
+            let isCameraEnabled: Bool
+            var isCameraInitializing: Bool
+            if case let .possible(value, isInitializing) = videoState {
+                isCameraActive = false
+                isCameraEnabled = value
+                isCameraInitializing = isInitializing
+            } else {
+                isCameraActive = false
+                isCameraEnabled = true
+                isCameraInitializing = false
+            }
+            
             let soundOutput: ButtonDescription.SoundOutput
             switch speakerMode {
             case .none, .builtin:
@@ -310,7 +341,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
                 soundOutput = .bluetooth
             }
             
-            topButtons.append(.enableCamera(false, true))
+            topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
             topButtons.append(.mute(self.isMuted))
             topButtons.append(.soundOutput(soundOutput))
@@ -379,11 +410,12 @@ final class CallControllerButtonsNode: ASDisplayNode {
             case .end:
                 buttonText = strings.Call_End
             }
-        case let .enableCamera(isActivated, isEnabled):
+        case let .enableCamera(isActivated, isEnabled, isInitializing):
             buttonContent = CallControllerButtonItemNode.Content(
                 appearance: .blurred(isFilled: isActivated),
                 image: .camera,
-                isEnabled: isEnabled
+                isEnabled: isEnabled,
+                hasProgress: isInitializing
             )
             buttonText = strings.Call_Camera
         case .switchCamera:


@@ -93,8 +93,43 @@ private final class CallVideoNode: ASDisplayNode {
         self.isReadyTimer?.invalidate()
     }
     
+    func animateRadialMask(from fromRect: CGRect, to toRect: CGRect) {
+        let maskLayer = CAShapeLayer()
+        maskLayer.frame = fromRect
+        
+        let path = CGMutablePath()
+        path.addEllipse(in: CGRect(origin: CGPoint(), size: fromRect.size))
+        maskLayer.path = path
+        
+        self.layer.mask = maskLayer
+        
+        let topLeft = CGPoint(x: 0.0, y: 0.0)
+        let topRight = CGPoint(x: self.bounds.width, y: 0.0)
+        let bottomLeft = CGPoint(x: 0.0, y: self.bounds.height)
+        let bottomRight = CGPoint(x: self.bounds.width, y: self.bounds.height)
+        
+        func distance(_ v1: CGPoint, _ v2: CGPoint) -> CGFloat {
+            let dx = v1.x - v2.x
+            let dy = v1.y - v2.y
+            return sqrt(dx * dx + dy * dy)
+        }
+        
+        var maxRadius = distance(toRect.center, topLeft)
+        maxRadius = max(maxRadius, distance(toRect.center, topRight))
+        maxRadius = max(maxRadius, distance(toRect.center, bottomLeft))
+        maxRadius = max(maxRadius, distance(toRect.center, bottomRight))
+        maxRadius = ceil(maxRadius)
+        
+        let targetFrame = CGRect(origin: CGPoint(x: toRect.center.x - maxRadius, y: toRect.center.y - maxRadius), size: CGSize(width: maxRadius * 2.0, height: maxRadius * 2.0))
+        
+        let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)
+        transition.updatePosition(layer: maskLayer, position: targetFrame.center)
+        transition.updateTransformScale(layer: maskLayer, scale: maxRadius * 2.0 / fromRect.width, completion: { [weak self] _ in
+            self?.layer.mask = nil
+        })
+    }
+    
     func updateLayout(size: CGSize, cornerRadius: CGFloat, transition: ContainedViewLayoutTransition) {
         self.currentCornerRadius = cornerRadius
         
         var rotationAngle: CGFloat
@@ -227,9 +262,13 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
     private var incomingVideoNodeValue: CallVideoNode?
     private var incomingVideoViewRequested: Bool = false
+    private var candidateOutgoingVideoNodeValue: CallVideoNode?
     private var outgoingVideoNodeValue: CallVideoNode?
     private var outgoingVideoViewRequested: Bool = false
     
+    private var isRequestingVideo: Bool = false
+    private var animateRequestedVideoOnce: Bool = false
+    
     private var expandedVideoNode: CallVideoNode?
     private var minimizedVideoNode: CallVideoNode?
     private var disableAnimationForExpandedVideoOnce: Bool = false
@@ -396,7 +435,19 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
             switch callState.state {
             case .active:
                 if strongSelf.outgoingVideoNodeValue == nil {
-                    strongSelf.call.requestVideo()
+                    switch callState.videoState {
+                    case .possible:
+                        strongSelf.isRequestingVideo = true
+                        strongSelf.updateButtonsMode()
+                    default:
+                        break
+                    }
+                    switch callState.videoState {
+                    case .incomingRequested:
+                        strongSelf.call.acceptVideo()
+                    default:
+                        strongSelf.call.requestVideo()
+                    }
                 } else {
                     strongSelf.isVideoPaused = !strongSelf.isVideoPaused
                     strongSelf.outgoingVideoNodeValue?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
@@ -417,7 +468,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                 return
             }
             strongSelf.call.switchVideoCamera()
-            if let outgoingVideoNode = strongSelf.outgoingVideoNodeValue {
+            if let _ = strongSelf.outgoingVideoNodeValue {
                 if let (layout, navigationBarHeight) = strongSelf.validLayout {
                     strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
                 }
@@ -487,7 +538,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
     }
     
     private func setupAudioOutputs() {
-        if self.outgoingVideoNodeValue != nil {
+        if self.outgoingVideoNodeValue != nil || self.candidateOutgoingVideoNodeValue != nil {
            if let audioOutputState = self.audioOutputState, let currentOutput = audioOutputState.currentOutput {
                switch currentOutput {
                case .headphones:
@@ -508,7 +559,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         var statusReception: Int32?
         
         switch callState.videoState {
-        case .active:
+        case .active, .incomingRequested(true):
             if !self.incomingVideoViewRequested {
                 self.incomingVideoViewRequested = true
                 self.call.makeIncomingVideoView(completion: { [weak self] incomingVideoView in
@@ -552,17 +603,47 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         }
         
         switch callState.videoState {
-        case .active, .outgoingRequested, .incomingRequested:
+        case .active, .outgoingRequested, .incomingRequested(false):
             if !self.outgoingVideoViewRequested {
                 self.outgoingVideoViewRequested = true
+                let delayUntilInitialized = self.isRequestingVideo
                 self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
                     guard let strongSelf = self else {
                         return
                     }
                     if let outgoingVideoView = outgoingVideoView {
                         outgoingVideoView.view.backgroundColor = .black
                         outgoingVideoView.view.clipsToBounds = true
-                        let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {}, orientationUpdated: {
+                        
+                        let applyNode: () -> Void = {
+                            guard let strongSelf = self, let outgoingVideoNode = strongSelf.candidateOutgoingVideoNodeValue else {
+                                return
+                            }
+                            strongSelf.candidateOutgoingVideoNodeValue = nil
+                            
+                            if strongSelf.isRequestingVideo {
+                                strongSelf.isRequestingVideo = false
+                                strongSelf.animateRequestedVideoOnce = true
+                            }
+                            
+                            strongSelf.outgoingVideoNodeValue = outgoingVideoNode
+                            strongSelf.minimizedVideoNode = outgoingVideoNode
+                            if let expandedVideoNode = strongSelf.expandedVideoNode {
+                                strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
+                            } else {
+                                strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
+                            }
+                            strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
+                        }
+                        
+                        let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {
+                            if delayUntilInitialized {
+                                Queue.mainQueue().after(0.4, {
+                                    applyNode()
+                                })
+                            }
+                        }, orientationUpdated: {
                             guard let strongSelf = self else {
                                 return
                             }
@@ -577,17 +658,13 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                                 strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
                             }
                         })
-                        strongSelf.outgoingVideoNodeValue = outgoingVideoNode
-                        strongSelf.minimizedVideoNode = outgoingVideoNode
-                        if let expandedVideoNode = strongSelf.expandedVideoNode {
-                            strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
-                        } else {
-                            strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
-                        }
-                        if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
-                        }
+                        
+                        strongSelf.candidateOutgoingVideoNodeValue = outgoingVideoNode
                         strongSelf.setupAudioOutputs()
+                        
+                        if !delayUntilInitialized {
+                            applyNode()
+                        }
                     }
                 })
             }
@@ -679,7 +756,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                 }
             }
             switch callState.videoState {
-            case .notAvailable, .active, .possible:
+            case .notAvailable, .active, .possible, .outgoingRequested:
                 statusValue = .timer({ value in
                     if isReconnecting {
                         return strings.Call_StatusConnecting
@@ -695,8 +772,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                     text += "\n\(self.statusNode.subtitle)"
                 }
                 statusValue = .text(string: text, displayLogo: true)
-            case .outgoingRequested:
-                statusValue = .text(string: self.presentationData.strings.Call_StatusRequesting, displayLogo: false)
+            /*case .outgoingRequested:
+                statusValue = .text(string: self.presentationData.strings.Call_StatusRequesting, displayLogo: false)*/
             }
         }
         if self.shouldStayHiddenUntilConnection {
@@ -732,7 +809,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
     private var buttonsTerminationMode: CallControllerButtonsMode?
     
-    private func updateButtonsMode() {
+    private func updateButtonsMode(transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)) {
         guard let callState = self.callState else {
             return
         }
@@ -765,11 +842,15 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
             default:
                 break
             }
-            mappedVideoState = .possible(isEnabled)
+            mappedVideoState = .possible(isEnabled: isEnabled, isInitializing: false)
         case .outgoingRequested:
-            mappedVideoState = .outgoingRequested
-        case .incomingRequested:
-            mappedVideoState = .incomingRequested
+            if self.outgoingVideoNodeValue != nil {
+                mappedVideoState = .outgoingRequested(isInitializing: self.isRequestingVideo)
+            } else {
+                mappedVideoState = .possible(isEnabled: true, isInitializing: self.isRequestingVideo)
+            }
+        case let .incomingRequested(sendsVideo):
+            mappedVideoState = .incomingRequested(sendsVideo: sendsVideo)
         case .active:
             mappedVideoState = .active
         }
@@ -793,7 +874,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         }
         
         if let (layout, navigationHeight) = self.validLayout {
-            self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .spring))
+            self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: transition)
         }
     }
@@ -919,6 +1000,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         var uiDisplayTransition: CGFloat = self.isUIHidden ? 0.0 : 1.0
         uiDisplayTransition *= 1.0 - self.pictureInPictureTransitionFraction
         
+        let previousVideoButtonFrame = self.buttonsNode.videoButtonFrame().flatMap { frame -> CGRect in
+            return self.buttonsNode.view.convert(frame, to: self.view)
+        }
+        
         let buttonsHeight: CGFloat
         if let buttonsMode = self.buttonsMode {
             buttonsHeight = self.buttonsNode.updateLayout(strings: self.presentationData.strings, mode: buttonsMode, constrainedWidth: layout.size.width, bottomInset: layout.intrinsicInsets.bottom, transition: transition)
@@ -995,7 +1080,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
         transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
         
         transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
         transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
@@ -1014,8 +1099,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         }
         
         if let minimizedVideoNode = self.minimizedVideoNode {
             var minimizedVideoTransition = transition
+            var didAppear = false
             if minimizedVideoNode.frame.isEmpty {
                 minimizedVideoTransition = .immediate
+                didAppear = true
             }
             if let expandedVideoNode = self.expandedVideoNode, expandedVideoNode.isReady {
                 if self.minimizedVideoDraggingPosition == nil {
@@ -1031,10 +1118,23 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                 }
                 minimizedVideoTransition.updateFrame(node: minimizedVideoNode, frame: previewVideoFrame)
                 minimizedVideoNode.updateLayout(size: minimizedVideoNode.frame.size, cornerRadius: interpolate(from: 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: minimizedVideoTransition)
+                if transition.isAnimated && didAppear {
+                    minimizedVideoNode.layer.animateSpring(from: 0.1 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5)
+                }
                 }
             } else {
                 minimizedVideoNode.frame = fullscreenVideoFrame
                 minimizedVideoNode.updateLayout(size: layout.size, cornerRadius: 0.0, transition: minimizedVideoTransition)
+                if self.animateRequestedVideoOnce {
+                    self.animateRequestedVideoOnce = false
+                    let videoButtonFrame = self.buttonsNode.videoButtonFrame().flatMap { frame -> CGRect in
+                        return self.buttonsNode.view.convert(frame, to: self.view)
+                    }
+                    if let previousVideoButtonFrame = previousVideoButtonFrame, let videoButtonFrame = videoButtonFrame {
+                        minimizedVideoNode.animateRadialMask(from: previousVideoButtonFrame, to: videoButtonFrame)
+                    }
+                }
             }
             self.animationForExpandedVideoSnapshotView = nil
         }


@@ -463,8 +463,8 @@ public final class PresentationCallImpl: PresentationCall {
                 mappedVideoState = .possible
             case .outgoingRequested:
                 mappedVideoState = .outgoingRequested
-            case .incomingRequested:
-                mappedVideoState = .incomingRequested
+            case let .incomingRequested(sendsVideo):
+                mappedVideoState = .incomingRequested(sendsVideo: sendsVideo)
             case .active:
                 mappedVideoState = .active
                 self.videoWasActive = true


@@ -820,7 +820,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
                 switch fetchStatus {
                 case let .Fetching(_, progress):
                     let adjustedProgress = max(progress, 0.027)
-                    state = .progress(value: CGFloat(adjustedProgress), cancelEnabled: true)
+                    state = .progress(value: CGFloat(adjustedProgress), cancelEnabled: true, appearance: nil)
                 case .Local:
                     if isAudio {
                         state = .play


@@ -105,7 +105,7 @@ public struct OngoingCallContextState: Equatable {
     case notAvailable
     case possible
     case outgoingRequested
-    case incomingRequested
+    case incomingRequested(sendsVideo: Bool)
     case active
 }
@@ -561,9 +561,14 @@ public final class OngoingCallContext {
                     ))
                 }
             }
+            
+            let screenSize = UIScreen.main.bounds.size
+            let portraitSize = CGSize(width: min(screenSize.width, screenSize.height), height: max(screenSize.width, screenSize.height))
+            let preferredAspectRatio = portraitSize.width / portraitSize.height
+            
             let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
                 callSessionManager?.sendSignalingData(internalId: internalId, data: data)
-            }, videoCapturer: video?.impl)
+            }, videoCapturer: video?.impl, preferredAspectRatio: Float(preferredAspectRatio))
             
             strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
             context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState in
@@ -577,7 +582,9 @@ public final class OngoingCallContext {
                 case .possible:
                     mappedVideoState = .possible
                 case .incomingRequested:
-                    mappedVideoState = .incomingRequested
+                    mappedVideoState = .incomingRequested(sendsVideo: false)
+                case .incomingRequestedAndActive:
+                    mappedVideoState = .incomingRequested(sendsVideo: true)
                 case .outgoingRequested:
                     mappedVideoState = .outgoingRequested
                 case .active:


@@ -33,6 +33,7 @@ typedef NS_ENUM(int32_t, OngoingCallVideoStateWebrtc) {
    OngoingCallVideoStatePossible,
    OngoingCallVideoStateOutgoingRequested,
    OngoingCallVideoStateIncomingRequested,
+    OngoingCallVideoStateIncomingRequestedAndActive,
    OngoingCallVideoStateActive
 };
@@ -122,7 +123,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 @property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc);
 @property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
 
-- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
+- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio;
 
 - (void)beginTermination;
 - (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;


@@ -294,7 +294,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
+- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio {
     self = [super init];
     if (self != nil) {
         _version = version;
@@ -381,7 +381,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
         .enableAGC = true,
         .enableCallUpgrade = false,
         .logPath = logPath.length == 0 ? "" : std::string(logPath.UTF8String),
-        .maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer]
+        .maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer],
+        .preferredAspectRatio = preferredAspectRatio
     };
     
     auto encryptionKeyValue = std::make_shared<std::array<uint8_t, 256>>();
@@ -419,6 +420,9 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             case tgcalls::VideoState::IncomingRequested:
                 mappedVideoState = OngoingCallVideoStateIncomingRequested;
                 break;
+            case tgcalls::VideoState::IncomingRequestedAndActive:
+                mappedVideoState = OngoingCallVideoStateIncomingRequestedAndActive;
+                break;
             case tgcalls::VideoState::Active:
                 mappedVideoState = OngoingCallVideoStateActive;
                 break;

@@ -1 +1 @@
-Subproject commit c3345bb26aba541c99ff3c7075bda8024c7a8202
+Subproject commit 88f5dde08ba8bac5f014c4d1753fda890722b0ed