Video improvements

Ali
2020-07-03 21:25:36 +04:00
parent c8c1c96f16
commit b542dc3fd7
44 changed files with 1097 additions and 613 deletions

View File

@@ -90,6 +90,7 @@ public protocol PresentationCall: class {
func toggleIsMuted()
func setIsMuted(_ value: Bool)
func setEnableVideo(_ value: Bool)
func setOutgoingVideoIsPaused(_ isPaused: Bool)
func switchVideoCamera()
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func debugInfo() -> Signal<(String, String), NoError>
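A minimal caller-side sketch of the new pause API (illustrative, not part of the diff): any PresentationCall implementation can now pause the outgoing track without tearing video down. The `call` value and the toggle function are assumptions for the example.

// Hypothetical wiring; `call` is any object conforming to PresentationCall.
var outgoingVideoIsPaused = false

func togglePauseOutgoingVideo(_ call: PresentationCall) {
    outgoingVideoIsPaused = !outgoingVideoIsPaused
    // New in this commit: pause/resume the local video without disabling video entirely.
    call.setOutgoingVideoIsPaused(outgoingVideoIsPaused)
}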

View File

@@ -238,14 +238,14 @@ public extension CALayer {
self.animate(from: NSValue(cgPoint: from), to: NSValue(cgPoint: to), keyPath: "position", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
}
func animateBounds(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
func animateBounds(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
if from == to && !force {
if let completion = completion {
completion(true)
}
return
}
self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
}
func animateBoundsOriginXAdditive(from: CGFloat, to: CGFloat, duration: Double, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
@@ -268,7 +268,7 @@ public extension CALayer {
self.animateKeyframes(values: values.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position")
}
func animateFrame(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
func animateFrame(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
if from == to && !force {
if let completion = completion {
completion(true)
@@ -302,14 +302,14 @@ public extension CALayer {
toBounds = CGRect()
}
self.animatePosition(from: fromPosition, to: toPosition, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
self.animatePosition(from: fromPosition, to: toPosition, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
if !value {
interrupted = true
}
completedPosition = true
partialCompletion()
})
self.animateBounds(from: fromBounds, to: toBounds, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
self.animateBounds(from: fromBounds, to: toBounds, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
if !value {
interrupted = true
}
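A sketch of the new delay parameter on the CALayer helpers (illustrative; `node` is an assumed ASDisplayNode already in the hierarchy, and the spring timing constant is the same string constant used elsewhere in this diff). Because delay defaults to 0.0, existing call sites compile unchanged.

let fromFrame = node.frame
let toFrame = fromFrame.offsetBy(dx: 0.0, dy: -40.0)
node.frame = toFrame
// Start the spring animation slightly late, without scheduling it externally.
node.layer.animateFrame(from: fromFrame, to: toFrame, duration: 0.3, delay: 0.045, timingFunction: kCAMediaTimingFunctionSpring)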

View File

@@ -63,7 +63,7 @@ public enum ContainedViewLayoutTransition {
}
public extension ContainedViewLayoutTransition {
func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) {
func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
if node.frame.equalTo(frame) && !force {
completion?(true)
} else {
@@ -81,7 +81,7 @@ public extension ContainedViewLayoutTransition {
previousFrame = node.frame
}
node.frame = frame
node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, delay: delay, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
if let completion = completion {
completion(result)
}
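The same delay is exposed one level up on the transition API. A sketch of staggering several nodes (`buttonNodes` and `frames` are assumed arrays; the 0.015 increment matches the delayIncrement used by the buttons node later in this commit):

let transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .spring)
for (index, buttonNode) in buttonNodes.enumerated() {
    // force and beginWithCurrentState keep their defaults; only the new delay is supplied.
    transition.updateFrame(node: buttonNode, frame: frames[index], delay: 0.015 * Double(index))
}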

View File

@@ -178,8 +178,8 @@ public final class CallController: ViewController {
let _ = self?.call.hangUp()
}
self.controllerNode.toggleVideo = { [weak self] in
let _ = self?.call.setEnableVideo(true)
self.controllerNode.setIsVideoPaused = { [weak self] isPaused in
self?.call.setOutgoingVideoIsPaused(isPaused)
}
self.controllerNode.back = { [weak self] in

View File

@@ -5,245 +5,218 @@ import AsyncDisplayKit
import SwiftSignalKit
import AppBundle
enum CallControllerButtonType {
case mute
case end
case accept
case speaker
case bluetooth
case switchCamera
}
private let labelFont = Font.regular(13.0)
private let buttonSize = CGSize(width: 75.0, height: 75.0)
private func generateEmptyButtonImage(icon: UIImage?, strokeColor: UIColor?, fillColor: UIColor, knockout: Bool = false, angle: CGFloat = 0.0) -> UIImage? {
return generateImage(buttonSize, contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.copy)
if let strokeColor = strokeColor {
context.setFillColor(strokeColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(x: 1.5, y: 1.5), size: CGSize(width: size.width - 3.0, height: size.height - 3.0)))
} else {
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.height)))
}
if let icon = icon {
if !angle.isZero {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.rotate(by: angle)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
}
let imageSize = icon.size
let imageRect = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floor((size.width - imageSize.height) / 2.0)), size: imageSize)
if knockout {
context.setBlendMode(.copy)
context.clip(to: imageRect, mask: icon.cgImage!)
context.setFillColor(UIColor.clear.cgColor)
context.fill(imageRect)
} else {
context.setBlendMode(.normal)
context.draw(icon.cgImage!, in: imageRect)
final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
struct Content: Equatable {
enum Appearance: Equatable {
enum Color {
case red
case green
}
case blurred(isFilled: Bool)
case color(Color)
}
})
}
private func generateFilledButtonImage(color: UIColor, icon: UIImage?, angle: CGFloat = 0.0) -> UIImage? {
return generateImage(buttonSize, contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.normal)
context.setFillColor(color.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
if let icon = icon {
if !angle.isZero {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.rotate(by: angle)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
}
context.draw(icon.cgImage!, in: CGRect(origin: CGPoint(x: floor((size.width - icon.size.width) / 2.0), y: floor((size.height - icon.size.height) / 2.0)), size: icon.size))
enum Image {
case camera
case mute
case flipCamera
case bluetooth
case speaker
case accept
case end
}
})
}
private let emptyStroke = UIColor(white: 1.0, alpha: 0.8)
private let emptyHighlightedFill = UIColor(white: 1.0, alpha: 0.3)
private let invertedFill = UIColor(white: 1.0, alpha: 1.0)
private let labelFont = Font.regular(14.5)
final class CallControllerButtonNode: HighlightTrackingButtonNode {
private var type: CallControllerButtonType
var appearance: Appearance
var image: Image
}
private var regularImage: UIImage?
private var highlightedImage: UIImage?
private var filledImage: UIImage?
private let contentContainer: ASDisplayNode
private let effectView: UIVisualEffectView
private let contentNode: ASImageNode
private let overlayHighlightNode: ASImageNode
private let textNode: ImmediateTextNode
private let backgroundNode: ASImageNode
private let labelNode: ASTextNode?
private let largeButtonSize: CGFloat = 72.0
init(type: CallControllerButtonType, label: String?) {
self.type = type
private(set) var currentContent: Content?
private(set) var currentText: String = ""
init() {
self.contentContainer = ASDisplayNode()
self.backgroundNode = ASImageNode()
self.backgroundNode.isLayerBacked = true
self.backgroundNode.displayWithoutProcessing = false
self.backgroundNode.displaysAsynchronously = false
self.effectView = UIVisualEffectView()
self.effectView.effect = UIBlurEffect(style: .light)
self.effectView.layer.cornerRadius = self.largeButtonSize / 2.0
self.effectView.clipsToBounds = true
self.effectView.isUserInteractionEnabled = false
if let label = label {
let labelNode = ASTextNode()
labelNode.attributedText = NSAttributedString(string: label, font: labelFont, textColor: .white)
self.labelNode = labelNode
} else {
self.labelNode = nil
}
self.contentNode = ASImageNode()
self.contentNode.isUserInteractionEnabled = false
var regularImage: UIImage?
var highlightedImage: UIImage?
var filledImage: UIImage?
self.overlayHighlightNode = ASImageNode()
self.overlayHighlightNode.isUserInteractionEnabled = false
self.overlayHighlightNode.alpha = 0.0
switch type {
case .mute:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .accept:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
case .end:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
case .speaker:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .bluetooth:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .switchCamera:
let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
}
self.textNode = ImmediateTextNode()
self.textNode.displaysAsynchronously = false
self.textNode.isUserInteractionEnabled = false
self.regularImage = regularImage
self.highlightedImage = highlightedImage
self.filledImage = filledImage
super.init(pointerStyle: nil)
super.init()
self.addSubnode(self.contentContainer)
self.contentContainer.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.addSubnode(self.backgroundNode)
self.addSubnode(self.textNode)
if let labelNode = self.labelNode {
self.addSubnode(labelNode)
}
self.backgroundNode.image = regularImage
self.currentImage = regularImage
self.contentContainer.view.addSubview(self.effectView)
self.contentContainer.addSubnode(self.contentNode)
self.contentContainer.addSubnode(self.overlayHighlightNode)
self.highligthedChanged = { [weak self] highlighted in
if let strongSelf = self {
strongSelf.internalHighlighted = highlighted
strongSelf.updateState(highlighted: highlighted, selected: strongSelf.isSelected)
guard let strongSelf = self else {
return
}
}
}
private var internalHighlighted = false
override var isSelected: Bool {
didSet {
self.updateState(highlighted: self.internalHighlighted, selected: self.isSelected)
}
}
private var currentImage: UIImage?
private func updateState(highlighted: Bool, selected: Bool) {
let image: UIImage?
if selected {
image = self.filledImage
} else if highlighted {
image = self.highlightedImage
} else {
image = self.regularImage
}
if self.currentImage !== image {
let currentContents = self.backgroundNode.layer.contents
self.backgroundNode.layer.removeAnimation(forKey: "contents")
if let currentContents = currentContents, let image = image {
self.backgroundNode.image = image
self.backgroundNode.layer.animate(from: currentContents as AnyObject, to: image.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: image === self.currentImage || image === self.filledImage ? 0.25 : 0.15)
if highlighted {
strongSelf.overlayHighlightNode.alpha = 1.0
} else {
self.backgroundNode.image = image
strongSelf.overlayHighlightNode.alpha = 0.0
strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
self.currentImage = image
}
}
func updateType(_ type: CallControllerButtonType) {
if self.type == type {
return
}
self.type = type
var regularImage: UIImage?
var highlightedImage: UIImage?
var filledImage: UIImage?
func update(size: CGSize, content: Content, text: String, transition: ContainedViewLayoutTransition) {
let scaleFactor = size.width / self.largeButtonSize
switch type {
case .mute:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .accept:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
case .end:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
case .speaker:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .bluetooth:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .switchCamera:
let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
if self.currentContent != content {
self.currentContent = content
switch content.appearance {
case .blurred:
self.effectView.isHidden = false
case .color:
self.effectView.isHidden = true
}
let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
var fillColor: UIColor = .clear
var drawOverMask = false
context.setBlendMode(.normal)
var imageScale: CGFloat = 1.0
switch content.appearance {
case let .blurred(isFilled):
if isFilled {
fillColor = .white
drawOverMask = true
context.setBlendMode(.copy)
}
let smallButtonSize: CGFloat = 60.0
imageScale = self.largeButtonSize / smallButtonSize
case let .color(color):
switch color {
case .red:
fillColor = UIColor(rgb: 0xd92326)
case .green:
fillColor = UIColor(rgb: 0x74db58)
}
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
var image: UIImage?
switch content.image {
case .camera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white)
case .mute:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white)
case .flipCamera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
case .bluetooth:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white)
case .speaker:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white)
case .accept:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white)
case .end:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white)
}
if let image = image {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: imageScale, y: imageScale)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
let imageRect = CGRect(origin: CGPoint(x: floor((size.width - image.size.width) / 2.0), y: floor((size.height - image.size.height) / 2.0)), size: image.size)
if drawOverMask {
context.clip(to: imageRect, mask: image.cgImage!)
context.setBlendMode(.copy)
context.setFillColor(UIColor.clear.cgColor)
context.fill(CGRect(origin: CGPoint(), size: size))
} else {
context.draw(image.cgImage!, in: imageRect)
}
}
})
if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image {
self.contentNode.image = contentImage
self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
} else {
self.contentNode.image = contentImage
}
self.overlayHighlightNode.image = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
let fillColor: UIColor
context.setBlendMode(.normal)
switch content.appearance {
case let .blurred(isFilled):
if isFilled {
fillColor = UIColor(white: 0.0, alpha: 0.1)
} else {
fillColor = UIColor(white: 1.0, alpha: 0.2)
}
case let .color(color):
switch color {
case .red:
fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
case .green:
fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
}
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
})
}
self.regularImage = regularImage
self.highlightedImage = highlightedImage
self.filledImage = filledImage
transition.updatePosition(node: self.contentContainer, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0))
transition.updateSublayerTransformScale(node: self.contentContainer, scale: scaleFactor)
self.updateState(highlighted: self.isHighlighted, selected: self.isSelected)
}
func animateRollTransition() {
self.backgroundNode.layer.animate(from: 0.0 as NSNumber, to: (-CGFloat.pi * 5 / 4) as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3, removeOnCompletion: false)
self.labelNode?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
}
override func layout() {
super.layout()
let size = self.bounds.size
self.backgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.width))
if let labelNode = self.labelNode {
let labelSize = labelNode.measure(CGSize(width: 200.0, height: 100.0))
labelNode.frame = CGRect(origin: CGPoint(x: floor((size.width - labelSize.width) / 2.0), y: 81.0), size: labelSize)
if self.currentText != text {
self.textNode.attributedText = NSAttributedString(string: text, font: labelFont, textColor: .white)
}
let textSize = self.textNode.updateLayout(CGSize(width: 150.0, height: 100.0))
let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: size.height + 5.0), size: textSize)
if self.currentText.isEmpty {
self.textNode.frame = textFrame
if transition.isAnimated {
self.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
} else {
transition.updateFrameAdditiveToCenter(node: self.textNode, frame: textFrame)
}
self.currentText = text
}
}
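A usage sketch for the new item node (not part of the diff): state changes go through update(size:content:text:transition:), so toggling mute means passing a different Content rather than swapping pre-rendered images. The 60-point size and the string literal are assumptions; in the app the text comes from PresentationStrings.

let muteNode = CallControllerButtonItemNode()
let smallButtonSize = CGSize(width: 60.0, height: 60.0)

// Initial state: unfilled blurred circle with the mute glyph.
muteNode.update(
    size: smallButtonSize,
    content: CallControllerButtonItemNode.Content(appearance: .blurred(isFilled: false), image: .mute),
    text: "Mute",
    transition: .immediate
)

// The muted state re-uses the same node with a filled appearance; an animated
// transition makes the node cross-fade its rendered content image.
muteNode.update(
    size: smallButtonSize,
    content: CallControllerButtonItemNode.Content(appearance: .blurred(isFilled: true), image: .mute),
    text: "Mute",
    transition: .animated(duration: 0.3, curve: .spring)
)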

View File

@@ -22,27 +22,65 @@ enum CallControllerButtonsMode: Equatable {
}
case active(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case incoming
case incoming(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
}
private enum ButtonDescription: Equatable {
enum Key: Hashable {
case accept
case end
case enableCamera
case switchCamera
case soundOutput
case mute
}
enum SoundOutput {
case speaker
case bluetooth
}
enum EndType {
case outgoing
case decline
case end
}
case accept
case end(EndType)
case enableCamera(Bool)
case switchCamera
case soundOutput(SoundOutput)
case mute(Bool)
var key: Key {
switch self {
case .accept:
return .accept
case .end:
return .end
case .enableCamera:
return .enableCamera
case .switchCamera:
return .switchCamera
case .soundOutput:
return .soundOutput
case .mute:
return .mute
}
}
}
final class CallControllerButtonsNode: ASDisplayNode {
private let acceptButton: CallControllerButtonNode
private let declineButton: CallControllerButtonNode
private let muteButton: CallControllerButtonNode
private let endButton: CallControllerButtonNode
private let speakerButton: CallControllerButtonNode
private let swichCameraButton: CallControllerButtonNode
private var buttonNodes: [ButtonDescription.Key: CallControllerButtonItemNode] = [:]
private var mode: CallControllerButtonsMode?
private var validLayout: CGFloat?
var isMuted = false {
didSet {
self.muteButton.isSelected = self.isMuted
}
}
var isMuted = false
var isCameraPaused = false
var accept: (() -> Void)?
var mute: (() -> Void)?
@@ -52,57 +90,30 @@ final class CallControllerButtonsNode: ASDisplayNode {
var rotateCamera: (() -> Void)?
init(strings: PresentationStrings) {
self.acceptButton = CallControllerButtonNode(type: .accept, label: strings.Call_Accept)
self.acceptButton.alpha = 0.0
self.declineButton = CallControllerButtonNode(type: .end, label: strings.Call_Decline)
self.declineButton.alpha = 0.0
self.muteButton = CallControllerButtonNode(type: .mute, label: nil)
self.muteButton.alpha = 0.0
self.endButton = CallControllerButtonNode(type: .end, label: nil)
self.endButton.alpha = 0.0
self.speakerButton = CallControllerButtonNode(type: .speaker, label: nil)
self.speakerButton.alpha = 0.0
self.swichCameraButton = CallControllerButtonNode(type: .switchCamera, label: nil)
self.swichCameraButton.alpha = 0.0
super.init()
self.addSubnode(self.acceptButton)
self.addSubnode(self.declineButton)
self.addSubnode(self.muteButton)
self.addSubnode(self.endButton)
self.addSubnode(self.speakerButton)
self.addSubnode(self.swichCameraButton)
self.acceptButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.declineButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.muteButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.endButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.speakerButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.swichCameraButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
}
func updateLayout(constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
let previousLayout = self.validLayout
func updateLayout(strings: PresentationStrings, constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
self.validLayout = constrainedWidth
if let mode = self.mode, previousLayout != self.validLayout {
self.updateButtonsLayout(mode: mode, width: constrainedWidth, animated: false)
if let mode = self.mode {
self.updateButtonsLayout(strings: strings, mode: mode, width: constrainedWidth, animated: transition.isAnimated)
}
}
func updateMode(_ mode: CallControllerButtonsMode) {
func updateMode(strings: PresentationStrings, mode: CallControllerButtonsMode) {
if self.mode != mode {
let previousMode = self.mode
self.mode = mode
if let validLayout = self.validLayout {
self.updateButtonsLayout(mode: mode, width: validLayout, animated: previousMode != nil)
self.updateButtonsLayout(strings: strings, mode: mode, width: validLayout, animated: previousMode != nil)
}
}
}
private func updateButtonsLayout(mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
private var appliedMode: CallControllerButtonsMode?
private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
let transition: ContainedViewLayoutTransition
if animated {
transition = .animated(duration: 0.3, curve: .spring)
@@ -110,147 +121,273 @@ final class CallControllerButtonsNode: ASDisplayNode {
transition = .immediate
}
let threeButtonSpacing: CGFloat = 28.0
let twoButtonSpacing: CGFloat = 105.0
let buttonSize = CGSize(width: 75.0, height: 75.0)
let threeButtonsWidth = 3.0 * buttonSize.width + 2.0 * threeButtonSpacing
let twoButtonsWidth = 2.0 * buttonSize.width + 1.0 * twoButtonSpacing
let previousMode = self.appliedMode
self.appliedMode = mode
var origin = CGPoint(x: floor((width - threeButtonsWidth) / 2.0), y: 0.0)
var animatePositionsWithDelay = false
if let previousMode = previousMode {
switch previousMode {
case .incoming, .outgoingRinging:
if case .active = mode {
animatePositionsWithDelay = true
}
default:
break
}
}
for button in [self.muteButton, self.endButton, self.speakerButton] {
transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize))
if button === self.speakerButton {
transition.updateFrame(node: self.swichCameraButton, frame: CGRect(origin: origin, size: buttonSize))
let minSmallButtonSideInset: CGFloat = 34.0
let maxSmallButtonSpacing: CGFloat = 34.0
let smallButtonSize: CGFloat = 60.0
let topBottomSpacing: CGFloat = 84.0
let maxLargeButtonSpacing: CGFloat = 115.0
let largeButtonSize: CGFloat = 72.0
let minLargeButtonSideInset: CGFloat = minSmallButtonSideInset - 6.0
struct PlacedButton {
let button: ButtonDescription
let frame: CGRect
}
var buttons: [PlacedButton] = []
switch mode {
case .incoming(let speakerMode, let videoState), .outgoingRinging(let speakerMode, let videoState):
var topButtons: [ButtonDescription] = []
var bottomButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin, .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
}
origin.x += buttonSize.width + threeButtonSpacing
switch videoState {
case .active, .available:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(self.isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
}
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}
if case .incoming = mode {
bottomButtons.append(.end(.decline))
bottomButtons.append(.accept)
} else {
bottomButtons.append(.end(.outgoing))
}
let bottomButtonsContentWidth = CGFloat(bottomButtons.count) * largeButtonSize
let bottomButtonsAvailableSpacingWidth = width - bottomButtonsContentWidth - minLargeButtonSideInset * 2.0
let bottomButtonsSpacing = min(maxLargeButtonSpacing, bottomButtonsAvailableSpacingWidth / CGFloat(bottomButtons.count - 1))
let bottomButtonsWidth = CGFloat(bottomButtons.count) * largeButtonSize + CGFloat(bottomButtons.count - 1) * bottomButtonsSpacing
var bottomButtonsLeftOffset = floor((width - bottomButtonsWidth) / 2.0)
for button in bottomButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: bottomButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: largeButtonSize, height: largeButtonSize))))
bottomButtonsLeftOffset += largeButtonSize + bottomButtonsSpacing
}
case let .active(speakerMode, videoState):
var topButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin, .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
}
switch videoState {
case .active, .available:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(isMuted))
topButtons.append(.soundOutput(soundOutput))
}
topButtons.append(.end(.end))
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}
}
origin = CGPoint(x: floor((width - twoButtonsWidth) / 2.0), y: 0.0)
for button in [self.declineButton, self.acceptButton] {
transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize))
origin.x += buttonSize.width + twoButtonSpacing
let delayIncrement = 0.015
var validKeys: [ButtonDescription.Key] = []
for button in buttons {
validKeys.append(button.button.key)
var buttonTransition = transition
var animateButtonIn = false
let buttonNode: CallControllerButtonItemNode
if let current = self.buttonNodes[button.button.key] {
buttonNode = current
} else {
buttonNode = CallControllerButtonItemNode()
self.buttonNodes[button.button.key] = buttonNode
self.addSubnode(buttonNode)
buttonNode.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
buttonTransition = .immediate
animateButtonIn = transition.isAnimated
}
let buttonContent: CallControllerButtonItemNode.Content
let buttonText: String
switch button.button {
case .accept:
buttonContent = CallControllerButtonItemNode.Content(
appearance: .color(.green),
image: .accept
)
buttonText = strings.Call_Accept
case let .end(type):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .color(.red),
image: .end
)
switch type {
case .outgoing:
buttonText = ""
case .decline:
buttonText = strings.Call_Decline
case .end:
buttonText = strings.Call_End
}
case let .enableCamera(isEnabled):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isEnabled),
image: .camera
)
buttonText = strings.Call_Camera
case .switchCamera:
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: false),
image: .flipCamera
)
buttonText = strings.Call_Flip
case let .soundOutput(value):
let image: CallControllerButtonItemNode.Content.Image
switch value {
case .speaker:
image = .speaker
case .bluetooth:
image = .bluetooth
}
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: false),
image: image
)
buttonText = strings.Call_Speaker
case let .mute(isMuted):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isMuted),
image: .mute
)
buttonText = strings.Call_Mute
}
var buttonDelay = 0.0
if animatePositionsWithDelay {
switch button.button.key {
case .enableCamera:
buttonDelay = 0.0
case .mute:
buttonDelay = delayIncrement * 1.0
case .switchCamera:
buttonDelay = delayIncrement * 2.0
case .end:
buttonDelay = delayIncrement * 3.0
default:
break
}
}
buttonTransition.updateFrame(node: buttonNode, frame: button.frame, delay: buttonDelay)
buttonNode.update(size: button.frame.size, content: buttonContent, text: buttonText, transition: buttonTransition)
if animateButtonIn {
buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
switch mode {
case .incoming:
for button in [self.declineButton, self.acceptButton] {
button.alpha = 1.0
}
for button in [self.muteButton, self.endButton, self.speakerButton, self.swichCameraButton] {
button.alpha = 0.0
}
case let .active(speakerMode, videoState):
for button in [self.muteButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
switch videoState {
case .active, .available:
for button in [self.speakerButton] {
if animated && !button.alpha.isZero {
button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
button.alpha = 0.0
}
for button in [self.swichCameraButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
case .notAvailable:
for button in [self.swichCameraButton] {
if animated && !button.alpha.isZero {
button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
button.alpha = 0.0
}
for button in [self.speakerButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
}
var animatingAcceptButton = false
if self.endButton.alpha.isZero {
if animated {
if !self.acceptButton.alpha.isZero {
animatingAcceptButton = true
self.endButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
self.acceptButton.animateRollTransition()
self.endButton.layer.animate(from: (CGFloat.pi * 5 / 4) as NSNumber, to: 0.0 as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3)
self.acceptButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.acceptButton.alpha = 0.0
strongSelf.acceptButton.layer.removeAnimation(forKey: "position")
strongSelf.acceptButton.layer.removeAnimation(forKey: "transform.rotation.z")
}
var removedKeys: [ButtonDescription.Key] = []
for (key, button) in self.buttonNodes {
if !validKeys.contains(key) {
removedKeys.append(key)
if animated {
if case .accept = key {
if let endButton = self.buttonNodes[.end] {
transition.updateFrame(node: button, frame: endButton.frame)
if let content = button.currentContent {
button.update(size: endButton.frame.size, content: content, text: button.currentText, transition: transition)
}
transition.updateTransformScale(node: button, scale: 0.1)
transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
button?.removeFromSupernode()
})
}
self.endButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
} else {
transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
button?.removeFromSupernode()
})
}
self.endButton.alpha = 1.0
} else {
button.removeFromSupernode()
}
if !self.declineButton.alpha.isZero {
if animated {
self.declineButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
self.declineButton.alpha = 0.0
}
if self.acceptButton.alpha.isZero && !animatingAcceptButton {
self.acceptButton.alpha = 0.0
}
self.speakerButton.isSelected = speakerMode == .speaker
self.speakerButton.isHidden = speakerMode == .none
let speakerButtonType: CallControllerButtonType
switch speakerMode {
case .none, .builtin, .speaker:
speakerButtonType = .speaker
case .headphones:
speakerButtonType = .bluetooth
case .bluetooth:
speakerButtonType = .bluetooth
}
self.speakerButton.updateType(speakerButtonType)
}
}
for key in removedKeys {
self.buttonNodes.removeValue(forKey: key)
}
}
@objc func buttonPressed(_ button: CallControllerButtonNode) {
if button === self.muteButton {
self.mute?()
} else if button === self.endButton || button === self.declineButton {
self.end?()
} else if button === self.speakerButton {
self.speaker?()
} else if button === self.acceptButton {
self.accept?()
} else if button === self.swichCameraButton {
self.rotateCamera?()
@objc func buttonPressed(_ button: CallControllerButtonItemNode) {
for (key, listButton) in self.buttonNodes {
if button === listButton {
switch key {
case .accept:
self.accept?()
case .end:
self.end?()
case .enableCamera:
self.toggleVideo?()
case .switchCamera:
self.rotateCamera?()
case .soundOutput:
self.speaker?()
case .mute:
self.mute?()
}
break
}
}
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let buttons = [
self.acceptButton,
self.declineButton,
self.muteButton,
self.endButton,
self.speakerButton,
self.swichCameraButton
]
for button in buttons {
if button.isHidden || button.alpha.isZero {
continue
}
for (_, button) in self.buttonNodes {
if let result = button.view.hitTest(self.view.convert(point, to: button.view), with: event) {
return result
}
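A call-site sketch for the reworked buttons node (illustrative; `presentationData` and `layout` are assumed to be available as they are in CallControllerNode). Buttons are now described declaratively per mode, so the caller only switches modes and lets the node add, move, or remove item nodes itself.

let buttonsNode = CallControllerButtonsNode(strings: presentationData.strings)

// Incoming ringing: small top row plus the decline/accept pair.
buttonsNode.updateMode(strings: presentationData.strings, mode: .incoming(speakerMode: .builtin, videoState: .notAvailable))
buttonsNode.updateLayout(strings: presentationData.strings, constrainedWidth: layout.size.width, transition: .immediate)

// Once connected, switching to .active animates the accept button into the end
// button's frame and staggers the remaining buttons using the per-key delays above.
buttonsNode.updateMode(strings: presentationData.strings, mode: .active(speakerMode: .builtin, videoState: .notAvailable))
buttonsNode.updateLayout(strings: presentationData.strings, constrainedWidth: layout.size.width, transition: .animated(duration: 0.3, curve: .spring))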

View File

@@ -56,34 +56,91 @@ private final class IncomingVideoNode: ASDisplayNode {
}
private final class OutgoingVideoNode: ASDisplayNode {
private let videoTransformContainer: ASDisplayNode
private let videoView: UIView
private let switchCameraButton: HighlightableButtonNode
private let switchCamera: () -> Void
private let buttonNode: HighlightTrackingButtonNode
init(videoView: UIView, switchCamera: @escaping () -> Void) {
private var effectView: UIVisualEffectView?
private var isBlurred: Bool = false
private var isExpanded: Bool = false
var tapped: (() -> Void)?
init(videoView: UIView) {
self.videoTransformContainer = ASDisplayNode()
self.videoTransformContainer.clipsToBounds = true
self.videoView = videoView
self.switchCameraButton = HighlightableButtonNode()
self.switchCamera = switchCamera
self.videoView.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
self.buttonNode = HighlightTrackingButtonNode()
super.init()
self.view.addSubview(self.videoView)
self.addSubnode(self.switchCameraButton)
self.switchCameraButton.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
self.videoTransformContainer.view.addSubview(self.videoView)
self.addSubnode(self.videoTransformContainer)
//self.addSubnode(self.buttonNode)
self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
}
@objc private func buttonPressed() {
self.switchCamera()
@objc func buttonPressed() {
self.tapped?()
}
func updateLayout(size: CGSize, isExpanded: Bool, transition: ContainedViewLayoutTransition) {
transition.updateFrame(view: self.videoView, frame: CGRect(origin: CGPoint(), size: size))
transition.updateCornerRadius(layer: self.videoView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
self.switchCameraButton.frame = CGRect(origin: CGPoint(), size: size)
let videoFrame = CGRect(origin: CGPoint(), size: size)
self.buttonNode.frame = videoFrame
self.isExpanded = isExpanded
let previousVideoFrame = self.videoTransformContainer.frame
self.videoTransformContainer.frame = videoFrame
if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
transition.animatePositionAdditive(node: self.videoTransformContainer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY))
transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / videoFrame.height)
}
self.videoView.frame = videoFrame
transition.updateCornerRadius(layer: self.videoTransformContainer.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
if let effectView = self.effectView {
transition.updateCornerRadius(layer: effectView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
}
}
func updateIsBlurred(isBlurred: Bool) {
if self.isBlurred == isBlurred {
return
}
self.isBlurred = isBlurred
if isBlurred {
if self.effectView == nil {
let effectView = UIVisualEffectView()
effectView.clipsToBounds = true
effectView.layer.cornerRadius = self.isExpanded ? 0.0 : 16.0
self.effectView = effectView
effectView.frame = self.videoView.frame
self.view.addSubview(effectView)
}
UIView.animate(withDuration: 0.3, animations: {
self.effectView?.effect = UIBlurEffect(style: .dark)
})
} else if let effectView = self.effectView {
UIView.animate(withDuration: 0.3, animations: {
effectView.effect = nil
})
}
}
}
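The blur toggle above is how the paused outgoing video is visualised locally. A hypothetical call-site sketch, mirroring the wiring added to CallControllerNode further down: the preview is blurred rather than removed, and the call engine is told to stop sending frames.

var isVideoPaused = false

func togglePause(outgoingVideoNode: OutgoingVideoNode, call: PresentationCall) {
    isVideoPaused = !isVideoPaused
    // Keep the preview on screen but blurred, so resuming is instant.
    outgoingVideoNode.updateIsBlurred(isBlurred: isVideoPaused)
    // New PresentationCall API introduced by this commit.
    call.setOutgoingVideoIsPaused(isVideoPaused)
}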
final class CallControllerNode: ASDisplayNode {
private enum VideoNodeCorner {
case topLeft
case topRight
case bottomLeft
case bottomRight
}
private let sharedContext: SharedAccountContext
private let account: Account
@@ -104,6 +161,8 @@ final class CallControllerNode: ASDisplayNode {
private var incomingVideoViewRequested: Bool = false
private var outgoingVideoNode: OutgoingVideoNode?
private var outgoingVideoViewRequested: Bool = false
private var outgoingVideoExplicitelyFullscreen: Bool = false
private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight
private let backButtonArrowNode: ASImageNode
private let backButtonNode: HighlightableButtonNode
private let statusNode: CallControllerStatusNode
@@ -121,6 +180,9 @@ final class CallControllerNode: ASDisplayNode {
var isMuted: Bool = false {
didSet {
self.buttonsNode.isMuted = self.isMuted
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
}
@@ -134,12 +196,15 @@ final class CallControllerNode: ASDisplayNode {
var beginAudioOuputSelection: (() -> Void)?
var acceptCall: (() -> Void)?
var endCall: (() -> Void)?
var toggleVideo: (() -> Void)?
var setIsVideoPaused: ((Bool) -> Void)?
var back: (() -> Void)?
var presentCallRating: ((CallId) -> Void)?
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
private var isUIHidden: Bool = false
private var isVideoPaused: Bool = false
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
self.sharedContext = sharedContext
self.account = account
@@ -229,7 +294,17 @@ final class CallControllerNode: ASDisplayNode {
}
self.buttonsNode.toggleVideo = { [weak self] in
self?.toggleVideo?()
guard let strongSelf = self else {
return
}
strongSelf.isVideoPaused = !strongSelf.isVideoPaused
strongSelf.outgoingVideoNode?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
self.buttonsNode.rotateCamera = { [weak self] in
@@ -302,17 +377,21 @@ final class CallControllerNode: ASDisplayNode {
return
}
if let incomingVideoView = incomingVideoView {
strongSelf.setCurrentAudioOutput?(.speaker)
let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView)
strongSelf.incomingVideoNode = incomingVideoNode
strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
strongSelf.statusNode.isHidden = true
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
}
}
})
}
default:
break
}
switch callState.videoState {
case .active, .activeOutgoing:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
@@ -322,13 +401,15 @@ final class CallControllerNode: ASDisplayNode {
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
guard let strongSelf = self else {
return
if let audioOutputState = strongSelf.audioOutputState, let currentOutput = audioOutputState.currentOutput {
switch currentOutput {
case .speaker, .builtin:
break
default:
strongSelf.setCurrentAudioOutput?(.speaker)
}
strongSelf.call.switchVideoCamera()
})
}
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView)
strongSelf.outgoingVideoNode = outgoingVideoNode
if let incomingVideoNode = strongSelf.incomingVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
@@ -336,38 +417,17 @@ final class CallControllerNode: ASDisplayNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
}
}
})
}
case .activeOutgoing:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
guard let strongSelf = self else {
return
}
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
outgoingVideoView.layer.cornerRadius = 16.0
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
/*outgoingVideoNode.tapped = {
guard let strongSelf = self else {
return
}
strongSelf.call.switchVideoCamera()
})
strongSelf.outgoingVideoNode = outgoingVideoNode
if let incomingVideoNode = strongSelf.incomingVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
} else {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
strongSelf.outgoingVideoExplicitelyFullscreen = !strongSelf.outgoingVideoExplicitelyFullscreen
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
}
}*/
}
})
}
@@ -438,7 +498,7 @@ final class CallControllerNode: ASDisplayNode {
if isReconnecting {
return strings.Call_StatusConnecting
} else {
return strings.Call_StatusOngoing(value).0
return value
}
}, timestamp)
if self.keyTextData?.0 != keyVisualHash {
@@ -501,43 +561,60 @@ final class CallControllerNode: ASDisplayNode {
}
}
private var buttonsTerminationMode: CallControllerButtonsMode?
private func updateButtonsMode() {
guard let callState = self.callState else {
return
}
var mode: CallControllerButtonsSpeakerMode = .none
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
switch currentOutput {
case .builtin:
mode = .builtin
case .speaker:
mode = .speaker
case .headphones:
mode = .headphones
case .port:
mode = .bluetooth
}
if availableOutputs.count <= 1 {
mode = .none
}
}
let mappedVideoState: CallControllerButtonsMode.VideoState
switch callState.videoState {
case .notAvailable:
mappedVideoState = .notAvailable
case .available:
mappedVideoState = .available(true)
case .active:
mappedVideoState = .active
case .activeOutgoing:
mappedVideoState = .active
}
switch callState.state {
case .ringing:
self.buttonsNode.updateMode(.incoming)
default:
var mode: CallControllerButtonsSpeakerMode = .none
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
switch currentOutput {
case .builtin:
mode = .builtin
case .speaker:
mode = .speaker
case .headphones:
mode = .headphones
case .port:
mode = .bluetooth
}
if availableOutputs.count <= 1 {
mode = .none
}
}
let mappedVideoState: CallControllerButtonsMode.VideoState
switch callState.videoState {
case .notAvailable:
mappedVideoState = .notAvailable
case .available:
mappedVideoState = .available(true)
case .active:
mappedVideoState = .active
case .activeOutgoing:
mappedVideoState = .active
}
self.buttonsNode.updateMode(.active(speakerMode: mode, videoState: mappedVideoState))
case .ringing:
let buttonsMode: CallControllerButtonsMode = .incoming(speakerMode: mode, videoState: mappedVideoState)
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
self.buttonsTerminationMode = buttonsMode
case .waiting, .requesting:
let buttonsMode: CallControllerButtonsMode = .outgoingRinging(speakerMode: mode, videoState: mappedVideoState)
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
self.buttonsTerminationMode = buttonsMode
case .active, .connecting, .reconnecting:
let buttonsMode: CallControllerButtonsMode = .active(speakerMode: mode, videoState: mappedVideoState)
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
self.buttonsTerminationMode = buttonsMode
case .terminating, .terminated:
if let buttonsTerminationMode = self.buttonsTerminationMode {
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsTerminationMode)
} else {
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: .active(speakerMode: mode, videoState: mappedVideoState))
}
}
}
@@ -568,9 +645,69 @@ final class CallControllerNode: ASDisplayNode {
}
}
private func calculatePreviewVideoRect(layout: ContainerViewLayout, navigationHeight: CGFloat) -> CGRect {
let buttonsHeight: CGFloat = 190.0
let buttonsOffset: CGFloat
if layout.size.width.isEqual(to: 320.0) {
if layout.size.height.isEqual(to: 480.0) {
buttonsOffset = 60.0
} else {
buttonsOffset = 73.0
}
} else {
buttonsOffset = 83.0
}
let buttonsOriginY: CGFloat
if self.isUIHidden {
buttonsOriginY = layout.size.height + 40.0 - 80.0
} else {
buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
}
let previewVideoSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
let previewVideoY: CGFloat
let previewVideoX: CGFloat
switch self.outgoingVideoNodeCorner {
case .topLeft:
previewVideoX = 20.0
if self.isUIHidden {
previewVideoY = layout.insets(options: .statusBar).top + 8.0
} else {
previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
}
case .topRight:
previewVideoX = layout.size.width - previewVideoSize.width - 20.0
if self.isUIHidden {
previewVideoY = layout.insets(options: .statusBar).top + 8.0
} else {
previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
}
case .bottomLeft:
previewVideoX = 20.0
if self.isUIHidden {
previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
} else {
previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
}
case .bottomRight:
previewVideoX = layout.size.width - previewVideoSize.width - 20.0
if self.isUIHidden {
previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
} else {
previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
}
}
return CGRect(origin: CGPoint(x: previewVideoX, y: previewVideoY), size: previewVideoSize)
}
func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
self.validLayout = (layout, navigationBarHeight)
let overlayAlpha: CGFloat = self.isUIHidden ? 0.0 : 1.0
transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -592,6 +729,9 @@ final class CallControllerNode: ASDisplayNode {
}
transition.updateFrame(node: self.backButtonNode, frame: CGRect(origin: CGPoint(x: 29.0, y: navigationOffset + 11.0), size: backSize))
transition.updateAlpha(node: self.backButtonArrowNode, alpha: overlayAlpha)
transition.updateAlpha(node: self.backButtonNode, alpha: overlayAlpha)
var statusOffset: CGFloat
if layout.metrics.widthClass == .regular && layout.metrics.heightClass == .regular {
if layout.size.height.isEqual(to: 1366.0) {
@@ -611,7 +751,7 @@ final class CallControllerNode: ASDisplayNode {
statusOffset += layout.safeInsets.top
let buttonsHeight: CGFloat = 75.0
let buttonsHeight: CGFloat = 190.0
let buttonsOffset: CGFloat
if layout.size.width.isEqual(to: 320.0) {
if layout.size.height.isEqual(to: 480.0) {
@@ -625,36 +765,60 @@ final class CallControllerNode: ASDisplayNode {
let statusHeight = self.statusNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
transition.updateAlpha(node: self.statusNode, alpha: overlayAlpha)
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
self.buttonsNode.updateLayout(strings: self.presentationData.strings, constrainedWidth: layout.size.width, transition: transition)
let buttonsOriginY: CGFloat
if self.isUIHidden {
buttonsOriginY = layout.size.height + 40.0 - 80.0
} else {
buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
}
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
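// Video layout: with no incoming video the outgoing node simply fills the screen. Otherwise
// the two nodes swap between the fullscreen rect and the corner preview rect depending on
// outgoingVideoExplicitelyFullscreen; a node whose frame is still empty gets its first frame
// without animation, and the preview frame is left untouched while it is being dragged
// (minimizedVideoDraggingPosition != nil).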
let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size)
let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)
var outgoingVideoTransition = transition
if let incomingVideoNode = self.incomingVideoNode {
if incomingVideoNode.frame.width.isZero, let outgoingVideoNode = self.outgoingVideoNode, !outgoingVideoNode.frame.width.isZero, !transition.isAnimated {
outgoingVideoTransition = .animated(duration: 0.3, curve: .easeInOut)
var incomingVideoTransition = transition
if incomingVideoNode.frame.isEmpty {
incomingVideoTransition = .immediate
}
incomingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
incomingVideoNode.updateLayout(size: layout.size)
if self.outgoingVideoExplicitelyFullscreen {
incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: previewVideoFrame)
} else {
incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: fullscreenVideoFrame)
}
incomingVideoNode.updateLayout(size: incomingVideoNode.frame.size)
}
if let outgoingVideoNode = self.outgoingVideoNode {
var outgoingVideoTransition = transition
if outgoingVideoNode.frame.isEmpty {
outgoingVideoTransition = .immediate
}
if self.incomingVideoNode == nil {
outgoingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: transition)
outgoingVideoNode.frame = fullscreenVideoFrame
outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: outgoingVideoTransition)
} else {
let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
let outgoingFrame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize)
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: outgoingFrame)
outgoingVideoNode.updateLayout(size: outgoingFrame.size, isExpanded: false, transition: outgoingVideoTransition)
if self.minimizedVideoDraggingPosition == nil {
if self.outgoingVideoExplicitelyFullscreen {
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: fullscreenVideoFrame)
} else {
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: previewVideoFrame)
}
outgoingVideoNode.updateLayout(size: outgoingVideoNode.frame.size, isExpanded: self.outgoingVideoExplicitelyFullscreen, transition: outgoingVideoTransition)
}
}
}
let keyTextSize = self.keyButtonNode.frame.size
transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
transition.updateAlpha(node: self.keyButtonNode, alpha: overlayAlpha)
if let debugNode = self.debugNode {
transition.updateFrame(node: debugNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -700,26 +864,33 @@ final class CallControllerNode: ASDisplayNode {
if let _ = self.keyPreviewNode {
self.backPressed()
} else {
let point = recognizer.location(in: recognizer.view)
if self.statusNode.frame.contains(point) {
if self.easyDebugAccess {
self.presentDebugNode()
} else {
let timestamp = CACurrentMediaTime()
if self.debugTapCounter.0 < timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 = 0
}
if self.debugTapCounter.0 >= timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 += 1
}
if self.debugTapCounter.1 >= 10 {
self.debugTapCounter.1 = 0
if self.incomingVideoNode != nil || self.outgoingVideoNode != nil {
self.isUIHidden = !self.isUIHidden
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
} else {
let point = recognizer.location(in: recognizer.view)
if self.statusNode.frame.contains(point) {
if self.easyDebugAccess {
self.presentDebugNode()
} else {
let timestamp = CACurrentMediaTime()
if self.debugTapCounter.0 < timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 = 0
}
if self.debugTapCounter.0 >= timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 += 1
}
if self.debugTapCounter.1 >= 10 {
self.debugTapCounter.1 = 0
self.presentDebugNode()
}
}
}
}
@@ -749,36 +920,170 @@ final class CallControllerNode: ASDisplayNode {
}
}
@objc func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .changed:
let offset = recognizer.translation(in: self.view).y
var bounds = self.bounds
bounds.origin.y = -offset
self.bounds = bounds
case .ended:
let velocity = recognizer.velocity(in: self.view).y
if abs(velocity) < 100.0 {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
private var minimizedVideoInitialPosition: CGPoint?
private var minimizedVideoDraggingPosition: CGPoint?
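// Maps the release position (and fling velocity) of the minimized preview to the corner it
// should settle in. The nearest corner is chosen first; if the fling speed exceeds 500 pt/s,
// the fling angle (degrees, counter-clockwise, derived from atan2) can override it, e.g. a
// fast rightward-and-upward fling from the top-left corner snaps the preview to the
// top-right corner. shouldHide is computed along the way but is currently unused.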
private func nodeLocationForPosition(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> VideoNodeCorner {
let layoutInsets = UIEdgeInsets()
var result = CGPoint()
if position.x < layout.size.width / 2.0 {
result.x = 0.0
} else {
result.x = 1.0
}
if position.y < layoutInsets.top + (layout.size.height - layoutInsets.bottom - layoutInsets.top) / 2.0 {
result.y = 0.0
} else {
result.y = 1.0
}
let currentPosition = result
let angleEpsilon: CGFloat = 30.0
var shouldHide = false
if (velocity.x * velocity.x + velocity.y * velocity.y) >= 500.0 * 500.0 {
let x = velocity.x
let y = velocity.y
var angle = atan2(y, x) * 180.0 / CGFloat.pi * -1.0
if angle < 0.0 {
angle += 360.0
}
if currentPosition.x.isZero && currentPosition.y.isZero {
if ((angle > 0 && angle < 90 - angleEpsilon) || angle > 360 - angleEpsilon) {
result.x = 1.0
result.y = 0.0
} else if (angle > 180 + angleEpsilon && angle < 270 + angleEpsilon) {
result.x = 0.0
result.y = 1.0
} else if (angle > 270 + angleEpsilon && angle < 360 - angleEpsilon) {
result.x = 1.0
result.y = 1.0
} else {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height: bounds.height)
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in
self?.dismissedInteractively?()
})
shouldHide = true
}
} else if !currentPosition.x.isZero && currentPosition.y.isZero {
if (angle > 90 + angleEpsilon && angle < 180 + angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (angle > 270 - angleEpsilon && angle < 360 - angleEpsilon) {
result.x = 1.0
result.y = 1.0
}
else if (angle > 180 + angleEpsilon && angle < 270 - angleEpsilon) {
result.x = 0.0
result.y = 1.0
}
else {
shouldHide = true
}
} else if currentPosition.x.isZero && !currentPosition.y.isZero {
if (angle > 90 - angleEpsilon && angle < 180 - angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (angle < angleEpsilon || angle > 270 + angleEpsilon) {
result.x = 1.0
result.y = 1.0
}
else if (angle > angleEpsilon && angle < 90 - angleEpsilon) {
result.x = 1.0
result.y = 0.0
}
else if (!shouldHide) {
shouldHide = true
}
} else if !currentPosition.x.isZero && !currentPosition.y.isZero {
if (angle > angleEpsilon && angle < 90 + angleEpsilon) {
result.x = 1.0
result.y = 0.0
}
else if (angle > 180 - angleEpsilon && angle < 270 - angleEpsilon) {
result.x = 0.0
result.y = 1.0
}
else if (angle > 90 + angleEpsilon && angle < 180 - angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (!shouldHide) {
shouldHide = true
}
}
}
if result.x.isZero {
if result.y.isZero {
return .topLeft
} else {
return .bottomLeft
}
} else {
if result.y.isZero {
return .topRight
} else {
return .bottomRight
}
}
}
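// The pan gesture now has two modes: if the touch starts on the minimized outgoing preview
// (only shown while incoming video is active), the preview follows the finger and springs
// into the corner returned by nodeLocationForPosition on release; otherwise the previous
// behaviour applies, dragging the whole call UI vertically and dismissing it on a fast fling.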
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .began:
let location = recognizer.location(in: self.view)
//let translation = recognizer.translation(in: self.view)
//location.x += translation.x
//location.y += translation.y
if let _ = self.incomingVideoNode, let outgoingVideoNode = self.outgoingVideoNode, outgoingVideoNode.frame.contains(location) {
self.minimizedVideoInitialPosition = outgoingVideoNode.position
} else {
self.minimizedVideoInitialPosition = nil
}
case .changed:
if let outgoingVideoNode = self.outgoingVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
let translation = recognizer.translation(in: self.view)
let minimizedVideoDraggingPosition = CGPoint(x: minimizedVideoInitialPosition.x + translation.x, y: minimizedVideoInitialPosition.y + translation.y)
self.minimizedVideoDraggingPosition = minimizedVideoDraggingPosition
outgoingVideoNode.position = minimizedVideoDraggingPosition
} else {
let offset = recognizer.translation(in: self.view).y
var bounds = self.bounds
bounds.origin.y = -offset
self.bounds = bounds
}
case .cancelled, .ended:
if let outgoingVideoNode = self.outgoingVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition {
self.minimizedVideoInitialPosition = nil
self.minimizedVideoDraggingPosition = nil
if let (layout, navigationHeight) = self.validLayout {
self.outgoingVideoNodeCorner = self.nodeLocationForPosition(layout: layout, position: minimizedVideoDraggingPosition, velocity: recognizer.velocity(in: self.view))
let videoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationHeight)
outgoingVideoNode.frame = videoFrame
outgoingVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil)
}
} else {
let velocity = recognizer.velocity(in: self.view).y
if abs(velocity) < 100.0 {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
} else {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height: bounds.height)
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in
self?.dismissedInteractively?()
})
}
}
case .cancelled:
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
default:
break
}

View File

@@ -190,7 +190,7 @@ public final class PresentationCallImpl: PresentationCall {
private var sessionStateDisposable: Disposable?
private let statePromise = ValuePromise<PresentationCallState>(PresentationCallState(state: .waiting, videoState: .notAvailable, remoteVideoState: .inactive), ignoreRepeated: true)
private let statePromise = ValuePromise<PresentationCallState>()
public var state: Signal<PresentationCallState, NoError> {
return self.statePromise.get()
}
@@ -264,7 +264,9 @@ public final class PresentationCallImpl: PresentationCall {
self.isVideo = startWithVideo
if self.isVideo {
self.videoCapturer = OngoingCallVideoCapturer()
self.statePromise.set(PresentationCallState(state: .waiting, videoState: .activeOutgoing, remoteVideoState: .inactive))
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .activeOutgoing, remoteVideoState: .inactive))
} else {
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive))
}
self.serializedData = serializedData
@@ -457,7 +459,7 @@ public final class PresentationCallImpl: PresentationCall {
switch sessionState.state {
case .ringing:
presentationState = PresentationCallState(state: .ringing, videoState: .notAvailable, remoteVideoState: .inactive)
presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
if previous == nil || previousControl == nil {
if !self.reportedIncomingCall {
self.reportedIncomingCall = true
@@ -520,7 +522,7 @@ public final class PresentationCallImpl: PresentationCall {
presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
}
} else {
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: .notAvailable, remoteVideoState: .inactive)
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
}
}
@@ -536,6 +538,7 @@ public final class PresentationCallImpl: PresentationCall {
let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName)
self.ongoingContext = ongoingContext
ongoingContext.setIsMuted(self.isMutedValue)
self.debugInfoValue.set(ongoingContext.debugInfo())
@@ -729,6 +732,10 @@ public final class PresentationCallImpl: PresentationCall {
self.ongoingContext?.setEnableVideo(value)
}
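// Pausing outgoing video does not touch the call or its video track; it only disables the
// local capturer, which then drops frames instead of tearing down the capture session (see
// the capturer changes below), so video can resume without a restart.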
public func setOutgoingVideoIsPaused(_ isPaused: Bool) {
self.videoCapturer?.setIsVideoEnabled(!isPaused)
}
public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
guard self.currentAudioOutputValue != output else {
return

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_accept.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_video.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_decline.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallMuteIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallMuteIcon@3x.png",
"scale" : "3x"
"filename" : "ic_calls_mute.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,22 +0,0 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallPhoneIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallPhoneIcon@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallRouteSpeaker@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallRouteSpeaker@3x.png",
"scale" : "3x"
"filename" : "ic_calls_speaker.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallSpeakerIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallSpeakerIcon@3x.png",
"scale" : "3x"
"filename" : "ic_calls_speaker.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}
}

View File

@@ -1,7 +1,7 @@
{
"images" : [
{
"filename" : "Video.pdf",
"filename" : "ic_calls_cameraflip.pdf",
"idiom" : "universal"
}
],

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_tlogo.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -305,6 +305,10 @@ public final class OngoingCallVideoCapturer {
public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.impl.makeOutgoingVideoView(completion)
}
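// Exposes the capturer's pause switch to Swift. PresentationCallImpl drives it with the
// inverted flag, as added above:
//
//   public func setOutgoingVideoIsPaused(_ isPaused: Bool) {
//       self.videoCapturer?.setIsVideoEnabled(!isPaused)
//   }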
public func setIsVideoEnabled(_ value: Bool) {
self.impl.setIsVideoEnabled(value)
}
}
extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {

View File

@@ -13,6 +13,8 @@ namespace TGVOIP_NAMESPACE {
class VideoCapturerInterface {
public:
virtual ~VideoCapturerInterface();
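// New pure-virtual hook that pauses/resumes frame delivery; each platform capturer
// (VideoCapturerInterfaceImpl on iOS below) must implement it.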
virtual void setIsEnabled(bool isEnabled) = 0;
};
void configurePlatformAudio();

View File

@@ -112,6 +112,10 @@
[_videoCapturer stopCapture];
}
- (void)setIsEnabled:(bool)isEnabled {
[_videoCapturer setIsEnabled:isEnabled];
}
@end
@interface VideoCapturerInterfaceImplHolder : NSObject
@@ -153,6 +157,16 @@ public:
});
}
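// Forwards the flag to the platform capturer on the main queue through the holder object;
// the nil check covers the case where the capturer reference has not been created yet.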
virtual void setIsEnabled(bool isEnabled) {
VideoCapturerInterfaceImplHolder *implReference = _implReference;
dispatch_async(dispatch_get_main_queue(), ^{
if (implReference.reference != nil) {
VideoCapturerInterfaceImplReference *reference = (__bridge VideoCapturerInterfaceImplReference *)implReference.reference;
[reference setIsEnabled:isEnabled];
}
});
}
private:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
VideoCapturerInterfaceImplHolder *_implReference;

View File

@@ -138,6 +138,7 @@ public:
virtual ~TgVoipVideoCaptureInterface();
virtual void switchCamera() = 0;
virtual void setIsVideoEnabled(bool isVideoEnabled) = 0;
virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
};

View File

@@ -17,6 +17,7 @@
- (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps;
- (void)stopCapture;
- (void)setIsEnabled:(bool)isEnabled;
@end

View File

@@ -39,6 +39,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
UIDeviceOrientation _orientation;
void (^_isActiveUpdated)(bool);
bool _isActiveValue;
bool _inForegroundValue;
bool _isPaused;
}
@end
@@ -49,6 +52,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
self = [super init];
if (self != nil) {
_source = source;
_isActiveValue = true;
_inForegroundValue = true;
_isPaused = false;
_isActiveUpdated = [isActiveUpdated copy];
if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
@@ -124,6 +130,11 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
[self stopCaptureWithCompletionHandler:nil];
}
- (void)setIsEnabled:(bool)isEnabled {
_isPaused = !isEnabled;
[self updateIsActiveValue];
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps
@@ -253,7 +264,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
if (!_isPaused) {
getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
@@ -316,15 +329,23 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
_hasRetriedOnFatalError = NO;
}];
if (_isActiveUpdated) {
_isActiveUpdated(true);
}
_inForegroundValue = true;
[self updateIsActiveValue];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
RTCLog(@"Capture session stopped.");
if (_isActiveUpdated) {
_isActiveUpdated(false);
_inForegroundValue = false;
[self updateIsActiveValue];
}
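// Collapses the two inputs (_inForegroundValue and _isPaused) into a single "is active"
// flag and invokes the _isActiveUpdated callback only when that flag actually changes.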
- (void)updateIsActiveValue {
bool isActive = _inForegroundValue && !_isPaused;
if (isActive != _isActiveValue) {
_isActiveValue = isActive;
if (_isActiveUpdated) {
_isActiveUpdated(_isActiveValue);
}
}
}

View File

@@ -18,6 +18,7 @@ public:
~TgVoipVideoCaptureInterfaceObject();
void switchCamera();
void setIsVideoEnabled(bool isVideoEnabled);
void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated);
@@ -29,6 +30,7 @@ private:
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
std::function<void (bool)> _isActiveUpdated;
bool _useFrontCamera;
bool _isVideoEnabled;
};
class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
@@ -37,6 +39,7 @@ public:
virtual ~TgVoipVideoCaptureInterfaceImpl();
virtual void switchCamera();
virtual void setIsVideoEnabled(bool isVideoEnabled);
virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
public:

View File

@@ -10,6 +10,7 @@ namespace TGVOIP_NAMESPACE {
TgVoipVideoCaptureInterfaceObject::TgVoipVideoCaptureInterfaceObject() {
_useFrontCamera = true;
_isVideoEnabled = true;
_videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
//this should outlive the capturer
_videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
@@ -33,6 +34,13 @@ void TgVoipVideoCaptureInterfaceObject::switchCamera() {
}
});
}
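// Caches the last value so repeated calls with the same state do not hit the platform
// capturer; the actual frame pausing happens in VideoCapturerInterface::setIsEnabled.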
void TgVoipVideoCaptureInterfaceObject::setIsVideoEnabled(bool isVideoEnabled) {
if (_isVideoEnabled != isVideoEnabled) {
_isVideoEnabled = isVideoEnabled;
_videoCapturer->setIsEnabled(isVideoEnabled);
}
}
void TgVoipVideoCaptureInterfaceObject::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
if (_currentSink != nullptr) {
@@ -66,6 +74,12 @@ void TgVoipVideoCaptureInterfaceImpl::switchCamera() {
impl->switchCamera();
});
}
void TgVoipVideoCaptureInterfaceImpl::setIsVideoEnabled(bool isVideoEnabled) {
_impl->perform([isVideoEnabled](TgVoipVideoCaptureInterfaceObject *impl) {
impl->setIsVideoEnabled(isVideoEnabled);
});
}
void TgVoipVideoCaptureInterfaceImpl::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {

View File

@@ -83,6 +83,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (instancetype _Nonnull)init;
- (void)switchVideoCamera;
- (void)setIsVideoEnabled:(bool)isVideoEnabled;
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;

View File

@@ -41,6 +41,10 @@ using namespace TGVOIP_NAMESPACE;
_interface->switchCamera();
}
- (void)setIsVideoEnabled:(bool)isVideoEnabled {
_interface->setIsVideoEnabled(isVideoEnabled);
}
- (std::shared_ptr<TgVoipVideoCaptureInterface>)getInterface {
return _interface;
}