Merge branch 'video-ui'

This commit is contained in:
Ali 2020-07-03 23:36:45 +04:00
commit 2bb0293fb4
52 changed files with 1397 additions and 800 deletions


@@ -90,6 +90,7 @@ public protocol PresentationCall: class {
func toggleIsMuted()
func setIsMuted(_ value: Bool)
func setEnableVideo(_ value: Bool)
func setOutgoingVideoIsPaused(_ isPaused: Bool)
func switchVideoCamera()
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func debugInfo() -> Signal<(String, String), NoError>
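Note: the new setOutgoingVideoIsPaused(_:) requirement complements setEnableVideo(_:) rather than replacing it: video stays enabled while the outgoing track is paused. A minimal caller-side sketch (the call parameter and the toggle helper are illustrative, not part of this change):

var outgoingVideoIsPaused = false

func togglePauseOutgoingVideo(call: PresentationCall) {
    // Pauses/resumes the outgoing video track without tearing the call's video state down.
    outgoingVideoIsPaused = !outgoingVideoIsPaused
    call.setOutgoingVideoIsPaused(outgoingVideoIsPaused)
}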


@@ -238,14 +238,14 @@ public extension CALayer {
self.animate(from: NSValue(cgPoint: from), to: NSValue(cgPoint: to), keyPath: "position", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
}
func animateBounds(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
func animateBounds(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
if from == to && !force {
if let completion = completion {
completion(true)
}
return
}
self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
}
func animateBoundsOriginXAdditive(from: CGFloat, to: CGFloat, duration: Double, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
@@ -268,7 +268,7 @@ public extension CALayer {
self.animateKeyframes(values: values.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position")
}
func animateFrame(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
func animateFrame(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
if from == to && !force {
if let completion = completion {
completion(true)
@@ -302,14 +302,14 @@ public extension CALayer {
toBounds = CGRect()
}
self.animatePosition(from: fromPosition, to: toPosition, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
self.animatePosition(from: fromPosition, to: toPosition, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
if !value {
interrupted = true
}
completedPosition = true
partialCompletion()
})
self.animateBounds(from: fromBounds, to: toBounds, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
self.animateBounds(from: fromBounds, to: toBounds, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
if !value {
interrupted = true
}
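Note: the only change in these helpers is a delay: parameter (default 0.0) threaded through to the underlying animate(...) call, so existing call sites compile unchanged. A hedged sketch of the staggered animation this enables (the three layers and the geometry are illustrative):

let delayIncrement = 0.015
for (index, layer) in [firstLayer, secondLayer, thirdLayer].enumerated() {
    // Each layer starts 15 ms after the previous one; with delay 0.0 the behavior is unchanged.
    layer.animateFrame(
        from: layer.frame,
        to: layer.frame.offsetBy(dx: 0.0, dy: -40.0),
        duration: 0.3,
        delay: Double(index) * delayIncrement,
        timingFunction: kCAMediaTimingFunctionSpring)
}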


@@ -63,7 +63,7 @@ public enum ContainedViewLayoutTransition {
}
public extension ContainedViewLayoutTransition {
func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) {
func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
if node.frame.equalTo(frame) && !force {
completion?(true)
} else {
@@ -81,7 +81,7 @@ public extension ContainedViewLayoutTransition {
previousFrame = node.frame
}
node.frame = frame
node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, delay: delay, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
if let completion = completion {
completion(result)
}
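Note: updateFrame now forwards the new delay: straight into CALayer.animateFrame, so staggering can be expressed per node at the transition level. An illustrative one-liner (buttonNode and targetFrame are stand-ins):

// Sketch: with an .animated transition the frame change starts 45 ms late.
transition.updateFrame(node: buttonNode, frame: targetFrame, delay: 0.045)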


@@ -178,8 +178,8 @@ public final class CallController: ViewController {
let _ = self?.call.hangUp()
}
self.controllerNode.toggleVideo = { [weak self] in
let _ = self?.call.setEnableVideo(true)
self.controllerNode.setIsVideoPaused = { [weak self] isPaused in
self?.call.setOutgoingVideoIsPaused(isPaused)
}
self.controllerNode.back = { [weak self] in


@@ -5,245 +5,218 @@ import AsyncDisplayKit
import SwiftSignalKit
import AppBundle
enum CallControllerButtonType {
case mute
case end
case accept
case speaker
case bluetooth
case switchCamera
}
private let labelFont = Font.regular(13.0)
private let buttonSize = CGSize(width: 75.0, height: 75.0)
private func generateEmptyButtonImage(icon: UIImage?, strokeColor: UIColor?, fillColor: UIColor, knockout: Bool = false, angle: CGFloat = 0.0) -> UIImage? {
return generateImage(buttonSize, contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.copy)
if let strokeColor = strokeColor {
context.setFillColor(strokeColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(x: 1.5, y: 1.5), size: CGSize(width: size.width - 3.0, height: size.height - 3.0)))
} else {
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.height)))
}
if let icon = icon {
if !angle.isZero {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.rotate(by: angle)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
struct Content: Equatable {
enum Appearance: Equatable {
enum Color {
case red
case green
}
let imageSize = icon.size
let imageRect = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floor((size.width - imageSize.height) / 2.0)), size: imageSize)
if knockout {
context.setBlendMode(.copy)
context.clip(to: imageRect, mask: icon.cgImage!)
context.setFillColor(UIColor.clear.cgColor)
context.fill(imageRect)
} else {
context.setBlendMode(.normal)
context.draw(icon.cgImage!, in: imageRect)
}
}
})
}
private func generateFilledButtonImage(color: UIColor, icon: UIImage?, angle: CGFloat = 0.0) -> UIImage? {
return generateImage(buttonSize, contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.setBlendMode(.normal)
context.setFillColor(color.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
if let icon = icon {
if !angle.isZero {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.rotate(by: angle)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
}
context.draw(icon.cgImage!, in: CGRect(origin: CGPoint(x: floor((size.width - icon.size.width) / 2.0), y: floor((size.height - icon.size.height) / 2.0)), size: icon.size))
}
})
}
private let emptyStroke = UIColor(white: 1.0, alpha: 0.8)
private let emptyHighlightedFill = UIColor(white: 1.0, alpha: 0.3)
private let invertedFill = UIColor(white: 1.0, alpha: 1.0)
private let labelFont = Font.regular(14.5)
final class CallControllerButtonNode: HighlightTrackingButtonNode {
private var type: CallControllerButtonType
private var regularImage: UIImage?
private var highlightedImage: UIImage?
private var filledImage: UIImage?
private let backgroundNode: ASImageNode
private let labelNode: ASTextNode?
init(type: CallControllerButtonType, label: String?) {
self.type = type
self.backgroundNode = ASImageNode()
self.backgroundNode.isLayerBacked = true
self.backgroundNode.displayWithoutProcessing = false
self.backgroundNode.displaysAsynchronously = false
if let label = label {
let labelNode = ASTextNode()
labelNode.attributedText = NSAttributedString(string: label, font: labelFont, textColor: .white)
self.labelNode = labelNode
} else {
self.labelNode = nil
case blurred(isFilled: Bool)
case color(Color)
}
var regularImage: UIImage?
var highlightedImage: UIImage?
var filledImage: UIImage?
switch type {
case .mute:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .accept:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
case .end:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
case .speaker:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .bluetooth:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .switchCamera:
let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
enum Image {
case camera
case mute
case flipCamera
case bluetooth
case speaker
case accept
case end
}
self.regularImage = regularImage
self.highlightedImage = highlightedImage
self.filledImage = filledImage
var appearance: Appearance
var image: Image
}
super.init()
private let contentContainer: ASDisplayNode
private let effectView: UIVisualEffectView
private let contentNode: ASImageNode
private let overlayHighlightNode: ASImageNode
private let textNode: ImmediateTextNode
self.addSubnode(self.backgroundNode)
private let largeButtonSize: CGFloat = 72.0
if let labelNode = self.labelNode {
self.addSubnode(labelNode)
}
private(set) var currentContent: Content?
private(set) var currentText: String = ""
self.backgroundNode.image = regularImage
self.currentImage = regularImage
init() {
self.contentContainer = ASDisplayNode()
self.effectView = UIVisualEffectView()
self.effectView.effect = UIBlurEffect(style: .light)
self.effectView.layer.cornerRadius = self.largeButtonSize / 2.0
self.effectView.clipsToBounds = true
self.effectView.isUserInteractionEnabled = false
self.contentNode = ASImageNode()
self.contentNode.isUserInteractionEnabled = false
self.overlayHighlightNode = ASImageNode()
self.overlayHighlightNode.isUserInteractionEnabled = false
self.overlayHighlightNode.alpha = 0.0
self.textNode = ImmediateTextNode()
self.textNode.displaysAsynchronously = false
self.textNode.isUserInteractionEnabled = false
super.init(pointerStyle: nil)
self.addSubnode(self.contentContainer)
self.contentContainer.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.addSubnode(self.textNode)
self.contentContainer.view.addSubview(self.effectView)
self.contentContainer.addSubnode(self.contentNode)
self.contentContainer.addSubnode(self.overlayHighlightNode)
self.highligthedChanged = { [weak self] highlighted in
if let strongSelf = self {
strongSelf.internalHighlighted = highlighted
strongSelf.updateState(highlighted: highlighted, selected: strongSelf.isSelected)
guard let strongSelf = self else {
return
}
}
}
private var internalHighlighted = false
override var isSelected: Bool {
didSet {
self.updateState(highlighted: self.internalHighlighted, selected: self.isSelected)
}
}
private var currentImage: UIImage?
private func updateState(highlighted: Bool, selected: Bool) {
let image: UIImage?
if selected {
image = self.filledImage
} else if highlighted {
image = self.highlightedImage
} else {
image = self.regularImage
}
if self.currentImage !== image {
let currentContents = self.backgroundNode.layer.contents
self.backgroundNode.layer.removeAnimation(forKey: "contents")
if let currentContents = currentContents, let image = image {
self.backgroundNode.image = image
self.backgroundNode.layer.animate(from: currentContents as AnyObject, to: image.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: image === self.currentImage || image === self.filledImage ? 0.25 : 0.15)
if highlighted {
strongSelf.overlayHighlightNode.alpha = 1.0
} else {
self.backgroundNode.image = image
strongSelf.overlayHighlightNode.alpha = 0.0
strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
self.currentImage = image
}
}
func updateType(_ type: CallControllerButtonType) {
if self.type == type {
return
}
self.type = type
var regularImage: UIImage?
var highlightedImage: UIImage?
var filledImage: UIImage?
func update(size: CGSize, content: Content, text: String, transition: ContainedViewLayoutTransition) {
let scaleFactor = size.width / self.largeButtonSize
switch type {
case .mute:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .accept:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
case .end:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
case .speaker:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .bluetooth:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .switchCamera:
let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
if self.currentContent != content {
self.currentContent = content
switch content.appearance {
case .blurred:
self.effectView.isHidden = false
case .color:
self.effectView.isHidden = true
}
let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
var fillColor: UIColor = .clear
var drawOverMask = false
context.setBlendMode(.normal)
var imageScale: CGFloat = 1.0
switch content.appearance {
case let .blurred(isFilled):
if isFilled {
fillColor = .white
drawOverMask = true
context.setBlendMode(.copy)
}
let smallButtonSize: CGFloat = 60.0
imageScale = self.largeButtonSize / smallButtonSize
case let .color(color):
switch color {
case .red:
fillColor = UIColor(rgb: 0xd92326)
case .green:
fillColor = UIColor(rgb: 0x74db58)
}
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
var image: UIImage?
switch content.image {
case .camera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white)
case .mute:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white)
case .flipCamera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
case .bluetooth:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white)
case .speaker:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white)
case .accept:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white)
case .end:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white)
}
if let image = image {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: imageScale, y: imageScale)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
let imageRect = CGRect(origin: CGPoint(x: floor((size.width - image.size.width) / 2.0), y: floor((size.height - image.size.height) / 2.0)), size: image.size)
if drawOverMask {
context.clip(to: imageRect, mask: image.cgImage!)
context.setBlendMode(.copy)
context.setFillColor(UIColor.clear.cgColor)
context.fill(CGRect(origin: CGPoint(), size: size))
} else {
context.draw(image.cgImage!, in: imageRect)
}
}
})
if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image {
self.contentNode.image = contentImage
self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
} else {
self.contentNode.image = contentImage
}
self.overlayHighlightNode.image = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
let fillColor: UIColor
context.setBlendMode(.normal)
switch content.appearance {
case let .blurred(isFilled):
if isFilled {
fillColor = UIColor(white: 0.0, alpha: 0.1)
} else {
fillColor = UIColor(white: 1.0, alpha: 0.2)
}
case let .color(color):
switch color {
case .red:
fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
case .green:
fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
}
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
})
}
self.regularImage = regularImage
self.highlightedImage = highlightedImage
self.filledImage = filledImage
transition.updatePosition(node: self.contentContainer, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0))
transition.updateSublayerTransformScale(node: self.contentContainer, scale: scaleFactor)
self.updateState(highlighted: self.isHighlighted, selected: self.isSelected)
}
func animateRollTransition() {
self.backgroundNode.layer.animate(from: 0.0 as NSNumber, to: (-CGFloat.pi * 5 / 4) as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3, removeOnCompletion: false)
self.labelNode?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
}
override func layout() {
super.layout()
let size = self.bounds.size
self.backgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.width))
if let labelNode = self.labelNode {
let labelSize = labelNode.measure(CGSize(width: 200.0, height: 100.0))
labelNode.frame = CGRect(origin: CGPoint(x: floor((size.width - labelSize.width) / 2.0), y: 81.0), size: labelSize)
if self.currentText != text {
self.textNode.attributedText = NSAttributedString(string: text, font: labelFont, textColor: .white)
}
let textSize = self.textNode.updateLayout(CGSize(width: 150.0, height: 100.0))
let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: size.height + 5.0), size: textSize)
if self.currentText.isEmpty {
self.textNode.frame = textFrame
if transition.isAnimated {
self.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
} else {
transition.updateFrameAdditiveToCenter(node: self.textNode, frame: textFrame)
}
self.currentText = text
}
}
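Note: the rewritten button node is driven entirely through update(size:content:text:transition:); the circular background is re-rendered only when Content actually changes, and the label moves with an additive transition. A hedged construction sketch (size and title are illustrative):

let muteButton = CallControllerButtonItemNode()
muteButton.update(
    size: CGSize(width: 60.0, height: 60.0),
    content: CallControllerButtonItemNode.Content(
        appearance: .blurred(isFilled: true), // isFilled renders the white knockout state
        image: .mute),
    text: "Mute",
    transition: .immediate)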


@@ -22,27 +22,66 @@ enum CallControllerButtonsMode: Equatable {
}
case active(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case incoming
case incoming(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
}
private enum ButtonDescription: Equatable {
enum Key: Hashable {
case accept
case end
case enableCamera
case switchCamera
case soundOutput
case mute
}
enum SoundOutput {
case builtin
case speaker
case bluetooth
}
enum EndType {
case outgoing
case decline
case end
}
case accept
case end(EndType)
case enableCamera(Bool)
case switchCamera
case soundOutput(SoundOutput)
case mute(Bool)
var key: Key {
switch self {
case .accept:
return .accept
case .end:
return .end
case .enableCamera:
return .enableCamera
case .switchCamera:
return .switchCamera
case .soundOutput:
return .soundOutput
case .mute:
return .mute
}
}
}
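Note: Key deliberately drops the associated values, so two descriptions that differ only in state resolve to the same node. A quick illustration:

let wasMuted: ButtonDescription = .mute(true)
let nowUnmuted: ButtonDescription = .mute(false)
// Both share Key.mute, so the layout pass below reuses one node and animates the content change.
assert(wasMuted.key == nowUnmuted.key)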
final class CallControllerButtonsNode: ASDisplayNode {
private let acceptButton: CallControllerButtonNode
private let declineButton: CallControllerButtonNode
private let muteButton: CallControllerButtonNode
private let endButton: CallControllerButtonNode
private let speakerButton: CallControllerButtonNode
private let swichCameraButton: CallControllerButtonNode
private var buttonNodes: [ButtonDescription.Key: CallControllerButtonItemNode] = [:]
private var mode: CallControllerButtonsMode?
private var validLayout: CGFloat?
var isMuted = false {
didSet {
self.muteButton.isSelected = self.isMuted
}
}
var isMuted = false
var isCameraPaused = false
var accept: (() -> Void)?
var mute: (() -> Void)?
@@ -52,57 +52,30 @@ final class CallControllerButtonsNode: ASDisplayNode {
var rotateCamera: (() -> Void)?
init(strings: PresentationStrings) {
self.acceptButton = CallControllerButtonNode(type: .accept, label: strings.Call_Accept)
self.acceptButton.alpha = 0.0
self.declineButton = CallControllerButtonNode(type: .end, label: strings.Call_Decline)
self.declineButton.alpha = 0.0
self.muteButton = CallControllerButtonNode(type: .mute, label: nil)
self.muteButton.alpha = 0.0
self.endButton = CallControllerButtonNode(type: .end, label: nil)
self.endButton.alpha = 0.0
self.speakerButton = CallControllerButtonNode(type: .speaker, label: nil)
self.speakerButton.alpha = 0.0
self.swichCameraButton = CallControllerButtonNode(type: .switchCamera, label: nil)
self.swichCameraButton.alpha = 0.0
super.init()
self.addSubnode(self.acceptButton)
self.addSubnode(self.declineButton)
self.addSubnode(self.muteButton)
self.addSubnode(self.endButton)
self.addSubnode(self.speakerButton)
self.addSubnode(self.swichCameraButton)
self.acceptButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.declineButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.muteButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.endButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.speakerButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
self.swichCameraButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
}
func updateLayout(constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
let previousLayout = self.validLayout
func updateLayout(strings: PresentationStrings, constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
self.validLayout = constrainedWidth
if let mode = self.mode, previousLayout != self.validLayout {
self.updateButtonsLayout(mode: mode, width: constrainedWidth, animated: false)
if let mode = self.mode {
self.updateButtonsLayout(strings: strings, mode: mode, width: constrainedWidth, animated: transition.isAnimated)
}
}
func updateMode(_ mode: CallControllerButtonsMode) {
func updateMode(strings: PresentationStrings, mode: CallControllerButtonsMode) {
if self.mode != mode {
let previousMode = self.mode
self.mode = mode
if let validLayout = self.validLayout {
self.updateButtonsLayout(mode: mode, width: validLayout, animated: previousMode != nil)
self.updateButtonsLayout(strings: strings, mode: mode, width: validLayout, animated: previousMode != nil)
}
}
}
private func updateButtonsLayout(mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
private var appliedMode: CallControllerButtonsMode?
private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
let transition: ContainedViewLayoutTransition
if animated {
transition = .animated(duration: 0.3, curve: .spring)
@@ -110,147 +122,279 @@ final class CallControllerButtonsNode: ASDisplayNode {
transition = .immediate
}
let threeButtonSpacing: CGFloat = 28.0
let twoButtonSpacing: CGFloat = 105.0
let buttonSize = CGSize(width: 75.0, height: 75.0)
let previousMode = self.appliedMode
self.appliedMode = mode
let threeButtonsWidth = 3.0 * buttonSize.width + 2.0 * threeButtonSpacing
let twoButtonsWidth = 2.0 * buttonSize.width + 1.0 * twoButtonSpacing
var animatePositionsWithDelay = false
if let previousMode = previousMode {
switch previousMode {
case .incoming, .outgoingRinging:
if case .active = mode {
animatePositionsWithDelay = true
}
default:
break
}
}
var origin = CGPoint(x: floor((width - threeButtonsWidth) / 2.0), y: 0.0)
let minSmallButtonSideInset: CGFloat = 34.0
let maxSmallButtonSpacing: CGFloat = 34.0
let smallButtonSize: CGFloat = 60.0
let topBottomSpacing: CGFloat = 84.0
for button in [self.muteButton, self.endButton, self.speakerButton] {
transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize))
if button === self.speakerButton {
transition.updateFrame(node: self.swichCameraButton, frame: CGRect(origin: origin, size: buttonSize))
let maxLargeButtonSpacing: CGFloat = 115.0
let largeButtonSize: CGFloat = 72.0
let minLargeButtonSideInset: CGFloat = minSmallButtonSideInset - 6.0
struct PlacedButton {
let button: ButtonDescription
let frame: CGRect
}
var buttons: [PlacedButton] = []
switch mode {
case .incoming(let speakerMode, let videoState), .outgoingRinging(let speakerMode, let videoState):
var topButtons: [ButtonDescription] = []
var bottomButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
}
origin.x += buttonSize.width + threeButtonSpacing
switch videoState {
case .active, .available:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(self.isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
}
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}
if case .incoming = mode {
bottomButtons.append(.end(.decline))
bottomButtons.append(.accept)
} else {
bottomButtons.append(.end(.outgoing))
}
let bottomButtonsContentWidth = CGFloat(bottomButtons.count) * largeButtonSize
let bottomButtonsAvailableSpacingWidth = width - bottomButtonsContentWidth - minLargeButtonSideInset * 2.0
let bottomButtonsSpacing = min(maxLargeButtonSpacing, bottomButtonsAvailableSpacingWidth / CGFloat(bottomButtons.count - 1))
let bottomButtonsWidth = CGFloat(bottomButtons.count) * largeButtonSize + CGFloat(bottomButtons.count - 1) * bottomButtonsSpacing
var bottomButtonsLeftOffset = floor((width - bottomButtonsWidth) / 2.0)
for button in bottomButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: bottomButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: largeButtonSize, height: largeButtonSize))))
bottomButtonsLeftOffset += largeButtonSize + bottomButtonsSpacing
}
case let .active(speakerMode, videoState):
var topButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .builtin
case .bluetooth:
soundOutput = .bluetooth
}
switch videoState {
case .active, .available:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.mute(isMuted))
topButtons.append(.soundOutput(soundOutput))
}
topButtons.append(.end(.end))
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}
}
origin = CGPoint(x: floor((width - twoButtonsWidth) / 2.0), y: 0.0)
for button in [self.declineButton, self.acceptButton] {
transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize))
origin.x += buttonSize.width + twoButtonSpacing
let delayIncrement = 0.015
var validKeys: [ButtonDescription.Key] = []
for button in buttons {
validKeys.append(button.button.key)
var buttonTransition = transition
var animateButtonIn = false
let buttonNode: CallControllerButtonItemNode
if let current = self.buttonNodes[button.button.key] {
buttonNode = current
} else {
buttonNode = CallControllerButtonItemNode()
self.buttonNodes[button.button.key] = buttonNode
self.addSubnode(buttonNode)
buttonNode.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
buttonTransition = .immediate
animateButtonIn = transition.isAnimated
}
let buttonContent: CallControllerButtonItemNode.Content
let buttonText: String
switch button.button {
case .accept:
buttonContent = CallControllerButtonItemNode.Content(
appearance: .color(.green),
image: .accept
)
buttonText = strings.Call_Accept
case let .end(type):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .color(.red),
image: .end
)
switch type {
case .outgoing:
buttonText = ""
case .decline:
buttonText = strings.Call_Decline
case .end:
buttonText = strings.Call_End
}
case let .enableCamera(isEnabled):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isEnabled),
image: .camera
)
buttonText = strings.Call_Camera
case .switchCamera:
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: false),
image: .flipCamera
)
buttonText = strings.Call_Flip
case let .soundOutput(value):
let image: CallControllerButtonItemNode.Content.Image
var isFilled = false
switch value {
case .builtin:
image = .speaker
case .speaker:
image = .speaker
isFilled = true
case .bluetooth:
image = .bluetooth
}
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isFilled),
image: image
)
buttonText = strings.Call_Speaker
case let .mute(isMuted):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isMuted),
image: .mute
)
buttonText = strings.Call_Mute
}
var buttonDelay = 0.0
if animatePositionsWithDelay {
switch button.button.key {
case .enableCamera:
buttonDelay = 0.0
case .mute:
buttonDelay = delayIncrement * 1.0
case .switchCamera:
buttonDelay = delayIncrement * 2.0
case .end:
buttonDelay = delayIncrement * 3.0
default:
break
}
}
buttonTransition.updateFrame(node: buttonNode, frame: button.frame, delay: buttonDelay)
buttonNode.update(size: button.frame.size, content: buttonContent, text: buttonText, transition: buttonTransition)
if animateButtonIn {
buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
switch mode {
case .incoming:
for button in [self.declineButton, self.acceptButton] {
button.alpha = 1.0
}
for button in [self.muteButton, self.endButton, self.speakerButton, self.swichCameraButton] {
button.alpha = 0.0
}
case let .active(speakerMode, videoState):
for button in [self.muteButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
switch videoState {
case .active, .available:
for button in [self.speakerButton] {
if animated && !button.alpha.isZero {
button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
button.alpha = 0.0
}
for button in [self.swichCameraButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
case .notAvailable:
for button in [self.swichCameraButton] {
if animated && !button.alpha.isZero {
button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
button.alpha = 0.0
}
for button in [self.speakerButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
}
var animatingAcceptButton = false
if self.endButton.alpha.isZero {
if animated {
if !self.acceptButton.alpha.isZero {
animatingAcceptButton = true
self.endButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
self.acceptButton.animateRollTransition()
self.endButton.layer.animate(from: (CGFloat.pi * 5 / 4) as NSNumber, to: 0.0 as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3)
self.acceptButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.acceptButton.alpha = 0.0
strongSelf.acceptButton.layer.removeAnimation(forKey: "position")
strongSelf.acceptButton.layer.removeAnimation(forKey: "transform.rotation.z")
}
var removedKeys: [ButtonDescription.Key] = []
for (key, button) in self.buttonNodes {
if !validKeys.contains(key) {
removedKeys.append(key)
if animated {
if case .accept = key {
if let endButton = self.buttonNodes[.end] {
transition.updateFrame(node: button, frame: endButton.frame)
if let content = button.currentContent {
button.update(size: endButton.frame.size, content: content, text: button.currentText, transition: transition)
}
transition.updateTransformScale(node: button, scale: 0.1)
transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
button?.removeFromSupernode()
})
}
self.endButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
} else {
transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
button?.removeFromSupernode()
})
}
self.endButton.alpha = 1.0
} else {
button.removeFromSupernode()
}
if !self.declineButton.alpha.isZero {
if animated {
self.declineButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
}
self.declineButton.alpha = 0.0
}
if self.acceptButton.alpha.isZero && !animatingAcceptButton {
self.acceptButton.alpha = 0.0
}
self.speakerButton.isSelected = speakerMode == .speaker
self.speakerButton.isHidden = speakerMode == .none
let speakerButtonType: CallControllerButtonType
switch speakerMode {
case .none, .builtin, .speaker:
speakerButtonType = .speaker
case .headphones:
speakerButtonType = .bluetooth
case .bluetooth:
speakerButtonType = .bluetooth
}
self.speakerButton.updateType(speakerButtonType)
}
}
for key in removedKeys {
self.buttonNodes.removeValue(forKey: key)
}
}
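Note: the body above is a small keyed diff over buttonNodes: keys that survive reuse their node (optionally repositioned with a per-button delay), new keys get nodes faded in, and stale keys are animated out, with .accept specially collapsed into .end. The same pattern in isolation, with toy types standing in for the real ones:

struct Item { let key: String; let title: String }
final class Node { var title = ""; func removeFromParent() {} }

var nodes: [String: Node] = [:]
func apply(_ items: [Item]) {
    var validKeys: [String] = []
    for item in items {
        validKeys.append(item.key)
        let node: Node
        if let current = nodes[item.key] {
            node = current               // reuse: node identity and running animations survive
        } else {
            node = Node()                // create: a real implementation would fade this in
            nodes[item.key] = node
        }
        node.title = item.title          // update in place
    }
    for key in Array(nodes.keys) where !validKeys.contains(key) {
        nodes[key]?.removeFromParent()   // remove: a real implementation would fade this out
        nodes.removeValue(forKey: key)
    }
}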
@objc func buttonPressed(_ button: CallControllerButtonNode) {
if button === self.muteButton {
self.mute?()
} else if button === self.endButton || button === self.declineButton {
self.end?()
} else if button === self.speakerButton {
self.speaker?()
} else if button === self.acceptButton {
self.accept?()
} else if button === self.swichCameraButton {
self.rotateCamera?()
@objc func buttonPressed(_ button: CallControllerButtonItemNode) {
for (key, listButton) in self.buttonNodes {
if button === listButton {
switch key {
case .accept:
self.accept?()
case .end:
self.end?()
case .enableCamera:
self.toggleVideo?()
case .switchCamera:
self.rotateCamera?()
case .soundOutput:
self.speaker?()
case .mute:
self.mute?()
}
break
}
}
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let buttons = [
self.acceptButton,
self.declineButton,
self.muteButton,
self.endButton,
self.speakerButton,
self.swichCameraButton
]
for button in buttons {
if button.isHidden || button.alpha.isZero {
continue
}
for (_, button) in self.buttonNodes {
if let result = button.view.hitTest(self.view.convert(point, to: button.view), with: event) {
return result
}


@@ -56,34 +56,91 @@ private final class IncomingVideoNode: ASDisplayNode {
}
private final class OutgoingVideoNode: ASDisplayNode {
private let videoTransformContainer: ASDisplayNode
private let videoView: UIView
private let switchCameraButton: HighlightableButtonNode
private let switchCamera: () -> Void
private let buttonNode: HighlightTrackingButtonNode
init(videoView: UIView, switchCamera: @escaping () -> Void) {
private var effectView: UIVisualEffectView?
private var isBlurred: Bool = false
private var isExpanded: Bool = false
var tapped: (() -> Void)?
init(videoView: UIView) {
self.videoTransformContainer = ASDisplayNode()
self.videoTransformContainer.clipsToBounds = true
self.videoView = videoView
self.switchCameraButton = HighlightableButtonNode()
self.switchCamera = switchCamera
self.videoView.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
self.buttonNode = HighlightTrackingButtonNode()
super.init()
self.view.addSubview(self.videoView)
self.addSubnode(self.switchCameraButton)
self.switchCameraButton.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
self.videoTransformContainer.view.addSubview(self.videoView)
self.addSubnode(self.videoTransformContainer)
//self.addSubnode(self.buttonNode)
self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
}
@objc private func buttonPressed() {
self.switchCamera()
@objc func buttonPressed() {
self.tapped?()
}
func updateLayout(size: CGSize, isExpanded: Bool, transition: ContainedViewLayoutTransition) {
transition.updateFrame(view: self.videoView, frame: CGRect(origin: CGPoint(), size: size))
transition.updateCornerRadius(layer: self.videoView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
self.switchCameraButton.frame = CGRect(origin: CGPoint(), size: size)
let videoFrame = CGRect(origin: CGPoint(), size: size)
self.buttonNode.frame = videoFrame
self.isExpanded = isExpanded
let previousVideoFrame = self.videoTransformContainer.frame
self.videoTransformContainer.frame = videoFrame
if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
transition.animatePositionAdditive(node: self.videoTransformContainer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY))
transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / videoFrame.height)
}
self.videoView.frame = videoFrame
transition.updateCornerRadius(layer: self.videoTransformContainer.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
if let effectView = self.effectView {
transition.updateCornerRadius(layer: effectView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
}
}
func updateIsBlurred(isBlurred: Bool) {
if self.isBlurred == isBlurred {
return
}
self.isBlurred = isBlurred
if isBlurred {
if self.effectView == nil {
let effectView = UIVisualEffectView()
effectView.clipsToBounds = true
effectView.layer.cornerRadius = self.isExpanded ? 0.0 : 16.0
self.effectView = effectView
effectView.frame = self.videoView.frame
self.view.addSubview(effectView)
}
UIView.animate(withDuration: 0.3, animations: {
self.effectView?.effect = UIBlurEffect(style: .dark)
})
} else if let effectView = self.effectView {
UIView.animate(withDuration: 0.3, animations: {
effectView.effect = nil
})
}
}
}
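Note: updateIsBlurred animates the effect property itself, which UIKit cross-fades, and creates the effect view lazily on first use. The idiom in isolation (videoContainer and shouldBlur are stand-ins):

let effectView = UIVisualEffectView() // effect starts out nil, i.e. fully transparent
videoContainer.addSubview(effectView)
UIView.animate(withDuration: 0.3, animations: {
    effectView.effect = shouldBlur ? UIBlurEffect(style: .dark) : nil
})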
final class CallControllerNode: ASDisplayNode {
private enum VideoNodeCorner {
case topLeft
case topRight
case bottomLeft
case bottomRight
}
private let sharedContext: SharedAccountContext
private let account: Account
@@ -104,6 +161,8 @@ final class CallControllerNode: ASDisplayNode {
private var incomingVideoViewRequested: Bool = false
private var outgoingVideoNode: OutgoingVideoNode?
private var outgoingVideoViewRequested: Bool = false
private var outgoingVideoExplicitelyFullscreen: Bool = false
private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight
private let backButtonArrowNode: ASImageNode
private let backButtonNode: HighlightableButtonNode
private let statusNode: CallControllerStatusNode
@@ -121,6 +180,9 @@ final class CallControllerNode: ASDisplayNode {
var isMuted: Bool = false {
didSet {
self.buttonsNode.isMuted = self.isMuted
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
}
@@ -134,12 +196,15 @@ final class CallControllerNode: ASDisplayNode {
var beginAudioOuputSelection: (() -> Void)?
var acceptCall: (() -> Void)?
var endCall: (() -> Void)?
var toggleVideo: (() -> Void)?
var setIsVideoPaused: ((Bool) -> Void)?
var back: (() -> Void)?
var presentCallRating: ((CallId) -> Void)?
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
private var isUIHidden: Bool = false
private var isVideoPaused: Bool = false
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
self.sharedContext = sharedContext
self.account = account
@@ -229,7 +294,17 @@ final class CallControllerNode: ASDisplayNode {
}
self.buttonsNode.toggleVideo = { [weak self] in
self?.toggleVideo?()
guard let strongSelf = self else {
return
}
strongSelf.isVideoPaused = !strongSelf.isVideoPaused
strongSelf.outgoingVideoNode?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
self.buttonsNode.rotateCamera = { [weak self] in
@@ -302,17 +377,21 @@ final class CallControllerNode: ASDisplayNode {
return
}
if let incomingVideoView = incomingVideoView {
strongSelf.setCurrentAudioOutput?(.speaker)
let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView)
strongSelf.incomingVideoNode = incomingVideoNode
strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
strongSelf.statusNode.isHidden = true
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
}
}
})
}
default:
break
}
switch callState.videoState {
case .active, .activeOutgoing:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
@@ -322,13 +401,15 @@ final class CallControllerNode: ASDisplayNode {
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
guard let strongSelf = self else {
return
if let audioOutputState = strongSelf.audioOutputState, let currentOutput = audioOutputState.currentOutput {
switch currentOutput {
case .speaker, .builtin:
break
default:
strongSelf.setCurrentAudioOutput?(.speaker)
}
strongSelf.call.switchVideoCamera()
})
}
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView)
strongSelf.outgoingVideoNode = outgoingVideoNode
if let incomingVideoNode = strongSelf.incomingVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
@@ -336,38 +417,17 @@ final class CallControllerNode: ASDisplayNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
}
}
})
}
case .activeOutgoing:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
guard let strongSelf = self else {
return
}
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
outgoingVideoView.layer.cornerRadius = 16.0
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
/*outgoingVideoNode.tapped = {
guard let strongSelf = self else {
return
}
strongSelf.call.switchVideoCamera()
})
strongSelf.outgoingVideoNode = outgoingVideoNode
if let incomingVideoNode = strongSelf.incomingVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
} else {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
strongSelf.outgoingVideoExplicitelyFullscreen = !strongSelf.outgoingVideoExplicitelyFullscreen
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
}
}*/
}
})
}
@@ -438,7 +498,7 @@ final class CallControllerNode: ASDisplayNode {
if isReconnecting {
return strings.Call_StatusConnecting
} else {
return strings.Call_StatusOngoing(value).0
return value
}
}, timestamp)
if self.keyTextData?.0 != keyVisualHash {
@@ -501,43 +561,60 @@ final class CallControllerNode: ASDisplayNode {
}
}
private var buttonsTerminationMode: CallControllerButtonsMode?
private func updateButtonsMode() {
guard let callState = self.callState else {
return
}
var mode: CallControllerButtonsSpeakerMode = .none
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
switch currentOutput {
case .builtin:
mode = .builtin
case .speaker:
mode = .speaker
case .headphones:
mode = .headphones
case .port:
mode = .bluetooth
}
if availableOutputs.count <= 1 {
mode = .none
}
}
let mappedVideoState: CallControllerButtonsMode.VideoState
switch callState.videoState {
case .notAvailable:
mappedVideoState = .notAvailable
case .available:
mappedVideoState = .available(true)
case .active:
mappedVideoState = .active
case .activeOutgoing:
mappedVideoState = .active
}
switch callState.state {
case .ringing:
self.buttonsNode.updateMode(.incoming)
default:
var mode: CallControllerButtonsSpeakerMode = .none
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
switch currentOutput {
case .builtin:
mode = .builtin
case .speaker:
mode = .speaker
case .headphones:
mode = .headphones
case .port:
mode = .bluetooth
}
if availableOutputs.count <= 1 {
mode = .none
}
}
let mappedVideoState: CallControllerButtonsMode.VideoState
switch callState.videoState {
case .notAvailable:
mappedVideoState = .notAvailable
case .available:
mappedVideoState = .available(true)
case .active:
mappedVideoState = .active
case .activeOutgoing:
mappedVideoState = .active
}
self.buttonsNode.updateMode(.active(speakerMode: mode, videoState: mappedVideoState))
case .ringing:
let buttonsMode: CallControllerButtonsMode = .incoming(speakerMode: mode, videoState: mappedVideoState)
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
self.buttonsTerminationMode = buttonsMode
case .waiting, .requesting:
let buttonsMode: CallControllerButtonsMode = .outgoingRinging(speakerMode: mode, videoState: mappedVideoState)
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
self.buttonsTerminationMode = buttonsMode
case .active, .connecting, .reconnecting:
let buttonsMode: CallControllerButtonsMode = .active(speakerMode: mode, videoState: mappedVideoState)
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
self.buttonsTerminationMode = buttonsMode
case .terminating, .terminated:
if let buttonsTerminationMode = self.buttonsTerminationMode {
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsTerminationMode)
} else {
self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: .active(speakerMode: mode, videoState: mappedVideoState))
}
}
}
@@ -568,9 +645,69 @@ final class CallControllerNode: ASDisplayNode {
}
}
private func calculatePreviewVideoRect(layout: ContainerViewLayout, navigationHeight: CGFloat) -> CGRect {
let buttonsHeight: CGFloat = 190.0
let buttonsOffset: CGFloat
if layout.size.width.isEqual(to: 320.0) {
if layout.size.height.isEqual(to: 480.0) {
buttonsOffset = 60.0
} else {
buttonsOffset = 73.0
}
} else {
buttonsOffset = 83.0
}
let buttonsOriginY: CGFloat
if self.isUIHidden {
buttonsOriginY = layout.size.height + 40.0 - 80.0
} else {
buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
}
let previewVideoSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
let previewVideoY: CGFloat
let previewVideoX: CGFloat
switch self.outgoingVideoNodeCorner {
case .topLeft:
previewVideoX = 20.0
if self.isUIHidden {
previewVideoY = layout.insets(options: .statusBar).top + 8.0
} else {
previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
}
case .topRight:
previewVideoX = layout.size.width - previewVideoSize.width - 20.0
if self.isUIHidden {
previewVideoY = layout.insets(options: .statusBar).top + 8.0
} else {
previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
}
case .bottomLeft:
previewVideoX = 20.0
if self.isUIHidden {
previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
} else {
previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
}
case .bottomRight:
previewVideoX = layout.size.width - previewVideoSize.width - 20.0
if self.isUIHidden {
previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
} else {
previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
}
}
return CGRect(origin: CGPoint(x: previewVideoX, y: previewVideoY), size: previewVideoSize)
}
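Note: aspectFitted scales the full layout size down to fit a 200x200 box while preserving the aspect ratio, so the preview keeps the camera feed's proportions. A hedged check of the arithmetic, assuming those semantics (the screen size is just an example):

let layoutSize = CGSize(width: 375.0, height: 812.0)
let previewVideoSize = layoutSize.aspectFitted(CGSize(width: 200.0, height: 200.0))
// scale = min(200.0 / 375.0, 200.0 / 812.0) ≈ 0.246, so previewVideoSize ≈ 92.4 x 200.0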
func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
self.validLayout = (layout, navigationBarHeight)
let overlayAlpha: CGFloat = self.isUIHidden ? 0.0 : 1.0
transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -592,6 +729,9 @@ final class CallControllerNode: ASDisplayNode {
}
transition.updateFrame(node: self.backButtonNode, frame: CGRect(origin: CGPoint(x: 29.0, y: navigationOffset + 11.0), size: backSize))
transition.updateAlpha(node: self.backButtonArrowNode, alpha: overlayAlpha)
transition.updateAlpha(node: self.backButtonNode, alpha: overlayAlpha)
var statusOffset: CGFloat
if layout.metrics.widthClass == .regular && layout.metrics.heightClass == .regular {
if layout.size.height.isEqual(to: 1366.0) {
@@ -611,7 +751,7 @@ final class CallControllerNode: ASDisplayNode {
statusOffset += layout.safeInsets.top
let buttonsHeight: CGFloat = 75.0
let buttonsHeight: CGFloat = 190.0
let buttonsOffset: CGFloat
if layout.size.width.isEqual(to: 320.0) {
if layout.size.height.isEqual(to: 480.0) {
@@ -625,36 +765,60 @@ final class CallControllerNode: ASDisplayNode {
let statusHeight = self.statusNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
transition.updateAlpha(node: self.statusNode, alpha: overlayAlpha)
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
self.buttonsNode.updateLayout(strings: self.presentationData.strings, constrainedWidth: layout.size.width, transition: transition)
let buttonsOriginY: CGFloat
if self.isUIHidden {
buttonsOriginY = layout.size.height + 40.0 - 80.0
} else {
buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
}
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size)
let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)
var outgoingVideoTransition = transition
if let incomingVideoNode = self.incomingVideoNode {
if incomingVideoNode.frame.width.isZero, let outgoingVideoNode = self.outgoingVideoNode, !outgoingVideoNode.frame.width.isZero, !transition.isAnimated {
outgoingVideoTransition = .animated(duration: 0.3, curve: .easeInOut)
var incomingVideoTransition = transition
if incomingVideoNode.frame.isEmpty {
incomingVideoTransition = .immediate
}
incomingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
incomingVideoNode.updateLayout(size: layout.size)
if self.outgoingVideoExplicitelyFullscreen {
incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: previewVideoFrame)
} else {
incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: fullscreenVideoFrame)
}
incomingVideoNode.updateLayout(size: incomingVideoNode.frame.size)
}
if let outgoingVideoNode = self.outgoingVideoNode {
var outgoingVideoTransition = transition
if outgoingVideoNode.frame.isEmpty {
outgoingVideoTransition = .immediate
}
if self.incomingVideoNode == nil {
outgoingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: transition)
outgoingVideoNode.frame = fullscreenVideoFrame
outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: outgoingVideoTransition)
} else {
let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
let outgoingFrame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize)
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: outgoingFrame)
outgoingVideoNode.updateLayout(size: outgoingFrame.size, isExpanded: false, transition: outgoingVideoTransition)
if self.minimizedVideoDraggingPosition == nil {
if self.outgoingVideoExplicitelyFullscreen {
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: fullscreenVideoFrame)
} else {
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: previewVideoFrame)
}
outgoingVideoNode.updateLayout(size: outgoingVideoNode.frame.size, isExpanded: self.outgoingVideoExplicitelyFullscreen, transition: outgoingVideoTransition)
}
}
}
let keyTextSize = self.keyButtonNode.frame.size
transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
transition.updateAlpha(node: self.keyButtonNode, alpha: overlayAlpha)
if let debugNode = self.debugNode {
transition.updateFrame(node: debugNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -700,26 +864,33 @@ final class CallControllerNode: ASDisplayNode {
if let _ = self.keyPreviewNode {
self.backPressed()
} else {
let point = recognizer.location(in: recognizer.view)
if self.statusNode.frame.contains(point) {
if self.easyDebugAccess {
self.presentDebugNode()
} else {
let timestamp = CACurrentMediaTime()
if self.debugTapCounter.0 < timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 = 0
}
if self.debugTapCounter.0 >= timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 += 1
}
if self.debugTapCounter.1 >= 10 {
self.debugTapCounter.1 = 0
if self.incomingVideoNode != nil || self.outgoingVideoNode != nil {
self.isUIHidden = !self.isUIHidden
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
} else {
let point = recognizer.location(in: recognizer.view)
if self.statusNode.frame.contains(point) {
if self.easyDebugAccess {
self.presentDebugNode()
} else {
let timestamp = CACurrentMediaTime()
if self.debugTapCounter.0 < timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 = 0
}
if self.debugTapCounter.0 >= timestamp - 0.75 {
self.debugTapCounter.0 = timestamp
self.debugTapCounter.1 += 1
}
if self.debugTapCounter.1 >= 10 {
self.debugTapCounter.1 = 0
self.presentDebugNode()
}
}
}
}
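
The else branch above preserves the hidden debug gesture: ten sufficiently fast taps on the status area present the debug node, with a rolling 0.75 s window between taps. The counter in isolation (a self-contained sketch of the same pattern):

struct DebugTapCounter {
    private var lastTapTime: Double = 0.0
    private var count: Int = 0

    // Returns true on the tenth tap of a sequence with < 0.75 s between taps.
    mutating func registerTap(at timestamp: Double) -> Bool {
        if lastTapTime < timestamp - 0.75 {
            count = 0 // too slow: restart the sequence
        }
        lastTapTime = timestamp
        count += 1
        if count >= 10 {
            count = 0
            return true
        }
        return false
    }
}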
@@ -749,36 +920,170 @@ final class CallControllerNode: ASDisplayNode {
}
}
@objc func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .changed:
let offset = recognizer.translation(in: self.view).y
var bounds = self.bounds
bounds.origin.y = -offset
self.bounds = bounds
case .ended:
let velocity = recognizer.velocity(in: self.view).y
if abs(velocity) < 100.0 {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
private var minimizedVideoInitialPosition: CGPoint?
private var minimizedVideoDraggingPosition: CGPoint?
private func nodeLocationForPosition(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> VideoNodeCorner {
let layoutInsets = UIEdgeInsets()
var result = CGPoint()
if position.x < layout.size.width / 2.0 {
result.x = 0.0
} else {
result.x = 1.0
}
if position.y < layoutInsets.top + (layout.size.height - layoutInsets.bottom - layoutInsets.top) / 2.0 {
result.y = 0.0
} else {
result.y = 1.0
}
let currentPosition = result
let angleEpsilon: CGFloat = 30.0
var shouldHide = false
if (velocity.x * velocity.x + velocity.y * velocity.y) >= 500.0 * 500.0 {
let x = velocity.x
let y = velocity.y
var angle = atan2(y, x) * 180.0 / CGFloat.pi * -1.0
if angle < 0.0 {
angle += 360.0
}
if currentPosition.x.isZero && currentPosition.y.isZero {
if ((angle > 0 && angle < 90 - angleEpsilon) || angle > 360 - angleEpsilon) {
result.x = 1.0
result.y = 0.0
} else if (angle > 180 + angleEpsilon && angle < 270 + angleEpsilon) {
result.x = 0.0
result.y = 1.0
} else if (angle > 270 + angleEpsilon && angle < 360 - angleEpsilon) {
result.x = 1.0
result.y = 1.0
} else {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height : bounds.height)
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in
self?.dismissedInteractively?()
})
shouldHide = true
}
} else if !currentPosition.x.isZero && currentPosition.y.isZero {
if (angle > 90 + angleEpsilon && angle < 180 + angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (angle > 270 - angleEpsilon && angle < 360 - angleEpsilon) {
result.x = 1.0
result.y = 1.0
}
else if (angle > 180 + angleEpsilon && angle < 270 - angleEpsilon) {
result.x = 0.0
result.y = 1.0
}
else {
shouldHide = true
}
} else if currentPosition.x.isZero && !currentPosition.y.isZero {
if (angle > 90 - angleEpsilon && angle < 180 - angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (angle < angleEpsilon || angle > 270 + angleEpsilon) {
result.x = 1.0
result.y = 1.0
}
else if (angle > angleEpsilon && angle < 90 - angleEpsilon) {
result.x = 1.0
result.y = 0.0
}
else if (!shouldHide) {
shouldHide = true
}
} else if !currentPosition.x.isZero && !currentPosition.y.isZero {
if (angle > angleEpsilon && angle < 90 + angleEpsilon) {
result.x = 1.0
result.y = 0.0
}
else if (angle > 180 - angleEpsilon && angle < 270 - angleEpsilon) {
result.x = 0.0
result.y = 1.0
}
else if (angle > 90 + angleEpsilon && angle < 180 - angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (!shouldHide) {
shouldHide = true
}
}
}
if result.x.isZero {
if result.y.isZero {
return .topLeft
} else {
return .bottomLeft
}
} else {
if result.y.isZero {
return .topRight
} else {
return .bottomRight
}
}
}
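
To make the fling logic above concrete: the velocity vector becomes a counter-clockwise angle in degrees with 0° pointing right, and only flings faster than 500 pt/s can move the pip to a different corner (or mark it via shouldHide). A worked example with hypothetical values:

// A fling up-and-right while the pip sits in the bottom-left corner:
let velocity = CGPoint(x: 400.0, y: -400.0)           // speed ≈ 565.7 pt/s, above the 500.0 threshold
var angle = atan2(velocity.y, velocity.x) * 180.0 / CGFloat.pi * -1.0
if angle < 0.0 { angle += 360.0 }                      // angle = 45.0
// currentPosition = (0, 1), i.e. bottom-left; 45° lies inside
// (angleEpsilon, 90 - angleEpsilon) = (30, 60), so result = (1, 0) and the pip snaps to .topRight.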
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .began:
let location = recognizer.location(in: self.view)
//let translation = recognizer.translation(in: self.view)
//location.x += translation.x
//location.y += translation.y
if let _ = self.incomingVideoNode, let outgoingVideoNode = self.outgoingVideoNode, outgoingVideoNode.frame.contains(location) {
self.minimizedVideoInitialPosition = outgoingVideoNode.position
} else {
self.minimizedVideoInitialPosition = nil
}
case .changed:
if let outgoingVideoNode = self.outgoingVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
let translation = recognizer.translation(in: self.view)
let minimizedVideoDraggingPosition = CGPoint(x: minimizedVideoInitialPosition.x + translation.x, y: minimizedVideoInitialPosition.y + translation.y)
self.minimizedVideoDraggingPosition = minimizedVideoDraggingPosition
outgoingVideoNode.position = minimizedVideoDraggingPosition
} else {
let offset = recognizer.translation(in: self.view).y
var bounds = self.bounds
bounds.origin.y = -offset
self.bounds = bounds
}
case .cancelled, .ended:
if let outgoingVideoNode = self.outgoingVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition {
self.minimizedVideoInitialPosition = nil
self.minimizedVideoDraggingPosition = nil
if let (layout, navigationHeight) = self.validLayout {
self.outgoingVideoNodeCorner = self.nodeLocationForPosition(layout: layout, position: minimizedVideoDraggingPosition, velocity: recognizer.velocity(in: self.view))
let videoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationHeight)
outgoingVideoNode.frame = videoFrame
outgoingVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil)
}
} else {
let velocity = recognizer.velocity(in: self.view).y
if abs(velocity) < 100.0 {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
} else {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height : bounds.height)
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in
self?.dismissedInteractively?()
})
}
}
case .cancelled:
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
default:
break
}
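
The .cancelled/.ended branch above snaps the pip back with a common Core Animation trick: set the final frame immediately (so layout stays truthful), then run an additive spring that animates the leftover positional offset to zero. The same pattern in isolation (a sketch; node and draggedCenter are placeholders):

// Snap `node` to `targetFrame` from wherever the finger released it.
let delta = CGPoint(x: draggedCenter.x - targetFrame.midX, y: draggedCenter.y - targetFrame.midY)
node.frame = targetFrame
node.layer.animateSpring(from: NSValue(cgPoint: delta), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil)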

View File

@@ -190,7 +190,7 @@ public final class PresentationCallImpl: PresentationCall {
private var sessionStateDisposable: Disposable?
private let statePromise = ValuePromise<PresentationCallState>(PresentationCallState(state: .waiting, videoState: .notAvailable, remoteVideoState: .inactive), ignoreRepeated: true)
private let statePromise = ValuePromise<PresentationCallState>()
public var state: Signal<PresentationCallState, NoError> {
return self.statePromise.get()
}
@@ -233,7 +233,9 @@ public final class PresentationCallImpl: PresentationCall {
private var droppedCall = false
private var dropCallKitCallTimer: SwiftSignalKit.Timer?
init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>) {
private var videoCapturer: OngoingCallVideoCapturer?
init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, startWithVideo: Bool) {
self.account = account
self.audioSession = audioSession
self.callSessionManager = callSessionManager
@@ -259,6 +261,13 @@ public final class PresentationCallImpl: PresentationCall {
self.isOutgoing = isOutgoing
self.isVideo = initialState?.type == .video
self.peer = peer
self.isVideo = startWithVideo
if self.isVideo {
self.videoCapturer = OngoingCallVideoCapturer()
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .activeOutgoing, remoteVideoState: .inactive))
} else {
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive))
}
self.serializedData = serializedData
self.dataSaving = dataSaving
@@ -440,13 +449,17 @@ public final class PresentationCallImpl: PresentationCall {
mappedRemoteVideoState = .active
}
} else {
mappedVideoState = .notAvailable
if self.isVideo {
mappedVideoState = .activeOutgoing
} else {
mappedVideoState = .notAvailable
}
mappedRemoteVideoState = .inactive
}
switch sessionState.state {
case .ringing:
presentationState = PresentationCallState(state: .ringing, videoState: .notAvailable, remoteVideoState: .inactive)
presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
if previous == nil || previousControl == nil {
if !self.reportedIncomingCall {
self.reportedIncomingCall = true
@@ -509,7 +522,7 @@ public final class PresentationCallImpl: PresentationCall {
presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
}
} else {
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: .notAvailable, remoteVideoState: .inactive)
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
}
}
@@ -523,8 +536,9 @@ public final class PresentationCallImpl: PresentationCall {
if let _ = audioSessionControl, !wasActive || previousControl == nil {
let logName = "\(id.id)_\(id.accessHash)"
let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, isVideo: sessionState.type == .video, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName)
let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName)
self.ongoingContext = ongoingContext
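// Replay state cached before the context existed (here: the mute flag) onto the fresh context.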
ongoingContext.setIsMuted(self.isMutedValue)
self.debugInfoValue.set(ongoingContext.debugInfo())
@@ -718,8 +732,8 @@ public final class PresentationCallImpl: PresentationCall {
self.ongoingContext?.setEnableVideo(value)
}
public func switchVideoCamera() {
self.ongoingContext?.switchVideoCamera()
public func setOutgoingVideoIsPaused(_ isPaused: Bool) {
self.videoCapturer?.setIsVideoEnabled(!isPaused)
}
public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
@@ -748,6 +762,10 @@ public final class PresentationCallImpl: PresentationCall {
}
public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.ongoingContext?.makeOutgoingVideoView(completion: completion)
self.videoCapturer?.makeOutgoingVideoView(completion: completion)
}
public func switchVideoCamera() {
self.videoCapturer?.switchCamera()
}
}
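
A hypothetical caller-side sketch for the new pause API (hook points assumed, not part of the diff): instead of tearing down the capturer when the app backgrounds, the UI can pause outgoing frames and resume them later.

func applicationDidEnterBackground(call: PresentationCall) {
    // The capturer keeps running; the camera pipeline just stops forwarding frames.
    call.setOutgoingVideoIsPaused(true)
}

func applicationWillEnterForeground(call: PresentationCall) {
    call.setOutgoingVideoIsPaused(false)
}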

View File

@@ -278,52 +278,6 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
self.callSettingsDisposable?.dispose()
}
public func injectRingingStateSynchronously(account: Account, ringingState: CallSessionRingingState, callSession: CallSession) {
if self.currentCall != nil {
return
}
let semaphore = DispatchSemaphore(value: 0)
var data: (PreferencesView, AccountSharedDataView, Peer?)?
let _ = combineLatest(
account.postbox.preferencesView(keys: [PreferencesKeys.voipConfiguration, ApplicationSpecificPreferencesKeys.voipDerivedState, PreferencesKeys.appConfiguration])
|> take(1),
accountManager.sharedData(keys: [SharedDataKeys.autodownloadSettings])
|> take(1),
account.postbox.transaction { transaction -> Peer? in
return transaction.getPeer(ringingState.peerId)
}
).start(next: { preferences, sharedData, peer in
data = (preferences, sharedData, peer)
semaphore.signal()
})
semaphore.wait()
if let (preferences, sharedData, maybePeer) = data, let peer = maybePeer {
let configuration = preferences.values[PreferencesKeys.voipConfiguration] as? VoipConfiguration ?? .defaultValue
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
let derivedState = preferences.values[ApplicationSpecificPreferencesKeys.voipDerivedState] as? VoipDerivedState ?? .default
let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
let enableCallKit = true
let call = PresentationCallImpl(account: account, audioSession: self.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(self.callKitIntegration, settings: self.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: self.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: self.getDeviceAccessData, initialState: callSession, internalId: ringingState.id, peerId: ringingState.peerId, isOutgoing: false, peer: peer, proxyServer: self.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: .none, updatedNetworkType: account.networkType)
self.updateCurrentCall(call)
self.currentCallPromise.set(.single(call))
self.hasActiveCallsPromise.set(true)
self.removeCurrentCallDisposable.set((call.canBeRemoved
|> deliverOnMainQueue).start(next: { [weak self, weak call] value in
if value, let strongSelf = self, let call = call {
if strongSelf.currentCall === call {
strongSelf.updateCurrentCall(nil)
strongSelf.currentCallPromise.set(.single(nil))
strongSelf.hasActiveCallsPromise.set(false)
}
}
}))
}
}
private func ringingStatesUpdated(_ ringingStates: [(Account, Peer, CallSessionRingingState, Bool, NetworkType)], enableCallKit: Bool) {
if let firstState = ringingStates.first {
if self.currentCall == nil {
@@ -338,7 +292,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType)
let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType, startWithVideo: firstState.2.isVideo)
strongSelf.updateCurrentCall(call)
strongSelf.currentCallPromise.set(.single(call))
strongSelf.hasActiveCallsPromise.set(true)
@@ -491,7 +445,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType)
let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType, startWithVideo: isVideo)
strongSelf.updateCurrentCall(call)
strongSelf.currentCallPromise.set(.single(call))
strongSelf.hasActiveCallsPromise.set(true)

View File

@@ -107,9 +107,10 @@ typealias CallSessionStableId = Int64
public struct CallSessionRingingState: Equatable {
public let id: CallSessionInternalId
public let peerId: PeerId
public let isVideo: Bool
public static func ==(lhs: CallSessionRingingState, rhs: CallSessionRingingState) -> Bool {
return lhs.id == rhs.id && lhs.peerId == rhs.peerId
return lhs.id == rhs.id && lhs.peerId == rhs.peerId && lhs.isVideo == rhs.isVideo
}
}
@@ -365,7 +366,7 @@ private final class CallSessionManagerContext {
var ringingContexts: [CallSessionRingingState] = []
for (id, context) in self.contexts {
if case .ringing = context.state {
ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId))
ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId, isVideo: context.type == .video))
}
}
return ringingContexts
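
Because isVideo now participates in ==, two ringing states for the same session that differ only in the video flag compare unequal, so repeat-filtering downstream propagates the change instead of swallowing it. A tiny illustration (hypothetical id and peerId):

// With the old ==, these compared equal and the update was dropped.
let audio = CallSessionRingingState(id: id, peerId: peerId, isVideo: false)
let video = CallSessionRingingState(id: id, peerId: peerId, isVideo: true)
assert(audio != video)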

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_accept.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_video.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_decline.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown. (Before: 1.1 KiB)

Binary file not shown. (Before: 1.7 KiB)

View File

@@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallMuteIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallMuteIcon@3x.png",
"scale" : "3x"
"filename" : "ic_calls_mute.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}

Binary file not shown. (Before: 545 B)

Binary file not shown. (Before: 844 B)

View File

@@ -1,22 +0,0 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallPhoneIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallPhoneIcon@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown. (Before: 655 B)

Binary file not shown. (Before: 1.2 KiB)

View File

@@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallRouteSpeaker@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallRouteSpeaker@3x.png",
"scale" : "3x"
"filename" : "ic_calls_speaker.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}

Binary file not shown. (Before: 1.2 KiB)

Binary file not shown. (Before: 1.9 KiB)

View File

@@ -1,22 +1,12 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallSpeakerIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallSpeakerIcon@3x.png",
"scale" : "3x"
"filename" : "ic_calls_speaker.pdf",
"idiom" : "universal"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
"author" : "xcode",
"version" : 1
}
}

View File

@@ -1,7 +1,7 @@
{
"images" : [
{
"filename" : "Video.pdf",
"filename" : "ic_calls_cameraflip.pdf",
"idiom" : "universal"
}
],

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_tlogo.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -245,7 +245,6 @@ private protocol OngoingCallThreadLocalContextProtocol: class {
func nativeSetNetworkType(_ type: NetworkType)
func nativeSetIsMuted(_ value: Bool)
func nativeSetVideoEnabled(_ value: Bool)
func nativeSwitchVideoCamera()
func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void)
func nativeDebugInfo() -> String
func nativeVersion() -> String
@@ -292,6 +291,26 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
}
}
public final class OngoingCallVideoCapturer {
fileprivate let impl: OngoingCallThreadLocalContextVideoCapturer
public init() {
self.impl = OngoingCallThreadLocalContextVideoCapturer()
}
public func switchCamera() {
self.impl.switchVideoCamera()
}
public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.impl.makeOutgoingVideoView(completion)
}
public func setIsVideoEnabled(_ value: Bool) {
self.impl.setIsVideoEnabled(value)
}
}
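
The capturer is now an object with its own lifetime, shared between the UI and the call context. A sketch of the intended flow (surrounding wiring assumed, not from the diff):

let capturer = OngoingCallVideoCapturer()

// Local preview works before, and independently of, the network context:
capturer.makeOutgoingVideoView { view in
    // attach `view` to the call screen's outgoing video node
}

capturer.setIsVideoEnabled(false) // pause outgoing frames
capturer.setIsVideoEnabled(true)  // resume
capturer.switchCamera()           // front <-> back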
extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {
func nativeSetNetworkType(_ type: NetworkType) {
self.setNetworkType(ongoingNetworkTypeForTypeWebrtc(type))
@@ -309,10 +328,6 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
self.setVideoEnabled(value)
}
func nativeSwitchVideoCamera() {
self.switchVideoCamera()
}
func nativeDebugInfo() -> String {
return self.debugInfo() ?? ""
}
@@ -463,7 +478,7 @@ public final class OngoingCallContext {
return result
}
public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, isVideo: Bool, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
let _ = setupLogs
OngoingCallThreadLocalContext.applyServerConfig(serializedData)
//OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData)
@@ -542,9 +557,9 @@ public final class OngoingCallContext {
))
}
}
let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, isVideo: isVideo, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
callSessionManager?.sendSignalingData(internalId: internalId, data: data)
})
}, videoCapturer: video?.impl)
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { state, videoState, remoteVideoState in
@@ -696,12 +711,6 @@ public final class OngoingCallContext {
}
}
public func switchVideoCamera() {
self.withContext { context in
context.nativeSwitchVideoCamera()
}
}
public func debugInfo() -> Signal<(String, String), NoError> {
let poll = Signal<(String, String), NoError> { subscriber in
self.withContext { context in
@@ -725,14 +734,4 @@ public final class OngoingCallContext {
}
}
}
public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.withContext { context in
if let context = context as? OngoingCallThreadLocalContextWebrtc {
context.makeOutgoingVideoView(completion)
} else {
completion(nil)
}
}
}
}

View File

@@ -13,6 +13,8 @@ namespace TGVOIP_NAMESPACE {
class VideoCapturerInterface {
public:
virtual ~VideoCapturerInterface();
virtual void setIsEnabled(bool isEnabled) = 0;
};
void configurePlatformAudio();

View File

@@ -112,6 +112,10 @@
[_videoCapturer stopCapture];
}
- (void)setIsEnabled:(bool)isEnabled {
[_videoCapturer setIsEnabled:isEnabled];
}
@end
@interface VideoCapturerInterfaceImplHolder : NSObject
@@ -153,6 +157,16 @@ public:
});
}
virtual void setIsEnabled(bool isEnabled) {
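// Forward on the main queue and re-check the holder there: the wrapped
// Objective-C capturer may already have been released.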
VideoCapturerInterfaceImplHolder *implReference = _implReference;
dispatch_async(dispatch_get_main_queue(), ^{
if (implReference.reference != nil) {
VideoCapturerInterfaceImplReference *reference = (__bridge VideoCapturerInterfaceImplReference *)implReference.reference;
[reference setIsEnabled:isEnabled];
}
});
}
private:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
VideoCapturerInterfaceImplHolder *_implReference;

View File

@@ -26,8 +26,7 @@ static rtc::Thread *makeMediaThread() {
return value.get();
}
static rtc::Thread *getMediaThread() {
rtc::Thread *Manager::getMediaThread() {
static rtc::Thread *value = makeMediaThread();
return value;
}
@@ -37,7 +36,7 @@ Manager::Manager(
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::vector<TgVoipRtcServer> const &rtcServers,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const TgVoipState &)> stateUpdated,
std::function<void (bool)> videoStateUpdated,
std::function<void (bool)> remoteVideoIsActiveUpdated,
@@ -47,7 +46,7 @@ _thread(thread),
_encryptionKey(encryptionKey),
_enableP2P(enableP2P),
_rtcServers(rtcServers),
_startWithVideo(isVideo),
_videoCapture(videoCapture),
_stateUpdated(stateUpdated),
_videoStateUpdated(videoStateUpdated),
_remoteVideoIsActiveUpdated(remoteVideoIsActiveUpdated),
@@ -111,11 +110,11 @@ void Manager::start() {
);
}));
bool isOutgoing = _encryptionKey.isOutgoing;
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, startWithVideo = _startWithVideo, weakThis]() {
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, videoCapture = _videoCapture, weakThis]() {
return new MediaManager(
getMediaThread(),
isOutgoing,
startWithVideo,
videoCapture,
[thread, weakThis](const rtc::CopyOnWriteBuffer &packet) {
thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() {
auto strongThis = weakThis.lock();
@@ -203,12 +202,6 @@ void Manager::setMuteOutgoingAudio(bool mute) {
});
}
void Manager::switchVideoCamera() {
_mediaManager->perform([](MediaManager *mediaManager) {
mediaManager->switchVideoCamera();
});
}
void Manager::notifyIsLocalVideoActive(bool isActive) {
rtc::CopyOnWriteBuffer buffer;
uint8_t mode = 4;
@@ -228,12 +221,6 @@ void Manager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<web
});
}
void Manager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_mediaManager->perform([sink](MediaManager *mediaManager) {
mediaManager->setOutgoingVideoOutput(sink);
});
}
#ifdef TGVOIP_NAMESPACE
}
#endif

View File

@@ -12,12 +12,14 @@ namespace TGVOIP_NAMESPACE {
class Manager : public std::enable_shared_from_this<Manager> {
public:
static rtc::Thread *getMediaThread();
Manager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::vector<TgVoipRtcServer> const &rtcServers,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const TgVoipState &)> stateUpdated,
std::function<void (bool)> videoStateUpdated,
std::function<void (bool)> remoteVideoIsActiveUpdated,
@@ -29,17 +31,15 @@ public:
void receiveSignalingData(const std::vector<uint8_t> &data);
void setSendVideo(bool sendVideo);
void setMuteOutgoingAudio(bool mute);
void switchVideoCamera();
void notifyIsLocalVideoActive(bool isActive);
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
private:
rtc::Thread *_thread;
TgVoipEncryptionKey _encryptionKey;
bool _enableP2P;
std::vector<TgVoipRtcServer> _rtcServers;
bool _startWithVideo;
std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture;
std::function<void (const TgVoipState &)> _stateUpdated;
std::function<void (bool)> _videoStateUpdated;
std::function<void (bool)> _remoteVideoIsActiveUpdated;

View File

@@ -19,6 +19,9 @@
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "TgVoip.h"
#include "VideoCaptureInterfaceImpl.h"
#if TARGET_OS_IPHONE
#include "CodecsApple.h"
@@ -164,7 +167,7 @@ static rtc::Thread *makeWorkerThread() {
}
static rtc::Thread *getWorkerThread() {
rtc::Thread *MediaManager::getWorkerThread() {
static rtc::Thread *value = makeWorkerThread();
return value;
}
@@ -172,7 +175,7 @@ static rtc::Thread *getWorkerThread() {
MediaManager::MediaManager(
rtc::Thread *thread,
bool isOutgoing,
bool startWithVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted,
std::function<void (bool)> localVideoCaptureActiveUpdated
) :
@@ -180,7 +183,8 @@ _packetEmitted(packetEmitted),
_localVideoCaptureActiveUpdated(localVideoCaptureActiveUpdated),
_thread(thread),
_eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
_videoCapture(videoCapture) {
_ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.fecIncoming = isOutgoing ? ssrcAudioFecIncoming : ssrcAudioFecOutgoing;
@@ -199,7 +203,6 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_videoCodecs = AssignPayloadTypesAndDefaultCodecs(videoEncoderFactory->GetSupportedFormats());
_isSendingVideo = false;
_useFrontCamera = true;
_audioNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, false));
_videoNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, true));
@@ -283,9 +286,9 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_videoChannel->SetInterface(_videoNetworkInterface.get(), webrtc::MediaTransportConfig());
_nativeVideoSource = makeVideoSource(_thread, getWorkerThread());
if (_videoCapture != nullptr) {
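// Constructed on the media thread that owns the capture object, so a
// synchronous hook-up of the activity callback is safe here.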
((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->setIsActiveUpdated(this->_localVideoCaptureActiveUpdated);
if (startWithVideo) {
setSendVideo(true);
}
}
@@ -372,10 +375,6 @@ void MediaManager::setSendVideo(bool sendVideo) {
codec.SetParam(cricket::kCodecParamStartBitrate, 512);
codec.SetParam(cricket::kCodecParamMaxBitrate, 2500);
_videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) {
localVideoCaptureActiveUpdated(isActive);
});
cricket::VideoSendParameters videoSendParameters;
videoSendParameters.codecs.push_back(codec);
@@ -402,11 +401,15 @@ void MediaManager::setSendVideo(bool sendVideo) {
videoSendStreamParams.cname = "cname";
_videoChannel->AddSendStream(videoSendStreamParams);
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get());
if (_videoCapture != nullptr) {
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource.get());
}
_videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr);
} else {
_videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing));
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get());
if (_videoCapture != nullptr) {
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource);
}
}
cricket::VideoRecvParameters videoRecvParameters;
@@ -449,8 +452,6 @@ void MediaManager::setSendVideo(bool sendVideo) {
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr);
_videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr);
_videoCapturer.reset();
_videoChannel->RemoveRecvStream(_ssrcVideo.incoming);
_videoChannel->RemoveRecvStream(_ssrcVideo.fecIncoming);
_videoChannel->RemoveSendStream(_ssrcVideo.outgoing);
@@ -466,25 +467,11 @@ void MediaManager::setMuteOutgoingAudio(bool mute) {
_audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && !_muteOutgoingAudio, nullptr, &_audioSource);
}
void MediaManager::switchVideoCamera() {
if (_isSendingVideo) {
_useFrontCamera = !_useFrontCamera;
_videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) {
localVideoCaptureActiveUpdated(isActive);
});
}
}
void MediaManager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_currentIncomingVideoSink = sink;
_videoChannel->SetSink(_ssrcVideo.incoming, _currentIncomingVideoSink.get());
}
void MediaManager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_currentOutgoingVideoSink = sink;
_nativeVideoSource->AddOrUpdateSink(_currentOutgoingVideoSink.get(), rtc::VideoSinkWants());
}
MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) :
_mediaManager(mediaManager),
_isVideo(isVideo) {

View File

@@ -7,6 +7,8 @@
#include "api/transport/field_trial_based_config.h"
#include "pc/rtp_sender.h"
#include "TgVoip.h"
#include <functional>
#include <memory>
@@ -54,10 +56,12 @@ private:
friend class MediaManager::NetworkInterfaceImpl;
public:
static rtc::Thread *getWorkerThread();
MediaManager(
rtc::Thread *thread,
bool isOutgoing,
bool startWithVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted,
std::function<void (bool)> localVideoCaptureActiveUpdated
);
@@ -68,9 +72,7 @@ public:
void notifyPacketSent(const rtc::SentPacket &sentPacket);
void setSendVideo(bool sendVideo);
void setMuteOutgoingAudio(bool mute);
void switchVideoCamera();
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
protected:
std::function<void (const rtc::CopyOnWriteBuffer &)> _packetEmitted;
@@ -90,7 +92,6 @@ private:
std::vector<cricket::VideoCodec> _videoCodecs;
bool _isSendingVideo;
bool _useFrontCamera;
std::unique_ptr<cricket::MediaEngineInterface> _mediaEngine;
std::unique_ptr<webrtc::Call> _call;
@@ -99,10 +100,8 @@ private:
std::unique_ptr<cricket::VoiceMediaChannel> _audioChannel;
std::unique_ptr<cricket::VideoMediaChannel> _videoChannel;
std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> _videoBitrateAllocatorFactory;
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
std::unique_ptr<VideoCapturerInterface> _videoCapturer;
std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentIncomingVideoSink;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentOutgoingVideoSink;
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;

View File

@@ -129,6 +129,19 @@ struct TgVoipAudioDataCallbacks {
std::function<void(int16_t*, size_t)> preprocessed;
};
class TgVoipVideoCaptureInterface {
protected:
TgVoipVideoCaptureInterface() = default;
public:
static std::shared_ptr<TgVoipVideoCaptureInterface> makeInstance();
virtual ~TgVoipVideoCaptureInterface();
virtual void switchCamera() = 0;
virtual void setIsVideoEnabled(bool isVideoEnabled) = 0;
virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
};
class TgVoip {
protected:
TgVoip() = default;
@@ -146,7 +159,7 @@ public:
std::vector<TgVoipRtcServer> const &rtcServers,
TgVoipNetworkType initialNetworkType,
TgVoipEncryptionKey const &encryptionKey,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(bool)> videoStateUpdated,
std::function<void(bool)> remoteVideoIsActiveUpdated,
@@ -161,7 +174,6 @@ public:
virtual void setEchoCancellationStrength(int strength) = 0;
virtual void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual std::string getLastError() = 0;
virtual std::string getDebugInfo() = 0;
@@ -171,7 +183,6 @@ public:
virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
virtual void setSendVideo(bool sendVideo) = 0;
virtual void switchVideoCamera() = 0;
virtual TgVoipFinalState stop() = 0;
};

View File

@@ -5,10 +5,21 @@
#include "rtc_base/logging.h"
#include "Manager.h"
#include "MediaManager.h"
#include <stdarg.h>
#include <iostream>
#include "VideoCaptureInterfaceImpl.h"
#if TARGET_OS_IPHONE
#include "CodecsApple.h"
#else
#error "Unsupported platform"
#endif
#import <Foundation/Foundation.h>
#include <sys/time.h>
@@ -142,7 +153,7 @@ public:
std::vector<TgVoipRtcServer> const &rtcServers,
TgVoipConfig const &config,
TgVoipEncryptionKey const &encryptionKey,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
TgVoipNetworkType initialNetworkType,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(bool)> videoStateUpdated,
@@ -160,13 +171,13 @@ public:
bool enableP2P = config.enableP2P;
_manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, isVideo, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers](){
_manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
return new Manager(
getManagerThread(),
encryptionKey,
enableP2P,
rtcServers,
isVideo,
videoCapture,
[stateUpdated](const TgVoipState &state) {
stateUpdated(state);
},
@@ -202,12 +213,6 @@ public:
});
};
void switchVideoCamera() override {
_manager->perform([](Manager *manager) {
manager->switchVideoCamera();
});
}
void setNetworkType(TgVoipNetworkType networkType) override {
/*message::NetworkType mappedType;
@@ -268,12 +273,6 @@ public:
});
}
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
_manager->perform([sink](Manager *manager) {
manager->setOutgoingVideoOutput(sink);
});
}
void setAudioOutputGainControlEnabled(bool enabled) override {
}
@@ -387,7 +386,7 @@ TgVoip *TgVoip::makeInstance(
std::vector<TgVoipRtcServer> const &rtcServers,
TgVoipNetworkType initialNetworkType,
TgVoipEncryptionKey const &encryptionKey,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(bool)> videoStateUpdated,
std::function<void(bool)> remoteVideoIsActiveUpdated,
@@ -400,7 +399,7 @@ TgVoip *TgVoip::makeInstance(
rtcServers,
config,
encryptionKey,
isVideo,
videoCapture,
initialNetworkType,
stateUpdated,
videoStateUpdated,
@@ -411,6 +410,12 @@
TgVoip::~TgVoip() = default;
std::shared_ptr<TgVoipVideoCaptureInterface> TgVoipVideoCaptureInterface::makeInstance() {
return std::shared_ptr<TgVoipVideoCaptureInterface>(new TgVoipVideoCaptureInterfaceImpl());
}
TgVoipVideoCaptureInterface::~TgVoipVideoCaptureInterface() = default;
#ifdef TGVOIP_NAMESPACE
}
#endif

View File

@@ -43,6 +43,12 @@ public:
});
}
T *getSyncAssumingSameThread() {
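// Fast path that skips the usual perform()-based hop; valid only when the
// caller is already on the owning thread (hence the asserts below).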
assert(_thread->IsCurrent());
assert(_valueHolder->_value != nullptr);
return _valueHolder->_value.get();
}
private:
rtc::Thread *_thread;
std::shared_ptr<ValueHolder<T>> _valueHolder;

View File

@@ -17,6 +17,7 @@
- (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps;
- (void)stopCapture;
- (void)setIsEnabled:(bool)isEnabled;
@end

View File

@@ -39,6 +39,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
UIDeviceOrientation _orientation;
void (^_isActiveUpdated)(bool);
bool _isActiveValue;
bool _inForegroundValue;
bool _isPaused;
}
@end
@@ -49,6 +52,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
self = [super init];
if (self != nil) {
_source = source;
_isActiveValue = true;
_inForegroundValue = true;
_isPaused = false;
_isActiveUpdated = [isActiveUpdated copy];
if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
@@ -124,6 +130,11 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
[self stopCaptureWithCompletionHandler:nil];
}
- (void)setIsEnabled:(bool)isEnabled {
_isPaused = !isEnabled;
[self updateIsActiveValue];
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps
@@ -253,7 +264,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
if (!_isPaused) {
getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
@@ -316,15 +329,23 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
_hasRetriedOnFatalError = NO;
}];
if (_isActiveUpdated) {
_isActiveUpdated(true);
}
_inForegroundValue = true;
[self updateIsActiveValue];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
RTCLog(@"Capture session stopped.");
if (_isActiveUpdated) {
_isActiveUpdated(false);
_inForegroundValue = false;
[self updateIsActiveValue];
}
- (void)updateIsActiveValue {
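// "Active" means capturing in the foreground and not paused; notify only on actual changes.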
bool isActive = _inForegroundValue && !_isPaused;
if (isActive != _isActiveValue) {
_isActiveValue = isActive;
if (_isActiveUpdated) {
_isActiveUpdated(_isActiveValue);
}
}
}

View File

@@ -0,0 +1,53 @@
#ifndef VIDEO_CAPTURE_INTERFACE_IMPL_H
#define VIDEO_CAPTURE_INTERFACE_IMPL_H
#include "TgVoip.h"
#include <memory>
#include "ThreadLocalObject.h"
#include "api/media_stream_interface.h"
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
class VideoCapturerInterface;
class TgVoipVideoCaptureInterfaceObject {
public:
TgVoipVideoCaptureInterfaceObject();
~TgVoipVideoCaptureInterfaceObject();
void switchCamera();
void setIsVideoEnabled(bool isVideoEnabled);
void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated);
public:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _videoSource;
std::unique_ptr<VideoCapturerInterface> _videoCapturer;
private:
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
std::function<void (bool)> _isActiveUpdated;
bool _useFrontCamera;
bool _isVideoEnabled;
};
class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
public:
TgVoipVideoCaptureInterfaceImpl();
virtual ~TgVoipVideoCaptureInterfaceImpl();
virtual void switchCamera();
virtual void setIsVideoEnabled(bool isVideoEnabled);
virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
public:
std::unique_ptr<ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>> _impl;
};
#ifdef TGVOIP_NAMESPACE
}
#endif
#endif

View File

@@ -0,0 +1,90 @@
#include "VideoCaptureInterfaceImpl.h"
#include "CodecsApple.h"
#include "Manager.h"
#include "MediaManager.h"
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
TgVoipVideoCaptureInterfaceObject::TgVoipVideoCaptureInterfaceObject() {
_useFrontCamera = true;
_isVideoEnabled = true;
_videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
// The video source must outlive the capturer that feeds it.
_videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
if (this->_isActiveUpdated) {
this->_isActiveUpdated(isActive);
}
});
}
TgVoipVideoCaptureInterfaceObject::~TgVoipVideoCaptureInterfaceObject() {
if (_currentSink != nullptr) {
_videoSource->RemoveSink(_currentSink.get());
}
}
void TgVoipVideoCaptureInterfaceObject::switchCamera() {
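// Rebuilding the capturer around the shared _videoSource flips the camera;
// sinks attached to the source keep receiving frames.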
_useFrontCamera = !_useFrontCamera;
_videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
if (this->_isActiveUpdated) {
this->_isActiveUpdated(isActive);
}
});
}
void TgVoipVideoCaptureInterfaceObject::setIsVideoEnabled(bool isVideoEnabled) {
if (_isVideoEnabled != isVideoEnabled) {
_isVideoEnabled = isVideoEnabled;
_videoCapturer->setIsEnabled(isVideoEnabled);
}
}
void TgVoipVideoCaptureInterfaceObject::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
if (_currentSink != nullptr) {
_videoSource->RemoveSink(_currentSink.get());
}
_currentSink = sink;
if (_currentSink != nullptr) {
_videoSource->AddOrUpdateSink(_currentSink.get(), rtc::VideoSinkWants());
}
}
void TgVoipVideoCaptureInterfaceObject::setIsActiveUpdated(std::function<void (bool)> isActiveUpdated) {
_isActiveUpdated = isActiveUpdated;
}
TgVoipVideoCaptureInterfaceImpl::TgVoipVideoCaptureInterfaceImpl() {
_impl.reset(new ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>(
Manager::getMediaThread(),
[]() {
return new TgVoipVideoCaptureInterfaceObject();
}
));
}
TgVoipVideoCaptureInterfaceImpl::~TgVoipVideoCaptureInterfaceImpl() {
}
void TgVoipVideoCaptureInterfaceImpl::switchCamera() {
_impl->perform([](TgVoipVideoCaptureInterfaceObject *impl) {
impl->switchCamera();
});
}
void TgVoipVideoCaptureInterfaceImpl::setIsVideoEnabled(bool isVideoEnabled) {
_impl->perform([isVideoEnabled](TgVoipVideoCaptureInterfaceObject *impl) {
impl->setIsVideoEnabled(isVideoEnabled);
});
}
void TgVoipVideoCaptureInterfaceImpl::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {
impl->setVideoOutput(sink);
});
}
}
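
Every mutation above is funneled through _impl->perform, so the capturer object is only ever touched on the media thread and needs no locking of its own. A minimal sketch of the contract this code relies on from ThreadLocalObject.h (an assumption about its shape, not the shipped header, which may differ in detail):

#include <functional>
#include <memory>
#include "rtc_base/thread.h"

template <typename T>
class ThreadLocalObjectSketch {
public:
    ThreadLocalObjectSketch(rtc::Thread *thread, std::function<T *()> generator)
    : _thread(thread), _holder(std::make_shared<Holder>()) {
        // construct the object on its owning thread, not on the caller's thread
        _thread->PostTask(RTC_FROM_HERE, [holder = _holder, generator]() {
            holder->value.reset(generator());
        });
    }

    void perform(std::function<void(T *)> f) {
        // the shared holder keeps the object alive until the queued task has run
        _thread->PostTask(RTC_FROM_HERE, [holder = _holder, f]() {
            f(holder->value.get());
        });
    }

private:
    struct Holder {
        std::unique_ptr<T> value;
    };

    rtc::Thread *_thread;
    std::shared_ptr<Holder> _holder;
};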

View File

@ -78,6 +78,17 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@end

@interface OngoingCallThreadLocalContextVideoCapturer : NSObject

- (instancetype _Nonnull)init;

- (void)switchVideoCamera;
- (void)setIsVideoEnabled:(bool)isVideoEnabled;
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;

@end
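
Capture now lives in its own object that the caller creates up front and later hands to the context through the new videoCapturer: parameter below. A usage sketch, not from the commit (the comment body is illustrative):

OngoingCallThreadLocalContextVideoCapturer *capturer = [[OngoingCallThreadLocalContextVideoCapturer alloc] init];
[capturer makeOutgoingVideoView:^(UIView * _Nullable view) {
    // delivered on the main queue; attach the local-camera preview to the call UI
}];
[capturer setIsVideoEnabled:true]; // capture runs independently of any call context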
@interface OngoingCallThreadLocalContextWebrtc : NSObject
+ (void)setupLoggingFunction:(void (* _Nullable)(NSString * _Nullable))loggingFunction;
@ -88,7 +99,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
- (bool)needRate;
@ -99,10 +110,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)setIsMuted:(bool)isMuted;
- (void)setVideoEnabled:(bool)videoEnabled;
- (void)switchVideoCamera;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)addSignalingData:(NSData * _Nonnull)data;
@end

View File

@ -21,6 +21,49 @@ using namespace TGVOIP_NAMESPACE;
@end
@interface OngoingCallThreadLocalContextVideoCapturer () {
    std::shared_ptr<TgVoipVideoCaptureInterface> _interface;
}

@end

@implementation OngoingCallThreadLocalContextVideoCapturer

- (instancetype _Nonnull)init {
    self = [super init];
    if (self != nil) {
        _interface = TgVoipVideoCaptureInterface::makeInstance();
    }
    return self;
}

- (void)switchVideoCamera {
    _interface->switchCamera();
}

- (void)setIsVideoEnabled:(bool)isVideoEnabled {
    _interface->setIsVideoEnabled(isVideoEnabled);
}

- (std::shared_ptr<TgVoipVideoCaptureInterface>)getInterface {
    return _interface;
}

- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
    std::shared_ptr<TgVoipVideoCaptureInterface> interface = _interface;
    dispatch_async(dispatch_get_main_queue(), ^{
        // UIKit views must be created on the main queue; the Metal view renders the
        // local camera, so its sink becomes the capture interface's video output
        VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
        remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
        std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
        interface->setVideoOutput(sink);
        completion(remoteRenderer);
    });
}

@end
@interface OngoingCallThreadLocalContextWebrtc () {
    id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
    int32_t _contextId;
@ -36,6 +79,7 @@ using namespace TGVOIP_NAMESPACE;
    OngoingCallStateWebrtc _state;
    OngoingCallVideoStateWebrtc _videoState;
    OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
    OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;

    int32_t _signalBars;
    NSData *_lastDerivedState;
@ -134,7 +178,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
return @"2.7.7";
}
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData; {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
    self = [super init];
    if (self != nil) {
        _queue = queue;
@ -146,7 +190,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
        _callPacketTimeout = 10.0;
        _networkType = networkType;
        _sendSignalingData = [sendSignalingData copy];
        if (isVideo) {
        _videoCapturer = videoCapturer;
        if (videoCapturer != nil) {
            _videoState = OngoingCallVideoStateActiveOutgoing;
            _remoteVideoState = OngoingCallRemoteVideoStateActive;
        } else {
@ -236,7 +281,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
            parsedRtcServers,
            callControllerNetworkTypeForType(networkType),
            encryptionKey,
            isVideo,
            [_videoCapturer getInterface],
            [weakSelf, queue](TgVoipState state) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
@ -424,12 +469,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    }
}

- (void)switchVideoCamera {
    if (_tgVoip) {
        _tgVoip->switchVideoCamera();
    }
}
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType {
    if (_networkType != networkType) {
        _networkType = networkType;
@ -457,23 +496,5 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    }
}

- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
    if (_tgVoip) {
        __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
        dispatch_async(dispatch_get_main_queue(), ^{
            VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
            remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
            std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
            __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
            if (strongSelf) {
                strongSelf->_tgVoip->setOutgoingVideoOutput(sink);
            }
            completion(remoteRenderer);
        });
    }
}
@end
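
For context, a sketch of how the changed initializer is wired from the caller's side. This is a hypothetical helper, not from the commit: the enum member names OngoingCallNetworkTypeWifi and OngoingCallDataSavingNever and all local names are assumptions. The point it illustrates is grounded in the diff: a non-nil videoCapturer now marks the call as video, replacing the removed isVideo: flag, and the context derives its initial _videoState from it.

static OngoingCallThreadLocalContextWebrtc * _Nonnull makeVideoCallContextSketch(
        id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull queue,
        NSArray<VoipRtcServerWebrtc *> * _Nonnull rtcServers,
        NSData * _Nonnull derivedState,
        NSData * _Nonnull key,
        OngoingCallConnectionDescriptionWebrtc * _Nonnull primaryConnection,
        int32_t maxLayer,
        NSString * _Nonnull logPath,
        void (^ _Nonnull sendSignalingData)(NSData * _Nonnull)) {
    // creating a capturer up front is what makes this a video call
    OngoingCallThreadLocalContextVideoCapturer *capturer = [[OngoingCallThreadLocalContextVideoCapturer alloc] init];
    return [[OngoingCallThreadLocalContextWebrtc alloc] initWithQueue:queue
        proxy:nil
        rtcServers:rtcServers
        networkType:OngoingCallNetworkTypeWifi // assumed enum member name
        dataSaving:OngoingCallDataSavingNever  // assumed enum member name
        derivedState:derivedState
        key:key
        isOutgoing:true
        primaryConnection:primaryConnection
        alternativeConnections:@[]
        maxLayer:maxLayer
        allowP2P:YES
        logPath:logPath
        sendSignalingData:sendSignalingData
        videoCapturer:capturer]; // pass nil here for an audio-only call
}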