Video improvements

This commit is contained in:
Ali
2020-07-03 21:25:36 +04:00
parent c8c1c96f16
commit b542dc3fd7
44 changed files with 1097 additions and 613 deletions

View File

@@ -90,6 +90,7 @@ public protocol PresentationCall: class {
     func toggleIsMuted()
     func setIsMuted(_ value: Bool)
     func setEnableVideo(_ value: Bool)
+    func setOutgoingVideoIsPaused(_ isPaused: Bool)
     func switchVideoCamera()
     func setCurrentAudioOutput(_ output: AudioSessionOutput)
     func debugInfo() -> Signal<(String, String), NoError>
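For context, a minimal usage sketch of the new protocol requirement (the call site, flag, and helper below are illustrative, not part of this commit):

    // Hypothetical caller: flip the outgoing video pause state on any
    // object conforming to PresentationCall.
    var outgoingVideoIsPaused = false

    func toggleOutgoingVideoPause(call: PresentationCall) {
        outgoingVideoIsPaused = !outgoingVideoIsPaused
        call.setOutgoingVideoIsPaused(outgoingVideoIsPaused)
    }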

View File

@@ -238,14 +238,14 @@ public extension CALayer {
         self.animate(from: NSValue(cgPoint: from), to: NSValue(cgPoint: to), keyPath: "position", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
     }
-    func animateBounds(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func animateBounds(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
         if from == to && !force {
             if let completion = completion {
                 completion(true)
             }
             return
         }
-        self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
+        self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
     }
     func animateBoundsOriginXAdditive(from: CGFloat, to: CGFloat, duration: Double, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
@@ -268,7 +268,7 @@ public extension CALayer {
         self.animateKeyframes(values: values.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position")
     }
-    func animateFrame(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func animateFrame(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
         if from == to && !force {
             if let completion = completion {
                 completion(true)
@@ -302,14 +302,14 @@ public extension CALayer {
             toBounds = CGRect()
         }
-        self.animatePosition(from: fromPosition, to: toPosition, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
+        self.animatePosition(from: fromPosition, to: toPosition, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
             if !value {
                 interrupted = true
             }
             completedPosition = true
             partialCompletion()
         })
-        self.animateBounds(from: fromBounds, to: toBounds, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
+        self.animateBounds(from: fromBounds, to: toBounds, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
             if !value {
                 interrupted = true
             }
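The new delay parameter is threaded into the underlying animation call; a minimal sketch of staggering two layer animations with it (assuming the Display module's CALayer helpers above; layers and values are illustrative):

    // Hypothetical: move two layers to new frames, the second one slightly later.
    func staggerFrames(first: CALayer, second: CALayer, firstFrame: CGRect, secondFrame: CGRect) {
        first.animateFrame(from: first.frame, to: firstFrame, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
        second.animateFrame(from: second.frame, to: secondFrame, duration: 0.3, delay: 0.05, timingFunction: kCAMediaTimingFunctionSpring)
    }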

View File

@@ -63,7 +63,7 @@ public enum ContainedViewLayoutTransition {
 }
 public extension ContainedViewLayoutTransition {
-    func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
         if node.frame.equalTo(frame) && !force {
             completion?(true)
         } else {
@@ -81,7 +81,7 @@ public extension ContainedViewLayoutTransition {
                 previousFrame = node.frame
             }
             node.frame = frame
-            node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
+            node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, delay: delay, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
                 if let completion = completion {
                     completion(result)
                 }
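This is what later lets the call buttons stagger their layout per button; a sketch of that pattern under the same assumptions (the nodes and frame values are placeholders):

    // Hypothetical: lay out a row of nodes, each starting slightly after the previous one.
    func layoutStaggered(nodes: [ASDisplayNode], transition: ContainedViewLayoutTransition) {
        for (index, node) in nodes.enumerated() {
            let frame = CGRect(x: CGFloat(index) * 80.0, y: 0.0, width: 72.0, height: 72.0)
            transition.updateFrame(node: node, frame: frame, delay: 0.015 * Double(index))
        }
    }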

View File

@@ -178,8 +178,8 @@ public final class CallController: ViewController {
             let _ = self?.call.hangUp()
         }
-        self.controllerNode.toggleVideo = { [weak self] in
-            let _ = self?.call.setEnableVideo(true)
+        self.controllerNode.setIsVideoPaused = { [weak self] isPaused in
+            self?.call.setOutgoingVideoIsPaused(isPaused)
         }
         self.controllerNode.back = { [weak self] in

View File

@@ -5,245 +5,218 @@ import AsyncDisplayKit
 import SwiftSignalKit
 import AppBundle
-enum CallControllerButtonType {
-    case mute
-    case end
-    case accept
-    case speaker
-    case bluetooth
-    case switchCamera
-}
-private let buttonSize = CGSize(width: 75.0, height: 75.0)
-private func generateEmptyButtonImage(icon: UIImage?, strokeColor: UIColor?, fillColor: UIColor, knockout: Bool = false, angle: CGFloat = 0.0) -> UIImage? {
-    return generateImage(buttonSize, contextGenerator: { size, context in
-        context.clear(CGRect(origin: CGPoint(), size: size))
-        context.setBlendMode(.copy)
-        if let strokeColor = strokeColor {
-            context.setFillColor(strokeColor.cgColor)
-            context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
-            context.setFillColor(fillColor.cgColor)
-            context.fillEllipse(in: CGRect(origin: CGPoint(x: 1.5, y: 1.5), size: CGSize(width: size.width - 3.0, height: size.height - 3.0)))
-        } else {
-            context.setFillColor(fillColor.cgColor)
-            context.fillEllipse(in: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.height)))
-        }
-        if let icon = icon {
-            if !angle.isZero {
-                context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
-                context.rotate(by: angle)
-                context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
-            }
-            let imageSize = icon.size
-            let imageRect = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floor((size.width - imageSize.height) / 2.0)), size: imageSize)
-            if knockout {
-                context.setBlendMode(.copy)
-                context.clip(to: imageRect, mask: icon.cgImage!)
-                context.setFillColor(UIColor.clear.cgColor)
-                context.fill(imageRect)
-            } else {
-                context.setBlendMode(.normal)
-                context.draw(icon.cgImage!, in: imageRect)
-            }
-        }
-    })
-}
-private func generateFilledButtonImage(color: UIColor, icon: UIImage?, angle: CGFloat = 0.0) -> UIImage? {
-    return generateImage(buttonSize, contextGenerator: { size, context in
-        context.clear(CGRect(origin: CGPoint(), size: size))
-        context.setBlendMode(.normal)
-        context.setFillColor(color.cgColor)
-        context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
-        if let icon = icon {
-            if !angle.isZero {
-                context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
-                context.rotate(by: angle)
-                context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
-            }
-            context.draw(icon.cgImage!, in: CGRect(origin: CGPoint(x: floor((size.width - icon.size.width) / 2.0), y: floor((size.height - icon.size.height) / 2.0)), size: icon.size))
-        }
-    })
-}
-private let emptyStroke = UIColor(white: 1.0, alpha: 0.8)
-private let emptyHighlightedFill = UIColor(white: 1.0, alpha: 0.3)
-private let invertedFill = UIColor(white: 1.0, alpha: 1.0)
-private let labelFont = Font.regular(14.5)
+private let labelFont = Font.regular(13.0)
+final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
+    struct Content: Equatable {
+        enum Appearance: Equatable {
+            enum Color {
+                case red
+                case green
+            }
+            case blurred(isFilled: Bool)
+            case color(Color)
+        }
+        enum Image {
+            case camera
+            case mute
+            case flipCamera
+            case bluetooth
+            case speaker
+            case accept
+            case end
+        }
+        var appearance: Appearance
+        var image: Image
+    }
final class CallControllerButtonNode: HighlightTrackingButtonNode {
private var type: CallControllerButtonType
private var regularImage: UIImage? private let contentContainer: ASDisplayNode
private var highlightedImage: UIImage? private let effectView: UIVisualEffectView
private var filledImage: UIImage? private let contentNode: ASImageNode
private let overlayHighlightNode: ASImageNode
private let textNode: ImmediateTextNode
private let backgroundNode: ASImageNode private let largeButtonSize: CGFloat = 72.0
private let labelNode: ASTextNode?
init(type: CallControllerButtonType, label: String?) { private(set) var currentContent: Content?
self.type = type private(set) var currentText: String = ""
init() {
self.contentContainer = ASDisplayNode()
self.backgroundNode = ASImageNode() self.effectView = UIVisualEffectView()
self.backgroundNode.isLayerBacked = true self.effectView.effect = UIBlurEffect(style: .light)
self.backgroundNode.displayWithoutProcessing = false self.effectView.layer.cornerRadius = self.largeButtonSize / 2.0
self.backgroundNode.displaysAsynchronously = false self.effectView.clipsToBounds = true
self.effectView.isUserInteractionEnabled = false
if let label = label { self.contentNode = ASImageNode()
let labelNode = ASTextNode() self.contentNode.isUserInteractionEnabled = false
labelNode.attributedText = NSAttributedString(string: label, font: labelFont, textColor: .white)
self.labelNode = labelNode
} else {
self.labelNode = nil
}
var regularImage: UIImage? self.overlayHighlightNode = ASImageNode()
var highlightedImage: UIImage? self.overlayHighlightNode.isUserInteractionEnabled = false
var filledImage: UIImage? self.overlayHighlightNode.alpha = 0.0
switch type { self.textNode = ImmediateTextNode()
case .mute: self.textNode.displaysAsynchronously = false
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear) self.textNode.isUserInteractionEnabled = false
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .accept:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
case .end:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
case .speaker:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .bluetooth:
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
case .switchCamera:
let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
}
self.regularImage = regularImage super.init(pointerStyle: nil)
self.highlightedImage = highlightedImage
self.filledImage = filledImage
super.init() self.addSubnode(self.contentContainer)
self.contentContainer.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
self.addSubnode(self.backgroundNode) self.addSubnode(self.textNode)
if let labelNode = self.labelNode { self.contentContainer.view.addSubview(self.effectView)
self.addSubnode(labelNode) self.contentContainer.addSubnode(self.contentNode)
} self.contentContainer.addSubnode(self.overlayHighlightNode)
self.backgroundNode.image = regularImage
self.currentImage = regularImage
self.highligthedChanged = { [weak self] highlighted in self.highligthedChanged = { [weak self] highlighted in
if let strongSelf = self { guard let strongSelf = self else {
strongSelf.internalHighlighted = highlighted return
strongSelf.updateState(highlighted: highlighted, selected: strongSelf.isSelected)
} }
} if highlighted {
} strongSelf.overlayHighlightNode.alpha = 1.0
private var internalHighlighted = false
override var isSelected: Bool {
didSet {
self.updateState(highlighted: self.internalHighlighted, selected: self.isSelected)
}
}
private var currentImage: UIImage?
private func updateState(highlighted: Bool, selected: Bool) {
let image: UIImage?
if selected {
image = self.filledImage
} else if highlighted {
image = self.highlightedImage
} else {
image = self.regularImage
}
if self.currentImage !== image {
let currentContents = self.backgroundNode.layer.contents
self.backgroundNode.layer.removeAnimation(forKey: "contents")
if let currentContents = currentContents, let image = image {
self.backgroundNode.image = image
self.backgroundNode.layer.animate(from: currentContents as AnyObject, to: image.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: image === self.currentImage || image === self.filledImage ? 0.25 : 0.15)
} else { } else {
self.backgroundNode.image = image strongSelf.overlayHighlightNode.alpha = 0.0
strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
} }
self.currentImage = image
} }
} }
func updateType(_ type: CallControllerButtonType) { func update(size: CGSize, content: Content, text: String, transition: ContainedViewLayoutTransition) {
if self.type == type { let scaleFactor = size.width / self.largeButtonSize
return
}
self.type = type
var regularImage: UIImage?
var highlightedImage: UIImage?
var filledImage: UIImage?
switch type { self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
case .mute: self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear) self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) if self.currentContent != content {
case .accept: self.currentContent = content
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0) switch content.appearance {
case .end: case .blurred:
regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton")) self.effectView.isHidden = false
highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton")) case .color:
case .speaker: self.effectView.isHidden = true
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear) }
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
case .bluetooth: context.clear(CGRect(origin: CGPoint(), size: size))
regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill) var fillColor: UIColor = .clear
filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true) var drawOverMask = false
case .switchCamera: context.setBlendMode(.normal)
let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white) var imageScale: CGFloat = 1.0
regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear) switch content.appearance {
highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill) case let .blurred(isFilled):
filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true) if isFilled {
fillColor = .white
drawOverMask = true
context.setBlendMode(.copy)
}
let smallButtonSize: CGFloat = 60.0
imageScale = self.largeButtonSize / smallButtonSize
case let .color(color):
switch color {
case .red:
fillColor = UIColor(rgb: 0xd92326)
case .green:
fillColor = UIColor(rgb: 0x74db58)
}
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
var image: UIImage?
switch content.image {
case .camera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white)
case .mute:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white)
case .flipCamera:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
case .bluetooth:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white)
case .speaker:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white)
case .accept:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white)
case .end:
image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white)
}
if let image = image {
context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
context.scaleBy(x: imageScale, y: imageScale)
context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
let imageRect = CGRect(origin: CGPoint(x: floor((size.width - image.size.width) / 2.0), y: floor((size.height - image.size.height) / 2.0)), size: image.size)
if drawOverMask {
context.clip(to: imageRect, mask: image.cgImage!)
context.setBlendMode(.copy)
context.setFillColor(UIColor.clear.cgColor)
context.fill(CGRect(origin: CGPoint(), size: size))
} else {
context.draw(image.cgImage!, in: imageRect)
}
}
})
if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image {
self.contentNode.image = contentImage
self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
} else {
self.contentNode.image = contentImage
}
self.overlayHighlightNode.image = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
let fillColor: UIColor
context.setBlendMode(.normal)
switch content.appearance {
case let .blurred(isFilled):
if isFilled {
fillColor = UIColor(white: 0.0, alpha: 0.1)
} else {
fillColor = UIColor(white: 1.0, alpha: 0.2)
}
case let .color(color):
switch color {
case .red:
fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
case .green:
fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
}
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
})
} }
self.regularImage = regularImage transition.updatePosition(node: self.contentContainer, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0))
self.highlightedImage = highlightedImage transition.updateSublayerTransformScale(node: self.contentContainer, scale: scaleFactor)
self.filledImage = filledImage
self.updateState(highlighted: self.isHighlighted, selected: self.isSelected) if self.currentText != text {
} self.textNode.attributedText = NSAttributedString(string: text, font: labelFont, textColor: .white)
func animateRollTransition() {
self.backgroundNode.layer.animate(from: 0.0 as NSNumber, to: (-CGFloat.pi * 5 / 4) as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3, removeOnCompletion: false)
self.labelNode?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
}
override func layout() {
super.layout()
let size = self.bounds.size
self.backgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.width))
if let labelNode = self.labelNode {
let labelSize = labelNode.measure(CGSize(width: 200.0, height: 100.0))
labelNode.frame = CGRect(origin: CGPoint(x: floor((size.width - labelSize.width) / 2.0), y: 81.0), size: labelSize)
} }
let textSize = self.textNode.updateLayout(CGSize(width: 150.0, height: 100.0))
let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: size.height + 5.0), size: textSize)
if self.currentText.isEmpty {
self.textNode.frame = textFrame
if transition.isAnimated {
self.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
} else {
transition.updateFrameAdditiveToCenter(node: self.textNode, frame: textFrame)
}
self.currentText = text
} }
} }
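To make the new button API concrete, a hypothetical usage sketch of CallControllerButtonItemNode (the 72-point size and the Content values follow the diff; the label text and call site are illustrative):

    // Hypothetical: show a filled camera button, then cross-fade it to the
    // unfilled appearance when the camera is turned off.
    let buttonNode = CallControllerButtonItemNode()
    let size = CGSize(width: 72.0, height: 72.0)
    buttonNode.update(
        size: size,
        content: CallControllerButtonItemNode.Content(appearance: .blurred(isFilled: true), image: .camera),
        text: "Camera",
        transition: .immediate
    )
    // Later, on state change:
    buttonNode.update(
        size: size,
        content: CallControllerButtonItemNode.Content(appearance: .blurred(isFilled: false), image: .camera),
        text: "Camera",
        transition: .animated(duration: 0.3, curve: .spring)
    )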

View File

@@ -22,27 +22,65 @@ enum CallControllerButtonsMode: Equatable {
     }
     case active(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
-    case incoming
+    case incoming(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
+    case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
+}
+private enum ButtonDescription: Equatable {
+    enum Key: Hashable {
+        case accept
+        case end
+        case enableCamera
+        case switchCamera
+        case soundOutput
+        case mute
+    }
+    enum SoundOutput {
+        case speaker
+        case bluetooth
+    }
+    enum EndType {
+        case outgoing
+        case decline
+        case end
+    }
+    case accept
+    case end(EndType)
+    case enableCamera(Bool)
+    case switchCamera
+    case soundOutput(SoundOutput)
+    case mute(Bool)
+    var key: Key {
+        switch self {
+        case .accept:
+            return .accept
+        case .end:
+            return .end
+        case .enableCamera:
+            return .enableCamera
+        case .switchCamera:
+            return .switchCamera
+        case .soundOutput:
+            return .soundOutput
+        case .mute:
+            return .mute
+        }
+    }
 }
 final class CallControllerButtonsNode: ASDisplayNode {
-    private let acceptButton: CallControllerButtonNode
-    private let declineButton: CallControllerButtonNode
-    private let muteButton: CallControllerButtonNode
-    private let endButton: CallControllerButtonNode
-    private let speakerButton: CallControllerButtonNode
-    private let swichCameraButton: CallControllerButtonNode
+    private var buttonNodes: [ButtonDescription.Key: CallControllerButtonItemNode] = [:]
     private var mode: CallControllerButtonsMode?
     private var validLayout: CGFloat?
-    var isMuted = false {
-        didSet {
-            self.muteButton.isSelected = self.isMuted
-        }
-    }
+    var isMuted = false
+    var isCameraPaused = false
     var accept: (() -> Void)?
     var mute: (() -> Void)?
@@ -52,57 +90,30 @@ final class CallControllerButtonsNode: ASDisplayNode {
     var rotateCamera: (() -> Void)?
     init(strings: PresentationStrings) {
-        self.acceptButton = CallControllerButtonNode(type: .accept, label: strings.Call_Accept)
-        self.acceptButton.alpha = 0.0
-        self.declineButton = CallControllerButtonNode(type: .end, label: strings.Call_Decline)
-        self.declineButton.alpha = 0.0
-        self.muteButton = CallControllerButtonNode(type: .mute, label: nil)
-        self.muteButton.alpha = 0.0
-        self.endButton = CallControllerButtonNode(type: .end, label: nil)
-        self.endButton.alpha = 0.0
-        self.speakerButton = CallControllerButtonNode(type: .speaker, label: nil)
-        self.speakerButton.alpha = 0.0
-        self.swichCameraButton = CallControllerButtonNode(type: .switchCamera, label: nil)
-        self.swichCameraButton.alpha = 0.0
         super.init()
-        self.addSubnode(self.acceptButton)
-        self.addSubnode(self.declineButton)
-        self.addSubnode(self.muteButton)
-        self.addSubnode(self.endButton)
-        self.addSubnode(self.speakerButton)
-        self.addSubnode(self.swichCameraButton)
-        self.acceptButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.declineButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.muteButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.endButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.speakerButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.swichCameraButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
     }
-    func updateLayout(constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
-        let previousLayout = self.validLayout
+    func updateLayout(strings: PresentationStrings, constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
         self.validLayout = constrainedWidth
-        if let mode = self.mode, previousLayout != self.validLayout {
-            self.updateButtonsLayout(mode: mode, width: constrainedWidth, animated: false)
+        if let mode = self.mode {
+            self.updateButtonsLayout(strings: strings, mode: mode, width: constrainedWidth, animated: transition.isAnimated)
         }
     }
-    func updateMode(_ mode: CallControllerButtonsMode) {
+    func updateMode(strings: PresentationStrings, mode: CallControllerButtonsMode) {
         if self.mode != mode {
             let previousMode = self.mode
             self.mode = mode
             if let validLayout = self.validLayout {
-                self.updateButtonsLayout(mode: mode, width: validLayout, animated: previousMode != nil)
+                self.updateButtonsLayout(strings: strings, mode: mode, width: validLayout, animated: previousMode != nil)
             }
         }
     }
-    private func updateButtonsLayout(mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
+    private var appliedMode: CallControllerButtonsMode?
+    private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
         let transition: ContainedViewLayoutTransition
         if animated {
             transition = .animated(duration: 0.3, curve: .spring)
@@ -110,147 +121,273 @@ final class CallControllerButtonsNode: ASDisplayNode {
transition = .immediate transition = .immediate
} }
let threeButtonSpacing: CGFloat = 28.0 let previousMode = self.appliedMode
let twoButtonSpacing: CGFloat = 105.0 self.appliedMode = mode
let buttonSize = CGSize(width: 75.0, height: 75.0)
let threeButtonsWidth = 3.0 * buttonSize.width + 2.0 * threeButtonSpacing
let twoButtonsWidth = 2.0 * buttonSize.width + 1.0 * twoButtonSpacing
var origin = CGPoint(x: floor((width - threeButtonsWidth) / 2.0), y: 0.0) var animatePositionsWithDelay = false
if let previousMode = previousMode {
switch previousMode {
case .incoming, .outgoingRinging:
if case .active = mode {
animatePositionsWithDelay = true
}
default:
break
}
}
for button in [self.muteButton, self.endButton, self.speakerButton] { let minSmallButtonSideInset: CGFloat = 34.0
transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize)) let maxSmallButtonSpacing: CGFloat = 34.0
if button === self.speakerButton { let smallButtonSize: CGFloat = 60.0
transition.updateFrame(node: self.swichCameraButton, frame: CGRect(origin: origin, size: buttonSize)) let topBottomSpacing: CGFloat = 84.0
let maxLargeButtonSpacing: CGFloat = 115.0
let largeButtonSize: CGFloat = 72.0
let minLargeButtonSideInset: CGFloat = minSmallButtonSideInset - 6.0
struct PlacedButton {
let button: ButtonDescription
let frame: CGRect
}
var buttons: [PlacedButton] = []
switch mode {
case .incoming(let speakerMode, let videoState), .outgoingRinging(let speakerMode, let videoState):
var topButtons: [ButtonDescription] = []
var bottomButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin, .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
} }
origin.x += buttonSize.width + threeButtonSpacing switch videoState {
case .active, .available:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(self.isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
}
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}
if case .incoming = mode {
bottomButtons.append(.end(.decline))
bottomButtons.append(.accept)
} else {
bottomButtons.append(.end(.outgoing))
}
let bottomButtonsContentWidth = CGFloat(bottomButtons.count) * largeButtonSize
let bottomButtonsAvailableSpacingWidth = width - bottomButtonsContentWidth - minLargeButtonSideInset * 2.0
let bottomButtonsSpacing = min(maxLargeButtonSpacing, bottomButtonsAvailableSpacingWidth / CGFloat(bottomButtons.count - 1))
let bottomButtonsWidth = CGFloat(bottomButtons.count) * largeButtonSize + CGFloat(bottomButtons.count - 1) * bottomButtonsSpacing
var bottomButtonsLeftOffset = floor((width - bottomButtonsWidth) / 2.0)
for button in bottomButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: bottomButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: largeButtonSize, height: largeButtonSize))))
bottomButtonsLeftOffset += largeButtonSize + bottomButtonsSpacing
}
case let .active(speakerMode, videoState):
var topButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin, .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
}
switch videoState {
case .active, .available:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.enableCamera(!self.isCameraPaused))
topButtons.append(.mute(isMuted))
topButtons.append(.soundOutput(soundOutput))
}
topButtons.append(.end(.end))
let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}
} }
origin = CGPoint(x: floor((width - twoButtonsWidth) / 2.0), y: 0.0) let delayIncrement = 0.015
for button in [self.declineButton, self.acceptButton] { var validKeys: [ButtonDescription.Key] = []
transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize)) for button in buttons {
origin.x += buttonSize.width + twoButtonSpacing validKeys.append(button.button.key)
var buttonTransition = transition
var animateButtonIn = false
let buttonNode: CallControllerButtonItemNode
if let current = self.buttonNodes[button.button.key] {
buttonNode = current
} else {
buttonNode = CallControllerButtonItemNode()
self.buttonNodes[button.button.key] = buttonNode
self.addSubnode(buttonNode)
buttonNode.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
buttonTransition = .immediate
animateButtonIn = transition.isAnimated
}
let buttonContent: CallControllerButtonItemNode.Content
let buttonText: String
switch button.button {
case .accept:
buttonContent = CallControllerButtonItemNode.Content(
appearance: .color(.green),
image: .accept
)
buttonText = strings.Call_Accept
case let .end(type):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .color(.red),
image: .end
)
switch type {
case .outgoing:
buttonText = ""
case .decline:
buttonText = strings.Call_Decline
case .end:
buttonText = strings.Call_End
}
case let .enableCamera(isEnabled):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isEnabled),
image: .camera
)
buttonText = strings.Call_Camera
case .switchCamera:
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: false),
image: .flipCamera
)
buttonText = strings.Call_Flip
case let .soundOutput(value):
let image: CallControllerButtonItemNode.Content.Image
switch value {
case .speaker:
image = .speaker
case .bluetooth:
image = .bluetooth
}
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: false),
image: image
)
buttonText = strings.Call_Speaker
case let .mute(isMuted):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isMuted),
image: .mute
)
buttonText = strings.Call_Mute
}
var buttonDelay = 0.0
if animatePositionsWithDelay {
switch button.button.key {
case .enableCamera:
buttonDelay = 0.0
case .mute:
buttonDelay = delayIncrement * 1.0
case .switchCamera:
buttonDelay = delayIncrement * 2.0
case .end:
buttonDelay = delayIncrement * 3.0
default:
break
}
}
buttonTransition.updateFrame(node: buttonNode, frame: button.frame, delay: buttonDelay)
buttonNode.update(size: button.frame.size, content: buttonContent, text: buttonText, transition: buttonTransition)
if animateButtonIn {
buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
} }
switch mode { var removedKeys: [ButtonDescription.Key] = []
case .incoming: for (key, button) in self.buttonNodes {
for button in [self.declineButton, self.acceptButton] { if !validKeys.contains(key) {
button.alpha = 1.0 removedKeys.append(key)
} if animated {
for button in [self.muteButton, self.endButton, self.speakerButton, self.swichCameraButton] { if case .accept = key {
button.alpha = 0.0 if let endButton = self.buttonNodes[.end] {
} transition.updateFrame(node: button, frame: endButton.frame)
case let .active(speakerMode, videoState): if let content = button.currentContent {
for button in [self.muteButton] { button.update(size: endButton.frame.size, content: content, text: button.currentText, transition: transition)
if animated && button.alpha.isZero { }
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3) transition.updateTransformScale(node: button, scale: 0.1)
} transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
button.alpha = 1.0 button?.removeFromSupernode()
}
switch videoState {
case .active, .available:
for button in [self.speakerButton] {
if animated && !button.alpha.isZero {
button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
button.alpha = 0.0
}
for button in [self.swichCameraButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
case .notAvailable:
for button in [self.swichCameraButton] {
if animated && !button.alpha.isZero {
button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
button.alpha = 0.0
}
for button in [self.speakerButton] {
if animated && button.alpha.isZero {
button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
button.alpha = 1.0
}
}
var animatingAcceptButton = false
if self.endButton.alpha.isZero {
if animated {
if !self.acceptButton.alpha.isZero {
animatingAcceptButton = true
self.endButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
self.acceptButton.animateRollTransition()
self.endButton.layer.animate(from: (CGFloat.pi * 5 / 4) as NSNumber, to: 0.0 as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3)
self.acceptButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak self] _ in
if let strongSelf = self {
strongSelf.acceptButton.alpha = 0.0
strongSelf.acceptButton.layer.removeAnimation(forKey: "position")
strongSelf.acceptButton.layer.removeAnimation(forKey: "transform.rotation.z")
}
}) })
} }
self.endButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2) } else {
transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
button?.removeFromSupernode()
})
} }
self.endButton.alpha = 1.0 } else {
button.removeFromSupernode()
} }
}
if !self.declineButton.alpha.isZero { }
if animated { for key in removedKeys {
self.declineButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2) self.buttonNodes.removeValue(forKey: key)
}
self.declineButton.alpha = 0.0
}
if self.acceptButton.alpha.isZero && !animatingAcceptButton {
self.acceptButton.alpha = 0.0
}
self.speakerButton.isSelected = speakerMode == .speaker
self.speakerButton.isHidden = speakerMode == .none
let speakerButtonType: CallControllerButtonType
switch speakerMode {
case .none, .builtin, .speaker:
speakerButtonType = .speaker
case .headphones:
speakerButtonType = .bluetooth
case .bluetooth:
speakerButtonType = .bluetooth
}
self.speakerButton.updateType(speakerButtonType)
         }
     }
-    @objc func buttonPressed(_ button: CallControllerButtonNode) {
-        if button === self.muteButton {
-            self.mute?()
-        } else if button === self.endButton || button === self.declineButton {
-            self.end?()
-        } else if button === self.speakerButton {
-            self.speaker?()
-        } else if button === self.acceptButton {
-            self.accept?()
-        } else if button === self.swichCameraButton {
-            self.rotateCamera?()
+    @objc func buttonPressed(_ button: CallControllerButtonItemNode) {
+        for (key, listButton) in self.buttonNodes {
+            if button === listButton {
+                switch key {
+                case .accept:
+                    self.accept?()
+                case .end:
+                    self.end?()
+                case .enableCamera:
+                    self.toggleVideo?()
+                case .switchCamera:
+                    self.rotateCamera?()
+                case .soundOutput:
+                    self.speaker?()
+                case .mute:
+                    self.mute?()
+                }
+                break
+            }
         }
     }
     override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
-        let buttons = [
-            self.acceptButton,
-            self.declineButton,
-            self.muteButton,
-            self.endButton,
-            self.speakerButton,
-            self.swichCameraButton
-        ]
-        for button in buttons {
-            if button.isHidden || button.alpha.isZero {
-                continue
-            }
+        for (_, button) in self.buttonNodes {
             if let result = button.view.hitTest(self.view.convert(point, to: button.view), with: event) {
                 return result
             }
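The layout pass above rebuilds the button list on every update and reuses nodes by ButtonDescription.Key; a simplified, self-contained sketch of that reuse-by-key pattern (types trimmed to the essentials, not the actual implementation):

    // Simplified node cache keyed by button identity.
    enum ButtonKey: Hashable {
        case accept, end, enableCamera, switchCamera, soundOutput, mute
    }

    final class ButtonCache<Node> {
        private(set) var nodes: [ButtonKey: Node] = [:]

        // Returns the node for a key, creating it on first use, and reports
        // whether it is new (so the caller can animate it in).
        func node(for key: ButtonKey, create: () -> Node) -> (node: Node, isNew: Bool) {
            if let current = nodes[key] {
                return (current, false)
            }
            let node = create()
            nodes[key] = node
            return (node, true)
        }

        // Removes and returns nodes whose keys are no longer present.
        func removeNodes(notIn validKeys: Set<ButtonKey>) -> [Node] {
            var removed: [Node] = []
            for (key, node) in nodes where !validKeys.contains(key) {
                removed.append(node)
                nodes.removeValue(forKey: key)
            }
            return removed
        }
    }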

View File

@@ -56,34 +56,91 @@ private final class IncomingVideoNode: ASDisplayNode {
 }
 private final class OutgoingVideoNode: ASDisplayNode {
+    private let videoTransformContainer: ASDisplayNode
     private let videoView: UIView
-    private let switchCameraButton: HighlightableButtonNode
-    private let switchCamera: () -> Void
-    init(videoView: UIView, switchCamera: @escaping () -> Void) {
+    private let buttonNode: HighlightTrackingButtonNode
+    private var effectView: UIVisualEffectView?
+    private var isBlurred: Bool = false
+    private var isExpanded: Bool = false
+    var tapped: (() -> Void)?
+    init(videoView: UIView) {
+        self.videoTransformContainer = ASDisplayNode()
+        self.videoTransformContainer.clipsToBounds = true
         self.videoView = videoView
-        self.switchCameraButton = HighlightableButtonNode()
-        self.switchCamera = switchCamera
+        self.videoView.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
+        self.buttonNode = HighlightTrackingButtonNode()
         super.init()
-        self.view.addSubview(self.videoView)
-        self.addSubnode(self.switchCameraButton)
-        self.switchCameraButton.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
+        self.videoTransformContainer.view.addSubview(self.videoView)
+        self.addSubnode(self.videoTransformContainer)
+        //self.addSubnode(self.buttonNode)
+        self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
     }
-    @objc private func buttonPressed() {
-        self.switchCamera()
+    @objc func buttonPressed() {
+        self.tapped?()
     }
     func updateLayout(size: CGSize, isExpanded: Bool, transition: ContainedViewLayoutTransition) {
-        transition.updateFrame(view: self.videoView, frame: CGRect(origin: CGPoint(), size: size))
-        transition.updateCornerRadius(layer: self.videoView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
-        self.switchCameraButton.frame = CGRect(origin: CGPoint(), size: size)
+        let videoFrame = CGRect(origin: CGPoint(), size: size)
+        self.buttonNode.frame = videoFrame
+        self.isExpanded = isExpanded
+        let previousVideoFrame = self.videoTransformContainer.frame
+        self.videoTransformContainer.frame = videoFrame
+        if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
+            transition.animatePositionAdditive(node: self.videoTransformContainer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY))
+            transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / videoFrame.height)
+        }
+        self.videoView.frame = videoFrame
+        transition.updateCornerRadius(layer: self.videoTransformContainer.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
+        if let effectView = self.effectView {
+            transition.updateCornerRadius(layer: effectView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
+        }
+    }
+    func updateIsBlurred(isBlurred: Bool) {
+        if self.isBlurred == isBlurred {
+            return
+        }
+        self.isBlurred = isBlurred
+        if isBlurred {
+            if self.effectView == nil {
+                let effectView = UIVisualEffectView()
+                effectView.clipsToBounds = true
+                effectView.layer.cornerRadius = self.isExpanded ? 0.0 : 16.0
+                self.effectView = effectView
+                effectView.frame = self.videoView.frame
+                self.view.addSubview(effectView)
+            }
+            UIView.animate(withDuration: 0.3, animations: {
+                self.effectView?.effect = UIBlurEffect(style: .dark)
+            })
+        } else if let effectView = self.effectView {
+            UIView.animate(withDuration: 0.3, animations: {
+                effectView.effect = nil
+            })
+        }
     }
 }
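updateIsBlurred drives the paused-camera look purely by animating a UIVisualEffectView's effect; a standalone sketch of that technique outside the node hierarchy (the overlay type and names are illustrative):

    import UIKit

    // Hypothetical: blur an arbitrary container view while paused, un-blur otherwise.
    final class PausableBlurOverlay {
        private let blurView = UIVisualEffectView(effect: nil)

        func install(on container: UIView) {
            blurView.frame = container.bounds
            blurView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
            container.addSubview(blurView)
        }

        func setPaused(_ paused: Bool) {
            // Animating the effect property cross-fades the blur in or out.
            UIView.animate(withDuration: 0.3) {
                self.blurView.effect = paused ? UIBlurEffect(style: .dark) : nil
            }
        }
    }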
 final class CallControllerNode: ASDisplayNode {
+    private enum VideoNodeCorner {
+        case topLeft
+        case topRight
+        case bottomLeft
+        case bottomRight
+    }
     private let sharedContext: SharedAccountContext
     private let account: Account
@@ -104,6 +161,8 @@ final class CallControllerNode: ASDisplayNode {
     private var incomingVideoViewRequested: Bool = false
     private var outgoingVideoNode: OutgoingVideoNode?
     private var outgoingVideoViewRequested: Bool = false
+    private var outgoingVideoExplicitelyFullscreen: Bool = false
+    private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight
     private let backButtonArrowNode: ASImageNode
     private let backButtonNode: HighlightableButtonNode
     private let statusNode: CallControllerStatusNode
@@ -121,6 +180,9 @@ final class CallControllerNode: ASDisplayNode {
     var isMuted: Bool = false {
         didSet {
             self.buttonsNode.isMuted = self.isMuted
+            if let (layout, navigationBarHeight) = self.validLayout {
+                self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+            }
         }
     }
@@ -134,12 +196,15 @@ final class CallControllerNode: ASDisplayNode {
     var beginAudioOuputSelection: (() -> Void)?
     var acceptCall: (() -> Void)?
     var endCall: (() -> Void)?
-    var toggleVideo: (() -> Void)?
+    var setIsVideoPaused: ((Bool) -> Void)?
     var back: (() -> Void)?
     var presentCallRating: ((CallId) -> Void)?
     var callEnded: ((Bool) -> Void)?
     var dismissedInteractively: (() -> Void)?
+    private var isUIHidden: Bool = false
+    private var isVideoPaused: Bool = false
     init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
         self.sharedContext = sharedContext
         self.account = account
@@ -229,7 +294,17 @@ final class CallControllerNode: ASDisplayNode {
         }
         self.buttonsNode.toggleVideo = { [weak self] in
-            self?.toggleVideo?()
+            guard let strongSelf = self else {
+                return
+            }
+            strongSelf.isVideoPaused = !strongSelf.isVideoPaused
+            strongSelf.outgoingVideoNode?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
+            strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
+            strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
+            if let (layout, navigationBarHeight) = strongSelf.validLayout {
+                strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+            }
         }
         self.buttonsNode.rotateCamera = { [weak self] in
@@ -302,17 +377,21 @@ final class CallControllerNode: ASDisplayNode {
                         return
                     }
                     if let incomingVideoView = incomingVideoView {
-                        strongSelf.setCurrentAudioOutput?(.speaker)
                         let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView)
                         strongSelf.incomingVideoNode = incomingVideoNode
                         strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
+                        strongSelf.statusNode.isHidden = true
                         if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
+                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
                         }
                     }
                 })
             }
default:
break
}
switch callState.videoState {
case .active, .activeOutgoing:
if !self.outgoingVideoViewRequested { if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
@@ -322,13 +401,15 @@ final class CallControllerNode: ASDisplayNode {
if let outgoingVideoView = outgoingVideoView { if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true outgoingVideoView.clipsToBounds = true
strongSelf.setCurrentAudioOutput?(.speaker) if let audioOutputState = strongSelf.audioOutputState, let currentOutput = audioOutputState.currentOutput {
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: { switch currentOutput {
guard let strongSelf = self else { case .speaker, .builtin:
return break
default:
strongSelf.setCurrentAudioOutput?(.speaker)
} }
strongSelf.call.switchVideoCamera() }
}) let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView)
strongSelf.outgoingVideoNode = outgoingVideoNode strongSelf.outgoingVideoNode = outgoingVideoNode
if let incomingVideoNode = strongSelf.incomingVideoNode { if let incomingVideoNode = strongSelf.incomingVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode) strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
@@ -336,38 +417,17 @@ final class CallControllerNode: ASDisplayNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode) strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
} }
if let (layout, navigationBarHeight) = strongSelf.validLayout { if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate) strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
} }
} /*outgoingVideoNode.tapped = {
})
}
case .activeOutgoing:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
guard let strongSelf = self else {
return
}
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
outgoingVideoView.layer.cornerRadius = 16.0
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
guard let strongSelf = self else { guard let strongSelf = self else {
return return
} }
strongSelf.call.switchVideoCamera() strongSelf.outgoingVideoExplicitelyFullscreen = !strongSelf.outgoingVideoExplicitelyFullscreen
}) if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.outgoingVideoNode = outgoingVideoNode strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
if let incomingVideoNode = strongSelf.incomingVideoNode { }
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode) }*/
} else {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
} }
}) })
} }
@@ -438,7 +498,7 @@ final class CallControllerNode: ASDisplayNode {
             if isReconnecting {
                 return strings.Call_StatusConnecting
             } else {
-                return strings.Call_StatusOngoing(value).0
+                return value
             }
         }, timestamp)
         if self.keyTextData?.0 != keyVisualHash {
@@ -501,43 +561,60 @@ final class CallControllerNode: ASDisplayNode {
         }
     }
+    private var buttonsTerminationMode: CallControllerButtonsMode?
     private func updateButtonsMode() {
         guard let callState = self.callState else {
             return
         }
+        var mode: CallControllerButtonsSpeakerMode = .none
+        if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
+            switch currentOutput {
+            case .builtin:
+                mode = .builtin
+            case .speaker:
+                mode = .speaker
+            case .headphones:
+                mode = .headphones
+            case .port:
+                mode = .bluetooth
+            }
+            if availableOutputs.count <= 1 {
+                mode = .none
+            }
+        }
+        let mappedVideoState: CallControllerButtonsMode.VideoState
+        switch callState.videoState {
+        case .notAvailable:
+            mappedVideoState = .notAvailable
+        case .available:
+            mappedVideoState = .available(true)
+        case .active:
+            mappedVideoState = .active
+        case .activeOutgoing:
+            mappedVideoState = .active
+        }
         switch callState.state {
         case .ringing:
-            self.buttonsNode.updateMode(.incoming)
-        default:
-            var mode: CallControllerButtonsSpeakerMode = .none
-            if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
-                switch currentOutput {
-                case .builtin:
-                    mode = .builtin
-                case .speaker:
-                    mode = .speaker
-                case .headphones:
-                    mode = .headphones
-                case .port:
-                    mode = .bluetooth
-                }
-                if availableOutputs.count <= 1 {
-                    mode = .none
-                }
-            }
-            let mappedVideoState: CallControllerButtonsMode.VideoState
-            switch callState.videoState {
-            case .notAvailable:
-                mappedVideoState = .notAvailable
-            case .available:
-                mappedVideoState = .available(true)
-            case .active:
-                mappedVideoState = .active
-            case .activeOutgoing:
-                mappedVideoState = .active
-            }
-            self.buttonsNode.updateMode(.active(speakerMode: mode, videoState: mappedVideoState))
+            let buttonsMode: CallControllerButtonsMode = .incoming(speakerMode: mode, videoState: mappedVideoState)
+            self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
+            self.buttonsTerminationMode = buttonsMode
+        case .waiting, .requesting:
+            let buttonsMode: CallControllerButtonsMode = .outgoingRinging(speakerMode: mode, videoState: mappedVideoState)
+            self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
+            self.buttonsTerminationMode = buttonsMode
+        case .active, .connecting, .reconnecting:
+            let buttonsMode: CallControllerButtonsMode = .active(speakerMode: mode, videoState: mappedVideoState)
+            self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
+            self.buttonsTerminationMode = buttonsMode
+        case .terminating, .terminated:
+            if let buttonsTerminationMode = self.buttonsTerminationMode {
+                self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsTerminationMode)
+            } else {
+                self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: .active(speakerMode: mode, videoState: mappedVideoState))
+            }
         }
     }
@@ -568,9 +645,69 @@ final class CallControllerNode: ASDisplayNode {
}
}
private func calculatePreviewVideoRect(layout: ContainerViewLayout, navigationHeight: CGFloat) -> CGRect {
let buttonsHeight: CGFloat = 190.0
let buttonsOffset: CGFloat
if layout.size.width.isEqual(to: 320.0) {
if layout.size.height.isEqual(to: 480.0) {
buttonsOffset = 60.0
} else {
buttonsOffset = 73.0
}
} else {
buttonsOffset = 83.0
}
let buttonsOriginY: CGFloat
if self.isUIHidden {
buttonsOriginY = layout.size.height + 40.0 - 80.0
} else {
buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
}
let previewVideoSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
let previewVideoY: CGFloat
let previewVideoX: CGFloat
switch self.outgoingVideoNodeCorner {
case .topLeft:
previewVideoX = 20.0
if self.isUIHidden {
previewVideoY = layout.insets(options: .statusBar).top + 8.0
} else {
previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
}
case .topRight:
previewVideoX = layout.size.width - previewVideoSize.width - 20.0
if self.isUIHidden {
previewVideoY = layout.insets(options: .statusBar).top + 8.0
} else {
previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
}
case .bottomLeft:
previewVideoX = 20.0
if self.isUIHidden {
previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
} else {
previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
}
case .bottomRight:
previewVideoX = layout.size.width - previewVideoSize.width - 20.0
if self.isUIHidden {
previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
} else {
previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
}
}
return CGRect(origin: CGPoint(x: previewVideoX, y: previewVideoY), size: previewVideoSize)
}
    func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
        self.validLayout = (layout, navigationBarHeight)

        let overlayAlpha: CGFloat = self.isUIHidden ? 0.0 : 1.0

        transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
        transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -592,6 +729,9 @@ final class CallControllerNode: ASDisplayNode {
        }
        transition.updateFrame(node: self.backButtonNode, frame: CGRect(origin: CGPoint(x: 29.0, y: navigationOffset + 11.0), size: backSize))

        transition.updateAlpha(node: self.backButtonArrowNode, alpha: overlayAlpha)
        transition.updateAlpha(node: self.backButtonNode, alpha: overlayAlpha)

        var statusOffset: CGFloat
        if layout.metrics.widthClass == .regular && layout.metrics.heightClass == .regular {
            if layout.size.height.isEqual(to: 1366.0) {
@@ -611,7 +751,7 @@ final class CallControllerNode: ASDisplayNode {
        statusOffset += layout.safeInsets.top

        let buttonsHeight: CGFloat = 190.0
        let buttonsOffset: CGFloat
        if layout.size.width.isEqual(to: 320.0) {
            if layout.size.height.isEqual(to: 480.0) {
@@ -625,36 +765,60 @@ final class CallControllerNode: ASDisplayNode {
        let statusHeight = self.statusNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
        transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
        transition.updateAlpha(node: self.statusNode, alpha: overlayAlpha)

        let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
        transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))

        self.buttonsNode.updateLayout(strings: self.presentationData.strings, constrainedWidth: layout.size.width, transition: transition)
        let buttonsOriginY: CGFloat
        if self.isUIHidden {
            buttonsOriginY = layout.size.height + 40.0 - 80.0
        } else {
            buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
        }
        transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
        transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)

        let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size)
        let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)

        if let incomingVideoNode = self.incomingVideoNode {
            var incomingVideoTransition = transition
            if incomingVideoNode.frame.isEmpty {
                incomingVideoTransition = .immediate
            }
            if self.outgoingVideoExplicitelyFullscreen {
                incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: previewVideoFrame)
            } else {
                incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: fullscreenVideoFrame)
            }
            incomingVideoNode.updateLayout(size: incomingVideoNode.frame.size)
        }
        if let outgoingVideoNode = self.outgoingVideoNode {
            var outgoingVideoTransition = transition
            if outgoingVideoNode.frame.isEmpty {
                outgoingVideoTransition = .immediate
            }
            if self.incomingVideoNode == nil {
                outgoingVideoNode.frame = fullscreenVideoFrame
                outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: outgoingVideoTransition)
            } else {
                if self.minimizedVideoDraggingPosition == nil {
                    if self.outgoingVideoExplicitelyFullscreen {
                        outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: fullscreenVideoFrame)
                    } else {
                        outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: previewVideoFrame)
                    }
                    outgoingVideoNode.updateLayout(size: outgoingVideoNode.frame.size, isExpanded: self.outgoingVideoExplicitelyFullscreen, transition: outgoingVideoTransition)
                }
            }
        }

        let keyTextSize = self.keyButtonNode.frame.size
        transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
        transition.updateAlpha(node: self.keyButtonNode, alpha: overlayAlpha)

        if let debugNode = self.debugNode {
            transition.updateFrame(node: debugNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -700,26 +864,33 @@ final class CallControllerNode: ASDisplayNode {
            if let _ = self.keyPreviewNode {
                self.backPressed()
            } else {
                if self.incomingVideoNode != nil || self.outgoingVideoNode != nil {
                    self.isUIHidden = !self.isUIHidden
                    if let (layout, navigationBarHeight) = self.validLayout {
                        self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
                    }
                } else {
                    let point = recognizer.location(in: recognizer.view)
                    if self.statusNode.frame.contains(point) {
                        if self.easyDebugAccess {
                            self.presentDebugNode()
                        } else {
                            let timestamp = CACurrentMediaTime()
                            if self.debugTapCounter.0 < timestamp - 0.75 {
                                self.debugTapCounter.0 = timestamp
                                self.debugTapCounter.1 = 0
                            }
                            if self.debugTapCounter.0 >= timestamp - 0.75 {
                                self.debugTapCounter.0 = timestamp
                                self.debugTapCounter.1 += 1
                            }
                            if self.debugTapCounter.1 >= 10 {
                                self.debugTapCounter.1 = 0
                                self.presentDebugNode()
                            }
                        }
                    }
                }
            }
@@ -749,36 +920,170 @@ final class CallControllerNode: ASDisplayNode {
}
}
private var minimizedVideoInitialPosition: CGPoint?
private var minimizedVideoDraggingPosition: CGPoint?
private func nodeLocationForPosition(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> VideoNodeCorner {
let layoutInsets = UIEdgeInsets()
var result = CGPoint()
if position.x < layout.size.width / 2.0 {
result.x = 0.0
} else {
result.x = 1.0
}
if position.y < layoutInsets.top + (layout.size.height - layoutInsets.bottom - layoutInsets.top) / 2.0 {
result.y = 0.0
} else {
result.y = 1.0
}
let currentPosition = result
let angleEpsilon: CGFloat = 30.0
var shouldHide = false
if (velocity.x * velocity.x + velocity.y * velocity.y) >= 500.0 * 500.0 {
let x = velocity.x
let y = velocity.y
var angle = atan2(y, x) * 180.0 / CGFloat.pi * -1.0
if angle < 0.0 {
angle += 360.0
}
if currentPosition.x.isZero && currentPosition.y.isZero {
if ((angle > 0 && angle < 90 - angleEpsilon) || angle > 360 - angleEpsilon) {
result.x = 1.0
result.y = 0.0
} else if (angle > 180 + angleEpsilon && angle < 270 + angleEpsilon) {
result.x = 0.0
result.y = 1.0
} else if (angle > 270 + angleEpsilon && angle < 360 - angleEpsilon) {
result.x = 1.0
result.y = 1.0
} else {
shouldHide = true
}
} else if !currentPosition.x.isZero && currentPosition.y.isZero {
if (angle > 90 + angleEpsilon && angle < 180 + angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (angle > 270 - angleEpsilon && angle < 360 - angleEpsilon) {
result.x = 1.0
result.y = 1.0
}
else if (angle > 180 + angleEpsilon && angle < 270 - angleEpsilon) {
result.x = 0.0
result.y = 1.0
}
else {
shouldHide = true
}
} else if currentPosition.x.isZero && !currentPosition.y.isZero {
if (angle > 90 - angleEpsilon && angle < 180 - angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (angle < angleEpsilon || angle > 270 + angleEpsilon) {
result.x = 1.0
result.y = 1.0
}
else if (angle > angleEpsilon && angle < 90 - angleEpsilon) {
result.x = 1.0
result.y = 0.0
}
else if (!shouldHide) {
shouldHide = true
}
} else if !currentPosition.x.isZero && !currentPosition.y.isZero {
if (angle > angleEpsilon && angle < 90 + angleEpsilon) {
result.x = 1.0
result.y = 0.0
}
else if (angle > 180 - angleEpsilon && angle < 270 - angleEpsilon) {
result.x = 0.0
result.y = 1.0
}
else if (angle > 90 + angleEpsilon && angle < 180 - angleEpsilon) {
result.x = 0.0
result.y = 0.0
}
else if (!shouldHide) {
shouldHide = true
}
}
}
if result.x.isZero {
if result.y.isZero {
return .topLeft
} else {
return .bottomLeft
}
} else {
if result.y.isZero {
return .topRight
} else {
return .bottomRight
}
}
}
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
switch recognizer.state {
case .began:
let location = recognizer.location(in: self.view)
//let translation = recognizer.translation(in: self.view)
//location.x += translation.x
//location.y += translation.y
if let _ = self.incomingVideoNode, let outgoingVideoNode = self.outgoingVideoNode, outgoingVideoNode.frame.contains(location) {
self.minimizedVideoInitialPosition = outgoingVideoNode.position
} else {
self.minimizedVideoInitialPosition = nil
}
case .changed:
if let outgoingVideoNode = self.outgoingVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
let translation = recognizer.translation(in: self.view)
let minimizedVideoDraggingPosition = CGPoint(x: minimizedVideoInitialPosition.x + translation.x, y: minimizedVideoInitialPosition.y + translation.y)
self.minimizedVideoDraggingPosition = minimizedVideoDraggingPosition
outgoingVideoNode.position = minimizedVideoDraggingPosition
} else {
let offset = recognizer.translation(in: self.view).y
var bounds = self.bounds
bounds.origin.y = -offset
self.bounds = bounds
}
case .cancelled, .ended:
if let outgoingVideoNode = self.outgoingVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition {
self.minimizedVideoInitialPosition = nil
self.minimizedVideoDraggingPosition = nil
if let (layout, navigationHeight) = self.validLayout {
self.outgoingVideoNodeCorner = self.nodeLocationForPosition(layout: layout, position: minimizedVideoDraggingPosition, velocity: recognizer.velocity(in: self.view))
let videoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationHeight)
outgoingVideoNode.frame = videoFrame
outgoingVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil)
}
} else {
let velocity = recognizer.velocity(in: self.view).y
if abs(velocity) < 100.0 {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
} else {
var bounds = self.bounds
let previous = bounds
bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height: bounds.height)
self.bounds = bounds
self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in
self?.dismissedInteractively?()
})
}
}
default:
break
}
}
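
As an aside, here is a minimal standalone Swift sketch of the corner-snapping idea used by nodeLocationForPosition above; PreviewCorner and nearestCorner are illustrative names (the real code returns a VideoNodeCorner), and the velocity-based overrides are omitted:

import UIKit

enum PreviewCorner {
    case topLeft, topRight, bottomLeft, bottomRight
}

// Pick the corner whose quadrant contains the release position.
func nearestCorner(for position: CGPoint, in size: CGSize) -> PreviewCorner {
    let isLeft = position.x < size.width / 2.0
    let isTop = position.y < size.height / 2.0
    switch (isLeft, isTop) {
    case (true, true):
        return .topLeft
    case (false, true):
        return .topRight
    case (true, false):
        return .bottomLeft
    case (false, false):
        return .bottomRight
    }
}

// Example: a preview released at (300, 600) in a 375x812 layout snaps to .bottomRight.
let snappedCorner = nearestCorner(for: CGPoint(x: 300.0, y: 600.0), in: CGSize(width: 375.0, height: 812.0))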

View File

@@ -190,7 +190,7 @@ public final class PresentationCallImpl: PresentationCall {
    private var sessionStateDisposable: Disposable?

    private let statePromise = ValuePromise<PresentationCallState>()
    public var state: Signal<PresentationCallState, NoError> {
        return self.statePromise.get()
    }
@@ -264,7 +264,9 @@ public final class PresentationCallImpl: PresentationCall {
        self.isVideo = startWithVideo
        if self.isVideo {
            self.videoCapturer = OngoingCallVideoCapturer()
            self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .activeOutgoing, remoteVideoState: .inactive))
        } else {
            self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive))
        }

        self.serializedData = serializedData
@@ -457,7 +459,7 @@ public final class PresentationCallImpl: PresentationCall {
        switch sessionState.state {
        case .ringing:
            presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
            if previous == nil || previousControl == nil {
                if !self.reportedIncomingCall {
                    self.reportedIncomingCall = true
@@ -520,7 +522,7 @@ public final class PresentationCallImpl: PresentationCall {
                presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
            }
        } else {
            presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState)
        }
    }
@@ -536,6 +538,7 @@ public final class PresentationCallImpl: PresentationCall {
        let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName)
        self.ongoingContext = ongoingContext
        ongoingContext.setIsMuted(self.isMutedValue)

        self.debugInfoValue.set(ongoingContext.debugInfo())
@@ -729,6 +732,10 @@ public final class PresentationCallImpl: PresentationCall {
        self.ongoingContext?.setEnableVideo(value)
    }
public func setOutgoingVideoIsPaused(_ isPaused: Bool) {
self.videoCapturer?.setIsVideoEnabled(!isPaused)
}
    public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
        guard self.currentAudioOutputValue != output else {
            return
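
A hedged usage sketch of the new pause API from the calling UI; call can be any object conforming to PresentationCall, and the toggle handler itself is hypothetical:

// Hypothetical UI hook: pausing outgoing video only disables the capturer,
// it does not end the call or tear down the preview view.
func outgoingVideoPauseToggled(call: PresentationCall, isPaused: Bool) {
    call.setOutgoingVideoIsPaused(isPaused)
}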

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_accept.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_video.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_decline.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

Binary file not shown. (Before: 1.1 KiB)

Binary file not shown. (Before: 1.7 KiB)

View File

@@ -1,22 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_calls_mute.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

Binary file not shown. (Before: 545 B)

Binary file not shown. (Before: 844 B)

View File

@@ -1,22 +0,0 @@
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "CallPhoneIcon@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "CallPhoneIcon@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown. (Before: 655 B)

Binary file not shown. (Before: 1.2 KiB)

View File

@@ -1,22 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_calls_speaker.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

Binary file not shown. (Before: 1.2 KiB)

Binary file not shown. (Before: 1.9 KiB)

View File

@@ -1,22 +1,12 @@
{
  "images" : [
    {
      "filename" : "ic_calls_speaker.pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

View File

@@ -1,7 +1,7 @@
{
  "images" : [
    {
      "filename" : "ic_calls_cameraflip.pdf",
      "idiom" : "universal"
    }
  ],

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "ic_calls_tlogo.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -305,6 +305,10 @@ public final class OngoingCallVideoCapturer {
    public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
        self.impl.makeOutgoingVideoView(completion)
    }

    public func setIsVideoEnabled(_ value: Bool) {
        self.impl.setIsVideoEnabled(value)
    }
}

extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {
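
For orientation, a small sketch of driving the capturer directly; the two OngoingCallVideoCapturer calls are the real APIs from the hunk above, while the helper functions and the container wiring are assumed for illustration:

import UIKit

func attachOutgoingPreview(capturer: OngoingCallVideoCapturer, to container: UIView) {
    capturer.makeOutgoingVideoView { view in
        guard let view = view else {
            return
        }
        view.frame = container.bounds
        container.addSubview(view)
    }
}

func setOutgoingPreviewPaused(capturer: OngoingCallVideoCapturer, isPaused: Bool) {
    // The capturer API is expressed as "enabled", so the pause flag is inverted.
    capturer.setIsVideoEnabled(!isPaused)
}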

View File

@@ -13,6 +13,8 @@ namespace TGVOIP_NAMESPACE {
class VideoCapturerInterface {
public:
    virtual ~VideoCapturerInterface();

    virtual void setIsEnabled(bool isEnabled) = 0;
};

void configurePlatformAudio();

View File

@@ -112,6 +112,10 @@
    [_videoCapturer stopCapture];
}

- (void)setIsEnabled:(bool)isEnabled {
    [_videoCapturer setIsEnabled:isEnabled];
}

@end

@interface VideoCapturerInterfaceImplHolder : NSObject
@@ -153,6 +157,16 @@ public:
});
}
virtual void setIsEnabled(bool isEnabled) {
VideoCapturerInterfaceImplHolder *implReference = _implReference;
dispatch_async(dispatch_get_main_queue(), ^{
if (implReference.reference != nil) {
VideoCapturerInterfaceImplReference *reference = (__bridge VideoCapturerInterfaceImplReference *)implReference.reference;
[reference setIsEnabled:isEnabled];
}
});
}
private:
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
    VideoCapturerInterfaceImplHolder *_implReference;

View File

@@ -138,6 +138,7 @@ public:
    virtual ~TgVoipVideoCaptureInterface();

    virtual void switchCamera() = 0;
    virtual void setIsVideoEnabled(bool isVideoEnabled) = 0;
    virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
};

View File

@@ -17,6 +17,7 @@
- (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps;
- (void)stopCapture;
- (void)setIsEnabled:(bool)isEnabled;

@end

View File

@@ -39,6 +39,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    UIDeviceOrientation _orientation;
    void (^_isActiveUpdated)(bool);
    bool _isActiveValue;
    bool _inForegroundValue;
    bool _isPaused;
}

@end
@@ -49,6 +52,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    self = [super init];
    if (self != nil) {
        _source = source;
        _isActiveValue = true;
        _inForegroundValue = true;
        _isPaused = false;

        _isActiveUpdated = [isActiveUpdated copy];

        if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
@@ -124,6 +130,11 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    [self stopCaptureWithCompletionHandler:nil];
}

- (void)setIsEnabled:(bool)isEnabled {
    _isPaused = !isEnabled;
    [self updateIsActiveValue];
}

- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps
@@ -253,7 +264,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
    RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                              rotation:_rotation
                                                           timeStampNs:timeStampNs];
    if (!_isPaused) {
        getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
    }
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
@@ -316,15 +329,23 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr
        _hasRetriedOnFatalError = NO;
    }];

    _inForegroundValue = true;
    [self updateIsActiveValue];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
    RTCLog(@"Capture session stopped.");
    _inForegroundValue = false;
    [self updateIsActiveValue];
}
- (void)updateIsActiveValue {
bool isActive = _inForegroundValue && !_isPaused;
if (isActive != _isActiveValue) {
_isActiveValue = isActive;
if (_isActiveUpdated) {
_isActiveUpdated(_isActiveValue);
}
}
}
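
The same state combination, rendered as a self-contained Swift sketch with illustrative names: two inputs collapse into one derived flag, and the callback fires only on transitions, mirroring -updateIsActiveValue above:

final class CaptureActivityTracker {
    var onActiveChanged: ((Bool) -> Void)?

    private var isActive = true

    var inForeground = true {
        didSet { self.update() }
    }
    var isPaused = false {
        didSet { self.update() }
    }

    private func update() {
        // Derive the combined flag and notify only when it actually changes.
        let newValue = self.inForeground && !self.isPaused
        if newValue != self.isActive {
            self.isActive = newValue
            self.onActiveChanged?(newValue)
        }
    }
}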

View File

@@ -18,6 +18,7 @@ public:
    ~TgVoipVideoCaptureInterfaceObject();

    void switchCamera();
    void setIsVideoEnabled(bool isVideoEnabled);
    void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
    void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated);
@@ -29,6 +30,7 @@ private:
    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
    std::function<void (bool)> _isActiveUpdated;
    bool _useFrontCamera;
    bool _isVideoEnabled;
};

class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
@@ -37,6 +39,7 @@ public:
    virtual ~TgVoipVideoCaptureInterfaceImpl();

    virtual void switchCamera();
    virtual void setIsVideoEnabled(bool isVideoEnabled);
    virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);

public:

View File

@@ -10,6 +10,7 @@ namespace TGVOIP_NAMESPACE {
TgVoipVideoCaptureInterfaceObject::TgVoipVideoCaptureInterfaceObject() {
    _useFrontCamera = true;
    _isVideoEnabled = true;
    _videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
    //this should outlive the capturer
    _videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
@@ -33,6 +34,13 @@ void TgVoipVideoCaptureInterfaceObject::switchCamera() {
        }
    });
}

void TgVoipVideoCaptureInterfaceObject::setIsVideoEnabled(bool isVideoEnabled) {
    if (_isVideoEnabled != isVideoEnabled) {
        _isVideoEnabled = isVideoEnabled;
        _videoCapturer->setIsEnabled(isVideoEnabled);
    }
}

void TgVoipVideoCaptureInterfaceObject::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
    if (_currentSink != nullptr) {
@@ -66,6 +74,12 @@ void TgVoipVideoCaptureInterfaceImpl::switchCamera() {
        impl->switchCamera();
    });
}

void TgVoipVideoCaptureInterfaceImpl::setIsVideoEnabled(bool isVideoEnabled) {
    _impl->perform([isVideoEnabled](TgVoipVideoCaptureInterfaceObject *impl) {
        impl->setIsVideoEnabled(isVideoEnabled);
    });
}

void TgVoipVideoCaptureInterfaceImpl::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
    _impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {

View File

@@ -83,6 +83,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (instancetype _Nonnull)init;

- (void)switchVideoCamera;
- (void)setIsVideoEnabled:(bool)isVideoEnabled;
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;

View File

@@ -41,6 +41,10 @@ using namespace TGVOIP_NAMESPACE;
    _interface->switchCamera();
}

- (void)setIsVideoEnabled:(bool)isVideoEnabled {
    _interface->setIsVideoEnabled(isVideoEnabled);
}

- (std::shared_ptr<TgVoipVideoCaptureInterface>)getInterface {
    return _interface;
}
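
Putting the pieces together, the pause request appears to travel through the layers touched above; this summary is inferred from the hunks rather than taken from any single file, and the wrapper function is illustrative:

// Inferred call path for pausing outgoing video:
//   PresentationCallImpl.setOutgoingVideoIsPaused(true)
//     -> OngoingCallVideoCapturer.setIsVideoEnabled(false)          (Swift)
//     -> the Objective-C capturer wrapper's setIsVideoEnabled:       (OngoingCallThreadLocalContext)
//     -> TgVoipVideoCaptureInterfaceImpl::setIsVideoEnabled(false)   (C++)
//     -> VideoCapturerInterfaceImplReference's setIsEnabled:, which sets _isPaused
// While _isPaused is set, the capture session keeps running but frames are
// no longer forwarded to the WebRTC video source.
func pauseOutgoingVideo(of call: PresentationCall) {
    call.setOutgoingVideoIsPaused(true)
}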