diff --git a/submodules/AccountContext/Sources/PresentationCallManager.swift b/submodules/AccountContext/Sources/PresentationCallManager.swift
index aedbdc6b8b..e1e1f40397 100644
--- a/submodules/AccountContext/Sources/PresentationCallManager.swift
+++ b/submodules/AccountContext/Sources/PresentationCallManager.swift
@@ -90,6 +90,7 @@ public protocol PresentationCall: class {
     func toggleIsMuted()
     func setIsMuted(_ value: Bool)
     func setEnableVideo(_ value: Bool)
+    func setOutgoingVideoIsPaused(_ isPaused: Bool)
     func switchVideoCamera()
     func setCurrentAudioOutput(_ output: AudioSessionOutput)
     func debugInfo() -> Signal<(String, String), NoError>
diff --git a/submodules/Display/Source/CAAnimationUtils.swift b/submodules/Display/Source/CAAnimationUtils.swift
index 6eb000add6..c683fdf4f8 100644
--- a/submodules/Display/Source/CAAnimationUtils.swift
+++ b/submodules/Display/Source/CAAnimationUtils.swift
@@ -238,14 +238,14 @@ public extension CALayer {
         self.animate(from: NSValue(cgPoint: from), to: NSValue(cgPoint: to), keyPath: "position", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
     }
 
-    func animateBounds(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func animateBounds(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
         if from == to && !force {
             if let completion = completion {
                 completion(true)
             }
             return
         }
-        self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
+        self.animate(from: NSValue(cgRect: from), to: NSValue(cgRect: to), keyPath: "bounds", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
     }
 
     func animateBoundsOriginXAdditive(from: CGFloat, to: CGFloat, duration: Double, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
@@ -268,7 +268,7 @@ public extension CALayer {
         self.animateKeyframes(values: values.map { NSValue(cgPoint: $0) }, duration: duration, keyPath: "position")
     }
 
-    func animateFrame(from: CGRect, to: CGRect, duration: Double, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func animateFrame(from: CGRect, to: CGRect, duration: Double, delay: Double = 0.0, timingFunction: String, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, force: Bool = false, completion: ((Bool) -> Void)? = nil) {
         if from == to && !force {
             if let completion = completion {
                 completion(true)
@@ -302,14 +302,14 @@ public extension CALayer {
             toBounds = CGRect()
         }
 
-        self.animatePosition(from: fromPosition, to: toPosition, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
+        self.animatePosition(from: fromPosition, to: toPosition, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
             if !value {
                 interrupted = true
             }
             completedPosition = true
             partialCompletion()
         })
-        self.animateBounds(from: fromBounds, to: toBounds, duration: duration, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
+        self.animateBounds(from: fromBounds, to: toBounds, duration: duration, delay: delay, timingFunction: timingFunction, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, force: force, completion: { value in
             if !value {
                 interrupted = true
             }
diff --git a/submodules/Display/Source/ContainedViewLayoutTransition.swift b/submodules/Display/Source/ContainedViewLayoutTransition.swift
index 0ce89df155..1d4c1867a6 100644
--- a/submodules/Display/Source/ContainedViewLayoutTransition.swift
+++ b/submodules/Display/Source/ContainedViewLayoutTransition.swift
@@ -63,7 +63,7 @@ public enum ContainedViewLayoutTransition {
 }
 
 public extension ContainedViewLayoutTransition {
-    func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) {
+    func updateFrame(node: ASDisplayNode, frame: CGRect, force: Bool = false, beginWithCurrentState: Bool = false, delay: Double = 0.0, completion: ((Bool) -> Void)? = nil) {
         if node.frame.equalTo(frame) && !force {
             completion?(true)
         } else {
@@ -81,7 +81,7 @@ public extension ContainedViewLayoutTransition {
                     previousFrame = node.frame
                 }
                 node.frame = frame
-                node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
+                node.layer.animateFrame(from: previousFrame, to: frame, duration: duration, delay: delay, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, force: force, completion: { result in
                     if let completion = completion {
                         completion(result)
                     }
diff --git a/submodules/TelegramCallsUI/Sources/CallController.swift b/submodules/TelegramCallsUI/Sources/CallController.swift
index 02db8c1132..52089bd548 100644
--- a/submodules/TelegramCallsUI/Sources/CallController.swift
+++ b/submodules/TelegramCallsUI/Sources/CallController.swift
@@ -178,8 +178,8 @@ public final class CallController: ViewController {
             let _ = self?.call.hangUp()
         }
 
-        self.controllerNode.toggleVideo = { [weak self] in
-            let _ = self?.call.setEnableVideo(true)
+        self.controllerNode.setIsVideoPaused = { [weak self] isPaused in
+            self?.call.setOutgoingVideoIsPaused(isPaused)
         }
 
         self.controllerNode.back = { [weak self] in
diff --git a/submodules/TelegramCallsUI/Sources/CallControllerButton.swift b/submodules/TelegramCallsUI/Sources/CallControllerButton.swift
index 7a3c6b9bc1..bc7c296f6b 100644
--- a/submodules/TelegramCallsUI/Sources/CallControllerButton.swift
+++ b/submodules/TelegramCallsUI/Sources/CallControllerButton.swift
@@ -5,245 +5,218 @@ import AsyncDisplayKit
 import SwiftSignalKit
 import AppBundle
 
-enum CallControllerButtonType {
-    case mute
-    case end
-    case accept
-    case speaker
-    case bluetooth
-    case switchCamera
-}
+private let labelFont = Font.regular(13.0)
 
-private let buttonSize = CGSize(width: 75.0, height: 75.0)
-
-private func generateEmptyButtonImage(icon: UIImage?, strokeColor: UIColor?, fillColor: UIColor, knockout: Bool = false, angle: CGFloat = 0.0) -> UIImage? {
-    return generateImage(buttonSize, contextGenerator: { size, context in
-        context.clear(CGRect(origin: CGPoint(), size: size))
-        context.setBlendMode(.copy)
-        if let strokeColor = strokeColor {
-            context.setFillColor(strokeColor.cgColor)
-            context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
-            context.setFillColor(fillColor.cgColor)
-            context.fillEllipse(in: CGRect(origin: CGPoint(x: 1.5, y: 1.5), size: CGSize(width: size.width - 3.0, height: size.height - 3.0)))
-        } else {
-            context.setFillColor(fillColor.cgColor)
-            context.fillEllipse(in: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.height)))
-        }
-        
-        if let icon = icon {
-            if !angle.isZero {
-                context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
-                context.rotate(by: angle)
-                context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
-            }
-            let imageSize = icon.size
-            let imageRect = CGRect(origin: CGPoint(x: floor((size.width - imageSize.width) / 2.0), y: floor((size.width - imageSize.height) / 2.0)), size: imageSize)
-            if knockout {
-                context.setBlendMode(.copy)
-                context.clip(to: imageRect, mask: icon.cgImage!)
-                context.setFillColor(UIColor.clear.cgColor)
-                context.fill(imageRect)
-            } else {
-                context.setBlendMode(.normal)
-                context.draw(icon.cgImage!, in: imageRect)
+final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
+    struct Content: Equatable {
+        enum Appearance: Equatable {
+            enum Color {
+                case red
+                case green
             }
+            
+            case blurred(isFilled: Bool)
+            case color(Color)
         }
-    })
-}
-
-private func generateFilledButtonImage(color: UIColor, icon: UIImage?, angle: CGFloat = 0.0) -> UIImage? {
-    return generateImage(buttonSize, contextGenerator: { size, context in
-        context.clear(CGRect(origin: CGPoint(), size: size))
-        context.setBlendMode(.normal)
-        context.setFillColor(color.cgColor)
-        context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
-        if let icon = icon {
-            if !angle.isZero {
-                context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
-                context.rotate(by: angle)
-                context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
-            }
-            context.draw(icon.cgImage!, in: CGRect(origin: CGPoint(x: floor((size.width - icon.size.width) / 2.0), y: floor((size.height - icon.size.height) / 2.0)), size: icon.size))
+        
+        enum Image {
+            case camera
+            case mute
+            case flipCamera
+            case bluetooth
+            case speaker
+            case accept
+            case end
         }
-    })
-}
-
-private let emptyStroke = UIColor(white: 1.0, alpha: 0.8)
-private let emptyHighlightedFill = UIColor(white: 1.0, alpha: 0.3)
-private let invertedFill = UIColor(white: 1.0, alpha: 1.0)
-
-private let labelFont = Font.regular(14.5)
-
-final class CallControllerButtonNode: HighlightTrackingButtonNode {
-    private var type: CallControllerButtonType
+        
+        var appearance: Appearance
+        var image: Image
+    }
     
-    private var regularImage: UIImage?
-    private var highlightedImage: UIImage?
-    private var filledImage: UIImage?
+    private let contentContainer: ASDisplayNode
+    private let effectView: UIVisualEffectView
+    private let contentNode: ASImageNode
+    private let overlayHighlightNode: ASImageNode
+    private let textNode: ImmediateTextNode
     
-    private let backgroundNode: ASImageNode
-    private let labelNode: ASTextNode?
+    private let largeButtonSize: CGFloat = 72.0
     
-    init(type: CallControllerButtonType, label: String?) {
-        self.type = type
+    private(set) var currentContent: Content?
+    private(set) var currentText: String = ""
+    
+    init() {
+        self.contentContainer = ASDisplayNode()
         
-        self.backgroundNode = ASImageNode()
-        self.backgroundNode.isLayerBacked = true
-        self.backgroundNode.displayWithoutProcessing = false
-        self.backgroundNode.displaysAsynchronously = false
+        self.effectView = UIVisualEffectView()
+        self.effectView.effect = UIBlurEffect(style: .light)
+        self.effectView.layer.cornerRadius = self.largeButtonSize / 2.0
+        self.effectView.clipsToBounds = true
+        self.effectView.isUserInteractionEnabled = false
         
-        if let label = label {
-            let labelNode = ASTextNode()
-            labelNode.attributedText = NSAttributedString(string: label, font: labelFont, textColor: .white)
-            self.labelNode = labelNode
-        } else {
-            self.labelNode = nil
-        }
+        self.contentNode = ASImageNode()
+        self.contentNode.isUserInteractionEnabled = false
         
-        var regularImage: UIImage?
-        var highlightedImage: UIImage?
-        var filledImage: UIImage?
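+        // Pressed-state overlay; kept fully transparent until the highlight handler below fades it in.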
+        self.overlayHighlightNode = ASImageNode()
+        self.overlayHighlightNode.isUserInteractionEnabled = false
+        self.overlayHighlightNode.alpha = 0.0
        
-        switch type {
-        case .mute:
-            regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
-        case .accept:
-            regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
-            highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
-        case .end:
-            regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
-            highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
-        case .speaker:
-            regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
-        case .bluetooth:
-            regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
-        case .switchCamera:
-            let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
-            regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
-        }
+        self.textNode = ImmediateTextNode()
+        self.textNode.displaysAsynchronously = false
+        self.textNode.isUserInteractionEnabled = false
        
-        self.regularImage = regularImage
-        self.highlightedImage = highlightedImage
-        self.filledImage = filledImage
+        super.init(pointerStyle: nil)
        
-        super.init()
+        self.addSubnode(self.contentContainer)
+        self.contentContainer.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
        
-        self.addSubnode(self.backgroundNode)
+        self.addSubnode(self.textNode)
        
-        if let labelNode = self.labelNode {
-            self.addSubnode(labelNode)
-        }
-        
-        self.backgroundNode.image = regularImage
-        self.currentImage = regularImage
+        self.contentContainer.view.addSubview(self.effectView)
+        self.contentContainer.addSubnode(self.contentNode)
+        self.contentContainer.addSubnode(self.overlayHighlightNode)
        
         self.highligthedChanged = { [weak self] highlighted in
-            if let strongSelf = self {
-                strongSelf.internalHighlighted = highlighted
-                strongSelf.updateState(highlighted: highlighted, selected: strongSelf.isSelected)
+            guard let strongSelf = self else {
+                return
             }
-        }
-    }
-    
-    private var internalHighlighted = false
-    
-    override var isSelected: Bool {
-        didSet {
-            self.updateState(highlighted: self.internalHighlighted, selected: self.isSelected)
-        }
-    }
-    
-    private var currentImage: UIImage?
-    
-    private func updateState(highlighted: Bool, selected: Bool) {
-        let image: UIImage?
-        if selected {
-            image = self.filledImage
-        } else if highlighted {
-            image = self.highlightedImage
-        } else {
-            image = self.regularImage
-        }
-        
-        if self.currentImage !== image {
-            let currentContents = self.backgroundNode.layer.contents
-            self.backgroundNode.layer.removeAnimation(forKey: "contents")
-            if let currentContents = currentContents, let image = image {
-                self.backgroundNode.image = image
-                self.backgroundNode.layer.animate(from: currentContents as AnyObject, to: image.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: image === self.currentImage || image === self.filledImage ? 0.25 : 0.15)
+            if highlighted {
+                strongSelf.overlayHighlightNode.alpha = 1.0
             } else {
-                self.backgroundNode.image = image
+                strongSelf.overlayHighlightNode.alpha = 0.0
+                strongSelf.overlayHighlightNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
             }
-            self.currentImage = image
         }
     }
    
-    func updateType(_ type: CallControllerButtonType) {
-        if self.type == type {
-            return
-        }
-        self.type = type
-        var regularImage: UIImage?
-        var highlightedImage: UIImage?
-        var filledImage: UIImage?
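+    // Lays out the button at the given size: regenerates the circular background and icon
+    // image when the content changes, and positions the blur view and text label.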
+    func update(size: CGSize, content: Content, text: String, transition: ContainedViewLayoutTransition) {
+        let scaleFactor = size.width / self.largeButtonSize
        
-        switch type {
-        case .mute:
-            regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallMuteButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
-        case .accept:
-            regularImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
-            highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0x74db58), icon: UIImage(bundleImageName: "Call/CallPhoneButton"), angle: CGFloat.pi * 3.0 / 4.0)
-        case .end:
-            regularImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
-            highlightedImage = generateFilledButtonImage(color: UIColor(rgb: 0xd92326), icon: UIImage(bundleImageName: "Call/CallPhoneButton"))
-        case .speaker:
-            regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallSpeakerButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
-        case .bluetooth:
-            regularImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: UIImage(bundleImageName: "Call/CallBluetoothButton"), strokeColor: nil, fillColor: invertedFill, knockout: true)
-        case .switchCamera:
-            let patternImage = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
-            regularImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: .clear)
-            highlightedImage = generateEmptyButtonImage(icon: patternImage, strokeColor: emptyStroke, fillColor: emptyHighlightedFill)
-            filledImage = generateEmptyButtonImage(icon: patternImage, strokeColor: nil, fillColor: invertedFill, knockout: true)
+        self.effectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
+        self.contentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
+        self.overlayHighlightNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: self.largeButtonSize, height: self.largeButtonSize))
+        
+        if self.currentContent != content {
+            self.currentContent = content
+            
+            switch content.appearance {
+            case .blurred:
+                self.effectView.isHidden = false
+            case .color:
+                self.effectView.isHidden = true
+            }
+            
+            let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
+                context.clear(CGRect(origin: CGPoint(), size: size))
+                
+                var fillColor: UIColor = .clear
+                var drawOverMask = false
+                context.setBlendMode(.normal)
+                var imageScale: CGFloat = 1.0
+                switch content.appearance {
+                case let .blurred(isFilled):
+                    if isFilled {
+                        fillColor = .white
+                        drawOverMask = true
+                        context.setBlendMode(.copy)
+                    }
+                    let smallButtonSize: CGFloat = 60.0
+                    imageScale = self.largeButtonSize / smallButtonSize
+                case let .color(color):
+                    switch color {
+                    case .red:
+                        fillColor = UIColor(rgb: 0xd92326)
+                    case .green:
+                        fillColor = UIColor(rgb: 0x74db58)
+                    }
+                }
+                
+                context.setFillColor(fillColor.cgColor)
+                context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
+                
+                var image: UIImage?
+                
+                switch content.image {
+                case .camera:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallCameraButton"), color: .white)
+                case .mute:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallMuteButton"), color: .white)
+                case .flipCamera:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSwitchCameraButton"), color: .white)
+                case .bluetooth:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallBluetoothButton"), color: .white)
+                case .speaker:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallSpeakerButton"), color: .white)
+                case .accept:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallAcceptButton"), color: .white)
+                case .end:
+                    image = generateTintedImage(image: UIImage(bundleImageName: "Call/CallDeclineButton"), color: .white)
+                }
+                
+                if let image = image {
+                    context.translateBy(x: size.width / 2.0, y: size.height / 2.0)
+                    context.scaleBy(x: imageScale, y: imageScale)
+                    context.translateBy(x: -size.width / 2.0, y: -size.height / 2.0)
+                    
+                    let imageRect = CGRect(origin: CGPoint(x: floor((size.width - image.size.width) / 2.0), y: floor((size.height - image.size.height) / 2.0)), size: image.size)
+                    if drawOverMask {
+                        context.clip(to: imageRect, mask: image.cgImage!)
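+                        // Knockout drawing: clear the icon's pixels out of the filled circle
+                        // so the blurred background shows through the glyph.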
+                        context.setBlendMode(.copy)
+                        context.setFillColor(UIColor.clear.cgColor)
+                        context.fill(CGRect(origin: CGPoint(), size: size))
+                    } else {
+                        context.draw(image.cgImage!, in: imageRect)
+                    }
+                }
+            })
+            if transition.isAnimated, let contentImage = contentImage, let previousContent = self.contentNode.image {
+                self.contentNode.image = contentImage
+                self.contentNode.layer.animate(from: previousContent.cgImage!, to: contentImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)
+            } else {
+                self.contentNode.image = contentImage
+            }
+            
+            self.overlayHighlightNode.image = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
+                context.clear(CGRect(origin: CGPoint(), size: size))
+                
+                let fillColor: UIColor
+                context.setBlendMode(.normal)
+                switch content.appearance {
+                case let .blurred(isFilled):
+                    if isFilled {
+                        fillColor = UIColor(white: 0.0, alpha: 0.1)
+                    } else {
+                        fillColor = UIColor(white: 1.0, alpha: 0.2)
+                    }
+                case let .color(color):
+                    switch color {
+                    case .red:
+                        fillColor = UIColor(rgb: 0xd92326).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
+                    case .green:
+                        fillColor = UIColor(rgb: 0x74db58).withMultipliedBrightnessBy(0.2).withAlphaComponent(0.2)
+                    }
+                }
+                
+                context.setFillColor(fillColor.cgColor)
+                context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
+            })
         }
        
-        self.regularImage = regularImage
-        self.highlightedImage = highlightedImage
-        self.filledImage = filledImage
+        transition.updatePosition(node: self.contentContainer, position: CGPoint(x: size.width / 2.0, y: size.height / 2.0))
+        transition.updateSublayerTransformScale(node: self.contentContainer, scale: scaleFactor)
        
-        self.updateState(highlighted: self.isHighlighted, selected: self.isSelected)
-    }
-    
-    func animateRollTransition() {
-        self.backgroundNode.layer.animate(from: 0.0 as NSNumber, to: (-CGFloat.pi * 5 / 4) as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3, removeOnCompletion: false)
-        self.labelNode?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.25, removeOnCompletion: false)
-    }
-    
-    override func layout() {
-        super.layout()
-        
-        let size = self.bounds.size
-        
-        self.backgroundNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: size.width))
-        
-        if let labelNode = self.labelNode {
-            let labelSize = labelNode.measure(CGSize(width: 200.0, height: 100.0))
-            labelNode.frame = CGRect(origin: CGPoint(x: floor((size.width - labelSize.width) / 2.0), y: 81.0), size: labelSize)
+        if self.currentText != text {
+            self.textNode.attributedText = NSAttributedString(string: text, font: labelFont, textColor: .white)
         }
+        let textSize = self.textNode.updateLayout(CGSize(width: 150.0, height: 100.0))
+        let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: size.height + 5.0), size: textSize)
+        if self.currentText.isEmpty {
+            self.textNode.frame = textFrame
+            if transition.isAnimated {
+                self.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
+            }
+        } else {
+            transition.updateFrameAdditiveToCenter(node: self.textNode, frame: textFrame)
+        }
+        self.currentText = text
     }
 }
diff --git a/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift b/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift
index 7a90fd8b3f..769d077f3c 100644
--- a/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift
+++ b/submodules/TelegramCallsUI/Sources/CallControllerButtonsNode.swift
@@ -22,27 +22,66 @@ enum CallControllerButtonsMode: Equatable {
     }
 
     case active(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
-    case incoming
+    case incoming(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
+    case outgoingRinging(speakerMode: CallControllerButtonsSpeakerMode, videoState: VideoState)
+}
+
+private enum ButtonDescription: Equatable {
+    enum Key: Hashable {
+        case accept
+        case end
+        case enableCamera
+        case switchCamera
+        case soundOutput
+        case mute
+    }
+    
+    enum SoundOutput {
+        case builtin
+        case speaker
+        case bluetooth
+    }
+    
+    enum EndType {
+        case outgoing
+        case decline
+        case end
+    }
+    
+    case accept
+    case end(EndType)
+    case enableCamera(Bool)
+    case switchCamera
+    case soundOutput(SoundOutput)
+    case mute(Bool)
+    
+    var key: Key {
+        switch self {
+        case .accept:
+            return .accept
+        case .end:
+            return .end
+        case .enableCamera:
+            return .enableCamera
+        case .switchCamera:
+            return .switchCamera
+        case .soundOutput:
+            return .soundOutput
+        case .mute:
+            return .mute
+        }
+    }
 }
 
 final class CallControllerButtonsNode: ASDisplayNode {
-    private let acceptButton: CallControllerButtonNode
-    private let declineButton: CallControllerButtonNode
-    
-    private let muteButton: CallControllerButtonNode
-    private let endButton: CallControllerButtonNode
-    private let speakerButton: CallControllerButtonNode
-    private let swichCameraButton: CallControllerButtonNode
+    private var buttonNodes: [ButtonDescription.Key: CallControllerButtonItemNode] = [:]
    
     private var mode: CallControllerButtonsMode?
    
     private var validLayout: CGFloat?
    
-    var isMuted = false {
-        didSet {
-            self.muteButton.isSelected = self.isMuted
-        }
-    }
+    var isMuted = false
+    var isCameraPaused = false
    
     var accept: (() -> Void)?
     var mute: (() -> Void)?
@@ -52,57 +91,30 @@ final class CallControllerButtonsNode: ASDisplayNode {
     var rotateCamera: (() -> Void)?
    
     init(strings: PresentationStrings) {
-        self.acceptButton = CallControllerButtonNode(type: .accept, label: strings.Call_Accept)
-        self.acceptButton.alpha = 0.0
-        self.declineButton = CallControllerButtonNode(type: .end, label: strings.Call_Decline)
-        self.declineButton.alpha = 0.0
-        
-        self.muteButton = CallControllerButtonNode(type: .mute, label: nil)
-        self.muteButton.alpha = 0.0
-        self.endButton = CallControllerButtonNode(type: .end, label: nil)
-        self.endButton.alpha = 0.0
-        self.speakerButton = CallControllerButtonNode(type: .speaker, label: nil)
-        self.speakerButton.alpha = 0.0
-        self.swichCameraButton = CallControllerButtonNode(type: .switchCamera, label: nil)
-        self.swichCameraButton.alpha = 0.0
-        
         super.init()
-        
-        self.addSubnode(self.acceptButton)
-        self.addSubnode(self.declineButton)
-        self.addSubnode(self.muteButton)
-        self.addSubnode(self.endButton)
-        self.addSubnode(self.speakerButton)
-        self.addSubnode(self.swichCameraButton)
-        
-        self.acceptButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.declineButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.muteButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.endButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.speakerButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
-        self.swichCameraButton.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
     }
     
-    func updateLayout(constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
-        let previousLayout = self.validLayout
+    func updateLayout(strings: PresentationStrings, constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) {
         self.validLayout = constrainedWidth
        
-        if let mode = self.mode, previousLayout != self.validLayout {
-            self.updateButtonsLayout(mode: mode, width: constrainedWidth, animated: false)
+        if let mode = self.mode {
+            self.updateButtonsLayout(strings: strings, mode: mode, width: constrainedWidth, animated: transition.isAnimated)
         }
     }
     
-    func updateMode(_ mode: CallControllerButtonsMode) {
+    func updateMode(strings: PresentationStrings, mode: CallControllerButtonsMode) {
         if self.mode != mode {
             let previousMode = self.mode
             self.mode = mode
             if let validLayout = self.validLayout {
-                self.updateButtonsLayout(mode: mode, width: validLayout, animated: previousMode != nil)
+                self.updateButtonsLayout(strings: strings, mode: mode, width: validLayout, animated: previousMode != nil)
             }
         }
     }
     
-    private func updateButtonsLayout(mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
+    private var appliedMode: CallControllerButtonsMode?
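+    // The last mode that updateButtonsLayout actually applied; compared with the incoming
+    // mode to decide whether position changes should animate with a staggered delay.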
+    
+    private func updateButtonsLayout(strings: PresentationStrings, mode: CallControllerButtonsMode, width: CGFloat, animated: Bool) {
         let transition: ContainedViewLayoutTransition
         if animated {
             transition = .animated(duration: 0.3, curve: .spring)
@@ -110,147 +122,279 @@ final class CallControllerButtonsNode: ASDisplayNode {
             transition = .immediate
         }
        
-        let threeButtonSpacing: CGFloat = 28.0
-        let twoButtonSpacing: CGFloat = 105.0
-        let buttonSize = CGSize(width: 75.0, height: 75.0)
-        
-        let threeButtonsWidth = 3.0 * buttonSize.width + 2.0 * threeButtonSpacing
-        let twoButtonsWidth = 2.0 * buttonSize.width + 1.0 * twoButtonSpacing
+        let previousMode = self.appliedMode
+        self.appliedMode = mode
        
-        var origin = CGPoint(x: floor((width - threeButtonsWidth) / 2.0), y: 0.0)
+        var animatePositionsWithDelay = false
+        if let previousMode = previousMode {
+            switch previousMode {
+            case .incoming, .outgoingRinging:
+                if case .active = mode {
+                    animatePositionsWithDelay = true
+                }
+            default:
+                break
+            }
+        }
        
-        for button in [self.muteButton, self.endButton, self.speakerButton] {
-            transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize))
-            if button === self.speakerButton {
-                transition.updateFrame(node: self.swichCameraButton, frame: CGRect(origin: origin, size: buttonSize))
+        let minSmallButtonSideInset: CGFloat = 34.0
+        let maxSmallButtonSpacing: CGFloat = 34.0
+        let smallButtonSize: CGFloat = 60.0
+        let topBottomSpacing: CGFloat = 84.0
+        
+        let maxLargeButtonSpacing: CGFloat = 115.0
+        let largeButtonSize: CGFloat = 72.0
+        let minLargeButtonSideInset: CGFloat = minSmallButtonSideInset - 6.0
+        
+        struct PlacedButton {
+            let button: ButtonDescription
+            let frame: CGRect
+        }
+        
+        var buttons: [PlacedButton] = []
+        switch mode {
+        case .incoming(let speakerMode, let videoState), .outgoingRinging(let speakerMode, let videoState):
+            var topButtons: [ButtonDescription] = []
+            var bottomButtons: [ButtonDescription] = []
+            
+            let soundOutput: ButtonDescription.SoundOutput
+            switch speakerMode {
+            case .none, .builtin:
+                soundOutput = .builtin
+            case .speaker:
+                soundOutput = .speaker
+            case .headphones:
+                soundOutput = .bluetooth
+            case .bluetooth:
+                soundOutput = .bluetooth
             }
-            origin.x += buttonSize.width + threeButtonSpacing
+            
+            switch videoState {
+            case .active, .available:
+                topButtons.append(.enableCamera(!self.isCameraPaused))
+                topButtons.append(.mute(self.isMuted))
+                topButtons.append(.switchCamera)
+            case .notAvailable:
+                topButtons.append(.mute(self.isMuted))
+                topButtons.append(.soundOutput(soundOutput))
+            }
+            
+            let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
+            let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
+            let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
+            let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
+            var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
+            for button in topButtons {
+                buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
+                topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
+            }
+            
+            if case .incoming = mode {
+                bottomButtons.append(.end(.decline))
+                bottomButtons.append(.accept)
+            } else {
+                bottomButtons.append(.end(.outgoing))
+            }
+            
+            let bottomButtonsContentWidth = CGFloat(bottomButtons.count) * largeButtonSize
+            let bottomButtonsAvailableSpacingWidth = width - bottomButtonsContentWidth - minLargeButtonSideInset * 2.0
+            let bottomButtonsSpacing = min(maxLargeButtonSpacing, bottomButtonsAvailableSpacingWidth / CGFloat(bottomButtons.count - 1))
+            let bottomButtonsWidth = CGFloat(bottomButtons.count) * largeButtonSize + CGFloat(bottomButtons.count - 1) * bottomButtonsSpacing
+            var bottomButtonsLeftOffset = floor((width - bottomButtonsWidth) / 2.0)
+            for button in bottomButtons {
+                buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: bottomButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: largeButtonSize, height: largeButtonSize))))
+                bottomButtonsLeftOffset += largeButtonSize + bottomButtonsSpacing
+            }
         case let .active(speakerMode, videoState):
+            var topButtons: [ButtonDescription] = []
+            
+            let soundOutput: ButtonDescription.SoundOutput
+            switch speakerMode {
+            case .none, .builtin:
+                soundOutput = .builtin
+            case .speaker:
+                soundOutput = .speaker
+            case .headphones:
+                soundOutput = .builtin
+            case .bluetooth:
+                soundOutput = .bluetooth
+            }
+            
+            switch videoState {
+            case .active, .available:
+                topButtons.append(.enableCamera(!self.isCameraPaused))
+                topButtons.append(.mute(isMuted))
+                topButtons.append(.switchCamera)
+            case .notAvailable:
+                topButtons.append(.mute(isMuted))
+                topButtons.append(.soundOutput(soundOutput))
+            }
+            
+            topButtons.append(.end(.end))
+            
+            let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
+            let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
+            let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
+            let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
+            var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
+            for button in topButtons {
+                buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
+                topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
+            }
         }
        
-        origin = CGPoint(x: floor((width - twoButtonsWidth) / 2.0), y: 0.0)
-        for button in [self.declineButton, self.acceptButton] {
-            transition.updateFrame(node: button, frame: CGRect(origin: origin, size: buttonSize))
-            origin.x += buttonSize.width + twoButtonSpacing
+        let delayIncrement = 0.015
+        var validKeys: [ButtonDescription.Key] = []
+        for button in buttons {
+            validKeys.append(button.button.key)
+            var buttonTransition = transition
+            var animateButtonIn = false
+            let buttonNode: CallControllerButtonItemNode
+            if let current = self.buttonNodes[button.button.key] {
+                buttonNode = current
+            } else {
+                buttonNode = CallControllerButtonItemNode()
+                self.buttonNodes[button.button.key] = buttonNode
+                self.addSubnode(buttonNode)
+                buttonNode.addTarget(self, action: #selector(self.buttonPressed(_:)), forControlEvents: .touchUpInside)
+                buttonTransition = .immediate
+                animateButtonIn = transition.isAnimated
+            }
+            let buttonContent: CallControllerButtonItemNode.Content
+            let buttonText: String
+            switch button.button {
+            case .accept:
+                buttonContent = CallControllerButtonItemNode.Content(
+                    appearance: .color(.green),
+                    image: .accept
+                )
+                buttonText = strings.Call_Accept
+            case let .end(type):
+                buttonContent = CallControllerButtonItemNode.Content(
+                    appearance: .color(.red),
+                    image: .end
+                )
+                switch type {
+                case .outgoing:
+                    buttonText = ""
+                case .decline:
+                    buttonText = strings.Call_Decline
+                case .end:
+                    buttonText = strings.Call_End
+                }
+            case let .enableCamera(isEnabled):
+                buttonContent = CallControllerButtonItemNode.Content(
+                    appearance: .blurred(isFilled: isEnabled),
+                    image: .camera
+                )
+                buttonText = strings.Call_Camera
+            case .switchCamera:
+                buttonContent = CallControllerButtonItemNode.Content(
+                    appearance: .blurred(isFilled: false),
+                    image: .flipCamera
+                )
+                buttonText = strings.Call_Flip
+            case let .soundOutput(value):
+                let image: CallControllerButtonItemNode.Content.Image
+                var isFilled = false
+                switch value {
+                case .builtin:
+                    image = .speaker
+                case .speaker:
+                    image = .speaker
+                    isFilled = true
+                case .bluetooth:
+                    image = .bluetooth
+                }
+                buttonContent = CallControllerButtonItemNode.Content(
+                    appearance: .blurred(isFilled: isFilled),
+                    image: image
+                )
+                buttonText = strings.Call_Speaker
+            case let .mute(isMuted):
+                buttonContent = CallControllerButtonItemNode.Content(
+                    appearance: .blurred(isFilled: isMuted),
+                    image: .mute
+                )
+                buttonText = strings.Call_Mute
+            }
+            var buttonDelay = 0.0
+            if animatePositionsWithDelay {
+                switch button.button.key {
+                case .enableCamera:
+                    buttonDelay = 0.0
+                case .mute:
+                    buttonDelay = delayIncrement * 1.0
+                case .switchCamera:
+                    buttonDelay = delayIncrement * 2.0
+                case .end:
+                    buttonDelay = delayIncrement * 3.0
+                default:
+                    break
+                }
+            }
+            buttonTransition.updateFrame(node: buttonNode, frame: button.frame, delay: buttonDelay)
+            buttonNode.update(size: button.frame.size, content: buttonContent, text: buttonText, transition: buttonTransition)
+            if animateButtonIn {
+                buttonNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+            }
         }
        
-        switch mode {
-        case .incoming:
-            for button in [self.declineButton, self.acceptButton] {
-                button.alpha = 1.0
-            }
-            for button in [self.muteButton, self.endButton, self.speakerButton, self.swichCameraButton] {
-                button.alpha = 0.0
-            }
-        case let .active(speakerMode, videoState):
-            for button in [self.muteButton] {
-                if animated && button.alpha.isZero {
-                    button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
-                }
-                button.alpha = 1.0
-            }
-            switch videoState {
-            case .active, .available:
-                for button in [self.speakerButton] {
-                    if animated && !button.alpha.isZero {
-                        button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
-                    }
-                    button.alpha = 0.0
-                }
-                for button in [self.swichCameraButton] {
-                    if animated && button.alpha.isZero {
-                        button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
-                    }
-                    button.alpha = 1.0
-                }
-            case .notAvailable:
-                for button in [self.swichCameraButton] {
-                    if animated && !button.alpha.isZero {
-                        button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
-                    }
-                    button.alpha = 0.0
-                }
-                for button in [self.speakerButton] {
-                    if animated && button.alpha.isZero {
-                        button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
-                    }
-                    button.alpha = 1.0
-                }
-            }
-            
-            var animatingAcceptButton = false
-            if self.endButton.alpha.isZero {
-                if animated {
-                    if !self.acceptButton.alpha.isZero {
-                        animatingAcceptButton = true
-                        self.endButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring)
-                        self.acceptButton.animateRollTransition()
-                        self.endButton.layer.animate(from: (CGFloat.pi * 5 / 4) as NSNumber, to: 0.0 as NSNumber, keyPath: "transform.rotation.z", timingFunction: kCAMediaTimingFunctionSpring, duration: 0.3)
-                        self.acceptButton.layer.animatePosition(from: self.acceptButton.position, to: self.endButton.position, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, completion: { [weak self] _ in
-                            if let strongSelf = self {
-                                strongSelf.acceptButton.alpha = 0.0
-                                strongSelf.acceptButton.layer.removeAnimation(forKey: "position")
-                                strongSelf.acceptButton.layer.removeAnimation(forKey: "transform.rotation.z")
-                            }
+        var removedKeys: [ButtonDescription.Key] = []
+        for (key, button) in self.buttonNodes {
+            if !validKeys.contains(key) {
+                removedKeys.append(key)
+                if animated {
+                    if case .accept = key {
+                        if let endButton = self.buttonNodes[.end] {
+                            transition.updateFrame(node: button, frame: endButton.frame)
+                            if let content = button.currentContent {
+                                button.update(size: endButton.frame.size, content: content, text: button.currentText, transition: transition)
+                            }
+                            transition.updateTransformScale(node: button, scale: 0.1)
+                            transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
+                                button?.removeFromSupernode()
                             })
                         }
-                        self.endButton.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
+                    } else {
+                        transition.updateAlpha(node: button, alpha: 0.0, completion: { [weak button] _ in
+                            button?.removeFromSupernode()
+                        })
                     }
-                    self.endButton.alpha = 1.0
+                } else {
+                    button.removeFromSupernode()
                 }
-                
-                if !self.declineButton.alpha.isZero {
-                    if animated {
-                        self.declineButton.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2)
-                    }
-                    self.declineButton.alpha = 0.0
-                }
-                
-                if self.acceptButton.alpha.isZero && !animatingAcceptButton {
-                    self.acceptButton.alpha = 0.0
-                }
-                
-                self.speakerButton.isSelected = speakerMode == .speaker
-                self.speakerButton.isHidden = speakerMode == .none
-                let speakerButtonType: CallControllerButtonType
-                switch speakerMode {
-                case .none, .builtin, .speaker:
-                    speakerButtonType = .speaker
-                case .headphones:
-                    speakerButtonType = .bluetooth
-                case .bluetooth:
-                    speakerButtonType = .bluetooth
-                }
-                self.speakerButton.updateType(speakerButtonType)
+            }
+        }
+        for key in removedKeys {
+            self.buttonNodes.removeValue(forKey: key)
         }
     }
     
-    @objc func buttonPressed(_ button: CallControllerButtonNode) {
-        if button === self.muteButton {
-            self.mute?()
-        } else if button === self.endButton || button === self.declineButton {
-            self.end?()
-        } else if button === self.speakerButton {
-            self.speaker?()
-        } else if button === self.acceptButton {
-            self.accept?()
-        } else if button === self.swichCameraButton {
-            self.rotateCamera?()
+    @objc func buttonPressed(_ button: CallControllerButtonItemNode) {
+        for (key, listButton) in self.buttonNodes {
+            if button === listButton {
+                switch key {
+                case .accept:
+                    self.accept?()
+                case .end:
+                    self.end?()
+                case .enableCamera:
+                    self.toggleVideo?()
+                case .switchCamera:
+                    self.rotateCamera?()
+                case .soundOutput:
+                    self.speaker?()
+                case .mute:
+                    self.mute?()
+                }
+                break
+            }
         }
     }
     
     override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
-        let buttons = [
-            self.acceptButton,
-            self.declineButton,
-            self.muteButton,
-            self.endButton,
-            self.speakerButton,
-            self.swichCameraButton
-        ]
-        for button in buttons {
-            if button.isHidden || button.alpha.isZero {
-                continue
-            }
+        for (_, button) in self.buttonNodes {
             if let result = button.view.hitTest(self.view.convert(point, to: button.view), with: event) {
                 return result
             }
diff --git a/submodules/TelegramCallsUI/Sources/CallControllerNode.swift b/submodules/TelegramCallsUI/Sources/CallControllerNode.swift
index 35a5100543..e204fea8d9 100644
--- a/submodules/TelegramCallsUI/Sources/CallControllerNode.swift
+++ b/submodules/TelegramCallsUI/Sources/CallControllerNode.swift
@@ -56,34 +56,91 @@ private final class IncomingVideoNode: ASDisplayNode {
     }
 }
 
 private final class OutgoingVideoNode: ASDisplayNode {
+    private let videoTransformContainer: ASDisplayNode
     private let videoView: UIView
-    private let switchCameraButton: HighlightableButtonNode
-    private let switchCamera: () -> Void
+    private let buttonNode: HighlightTrackingButtonNode
    
-    init(videoView: UIView, switchCamera: @escaping () -> Void) {
+    private var effectView: UIVisualEffectView?
+    private var isBlurred: Bool = false
+    private var isExpanded: Bool = false
+    
+    var tapped: (() -> Void)?
+    
+    init(videoView: UIView) {
+        self.videoTransformContainer = ASDisplayNode()
+        self.videoTransformContainer.clipsToBounds = true
         self.videoView = videoView
-        self.switchCameraButton = HighlightableButtonNode()
-        self.switchCamera = switchCamera
+        self.videoView.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
+        
+        self.buttonNode = HighlightTrackingButtonNode()
        
         super.init()
        
-        self.view.addSubview(self.videoView)
-        self.addSubnode(self.switchCameraButton)
-        self.switchCameraButton.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
+        self.videoTransformContainer.view.addSubview(self.videoView)
+        self.addSubnode(self.videoTransformContainer)
+        //self.addSubnode(self.buttonNode)
+        
+        self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
     }
     
-    @objc private func buttonPressed() {
-        self.switchCamera()
+    @objc func buttonPressed() {
+        self.tapped?()
     }
     
     func updateLayout(size: CGSize, isExpanded: Bool, transition: ContainedViewLayoutTransition) {
-        transition.updateFrame(view: self.videoView, frame: CGRect(origin: CGPoint(), size: size))
-        transition.updateCornerRadius(layer: self.videoView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
-        self.switchCameraButton.frame = CGRect(origin: CGPoint(), size: size)
+        let videoFrame = CGRect(origin: CGPoint(), size: size)
+        self.buttonNode.frame = videoFrame
+        self.isExpanded = isExpanded
+        
+        let previousVideoFrame = self.videoTransformContainer.frame
+        self.videoTransformContainer.frame = videoFrame
+        if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
+            transition.animatePositionAdditive(node: self.videoTransformContainer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY))
+            transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / videoFrame.height)
+        }
+        
+        self.videoView.frame = videoFrame
+        
+        transition.updateCornerRadius(layer: self.videoTransformContainer.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
+        if let effectView = self.effectView {
+            transition.updateCornerRadius(layer: effectView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
+        }
+    }
+    
+    func updateIsBlurred(isBlurred: Bool) {
+        if self.isBlurred == isBlurred {
+            return
+        }
+        self.isBlurred = isBlurred
+        
+        if isBlurred {
+            if self.effectView == nil {
+                let effectView = UIVisualEffectView()
+                effectView.clipsToBounds = true
+                effectView.layer.cornerRadius = self.isExpanded ? 0.0 : 16.0
+                self.effectView = effectView
+                effectView.frame = self.videoView.frame
+                self.view.addSubview(effectView)
+            }
+            UIView.animate(withDuration: 0.3, animations: {
+                self.effectView?.effect = UIBlurEffect(style: .dark)
+            })
+        } else if let effectView = self.effectView {
+            UIView.animate(withDuration: 0.3, animations: {
+                effectView.effect = nil
+            })
+        }
     }
 }
 
 final class CallControllerNode: ASDisplayNode {
+    private enum VideoNodeCorner {
+        case topLeft
+        case topRight
+        case bottomLeft
+        case bottomRight
+    }
+    
     private let sharedContext: SharedAccountContext
     private let account: Account
    
@@ -104,6 +161,8 @@ final class CallControllerNode: ASDisplayNode {
     private var incomingVideoViewRequested: Bool = false
     private var outgoingVideoNode: OutgoingVideoNode?
     private var outgoingVideoViewRequested: Bool = false
+    private var outgoingVideoExplicitelyFullscreen: Bool = false
+    private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight
     private let backButtonArrowNode: ASImageNode
     private let backButtonNode: HighlightableButtonNode
     private let statusNode: CallControllerStatusNode
@@ -121,6 +180,9 @@ final class CallControllerNode: ASDisplayNode {
     var isMuted: Bool = false {
         didSet {
             self.buttonsNode.isMuted = self.isMuted
+            if let (layout, navigationBarHeight) = self.validLayout {
+                self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+            }
         }
     }
    
@@ -134,12 +196,15 @@ final class CallControllerNode: ASDisplayNode {
     var beginAudioOuputSelection: (() -> Void)?
     var acceptCall: (() -> Void)?
     var endCall: (() -> Void)?
-    var toggleVideo: (() -> Void)?
+    var setIsVideoPaused: ((Bool) -> Void)?
     var back: (() -> Void)?
     var presentCallRating: ((CallId) -> Void)?
     var callEnded: ((Bool) -> Void)?
     var dismissedInteractively: (() -> Void)?
    
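+    // Local UI state reflected by containerLayoutUpdated: isUIHidden collapses the overlay
+    // chrome, isVideoPaused mirrors the outgoing-camera pause toggle.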
+    private var isUIHidden: Bool = false
+    private var isVideoPaused: Bool = false
+    
     init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
         self.sharedContext = sharedContext
         self.account = account
@@ -229,7 +294,17 @@
         }
        
         self.buttonsNode.toggleVideo = { [weak self] in
-            self?.toggleVideo?()
+            guard let strongSelf = self else {
+                return
+            }
+            strongSelf.isVideoPaused = !strongSelf.isVideoPaused
+            strongSelf.outgoingVideoNode?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
+            strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
+            strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
+            
+            if let (layout, navigationBarHeight) = strongSelf.validLayout {
+                strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+            }
         }
        
         self.buttonsNode.rotateCamera = { [weak self] in
@@ -302,17 +377,21 @@
                         return
                     }
                     if let incomingVideoView = incomingVideoView {
-                        strongSelf.setCurrentAudioOutput?(.speaker)
                         let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView)
                         strongSelf.incomingVideoNode = incomingVideoNode
                         strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
-                        strongSelf.statusNode.isHidden = true
                         if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
+                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
                         }
                     }
                 })
             }
+        default:
+            break
+        }
+        
+        switch callState.videoState {
+        case .active, .activeOutgoing:
             if !self.outgoingVideoViewRequested {
                 self.outgoingVideoViewRequested = true
                 self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
@@ -322,13 +401,15 @@
                     if let outgoingVideoView = outgoingVideoView {
                         outgoingVideoView.backgroundColor = .black
                         outgoingVideoView.clipsToBounds = true
-                        strongSelf.setCurrentAudioOutput?(.speaker)
-                        let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
-                            guard let strongSelf = self else {
-                                return
+                        if let audioOutputState = strongSelf.audioOutputState, let currentOutput = audioOutputState.currentOutput {
+                            switch currentOutput {
+                            case .speaker, .builtin:
+                                break
+                            default:
+                                strongSelf.setCurrentAudioOutput?(.speaker)
                             }
-                            strongSelf.call.switchVideoCamera()
-                        })
+                        }
+                        let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView)
                         strongSelf.outgoingVideoNode = outgoingVideoNode
                         if let incomingVideoNode = strongSelf.incomingVideoNode {
                             strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
@@ -336,38 +417,17 @@
                             strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
                         }
                         if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
+                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
                         }
-                    }
-                })
-            }
-        case .activeOutgoing:
-            if !self.outgoingVideoViewRequested {
-                self.outgoingVideoViewRequested = true
-                self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
-                    guard let strongSelf = self else {
-                        return
-                    }
-                    if let outgoingVideoView = outgoingVideoView {
-                        outgoingVideoView.backgroundColor = .black
-                        outgoingVideoView.clipsToBounds = true
-                        outgoingVideoView.layer.cornerRadius = 16.0
-                        strongSelf.setCurrentAudioOutput?(.speaker)
-                        let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
+                        /*outgoingVideoNode.tapped = {
                             guard let strongSelf = self else {
                                 return
                             }
-                            strongSelf.call.switchVideoCamera()
-                        })
-                        strongSelf.outgoingVideoNode = outgoingVideoNode
-                        if let incomingVideoNode = strongSelf.incomingVideoNode {
-                            strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
-                        } else {
-                            strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
-                        }
-                        if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
-                        }
+                            strongSelf.outgoingVideoExplicitelyFullscreen = !strongSelf.outgoingVideoExplicitelyFullscreen
+                            if let (layout, navigationBarHeight) = strongSelf.validLayout {
+                                strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
+                            }
+                        }*/
                     }
                 })
             }
@@ -438,7 +498,7 @@
                 if isReconnecting {
                     return strings.Call_StatusConnecting
                 } else {
-                    return strings.Call_StatusOngoing(value).0
+                    return value
                 }
             }, timestamp)
             if self.keyTextData?.0 != keyVisualHash {
@@ -501,43 +561,60 @@
         }
     }
    
+    private var buttonsTerminationMode: CallControllerButtonsMode?
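+    // The last ringing/active buttons mode, kept so the layout stays stable while the call
+    // is terminating.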
+    
     private func updateButtonsMode() {
         guard let callState = self.callState else {
             return
         }
        
+        var mode: CallControllerButtonsSpeakerMode = .none
+        if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
+            switch currentOutput {
+            case .builtin:
+                mode = .builtin
+            case .speaker:
+                mode = .speaker
+            case .headphones:
+                mode = .headphones
+            case .port:
+                mode = .bluetooth
+            }
+            if availableOutputs.count <= 1 {
+                mode = .none
+            }
+        }
+        let mappedVideoState: CallControllerButtonsMode.VideoState
+        switch callState.videoState {
+        case .notAvailable:
+            mappedVideoState = .notAvailable
+        case .available:
+            mappedVideoState = .available(true)
+        case .active:
+            mappedVideoState = .active
+        case .activeOutgoing:
+            mappedVideoState = .active
+        }
+        
         switch callState.state {
-        case .ringing:
-            self.buttonsNode.updateMode(.incoming)
-        default:
-            var mode: CallControllerButtonsSpeakerMode = .none
-            if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
-                switch currentOutput {
-                case .builtin:
-                    mode = .builtin
-                case .speaker:
-                    mode = .speaker
-                case .headphones:
-                    mode = .headphones
-                case .port:
-                    mode = .bluetooth
-                }
-                if availableOutputs.count <= 1 {
-                    mode = .none
-                }
-            }
-            let mappedVideoState: CallControllerButtonsMode.VideoState
-            switch callState.videoState {
-            case .notAvailable:
-                mappedVideoState = .notAvailable
-            case .available:
-                mappedVideoState = .available(true)
-            case .active:
-                mappedVideoState = .active
-            case .activeOutgoing:
-                mappedVideoState = .active
-            }
-            self.buttonsNode.updateMode(.active(speakerMode: mode, videoState: mappedVideoState))
+        case .ringing:
+            let buttonsMode: CallControllerButtonsMode = .incoming(speakerMode: mode, videoState: mappedVideoState)
+            self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
+            self.buttonsTerminationMode = buttonsMode
+        case .waiting, .requesting:
+            let buttonsMode: CallControllerButtonsMode = .outgoingRinging(speakerMode: mode, videoState: mappedVideoState)
+            self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
+            self.buttonsTerminationMode = buttonsMode
+        case .active, .connecting, .reconnecting:
+            let buttonsMode: CallControllerButtonsMode = .active(speakerMode: mode, videoState: mappedVideoState)
+            self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsMode)
+            self.buttonsTerminationMode = buttonsMode
+        case .terminating, .terminated:
+            if let buttonsTerminationMode = self.buttonsTerminationMode {
+                self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: buttonsTerminationMode)
+            } else {
+                self.buttonsNode.updateMode(strings: self.presentationData.strings, mode: .active(speakerMode: mode, videoState: mappedVideoState))
+            }
         }
     }
    
@@ -568,9 +645,69 @@
         }
     }
    
+    private func calculatePreviewVideoRect(layout: ContainerViewLayout, navigationHeight: CGFloat) -> CGRect {
+        let buttonsHeight: CGFloat = 190.0
+        let buttonsOffset: CGFloat
+        if layout.size.width.isEqual(to: 320.0) {
+            if layout.size.height.isEqual(to: 480.0) {
+                buttonsOffset = 60.0
+            } else {
+                buttonsOffset = 73.0
+            }
+        } else {
+            buttonsOffset = 83.0
+        }
+        
+        let buttonsOriginY: CGFloat
+        if self.isUIHidden {
+            buttonsOriginY = layout.size.height + 40.0 - 80.0
+        } else {
+            buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
+        }
+        
+        let previewVideoSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
+        let previewVideoY: CGFloat
+        let previewVideoX: CGFloat
+        
+        switch self.outgoingVideoNodeCorner {
+        case .topLeft:
+            previewVideoX = 20.0
+            if self.isUIHidden {
+                previewVideoY = layout.insets(options: .statusBar).top + 8.0
+            } else {
+                previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
+            }
+        case .topRight:
+            previewVideoX = layout.size.width - previewVideoSize.width - 20.0
+            if self.isUIHidden {
+                previewVideoY = layout.insets(options: .statusBar).top + 8.0
+            } else {
+                previewVideoY = layout.insets(options: .statusBar).top + 44.0 + 8.0
+            }
+        case .bottomLeft:
+            previewVideoX = 20.0
+            if self.isUIHidden {
+                previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
+            } else {
+                previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
+            }
+        case .bottomRight:
+            previewVideoX = layout.size.width - previewVideoSize.width - 20.0
+            if self.isUIHidden {
+                previewVideoY = layout.size.height - layout.intrinsicInsets.bottom - 8.0 - previewVideoSize.height
+            } else {
+                previewVideoY = buttonsOriginY + 100.0 - previewVideoSize.height
+            }
+        }
+        
+        return CGRect(origin: CGPoint(x: previewVideoX, y: previewVideoY), size: previewVideoSize)
+    }
+    
     func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
         self.validLayout = (layout, navigationBarHeight)
        
+        let overlayAlpha: CGFloat = self.isUIHidden ? 0.0 : 1.0
+        
         transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
        
         transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -592,6 +729,9 @@
         }
         transition.updateFrame(node: self.backButtonNode, frame: CGRect(origin: CGPoint(x: 29.0, y: navigationOffset + 11.0), size: backSize))
        
+        transition.updateAlpha(node: self.backButtonArrowNode, alpha: overlayAlpha)
+        transition.updateAlpha(node: self.backButtonNode, alpha: overlayAlpha)
+        
         var statusOffset: CGFloat
         if layout.metrics.widthClass == .regular && layout.metrics.heightClass == .regular {
             if layout.size.height.isEqual(to: 1366.0) {
@@ -611,7 +751,7 @@
        
         statusOffset += layout.safeInsets.top
        
-        let buttonsHeight: CGFloat = 75.0
+        let buttonsHeight: CGFloat = 190.0
         let buttonsOffset: CGFloat
         if layout.size.width.isEqual(to: 320.0) {
             if layout.size.height.isEqual(to: 480.0) {
@@ -625,36 +765,60 @@
        
         let statusHeight = self.statusNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
         transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
+        transition.updateAlpha(node: self.statusNode, alpha: overlayAlpha)
        
         let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
         transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
        
-        self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
-        let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
+        self.buttonsNode.updateLayout(strings: self.presentationData.strings, constrainedWidth: layout.size.width, transition: transition)
+        let buttonsOriginY: CGFloat
+        if self.isUIHidden {
+            buttonsOriginY = layout.size.height + 40.0 - 80.0
+        } else {
+            buttonsOriginY = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
+        }
         transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
+        transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
+        
+        let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size)
+        
+        let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)
        
-        var outgoingVideoTransition = transition
         if let incomingVideoNode = self.incomingVideoNode {
-            if incomingVideoNode.frame.width.isZero, let outgoingVideoNode = self.outgoingVideoNode, !outgoingVideoNode.frame.width.isZero, !transition.isAnimated {
-                outgoingVideoTransition = .animated(duration: 0.3, curve: .easeInOut)
+            var incomingVideoTransition = transition
+            if incomingVideoNode.frame.isEmpty {
+                incomingVideoTransition = .immediate
             }
-            incomingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
-            incomingVideoNode.updateLayout(size: layout.size)
+            if self.outgoingVideoExplicitelyFullscreen {
+                incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: previewVideoFrame)
+            } else {
+                incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: fullscreenVideoFrame)
+            }
+            incomingVideoNode.updateLayout(size: incomingVideoNode.frame.size)
         }
         if let outgoingVideoNode = self.outgoingVideoNode {
+            var outgoingVideoTransition = transition
+            if outgoingVideoNode.frame.isEmpty {
+                outgoingVideoTransition = .immediate
+            }
             if self.incomingVideoNode == nil {
-                outgoingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
-                outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: transition)
+                outgoingVideoNode.frame = fullscreenVideoFrame
+                outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: outgoingVideoTransition)
             } else {
-                let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
-                let outgoingFrame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize)
-                outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: outgoingFrame)
-                outgoingVideoNode.updateLayout(size: outgoingFrame.size, isExpanded: false, transition: outgoingVideoTransition)
+                if self.minimizedVideoDraggingPosition == nil {
+                    if self.outgoingVideoExplicitelyFullscreen {
+                        outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: fullscreenVideoFrame)
+                    } else {
+                        outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: previewVideoFrame)
+                    }
+                    outgoingVideoNode.updateLayout(size: outgoingVideoNode.frame.size, isExpanded: self.outgoingVideoExplicitelyFullscreen, transition: outgoingVideoTransition)
+                }
             }
         }
        
         let keyTextSize = self.keyButtonNode.frame.size
         transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
+        transition.updateAlpha(node: self.keyButtonNode, alpha: overlayAlpha)
        
         if let debugNode = self.debugNode {
             transition.updateFrame(node: debugNode, frame: CGRect(origin: CGPoint(), size: layout.size))
@@ -700,26 +864,33 @@ final class
CallControllerNode: ASDisplayNode { if let _ = self.keyPreviewNode { self.backPressed() } else { - let point = recognizer.location(in: recognizer.view) - if self.statusNode.frame.contains(point) { - if self.easyDebugAccess { - self.presentDebugNode() - } else { - let timestamp = CACurrentMediaTime() - if self.debugTapCounter.0 < timestamp - 0.75 { - self.debugTapCounter.0 = timestamp - self.debugTapCounter.1 = 0 - } - - if self.debugTapCounter.0 >= timestamp - 0.75 { - self.debugTapCounter.0 = timestamp - self.debugTapCounter.1 += 1 - } - - if self.debugTapCounter.1 >= 10 { - self.debugTapCounter.1 = 0 - + if self.incomingVideoNode != nil || self.outgoingVideoNode != nil { + self.isUIHidden = !self.isUIHidden + if let (layout, navigationBarHeight) = self.validLayout { + self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut)) + } + } else { + let point = recognizer.location(in: recognizer.view) + if self.statusNode.frame.contains(point) { + if self.easyDebugAccess { self.presentDebugNode() + } else { + let timestamp = CACurrentMediaTime() + if self.debugTapCounter.0 < timestamp - 0.75 { + self.debugTapCounter.0 = timestamp + self.debugTapCounter.1 = 0 + } + + if self.debugTapCounter.0 >= timestamp - 0.75 { + self.debugTapCounter.0 = timestamp + self.debugTapCounter.1 += 1 + } + + if self.debugTapCounter.1 >= 10 { + self.debugTapCounter.1 = 0 + + self.presentDebugNode() + } } } } @@ -749,36 +920,170 @@ final class CallControllerNode: ASDisplayNode { } } - @objc func panGesture(_ recognizer: UIPanGestureRecognizer) { - switch recognizer.state { - case .changed: - let offset = recognizer.translation(in: self.view).y - var bounds = self.bounds - bounds.origin.y = -offset - self.bounds = bounds - case .ended: - let velocity = recognizer.velocity(in: self.view).y - if abs(velocity) < 100.0 { - var bounds = self.bounds - let previous = bounds - bounds.origin = CGPoint() - self.bounds = bounds - self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) + private var minimizedVideoInitialPosition: CGPoint? + private var minimizedVideoDraggingPosition: CGPoint? + + private func nodeLocationForPosition(layout: ContainerViewLayout, position: CGPoint, velocity: CGPoint) -> VideoNodeCorner { + let layoutInsets = UIEdgeInsets() + var result = CGPoint() + if position.x < layout.size.width / 2.0 { + result.x = 0.0 + } else { + result.x = 1.0 + } + if position.y < layoutInsets.top + (layout.size.height - layoutInsets.bottom - layoutInsets.top) / 2.0 { + result.y = 0.0 + } else { + result.y = 1.0 + } + + let currentPosition = result + + let angleEpsilon: CGFloat = 30.0 + var shouldHide = false + + if (velocity.x * velocity.x + velocity.y * velocity.y) >= 500.0 * 500.0 { + let x = velocity.x + let y = velocity.y + + var angle = atan2(y, x) * 180.0 / CGFloat.pi * -1.0 + if angle < 0.0 { + angle += 360.0 + } + + if currentPosition.x.isZero && currentPosition.y.isZero { + if ((angle > 0 && angle < 90 - angleEpsilon) || angle > 360 - angleEpsilon) { + result.x = 1.0 + result.y = 0.0 + } else if (angle > 180 + angleEpsilon && angle < 270 + angleEpsilon) { + result.x = 0.0 + result.y = 1.0 + } else if (angle > 270 + angleEpsilon && angle < 360 - angleEpsilon) { + result.x = 1.0 + result.y = 1.0 } else { - var bounds = self.bounds - let previous = bounds - bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? 
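// The fling direction picks the exit edge: with the bounds-origin trick used in the
// drag branch above (origin.y = -offset), a downward fling (velocity > 0) targets
// -bounds.height so the screen keeps moving down, while an upward fling targets
// +bounds.height; `dismissedInteractively` then fires from the animation completion.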
-bounds.height: bounds.height) - self.bounds = bounds - self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in - self?.dismissedInteractively?() - }) + shouldHide = true + } + } else if !currentPosition.x.isZero && currentPosition.y.isZero { + if (angle > 90 + angleEpsilon && angle < 180 + angleEpsilon) { + result.x = 0.0 + result.y = 0.0 + } + else if (angle > 270 - angleEpsilon && angle < 360 - angleEpsilon) { + result.x = 1.0 + result.y = 1.0 + } + else if (angle > 180 + angleEpsilon && angle < 270 - angleEpsilon) { + result.x = 0.0 + result.y = 1.0 + } + else { + shouldHide = true + } + } else if currentPosition.x.isZero && !currentPosition.y.isZero { + if (angle > 90 - angleEpsilon && angle < 180 - angleEpsilon) { + result.x = 0.0 + result.y = 0.0 + } + else if (angle < angleEpsilon || angle > 270 + angleEpsilon) { + result.x = 1.0 + result.y = 1.0 + } + else if (angle > angleEpsilon && angle < 90 - angleEpsilon) { + result.x = 1.0 + result.y = 0.0 + } + else if (!shouldHide) { + shouldHide = true + } + } else if !currentPosition.x.isZero && !currentPosition.y.isZero { + if (angle > angleEpsilon && angle < 90 + angleEpsilon) { + result.x = 1.0 + result.y = 0.0 + } + else if (angle > 180 - angleEpsilon && angle < 270 - angleEpsilon) { + result.x = 0.0 + result.y = 1.0 + } + else if (angle > 90 + angleEpsilon && angle < 180 - angleEpsilon) { + result.x = 0.0 + result.y = 0.0 + } + else if (!shouldHide) { + shouldHide = true + } + } + } + + if result.x.isZero { + if result.y.isZero { + return .topLeft + } else { + return .bottomLeft + } + } else { + if result.y.isZero { + return .topRight + } else { + return .bottomRight + } + } + } + + @objc private func panGesture(_ recognizer: UIPanGestureRecognizer) { + switch recognizer.state { + case .began: + let location = recognizer.location(in: self.view) + //let translation = recognizer.translation(in: self.view) + //location.x += translation.x + //location.y += translation.y + if let _ = self.incomingVideoNode, let outgoingVideoNode = self.outgoingVideoNode, outgoingVideoNode.frame.contains(location) { + self.minimizedVideoInitialPosition = outgoingVideoNode.position + } else { + self.minimizedVideoInitialPosition = nil + } + case .changed: + if let outgoingVideoNode = self.outgoingVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition { + let translation = recognizer.translation(in: self.view) + let minimizedVideoDraggingPosition = CGPoint(x: minimizedVideoInitialPosition.x + translation.x, y: minimizedVideoInitialPosition.y + translation.y) + self.minimizedVideoDraggingPosition = minimizedVideoDraggingPosition + outgoingVideoNode.position = minimizedVideoDraggingPosition + } else { + let offset = recognizer.translation(in: self.view).y + var bounds = self.bounds + bounds.origin.y = -offset + self.bounds = bounds + } + case .cancelled, .ended: + if let outgoingVideoNode = self.outgoingVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition { + self.minimizedVideoInitialPosition = nil + self.minimizedVideoDraggingPosition = nil + + if let (layout, navigationHeight) = self.validLayout { + self.outgoingVideoNodeCorner = self.nodeLocationForPosition(layout: layout, position: minimizedVideoDraggingPosition, velocity: recognizer.velocity(in: self.view)) + + let videoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: 
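// Snap-back commits the final corner frame immediately and lets an additive spring
// carry the visual difference: the animation starts at the drag offset (dragged
// position minus the new frame's center) and decays to .zero, so layout and gesture
// state never disagree about where the node really is.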
navigationHeight) + outgoingVideoNode.frame = videoFrame + outgoingVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil) + } + } else { + let velocity = recognizer.velocity(in: self.view).y + if abs(velocity) < 100.0 { + var bounds = self.bounds + let previous = bounds + bounds.origin = CGPoint() + self.bounds = bounds + self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) + } else { + var bounds = self.bounds + let previous = bounds + bounds.origin = CGPoint(x: 0.0, y: velocity > 0.0 ? -bounds.height: bounds.height) + self.bounds = bounds + self.layer.animateBounds(from: previous, to: bounds, duration: 0.15, timingFunction: CAMediaTimingFunctionName.easeOut.rawValue, completion: { [weak self] _ in + self?.dismissedInteractively?() + }) + } } - case .cancelled: - var bounds = self.bounds - let previous = bounds - bounds.origin = CGPoint() - self.bounds = bounds - self.layer.animateBounds(from: previous, to: bounds, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring) default: break } diff --git a/submodules/TelegramCallsUI/Sources/PresentationCall.swift b/submodules/TelegramCallsUI/Sources/PresentationCall.swift index 6c4da73a80..7ad6771995 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCall.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCall.swift @@ -190,7 +190,7 @@ public final class PresentationCallImpl: PresentationCall { private var sessionStateDisposable: Disposable? - private let statePromise = ValuePromise(PresentationCallState(state: .waiting, videoState: .notAvailable, remoteVideoState: .inactive), ignoreRepeated: true) + private let statePromise = ValuePromise<PresentationCallState>() public var state: Signal<PresentationCallState, NoError> { return self.statePromise.get() } @@ -233,7 +233,9 @@ public final class PresentationCallImpl: PresentationCall { private var droppedCall = false private var dropCallKitCallTimer: SwiftSignalKit.Timer? - init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>) { + private var videoCapturer: OngoingCallVideoCapturer? + + init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?)
-> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, startWithVideo: Bool) { self.account = account self.audioSession = audioSession self.callSessionManager = callSessionManager @@ -259,6 +261,13 @@ public final class PresentationCallImpl: PresentationCall { self.isOutgoing = isOutgoing self.isVideo = initialState?.type == .video self.peer = peer + self.isVideo = startWithVideo + if self.isVideo { + self.videoCapturer = OngoingCallVideoCapturer() + self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .activeOutgoing, remoteVideoState: .inactive)) + } else { + self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive)) + } self.serializedData = serializedData self.dataSaving = dataSaving @@ -440,13 +449,17 @@ public final class PresentationCallImpl: PresentationCall { mappedRemoteVideoState = .active } } else { - mappedVideoState = .notAvailable + if self.isVideo { + mappedVideoState = .activeOutgoing + } else { + mappedVideoState = .notAvailable + } mappedRemoteVideoState = .inactive } switch sessionState.state { case .ringing: - presentationState = PresentationCallState(state: .ringing, videoState: .notAvailable, remoteVideoState: .inactive) + presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState) if previous == nil || previousControl == nil { if !self.reportedIncomingCall { self.reportedIncomingCall = true @@ -509,7 +522,7 @@ public final class PresentationCallImpl: PresentationCall { presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState) } } else { - presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: .notAvailable, remoteVideoState: .inactive) + presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState) } } @@ -523,8 +536,9 @@ public final class PresentationCallImpl: PresentationCall { if let _ = audioSessionControl, !wasActive || previousControl == nil { let logName = "\(id.id)_\(id.accessHash)" - let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, isVideo: sessionState.type == .video, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName) + let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing,
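// The capturer created in `init` is handed to the context below. Because the camera
// is owned by `PresentationCallImpl` rather than by the context, the local preview
// and the camera controls (switch, pause) work even before a connection exists.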
video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName) self.ongoingContext = ongoingContext + ongoingContext.setIsMuted(self.isMutedValue) self.debugInfoValue.set(ongoingContext.debugInfo()) @@ -718,8 +732,8 @@ public final class PresentationCallImpl: PresentationCall { self.ongoingContext?.setEnableVideo(value) } - public func switchVideoCamera() { - self.ongoingContext?.switchVideoCamera() + public func setOutgoingVideoIsPaused(_ isPaused: Bool) { + self.videoCapturer?.setIsVideoEnabled(!isPaused) } public func setCurrentAudioOutput(_ output: AudioSessionOutput) { @@ -748,6 +762,10 @@ public final class PresentationCallImpl: PresentationCall { } public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) { - self.ongoingContext?.makeOutgoingVideoView(completion: completion) + self.videoCapturer?.makeOutgoingVideoView(completion: completion) + } + + public func switchVideoCamera() { + self.videoCapturer?.switchCamera() } } diff --git a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift index d0a41bf7ff..c85864c2aa 100644 --- a/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift +++ b/submodules/TelegramCallsUI/Sources/PresentationCallManager.swift @@ -278,52 +278,6 @@ public final class PresentationCallManagerImpl: PresentationCallManager { self.callSettingsDisposable?.dispose() } - public func injectRingingStateSynchronously(account: Account, ringingState: CallSessionRingingState, callSession: CallSession) { - if self.currentCall != nil { - return - } - - let semaphore = DispatchSemaphore(value: 0) - var data: (PreferencesView, AccountSharedDataView, Peer?)? - let _ = combineLatest( - account.postbox.preferencesView(keys: [PreferencesKeys.voipConfiguration, ApplicationSpecificPreferencesKeys.voipDerivedState, PreferencesKeys.appConfiguration]) - |> take(1), - accountManager.sharedData(keys: [SharedDataKeys.autodownloadSettings]) - |> take(1), - account.postbox.transaction { transaction -> Peer? in - return transaction.getPeer(ringingState.peerId) - } - ).start(next: { preferences, sharedData, peer in - data = (preferences, sharedData, peer) - semaphore.signal() - }) - semaphore.wait() - - if let (preferences, sharedData, maybePeer) = data, let peer = maybePeer { - let configuration = preferences.values[PreferencesKeys.voipConfiguration] as? VoipConfiguration ?? .defaultValue - let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue - let derivedState = preferences.values[ApplicationSpecificPreferencesKeys.voipDerivedState] as? VoipDerivedState ?? .default - let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings - - let enableCallKit = true - - let call = PresentationCallImpl(account: account, audioSession: self.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: enableCallKit ? 
callKitIntegrationIfEnabled(self.callKitIntegration, settings: self.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: self.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: self.getDeviceAccessData, initialState: callSession, internalId: ringingState.id, peerId: ringingState.peerId, isOutgoing: false, peer: peer, proxyServer: self.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: .none, updatedNetworkType: account.networkType) - self.updateCurrentCall(call) - self.currentCallPromise.set(.single(call)) - self.hasActiveCallsPromise.set(true) - self.removeCurrentCallDisposable.set((call.canBeRemoved - |> deliverOnMainQueue).start(next: { [weak self, weak call] value in - if value, let strongSelf = self, let call = call { - if strongSelf.currentCall === call { - strongSelf.updateCurrentCall(nil) - strongSelf.currentCallPromise.set(.single(nil)) - strongSelf.hasActiveCallsPromise.set(false) - } - } - })) - } - } - private func ringingStatesUpdated(_ ringingStates: [(Account, Peer, CallSessionRingingState, Bool, NetworkType)], enableCallKit: Bool) { if let firstState = ringingStates.first { if self.currentCall == nil { @@ -338,7 +292,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager { let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue - let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType) + let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? 
callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType, startWithVideo: firstState.2.isVideo) strongSelf.updateCurrentCall(call) strongSelf.currentCallPromise.set(.single(call)) strongSelf.hasActiveCallsPromise.set(true) @@ -491,7 +445,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager { let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue - let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType) + let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType, startWithVideo: isVideo) strongSelf.updateCurrentCall(call) strongSelf.currentCallPromise.set(.single(call)) strongSelf.hasActiveCallsPromise.set(true) diff --git a/submodules/TelegramCore/Sources/CallSessionManager.swift b/submodules/TelegramCore/Sources/CallSessionManager.swift index dae749103b..55c5657aec 100644 --- a/submodules/TelegramCore/Sources/CallSessionManager.swift +++ b/submodules/TelegramCore/Sources/CallSessionManager.swift @@ -107,9 +107,10 @@ typealias CallSessionStableId = Int64 public struct CallSessionRingingState: Equatable { public let id: CallSessionInternalId public let peerId: PeerId + public let isVideo: Bool public static func ==(lhs: CallSessionRingingState, rhs: CallSessionRingingState) -> Bool { - return lhs.id == rhs.id && lhs.peerId == rhs.peerId + return lhs.id == rhs.id && lhs.peerId == rhs.peerId && lhs.isVideo == 
rhs.isVideo } } @@ -365,7 +366,7 @@ private final class CallSessionManagerContext { var ringingContexts: [CallSessionRingingState] = [] for (id, context) in self.contexts { if case .ringing = context.state { - ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId)) + ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId, isVideo: context.type == .video)) } } return ringingContexts diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/Contents.json new file mode 100644 index 0000000000..ac8c955846 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_accept.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/ic_calls_accept.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/ic_calls_accept.pdf new file mode 100644 index 0000000000..b8eb92df16 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallAcceptButton.imageset/ic_calls_accept.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/Contents.json new file mode 100644 index 0000000000..1a290513b6 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_video.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/ic_calls_video.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/ic_calls_video.pdf new file mode 100644 index 0000000000..436c916812 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallCameraButton.imageset/ic_calls_video.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/Contents.json new file mode 100644 index 0000000000..ffce8d6fc5 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_decline.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/ic_calls_decline.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/ic_calls_decline.pdf new file mode 100644 index 0000000000..5f765d4e0b Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallDeclineButton.imageset/ic_calls_decline.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@2x.png deleted file mode 100644 index 07403f47ec..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@3x.png 
b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@3x.png deleted file mode 100644 index 62a62518d8..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/CallMuteIcon@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json index da57014646..1d8c3321c7 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/Contents.json @@ -1,22 +1,12 @@ { "images" : [ { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallMuteIcon@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallMuteIcon@3x.png", - "scale" : "3x" + "filename" : "ic_calls_mute.pdf", + "idiom" : "universal" } ], "info" : { - "version" : 1, - "author" : "xcode" + "author" : "xcode", + "version" : 1 } -} \ No newline at end of file +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/ic_calls_mute.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/ic_calls_mute.pdf new file mode 100644 index 0000000000..22a473dbc8 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallMuteButton.imageset/ic_calls_mute.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@2x.png deleted file mode 100644 index f3587ea0f3..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@3x.png deleted file mode 100644 index 8237f34688..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/CallPhoneIcon@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/Contents.json deleted file mode 100644 index 8d3bffbcf4..0000000000 --- a/submodules/TelegramUI/Images.xcassets/Call/CallPhoneButton.imageset/Contents.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "images" : [ - { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallPhoneIcon@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallPhoneIcon@3x.png", - "scale" : "3x" - } - ], - "info" : { - "version" : 1, - "author" : "xcode" - } -} \ No newline at end of file diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@2x.png deleted file mode 100644 index 9b5e566eb4..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@3x.png deleted file mode 100644 index 0026e6063d..0000000000 Binary files 
a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/CallRouteSpeaker@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json index 6995cd3e65..d0d69abee5 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/Contents.json @@ -1,22 +1,12 @@ { "images" : [ { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallRouteSpeaker@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallRouteSpeaker@3x.png", - "scale" : "3x" + "filename" : "ic_calls_speaker.pdf", + "idiom" : "universal" } ], "info" : { - "version" : 1, - "author" : "xcode" + "author" : "xcode", + "version" : 1 } -} \ No newline at end of file +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/ic_calls_speaker.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/ic_calls_speaker.pdf new file mode 100644 index 0000000000..22af6e39c6 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallRouteSpeaker.imageset/ic_calls_speaker.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@2x.png b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@2x.png deleted file mode 100644 index 996959b567..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@2x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@3x.png b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@3x.png deleted file mode 100644 index 345c9a8f3b..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/CallSpeakerIcon@3x.png and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json index 6ae257da6a..d0d69abee5 100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/Contents.json @@ -1,22 +1,12 @@ { "images" : [ { - "idiom" : "universal", - "scale" : "1x" - }, - { - "idiom" : "universal", - "filename" : "CallSpeakerIcon@2x.png", - "scale" : "2x" - }, - { - "idiom" : "universal", - "filename" : "CallSpeakerIcon@3x.png", - "scale" : "3x" + "filename" : "ic_calls_speaker.pdf", + "idiom" : "universal" } ], "info" : { - "version" : 1, - "author" : "xcode" + "author" : "xcode", + "version" : 1 } -} \ No newline at end of file +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/ic_calls_speaker.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/ic_calls_speaker.pdf new file mode 100644 index 0000000000..22af6e39c6 Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallSpeakerButton.imageset/ic_calls_speaker.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json index 0dd1dc8086..389dd744c8 
100644 --- a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json +++ b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Contents.json @@ -1,7 +1,7 @@ { "images" : [ { - "filename" : "Video.pdf", + "filename" : "ic_calls_cameraflip.pdf", "idiom" : "universal" } ], diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Video.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Video.pdf deleted file mode 100644 index 71f35844b3..0000000000 Binary files a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/Video.pdf and /dev/null differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/ic_calls_cameraflip.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/ic_calls_cameraflip.pdf new file mode 100644 index 0000000000..4f0db952ed Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallSwitchCameraButton.imageset/ic_calls_cameraflip.pdf differ diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/Contents.json b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/Contents.json new file mode 100644 index 0000000000..9796eb0aa2 --- /dev/null +++ b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/Contents.json @@ -0,0 +1,12 @@ +{ + "images" : [ + { + "filename" : "ic_calls_tlogo.pdf", + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/ic_calls_tlogo.pdf b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/ic_calls_tlogo.pdf new file mode 100644 index 0000000000..6da8161ecb Binary files /dev/null and b/submodules/TelegramUI/Images.xcassets/Call/CallTitleLogo.imageset/ic_calls_tlogo.pdf differ diff --git a/submodules/TelegramVoip/Sources/OngoingCallContext.swift b/submodules/TelegramVoip/Sources/OngoingCallContext.swift index 5f9834ebb7..c37830a41b 100644 --- a/submodules/TelegramVoip/Sources/OngoingCallContext.swift +++ b/submodules/TelegramVoip/Sources/OngoingCallContext.swift @@ -245,7 +245,6 @@ private protocol OngoingCallThreadLocalContextProtocol: class { func nativeSetNetworkType(_ type: NetworkType) func nativeSetIsMuted(_ value: Bool) func nativeSetVideoEnabled(_ value: Bool) - func nativeSwitchVideoCamera() func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void) func nativeDebugInfo() -> String func nativeVersion() -> String @@ -292,6 +291,26 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol { } } +public final class OngoingCallVideoCapturer { + fileprivate let impl: OngoingCallThreadLocalContextVideoCapturer + + public init() { + self.impl = OngoingCallThreadLocalContextVideoCapturer() + } + + public func switchCamera() { + self.impl.switchVideoCamera() + } + + public func makeOutgoingVideoView(completion: @escaping (UIView?) 
-> Void) { + self.impl.makeOutgoingVideoView(completion) + } + + public func setIsVideoEnabled(_ value: Bool) { + self.impl.setIsVideoEnabled(value) + } +} + extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol { func nativeSetNetworkType(_ type: NetworkType) { self.setNetworkType(ongoingNetworkTypeForTypeWebrtc(type)) } @@ -309,10 +328,6 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt self.setVideoEnabled(value) } - func nativeSwitchVideoCamera() { - self.switchVideoCamera() - } - func nativeDebugInfo() -> String { return self.debugInfo() ?? "" } @@ -463,7 +478,7 @@ public final class OngoingCallContext { return result } - public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, isVideo: Bool, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) { + public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) { let _ = setupLogs OngoingCallThreadLocalContext.applyServerConfig(serializedData) //OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData) @@ -542,9 +557,9 @@ public final class OngoingCallContext { )) } } - let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, isVideo: isVideo, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowsP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in + let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in callSessionManager?.sendSignalingData(internalId: internalId, data: data) - }) + }, videoCapturer: video?.impl) strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context)) context.stateChanged = { state, videoState, remoteVideoState in @@ -696,12 +711,6 @@ public final class OngoingCallContext { } }
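// With `OngoingCallVideoCapturer` wrapping `OngoingCallThreadLocalContextVideoCapturer`,
// UI code drives the camera directly and the call context only consumes its frames.
// A sketch of the resulting call sites (all names as introduced by this patch):
//
//     let capturer = OngoingCallVideoCapturer()
//     capturer.makeOutgoingVideoView { view in
//         // Local preview, available before any OngoingCallContext exists.
//     }
//     capturer.switchCamera()
//     capturer.setIsVideoEnabled(false) // pause without tearing down the session
//     // later: OngoingCallContext(..., video: capturer, ...)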
- public func switchVideoCamera() { - self.withContext { context in - context.nativeSwitchVideoCamera() - } - } - public func debugInfo() -> Signal<(String, String), NoError> { let poll = Signal<(String, String), NoError> { subscriber in self.withContext { context in @@ -725,14 +734,4 @@ } } } - - public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) { - self.withContext { context in - if let context = context as? OngoingCallThreadLocalContextWebrtc { - context.makeOutgoingVideoView(completion) - } else { - completion(nil) - } - } - } } diff --git a/submodules/TgVoipWebrtc/Impl/CodecsApple.h b/submodules/TgVoipWebrtc/Impl/CodecsApple.h index 442df44018..a09d444731 100644 --- a/submodules/TgVoipWebrtc/Impl/CodecsApple.h +++ b/submodules/TgVoipWebrtc/Impl/CodecsApple.h @@ -13,6 +13,8 @@ namespace TGVOIP_NAMESPACE { class VideoCapturerInterface { public: virtual ~VideoCapturerInterface(); + + virtual void setIsEnabled(bool isEnabled) = 0; }; void configurePlatformAudio(); diff --git a/submodules/TgVoipWebrtc/Impl/CodecsApple.mm b/submodules/TgVoipWebrtc/Impl/CodecsApple.mm index 71e4d06a24..e3fad7c4e8 100644 --- a/submodules/TgVoipWebrtc/Impl/CodecsApple.mm +++ b/submodules/TgVoipWebrtc/Impl/CodecsApple.mm @@ -112,6 +112,10 @@ [_videoCapturer stopCapture]; } +- (void)setIsEnabled:(bool)isEnabled { + [_videoCapturer setIsEnabled:isEnabled]; +} + @end @interface VideoCapturerInterfaceImplHolder : NSObject @@ -153,6 +157,16 @@ public: }); } + virtual void setIsEnabled(bool isEnabled) { + VideoCapturerInterfaceImplHolder *implReference = _implReference; + dispatch_async(dispatch_get_main_queue(), ^{ + if (implReference.reference != nil) { + VideoCapturerInterfaceImplReference *reference = (__bridge VideoCapturerInterfaceImplReference *)implReference.reference; + [reference setIsEnabled:isEnabled]; + } + }); + } + private: rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source; VideoCapturerInterfaceImplHolder *_implReference; diff --git a/submodules/TgVoipWebrtc/Impl/Manager.cpp b/submodules/TgVoipWebrtc/Impl/Manager.cpp index 6ffced3943..1e7cbe0a5f 100644 --- a/submodules/TgVoipWebrtc/Impl/Manager.cpp +++ b/submodules/TgVoipWebrtc/Impl/Manager.cpp @@ -26,8 +26,7 @@ static rtc::Thread *makeMediaThread() { return value.get(); } - -static rtc::Thread *getMediaThread() { +rtc::Thread *Manager::getMediaThread() { static rtc::Thread *value = makeMediaThread(); return value; } @@ -37,7 +36,7 @@ Manager::Manager( TgVoipEncryptionKey encryptionKey, bool enableP2P, std::vector<TgVoipRtcServer> const &rtcServers, - bool isVideo, + std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture, std::function<void (const TgVoipState &)> stateUpdated, std::function<void (bool)> videoStateUpdated, std::function<void (bool)> remoteVideoIsActiveUpdated, @@ -47,7 +46,7 @@ _thread(thread), _encryptionKey(encryptionKey), _enableP2P(enableP2P), _rtcServers(rtcServers), -_startWithVideo(isVideo), +_videoCapture(videoCapture), _stateUpdated(stateUpdated), _videoStateUpdated(videoStateUpdated), _remoteVideoIsActiveUpdated(remoteVideoIsActiveUpdated), @@ -111,11 +110,11 @@ void Manager::start() { ); })); bool isOutgoing = _encryptionKey.isOutgoing; - _mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, startWithVideo = _startWithVideo, weakThis]() { + _mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, videoCapture = _videoCapture, weakThis]() { return new MediaManager( getMediaThread(), isOutgoing, - startWithVideo, + videoCapture, [thread, weakThis](const rtc::CopyOnWriteBuffer &packet) {
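// Media packets are emitted on the media thread; the PostTask below marshals each
// one back onto the manager's own thread, and the weak_ptr lock drops anything that
// arrives after the Manager has been destroyed.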
thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() { auto strongThis = weakThis.lock(); @@ -203,12 +202,6 @@ void Manager::setMuteOutgoingAudio(bool mute) { }); } -void Manager::switchVideoCamera() { - _mediaManager->perform([](MediaManager *mediaManager) { - mediaManager->switchVideoCamera(); - }); -} - void Manager::notifyIsLocalVideoActive(bool isActive) { rtc::CopyOnWriteBuffer buffer; uint8_t mode = 4; @@ -228,12 +221,6 @@ void Manager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) { - _mediaManager->perform([sink](MediaManager *mediaManager) { - mediaManager->setOutgoingVideoOutput(sink); - }); -} - #ifdef TGVOIP_NAMESPACE } #endif diff --git a/submodules/TgVoipWebrtc/Impl/Manager.h b/submodules/TgVoipWebrtc/Impl/Manager.h index 79e8583937..ff113c4175 100644 --- a/submodules/TgVoipWebrtc/Impl/Manager.h +++ b/submodules/TgVoipWebrtc/Impl/Manager.h @@ -12,12 +12,14 @@ namespace TGVOIP_NAMESPACE { class Manager : public std::enable_shared_from_this<Manager> { public: + static rtc::Thread *getMediaThread(); + Manager( rtc::Thread *thread, TgVoipEncryptionKey encryptionKey, bool enableP2P, std::vector<TgVoipRtcServer> const &rtcServers, - bool isVideo, + std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture, std::function<void (const TgVoipState &)> stateUpdated, std::function<void (bool)> videoStateUpdated, std::function<void (bool)> remoteVideoIsActiveUpdated, @@ -29,17 +31,15 @@ public: void receiveSignalingData(const std::vector<uint8_t> &data); void setSendVideo(bool sendVideo); void setMuteOutgoingAudio(bool mute); - void switchVideoCamera(); void notifyIsLocalVideoActive(bool isActive); void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink); - void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink); private: rtc::Thread *_thread; TgVoipEncryptionKey _encryptionKey; bool _enableP2P; std::vector<TgVoipRtcServer> _rtcServers; - bool _startWithVideo; + std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture; std::function<void (const TgVoipState &)> _stateUpdated; std::function<void (bool)> _videoStateUpdated; std::function<void (bool)> _remoteVideoIsActiveUpdated; diff --git a/submodules/TgVoipWebrtc/Impl/MediaManager.cpp b/submodules/TgVoipWebrtc/Impl/MediaManager.cpp index b1e22eb78f..5c91342c7f 100644 --- a/submodules/TgVoipWebrtc/Impl/MediaManager.cpp +++ b/submodules/TgVoipWebrtc/Impl/MediaManager.cpp @@ -19,6 +19,9 @@ #include "api/video_codecs/builtin_video_encoder_factory.h" +#include "TgVoip.h" +#include "VideoCaptureInterfaceImpl.h" + #if TARGET_OS_IPHONE #include "CodecsApple.h" @@ -164,7 +167,7 @@ static rtc::Thread *makeWorkerThread() { } -static rtc::Thread *getWorkerThread() { +rtc::Thread *MediaManager::getWorkerThread() { static rtc::Thread *value = makeWorkerThread(); return value; } @@ -172,7 +175,7 @@ static rtc::Thread *getWorkerThread() { MediaManager::MediaManager( rtc::Thread *thread, bool isOutgoing, - bool startWithVideo, + std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture, std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted, std::function<void (bool)> localVideoCaptureActiveUpdated ) : _packetEmitted(packetEmitted), _localVideoCaptureActiveUpdated(localVideoCaptureActiveUpdated), _thread(thread), _eventLog(std::make_unique<webrtc::RtcEventLogNull>()), -_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) { +_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), +_videoCapture(videoCapture) { _ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing; _ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing; _ssrcAudio.fecIncoming = isOutgoing ?
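// Both peers derive their stream ids from the same constants, mirrored by
// isOutgoing: the caller's outgoing audio SSRC is the callee's incoming one (and
// likewise for the FEC pair), so the two sides agree without SSRC negotiation.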
ssrcAudioFecIncoming : ssrcAudioFecOutgoing; @@ -199,7 +203,6 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) { _videoCodecs = AssignPayloadTypesAndDefaultCodecs(videoEncoderFactory->GetSupportedFormats()); _isSendingVideo = false; - _useFrontCamera = true; _audioNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, false)); _videoNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, true)); @@ -283,9 +286,9 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) { _videoChannel->SetInterface(_videoNetworkInterface.get(), webrtc::MediaTransportConfig()); - _nativeVideoSource = makeVideoSource(_thread, getWorkerThread()); - - if (startWithVideo) { + if (_videoCapture != nullptr) { + ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->setIsActiveUpdated(this->_localVideoCaptureActiveUpdated); + setSendVideo(true); } } @@ -372,10 +375,6 @@ void MediaManager::setSendVideo(bool sendVideo) { codec.SetParam(cricket::kCodecParamStartBitrate, 512); codec.SetParam(cricket::kCodecParamMaxBitrate, 2500); - _videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) { - localVideoCaptureActiveUpdated(isActive); - }); - cricket::VideoSendParameters videoSendParameters; videoSendParameters.codecs.push_back(codec); @@ -402,11 +401,15 @@ void MediaManager::setSendVideo(bool sendVideo) { videoSendStreamParams.cname = "cname"; _videoChannel->AddSendStream(videoSendStreamParams); - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get()); + if (_videoCapture != nullptr) { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource.get()); + } _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); } else { _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); - _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get()); + if (_videoCapture != nullptr) { + _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource); + } } cricket::VideoRecvParameters videoRecvParameters; @@ -449,8 +452,6 @@ void MediaManager::setSendVideo(bool sendVideo) { _videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr); _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); - _videoCapturer.reset(); - _videoChannel->RemoveRecvStream(_ssrcVideo.incoming); _videoChannel->RemoveRecvStream(_ssrcVideo.fecIncoming); _videoChannel->RemoveSendStream(_ssrcVideo.outgoing); @@ -466,25 +467,11 @@ void MediaManager::setMuteOutgoingAudio(bool mute) { _audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && !_muteOutgoingAudio, nullptr, &_audioSource); } -void MediaManager::switchVideoCamera() { - if (_isSendingVideo) { - _useFrontCamera = !_useFrontCamera; - _videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) { - localVideoCaptureActiveUpdated(isActive); - }); - } -} - void MediaManager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) { _currentIncomingVideoSink = sink; _videoChannel->SetSink(_ssrcVideo.incoming, _currentIncomingVideoSink.get()); } -void MediaManager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) { -
_currentOutgoingVideoSink = sink; - _nativeVideoSource->AddOrUpdateSink(_currentOutgoingVideoSink.get(), rtc::VideoSinkWants()); -} - MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) : _mediaManager(mediaManager), _isVideo(isVideo) { diff --git a/submodules/TgVoipWebrtc/Impl/MediaManager.h b/submodules/TgVoipWebrtc/Impl/MediaManager.h index 5e49c94ff4..a283d02283 100644 --- a/submodules/TgVoipWebrtc/Impl/MediaManager.h +++ b/submodules/TgVoipWebrtc/Impl/MediaManager.h @@ -7,6 +7,8 @@ #include "api/transport/field_trial_based_config.h" #include "pc/rtp_sender.h" +#include "TgVoip.h" + #include #include @@ -54,10 +56,12 @@ private: friend class MediaManager::NetworkInterfaceImpl; public: + static rtc::Thread *getWorkerThread(); + MediaManager( rtc::Thread *thread, bool isOutgoing, - bool startWithVideo, + std::shared_ptr videoCapture, std::function packetEmitted, std::function localVideoCaptureActiveUpdated ); @@ -68,9 +72,7 @@ public: void notifyPacketSent(const rtc::SentPacket &sentPacket); void setSendVideo(bool sendVideo); void setMuteOutgoingAudio(bool mute); - void switchVideoCamera(); void setIncomingVideoOutput(std::shared_ptr> sink); - void setOutgoingVideoOutput(std::shared_ptr> sink); protected: std::function _packetEmitted; @@ -90,7 +92,6 @@ private: std::vector _videoCodecs; bool _isSendingVideo; - bool _useFrontCamera; std::unique_ptr _mediaEngine; std::unique_ptr _call; @@ -99,10 +100,8 @@ private: std::unique_ptr _audioChannel; std::unique_ptr _videoChannel; std::unique_ptr _videoBitrateAllocatorFactory; - rtc::scoped_refptr _nativeVideoSource; - std::unique_ptr _videoCapturer; + std::shared_ptr _videoCapture; std::shared_ptr> _currentIncomingVideoSink; - std::shared_ptr> _currentOutgoingVideoSink; std::unique_ptr _audioNetworkInterface; std::unique_ptr _videoNetworkInterface; diff --git a/submodules/TgVoipWebrtc/Impl/TgVoip.h b/submodules/TgVoipWebrtc/Impl/TgVoip.h index c3be58baf5..ccd7a485d2 100644 --- a/submodules/TgVoipWebrtc/Impl/TgVoip.h +++ b/submodules/TgVoipWebrtc/Impl/TgVoip.h @@ -129,6 +129,19 @@ struct TgVoipAudioDataCallbacks { std::function preprocessed; }; +class TgVoipVideoCaptureInterface { +protected: + TgVoipVideoCaptureInterface() = default; +public: + static std::shared_ptr makeInstance(); + + virtual ~TgVoipVideoCaptureInterface(); + + virtual void switchCamera() = 0; + virtual void setIsVideoEnabled(bool isVideoEnabled) = 0; + virtual void setVideoOutput(std::shared_ptr> sink) = 0; +}; + class TgVoip { protected: TgVoip() = default; @@ -146,7 +159,7 @@ public: std::vector const &rtcServers, TgVoipNetworkType initialNetworkType, TgVoipEncryptionKey const &encryptionKey, - bool isVideo, + std::shared_ptr videoCapture, std::function stateUpdated, std::function videoStateUpdated, std::function remoteVideoIsActiveUpdated, @@ -161,7 +174,6 @@ public: virtual void setEchoCancellationStrength(int strength) = 0; virtual void setIncomingVideoOutput(std::shared_ptr> sink) = 0; - virtual void setOutgoingVideoOutput(std::shared_ptr> sink) = 0; virtual std::string getLastError() = 0; virtual std::string getDebugInfo() = 0; @@ -171,7 +183,6 @@ public: virtual void receiveSignalingData(const std::vector &data) = 0; virtual void setSendVideo(bool sendVideo) = 0; - virtual void switchVideoCamera() = 0; virtual TgVoipFinalState stop() = 0; }; diff --git a/submodules/TgVoipWebrtc/Impl/TgVoip.mm b/submodules/TgVoipWebrtc/Impl/TgVoip.mm index 905f997955..5cfb5d75a4 100644 --- a/submodules/TgVoipWebrtc/Impl/TgVoip.mm +++ 
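Taken together, the `TgVoip.h` changes replace the old `bool isVideo` flag with a caller-owned capture object that exists independently of the call. A minimal sketch of the intended call-site flow follows; the argument lists of `TgVoip::makeInstance` are abbreviated, and `localPreviewSink` is a hypothetical sink, not something defined in this patch:

```cpp
// Sketch only: makeInstance also takes config/state/proxy/network/callback
// parameters that are elided here.
std::shared_ptr<TgVoipVideoCaptureInterface> capture =
    TgVoipVideoCaptureInterface::makeInstance();

// The capturer can be created and previewed before the call exists...
capture->setVideoOutput(localPreviewSink); // hypothetical local-preview sink

// ...and is then handed to the call instead of a bare `isVideo` flag.
TgVoip *call = TgVoip::makeInstance(/* ..., */ capture /* , ... */);

// Camera control no longer goes through TgVoip at all:
capture->switchCamera();
capture->setIsVideoEnabled(false); // pause outgoing frames without tearing down the call
```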
diff --git a/submodules/TgVoipWebrtc/Impl/TgVoip.mm b/submodules/TgVoipWebrtc/Impl/TgVoip.mm
index 905f997955..5cfb5d75a4 100644
--- a/submodules/TgVoipWebrtc/Impl/TgVoip.mm
+++ b/submodules/TgVoipWebrtc/Impl/TgVoip.mm
@@ -5,10 +5,21 @@
 #include "rtc_base/logging.h"
 #include "Manager.h"
+#include "MediaManager.h"
 
 #include
 #include
 
+#include "VideoCaptureInterfaceImpl.h"
+
+#if TARGET_OS_IPHONE
+
+#include "CodecsApple.h"
+
+#else
+#error "Unsupported platform"
+#endif
+
 #import
 #include
@@ -142,7 +153,7 @@ public:
         std::vector<TgVoipRtcServer> const &rtcServers,
         TgVoipConfig const &config,
         TgVoipEncryptionKey const &encryptionKey,
-        bool isVideo,
+        std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
         TgVoipNetworkType initialNetworkType,
         std::function<void (const TgVoipState &)> stateUpdated,
         std::function<void (bool)> videoStateUpdated,
@@ -160,13 +171,13 @@ public:
 
         bool enableP2P = config.enableP2P;
 
-        _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, isVideo, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers](){
+        _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
             return new Manager(
                 getManagerThread(),
                 encryptionKey,
                 enableP2P,
                 rtcServers,
-                isVideo,
+                videoCapture,
                 [stateUpdated](const TgVoipState &state) {
                     stateUpdated(state);
                 },
@@ -201,12 +212,6 @@ public:
             manager->setSendVideo(sendVideo);
         });
     };
-
-    void switchVideoCamera() override {
-        _manager->perform([](Manager *manager) {
-            manager->switchVideoCamera();
-        });
-    }
 
     void setNetworkType(TgVoipNetworkType networkType) override {
         /*message::NetworkType mappedType;
@@ -267,12 +272,6 @@ public:
             manager->setIncomingVideoOutput(sink);
         });
     }
-
-    void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
-        _manager->perform([sink](Manager *manager) {
-            manager->setOutgoingVideoOutput(sink);
-        });
-    }
 
     void setAudioOutputGainControlEnabled(bool enabled) override {
     }
@@ -387,7 +386,7 @@ TgVoip *TgVoip::makeInstance(
     std::vector<TgVoipRtcServer> const &rtcServers,
     TgVoipNetworkType initialNetworkType,
     TgVoipEncryptionKey const &encryptionKey,
-    bool isVideo,
+    std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
     std::function<void (const TgVoipState &)> stateUpdated,
     std::function<void (bool)> videoStateUpdated,
     std::function<void (bool)> remoteVideoIsActiveUpdated,
@@ -400,7 +399,7 @@ TgVoip *TgVoip::makeInstance(
         rtcServers,
         config,
         encryptionKey,
-        isVideo,
+        videoCapture,
         initialNetworkType,
         stateUpdated,
         videoStateUpdated,
@@ -411,6 +410,12 @@ TgVoip *TgVoip::makeInstance(
 
 TgVoip::~TgVoip() = default;
 
+std::shared_ptr<TgVoipVideoCaptureInterface> TgVoipVideoCaptureInterface::makeInstance() {
+    return std::shared_ptr<TgVoipVideoCaptureInterface>(new TgVoipVideoCaptureInterfaceImpl());
+}
+
+TgVoipVideoCaptureInterface::~TgVoipVideoCaptureInterface() = default;
+
 #ifdef TGVOIP_NAMESPACE
 }
 #endif
diff --git a/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h b/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h
index fbaee62e2d..ce5007a21c 100644
--- a/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h
+++ b/submodules/TgVoipWebrtc/Impl/ThreadLocalObject.h
@@ -43,6 +43,12 @@ public:
         });
     }
 
+    T *getSyncAssumingSameThread() {
+        assert(_thread->IsCurrent());
+        assert(_valueHolder->_value != nullptr);
+        return _valueHolder->_value.get();
+    }
+
 private:
     rtc::Thread *_thread;
     std::shared_ptr<ValueHolder<T>> _valueHolder;
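`ThreadLocalObject` previously exposed only asynchronous access through `perform(...)`; the new accessor trades that safety for a direct pointer, guarded by asserts. A fragment contrasting the two access patterns, assuming the patch's headers and an existing `TgVoipVideoCaptureInterfaceImpl *videoCaptureImpl` (hypothetical variable names):

```cpp
// Asynchronous access: safe from any thread; the lambda runs on the owning thread.
videoCaptureImpl->_impl->perform([](TgVoipVideoCaptureInterfaceObject *object) {
    object->switchCamera();
});

// Synchronous access: valid only when the caller is already on the owning
// thread (here, the shared media thread), which the asserts enforce.
// This is what MediaManager relies on when it wires up the video source.
TgVoipVideoCaptureInterfaceObject *object =
    videoCaptureImpl->_impl->getSyncAssumingSameThread();
object->setIsActiveUpdated(isActiveCallback); // hypothetical callback
```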
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h
index 5c8c24e9c2..f9f3e63f09 100644
--- a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h
+++ b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h
@@ -17,6 +17,7 @@
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps;
 - (void)stopCapture;
+- (void)setIsEnabled:(bool)isEnabled;
 
 @end
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm
index 723145155f..8149f87c45 100644
--- a/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm
+++ b/submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm
@@ -39,6 +39,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
     UIDeviceOrientation _orientation;
     void (^_isActiveUpdated)(bool);
+    bool _isActiveValue;
+    bool _inForegroundValue;
+    bool _isPaused;
 }
 
 @end
@@ -49,6 +52,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
     self = [super init];
     if (self != nil) {
         _source = source;
+        _isActiveValue = true;
+        _inForegroundValue = true;
+        _isPaused = false;
 
         _isActiveUpdated = [isActiveUpdated copy];
 
         if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
@@ -124,6 +130,11 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
     [self stopCaptureWithCompletionHandler:nil];
 }
 
+- (void)setIsEnabled:(bool)isEnabled {
+    _isPaused = !isEnabled;
+    [self updateIsActiveValue];
+}
+
 - (void)startCaptureWithDevice:(AVCaptureDevice *)device
                         format:(AVCaptureDeviceFormat *)format
                            fps:(NSInteger)fps
@@ -253,7 +264,9 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
     RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                              rotation:_rotation
                                                           timeStampNs:timeStampNs];
-    getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
+    if (!_isPaused) {
+        getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
+    }
 }
 
 - (void)captureOutput:(AVCaptureOutput *)captureOutput
@@ -316,15 +329,23 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
         _hasRetriedOnFatalError = NO;
     }];
 
-    if (_isActiveUpdated) {
-        _isActiveUpdated(true);
-    }
+    _inForegroundValue = true;
+    [self updateIsActiveValue];
 }
 
 - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
     RTCLog(@"Capture session stopped.");
-    if (_isActiveUpdated) {
-        _isActiveUpdated(false);
+    _inForegroundValue = false;
+    [self updateIsActiveValue];
+}
+
+- (void)updateIsActiveValue {
+    bool isActive = _inForegroundValue && !_isPaused;
+    if (isActive != _isActiveValue) {
+        _isActiveValue = isActive;
+        if (_isActiveUpdated) {
+            _isActiveUpdated(_isActiveValue);
+        }
     }
 }
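The capturer now folds two independent inputs, the session's running state and the new pause flag, into a single edge-triggered `isActive` notification. The same logic in a plain C++ sketch (names mirror the Objective-C ivars; this is illustrative, not part of the patch):

```cpp
#include <functional>

// Illustrative mirror of -updateIsActiveValue: the callback fires only on
// transitions, never redundantly.
struct CaptureActivityState {
    bool inForeground = true;   // tracks AVCaptureSession start/stop notifications
    bool isPaused = false;      // tracks setIsEnabled:
    bool isActiveValue = true;  // last value reported to the callback
    std::function<void(bool)> isActiveUpdated;

    void update() {
        bool isActive = inForeground && !isPaused;
        if (isActive != isActiveValue) {
            isActiveValue = isActive;
            if (isActiveUpdated) {
                isActiveUpdated(isActive);
            }
        }
    }
};
```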
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.h b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.h
new file mode 100644
index 0000000000..324c61b181
--- /dev/null
+++ b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.h
@@ -0,0 +1,53 @@
+#ifndef VIDEO_CAPTURE_INTERFACE_IMPL_H
+#define VIDEO_CAPTURE_INTERFACE_IMPL_H
+
+#include "TgVoip.h"
+#include <memory>
+#include "ThreadLocalObject.h"
+#include "api/media_stream_interface.h"
+
+#ifdef TGVOIP_NAMESPACE
+namespace TGVOIP_NAMESPACE {
+#endif
+
+class VideoCapturerInterface;
+
+class TgVoipVideoCaptureInterfaceObject {
+public:
+    TgVoipVideoCaptureInterfaceObject();
+    ~TgVoipVideoCaptureInterfaceObject();
+
+    void switchCamera();
+    void setIsVideoEnabled(bool isVideoEnabled);
+    void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
+    void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated);
+
+public:
+    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _videoSource;
+    std::unique_ptr<VideoCapturerInterface> _videoCapturer;
+
+private:
+    std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
+    std::function<void (bool)> _isActiveUpdated;
+    bool _useFrontCamera;
+    bool _isVideoEnabled;
+};
+
+class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
+public:
+    TgVoipVideoCaptureInterfaceImpl();
+    virtual ~TgVoipVideoCaptureInterfaceImpl();
+
+    virtual void switchCamera();
+    virtual void setIsVideoEnabled(bool isVideoEnabled);
+    virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
+
+public:
+    std::unique_ptr<ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>> _impl;
+};
+
+#ifdef TGVOIP_NAMESPACE
+}
+#endif
+
+#endif
diff --git a/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.mm b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.mm
new file mode 100644
index 0000000000..e66e8c4a7a
--- /dev/null
+++ b/submodules/TgVoipWebrtc/Impl/VideoCaptureInterfaceImpl.mm
@@ -0,0 +1,90 @@
+#include "VideoCaptureInterfaceImpl.h"
+
+#include "CodecsApple.h"
+#include "Manager.h"
+#include "MediaManager.h"
+
+#ifdef TGVOIP_NAMESPACE
+namespace TGVOIP_NAMESPACE {
+#endif
+
+TgVoipVideoCaptureInterfaceObject::TgVoipVideoCaptureInterfaceObject() {
+    _useFrontCamera = true;
+    _isVideoEnabled = true;
+    _videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
+    // the video source should outlive the capturer
+    _videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
+        if (this->_isActiveUpdated) {
+            this->_isActiveUpdated(isActive);
+        }
+    });
+}
+
+TgVoipVideoCaptureInterfaceObject::~TgVoipVideoCaptureInterfaceObject() {
+    if (_currentSink != nullptr) {
+        _videoSource->RemoveSink(_currentSink.get());
+    }
+}
+
+void TgVoipVideoCaptureInterfaceObject::switchCamera() {
+    _useFrontCamera = !_useFrontCamera;
+    _videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
+        if (this->_isActiveUpdated) {
+            this->_isActiveUpdated(isActive);
+        }
+    });
+}
+
+void TgVoipVideoCaptureInterfaceObject::setIsVideoEnabled(bool isVideoEnabled) {
+    if (_isVideoEnabled != isVideoEnabled) {
+        _isVideoEnabled = isVideoEnabled;
+        _videoCapturer->setIsEnabled(isVideoEnabled);
+    }
+}
+
+void TgVoipVideoCaptureInterfaceObject::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
+    if (_currentSink != nullptr) {
+        _videoSource->RemoveSink(_currentSink.get());
+    }
+    _currentSink = sink;
+    if (_currentSink != nullptr) {
+        _videoSource->AddOrUpdateSink(_currentSink.get(), rtc::VideoSinkWants());
+    }
+}
+
+void TgVoipVideoCaptureInterfaceObject::setIsActiveUpdated(std::function<void (bool)> isActiveUpdated) {
+    _isActiveUpdated = isActiveUpdated;
+}
+
+TgVoipVideoCaptureInterfaceImpl::TgVoipVideoCaptureInterfaceImpl() {
+    _impl.reset(new ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>(
+        Manager::getMediaThread(),
+        []() {
+            return new TgVoipVideoCaptureInterfaceObject();
+        }
+    ));
+}
+
+TgVoipVideoCaptureInterfaceImpl::~TgVoipVideoCaptureInterfaceImpl() {
+}
+
+void TgVoipVideoCaptureInterfaceImpl::switchCamera() {
+    _impl->perform([](TgVoipVideoCaptureInterfaceObject *impl) {
+        impl->switchCamera();
+    });
+}
+
+void TgVoipVideoCaptureInterfaceImpl::setIsVideoEnabled(bool isVideoEnabled) {
+    _impl->perform([isVideoEnabled](TgVoipVideoCaptureInterfaceObject *impl) {
+        impl->setIsVideoEnabled(isVideoEnabled);
+    });
+}
+
+void TgVoipVideoCaptureInterfaceImpl::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
+    _impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {
+        impl->setVideoOutput(sink);
+    });
+}
+
+#ifdef TGVOIP_NAMESPACE
+}
+#endif
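The "should outlive the capturer" comment in the constructor is load-bearing: `_videoSource` is declared before `_videoCapturer`, and C++ destroys non-static members in reverse declaration order, so the capturer is torn down while its source is still alive. A standalone sketch of that invariant (hypothetical `Source`/`Capturer`/`Holder` types, not from the patch):

```cpp
#include <cstdio>
#include <memory>

struct Source {
    ~Source() { std::puts("source destroyed"); }
};

struct Capturer {
    Source *source; // non-owning reference to the source; must die first
    ~Capturer() { std::puts("capturer destroyed"); }
};

struct Holder {
    std::shared_ptr<Source> source;      // declared first  => destroyed last
    std::unique_ptr<Capturer> capturer;  // declared second => destroyed first
};

int main() {
    Holder holder;
    holder.source = std::make_shared<Source>();
    holder.capturer.reset(new Capturer{holder.source.get()});
    // On scope exit this prints "capturer destroyed", then "source destroyed",
    // so the capturer never observes a dangling source pointer.
}
```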
diff --git a/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h b/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h
index 1582e4351a..b15f82773f 100644
--- a/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h
+++ b/submodules/TgVoipWebrtc/PublicHeaders/TgVoip/OngoingCallThreadLocalContext.h
@@ -78,6 +78,17 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 
 @end
 
+@interface OngoingCallThreadLocalContextVideoCapturer : NSObject
+
+- (instancetype _Nonnull)init;
+
+- (void)switchVideoCamera;
+- (void)setIsVideoEnabled:(bool)isVideoEnabled;
+
+- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
+
+@end
+
 @interface OngoingCallThreadLocalContextWebrtc : NSObject
 
 + (void)setupLoggingFunction:(void (* _Nullable)(NSString * _Nullable))loggingFunction;
@@ -88,7 +99,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 @property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc);
 @property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
 
-- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData;
+- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
 
 - (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
 - (bool)needRate;
@@ -99,10 +110,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 
 - (void)setIsMuted:(bool)isMuted;
 - (void)setVideoEnabled:(bool)videoEnabled;
-- (void)switchVideoCamera;
 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
 - (void)makeIncomingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
-- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
 - (void)addSignalingData:(NSData * _Nonnull)data;
 
 @end
diff --git a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
index a10e9950be..833505792b 100644
--- a/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
+++ b/submodules/TgVoipWebrtc/Sources/OngoingCallThreadLocalContext.mm
@@ -21,6 +21,49 @@ using namespace TGVOIP_NAMESPACE;
 
 @end
 
+@interface OngoingCallThreadLocalContextVideoCapturer () {
+    std::shared_ptr<TgVoipVideoCaptureInterface> _interface;
+}
+
+@end
+
+@implementation OngoingCallThreadLocalContextVideoCapturer
+
+- (instancetype _Nonnull)init {
+    self = [super init];
+    if (self != nil) {
+        _interface = TgVoipVideoCaptureInterface::makeInstance();
+    }
+    return self;
+}
+
+- (void)switchVideoCamera {
+    _interface->switchCamera();
+}
+
+- (void)setIsVideoEnabled:(bool)isVideoEnabled {
+    _interface->setIsVideoEnabled(isVideoEnabled);
+}
+
+- (std::shared_ptr<TgVoipVideoCaptureInterface>)getInterface {
+    return _interface;
+}
+
+- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
+    std::shared_ptr<TgVoipVideoCaptureInterface> interface = _interface;
+    dispatch_async(dispatch_get_main_queue(), ^{
+        VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
+        remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
+
+        std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
+        interface->setVideoOutput(sink);
+
+        completion(remoteRenderer);
+    });
+}
+
+@end
+
 @interface OngoingCallThreadLocalContextWebrtc () {
     id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
     int32_t _contextId;
@@ -36,6 +79,7 @@ using namespace TGVOIP_NAMESPACE;
     OngoingCallStateWebrtc _state;
     OngoingCallVideoStateWebrtc _videoState;
     OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
+    OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;
 
     int32_t _signalBars;
     NSData *_lastDerivedState;
@@ -134,7 +178,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     return @"2.7.7";
 }
 
-- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData; {
+- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
     self = [super init];
     if (self != nil) {
         _queue = queue;
@@ -146,7 +190,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
         _callPacketTimeout = 10.0;
         _networkType = networkType;
         _sendSignalingData = [sendSignalingData copy];
-        if (isVideo) {
+        _videoCapturer = videoCapturer;
+        if (videoCapturer != nil) {
             _videoState = OngoingCallVideoStateActiveOutgoing;
             _remoteVideoState = OngoingCallRemoteVideoStateActive;
         } else {
@@ -236,7 +281,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             parsedRtcServers,
             callControllerNetworkTypeForType(networkType),
             encryptionKey,
-            isVideo,
+            [_videoCapturer getInterface],
             [weakSelf, queue](TgVoipState state) {
                 [queue dispatch:^{
                     __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
@@ -424,12 +469,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (void)switchVideoCamera {
-    if (_tgVoip) {
-        _tgVoip->switchVideoCamera();
-    }
-}
-
 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType {
     if (_networkType != networkType) {
         _networkType = networkType;
@@ -457,23 +496,5 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
-    if (_tgVoip) {
-        __weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
-        dispatch_async(dispatch_get_main_queue(), ^{
-            VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
-            remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
-
-            std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
-            __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
-            if (strongSelf) {
-                strongSelf->_tgVoip->setOutgoingVideoOutput(sink);
-            }
-
-            completion(remoteRenderer);
-        });
-    }
-}
-
 @end
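With `setOutgoingVideoOutput` gone from the call context, anything that wants local frames attaches a sink to the capturer instead. `rtc::VideoSinkInterface<webrtc::VideoFrame>` is the standard WebRTC sink contract; a minimal custom sink might look like this (illustrative only, `FrameCounterSink` is not part of the patch):

```cpp
#include <atomic>
#include <memory>

#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"

// A trivial sink: counts frames delivered by the capturer's video source.
class FrameCounterSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
    void OnFrame(const webrtc::VideoFrame &frame) override {
        frameCount_.fetch_add(1, std::memory_order_relaxed);
    }

private:
    std::atomic<int> frameCount_{0};
};

// Usage against the new interface: the capture object detaches any previous
// sink before attaching this one (see setVideoOutput above).
//     auto sink = std::make_shared<FrameCounterSink>();
//     videoCapture->setVideoOutput(sink);
```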