Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Commit 75a707c439
@@ -69,15 +69,29 @@ public struct PresentationCallState: Equatable {
}

public final class PresentationCallVideoView {
public enum Orientation {
case rotation0
case rotation90
case rotation180
case rotation270
}

public let view: UIView
public let setOnFirstFrameReceived: ((() -> Void)?) -> Void

public let getOrientation: () -> Orientation
public let setOnOrientationUpdated: (((Orientation) -> Void)?) -> Void

public init(
view: UIView,
setOnFirstFrameReceived: @escaping ((() -> Void)?) -> Void
setOnFirstFrameReceived: @escaping ((() -> Void)?) -> Void,
getOrientation: @escaping () -> Orientation,
setOnOrientationUpdated: @escaping (((Orientation) -> Void)?) -> Void
) {
self.view = view
self.setOnFirstFrameReceived = setOnFirstFrameReceived
self.getOrientation = getOrientation
self.setOnOrientationUpdated = setOnOrientationUpdated
}
}
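Note (not part of the commit): PresentationCallVideoView now exposes the current video orientation alongside the first-frame callback. A minimal consumer sketch, assuming a hypothetical `attach(videoView:to:)` helper and a videoView obtained from the call:

// Hypothetical consumer of the extended PresentationCallVideoView API.
func attach(videoView: PresentationCallVideoView, to container: UIView) {
    container.addSubview(videoView.view)

    // React to the first decoded frame, e.g. to hide a placeholder.
    videoView.setOnFirstFrameReceived {
        print("first frame received")
    }

    // Query the current orientation and observe later changes.
    var orientation = videoView.getOrientation()
    videoView.setOnOrientationUpdated { updated in
        orientation = updated
        print("orientation changed to \(updated)")
    }
    _ = orientation
}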
@@ -220,8 +220,8 @@ public extension CALayer {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "opacity", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
}

func animateScale(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "transform.scale", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, completion: completion)
func animateScale(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, additive: Bool = false, completion: ((Bool) -> Void)? = nil) {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "transform.scale", timingFunction: timingFunction, duration: duration, delay: delay, mediaTimingFunction: mediaTimingFunction, removeOnCompletion: removeOnCompletion, additive: additive, completion: completion)
}

func animateScaleY(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, mediaTimingFunction: CAMediaTimingFunction? = nil, removeOnCompletion: Bool = true, completion: ((Bool) -> Void)? = nil) {

@@ -576,7 +576,7 @@ public extension ContainedViewLayoutTransition {
}
}

func animateTransformScale(node: ASDisplayNode, from fromScale: CGFloat, completion: ((Bool) -> Void)? = nil) {
func animateTransformScale(node: ASDisplayNode, from fromScale: CGFloat, additive: Bool = false, completion: ((Bool) -> Void)? = nil) {
let t = node.layer.transform
let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
if currentScale.isEqual(to: fromScale) {
@@ -592,7 +592,16 @@ public extension ContainedViewLayoutTransition {
completion(true)
}
case let .animated(duration, curve):
node.layer.animateScale(from: fromScale, to: currentScale, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, completion: { result in
let calculatedFrom: CGFloat
let calculatedTo: CGFloat
if additive {
calculatedFrom = fromScale - currentScale
calculatedTo = 0.0
} else {
calculatedFrom = fromScale
calculatedTo = currentScale
}
node.layer.animateScale(from: calculatedFrom, to: calculatedTo, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, additive: additive, completion: { result in
if let completion = completion {
completion(result)
}
@@ -953,6 +962,73 @@ public extension ContainedViewLayoutTransition {
})
}
}

func updateTransformRotation(view: UIView, angle: CGFloat, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) {
let t = view.layer.transform
let currentAngle = atan2(t.m12, t.m11)
if currentAngle.isEqual(to: angle) {
if let completion = completion {
completion(true)
}
return
}

switch self {
case .immediate:
view.layer.transform = CATransform3DMakeRotation(angle, 0.0, 0.0, 1.0)
if let completion = completion {
completion(true)
}
case let .animated(duration, curve):
let previousAngle: CGFloat
if beginWithCurrentState, let presentation = view.layer.presentation() {
let t = presentation.transform
previousAngle = atan2(t.m12, t.m11)
} else {
previousAngle = currentAngle
}
view.layer.transform = CATransform3DMakeRotation(angle, 0.0, 0.0, 1.0)
view.layer.animateRotation(from: previousAngle, to: angle, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, completion: { result in
if let completion = completion {
completion(result)
}
})
}
}

func updateTransformRotationAndScale(view: UIView, angle: CGFloat, scale: CGPoint, beginWithCurrentState: Bool = false, completion: ((Bool) -> Void)? = nil) {
let t = view.layer.transform
let currentAngle = atan2(t.m12, t.m11)
let currentScale = CGPoint(x: t.m11, y: t.m12)
if currentAngle.isEqual(to: angle) && currentScale == scale {
if let completion = completion {
completion(true)
}
return
}

switch self {
case .immediate:
view.layer.transform = CATransform3DRotate(CATransform3DMakeScale(scale.x, scale.y, 1.0), angle, 0.0, 0.0, 1.0)
if let completion = completion {
completion(true)
}
case let .animated(duration, curve):
let previousAngle: CGFloat
if beginWithCurrentState, let presentation = view.layer.presentation() {
let t = presentation.transform
previousAngle = atan2(t.m12, t.m11)
} else {
previousAngle = currentAngle
}
view.layer.transform = CATransform3DRotate(CATransform3DMakeScale(scale.x, scale.y, 1.0), angle, 0.0, 0.0, 1.0)
view.layer.animateRotation(from: previousAngle, to: angle, duration: duration, timingFunction: curve.timingFunction, mediaTimingFunction: curve.mediaTimingFunction, completion: { result in
if let completion = completion {
completion(result)
}
})
}
}
}

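Note (not part of the commit): with additive: true the scale animation is expressed as a delta on top of the layer's model value (from fromScale - currentScale down to 0.0), so several additive animations can overlap on the same layer without fighting each other. A minimal sketch of the same arithmetic using plain Core Animation, with hypothetical example values:

import QuartzCore

// Illustrates the additive-scale bookkeeping used above with a plain CABasicAnimation.
// `layer`, `fromScale` and `currentScale` are hypothetical example inputs.
func addAdditiveScaleAnimation(to layer: CALayer, fromScale: CGFloat, currentScale: CGFloat) {
    let animation = CABasicAnimation(keyPath: "transform.scale")
    // Additive animations describe an offset from the model value,
    // so the animation runs from (fromScale - currentScale) to 0.
    animation.fromValue = fromScale - currentScale
    animation.toValue = 0.0
    animation.isAdditive = true
    animation.duration = 0.3
    layer.add(animation, forKey: "additiveScale")
}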
#if os(iOS)

@@ -244,6 +244,20 @@ public final class ManagedAudioSession {

if let availableInputs = audioSession.availableInputs {
var hasHeadphones = false

var headphonesAreActive = false
loop: for currentOutput in audioSession.currentRoute.outputs {
switch currentOutput.portType {
case .headphones, .bluetoothA2DP, .bluetoothHFP:
headphonesAreActive = true
hasHeadphones = true
activeOutput = .headphones
break loop
default:
break
}
}

for input in availableInputs {
var isActive = false
for currentInput in audioSession.currentRoute.inputs {
@@ -253,7 +267,7 @@ public final class ManagedAudioSession {
}

if input.portType == .builtInMic {
if isActive {
if isActive && !headphonesAreActive {
activeOutput = .builtin
inner: for currentOutput in audioSession.currentRoute.outputs {
if currentOutput.portType == .builtInSpeaker {
@@ -739,13 +753,28 @@ public final class ManagedAudioSession {
case .voiceCall, .playWithPossiblePortOverride, .record(true):
try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
if let routes = AVAudioSession.sharedInstance().availableInputs {
for route in routes {
if route.portType == .builtInMic {
if case .record = updatedType, self.isHeadsetPluggedInValue {
} else {
var alreadySet = false
if self.isHeadsetPluggedInValue {
loop: for route in routes {
switch route.portType {
case .headphones, .bluetoothA2DP, .bluetoothHFP:
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
alreadySet = true
break loop
default:
break
}
}
}
if !alreadySet {
for route in routes {
if route.portType == .builtInMic {
if case .record = updatedType, self.isHeadsetPluggedInValue {
} else {
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
}
break
}
break
}
}
}

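Note (not part of the commit): the route-setup change prefers a plugged-in headset or Bluetooth input before falling back to the built-in microphone. A compact sketch of that priority around AVAudioSession; the helper name and the omission of error handling are assumptions:

import AVFoundation

// Prefers a headset/Bluetooth input when one is available, otherwise the built-in mic.
func selectPreferredInput() {
    let session = AVAudioSession.sharedInstance()
    guard let routes = session.availableInputs else {
        return
    }
    let headsetPorts: [AVAudioSession.Port] = [.headsetMic, .bluetoothHFP]
    if let headset = routes.first(where: { headsetPorts.contains($0.portType) }) {
        // A wired or Bluetooth headset input wins over everything else.
        try? session.setPreferredInput(headset)
    } else if let builtIn = routes.first(where: { $0.portType == .builtInMic }) {
        // Fall back to the device microphone.
        try? session.setPreferredInput(builtIn)
    }
}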
@@ -31,6 +31,13 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {

var appearance: Appearance
var image: Image
var isEnabled: Bool

init(appearance: Appearance, image: Image, isEnabled: Bool = true) {
self.appearance = appearance
self.image = image
self.isEnabled = isEnabled
}
}

private let contentContainer: ASDisplayNode
@@ -107,6 +114,9 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
self.effectView.isHidden = true
}

self.alpha = content.isEnabled ? 1.0 : 0.7
self.isUserInteractionEnabled = content.isEnabled

let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))

@@ -17,7 +17,7 @@ enum CallControllerButtonsSpeakerMode {
enum CallControllerButtonsMode: Equatable {
enum VideoState: Equatable {
case notAvailable
case possible
case possible(Bool)
case outgoingRequested
case incomingRequested
case active
@@ -52,7 +52,7 @@ private enum ButtonDescription: Equatable {

case accept
case end(EndType)
case enableCamera(Bool)
case enableCamera(Bool, Bool)
case switchCamera
case soundOutput(SoundOutput)
case mute(Bool)
@@ -203,12 +203,15 @@ final class CallControllerButtonsNode: ASDisplayNode {
switch videoState {
case .active, .possible, .incomingRequested, .outgoingRequested:
let isCameraActive: Bool
if case .possible = videoState {
let isCameraEnabled: Bool
if case let .possible(value) = videoState {
isCameraActive = false
isCameraEnabled = value
} else {
isCameraActive = !self.isCameraPaused
isCameraEnabled = true
}
topButtons.append(.enableCamera(isCameraActive))
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled))
topButtons.append(.mute(self.isMuted))
if case .possible = videoState {
topButtons.append(.soundOutput(soundOutput))
@@ -252,10 +255,13 @@ final class CallControllerButtonsNode: ASDisplayNode {
switch videoState {
case .active, .incomingRequested, .outgoingRequested:
let isCameraActive: Bool
if case .possible = videoState {
let isCameraEnabled: Bool
if case let .possible(value) = videoState {
isCameraActive = false
isCameraEnabled = value
} else {
isCameraActive = !self.isCameraPaused
isCameraEnabled = true
}

var topButtons: [ButtonDescription] = []
@@ -272,7 +278,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
soundOutput = .bluetooth
}

topButtons.append(.enableCamera(isCameraActive))
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
topButtons.append(.end(.end))
@@ -304,7 +310,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
soundOutput = .bluetooth
}

topButtons.append(.enableCamera(false))
topButtons.append(.enableCamera(false, true))
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))

@@ -373,10 +379,11 @@ final class CallControllerButtonsNode: ASDisplayNode {
case .end:
buttonText = strings.Call_End
}
case let .enableCamera(isEnabled):
case let .enableCamera(isActivated, isEnabled):
buttonContent = CallControllerButtonItemNode.Content(
appearance: .blurred(isFilled: isEnabled),
image: .camera
appearance: .blurred(isFilled: isActivated),
image: .camera,
isEnabled: isEnabled
)
buttonText = strings.Call_Camera
case .switchCamera:

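Note (not part of the commit): .enableCamera now carries two flags, the first describing whether the camera is currently activated (fills the button), the second whether the button is enabled at all (otherwise it is dimmed and not interactive). A small sketch of how such a description might be consumed, with hypothetical names:

// Hypothetical rendering of the two-flag camera button description.
enum ButtonDescriptionSketch {
    case enableCamera(Bool, Bool) // (isActivated, isEnabled)
}

func describe(_ button: ButtonDescriptionSketch) -> String {
    switch button {
    case let .enableCamera(isActivated, isEnabled):
        // isActivated drives the filled appearance, isEnabled drives alpha and interaction.
        let fill = isActivated ? "filled" : "outlined"
        let state = isEnabled ? "enabled" : "disabled (alpha 0.7, no interaction)"
        return "camera button: \(fill), \(state)"
    }
}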
@@ -34,16 +34,25 @@ private final class CallVideoNode: ASDisplayNode {
private(set) var isReady: Bool = false
private var isReadyTimer: SwiftSignalKit.Timer?

init(videoView: PresentationCallVideoView, isReadyUpdated: @escaping () -> Void) {
private let isFlippedUpdated: () -> Void

private(set) var currentOrientation: PresentationCallVideoView.Orientation

init(videoView: PresentationCallVideoView, isReadyUpdated: @escaping () -> Void, orientationUpdated: @escaping () -> Void, isFlippedUpdated: @escaping () -> Void) {
self.isReadyUpdated = isReadyUpdated
self.isFlippedUpdated = isFlippedUpdated

self.videoTransformContainer = ASDisplayNode()
self.videoTransformContainer.clipsToBounds = true
self.videoView = videoView
self.videoView.view.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
videoView.view.clipsToBounds = true

self.currentOrientation = videoView.getOrientation()

super.init()

self.backgroundColor = .black

self.videoTransformContainer.view.addSubview(self.videoView.view)
self.addSubnode(self.videoTransformContainer)

@@ -58,6 +67,16 @@ private final class CallVideoNode: ASDisplayNode {
}
}

self.videoView.setOnOrientationUpdated { [weak self] orientation in
guard let strongSelf = self else {
return
}
if strongSelf.currentOrientation != orientation {
strongSelf.currentOrientation = orientation
orientationUpdated()
}
}

self.isReadyTimer = SwiftSignalKit.Timer(timeout: 3.0, repeat: false, completion: { [weak self] in
guard let strongSelf = self else {
return
@@ -75,28 +94,80 @@ private final class CallVideoNode: ASDisplayNode {
}

func updateLayout(size: CGSize, cornerRadius: CGFloat, transition: ContainedViewLayoutTransition) {
let videoFrame = CGRect(origin: CGPoint(), size: size)

self.currentCornerRadius = cornerRadius

let previousVideoFrame = self.videoTransformContainer.frame
self.videoTransformContainer.frame = videoFrame
if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
transition.animatePositionAdditive(node: self.videoTransformContainer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY))
transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / videoFrame.height)
var rotationAngle: CGFloat
var rotateFrame: Bool
switch self.currentOrientation {
case .rotation0:
rotationAngle = 0.0
rotateFrame = false
case .rotation90:
rotationAngle = -CGFloat.pi / 2.0
rotateFrame = true
case .rotation180:
rotationAngle = -CGFloat.pi
rotateFrame = false
case .rotation270:
rotationAngle = -CGFloat.pi * 3.0 / 2.0
rotateFrame = true
}
var originalRotateFrame = rotateFrame
if size.width > size.height {
rotateFrame = !rotateFrame
if rotateFrame {
originalRotateFrame = true
}
} else {
if rotateFrame {
originalRotateFrame = false
}
}
let videoFrame: CGRect
let scale: CGFloat
if rotateFrame {
let frameSize = CGSize(width: size.height, height: size.width).aspectFitted(size)
videoFrame = CGRect(origin: CGPoint(x: floor((size.width - frameSize.width) / 2.0), y: floor((size.height - frameSize.height) / 2.0)), size: frameSize)
if size.width > size.height {
scale = frameSize.height / size.width
} else {
scale = frameSize.width / size.height
}
} else {
videoFrame = CGRect(origin: CGPoint(), size: size)
if size.width > size.height {
scale = 1.0
} else {
scale = 1.0
}
}

self.videoView.view.frame = videoFrame
let previousVideoFrame = self.videoTransformContainer.frame
self.videoTransformContainer.bounds = CGRect(origin: CGPoint(), size: size)
if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
transition.animateTransformScale(node: self.videoTransformContainer, from: previousVideoFrame.height / size.height, additive: true)
}
transition.updatePosition(node: self.videoTransformContainer, position: videoFrame.center)
transition.updateSublayerTransformScale(node: self.videoTransformContainer, scale: scale)

let localVideoSize = originalRotateFrame ? CGSize(width: size.height, height: size.width) : size
let localVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - localVideoSize.width) / 2.0), y: floor((size.height - localVideoSize.height) / 2.0)), size: localVideoSize)

self.videoView.view.bounds = localVideoFrame
self.videoView.view.center = localVideoFrame.center
transition.updateTransformRotation(view: self.videoView.view, angle: rotationAngle)

if let effectView = self.effectView {
effectView.frame = videoFrame
transition.animatePositionAdditive(layer: effectView.layer, offset: CGPoint(x: previousVideoFrame.midX - videoFrame.midX, y: previousVideoFrame.midY - videoFrame.midY))
transition.animateTransformScale(view: effectView, from: previousVideoFrame.height / videoFrame.height)
transition.updateFrame(view: effectView, frame: videoFrame)
}

transition.updateCornerRadius(layer: self.videoTransformContainer.layer, cornerRadius: self.currentCornerRadius)
if let effectView = self.effectView {
transition.updateCornerRadius(layer: effectView.layer, cornerRadius: self.currentCornerRadius)
}

transition.updateCornerRadius(layer: self.layer, cornerRadius: self.currentCornerRadius)
}

func updateIsBlurred(isBlurred: Bool) {
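Note (not part of the commit): the layout code above maps each orientation to a rotation angle and decides whether width and height should be swapped. The mapping on its own, as a stand-alone sketch in which `Orientation` stands in for PresentationCallVideoView.Orientation:

import CoreGraphics

// Orientation-to-rotation mapping mirrored from the layout code above.
enum Orientation {
    case rotation0, rotation90, rotation180, rotation270
}

func rotationParameters(for orientation: Orientation) -> (angle: CGFloat, swapsSides: Bool) {
    switch orientation {
    case .rotation0:
        return (0.0, false)
    case .rotation90:
        return (-CGFloat.pi / 2.0, true)
    case .rotation180:
        return (-CGFloat.pi, false)
    case .rotation270:
        return (-CGFloat.pi * 3.0 / 2.0, true)
    }
}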
@@ -178,6 +249,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private let keyButtonNode: HighlightableButtonNode

private var validLayout: (ContainerViewLayout, CGFloat)?
private var disableActionsUntilTimestamp: Double = 0.0

var isMuted: Bool = false {
didSet {
@@ -318,25 +390,38 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}

self.buttonsNode.toggleVideo = { [weak self] in
guard let strongSelf = self else {
guard let strongSelf = self, let callState = strongSelf.callState else {
return
}
if strongSelf.outgoingVideoNodeValue == nil {
strongSelf.call.requestVideo()
} else {
strongSelf.isVideoPaused = !strongSelf.isVideoPaused
strongSelf.outgoingVideoNodeValue?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
switch callState.state {
case .active:
if strongSelf.outgoingVideoNodeValue == nil {
strongSelf.call.requestVideo()
} else {
strongSelf.isVideoPaused = !strongSelf.isVideoPaused
strongSelf.outgoingVideoNodeValue?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)

if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
default:
break
}
}

self.buttonsNode.rotateCamera = { [weak self] in
self?.call.switchVideoCamera()
guard let strongSelf = self else {
return
}
strongSelf.call.switchVideoCamera()
if let outgoingVideoNode = strongSelf.outgoingVideoNodeValue {
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
}

self.keyButtonNode.addTarget(self, action: #selector(self.keyPressed), forControlEvents: .touchUpInside)
@@ -347,7 +432,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
override func didLoad() {
super.didLoad()

let panRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
let panRecognizer = CallPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:)))
panRecognizer.shouldBegin = { [weak self] _ in
guard let strongSelf = self else {
return false
}
if strongSelf.areUserActionsDisabledNow() {
return false
}
return true
}
self.view.addGestureRecognizer(panRecognizer)

let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:)))
@@ -387,6 +481,23 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if self.audioOutputState?.0 != availableOutputs || self.audioOutputState?.1 != currentOutput {
self.audioOutputState = (availableOutputs, currentOutput)
self.updateButtonsMode()

self.setupAudioOutputs()
}
}

private func setupAudioOutputs() {
if self.outgoingVideoNodeValue != nil {
if let audioOutputState = self.audioOutputState, let currentOutput = audioOutputState.currentOutput {
switch currentOutput {
case .headphones:
break
case let .port(port) where port.type == .bluetooth:
break
default:
self.setCurrentAudioOutput?(.speaker)
}
}
}
}

@@ -412,6 +523,20 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
}
}, orientationUpdated: {
guard let strongSelf = self else {
return
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}, isFlippedUpdated: {
guard let strongSelf = self else {
return
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
})
strongSelf.incomingVideoNodeValue = incomingVideoNode
strongSelf.expandedVideoNode = incomingVideoNode
@@ -437,15 +562,21 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.view.backgroundColor = .black
outgoingVideoView.view.clipsToBounds = true
if let audioOutputState = strongSelf.audioOutputState, let currentOutput = audioOutputState.currentOutput {
switch currentOutput {
case .speaker, .builtin:
break
default:
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {}, orientationUpdated: {
guard let strongSelf = self else {
return
}
}
let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {})
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}, isFlippedUpdated: {
guard let strongSelf = self else {
return
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
})
strongSelf.outgoingVideoNodeValue = outgoingVideoNode
strongSelf.minimizedVideoNode = outgoingVideoNode
if let expandedVideoNode = strongSelf.expandedVideoNode {
@@ -456,6 +587,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
}
strongSelf.setupAudioOutputs()
}
})
}
@@ -626,7 +758,14 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
case .notAvailable:
mappedVideoState = .notAvailable
case .possible:
mappedVideoState = .possible
var isEnabled = false
switch callState.state {
case .active:
isEnabled = true
default:
break
}
mappedVideoState = .possible(isEnabled)
case .outgoingRequested:
mappedVideoState = .outgoingRequested
case .incomingRequested:
@@ -654,8 +793,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}

if let (layout, navigationHeight) = self.validLayout {
self.pictureInPictureTransitionFraction = 0.0

self.containerLayoutUpdated(layout, navigationBarHeight: navigationHeight, transition: .animated(duration: 0.3, curve: .spring))
}
}
@@ -678,7 +815,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
func animateOut(completion: @escaping () -> Void) {
self.statusBar.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
if !self.shouldStayHiddenUntilConnection || self.containerNode.alpha > 0.0 {
self.containerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
self.containerNode.layer.allowsGroupOpacity = true
self.containerNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak self] _ in
self?.containerNode.layer.allowsGroupOpacity = true
})
self.containerNode.layer.animateScale(from: 1.0, to: 1.04, duration: 0.3, removeOnCompletion: false, completion: { _ in
completion()
})
@@ -723,7 +863,15 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
insets.right = interpolate(from: expandedInset, to: insets.right, value: 1.0 - self.pictureInPictureTransitionFraction)

let previewVideoSide = interpolate(from: 350.0, to: 200.0, value: 1.0 - self.pictureInPictureTransitionFraction)
let previewVideoSize = layout.size.aspectFitted(CGSize(width: previewVideoSide, height: previewVideoSide))
var previewVideoSize = layout.size.aspectFitted(CGSize(width: previewVideoSide, height: previewVideoSide))
if let minimizedVideoNode = minimizedVideoNode {
switch minimizedVideoNode.currentOrientation {
case .rotation90, .rotation270:
previewVideoSize = CGSize(width: previewVideoSize.height, height: previewVideoSize.width)
default:
break
}
}
let previewVideoY: CGFloat
let previewVideoX: CGFloat

@@ -852,6 +1000,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)

let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size)

let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)

if let expandedVideoNode = self.expandedVideoNode {
@@ -933,6 +1082,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro

private var debugTapCounter: (Double, Int) = (0.0, 0)

private func areUserActionsDisabledNow() -> Bool {
return CACurrentMediaTime() < self.disableActionsUntilTimestamp
}

@objc func tapGesture(_ recognizer: UITapGestureRecognizer) {
if case .ended = recognizer.state {
if !self.pictureInPictureTransitionFraction.isZero {
@@ -947,17 +1100,20 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if let expandedVideoNode = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode {
let point = recognizer.location(in: recognizer.view)
if minimizedVideoNode.frame.contains(point) {
let copyView = minimizedVideoNode.view.snapshotView(afterScreenUpdates: false)
copyView?.frame = minimizedVideoNode.frame
self.expandedVideoNode = minimizedVideoNode
self.minimizedVideoNode = expandedVideoNode
if let supernode = expandedVideoNode.supernode {
supernode.insertSubnode(expandedVideoNode, aboveSubnode: minimizedVideoNode)
}
if let (layout, navigationBarHeight) = self.validLayout {
self.disableAnimationForExpandedVideoOnce = true
self.animationForExpandedVideoSnapshotView = copyView
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
if !self.areUserActionsDisabledNow() {
let copyView = minimizedVideoNode.view.snapshotView(afterScreenUpdates: false)
copyView?.frame = minimizedVideoNode.frame
self.expandedVideoNode = minimizedVideoNode
self.minimizedVideoNode = expandedVideoNode
if let supernode = expandedVideoNode.supernode {
supernode.insertSubnode(expandedVideoNode, aboveSubnode: minimizedVideoNode)
}
self.disableActionsUntilTimestamp = CACurrentMediaTime() + 0.3
if let (layout, navigationBarHeight) = self.validLayout {
self.disableAnimationForExpandedVideoOnce = true
self.animationForExpandedVideoSnapshotView = copyView
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
}
} else {
var updated = false
@@ -1135,19 +1291,23 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}

@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
@objc private func panGesture(_ recognizer: CallPanGestureRecognizer) {
switch recognizer.state {
case .began:
let location = recognizer.location(in: self.view)
if self.self.pictureInPictureTransitionFraction.isZero, let _ = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode, minimizedVideoNode.frame.contains(location) {
guard let location = recognizer.firstLocation else {
return
}
if self.pictureInPictureTransitionFraction.isZero, let expandedVideoNode = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode, minimizedVideoNode.frame.contains(location), expandedVideoNode.frame != minimizedVideoNode.frame {
self.minimizedVideoInitialPosition = minimizedVideoNode.position
} else {
} else if let _ = self.expandedVideoNode, let _ = self.minimizedVideoNode {
self.minimizedVideoInitialPosition = nil
if !self.pictureInPictureTransitionFraction.isZero {
self.pictureInPictureGestureState = .dragging(initialPosition: self.containerTransformationNode.position, draggingPosition: self.containerTransformationNode.position)
} else {
self.pictureInPictureGestureState = .collapsing(didSelectCorner: false)
}
} else {
self.pictureInPictureGestureState = .none
}
case .changed:
if let minimizedVideoNode = self.minimizedVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
@@ -1266,3 +1426,38 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
return nil
}
}

private final class CallPanGestureRecognizer: UIPanGestureRecognizer {
private(set) var firstLocation: CGPoint?

public var shouldBegin: ((CGPoint) -> Bool)?

override public init(target: Any?, action: Selector?) {
super.init(target: target, action: action)

self.maximumNumberOfTouches = 1
}

override public func reset() {
super.reset()

self.firstLocation = nil
}

override public func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesBegan(touches, with: event)

let touch = touches.first!
let point = touch.location(in: self.view)
if let shouldBegin = self.shouldBegin, !shouldBegin(point) {
self.state = .failed
return
}

self.firstLocation = point
}

override public func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesMoved(touches, with: event)
}
}

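Note (not part of the commit): CallPanGestureRecognizer records the location of the first touch and can veto itself through shouldBegin before the pan threshold is ever reached. A minimal usage sketch with a hypothetical host view:

import UIKit

// Hypothetical host wiring for the pan recognizer introduced above.
final class HostView: UIView {
    func installPanRecognizer() {
        let recognizer = CallPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
        recognizer.shouldBegin = { point in
            // Ignore touches in the top bar area, for example.
            return point.y > 44.0
        }
        self.addGestureRecognizer(recognizer)
    }

    @objc private func handlePan(_ recognizer: CallPanGestureRecognizer) {
        // firstLocation is the touch-down point, captured before the pan actually begins.
        guard let start = recognizer.firstLocation else {
            return
        }
        print("pan started at \(start), now at \(recognizer.location(in: self))")
    }
}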
@@ -810,10 +810,45 @@ public final class PresentationCallImpl: PresentationCall {
self.ongoingContext?.makeIncomingVideoView(completion: { view in
if let view = view {
let setOnFirstFrameReceived = view.setOnFirstFrameReceived
let setOnOrientationUpdated = view.setOnOrientationUpdated
completion(PresentationCallVideoView(
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
let mappedValue: PresentationCallVideoView.Orientation
switch view.getOrientation() {
case .rotation0:
mappedValue = .rotation0
case .rotation90:
mappedValue = .rotation90
case .rotation180:
mappedValue = .rotation180
case .rotation270:
mappedValue = .rotation270
}
return mappedValue
} else {
return .rotation0
}
},
setOnOrientationUpdated: { f in
setOnOrientationUpdated { value in
let mappedValue: PresentationCallVideoView.Orientation
switch value {
case .rotation0:
mappedValue = .rotation0
case .rotation90:
mappedValue = .rotation90
case .rotation180:
mappedValue = .rotation180
case .rotation270:
mappedValue = .rotation270
}
f?(mappedValue)
}
}
))
} else {
@@ -831,11 +866,47 @@ public final class PresentationCallImpl: PresentationCall {
self.videoCapturer?.makeOutgoingVideoView(completion: { view in
if let view = view {
let setOnFirstFrameReceived = view.setOnFirstFrameReceived
let setOnOrientationUpdated = view.setOnOrientationUpdated
completion(PresentationCallVideoView(
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
let mappedValue: PresentationCallVideoView.Orientation
switch view.getOrientation() {
case .rotation0:
mappedValue = .rotation0
case .rotation90:
mappedValue = .rotation90
case .rotation180:
mappedValue = .rotation180
case .rotation270:
mappedValue = .rotation270
}
return mappedValue
} else {
return .rotation0
}
},
setOnOrientationUpdated: { f in
setOnOrientationUpdated { value in
let mappedValue: PresentationCallVideoView.Orientation
switch value {
case .rotation0:
mappedValue = .rotation0
case .rotation90:
mappedValue = .rotation90
case .rotation180:
mappedValue = .rotation180
case .rotation270:
mappedValue = .rotation270
}
f?(mappedValue)
}
}

))
} else {
completion(nil)

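Note (not part of the commit): the same four-case mapping between the ongoing-context orientation and PresentationCallVideoView.Orientation is repeated inline in each closure above. A hedged sketch of writing it once, assuming both enums exactly as declared elsewhere in this diff:

// Single-place mapping between the two orientation enums used in this diff.
// A sketch only; the commit itself inlines the switch at each call site.
extension PresentationCallVideoView.Orientation {
    init(_ orientation: OngoingCallVideoOrientation) {
        switch orientation {
        case .rotation0:
            self = .rotation0
        case .rotation90:
            self = .rotation90
        case .rotation180:
            self = .rotation180
        case .rotation270:
            self = .rotation270
        }
    }
}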
@@ -318,6 +318,11 @@ public final class OngoingCallVideoCapturer {
view: view,
setOnFirstFrameReceived: { [weak view] f in
view?.setOnFirstFrameReceived(f)
},
getOrientation: {
return .rotation0
},
setOnOrientationUpdated: { _ in
}
))
} else {
@@ -403,16 +408,46 @@ private extension OngoingCallContextState.State {
}
}

public enum OngoingCallVideoOrientation {
case rotation0
case rotation90
case rotation180
case rotation270
}

private extension OngoingCallVideoOrientation {
init(_ orientation: OngoingCallVideoOrientationWebrtc) {
switch orientation {
case .orientation0:
self = .rotation0
case .orientation90:
self = .rotation90
case .orientation180:
self = .rotation180
case .orientation270:
self = .rotation270
@unknown default:
self = .rotation0
}
}
}

public final class OngoingCallContextPresentationCallVideoView {
public let view: UIView
public let setOnFirstFrameReceived: ((() -> Void)?) -> Void
public let getOrientation: () -> OngoingCallVideoOrientation
public let setOnOrientationUpdated: (((OngoingCallVideoOrientation) -> Void)?) -> Void

public init(
view: UIView,
setOnFirstFrameReceived: @escaping ((() -> Void)?) -> Void
setOnFirstFrameReceived: @escaping ((() -> Void)?) -> Void,
getOrientation: @escaping () -> OngoingCallVideoOrientation,
setOnOrientationUpdated: @escaping (((OngoingCallVideoOrientation) -> Void)?) -> Void
) {
self.view = view
self.setOnFirstFrameReceived = setOnFirstFrameReceived
self.getOrientation = getOrientation
self.setOnOrientationUpdated = setOnOrientationUpdated
}
}

@@ -721,6 +756,18 @@ public final class OngoingCallContext {
view: view,
setOnFirstFrameReceived: { [weak view] f in
view?.setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
return OngoingCallVideoOrientation(view.orientation)
} else {
return .rotation0
}
},
setOnOrientationUpdated: { [weak view] f in
view?.setOnOrientationUpdated { value in
f?(OngoingCallVideoOrientation(value))
}
}
))
} else {

@@ -41,6 +41,13 @@ typedef NS_ENUM(int32_t, OngoingCallRemoteVideoStateWebrtc) {
OngoingCallRemoteVideoStateActive
};

typedef NS_ENUM(int32_t, OngoingCallVideoOrientationWebrtc) {
OngoingCallVideoOrientation0,
OngoingCallVideoOrientation90,
OngoingCallVideoOrientation180,
OngoingCallVideoOrientation270
};

typedef NS_ENUM(int32_t, OngoingCallNetworkTypeWebrtc) {
OngoingCallNetworkTypeWifi,
OngoingCallNetworkTypeCellularGprs,
@@ -87,7 +94,10 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {

@protocol OngoingCallThreadLocalContextWebrtcVideoView <NSObject>

@property (nonatomic, readonly) OngoingCallVideoOrientationWebrtc orientation;

- (void)setOnFirstFrameReceived:(void (^ _Nullable)())onFirstFrameReceived;
- (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc))onOrientationUpdated;

@end

@@ -42,20 +42,66 @@

@end

@interface VideoMetalView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView>
@protocol OngoingCallThreadLocalContextWebrtcVideoViewImpl <NSObject>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;

@end

@interface VideoMetalView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView, OngoingCallThreadLocalContextWebrtcVideoViewImpl>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;

@end

@implementation VideoMetalView (VideoViewImpl)

- (OngoingCallVideoOrientationWebrtc)orientation {
return (OngoingCallVideoOrientationWebrtc)self.internalOrientation;
}

- (void)setOrientation:(OngoingCallVideoOrientationWebrtc)orientation {
[self setInternalOrientation:(int)orientation];
}

- (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc))onOrientationUpdated {
if (onOrientationUpdated) {
[self internalSetOnOrientationUpdated:^(int value) {
onOrientationUpdated((OngoingCallVideoOrientationWebrtc)value);
}];
} else {
[self internalSetOnOrientationUpdated:nil];
}
}

@end

@interface GLVideoView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView>
@interface GLVideoView (VideoViewImpl) <OngoingCallThreadLocalContextWebrtcVideoView, OngoingCallThreadLocalContextWebrtcVideoViewImpl>

@property (nonatomic, readwrite) OngoingCallVideoOrientationWebrtc orientation;

@end

@implementation GLVideoView (VideoViewImpl)

- (OngoingCallVideoOrientationWebrtc)orientation {
return (OngoingCallVideoOrientationWebrtc)self.internalOrientation;
}

- (void)setOrientation:(OngoingCallVideoOrientationWebrtc)orientation {
[self setInternalOrientation:(int)orientation];
}

- (void)setOnOrientationUpdated:(void (^ _Nullable)(OngoingCallVideoOrientationWebrtc))onOrientationUpdated {
if (onOrientationUpdated) {
[self internalSetOnOrientationUpdated:^(int value) {
onOrientationUpdated((OngoingCallVideoOrientationWebrtc)value);
}];
} else {
[self internalSetOnOrientationUpdated:nil];
}
}

@end

@implementation OngoingCallThreadLocalContextVideoCapturer
@@ -68,6 +114,9 @@
return self;
}

- (void)dealloc {
}

- (void)switchVideoCamera {
_interface->switchCamera();
}
@@ -140,6 +189,8 @@
OngoingCallVideoStateWebrtc _videoState;
bool _connectedOnce;
OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
OngoingCallVideoOrientationWebrtc _remoteVideoOrientation;
__weak UIView<OngoingCallThreadLocalContextWebrtcVideoViewImpl> *_currentRemoteVideoRenderer;
OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;

int32_t _signalBars;
@@ -267,6 +318,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_remoteVideoState = OngoingCallRemoteVideoStateActive;
}

_remoteVideoOrientation = OngoingCallVideoOrientation0;

std::vector<uint8_t> derivedStateValue;
derivedStateValue.resize(derivedState.length);
[derivedState getBytes:derivedStateValue.data() length:derivedState.length];
@@ -568,6 +621,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
[remoteRenderer setOrientation:strongSelf->_remoteVideoOrientation];
strongSelf->_currentRemoteVideoRenderer = remoteRenderer;
strongSelf->_tgVoip->setIncomingVideoOutput(sink);
}

@@ -578,6 +633,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
[remoteRenderer setOrientation:strongSelf->_remoteVideoOrientation];
strongSelf->_currentRemoteVideoRenderer = remoteRenderer;
strongSelf->_tgVoip->setIncomingVideoOutput(sink);
}

@@ -1 +1 @@
Subproject commit 8e9d3e56d43ffa4ed9ababd5fe7a4b5df8ec94d1
Subproject commit c3345bb26aba541c99ff3c7075bda8024c7a8202