Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00
[WIP] Call UI
parent ee71901167
commit 1157ed10ce
@@ -26,7 +26,7 @@ private func interpolate(from: CGFloat, to: CGFloat, value: CGFloat) -> CGFloat
     return (1.0 - value) * from + value * to
 }
 
-private final class CallVideoNode: ASDisplayNode, PreviewVideoNode {
+final class CallVideoNode: ASDisplayNode, PreviewVideoNode {
     private let videoTransformContainer: ASDisplayNode
     private let videoView: PresentationCallVideoView
 
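Note: dropping private here exposes CallVideoNode to the rest of the module; later in this commit it is instantiated from CallControllerNodeV2 as the outgoing-camera preview node. A minimal sketch of that construction, using only the initializer arguments visible in the toggleVideo() hunk further down (illustrative, not the full call site):

    // Illustrative sketch only; argument labels are taken from the call in the
    // toggleVideo() hunk below. outgoingVideoView is whatever
    // PresentationCallVideoView the call produces.
    let previewNode = CallVideoNode(
        videoView: outgoingVideoView,
        disabledText: nil,
        assumeReadyAfterTimeout: true,
        isReadyUpdated: {},
        orientationUpdated: {},
        isFlippedUpdated: { _ in }
    )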
@@ -16,6 +16,7 @@ import TinyThumbnail
 import ImageBlur
 import TelegramVoip
 import MetalEngine
+import DeviceAccess
 
 final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
     private let sharedContext: SharedAccountContext
@@ -32,6 +33,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
 
     private var callStartTimestamp: Double?
 
+    private var callState: PresentationCallState?
     var isMuted: Bool = false
 
     var toggleMute: (() -> Void)?
@@ -56,6 +58,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
     private var isMicrophoneMutedDisposable: Disposable?
     private var audioLevelDisposable: Disposable?
 
+    private var localVideo: AdaptedCallVideoSource?
     private var remoteVideo: AdaptedCallVideoSource?
 
     init(
@@ -94,7 +97,13 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
             guard let self else {
                 return
             }
-            let _ = self
+            self.toggleVideo()
         }
+        self.callScreen.flipCameraAction = { [weak self] in
+            guard let self else {
+                return
+            }
+            self.call.switchVideoCamera()
+        }
         self.callScreen.microhoneMuteAction = { [weak self] in
             guard let self else {
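Note: the hunk above replaces the placeholder let _ = self in an existing callScreen action closure with a call to the new toggleVideo(), and wires flipCameraAction to switchVideoCamera(). The property name of the first closure is not visible in this excerpt; a hedged sketch of the wiring pattern, with videoAction as an assumed name used purely for illustration:

    // Sketch only. videoAction is an assumed property name; the diff does not
    // show which callScreen closure the toggleVideo() call is assigned to.
    self.callScreen.videoAction = { [weak self] in
        guard let self else {
            return
        }
        self.toggleVideo()
    }
    self.callScreen.flipCameraAction = { [weak self] in
        guard let self else {
            return
        }
        self.call.switchVideoCamera()
    }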
@@ -112,6 +121,7 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
         self.callScreenState = PrivateCallScreen.State(
             lifecycleState: .connecting,
             name: " ",
             shortName: " ",
             avatarImage: nil,
             audioOutput: .internalSpeaker,
             isMicrophoneMuted: false,
@@ -176,6 +186,93 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
         }
     }
 
+    private func toggleVideo() {
+        guard let callState = self.callState else {
+            return
+        }
+        switch callState.state {
+        case .active:
+            switch callState.videoState {
+            case .active(let isScreencast), .paused(let isScreencast):
+                if isScreencast {
+                    (self.call as? PresentationCallImpl)?.disableScreencast()
+                } else {
+                    self.call.disableVideo()
+                }
+            default:
+                DeviceAccess.authorizeAccess(to: .camera(.videoCall), onlyCheck: true, presentationData: self.presentationData, present: { [weak self] c, a in
+                    if let strongSelf = self {
+                        strongSelf.present?(c)
+                    }
+                }, openSettings: { [weak self] in
+                    self?.sharedContext.applicationBindings.openSettings()
+                }, _: { [weak self] ready in
+                    guard let self, ready else {
+                        return
+                    }
+                    let proceed = { [weak self] in
+                        guard let self else {
+                            return
+                        }
+                        /*switch callState.videoState {
+                        case .inactive:
+                            self.isRequestingVideo = true
+                            self.updateButtonsMode()
+                        default:
+                            break
+                        }*/
+                        self.call.requestVideo()
+                    }
+
+                    self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
+                        guard let self else {
+                            return
+                        }
+
+                        if let outgoingVideoView = outgoingVideoView {
+                            outgoingVideoView.view.backgroundColor = .black
+                            outgoingVideoView.view.clipsToBounds = true
+
+                            var updateLayoutImpl: ((ContainerViewLayout, CGFloat) -> Void)?
+
+                            let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, disabledText: nil, assumeReadyAfterTimeout: true, isReadyUpdated: { [weak self] in
+                                guard let self, let (layout, navigationBarHeight) = self.validLayout else {
+                                    return
+                                }
+                                updateLayoutImpl?(layout, navigationBarHeight)
+                            }, orientationUpdated: { [weak self] in
+                                guard let self, let (layout, navigationBarHeight) = self.validLayout else {
+                                    return
+                                }
+                                updateLayoutImpl?(layout, navigationBarHeight)
+                            }, isFlippedUpdated: { [weak self] _ in
+                                guard let self, let (layout, navigationBarHeight) = self.validLayout else {
+                                    return
+                                }
+                                updateLayoutImpl?(layout, navigationBarHeight)
+                            })
+
+                            let controller = VoiceChatCameraPreviewController(sharedContext: self.sharedContext, cameraNode: outgoingVideoNode, shareCamera: { _, _ in
+                                proceed()
+                            }, switchCamera: { [weak self] in
+                                Queue.mainQueue().after(0.1) {
+                                    self?.call.switchVideoCamera()
+                                }
+                            })
+                            self.present?(controller)
+
+                            updateLayoutImpl = { [weak controller] layout, navigationBarHeight in
+                                controller?.containerLayoutUpdated(layout, transition: .immediate)
+                            }
+                        }
+                    })
+                })
+            }
+        default:
+            break
+        }
+    }
+
     private func resolvedEmojiKey(data: Data) -> [String] {
         if let emojiKey = self.emojiKey, emojiKey.data == data {
             return emojiKey.resolvedKey
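Note: the toggleVideo() added above does three things when the call is active: an active or paused screencast is stopped via disableScreencast(), active camera video is stopped via disableVideo(), and otherwise camera access is requested through DeviceAccess, a VoiceChatCameraPreviewController is shown with a CallVideoNode preview, and confirming the preview calls requestVideo(). A compact restatement of that branch logic, as a sketch (permission prompt, preview sheet and layout plumbing elided):

    // Condensed, illustrative sketch of the decision implemented in toggleVideo() above.
    private func toggleVideoSketch() {
        guard let callState = self.callState, case .active = callState.state else {
            return
        }
        switch callState.videoState {
        case .active(let isScreencast), .paused(let isScreencast):
            if isScreencast {
                // Screencast is handled by the call implementation directly.
                (self.call as? PresentationCallImpl)?.disableScreencast()
            } else {
                self.call.disableVideo()
            }
        default:
            // In the real code this runs only after camera access is granted and
            // the user confirms the camera preview sheet.
            self.call.requestVideo()
        }
    }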
@@ -186,6 +283,8 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
     }
 
     func updateCallState(_ callState: PresentationCallState) {
+        self.callState = callState
+
         let mappedLifecycleState: PrivateCallScreen.State.LifecycleState
         switch callState.state {
         case .waiting:
@@ -236,9 +335,23 @@ final class CallControllerNodeV2: ViewControllerTracingNode, CallControllerNodeProtocol {
             self.remoteVideo = nil
         }
 
+        switch callState.videoState {
+        case .active(let isScreencast), .paused(let isScreencast):
+            if isScreencast {
+                self.localVideo = nil
+            } else {
+                if self.localVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: false) {
+                    self.localVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
+                }
+            }
+        case .inactive, .notAvailable:
+            self.localVideo = nil
+        }
+
         if var callScreenState = self.callScreenState {
             callScreenState.lifecycleState = mappedLifecycleState
             callScreenState.remoteVideo = self.remoteVideo
+            callScreenState.localVideo = self.localVideo
 
             if self.callScreenState != callScreenState {
                 self.callScreenState = callScreenState
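Note: the hunk above derives localVideo from the call's video state: a screencast or inactive video clears it, while active camera video lazily wraps the outgoing frame signal in an AdaptedCallVideoSource, which is then pushed into PrivateCallScreen.State. The remote side is handled above this hunk and is not shown here; assuming it follows the same pattern, it would look roughly like:

    // Assumed symmetric handling of the incoming stream; this code is not part
    // of the diff and is only a sketch of the likely pattern.
    if self.remoteVideo == nil, let call = self.call as? PresentationCallImpl, let videoStreamSignal = call.video(isIncoming: true) {
        self.remoteVideo = AdaptedCallVideoSource(videoStreamSignal: videoStreamSignal)
    }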
@@ -896,7 +896,13 @@ public final class PresentationCallImpl: PresentationCall {
     }
 
     func video(isIncoming: Bool) -> Signal<OngoingGroupCallContext.VideoFrameData, NoError>? {
-        return self.ongoingContext?.video(isIncoming: isIncoming)
+        if isIncoming {
+            return self.ongoingContext?.video(isIncoming: isIncoming)
+        } else if let videoCapturer = self.videoCapturer {
+            return videoCapturer.video()
+        } else {
+            return nil
+        }
     }
 
     public func makeIncomingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void) {
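Note: previously video(isIncoming:) always went through ongoingContext, which is only available once the call is fully established. With this change, outgoing frames come straight from the local videoCapturer, so the self-view requested above with call.video(isIncoming: false) can start before the network context exists. A minimal sketch of consuming the returned signal with SwiftSignalKit (illustrative; the node layer actually wraps it in AdaptedCallVideoSource as shown earlier):

    // Sketch only: observe outgoing frames from the capturer-backed signal.
    // Assumes SwiftSignalKit's Signal.start(next:) and the Telegram-iOS types
    // referenced in the diff.
    if let videoSignal = call.video(isIncoming: false) {
        let disposable = videoSignal.start(next: { frameData in
            // frameData is an OngoingGroupCallContext.VideoFrameData per frame.
        })
        // Keep the disposable alive for as long as frames are needed.
        _ = disposable
    }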
@@ -1 +1 @@
-Subproject commit fe91ca12ae602fb4685a87ac0955fbb37589e3cb
+Subproject commit 8f41ea265404dea86f2444a47343993ccdc3a64e