Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Ilya Laktyushin 2020-08-07 21:09:36 +03:00
commit 575d98dd1f
21 changed files with 3845 additions and 3778 deletions

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "When you send your location to your friends, Telegram needs access to show them a map.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "When you choose to share your Live Location with friends in a chat, Telegram needs background access to your location to keep them updated for the duration of the live sharing.";
"NSLocationAlwaysUsageDescription" = "When you choose to share your live location with friends in a chat, Telegram needs background access to your location to keep them updated for the duration of the live sharing. You also need this to send locations from an Apple Watch.";
"NSCameraUsageDescription" = "We need this so that you can take and share photos and videos.";
"NSCameraUsageDescription" = "We need this so that you can take and share photos and videos, as well as make video calls.";
"NSPhotoLibraryUsageDescription" = "We need this so that you can share photos and videos from your photo library.";
"NSPhotoLibraryAddUsageDescription" = "We need this so that you can save photos and videos to your photo library.";
"NSMicrophoneUsageDescription" = "We need this so that you can record and share voice messages and videos with sound.";

View File

@ -215,7 +215,9 @@
"PUSH_AUTH_REGION" = "New login|from unrecognized device %1$@, location: %2$@";
"PUSH_PHONE_CALL_REQUEST" = "%1$@|is calling you!";
"PUSH_VIDEO_CALL_REQUEST" = "%1$@|is calling you!";
"PUSH_PHONE_CALL_MISSED" = "%1$@|You missed a call";
"PUSH_VIDEO_CALL_MISSED" = "%1$@|You missed a video call";
"PUSH_MESSAGE_GAME_SCORE" = "%1$@ scored %3$@ in game %2$@";
"PUSH_MESSAGE_VIDEOS" = "%1$@ sent you %2$@ videos";
@ -2472,6 +2474,7 @@ Unused sets are archived when you add more.";
"Call.CallInProgressTitle" = "Call in Progress";
"Call.CallInProgressMessage" = "Finish call with %1$@ and start a new one with %2$@?";
"Call.ExternalCallInProgressMessage" = "Please finish the current call first.";
"Call.Message" = "Message";
@ -3025,7 +3028,7 @@ Unused sets are archived when you add more.";
"InfoPlist.NSContactsUsageDescription" = "Telegram will continuously upload your contacts to its heavily encrypted cloud servers to let you connect with your friends across all your devices.";
"InfoPlist.NSLocationWhenInUseUsageDescription" = "When you send your location to your friends, Telegram needs access to show them a map.";
"InfoPlist.NSCameraUsageDescription" = "We need this so that you can take and share photos and videos.";
"InfoPlist.NSCameraUsageDescription" = "We need this so that you can take and share photos and videos, as well as make video calls.";
"InfoPlist.NSPhotoLibraryUsageDescription" = "We need this so that you can share photos and videos from your photo library.";
"InfoPlist.NSPhotoLibraryAddUsageDescription" = "We need this so that you can save photos and videos to your photo library.";
"InfoPlist.NSMicrophoneUsageDescription" = "We need this so that you can record and share voice messages and videos with sound.";

View File

@ -8,7 +8,7 @@ import TelegramAudio
public enum RequestCallResult {
case requested
case alreadyInProgress(PeerId)
case alreadyInProgress(PeerId?)
}
public struct CallAuxiliaryServer {
@ -46,15 +46,20 @@ public struct PresentationCallState: Equatable {
public enum VideoState: Equatable {
case notAvailable
case possible
case outgoingRequested
case incomingRequested(sendsVideo: Bool)
case inactive
case active
case paused
}
public enum RemoteVideoState: Equatable {
case inactive
case active
case paused
}
public enum RemoteAudioState: Equatable {
case active
case muted
}
public enum RemoteBatteryLevel: Equatable {
@ -65,12 +70,14 @@ public struct PresentationCallState: Equatable {
public var state: State
public var videoState: VideoState
public var remoteVideoState: RemoteVideoState
public var remoteAudioState: RemoteAudioState
public var remoteBatteryLevel: RemoteBatteryLevel
public init(state: State, videoState: VideoState, remoteVideoState: RemoteVideoState, remoteBatteryLevel: RemoteBatteryLevel) {
public init(state: State, videoState: VideoState, remoteVideoState: RemoteVideoState, remoteAudioState: RemoteAudioState, remoteBatteryLevel: RemoteBatteryLevel) {
self.state = state
self.videoState = videoState
self.remoteVideoState = remoteVideoState
self.remoteAudioState = remoteAudioState
self.remoteBatteryLevel = remoteBatteryLevel
}
}
@ -130,7 +137,7 @@ public protocol PresentationCall: class {
func toggleIsMuted()
func setIsMuted(_ value: Bool)
func requestVideo()
func acceptVideo()
func disableVideo()
func setOutgoingVideoIsPaused(_ isPaused: Bool)
func switchVideoCamera()
func setCurrentAudioOutput(_ output: AudioSessionOutput)
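Note on the API change above: alreadyInProgress now carries an optional PeerId (nil signals an external call, e.g. one managed by CallKit), and PresentationCallState gains a remoteAudioState field plus the new inactive/paused video cases. A minimal sketch of how a call site might adapt; the two present* helpers are hypothetical:

// Hypothetical call site; `result` is a RequestCallResult.
switch result {
case .requested:
    break
case let .alreadyInProgress(currentPeerId):
    if let currentPeerId = currentPeerId {
        // An in-app call with a known peer is active: offer to end it first.
        presentEndCurrentCallPrompt(currentPeerId) // hypothetical helper
    } else {
        // nil peer id: an external (system) call is in progress.
        presentExternalCallAlert() // hypothetical helper
    }
}

// Constructing a state now requires remoteAudioState as well:
let state = PresentationCallState(
    state: stateValue, // an existing PresentationCallState.State value
    videoState: .inactive,
    remoteVideoState: .inactive,
    remoteAudioState: .active,
    remoteBatteryLevel: .normal
)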

View File

@ -285,17 +285,22 @@ public final class CallListController: ViewController {
} else {
let presentationData = strongSelf.presentationData
let _ = (strongSelf.context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), transaction.getPeer(currentPeerId))
} |> deliverOnMainQueue).start(next: { [weak self] peer, current in
if let strongSelf = self, let peer = peer, let current = current {
return (transaction.getPeer(peerId), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { [weak self] peer, current in
if let strongSelf = self, let peer = peer {
if let current = current {
strongSelf.present(textAlertController(context: strongSelf.context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
if let strongSelf = self {
let _ = strongSelf.context.sharedContext.callManager?.requestCall(context: strongSelf.context, peerId: peerId, isVideo: isVideo, endCurrentIfAny: true)
began?()
}
})]), in: .window(.root))
} else {
strongSelf.present(textAlertController(context: strongSelf.context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
})
}
})
}
} else {
began?()
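The same adaptation recurs in the following files (contact context menus, device contact info, user info): currentPeerId is now optional, so the second peer is fetched with flatMap, and a nil current peer is presented with the new Call.ExternalCallInProgressMessage alert instead of the end-and-restart prompt. The shared pattern, condensed (surrounding setup assumed):

let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
    // currentPeerId is PeerId?; nil when the ongoing call has no Telegram peer.
    return (transaction.getPeer(peerId), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { peer, current in
    guard let peer = peer else { return }
    if let current = current {
        // In-app call in progress: offer to end it and call `peer` instead.
    } else {
        // External call in progress: ask the user to finish it first.
    }
})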

View File

@ -130,6 +130,21 @@ func contactContextMenuItems(context: AccountContext, peerId: PeerId, contactsCo
} else {
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { [weak contactsController] peer, current in
if let contactsController = contactsController, let peer = peer {
if let current = current {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: false, endCurrentIfAny: true)
})]), in: .window(.root))
} else {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
}
})
/*let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), transaction.getPeer(currentPeerId))
}
|> deliverOnMainQueue).start(next: { [weak contactsController] peer, current in
@ -138,7 +153,7 @@ func contactContextMenuItems(context: AccountContext, peerId: PeerId, contactsCo
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: false, endCurrentIfAny: true)
})]), in: .window(.root))
}
})
})*/
}
}
}
@ -155,6 +170,21 @@ func contactContextMenuItems(context: AccountContext, peerId: PeerId, contactsCo
} else {
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { [weak contactsController] peer, current in
if let contactsController = contactsController, let peer = peer {
if let current = current {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: true, endCurrentIfAny: true)
})]), in: .window(.root))
} else {
contactsController.present(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
}
})
/*let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), transaction.getPeer(currentPeerId))
}
|> deliverOnMainQueue).start(next: { [weak contactsController] peer, current in
@ -163,7 +193,7 @@ func contactContextMenuItems(context: AccountContext, peerId: PeerId, contactsCo
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: true, endCurrentIfAny: true)
})]), in: .window(.root))
}
})
})*/
}
}
}

View File

@ -884,14 +884,19 @@ public func deviceContactInfoController(context: AccountContext, subject: Device
} else {
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(user.id), transaction.getPeer(currentPeerId))
} |> deliverOnMainQueue).start(next: { peer, current in
if let peer = peer, let current = current {
return (transaction.getPeer(user.id), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { peer, current in
if let peer = peer {
if let current = current {
presentControllerImpl?(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peer.id, isVideo: false, endCurrentIfAny: true)
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: user.id, isVideo: false, endCurrentIfAny: true)
})]), nil)
} else {
presentControllerImpl?(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), nil)
}
})
}
})
}
}
}),

View File

@ -880,7 +880,7 @@ public func userInfoController(context: AccountContext, peerId: PeerId, mode: Pe
} else {
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peer.id), transaction.getPeer(currentPeerId))
return (transaction.getPeer(peer.id), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { peer, current in
if let peer = peer, let current = current {
presentControllerImpl?(textAlertController(context: context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {

View File

@ -22,7 +22,6 @@ protocol CallControllerNodeProtocol: class {
var beginAudioOuputSelection: ((Bool) -> Void)? { get set }
var acceptCall: (() -> Void)? { get set }
var endCall: (() -> Void)? { get set }
var setIsVideoPaused: ((Bool) -> Void)? { get set }
var back: (() -> Void)? { get set }
var presentCallRating: ((CallId) -> Void)? { get set }
var present: ((ViewController) -> Void)? { get set }
@ -222,10 +221,6 @@ public final class CallController: ViewController {
let _ = self?.call.hangUp()
}
self.controllerNode.setIsVideoPaused = { [weak self] isPaused in
self?.call.setOutgoingVideoIsPaused(isPaused)
}
self.controllerNode.back = { [weak self] in
let _ = self?.dismiss()
}

View File

@ -21,12 +21,12 @@ enum CallControllerButtonsSpeakerMode: Equatable {
}
enum CallControllerButtonsMode: Equatable {
enum VideoState: Equatable {
case notAvailable
case possible(isEnabled: Bool, isInitializing: Bool)
case outgoingRequested(isInitializing: Bool)
case incomingRequested(sendsVideo: Bool)
case active
struct VideoState: Equatable {
var isAvailable: Bool
var isCameraActive: Bool
var canChangeStatus: Bool
var hasVideo: Bool
var isInitializingCamera: Bool
}
case active(speakerMode: CallControllerButtonsSpeakerMode, hasAudioRouteMenu: Bool, videoState: VideoState)
@ -96,7 +96,6 @@ final class CallControllerButtonsNode: ASDisplayNode {
private var validLayout: (CGFloat, CGFloat)?
var isMuted = false
var isCameraPaused = false
var acceptOrEnd: (() -> Void)?
var decline: (() -> Void)?
@ -189,16 +188,8 @@ final class CallControllerButtonsNode: ASDisplayNode {
case .outgoingRinging:
mappedState = .outgoingRinging
case let .active(_, _, videoStateValue):
switch videoStateValue {
case let .incomingRequested(sendsVideo):
mappedState = .active
videoState = .incomingRequested(sendsVideo: sendsVideo)
case let .outgoingRequested(isInitializing):
mappedState = .active
videoState = .outgoingRequested(isInitializing: isInitializing)
case .active, .possible, .notAvailable:
mappedState = .active
}
mappedState = .active
videoState = videoStateValue
}
var buttons: [PlacedButton] = []
@ -226,22 +217,21 @@ final class CallControllerButtonsNode: ASDisplayNode {
}
}
switch videoState {
case .active, .possible, .incomingRequested, .outgoingRequested:
if videoState.isAvailable {
let isCameraActive: Bool
let isCameraEnabled: Bool
let isCameraInitializing: Bool
if case let .possible(value, isInitializing) = videoState {
isCameraActive = false
isCameraEnabled = value
isCameraInitializing = isInitializing
if videoState.hasVideo {
isCameraActive = videoState.isCameraActive
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
} else {
isCameraActive = !self.isCameraPaused
isCameraEnabled = true
isCameraInitializing = false
isCameraActive = false
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
}
topButtons.append(.enableCamera(isCameraActive, false, isCameraInitializing))
if case .possible = videoState {
if !videoState.hasVideo {
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
} else {
@ -252,7 +242,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
}
topButtons.append(.switchCamera(isCameraActive && !isCameraInitializing))
}
case .notAvailable:
} else {
topButtons.append(.mute(self.isMuted))
topButtons.append(.soundOutput(soundOutput))
}
@ -286,23 +276,18 @@ final class CallControllerButtonsNode: ASDisplayNode {
height = largeButtonSize + topBottomSpacing + largeButtonSize + max(bottomInset + 32.0, 46.0)
case .active:
switch videoState {
case .active, .incomingRequested, .outgoingRequested:
if videoState.hasVideo {
let isCameraActive: Bool
let isCameraEnabled: Bool
var isCameraInitializing: Bool
if case .incomingRequested = videoState {
isCameraActive = false
isCameraEnabled = true
isCameraInitializing = false
} else if case let .possible(value, isInitializing) = videoState {
isCameraActive = false
isCameraEnabled = value
isCameraInitializing = isInitializing
let isCameraInitializing: Bool
if videoState.hasVideo {
isCameraActive = videoState.isCameraActive
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
} else {
isCameraActive = !self.isCameraPaused
isCameraEnabled = true
isCameraInitializing = false
isCameraActive = false
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
}
var topButtons: [ButtonDescription] = []
@ -326,10 +311,6 @@ final class CallControllerButtonsNode: ASDisplayNode {
}
}
if case let .outgoingRequested(isInitializing) = videoState {
isCameraInitializing = isInitializing
}
topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))
if hasAudioRouteMenu {
topButtons.append(.soundOutput(soundOutput))
@ -350,21 +331,21 @@ final class CallControllerButtonsNode: ASDisplayNode {
}
height = smallButtonSize + max(bottomInset + 19.0, 46.0)
case .notAvailable, .possible:
} else {
var topButtons: [ButtonDescription] = []
var bottomButtons: [ButtonDescription] = []
let isCameraActive: Bool
let isCameraEnabled: Bool
var isCameraInitializing: Bool
if case let .possible(value, isInitializing) = videoState {
isCameraActive = false
isCameraEnabled = value
isCameraInitializing = isInitializing
let isCameraInitializing: Bool
if videoState.hasVideo {
isCameraActive = videoState.isCameraActive
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
} else {
isCameraActive = false
isCameraEnabled = true
isCameraInitializing = false
isCameraEnabled = videoState.canChangeStatus
isCameraInitializing = videoState.isInitializingCamera
}
let soundOutput: ButtonDescription.SoundOutput
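In the buttons node above, VideoState changes from a five-case enum to a struct of independent flags, so the three near-duplicate pattern-matching blocks collapse into plain property reads. A sketch of the resulting derivation (the flag comments are an interpretation of this diff, not documented semantics):

// Example: an active video call with our camera on and fully initialized.
let videoState = CallControllerButtonsMode.VideoState(
    isAvailable: true,           // video is possible for this call at all
    isCameraActive: true,        // our outgoing camera is currently sending
    canChangeStatus: true,       // the camera toggle should be enabled
    hasVideo: true,              // at least one side has a video track
    isInitializingCamera: false  // capture session is still starting up
)

// Shared logic for the camera button in each layout branch:
let isCameraActive = videoState.hasVideo && videoState.isCameraActive
let isCameraEnabled = videoState.canChangeStatus
let isCameraInitializing = videoState.isInitializingCamera
// topButtons.append(.enableCamera(isCameraActive, isCameraEnabled, isCameraInitializing))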

View File

@ -302,12 +302,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private let imageNode: TransformImageNode
private let dimNode: ASImageNode
private var candidateIncomingVideoNodeValue: CallVideoNode?
private var incomingVideoNodeValue: CallVideoNode?
private var incomingVideoViewRequested: Bool = false
private var candidateOutgoingVideoNodeValue: CallVideoNode?
private var outgoingVideoNodeValue: CallVideoNode?
private var outgoingVideoViewRequested: Bool = false
private var removedMinimizedVideoNodeValue: CallVideoNode?
private var removedExpandedVideoNodeValue: CallVideoNode?
private var isRequestingVideo: Bool = false
private var animateRequestedVideoOnce: Bool = false
@ -354,7 +358,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var beginAudioOuputSelection: ((Bool) -> Void)?
var acceptCall: (() -> Void)?
var endCall: (() -> Void)?
var setIsVideoPaused: ((Bool) -> Void)?
var back: (() -> Void)?
var presentCallRating: ((CallId) -> Void)?
var callEnded: ((Bool) -> Void)?
@ -447,6 +450,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.containerNode.addSubnode(self.statusNode)
self.containerNode.addSubnode(self.videoPausedNode)
self.containerNode.addSubnode(self.buttonsNode)
self.containerNode.addSubnode(self.toastNode)
self.containerNode.addSubnode(self.keyButtonNode)
self.containerNode.addSubnode(self.backButtonArrowNode)
self.containerNode.addSubnode(self.backButtonNode)
@ -465,12 +469,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
switch callState.state {
case .active, .connecting, .reconnecting:
switch callState.videoState {
case .incomingRequested:
strongSelf.call.acceptVideo()
default:
strongSelf.endCall?()
}
strongSelf.endCall?()
case .requesting:
strongSelf.endCall?()
case .ringing:
@ -493,32 +492,20 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if strongSelf.outgoingVideoNodeValue == nil {
let proceed = {
switch callState.videoState {
case .possible:
case .inactive:
strongSelf.isRequestingVideo = true
strongSelf.updateButtonsMode()
default:
break
}
switch callState.videoState {
case .incomingRequested:
strongSelf.call.acceptVideo()
default:
strongSelf.call.requestVideo()
}
strongSelf.call.requestVideo()
}
strongSelf.present?(textAlertController(sharedContext: strongSelf.sharedContext, title: nil, text: strongSelf.presentationData.strings.Call_CameraConfirmationText, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Call_CameraConfirmationConfirm, action: {
proceed()
})]))
} else {
strongSelf.isVideoPaused = !strongSelf.isVideoPaused
strongSelf.outgoingVideoNodeValue?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
strongSelf.call.disableVideo()
}
default:
break
@ -526,9 +513,10 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
self.buttonsNode.rotateCamera = { [weak self] in
guard let strongSelf = self else {
guard let strongSelf = self, !strongSelf.areUserActionsDisabledNow() else {
return
}
strongSelf.disableActionsUntilTimestamp = CACurrentMediaTime() + 1.0
if let outgoingVideoNode = strongSelf.outgoingVideoNodeValue, let (layout, _) = strongSelf.validLayout {
outgoingVideoNode.flip(withBackground: outgoingVideoNode.frame.width == layout.size.width)
}
@ -543,6 +531,14 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.keyButtonNode.addTarget(self, action: #selector(self.keyPressed), forControlEvents: .touchUpInside)
self.backButtonNode.addTarget(self, action: #selector(self.backPressed), forControlEvents: .touchUpInside)
if !shouldStayHiddenUntilConnection && call.isVideo && call.isOutgoing {
self.containerNode.alpha = 0.0
Queue.mainQueue().after(1.0, { [weak self] in
self?.containerNode.alpha = 1.0
self?.animateIn()
})
}
}
func displayCameraTooltip() {
@ -616,7 +612,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
private func setupAudioOutputs() {
if self.outgoingVideoNodeValue != nil || self.candidateOutgoingVideoNodeValue != nil {
if self.outgoingVideoNodeValue != nil || self.incomingVideoNodeValue != nil || self.candidateOutgoingVideoNodeValue != nil || self.candidateIncomingVideoNodeValue != nil {
if let audioOutputState = self.audioOutputState, let currentOutput = audioOutputState.currentOutput {
switch currentOutput {
case .headphones:
@ -636,21 +632,39 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
let statusValue: CallControllerStatusValue
var statusReception: Int32?
switch callState.videoState {
case .active, .incomingRequested(true):
switch callState.remoteVideoState {
case .active, .paused:
if !self.incomingVideoViewRequested {
self.incomingVideoViewRequested = true
let delayUntilInitialized = true
self.call.makeIncomingVideoView(completion: { [weak self] incomingVideoView in
guard let strongSelf = self else {
return
}
if let incomingVideoView = incomingVideoView {
let incomingVideoNode = CallVideoNode(videoView: incomingVideoView, assumeReadyAfterTimeout: false, isReadyUpdated: {
guard let strongSelf = self else {
incomingVideoView.view.backgroundColor = .black
incomingVideoView.view.clipsToBounds = true
let applyNode: () -> Void = {
guard let strongSelf = self, let incomingVideoNode = strongSelf.candidateIncomingVideoNodeValue else {
return
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
strongSelf.candidateIncomingVideoNodeValue = nil
strongSelf.incomingVideoNodeValue = incomingVideoNode
if let expandedVideoNode = strongSelf.expandedVideoNode {
strongSelf.minimizedVideoNode = expandedVideoNode
}
strongSelf.expandedVideoNode = incomingVideoNode
strongSelf.containerNode.insertSubnode(incomingVideoNode, belowSubnode: strongSelf.dimNode)
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
}
let incomingVideoNode = CallVideoNode(videoView: incomingVideoView, assumeReadyAfterTimeout: false, isReadyUpdated: {
if delayUntilInitialized {
Queue.mainQueue().after(0.1, {
applyNode()
})
}
}, orientationUpdated: {
guard let strongSelf = self else {
@ -661,21 +675,38 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}, isFlippedUpdated: { _ in
})
strongSelf.incomingVideoNodeValue = incomingVideoNode
strongSelf.expandedVideoNode = incomingVideoNode
strongSelf.containerNode.insertSubnode(incomingVideoNode, belowSubnode: strongSelf.dimNode)
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
strongSelf.candidateIncomingVideoNodeValue = incomingVideoNode
strongSelf.setupAudioOutputs()
if !delayUntilInitialized {
applyNode()
}
}
})
}
default:
break
case .inactive:
self.candidateIncomingVideoNodeValue = nil
if let incomingVideoNodeValue = self.incomingVideoNodeValue {
if self.minimizedVideoNode == incomingVideoNodeValue {
self.minimizedVideoNode = nil
self.removedMinimizedVideoNodeValue = incomingVideoNodeValue
}
if self.expandedVideoNode == incomingVideoNodeValue {
self.expandedVideoNode = nil
self.removedExpandedVideoNodeValue = incomingVideoNodeValue
if let minimizedVideoNode = self.minimizedVideoNode {
self.expandedVideoNode = minimizedVideoNode
self.minimizedVideoNode = nil
}
}
self.incomingVideoNodeValue = nil
self.incomingVideoViewRequested = false
}
}
switch callState.videoState {
case .active, .outgoingRequested, .incomingRequested(false):
case .active, .paused:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
let delayUntilInitialized = self.isRequestingVideo
@ -700,10 +731,11 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
strongSelf.outgoingVideoNodeValue = outgoingVideoNode
strongSelf.minimizedVideoNode = outgoingVideoNode
if let expandedVideoNode = strongSelf.expandedVideoNode {
strongSelf.minimizedVideoNode = outgoingVideoNode
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
} else {
strongSelf.expandedVideoNode = outgoingVideoNode
strongSelf.containerNode.insertSubnode(outgoingVideoNode, belowSubnode: strongSelf.dimNode)
}
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
@ -750,12 +782,28 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if !delayUntilInitialized {
applyNode()
}
strongSelf.setupAudioOutputs()
}
})
}
default:
break
case .notAvailable, .inactive:
self.candidateOutgoingVideoNodeValue = nil
if let outgoingVideoNodeValue = self.outgoingVideoNodeValue {
if self.minimizedVideoNode == outgoingVideoNodeValue {
self.minimizedVideoNode = nil
self.removedMinimizedVideoNodeValue = outgoingVideoNodeValue
}
if self.expandedVideoNode == self.outgoingVideoNodeValue {
self.expandedVideoNode = nil
self.removedExpandedVideoNodeValue = outgoingVideoNodeValue
if let minimizedVideoNode = self.minimizedVideoNode {
self.expandedVideoNode = minimizedVideoNode
self.minimizedVideoNode = nil
}
}
self.outgoingVideoNodeValue = nil
self.outgoingVideoViewRequested = false
}
}
if let incomingVideoNode = self.incomingVideoNodeValue {
@ -765,7 +813,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
default:
let isActive: Bool
switch callState.remoteVideoState {
case .inactive:
case .inactive, .paused:
isActive = false
case .active:
isActive = true
@ -842,25 +890,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
switch callState.videoState {
case .notAvailable, .active, .possible, .outgoingRequested:
statusValue = .timer({ value in
if isReconnecting {
return strings.Call_StatusConnecting
} else {
return value
}
}, timestamp)
statusReception = reception
case .incomingRequested:
var text: String
text = self.presentationData.strings.Call_IncomingVideoCall
if !self.statusNode.subtitle.isEmpty {
text += "\n\(self.statusNode.subtitle)"
statusValue = .timer({ value in
if isReconnecting {
return strings.Call_StatusConnecting
} else {
return value
}
statusValue = .text(string: text, displayLogo: false)
/*case .outgoingRequested:
statusValue = .text(string: self.presentationData.strings.Call_StatusRequesting, displayLogo: false)*/
}, timestamp)
if case .active = callState.state {
statusReception = reception
}
}
if self.shouldStayHiddenUntilConnection {
@ -900,7 +939,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if self.incomingVideoViewRequested && !self.outgoingVideoViewRequested && !self.displayedCameraTooltip {
self.displayedCameraTooltip = true
Queue.mainQueue().after(1.0) {
Queue.mainQueue().after(2.0) {
self.displayCameraTooltip()
}
}
@ -937,6 +976,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.dimNode.backgroundColor = color
self.dimNode.image = image
}
self.statusNode.isHidden = !visible
}
}
@ -950,7 +990,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var mode: CallControllerButtonsSpeakerMode = .none
var hasAudioRouteMenu: Bool = false
if let (availableOutputs, maybeCurrentOutput) = self.audioOutputState, let currentOutput = maybeCurrentOutput {
hasAudioRouteMenu = availableOutputs.count >= 2
hasAudioRouteMenu = availableOutputs.count > 2
switch currentOutput {
case .builtin:
mode = .builtin
@ -972,29 +1012,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
mode = .none
}
}
let mappedVideoState: CallControllerButtonsMode.VideoState
var mappedVideoState = CallControllerButtonsMode.VideoState(isAvailable: false, isCameraActive: self.outgoingVideoNodeValue != nil, canChangeStatus: false, hasVideo: self.outgoingVideoNodeValue != nil || self.incomingVideoNodeValue != nil, isInitializingCamera: self.isRequestingVideo)
switch callState.videoState {
case .notAvailable:
mappedVideoState = .notAvailable
case .possible:
var isEnabled = false
switch callState.state {
case .active:
isEnabled = true
default:
break
}
mappedVideoState = .possible(isEnabled: isEnabled, isInitializing: false)
case .outgoingRequested:
if self.outgoingVideoNodeValue != nil {
mappedVideoState = .outgoingRequested(isInitializing: self.isRequestingVideo)
} else {
mappedVideoState = .possible(isEnabled: true, isInitializing: self.isRequestingVideo)
}
case let .incomingRequested(sendsVideo):
mappedVideoState = .incomingRequested(sendsVideo: sendsVideo)
case .active:
mappedVideoState = .active
break
case .inactive:
mappedVideoState.isAvailable = true
mappedVideoState.canChangeStatus = true
case .active, .paused:
mappedVideoState.isAvailable = true
mappedVideoState.canChangeStatus = true
}
switch callState.state {
@ -1021,17 +1048,19 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
func animateIn() {
var bounds = self.bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.removeAnimation(forKey: "bounds")
self.statusBar.layer.removeAnimation(forKey: "opacity")
self.containerNode.layer.removeAnimation(forKey: "opacity")
self.containerNode.layer.removeAnimation(forKey: "scale")
self.statusBar.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
if !self.shouldStayHiddenUntilConnection {
self.containerNode.layer.animateScale(from: 1.04, to: 1.0, duration: 0.3)
self.containerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
if !self.containerNode.alpha.isZero {
var bounds = self.bounds
bounds.origin = CGPoint()
self.bounds = bounds
self.layer.removeAnimation(forKey: "bounds")
self.statusBar.layer.removeAnimation(forKey: "opacity")
self.containerNode.layer.removeAnimation(forKey: "opacity")
self.containerNode.layer.removeAnimation(forKey: "scale")
self.statusBar.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
if !self.shouldStayHiddenUntilConnection {
self.containerNode.layer.animateScale(from: 1.04, to: 1.0, duration: 0.3)
self.containerNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
}
}
@ -1063,6 +1092,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
uiDisplayTransition *= 1.0 - self.pictureInPictureTransitionFraction
let buttonsHeight: CGFloat = self.buttonsNode.bounds.height
let toastHeight: CGFloat = self.toastNode.bounds.height
var fullInsets = layout.insets(options: .statusBar)
@ -1072,7 +1102,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
cleanInsets.right = 20.0
fullInsets.top += 44.0 + 8.0
fullInsets.bottom = buttonsHeight + 27.0
fullInsets.bottom = buttonsHeight + toastHeight + 27.0
fullInsets.left = 20.0
fullInsets.right = 20.0
@ -1225,7 +1255,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
transition.updateFrame(node: self.toastNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
transition.updateFrame(node: self.toastNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY - toastHeight), size: CGSize(width: layout.size.width, height: toastHeight)))
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
@ -1233,15 +1263,66 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)
if let removedMinimizedVideoNodeValue = self.removedMinimizedVideoNodeValue {
self.removedMinimizedVideoNodeValue = nil
if transition.isAnimated {
removedMinimizedVideoNodeValue.layer.animateScale(from: 1.0, to: 0.1, duration: 0.3, removeOnCompletion: false)
removedMinimizedVideoNodeValue.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak removedMinimizedVideoNodeValue] _ in
removedMinimizedVideoNodeValue?.removeFromSupernode()
})
} else {
removedMinimizedVideoNodeValue.removeFromSupernode()
}
}
if let expandedVideoNode = self.expandedVideoNode {
var expandedVideoTransition = transition
if expandedVideoNode.frame.isEmpty || self.disableAnimationForExpandedVideoOnce {
expandedVideoTransition = .immediate
self.disableAnimationForExpandedVideoOnce = false
}
expandedVideoTransition.updateFrame(node: expandedVideoNode, frame: fullscreenVideoFrame)
if let removedExpandedVideoNodeValue = self.removedExpandedVideoNodeValue {
self.removedExpandedVideoNodeValue = nil
expandedVideoTransition.updateFrame(node: expandedVideoNode, frame: fullscreenVideoFrame, completion: { [weak removedExpandedVideoNodeValue] _ in
removedExpandedVideoNodeValue?.removeFromSupernode()
})
} else {
expandedVideoTransition.updateFrame(node: expandedVideoNode, frame: fullscreenVideoFrame)
}
expandedVideoNode.updateLayout(size: expandedVideoNode.frame.size, cornerRadius: 0.0, transition: expandedVideoTransition)
if self.animateRequestedVideoOnce {
self.animateRequestedVideoOnce = false
if expandedVideoNode === self.outgoingVideoNodeValue {
let videoButtonFrame = self.buttonsNode.videoButtonFrame().flatMap { frame -> CGRect in
return self.buttonsNode.view.convert(frame, to: self.view)
}
if let previousVideoButtonFrame = previousVideoButtonFrame, let videoButtonFrame = videoButtonFrame {
expandedVideoNode.animateRadialMask(from: previousVideoButtonFrame, to: videoButtonFrame)
}
}
}
} else {
if let removedExpandedVideoNodeValue = self.removedExpandedVideoNodeValue {
self.removedExpandedVideoNodeValue = nil
if transition.isAnimated {
removedExpandedVideoNodeValue.layer.animateScale(from: 1.0, to: 0.1, duration: 0.3, removeOnCompletion: false)
removedExpandedVideoNodeValue.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false, completion: { [weak removedExpandedVideoNodeValue] _ in
removedExpandedVideoNodeValue?.removeFromSupernode()
})
} else {
removedExpandedVideoNodeValue.removeFromSupernode()
}
}
}
if let minimizedVideoNode = self.minimizedVideoNode {
var minimizedVideoTransition = transition
var didAppear = false
@ -1249,38 +1330,24 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
minimizedVideoTransition = .immediate
didAppear = true
}
if let expandedVideoNode = self.expandedVideoNode, expandedVideoNode.isReady {
if self.minimizedVideoDraggingPosition == nil {
if let animationForExpandedVideoSnapshotView = self.animationForExpandedVideoSnapshotView {
self.containerNode.view.addSubview(animationForExpandedVideoSnapshotView)
transition.updateAlpha(layer: animationForExpandedVideoSnapshotView.layer, alpha: 0.0, completion: { [weak animationForExpandedVideoSnapshotView] _ in
animationForExpandedVideoSnapshotView?.removeFromSuperview()
})
transition.updateTransformScale(layer: animationForExpandedVideoSnapshotView.layer, scale: previewVideoFrame.width / fullscreenVideoFrame.width)
transition.updatePosition(layer: animationForExpandedVideoSnapshotView.layer, position: CGPoint(x: previewVideoFrame.minX + previewVideoFrame.center.x / fullscreenVideoFrame.width * previewVideoFrame.width, y: previewVideoFrame.minY + previewVideoFrame.center.y / fullscreenVideoFrame.height * previewVideoFrame.height))
self.animationForExpandedVideoSnapshotView = nil
}
minimizedVideoTransition.updateFrame(node: minimizedVideoNode, frame: previewVideoFrame)
minimizedVideoNode.updateLayout(size: minimizedVideoNode.frame.size, cornerRadius: interpolate(from: 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: minimizedVideoTransition)
if transition.isAnimated && didAppear {
minimizedVideoNode.layer.animateSpring(from: 0.1 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5)
}
}
} else {
minimizedVideoNode.frame = fullscreenVideoFrame
minimizedVideoNode.updateLayout(size: layout.size, cornerRadius: 0.0, transition: minimizedVideoTransition)
if self.animateRequestedVideoOnce {
self.animateRequestedVideoOnce = false
let videoButtonFrame = self.buttonsNode.videoButtonFrame().flatMap { frame -> CGRect in
return self.buttonsNode.view.convert(frame, to: self.view)
}
if self.minimizedVideoDraggingPosition == nil {
if let animationForExpandedVideoSnapshotView = self.animationForExpandedVideoSnapshotView {
self.containerNode.view.addSubview(animationForExpandedVideoSnapshotView)
transition.updateAlpha(layer: animationForExpandedVideoSnapshotView.layer, alpha: 0.0, completion: { [weak animationForExpandedVideoSnapshotView] _ in
animationForExpandedVideoSnapshotView?.removeFromSuperview()
})
transition.updateTransformScale(layer: animationForExpandedVideoSnapshotView.layer, scale: previewVideoFrame.width / fullscreenVideoFrame.width)
if let previousVideoButtonFrame = previousVideoButtonFrame, let videoButtonFrame = videoButtonFrame {
minimizedVideoNode.animateRadialMask(from: previousVideoButtonFrame, to: videoButtonFrame)
}
transition.updatePosition(layer: animationForExpandedVideoSnapshotView.layer, position: CGPoint(x: previewVideoFrame.minX + previewVideoFrame.center.x / fullscreenVideoFrame.width * previewVideoFrame.width, y: previewVideoFrame.minY + previewVideoFrame.center.y / fullscreenVideoFrame.height * previewVideoFrame.height))
self.animationForExpandedVideoSnapshotView = nil
}
minimizedVideoTransition.updateFrame(node: minimizedVideoNode, frame: previewVideoFrame)
minimizedVideoNode.updateLayout(size: previewVideoFrame.size, cornerRadius: interpolate(from: 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: minimizedVideoTransition)
if transition.isAnimated && didAppear {
minimizedVideoNode.layer.animateSpring(from: 0.1 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 0.5)
}
}
self.animationForExpandedVideoSnapshotView = nil
}
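The controller-node rework above routes new video views through candidate slots (candidateIncomingVideoNodeValue, candidateOutgoingVideoNodeValue) and promotes them to the live slots only once the view reports readiness, while nodes being torn down are parked in removedMinimizedVideoNodeValue/removedExpandedVideoNodeValue until their exit animation completes. A stripped-down sketch of the candidate-then-promote step, using the same CallVideoNode initializer as the diff:

self.call.makeIncomingVideoView(completion: { [weak self] incomingVideoView in
    guard let strongSelf = self, let incomingVideoView = incomingVideoView else {
        return
    }
    let applyNode: () -> Void = {
        guard let strongSelf = self, let node = strongSelf.candidateIncomingVideoNodeValue else {
            return
        }
        strongSelf.candidateIncomingVideoNodeValue = nil
        strongSelf.incomingVideoNodeValue = node
        // ...insert the node into the hierarchy and relayout...
    }
    let incomingVideoNode = CallVideoNode(videoView: incomingVideoView, assumeReadyAfterTimeout: false, isReadyUpdated: {
        // Promote shortly after the first frame is ready so the layout
        // animation does not race the video becoming visible.
        Queue.mainQueue().after(0.1, applyNode)
    }, orientationUpdated: {}, isFlippedUpdated: { _ in })
    strongSelf.candidateIncomingVideoNodeValue = incomingVideoNode
})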

View File

@ -94,11 +94,12 @@ final class CallControllerToastContainerNode: ASDisplayNode {
toasts.append(.battery)
}
var transitions: [ToastDescription.Key: (ContainedViewLayoutTransition, CGFloat, Bool)] = [:]
var validKeys: [ToastDescription.Key] = []
for toast in toasts {
validKeys.append(toast.key)
var toastTransition = transition
var animateToastIn = false
var animateIn = false
let toastNode: CallControllerToastItemNode
if let current = self.toastNodes[toast.key] {
toastNode = current
@ -107,41 +108,37 @@ final class CallControllerToastContainerNode: ASDisplayNode {
self.toastNodes[toast.key] = toastNode
self.addSubnode(toastNode)
toastTransition = .immediate
animateToastIn = transition.isAnimated
animateIn = transition.isAnimated
}
let toastContent: CallControllerToastItemNode.Content
let toastText: String
switch toast {
case .camera:
toastContent = CallControllerToastItemNode.Content(
key: .camera,
image: .camera,
text: strings.Call_CameraOff(self.title).0
)
case .microphone:
toastContent = CallControllerToastItemNode.Content(
key: .microphone,
image: .microphone,
text: strings.Call_MicrophoneOff(self.title).0
)
case .mute:
toastContent = CallControllerToastItemNode.Content(
key: .mute,
image: .microphone,
text: strings.Call_YourMicrophoneOff
)
case .battery:
toastContent = CallControllerToastItemNode.Content(
key: .battery,
image: .battery,
text: strings.Call_BatteryLow(self.title).0
)
}
let toastHeight = toastNode.update(width: width, content: buttonContent, text: buttonText, transition: buttonTransition)
let toastFrame = CGRect(x: 0.0, y: 0.0, width: 100.0, height: 20.0)
toastTransition.updateFrame(node: toastNode, frame: toastFrame)
height += toastHeight + spacing
if animateToastIn {
toastNode.animateIn()
}
let toastHeight = toastNode.update(width: width, content: toastContent, transition: toastTransition)
transitions[toast.key] = (toastTransition, toastHeight, animateIn)
}
var removedKeys: [ToastDescription.Key] = []
@ -161,6 +158,25 @@ final class CallControllerToastContainerNode: ASDisplayNode {
self.toastNodes.removeValue(forKey: key)
}
guard let subnodes = self.subnodes else {
return 0.0
}
for case let toastNode as CallControllerToastItemNode in subnodes.reversed() {
if let content = toastNode.currentContent, let (transition, toastHeight, animateIn) = transitions[content.key] {
transition.updateFrame(node: toastNode, frame: CGRect(x: 0.0, y: height, width: width, height: toastHeight))
height += toastHeight + spacing
if animateIn {
toastNode.animateIn()
}
}
}
if height > 0.0 {
height -= spacing
}
height += bottomSpacing
return height
}
@ -177,7 +193,7 @@ final class CallControllerToastContainerNode: ASDisplayNode {
}
}
final class CallControllerToastItemNode: ASDisplayNode {
private class CallControllerToastItemNode: ASDisplayNode {
struct Content: Equatable {
enum Image {
case camera
@ -185,52 +201,63 @@ final class CallControllerToastItemNode: ASDisplayNode {
case battery
}
var key: ToastDescription.Key
var image: Image
var text: String
init(image: Image, text: String) {
init(key: ToastDescription.Key, image: Image, text: String) {
self.key = key
self.image = image
self.text = text
}
}
let clipNode: ASDisplayNode
let effectView: UIVisualEffectView
let iconNode: ASImageNode
let textNode: ImmediateTextNode
private(set) var currentContent: Content?
private(set) var currentWidth: CGFloat?
private(set) var currentHeight: CGFloat?
override init() {
self.clipNode = ASDisplayNode()
self.clipNode.clipsToBounds = true
self.clipNode.layer.cornerRadius = 14.0
if #available(iOS 13.0, *) {
self.clipNode.layer.cornerCurve = .continuous
}
self.effectView = UIVisualEffectView()
self.effectView.effect = UIBlurEffect(style: .light)
self.effectView.layer.cornerRadius = 16.0
self.effectView.clipsToBounds = true
self.effectView.isUserInteractionEnabled = false
self.iconNode = ASImageNode()
self.iconNode.displaysAsynchronously = false
self.iconNode.displayWithoutProcessing = true
self.iconNode.contentMode = .center
self.textNode = ImmediateTextNode()
self.textNode.maximumNumberOfLines = 2
self.textNode.displaysAsynchronously = false
self.textNode.isUserInteractionEnabled = false
super.init()
self.view.addSubview(self.effectView)
self.addSubnode(self.iconNode)
self.addSubnode(self.textNode)
self.addSubnode(self.clipNode)
self.clipNode.view.addSubview(self.effectView)
self.clipNode.addSubnode(self.iconNode)
self.clipNode.addSubnode(self.textNode)
}
func update(width: CGFloat, content: Content, transition: ContainedViewLayoutTransition) -> CGFloat {
let inset: CGFloat = 24.0
self.currentWidth = size.width
if self.currentContent != content {
let inset: CGFloat = 32.0
if self.currentContent != content || self.currentWidth != width {
let previousContent = self.currentContent
self.currentContent = content
self.currentWidth = width
var image: UIImage?
switch content.image {
@ -250,20 +277,25 @@ final class CallControllerToastItemNode: ASDisplayNode {
}
if previousContent?.text != content.text {
let textSize = self.textNode.updateLayout(CGSize(width: size.width - inset * 2.0, height: 100.0))
let textFrame = CGRect(origin: CGPoint(x: floor((size.width - textSize.width) / 2.0), y: size.height), size: textSize)
self.textNode.attributedText = NSAttributedString(string: content.text, font: Font.regular(17.0), textColor: .white)
if previousContent?.text.isEmpty ?? true {
self.textNode.frame = textFrame
if transition.isAnimated {
self.textNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
} else {
transition.updateFrameAdditiveToCenter(node: self.textNode, frame: textFrame)
}
let iconSize = CGSize(width: 44.0, height: 28.0)
let iconSpacing: CGFloat = 2.0
let textSize = self.textNode.updateLayout(CGSize(width: width - inset * 2.0 - iconSize.width - iconSpacing, height: 100.0))
let backgroundSize = CGSize(width: iconSize.width + iconSpacing + textSize.width + 6.0 * 2.0, height: max(28.0, textSize.height + 4.0 * 2.0))
let backgroundFrame = CGRect(origin: CGPoint(x: floor((width - backgroundSize.width) / 2.0), y: 0.0), size: backgroundSize)
transition.updateFrame(node: self.clipNode, frame: backgroundFrame)
transition.updateFrame(view: self.effectView, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
self.iconNode.frame = CGRect(origin: CGPoint(), size: iconSize)
self.textNode.frame = CGRect(origin: CGPoint(x: iconSize.width + iconSpacing, y: 4.0), size: textSize)
self.currentHeight = backgroundSize.height
}
}
return 28.0
return self.currentHeight ?? 28.0
}
func animateIn() {
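The container above now lays toasts out in two passes: the first loop measures each toast and records a (transition, height, animateIn) tuple per key, and a second loop walks subnodes in display order to assign stacked frames and fire animateIn(). The second pass, condensed:

var height: CGFloat = 0.0
for case let toastNode as CallControllerToastItemNode in (self.subnodes ?? []).reversed() {
    if let content = toastNode.currentContent, let (transition, toastHeight, animateIn) = transitions[content.key] {
        // Stack from the top, in the order the nodes were added.
        transition.updateFrame(node: toastNode, frame: CGRect(x: 0.0, y: height, width: width, height: toastHeight))
        height += toastHeight + spacing
        if animateIn {
            toastNode.animateIn()
        }
    }
}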

View File

@ -14,75 +14,6 @@ import LocalizedPeerData
import PhotoResources
import CallsEmoji
private final class IncomingVideoNode: ASDisplayNode {
private let videoView: UIView
private var effectView: UIVisualEffectView?
private var isBlurred: Bool = false
init(videoView: UIView) {
self.videoView = videoView
super.init()
self.view.addSubview(self.videoView)
}
func updateLayout(size: CGSize) {
self.videoView.frame = CGRect(origin: CGPoint(), size: size)
}
func updateIsBlurred(isBlurred: Bool) {
if self.isBlurred == isBlurred {
return
}
self.isBlurred = isBlurred
if isBlurred {
if self.effectView == nil {
let effectView = UIVisualEffectView()
self.effectView = effectView
effectView.frame = self.videoView.frame
self.view.addSubview(effectView)
}
UIView.animate(withDuration: 0.3, animations: {
self.effectView?.effect = UIBlurEffect(style: .dark)
})
} else if let effectView = self.effectView {
UIView.animate(withDuration: 0.3, animations: {
effectView.effect = nil
})
}
}
}
private final class OutgoingVideoNode: ASDisplayNode {
private let videoView: UIView
private let switchCameraButton: HighlightableButtonNode
private let switchCamera: () -> Void
init(videoView: UIView, switchCamera: @escaping () -> Void) {
self.videoView = videoView
self.switchCameraButton = HighlightableButtonNode()
self.switchCamera = switchCamera
super.init()
self.view.addSubview(self.videoView)
self.addSubnode(self.switchCameraButton)
self.switchCameraButton.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
}
@objc private func buttonPressed() {
self.switchCamera()
}
func updateLayout(size: CGSize, isExpanded: Bool, transition: ContainedViewLayoutTransition) {
transition.updateFrame(view: self.videoView, frame: CGRect(origin: CGPoint(), size: size))
transition.updateCornerRadius(layer: self.videoView.layer, cornerRadius: isExpanded ? 0.0 : 16.0)
self.switchCameraButton.frame = CGRect(origin: CGPoint(), size: size)
}
}
final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol {
private let sharedContext: SharedAccountContext
private let account: Account
@ -100,14 +31,9 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
private let imageNode: TransformImageNode
private let dimNode: ASDisplayNode
private var incomingVideoNode: IncomingVideoNode?
private var incomingVideoViewRequested: Bool = false
private var outgoingVideoNode: OutgoingVideoNode?
private var outgoingVideoViewRequested: Bool = false
private let backButtonArrowNode: ASImageNode
private let backButtonNode: HighlightableButtonNode
private let statusNode: LegacyCallControllerStatusNode
private let videoPausedNode: ImmediateTextNode
private let buttonsNode: LegacyCallControllerButtonsNode
private var keyPreviewNode: CallControllerKeyPreviewNode?
@ -134,13 +60,12 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
var beginAudioOuputSelection: ((Bool) -> Void)?
var acceptCall: (() -> Void)?
var endCall: (() -> Void)?
var toggleVideo: (() -> Void)?
var setIsVideoPaused: ((Bool) -> Void)?
var back: (() -> Void)?
var presentCallRating: ((CallId) -> Void)?
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
var present: ((ViewController) -> Void)?
var setIsVideoPaused: ((Bool) -> Void)?
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
self.sharedContext = sharedContext
@ -171,9 +96,6 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
self.statusNode = LegacyCallControllerStatusNode()
self.videoPausedNode = ImmediateTextNode()
self.videoPausedNode.alpha = 0.0
self.buttonsNode = LegacyCallControllerButtonsNode(strings: self.presentationData.strings)
self.keyButtonNode = HighlightableButtonNode()
@ -208,7 +130,6 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
self.containerNode.addSubnode(self.imageNode)
self.containerNode.addSubnode(self.dimNode)
self.containerNode.addSubnode(self.statusNode)
self.containerNode.addSubnode(self.videoPausedNode)
self.containerNode.addSubnode(self.buttonsNode)
self.containerNode.addSubnode(self.keyButtonNode)
self.containerNode.addSubnode(self.backButtonArrowNode)
@ -230,10 +151,6 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
self?.acceptCall?()
}
self.buttonsNode.rotateCamera = { [weak self] in
self?.call.switchVideoCamera()
}
self.keyButtonNode.addTarget(self, action: #selector(self.keyPressed), forControlEvents: .touchUpInside)
self.backButtonNode.addTarget(self, action: #selector(self.backPressed), forControlEvents: .touchUpInside)
@ -270,8 +187,6 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
}
}
self.videoPausedNode.attributedText = NSAttributedString(string: self.presentationData.strings.Call_RemoteVideoPaused(peer.compactDisplayTitle).0, font: Font.regular(17.0), textColor: .white)
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
@ -291,84 +206,6 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
let statusValue: LegacyCallControllerStatusValue
var statusReception: Int32?
switch callState.videoState {
case .active:
if !self.incomingVideoViewRequested {
self.incomingVideoViewRequested = true
self.call.makeIncomingVideoView(completion: { [weak self] incomingVideoView in
guard let strongSelf = self else {
return
}
if let incomingVideoView = incomingVideoView {
strongSelf.setCurrentAudioOutput?(.speaker)
let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView.view)
strongSelf.incomingVideoNode = incomingVideoNode
strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
strongSelf.statusNode.isHidden = true
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
})
}
default:
break
}
switch callState.videoState {
case .active, .outgoingRequested:
if !self.outgoingVideoViewRequested {
self.outgoingVideoViewRequested = true
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
guard let strongSelf = self else {
return
}
if let outgoingVideoView = outgoingVideoView?.view {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
outgoingVideoView.layer.cornerRadius = 16.0
strongSelf.setCurrentAudioOutput?(.speaker)
let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
guard let strongSelf = self else {
return
}
strongSelf.call.switchVideoCamera()
})
strongSelf.outgoingVideoNode = outgoingVideoNode
if let incomingVideoNode = strongSelf.incomingVideoNode {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
} else {
strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
})
}
default:
break
}
if let incomingVideoNode = self.incomingVideoNode {
let isActive: Bool
switch callState.remoteVideoState {
case .inactive:
isActive = false
case .active:
isActive = true
}
incomingVideoNode.updateIsBlurred(isBlurred: !isActive)
if isActive != self.videoPausedNode.alpha.isZero {
if isActive {
self.videoPausedNode.alpha = 0.0
self.videoPausedNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
} else {
self.videoPausedNode.alpha = 1.0
self.videoPausedNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
}
}
switch callState.state {
case .waiting, .connecting:
statusValue = .text(self.presentationData.strings.Call_StatusConnecting)
@ -592,33 +429,10 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
let statusHeight = self.statusNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
var outgoingVideoTransition = transition
if let incomingVideoNode = self.incomingVideoNode {
if incomingVideoNode.frame.width.isZero, let outgoingVideoNode = self.outgoingVideoNode, !outgoingVideoNode.frame.width.isZero, !transition.isAnimated {
outgoingVideoTransition = .animated(duration: 0.3, curve: .easeInOut)
}
incomingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
incomingVideoNode.updateLayout(size: layout.size)
}
if let outgoingVideoNode = self.outgoingVideoNode {
if self.incomingVideoNode == nil {
outgoingVideoNode.frame = CGRect(origin: CGPoint(), size: layout.size)
outgoingVideoNode.updateLayout(size: layout.size, isExpanded: true, transition: transition)
} else {
let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
let outgoingFrame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize)
outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: outgoingFrame)
outgoingVideoNode.updateLayout(size: outgoingFrame.size, isExpanded: false, transition: outgoingVideoTransition)
}
}
let keyTextSize = self.keyButtonNode.frame.size
transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
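For reference, the picture-in-picture geometry above fits the full layout size into a 200x200 box and pins the result 16pt from the trailing edge, 32pt above the buttons row. A self-contained sketch of the same math; the fitted(in:) helper is an illustrative stand-in for the Display module's aspectFitted:

import CoreGraphics

extension CGSize {
    // Scale to fit inside `container`, preserving aspect ratio.
    func fitted(in container: CGSize) -> CGSize {
        let scale = min(container.width / max(width, 1.0), container.height / max(height, 1.0))
        return CGSize(width: floor(width * scale), height: floor(height * scale))
    }
}

func outgoingPreviewFrame(screen: CGSize, buttonsOriginY: CGFloat) -> CGRect {
    let size = screen.fitted(in: CGSize(width: 200.0, height: 200.0))
    return CGRect(x: screen.width - 16.0 - size.width,
                  y: buttonsOriginY - 32.0 - size.height,
                  width: size.width,
                  height: size.height)
}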

View File

@ -190,7 +190,11 @@ public final class PresentationCallImpl: PresentationCall {
private var callWasActive = false
private var shouldPresentCallRating = false
private var videoWasActive = false
private var previousVideoState: PresentationCallState.VideoState?
private var previousRemoteVideoState: PresentationCallState.RemoteVideoState?
private var previousRemoteAudioState: PresentationCallState.RemoteAudioState?
private var previousRemoteBatteryLevel: PresentationCallState.RemoteBatteryLevel?
private var sessionStateDisposable: Disposable?
@ -291,9 +295,9 @@ public final class PresentationCallImpl: PresentationCall {
self.enableHighBitrateVideoCalls = enableHighBitrateVideoCalls
if self.isVideo {
self.videoCapturer = OngoingCallVideoCapturer()
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .outgoingRequested, remoteVideoState: .active, remoteBatteryLevel: .normal))
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .active, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal))
} else {
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: self.isVideoPossible ? .possible : .notAvailable, remoteAudioState: .active, remoteBatteryLevel: .normal))
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: self.isVideoPossible ? .inactive : .notAvailable, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal))
}
self.serializedData = serializedData
@ -376,7 +380,7 @@ public final class PresentationCallImpl: PresentationCall {
audioSessionActive = callKitIntegration.audioSessionActive
|> filter { $0 }
|> timeout(2.0, queue: Queue.mainQueue(), alternate: Signal { subscriber in
if let strongSelf = self, let audioSessionControl = strongSelf.audioSessionControl {
if let strongSelf = self, let _ = strongSelf.audioSessionControl {
//audioSessionControl.activate({ _ in })
}
subscriber.putNext(true)
@ -458,26 +462,32 @@ public final class PresentationCallImpl: PresentationCall {
let mappedVideoState: PresentationCallState.VideoState
let mappedRemoteVideoState: PresentationCallState.RemoteVideoState
let mappedRemoteAudioState: PresentationCallState.RemoteAudioState
let mappedRemoteBatteryLevel: PresentationCallState.RemoteBatteryLevel
if let callContextState = callContextState {
switch callContextState.videoState {
case .notAvailable:
mappedVideoState = .notAvailable
case .possible:
mappedVideoState = .possible
case .outgoingRequested:
mappedVideoState = .outgoingRequested
case let .incomingRequested(sendsVideo):
mappedVideoState = .incomingRequested(sendsVideo: sendsVideo)
case .active:
mappedVideoState = .active
self.videoWasActive = true
case .inactive:
mappedVideoState = .inactive
case .paused:
mappedVideoState = .paused
}
switch callContextState.remoteVideoState {
case .inactive:
mappedRemoteVideoState = .inactive
case .active:
mappedRemoteVideoState = .active
case .paused:
mappedRemoteVideoState = .paused
}
switch callContextState.remoteAudioState {
case .active:
mappedRemoteAudioState = .active
case .muted:
mappedRemoteAudioState = .muted
}
switch callContextState.remoteBatteryLevel {
case .normal:
@ -485,25 +495,38 @@ public final class PresentationCallImpl: PresentationCall {
case .low:
mappedRemoteBatteryLevel = .low
}
self.previousVideoState = mappedVideoState
self.previousRemoteVideoState = mappedRemoteVideoState
self.previousRemoteAudioState = mappedRemoteAudioState
self.previousRemoteBatteryLevel = mappedRemoteBatteryLevel
} else {
if self.isVideo {
mappedVideoState = .outgoingRequested
} else if self.isVideoPossible {
mappedVideoState = .possible
if let previousVideoState = self.previousVideoState {
mappedVideoState = previousVideoState
} else {
mappedVideoState = .notAvailable
if self.isVideo {
mappedVideoState = .active
} else if self.isVideoPossible {
mappedVideoState = .inactive
} else {
mappedVideoState = .notAvailable
}
}
if self.videoWasActive {
mappedRemoteVideoState = .active
mappedRemoteVideoState = .inactive
if let previousRemoteAudioState = self.previousRemoteAudioState {
mappedRemoteAudioState = previousRemoteAudioState
} else {
mappedRemoteVideoState = .inactive
mappedRemoteAudioState = .active
}
if let previousRemoteBatteryLevel = self.previousRemoteBatteryLevel {
mappedRemoteBatteryLevel = previousRemoteBatteryLevel
} else {
mappedRemoteBatteryLevel = .normal
}
mappedRemoteBatteryLevel = .normal
}
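The fallback chain above is the core of this hunk: a live OngoingCallContextState always wins, otherwise the last reported value is reused, and only the very first update derives a default from the call type. A condensed sketch under those assumptions (the enum and names are illustrative, not the module's API):

enum VideoState { case notAvailable, inactive, active, paused }

func resolveVideoState(context: VideoState?, previous: VideoState?,
                       isVideo: Bool, isVideoPossible: Bool) -> VideoState {
    if let context = context {
        return context // live state from the ongoing context wins
    }
    if let previous = previous {
        return previous // otherwise keep the last value we reported
    }
    // First update before the context exists: derive from the call type.
    if isVideo {
        return .active
    }
    return isVideoPossible ? .inactive : .notAvailable
}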
switch sessionState.state {
case .ringing:
presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .ringing, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
if previous == nil || previousControl == nil {
if !self.reportedIncomingCall {
self.reportedIncomingCall = true
@ -530,19 +553,19 @@ public final class PresentationCallImpl: PresentationCall {
}
case .accepting:
self.callWasActive = true
presentationState = PresentationCallState(state: .connecting(nil), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .connecting(nil), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case .dropping:
presentationState = PresentationCallState(state: .terminating, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .terminating, videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case let .terminated(id, reason, options):
presentationState = PresentationCallState(state: .terminated(id, reason, self.callWasActive && (options.contains(.reportRating) || self.shouldPresentCallRating)), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .terminated(id, reason, self.callWasActive && (options.contains(.reportRating) || self.shouldPresentCallRating)), videoState: mappedVideoState, remoteVideoState: .inactive, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case let .requesting(ringing):
presentationState = PresentationCallState(state: .requesting(ringing), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .requesting(ringing), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case let .active(_, _, keyVisualHash, _, _, _, _):
self.callWasActive = true
if let callContextState = callContextState {
switch callContextState.state {
case .initializing:
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case .failed:
presentationState = nil
self.callSessionManager.drop(internalId: self.internalId, reason: .disconnect, debugLog: .single(nil))
@ -554,7 +577,7 @@ public final class PresentationCallImpl: PresentationCall {
timestamp = CFAbsoluteTimeGetCurrent()
self.activeTimestamp = timestamp
}
presentationState = PresentationCallState(state: .active(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .active(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
case .reconnecting:
let timestamp: Double
if let activeTimestamp = self.activeTimestamp {
@ -563,10 +586,10 @@ public final class PresentationCallImpl: PresentationCall {
timestamp = CFAbsoluteTimeGetCurrent()
self.activeTimestamp = timestamp
}
presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .reconnecting(timestamp, reception, keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
}
} else {
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)
presentationState = PresentationCallState(state: .connecting(keyVisualHash), videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)
}
}
@ -665,7 +688,9 @@ public final class PresentationCallImpl: PresentationCall {
private func updateTone(_ state: PresentationCallState, callContextState: OngoingCallContextState?, previous: CallSession?) {
var tone: PresentationCallTone?
if let callContextState = callContextState, case .reconnecting = callContextState.state {
tone = .connecting
if !self.isVideo {
tone = .connecting
}
} else if let previous = previous {
switch previous.state {
case .accepting, .active, .dropping, .requesting:
@ -674,7 +699,9 @@ public final class PresentationCallImpl: PresentationCall {
if case .requesting = previous.state {
tone = .ringing
} else {
tone = .connecting
if !self.isVideo {
tone = .connecting
}
}
case .requesting(true):
tone = .ringing
@ -789,13 +816,10 @@ public final class PresentationCallImpl: PresentationCall {
}
}
public func acceptVideo() {
if self.videoCapturer == nil {
let videoCapturer = OngoingCallVideoCapturer()
self.videoCapturer = videoCapturer
}
if let videoCapturer = self.videoCapturer {
self.ongoingContext?.acceptVideo(videoCapturer)
public func disableVideo() {
if let _ = self.videoCapturer {
self.videoCapturer = nil
self.ongoingContext?.disableVideo()
}
}
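acceptVideo() previously created a capturer on demand and handed it to the context; its replacement disableVideo() is the inverse: drop the capturer and tell the context to stop sending. A minimal sketch of that enable/disable pairing (names are stand-ins, not the module's API):

final class LocalVideoStub {
    private(set) var capturer: AnyObject?
    var onStopSending: (() -> Void)?

    // Enabling installs a capturer exactly once; repeated requests are no-ops.
    func requestVideo(makeCapturer: () -> AnyObject) {
        guard capturer == nil else { return }
        capturer = makeCapturer()
    }

    // Disabling releases the capturer and asks the context to stop sending.
    func disableVideo() {
        guard capturer != nil else { return }
        capturer = nil
        onStopSending?()
    }
}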

View File

@ -10,6 +10,7 @@ import TelegramAudio
import TelegramVoip
import TelegramUIPreferences
import AccountContext
import CallKit
private func callKitIntegrationIfEnabled(_ integration: CallKitIntegration?, settings: VoiceCallSettings?) -> CallKitIntegration? {
let enabled = settings?.enableSystemIntegration ?? true
@ -341,9 +342,23 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
public func requestCall(context: AccountContext, peerId: PeerId, isVideo: Bool, endCurrentIfAny: Bool) -> RequestCallResult {
let account = context.account
var alreadyInCall: Bool = false
var alreadyInCallWithPeerId: PeerId?
if let call = self.currentCall, !endCurrentIfAny {
return .alreadyInProgress(call.peerId)
if let call = self.currentCall {
alreadyInCall = true
alreadyInCallWithPeerId = call.peerId
} else {
if #available(iOS 10.0, *) {
if CXCallObserver().calls.contains(where: { $0.hasEnded == false }) {
alreadyInCall = true
}
}
}
if alreadyInCall, !endCurrentIfAny {
return .alreadyInProgress(alreadyInCallWithPeerId)
}
if let _ = callKitIntegrationIfEnabled(self.callKitIntegration, settings: self.callSettings) {
let begin: () -> Void = { [weak self] in
@ -460,12 +475,12 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
return .single(false)
}
let request = account.postbox.transaction { transaction -> VideoCallsConfiguration in
let request = account.postbox.transaction { transaction -> (VideoCallsConfiguration, CachedUserData?) in
let appConfiguration: AppConfiguration = transaction.getPreferencesEntry(key: PreferencesKeys.appConfiguration) as? AppConfiguration ?? AppConfiguration.defaultValue
return VideoCallsConfiguration(appConfiguration: appConfiguration)
return (VideoCallsConfiguration(appConfiguration: appConfiguration), transaction.getPeerCachedData(peerId: peerId) as? CachedUserData)
}
|> mapToSignal { callsConfiguration -> Signal<CallSessionInternalId, NoError> in
let isVideoPossible: Bool
|> mapToSignal { callsConfiguration, cachedUserData -> Signal<CallSessionInternalId, NoError> in
var isVideoPossible: Bool
switch callsConfiguration.videoCallsSupport {
case .disabled:
isVideoPossible = isVideo
@ -474,15 +489,23 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
case .onlyVideo:
isVideoPossible = isVideo
}
if let cachedUserData = cachedUserData, cachedUserData.videoCallsAvailable {
} else {
isVideoPossible = false
}
return account.callSessionManager.request(peerId: peerId, isVideo: isVideo, enableVideo: isVideoPossible, internalId: internalId)
}
let cachedUserData = account.postbox.transaction { transaction -> CachedUserData? in
return transaction.getPeerCachedData(peerId: peerId) as? CachedUserData
}
return (combineLatest(queue: .mainQueue(), request, networkType |> take(1), account.postbox.peerView(id: peerId) |> map { peerView -> Bool in
return peerView.peerIsContact
} |> take(1), account.postbox.preferencesView(keys: [PreferencesKeys.voipConfiguration, ApplicationSpecificPreferencesKeys.voipDerivedState, PreferencesKeys.appConfiguration]) |> take(1), accountManager.sharedData(keys: [SharedDataKeys.autodownloadSettings, ApplicationSpecificSharedDataKeys.experimentalUISettings]) |> take(1))
} |> take(1), account.postbox.preferencesView(keys: [PreferencesKeys.voipConfiguration, ApplicationSpecificPreferencesKeys.voipDerivedState, PreferencesKeys.appConfiguration]) |> take(1), accountManager.sharedData(keys: [SharedDataKeys.autodownloadSettings, ApplicationSpecificSharedDataKeys.experimentalUISettings]) |> take(1), cachedUserData)
|> deliverOnMainQueue
|> beforeNext { internalId, currentNetworkType, isContact, preferences, sharedData in
|> beforeNext { internalId, currentNetworkType, isContact, preferences, sharedData, cachedUserData in
if let strongSelf = self, accessEnabled {
if let currentCall = strongSelf.currentCall {
currentCall.rejectBusy()
@ -494,7 +517,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
let callsConfiguration = VideoCallsConfiguration(appConfiguration: appConfiguration)
let isVideoPossible: Bool
var isVideoPossible: Bool
switch callsConfiguration.videoCallsSupport {
case .disabled:
isVideoPossible = isVideo
@ -503,6 +526,10 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
case .onlyVideo:
isVideoPossible = isVideo
}
if let cachedUserData = cachedUserData, cachedUserData.videoCallsAvailable {
} else {
isVideoPossible = false
}
let experimentalSettings = sharedData.entries[ApplicationSpecificSharedDataKeys.experimentalUISettings] as? ExperimentalUISettings ?? .defaultSettings
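The CXCallObserver probe above is what allows .alreadyInProgress to carry a nil peer id: a system-level (CallKit) call blocks a new Telegram call even though no Telegram peer is involved. The check in isolation:

import CallKit

func hasActiveSystemCall() -> Bool {
    if #available(iOS 10.0, *) {
        // Any call the system still tracks as not ended counts as active.
        return CXCallObserver().calls.contains(where: { !$0.hasEnded })
    }
    return false
}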

View File

@ -1226,6 +1226,23 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
} else {
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peerId), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { [weak self] peer, current in
if let peer = peer {
if let strongSelf = self, let current = current {
strongSelf.present(textAlertController(context: strongSelf.context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
if let strongSelf = self {
let _ = strongSelf.context.sharedContext.callManager?.requestCall(context: context, peerId: peerId, isVideo: isVideo, endCurrentIfAny: true)
}
})]), in: .window(.root))
} else if let strongSelf = self {
strongSelf.present(textAlertController(context: strongSelf.context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
}
})
/*let _ = (context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peer.id), transaction.getPeer(currentPeerId))
}
|> deliverOnMainQueue).start(next: { peer, current in
@ -1234,7 +1251,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
let _ = context.sharedContext.callManager?.requestCall(context: context, peerId: peer.id, isVideo: isVideo, endCurrentIfAny: true)
})]), in: .window(.root))
}
})
})*/
}
}
})
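This is the consumer side of the RequestCallResult change: a nil peer id now means "busy in an external CallKit call", which gets the new Call_ExternalCallInProgressMessage alert, while a known peer id keeps the end-and-restart alert. The same branching is duplicated in PeerInfoScreenNode below. A condensed sketch (PeerId is a stand-in for Postbox's type):

typealias PeerId = Int64

enum RequestCallResult {
    case requested
    case alreadyInProgress(PeerId?)
}

func alertKey(for result: RequestCallResult) -> String? {
    switch result {
    case .requested:
        return nil
    case .alreadyInProgress(.some):
        return "Call_CallInProgressMessage" // offer to end and restart
    case .alreadyInProgress(.none):
        return "Call_ExternalCallInProgressMessage" // inform only
    }
}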

View File

@ -3158,7 +3158,25 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
if currentPeerId == peer.id {
self.context.sharedContext.navigateToCurrentCall()
} else {
let presentationData = self.presentationData
let _ = (self.context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peer.id), currentPeerId.flatMap(transaction.getPeer))
} |> deliverOnMainQueue).start(next: { [weak self] peer, current in
if let peer = peer {
if let strongSelf = self, let current = current {
strongSelf.controller?.present(textAlertController(context: strongSelf.context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_CallInProgressMessage(current.compactDisplayTitle, peer.compactDisplayTitle).0, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_Cancel, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
if let strongSelf = self {
let _ = strongSelf.context.sharedContext.callManager?.requestCall(context: strongSelf.context, peerId: peer.id, isVideo: isVideo, endCurrentIfAny: true)
}
})]), in: .window(.root))
} else if let strongSelf = self {
strongSelf.controller?.present(textAlertController(context: strongSelf.context, title: presentationData.strings.Call_CallInProgressTitle, text: presentationData.strings.Call_ExternalCallInProgressMessage, actions: [TextAlertAction(type: .genericAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
}
}
})
/*let _ = (self.context.account.postbox.transaction { transaction -> (Peer?, Peer?) in
return (transaction.getPeer(peer.id), transaction.getPeer(currentPeerId))
}
|> deliverOnMainQueue).start(next: { [weak self] peer, current in
@ -3173,7 +3191,7 @@ private final class PeerInfoScreenNode: ViewControllerTracingNode, UIScrollViewD
let _ = strongSelf.context.sharedContext.callManager?.requestCall(context: strongSelf.context, peerId: peer.id, isVideo: isVideo, endCurrentIfAny: true)
})]), in: .window(.root))
}
})
})*/
}
}
}

View File

@ -108,15 +108,20 @@ public struct OngoingCallContextState: Equatable {
public enum VideoState: Equatable {
case notAvailable
case possible
case outgoingRequested
case incomingRequested(sendsVideo: Bool)
case inactive
case active
case paused
}
public enum RemoteVideoState: Equatable {
case inactive
case active
case paused
}
public enum RemoteAudioState: Equatable {
case active
case muted
}
public enum RemoteBatteryLevel: Equatable {
@ -127,6 +132,7 @@ public struct OngoingCallContextState: Equatable {
public let state: State
public let videoState: VideoState
public let remoteVideoState: RemoteVideoState
public let remoteAudioState: RemoteAudioState
public let remoteBatteryLevel: RemoteBatteryLevel
}
@ -257,7 +263,7 @@ private protocol OngoingCallThreadLocalContextProtocol: class {
func nativeSetNetworkType(_ type: NetworkType)
func nativeSetIsMuted(_ value: Bool)
func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer)
func nativeAcceptVideo(_ capturer: OngoingCallVideoCapturer)
func nativeDisableVideo()
func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void)
func nativeBeginTermination()
func nativeDebugInfo() -> String
@ -292,7 +298,7 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer) {
}
func nativeAcceptVideo(_ capturer: OngoingCallVideoCapturer) {
func nativeDisableVideo() {
}
func nativeSwitchVideoCamera() {
@ -371,8 +377,8 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
self.requestVideo(capturer.impl)
}
func nativeAcceptVideo(_ capturer: OngoingCallVideoCapturer) {
self.acceptVideo(capturer.impl)
func nativeDisableVideo() {
self.disableVideo()
}
func nativeDebugInfo() -> String {
@ -580,15 +586,13 @@ public final class OngoingCallContext {
filteredConnections.append(mapped)
}
}
let primaryConnection = filteredConnections.first!
let restConnections = Array(filteredConnections[1...])
let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: primaryConnection, alternativeConnections: restConnections, maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
callSessionManager?.sendSignalingData(internalId: internalId, data: data)
}, videoCapturer: video?.impl, preferredAspectRatio: Float(preferredAspectRatio), enableHighBitrateVideoCalls: enableHighBitrateVideoCalls)
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteBatteryLevel, aspectRatio in
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
queue.async {
guard let strongSelf = self else {
return
@ -596,16 +600,12 @@ public final class OngoingCallContext {
let mappedState = OngoingCallContextState.State(state)
let mappedVideoState: OngoingCallContextState.VideoState
switch videoState {
case .possible:
mappedVideoState = .possible
case .incomingRequested:
mappedVideoState = .incomingRequested(sendsVideo: false)
case .incomingRequestedAndActive:
mappedVideoState = .incomingRequested(sendsVideo: true)
case .outgoingRequested:
mappedVideoState = .outgoingRequested
case .inactive:
mappedVideoState = .inactive
case .active:
mappedVideoState = .active
case .paused:
mappedVideoState = .paused
@unknown default:
mappedVideoState = .notAvailable
}
@ -615,9 +615,20 @@ public final class OngoingCallContext {
mappedRemoteVideoState = .inactive
case .active:
mappedRemoteVideoState = .active
case .paused:
mappedRemoteVideoState = .paused
@unknown default:
mappedRemoteVideoState = .inactive
}
let mappedRemoteAudioState: OngoingCallContextState.RemoteAudioState
switch remoteAudioState {
case .active:
mappedRemoteAudioState = .active
case .muted:
mappedRemoteAudioState = .muted
@unknown default:
mappedRemoteAudioState = .active
}
let mappedRemoteBatteryLevel: OngoingCallContextState.RemoteBatteryLevel
switch remoteBatteryLevel {
case .normal:
@ -631,9 +642,10 @@ public final class OngoingCallContext {
strongSelf.didReportCallAsVideo = true
callSessionManager?.updateCallType(internalId: internalId, type: .video)
}
strongSelf.contextState.set(.single(OngoingCallContextState(state: mappedState, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteBatteryLevel: mappedRemoteBatteryLevel)))
strongSelf.contextState.set(.single(OngoingCallContextState(state: mappedState, videoState: mappedVideoState, remoteVideoState: mappedRemoteVideoState, remoteAudioState: mappedRemoteAudioState, remoteBatteryLevel: mappedRemoteBatteryLevel)))
}
}
strongSelf.receptionPromise.set(.single(4))
context.signalBarsChanged = { signalBars in
self?.receptionPromise.set(.single(signalBars))
}
@ -658,7 +670,7 @@ public final class OngoingCallContext {
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { state in
self?.contextState.set(.single(OngoingCallContextState(state: OngoingCallContextState.State(state), videoState: .notAvailable, remoteVideoState: .inactive, remoteBatteryLevel: .normal)))
self?.contextState.set(.single(OngoingCallContextState(state: OngoingCallContextState.State(state), videoState: .notAvailable, remoteVideoState: .inactive, remoteAudioState: .active, remoteBatteryLevel: .normal)))
}
context.signalBarsChanged = { signalBars in
self?.receptionPromise.set(.single(signalBars))
@ -760,9 +772,9 @@ public final class OngoingCallContext {
}
}
public func acceptVideo(_ capturer: OngoingCallVideoCapturer) {
public func disableVideo() {
self.withContext { context in
context.nativeAcceptVideo(capturer)
context.nativeDisableVideo()
}
}
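Throughout this file the Objective-C NS_ENUMs are mapped case-by-case, with @unknown default falling back to a safe value so a case added later in the VoIP layer degrades gracefully instead of crashing. A sketch of the pattern using the raw values declared in the header below:

enum RemoteAudioState { case active, muted }

func mapRemoteAudioState(_ raw: Int32) -> RemoteAudioState {
    switch raw {
    case 1: // OngoingCallRemoteAudioStateActive
        return .active
    case 0: // OngoingCallRemoteAudioStateMuted
        return .muted
    default: // conservative fallback, like the @unknown default branches above
        return .active
    }
}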

View File

@ -32,16 +32,20 @@ typedef NS_ENUM(int32_t, OngoingCallStateWebrtc) {
};
typedef NS_ENUM(int32_t, OngoingCallVideoStateWebrtc) {
OngoingCallVideoStatePossible,
OngoingCallVideoStateOutgoingRequested,
OngoingCallVideoStateIncomingRequested,
OngoingCallVideoStateIncomingRequestedAndActive,
OngoingCallVideoStateActive
OngoingCallVideoStateInactive,
OngoingCallVideoStateActive,
OngoingCallVideoStatePaused
};
typedef NS_ENUM(int32_t, OngoingCallRemoteVideoStateWebrtc) {
OngoingCallRemoteVideoStateInactive,
OngoingCallRemoteVideoStateActive
OngoingCallRemoteVideoStateActive,
OngoingCallRemoteVideoStatePaused
};
typedef NS_ENUM(int32_t, OngoingCallRemoteAudioStateWebrtc) {
OngoingCallRemoteAudioStateMuted,
OngoingCallRemoteAudioStateActive,
};
typedef NS_ENUM(int32_t, OngoingCallRemoteBatteryLevelWebrtc) {
@ -116,10 +120,10 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
+ (int32_t)maxLayer;
+ (NSArray<NSString *> * _Nonnull)versionsWithIncludeReference:(bool)includeReference;
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc, OngoingCallRemoteBatteryLevelWebrtc, float);
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc, OngoingCallRemoteAudioStateWebrtc, OngoingCallRemoteBatteryLevelWebrtc, float);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls;
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls;
- (void)beginTermination;
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
@ -134,7 +138,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)acceptVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)disableVideo;
- (void)addSignalingData:(NSData * _Nonnull)data;
@end
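From Swift, the widened block surfaces as a six-parameter closure (call state, local video, remote video, remote audio, remote battery, preferred aspect ratio). A stand-in sketch with raw Int32 values in place of the bridged NS_ENUMs:

final class CallContextStub {
    var stateChanged: ((Int32, Int32, Int32, Int32, Int32, Float) -> Void)?
}

let stub = CallContextStub()
stub.stateChanged = { _, _, _, remoteAudio, _, _ in
    // OngoingCallRemoteAudioStateWebrtc: 0 == muted, 1 == active.
    print(remoteAudio == 1 ? "remote audio active" : "remote audio muted")
}
stub.stateChanged?(0, 1, 1, 1, 0, 1.0)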

View File

@ -146,7 +146,7 @@
}
- (void)setIsVideoEnabled:(bool)isVideoEnabled {
_interface->setIsVideoEnabled(isVideoEnabled);
_interface->setState(isVideoEnabled ? tgcalls::VideoState::Active : tgcalls::VideoState::Paused);
}
- (std::shared_ptr<tgcalls::VideoCaptureInterface>)getInterface {
@ -161,14 +161,14 @@
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
interface->setVideoOutput(sink);
interface->setOutput(sink);
completion(remoteRenderer);
} else {
GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
interface->setVideoOutput(sink);
interface->setOutput(sink);
completion(remoteRenderer);
}
@ -214,6 +214,7 @@
bool _connectedOnce;
OngoingCallRemoteBatteryLevelWebrtc _remoteBatteryLevel;
OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
OngoingCallRemoteAudioStateWebrtc _remoteAudioState;
OngoingCallVideoOrientationWebrtc _remoteVideoOrientation;
__weak UIView<OngoingCallThreadLocalContextWebrtcVideoViewImpl> *_currentRemoteVideoRenderer;
OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;
@ -227,7 +228,7 @@
}
- (void)controllerStateChanged:(tgcalls::State)state videoState:(OngoingCallVideoStateWebrtc)videoState;
- (void)controllerStateChanged:(tgcalls::State)state;
- (void)signalBarsChanged:(int32_t)signalBars;
@end
@ -306,7 +307,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls {
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredAspectRatio:(float)preferredAspectRatio enableHighBitrateVideoCalls:(bool)enableHighBitrateVideoCalls {
self = [super init];
if (self != nil) {
_version = version;
@ -324,12 +325,12 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_sendSignalingData = [sendSignalingData copy];
_videoCapturer = videoCapturer;
if (videoCapturer != nil) {
_videoState = OngoingCallVideoStateOutgoingRequested;
_remoteVideoState = OngoingCallRemoteVideoStateActive;
_videoState = OngoingCallVideoStateActive;
} else {
_videoState = OngoingCallVideoStatePossible;
_remoteVideoState = OngoingCallRemoteVideoStateActive;
_videoState = OngoingCallVideoStateInactive;
}
_remoteVideoState = OngoingCallRemoteVideoStateInactive;
_remoteAudioState = OngoingCallRemoteAudioStateActive;
_remoteVideoOrientation = OngoingCallVideoOrientation0;
@ -347,8 +348,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
proxyValue = std::unique_ptr<tgcalls::Proxy>(proxyObject);
}
NSArray<OngoingCallConnectionDescriptionWebrtc *> *connections = [@[primaryConnection] arrayByAddingObjectsFromArray:alternativeConnections];
std::vector<tgcalls::RtcServer> parsedRtcServers;
for (OngoingCallConnectionDescriptionWebrtc *connection in connections) {
if (connection.hasStun) {
@ -382,7 +381,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
.enableNS = true,
.enableAGC = true,
.enableCallUpgrade = false,
.logPath = logPath.length == 0 ? "" : std::string(logPath.UTF8String),
.logPath = "", //logPath.length == 0 ? "" : std::string(logPath.UTF8String),
.maxApiLayer = [OngoingCallThreadLocalContextWebrtc maxLayer],
.preferredAspectRatio = preferredAspectRatio,
.enableHighBitrateVideo = enableHighBitrateVideoCalls
@ -408,30 +407,11 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
.initialNetworkType = callControllerNetworkTypeForType(networkType),
.encryptionKey = encryptionKey,
.videoCapture = [_videoCapturer getInterface],
.stateUpdated = [weakSelf, queue](tgcalls::State state, tgcalls::VideoState videoState) {
.stateUpdated = [weakSelf, queue](tgcalls::State state) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
OngoingCallVideoStateWebrtc mappedVideoState;
switch (videoState) {
case tgcalls::VideoState::Possible:
mappedVideoState = OngoingCallVideoStatePossible;
break;
case tgcalls::VideoState::OutgoingRequested:
mappedVideoState = OngoingCallVideoStateOutgoingRequested;
break;
case tgcalls::VideoState::IncomingRequested:
mappedVideoState = OngoingCallVideoStateIncomingRequested;
break;
case tgcalls::VideoState::IncomingRequestedAndActive:
mappedVideoState = OngoingCallVideoStateIncomingRequestedAndActive;
break;
case tgcalls::VideoState::Active:
mappedVideoState = OngoingCallVideoStateActive;
break;
}
[strongSelf controllerStateChanged:state videoState:mappedVideoState];
[strongSelf controllerStateChanged:state];
}
}];
},
@ -446,20 +426,42 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}];
},
.remoteVideoIsActiveUpdated = [weakSelf, queue](bool isActive) {
.remoteMediaStateUpdated = [weakSelf, queue](tgcalls::AudioState audioState, tgcalls::VideoState videoState) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
OngoingCallRemoteAudioStateWebrtc remoteAudioState;
OngoingCallRemoteVideoStateWebrtc remoteVideoState;
if (isActive) {
remoteVideoState = OngoingCallRemoteVideoStateActive;
} else {
remoteVideoState = OngoingCallRemoteVideoStateInactive;
switch (audioState) {
case tgcalls::AudioState::Muted:
remoteAudioState = OngoingCallRemoteAudioStateMuted;
break;
case tgcalls::AudioState::Active:
remoteAudioState = OngoingCallRemoteAudioStateActive;
break;
default:
remoteAudioState = OngoingCallRemoteAudioStateMuted;
break;
}
if (strongSelf->_remoteVideoState != remoteVideoState) {
switch (videoState) {
case tgcalls::VideoState::Inactive:
remoteVideoState = OngoingCallRemoteVideoStateInactive;
break;
case tgcalls::VideoState::Paused:
remoteVideoState = OngoingCallRemoteVideoStatePaused;
break;
case tgcalls::VideoState::Active:
remoteVideoState = OngoingCallRemoteVideoStateActive;
break;
default:
remoteVideoState = OngoingCallRemoteVideoStateInactive;
break;
}
if (strongSelf->_remoteVideoState != remoteVideoState || strongSelf->_remoteAudioState != remoteAudioState) {
strongSelf->_remoteVideoState = remoteVideoState;
strongSelf->_remoteAudioState = remoteAudioState;
if (strongSelf->_stateChanged) {
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
}
}
}
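The single remoteMediaStateUpdated callback replaces the old boolean remoteVideoIsActiveUpdated: one update now carries both the remote audio and video states, and observers are only notified when either field actually changed. The same idea in a self-contained Swift sketch (types are illustrative):

struct RemoteMediaState: Equatable {
    enum Audio { case active, muted }
    enum Video { case inactive, active, paused }
    var audio: Audio
    var video: Video
}

final class RemoteMediaNotifier {
    private var current = RemoteMediaState(audio: .active, video: .inactive)
    var onChange: ((RemoteMediaState) -> Void)?

    func update(_ new: RemoteMediaState) {
        guard new != current else { return } // suppress no-op updates
        current = new
        onChange?(new)
    }
}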
@ -478,7 +480,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
if (strongSelf->_remoteBatteryLevel != remoteBatteryLevel) {
strongSelf->_remoteBatteryLevel = remoteBatteryLevel;
if (strongSelf->_stateChanged) {
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
}
}
}
@ -490,7 +492,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
if (strongSelf) {
strongSelf->_remotePreferredAspectRatio = value;
if (strongSelf->_stateChanged) {
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState, strongSelf->_remoteAudioState, strongSelf->_remoteBatteryLevel, strongSelf->_remotePreferredAspectRatio);
}
}
}];
@ -507,7 +509,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
});
_state = OngoingCallStateInitializing;
_signalBars = -1;
_signalBars = 4;
}
return self;
}
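_signalBars now starts at 4 rather than the old -1 sentinel (and the Swift side seeds receptionPromise with 4 to match), so the UI shows full bars until the first real measurement arrives. A tiny sketch of that policy:

final class ReceptionStub {
    private(set) var signalBars: Int32 = 4 // optimistic default, was -1
    func update(measured: Int32) {
        signalBars = max(0, min(4, measured))
    }
}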
@ -581,7 +583,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)controllerStateChanged:(tgcalls::State)state videoState:(OngoingCallVideoStateWebrtc)videoState {
- (void)controllerStateChanged:(tgcalls::State)state {
OngoingCallStateWebrtc callState = OngoingCallStateInitializing;
switch (state) {
case tgcalls::State::Established:
@ -597,12 +599,11 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
break;
}
if (_state != callState || _videoState != videoState) {
if (_state != callState) {
_state = callState;
_videoState = videoState;
if (_stateChanged) {
_stateChanged(_state, _videoState, _remoteVideoState, _remoteBatteryLevel, _remotePreferredAspectRatio);
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
}
}
}
@ -654,7 +655,11 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
dispatch_async(dispatch_get_main_queue(), ^{
if ([VideoMetalView isSupported]) {
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFit;
#if TARGET_OS_IPHONE
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
#else
remoteRenderer.videoContentMode = UIViewContentModeScaleAspect;
#endif
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
@ -685,15 +690,29 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
if (_tgVoip && _videoCapturer == nil) {
_videoCapturer = videoCapturer;
_tgVoip->requestVideo([_videoCapturer getInterface]);
_tgVoip->setVideoCapture([_videoCapturer getInterface]);
_videoState = OngoingCallVideoStateActive;
if (_stateChanged) {
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
}
}
}
- (void)acceptVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
if (_tgVoip && _videoCapturer == nil) {
_videoCapturer = videoCapturer;
_tgVoip->requestVideo([_videoCapturer getInterface]);
- (void)disableVideo {
if (_tgVoip) {
_videoCapturer = nil;
_tgVoip->setVideoCapture(nullptr);
_videoState = OngoingCallVideoStateInactive;
if (_stateChanged) {
_stateChanged(_state, _videoState, _remoteVideoState, _remoteAudioState, _remoteBatteryLevel, _remotePreferredAspectRatio);
}
}
}
- (void)remotePrefferedAspectRatioUpdated:(float)remotePrefferedAspectRatio {
}
@end