Video Chat Improvements

Ilya Laktyushin
2021-05-25 22:09:48 +04:00
parent 54173d76e9
commit d3558a9c09
15 changed files with 2182 additions and 2127 deletions


@@ -782,6 +782,7 @@ public final class VoiceChatController: ViewController {
private var audioLevelsDisposable: Disposable?
private var myAudioLevelDisposable: Disposable?
private var isSpeakingDisposable: Disposable?
private var memberStatesDisposable: Disposable?
private var actionButtonColorDisposable: Disposable?
@@ -827,7 +828,10 @@ public final class VoiceChatController: ViewController {
private var connectedOnce = false
private var ignoreConnecting = false
private var ignoreConnectingTimer: SwiftSignalKit.Timer?
private var displayUnmuteTooltipTimer: SwiftSignalKit.Timer?
private var lastUnmuteTooltipDisplayTimestamp: Double?
private var displayMode: DisplayMode = .modal(isExpanded: false, isFilled: false) {
didSet {
if case let .modal(isExpanded, _) = self.displayMode {
@@ -1858,7 +1862,7 @@ public final class VoiceChatController: ViewController {
if let (currentPeerId, _, timestamp) = strongSelf.currentDominantSpeaker {
if CACurrentMediaTime() - timestamp > 2.5 && peerId != currentPeerId {
strongSelf.currentDominantSpeaker = (peerId, nil, CACurrentMediaTime())
strongSelf.updateMainVideo(waitForFullSize: false)
strongSelf.updateMainVideo(waitForFullSize: true)
}
}
}
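With this change the dominant-speaker handling keeps its 2.5-second, different-peer debounce and still updates currentDominantSpeaker immediately, but updateMainVideo is now asked to wait for a full-size frame before swapping the main stage. updateMainVideo is outside this diff, so the sketch below only illustrates, as an assumption, the kind of gating such a flag implies; every name except the flag is hypothetical.

import Foundation

// Hypothetical sketch of what a waitForFullSize flag can gate. In practice one would
// observe a readiness signal rather than poll, but the effect is the same: the main
// tile only switches once a full-size frame is available.
func switchMainVideo(to endpointId: String,
                     waitForFullSize: Bool,
                     isFullSizeReady: @escaping (String) -> Bool,
                     apply: @escaping (String) -> Void) {
    if !waitForFullSize || isFullSizeReady(endpointId) {
        // Switch immediately.
        apply(endpointId)
    } else {
        // Re-check shortly and switch once the full-size frame has arrived.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
            switchMainVideo(to: endpointId,
                            waitForFullSize: true,
                            isFullSizeReady: isFullSizeReady,
                            apply: apply)
        }
    }
}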
@@ -1879,6 +1883,40 @@ public final class VoiceChatController: ViewController {
strongSelf.actionButton.updateLevel(CGFloat(effectiveLevel))
})
self.isSpeakingDisposable = (self.call.isSpeaking
|> deliverOnMainQueue).start(next: { [weak self] isSpeaking in
guard let strongSelf = self else {
return
}
if let state = strongSelf.callState, state.muteState == nil || strongSelf.pushingToTalk {
strongSelf.displayUnmuteTooltipTimer?.invalidate()
} else {
if isSpeaking {
var shouldDisplayTooltip = false
if let previousTimestamp = strongSelf.lastUnmuteTooltipDisplayTimestamp, CACurrentMediaTime() > previousTimestamp + 60.0 {
shouldDisplayTooltip = true
} else if strongSelf.lastUnmuteTooltipDisplayTimestamp == nil {
shouldDisplayTooltip = true
}
if shouldDisplayTooltip {
let timer = SwiftSignalKit.Timer(timeout: 2.0, repeat: false, completion: { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.lastUnmuteTooltipDisplayTimestamp = CACurrentMediaTime()
strongSelf.displayUnmuteTooltip()
strongSelf.displayUnmuteTooltipTimer?.invalidate()
strongSelf.displayUnmuteTooltipTimer = nil
}, queue: Queue.mainQueue())
timer.start()
strongSelf.displayUnmuteTooltipTimer = timer
}
} else {
strongSelf.displayUnmuteTooltipTimer?.invalidate()
}
}
})
self.leaveButton.addTarget(self, action: #selector(self.leavePressed), forControlEvents: .touchUpInside)
self.actionButton.addTarget(self, action: #selector(self.actionPressed), forControlEvents: .touchUpInside)
self.audioButton.addTarget(self, action: #selector(self.audioPressed), forControlEvents: .touchUpInside)
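The new isSpeaking subscription drives the unmute hint: while the user is muted and not pushing to talk, sustained speech arms a 2-second SwiftSignalKit timer, and lastUnmuteTooltipDisplayTimestamp rate-limits the tooltip to roughly once per minute. A compact, framework-free restatement of that debounce-plus-rate-limit shape, using Foundation's Timer in place of SwiftSignalKit's; only the 2 s and 60 s constants come from the diff, everything else is illustrative.

import Foundation
import QuartzCore

// Stand-alone sketch of the unmute-hint logic added above.
final class UnmuteHint {
    private var timer: Foundation.Timer?
    private var lastDisplayTimestamp: Double?
    private let show: () -> Void

    init(show: @escaping () -> Void) {
        self.show = show
    }

    // Call whenever the speaking or mute state changes.
    func update(isSpeaking: Bool, isMuted: Bool) {
        guard isMuted else {
            // Unmuted (or pushing to talk): no hint needed, drop any pending timer.
            timer?.invalidate()
            timer = nil
            return
        }
        if isSpeaking {
            // Rate limit: at most one hint per minute.
            if let last = lastDisplayTimestamp, CACurrentMediaTime() < last + 60.0 {
                return
            }
            // Debounce: only show the hint if speech persists for ~2 seconds.
            guard timer == nil else { return }
            timer = Foundation.Timer.scheduledTimer(withTimeInterval: 2.0, repeats: false) { [weak self] _ in
                guard let self = self else { return }
                self.lastDisplayTimestamp = CACurrentMediaTime()
                self.timer = nil
                self.show()
            }
        } else {
            // Speech stopped before the debounce fired.
            timer?.invalidate()
            timer = nil
        }
    }
}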
@@ -2063,6 +2101,7 @@ public final class VoiceChatController: ViewController {
self.memberStatesDisposable?.dispose()
self.audioLevelsDisposable?.dispose()
self.myAudioLevelDisposable?.dispose()
self.isSpeakingDisposable?.dispose()
self.inviteDisposable.dispose()
self.memberEventsDisposable.dispose()
self.reconnectedAsEventsDisposable.dispose()
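isSpeakingDisposable joins the other disposables torn down when the node goes away, so the speaking subscription cannot outlive it. The ownership pattern in isolation, with a Cancellable protocol standing in for SwiftSignalKit's Disposable:

import Foundation

// Every long-lived subscription is stored so teardown can cancel it exactly once.
protocol Cancellable {
    func cancel()
}

final class SubscriptionOwner {
    private var subscriptions: [Cancellable] = []

    func track(_ subscription: Cancellable) {
        subscriptions.append(subscription)
    }

    deinit {
        // Mirrors the dispose block above, isSpeakingDisposable included.
        subscriptions.forEach { $0.cancel() }
    }
}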
@@ -3533,7 +3572,7 @@ public final class VoiceChatController: ViewController {
}
}
let videoButtonSize: CGSize
let audioButtonSize: CGSize
var buttonsTitleAlpha: CGFloat
var effectiveDisplayMode = self.displayMode
if let (layout, _) = self.validLayout, layout.size.width > layout.size.height, case .compact = layout.metrics.widthClass {
@@ -3543,34 +3582,37 @@ public final class VoiceChatController: ViewController {
}
}
let hasCameraButton = self.cameraButton.isUserInteractionEnabled
let hasVideo = self.call.hasVideo
switch effectiveDisplayMode {
case .modal:
videoButtonSize = smallButtonSize
audioButtonSize = hasCameraButton ? smallButtonSize : sideButtonSize
buttonsTitleAlpha = 1.0
case .fullscreen:
videoButtonSize = sideButtonSize
audioButtonSize = sideButtonSize
buttonsTitleAlpha = 0.0
}
let hasVideo = self.call.hasVideo
self.cameraButton.update(size: hasVideo ? sideButtonSize : videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: hasVideo ? activeButtonAppearance : normalButtonAppearance, image: hasVideo ? .cameraOn : .cameraOff), text: self.presentationData.strings.VoiceChat_Video, transition: transition)
self.cameraButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: hasVideo ? activeButtonAppearance : normalButtonAppearance, image: hasVideo ? .cameraOn : .cameraOff), text: self.presentationData.strings.VoiceChat_Video, transition: transition)
self.switchCameraButton.update(size: videoButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition)
self.switchCameraButton.update(size: audioButtonSize, content: CallControllerButtonItemNode.Content(appearance: normalButtonAppearance, image: .flipCamera), text: "", transition: transition)
transition.updateAlpha(node: self.switchCameraButton, alpha: hasVideo ? 1.0 : 0.0)
transition.updateTransformScale(node: self.switchCameraButton, scale: hasVideo ? 1.0 : 0.0)
transition.updateTransformScale(node: self.cameraButton, scale: hasCameraButton ? 1.0 : 0.0)
transition.updateAlpha(node: self.audioButton, alpha: hasVideo ? 0.0 : 1.0)
transition.updateTransformScale(node: self.audioButton, scale: hasVideo ? 0.0 : 1.0)
self.audioButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage, isEnabled: isSoundEnabled), text: soundTitle, transition: transition)
self.audioButton.update(size: audioButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage, isEnabled: isSoundEnabled), text: soundTitle, transition: transition)
self.audioButton.isUserInteractionEnabled = isSoundEnabled
self.leaveButton.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.custom(0xff3b30, 0.3)), image: .cancel), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate)
transition.updateAlpha(node: self.cameraButton.textNode, alpha: hasVideo ? buttonsTitleAlpha : 0.0)
transition.updateAlpha(node: self.cameraButton.textNode, alpha: hasCameraButton ? buttonsTitleAlpha : 0.0)
transition.updateAlpha(node: self.switchCameraButton.textNode, alpha: buttonsTitleAlpha)
transition.updateAlpha(node: self.audioButton.textNode, alpha: buttonsTitleAlpha)
transition.updateAlpha(node: self.audioButton.textNode, alpha: hasCameraButton ? 0.0 : buttonsTitleAlpha)
transition.updateAlpha(node: self.leaveButton.textNode, alpha: buttonsTitleAlpha)
}
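Button metrics now come from a single switch over the display mode: the camera slot uses the small size in the modal layout, the audio button drops to the small size whenever a camera button is present (the two share a slot, as the frame-swap code further down suggests), and fullscreen hides the button titles. A reduced version of that selection, with a simplified Mode enum and placeholder CGSize values in place of the controller's smallButtonSize/sideButtonSize constants:

import CoreGraphics

// Placeholder sizes; the controller defines its own smallButtonSize/sideButtonSize.
let smallButtonSize = CGSize(width: 36.0, height: 36.0)
let sideButtonSize = CGSize(width: 56.0, height: 56.0)

// Simplified stand-in for the controller's DisplayMode, which carries associated values.
enum Mode { case modal, fullscreen }

// Mirrors the switch above: small camera slot in the modal layout, audio button only
// keeps the large size while no camera button competes for its slot, fullscreen hides titles.
func buttonMetrics(mode: Mode, hasCameraButton: Bool) -> (video: CGSize, audio: CGSize, titleAlpha: CGFloat) {
    switch mode {
    case .modal:
        return (smallButtonSize, hasCameraButton ? smallButtonSize : sideButtonSize, 1.0)
    case .fullscreen:
        return (sideButtonSize, sideButtonSize, 0.0)
    }
}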
@@ -3963,6 +4005,8 @@ public final class VoiceChatController: ViewController {
} : nil)
}
self.cameraButton.isUserInteractionEnabled = hasCameraButton
var buttonsTransition: ContainedViewLayoutTransition = .immediate
if !isFirstTime {
if case .animated(_, .spring) = transition {
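This hunk appears to move the cameraButton.isUserInteractionEnabled assignment above the updateButtons call that reads it, and begins the buttons-transition selection: the truncated lines suggest the first layout pass applies frames immediately and later passes inherit the caller's spring transition. A toy UIKit restatement of that first-layout-is-immediate rule; applyFrames and the spring parameters are stand-ins, not the real ContainedViewLayoutTransition.

import UIKit

// First pass: apply frames with no animation. Later passes: reuse an animated (spring) transition.
func layOutButtons(isFirstTime: Bool, animated: Bool, applyFrames: @escaping () -> Void) {
    if isFirstTime || !animated {
        applyFrames()
    } else {
        UIView.animate(withDuration: 0.4,
                       delay: 0.0,
                       usingSpringWithDamping: 0.8,
                       initialSpringVelocity: 0.0,
                       options: [],
                       animations: applyFrames,
                       completion: nil)
    }
}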
@@ -3973,22 +4017,21 @@ public final class VoiceChatController: ViewController {
}
self.updateButtons(transition: buttonsTransition)
self.cameraButton.isUserInteractionEnabled = hasCameraButton
if self.audioButton.supernode === self.bottomPanelNode {
transition.updateAlpha(node: self.cameraButton, alpha: hasCameraButton ? 1.0 : 0.0)
transition.updateFrameAsPositionAndBounds(node: self.switchCameraButton, frame: firstButtonFrame)
if !self.animatingButtonsSwap || transition.isAnimated {
if self.call.hasVideo {
transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: secondButtonFrame, completion: { [weak self] _ in
if hasCameraButton {
transition.updateFrameAsPositionAndBounds(node: self.audioButton, frame: firstButtonFrame, completion: { [weak self] _ in
self?.animatingButtonsSwap = false
})
} else {
transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: firstButtonFrame, completion: { [weak self] _ in
transition.updateFrameAsPositionAndBounds(node: self.audioButton, frame: secondButtonFrame, completion: { [weak self] _ in
self?.animatingButtonsSwap = false
})
}
transition.updateFrameAsPositionAndBounds(node: self.audioButton, frame: secondButtonFrame)
transition.updateFrameAsPositionAndBounds(node: self.cameraButton, frame: secondButtonFrame)
}
transition.updateFrameAsPositionAndBounds(node: self.leaveButton, frame: forthButtonFrame)
}
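Within this layout block the audio and camera buttons are now repositioned based on hasCameraButton, with the switch-camera button kept at firstButtonFrame and animatingButtonsSwap cleared in the frame update's completion so a later pass can animate again. The completion-flag idea on its own, in plain UIKit with hypothetical views:

import UIKit

// A reentrancy flag blocks overlapping swaps and is cleared in the animation's completion,
// mirroring the completion blocks above.
final class ButtonSwapper {
    private(set) var animatingButtonsSwap = false

    func swap(_ a: UIView, _ b: UIView, duration: TimeInterval = 0.3) {
        guard !animatingButtonsSwap else { return }
        animatingButtonsSwap = true
        let (frameA, frameB) = (a.frame, b.frame)
        UIView.animate(withDuration: duration, animations: {
            a.frame = frameB
            b.frame = frameA
        }, completion: { [weak self] _ in
            self?.animatingButtonsSwap = false
        })
    }
}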
@@ -4515,8 +4558,10 @@ public final class VoiceChatController: ViewController {
self.requestedVideoSources.insert(channel.endpointId)
self.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] videoView in
Queue.mainQueue().async {
print("create main video \(channel.endpointId)")
self?.call.makeIncomingVideoView(endpointId: channel.endpointId, completion: { [weak self] backdropVideoView in
Queue.mainQueue().async {
print("create blur video \(channel.endpointId)")
guard let strongSelf = self, let videoView = videoView else {
return
}
@@ -4527,6 +4572,7 @@ public final class VoiceChatController: ViewController {
|> deliverOnMainQueue
).start(next: { [weak self, weak videoNode] _ in
if let strongSelf = self, let videoNode = videoNode {
print("video ready \(channel.endpointId)")
Queue.mainQueue().after(0.1) {
strongSelf.readyVideoNodes.insert(channel.endpointId)
if videoNode.aspectRatio <= 0.77 {
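These two hunks only add print tracing around the existing flow, in which each requested endpoint gets two incoming video views, the main one plus a backdropVideoView used for the blurred background, and a readiness signal later records the endpoint in readyVideoNodes after an aspect-ratio check for portrait streams. A schematic version of the paired request, with a hypothetical IncomingVideoSource protocol standing in for the real call object:

import UIKit

// Hypothetical source interface, shaped like the completion-based API used above;
// the real call object and its incoming-video types are not part of this sketch.
protocol IncomingVideoSource {
    func makeIncomingVideoView(endpointId: String, completion: @escaping (UIView?) -> Void)
}

// Request the main view plus a second view for the blurred backdrop, reporting back
// only once the main view exists (the backdrop may legitimately be nil).
func makeVideoPair(source: IncomingVideoSource,
                   endpointId: String,
                   completion: @escaping (_ main: UIView, _ backdrop: UIView?) -> Void) {
    source.makeIncomingVideoView(endpointId: endpointId) { mainView in
        DispatchQueue.main.async {
            source.makeIncomingVideoView(endpointId: endpointId) { backdropView in
                DispatchQueue.main.async {
                    guard let mainView = mainView else { return }
                    completion(mainView, backdropView)
                }
            }
        }
    }
}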
@@ -5264,6 +5310,13 @@ public final class VoiceChatController: ViewController {
}))
}
private func displayUnmuteTooltip() {
let location = self.actionButton.view.convert(self.actionButton.bounds, to: self.view).center
let point = CGRect(origin: CGPoint(x: location.x - 5.0, y: location.y - 5.0 - 68.0), size: CGSize(width: 10.0, height: 10.0))
self.controller?.present(TooltipScreen(text: self.presentationData.strings.VoiceChat_UnmuteSuggestion, style: .gradient(UIColor(rgb: 0x1d446c), UIColor(rgb: 0x193e63)), icon: nil, location: .point(point, .bottom), displayDuration: .custom(3.0), shouldDismissOnTouch: { _ in
return .dismiss(consume: false)
}), in: .window(.root))
}
private func displayToggleVideoSourceTooltip(screencast: Bool) {
// guard let videoContainerNode = self.mainStageVideoContainerNode else {
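displayUnmuteTooltip, added above, is what the 2-second timer eventually calls: it converts the action button's bounds into the controller's view, anchors a TooltipScreen to a 10 x 10 pt rect centred 68 pt above the button, and shows it for 3 seconds without consuming touches. The anchor-rect arithmetic in isolation (plain UIKit; buttonView and containerView are hypothetical, and since the .center accessor on CGRect in the real code is an in-project extension, the sketch derives it from midX/midY):

import UIKit

// A 10 x 10 pt rect centred 68 pt above the button's centre, in the presenting
// view's coordinate space.
func unmuteTooltipAnchorRect(buttonView: UIView, in containerView: UIView) -> CGRect {
    let buttonFrame = buttonView.convert(buttonView.bounds, to: containerView)
    let center = CGPoint(x: buttonFrame.midX, y: buttonFrame.midY)
    return CGRect(x: center.x - 5.0, y: center.y - 5.0 - 68.0, width: 10.0, height: 10.0)
}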
@@ -5346,7 +5399,7 @@ public final class VoiceChatController: ViewController {
self.mainStageContainerNode.isHidden = false
self.mainStageContainerNode.isUserInteractionEnabled = isFullscreen
let transition: ContainedViewLayoutTransition = .animated(duration: 0.4, curve: .spring)
let transition: ContainedViewLayoutTransition = .animated(duration: 0.55, curve: .spring)
if case .modal = previousDisplayMode, case .fullscreen = self.displayMode {
self.fullscreenListNode.isHidden = false
@@ -5409,7 +5462,7 @@ public final class VoiceChatController: ViewController {
self.transitionMaskTopFillLayer.opacity = 1.0
}
self.transitionMaskBottomFillLayer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3, removeOnCompletion: false, completion: { [weak self] _ in
Queue.mainQueue().after(0.2) {
Queue.mainQueue().after(0.3) {
self?.transitionMaskTopFillLayer.opacity = 0.0
self?.transitionMaskBottomFillLayer.removeAllAnimations()
}
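The only change here is the cleanup delay after the bottom mask layer fades in, from 0.2 s to 0.3 s, presumably so the longer 0.55 s panel transition above has settled before the top fill layer is hidden and the retained fade removed. animateAlpha is an in-project CALayer helper; a rough plain Core Animation equivalent of the same fade-then-delayed-cleanup step, with bottomFillLayer and topFillLayer as hypothetical stand-ins for the transition mask layers:

import Foundation
import QuartzCore

// Fade the bottom fill in, keep the final value on screen, then clean up after a delay.
func fadeInBottomFill(bottomFillLayer: CALayer, topFillLayer: CALayer) {
    let fade = CABasicAnimation(keyPath: "opacity")
    fade.fromValue = 0.0
    fade.toValue = 1.0
    fade.duration = 0.3
    fade.isRemovedOnCompletion = false   // keep the presented value until cleanup
    fade.fillMode = .forwards

    CATransaction.begin()
    CATransaction.setCompletionBlock {
        // Give the panel transition time to settle before cleaning up.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
            topFillLayer.opacity = 0.0
            bottomFillLayer.removeAllAnimations()
        }
    }
    bottomFillLayer.add(fade, forKey: "opacity")
    CATransaction.commit()
}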