Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-07-15 07:49:27 +00:00)

Commit f09a09014f: Merge commit '816ef02fb637d8b074dcda1fa4e7ce1072db927f'
@@ -1948,7 +1948,7 @@ public final class VoiceChatController: ViewController {
 self.actionButtonColorDisposable = (self.actionButton.outerColor
 |> deliverOnMainQueue).start(next: { [weak self] normalColor, activeColor in
     if let strongSelf = self {
-        let animated = strongSelf.currentNormalButtonColor != nil
+        let animated = strongSelf.currentNormalButtonColor != nil || strongSelf.currentActiveButtonColor == nil
         strongSelf.currentNormalButtonColor = normalColor
         strongSelf.currentActiveButtonColor = activeColor
         strongSelf.updateButtons(transition: animated ? .animated(duration: 0.3, curve: .linear) : .immediate)
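
Note: the only functional change in this hunk is the broadened `animated` condition; the update now also animates while no active color has been recorded yet. As a minimal, self-contained sketch of the underlying pattern (a hypothetical `ColorButton`, not the Telegram-iOS API), driving the animation flag off previously stored state looks like this:

    final class ColorButton {
        private var currentColor: String?

        func apply(_ color: String) {
            // Animate only when a previous value has already been recorded;
            // the very first update is applied immediately.
            let animated = currentColor != nil
            currentColor = color
            print(animated ? "animated update to \(color)" : "immediate update to \(color)")
        }
    }

    let button = ColorButton()
    button.apply("blue")  // immediate
    button.apply("red")   // animated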
@@ -2211,6 +2211,7 @@ public final class VoiceChatController: ViewController {
     })))
 }
 
+if let callState = strongSelf.callState, callState.isVideoEnabled && (callState.muteState?.canUnmute ?? true) {
     if #available(iOS 12.0, *) {
         if strongSelf.call.hasScreencast {
             items.append(.action(ContextMenuActionItem(text: strongSelf.presentationData.strings.VoiceChat_StopScreenSharing, icon: { theme in
@@ -2226,6 +2227,7 @@ public final class VoiceChatController: ViewController {
             }, action: { _, _ in }), false))
         }
     }
+}
 
 if canManageCall {
     if let recordingStartTimestamp = strongSelf.callState?.recordingStartTimestamp {
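
Note: the two hunks above wrap the existing iOS 12 screencast menu entries in an additional call-state check, so the item is built only when video is enabled and the participant may unmute. A rough sketch of that gating, using simplified stand-in types rather than the real call-state API:

    struct CallState {
        var isVideoEnabled: Bool
        var canUnmute: Bool?
    }

    func screencastMenuItems(callState: CallState?, hasScreencast: Bool) -> [String] {
        var items: [String] = []
        // Offer the stop-screen-sharing action only for unmutable participants with video enabled.
        if let callState = callState, callState.isVideoEnabled && (callState.canUnmute ?? true) {
            if #available(iOS 12.0, *), hasScreencast {
                items.append("Stop Screen Sharing")
            }
        }
        return items
    }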
@@ -3646,6 +3648,10 @@ public final class VoiceChatController: ViewController {
 
     transition.updateAlpha(node: self.cameraButton.textNode, alpha: hasCameraButton ? buttonsTitleAlpha : 0.0)
     transition.updateAlpha(node: self.switchCameraButton.textNode, alpha: buttonsTitleAlpha)
+    var audioButtonTransition = transition
+    if hasCameraButton, transition.isAnimated {
+        audioButtonTransition = .animated(duration: 0.15, curve: .easeInOut)
+    }
     transition.updateAlpha(node: self.audioButton.textNode, alpha: hasCameraButton ? 0.0 : buttonsTitleAlpha)
     transition.updateAlpha(node: self.leaveButton.textNode, alpha: buttonsTitleAlpha)
 }
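
Note: the added lines introduce a shorter transition for the audio button title when a camera button is present (presumably consumed a few lines further down, outside this hunk). A minimal sketch of overriding a transition for one node, with a simplified stand-in for ContainedViewLayoutTransition:

    enum Transition {
        case immediate
        case animated(duration: Double)

        var isAnimated: Bool {
            if case .animated = self { return true }
            return false
        }
    }

    func updateTitleAlphas(transition: Transition, hasCameraButton: Bool) {
        // Use a quicker fade for the audio button title when the camera button is visible.
        var audioButtonTransition = transition
        if hasCameraButton, transition.isAnimated {
            audioButtonTransition = .animated(duration: 0.15)
        }
        print("audio title transition: \(audioButtonTransition), others: \(transition)")
    }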
@@ -4480,7 +4486,7 @@ public final class VoiceChatController: ViewController {
 
     self.requestedVideoChannels = requestedVideoChannels
 
-    guard self.didSetDataReady || !self.isPanning else {
+    guard self.didSetDataReady && !self.isPanning else {
         return
     }
 
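
Note: switching `||` to `&&` tightens the early return: the update now proceeds only when the data is ready and no pan gesture is in progress, whereas before either condition alone was enough. The two forms differ exactly when one condition holds and the other does not:

    func shouldProceed(didSetDataReady: Bool, isPanning: Bool) -> Bool {
        // Old guard condition: didSetDataReady || !isPanning
        // New guard condition: didSetDataReady && !isPanning
        return didSetDataReady && !isPanning
    }

    print(shouldProceed(didSetDataReady: true, isPanning: true))   // false; the old condition let this through
    print(shouldProceed(didSetDataReady: false, isPanning: false)) // false; the old condition let this through as well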
@@ -4596,8 +4602,7 @@ public final class VoiceChatController: ViewController {
 self.requestedVideoSources.insert(channel.endpointId)
 self.call.makeIncomingVideoView(endpointId: channel.endpointId, requestClone: true, completion: { [weak self] videoView, backdropVideoView in
     Queue.mainQueue().async {
-        print("create main video \(channel.endpointId)")
-        print("create blur video \(channel.endpointId)")
+        print("create video \(channel.endpointId)")
         guard let strongSelf = self, let videoView = videoView else {
             return
         }
@@ -5480,17 +5485,25 @@ public final class VoiceChatController: ViewController {
 let completion = {
     let effectiveSpeakerPeerId = self.effectiveSpeaker?.0
     if let effectiveSpeakerPeerId = effectiveSpeakerPeerId, let otherItemNode = verticalItemNodes[String(effectiveSpeakerPeerId.toInt64()) + "_" + (self.effectiveSpeaker?.1 ?? "")] {
 
+        self.mainStageNode.alpha = 0.0
+
+        Queue.mainQueue().after(0.05) {
         self.mainStageNode.animateTransitionIn(from: otherItemNode, transition: transition)
+            self.mainStageNode.alpha = 1.0
+
         self.mainStageBackgroundNode.alpha = 1.0
         self.mainStageBackgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
     }
+    }
 
+    Queue.mainQueue().after(0.1) {
     self.fullscreenListNode.forEachItemNode { itemNode in
         if let itemNode = itemNode as? VoiceChatFullscreenParticipantItemNode, let item = itemNode.item {
             itemNode.animateTransitionIn(from: verticalItemNodes[String(item.peer.id.toInt64()) + "_" + (item.videoEndpointId ?? "")], containerNode: self.transitionContainerNode, transition: transition, animate: item.peer.id != effectiveSpeakerPeerId)
         }
     }
+    }
 
     if self.isLandscape {
         self.transitionMaskTopFillLayer.opacity = 1.0
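
Note: the hunk above hides the main stage, starts its transition shortly afterwards, and delays the fullscreen-list transition a little more, so the two animations are staggered. A minimal sketch of that staggering with DispatchQueue (the project uses its own Queue wrapper, assumed here to behave like the main dispatch queue):

    import Dispatch

    func runStaggeredTransitions(animateMainStage: @escaping () -> Void,
                                 animateFullscreenList: @escaping () -> Void) {
        // Kick off the main-stage animation first, then the list animation slightly later.
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) {
            animateMainStage()
        }
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {
            animateFullscreenList()
        }
    }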
@@ -469,7 +469,7 @@ final class VoiceChatMainStageNode: ASDisplayNode {
             avatarScale = 1.0
         }
 
-        let transition: ContainedViewLayoutTransition = .animated(duration: 0.15, curve: .easeInOut)
+        let transition: ContainedViewLayoutTransition = .animated(duration: 0.25, curve: .easeInOut)
         transition.updateTransformScale(node: strongSelf.speakingAvatarNode, scale: avatarScale, beginWithCurrentState: true)
     }
 }))
@@ -676,14 +676,14 @@ final class VoiceChatMainStageNode: ASDisplayNode {
 |> take(1)
 |> deliverOnMainQueue).start(next: { [weak self] _ in
     Queue.mainQueue().after(0.07) {
-        completion?()
-
         if let strongSelf = self {
             if let (size, sideInset, bottomInset, isLandscape) = strongSelf.validLayout {
                 strongSelf.update(size: size, sideInset: sideInset, bottomInset: bottomInset, isLandscape: isLandscape, transition: .immediate)
             }
         }
 
+        completion?()
+
         if delayTransition {
             if let videoNode = strongSelf.currentVideoNode {
                 videoNode.alpha = 1.0
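
Note: the final hunk (truncated here) reorders the completion call so it fires after the cached layout has been re-applied rather than before. As a small sketch with hypothetical names, not the node's real API:

    import Foundation

    final class StageNode {
        private var validLayoutSize: CGSize?   // hypothetical cached layout

        func finishTransition(completion: (() -> Void)?) {
            // Re-apply the last known layout first, then notify the caller,
            // so the completion block observes the updated layout.
            if let size = validLayoutSize {
                update(size: size)
            }
            completion?()
        }

        private func update(size: CGSize) {
            // Layout work would go here.
        }
    }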