Ali
2021-01-01 18:40:58 +04:00
parent 3ba3cc05ad
commit b42d5b9ac5
8 changed files with 134 additions and 11 deletions

View File

@@ -298,6 +298,7 @@ public protocol PresentationGroupCall: class {
func setIsMuted(action: PresentationGroupCallMuteAction)
func updateDefaultParticipantsAreMuted(isMuted: Bool)
func setVolume(peerId: PeerId, volume: Double)
func setFullSizeVideo(peerId: PeerId)
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func updateMuteState(peerId: PeerId, isMuted: Bool)
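
As a hedged sketch (not part of this commit), a call site for the new protocol requirement might look like the following; the tapped peer id and the call reference are assumed to come from the surrounding controller:

    // Hypothetical call site: promote the tapped participant's video stream to full size.
    func didTapParticipantVideo(_ peerId: PeerId, in call: PresentationGroupCall) {
        call.setFullSizeVideo(peerId: peerId)
    }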

View File

@@ -1262,6 +1262,17 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
public func setFullSizeVideo(peerId: PeerId) {
var resolvedSsrc: UInt32?
for (ssrc, id) in self.ssrcMapping {
if id == peerId {
resolvedSsrc = ssrc
break
}
}
self.callContext?.setFullSizeVideoSsrc(ssrc: resolvedSsrc)
}
public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
guard self.currentSelectedAudioOutputValue != output else {
return
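
A minimal, self-contained sketch of the reverse lookup performed above, assuming ssrcMapping is a dictionary keyed by SSRC with peer ids as values (Int64 stands in for PeerId here for illustration only):

    // Illustrative stand-in for the resolution step: find the SSRC bound to a peer.
    func resolveSsrc(for peerId: Int64, in ssrcMapping: [UInt32: Int64]) -> UInt32? {
        return ssrcMapping.first(where: { $0.value == peerId })?.key
    }
    // A nil result is passed through to the call context unchanged; collapsing it to a
    // concrete value happens further down (see the OngoingGroupCallContext hunk).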

View File

@@ -221,6 +221,7 @@ public final class VoiceChatController: ViewController {
let openInvite: () -> Void
let peerContextAction: (PeerEntry, ASDisplayNode, ContextGesture?) -> Void
let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void
let getPeerVideo: (UInt32) -> GroupVideoNode?
private var audioLevels: [PeerId: ValuePipe<Float>] = [:]
@@ -229,13 +230,15 @@ public final class VoiceChatController: ViewController {
openPeer: @escaping (PeerId) -> Void,
openInvite: @escaping () -> Void,
peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void,
setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void
setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void,
getPeerVideo: @escaping (UInt32) -> GroupVideoNode?
) {
self.updateIsMuted = updateIsMuted
self.openPeer = openPeer
self.openInvite = openInvite
self.peerContextAction = peerContextAction
self.setPeerIdWithRevealedOptions = setPeerIdWithRevealedOptions
self.getPeerVideo = getPeerVideo
}
func getAudioLevel(_ peerId: PeerId) -> Signal<Float, NoError> {
@@ -283,6 +286,7 @@ public final class VoiceChatController: ViewController {
}
var peer: Peer
var ssrc: UInt32
var presence: TelegramUserPresence?
var activityTimestamp: Int32
var state: State
@@ -298,6 +302,9 @@ public final class VoiceChatController: ViewController {
if !lhs.peer.isEqual(rhs.peer) {
return false
}
if lhs.ssrc != rhs.ssrc {
return false
}
if lhs.presence != rhs.presence {
return false
}
@@ -431,7 +438,9 @@ public final class VoiceChatController: ViewController {
let revealOptions: [VoiceChatParticipantItem.RevealOption] = []
return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, presence: peerEntry.presence, text: text, icon: icon, enabled: true, selectable: peer.id != context.account.peerId, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, revealOptions: revealOptions, revealed: peerEntry.revealed, setPeerIdWithRevealedOptions: { peerId, fromPeerId in
return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: peerEntry.presence, text: text, icon: icon, enabled: true, selectable: peer.id != context.account.peerId, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: {
return interaction.getPeerVideo(peerEntry.ssrc)
}, revealOptions: revealOptions, revealed: peerEntry.revealed, setPeerIdWithRevealedOptions: { peerId, fromPeerId in
interaction.setPeerIdWithRevealedOptions(peerId, fromPeerId)
}, action: {
interaction.openPeer(peer.id)
@@ -535,7 +544,7 @@ public final class VoiceChatController: ViewController {
private let voiceSourcesDisposable = MetaDisposable()
private var requestedVideoSources = Set<UInt32>()
private var videoNodes: [GroupVideoNode] = []
private var videoNodes: [(UInt32, GroupVideoNode)] = []
init(controller: VoiceChatController, sharedContext: SharedAccountContext, call: PresentationGroupCall) {
self.controller = controller
@@ -971,6 +980,16 @@ public final class VoiceChatController: ViewController {
updated.revealedPeerId = peerId
return updated
}
}, getPeerVideo: { [weak self] ssrc in
guard let strongSelf = self else {
return nil
}
for (listSsrc, videoNode) in strongSelf.videoNodes {
if listSsrc == ssrc {
return videoNode
}
}
return nil
})
self.topPanelNode.addSubnode(self.topPanelEdgeNode)
@@ -1272,7 +1291,10 @@ public final class VoiceChatController: ViewController {
guard let strongSelf = self else {
return
}
var validSources = Set<UInt32>()
for source in sources {
validSources.insert(source)
if !strongSelf.requestedVideoSources.contains(source) {
strongSelf.requestedVideoSources.insert(source)
strongSelf.call.makeIncomingVideoView(source: source, completion: { videoView in
@@ -1280,14 +1302,55 @@ public final class VoiceChatController: ViewController {
guard let strongSelf = self, let videoView = videoView else {
return
}
strongSelf.videoNodes.append(GroupVideoNode(videoView: videoView))
strongSelf.videoNodes.append((source, GroupVideoNode(videoView: videoView)))
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
loop: for i in 0 ..< strongSelf.currentEntries.count {
let entry = strongSelf.currentEntries[i]
switch entry {
case let .peer(peerEntry):
if peerEntry.ssrc == source {
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: strongSelf.presentationData, interaction: strongSelf.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
break loop
}
default:
break
}
}
}
}
})
}
}
var updated = false
for i in (0 ..< strongSelf.videoNodes.count).reversed() {
if !validSources.contains(strongSelf.videoNodes[i].0) {
loop: for j in 0 ..< strongSelf.currentEntries.count {
let entry = strongSelf.currentEntries[j]
switch entry {
case let .peer(peerEntry):
if peerEntry.ssrc == strongSelf.videoNodes[i].0 {
strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: strongSelf.presentationData, interaction: strongSelf.itemInteraction!), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
break loop
}
default:
break
}
}
//strongSelf.videoNodes[i].1.removeFromSupernode()
strongSelf.videoNodes.remove(at: i)
updated = true
}
}
if updated {
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
}
}
}))
}
@@ -1891,9 +1954,9 @@ public final class VoiceChatController: ViewController {
self.updateButtons(transition: transition)
var currentVideoOrigin = CGPoint(x: 4.0, y: (layout.statusBarHeight ?? 0.0) + 4.0)
for videoNode in self.videoNodes {
let videoSize = CGSize(width: 200.0, height: 200.0)
/*var currentVideoOrigin = CGPoint(x: 4.0, y: (layout.statusBarHeight ?? 0.0) + 4.0)
for (_, videoNode) in self.videoNodes {
let videoSize = CGSize(width: 300.0, height: 500.0)
if currentVideoOrigin.x + videoSize.width > layout.size.width {
currentVideoOrigin.x = 0.0
currentVideoOrigin.y += videoSize.height
@@ -1906,7 +1969,7 @@ public final class VoiceChatController: ViewController {
}
currentVideoOrigin.x += videoSize.width + 4.0
}
}*/
let sideButtonMinimalInset: CGFloat = 16.0
let sideButtonOffset = min(36.0, floor((((size.width - 144.0) / 2.0) - sideButtonSize.width) / 2.0))
@@ -2137,6 +2200,7 @@ public final class VoiceChatController: ViewController {
entries.append(.peer(PeerEntry(
peer: member.peer,
ssrc: member.ssrc,
presence: nil,
activityTimestamp: Int32.max - 1 - index,
state: memberState,
@@ -2149,6 +2213,7 @@ public final class VoiceChatController: ViewController {
if let accountPeer = self.accountPeer, !processedPeerIds.contains(accountPeer.id) {
entries.insert(.peer(PeerEntry(
peer: accountPeer,
ssrc: 0,
presence: nil,
activityTimestamp: Int32.max - 1 - index,
state: .listening,
@@ -2165,6 +2230,7 @@ public final class VoiceChatController: ViewController {
entries.append(.peer(PeerEntry(
peer: peer,
ssrc: 0,
presence: nil,
activityTimestamp: Int32.max - 1 - index,
state: .invited,
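
A simplified, self-contained model of the bookkeeping these hunks add (VideoNode stands in for GroupVideoNode): video nodes are kept as (ssrc, node) pairs, looked up per participant row via getPeerVideo, and pruned when their source drops out of the reported list.

    // Simplified sketch of the controller's SSRC-keyed video bookkeeping.
    final class VideoNode {}

    final class VideoRegistry {
        private var videoNodes: [(UInt32, VideoNode)] = []

        // Mirrors getPeerVideo: linear scan for the node bound to an SSRC.
        func video(for ssrc: UInt32) -> VideoNode? {
            return videoNodes.first(where: { $0.0 == ssrc })?.1
        }

        // Mirrors the new-source path: register a node the first time an SSRC appears.
        func add(_ node: VideoNode, for ssrc: UInt32) {
            videoNodes.append((ssrc, node))
        }

        // Mirrors the validSources pass: drop nodes whose SSRC is no longer live.
        // Returns true if anything was removed, signalling that layout should update.
        func prune(keeping validSources: Set<UInt32>) -> Bool {
            let before = videoNodes.count
            videoNodes.removeAll(where: { !validSources.contains($0.0) })
            return videoNodes.count != before
        }
    }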

View File

@@ -61,30 +61,34 @@ final class VoiceChatParticipantItem: ListViewItem {
let nameDisplayOrder: PresentationPersonNameOrder
let context: AccountContext
let peer: Peer
let ssrc: UInt32?
let presence: PeerPresence?
let text: ParticipantText
let icon: Icon
let enabled: Bool
public let selectable: Bool
let getAudioLevel: (() -> Signal<Float, NoError>)?
let getVideo: () -> GroupVideoNode?
let revealOptions: [RevealOption]
let revealed: Bool?
let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void
let action: (() -> Void)?
let contextAction: ((ASDisplayNode, ContextGesture?) -> Void)?
public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, presence: PeerPresence?, text: ParticipantText, icon: Icon, enabled: Bool, selectable: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
public init(presentationData: ItemListPresentationData, dateTimeFormat: PresentationDateTimeFormat, nameDisplayOrder: PresentationPersonNameOrder, context: AccountContext, peer: Peer, ssrc: UInt32?, presence: PeerPresence?, text: ParticipantText, icon: Icon, enabled: Bool, selectable: Bool, getAudioLevel: (() -> Signal<Float, NoError>)?, getVideo: @escaping () -> GroupVideoNode?, revealOptions: [RevealOption], revealed: Bool?, setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void, action: (() -> Void)?, contextAction: ((ASDisplayNode, ContextGesture?) -> Void)? = nil) {
self.presentationData = presentationData
self.dateTimeFormat = dateTimeFormat
self.nameDisplayOrder = nameDisplayOrder
self.context = context
self.peer = peer
self.ssrc = ssrc
self.presence = presence
self.text = text
self.icon = icon
self.enabled = enabled
self.selectable = selectable
self.getAudioLevel = getAudioLevel
self.getVideo = getVideo
self.revealOptions = revealOptions
self.revealed = revealed
self.setPeerIdWithRevealedOptions = setPeerIdWithRevealedOptions
@@ -171,6 +175,12 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
private var layoutParams: (VoiceChatParticipantItem, ListViewItemLayoutParams, Bool, Bool)?
private var wavesColor: UIColor?
private var videoNode: GroupVideoNode?
var item: VoiceChatParticipantItem? {
return self.layoutParams?.0
}
init() {
self.topStripeNode = ASDisplayNode()
self.topStripeNode.isLayerBacked = true
@@ -659,11 +669,29 @@ class VoiceChatParticipantItemNode: ItemListRevealOptionsItemNode {
})
}
let videoSize = CGSize(width: 38.0, height: 38.0)
let videoNode = item.getVideo()
if let current = strongSelf.videoNode, current !== videoNode {
current.removeFromSupernode()
}
var actionOffset: CGFloat = 0.0
strongSelf.videoNode = videoNode
if let videoNode = videoNode {
videoNode.updateLayout(size: videoSize, transition: .immediate)
if videoNode.supernode !== strongSelf.offsetContainerNode {
strongSelf.offsetContainerNode.addSubnode(videoNode)
}
actionOffset = -videoSize.width - 6.0
videoNode.frame = CGRect(origin: CGPoint(x: params.width - videoSize.width - 6.0 - params.rightInset, y: (layout.contentSize.height - videoSize.height) / 2.0), size: videoSize)
}
let animationSize = CGSize(width: 36.0, height: 36.0)
strongSelf.iconNode?.frame = CGRect(origin: CGPoint(), size: animationSize)
strongSelf.animationNode?.frame = CGRect(origin: CGPoint(), size: animationSize)
strongSelf.actionButtonNode.frame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
strongSelf.actionButtonNode.frame = CGRect(x: params.width - animationSize.width - 6.0 - params.rightInset + actionOffset, y: floor((layout.contentSize.height - animationSize.height) / 2.0) + 1.0, width: animationSize.width, height: animationSize.height)
if let presence = item.presence as? TelegramUserPresence {
strongSelf.peerPresenceManager?.reset(presence: presence)
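
A reduced sketch of the frame arithmetic the item node performs above; the 38x38 thumbnail, the 36x36 action button and the 6pt trailing gap are the constants from this diff, while the function wrapper itself is purely illustrative:

    import CoreGraphics

    // When a 38x38 video thumbnail occupies the trailing edge, the 36x36 action
    // button is shifted left by the thumbnail width plus the 6pt gap.
    func actionButtonFrame(containerWidth: CGFloat, rightInset: CGFloat,
                           contentHeight: CGFloat, hasVideo: Bool) -> CGRect {
        let animationSize = CGSize(width: 36.0, height: 36.0)
        let videoSize = CGSize(width: 38.0, height: 38.0)
        let actionOffset: CGFloat = hasVideo ? -videoSize.width - 6.0 : 0.0
        return CGRect(x: containerWidth - animationSize.width - 6.0 - rightInset + actionOffset,
                      y: floor((contentHeight - animationSize.height) / 2.0) + 1.0,
                      width: animationSize.width, height: animationSize.height)
    }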

View File

@@ -145,6 +145,10 @@ public final class OngoingGroupCallContext {
self.context.setVolumeForSsrc(ssrc, volume: volume)
}
func setFullSizeVideoSsrc(ssrc: UInt32?) {
self.context.setFullSizeVideoSsrc(ssrc ?? 0)
}
func addParticipants(participants: [(UInt32, String?)]) {
if participants.isEmpty {
return
@@ -321,6 +325,12 @@ public final class OngoingGroupCallContext {
}
}
public func setFullSizeVideoSsrc(ssrc: UInt32?) {
self.impl.with { impl in
impl.setFullSizeVideoSsrc(ssrc: ssrc)
}
}
public func addParticipants(participants: [(UInt32, String?)]) {
self.impl.with { impl in
impl.addParticipants(participants: participants)
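
The public Swift API keeps the SSRC optional and only collapses nil at the Objective-C boundary, whose method takes a non-optional uint32_t (0 presumably standing for "no full-size video"). A stand-in sketch of that boundary, with NativeInstance as a placeholder for the real bridge type:

    // Placeholder for the Objective-C bridge, which exposes a non-optional SSRC.
    protocol NativeInstance {
        func setFullSizeVideoSsrc(_ ssrc: UInt32)
    }

    // The optional is collapsed to 0 only at the last step, mirroring `ssrc ?? 0` above.
    func forwardFullSizeVideo(_ ssrc: UInt32?, to instance: NativeInstance) {
        instance.setFullSizeVideoSsrc(ssrc ?? 0)
    }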

View File

@@ -179,6 +179,7 @@ typedef NS_ENUM(int32_t, GroupCallNetworkState) {
- (void)setIsMuted:(bool)isMuted;
- (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume;
- (void)setFullSizeVideoSsrc:(uint32_t)ssrc;
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
- (void)switchAudioInput:(NSString * _Nonnull)deviceId;

View File

@@ -1301,6 +1301,12 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)setFullSizeVideoSsrc:(uint32_t)ssrc {
if (_instance) {
_instance->setFullSizeVideoSsrc(ssrc);
}
}
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId {
if (_instance) {
_instance->setAudioOutputDevice(deviceId.UTF8String);