Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit 5bd76f38ba: Refactor video calls
Parent: 5b1f40820b
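Overview: this commit switches incoming group-call video from UInt32 SSRC addressing to string endpoint IDs parsed from each participant's JSON video/presentation description, renames the call's OngoingGroupCallContext from callContext to genericCallContext, and adds a separate screencast context joined through phone.joinGroupCallPresentation and left through phone.leaveGroupCallPresentation. The snippet below is a minimal sketch of the endpoint-ID extraction the new code relies on; the helper name and the sample payload value are illustrative assumptions, while the actual implementation in the commit is the GroupCallParticipantsContext.Participant extension added in the @@ -54,6 +54,26 hunk.

import Foundation

// Sketch: pull the "endpoint" string out of a participant's JSON description.
// Assumes the payload is a JSON object with an "endpoint" field, as the diff's
// videoEndpointId/presentationEndpointId accessors do; returns nil otherwise.
func endpointId(fromJsonDescription jsonDescription: String?) -> String? {
    guard
        let jsonDescription = jsonDescription,
        let data = jsonDescription.data(using: .utf8),
        let json = try? JSONSerialization.jsonObject(with: data, options: []) as? [String: Any],
        let endpoint = json["endpoint"] as? String
    else {
        return nil
    }
    return endpoint
}

// Usage: a screencast (presentation) endpoint, when present, takes precedence over the
// camera endpoint, mirroring the `presentationEndpointId ?? videoEndpointId` fallback in the diff.
let presentationJson: String? = nil
let videoJson: String? = "{\"endpoint\": \"camera-endpoint-1\"}" // hypothetical sample value
let effectiveVideoEndpointId = endpointId(fromJsonDescription: presentationJson) ?? endpointId(fromJsonDescription: videoJson)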
@@ -314,7 +314,8 @@ public protocol PresentationGroupCall: class {
     var internalId: CallSessionInternalId { get }
     var peerId: PeerId { get }

-    var isVideo: Bool { get }
+    var hasVideo: Bool { get }
+    var hasScreencast: Bool { get }

     var schedulePending: Bool { get }

@@ -347,10 +348,12 @@ public protocol PresentationGroupCall: class {
     func lowerHand()
     func requestVideo()
     func disableVideo()
+    func requestScreencast()
+    func disableScreencast()
     func switchVideoCamera()
     func updateDefaultParticipantsAreMuted(isMuted: Bool)
     func setVolume(peerId: PeerId, volume: Int32, sync: Bool)
-    func setFullSizeVideo(peerId: PeerId?)
+    func setFullSizeVideo(endpointId: String?)
     func setCurrentAudioOutput(_ output: AudioSessionOutput)

     func playTone(_ tone: PresentationGroupCallTone)
@@ -366,9 +369,9 @@ public protocol PresentationGroupCall: class {

     var inviteLinks: Signal<GroupCallInviteLinks?, NoError> { get }

-    var incomingVideoSources: Signal<[PeerId: UInt32], NoError> { get }
+    var incomingVideoSources: Signal<Set<String>, NoError> { get }

-    func makeIncomingVideoView(source: UInt32, completion: @escaping (PresentationCallVideoView?) -> Void)
+    func makeIncomingVideoView(endpointId: String, completion: @escaping (PresentationCallVideoView?) -> Void)

     func loadMoreMembers(token: String)
 }
@@ -54,6 +54,26 @@ private extension GroupCallParticipantsContext.Participant {
     }
 }

+extension GroupCallParticipantsContext.Participant {
+    var videoEndpointId: String? {
+        if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
+            if let endpoint = json["endpoint"] as? String {
+                return endpoint
+            }
+        }
+        return nil
+    }
+
+    var presentationEndpointId: String? {
+        if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
+            if let endpoint = json["endpoint"] as? String {
+                return endpoint
+            }
+        }
+        return nil
+    }
+}
+
 public final class AccountGroupCallContextImpl: AccountGroupCallContext {
     public final class Proxy {
         public let context: AccountGroupCallContextImpl
@@ -377,7 +397,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     private var ignorePreviousJoinAsPeerId: (PeerId, UInt32)?
     private var reconnectingAsPeer: Peer?

-    public private(set) var isVideo: Bool
+    public private(set) var hasVideo: Bool
+    public private(set) var hasScreencast: Bool
     private let isVideoEnabled: Bool

     private var temporaryJoinTimestamp: Int32
@@ -391,8 +412,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     private let internalStatePromise = Promise<InternalState>(.requesting)
     private var currentLocalSsrc: UInt32?

-    private var callContext: OngoingGroupCallContext?
+    private var genericCallContext: OngoingGroupCallContext?
     private var currentConnectionMode: OngoingGroupCallContext.ConnectionMode = .none
+    private var screencastCallContext: OngoingGroupCallContext?
     private var ssrcMapping: [UInt32: PeerId] = [:]

     private var requestedSsrcs = Set<UInt32>()
@@ -526,6 +548,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }

     private let joinDisposable = MetaDisposable()
+    private let screencastJoinDisposable = MetaDisposable()
     private let requestDisposable = MetaDisposable()
     private let startDisposable = MetaDisposable()
     private let subscribeDisposable = MetaDisposable()
@@ -560,8 +583,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

     private var videoCapturer: OngoingCallVideoCapturer?
     private var useFrontCamera: Bool = true
-    private let incomingVideoSourcePromise = Promise<[PeerId: UInt32]>([:])
-    public var incomingVideoSources: Signal<[PeerId: UInt32], NoError> {
+
+    private var screenCapturer: OngoingCallVideoCapturer?
+
+    private let incomingVideoSourcePromise = Promise<Set<String>>(Set())
+    public var incomingVideoSources: Signal<Set<String>, NoError> {
         return self.incomingVideoSourcePromise.get()
     }

@@ -606,7 +632,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.temporaryJoinTimestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970)

         self.isVideoEnabled = accountContext.sharedContext.immediateExperimentalUISettings.demoVideoChats
-        self.isVideo = self.videoCapturer != nil
+        self.hasVideo = false
+        self.hasScreencast = false

         var didReceiveAudioOutputs = false

@@ -761,7 +788,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     }
                 }
                 if !removedSsrc.isEmpty {
-                    strongSelf.callContext?.removeSsrcs(ssrcs: removedSsrc)
+                    strongSelf.genericCallContext?.removeSsrcs(ssrcs: removedSsrc)
                 }
             }
         })
@@ -842,6 +869,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.summaryStateDisposable?.dispose()
         self.audioSessionDisposable?.dispose()
         self.joinDisposable.dispose()
+        self.screencastJoinDisposable.dispose()
         self.requestDisposable.dispose()
         self.startDisposable.dispose()
         self.subscribeDisposable.dispose()
@@ -1291,9 +1319,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         }

         if shouldJoin, let callInfo = activeCallInfo {
-            let callContext: OngoingGroupCallContext
-            if let current = self.callContext {
-                callContext = current
+            let genericCallContext: OngoingGroupCallContext
+            if let current = self.genericCallContext {
+                genericCallContext = current
             } else {
                 var outgoingAudioBitrateKbit: Int32?
                 let appConfiguration = self.accountContext.currentAppConfiguration.with({ $0 })
@@ -1303,7 +1331,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

                 let enableNoiseSuppression = accountContext.sharedContext.immediateExperimentalUISettings.enableNoiseSuppression

-                callContext = OngoingGroupCallContext(video: self.videoCapturer, participantDescriptionsRequired: { [weak self] ssrcs in
+                genericCallContext = OngoingGroupCallContext(video: self.videoCapturer, participantDescriptionsRequired: { [weak self] ssrcs in
                     Queue.mainQueue().async {
                         guard let strongSelf = self else {
                             return
@@ -1320,23 +1348,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                         }
                     }
                 }, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: self.isVideoEnabled ? .generic : .none, enableNoiseSuppression: enableNoiseSuppression)
-                self.incomingVideoSourcePromise.set(callContext.videoSources
+                self.incomingVideoSourcePromise.set(genericCallContext.videoSources
                 |> deliverOnMainQueue
-                |> map { [weak self] sources -> [PeerId: UInt32] in
-                    guard let strongSelf = self else {
-                        return [:]
-                    }
-                    var result: [PeerId: UInt32] = [:]
-                    for source in sources {
-                        if let peerId = strongSelf.ssrcMapping[source] {
-                            result[peerId] = source
-                        }
-                    }
-                    return result
+                |> map { sources -> Set<String> in
+                    return Set(sources)
                 })
-                self.callContext = callContext
+                self.genericCallContext = genericCallContext
             }
-            self.joinDisposable.set((callContext.joinPayload
+            self.joinDisposable.set((genericCallContext.joinPayload
             |> distinctUntilChanged(isEqual: { lhs, rhs in
                 if lhs.0 != rhs.0 {
                     return false
@@ -1414,11 +1433,11 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                     switch joinCallResult.connectionMode {
                     case .rtc:
                         strongSelf.currentConnectionMode = .rtc
-                        strongSelf.callContext?.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false)
-                        strongSelf.callContext?.setJoinResponse(payload: clientParams)
+                        strongSelf.genericCallContext?.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false)
+                        strongSelf.genericCallContext?.setJoinResponse(payload: clientParams)
                     case .broadcast:
                         strongSelf.currentConnectionMode = .broadcast
-                        strongSelf.callContext?.setConnectionMode(.broadcast, keepBroadcastConnectedIfWasEnabled: false)
+                        strongSelf.genericCallContext?.setConnectionMode(.broadcast, keepBroadcastConnectedIfWasEnabled: false)
                     }

                     strongSelf.updateSessionState(internalState: .established(info: joinCallResult.callInfo, connectionMode: joinCallResult.connectionMode, clientParams: clientParams, localSsrc: ssrc, initialState: joinCallResult.state), audioSessionControl: strongSelf.audioSessionControl)
@@ -1455,7 +1474,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 }))
             }))

-            self.networkStateDisposable.set((callContext.networkState
+            self.networkStateDisposable.set((genericCallContext.networkState
             |> deliverOnMainQueue).start(next: { [weak self] state in
                 guard let strongSelf = self else {
                     return
@@ -1517,7 +1536,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 }
             }))

-            self.isNoiseSuppressionEnabledDisposable.set((callContext.isNoiseSuppressionEnabled
+            self.isNoiseSuppressionEnabledDisposable.set((genericCallContext.isNoiseSuppressionEnabled
             |> deliverOnMainQueue).start(next: { [weak self] value in
                 guard let strongSelf = self else {
                     return
@@ -1525,7 +1544,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 strongSelf.isNoiseSuppressionEnabledPromise.set(value)
             }))

-            self.audioLevelsDisposable.set((callContext.audioLevels
+            self.audioLevelsDisposable.set((genericCallContext.audioLevels
             |> deliverOnMainQueue).start(next: { [weak self] levels in
                 guard let strongSelf = self else {
                     return
@@ -1840,31 +1859,31 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                         switch strongSelf.isMutedValue {
                         case let .muted(isPushToTalkActive):
                             if !isPushToTalkActive {
-                                strongSelf.callContext?.setIsMuted(true)
+                                strongSelf.genericCallContext?.setIsMuted(true)
                             }
                         case .unmuted:
                             strongSelf.isMutedValue = .muted(isPushToTalkActive: false)
-                            strongSelf.callContext?.setIsMuted(true)
+                            strongSelf.genericCallContext?.setIsMuted(true)
                         }
                     } else {
                         strongSelf.isMutedValue = .muted(isPushToTalkActive: false)
-                        strongSelf.callContext?.setIsMuted(true)
+                        strongSelf.genericCallContext?.setIsMuted(true)
                     }
                     strongSelf.stateValue.muteState = muteState
                 } else if let currentMuteState = strongSelf.stateValue.muteState, !currentMuteState.canUnmute {
                     strongSelf.isMutedValue = .muted(isPushToTalkActive: false)
                     strongSelf.stateValue.muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: true, mutedByYou: false)
-                    strongSelf.callContext?.setIsMuted(true)
+                    strongSelf.genericCallContext?.setIsMuted(true)
                 }
             } else {
                 if let ssrc = participant.ssrc {
                     if let volume = participant.volume {
-                        strongSelf.callContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
+                        strongSelf.genericCallContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
                     } else if participant.muteState?.mutedByYou == true {
-                        strongSelf.callContext?.setVolume(ssrc: ssrc, volume: 0.0)
+                        strongSelf.genericCallContext?.setVolume(ssrc: ssrc, volume: 0.0)
                     }
                     if participant.videoJsonDescription == nil {
-                        strongSelf.callContext?.removeIncomingVideoSource(ssrc)
+                        strongSelf.genericCallContext?.removeIncomingVideoSource(ssrc)
                     }
                 }
             }
@@ -1977,7 +1996,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             }

             if !addedParticipants.isEmpty {
-                self.callContext?.addParticipants(participants: addedParticipants)
+                self.genericCallContext?.addParticipants(participants: addedParticipants)
             }

             if !addedMissingSsrcs.isEmpty {
@@ -2016,10 +2035,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
                 if !addedParticipants.isEmpty {
                     for (ssrc, volume, _, _) in addedParticipants {
                         if let volume = volume {
-                            strongSelf.callContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
+                            strongSelf.genericCallContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
                         }
                     }
-                    strongSelf.callContext?.addParticipants(participants: addedParticipants.map { ssrc, _, videoParams, presentationParams in
+                    strongSelf.genericCallContext?.addParticipants(participants: addedParticipants.map { ssrc, _, videoParams, presentationParams in
                         return (ssrc, videoParams, presentationParams)
                     })
                 }
@@ -2094,7 +2113,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         }
         self.markedAsCanBeRemoved = true

-        self.callContext?.stop()
+        self.genericCallContext?.stop()
+        self.screencastCallContext?.stop()
         self._canBeRemoved.set(.single(true))

         if self.didConnectOnce {
@@ -2234,7 +2254,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             isVisuallyMuted = false
             let _ = self.updateMuteState(peerId: self.joinAsPeerId, isMuted: false)
         }
-        self.callContext?.setIsMuted(isEffectivelyMuted)
+        self.genericCallContext?.setIsMuted(isEffectivelyMuted)

         if isVisuallyMuted {
             self.stateValue.muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: true, mutedByYou: false)
@@ -2244,7 +2264,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
     }

     public func setIsNoiseSuppressionEnabled(_ isNoiseSuppressionEnabled: Bool) {
-        self.callContext?.setIsNoiseSuppressionEnabled(isNoiseSuppressionEnabled)
+        self.genericCallContext?.setIsNoiseSuppressionEnabled(isNoiseSuppressionEnabled)
     }

     public func toggleScheduledSubscription(_ subscribe: Bool) {
@@ -2344,19 +2364,19 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             let videoCapturer = OngoingCallVideoCapturer()
             self.videoCapturer = videoCapturer
         }
-        self.isVideo = true
+        self.hasVideo = true
         if let videoCapturer = self.videoCapturer {
-            self.callContext?.requestVideo(videoCapturer)
+            self.genericCallContext?.requestVideo(videoCapturer)

             self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: false)
         }
     }

     public func disableVideo() {
-        self.isVideo = false
+        self.hasVideo = false
         if let _ = self.videoCapturer {
             self.videoCapturer = nil
-            self.callContext?.disableVideo()
+            self.genericCallContext?.disableVideo()

             self.participantsContext?.updateVideoState(peerId: self.joinAsPeerId, isVideoMuted: true)
         }
@@ -2366,11 +2386,100 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.useFrontCamera = !self.useFrontCamera
         self.videoCapturer?.switchVideoInput(isFront: self.useFrontCamera)
     }

+    public func requestScreencast() {
+        if self.screencastCallContext != nil {
+            return
+        }
+
+        let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo
+
+        guard let callInfo = maybeCallInfo else {
+            return
+        }
+
+        if self.screenCapturer == nil {
+            let screenCapturer = OngoingCallVideoCapturer()
+            self.screenCapturer = screenCapturer
+        }
+
+        let screencastCallContext = OngoingGroupCallContext(
+            video: self.screenCapturer,
+            participantDescriptionsRequired: { _ in },
+            audioStreamData: nil,
+            rejoinNeeded: {},
+            outgoingAudioBitrateKbit: nil,
+            videoContentType: .screencast,
+            enableNoiseSuppression: false
+        )
+
+        self.screencastCallContext = screencastCallContext
+        self.hasScreencast = true
+
+        self.screencastJoinDisposable.set((screencastCallContext.joinPayload
+        |> distinctUntilChanged(isEqual: { lhs, rhs in
+            if lhs.0 != rhs.0 {
+                return false
+            }
+            if lhs.1 != rhs.1 {
+                return false
+            }
+            return true
+        })
+        |> deliverOnMainQueue).start(next: { [weak self] joinPayload, _ in
+            guard let strongSelf = self else {
+                return
+            }
+
+            strongSelf.requestDisposable.set((joinGroupCallAsScreencast(
+                account: strongSelf.account,
+                peerId: strongSelf.peerId,
+                callId: callInfo.id,
+                accessHash: callInfo.accessHash,
+                joinPayload: joinPayload
+            )
+            |> deliverOnMainQueue).start(next: { joinCallResult in
+                guard let strongSelf = self, let screencastCallContext = strongSelf.screencastCallContext else {
+                    return
+                }
+                let clientParams = joinCallResult.jsonParams
+
+                screencastCallContext.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false)
+                screencastCallContext.setJoinResponse(payload: clientParams)
+            }, error: { error in
+                guard let _ = self else {
+                    return
+                }
+            }))
+        }))
+    }
+
+    public func disableScreencast() {
+        self.hasScreencast = false
+        if let screencastCallContext = self.screencastCallContext {
+            self.screencastCallContext = nil
+            screencastCallContext.stop()
+
+            let maybeCallInfo: GroupCallInfo? = self.internalState.callInfo
+
+            if let callInfo = maybeCallInfo {
+                self.screencastJoinDisposable.set(leaveGroupCallAsScreencast(
+                    account: self.account,
+                    callId: callInfo.id,
+                    accessHash: callInfo.accessHash
+                ).start())
+            }
+        }
+        if let _ = self.screenCapturer {
+            self.screenCapturer = nil
+            self.screencastCallContext?.disableVideo()
+        }
+    }
+
     public func setVolume(peerId: PeerId, volume: Int32, sync: Bool) {
         for (ssrc, id) in self.ssrcMapping {
             if id == peerId {
-                self.callContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
+                self.genericCallContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
                 if sync {
                     self.participantsContext?.updateMuteState(peerId: peerId, muteState: nil, volume: volume, raiseHand: nil)
                 }
@@ -2379,17 +2488,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
             }
         }
     }

-    public func setFullSizeVideo(peerId: PeerId?) {
-        var resolvedSsrc: UInt32?
-        if let peerId = peerId {
-            for (ssrc, id) in self.ssrcMapping {
-                if id == peerId {
-                    resolvedSsrc = ssrc
-                    break
-                }
-            }
-        }
-        self.callContext?.setFullSizeVideoSsrc(ssrc: resolvedSsrc)
+    public func setFullSizeVideo(endpointId: String?) {
+        self.genericCallContext?.setFullSizeVideo(endpointId: endpointId)
     }

     public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
@@ -2510,7 +2610,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {

     private func requestCall(movingFromBroadcastToRtc: Bool) {
         self.currentConnectionMode = .none
-        self.callContext?.setConnectionMode(.none, keepBroadcastConnectedIfWasEnabled: movingFromBroadcastToRtc)
+        self.genericCallContext?.setConnectionMode(.none, keepBroadcastConnectedIfWasEnabled: movingFromBroadcastToRtc)

         self.missingSsrcsDisposable.set(nil)
         self.missingSsrcs.removeAll()
@@ -2689,8 +2789,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
         self.participantsContext?.updateDefaultParticipantsAreMuted(isMuted: isMuted)
     }

-    public func makeIncomingVideoView(source: UInt32, completion: @escaping (PresentationCallVideoView?) -> Void) {
-        self.callContext?.makeIncomingVideoView(source: source, completion: { view in
+    public func makeIncomingVideoView(endpointId: String, completion: @escaping (PresentationCallVideoView?) -> Void) {
+        self.genericCallContext?.makeIncomingVideoView(endpointId: endpointId, completion: { view in
             if let view = view {
                 let setOnFirstFrameReceived = view.setOnFirstFrameReceived
                 let setOnOrientationUpdated = view.setOnOrientationUpdated

@@ -200,7 +200,7 @@ private final class MainVideoContainerNode: ASDisplayNode {
     private let bottomCornersNode: ASImageNode
     private let bottomEdgeNode: ASDisplayNode
     private let fadeNode: ASImageNode
-    private var currentPeer: (PeerId, UInt32)?
+    private var currentPeer: (PeerId, String)?

     private var validLayout: (CGSize, CGFloat, Bool)?

@@ -256,13 +256,13 @@ private final class MainVideoContainerNode: ASDisplayNode {
         self.tapped?()
     }

-    func updatePeer(peer: (peerId: PeerId, source: UInt32)?, waitForFullSize: Bool) {
+    func updatePeer(peer: (peerId: PeerId, endpointId: String)?, waitForFullSize: Bool) {
         if self.currentPeer?.0 == peer?.0 && self.currentPeer?.1 == peer?.1 {
             return
         }
         self.currentPeer = peer
-        if let (_, source) = peer {
-            self.call.makeIncomingVideoView(source: source, completion: { [weak self] videoView in
+        if let (_, endpointId) = peer {
+            self.call.makeIncomingVideoView(endpointId: endpointId, completion: { [weak self] videoView in
                 Queue.mainQueue().async {
                     guard let strongSelf = self, let videoView = videoView else {
                         return
@@ -352,11 +352,11 @@ public final class VoiceChatController: ViewController {

     private final class Interaction {
         let updateIsMuted: (PeerId, Bool) -> Void
-        let pinPeer: (PeerId, UInt32?) -> Void
+        let pinPeer: (PeerId, String?) -> Void
         let openInvite: () -> Void
         let peerContextAction: (PeerEntry, ASDisplayNode, ContextGesture?) -> Void
         let setPeerIdWithRevealedOptions: (PeerId?, PeerId?) -> Void
-        let getPeerVideo: (UInt32, Bool) -> GroupVideoNode?
+        let getPeerVideo: (String, Bool) -> GroupVideoNode?
         var isExpanded: Bool = false

         private var audioLevels: [PeerId: ValuePipe<Float>] = [:]
@@ -365,11 +365,11 @@ public final class VoiceChatController: ViewController {

         init(
             updateIsMuted: @escaping (PeerId, Bool) -> Void,
-            pinPeer: @escaping (PeerId, UInt32?) -> Void,
+            pinPeer: @escaping (PeerId, String?) -> Void,
             openInvite: @escaping () -> Void,
             peerContextAction: @escaping (PeerEntry, ASDisplayNode, ContextGesture?) -> Void,
             setPeerIdWithRevealedOptions: @escaping (PeerId?, PeerId?) -> Void,
-            getPeerVideo: @escaping (UInt32, Bool) -> GroupVideoNode?
+            getPeerVideo: @escaping (String, Bool) -> GroupVideoNode?
         ) {
             self.updateIsMuted = updateIsMuted
             self.pinPeer = pinPeer
@@ -428,6 +428,7 @@ public final class VoiceChatController: ViewController {
         var about: String?
         var isMyPeer: Bool
         var ssrc: UInt32?
+        var effectiveVideoEndpointId: String?
         var presence: TelegramUserPresence?
         var activityTimestamp: Int32
         var state: State
@@ -439,6 +440,42 @@ public final class VoiceChatController: ViewController {
         var displayRaisedHandStatus: Bool
         var pinned: Bool
         var style: VoiceChatParticipantItem.LayoutStyle

+        init(
+            peer: Peer,
+            about: String?,
+            isMyPeer: Bool,
+            ssrc: UInt32?,
+            effectiveVideoEndpointId: String?,
+            presence: TelegramUserPresence?,
+            activityTimestamp: Int32,
+            state: State,
+            muteState: GroupCallParticipantsContext.Participant.MuteState?,
+            revealed: Bool?,
+            canManageCall: Bool,
+            volume: Int32?,
+            raisedHand: Bool,
+            displayRaisedHandStatus: Bool,
+            pinned: Bool,
+            style: VoiceChatParticipantItem.LayoutStyle
+        ) {
+            self.peer = peer
+            self.about = about
+            self.isMyPeer = isMyPeer
+            self.ssrc = ssrc
+            self.effectiveVideoEndpointId = effectiveVideoEndpointId
+            self.presence = presence
+            self.activityTimestamp = activityTimestamp
+            self.state = state
+            self.muteState = muteState
+            self.revealed = revealed
+            self.canManageCall = canManageCall
+            self.volume = volume
+            self.raisedHand = raisedHand
+            self.displayRaisedHandStatus = displayRaisedHandStatus
+            self.pinned = pinned
+            self.style = style
+        }
+
         var stableId: PeerId {
             return self.peer.id
@@ -457,6 +494,9 @@ public final class VoiceChatController: ViewController {
             if lhs.ssrc != rhs.ssrc {
                 return false
             }
+            if lhs.effectiveVideoEndpointId != rhs.effectiveVideoEndpointId {
+                return false
+            }
             if lhs.presence != rhs.presence {
                 return false
             }
@@ -660,8 +700,8 @@ public final class VoiceChatController: ViewController {
             let revealOptions: [VoiceChatParticipantItem.RevealOption] = []

             return VoiceChatParticipantItem(presentationData: ItemListPresentationData(presentationData), dateTimeFormat: presentationData.dateTimeFormat, nameDisplayOrder: presentationData.nameDisplayOrder, context: context, peer: peer, ssrc: peerEntry.ssrc, presence: peerEntry.presence, text: text, expandedText: expandedText, icon: icon, style: peerEntry.style, enabled: true, transparent: transparent, pinned: peerEntry.pinned, selectable: true, getAudioLevel: { return interaction.getAudioLevel(peer.id) }, getVideo: {
-                if let ssrc = peerEntry.ssrc {
-                    return interaction.getPeerVideo(ssrc, peerEntry.style != .list)
+                if let endpointId = peerEntry.effectiveVideoEndpointId {
+                    return interaction.getPeerVideo(endpointId, peerEntry.style != .list)
                 } else {
                     return nil
                 }
@@ -673,8 +713,8 @@ public final class VoiceChatController: ViewController {
                 } else {
                     if peerEntry.pinned {
                         interaction.peerContextAction(peerEntry, node, nil)
-                    } else {
-                        interaction.pinPeer(peer.id, peerEntry.ssrc)
+                    } else if let endpointId = peerEntry.effectiveVideoEndpointId {
+                        interaction.pinPeer(peer.id, endpointId)
                     }
                 }
             }, contextAction: peerEntry.style == .list ? { node, gesture in
@@ -828,12 +868,12 @@ public final class VoiceChatController: ViewController {
         }
         private let displayedRaisedHandsPromise = ValuePromise<Set<PeerId>>(Set())

-        private var requestedVideoSources = Set<UInt32>()
-        private var videoNodes: [(PeerId, UInt32, GroupVideoNode)] = []
+        private var requestedVideoSources = Set<String>()
+        private var videoNodes: [(String, GroupVideoNode)] = []

-        private var currentDominantSpeakerWithVideo: (PeerId, UInt32)?
-        private var currentForcedSpeakerWithVideo: (PeerId, UInt32)?
-        private var effectiveSpeakerWithVideo: (PeerId, UInt32)?
+        private var currentDominantSpeakerWithVideo: (PeerId, String)?
+        private var currentForcedSpeakerWithVideo: (PeerId, String)?
+        private var effectiveSpeakerWithVideo: (PeerId, String)?

         private var updateAvatarDisposable = MetaDisposable()
         private let updateAvatarPromise = Promise<(TelegramMediaImageRepresentation, Float)?>(nil)
@@ -1030,10 +1070,10 @@ public final class VoiceChatController: ViewController {

             self.itemInteraction = Interaction(updateIsMuted: { [weak self] peerId, isMuted in
                 let _ = self?.call.updateMuteState(peerId: peerId, isMuted: isMuted)
-            }, pinPeer: { [weak self] peerId, source in
+            }, pinPeer: { [weak self] peerId, endpointId in
                 if let strongSelf = self {
-                    if peerId != strongSelf.currentForcedSpeakerWithVideo?.0, let source = source {
-                        strongSelf.currentForcedSpeakerWithVideo = (peerId, source)
+                    if peerId != strongSelf.currentForcedSpeakerWithVideo?.0, let endpointId = endpointId {
+                        strongSelf.currentForcedSpeakerWithVideo = (peerId, endpointId)
                     } else {
                         strongSelf.currentForcedSpeakerWithVideo = nil
                     }
@@ -1369,15 +1409,17 @@ public final class VoiceChatController: ViewController {
                     }), true))
                 }

-                for (peerId, _, _) in strongSelf.videoNodes {
-                    if peerId == peer.id {
+                for (endpointId, _) in strongSelf.videoNodes {
+                    if entry.effectiveVideoEndpointId == endpointId {
                         items.append(.action(ContextMenuActionItem(text: strongSelf.currentForcedSpeakerWithVideo?.0 == peer.id ? strongSelf.presentationData.strings.VoiceChat_UnpinVideo : strongSelf.presentationData.strings.VoiceChat_PinVideo, icon: { theme in
                             return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Pin"), color: theme.actionSheet.primaryTextColor)
                         }, action: { _, f in
                             guard let strongSelf = self else {
                                 return
                             }
-                            strongSelf.itemInteraction?.pinPeer(peer.id, entry.ssrc)
+                            if let endpointId = entry.effectiveVideoEndpointId {
+                                strongSelf.itemInteraction?.pinPeer(peer.id, endpointId)
+                            }
                             f(.default)
                         })))
                         break
@@ -1634,7 +1676,7 @@ public final class VoiceChatController: ViewController {
                     updated.revealedPeerId = peerId
                     return updated
                 }
-            }, getPeerVideo: { [weak self] ssrc, tile in
+            }, getPeerVideo: { [weak self] endpointId, tile in
                 guard let strongSelf = self else {
                     return nil
                 }
@@ -1647,8 +1689,8 @@ public final class VoiceChatController: ViewController {
                 if skip {
                     return nil
                 }
-                for (_, listSsrc, videoNode) in strongSelf.videoNodes {
-                    if listSsrc == ssrc {
+                for (listEndpointId, videoNode) in strongSelf.videoNodes {
+                    if listEndpointId == endpointId {
                         return videoNode
                     }
                 }
@@ -1845,12 +1887,12 @@ public final class VoiceChatController: ViewController {
                     }
                 }

-                if let (peerId, source, _) = maxLevelWithVideo {
-                    if strongSelf.currentDominantSpeakerWithVideo?.0 != peerId || strongSelf.currentDominantSpeakerWithVideo?.1 != source {
-                        strongSelf.currentDominantSpeakerWithVideo = (peerId, source)
-                        strongSelf.call.setFullSizeVideo(peerId: peerId)
-                        strongSelf.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: true)
-                    }
+                if let (peerId, endpointId, _) = maxLevelWithVideo {
+                    /*if strongSelf.currentDominantSpeakerWithVideo?.0 != peerId || strongSelf.currentDominantSpeakerWithVideo?.1 != endpointId {
+                        strongSelf.currentDominantSpeakerWithVideo = (peerId, endpointId)
+                        strongSelf.call.setFullSizeVideo(endpointId: endpointId)
+                        strongSelf.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: endpointId), waitForFullSize: true)
+                    }*/
                 }

                 strongSelf.itemInteraction?.updateAudioLevels(levels)
@@ -1931,23 +1973,23 @@ public final class VoiceChatController: ViewController {
             }))

             self.voiceSourcesDisposable.set((self.call.incomingVideoSources
-            |> deliverOnMainQueue).start(next: { [weak self] sources in
+            |> deliverOnMainQueue).start(next: { [weak self] endpointIds in
                 guard let strongSelf = self else {
                     return
                 }
-                var validSources = Set<UInt32>()
-                for (peerId, source) in sources {
-                    validSources.insert(source)
+                var validSources = Set<String>()
+                for endpointId in endpointIds {
+                    validSources.insert(endpointId)

-                    if !strongSelf.requestedVideoSources.contains(source) {
-                        strongSelf.requestedVideoSources.insert(source)
-                        strongSelf.call.makeIncomingVideoView(source: source, completion: { videoView in
+                    if !strongSelf.requestedVideoSources.contains(endpointId) {
+                        strongSelf.requestedVideoSources.insert(endpointId)
+                        strongSelf.call.makeIncomingVideoView(endpointId: endpointId, completion: { videoView in
                             Queue.mainQueue().async {
                                 guard let strongSelf = self, let videoView = videoView else {
                                     return
                                 }
                                 let videoNode = GroupVideoNode(videoView: videoView)
-                                strongSelf.videoNodes.append((peerId, source, videoNode))
+                                strongSelf.videoNodes.append((endpointId, videoNode))

                                 if let (layout, navigationHeight) = strongSelf.validLayout {
                                     strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
@@ -1957,7 +1999,7 @@ public final class VoiceChatController: ViewController {
                                     let tileEntry = strongSelf.currentTileEntries[i]
                                     switch entry {
                                     case let .peer(peerEntry):
-                                        if peerEntry.ssrc == source {
+                                        if peerEntry.effectiveVideoEndpointId == endpointId {
                                             let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme)
                                             strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
                                             strongSelf.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: tileEntry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
@@ -1973,7 +2015,7 @@ public final class VoiceChatController: ViewController {
                     }
                 }

-                var removeRequestedVideoSources: [UInt32] = []
+                var removeRequestedVideoSources: [String] = []
                 for source in strongSelf.requestedVideoSources {
                     if !validSources.contains(source) {
                         removeRequestedVideoSources.append(source)
@@ -1985,8 +2027,8 @@ public final class VoiceChatController: ViewController {

                 var updated = false
                 for i in (0 ..< strongSelf.videoNodes.count).reversed() {
-                    if !validSources.contains(strongSelf.videoNodes[i].1) {
-                        let ssrc = strongSelf.videoNodes[i].1
+                    if !validSources.contains(strongSelf.videoNodes[i].0) {
+                        let endpointId = strongSelf.videoNodes[i].0
                         strongSelf.videoNodes.remove(at: i)

                         loop: for j in 0 ..< strongSelf.currentEntries.count {
@@ -1994,7 +2036,7 @@ public final class VoiceChatController: ViewController {
                             let tileEntry = strongSelf.currentTileEntries[j]
                             switch entry {
                             case let .peer(peerEntry):
-                                if peerEntry.ssrc == ssrc {
+                                if peerEntry.effectiveVideoEndpointId == endpointId {
                                     let presentationData = strongSelf.presentationData.withUpdated(theme: strongSelf.darkTheme)
                                     strongSelf.listNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: entry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
                                     strongSelf.tileListNode.transaction(deleteIndices: [], insertIndicesAndItems: [], updateIndicesAndItems: [ListViewUpdateItem(index: i, previousIndex: i, item: tileEntry.item(context: strongSelf.context, presentationData: presentationData, interaction: strongSelf.itemInteraction!, transparent: false), directionHint: nil)], options: [.Synchronous], updateOpaqueState: nil)
@@ -2008,8 +2050,8 @@ public final class VoiceChatController: ViewController {
                     }
                 }

-                if let (peerId, source) = strongSelf.effectiveSpeakerWithVideo {
-                    if !validSources.contains(source) {
+                if let (peerId, endpointId) = strongSelf.effectiveSpeakerWithVideo {
+                    if !validSources.contains(endpointId) {
                         if peerId == strongSelf.currentForcedSpeakerWithVideo?.0 {
                             strongSelf.currentForcedSpeakerWithVideo = nil
                         }
@@ -3190,9 +3232,16 @@ public final class VoiceChatController: ViewController {
         }

         @objc private func cameraPressed() {
-            if self.call.isVideo {
+            if self.call.hasVideo || self.call.hasScreencast {
                 self.call.disableVideo()
+                self.call.disableScreencast()
             } else {
+                #if DEBUG
+                //self.call.requestScreencast()
+                self.call.requestVideo()
+                return;
+                #endif
+
                 let controller = voiceChatCameraPreviewController(sharedContext: self.context.sharedContext, account: self.context.account, forceTheme: self.darkTheme, title: self.presentationData.strings.VoiceChat_VideoPreviewTitle, text: self.presentationData.strings.VoiceChat_VideoPreviewDescription, apply: { [weak self] in
                     self?.call.requestVideo()
                 })
@@ -4289,10 +4338,12 @@ public final class VoiceChatController: ViewController {
                     about: member.about,
                     isMyPeer: self.callState?.myPeerId == member.peer.id,
                     ssrc: member.ssrc,
+                    effectiveVideoEndpointId: member.presentationEndpointId ?? member.videoEndpointId,
                     presence: nil,
                     activityTimestamp: Int32.max - 1 - index,
                     state: memberState,
                     muteState: memberMuteState,
                     revealed: false,
                     canManageCall: self.callState?.canManageCall ?? false,
                     volume: member.volume,
                     raisedHand: member.hasRaiseHand,
@@ -4307,10 +4358,12 @@ public final class VoiceChatController: ViewController {
                     about: member.about,
                     isMyPeer: self.callState?.myPeerId == member.peer.id,
                     ssrc: member.ssrc,
+                    effectiveVideoEndpointId: member.presentationEndpointId ?? member.videoEndpointId,
                     presence: nil,
                     activityTimestamp: Int32.max - 1 - index,
                     state: memberState,
                     muteState: memberMuteState,
                     revealed: false,
                     canManageCall: self.callState?.canManageCall ?? false,
                     volume: member.volume,
                     raisedHand: member.hasRaiseHand,
@@ -4327,10 +4380,12 @@ public final class VoiceChatController: ViewController {
                     about: member.about,
                     isMyPeer: self.callState?.myPeerId == member.peer.id,
                     ssrc: member.ssrc,
+                    effectiveVideoEndpointId: member.presentationEndpointId ?? member.videoEndpointId,
                     presence: nil,
                     activityTimestamp: Int32.max - 1 - index,
                     state: memberState,
                     muteState: memberMuteState,
                     revealed: false,
                     canManageCall: self.callState?.canManageCall ?? false,
                     volume: member.volume,
                     raisedHand: member.hasRaiseHand,
@@ -4352,10 +4407,12 @@ public final class VoiceChatController: ViewController {
                     about: nil,
                     isMyPeer: false,
                     ssrc: nil,
+                    effectiveVideoEndpointId: nil,
                     presence: nil,
                     activityTimestamp: Int32.max - 1 - index,
                     state: .invited,
                     muteState: nil,
                     revealed: false,
                     canManageCall: false,
                     volume: nil,
                     raisedHand: false,
@@ -4420,10 +4477,10 @@ public final class VoiceChatController: ViewController {
         for entry in self.currentEntries {
             switch entry {
             case let .peer(peer):
-                if peer.peer.id == peerId, let source = peer.ssrc {
-                    self.effectiveSpeakerWithVideo = (peerId, source)
-                    self.call.setFullSizeVideo(peerId: peerId)
-                    self.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, source: source), waitForFullSize: false)
+                if peer.peer.id == peerId, let endpointId = peer.effectiveVideoEndpointId {
+                    self.effectiveSpeakerWithVideo = (peerId, endpointId)
+                    self.call.setFullSizeVideo(endpointId: endpointId)
+                    self.mainVideoContainerNode?.updatePeer(peer: (peerId: peerId, endpointId: endpointId), waitForFullSize: false)
                 }
             default:
                 break

@@ -555,6 +555,7 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal
     if preferMuted {
         flags |= (1 << 0)
     }
+    //flags |= (1 << 2)
     if let _ = inviteHash {
         flags |= (1 << 1)
     }
@@ -778,7 +779,57 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal
             }
         }
     }

 }

+public struct JoinGroupCallAsScreencastResult {
+    public var jsonParams: String
+}
+
+public func joinGroupCallAsScreencast(account: Account, peerId: PeerId, callId: Int64, accessHash: Int64, joinPayload: String) -> Signal<JoinGroupCallAsScreencastResult, JoinGroupCallError> {
+    return account.network.request(Api.functions.phone.joinGroupCallPresentation(call: .inputGroupCall(id: callId, accessHash: accessHash), params: .dataJSON(data: joinPayload)))
+    |> mapError { _ -> JoinGroupCallError in
+        return .generic
+    }
+    |> mapToSignal { updates -> Signal<JoinGroupCallAsScreencastResult, JoinGroupCallError> in
+        account.stateManager.addUpdates(updates)
+
+        var maybeParsedClientParams: String?
+        loop: for update in updates.allUpdates {
+            switch update {
+            case let .updateGroupCallConnection(_, params):
+                switch params {
+                case let .dataJSON(data):
+                    maybeParsedClientParams = data
+                }
+            default:
+                break
+            }
+        }
+
+        guard let parsedClientParams = maybeParsedClientParams else {
+            return .fail(.generic)
+        }
+
+        return .single(JoinGroupCallAsScreencastResult(
+            jsonParams: parsedClientParams
+        ))
+    }
+}
+
+public enum LeaveGroupCallAsScreencastError {
+    case generic
+}
+
+public func leaveGroupCallAsScreencast(account: Account, callId: Int64, accessHash: Int64) -> Signal<Never, LeaveGroupCallAsScreencastError> {
+    return account.network.request(Api.functions.phone.leaveGroupCallPresentation(call: .inputGroupCall(id: callId, accessHash: accessHash)))
+    |> mapError { _ -> LeaveGroupCallAsScreencastError in
+        return .generic
+    }
+    |> mapToSignal { updates -> Signal<Never, LeaveGroupCallAsScreencastError> in
+        account.stateManager.addUpdates(updates)
+
+        return .complete()
+    }
+}
+
 public enum LeaveGroupCallError {

@@ -182,7 +182,7 @@ public final class OngoingGroupCallContext {
         let isNoiseSuppressionEnabled = ValuePromise<Bool>(true, ignoreRepeated: true)
         let audioLevels = ValuePipe<[(AudioLevelKey, Float, Bool)]>()

-        let videoSources = ValuePromise<Set<UInt32>>(Set(), ignoreRepeated: true)
+        let videoSources = ValuePromise<Set<String>>(Set(), ignoreRepeated: true)

         private var broadcastPartsSource: BroadcastPartSource?

@@ -221,8 +221,8 @@ public final class OngoingGroupCallContext {
                 inputDeviceId: inputDeviceId,
                 outputDeviceId: outputDeviceId,
                 videoCapturer: video?.impl,
-                incomingVideoSourcesUpdated: { ssrcs in
-                    videoSources.set(Set(ssrcs.map { $0.uint32Value }))
+                incomingVideoSourcesUpdated: { endpointIds in
+                    videoSources.set(Set(endpointIds))
                 },
                 participantDescriptionsRequired: { ssrcs in
                     participantDescriptionsRequired(Set(ssrcs.map { $0.uint32Value }))
@@ -311,8 +311,8 @@ public final class OngoingGroupCallContext {
             self.context.setVolumeForSsrc(ssrc, volume: volume)
         }

-        func setFullSizeVideoSsrc(ssrc: UInt32?) {
-            self.context.setFullSizeVideoSsrc(ssrc ?? 0)
+        func setFullSizeVideo(endpointId: String?) {
+            self.context.setFullSizeVideoEndpointId(endpointId)
         }

         func addParticipants(participants: [(UInt32, String?, String?)]) {
@@ -397,8 +397,8 @@ public final class OngoingGroupCallContext {
             self.context.switchAudioOutput(deviceId)
         }

-        func makeIncomingVideoView(source: UInt32, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
-            self.context.makeIncomingVideoView(withSsrc: source, completion: { view in
+        func makeIncomingVideoView(endpointId: String, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
+            self.context.makeIncomingVideoView(withEndpointId: endpointId, completion: { view in
                 if let view = view {
                     #if os(iOS)
                     completion(OngoingCallContextPresentationCallVideoView(
@@ -535,7 +535,7 @@ public final class OngoingGroupCallContext {
         }
     }

-    public var videoSources: Signal<Set<UInt32>, NoError> {
+    public var videoSources: Signal<Set<String>, NoError> {
         return Signal { subscriber in
             let disposable = MetaDisposable()
             self.impl.with { impl in
@@ -624,9 +624,9 @@ public final class OngoingGroupCallContext {
         }
     }

-    public func setFullSizeVideoSsrc(ssrc: UInt32?) {
+    public func setFullSizeVideo(endpointId: String?) {
         self.impl.with { impl in
-            impl.setFullSizeVideoSsrc(ssrc: ssrc)
+            impl.setFullSizeVideo(endpointId: endpointId)
         }
     }

@@ -642,9 +642,9 @@ public final class OngoingGroupCallContext {
         }
     }

-    public func makeIncomingVideoView(source: UInt32, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
+    public func makeIncomingVideoView(endpointId: String, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
         self.impl.with { impl in
-            impl.makeIncomingVideoView(source: source, completion: completion)
+            impl.makeIncomingVideoView(endpointId: endpointId, completion: completion)
         }
     }
 }

@@ -203,7 +203,7 @@ typedef NS_ENUM(int32_t, OngoingGroupCallVideoContentType) {

 @interface GroupCallThreadLocalContext : NSObject

-- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression;
+- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSString *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression;

 - (void)stop;

@@ -220,11 +220,11 @@ typedef NS_ENUM(int32_t, OngoingGroupCallVideoContentType) {
 - (void)disableVideo:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;

 - (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume;
-- (void)setFullSizeVideoSsrc:(uint32_t)ssrc;
+- (void)setFullSizeVideoEndpointId:(NSString * _Nullable)endpointId;

 - (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
 - (void)switchAudioInput:(NSString * _Nonnull)deviceId;
-- (void)makeIncomingVideoViewWithSsrc:(uint32_t)ssrc completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
+- (void)makeIncomingVideoViewWithEndpointId:(NSString *)endpointId completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;

 @end

@@ -859,7 +859,7 @@ private:

 @implementation GroupCallThreadLocalContext

-- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression {
+- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSString *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression {
     self = [super init];
     if (self != nil) {
         _queue = queue;
@@ -918,10 +918,10 @@ private:
             .initialInputDeviceId = inputDeviceId.UTF8String,
             .initialOutputDeviceId = outputDeviceId.UTF8String,
             .videoCapture = [_videoCapturer getInterface],
-            .incomingVideoSourcesUpdated = [incomingVideoSourcesUpdated](std::vector<uint32_t> const &ssrcs) {
-                NSMutableArray<NSNumber *> *mappedSources = [[NSMutableArray alloc] init];
-                for (auto it : ssrcs) {
-                    [mappedSources addObject:@(it)];
+            .incomingVideoSourcesUpdated = [incomingVideoSourcesUpdated](std::vector<std::string> const &sources) {
+                NSMutableArray<NSString *> *mappedSources = [[NSMutableArray alloc] init];
+                for (auto it : sources) {
+                    [mappedSources addObject:[NSString stringWithUTF8String:it.c_str()]];
                 }
                 incomingVideoSourcesUpdated(mappedSources);
             },
@@ -1088,9 +1088,9 @@ private:
     }
 }

-- (void)setFullSizeVideoSsrc:(uint32_t)ssrc {
+- (void)setFullSizeVideoEndpointId:(NSString * _Nullable)endpointId {
     if (_instance) {
-        _instance->setFullSizeVideoSsrc(ssrc);
+        _instance->setFullSizeVideoEndpointId(endpointId.UTF8String ?: "");
     }
 }

@@ -1105,7 +1105,7 @@ private:
     }
 }

-- (void)makeIncomingVideoViewWithSsrc:(uint32_t)ssrc completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
+- (void)makeIncomingVideoViewWithEndpointId:(NSString *)endpointId completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
     if (_instance) {
         __weak GroupCallThreadLocalContext *weakSelf = self;
         id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
@@ -1123,7 +1123,7 @@ private:
             [queue dispatch:^{
                 __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                 if (strongSelf && strongSelf->_instance) {
-                    strongSelf->_instance->addIncomingVideoOutput(ssrc, sink);
+                    strongSelf->_instance->addIncomingVideoOutput(endpointId.UTF8String, sink);
                 }
             }];

@@ -1136,7 +1136,7 @@ private:
             [queue dispatch:^{
                 __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                 if (strongSelf && strongSelf->_instance) {
-                    strongSelf->_instance->addIncomingVideoOutput(ssrc, sink);
+                    strongSelf->_instance->addIncomingVideoOutput(endpointId.UTF8String, sink);
                 }
             }];

@@ -1 +1 @@
-Subproject commit 00f592fd569246dca9bd10822b7d6291c2ce53e5
+Subproject commit 2bc860b5ea34048a2ccaa3b9be5e5a8a2585a063