Ali 2021-01-08 15:35:46 +04:00
parent 58f7578a5a
commit 4f4dbab3a6
8 changed files with 315 additions and 118 deletions

View File

@@ -280,6 +280,8 @@ public protocol PresentationGroupCall: class {
var internalId: CallSessionInternalId { get }
var peerId: PeerId { get }
var isVideo: Bool { get }
var audioOutputState: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> { get }
var canBeRemoved: Signal<Bool, NoError> { get }
@@ -296,6 +298,8 @@ public protocol PresentationGroupCall: class {
func toggleIsMuted()
func setIsMuted(action: PresentationGroupCallMuteAction)
func requestVideo()
func disableVideo()
func updateDefaultParticipantsAreMuted(isMuted: Bool)
func setVolume(peerId: PeerId, volume: Double)
func setFullSizeVideo(peerId: PeerId?)
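The protocol now pairs an isVideo flag with requestVideo()/disableVideo() actions. A minimal call-site sketch (hypothetical helper; it assumes an object conforming to PresentationGroupCall is already available, mirroring the camera button handler added further down in this commit):

func toggleCamera(for call: PresentationGroupCall) {
    if call.isVideo {
        call.disableVideo()
    } else {
        call.requestVideo()
    }
}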

View File

@@ -312,6 +312,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
public let peerId: PeerId
public let peer: Peer?
public private(set) var isVideo: Bool
private let temporaryJoinTimestamp: Int32
private var internalState: InternalState = .requesting
@@ -319,6 +321,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var callContext: OngoingGroupCallContext?
private var ssrcMapping: [UInt32: PeerId] = [:]
private var requestedSsrcs = Set<UInt32>()
private var summaryInfoState = Promise<SummaryInfoState?>(nil)
private var summaryParticipantsState = Promise<SummaryParticipantsState?>(nil)
@@ -433,6 +437,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
private let memberEventsPipeDisposable = MetaDisposable()
private let joinDisposable = MetaDisposable()
private let requestDisposable = MetaDisposable()
private var groupCallParticipantUpdatesDisposable: Disposable?
@@ -461,6 +466,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return self.incomingVideoSourcePromise.get()
}
private var missingSsrcs = Set<UInt32>()
private let missingSsrcsDisposable = MetaDisposable()
private var isRequestingMissingSsrcs: Bool = false
init(
accountContext: AccountContext,
audioSession: ManagedAudioSession,
@@ -484,7 +493,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.temporaryJoinTimestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970)
self.videoCapturer = OngoingCallVideoCapturer()
self.isVideo = false
var didReceiveAudioOutputs = false
@@ -623,9 +632,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if !removedSsrc.isEmpty {
strongSelf.callContext?.removeSsrcs(ssrcs: removedSsrc)
}
if !addedParticipants.isEmpty {
strongSelf.callContext?.addParticipants(participants: addedParticipants)
}
}
})
@@ -745,6 +751,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.audioSessionActiveDisposable?.dispose()
self.summaryStateDisposable?.dispose()
self.audioSessionDisposable?.dispose()
self.joinDisposable.dispose()
self.requestDisposable.dispose()
self.groupCallParticipantUpdatesDisposable?.dispose()
self.leaveDisposable.dispose()
@@ -756,6 +763,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.participantsContextStateDisposable.dispose()
self.myAudioLevelDisposable.dispose()
self.memberEventsPipeDisposable.dispose()
self.missingSsrcsDisposable.dispose()
self.myAudioLevelTimer?.invalidate()
self.typingDisposable.dispose()
@@ -802,7 +810,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
break
default:
if case let .active(callInfo) = internalState {
let callContext = OngoingGroupCallContext(video: self.videoCapturer)
let callContext = OngoingGroupCallContext(video: self.videoCapturer, participantDescriptionsRequired: { [weak self] ssrcs in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
strongSelf.maybeRequestParticipants(ssrcs: ssrcs)
}
})
self.incomingVideoSourcePromise.set(callContext.videoSources
|> deliverOnMainQueue
|> map { [weak self] sources -> [PeerId: UInt32] in
@@ -818,8 +833,16 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return result
})
self.callContext = callContext
self.requestDisposable.set((callContext.joinPayload
self.joinDisposable.set((callContext.joinPayload
|> take(1)
|> distinctUntilChanged(isEqual: { lhs, rhs in
if lhs.0 != rhs.0 {
return false
}
if lhs.1 != rhs.1 {
return false
}
return true
})
|> deliverOnMainQueue).start(next: { [weak self] joinPayload, ssrc in
guard let strongSelf = self else {
return
@@ -963,14 +986,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
self.ssrcMapping.removeAll()
var addedParticipants: [(UInt32, String?)] = []
for participant in initialState.participants {
self.ssrcMapping[participant.ssrc] = participant.peer.id
if participant.peer.id != self.accountContext.account.peerId {
addedParticipants.append((participant.ssrc, participant.jsonParams))
}
}
self.callContext?.setJoinResponse(payload: clientParams, participants: addedParticipants)
self.callContext?.setJoinResponse(payload: clientParams, participants: [])
let accountContext = self.accountContext
let peerId = self.peerId
@@ -1135,6 +1154,65 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
private func maybeRequestParticipants(ssrcs: Set<UInt32>) {
var missingSsrcs = ssrcs
var addedParticipants: [(UInt32, String?)] = []
if let membersValue = self.membersValue {
for participant in membersValue.participants {
if missingSsrcs.contains(participant.ssrc) {
missingSsrcs.remove(participant.ssrc)
addedParticipants.append((participant.ssrc, participant.jsonParams))
}
}
}
if !addedParticipants.isEmpty {
self.callContext?.addParticipants(participants: addedParticipants)
}
if !missingSsrcs.isEmpty {
self.missingSsrcs.formUnion(missingSsrcs)
self.maybeRequestMissingSsrcs()
}
}
private func maybeRequestMissingSsrcs() {
if self.isRequestingMissingSsrcs {
return
}
if self.missingSsrcs.isEmpty {
return
}
if case let .estabilished(callInfo, _, ssrc, _) = self.internalState {
self.isRequestingMissingSsrcs = true
let requestedSsrcs = self.missingSsrcs
self.missingSsrcsDisposable.set((getGroupCallParticipants(account: self.account, callId: callInfo.id, accessHash: callInfo.accessHash, ssrcs: Array(requestedSsrcs), offset: "", limit: 100)
|> deliverOnMainQueue).start(next: { [weak self] state in
guard let strongSelf = self else {
return
}
strongSelf.isRequestingMissingSsrcs = false
strongSelf.missingSsrcs.subtract(requestedSsrcs)
var addedParticipants: [(UInt32, String?)] = []
for participant in state.participants {
addedParticipants.append((participant.ssrc, participant.jsonParams))
}
if !addedParticipants.isEmpty {
strongSelf.callContext?.addParticipants(participants: addedParticipants)
}
strongSelf.maybeRequestMissingSsrcs()
}))
}
}
private func startCheckingCallIfNeeded() {
if self.checkCallDisposable != nil {
return
@@ -1271,6 +1349,25 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
public func requestVideo() {
if self.videoCapturer == nil {
let videoCapturer = OngoingCallVideoCapturer()
self.videoCapturer = videoCapturer
}
self.isVideo = true
if let videoCapturer = self.videoCapturer {
self.callContext?.requestVideo(videoCapturer)
}
}
public func disableVideo() {
self.isVideo = false
if let _ = self.videoCapturer {
self.videoCapturer = nil
self.callContext?.disableVideo()
}
}
public func setVolume(peerId: PeerId, volume: Double) {
for (ssrc, id) in self.ssrcMapping {
if id == peerId {
@@ -1422,6 +1519,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
self.joinDisposable.set(nil)
self.requestDisposable.set((currentOrRequestedCall
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let strongSelf = self else {
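The join payload subscription above switches from take(1) to distinctUntilChanged(isEqual:), which suggests the payload is now a long-lived stream: requestVideo()/disableVideo() re-emit a (payload, ssrc) pair from OngoingGroupCallContext, and a rejoin should only be triggered when the emitted value actually changes. A condensed sketch of that deduplication (standalone illustration; callContext is assumed to be an OngoingGroupCallContext):

let dedupedJoinPayload = callContext.joinPayload
|> distinctUntilChanged(isEqual: { lhs, rhs in
    // A (payload, ssrc) tuple is considered unchanged only if both components match.
    return lhs.0 == rhs.0 && lhs.1 == rhs.1
})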

View File

@@ -567,6 +567,7 @@ public final class VoiceChatController: ViewController {
private let bottomPanelBackgroundNode: ASDisplayNode
private let bottomCornersNode: ASImageNode
fileprivate let audioOutputNode: CallControllerButtonItemNode
fileprivate let cameraButtonNode: CallControllerButtonItemNode
fileprivate let leaveNode: CallControllerButtonItemNode
fileprivate let actionButton: VoiceChatActionButton
private let leftBorderNode: ASDisplayNode
@@ -699,6 +700,7 @@ public final class VoiceChatController: ViewController {
self.bottomCornersNode.image = cornersImage(top: false, bottom: true, dark: false)
self.audioOutputNode = CallControllerButtonItemNode()
self.cameraButtonNode = CallControllerButtonItemNode()
self.leaveNode = CallControllerButtonItemNode()
self.actionButton = VoiceChatActionButton()
@@ -1094,6 +1096,7 @@ public final class VoiceChatController: ViewController {
self.bottomPanelNode.addSubnode(self.bottomCornersNode)
self.bottomPanelNode.addSubnode(self.bottomPanelBackgroundNode)
self.bottomPanelNode.addSubnode(self.audioOutputNode)
self.bottomPanelNode.addSubnode(self.cameraButtonNode)
self.bottomPanelNode.addSubnode(self.leaveNode)
self.bottomPanelNode.addSubnode(self.actionButton)
@@ -1293,6 +1296,8 @@ public final class VoiceChatController: ViewController {
self.audioOutputNode.addTarget(self, action: #selector(self.audioOutputPressed), forControlEvents: .touchUpInside)
self.cameraButtonNode.addTarget(self, action: #selector(self.cameraPressed), forControlEvents: .touchUpInside)
self.optionsButton.contextAction = { [weak self, weak optionsButton] sourceNode, gesture in
guard let strongSelf = self, let controller = strongSelf.controller, let strongOptionsButton = optionsButton else {
return
@@ -1705,6 +1710,14 @@ public final class VoiceChatController: ViewController {
}
}
@objc private func cameraPressed() {
if self.call.isVideo {
self.call.disableVideo()
} else {
self.call.requestVideo()
}
}
private func updateFloatingHeaderOffset(offset: CGFloat, transition: ContainedViewLayoutTransition, completion: (() -> Void)? = nil) {
guard let (layout, _) = self.validLayout else {
return
@@ -1960,6 +1973,10 @@ public final class VoiceChatController: ViewController {
let sideButtonSize = CGSize(width: 60.0, height: 60.0)
self.audioOutputNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: soundAppearance, image: soundImage), text: soundTitle, transition: .animated(duration: 0.3, curve: .linear))
let cameraButtonSize = CGSize(width: 40.0, height: 40.0)
self.cameraButtonNode.update(size: cameraButtonSize, content: CallControllerButtonItemNode.Content(appearance: CallControllerButtonItemNode.Content.Appearance.blurred(isFilled: false), image: .camera), text: " ", transition: .animated(duration: 0.3, curve: .linear))
self.leaveNode.update(size: sideButtonSize, content: CallControllerButtonItemNode.Content(appearance: .color(.custom(0xff3b30, 0.3)), image: .end), text: self.presentationData.strings.VoiceChat_Leave, transition: .immediate)
}
@@ -2039,6 +2056,7 @@ public final class VoiceChatController: ViewController {
transition.updateFrame(node: self.bottomPanelNode, frame: bottomPanelFrame)
let sideButtonSize = CGSize(width: 60.0, height: 60.0)
let cameraButtonSize = CGSize(width: 40.0, height: 40.0)
let centralButtonSize = CGSize(width: 440.0, height: 440.0)
let actionButtonFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((size.width - centralButtonSize.width) / 2.0), y: floorToScreenPixels((bottomAreaHeight - centralButtonSize.height) / 2.0)), size: centralButtonSize)
@@ -2112,8 +2130,14 @@ public final class VoiceChatController: ViewController {
let sideButtonOrigin = max(sideButtonMinimalInset, floor((size.width - 144.0) / 2.0) - sideButtonOffset - sideButtonSize.width)
if self.audioOutputNode.supernode === self.bottomPanelNode {
transition.updateFrame(node: self.audioOutputNode, frame: CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
let cameraButtonDistance: CGFloat = 4.0
let audioOutputFrame = CGRect(origin: CGPoint(x: sideButtonOrigin, y: floor((bottomAreaHeight - sideButtonSize.height - cameraButtonDistance - cameraButtonSize.height) / 2.0) + cameraButtonDistance + cameraButtonSize.height), size: sideButtonSize)
transition.updateFrame(node: self.audioOutputNode, frame: audioOutputFrame)
transition.updateFrame(node: self.leaveNode, frame: CGRect(origin: CGPoint(x: size.width - sideButtonOrigin - sideButtonSize.width, y: floor((bottomAreaHeight - sideButtonSize.height) / 2.0)), size: sideButtonSize))
transition.updateFrame(node: self.cameraButtonNode, frame: CGRect(origin: CGPoint(x: floor(audioOutputFrame.midX - cameraButtonSize.width / 2.0), y: audioOutputFrame.minY - cameraButtonDistance - cameraButtonSize.height), size: cameraButtonSize))
}
if isFirstTime {
while !self.enqueuedTransitions.isEmpty {
@@ -2139,10 +2163,13 @@ public final class VoiceChatController: ViewController {
if self.actionButton.supernode !== self.bottomPanelNode {
self.actionButton.ignoreHierarchyChanges = true
self.audioOutputNode.isHidden = false
self.cameraButtonNode.isHidden = false
self.leaveNode.isHidden = false
self.audioOutputNode.layer.removeAllAnimations()
self.cameraButtonNode.layer.removeAllAnimations()
self.leaveNode.layer.removeAllAnimations()
self.bottomPanelNode.addSubnode(self.audioOutputNode)
self.bottomPanelNode.addSubnode(self.cameraButtonNode)
self.bottomPanelNode.addSubnode(self.leaveNode)
self.bottomPanelNode.addSubnode(self.actionButton)
self.containerLayoutUpdated(layout, navigationHeight :navigationHeight, transition: .immediate)
@@ -2386,7 +2413,7 @@ public final class VoiceChatController: ViewController {
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
if gestureRecognizer is DirectionalPanGestureRecognizer {
let location = gestureRecognizer.location(in: self.bottomPanelNode.view)
if self.audioOutputNode.frame.contains(location) || self.leaveNode.frame.contains(location) {
if self.audioOutputNode.frame.contains(location) || (!self.cameraButtonNode.isHidden && self.cameraButtonNode.frame.contains(location)) || self.leaveNode.frame.contains(location) {
return false
}
}

View File

@@ -190,8 +190,10 @@ public enum GetGroupCallParticipantsError {
case generic
}
public func getGroupCallParticipants(account: Account, callId: Int64, accessHash: Int64, offset: String, limit: Int32) -> Signal<GroupCallParticipantsContext.State, GetGroupCallParticipantsError> {
public func getGroupCallParticipants(account: Account, callId: Int64, accessHash: Int64, ssrcs: [UInt32] = [], offset: String, limit: Int32) -> Signal<GroupCallParticipantsContext.State, GetGroupCallParticipantsError> {
return account.network.request(Api.functions.phone.getGroupParticipants(call: .inputGroupCall(id: callId, accessHash: accessHash), ids: [], sources: [], offset: offset, limit: limit))
return account.network.request(Api.functions.phone.getGroupParticipants(call: .inputGroupCall(id: callId, accessHash: accessHash), ids: [], sources: ssrcs.map {
Int32(bitPattern: $0)
}, offset: offset, limit: limit))
|> mapError { _ -> GetGroupCallParticipantsError in
return .generic
}
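The added ssrcs parameter lets callers resolve a specific set of audio sources instead of paging through all participants; the default empty array keeps the previous behaviour for existing call sites. A usage sketch based on the missing-SSRC path above (callInfo and requestedSsrcs are assumed to be in scope, as in maybeRequestMissingSsrcs):

let participants = getGroupCallParticipants(account: account, callId: callInfo.id, accessHash: callInfo.accessHash, ssrcs: Array(requestedSsrcs), offset: "", limit: 100)
|> deliverOnMainQueue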

View File

@@ -50,11 +50,12 @@ public final class OngoingGroupCallContext {
let videoSources = ValuePromise<Set<UInt32>>(Set(), ignoreRepeated: true)
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?) {
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?, participantDescriptionsRequired: @escaping (Set<UInt32>) -> Void) {
self.queue = queue
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
var participantDescriptionsRequiredImpl: (([NSNumber]) -> Void)?
let videoSources = self.videoSources
self.context = GroupCallThreadLocalContext(
@@ -70,6 +71,9 @@ public final class OngoingGroupCallContext {
videoCapturer: video?.impl,
incomingVideoSourcesUpdated: { ssrcs in
videoSources.set(Set(ssrcs.map { $0.uint32Value }))
},
participantDescriptionsRequired: { ssrcs in
participantDescriptionsRequired(Set(ssrcs.map { $0.uint32Value }))
}
)
@@ -167,6 +171,30 @@ public final class OngoingGroupCallContext {
self.context.setIsMuted(isMuted)
}
func requestVideo(_ capturer: OngoingCallVideoCapturer?) {
let queue = self.queue
self.context.requestVideo(capturer?.impl, completion: { [weak self] payload, ssrc in
queue.async {
guard let strongSelf = self else {
return
}
strongSelf.joinPayload.set(.single((payload, ssrc)))
}
})
}
public func disableVideo() {
let queue = self.queue
self.context.disableVideo({ [weak self] payload, ssrc in
queue.async {
guard let strongSelf = self else {
return
}
strongSelf.joinPayload.set(.single((payload, ssrc)))
}
})
}
func switchAudioInput(_ deviceId: String) {
self.context.switchAudioInput(deviceId)
}
@@ -278,10 +306,10 @@ public final class OngoingGroupCallContext {
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?) {
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?, participantDescriptionsRequired: @escaping (Set<UInt32>) -> Void) {
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video)
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video, participantDescriptionsRequired: participantDescriptionsRequired)
})
}
@@ -291,6 +319,18 @@ public final class OngoingGroupCallContext {
}
}
public func requestVideo(_ capturer: OngoingCallVideoCapturer?) {
self.impl.with { impl in
impl.requestVideo(capturer)
}
}
public func disableVideo() {
self.impl.with { impl in
impl.disableVideo()
}
}
public func switchAudioInput(_ deviceId: String) {
self.impl.with { impl in
impl.switchAudioInput(deviceId)
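Both OngoingGroupCallContext initializers now require a participantDescriptionsRequired callback, so the owning call object can supply descriptions for SSRCs the native context does not yet know about. A construction sketch mirroring the call site in PresentationGroupCallImpl above:

let callContext = OngoingGroupCallContext(video: self.videoCapturer, participantDescriptionsRequired: { [weak self] ssrcs in
    // Hop back to the main queue before touching call state.
    Queue.mainQueue().async {
        guard let strongSelf = self else {
            return
        }
        strongSelf.maybeRequestParticipants(ssrcs: ssrcs)
    }
})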

View File

@@ -168,7 +168,7 @@ typedef NS_ENUM(int32_t, GroupCallNetworkState) {
@interface GroupCallThreadLocalContext : NSObject
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired;
- (void)stop;
@@ -177,6 +177,8 @@ typedef NS_ENUM(int32_t, GroupCallNetworkState) {
- (void)removeSsrcs:(NSArray<NSNumber *> * _Nonnull)ssrcs;
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants;
- (void)setIsMuted:(bool)isMuted;
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer completion:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
- (void)disableVideo:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
- (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume;
- (void)setFullSizeVideoSsrc:(uint32_t)ssrc;

View File

@@ -818,7 +818,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
@implementation GroupCallThreadLocalContext
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired {
self = [super init];
if (self != nil) {
_queue = queue;
@@ -855,6 +855,13 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
[mappedSources addObject:@(it)];
}
incomingVideoSourcesUpdated(mappedSources);
},
.participantDescriptionsRequired = [participantDescriptionsRequired](std::vector<uint32_t> const &ssrcs) {
NSMutableArray<NSNumber *> *mappedSources = [[NSMutableArray alloc] init];
for (auto it : ssrcs) {
[mappedSources addObject:@(it)];
}
participantDescriptionsRequired(mappedSources);
}
}));
}
@@ -868,105 +875,106 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
static void processJoinPayload(tgcalls::GroupJoinPayload &payload, void (^ _Nonnull completion)(NSString * _Nonnull, uint32_t)) {
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
int32_t signedSsrc = *(int32_t *)&payload.ssrc;
dict[@"ssrc"] = @(signedSsrc);
dict[@"ufrag"] = [NSString stringWithUTF8String:payload.ufrag.c_str()];
dict[@"pwd"] = [NSString stringWithUTF8String:payload.pwd.c_str()];
NSMutableArray *fingerprints = [[NSMutableArray alloc] init];
for (auto &fingerprint : payload.fingerprints) {
[fingerprints addObject:@{
@"hash": [NSString stringWithUTF8String:fingerprint.hash.c_str()],
@"fingerprint": [NSString stringWithUTF8String:fingerprint.fingerprint.c_str()],
@"setup": [NSString stringWithUTF8String:fingerprint.setup.c_str()]
}];
}
dict[@"fingerprints"] = fingerprints;
NSMutableArray *parsedVideoSsrcGroups = [[NSMutableArray alloc] init];
NSMutableArray *parsedVideoSources = [[NSMutableArray alloc] init];
for (auto &group : payload.videoSourceGroups) {
NSMutableDictionary *parsedGroup = [[NSMutableDictionary alloc] init];
parsedGroup[@"semantics"] = [NSString stringWithUTF8String:group.semantics.c_str()];
NSMutableArray *sources = [[NSMutableArray alloc] init];
for (auto &source : group.ssrcs) {
[sources addObject:@(source)];
if (![parsedVideoSources containsObject:@(source)]) {
[parsedVideoSources addObject:@(source)];
}
}
parsedGroup[@"sources"] = sources;
[parsedVideoSsrcGroups addObject:parsedGroup];
}
if (parsedVideoSsrcGroups.count != 0) {
dict[@"ssrc-groups"] = parsedVideoSsrcGroups;
}
NSMutableArray *videoPayloadTypes = [[NSMutableArray alloc] init];
for (auto &payloadType : payload.videoPayloadTypes) {
NSMutableDictionary *parsedType = [[NSMutableDictionary alloc] init];
parsedType[@"id"] = @(payloadType.id);
NSString *name = [NSString stringWithUTF8String:payloadType.name.c_str()];
parsedType[@"name"] = name;
parsedType[@"clockrate"] = @(payloadType.clockrate);
if (![name isEqualToString:@"rtx"]) {
parsedType[@"channels"] = @(payloadType.channels);
}
NSMutableDictionary *parsedParameters = [[NSMutableDictionary alloc] init];
for (auto &it : payloadType.parameters) {
NSString *key = [NSString stringWithUTF8String:it.first.c_str()];
NSString *value = [NSString stringWithUTF8String:it.second.c_str()];
parsedParameters[key] = value;
}
if (parsedParameters.count != 0) {
parsedType[@"parameters"] = parsedParameters;
}
if (![name isEqualToString:@"rtx"]) {
NSMutableArray *parsedFbs = [[NSMutableArray alloc] init];
for (auto &it : payloadType.feedbackTypes) {
NSMutableDictionary *parsedFb = [[NSMutableDictionary alloc] init];
parsedFb[@"type"] = [NSString stringWithUTF8String:it.type.c_str()];
if (it.subtype.size() != 0) {
parsedFb[@"subtype"] = [NSString stringWithUTF8String:it.subtype.c_str()];
}
[parsedFbs addObject:parsedFb];
}
parsedType[@"rtcp-fbs"] = parsedFbs;
}
[videoPayloadTypes addObject:parsedType];
}
if (videoPayloadTypes.count != 0) {
dict[@"payload-types"] = videoPayloadTypes;
}
NSMutableArray *parsedExtensions = [[NSMutableArray alloc] init];
for (auto &it : payload.videoExtensionMap) {
NSMutableDictionary *parsedExtension = [[NSMutableDictionary alloc] init];
parsedExtension[@"id"] = @(it.first);
parsedExtension[@"uri"] = [NSString stringWithUTF8String:it.second.c_str()];
[parsedExtensions addObject:parsedExtension];
}
if (parsedExtensions.count != 0) {
dict[@"rtp-hdrexts"] = parsedExtensions;
}
NSData *data = [NSJSONSerialization dataWithJSONObject:dict options:0 error:nil];
NSString *string = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
completion(string, payload.ssrc);
}
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
if (_instance) {
_instance->emitJoinPayload([completion](tgcalls::GroupJoinPayload payload) {
NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
processJoinPayload(payload, completion);
int32_t signedSsrc = *(int32_t *)&payload.ssrc;
dict[@"ssrc"] = @(signedSsrc);
dict[@"ufrag"] = [NSString stringWithUTF8String:payload.ufrag.c_str()];
dict[@"pwd"] = [NSString stringWithUTF8String:payload.pwd.c_str()];
NSMutableArray *fingerprints = [[NSMutableArray alloc] init];
for (auto &fingerprint : payload.fingerprints) {
[fingerprints addObject:@{
@"hash": [NSString stringWithUTF8String:fingerprint.hash.c_str()],
@"fingerprint": [NSString stringWithUTF8String:fingerprint.fingerprint.c_str()],
@"setup": [NSString stringWithUTF8String:fingerprint.setup.c_str()]
}];
}
dict[@"fingerprints"] = fingerprints;
NSMutableArray *parsedVideoSsrcGroups = [[NSMutableArray alloc] init];
NSMutableArray *parsedVideoSources = [[NSMutableArray alloc] init];
for (auto &group : payload.videoSourceGroups) {
NSMutableDictionary *parsedGroup = [[NSMutableDictionary alloc] init];
parsedGroup[@"semantics"] = [NSString stringWithUTF8String:group.semantics.c_str()];
NSMutableArray *sources = [[NSMutableArray alloc] init];
for (auto &source : group.ssrcs) {
[sources addObject:@(source)];
if (![parsedVideoSources containsObject:@(source)]) {
[parsedVideoSources addObject:@(source)];
}
}
parsedGroup[@"sources"] = sources;
[parsedVideoSsrcGroups addObject:parsedGroup];
}
if (parsedVideoSsrcGroups.count != 0) {
dict[@"ssrc-groups"] = parsedVideoSsrcGroups;
}
if (parsedVideoSources.count != 0) {
//dict[@"sources"] = parsedVideoSources;
}
NSMutableArray *videoPayloadTypes = [[NSMutableArray alloc] init];
for (auto &payloadType : payload.videoPayloadTypes) {
NSMutableDictionary *parsedType = [[NSMutableDictionary alloc] init];
parsedType[@"id"] = @(payloadType.id);
NSString *name = [NSString stringWithUTF8String:payloadType.name.c_str()];
parsedType[@"name"] = name;
parsedType[@"clockrate"] = @(payloadType.clockrate);
if (![name isEqualToString:@"rtx"]) {
parsedType[@"channels"] = @(payloadType.channels);
}
NSMutableDictionary *parsedParameters = [[NSMutableDictionary alloc] init];
for (auto &it : payloadType.parameters) {
NSString *key = [NSString stringWithUTF8String:it.first.c_str()];
NSString *value = [NSString stringWithUTF8String:it.second.c_str()];
parsedParameters[key] = value;
}
if (parsedParameters.count != 0) {
parsedType[@"parameters"] = parsedParameters;
}
if (![name isEqualToString:@"rtx"]) {
NSMutableArray *parsedFbs = [[NSMutableArray alloc] init];
for (auto &it : payloadType.feedbackTypes) {
NSMutableDictionary *parsedFb = [[NSMutableDictionary alloc] init];
parsedFb[@"type"] = [NSString stringWithUTF8String:it.type.c_str()];
if (it.subtype.size() != 0) {
parsedFb[@"subtype"] = [NSString stringWithUTF8String:it.subtype.c_str()];
}
[parsedFbs addObject:parsedFb];
}
parsedType[@"rtcp-fbs"] = parsedFbs;
}
[videoPayloadTypes addObject:parsedType];
}
if (videoPayloadTypes.count != 0) {
dict[@"payload-types"] = videoPayloadTypes;
}
NSMutableArray *parsedExtensions = [[NSMutableArray alloc] init];
for (auto &it : payload.videoExtensionMap) {
NSMutableDictionary *parsedExtension = [[NSMutableDictionary alloc] init];
parsedExtension[@"id"] = @(it.first);
parsedExtension[@"uri"] = [NSString stringWithUTF8String:it.second.c_str()];
[parsedExtensions addObject:parsedExtension];
}
if (parsedExtensions.count != 0) {
dict[@"rtp-hdrexts"] = parsedExtensions;
}
NSData *data = [NSJSONSerialization dataWithJSONObject:dict options:0 error:nil];
NSString *string = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
completion(string, payload.ssrc);
});
}
}
@@ -1295,6 +1303,22 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer completion:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
if (_instance) {
_instance->setVideoCapture([videoCapturer getInterface], [completion](auto payload){
processJoinPayload(payload, completion);
});
}
}
- (void)disableVideo:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion {
if (_instance) {
_instance->setVideoCapture(nullptr, [completion](auto payload){
processJoinPayload(payload, completion);
});
}
}
- (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume {
if (_instance) {
_instance->setVolume(ssrc, volume);

@@ -1 +1 @@
Subproject commit bca416c0b76eac786f01b34e9d09e92df7863168
Subproject commit d8748598133e39c3fc8c52a77a443c2d1da25032