Refactor tgcalls

Ali
2021-04-30 12:36:59 +04:00
parent e1f1c3c314
commit 08f90a322c
6 changed files with 262 additions and 133 deletions

@@ -52,6 +52,42 @@ private extension GroupCallParticipantsContext.Participant {
}
return participantSsrcs
}
var videoSsrcs: Set<UInt32> {
var participantSsrcs = Set<UInt32>()
if let jsonParams = self.videoJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
if let groups = json["ssrc-groups"] as? [Any] {
for group in groups {
if let group = group as? [String: Any] {
if let groupSources = group["sources"] as? [UInt32] {
for source in groupSources {
participantSsrcs.insert(source)
}
}
}
}
}
}
return participantSsrcs
}
var presentationSsrcs: Set<UInt32> {
var participantSsrcs = Set<UInt32>()
if let jsonParams = self.presentationJsonDescription, let jsonData = jsonParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
if let groups = json["ssrc-groups"] as? [Any] {
for group in groups {
if let group = group as? [String: Any] {
if let groupSources = group["sources"] as? [UInt32] {
for source in groupSources {
participantSsrcs.insert(source)
}
}
}
}
}
}
return participantSsrcs
}
}
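
For reference, the two computed properties above share the same "ssrc-groups" walk; below is a self-contained sketch of that parsing with a made-up payload (only the key names and casts mirror the code above):

import Foundation

// Collects every source listed under "ssrc-groups" in a participant's JSON
// description. The sample payload at the bottom is illustrative only.
func collectSsrcs(fromJsonDescription jsonParams: String) -> Set<UInt32> {
    guard
        let jsonData = jsonParams.data(using: .utf8),
        let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any],
        let groups = json["ssrc-groups"] as? [[String: Any]]
    else {
        return []
    }
    var ssrcs = Set<UInt32>()
    for group in groups {
        if let groupSources = group["sources"] as? [UInt32] {
            ssrcs.formUnion(groupSources)
        }
    }
    return ssrcs
}

let sample = "{\"ssrc-groups\": [{\"semantics\": \"SIM\", \"sources\": [123, 124, 125]}]}"
print(collectSsrcs(fromJsonDescription: sample)) // [123, 124, 125] (unordered)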
extension GroupCallParticipantsContext.Participant {
@@ -591,10 +627,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return self.incomingVideoSourcePromise.get()
}
private var missingSsrcs = Set<UInt32>()
private let missingSsrcsDisposable = MetaDisposable()
private var isRequestingMissingSsrcs: Bool = false
private var peerUpdatesSubscription: Disposable?
public private(set) var schedulePending = false
@@ -884,7 +916,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.participantsContextStateDisposable.dispose()
self.myAudioLevelDisposable.dispose()
self.memberEventsPipeDisposable.dispose()
self.missingSsrcsDisposable.dispose()
self.myAudioLevelTimer?.invalidate()
self.typingDisposable.dispose()
@@ -1331,13 +1362,15 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
let enableNoiseSuppression = accountContext.sharedContext.immediateExperimentalUISettings.enableNoiseSuppression
genericCallContext = OngoingGroupCallContext(video: self.videoCapturer, participantDescriptionsRequired: { [weak self] ssrcs in
genericCallContext = OngoingGroupCallContext(video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
let disposable = MetaDisposable()
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
strongSelf.maybeRequestParticipants(ssrcs: ssrcs)
disposable.set(strongSelf.requestMediaChannelDescriptions(ssrcs: ssrcs, completion: completion))
}
return disposable
}, audioStreamData: OngoingGroupCallContext.AudioStreamData(account: self.accountContext.account, callId: callInfo.id, accessHash: callInfo.accessHash), rejoinNeeded: { [weak self] in
Queue.mainQueue().async {
guard let strongSelf = self else {
@@ -1976,75 +2009,67 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
}
private func maybeRequestParticipants(ssrcs: Set<UInt32>) {
var addedMissingSsrcs = ssrcs
private func requestMediaChannelDescriptions(ssrcs: Set<UInt32>, completion: @escaping ([OngoingGroupCallContext.MediaChannelDescription]) -> Void) -> Disposable {
func extractMediaChannelDescriptions(remainingSsrcs: inout Set<UInt32>, participants: [GroupCallParticipantsContext.Participant], into result: inout [OngoingGroupCallContext.MediaChannelDescription]) {
for participant in participants {
guard let audioSsrc = participant.ssrc else {
continue
}
var addedParticipants: [(UInt32, String?, String?)] = []
if remainingSsrcs.contains(audioSsrc) {
remainingSsrcs.remove(audioSsrc)
result.append(OngoingGroupCallContext.MediaChannelDescription(
kind: .audio,
audioSsrc: audioSsrc,
videoDescription: nil
))
}
if let videoDescription = participant.videoJsonDescription, !videoDescription.isEmpty {
let videoSsrcs = participant.videoSsrcs
if !videoSsrcs.intersection(remainingSsrcs).isEmpty {
remainingSsrcs.subtract(videoSsrcs)
result.append(OngoingGroupCallContext.MediaChannelDescription(
kind: .video,
audioSsrc: audioSsrc,
videoDescription: videoDescription
))
}
}
if let videoDescription = participant.presentationJsonDescription, !videoDescription.isEmpty {
let videoSsrcs = participant.presentationSsrcs
if !videoSsrcs.intersection(remainingSsrcs).isEmpty {
remainingSsrcs.subtract(videoSsrcs)
result.append(OngoingGroupCallContext.MediaChannelDescription(
kind: .video,
audioSsrc: audioSsrc,
videoDescription: videoDescription
))
}
}
}
}
var remainingSsrcs = ssrcs
var result: [OngoingGroupCallContext.MediaChannelDescription] = []
if let membersValue = self.membersValue {
for participant in membersValue.participants {
let participantSsrcs = participant.allSsrcs
if !addedMissingSsrcs.intersection(participantSsrcs).isEmpty {
addedMissingSsrcs.subtract(participantSsrcs)
if let ssrc = participant.ssrc {
addedParticipants.append((ssrc, participant.videoJsonDescription, participant.presentationJsonDescription))
}
}
}
extractMediaChannelDescriptions(remainingSsrcs: &remainingSsrcs, participants: membersValue.participants, into: &result)
}
if !addedParticipants.isEmpty {
self.genericCallContext?.addParticipants(participants: addedParticipants)
}
if !remainingSsrcs.isEmpty, let callInfo = self.internalState.callInfo {
return (getGroupCallParticipants(account: self.account, callId: callInfo.id, accessHash: callInfo.accessHash, offset: "", ssrcs: Array(remainingSsrcs), limit: 100, sortAscending: callInfo.sortAscending)
|> deliverOnMainQueue).start(next: { state in
extractMediaChannelDescriptions(remainingSsrcs: &remainingSsrcs, participants: state.participants, into: &result)
if !addedMissingSsrcs.isEmpty {
self.missingSsrcs.formUnion(addedMissingSsrcs)
self.maybeRequestMissingSsrcs()
}
}
private func maybeRequestMissingSsrcs() {
if self.isRequestingMissingSsrcs {
return
}
if self.missingSsrcs.isEmpty {
return
}
if case let .established(callInfo, _, _, _, _) = self.internalState {
self.isRequestingMissingSsrcs = true
let requestedSsrcs = self.missingSsrcs
self.missingSsrcsDisposable.set((getGroupCallParticipants(account: self.account, callId: callInfo.id, accessHash: callInfo.accessHash, offset: "", ssrcs: Array(requestedSsrcs), limit: 100, sortAscending: callInfo.sortAscending)
|> deliverOnMainQueue).start(next: { [weak self] state in
guard let strongSelf = self else {
return
}
strongSelf.isRequestingMissingSsrcs = false
strongSelf.missingSsrcs.subtract(requestedSsrcs)
var addedParticipants: [(UInt32, Int32?, String?, String?)] = []
for participant in state.participants {
if let ssrc = participant.ssrc {
addedParticipants.append((ssrc, participant.volume, participant.videoJsonDescription, participant.presentationJsonDescription))
}
}
if !addedParticipants.isEmpty {
for (ssrc, volume, _, _) in addedParticipants {
if let volume = volume {
strongSelf.genericCallContext?.setVolume(ssrc: ssrc, volume: Double(volume) / 10000.0)
}
}
strongSelf.genericCallContext?.addParticipants(participants: addedParticipants.map { ssrc, _, videoParams, presentationParams in
return (ssrc, videoParams, presentationParams)
completion(result)
})
}
strongSelf.maybeRequestMissingSsrcs()
}))
} else {
completion(result)
return EmptyDisposable
}
}
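
The contract introduced here is: the context hands over a set of unresolved SSRCs plus a completion, and gets back a Disposable that cancels the lookup. A minimal caller-side sketch, assuming SwiftSignalKit's Queue/MetaDisposable and the TelegramVoip module for OngoingGroupCallContext; demoRequestMediaChannelDescriptions, the SSRC values, and the print are illustrative, not part of this commit:

import SwiftSignalKit
import TelegramVoip

// Stand-in for the real closure built above: it answers asynchronously with no
// channels; the returned MetaDisposable is where a real implementation would
// park its network request so that disposing cancels it.
func demoRequestMediaChannelDescriptions(ssrcs: Set<UInt32>, completion: @escaping ([OngoingGroupCallContext.MediaChannelDescription]) -> Void) -> Disposable {
    let disposable = MetaDisposable()
    Queue.mainQueue().async {
        completion([])
    }
    return disposable
}

let pending = MetaDisposable()
pending.set(demoRequestMediaChannelDescriptions(ssrcs: [1000, 1001], completion: { descriptions in
    print("resolved \(descriptions.count) channel description(s)")
}))
// Disposing aborts the lookup while it is still in flight.
pending.dispose()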
@@ -2405,7 +2430,10 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
let screencastCallContext = OngoingGroupCallContext(
video: self.screenCapturer,
participantDescriptionsRequired: { _ in },
requestMediaChannelDescriptions: { _, completion in
completion([])
return EmptyDisposable
},
audioStreamData: nil,
rejoinNeeded: {},
outgoingAudioBitrateKbit: nil,
@@ -2446,6 +2474,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
screencastCallContext.setConnectionMode(.rtc, keepBroadcastConnectedIfWasEnabled: false)
screencastCallContext.setJoinResponse(payload: clientParams)
strongSelf.genericCallContext?.setIgnoreVideoEndpointIds(endpointIds: [joinCallResult.endpointId])
}, error: { error in
guard let _ = self else {
return
@@ -2612,9 +2642,6 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.currentConnectionMode = .none
self.genericCallContext?.setConnectionMode(.none, keepBroadcastConnectedIfWasEnabled: movingFromBroadcastToRtc)
self.missingSsrcsDisposable.set(nil)
self.missingSsrcs.removeAll()
self.internalState = .requesting
self.internalStatePromise.set(.single(.requesting))
self.isCurrentlyConnecting = nil

@@ -783,6 +783,7 @@ public func joinGroupCall(account: Account, peerId: PeerId, joinAs: PeerId?, cal
public struct JoinGroupCallAsScreencastResult {
public var jsonParams: String
public var endpointId: String
}
public func joinGroupCallAsScreencast(account: Account, peerId: PeerId, callId: Int64, accessHash: Int64, joinPayload: String) -> Signal<JoinGroupCallAsScreencastResult, JoinGroupCallError> {
@@ -810,8 +811,21 @@ public func joinGroupCallAsScreencast(account: Account, peerId: PeerId, callId:
return .fail(.generic)
}
var maybeEndpointId: String?
if let jsonData = parsedClientParams.data(using: .utf8), let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any] {
if let videoSection = json["video"] as? [String: Any] {
maybeEndpointId = videoSection["endpoint"] as? String
}
}
guard let endpointId = maybeEndpointId else {
return .fail(.generic)
}
return .single(JoinGroupCallAsScreencastResult(
jsonParams: parsedClientParams
jsonParams: parsedClientParams,
endpointId: endpointId
))
}
}
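
The endpoint extraction above is plain JSON digging; here is a self-contained sketch with an illustrative payload (the "video"/"endpoint" key path matches the code above, the values do not come from a real response):

import Foundation

// Pulls the screencast endpoint id out of the join response's client params.
func extractEndpointId(fromClientParams clientParams: String) -> String? {
    guard
        let jsonData = clientParams.data(using: .utf8),
        let json = try? JSONSerialization.jsonObject(with: jsonData, options: []) as? [String: Any],
        let videoSection = json["video"] as? [String: Any]
    else {
        return nil
    }
    return videoSection["endpoint"] as? String
}

let sampleParams = "{\"video\": {\"endpoint\": \"presentation/1\"}}"
print(extractEndpointId(fromClientParams: sampleParams) ?? "missing") // presentation/1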

@@ -28,7 +28,7 @@ private final class ContextQueueImpl: NSObject, OngoingCallThreadLocalContextQue
}
}
private protocol BroadcastPartSource: class {
private protocol BroadcastPartSource: AnyObject {
func requestPart(timestampMilliseconds: Int64, durationMilliseconds: Int64, completion: @escaping (OngoingGroupCallBroadcastPart) -> Void, rejoinNeeded: @escaping () -> Void) -> Disposable
}
@@ -170,6 +170,23 @@ public final class OngoingGroupCallContext {
case source(UInt32)
}
public struct MediaChannelDescription {
public enum Kind {
case audio
case video
}
public var kind: Kind
public var audioSsrc: UInt32
public var videoDescription: String?
public init(kind: Kind, audioSsrc: UInt32, videoDescription: String?) {
self.kind = kind
self.audioSsrc = audioSsrc
self.videoDescription = videoDescription
}
}
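
Constructing the new value type is direct; the SSRC and the JSON string below are placeholders:

// Placeholder values; in practice audioSsrc comes from the participant and
// videoDescription carries that participant's video JSON description.
let audioChannel = OngoingGroupCallContext.MediaChannelDescription(
    kind: .audio,
    audioSsrc: 1000,
    videoDescription: nil
)
let videoChannel = OngoingGroupCallContext.MediaChannelDescription(
    kind: .video,
    audioSsrc: 1000,
    videoDescription: "{\"ssrc-groups\": []}"
)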
private final class Impl {
let queue: Queue
let context: GroupCallThreadLocalContext
@@ -186,7 +203,7 @@ public final class OngoingGroupCallContext {
private var broadcastPartsSource: BroadcastPartSource?
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?, participantDescriptionsRequired: @escaping (Set<UInt32>) -> Void, audioStreamData: AudioStreamData?, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool) {
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, audioStreamData: AudioStreamData?, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool) {
self.queue = queue
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
@@ -224,8 +241,37 @@ public final class OngoingGroupCallContext {
incomingVideoSourcesUpdated: { endpointIds in
videoSources.set(Set(endpointIds))
},
participantDescriptionsRequired: { ssrcs in
participantDescriptionsRequired(Set(ssrcs.map { $0.uint32Value }))
requestMediaChannelDescriptions: { ssrcs, completion in
final class OngoingGroupCallMediaChannelDescriptionTaskImpl : NSObject, OngoingGroupCallMediaChannelDescriptionTask {
private let disposable: Disposable
init(disposable: Disposable) {
self.disposable = disposable
}
func cancel() {
self.disposable.dispose()
}
}
let disposable = requestMediaChannelDescriptions(Set(ssrcs.map { $0.uint32Value }), { channels in
completion(channels.map { channel -> OngoingGroupCallMediaChannelDescription in
let mappedType: OngoingGroupCallMediaChannelType
switch channel.kind {
case .audio:
mappedType = .audio
case .video:
mappedType = .video
}
return OngoingGroupCallMediaChannelDescription(
type: mappedType,
audioSsrc: channel.audioSsrc,
videoDescription: channel.videoDescription
)
})
})
return OngoingGroupCallMediaChannelDescriptionTaskImpl(disposable: disposable)
},
requestBroadcastPart: { timestampMilliseconds, durationMilliseconds, completion in
let disposable = MetaDisposable()
@@ -315,13 +361,8 @@ public final class OngoingGroupCallContext {
self.context.setFullSizeVideoEndpointId(endpointId)
}
func addParticipants(participants: [(UInt32, String?, String?)]) {
if participants.isEmpty {
return
}
self.context.addParticipants(participants.map { participant -> OngoingGroupCallParticipantDescription in
return OngoingGroupCallParticipantDescription(audioSsrc: participant.0, videoJsonDescription: participant.1, screencastJsonDescription: participant.2)
})
func setIgnoreVideoEndpointIds(endpointIds: [String]) {
self.context.setIgnoreVideoEndpointIds(endpointIds)
}
func stop() {
@@ -547,10 +588,10 @@ public final class OngoingGroupCallContext {
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?, participantDescriptionsRequired: @escaping (Set<UInt32>) -> Void, audioStreamData: AudioStreamData?, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool) {
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, audioStreamData: AudioStreamData?, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool) {
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video, participantDescriptionsRequired: participantDescriptionsRequired, audioStreamData: audioStreamData, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression)
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, audioStreamData: audioStreamData, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression)
})
}
@@ -630,9 +671,9 @@ public final class OngoingGroupCallContext {
}
}
public func addParticipants(participants: [(UInt32, String?, String?)]) {
public func setIgnoreVideoEndpointIds(endpointIds: [String]) {
self.impl.with { impl in
impl.addParticipants(participants: participants)
impl.setIgnoreVideoEndpointIds(endpointIds: endpointIds)
}
}
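
Call sites with nothing to resolve can satisfy the new requestMediaChannelDescriptions parameter with an immediate empty completion, exactly as the screencast context does earlier in this commit; a condensed standalone sketch (EmptyDisposable comes from SwiftSignalKit, the TelegramVoip module name is assumed from the repo layout):

import SwiftSignalKit
import TelegramVoip

// Minimal fallback: report no channels and return a disposable with nothing to
// cancel. Mirrors the screencast context's closure in this commit.
func emptyMediaChannelDescriptionsRequest(ssrcs: Set<UInt32>, completion: ([OngoingGroupCallContext.MediaChannelDescription]) -> Void) -> Disposable {
    completion([])
    return EmptyDisposable
}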

@@ -156,13 +156,20 @@ typedef struct {
bool isTransitioningFromBroadcastToRtc;
} GroupCallNetworkState;
@interface OngoingGroupCallParticipantDescription : NSObject
typedef NS_ENUM(int32_t, OngoingGroupCallMediaChannelType) {
OngoingGroupCallMediaChannelTypeAudio,
OngoingGroupCallMediaChannelTypeVideo
};
@interface OngoingGroupCallMediaChannelDescription : NSObject
@property (nonatomic, readonly) OngoingGroupCallMediaChannelType type;
@property (nonatomic, readonly) uint32_t audioSsrc;
@property (nonatomic, strong, readonly) NSString * _Nullable videoJsonDescription;
@property (nonatomic, strong, readonly) NSString * _Nullable screencastJsonDescription;
@property (nonatomic, strong, readonly) NSString * _Nullable videoDescription;
- (instancetype _Nonnull)initWithAudioSsrc:(uint32_t)audioSsrc videoJsonDescription:(NSString * _Nullable)videoJsonDescription screencastJsonDescription:(NSString * _Nullable)screencastJsonDescription;
- (instancetype _Nonnull)initWithType:(OngoingGroupCallMediaChannelType)type
audioSsrc:(uint32_t)audioSsrc
videoDescription:(NSString * _Nullable)videoDescription;
@end
@@ -172,6 +179,12 @@ typedef struct {
@end
@protocol OngoingGroupCallMediaChannelDescriptionTask <NSObject>
- (void)cancel;
@end
typedef NS_ENUM(int32_t, OngoingCallConnectionMode) {
OngoingCallConnectionModeNone,
OngoingCallConnectionModeRtc,
@@ -203,7 +216,7 @@ typedef NS_ENUM(int32_t, OngoingGroupCallVideoContentType) {
@interface GroupCallThreadLocalContext : NSObject
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSString *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSString *> * _Nonnull))incomingVideoSourcesUpdated requestMediaChannelDescriptions:(id<OngoingGroupCallMediaChannelDescriptionTask> _Nonnull (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull, void (^ _Nonnull)(NSArray<OngoingGroupCallMediaChannelDescription *> * _Nonnull)))requestMediaChannelDescriptions requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression;
- (void)stop;
@@ -213,7 +226,6 @@ typedef NS_ENUM(int32_t, OngoingGroupCallVideoContentType) {
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload;
- (void)removeSsrcs:(NSArray<NSNumber *> * _Nonnull)ssrcs;
- (void)removeIncomingVideoSource:(uint32_t)ssrc;
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants;
- (void)setIsMuted:(bool)isMuted;
- (void)setIsNoiseSuppressionEnabled:(bool)isNoiseSuppressionEnabled;
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer completion:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
@@ -221,10 +233,11 @@ typedef NS_ENUM(int32_t, OngoingGroupCallVideoContentType) {
- (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume;
- (void)setFullSizeVideoEndpointId:(NSString * _Nullable)endpointId;
- (void)setIgnoreVideoEndpointIds:(NSArray<NSString *> * _Nonnull)ignoreVideoEndpointIds;
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
- (void)switchAudioInput:(NSString * _Nonnull)deviceId;
- (void)makeIncomingVideoViewWithEndpointId:(NSString *)endpointId completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
- (void)makeIncomingVideoViewWithEndpointId:(NSString * _Nonnull)endpointId completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
@end

@@ -844,6 +844,23 @@ private:
id<OngoingGroupCallBroadcastPartTask> _task;
};
class RequestMediaChannelDescriptionTaskImpl : public tgcalls::RequestMediaChannelDescriptionTask {
public:
RequestMediaChannelDescriptionTaskImpl(id<OngoingGroupCallMediaChannelDescriptionTask> task) {
_task = task;
}
virtual ~RequestMediaChannelDescriptionTaskImpl() {
}
virtual void cancel() override {
[_task cancel];
}
private:
id<OngoingGroupCallMediaChannelDescriptionTask> _task;
};
}
@interface GroupCallThreadLocalContext () {
@@ -859,7 +876,7 @@ private:
@implementation GroupCallThreadLocalContext
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSString *> * _Nonnull))incomingVideoSourcesUpdated participantDescriptionsRequired:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))participantDescriptionsRequired requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSString *> * _Nonnull))incomingVideoSourcesUpdated requestMediaChannelDescriptions:(id<OngoingGroupCallMediaChannelDescriptionTask> _Nonnull (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull, void (^ _Nonnull)(NSArray<OngoingGroupCallMediaChannelDescription *> * _Nonnull)))requestMediaChannelDescriptions requestBroadcastPart:(id<OngoingGroupCallBroadcastPartTask> _Nonnull (^ _Nonnull)(int64_t, int64_t, void (^ _Nonnull)(OngoingGroupCallBroadcastPart * _Nullable)))requestBroadcastPart outgoingAudioBitrateKbit:(int32_t)outgoingAudioBitrateKbit videoContentType:(OngoingGroupCallVideoContentType)videoContentType enableNoiseSuppression:(bool)enableNoiseSuppression {
self = [super init];
if (self != nil) {
_queue = queue;
@@ -925,13 +942,6 @@ private:
}
incomingVideoSourcesUpdated(mappedSources);
},
.participantDescriptionsRequired = [participantDescriptionsRequired](std::vector<uint32_t> const &ssrcs) {
NSMutableArray<NSNumber *> *mappedSources = [[NSMutableArray alloc] init];
for (auto it : ssrcs) {
[mappedSources addObject:@(it)];
}
participantDescriptionsRequired(mappedSources);
},
.requestBroadcastPart = [requestBroadcastPart](int64_t timestampMilliseconds, int64_t durationMilliseconds, std::function<void(tgcalls::BroadcastPart &&)> completion) -> std::shared_ptr<tgcalls::BroadcastPartTask> {
id<OngoingGroupCallBroadcastPartTask> task = requestBroadcastPart(timestampMilliseconds, durationMilliseconds, ^(OngoingGroupCallBroadcastPart * _Nullable part) {
tgcalls::BroadcastPart parsedPart;
@@ -970,7 +980,39 @@ private:
.outgoingAudioBitrateKbit = outgoingAudioBitrateKbit,
.videoContentType = _videoContentType,
.videoCodecPreferences = videoCodecPreferences,
.initialEnableNoiseSuppression = enableNoiseSuppression
.initialEnableNoiseSuppression = enableNoiseSuppression,
.requestMediaChannelDescriptions = [requestMediaChannelDescriptions](std::vector<uint32_t> const &ssrcs, std::function<void(std::vector<tgcalls::MediaChannelDescription> &&)> completion) -> std::shared_ptr<tgcalls::RequestMediaChannelDescriptionTask> {
NSMutableArray<NSNumber *> *mappedSsrcs = [[NSMutableArray alloc] init];
for (auto ssrc : ssrcs) {
[mappedSsrcs addObject:[NSNumber numberWithUnsignedInt:ssrc]];
}
id<OngoingGroupCallMediaChannelDescriptionTask> task = requestMediaChannelDescriptions(mappedSsrcs, ^(NSArray<OngoingGroupCallMediaChannelDescription *> *channels) {
std::vector<tgcalls::MediaChannelDescription> mappedChannels;
for (OngoingGroupCallMediaChannelDescription *channel in channels) {
tgcalls::MediaChannelDescription mappedChannel;
switch (channel.type) {
case OngoingGroupCallMediaChannelTypeAudio: {
mappedChannel.type = tgcalls::MediaChannelDescription::Type::Audio;
break;
}
case OngoingGroupCallMediaChannelTypeVideo: {
mappedChannel.type = tgcalls::MediaChannelDescription::Type::Video;
break;
}
default: {
continue;
}
}
mappedChannel.audioSsrc = channel.audioSsrc;
mappedChannel.videoInformation = channel.videoDescription.UTF8String ?: "";
mappedChannels.push_back(std::move(mappedChannel));
}
completion(std::move(mappedChannels));
});
return std::make_shared<RequestMediaChannelDescriptionTaskImpl>(task);
}
}));
}
return self;
@@ -1038,26 +1080,6 @@ private:
}
}
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants {
if (_instance) {
std::vector<tgcalls::GroupParticipantDescription> parsedParticipants;
for (OngoingGroupCallParticipantDescription *participant in participants) {
tgcalls::GroupParticipantDescription parsedParticipant;
parsedParticipant.audioSsrc = participant.audioSsrc;
if (participant.videoJsonDescription.length != 0) {
parsedParticipant.videoInformation = participant.videoJsonDescription.UTF8String;
}
if (participant.screencastJsonDescription.length != 0) {
parsedParticipant.screencastInformation = participant.screencastJsonDescription.UTF8String;
}
parsedParticipants.push_back(parsedParticipant);
}
_instance->addParticipants(std::move(parsedParticipants));
}
}
- (void)setIsMuted:(bool)isMuted {
if (_instance) {
_instance->setIsMuted(isMuted);
@@ -1094,6 +1116,16 @@ private:
}
}
- (void)setIgnoreVideoEndpointIds:(NSArray<NSString *> * _Nonnull)ignoreVideoEndpointIds {
if (_instance) {
std::vector<std::string> mappedEndpointIds;
for (NSString *value in ignoreVideoEndpointIds) {
mappedEndpointIds.push_back(value.UTF8String);
}
_instance->setIgnoreVideoEndpointIds(mappedEndpointIds);
}
}
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId {
if (_instance) {
_instance->setAudioOutputDevice(deviceId.UTF8String);
@@ -1148,14 +1180,16 @@ private:
@end
@implementation OngoingGroupCallParticipantDescription
@implementation OngoingGroupCallMediaChannelDescription
- (instancetype _Nonnull)initWithAudioSsrc:(uint32_t)audioSsrc videoJsonDescription:(NSString * _Nullable)videoJsonDescription screencastJsonDescription:(NSString * _Nullable)screencastJsonDescription {
- (instancetype _Nonnull)initWithType:(OngoingGroupCallMediaChannelType)type
audioSsrc:(uint32_t)audioSsrc
videoDescription:(NSString * _Nullable)videoDescription {
self = [super init];
if (self != nil) {
_type = type;
_audioSsrc = audioSsrc;
_videoJsonDescription = videoJsonDescription;
_screencastJsonDescription = screencastJsonDescription;
_videoDescription = videoDescription;
}
return self;
}