Merge branch 'experiments/voip-custom-reflector' into refactoring/webrtc-upgrade

# Conflicts:
#	submodules/TelegramApi/Sources/Api0.swift
#	submodules/TelegramApi/Sources/Api1.swift
#	submodules/TelegramCore/Sources/Serialization.swift
#	submodules/TelegramVoip/Sources/GroupCallContext.swift
#	submodules/TgVoipWebrtc/tgcalls
#	third-party/webrtc/webrtc-ios
This commit is contained in:
Ali 2020-12-29 01:43:17 +04:00
commit 6383cf3a1d
12 changed files with 1023 additions and 23 deletions

View File

@ -96,6 +96,7 @@ public final class PresentationCallVideoView {
case rotation270
}
public let holder: AnyObject
public let view: UIView
public let setOnFirstFrameReceived: (((Float) -> Void)?) -> Void
@ -105,6 +106,7 @@ public final class PresentationCallVideoView {
public let setOnIsMirroredUpdated: (((Bool) -> Void)?) -> Void
public init(
holder: AnyObject,
view: UIView,
setOnFirstFrameReceived: @escaping (((Float) -> Void)?) -> Void,
getOrientation: @escaping () -> Orientation,
@ -112,6 +114,7 @@ public final class PresentationCallVideoView {
setOnOrientationUpdated: @escaping (((Orientation, CGFloat) -> Void)?) -> Void,
setOnIsMirroredUpdated: @escaping (((Bool) -> Void)?) -> Void
) {
self.holder = holder
self.view = view
self.setOnFirstFrameReceived = setOnFirstFrameReceived
self.getOrientation = getOrientation
@ -302,6 +305,10 @@ public protocol PresentationGroupCall: class {
func invitePeer(_ peerId: PeerId) -> Bool
func removedPeer(_ peerId: PeerId)
var invitedPeers: Signal<[PeerId], NoError> { get }
var incomingVideoSources: Signal<Set<UInt32>, NoError> { get }
func makeIncomingVideoView(source: UInt32, completion: @escaping (PresentationCallVideoView?) -> Void)
}
public protocol PresentationCallManager: class {

View File

@ -738,6 +738,15 @@ private enum DebugControllerEntry: ItemListNodeEntry {
})
case .voiceConference:
return ItemListDisclosureItem(presentationData: presentationData, title: "Voice Conference (Test)", label: "", sectionId: self.section, style: .blocks, action: {
guard let context = arguments.context else {
return
}
if #available(iOS 12.0, *) {
let chatScreen = DebugVoiceChatScreen(context: context)
chatScreen.navigationPresentation = .modal
arguments.pushController(chatScreen)
}
})
case let .preferredVideoCodec(_, title, value, isSelected):
return ItemListCheckboxItem(presentationData: presentationData, title: title, style: .right, checked: isSelected, zeroSeparatorInsets: false, sectionId: self.section, action: {

View File

@ -0,0 +1,125 @@
import SwiftSignalKit
import Display
import AsyncDisplayKit
import TelegramPresentationData
import AccountContext
import TelegramAudio
import TelegramVoip
/// Debug-only screen that spins up a `DebugOngoingGroupCallContext` so the
/// custom voice-chat reflector transport can be exercised in isolation.
@available(iOS 12.0, *)
public final class DebugVoiceChatScreen: ViewController {
    @available(iOS 12.0, *)
    private final class Node: ViewControllerTracingNode {
        private let context: AccountContext
        private let audioSession: ManagedAudioSession
        private var callContext: DebugOngoingGroupCallContext?

        private var audioSessionDisposable: Disposable?
        private let audioSessionShouldBeActive = ValuePromise<Bool>(false, ignoreRepeated: true)
        private var audioSessionShouldBeActiveDisposable: Disposable?
        private var audioSessionControl: ManagedAudioSessionControl?

        init(context: AccountContext, audioSession: ManagedAudioSession) {
            self.context = context
            self.audioSession = audioSession

            super.init()

            self.backgroundColor = .white

            // Audio session acquisition is intentionally disabled for this debug screen.
            /*self.audioSessionDisposable = self.audioSession.push(audioSessionType: .voiceCall, manualActivate: { [weak self] control in
                Queue.mainQueue().async {
                    if let strongSelf = self {
                        strongSelf.updateAudioSessionControl(control)
                    }
                }
            }, deactivate: {
                return Signal { subscriber in
                    Queue.mainQueue().async {
                        subscriber.putCompletion()
                    }
                    return EmptyDisposable
                }
            }, availableOutputsChanged: { availableOutputs, currentOutput in
                Queue.mainQueue().async {
                }
            })*/

            self.audioSessionShouldBeActive.set(true)

            self.callContext = DebugOngoingGroupCallContext()
        }

        deinit {
            self.audioSessionDisposable?.dispose()
            self.audioSessionShouldBeActiveDisposable?.dispose()
        }

        /// Stores the first audio-session control handed to us; subsequent calls
        /// are ignored. Currently unreachable because the session push above is
        /// commented out.
        private func updateAudioSessionControl(_ control: ManagedAudioSessionControl) {
            guard self.audioSessionControl == nil else {
                return
            }
            self.audioSessionControl = control

            self.audioSessionShouldBeActiveDisposable = (self.audioSessionShouldBeActive.get()
            |> deliverOnMainQueue).start(next: { [weak self] value in
                guard let self = self else {
                    return
                }
                if value, let audioSessionControl = self.audioSessionControl {
                    audioSessionControl.activate({ _ in
                        Queue.mainQueue().async {
                            //self.audioSessionActive.set(.single(true))
                        }
                    })
                } else {
                    //self.audioSessionActive.set(.single(false))
                }
            })
        }

        func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
        }
    }

    private var controllerNode: Node {
        return self.displayNode as! Node
    }

    private let context: AccountContext

    public init(context: AccountContext) {
        self.context = context

        super.init(navigationBarPresentationData: NavigationBarPresentationData(presentationData: context.sharedContext.currentPresentationData.with { $0 }))
    }

    required init(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override public func loadDisplayNode() {
        self.displayNode = Node(context: self.context, audioSession: self.context.sharedContext.mediaManager.audioSession)

        self.displayNodeDidLoad()
    }

    override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
        super.containerLayoutUpdated(layout, transition: transition)

        self.controllerNode.containerLayoutUpdated(layout, transition: transition)
    }
}

View File

@ -928,6 +928,7 @@ public final class PresentationCallImpl: PresentationCall {
let setOnOrientationUpdated = view.setOnOrientationUpdated
let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
completion(PresentationCallVideoView(
holder: view,
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)
@ -997,6 +998,7 @@ public final class PresentationCallImpl: PresentationCall {
let setOnOrientationUpdated = view.setOnOrientationUpdated
let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
completion(PresentationCallVideoView(
holder: view,
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)

View File

@ -454,6 +454,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var didConnectOnce: Bool = false
private var toneRenderer: PresentationCallToneRenderer?
private var videoCapturer: OngoingCallVideoCapturer?
private let incomingVideoSourcePromise = Promise<Set<UInt32>>(Set())
public var incomingVideoSources: Signal<Set<UInt32>, NoError> {
return self.incomingVideoSourcePromise.get()
}
init(
accountContext: AccountContext,
audioSession: ManagedAudioSession,
@ -477,6 +484,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.temporaryJoinTimestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970)
self.videoCapturer = OngoingCallVideoCapturer()
var didReceiveAudioOutputs = false
if !audioSession.getIsHeadsetPluggedIn() {
@ -579,6 +588,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return
}
if case let .estabilished(callInfo, _, _, _) = strongSelf.internalState {
var addedParticipants: [(UInt32, String?)] = []
var removedSsrc: [UInt32] = []
for (callId, update) in updates {
if callId == callInfo.id {
@ -598,6 +608,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
strongSelf._canBeRemoved.set(.single(true))
}
} else if case .joined = participantUpdate.participationStatusChange {
addedParticipants.append((participantUpdate.ssrc, participantUpdate.jsonParams))
}
}
case let .call(isTerminated, _):
@ -610,6 +621,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if !removedSsrc.isEmpty {
strongSelf.callContext?.removeSsrcs(ssrcs: removedSsrc)
}
if !addedParticipants.isEmpty {
strongSelf.callContext?.addParticipants(participants: addedParticipants)
}
}
})
@ -667,6 +681,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
participants.append(GroupCallParticipantsContext.Participant(
peer: accountPeer,
ssrc: 0,
jsonParams: nil,
joinTimestamp: strongSelf.temporaryJoinTimestamp,
activityTimestamp: nil,
muteState: GroupCallParticipantsContext.Participant.MuteState(canUnmute: true)
@ -785,7 +800,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
break
default:
if case let .active(callInfo) = internalState {
let callContext = OngoingGroupCallContext()
let callContext = OngoingGroupCallContext(video: self.videoCapturer)
self.incomingVideoSourcePromise.set(callContext.videoSources)
self.callContext = callContext
self.requestDisposable.set((callContext.joinPayload
|> take(1)
@ -929,12 +945,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
self.ssrcMapping.removeAll()
var ssrcs: [UInt32] = []
var addedParticipants: [(UInt32, String?)] = []
for participant in initialState.participants {
self.ssrcMapping[participant.ssrc] = participant.peer.id
ssrcs.append(participant.ssrc)
if participant.peer.id != self.accountContext.account.peerId {
addedParticipants.append((participant.ssrc, participant.jsonParams))
}
}
self.callContext?.setJoinResponse(payload: clientParams, ssrcs: ssrcs)
self.callContext?.setJoinResponse(payload: clientParams, participants: addedParticipants)
let accountContext = self.accountContext
let peerId = self.peerId
@ -1462,4 +1480,69 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
/// Forwards the "default participants are muted" flag to the active
/// participants context; a no-op when no context exists yet.
public func updateDefaultParticipantsAreMuted(isMuted: Bool) {
    guard let participantsContext = self.participantsContext else {
        return
    }
    participantsContext.updateDefaultParticipantsAreMuted(isMuted: isMuted)
}
/// Creates a video view for the remote stream identified by `source`,
/// adapting the low-level call-context view into a `PresentationCallVideoView`.
/// Completes with nil when no call context exists or the view cannot be made.
public func makeIncomingVideoView(source: UInt32, completion: @escaping (PresentationCallVideoView?) -> Void) {
    self.callContext?.makeIncomingVideoView(source: source, completion: { view in
        guard let view = view else {
            completion(nil)
            return
        }
        // Capture the setters up-front so the wrapper closures below do not
        // retain `view` strongly through repeated property lookups.
        let onFirstFrame = view.setOnFirstFrameReceived
        let onOrientation = view.setOnOrientationUpdated
        let onMirror = view.setOnIsMirroredUpdated
        completion(PresentationCallVideoView(
            holder: view,
            view: view.view,
            setOnFirstFrameReceived: { f in
                onFirstFrame(f)
            },
            getOrientation: { [weak view] in
                guard let view = view else {
                    // Fall back to the identity orientation once the view is gone.
                    return .rotation0
                }
                switch view.getOrientation() {
                case .rotation0:
                    return .rotation0
                case .rotation90:
                    return .rotation90
                case .rotation180:
                    return .rotation180
                case .rotation270:
                    return .rotation270
                }
            },
            getAspect: { [weak view] in
                return view?.getAspect() ?? 0.0
            },
            setOnOrientationUpdated: { f in
                onOrientation { value, aspect in
                    let mapped: PresentationCallVideoView.Orientation
                    switch value {
                    case .rotation0:
                        mapped = .rotation0
                    case .rotation90:
                        mapped = .rotation90
                    case .rotation180:
                        mapped = .rotation180
                    case .rotation270:
                        mapped = .rotation270
                    }
                    f?(mapped, aspect)
                }
            },
            setOnIsMirroredUpdated: { f in
                onMirror { value in
                    f?(value)
                }
            }
        ))
    })
}
}

View File

@ -118,6 +118,86 @@ private final class VoiceChatControllerTitleNode: ASDisplayNode {
}
}
/// Display node that hosts a single participant's video view, keeping it
/// aspect-filled and rotated to match the stream's reported orientation.
final class GroupVideoNode: ASDisplayNode {
    private let videoView: PresentationCallVideoView
    private var validLayout: CGSize?

    init(videoView: PresentationCallVideoView) {
        self.videoView = videoView

        super.init()

        self.backgroundColor = .black
        self.clipsToBounds = true
        self.cornerRadius = 8.0

        self.view.addSubview(videoView.view)

        // Re-run layout on the main queue whenever the stream's geometry can change.
        videoView.setOnFirstFrameReceived({ [weak self] _ in
            Queue.mainQueue().async {
                self?.relayoutIfNeeded()
            }
        })
        videoView.setOnOrientationUpdated({ [weak self] _, _ in
            Queue.mainQueue().async {
                self?.relayoutIfNeeded()
            }
        })
    }

    /// Re-applies the last known layout, if one was ever set.
    private func relayoutIfNeeded() {
        if let size = self.validLayout {
            self.updateLayout(size: size, transition: .immediate)
        }
    }

    // NOTE(review): the `transition` parameter is currently ignored and layout
    // is always applied immediately — this matches the original behavior, where
    // a local `.immediate` transition shadowed the parameter.
    func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
        self.validLayout = size

        var aspect = self.videoView.getAspect()
        if aspect <= 0.01 {
            // No frame received yet; assume a portrait 3:4 stream.
            aspect = 3.0 / 4.0
        }

        // Rotation angle plus the aspect ratio of the rotated content.
        let angle: CGFloat
        let rotatedAspect: CGFloat
        switch self.videoView.getOrientation() {
        case .rotation0:
            (angle, rotatedAspect) = (0.0, aspect)
        case .rotation90:
            (angle, rotatedAspect) = (CGFloat.pi / 2.0, 1 / aspect)
        case .rotation180:
            (angle, rotatedAspect) = (CGFloat.pi, aspect)
        case .rotation270:
            (angle, rotatedAspect) = (CGFloat.pi * 3.0 / 2.0, 1 / aspect)
        }

        let filledSize = CGSize(width: 100.0, height: rotatedAspect * 100.0).aspectFilled(size)
        var videoFrame = CGRect(
            origin: CGPoint(
                x: floor((size.width - filledSize.width) / 2.0),
                y: floor((size.height - filledSize.height) / 2.0)
            ),
            size: filledSize
        )
        videoFrame.size.width = ceil(videoFrame.size.width)
        videoFrame.size.height = ceil(videoFrame.size.height)
        // Overscan by 1pt on each side to hide edge artifacts under the rounded corners.
        videoFrame = videoFrame.insetBy(dx: -1.0, dy: -1.0)

        // Position via center/bounds so the rotation transform composes correctly.
        self.videoView.view.center = CGPoint(x: videoFrame.midX, y: videoFrame.midY)
        self.videoView.view.bounds = CGRect(origin: CGPoint(), size: videoFrame.size)

        let immediateTransition: ContainedViewLayoutTransition = .immediate
        immediateTransition.updateTransformRotation(view: self.videoView.view, angle: angle)
    }
}
public final class VoiceChatController: ViewController {
private final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate {
private struct ListTransition {
@ -452,6 +532,10 @@ public final class VoiceChatController: ViewController {
private let inviteDisposable = MetaDisposable()
private let memberEventsDisposable = MetaDisposable()
private let voiceSourcesDisposable = MetaDisposable()
private var requestedVideoSources = Set<UInt32>()
private var videoNodes: [GroupVideoNode] = []
init(controller: VoiceChatController, sharedContext: SharedAccountContext, call: PresentationGroupCall) {
self.controller = controller
@ -1182,6 +1266,29 @@ public final class VoiceChatController: ViewController {
strongSelf.presentUndoOverlay(content: .invitedToVoiceChat(context: strongSelf.context, peer: event.peer, text: strongSelf.presentationData.strings.VoiceChat_PeerJoinedText(event.peer.displayTitle(strings: strongSelf.presentationData.strings, displayOrder: strongSelf.presentationData.nameDisplayOrder)).0), action: { _ in return false })
}
}))
self.voiceSourcesDisposable.set((self.call.incomingVideoSources
|> deliverOnMainQueue).start(next: { [weak self] sources in
guard let strongSelf = self else {
return
}
for source in sources {
if !strongSelf.requestedVideoSources.contains(source) {
strongSelf.requestedVideoSources.insert(source)
strongSelf.call.makeIncomingVideoView(source: source, completion: { videoView in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
return
}
strongSelf.videoNodes.append(GroupVideoNode(videoView: videoView))
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
}
}
})
}
}
}))
}
deinit {
@ -1195,6 +1302,8 @@ public final class VoiceChatController: ViewController {
self.audioLevelsDisposable?.dispose()
self.myAudioLevelDisposable?.dispose()
self.inviteDisposable.dispose()
self.memberEventsDisposable.dispose()
self.voiceSourcesDisposable.dispose()
}
override func didLoad() {
@ -1782,6 +1891,23 @@ public final class VoiceChatController: ViewController {
self.updateButtons(transition: transition)
var currentVideoOrigin = CGPoint(x: 4.0, y: (layout.statusBarHeight ?? 0.0) + 4.0)
for videoNode in self.videoNodes {
let videoSize = CGSize(width: 100.0, height: 100.0)
if currentVideoOrigin.x + videoSize.width > layout.size.width {
currentVideoOrigin.x = 0.0
currentVideoOrigin.y += videoSize.height
}
videoNode.frame = CGRect(origin: currentVideoOrigin, size: videoSize)
videoNode.updateLayout(size: videoSize, transition: .immediate)
if videoNode.supernode == nil {
self.contentContainer.addSubnode(videoNode)
}
currentVideoOrigin.x += videoSize.width + 4.0
}
let sideButtonMinimalInset: CGFloat = 16.0
let sideButtonOffset = min(36.0, floor((((size.width - 144.0) / 2.0) - sideButtonSize.width) / 2.0))
let sideButtonOrigin = max(sideButtonMinimalInset, floor((size.width - 144.0) / 2.0) - sideButtonOffset - sideButtonSize.width)

View File

@ -88,7 +88,7 @@ public func getCurrentGroupCall(account: Account, callId: Int64, accessHash: Int
loop: for participant in participants {
switch participant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
guard let peer = transaction.getPeer(peerId) else {
@ -99,9 +99,17 @@ public func getCurrentGroupCall(account: Account, callId: Int64, accessHash: Int
let canUnmute = (flags & (1 << 2)) != 0
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: canUnmute)
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
parsedParticipants.append(GroupCallParticipantsContext.Participant(
peer: peer,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState
@ -223,7 +231,7 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
loop: for participant in participants {
switch participant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
guard let peer = transaction.getPeer(peerId) else {
@ -234,9 +242,17 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
let canUnmute = (flags & (1 << 2)) != 0
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: canUnmute)
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
parsedParticipants.append(GroupCallParticipantsContext.Participant(
peer: peer,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState
@ -552,6 +568,7 @@ public final class GroupCallParticipantsContext {
public var peer: Peer
public var ssrc: UInt32
public var jsonParams: String?
public var joinTimestamp: Int32
public var activityTimestamp: Double?
public var muteState: MuteState?
@ -559,12 +576,14 @@ public final class GroupCallParticipantsContext {
public init(
peer: Peer,
ssrc: UInt32,
jsonParams: String?,
joinTimestamp: Int32,
activityTimestamp: Double?,
muteState: MuteState?
) {
self.peer = peer
self.ssrc = ssrc
self.jsonParams = jsonParams
self.joinTimestamp = joinTimestamp
self.activityTimestamp = activityTimestamp
self.muteState = muteState
@ -665,10 +684,29 @@ public final class GroupCallParticipantsContext {
public var peerId: PeerId
public var ssrc: UInt32
public var jsonParams: String?
public var joinTimestamp: Int32
public var activityTimestamp: Double?
public var muteState: Participant.MuteState?
public var participationStatusChange: ParticipationStatusChange
init(
peerId: PeerId,
ssrc: UInt32,
jsonParams: String?,
joinTimestamp: Int32,
activityTimestamp: Double?,
muteState: Participant.MuteState?,
participationStatusChange: ParticipationStatusChange
) {
self.peerId = peerId
self.ssrc = ssrc
self.jsonParams = jsonParams
self.joinTimestamp = joinTimestamp
self.activityTimestamp = activityTimestamp
self.muteState = muteState
self.participationStatusChange = participationStatusChange
}
}
public var participantUpdates: [ParticipantUpdate]
@ -1007,6 +1045,7 @@ public final class GroupCallParticipantsContext {
let participant = Participant(
peer: peer,
ssrc: participantUpdate.ssrc,
jsonParams: participantUpdate.jsonParams,
joinTimestamp: participantUpdate.joinTimestamp,
activityTimestamp: activityTimestamp,
muteState: participantUpdate.muteState
@ -1166,7 +1205,7 @@ public final class GroupCallParticipantsContext {
extension GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate {
init(_ apiParticipant: Api.GroupCallParticipant) {
switch apiParticipant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
var muteState: GroupCallParticipantsContext.Participant.MuteState?
@ -1186,9 +1225,18 @@ extension GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate {
participationStatusChange = .none
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
self.init(
peerId: peerId,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState,
@ -1203,7 +1251,7 @@ extension GroupCallParticipantsContext.Update.StateUpdate {
var participantUpdates: [GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate] = []
for participant in participants {
switch participant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
var muteState: GroupCallParticipantsContext.Participant.MuteState?
@ -1223,9 +1271,18 @@ extension GroupCallParticipantsContext.Update.StateUpdate {
participationStatusChange = .none
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
participantUpdates.append(GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate(
peerId: peerId,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState,

View File

@ -0,0 +1,159 @@
import Foundation
import SwiftSignalKit
import TgVoipWebrtc
import Network
/// Adapts a SwiftSignalKit `Queue` to the `OngoingCallThreadLocalContextQueueWebrtc`
/// protocol expected by the Obj-C call context.
private final class ContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueueWebrtc {
    private let queue: Queue

    init(queue: Queue) {
        self.queue = queue

        super.init()
    }

    /// Runs `work` asynchronously on the wrapped queue.
    func dispatch(_ work: @escaping () -> Void) {
        self.queue.async {
            work()
        }
    }

    /// Runs `work` on the wrapped queue after `seconds`.
    func dispatch(after seconds: Double, block work: @escaping () -> Void) {
        self.queue.after(seconds, work)
    }

    /// Whether the caller is already executing on the wrapped queue.
    func isCurrent() -> Bool {
        return self.queue.isCurrent()
    }
}
/// Debug-only group call transport that tunnels packets produced by
/// `GroupCallCustomThreadLocalContext` over a plain UDP connection to a
/// custom reflector endpoint.
///
/// Fixes over the original: the `NWConnection` is now cancelled on
/// `stop()`/`deinit` (it was previously leaked), the receive loop stops
/// re-arming after teardown, and the reflector endpoint is parameterized
/// (with the previous hard-coded values as backward-compatible defaults).
@available(iOS 12.0, *)
public final class DebugOngoingGroupCallContext {
    @available(iOS 12.0, *)
    private final class Impl {
        let queue: Queue
        let context: GroupCallCustomThreadLocalContext
        let connection: NWConnection
        let address: NWEndpoint.Host
        let port: NWEndpoint.Port

        var pingTimer: SwiftSignalKit.Timer?
        // Set by stop(); prevents the receive loop from re-arming (and spinning
        // on errors) once the connection has been cancelled.
        private var isStopped = false

        init(queue: Queue, host: String, port: UInt16) {
            self.queue = queue

            var sendPacketImpl: ((Data) -> Void)?
            self.context = GroupCallCustomThreadLocalContext(
                queue: ContextQueueImpl(queue: queue),
                sendPacket: { data in
                    sendPacketImpl?(data)
                }
            )

            // NWEndpoint.Host parses IPv4/IPv6 literals as well as host names,
            // so no force-unwrap is needed here.
            self.address = NWEndpoint.Host(host)
            // Every UInt16 value is a valid port, so this unwrap cannot fail.
            self.port = NWEndpoint.Port(rawValue: port)!
            self.connection = NWConnection(host: self.address, port: self.port, using: .udp)

            let queue = self.queue
            sendPacketImpl = { [weak self] data in
                queue.async {
                    guard let strongSelf = self else {
                        return
                    }
                    strongSelf.sendPacket(data: data)
                }
            }

            self.connection.stateUpdateHandler = { [weak self] newState in
                switch newState {
                case .ready:
                    print("State: Ready")
                    queue.async {
                        guard let strongSelf = self else {
                            return
                        }
                        strongSelf.beginNetworking()
                    }
                case .setup:
                    print("State: Setup")
                case .cancelled:
                    print("State: Cancelled")
                case .preparing:
                    print("State: Preparing")
                case let .failed(error):
                    print("State: Failed (\(error))")
                case let .waiting(error):
                    print("State: Waiting (\(error))")
                default:
                    print("ERROR! State not defined!\n")
                }
            }
            // Callbacks arrive on a global queue; each handler hops onto `queue` itself.
            self.connection.start(queue: .global())
        }

        deinit {
            self.pingTimer?.invalidate()
            // Tear down the UDP connection; previously it was leaked here.
            self.connection.cancel()
        }

        /// Starts the receive loop and a 1 Hz single-byte keepalive ping.
        private func beginNetworking() {
            self.receiveMessage()

            let pingTimer = SwiftSignalKit.Timer(timeout: 1.0, repeat: true, completion: { [weak self] in
                guard let strongSelf = self else {
                    return
                }
                strongSelf.sendPacket(data: Data(count: 1))
            }, queue: self.queue)
            self.pingTimer = pingTimer
            pingTimer.start()
        }

        /// Receives one datagram, forwards it to the call context, then re-arms
        /// itself — unless the context has been stopped.
        private func receiveMessage() {
            let queue = self.queue
            self.connection.receiveMessage { [weak self] data, _, _, error in
                queue.async {
                    guard let strongSelf = self, !strongSelf.isStopped else {
                        return
                    }
                    if let error = error {
                        print("Receive error: \(error)")
                    } else if let data = data {
                        strongSelf.context.receivePacket(data)
                    }
                    strongSelf.receiveMessage()
                }
            }
        }

        /// Sends one datagram to the reflector, logging (but not propagating) failures.
        private func sendPacket(data: Data) {
            self.connection.send(content: data, completion: NWConnection.SendCompletion.contentProcessed { error in
                if let error = error {
                    print("Send error: \(error)")
                }
            })
        }

        /// Stops the call context, the keepalive timer, and the UDP connection.
        func stop() {
            self.isStopped = true
            self.context.stop()
            self.pingTimer?.invalidate()
            self.pingTimer = nil
            self.connection.cancel()
        }
    }

    private let queue = Queue()
    private let impl: QueueLocalObject<Impl>

    /// - Parameters:
    ///   - host: Reflector host or IP literal; defaults to the previously
    ///     hard-coded development machine.
    ///   - port: Reflector UDP port; defaults to the previously hard-coded 9999.
    public init(host: String = "192.168.93.160", port: UInt16 = 9999) {
        let queue = self.queue
        self.impl = QueueLocalObject(queue: queue, generate: {
            return Impl(queue: queue, host: host, port: port)
        })
    }

    public func stop() {
        self.impl.with { impl in
            impl.stop()
        }
    }
}

View File

@ -48,12 +48,15 @@ public final class OngoingGroupCallContext {
let isMuted = ValuePromise<Bool>(true, ignoreRepeated: true)
let audioLevels = ValuePipe<[(AudioLevelKey, Float, Bool)]>()
init(queue: Queue, inputDeviceId: String, outputDeviceId: String) {
let videoSources = ValuePromise<Set<UInt32>>(Set(), ignoreRepeated: true)
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?) {
self.queue = queue
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
let videoSources = self.videoSources
self.context = GroupCallThreadLocalContext(
queue: ContextQueueImpl(queue: queue),
networkStateUpdated: { state in
@ -63,7 +66,11 @@ public final class OngoingGroupCallContext {
audioLevelsUpdatedImpl?(levels)
},
inputDeviceId: inputDeviceId,
outputDeviceId: outputDeviceId
outputDeviceId: outputDeviceId,
videoCapturer: video?.impl,
incomingVideoSourcesUpdated: { ssrcs in
videoSources.set(Set(ssrcs.map { $0.uint32Value }))
}
)
let queue = self.queue
@ -116,9 +123,10 @@ public final class OngoingGroupCallContext {
})
}
func setJoinResponse(payload: String, ssrcs: [UInt32]) {
self.context.setJoinResponsePayload(payload)
self.addSsrcs(ssrcs: ssrcs)
func setJoinResponse(payload: String, participants: [(UInt32, String?)]) {
self.context.setJoinResponsePayload(payload, participants: participants.map { participant -> OngoingGroupCallParticipantDescription in
return OngoingGroupCallParticipantDescription(audioSsrc: participant.0, jsonParams: participant.1)
})
}
func addSsrcs(ssrcs: [UInt32]) {
@ -137,6 +145,15 @@ public final class OngoingGroupCallContext {
self.context.setVolumeForSsrc(ssrc, volume: volume)
}
func addParticipants(participants: [(UInt32, String?)]) {
if participants.isEmpty {
return
}
self.context.addParticipants(participants.map { participant -> OngoingGroupCallParticipantDescription in
return OngoingGroupCallParticipantDescription(audioSsrc: participant.0, jsonParams: participant.1)
})
}
func stop() {
self.context.stop()
}
@ -149,9 +166,49 @@ public final class OngoingGroupCallContext {
func switchAudioInput(_ deviceId: String) {
self.context.switchAudioInput(deviceId)
}
func switchAudioOutput(_ deviceId: String) {
self.context.switchAudioOutput(deviceId)
}
func makeIncomingVideoView(source: UInt32, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
self.context.makeIncomingVideoView(withSsrc: source, completion: { view in
if let view = view {
completion(OngoingCallContextPresentationCallVideoView(
view: view,
setOnFirstFrameReceived: { [weak view] f in
view?.setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
return OngoingCallVideoOrientation(view.orientation)
} else {
return .rotation0
}
},
getAspect: { [weak view] in
if let view = view {
return view.aspect
} else {
return 0.0
}
},
setOnOrientationUpdated: { [weak view] f in
view?.setOnOrientationUpdated { value, aspect in
f?(OngoingCallVideoOrientation(value), aspect)
}
},
setOnIsMirroredUpdated: { [weak view] f in
view?.setOnIsMirroredUpdated { value in
f?(value)
}
}
))
} else {
completion(nil)
}
})
}
}
private let queue = Queue()
@ -205,10 +262,22 @@ public final class OngoingGroupCallContext {
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "") {
public var videoSources: Signal<Set<UInt32>, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.impl.with { impl in
disposable.set(impl.videoSources.get().start(next: { value in
subscriber.putNext(value)
}))
}
return disposable
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?) {
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId)
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video)
})
}
@ -228,9 +297,9 @@ public final class OngoingGroupCallContext {
impl.switchAudioOutput(deviceId)
}
}
public func setJoinResponse(payload: String, ssrcs: [UInt32]) {
public func setJoinResponse(payload: String, participants: [(UInt32, String?)]) {
self.impl.with { impl in
impl.setJoinResponse(payload: payload, ssrcs: ssrcs)
impl.setJoinResponse(payload: payload, participants: participants)
}
}
@ -252,9 +321,21 @@ public final class OngoingGroupCallContext {
}
}
public func addParticipants(participants: [(UInt32, String?)]) {
self.impl.with { impl in
impl.addParticipants(participants: participants)
}
}
public func stop() {
self.impl.with { impl in
impl.stop()
}
}
public func makeIncomingVideoView(source: UInt32, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
self.impl.with { impl in
impl.makeIncomingVideoView(source: source, completion: completion)
}
}
}

View File

@ -157,21 +157,42 @@ typedef NS_ENUM(int32_t, GroupCallNetworkState) {
GroupCallNetworkStateConnected
};
/// Value object describing one remote group-call participant as passed across
/// the Swift/Obj-C boundary (see OngoingGroupCallContext.swift, which builds
/// these from `(audioSsrc, jsonParams)` tuples).
@interface OngoingGroupCallParticipantDescription : NSObject
/// Synchronization source (SSRC) of the participant's audio RTP stream.
@property (nonatomic, readonly) uint32_t audioSsrc;
/// Opaque JSON parameter string received from the server for this participant;
/// nil when the server supplied none.
@property (nonatomic, strong, readonly) NSString * _Nullable jsonParams;
- (instancetype _Nonnull)initWithAudioSsrc:(uint32_t)audioSsrc jsonParams:(NSString * _Nullable)jsonParams;
@end
@interface GroupCallThreadLocalContext : NSObject
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated;
- (void)stop;
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload;
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload participants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants;
- (void)removeSsrcs:(NSArray<NSNumber *> * _Nonnull)ssrcs;
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants;
- (void)setIsMuted:(bool)isMuted;
- (void)setVolumeForSsrc:(uint32_t)ssrc volume:(double)volume;
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
- (void)switchAudioInput:(NSString * _Nonnull)deviceId;
- (void)makeIncomingVideoViewWithSsrc:(uint32_t)ssrc completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
@end
// Group-call context variant where the application transports packets
// itself: outgoing packets are delivered through the sendPacket block and
// incoming packets are injected with -receivePacket:.
@interface GroupCallCustomThreadLocalContext : NSObject

- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue sendPacket:(void (^ _Nonnull)(NSData * _Nonnull))sendPacket;
- (void)stop;
- (void)receivePacket:(NSData * _Nonnull)data;

@end

View File

@ -22,6 +22,7 @@
#endif
#import "group/GroupInstanceImpl.h"
#import "group/GroupInstanceCustomImpl.h"
@implementation OngoingCallConnectionDescriptionWebrtc
@ -817,12 +818,13 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
@implementation GroupCallThreadLocalContext
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated {
self = [super init];
if (self != nil) {
_queue = queue;
_networkStateUpdated = [networkStateUpdated copy];
_videoCapturer = videoCapturer;
__weak GroupCallThreadLocalContext *weakSelf = self;
_instance.reset(new tgcalls::GroupInstanceImpl((tgcalls::GroupInstanceDescriptor){
@ -845,7 +847,15 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
audioLevelsUpdated(result);
},
.initialInputDeviceId = inputDeviceId.UTF8String,
.initialOutputDeviceId = outputDeviceId.UTF8String
.initialOutputDeviceId = outputDeviceId.UTF8String,
.videoCapture = [_videoCapturer getInterface],
.incomingVideoSourcesUpdated = [incomingVideoSourcesUpdated](std::vector<uint32_t> const &ssrcs) {
NSMutableArray<NSNumber *> *mappedSources = [[NSMutableArray alloc] init];
for (auto it : ssrcs) {
[mappedSources addObject:@(it)];
}
incomingVideoSourcesUpdated(mappedSources);
}
}));
}
return self;
@ -880,6 +890,79 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
dict[@"fingerprints"] = fingerprints;
NSMutableArray *parsedVideoSsrcGroups = [[NSMutableArray alloc] init];
NSMutableArray *parsedVideoSources = [[NSMutableArray alloc] init];
for (auto &group : payload.videoSourceGroups) {
NSMutableDictionary *parsedGroup = [[NSMutableDictionary alloc] init];
parsedGroup[@"semantics"] = [NSString stringWithUTF8String:group.semantics.c_str()];
NSMutableArray *sources = [[NSMutableArray alloc] init];
for (auto &source : group.ssrcs) {
[sources addObject:@(source)];
if (![parsedVideoSources containsObject:@(source)]) {
[parsedVideoSources addObject:@(source)];
}
}
parsedGroup[@"sources"] = sources;
[parsedVideoSsrcGroups addObject:parsedGroup];
}
if (parsedVideoSsrcGroups.count != 0) {
dict[@"ssrc-groups"] = parsedVideoSsrcGroups;
}
if (parsedVideoSources.count != 0) {
//dict[@"sources"] = parsedVideoSources;
}
NSMutableArray *videoPayloadTypes = [[NSMutableArray alloc] init];
for (auto &payloadType : payload.videoPayloadTypes) {
NSMutableDictionary *parsedType = [[NSMutableDictionary alloc] init];
parsedType[@"id"] = @(payloadType.id);
NSString *name = [NSString stringWithUTF8String:payloadType.name.c_str()];
parsedType[@"name"] = name;
parsedType[@"clockrate"] = @(payloadType.clockrate);
if (![name isEqualToString:@"rtx"]) {
parsedType[@"channels"] = @(payloadType.channels);
}
NSMutableDictionary *parsedParameters = [[NSMutableDictionary alloc] init];
for (auto &it : payloadType.parameters) {
NSString *key = [NSString stringWithUTF8String:it.first.c_str()];
NSString *value = [NSString stringWithUTF8String:it.second.c_str()];
parsedParameters[key] = value;
}
if (parsedParameters.count != 0) {
parsedType[@"parameters"] = parsedParameters;
}
if (![name isEqualToString:@"rtx"]) {
NSMutableArray *parsedFbs = [[NSMutableArray alloc] init];
for (auto &it : payloadType.feedbackTypes) {
NSMutableDictionary *parsedFb = [[NSMutableDictionary alloc] init];
parsedFb[@"type"] = [NSString stringWithUTF8String:it.type.c_str()];
if (it.subtype.size() != 0) {
parsedFb[@"subtype"] = [NSString stringWithUTF8String:it.subtype.c_str()];
}
[parsedFbs addObject:parsedFb];
}
parsedType[@"rtcp-fbs"] = parsedFbs;
}
[videoPayloadTypes addObject:parsedType];
}
if (videoPayloadTypes.count != 0) {
dict[@"payload-types"] = videoPayloadTypes;
}
NSMutableArray *parsedExtensions = [[NSMutableArray alloc] init];
for (auto &it : payload.videoExtensionMap) {
NSMutableDictionary *parsedExtension = [[NSMutableDictionary alloc] init];
parsedExtension[@"id"] = @(it.first);
parsedExtension[@"uri"] = [NSString stringWithUTF8String:it.second.c_str()];
[parsedExtensions addObject:parsedExtension];
}
if (parsedExtensions.count != 0) {
dict[@"rtp-hdrexts"] = parsedExtensions;
}
NSData *data = [NSJSONSerialization dataWithJSONObject:dict options:0 error:nil];
NSString *string = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
@ -888,7 +971,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload {
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload participants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants {
tgcalls::GroupJoinResponsePayload result;
NSData *payloadData = [payload dataUsingEncoding:NSUTF8StringEncoding];
@ -1031,8 +1114,19 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
result.candidates.push_back(candidate);
}
std::vector<tgcalls::GroupParticipantDescription> parsedParticipants;
for (OngoingGroupCallParticipantDescription *participant in participants) {
tgcalls::GroupParticipantDescription parsedParticipant;
parsedParticipant.audioSsrc = participant.audioSsrc;
if (participant.jsonParams.length != 0) {
[self parseJsonIntoParticipant:participant.jsonParams participant:parsedParticipant];
}
parsedParticipants.push_back(parsedParticipant);
}
if (_instance) {
_instance->setJoinResponsePayload(result);
_instance->setJoinResponsePayload(result, std::move(parsedParticipants));
}
}
@ -1046,6 +1140,148 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
// Parses a participant's JSON-encoded media description into the given
// tgcalls::GroupParticipantDescription. Recognized top-level keys are
// "ssrc-groups", "rtp-hdrexts" and "payload-types". Malformed or
// unexpectedly-typed entries are skipped silently, so a partially valid
// payload still yields whatever could be parsed.
- (void)parseJsonIntoParticipant:(NSString *)payload participant:(tgcalls::GroupParticipantDescription &)participant {
    NSData *payloadData = [payload dataUsingEncoding:NSUTF8StringEncoding];
    if (payloadData == nil) {
        return;
    }
    // The top-level JSON value must be an object; anything else is ignored.
    NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:payloadData options:0 error:nil];
    if (![dict isKindOfClass:[NSDictionary class]]) {
        return;
    }
    // "ssrc-groups": [{ "semantics": string, "sources": [number] }]
    NSArray *ssrcGroups = dict[@"ssrc-groups"];
    if ([ssrcGroups isKindOfClass:[NSArray class]]) {
        for (NSDictionary *group in ssrcGroups) {
            if (![group isKindOfClass:[NSDictionary class]]) {
                continue;
            }
            NSString *semantics = group[@"semantics"];
            if (![semantics isKindOfClass:[NSString class]]) {
                continue;
            }
            NSArray *sources = group[@"sources"];
            if (![sources isKindOfClass:[NSArray class]]) {
                continue;
            }
            tgcalls::GroupJoinPayloadVideoSourceGroup groupDesc;
            // Non-numeric entries are dropped individually; the group itself
            // is still added even if it ends up with no ssrcs.
            for (NSNumber *nSsrc in sources) {
                if ([nSsrc isKindOfClass:[NSNumber class]]) {
                    groupDesc.ssrcs.push_back([nSsrc unsignedIntValue]);
                }
            }
            groupDesc.semantics = [semantics UTF8String];
            participant.videoSourceGroups.push_back(groupDesc);
        }
    }
    // "rtp-hdrexts": [{ "id": number, "uri": string }]
    NSArray *hdrExts = dict[@"rtp-hdrexts"];
    if ([hdrExts isKindOfClass:[NSArray class]]) {
        for (NSDictionary *extDict in hdrExts) {
            if (![extDict isKindOfClass:[NSDictionary class]]) {
                continue;
            }
            NSNumber *nId = extDict[@"id"];
            if (![nId isKindOfClass:[NSNumber class]]) {
                continue;
            }
            NSString *uri = extDict[@"uri"];
            if (![uri isKindOfClass:[NSString class]]) {
                continue;
            }
            participant.videoExtensionMap.push_back(std::make_pair((uint32_t)[nId unsignedIntValue], (std::string)[uri UTF8String]));
        }
    }
    // "payload-types": [{ "id", "name", "clockrate"?, "channels"?,
    //                     "rtcp-fbs"?: [{ "type", "subtype"? }],
    //                     "parameters"?: { string: string } }]
    NSArray *payloadTypes = dict[@"payload-types"];
    if ([payloadTypes isKindOfClass:[NSArray class]]) {
        for (NSDictionary *payloadDict in payloadTypes) {
            if (![payloadDict isKindOfClass:[NSDictionary class]]) {
                continue;
            }
            NSNumber *nId = payloadDict[@"id"];
            if (![nId isKindOfClass:[NSNumber class]]) {
                continue;
            }
            // clockrate/channels are optional but must be numbers when present;
            // when absent, messaging nil below yields 0 for those fields.
            NSNumber *nClockrate = payloadDict[@"clockrate"];
            if (nClockrate != nil && ![nClockrate isKindOfClass:[NSNumber class]]) {
                continue;
            }
            NSNumber *nChannels = payloadDict[@"channels"];
            if (nChannels != nil && ![nChannels isKindOfClass:[NSNumber class]]) {
                continue;
            }
            NSString *name = payloadDict[@"name"];
            if (![name isKindOfClass:[NSString class]]) {
                continue;
            }
            tgcalls::GroupJoinPayloadVideoPayloadType parsedPayload;
            parsedPayload.id = [nId unsignedIntValue];
            parsedPayload.clockrate = [nClockrate unsignedIntValue];
            parsedPayload.channels = [nChannels unsignedIntValue];
            parsedPayload.name = [name UTF8String];
            NSArray *fbs = payloadDict[@"rtcp-fbs"];
            if ([fbs isKindOfClass:[NSArray class]]) {
                for (NSDictionary *fbDict in fbs) {
                    if (![fbDict isKindOfClass:[NSDictionary class]]) {
                        continue;
                    }
                    NSString *type = fbDict[@"type"];
                    if (![type isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    NSString *subtype = fbDict[@"subtype"];
                    if (subtype != nil && ![subtype isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    tgcalls::GroupJoinPayloadVideoPayloadFeedbackType parsedFeedback;
                    parsedFeedback.type = [type UTF8String];
                    if (subtype != nil) {
                        parsedFeedback.subtype = [subtype UTF8String];
                    }
                    parsedPayload.feedbackTypes.push_back(parsedFeedback);
                }
            }
            NSDictionary *parameters = payloadDict[@"parameters"];
            if ([parameters isKindOfClass:[NSDictionary class]]) {
                for (NSString *nKey in parameters) {
                    if (![nKey isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    NSString *value = parameters[nKey];
                    if (![value isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    parsedPayload.parameters.push_back(std::make_pair((std::string)[nKey UTF8String], (std::string)[value UTF8String]));
                }
            }
            participant.videoPayloadTypes.push_back(parsedPayload);
        }
    }
}
// Forwards a batch of remote participant descriptions to the native call
// instance, converting each Objective-C description into its tgcalls
// counterpart (audio SSRC plus any JSON-supplied video parameters).
// No-op when the native instance has not been created.
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants {
    if (!_instance) {
        return;
    }
    std::vector<tgcalls::GroupParticipantDescription> nativeParticipants;
    nativeParticipants.reserve(participants.count);
    for (OngoingGroupCallParticipantDescription *description in participants) {
        tgcalls::GroupParticipantDescription native;
        native.audioSsrc = description.audioSsrc;
        if (description.jsonParams.length != 0) {
            [self parseJsonIntoParticipant:description.jsonParams participant:native];
        }
        nativeParticipants.push_back(std::move(native));
    }
    _instance->addParticipants(std::move(nativeParticipants));
}
- (void)setIsMuted:(bool)isMuted {
if (_instance) {
_instance->setIsMuted(isMuted);
@ -1069,5 +1305,98 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
// Creates (on the main queue) a renderer view for the incoming video stream
// with the given SSRC and attaches its frame sink to the native instance on
// the context queue. Prefers the Metal-backed view, falling back to OpenGL.
// NOTE(review): when `_instance` is nil the completion is never invoked —
// confirm callers tolerate that.
- (void)makeIncomingVideoViewWithSsrc:(uint32_t)ssrc completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
    if (_instance) {
        __weak GroupCallThreadLocalContext *weakSelf = self;
        // Capture the queue now; the block below must not touch self strongly.
        id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
        dispatch_async(dispatch_get_main_queue(), ^{
            if ([VideoMetalView isSupported]) {
                VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#if TARGET_OS_IPHONE
                remoteRenderer.videoContentMode = UIViewContentModeScaleToFill;
#else
                // NOTE(review): UIViewContentModeScaleAspect is presumably a
                // macOS shim defined by this project — confirm it exists.
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspect;
#endif
                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                // Attach the sink on the context queue; the context may have
                // been torn down by then, hence the weak/strong dance.
                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->setIncomingVideoOutput(ssrc, sink);
                    }
                }];
                // Completion fires on the main queue, possibly before the sink
                // has been attached on the context queue.
                completion(remoteRenderer);
            } else {
                GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];
                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->setIncomingVideoOutput(ssrc, sink);
                    }
                }];
                completion(remoteRenderer);
            }
        });
    }
}
@end
// Class extension holding the native tgcalls instance backing this context.
@interface GroupCallCustomThreadLocalContext () {
    std::unique_ptr<tgcalls::GroupInstanceCustomImpl> _impl;
}

@end
// Wrapper around tgcalls::GroupInstanceCustomImpl: a group-call transport
// in which the application ferries raw packets itself via the sendPacket
// callback (outgoing) and -receivePacket: (incoming).
@implementation GroupCallCustomThreadLocalContext

// queue is currently unused here; kept for signature parity with
// GroupCallThreadLocalContext.
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue sendPacket:(void (^ _Nonnull)(NSData * _Nonnull))sendPacket {
    self = [super init];
    if (self != nil) {
        // Explicitly copy the block before capturing it in a C++ lambda that
        // outlives this call, matching the [... copy] convention used for
        // callbacks elsewhere in this file (e.g. networkStateUpdated).
        void (^ _Nonnull sendPacketCopy)(NSData * _Nonnull) = [sendPacket copy];
        tgcalls::GroupInstanceCustomDescriptor descriptor;
        descriptor.sendPacket = [sendPacketCopy](std::vector<uint8_t> const &data) {
            if (sendPacketCopy) {
                // Copy the payload into an NSData the Obj-C side can own.
                sendPacketCopy([[NSData alloc] initWithBytes:data.data() length:data.size()]);
            }
        };
        _impl.reset(new tgcalls::GroupInstanceCustomImpl(std::move(descriptor)));
    }
    return self;
}

// Shuts the underlying instance down; safe to call if it was never created.
- (void)stop {
    if (_impl) {
        _impl->stop();
    }
}

// Feeds an inbound transport packet to the native instance, copying the
// bytes into a std::vector the C++ side takes ownership of.
- (void)receivePacket:(NSData * _Nonnull)data {
    if (_impl) {
        std::vector<uint8_t> mappedData;
        mappedData.resize(data.length);
        memcpy(mappedData.data(), data.bytes, data.length);
        _impl->receivePacket(std::move(mappedData));
    }
}

@end
// Value object pairing a participant's audio SSRC with optional
// JSON-encoded media parameters.
@implementation OngoingGroupCallParticipantDescription

- (instancetype _Nonnull)initWithAudioSsrc:(uint32_t)audioSsrc jsonParams:(NSString * _Nullable)jsonParams {
    self = [super init];
    if (self != nil) {
        _audioSsrc = audioSsrc;
        // NOTE(review): stored without copying (property is strong, not copy);
        // assumes callers pass effectively immutable strings — confirm.
        _jsonParams = jsonParams;
    }
    return self;
}

@end

1
third-party/webrtc/webrtc-ios vendored Submodule

@ -0,0 +1 @@
Subproject commit eb9d68429a5805f904e9b7605f3900cef6453333