This commit is contained in:
Ali 2020-12-25 19:01:39 +04:00
parent 4a6f9c05c5
commit 25e31bba43
13 changed files with 696 additions and 34 deletions

View File

@ -96,6 +96,7 @@ public final class PresentationCallVideoView {
case rotation270
}
public let holder: AnyObject
public let view: UIView
public let setOnFirstFrameReceived: (((Float) -> Void)?) -> Void
@ -105,6 +106,7 @@ public final class PresentationCallVideoView {
public let setOnIsMirroredUpdated: (((Bool) -> Void)?) -> Void
public init(
holder: AnyObject,
view: UIView,
setOnFirstFrameReceived: @escaping (((Float) -> Void)?) -> Void,
getOrientation: @escaping () -> Orientation,
@ -112,6 +114,7 @@ public final class PresentationCallVideoView {
setOnOrientationUpdated: @escaping (((Orientation, CGFloat) -> Void)?) -> Void,
setOnIsMirroredUpdated: @escaping (((Bool) -> Void)?) -> Void
) {
self.holder = holder
self.view = view
self.setOnFirstFrameReceived = setOnFirstFrameReceived
self.getOrientation = getOrientation
@ -301,6 +304,10 @@ public protocol PresentationGroupCall: class {
func invitePeer(_ peerId: PeerId) -> Bool
func removedPeer(_ peerId: PeerId)
var invitedPeers: Signal<[PeerId], NoError> { get }
var incomingVideoSources: Signal<Set<UInt32>, NoError> { get }
func makeIncomingVideoView(source: UInt32, completion: @escaping (PresentationCallVideoView?) -> Void)
}
public protocol PresentationCallManager: class {

View File

@ -139,7 +139,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[1511503333] = { return Api.InputEncryptedFile.parse_inputEncryptedFile($0) }
dict[767652808] = { return Api.InputEncryptedFile.parse_inputEncryptedFileBigUploaded($0) }
dict[-1456996667] = { return Api.messages.InactiveChats.parse_inactiveChats($0) }
dict[1454409673] = { return Api.GroupCallParticipant.parse_groupCallParticipant($0) }
dict[-461437776] = { return Api.GroupCallParticipant.parse_groupCallParticipant($0) }
dict[1443858741] = { return Api.messages.SentEncryptedMessage.parse_sentEncryptedMessage($0) }
dict[-1802240206] = { return Api.messages.SentEncryptedMessage.parse_sentEncryptedFile($0) }
dict[1571494644] = { return Api.ExportedMessageLink.parse_exportedMessageLink($0) }

View File

@ -5438,27 +5438,28 @@ public extension Api {
}
public enum GroupCallParticipant: TypeConstructorDescription {
case groupCallParticipant(flags: Int32, userId: Int32, date: Int32, activeDate: Int32?, source: Int32)
case groupCallParticipant(flags: Int32, userId: Int32, date: Int32, activeDate: Int32?, source: Int32, params: Api.DataJSON?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
case .groupCallParticipant(let flags, let userId, let date, let activeDate, let source):
case .groupCallParticipant(let flags, let userId, let date, let activeDate, let source, let params):
if boxed {
buffer.appendInt32(1454409673)
buffer.appendInt32(-461437776)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeInt32(userId, buffer: buffer, boxed: false)
serializeInt32(date, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 3) != 0 {serializeInt32(activeDate!, buffer: buffer, boxed: false)}
serializeInt32(source, buffer: buffer, boxed: false)
if Int(flags) & Int(1 << 6) != 0 {params!.serialize(buffer, true)}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
case .groupCallParticipant(let flags, let userId, let date, let activeDate, let source):
return ("groupCallParticipant", [("flags", flags), ("userId", userId), ("date", date), ("activeDate", activeDate), ("source", source)])
case .groupCallParticipant(let flags, let userId, let date, let activeDate, let source, let params):
return ("groupCallParticipant", [("flags", flags), ("userId", userId), ("date", date), ("activeDate", activeDate), ("source", source), ("params", params)])
}
}
@ -5473,13 +5474,18 @@ public extension Api {
if Int(_1!) & Int(1 << 3) != 0 {_4 = reader.readInt32() }
var _5: Int32?
_5 = reader.readInt32()
var _6: Api.DataJSON?
if Int(_1!) & Int(1 << 6) != 0 {if let signature = reader.readInt32() {
_6 = Api.parse(reader, signature: signature) as? Api.DataJSON
} }
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
let _c4 = (Int(_1!) & Int(1 << 3) == 0) || _4 != nil
let _c5 = _5 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 {
return Api.GroupCallParticipant.groupCallParticipant(flags: _1!, userId: _2!, date: _3!, activeDate: _4, source: _5!)
let _c6 = (Int(_1!) & Int(1 << 6) == 0) || _6 != nil
if _c1 && _c2 && _c3 && _c4 && _c5 && _c6 {
return Api.GroupCallParticipant.groupCallParticipant(flags: _1!, userId: _2!, date: _3!, activeDate: _4, source: _5!, params: _6)
}
else {
return nil

View File

@ -928,6 +928,7 @@ public final class PresentationCallImpl: PresentationCall {
let setOnOrientationUpdated = view.setOnOrientationUpdated
let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
completion(PresentationCallVideoView(
holder: view,
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)
@ -997,6 +998,7 @@ public final class PresentationCallImpl: PresentationCall {
let setOnOrientationUpdated = view.setOnOrientationUpdated
let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
completion(PresentationCallVideoView(
holder: view,
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)

View File

@ -454,6 +454,13 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var didConnectOnce: Bool = false
private var toneRenderer: PresentationCallToneRenderer?
private var videoCapturer: OngoingCallVideoCapturer?
private let incomingVideoSourcePromise = Promise<Set<UInt32>>(Set())
public var incomingVideoSources: Signal<Set<UInt32>, NoError> {
return self.incomingVideoSourcePromise.get()
}
init(
accountContext: AccountContext,
audioSession: ManagedAudioSession,
@ -477,6 +484,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.temporaryJoinTimestamp = Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970)
self.videoCapturer = OngoingCallVideoCapturer()
var didReceiveAudioOutputs = false
if !audioSession.getIsHeadsetPluggedIn() {
@ -579,6 +588,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
return
}
if case let .estabilished(callInfo, _, _, _) = strongSelf.internalState {
var addedParticipants: [(UInt32, String?)] = []
var removedSsrc: [UInt32] = []
for (callId, update) in updates {
if callId == callInfo.id {
@ -598,6 +608,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
strongSelf._canBeRemoved.set(.single(true))
}
} else if case .joined = participantUpdate.participationStatusChange {
addedParticipants.append((participantUpdate.ssrc, participantUpdate.jsonParams))
}
}
case let .call(isTerminated, _):
@ -610,6 +621,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
if !removedSsrc.isEmpty {
strongSelf.callContext?.removeSsrcs(ssrcs: removedSsrc)
}
if !addedParticipants.isEmpty {
strongSelf.callContext?.addParticipants(participants: addedParticipants)
}
}
})
@ -667,6 +681,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
participants.append(GroupCallParticipantsContext.Participant(
peer: accountPeer,
ssrc: 0,
jsonParams: nil,
joinTimestamp: strongSelf.temporaryJoinTimestamp,
activityTimestamp: nil,
muteState: GroupCallParticipantsContext.Participant.MuteState(canUnmute: true)
@ -785,7 +800,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
break
default:
if case let .active(callInfo) = internalState {
let callContext = OngoingGroupCallContext()
let callContext = OngoingGroupCallContext(video: self.videoCapturer)
self.incomingVideoSourcePromise.set(callContext.videoSources)
self.callContext = callContext
self.requestDisposable.set((callContext.joinPayload
|> take(1)
@ -929,12 +945,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
}
self.ssrcMapping.removeAll()
var ssrcs: [UInt32] = []
var addedParticipants: [(UInt32, String?)] = []
for participant in initialState.participants {
self.ssrcMapping[participant.ssrc] = participant.peer.id
ssrcs.append(participant.ssrc)
if participant.peer.id != self.accountContext.account.peerId {
addedParticipants.append((participant.ssrc, participant.jsonParams))
}
}
self.callContext?.setJoinResponse(payload: clientParams, ssrcs: ssrcs)
self.callContext?.setJoinResponse(payload: clientParams, participants: addedParticipants)
let accountContext = self.accountContext
let peerId = self.peerId
@ -1453,4 +1471,69 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
// Forwards the "new participants join muted" default to the active
// participants context; a no-op when no context exists yet.
public func updateDefaultParticipantsAreMuted(isMuted: Bool) {
self.participantsContext?.updateDefaultParticipantsAreMuted(isMuted: isMuted)
}
// Requests a video view for the incoming stream identified by `source`
// (the participant's SSRC) from the low-level call context, and wraps it
// in a presentation-layer `PresentationCallVideoView`.
//
// NOTE(review): if `callContext` is nil the optional chain makes this a
// no-op and `completion` is never invoked — confirm callers tolerate that.
public func makeIncomingVideoView(source: UInt32, completion: @escaping (PresentationCallVideoView?) -> Void) {
self.callContext?.makeIncomingVideoView(source: source, completion: { view in
if let view = view {
// Capture the accessors once so the closures below don't retain `view`
// strongly through repeated property lookups.
let setOnFirstFrameReceived = view.setOnFirstFrameReceived
let setOnOrientationUpdated = view.setOnOrientationUpdated
let setOnIsMirroredUpdated = view.setOnIsMirroredUpdated
completion(PresentationCallVideoView(
// `holder` keeps the underlying context view alive for as long as
// the presentation wrapper is retained.
holder: view,
view: view.view,
setOnFirstFrameReceived: { f in
setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
// Map the context-layer orientation enum onto the
// presentation-layer one, case by case.
let mappedValue: PresentationCallVideoView.Orientation
switch view.getOrientation() {
case .rotation0:
mappedValue = .rotation0
case .rotation90:
mappedValue = .rotation90
case .rotation180:
mappedValue = .rotation180
case .rotation270:
mappedValue = .rotation270
}
return mappedValue
} else {
// Fallback once the underlying view has been released.
return .rotation0
}
},
getAspect: { [weak view] in
if let view = view {
return view.getAspect()
} else {
// 0.0 signals "aspect unknown" to consumers.
return 0.0
}
},
setOnOrientationUpdated: { f in
setOnOrientationUpdated { value, aspect in
// Same enum mapping as getOrientation, applied to callback values.
let mappedValue: PresentationCallVideoView.Orientation
switch value {
case .rotation0:
mappedValue = .rotation0
case .rotation90:
mappedValue = .rotation90
case .rotation180:
mappedValue = .rotation180
case .rotation270:
mappedValue = .rotation270
}
f?(mappedValue, aspect)
}
},
setOnIsMirroredUpdated: { f in
setOnIsMirroredUpdated { value in
f?(value)
}
}
))
} else {
completion(nil)
}
})
}
}

View File

@ -118,6 +118,86 @@ private final class VoiceChatControllerTitleNode: ASDisplayNode {
}
}
// Display node hosting one group-call participant's video view.
// Clips to rounded corners on a black background and re-runs layout
// whenever the first frame arrives or the video orientation changes.
final class GroupVideoNode: ASDisplayNode {
private let videoView: PresentationCallVideoView
// Last size passed to updateLayout; used to re-layout from callbacks.
private var validLayout: CGSize?
init(videoView: PresentationCallVideoView) {
self.videoView = videoView
super.init()
self.backgroundColor = .black
self.clipsToBounds = true
self.cornerRadius = 8.0
self.view.addSubview(self.videoView.view)
// Aspect becomes known only after the first frame; re-layout then.
videoView.setOnFirstFrameReceived({ [weak self] _ in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
if let size = strongSelf.validLayout {
strongSelf.updateLayout(size: size, transition: .immediate)
}
}
})
// Rotation changes also require recomputing the fitted frame.
videoView.setOnOrientationUpdated({ [weak self] _, _ in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
}
if let size = strongSelf.validLayout {
strongSelf.updateLayout(size: size, transition: .immediate)
}
}
})
}
// Aspect-fills the (possibly rotated) video into `size`.
// NOTE(review): the `transition` parameter is shadowed by a local
// `.immediate` transition below and is effectively unused — confirm
// whether animated layout was intended.
func updateLayout(size: CGSize, transition: ContainedViewLayoutTransition) {
self.validLayout = size
let orientation = self.videoView.getOrientation()
var aspect = self.videoView.getAspect()
if aspect <= 0.01 {
// Aspect not yet known (first frame pending); assume 3:4.
aspect = 3.0 / 4.0
}
let rotatedAspect: CGFloat
let angle: CGFloat
switch orientation {
case .rotation0:
angle = 0.0
rotatedAspect = aspect
case .rotation90:
// Quarter turns swap width/height, so invert the aspect.
angle = CGFloat.pi / 2.0
rotatedAspect = 1 / aspect
case .rotation180:
angle = CGFloat.pi
rotatedAspect = aspect
case .rotation270:
angle = CGFloat.pi * 3.0 / 2.0
rotatedAspect = 1 / aspect
}
let rotatedVideoSize = CGSize(width: 100.0, height: rotatedAspect * 100.0).aspectFilled(size)
var rotatedVideoFrame = CGRect(origin: CGPoint(x: floor((size.width - rotatedVideoSize.width) / 2.0), y: floor((size.height - rotatedVideoSize.height) / 2.0)), size: rotatedVideoSize)
// Snap to integral pixels to avoid blurry rendering.
rotatedVideoFrame.origin.x = floor(rotatedVideoFrame.origin.x)
rotatedVideoFrame.origin.y = floor(rotatedVideoFrame.origin.y)
rotatedVideoFrame.size.width = ceil(rotatedVideoFrame.size.width)
rotatedVideoFrame.size.height = ceil(rotatedVideoFrame.size.height)
// Outset by 1pt on each side — presumably to hide hairline gaps at
// the clipped edges; TODO confirm.
rotatedVideoFrame = rotatedVideoFrame.insetBy(dx: -1.0, dy: -1.0)
// Set center/bounds (not frame) so the rotation transform applied
// below composes correctly with the layout.
self.videoView.view.center = rotatedVideoFrame.center
self.videoView.view.bounds = CGRect(origin: CGPoint(), size: rotatedVideoFrame.size)
let transition: ContainedViewLayoutTransition = .immediate
transition.updateTransformRotation(view: self.videoView.view, angle: angle)
}
}
public final class VoiceChatController: ViewController {
private final class Node: ViewControllerTracingNode, UIGestureRecognizerDelegate {
private struct ListTransition {
@ -452,6 +532,10 @@ public final class VoiceChatController: ViewController {
private let inviteDisposable = MetaDisposable()
private let memberEventsDisposable = MetaDisposable()
private let voiceSourcesDisposable = MetaDisposable()
private var requestedVideoSources = Set<UInt32>()
private var videoNodes: [GroupVideoNode] = []
init(controller: VoiceChatController, sharedContext: SharedAccountContext, call: PresentationGroupCall) {
self.controller = controller
@ -1182,6 +1266,29 @@ public final class VoiceChatController: ViewController {
strongSelf.presentUndoOverlay(content: .invitedToVoiceChat(context: strongSelf.context, peer: event.peer, text: strongSelf.presentationData.strings.VoiceChat_PeerJoinedText(event.peer.displayTitle(strings: strongSelf.presentationData.strings, displayOrder: strongSelf.presentationData.nameDisplayOrder)).0), action: { _ in return false })
}
}))
self.voiceSourcesDisposable.set((self.call.incomingVideoSources
|> deliverOnMainQueue).start(next: { [weak self] sources in
guard let strongSelf = self else {
return
}
for source in sources {
if !strongSelf.requestedVideoSources.contains(source) {
strongSelf.requestedVideoSources.insert(source)
strongSelf.call.makeIncomingVideoView(source: source, completion: { videoView in
Queue.mainQueue().async {
guard let strongSelf = self, let videoView = videoView else {
return
}
strongSelf.videoNodes.append(GroupVideoNode(videoView: videoView))
if let (layout, navigationHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationHeight: navigationHeight, transition: .immediate)
}
}
})
}
}
}))
}
deinit {
@ -1195,6 +1302,8 @@ public final class VoiceChatController: ViewController {
self.audioLevelsDisposable?.dispose()
self.myAudioLevelDisposable?.dispose()
self.inviteDisposable.dispose()
self.memberEventsDisposable.dispose()
self.voiceSourcesDisposable.dispose()
}
override func didLoad() {
@ -1782,6 +1891,23 @@ public final class VoiceChatController: ViewController {
self.updateButtons(transition: transition)
var currentVideoOrigin = CGPoint(x: 4.0, y: (layout.statusBarHeight ?? 0.0) + 4.0)
for videoNode in self.videoNodes {
let videoSize = CGSize(width: 100.0, height: 100.0)
if currentVideoOrigin.x + videoSize.width > layout.size.width {
currentVideoOrigin.x = 0.0
currentVideoOrigin.y += videoSize.height
}
videoNode.frame = CGRect(origin: currentVideoOrigin, size: videoSize)
videoNode.updateLayout(size: videoSize, transition: .immediate)
if videoNode.supernode == nil {
self.contentContainer.addSubnode(videoNode)
}
currentVideoOrigin.x += videoSize.width + 4.0
}
let sideButtonMinimalInset: CGFloat = 16.0
let sideButtonOffset = min(36.0, floor((((size.width - 144.0) / 2.0) - sideButtonSize.width) / 2.0))
let sideButtonOrigin = max(sideButtonMinimalInset, floor((size.width - 144.0) / 2.0) - sideButtonOffset - sideButtonSize.width)

View File

@ -88,7 +88,7 @@ public func getCurrentGroupCall(account: Account, callId: Int64, accessHash: Int
loop: for participant in participants {
switch participant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
guard let peer = transaction.getPeer(peerId) else {
@ -99,9 +99,17 @@ public func getCurrentGroupCall(account: Account, callId: Int64, accessHash: Int
let canUnmute = (flags & (1 << 2)) != 0
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: canUnmute)
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
parsedParticipants.append(GroupCallParticipantsContext.Participant(
peer: peer,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState
@ -223,7 +231,7 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
loop: for participant in participants {
switch participant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
guard let peer = transaction.getPeer(peerId) else {
@ -234,9 +242,17 @@ public func getGroupCallParticipants(account: Account, callId: Int64, accessHash
let canUnmute = (flags & (1 << 2)) != 0
muteState = GroupCallParticipantsContext.Participant.MuteState(canUnmute: canUnmute)
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
parsedParticipants.append(GroupCallParticipantsContext.Participant(
peer: peer,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState
@ -552,6 +568,7 @@ public final class GroupCallParticipantsContext {
public var peer: Peer
public var ssrc: UInt32
public var jsonParams: String?
public var joinTimestamp: Int32
public var activityTimestamp: Double?
public var muteState: MuteState?
@ -559,12 +576,14 @@ public final class GroupCallParticipantsContext {
public init(
peer: Peer,
ssrc: UInt32,
jsonParams: String?,
joinTimestamp: Int32,
activityTimestamp: Double?,
muteState: MuteState?
) {
self.peer = peer
self.ssrc = ssrc
self.jsonParams = jsonParams
self.joinTimestamp = joinTimestamp
self.activityTimestamp = activityTimestamp
self.muteState = muteState
@ -665,10 +684,29 @@ public final class GroupCallParticipantsContext {
public var peerId: PeerId
public var ssrc: UInt32
public var jsonParams: String?
public var joinTimestamp: Int32
public var activityTimestamp: Double?
public var muteState: Participant.MuteState?
public var participationStatusChange: ParticipationStatusChange
init(
peerId: PeerId,
ssrc: UInt32,
jsonParams: String?,
joinTimestamp: Int32,
activityTimestamp: Double?,
muteState: Participant.MuteState?,
participationStatusChange: ParticipationStatusChange
) {
self.peerId = peerId
self.ssrc = ssrc
self.jsonParams = jsonParams
self.joinTimestamp = joinTimestamp
self.activityTimestamp = activityTimestamp
self.muteState = muteState
self.participationStatusChange = participationStatusChange
}
}
public var participantUpdates: [ParticipantUpdate]
@ -1007,6 +1045,7 @@ public final class GroupCallParticipantsContext {
let participant = Participant(
peer: peer,
ssrc: participantUpdate.ssrc,
jsonParams: participantUpdate.jsonParams,
joinTimestamp: participantUpdate.joinTimestamp,
activityTimestamp: activityTimestamp,
muteState: participantUpdate.muteState
@ -1166,7 +1205,7 @@ public final class GroupCallParticipantsContext {
extension GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate {
init(_ apiParticipant: Api.GroupCallParticipant) {
switch apiParticipant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
var muteState: GroupCallParticipantsContext.Participant.MuteState?
@ -1186,9 +1225,18 @@ extension GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate {
participationStatusChange = .none
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
self.init(
peerId: peerId,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState,
@ -1203,7 +1251,7 @@ extension GroupCallParticipantsContext.Update.StateUpdate {
var participantUpdates: [GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate] = []
for participant in participants {
switch participant {
case let .groupCallParticipant(flags, userId, date, activeDate, source):
case let .groupCallParticipant(flags, userId, date, activeDate, source, params):
let peerId = PeerId(namespace: Namespaces.Peer.CloudUser, id: userId)
let ssrc = UInt32(bitPattern: source)
var muteState: GroupCallParticipantsContext.Participant.MuteState?
@ -1223,9 +1271,18 @@ extension GroupCallParticipantsContext.Update.StateUpdate {
participationStatusChange = .none
}
var jsonParams: String?
if let params = params {
switch params {
case let .dataJSON(data):
jsonParams = data
}
}
participantUpdates.append(GroupCallParticipantsContext.Update.StateUpdate.ParticipantUpdate(
peerId: peerId,
ssrc: ssrc,
jsonParams: jsonParams,
joinTimestamp: date,
activityTimestamp: activeDate.flatMap(Double.init),
muteState: muteState,

View File

@ -210,7 +210,7 @@ public class BoxedMessage: NSObject {
public class Serialization: NSObject, MTSerialization {
public func currentLayer() -> UInt {
return 122
return 123
}
public func parseMessage(_ data: Data!) -> Any! {

View File

@ -48,12 +48,15 @@ public final class OngoingGroupCallContext {
let isMuted = ValuePromise<Bool>(true, ignoreRepeated: true)
let audioLevels = ValuePipe<[(AudioLevelKey, Float, Bool)]>()
init(queue: Queue, inputDeviceId: String, outputDeviceId: String) {
let videoSources = ValuePromise<Set<UInt32>>(Set(), ignoreRepeated: true)
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?) {
self.queue = queue
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
var audioLevelsUpdatedImpl: (([NSNumber]) -> Void)?
let videoSources = self.videoSources
self.context = GroupCallThreadLocalContext(
queue: ContextQueueImpl(queue: queue),
networkStateUpdated: { state in
@ -63,7 +66,11 @@ public final class OngoingGroupCallContext {
audioLevelsUpdatedImpl?(levels)
},
inputDeviceId: inputDeviceId,
outputDeviceId: outputDeviceId
outputDeviceId: outputDeviceId,
videoCapturer: video?.impl,
incomingVideoSourcesUpdated: { ssrcs in
videoSources.set(Set(ssrcs.map { $0.uint32Value }))
}
)
let queue = self.queue
@ -116,9 +123,10 @@ public final class OngoingGroupCallContext {
})
}
func setJoinResponse(payload: String, ssrcs: [UInt32]) {
self.context.setJoinResponsePayload(payload)
self.addSsrcs(ssrcs: ssrcs)
func setJoinResponse(payload: String, participants: [(UInt32, String?)]) {
self.context.setJoinResponsePayload(payload, participants: participants.map { participant -> OngoingGroupCallParticipantDescription in
return OngoingGroupCallParticipantDescription(audioSsrc: participant.0, jsonParams: participant.1)
})
}
func addSsrcs(ssrcs: [UInt32]) {
@ -133,6 +141,15 @@ public final class OngoingGroupCallContext {
})
}
/// Forwards a batch of newly joined participants to the thread-local
/// call context, bridging Swift tuples to the Objective-C description
/// objects it expects. Empty batches are ignored.
func addParticipants(participants: [(UInt32, String?)]) {
    guard !participants.isEmpty else {
        return
    }
    let descriptions = participants.map { (audioSsrc, jsonParams) -> OngoingGroupCallParticipantDescription in
        return OngoingGroupCallParticipantDescription(audioSsrc: audioSsrc, jsonParams: jsonParams)
    }
    self.context.addParticipants(descriptions)
}
// Tears down the underlying thread-local group call context.
func stop() {
self.context.stop()
}
@ -145,9 +162,49 @@ public final class OngoingGroupCallContext {
// Switches the capture device; `deviceId` is passed through verbatim
// to the underlying context.
func switchAudioInput(_ deviceId: String) {
self.context.switchAudioInput(deviceId)
}
// Switches the playback device; `deviceId` is passed through verbatim
// to the underlying context.
func switchAudioOutput(_ deviceId: String) {
self.context.switchAudioOutput(deviceId)
}
// Asks the Objective-C context for a video view bound to `source`
// (the stream's SSRC) and adapts it into the Swift-facing
// `OngoingCallContextPresentationCallVideoView` value.
// All accessor closures capture the view weakly so the wrapper does not
// keep the native view alive; `completion(nil)` is reported on failure.
func makeIncomingVideoView(source: UInt32, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
self.context.makeIncomingVideoView(withSsrc: source, completion: { view in
if let view = view {
completion(OngoingCallContextPresentationCallVideoView(
view: view,
setOnFirstFrameReceived: { [weak view] f in
view?.setOnFirstFrameReceived(f)
},
getOrientation: { [weak view] in
if let view = view {
// Convert the Objective-C orientation value into the
// Swift enum wrapper.
return OngoingCallVideoOrientation(view.orientation)
} else {
// Fallback once the native view is gone.
return .rotation0
}
},
getAspect: { [weak view] in
if let view = view {
return view.aspect
} else {
// 0.0 signals "aspect unknown".
return 0.0
}
},
setOnOrientationUpdated: { [weak view] f in
view?.setOnOrientationUpdated { value, aspect in
f?(OngoingCallVideoOrientation(value), aspect)
}
},
setOnIsMirroredUpdated: { [weak view] f in
view?.setOnIsMirroredUpdated { value in
f?(value)
}
}
))
} else {
completion(nil)
}
})
}
}
private let queue = Queue()
@ -201,10 +258,22 @@ public final class OngoingGroupCallContext {
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "") {
// The set of SSRCs currently delivering incoming video, surfaced from the
// queue-confined Impl as a cold Signal. Each subscription hops onto the
// impl queue and mirrors the impl's ValuePromise into the subscriber.
// NOTE(review): the Signal closure captures `self` strongly for the
// lifetime of the subscription — confirm this cannot create a cycle.
public var videoSources: Signal<Set<UInt32>, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.impl.with { impl in
disposable.set(impl.videoSources.get().start(next: { value in
subscriber.putNext(value)
}))
}
return disposable
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?) {
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId)
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video)
})
}
@ -224,9 +293,9 @@ public final class OngoingGroupCallContext {
impl.switchAudioOutput(deviceId)
}
}
public func setJoinResponse(payload: String, ssrcs: [UInt32]) {
public func setJoinResponse(payload: String, participants: [(UInt32, String?)]) {
self.impl.with { impl in
impl.setJoinResponse(payload: payload, ssrcs: ssrcs)
impl.setJoinResponse(payload: payload, participants: participants)
}
}
@ -242,9 +311,21 @@ public final class OngoingGroupCallContext {
}
}
/// Public entry point: dispatches the participant batch onto the
/// context's private queue and forwards it to the queue-confined Impl.
public func addParticipants(participants: [(UInt32, String?)]) {
    self.impl.with { $0.addParticipants(participants: participants) }
}
/// Public entry point: stops the group call context on its private queue.
public func stop() {
    self.impl.with { $0.stop() }
}
/// Public entry point: requests an incoming-video view for `source` on
/// the context's private queue; `completion` receives nil on failure.
public func makeIncomingVideoView(source: UInt32, completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
    self.impl.with { $0.makeIncomingVideoView(source: source, completion: completion) }
}
}

View File

@ -157,19 +157,30 @@ typedef NS_ENUM(int32_t, GroupCallNetworkState) {
GroupCallNetworkStateConnected
};
// Immutable value describing one remote group-call participant when
// (re)configuring media: `audioSsrc` identifies the participant's stream,
// and `jsonParams` carries the participant's raw JSON parameters as
// received from the server (presumably video negotiation data — confirm
// against GroupInstanceImpl usage).
@interface OngoingGroupCallParticipantDescription : NSObject
@property (nonatomic, readonly) uint32_t audioSsrc;
@property (nonatomic, strong, readonly) NSString * _Nullable jsonParams;
- (instancetype _Nonnull)initWithAudioSsrc:(uint32_t)audioSsrc jsonParams:(NSString * _Nullable)jsonParams;
@end
@interface GroupCallThreadLocalContext : NSObject
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated;
- (void)stop;
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload;
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload participants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants;
- (void)removeSsrcs:(NSArray<NSNumber *> * _Nonnull)ssrcs;
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants;
- (void)setIsMuted:(bool)isMuted;
- (void)switchAudioOutput:(NSString * _Nonnull)deviceId;
- (void)switchAudioInput:(NSString * _Nonnull)deviceId;
- (void)makeIncomingVideoViewWithSsrc:(uint32_t)ssrc completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
@end

View File

@ -818,12 +818,13 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
@implementation GroupCallThreadLocalContext
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue networkStateUpdated:(void (^ _Nonnull)(GroupCallNetworkState))networkStateUpdated audioLevelsUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))audioLevelsUpdated inputDeviceId:(NSString * _Nonnull)inputDeviceId outputDeviceId:(NSString * _Nonnull)outputDeviceId videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer incomingVideoSourcesUpdated:(void (^ _Nonnull)(NSArray<NSNumber *> * _Nonnull))incomingVideoSourcesUpdated {
self = [super init];
if (self != nil) {
_queue = queue;
_networkStateUpdated = [networkStateUpdated copy];
_videoCapturer = videoCapturer;
__weak GroupCallThreadLocalContext *weakSelf = self;
_instance.reset(new tgcalls::GroupInstanceImpl((tgcalls::GroupInstanceDescriptor){
@ -846,7 +847,15 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
audioLevelsUpdated(result);
},
.initialInputDeviceId = inputDeviceId.UTF8String,
.initialOutputDeviceId = outputDeviceId.UTF8String
.initialOutputDeviceId = outputDeviceId.UTF8String,
.videoCapture = [_videoCapturer getInterface],
.incomingVideoSourcesUpdated = [incomingVideoSourcesUpdated](std::vector<uint32_t> const &ssrcs) {
NSMutableArray<NSNumber *> *mappedSources = [[NSMutableArray alloc] init];
for (auto it : ssrcs) {
[mappedSources addObject:@(it)];
}
incomingVideoSourcesUpdated(mappedSources);
}
}));
}
return self;
@ -881,6 +890,79 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
dict[@"fingerprints"] = fingerprints;
NSMutableArray *parsedVideoSsrcGroups = [[NSMutableArray alloc] init];
NSMutableArray *parsedVideoSources = [[NSMutableArray alloc] init];
for (auto &group : payload.videoSourceGroups) {
NSMutableDictionary *parsedGroup = [[NSMutableDictionary alloc] init];
parsedGroup[@"semantics"] = [NSString stringWithUTF8String:group.semantics.c_str()];
NSMutableArray *sources = [[NSMutableArray alloc] init];
for (auto &source : group.ssrcs) {
[sources addObject:@(source)];
if (![parsedVideoSources containsObject:@(source)]) {
[parsedVideoSources addObject:@(source)];
}
}
parsedGroup[@"sources"] = sources;
[parsedVideoSsrcGroups addObject:parsedGroup];
}
if (parsedVideoSsrcGroups.count != 0) {
dict[@"ssrc-groups"] = parsedVideoSsrcGroups;
}
if (parsedVideoSources.count != 0) {
//dict[@"sources"] = parsedVideoSources;
}
NSMutableArray *videoPayloadTypes = [[NSMutableArray alloc] init];
for (auto &payloadType : payload.videoPayloadTypes) {
NSMutableDictionary *parsedType = [[NSMutableDictionary alloc] init];
parsedType[@"id"] = @(payloadType.id);
NSString *name = [NSString stringWithUTF8String:payloadType.name.c_str()];
parsedType[@"name"] = name;
parsedType[@"clockrate"] = @(payloadType.clockrate);
if (![name isEqualToString:@"rtx"]) {
parsedType[@"channels"] = @(payloadType.channels);
}
NSMutableDictionary *parsedParameters = [[NSMutableDictionary alloc] init];
for (auto &it : payloadType.parameters) {
NSString *key = [NSString stringWithUTF8String:it.first.c_str()];
NSString *value = [NSString stringWithUTF8String:it.second.c_str()];
parsedParameters[key] = value;
}
if (parsedParameters.count != 0) {
parsedType[@"parameters"] = parsedParameters;
}
if (![name isEqualToString:@"rtx"]) {
NSMutableArray *parsedFbs = [[NSMutableArray alloc] init];
for (auto &it : payloadType.feedbackTypes) {
NSMutableDictionary *parsedFb = [[NSMutableDictionary alloc] init];
parsedFb[@"type"] = [NSString stringWithUTF8String:it.type.c_str()];
if (it.subtype.size() != 0) {
parsedFb[@"subtype"] = [NSString stringWithUTF8String:it.subtype.c_str()];
}
[parsedFbs addObject:parsedFb];
}
parsedType[@"rtcp-fbs"] = parsedFbs;
}
[videoPayloadTypes addObject:parsedType];
}
if (videoPayloadTypes.count != 0) {
dict[@"payload-types"] = videoPayloadTypes;
}
NSMutableArray *parsedExtensions = [[NSMutableArray alloc] init];
for (auto &it : payload.videoExtensionMap) {
NSMutableDictionary *parsedExtension = [[NSMutableDictionary alloc] init];
parsedExtension[@"id"] = @(it.first);
parsedExtension[@"uri"] = [NSString stringWithUTF8String:it.second.c_str()];
[parsedExtensions addObject:parsedExtension];
}
if (parsedExtensions.count != 0) {
dict[@"rtp-hdrexts"] = parsedExtensions;
}
NSData *data = [NSJSONSerialization dataWithJSONObject:dict options:0 error:nil];
NSString *string = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
@ -889,7 +971,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload {
- (void)setJoinResponsePayload:(NSString * _Nonnull)payload participants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants {
tgcalls::GroupJoinResponsePayload result;
NSData *payloadData = [payload dataUsingEncoding:NSUTF8StringEncoding];
@ -1032,8 +1114,19 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
result.candidates.push_back(candidate);
}
std::vector<tgcalls::GroupParticipantDescription> parsedParticipants;
for (OngoingGroupCallParticipantDescription *participant in participants) {
tgcalls::GroupParticipantDescription parsedParticipant;
parsedParticipant.audioSsrc = participant.audioSsrc;
if (participant.jsonParams.length != 0) {
[self parseJsonIntoParticipant:participant.jsonParams participant:parsedParticipant];
}
parsedParticipants.push_back(parsedParticipant);
}
if (_instance) {
_instance->setJoinResponsePayload(result);
_instance->setJoinResponsePayload(result, std::move(parsedParticipants));
}
}
@ -1047,6 +1140,148 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
// Parses a participant's JSON-encoded media description (the `jsonParams`
// string received from the group call server) and fills the video-related
// fields of the given tgcalls::GroupParticipantDescription in place:
// "ssrc-groups" -> videoSourceGroups, "rtp-hdrexts" -> videoExtensionMap,
// "payload-types" -> videoPayloadTypes (including nested "rtcp-fbs" feedback
// types and codec "parameters"). Malformed or wrongly-typed entries are
// skipped silently rather than failing the whole parse.
- (void)parseJsonIntoParticipant:(NSString *)payload participant:(tgcalls::GroupParticipantDescription &)participant {
    NSData *payloadData = [payload dataUsingEncoding:NSUTF8StringEncoding];
    if (payloadData == nil) {
        return;
    }
    NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:payloadData options:0 error:nil];
    // Top-level value must be a JSON object; anything else (array, nil on
    // parse error) is ignored.
    if (![dict isKindOfClass:[NSDictionary class]]) {
        return;
    }
    NSArray *ssrcGroups = dict[@"ssrc-groups"];
    if ([ssrcGroups isKindOfClass:[NSArray class]]) {
        for (NSDictionary *group in ssrcGroups) {
            if (![group isKindOfClass:[NSDictionary class]]) {
                continue;
            }
            NSString *semantics = group[@"semantics"];
            if (![semantics isKindOfClass:[NSString class]]) {
                continue;
            }
            NSArray *sources = group[@"sources"];
            if (![sources isKindOfClass:[NSArray class]]) {
                continue;
            }
            tgcalls::GroupJoinPayloadVideoSourceGroup groupDesc;
            // Non-numeric entries are dropped; the group is kept even if it
            // ends up with an empty ssrc list.
            for (NSNumber *nSsrc in sources) {
                if ([nSsrc isKindOfClass:[NSNumber class]]) {
                    groupDesc.ssrcs.push_back([nSsrc unsignedIntValue]);
                }
            }
            groupDesc.semantics = [semantics UTF8String];
            participant.videoSourceGroups.push_back(groupDesc);
        }
    }
    NSArray *hdrExts = dict[@"rtp-hdrexts"];
    if ([hdrExts isKindOfClass:[NSArray class]]) {
        for (NSDictionary *extDict in hdrExts) {
            if (![extDict isKindOfClass:[NSDictionary class]]) {
                continue;
            }
            NSNumber *nId = extDict[@"id"];
            if (![nId isKindOfClass:[NSNumber class]]) {
                continue;
            }
            NSString *uri = extDict[@"uri"];
            if (![uri isKindOfClass:[NSString class]]) {
                continue;
            }
            // videoExtensionMap is a vector of (id, uri) pairs, so duplicate
            // ids from the payload are preserved as-is.
            participant.videoExtensionMap.push_back(std::make_pair((uint32_t)[nId unsignedIntValue], (std::string)[uri UTF8String]));
        }
    }
    NSArray *payloadTypes = dict[@"payload-types"];
    if ([payloadTypes isKindOfClass:[NSArray class]]) {
        for (NSDictionary *payloadDict in payloadTypes) {
            if (![payloadDict isKindOfClass:[NSDictionary class]]) {
                continue;
            }
            NSNumber *nId = payloadDict[@"id"];
            if (![nId isKindOfClass:[NSNumber class]]) {
                continue;
            }
            // "clockrate" and "channels" are optional: only reject when
            // present with a wrong type. When absent (nil), the
            // [nil unsignedIntValue] messages below yield 0.
            NSNumber *nClockrate = payloadDict[@"clockrate"];
            if (nClockrate != nil && ![nClockrate isKindOfClass:[NSNumber class]]) {
                continue;
            }
            NSNumber *nChannels = payloadDict[@"channels"];
            if (nChannels != nil && ![nChannels isKindOfClass:[NSNumber class]]) {
                continue;
            }
            NSString *name = payloadDict[@"name"];
            if (![name isKindOfClass:[NSString class]]) {
                continue;
            }
            tgcalls::GroupJoinPayloadVideoPayloadType parsedPayload;
            parsedPayload.id = [nId unsignedIntValue];
            parsedPayload.clockrate = [nClockrate unsignedIntValue];
            parsedPayload.channels = [nChannels unsignedIntValue];
            parsedPayload.name = [name UTF8String];
            NSArray *fbs = payloadDict[@"rtcp-fbs"];
            if ([fbs isKindOfClass:[NSArray class]]) {
                for (NSDictionary *fbDict in fbs) {
                    if (![fbDict isKindOfClass:[NSDictionary class]]) {
                        continue;
                    }
                    NSString *type = fbDict[@"type"];
                    if (![type isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    // "subtype" is optional; when omitted the C++ field keeps
                    // its default (empty) value.
                    NSString *subtype = fbDict[@"subtype"];
                    if (subtype != nil && ![subtype isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    tgcalls::GroupJoinPayloadVideoPayloadFeedbackType parsedFeedback;
                    parsedFeedback.type = [type UTF8String];
                    if (subtype != nil) {
                        parsedFeedback.subtype = [subtype UTF8String];
                    }
                    parsedPayload.feedbackTypes.push_back(parsedFeedback);
                }
            }
            NSDictionary *parameters = payloadDict[@"parameters"];
            if ([parameters isKindOfClass:[NSDictionary class]]) {
                // Iteration order of an NSDictionary is unspecified, so the
                // resulting parameter pair order is not stable across runs.
                for (NSString *nKey in parameters) {
                    if (![nKey isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    NSString *value = parameters[nKey];
                    if (![value isKindOfClass:[NSString class]]) {
                        continue;
                    }
                    parsedPayload.parameters.push_back(std::make_pair((std::string)[nKey UTF8String], (std::string)[value UTF8String]));
                }
            }
            participant.videoPayloadTypes.push_back(parsedPayload);
        }
    }
}
// Maps the Objective-C participant descriptions to their tgcalls
// counterparts (parsing any attached JSON media parameters) and hands the
// batch over to the underlying group call instance. No-op when the
// instance is gone.
- (void)addParticipants:(NSArray<OngoingGroupCallParticipantDescription *> * _Nonnull)participants {
    if (!_instance) {
        return;
    }
    std::vector<tgcalls::GroupParticipantDescription> mappedParticipants;
    for (OngoingGroupCallParticipantDescription *description in participants) {
        tgcalls::GroupParticipantDescription mapped;
        mapped.audioSsrc = description.audioSsrc;
        if (description.jsonParams.length != 0) {
            [self parseJsonIntoParticipant:description.jsonParams participant:mapped];
        }
        mappedParticipants.push_back(mapped);
    }
    _instance->addParticipants(std::move(mappedParticipants));
}
- (void)setIsMuted:(bool)isMuted {
if (_instance) {
_instance->setIsMuted(isMuted);
@ -1064,6 +1299,47 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
// Creates a renderer view for the incoming video stream identified by `ssrc`
// and hands it to `completion`. The view must be created on the main queue;
// the renderer's sink is then attached to the call instance back on the
// context queue. Note the ordering: `completion` is invoked (on the main
// queue) immediately after scheduling the sink attachment, so the view may be
// delivered before the instance has started feeding it frames. If `self` or
// `_instance` is gone by the time the queued block runs, the sink is simply
// never attached. When `_instance` is nil at call time, `completion` is never
// invoked at all.
- (void)makeIncomingVideoViewWithSsrc:(uint32_t)ssrc completion:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
    if (_instance) {
        __weak GroupCallThreadLocalContext *weakSelf = self;
        // Capture the queue strongly; the inner block must not rely on self
        // surviving until the main-queue hop completes.
        id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
        dispatch_async(dispatch_get_main_queue(), ^{
            // Prefer the Metal-backed renderer when available, falling back
            // to the OpenGL renderer otherwise.
            if ([VideoMetalView isSupported]) {
                VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
#if TARGET_OS_IPHONE
                remoteRenderer.videoContentMode = UIViewContentModeScaleToFill;
#else
                // NOTE(review): UIViewContentModeScaleAspect is not a
                // standard UIKit constant — verify this branch compiles on
                // non-iPhone targets.
                remoteRenderer.videoContentMode = UIViewContentModeScaleAspect;
#endif
                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->setIncomingVideoOutput(ssrc, sink);
                    }
                }];
                completion(remoteRenderer);
            } else {
                GLVideoView *remoteRenderer = [[GLVideoView alloc] initWithFrame:CGRectZero];
                std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
                [queue dispatch:^{
                    __strong GroupCallThreadLocalContext *strongSelf = weakSelf;
                    if (strongSelf && strongSelf->_instance) {
                        strongSelf->_instance->setIncomingVideoOutput(ssrc, sink);
                    }
                }];
                completion(remoteRenderer);
            }
        });
    }
}
@end
@ -1105,3 +1381,16 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
@end
@implementation OngoingGroupCallParticipantDescription

// Designated initializer: stores the participant's audio SSRC together with
// the optional raw JSON string describing its media parameters.
- (instancetype _Nonnull)initWithAudioSsrc:(uint32_t)audioSsrc jsonParams:(NSString * _Nullable)jsonParams {
    if ((self = [super init])) {
        _audioSsrc = audioSsrc;
        _jsonParams = jsonParams;
    }
    return self;
}

@end

@ -1 +1 @@
Subproject commit 28eb96a716266899616d708771e71778f270e04c
Subproject commit c262a55cd3c0426fca3a122fec8de6dd061e82d8

@ -1 +1 @@
Subproject commit db98670d64d24e4c093f49f5882d175673fd59f0
Subproject commit eb9d68429a5805f904e9b7605f3900cef6453333