Implement screencast audio channel

Ali 2021-07-06 21:38:08 +04:00
parent 48efebaec0
commit 5b0aca61c5
7 changed files with 34 additions and 18 deletions


@@ -138,7 +138,7 @@ private func rootPathForBasePath(_ appGroupPath: String) -> String {
self.audioConverter = CustomAudioConverter(asbd: asbd)
}
if let audioConverter = self.audioConverter {
-if let data = audioConverter.convert(sampleBuffer: sampleBuffer) {
+if let data = audioConverter.convert(sampleBuffer: sampleBuffer), !data.isEmpty {
self.screencastBufferClientContext?.writeAudioData(data: data)
}
}
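
The new `!data.isEmpty` condition keeps zero-length conversion results from being written into the shared screencast buffer. A minimal sketch of the same guard pattern, assuming a stand-in sink type (`AudioDataSink` and `forwardConvertedAudio` are illustrations, not the repository's declarations):

import CoreMedia
import Foundation

// Hypothetical stand-in for the IPC client context's write side.
protocol AudioDataSink {
    func writeAudioData(data: Data)
}

// Convert a sample buffer and forward it only when the result is non-empty:
// nil means conversion failed, and an empty payload would just wake the
// reader on the other side of the IPC boundary for nothing.
func forwardConvertedAudio(_ sampleBuffer: CMSampleBuffer,
                           convert: (CMSampleBuffer) -> Data?,
                           to sink: AudioDataSink) {
    guard let data = convert(sampleBuffer), !data.isEmpty else {
        return
    }
    sink.writeAudioData(data: data)
}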


@@ -379,7 +379,7 @@ final class ShareControllerNode: ViewControllerTracingNode, UIScrollViewDelegate
let animation = contentNode.layer.makeAnimation(from: 0.0 as NSNumber, to: 1.0 as NSNumber, keyPath: "opacity", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.35)
animation.fillMode = .both
if !fastOut {
-animation.beginTime = CACurrentMediaTime() + 0.1
+animation.beginTime = contentNode.layer.convertTime(CACurrentMediaTime(), from: nil) + 0.1
}
contentNode.layer.add(animation, forKey: "opacity")
}
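
The fix converts the host clock into the layer's own time space before applying the 0.1 s delay. `CACurrentMediaTime()` is an absolute timestamp; if the layer or any ancestor runs with a modified `speed` or `timeOffset`, using it directly as `beginTime` starts the animation at the wrong moment. A small sketch of the pattern (the helper itself is hypothetical):

import QuartzCore

// Fade a layer in after `delay`, measured in the layer's own time space.
func addDelayedFadeIn(to layer: CALayer, delay: CFTimeInterval = 0.1) {
    let animation = CABasicAnimation(keyPath: "opacity")
    animation.fromValue = 0.0
    animation.toValue = 1.0
    animation.duration = 0.35
    animation.timingFunction = CAMediaTimingFunction(name: .easeInEaseOut)
    animation.fillMode = .both
    // Map the absolute host time into this layer's time space first;
    // beginTime is interpreted relative to that space, not the wall clock.
    animation.beginTime = layer.convertTime(CACurrentMediaTime(), from: nil) + delay
    layer.add(animation, forKey: "opacity")
}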


@@ -584,7 +584,7 @@ fileprivate let parsers: [Int32 : (BufferReader) -> Any?] = {
dict[-1160215659] = { return Api.InputMessage.parse_inputMessageReplyTo($0) }
dict[-2037963464] = { return Api.InputMessage.parse_inputMessagePinned($0) }
dict[-1392895362] = { return Api.InputMessage.parse_inputMessageCallbackQuery($0) }
-dict[2028213859] = { return Api.GroupCallParticipantVideo.parse_groupCallParticipantVideo($0) }
+dict[1735736008] = { return Api.GroupCallParticipantVideo.parse_groupCallParticipantVideo($0) }
dict[-58224696] = { return Api.PhoneCallProtocol.parse_phoneCallProtocol($0) }
dict[-1237848657] = { return Api.StatsDateRangeDays.parse_statsDateRangeDays($0) }
dict[-275956116] = { return Api.messages.AffectedFoundMessages.parse_affectedFoundMessages($0) }
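
The parser-table swap above (`2028213859` → `1735736008`) is how TL schema changes surface in generated code: a constructor ID is derived as a CRC32 of the constructor's schema definition, so adding the `audio_source` field produces a brand-new ID and the old one disappears from the table. A reduced sketch of the dispatch pattern, with `BufferReader` as a simplified stand-in for the generated reader:

import Foundation

// Simplified stand-in for the generated BufferReader.
final class BufferReader {
    private let data: Data
    private var offset = 0
    init(_ data: Data) { self.data = data }

    func readInt32() -> Int32? {
        guard offset + 4 <= data.count else { return nil }
        defer { offset += 4 }
        let raw = data.subdata(in: offset..<(offset + 4))
            .withUnsafeBytes { $0.loadUnaligned(as: Int32.self) }
        return Int32(littleEndian: raw)
    }
}

var parsers: [Int32: (BufferReader) -> Any?] = [:]
parsers[1735736008] = { reader in
    // parse_groupCallParticipantVideo in the generated code.
    return nil
}

// A boxed TL object leads with its constructor ID; look it up and hand the
// remainder of the buffer to the matching parser.
func parseBoxed(_ reader: BufferReader) -> Any? {
    guard let signature = reader.readInt32() else { return nil }
    return parsers[signature]?(reader)
}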


@@ -14978,13 +14978,13 @@ public extension Api {
}
public enum GroupCallParticipantVideo: TypeConstructorDescription {
-case groupCallParticipantVideo(flags: Int32, endpoint: String, sourceGroups: [Api.GroupCallParticipantVideoSourceGroup])
+case groupCallParticipantVideo(flags: Int32, endpoint: String, sourceGroups: [Api.GroupCallParticipantVideoSourceGroup], audioSource: Int32?)
public func serialize(_ buffer: Buffer, _ boxed: Swift.Bool) {
switch self {
-case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups):
+case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups, let audioSource):
if boxed {
-buffer.appendInt32(2028213859)
+buffer.appendInt32(1735736008)
}
serializeInt32(flags, buffer: buffer, boxed: false)
serializeString(endpoint, buffer: buffer, boxed: false)
@@ -14993,14 +14993,15 @@ public extension Api {
for item in sourceGroups {
item.serialize(buffer, true)
}
+if Int(flags) & Int(1 << 1) != 0 {serializeInt32(audioSource!, buffer: buffer, boxed: false)}
break
}
}
public func descriptionFields() -> (String, [(String, Any)]) {
switch self {
-case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups):
-return ("groupCallParticipantVideo", [("flags", flags), ("endpoint", endpoint), ("sourceGroups", sourceGroups)])
+case .groupCallParticipantVideo(let flags, let endpoint, let sourceGroups, let audioSource):
+return ("groupCallParticipantVideo", [("flags", flags), ("endpoint", endpoint), ("sourceGroups", sourceGroups), ("audioSource", audioSource)])
}
}
@@ -15013,11 +15014,14 @@ public extension Api {
if let _ = reader.readInt32() {
_3 = Api.parseVector(reader, elementSignature: 0, elementType: Api.GroupCallParticipantVideoSourceGroup.self)
}
+var _4: Int32?
+if Int(_1!) & Int(1 << 1) != 0 {_4 = reader.readInt32() }
let _c1 = _1 != nil
let _c2 = _2 != nil
let _c3 = _3 != nil
-if _c1 && _c2 && _c3 {
-return Api.GroupCallParticipantVideo.groupCallParticipantVideo(flags: _1!, endpoint: _2!, sourceGroups: _3!)
+let _c4 = (Int(_1!) & Int(1 << 1) == 0) || _4 != nil
+if _c1 && _c2 && _c3 && _c4 {
+return Api.GroupCallParticipantVideo.groupCallParticipantVideo(flags: _1!, endpoint: _2!, sourceGroups: _3!, audioSource: _4)
}
else {
return nil
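
Both halves of this change follow TL's `flags.N?type` convention, here for `audio_source:flags.1?int`: the writer emits the field only when bit 1 of `flags` is set, and the reader checks the same bit, treating a set bit with no readable value as a parse failure (the `_c4` guard above). A condensed sketch, with `Buffer` as a simplified stand-in for the generated type:

import Foundation

// Simplified stand-in for the generated Buffer type.
struct Buffer {
    var data = Data()
    mutating func appendInt32(_ value: Int32) {
        withUnsafeBytes(of: value.littleEndian) { data.append(contentsOf: $0) }
    }
}

func serializeParticipantVideo(flags: Int32, audioSource: Int32?, into buffer: inout Buffer) {
    buffer.appendInt32(flags)
    // ... endpoint and sourceGroups would be written here ...
    if (flags & (1 << 1)) != 0 {
        // The flag bit is the single source of truth for presence, which is
        // why the generated code force-unwraps the optional at this point.
        buffer.appendInt32(audioSource!)
    }
}

// Mirror of the parser's _c4 check: either bit 1 is clear, or a value was read.
func audioSourceIsConsistent(flags: Int32, parsed: Int32?) -> Bool {
    return (flags & (1 << 1)) == 0 || parsed != nil
}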


@@ -415,12 +415,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var screencastBufferServerContext: IpcGroupCallBufferAppContext?
private var screencastCapturer: OngoingCallVideoCapturer?
//private var screencastIpcContext: IpcGroupCallAppContext?
private var ssrcMapping: [UInt32: PeerId] = [:]
private var requestedSsrcs = Set<UInt32>()
private var summaryInfoState = Promise<SummaryInfoState?>(nil)
private var summaryParticipantsState = Promise<SummaryParticipantsState?>(nil)
@@ -887,7 +883,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
guard let strongSelf = self else {
return
}
-strongSelf.genericCallContext?.addExternalAudioData(data: data)
+strongSelf.screencastCallContext?.addExternalAudioData(data: data)
})
self.screencastStateDisposable = (screencastBufferServerContext.isActive
|> distinctUntilChanged
@@ -2050,6 +2046,18 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
videoDescription: nil
))
}
+if let screencastSsrc = participant.presentationDescription?.audioSsrc {
+if remainingSsrcs.contains(screencastSsrc) {
+remainingSsrcs.remove(screencastSsrc)
+result.append(OngoingGroupCallContext.MediaChannelDescription(
+kind: .audio,
+audioSsrc: screencastSsrc,
+videoDescription: nil
+))
+}
+}
}
}
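
With screencast audio split into its own channel, the participant scan now has two SSRCs to account for: the participant's main audio SSRC and, when present, the presentation's `audioSsrc`. Both are removed from the outstanding set and requested as audio channels. A reduced sketch of that pass (the types are stand-ins for `OngoingGroupCallContext`'s real declarations):

// Simplified stand-ins for the call context's channel-description types.
struct MediaChannelDescription {
    enum Kind { case audio, video }
    let kind: Kind
    let audioSsrc: UInt32
}

struct Participant {
    let audioSsrc: UInt32?
    let presentationAudioSsrc: UInt32?
}

// Request an audio channel for every SSRC we are still waiting on,
// covering both the main audio and the separate screencast audio.
func audioChannels(for participants: [Participant],
                   remainingSsrcs: inout Set<UInt32>) -> [MediaChannelDescription] {
    var result: [MediaChannelDescription] = []
    for participant in participants {
        for ssrc in [participant.audioSsrc, participant.presentationAudioSsrc] {
            if let ssrc = ssrc, remainingSsrcs.contains(ssrc) {
                remainingSsrcs.remove(ssrc)
                result.append(MediaChannelDescription(kind: .audio, audioSsrc: ssrc))
            }
        }
    }
    return result
}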


@@ -855,6 +855,7 @@ public final class GroupCallParticipantsContext {
public var endpointId: String
public var ssrcGroups: [SsrcGroup]
+public var audioSsrc: UInt32?
public var isPaused: Bool
}
@@ -1527,6 +1528,9 @@ public final class GroupCallParticipantsContext {
if let ssrc = participant.ssrc {
existingSsrcs.insert(ssrc)
}
+if let presentationDescription = participant.presentationDescription, let presentationAudioSsrc = presentationDescription.audioSsrc {
+existingSsrcs.insert(presentationAudioSsrc)
+}
}
for ssrc in ssrcs {
@@ -2488,7 +2492,7 @@ extension GroupCallParticipantsContext.Participant {
private extension GroupCallParticipantsContext.Participant.VideoDescription {
init(_ apiVideo: Api.GroupCallParticipantVideo) {
switch apiVideo {
-case let .groupCallParticipantVideo(flags, endpoint, sourceGroups):
+case let .groupCallParticipantVideo(flags, endpoint, sourceGroups, audioSource):
var parsedSsrcGroups: [SsrcGroup] = []
for group in sourceGroups {
switch group {
@@ -2497,7 +2501,7 @@ private extension GroupCallParticipantsContext.Participant.VideoDescription {
}
}
let isPaused = (flags & (1 << 0)) != 0
-self.init(endpointId: endpoint, ssrcGroups: parsedSsrcGroups, isPaused: isPaused)
+self.init(endpointId: endpoint, ssrcGroups: parsedSsrcGroups, audioSsrc: audioSource.flatMap(UInt32.init(bitPattern:)), isPaused: isPaused)
}
}
}
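
The new `audioSsrc` field is stored as `UInt32` on the app side while TL transports it as a signed `Int32`; `UInt32(bitPattern:)` reinterprets the raw bits rather than range-converting, so SSRC values above `Int32.max` survive the round trip. A quick illustration:

// A large SSRC comes off the wire as a negative Int32; bitPattern recovers it.
let wireValue: Int32 = -1
let ssrc = UInt32(bitPattern: wireValue)      // 4294967295
assert(Int32(bitPattern: ssrc) == wireValue)  // lossless round trip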

@@ -1 +1 @@
-Subproject commit 76d781c09cd4d827e417a56cc7bc39b0d2217e78
+Subproject commit 3cf1822a70e3b84f6a762755e5249b26e915d321