Roll back tgcalls

Ali 2022-11-09 20:56:20 +04:00
parent 5a81f9fe15
commit f93b032a6a
9 changed files with 46 additions and 311 deletions
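In short, the revert removes the app-driven ("manual") audio-session control that had been threaded through the call stack — the applyVoiceChatOutputMode helpers, the audioSessionActive signals passed into OngoingCallContext and OngoingGroupCallContext, and the useManualAudioSessionControl / setManualAudioSessionIsActive bridge into WebRTC's RTCAudioSession — and restores the earlier behavior in which tgcalls configures and activates the audio session itself. The tgcalls submodule pointer is rolled back to match.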

View File

@@ -179,8 +179,6 @@ public class ManagedAudioSessionControl {
}
public final class ManagedAudioSession {
public private(set) static var shared: ManagedAudioSession?
private var nextId: Int32 = 0
private let queue: Queue
private let hasLoudspeaker: Bool
@@ -258,8 +256,6 @@ public final class ManagedAudioSession {
self.isHeadsetPluggedInValue = self.isHeadsetPluggedIn()
self.updateCurrentAudioRouteInfo()
}
ManagedAudioSession.shared = self
}
deinit {
@@ -788,61 +784,6 @@ public final class ManagedAudioSession {
}
}
public func applyVoiceChatOutputModeInCurrentAudioSession(outputMode: AudioSessionOutputMode) {
managedAudioSessionLog("applyVoiceChatOutputModeInCurrentAudioSession \(outputMode)")
do {
var resetToBuiltin = false
switch outputMode {
case .system:
resetToBuiltin = true
case let .custom(output):
switch output {
case .builtin:
resetToBuiltin = true
case .speaker:
if let routes = AVAudioSession.sharedInstance().availableInputs {
for route in routes {
if route.portType == .builtInMic {
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
break
}
}
}
try AVAudioSession.sharedInstance().overrideOutputAudioPort(.speaker)
case .headphones:
break
case let .port(port):
try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
if let routes = AVAudioSession.sharedInstance().availableInputs {
for route in routes {
if route.uid == port.uid {
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
break
}
}
}
}
case .speakerIfNoHeadphones:
try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
}
if resetToBuiltin {
try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
if let routes = AVAudioSession.sharedInstance().availableInputs {
for route in routes {
if route.portType == .builtInMic {
let _ = try? AVAudioSession.sharedInstance().setPreferredInput(route)
break
}
}
}
}
} catch let e {
managedAudioSessionLog("applyVoiceChatOutputModeInCurrentAudioSession error: \(e)")
}
}
private func setupOutputMode(_ outputMode: AudioSessionOutputMode, type: ManagedAudioSessionType) throws {
managedAudioSessionLog("ManagedAudioSession setup \(outputMode) for \(type)")
var resetToBuiltin = false
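The helper deleted above applied an output mode straight to the shared AVAudioSession. A minimal sketch of the same routing pattern, assuming an already-active voice-chat session (the function names here are illustrative, not part of the app):

import AVFoundation

// Force output to the loudspeaker while keeping input on the built-in mic.
func routeToSpeaker() throws {
    let session = AVAudioSession.sharedInstance()
    // Pinning the built-in mic first keeps input on-device once output is overridden.
    if let builtInMic = session.availableInputs?.first(where: { $0.portType == .builtInMic }) {
        try session.setPreferredInput(builtInMic)
    }
    try session.overrideOutputAudioPort(.speaker)
}

// Clear the override so the system picks the default route again.
func resetToSystemRoute() throws {
    try AVAudioSession.sharedInstance().overrideOutputAudioPort(.none)
}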

View File

@@ -8,7 +8,6 @@ import TelegramCore
import SwiftSignalKit
import AppBundle
import AccountContext
import TelegramAudio
private let sharedProviderDelegate: AnyObject? = {
if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
@@ -108,10 +107,6 @@ public final class CallKitIntegration {
}
}
}
public func applyVoiceChatOutputMode(outputMode: AudioSessionOutputMode) {
(sharedProviderDelegate as? CallKitProviderDelegate)?.applyVoiceChatOutputMode(outputMode: outputMode)
}
}
@available(iOSApplicationExtension 10.0, iOS 10.0, *)
@@ -130,9 +125,6 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
private var setCallMuted: ((UUID, Bool) -> Void)?
private var audioSessionActivationChanged: ((Bool) -> Void)?
private var isAudioSessionActive: Bool = false
private var pendingVoiceChatOutputMode: AudioSessionOutputMode?
private let disposableSet = DisposableSet()
fileprivate var audioSessionActivePromise: ValuePromise<Bool>?
@@ -171,7 +163,7 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
private func requestTransaction(_ transaction: CXTransaction, completion: ((Bool) -> Void)? = nil) {
self.callController.request(transaction) { error in
if let error = error {
print("Error requesting transaction \(transaction): \(error)")
print("Error requesting transaction: \(error)")
}
completion?(error == nil)
}
@@ -246,12 +238,6 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
update.supportsDTMF = false
update.hasVideo = isVideo
do {
try AVAudioSession.sharedInstance().setMode(.voiceChat)
} catch let e {
print("AVAudioSession.sharedInstance().setMode(.voiceChat) error \(e)")
}
self.provider.reportNewIncomingCall(with: uuid, update: update, completion: { error in
completion?(error as NSError?)
})
@@ -335,28 +321,13 @@ class CallKitProviderDelegate: NSObject, CXProviderDelegate {
}
func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
print("provider didActivate default? \(audioSession === AVAudioSession.sharedInstance())")
self.isAudioSessionActive = true
self.audioSessionActivationChanged?(true)
self.audioSessionActivePromise?.set(true)
if let outputMode = self.pendingVoiceChatOutputMode {
self.pendingVoiceChatOutputMode = nil
ManagedAudioSession.shared?.applyVoiceChatOutputModeInCurrentAudioSession(outputMode: outputMode)
}
}
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
self.isAudioSessionActive = false
self.audioSessionActivationChanged?(false)
self.audioSessionActivePromise?.set(false)
}
func applyVoiceChatOutputMode(outputMode: AudioSessionOutputMode) {
if self.isAudioSessionActive {
ManagedAudioSession.shared?.applyVoiceChatOutputModeInCurrentAudioSession(outputMode: outputMode)
} else {
self.pendingVoiceChatOutputMode = outputMode
}
}
}
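The delegate methods above are why the removed pendingVoiceChatOutputMode existed: under CallKit, the app must not touch the audio route until provider(_:didActivate:) fires, so earlier requests were parked and replayed. A generic sketch of that defer-until-active pattern (type and method names are illustrative):

// Park an action until CallKit reports the audio session active, then replay it.
final class DeferredAudioAction {
    private var pending: (() -> Void)?
    private(set) var isSessionActive = false

    func run(_ action: @escaping () -> Void) {
        if isSessionActive {
            action()
        } else {
            pending = action   // replaces any earlier pending action, as the reverted code did
        }
    }

    func sessionDidActivate() {
        isSessionActive = true
        pending?()
        pending = nil
    }

    func sessionDidDeactivate() {
        isSessionActive = false
    }
}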

View File

@@ -413,9 +413,6 @@ public final class PresentationCallImpl: PresentationCall {
return
}
strongSelf.audioOutputStateValue = (availableOutputs, currentOutput)
if let currentOutput = currentOutput {
strongSelf.currentAudioOutputValue = currentOutput
}
var signal: Signal<([AudioSessionOutput], AudioSessionOutput?), NoError> = .single((availableOutputs, currentOutput))
if !didReceiveAudioOutputs {
@@ -440,7 +437,7 @@ public final class PresentationCallImpl: PresentationCall {
let audioSessionActive: Signal<Bool, NoError>
if let callKitIntegration = strongSelf.callKitIntegration {
audioSessionActive = callKitIntegration.audioSessionActive
/*|> filter { $0 }
|> filter { $0 }
|> timeout(2.0, queue: Queue.mainQueue(), alternate: Signal { subscriber in
if let strongSelf = self, let _ = strongSelf.audioSessionControl {
//audioSessionControl.activate({ _ in })
@@ -448,7 +445,7 @@ public final class PresentationCallImpl: PresentationCall {
subscriber.putNext(true)
subscriber.putCompletion()
return EmptyDisposable
})*/
})
} else {
audioSessionControl.activate({ _ in })
audioSessionActive = .single(true)
@@ -537,12 +534,8 @@ public final class PresentationCallImpl: PresentationCall {
}
if let audioSessionControl = audioSessionControl, previous == nil || previousControl == nil {
if let callKitIntegration = self.callKitIntegration {
callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
} else {
audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
audioSessionControl.setup(synchronous: true)
}
audioSessionControl.setOutputMode(.custom(self.currentAudioOutputValue))
audioSessionControl.setup(synchronous: true)
}
let mappedVideoState: PresentationCallState.VideoState
@@ -1038,11 +1031,7 @@ public final class PresentationCallImpl: PresentationCall {
))
if let audioSessionControl = self.audioSessionControl {
if let callKitIntegration = self.callKitIntegration {
callKitIntegration.applyVoiceChatOutputMode(outputMode: .custom(self.currentAudioOutputValue))
} else {
audioSessionControl.setOutputMode(.custom(output))
}
audioSessionControl.setOutputMode(.custom(output))
}
}
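The uncommented filter/timeout chain restores a guard: wait for CallKit to report an active audio session, but only for two seconds, then proceed regardless. The same semantics in plain GCD, as a stand-in for the SwiftSignalKit operators (names are illustrative):

import Foundation

// Call `completion` once `isActive()` holds, or when `timeout` elapses —
// mirroring `filter { $0 } |> timeout(2.0, ..., alternate: .single(true))`.
func waitForActivation(isActive: @escaping () -> Bool,
                       timeout: TimeInterval = 2.0,
                       queue: DispatchQueue = .main,
                       completion: @escaping () -> Void) {
    let deadline = Date().addingTimeInterval(timeout)
    func poll() {
        if isActive() || Date() >= deadline {
            completion()
        } else {
            queue.asyncAfter(deadline: .now() + 0.05, execute: poll)
        }
    }
    queue.async(execute: poll)
}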

View File

@@ -1648,7 +1648,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
outgoingAudioBitrateKbit = Int32(value)
}
genericCallContext = .call(OngoingGroupCallContext(audioSessionActive: self.audioSessionActive.get(), video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
genericCallContext = .call(OngoingGroupCallContext(video: self.videoCapturer, requestMediaChannelDescriptions: { [weak self] ssrcs, completion in
let disposable = MetaDisposable()
Queue.mainQueue().async {
guard let strongSelf = self else {
@@ -2966,7 +2966,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
self.hasScreencast = true
let screencastCallContext = OngoingGroupCallContext(audioSessionActive: .single(true), video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
let screencastCallContext = OngoingGroupCallContext(video: self.screencastCapturer, requestMediaChannelDescriptions: { _, _ in EmptyDisposable }, rejoinNeeded: { }, outgoingAudioBitrateKbit: nil, videoContentType: .screencast, enableNoiseSuppression: false, disableAudioInput: true, preferX264: false, logPath: "")
self.screencastCallContext = screencastCallContext
self.screencastJoinDisposable.set((screencastCallContext.joinPayload

View File

@@ -416,9 +416,7 @@ public final class OngoingGroupCallContext {
private let broadcastPartsSource = Atomic<BroadcastPartSource?>(value: nil)
private let audioSessionActiveDisposable = MetaDisposable()
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
init(queue: Queue, inputDeviceId: String, outputDeviceId: String, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
self.queue = queue
var networkStateUpdatedImpl: ((GroupCallNetworkState) -> Void)?
@@ -573,20 +571,9 @@ public final class OngoingGroupCallContext {
strongSelf.joinPayload.set(.single((payload, ssrc)))
}
})
self.audioSessionActiveDisposable.set((audioSessionActive
|> deliverOn(queue)).start(next: { [weak self] isActive in
guard let `self` = self else {
return
}
#if os(iOS)
self.context.setManualAudioSessionIsActive(isActive)
#endif
}))
}
deinit {
self.audioSessionActiveDisposable.dispose()
}
func setJoinResponse(payload: String) {
@@ -949,10 +936,10 @@ public final class OngoingGroupCallContext {
}
}
public init(inputDeviceId: String = "", outputDeviceId: String = "", audioSessionActive: Signal<Bool, NoError>, video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
public init(inputDeviceId: String = "", outputDeviceId: String = "", video: OngoingCallVideoCapturer?, requestMediaChannelDescriptions: @escaping (Set<UInt32>, @escaping ([MediaChannelDescription]) -> Void) -> Disposable, rejoinNeeded: @escaping () -> Void, outgoingAudioBitrateKbit: Int32?, videoContentType: VideoContentType, enableNoiseSuppression: Bool, disableAudioInput: Bool, preferX264: Bool, logPath: String) {
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, audioSessionActive: audioSessionActive, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
return Impl(queue: queue, inputDeviceId: inputDeviceId, outputDeviceId: outputDeviceId, video: video, requestMediaChannelDescriptions: requestMediaChannelDescriptions, rejoinNeeded: rejoinNeeded, outgoingAudioBitrateKbit: outgoingAudioBitrateKbit, videoContentType: videoContentType, enableNoiseSuppression: enableNoiseSuppression, disableAudioInput: disableAudioInput, preferX264: preferX264, logPath: logPath)
})
}
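With the revert, OngoingGroupCallContext no longer takes an audioSessionActive signal: the subscription that forwarded activation state into tgcalls via setManualAudioSessionIsActive is gone, and activation is handled inside the library again. The deleted plumbing followed a common subscribe-in-init / dispose-in-deinit shape, sketched generically here (the types are illustrative stand-ins for Signal/MetaDisposable):

// Observe a Bool stream from init, forward values, and cancel on deinit.
final class ActivationObserver {
    private let cancel: () -> Void

    init(observe: (@escaping (Bool) -> Void) -> () -> Void,
         apply: @escaping (Bool) -> Void) {
        // `observe` hands back a cancellation token, like Disposable.dispose.
        self.cancel = observe({ isActive in
            apply(isActive)   // e.g. context.setManualAudioSessionIsActive(isActive)
        })
    }

    deinit {
        cancel()   // mirrors audioSessionActiveDisposable.dispose()
    }
}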

View File

@@ -326,7 +326,6 @@ private protocol OngoingCallThreadLocalContextProtocol: AnyObject {
func nativeVersion() -> String
func nativeGetDerivedState() -> Data
func addExternalAudioData(data: Data)
func nativeSetIsAudioSessionActive(isActive: Bool)
}
private final class OngoingCallThreadLocalContextHolder {
@@ -382,9 +381,6 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
func addExternalAudioData(data: Data) {
}
func nativeSetIsAudioSessionActive(isActive: Bool) {
}
}
public final class OngoingCallVideoCapturer {
@@ -577,12 +573,6 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {
func addExternalAudioData(data: Data) {
self.addExternalAudioData(data)
}
func nativeSetIsAudioSessionActive(isActive: Bool) {
#if os(iOS)
self.setManualAudioSessionIsActive(isActive)
#endif
}
}
private extension OngoingCallContextState.State {
@@ -736,13 +726,13 @@ public final class OngoingCallContext {
}
private let audioSessionDisposable = MetaDisposable()
private let audioSessionActiveDisposable = MetaDisposable()
private var networkTypeDisposable: Disposable?
public static var maxLayer: Int32 {
return OngoingCallThreadLocalContext.maxLayer()
}
private let tempLogFile: EngineTempBoxFile
private let tempStatsLogFile: EngineTempBoxFile
private var signalingConnectionManager: QueueLocalObject<CallSignalingConnectionManager>?
@@ -775,8 +765,8 @@ public final class OngoingCallContext {
self.callSessionManager = callSessionManager
self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
let logPath = self.logPath
let _ = try? FileManager.default.createDirectory(atPath: callLogsPath(account: account), withIntermediateDirectories: true, attributes: nil)
self.tempLogFile = EngineTempBox.shared.tempFile(fileName: "CallLog.txt")
let tempLogPath = self.tempLogFile.path
self.tempStatsLogFile = EngineTempBox.shared.tempFile(fileName: "CallStats.json")
let tempStatsLogPath = self.tempStatsLogFile.path
@@ -881,7 +871,7 @@ public final class OngoingCallContext {
}
}
let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: Data(), key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, allowTCP: enableTCP, enableStunMarking: enableStunMarking, logPath: logPath, statsLogPath: tempStatsLogPath, sendSignalingData: { [weak callSessionManager] data in
let context = OngoingCallThreadLocalContextWebrtc(version: version, queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: Data(), key: key, isOutgoing: isOutgoing, connections: filteredConnections, maxLayer: maxLayer, allowP2P: allowP2P, allowTCP: enableTCP, enableStunMarking: enableStunMarking, logPath: tempLogPath, statsLogPath: tempStatsLogPath, sendSignalingData: { [weak callSessionManager] data in
queue.async {
guard let strongSelf = self else {
return
@@ -896,7 +886,7 @@ public final class OngoingCallContext {
callSessionManager.sendSignalingData(internalId: internalId, data: data)
}
}
}, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "", useManualAudioSessionControl: true)
}, videoCapturer: video?.impl, preferredVideoCodec: preferredVideoCodec, audioInputDeviceId: "")
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { [weak callSessionManager] state, videoState, remoteVideoState, remoteAudioState, remoteBatteryLevel, _ in
@@ -960,16 +950,6 @@ public final class OngoingCallContext {
self?.audioLevelPromise.set(.single(level))
}
strongSelf.audioSessionActiveDisposable.set((audioSessionActive
|> deliverOn(queue)).start(next: { isActive in
guard let strongSelf = self else {
return
}
strongSelf.withContext { context in
context.nativeSetIsAudioSessionActive(isActive: isActive)
}
}))
strongSelf.networkTypeDisposable = (updatedNetworkType
|> deliverOn(queue)).start(next: { networkType in
self?.withContext { context in
@@ -1030,7 +1010,6 @@ public final class OngoingCallContext {
}
self.audioSessionDisposable.dispose()
self.audioSessionActiveDisposable.dispose()
self.networkTypeDisposable?.dispose()
}
@@ -1069,6 +1048,7 @@ public final class OngoingCallContext {
if !logPath.isEmpty {
statsLogPath = logPath + ".json"
}
let tempLogPath = self.tempLogFile.path
let tempStatsLogPath = self.tempStatsLogFile.path
self.withContextThenDeallocate { context in
@@ -1082,6 +1062,12 @@ public final class OngoingCallContext {
outgoing: bytesSentWifi))
updateAccountNetworkUsageStats(account: self.account, category: .call, delta: delta)
if !logPath.isEmpty {
let logsPath = callLogsPath(account: account)
let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
let _ = try? FileManager.default.moveItem(atPath: tempLogPath, toPath: logPath)
}
if !statsLogPath.isEmpty {
let logsPath = callLogsPath(account: account)
let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
@@ -1270,8 +1256,6 @@ private final class CallSignalingConnectionImpl: CallSignalingConnection {
}
func start() {
OngoingCallThreadLocalContextWebrtc.logMessage("CallSignaling: Connecting...")
self.connection.start(queue: self.queue.queue)
self.receivePacketHeader()
}
@@ -1415,91 +1399,48 @@ private final class CallSignalingConnectionImpl: CallSignalingConnection {
}
private final class CallSignalingConnectionManager {
private final class ConnectionContext {
let connection: CallSignalingConnection
let host: String
let port: UInt16
init(connection: CallSignalingConnection, host: String, port: UInt16) {
self.connection = connection
self.host = host
self.port = port
}
}
private let queue: Queue
private let peerTag: Data
private let dataReceived: (Data) -> Void
private var isRunning: Bool = false
private var nextConnectionId: Int = 0
private var connections: [Int: ConnectionContext] = [:]
private var connections: [Int: CallSignalingConnection] = [:]
init(queue: Queue, peerTag: Data, servers: [OngoingCallConnectionDescriptionWebrtc], dataReceived: @escaping (Data) -> Void) {
self.queue = queue
self.peerTag = peerTag
self.dataReceived = dataReceived
for server in servers {
if server.hasTcp {
self.spawnConnection(host: server.ip, port: UInt16(server.port))
let id = self.nextConnectionId
self.nextConnectionId += 1
if #available(iOS 12.0, *) {
let connection = CallSignalingConnectionImpl(queue: queue, host: server.ip, port: UInt16(server.port), peerTag: peerTag, dataReceived: { data in
dataReceived(data)
}, isClosed: { [weak self] in
guard let strongSelf = self else {
return
}
let _ = strongSelf
})
connections[id] = connection
}
}
}
}
func start() {
if self.isRunning {
return
}
self.isRunning = true
for (_, connection) in self.connections {
connection.connection.start()
connection.start()
}
}
func stop() {
if !self.isRunning {
return
}
self.isRunning = false
for (_, connection) in self.connections {
connection.connection.stop()
connection.stop()
}
}
func send(payloadData: Data) {
for (_, connection) in self.connections {
connection.connection.send(payloadData: payloadData)
}
}
private func spawnConnection(host: String, port: UInt16) {
let id = self.nextConnectionId
self.nextConnectionId += 1
if #available(iOS 12.0, *) {
let dataReceived = self.dataReceived
let connection = CallSignalingConnectionImpl(queue: queue, host: host, port: port, peerTag: self.peerTag, dataReceived: { data in
dataReceived(data)
}, isClosed: { [weak self] in
guard let `self` = self else {
return
}
self.handleConnectionFailed(id: id)
})
self.connections[id] = ConnectionContext(connection: connection, host: host, port: port)
if self.isRunning {
connection.start()
}
}
}
private func handleConnectionFailed(id: Int) {
if let connection = self.connections.removeValue(forKey: id) {
connection.connection.stop()
self.spawnConnection(host: connection.host, port: connection.port)
connection.send(payloadData: payloadData)
}
}
}
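This section also restores the temp-file flow for call logs: the WebRTC context writes to tempLogPath during the call, and the file is moved into the account's call-logs directory when the call stops (the createDirectory call moves out of init and into the stop path, alongside the stats log). A sketch of that finalize step, with an illustrative helper name:

import Foundation

// Move a call log written to a temp location into its final logs directory.
func finalizeCallLog(tempPath: String, finalPath: String, logsDirectory: String) {
    // Create the directory lazily — only when there is actually a log to keep.
    try? FileManager.default.createDirectory(atPath: logsDirectory,
                                             withIntermediateDirectories: true,
                                             attributes: nil)
    try? FileManager.default.moveItem(atPath: tempPath, toPath: finalPath)
}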

View File

@@ -206,24 +206,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
@property (nonatomic, copy) void (^ _Nullable audioLevelUpdated)(float);
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
proxy:(VoipProxyServerWebrtc * _Nullable)proxy
networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving
derivedState:(NSData * _Nonnull)derivedState
key:(NSData * _Nonnull)key
isOutgoing:(bool)isOutgoing
connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer
allowP2P:(BOOL)allowP2P
allowTCP:(BOOL)allowTCP
enableStunMarking:(BOOL)enableStunMarking
logPath:(NSString * _Nonnull)logPath
statsLogPath:(NSString * _Nonnull)statsLogPath
sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec
audioInputDeviceId:(NSString * _Nonnull)audioInputDeviceId
useManualAudioSessionControl:(bool)useManualAudioSessionControl;
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec audioInputDeviceId: (NSString * _Nonnull)audioInputDeviceId;
- (void)beginTermination;
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
@@ -377,8 +360,6 @@ typedef NS_ENUM(int32_t, OngoingGroupCallRequestedVideoQuality) {
- (void)stop;
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive;
- (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast;
- (void)emitJoinPayload:(void (^ _Nonnull)(NSString * _Nonnull, uint32_t))completion;

View File

@@ -28,9 +28,6 @@
#include "platform/darwin/iOS/tgcalls_audio_device_module_ios.h"
#include "platform/darwin/iOS/RTCAudioSession.h"
#include "platform/darwin/iOS/RTCAudioSessionConfiguration.h"
#endif
#import "group/GroupInstanceImpl.h"
@@ -629,7 +626,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
}
std::shared_ptr<tgcalls::VideoCaptureInterface> interface = strongSelf->_interface;
/*if (false && requestClone) {
if (false && requestClone) {
VideoSampleBufferView *remoteRenderer = [[VideoSampleBufferView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
@@ -646,7 +643,7 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
}
completion(remoteRenderer, cloneRenderer);
} else */if ([VideoMetalView isSupported]) {
} else if ([VideoMetalView isSupported]) {
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
@@ -708,8 +705,6 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
int32_t _contextId;
bool _useManualAudioSessionControl;
OngoingCallNetworkTypeWebrtc _networkType;
NSTimeInterval _callReceiveTimeout;
NSTimeInterval _callRingTimeout;
@@ -848,22 +843,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue
proxy:(VoipProxyServerWebrtc * _Nullable)proxy
networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving
derivedState:(NSData * _Nonnull)derivedState
key:(NSData * _Nonnull)key
isOutgoing:(bool)isOutgoing
connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer
allowP2P:(BOOL)allowP2P
allowTCP:(BOOL)allowTCP
enableStunMarking:(BOOL)enableStunMarking
logPath:(NSString * _Nonnull)logPath
statsLogPath:(NSString * _Nonnull)statsLogPath
sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer
preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec
audioInputDeviceId:(NSString * _Nonnull)audioInputDeviceId
useManualAudioSessionControl:(bool)useManualAudioSessionControl {
- (instancetype _Nonnull)initWithVersion:(NSString * _Nonnull)version queue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing connections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)connections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P allowTCP:(BOOL)allowTCP enableStunMarking:(BOOL)enableStunMarking logPath:(NSString * _Nonnull)logPath statsLogPath:(NSString * _Nonnull)statsLogPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer preferredVideoCodec:(NSString * _Nullable)preferredVideoCodec audioInputDeviceId: (NSString * _Nonnull)audioInputDeviceId {
self = [super init];
if (self != nil) {
_version = version;
@@ -872,25 +852,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
assert([[OngoingCallThreadLocalContextWebrtc versionsWithIncludeReference:true] containsObject:version]);
_useManualAudioSessionControl = useManualAudioSessionControl;
[RTCAudioSession sharedInstance].useManualAudio = true;
#ifdef WEBRTC_IOS
RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
if (useManualAudioSessionControl) {
sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
} else {
sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
}
sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
sharedConfiguration.outputNumberOfChannels = 1;
[RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
/*[RTCAudioSession sharedInstance].useManualAudio = true;
[[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
[RTCAudioSession sharedInstance].isAudioEnabled = true;*/
#endif
_callReceiveTimeout = 20.0;
_callRingTimeout = 90.0;
_callConnectTimeout = 30.0;
@@ -1133,17 +1094,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
- (void)beginTermination {
}
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
if (_useManualAudioSessionControl) {
if (isAudioSessionActive) {
[[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
} else {
[[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
}
[RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
}
}
+ (void)stopWithTerminationResult:(OngoingCallThreadLocalContextWebrtcTerminationResult *)terminationResult completion:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
if (completion) {
if (terminationResult) {
@@ -1479,22 +1429,6 @@ private:
}
}
#ifdef WEBRTC_IOS
RTCAudioSessionConfiguration *sharedConfiguration = [RTCAudioSessionConfiguration webRTCConfiguration];
sharedConfiguration.mode = AVAudioSessionModeVoiceChat;
sharedConfiguration.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
if (disableAudioInput) {
sharedConfiguration.outputNumberOfChannels = 2;
} else {
sharedConfiguration.outputNumberOfChannels = 1;
}
[RTCAudioSessionConfiguration setWebRTCConfiguration:sharedConfiguration];
/*[RTCAudioSession sharedInstance].useManualAudio = true;
[[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
[RTCAudioSession sharedInstance].isAudioEnabled = true;*/
#endif
std::vector<tgcalls::VideoCodecName> videoCodecPreferences;
int minOutgoingVideoBitrateKbit = 500;
@@ -1678,15 +1612,6 @@ private:
}
}
- (void)setManualAudioSessionIsActive:(bool)isAudioSessionActive {
if (isAudioSessionActive) {
[[RTCAudioSession sharedInstance] audioSessionDidActivate:[AVAudioSession sharedInstance]];
} else {
[[RTCAudioSession sharedInstance] audioSessionDidDeactivate:[AVAudioSession sharedInstance]];
}
[RTCAudioSession sharedInstance].isAudioEnabled = isAudioSessionActive;
}
- (void)setConnectionMode:(OngoingCallConnectionMode)connectionMode keepBroadcastConnectedIfWasEnabled:(bool)keepBroadcastConnectedIfWasEnabled isUnifiedBroadcast:(bool)isUnifiedBroadcast {
if (_instance) {
tgcalls::GroupConnectionMode mappedConnectionMode;

@@ -1 +1 @@
Subproject commit 0aa4b1277fd018e56bf194d72b5405e397c6918b
Subproject commit 53bb1711ae0b3810d34edb1c81982b18d70c5506