Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit c8c1c96f16 ("Temp")
Parent: cc6537b811
@@ -233,7 +233,9 @@ public final class PresentationCallImpl: PresentationCall {
private var droppedCall = false
private var dropCallKitCallTimer: SwiftSignalKit.Timer?

init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>) {
private var videoCapturer: OngoingCallVideoCapturer?

init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, startWithVideo: Bool) {
self.account = account
self.audioSession = audioSession
self.callSessionManager = callSessionManager
@@ -259,6 +261,11 @@ public final class PresentationCallImpl: PresentationCall {
self.isOutgoing = isOutgoing
self.isVideo = initialState?.type == .video
self.peer = peer
self.isVideo = startWithVideo
if self.isVideo {
self.videoCapturer = OngoingCallVideoCapturer()
self.statePromise.set(PresentationCallState(state: .waiting, videoState: .activeOutgoing, remoteVideoState: .inactive))
}

self.serializedData = serializedData
self.dataSaving = dataSaving
@@ -440,7 +447,11 @@ public final class PresentationCallImpl: PresentationCall {
mappedRemoteVideoState = .active
}
} else {
mappedVideoState = .notAvailable
if self.isVideo {
mappedVideoState = .activeOutgoing
} else {
mappedVideoState = .notAvailable
}
mappedRemoteVideoState = .inactive
}

@@ -523,7 +534,7 @@ public final class PresentationCallImpl: PresentationCall {
if let _ = audioSessionControl, !wasActive || previousControl == nil {
let logName = "\(id.id)_\(id.accessHash)"

let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, isVideo: sessionState.type == .video, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName)
let ongoingContext = OngoingCallContext(account: account, callSessionManager: self.callSessionManager, internalId: self.internalId, proxyServer: proxyServer, auxiliaryServers: auxiliaryServers, initialNetworkType: self.currentNetworkType, updatedNetworkType: self.updatedNetworkType, serializedData: self.serializedData, dataSaving: dataSaving, derivedState: self.derivedState, key: key, isOutgoing: sessionState.isOutgoing, video: self.videoCapturer, connections: connections, maxLayer: maxLayer, version: version, allowP2P: allowsP2P, audioSessionActive: self.audioSessionActive.get(), logName: logName)
self.ongoingContext = ongoingContext

self.debugInfoValue.set(ongoingContext.debugInfo())
@@ -718,10 +729,6 @@ public final class PresentationCallImpl: PresentationCall {
self.ongoingContext?.setEnableVideo(value)
}

public func switchVideoCamera() {
self.ongoingContext?.switchVideoCamera()
}

public func setCurrentAudioOutput(_ output: AudioSessionOutput) {
guard self.currentAudioOutputValue != output else {
return
@@ -748,6 +755,10 @@ public final class PresentationCallImpl: PresentationCall {
}

public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.ongoingContext?.makeOutgoingVideoView(completion: completion)
self.videoCapturer?.makeOutgoingVideoView(completion: completion)
}

public func switchVideoCamera() {
self.videoCapturer?.switchCamera()
}
}
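Note (illustrative, not part of the commit): after these hunks, makeOutgoingVideoView and switchVideoCamera on PresentationCallImpl are served by the call's OngoingCallVideoCapturer rather than the ongoing context. A minimal Swift sketch of how UI code might consume that surface, assuming a live call object and a host view that are not part of the diff:

import UIKit

// Sketch only: `call` is assumed to be a live PresentationCallImpl created with
// startWithVideo = true; `container` is whatever view hosts the local preview.
func attachLocalPreview(of call: PresentationCallImpl, in container: UIView) {
    // Routed to videoCapturer?.makeOutgoingVideoView in the hunk above; the
    // completion is invoked on the main queue with the preview view (or nil).
    call.makeOutgoingVideoView { previewView in
        guard let previewView = previewView else { return }
        previewView.frame = container.bounds
        container.addSubview(previewView)
    }
}

func flipCamera(of call: PresentationCallImpl) {
    // Forwarded to videoCapturer?.switchCamera() instead of the ongoing context.
    call.switchVideoCamera()
}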
@@ -278,52 +278,6 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
self.callSettingsDisposable?.dispose()
}

public func injectRingingStateSynchronously(account: Account, ringingState: CallSessionRingingState, callSession: CallSession) {
if self.currentCall != nil {
return
}

let semaphore = DispatchSemaphore(value: 0)
var data: (PreferencesView, AccountSharedDataView, Peer?)?
let _ = combineLatest(
account.postbox.preferencesView(keys: [PreferencesKeys.voipConfiguration, ApplicationSpecificPreferencesKeys.voipDerivedState, PreferencesKeys.appConfiguration])
|> take(1),
accountManager.sharedData(keys: [SharedDataKeys.autodownloadSettings])
|> take(1),
account.postbox.transaction { transaction -> Peer? in
return transaction.getPeer(ringingState.peerId)
}
).start(next: { preferences, sharedData, peer in
data = (preferences, sharedData, peer)
semaphore.signal()
})
semaphore.wait()

if let (preferences, sharedData, maybePeer) = data, let peer = maybePeer {
let configuration = preferences.values[PreferencesKeys.voipConfiguration] as? VoipConfiguration ?? .defaultValue
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
let derivedState = preferences.values[ApplicationSpecificPreferencesKeys.voipDerivedState] as? VoipDerivedState ?? .default
let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings

let enableCallKit = true

let call = PresentationCallImpl(account: account, audioSession: self.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(self.callKitIntegration, settings: self.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: self.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: self.getDeviceAccessData, initialState: callSession, internalId: ringingState.id, peerId: ringingState.peerId, isOutgoing: false, peer: peer, proxyServer: self.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: .none, updatedNetworkType: account.networkType)
self.updateCurrentCall(call)
self.currentCallPromise.set(.single(call))
self.hasActiveCallsPromise.set(true)
self.removeCurrentCallDisposable.set((call.canBeRemoved
|> deliverOnMainQueue).start(next: { [weak self, weak call] value in
if value, let strongSelf = self, let call = call {
if strongSelf.currentCall === call {
strongSelf.updateCurrentCall(nil)
strongSelf.currentCallPromise.set(.single(nil))
strongSelf.hasActiveCallsPromise.set(false)
}
}
}))
}
}

private func ringingStatesUpdated(_ ringingStates: [(Account, Peer, CallSessionRingingState, Bool, NetworkType)], enableCallKit: Bool) {
if let firstState = ringingStates.first {
if self.currentCall == nil {
@@ -338,7 +292,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue

let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType)
let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType, startWithVideo: firstState.2.isVideo)
strongSelf.updateCurrentCall(call)
strongSelf.currentCallPromise.set(.single(call))
strongSelf.hasActiveCallsPromise.set(true)
@@ -491,7 +445,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue

let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType)
let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType, startWithVideo: isVideo)
strongSelf.updateCurrentCall(call)
strongSelf.currentCallPromise.set(.single(call))
strongSelf.hasActiveCallsPromise.set(true)
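Note (illustrative, not part of the commit): on the manager side the change only decides where startWithVideo comes from, namely the ringing state's new isVideo field for incoming calls or the request's flag for outgoing ones. A tiny self-contained Swift model of that decision; RingingInfo is a stand-in type invented for this sketch:

// Stand-in for the relevant part of CallSessionRingingState after this commit.
struct RingingInfo {
    let isVideo: Bool
}

// Incoming calls read the flag from the ringing state (firstState.2.isVideo above);
// outgoing calls use whatever the request asked for (the isVideo parameter above).
func startWithVideo(incoming ringing: RingingInfo?, outgoingRequestIsVideo: Bool) -> Bool {
    if let ringing = ringing {
        return ringing.isVideo
    }
    return outgoingRequestIsVideo
}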
@@ -107,9 +107,10 @@ typealias CallSessionStableId = Int64
public struct CallSessionRingingState: Equatable {
public let id: CallSessionInternalId
public let peerId: PeerId
public let isVideo: Bool

public static func ==(lhs: CallSessionRingingState, rhs: CallSessionRingingState) -> Bool {
return lhs.id == rhs.id && lhs.peerId == rhs.peerId
return lhs.id == rhs.id && lhs.peerId == rhs.peerId && lhs.isVideo == rhs.isVideo
}
}

@@ -365,7 +366,7 @@ private final class CallSessionManagerContext {
var ringingContexts: [CallSessionRingingState] = []
for (id, context) in self.contexts {
if case .ringing = context.state {
ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId))
ringingContexts.append(CallSessionRingingState(id: id, peerId: context.peerId, isVideo: context.type == .video))
}
}
return ringingContexts
@@ -245,7 +245,6 @@ private protocol OngoingCallThreadLocalContextProtocol: class {
func nativeSetNetworkType(_ type: NetworkType)
func nativeSetIsMuted(_ value: Bool)
func nativeSetVideoEnabled(_ value: Bool)
func nativeSwitchVideoCamera()
func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void)
func nativeDebugInfo() -> String
func nativeVersion() -> String
@@ -292,6 +291,22 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
}
}

public final class OngoingCallVideoCapturer {
fileprivate let impl: OngoingCallThreadLocalContextVideoCapturer

public init() {
self.impl = OngoingCallThreadLocalContextVideoCapturer()
}

public func switchCamera() {
self.impl.switchVideoCamera()
}

public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.impl.makeOutgoingVideoView(completion)
}
}

extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {
func nativeSetNetworkType(_ type: NetworkType) {
self.setNetworkType(ongoingNetworkTypeForTypeWebrtc(type))
@@ -309,10 +324,6 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
self.setVideoEnabled(value)
}

func nativeSwitchVideoCamera() {
self.switchVideoCamera()
}

func nativeDebugInfo() -> String {
return self.debugInfo() ?? ""
}
@@ -463,7 +474,7 @@ public final class OngoingCallContext {
return result
}

public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, isVideo: Bool, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
let _ = setupLogs
OngoingCallThreadLocalContext.applyServerConfig(serializedData)
//OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData)
@@ -542,9 +553,9 @@ public final class OngoingCallContext {
))
}
}
let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, isVideo: isVideo, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, rtcServers: rtcServers, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
callSessionManager?.sendSignalingData(internalId: internalId, data: data)
})
}, videoCapturer: video?.impl)

strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { state, videoState, remoteVideoState in
@@ -696,12 +707,6 @@ public final class OngoingCallContext {
}
}

public func switchVideoCamera() {
self.withContext { context in
context.nativeSwitchVideoCamera()
}
}

public func debugInfo() -> Signal<(String, String), NoError> {
let poll = Signal<(String, String), NoError> { subscriber in
self.withContext { context in
@@ -725,14 +730,4 @@ public final class OngoingCallContext {
}
}
}

public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
self.withContext { context in
if let context = context as? OngoingCallThreadLocalContextWebrtc {
context.makeOutgoingVideoView(completion)
} else {
completion(nil)
}
}
}
}
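Note (illustrative, not part of the commit): the net effect of the hunks above is that OngoingCallContext no longer takes isVideo: Bool; it takes an optional OngoingCallVideoCapturer and forwards video?.impl to the WebRTC context. A minimal Swift sketch of the capturer's lifecycle from the caller's point of view; VideoCallPipeline and the elided context arguments are assumptions for illustration only:

import UIKit

// Sketch only: wraps the capturer the way PresentationCallImpl does in the diff.
final class VideoCallPipeline {
    private let capturer: OngoingCallVideoCapturer?

    init(startWithVideo: Bool) {
        // The capturer exists only for calls that start with video.
        self.capturer = startWithVideo ? OngoingCallVideoCapturer() : nil
    }

    // When the call session is ready, the capturer (not a Bool) is passed along:
    //   OngoingCallContext(..., video: self.capturer, ...)
    // and OngoingCallContext hands video?.impl to OngoingCallThreadLocalContextWebrtc.

    func makeLocalPreview(_ completion: @escaping (UIView?) -> Void) {
        // Audio-only calls have no capturer, so the preview completion gets nil.
        if let capturer = self.capturer {
            capturer.makeOutgoingVideoView(completion: completion)
        } else {
            completion(nil)
        }
    }

    func flipCamera() {
        self.capturer?.switchCamera()
    }
}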
@@ -36,7 +36,6 @@ Manager::Manager(
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::vector<TgVoipRtcServer> const &rtcServers,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const TgVoipState &)> stateUpdated,
std::function<void (bool)> videoStateUpdated,
@@ -47,7 +46,6 @@ _thread(thread),
_encryptionKey(encryptionKey),
_enableP2P(enableP2P),
_rtcServers(rtcServers),
_startWithVideo(isVideo),
_videoCapture(videoCapture),
_stateUpdated(stateUpdated),
_videoStateUpdated(videoStateUpdated),
@@ -112,11 +110,10 @@ void Manager::start() {
);
}));
bool isOutgoing = _encryptionKey.isOutgoing;
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, startWithVideo = _startWithVideo, videoCapture = _videoCapture, weakThis]() {
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, videoCapture = _videoCapture, weakThis]() {
return new MediaManager(
getMediaThread(),
isOutgoing,
startWithVideo,
videoCapture,
[thread, weakThis](const rtc::CopyOnWriteBuffer &packet) {
thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() {
@@ -205,12 +202,6 @@ void Manager::setMuteOutgoingAudio(bool mute) {
});
}

void Manager::switchVideoCamera() {
_mediaManager->perform([](MediaManager *mediaManager) {
mediaManager->switchVideoCamera();
});
}

void Manager::notifyIsLocalVideoActive(bool isActive) {
rtc::CopyOnWriteBuffer buffer;
uint8_t mode = 4;
@@ -230,12 +221,6 @@ void Manager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<web
});
}

void Manager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_mediaManager->perform([sink](MediaManager *mediaManager) {
mediaManager->setOutgoingVideoOutput(sink);
});
}

#ifdef TGVOIP_NAMESPACE
}
#endif
@@ -19,7 +19,6 @@ public:
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::vector<TgVoipRtcServer> const &rtcServers,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const TgVoipState &)> stateUpdated,
std::function<void (bool)> videoStateUpdated,
@@ -32,17 +31,14 @@ public:
void receiveSignalingData(const std::vector<uint8_t> &data);
void setSendVideo(bool sendVideo);
void setMuteOutgoingAudio(bool mute);
void switchVideoCamera();
void notifyIsLocalVideoActive(bool isActive);
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);

private:
rtc::Thread *_thread;
TgVoipEncryptionKey _encryptionKey;
bool _enableP2P;
std::vector<TgVoipRtcServer> _rtcServers;
bool _startWithVideo;
std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture;
std::function<void (const TgVoipState &)> _stateUpdated;
std::function<void (bool)> _videoStateUpdated;
@@ -19,6 +19,9 @@

#include "api/video_codecs/builtin_video_encoder_factory.h"

#include "TgVoip.h"
#include "VideoCaptureInterfaceImpl.h"

#if TARGET_OS_IPHONE

#include "CodecsApple.h"
@@ -164,7 +167,7 @@ static rtc::Thread *makeWorkerThread() {
}


static rtc::Thread *MediaManager::getWorkerThread() {
rtc::Thread *MediaManager::getWorkerThread() {
static rtc::Thread *value = makeWorkerThread();
return value;
}
@@ -172,7 +175,7 @@ static rtc::Thread *MediaManager::getWorkerThread() {
MediaManager::MediaManager(
rtc::Thread *thread,
bool isOutgoing,
bool startWithVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted,
std::function<void (bool)> localVideoCaptureActiveUpdated
) :
@@ -180,7 +183,8 @@ _packetEmitted(packetEmitted),
_localVideoCaptureActiveUpdated(localVideoCaptureActiveUpdated),
_thread(thread),
_eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
_videoCapture(videoCapture) {
_ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.fecIncoming = isOutgoing ? ssrcAudioFecIncoming : ssrcAudioFecOutgoing;
@@ -199,7 +203,6 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_videoCodecs = AssignPayloadTypesAndDefaultCodecs(videoEncoderFactory->GetSupportedFormats());

_isSendingVideo = false;
_useFrontCamera = true;

_audioNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, false));
_videoNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, true));
@@ -283,9 +286,9 @@ _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {

_videoChannel->SetInterface(_videoNetworkInterface.get(), webrtc::MediaTransportConfig());

_nativeVideoSource = makeVideoSource(_thread, getWorkerThread());

if (startWithVideo) {
if (_videoCapture != nullptr) {
((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->setIsActiveUpdated(this->_localVideoCaptureActiveUpdated);

setSendVideo(true);
}
}
@@ -372,10 +375,6 @@ void MediaManager::setSendVideo(bool sendVideo) {
codec.SetParam(cricket::kCodecParamStartBitrate, 512);
codec.SetParam(cricket::kCodecParamMaxBitrate, 2500);

_videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) {
localVideoCaptureActiveUpdated(isActive);
});

cricket::VideoSendParameters videoSendParameters;
videoSendParameters.codecs.push_back(codec);

@@ -402,11 +401,15 @@ void MediaManager::setSendVideo(bool sendVideo) {
videoSendStreamParams.cname = "cname";
_videoChannel->AddSendStream(videoSendStreamParams);

_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get());
if (_videoCapture != nullptr) {
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource.get());
}
_videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr);
} else {
_videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing));
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get());
if (_videoCapture != nullptr) {
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, ((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->_videoSource);
}
}

cricket::VideoRecvParameters videoRecvParameters;
@@ -449,8 +452,6 @@ void MediaManager::setSendVideo(bool sendVideo) {
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr);
_videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr);

_videoCapturer.reset();

_videoChannel->RemoveRecvStream(_ssrcVideo.incoming);
_videoChannel->RemoveRecvStream(_ssrcVideo.fecIncoming);
_videoChannel->RemoveSendStream(_ssrcVideo.outgoing);
@@ -466,25 +467,11 @@ void MediaManager::setMuteOutgoingAudio(bool mute) {
_audioChannel->SetAudioSend(_ssrcAudio.outgoing, _isConnected && !_muteOutgoingAudio, nullptr, &_audioSource);
}

void MediaManager::switchVideoCamera() {
if (_isSendingVideo) {
_useFrontCamera = !_useFrontCamera;
_videoCapturer = makeVideoCapturer(_nativeVideoSource, _useFrontCamera, [localVideoCaptureActiveUpdated = _localVideoCaptureActiveUpdated](bool isActive) {
localVideoCaptureActiveUpdated(isActive);
});
}
}

void MediaManager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_currentIncomingVideoSink = sink;
_videoChannel->SetSink(_ssrcVideo.incoming, _currentIncomingVideoSink.get());
}

void MediaManager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_currentOutgoingVideoSink = sink;
_nativeVideoSource->AddOrUpdateSink(_currentOutgoingVideoSink.get(), rtc::VideoSinkWants());
}

MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) :
_mediaManager(mediaManager),
_isVideo(isVideo) {
@@ -7,6 +7,8 @@
#include "api/transport/field_trial_based_config.h"
#include "pc/rtp_sender.h"

#include "TgVoip.h"

#include <functional>
#include <memory>

@@ -59,7 +61,6 @@ public:
MediaManager(
rtc::Thread *thread,
bool isOutgoing,
bool startWithVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted,
std::function<void (bool)> localVideoCaptureActiveUpdated
@@ -71,9 +72,7 @@ public:
void notifyPacketSent(const rtc::SentPacket &sentPacket);
void setSendVideo(bool sendVideo);
void setMuteOutgoingAudio(bool mute);
void switchVideoCamera();
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);

protected:
std::function<void (const rtc::CopyOnWriteBuffer &)> _packetEmitted;
@@ -93,7 +92,6 @@ private:

std::vector<cricket::VideoCodec> _videoCodecs;
bool _isSendingVideo;
bool _useFrontCamera;

std::unique_ptr<cricket::MediaEngineInterface> _mediaEngine;
std::unique_ptr<webrtc::Call> _call;
@@ -104,7 +102,6 @@ private:
std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> _videoBitrateAllocatorFactory;
std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentIncomingVideoSink;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentOutgoingVideoSink;

std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;
@@ -136,6 +136,8 @@ public:
static std::shared_ptr<TgVoipVideoCaptureInterface> makeInstance();

virtual ~TgVoipVideoCaptureInterface();

virtual void switchCamera() = 0;
virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
};

@@ -156,7 +158,6 @@ public:
std::vector<TgVoipRtcServer> const &rtcServers,
TgVoipNetworkType initialNetworkType,
TgVoipEncryptionKey const &encryptionKey,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(bool)> videoStateUpdated,
@@ -172,7 +173,6 @@ public:
virtual void setEchoCancellationStrength(int strength) = 0;

virtual void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;

virtual std::string getLastError() = 0;
virtual std::string getDebugInfo() = 0;
@@ -182,7 +182,6 @@ public:

virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
virtual void setSendVideo(bool sendVideo) = 0;
virtual void switchVideoCamera() = 0;

virtual TgVoipFinalState stop() = 0;
};
@@ -10,6 +10,8 @@
#include <stdarg.h>
#include <iostream>

#include "VideoCaptureInterfaceImpl.h"

#if TARGET_OS_IPHONE

#include "CodecsApple.h"
@@ -151,7 +153,6 @@ public:
std::vector<TgVoipRtcServer> const &rtcServers,
TgVoipConfig const &config,
TgVoipEncryptionKey const &encryptionKey,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
TgVoipNetworkType initialNetworkType,
std::function<void(TgVoipState)> stateUpdated,
@@ -170,13 +171,12 @@ public:

bool enableP2P = config.enableP2P;

_manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, isVideo, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
_manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
return new Manager(
getManagerThread(),
encryptionKey,
enableP2P,
rtcServers,
isVideo,
videoCapture,
[stateUpdated](const TgVoipState &state) {
stateUpdated(state);
@@ -212,12 +212,6 @@ public:
manager->setSendVideo(sendVideo);
});
};

void switchVideoCamera() override {
_manager->perform([](Manager *manager) {
manager->switchVideoCamera();
});
}

void setNetworkType(TgVoipNetworkType networkType) override {
/*message::NetworkType mappedType;
@@ -278,12 +272,6 @@ public:
manager->setIncomingVideoOutput(sink);
});
}

void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
_manager->perform([sink](Manager *manager) {
manager->setOutgoingVideoOutput(sink);
});
}

void setAudioOutputGainControlEnabled(bool enabled) override {
}
@@ -398,7 +386,6 @@ TgVoip *TgVoip::makeInstance(
std::vector<TgVoipRtcServer> const &rtcServers,
TgVoipNetworkType initialNetworkType,
TgVoipEncryptionKey const &encryptionKey,
bool isVideo,
std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(bool)> videoStateUpdated,
@@ -412,7 +399,6 @@ TgVoip *TgVoip::makeInstance(
rtcServers,
config,
encryptionKey,
isVideo,
videoCapture,
initialNetworkType,
stateUpdated,
@@ -424,72 +410,6 @@ TgVoip *TgVoip::makeInstance(

TgVoip::~TgVoip() = default;

class TgVoipVideoCaptureInterfaceObject {
public:
TgVoipVideoCaptureInterfaceObject() {
_videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
//this should outlive the capturer
_videoCapturer = makeVideoCapturer(_videoSource, true, [this](bool isActive) {
if (this->_isActiveUpdated) {
this->_isActiveUpdated(isActive);
}
});
}

~TgVoipVideoCaptureInterfaceObject() {
if (_currentSink != nullptr) {
_videoSource->RemoveSink(_currentSink.get());
}
}

void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
if (_currentSink != nullptr) {
_videoSource->RemoveSink(_currentSink.get());
}
_currentSink = sink;
if (_currentSink != nullptr) {
_videoSource->AddOrUpdateSink(_currentSink.get(), rtc::VideoSinkWants());
}
}

void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated) {
_isActiveUpdated = isActiveUpdated;
}

public:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _videoSource;
std::unique_ptr<VideoCapturerInterface> _videoCapturer;

private:
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
std::function<void (bool)> _isActiveUpdated;
};

class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
public:
TgVoipVideoCaptureInterfaceImpl() {
_impl.reset(new ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>(
Manager::getMediaThread(),
[]() {
return new TgVoipVideoCaptureInterfaceObject();
}
));
}

virtual ~TgVoipVideoCaptureInterfaceImpl() {

}

virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {
impl->setVideoOutput(sink);
});
}

public:
std::unique_ptr<ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>> _impl;
};

std::shared_ptr<TgVoipVideoCaptureInterface>TgVoipVideoCaptureInterface::makeInstance() {
return std::shared_ptr<TgVoipVideoCaptureInterface>(new TgVoipVideoCaptureInterfaceImpl());
}
@@ -43,6 +43,12 @@ public:
});
}

T *getSyncAssumingSameThread() {
assert(_thread->IsCurrent());
assert(_valueHolder->_value != nullptr);
return _valueHolder->_value.get();
}

private:
rtc::Thread *_thread;
std::shared_ptr<ValueHolder<T>> _valueHolder;
@@ -0,0 +1,50 @@
#ifndef VIDEO_CAPTURE_INTERFACE_IMPL_H
#define VIDEO_CAPTURE_INTERFACE_IMPL_H

#include "TgVoip.h"
#include <memory>
#include "ThreadLocalObject.h"
#include "api/media_stream_interface.h"

#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif

class VideoCapturerInterface;

class TgVoipVideoCaptureInterfaceObject {
public:
TgVoipVideoCaptureInterfaceObject();
~TgVoipVideoCaptureInterfaceObject();

void switchCamera();
void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setIsActiveUpdated(std::function<void (bool)> isActiveUpdated);

public:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _videoSource;
std::unique_ptr<VideoCapturerInterface> _videoCapturer;

private:
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentSink;
std::function<void (bool)> _isActiveUpdated;
bool _useFrontCamera;
};

class TgVoipVideoCaptureInterfaceImpl : public TgVoipVideoCaptureInterface {
public:
TgVoipVideoCaptureInterfaceImpl();
virtual ~TgVoipVideoCaptureInterfaceImpl();

virtual void switchCamera();
virtual void setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);

public:
std::unique_ptr<ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>> _impl;
};

#ifdef TGVOIP_NAMESPACE
}
#endif

#endif
@@ -0,0 +1,76 @@
#include "VideoCaptureInterfaceImpl.h"

#include "CodecsApple.h"
#include "Manager.h"
#include "MediaManager.h"

#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif

TgVoipVideoCaptureInterfaceObject::TgVoipVideoCaptureInterfaceObject() {
_useFrontCamera = true;
_videoSource = makeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread());
//this should outlive the capturer
_videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
if (this->_isActiveUpdated) {
this->_isActiveUpdated(isActive);
}
});
}

TgVoipVideoCaptureInterfaceObject::~TgVoipVideoCaptureInterfaceObject() {
if (_currentSink != nullptr) {
_videoSource->RemoveSink(_currentSink.get());
}
}

void TgVoipVideoCaptureInterfaceObject::switchCamera() {
_useFrontCamera = !_useFrontCamera;
_videoCapturer = makeVideoCapturer(_videoSource, _useFrontCamera, [this](bool isActive) {
if (this->_isActiveUpdated) {
this->_isActiveUpdated(isActive);
}
});
}

void TgVoipVideoCaptureInterfaceObject::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
if (_currentSink != nullptr) {
_videoSource->RemoveSink(_currentSink.get());
}
_currentSink = sink;
if (_currentSink != nullptr) {
_videoSource->AddOrUpdateSink(_currentSink.get(), rtc::VideoSinkWants());
}
}

void TgVoipVideoCaptureInterfaceObject::setIsActiveUpdated(std::function<void (bool)> isActiveUpdated) {
_isActiveUpdated = isActiveUpdated;
}

TgVoipVideoCaptureInterfaceImpl::TgVoipVideoCaptureInterfaceImpl() {
_impl.reset(new ThreadLocalObject<TgVoipVideoCaptureInterfaceObject>(
Manager::getMediaThread(),
[]() {
return new TgVoipVideoCaptureInterfaceObject();
}
));
}

TgVoipVideoCaptureInterfaceImpl::~TgVoipVideoCaptureInterfaceImpl() {

}

void TgVoipVideoCaptureInterfaceImpl::switchCamera() {
_impl->perform([](TgVoipVideoCaptureInterfaceObject *impl) {
impl->switchCamera();
});
}

void TgVoipVideoCaptureInterfaceImpl::setVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_impl->perform([sink](TgVoipVideoCaptureInterfaceObject *impl) {
impl->setVideoOutput(sink);
});
}

}
@@ -78,6 +78,16 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {

@end

@interface OngoingCallThreadLocalContextVideoCapturer : NSObject

- (instancetype _Nonnull)init;

- (void)switchVideoCamera;

- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;

@end

@interface OngoingCallThreadLocalContextWebrtc : NSObject

+ (void)setupLoggingFunction:(void (* _Nullable)(NSString * _Nullable))loggingFunction;
@@ -88,7 +98,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtc, OngoingCallVideoStateWebrtc, OngoingCallRemoteVideoStateWebrtc);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);

- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData;
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^ _Nonnull)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;

- (bool)needRate;
@@ -99,10 +109,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {

- (void)setIsMuted:(bool)isMuted;
- (void)setVideoEnabled:(bool)videoEnabled;
- (void)switchVideoCamera;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)addSignalingData:(NSData * _Nonnull)data;

@end
@@ -21,6 +21,45 @@ using namespace TGVOIP_NAMESPACE;

@end

@interface OngoingCallThreadLocalContextVideoCapturer () {
std::shared_ptr<TgVoipVideoCaptureInterface> _interface;
}

@end

@implementation OngoingCallThreadLocalContextVideoCapturer

- (instancetype _Nonnull)init {
self = [super init];
if (self != nil) {
_interface = TgVoipVideoCaptureInterface::makeInstance();
}
return self;
}

- (void)switchVideoCamera {
_interface->switchCamera();
}

- (std::shared_ptr<TgVoipVideoCaptureInterface>)getInterface {
return _interface;
}

- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
std::shared_ptr<TgVoipVideoCaptureInterface> interface = _interface;
dispatch_async(dispatch_get_main_queue(), ^{
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;

std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
interface->setVideoOutput(sink);

completion(remoteRenderer);
});
}

@end

@interface OngoingCallThreadLocalContextWebrtc () {
id<OngoingCallThreadLocalContextQueueWebrtc> _queue;
int32_t _contextId;
@@ -36,6 +75,7 @@ using namespace TGVOIP_NAMESPACE;
OngoingCallStateWebrtc _state;
OngoingCallVideoStateWebrtc _videoState;
OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;

int32_t _signalBars;
NSData *_lastDerivedState;
@@ -134,7 +174,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
return @"2.7.7";
}

- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing isVideo:(bool)isVideo primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData; {
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtc> _Nonnull)queue proxy:(VoipProxyServerWebrtc * _Nullable)proxy rtcServers:(NSArray<VoipRtcServerWebrtc *> * _Nonnull)rtcServers networkType:(OngoingCallNetworkTypeWebrtc)networkType dataSaving:(OngoingCallDataSavingWebrtc)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtc * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtc *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData videoCapturer:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
self = [super init];
if (self != nil) {
_queue = queue;
@@ -146,7 +186,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
_callPacketTimeout = 10.0;
_networkType = networkType;
_sendSignalingData = [sendSignalingData copy];
if (isVideo) {
_videoCapturer = videoCapturer;
if (videoCapturer != nil) {
_videoState = OngoingCallVideoStateActiveOutgoing;
_remoteVideoState = OngoingCallRemoteVideoStateActive;
} else {
@@ -236,7 +277,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
parsedRtcServers,
callControllerNetworkTypeForType(networkType),
encryptionKey,
isVideo,
[_videoCapturer getInterface],
[weakSelf, queue](TgVoipState state) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
@@ -424,12 +465,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}

- (void)switchVideoCamera {
if (_tgVoip) {
_tgVoip->switchVideoCamera();
}
}

- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType {
if (_networkType != networkType) {
_networkType = networkType;
@@ -457,23 +492,5 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}

- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
if (_tgVoip) {
__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;

std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_tgVoip->setOutgoingVideoOutput(sink);
}

completion(remoteRenderer);
});
}
}

@end