Add logging

This commit is contained in:
Ali
2020-06-16 16:51:33 +04:00
parent 4d8d4188a7
commit f093a5ae61
22 changed files with 355 additions and 949 deletions

View File

@@ -45,6 +45,8 @@ public final class CallController: ViewController {
private var audioOutputStateDisposable: Disposable?
private var audioOutputState: ([AudioSessionOutput], AudioSessionOutput?)?
private let idleTimerExtensionDisposable = MetaDisposable()
public init(sharedContext: SharedAccountContext, account: Account, call: PresentationCall, easyDebugAccess: Bool) {
self.sharedContext = sharedContext
self.account = account
@@ -97,6 +99,7 @@ public final class CallController: ViewController {
self.disposable?.dispose()
self.callMutedDisposable?.dispose()
self.audioOutputStateDisposable?.dispose()
self.idleTimerExtensionDisposable.dispose()
}
private func callStateUpdated(_ callState: PresentationCallState) {
@@ -260,6 +263,14 @@ public final class CallController: ViewController {
self.controllerNode.animateIn()
}
self.idleTimerExtensionDisposable.set(self.sharedContext.applicationBindings.pushIdleTimerExtension())
}
override public func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
self.idleTimerExtensionDisposable.set(nil)
}
override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {

View File

@@ -287,6 +287,9 @@ final class CallControllerNode: ASDisplayNode {
return
}
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
outgoingVideoView.layer.cornerRadius = 16.0
strongSelf.setCurrentAudioOutput?(.speaker)
strongSelf.outgoingVideoView = outgoingVideoView
if let incomingVideoView = strongSelf.incomingVideoView {
@@ -407,14 +410,6 @@ final class CallControllerNode: ASDisplayNode {
transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
if let incomingVideoView = self.incomingVideoView {
incomingVideoView.frame = CGRect(origin: CGPoint(), size: layout.size)
}
if let outgoingVideoView = self.outgoingVideoView {
let outgoingSize = layout.size.aspectFitted(CGSize(width: 320.0, height: 320.0))
outgoingVideoView.frame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: layout.size.height - 16.0 - outgoingSize.height), size: outgoingSize)
}
if let keyPreviewNode = self.keyPreviewNode {
transition.updateFrame(node: keyPreviewNode, frame: CGRect(origin: CGPoint(), size: layout.size))
keyPreviewNode.updateLayout(size: layout.size, transition: .immediate)
@@ -468,7 +463,16 @@ final class CallControllerNode: ASDisplayNode {
transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom), size: CGSize(width: layout.size.width, height: buttonsHeight)))
let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
if let incomingVideoView = self.incomingVideoView {
incomingVideoView.frame = CGRect(origin: CGPoint(), size: layout.size)
}
if let outgoingVideoView = self.outgoingVideoView {
let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
outgoingVideoView.frame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize)
}
let keyTextSize = self.keyButtonNode.frame.size
transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
@@ -485,7 +489,8 @@ final class CallControllerNode: ASDisplayNode {
self?.backPressed()
}
})
self.containerNode.insertSubnode(keyPreviewNode, aboveSubnode: self.dimNode)
self.containerNode.insertSubnode(keyPreviewNode, belowSubnode: self.statusNode)
self.keyPreviewNode = keyPreviewNode
if let (validLayout, _) = self.validLayout {

View File

@@ -409,12 +409,34 @@ func contextMenuForChatPresentationIntefaceState(chatPresentationInterfaceState:
if let action = media as? TelegramMediaAction, case let .phoneCall(id, discardReason, _) = action.action {
if discardReason != .busy && discardReason != .missed {
if let logName = callLogNameForId(id: id, account: context.account) {
let logsPath = callLogsPath(account: context.account)
let logPath = logsPath + "/" + logName
let start = logName.index(logName.startIndex, offsetBy: "\(id)".count + 1)
let end = logName.index(logName.endIndex, offsetBy: -4)
let accessHash = logName[start..<end]
if let accessHash = Int64(accessHash) {
callId = CallId(id: id, accessHash: accessHash)
}
actions.append(.action(ContextMenuActionItem(text: "Share Statistics", icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Forward"), color: theme.actionSheet.primaryTextColor)
}, action: { _, f in
f(.dismissWithoutContent)
let controller = context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: context, filter: [.onlyWriteable, .excludeDisabled]))
controller.peerSelected = { [weak controller] peerId in
if let strongController = controller {
strongController.dismiss()
let id = arc4random64()
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: logPath, randomId: id), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: nil, attributes: [.FileName(fileName: "CallStats.log")])
let message: EnqueueMessage = .message(text: "", attributes: [], mediaReference: .standalone(media: file), replyToMessageId: nil, localGroupingKey: nil)
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
}
}
controllerInteraction.navigationController()?.pushViewController(controller)
})))
}
}
break

View File

@@ -375,6 +375,7 @@ public final class OngoingCallContext {
private let queue = Queue()
private let account: Account
private let callSessionManager: CallSessionManager
private let logPath: String
private var contextRef: Unmanaged<OngoingCallThreadLocalContextHolder>?
@@ -415,12 +416,13 @@ public final class OngoingCallContext {
self.internalId = internalId
self.account = account
self.callSessionManager = callSessionManager
self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
let logPath = self.logPath
let queue = self.queue
cleanupCallLogs(account: account)
let logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
self.audioSessionDisposable.set((audioSessionActive
|> filter { $0 }
|> take(1)
@@ -542,6 +544,9 @@ public final class OngoingCallContext {
}
public func stop(callId: CallId? = nil, sendDebugLogs: Bool = false, debugLogValue: Promise<String?>) {
let account = self.account
let logPath = self.logPath
self.withContext { context in
context.nativeStop { debugLog, bytesSentWifi, bytesReceivedWifi, bytesSentMobile, bytesReceivedMobile in
debugLogValue.set(.single(debugLog))
@@ -554,8 +559,18 @@ public final class OngoingCallContext {
outgoing: bytesSentWifi))
updateAccountNetworkUsageStats(account: self.account, category: .call, delta: delta)
if let callId = callId, let debugLog = debugLog, sendDebugLogs {
let _ = saveCallDebugLog(network: self.account.network, callId: callId, log: debugLog).start()
if !logPath.isEmpty, let debugLog = debugLog {
let logsPath = callLogsPath(account: account)
let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
if let data = debugLog.data(using: .utf8) {
let _ = try? data.write(to: URL(fileURLWithPath: logPath))
}
}
if let callId = callId, let debugLog = debugLog {
if sendDebugLogs {
let _ = saveCallDebugLog(network: self.account.network, callId: callId, log: debugLog).start()
}
}
}
let derivedState = context.nativeGetDerivedState()

View File

@@ -98,6 +98,8 @@
// Stops video capture on teardown. The assert enforces main-thread dealloc;
// NOTE(review): presumably the capturer is main-thread-affine — confirm
// against VideoCameraCapturer before relying on this.
- (void)dealloc {
    assert([NSThread isMainThread]);
    [_videoCapturer stopCapture];
}
@end

View File

@@ -1,50 +0,0 @@
#ifndef DEMO_CONNECTOR_H
#define DEMO_CONNECTOR_H
#include "p2p/base/basic_packet_socket_factory.h"
#include "rtc_base/proxy_info.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
#include "p2p/base/p2p_transport_channel.h"
#include "p2p/client/basic_port_allocator.h"
#include "p2p/base/basic_async_resolver_factory.h"
#include <memory>
#include <map>
// ICE transport wrapper around cricket::P2PTransportChannel. Owns a dedicated
// network thread; every WebRTC object below is created, used and destroyed on
// that thread. Consumers subscribe to the public signals, which also fire on
// the network thread.
class Connector : public sigslot::has_slots<> {
public:
    // isOutgoing selects the ICE role (controlling for the caller) and which
    // of the two fixed credential sets is used locally vs. remotely.
    explicit Connector(bool isOutgoing);
    ~Connector() override;

    // Starts the network thread, builds the ICE stack and begins gathering.
    void Start();

    // Emitted with serialized local candidates as they are gathered.
    sigslot::signal1<const std::vector<std::string>&> SignalCandidatesGathered;
    // Emitted when the transport becomes writable / unwritable.
    sigslot::signal1<bool> SignalReadyToSendStateChanged;
    // Emitted for every packet read from the transport.
    sigslot::signal1<const rtc::CopyOnWriteBuffer&> SignalPacketReceived;

    // Feeds serialized remote candidates into the transport channel.
    void AddRemoteCandidates(const std::vector<std::string> &candidates);
    // Sends a raw packet over the transport (marshalled to the network thread).
    void SendPacket(const rtc::CopyOnWriteBuffer& data);

private:
    // P2PTransportChannel signal handlers; all invoked on the network thread.
    void CandidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate);
    void CandidateGatheringState(cricket::IceTransportInternal *transport);
    void TransportStateChanged(cricket::IceTransportInternal *transport);
    void TransportRoleConflict(cricket::IceTransportInternal *transport);
    void TransportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);

    std::unique_ptr<rtc::Thread> networkThread;
    bool isOutgoing;
    std::unique_ptr<rtc::BasicPacketSocketFactory> socketFactory;
    std::unique_ptr<rtc::BasicNetworkManager> networkManager;
    std::unique_ptr<cricket::BasicPortAllocator> portAllocator;
    std::unique_ptr<webrtc::BasicAsyncResolverFactory> asyncResolverFactory;
    std::unique_ptr<cricket::P2PTransportChannel> transportChannel;
    // NOTE(review): written nowhere in this file; candidate batching that used
    // it is commented out in CandidateGatheringState.
    std::vector<std::string> collectedLocalCandidates;
};
#endif //DEMO_CONNECTOR_H

View File

@@ -1,158 +0,0 @@
#include "Connector.h"
#include "MediaEngineWebrtc.h"
#include "api/packet_socket_factory.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "p2p/base/ice_credentials_iterator.h"
#include "api/jsep_ice_candidate.h"
#include <memory>
// Records the call direction and creates the dedicated network thread.
// No WebRTC objects exist until Start() is called.
Connector::Connector(bool isOutgoing)
    : networkThread(rtc::Thread::CreateWithSocketServer()),
      isOutgoing(isOutgoing) {
}
// Destroys all networking objects synchronously on the network thread.
// The reset order (channel first, socket factory last) is the reverse of the
// construction order in Start() and must be preserved: each object depends on
// the ones reset after it.
Connector::~Connector() {
    networkThread->Invoke<void>(RTC_FROM_HERE, [this]() {
        transportChannel = nullptr;
        asyncResolverFactory = nullptr;
        portAllocator = nullptr;
        networkManager = nullptr;
        socketFactory = nullptr;
    });
}
// Builds the full ICE stack on the network thread and starts candidate
// gathering. Everything allocated here is torn down, in reverse order, in the
// destructor (also on the network thread).
void Connector::Start() {
    // NOTE(review): NSLog/NSDate only compile because this translation unit is
    // built as Objective-C++; RTC_LOG would be portable.
    NSLog(@"Started %d", (int)[[NSDate date] timeIntervalSince1970]);
    networkThread->Start();
    networkThread->Invoke<void>(RTC_FROM_HERE, [this] {
        socketFactory.reset(new rtc::BasicPacketSocketFactory(networkThread.get()));
        networkManager = std::make_unique<rtc::BasicNetworkManager>();
        portAllocator.reset(new cricket::BasicPortAllocator(networkManager.get(), socketFactory.get(), /*turn_customizer=*/ nullptr, /*relay_port_factory=*/ nullptr));
        // UDP-only candidates; the TCP-disable line below was kept for manual testing.
        uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
        //flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
        portAllocator->set_flags(portAllocator->flags() | flags);
        portAllocator->Initialize();
        // SECURITY: hard-coded test STUN/TURN server with embedded credentials
        // ("user"/"root") — must not ship in production builds.
        rtc::SocketAddress defaultStunAddress = rtc::SocketAddress("hlgkfjdrtjfykgulhijkljhulyo.uksouth.cloudapp.azure.com", 3478);
        cricket::ServerAddresses stunServers;
        stunServers.insert(defaultStunAddress);
        std::vector<cricket::RelayServerConfig> turnServers;
        turnServers.push_back(cricket::RelayServerConfig(
            rtc::SocketAddress("hlgkfjdrtjfykgulhijkljhulyo.uksouth.cloudapp.azure.com", 3478),
            "user",
            "root",
            cricket::PROTO_UDP
        ));
        portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE);
        asyncResolverFactory = std::make_unique<webrtc::BasicAsyncResolverFactory>();
        transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, portAllocator.get(), asyncResolverFactory.get(), /*event_log=*/ nullptr));
        cricket::IceConfig iceConfig;
        iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY;
        transportChannel->SetIceConfig(iceConfig);
        // Two fixed ICE credential sets shared by both peers: the outgoing side
        // uses the "gcp3" set locally, the incoming side the "acp3" set, and
        // each installs the mirrored set as remote (see the last call below).
        cricket::IceParameters localIceParameters(
            "gcp3",
            "zWDKozH8/3JWt8he3M/CMj5R",
            false
        );
        cricket::IceParameters remoteIceParameters(
            "acp3",
            "aWDKozH8/3JWt8he3M/CMj5R",
            false
        );
        transportChannel->SetIceParameters(isOutgoing ? localIceParameters : remoteIceParameters);
        transportChannel->SetIceRole(isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED);
        transportChannel->SignalCandidateGathered.connect(this, &Connector::CandidateGathered);
        transportChannel->SignalGatheringState.connect(this, &Connector::CandidateGatheringState);
        transportChannel->SignalIceTransportStateChanged.connect(this, &Connector::TransportStateChanged);
        transportChannel->SignalRoleConflict.connect(this, &Connector::TransportRoleConflict);
        transportChannel->SignalReadPacket.connect(this, &Connector::TransportPacketReceived);
        transportChannel->MaybeStartGathering();
        transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL);
        transportChannel->SetRemoteIceParameters((!isOutgoing) ? localIceParameters : remoteIceParameters);
    });
}
// Deserializes each remote ICE candidate and hands it to the transport
// channel; the whole batch is processed synchronously on the network thread.
void Connector::AddRemoteCandidates(const std::vector<std::string> &candidates) {
    networkThread->Invoke<void>(RTC_FROM_HERE, [this, candidates] {
        for (const auto &serialized : candidates) {
            webrtc::JsepIceCandidate parsed("", 0);
            if (!parsed.Initialize(serialized, nullptr)) {
                continue;  // Skip candidates that fail to parse.
            }
            printf("Add remote candidate %s\n", serialized.c_str());
            transportChannel->AddRemoteCandidate(parsed.candidate());
        }
    });
}
// Serializes a freshly gathered local candidate and emits it as a one-element
// batch. Runs on the network thread (asserted below).
// Fix: removed a dead round-trip that re-parsed the serialized candidate into
// an unused local variable.
void Connector::CandidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
    assert(networkThread->IsCurrent());
    webrtc::JsepIceCandidate iceCandidate("", 0);
    iceCandidate.SetCandidate(candidate);
    std::string serializedCandidate;
    if (iceCandidate.ToString(&serializedCandidate)) {
        std::vector<std::string> arrayOfOne;
        arrayOfOne.push_back(serializedCandidate);
        SignalCandidatesGathered(arrayOfOne);
    }
}
// Gathering-state handler. Intentionally a no-op: candidates are emitted one
// by one from CandidateGathered instead of as a batch on completion (the
// batching code below was disabled but kept for reference).
void Connector::CandidateGatheringState(cricket::IceTransportInternal *transport) {
    if (transport->gathering_state() == cricket::IceGatheringState::kIceGatheringComplete) {
        /*if (collectedLocalCandidates.size() != 0) {
            SignalCandidatesGathered(collectedLocalCandidates);
        }*/
    }
}
// Collapses the ICE transport state into a boolean ready-to-send signal:
// Connected/Completed count as writable, everything else as not.
void Connector::TransportStateChanged(cricket::IceTransportInternal *transport) {
    const auto state = transport->GetIceTransportState();
    const bool writable =
        state == webrtc::IceTransportState::kConnected ||
        state == webrtc::IceTransportState::kCompleted;
    if (writable) {
        SignalReadyToSendStateChanged(true);
        printf("===== State: Connected\n");
    } else {
        SignalReadyToSendStateChanged(false);
        printf("===== State: Disconnected\n");
    }
}
// ICE role-conflict handler; only logs. NOTE(review): a production
// implementation would renegotiate the controlling/controlled role here.
void Connector::TransportRoleConflict(cricket::IceTransportInternal *transport) {
    printf("===== Role conflict\n");
}
// Copies an incoming raw packet into a ref-counted buffer and forwards it to
// subscribers. Called on the network thread.
void Connector::TransportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, __unused int unused) {
    rtc::CopyOnWriteBuffer data(bytes, size);
    SignalPacketReceived(data);
}
// Sends a raw packet over the ICE transport. Marshals to the network thread
// synchronously (Invoke blocks the caller until the send is issued); the
// return value of SendPacket is deliberately ignored (best-effort).
void Connector::SendPacket(const rtc::CopyOnWriteBuffer& data) {
    networkThread->Invoke<void>(RTC_FROM_HERE, [this, data] {
        rtc::PacketOptions options;
        transportChannel->SendPacket((const char *)data.data(), data.size(), options, 0);
    });
}

View File

@@ -1,69 +0,0 @@
#ifndef DEMO_CONTROLLER_H
#define DEMO_CONTROLLER_H
#include "Connector.h"
#include "MediaEngineWebrtc.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#import "VideoMetalView.h"
// Glues a Connector (ICE transport) to a MediaEngineWebrtc (audio/video
// engine). The engine lives on an internal controller thread; connector
// signals are forwarded onto it. Network-type handling is stubbed out
// (commented members/methods kept from the original demo).
class Controller : public sigslot::has_slots<> {
public:
    // NOTE(review): declared but unused in the implementation visible here.
    enum EndpointType {
        UDP,
        TCP,
        P2P,
    };
    // Coarse call lifecycle; only Starting/Established/Reconnecting are set
    // by the implementation in this file.
    enum State {
        Starting,
        WaitInit,
        WaitInitAck,
        Established,
        Failed,
        Reconnecting,
    };

    // init_timeout / reconnect_timeout are accepted but not used by the
    // current implementation.
    explicit Controller(bool is_outgoing, size_t init_timeout, size_t reconnect_timeout);
    ~Controller() override;

    // Starts ICE gathering/connectivity via the connector.
    void Start();
    //void SetNetworkType(message::NetworkType network_type);
    void SetDataSaving(bool data_saving);   // currently a no-op stub
    void SetMute(bool mute);
    // Attaches a renderer sink for incoming video frames.
    void AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink);
    void SetProxy(rtc::ProxyType type, const rtc::SocketAddress& addr, const std::string& username, const std::string& password);  // no-op stub
    void AddRemoteCandidates(const std::vector<std::string> &candidates);

    //static std::map<message::NetworkType, MediaEngineWebrtc::NetworkParams> network_params;
    static MediaEngineWebrtc::NetworkParams default_network_params;
    static MediaEngineWebrtc::NetworkParams datasaving_network_params;

    // Fired from connector callbacks (caller's thread context).
    sigslot::signal1<State> SignalNewState;
    sigslot::signal1<const std::vector<std::string>&> SignalCandidatesGathered;

private:
    std::unique_ptr<rtc::Thread> thread;        // hosts the media engine
    std::unique_ptr<Connector> connector;
    std::unique_ptr<MediaEngineWebrtc> media;   // created/destroyed on `thread`
    State state;
    webrtc::RepeatingTaskHandle repeatable;
    // Timestamps from rtc::TimeMillis(); NOTE(review): set in the constructor
    // and Start() but never read in this file.
    int64_t last_recv_time;
    int64_t last_send_time;
    const bool isOutgoing;

    void PacketReceived(const rtc::CopyOnWriteBuffer &);
    void WriteableStateChanged(bool);
    void CandidatesGathered(const std::vector<std::string> &);
    void SetFail();
    void Play(const int16_t *data, size_t size);
    void Record(int16_t *data, size_t size);
    void SendRtp(rtc::CopyOnWriteBuffer packet);
    //void UpdateNetworkParams(const message::RtpStream& rtp);
};
#endif //DEMO_CONTROLLER_H

View File

@@ -1,111 +0,0 @@
#include "Controller.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include <memory>
// Wires the connector's signals to this controller and boots the controller
// thread; the media engine is created synchronously on that thread (Invoke)
// so it is fully initialized before the constructor returns.
// NOTE(review): init_timeout and reconnect_timeout are accepted but unused.
Controller::Controller(bool is_outgoing, size_t init_timeout, size_t reconnect_timeout)
    : thread(rtc::Thread::Create())
    , connector(std::make_unique<Connector>(is_outgoing))
    , state(State::Starting)
    , last_recv_time(rtc::TimeMillis())
    , last_send_time(rtc::TimeMillis())
    , isOutgoing(is_outgoing)
{
    connector->SignalReadyToSendStateChanged.connect(this, &Controller::WriteableStateChanged);
    connector->SignalPacketReceived.connect(this, &Controller::PacketReceived);
    connector->SignalCandidatesGathered.connect(this, &Controller::CandidatesGathered);
    thread->Start();
    thread->Invoke<void>(RTC_FROM_HERE, [this, is_outgoing]() {
        media.reset(new MediaEngineWebrtc(is_outgoing));
        media->Send.connect(this, &Controller::SendRtp);
    });
}
// Destroys the media engine before the connector, synchronously on the
// controller thread, so no posted task can observe a half-torn-down state.
Controller::~Controller() {
    thread->Invoke<void>(RTC_FROM_HERE, [this]() {
        media = nullptr;
        connector = nullptr;
    });
}
// Resets the receive timestamp and starts ICE gathering via the connector.
void Controller::Start() {
    last_recv_time = rtc::TimeMillis();
    connector->Start();
}
// Hands an incoming transport packet to the media engine on the controller
// thread; dropped silently if the engine has already been torn down.
void Controller::PacketReceived(const rtc::CopyOnWriteBuffer &data) {
    thread->PostTask(RTC_FROM_HERE, [this, data]() {
        if (media == nullptr) {
            return;
        }
        media->Receive(data);
    });
}
// Connector writability callback. Emits the coarse state synchronously on the
// calling thread, then updates the media engine's send gate asynchronously on
// the controller thread.
void Controller::WriteableStateChanged(bool isWriteable) {
    if (isWriteable) {
        SignalNewState(State::Established);
    } else {
        SignalNewState(State::Reconnecting);
    }
    thread->PostTask(RTC_FROM_HERE, [this, isWriteable]() {
        if (media) {
            media->SetCanSendPackets(isWriteable);
        }
    });
}
// Media-engine send callback: forwards an outgoing RTP packet to the
// transport. Takes the buffer by value (CopyOnWriteBuffer is cheaply shared).
void Controller::SendRtp(rtc::CopyOnWriteBuffer packet) {
    connector->SendPacket(packet);
}
/*void Controller::UpdateNetworkParams(const message::RtpStream& rtp) {
bool new_datasaving = local_datasaving || rtp.data_saving;
if (!new_datasaving) {
final_datasaving = false;
message::NetworkType new_network_type = std::min(local_network_type, rtp.network_type);
if (new_network_type != final_network_type) {
final_network_type = new_network_type;
auto it = network_params.find(rtp.network_type);
if (it == network_params.end())
media->SetNetworkParams(default_network_params);
else
media->SetNetworkParams(it->second);
}
} else if (new_datasaving != final_datasaving) {
final_datasaving = true;
media->SetNetworkParams(datasaving_network_params);
}
}*/
// Attaches a renderer sink for incoming video frames on the controller thread.
// Fix: guard against `media` being null by the time the posted task runs
// (e.g. after destruction started), consistent with PacketReceived/SetMute.
void Controller::AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink) {
    thread->PostTask(RTC_FROM_HERE, [this, sink]() {
        if (media) {
            media->AttachVideoView(sink);
        }
    });
}
/*void Controller::SetNetworkType(message::NetworkType network_type) {
local_network_type = network_type;
}*/
// Deliberate no-op stub: data-saving network params are not wired up (see the
// commented-out UpdateNetworkParams above).
void Controller::SetDataSaving(bool data_saving) {
}
// Toggles microphone mute on the media engine. Uses a blocking Invoke so the
// mute state is guaranteed applied before this returns.
void Controller::SetMute(bool mute) {
    thread->Invoke<void>(RTC_FROM_HERE, [this, mute]() {
        if (media)
            media->SetMute(mute);
    });
}
// Deliberate no-op stub: proxy support is not implemented in this demo path.
void Controller::SetProxy(rtc::ProxyType type, const rtc::SocketAddress& addr, const std::string& username, const std::string& password) {
}
// Re-emits the connector's gathered-candidates signal to this controller's
// own subscribers (same thread, synchronous pass-through).
void Controller::CandidatesGathered(const std::vector<std::string> &candidates) {
    SignalCandidatesGathered(candidates);
}
// Pass-through to the connector, which marshals to its network thread.
void Controller::AddRemoteCandidates(const std::vector<std::string> &candidates) {
    connector->AddRemoteCandidates(candidates);
}

View File

@@ -4,22 +4,45 @@
namespace TGVOIP_NAMESPACE {
#endif
// Creates the process-wide WebRTC network thread. The function-local static
// unique_ptr means the thread is created once and intentionally lives for the
// remainder of the process. Only called once, from getNetworkThread()'s
// static initializer, so SetName/Start run exactly once.
static rtc::Thread *makeNetworkThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::CreateWithSocketServer();
    value->SetName("WebRTC-Network", nullptr);
    value->Start();
    return value.get();
}

// Returns the shared network thread, creating it on first use (thread-safe
// via C++ static-local initialization).
static rtc::Thread *getNetworkThread() {
    static rtc::Thread *value = makeNetworkThread();
    return value;
}
// Creates the process-wide WebRTC media thread; same once-per-process,
// never-destroyed pattern as the network thread above.
static rtc::Thread *makeMediaThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::Create();
    value->SetName("WebRTC-Media", nullptr);
    value->Start();
    return value.get();
}

// Returns the shared media thread, creating it on first use.
static rtc::Thread *getMediaThread() {
    static rtc::Thread *value = makeMediaThread();
    return value;
}
Manager::Manager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::function<void (const TgVoipState &)> stateUpdated,
std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
) :
_thread(thread),
_encryptionKey(encryptionKey),
_networkThread(rtc::Thread::CreateWithSocketServer()),
_mediaThread(rtc::Thread::Create()),
_enableP2P(enableP2P),
_stateUpdated(stateUpdated),
_signalingDataEmitted(signalingDataEmitted) {
assert(_thread->IsCurrent());
_networkThread->Start();
_mediaThread->Start();
}
Manager::~Manager() {
@@ -28,10 +51,11 @@ Manager::~Manager() {
void Manager::start() {
auto weakThis = std::weak_ptr<Manager>(shared_from_this());
_networkManager.reset(new ThreadLocalObject<NetworkManager>(_networkThread.get(), [networkThreadPtr = _networkThread.get(), encryptionKey = _encryptionKey, thread = _thread, weakThis]() {
_networkManager.reset(new ThreadLocalObject<NetworkManager>(getNetworkThread(), [encryptionKey = _encryptionKey, enableP2P = _enableP2P, thread = _thread, weakThis]() {
return new NetworkManager(
networkThreadPtr,
getNetworkThread(),
encryptionKey,
enableP2P,
[thread, weakThis](const NetworkManager::State &state) {
thread->Invoke<void>(RTC_FROM_HERE, [weakThis, state]() {
auto strongThis = weakThis.lock();
@@ -74,9 +98,9 @@ void Manager::start() {
);
}));
bool isOutgoing = _encryptionKey.isOutgoing;
_mediaManager.reset(new ThreadLocalObject<MediaManager>(_mediaThread.get(), [mediaThreadPtr = _mediaThread.get(), isOutgoing, thread = _thread, weakThis]() {
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, weakThis]() {
return new MediaManager(
mediaThreadPtr,
getMediaThread(),
isOutgoing,
[thread, weakThis](const rtc::CopyOnWriteBuffer &packet) {
thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() {

View File

@@ -15,6 +15,7 @@ public:
Manager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::function<void (const TgVoipState &)> stateUpdated,
std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
);
@@ -28,8 +29,7 @@ public:
private:
rtc::Thread *_thread;
TgVoipEncryptionKey _encryptionKey;
std::unique_ptr<rtc::Thread> _networkThread;
std::unique_ptr<rtc::Thread> _mediaThread;
bool _enableP2P;
std::function<void (const TgVoipState &)> _stateUpdated;
std::function<void (const std::vector<uint8_t> &)> _signalingDataEmitted;
std::unique_ptr<ThreadLocalObject<NetworkManager>> _networkManager;

View File

@@ -1,19 +0,0 @@
#ifndef DEMO_MEDIAENGINEBASE_H
#define DEMO_MEDIAENGINEBASE_H
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include <cstdint>
// Minimal media-engine interface: an engine must accept incoming packets.
// NOTE(review): MediaEngineWebrtc (declared elsewhere in this commit) does
// not actually derive from this interface — it may be vestigial.
class MediaEngineBase {
public:
    MediaEngineBase() = default;
    virtual ~MediaEngineBase() = default;
    // Consumes one incoming RTP/RTCP packet (by value; CopyOnWriteBuffer is
    // cheaply shared).
    virtual void Receive(rtc::CopyOnWriteBuffer) = 0;
};
#endif //DEMO_MEDIAENGINEBASE_H

View File

@@ -1,76 +0,0 @@
#ifndef DEMO_MEDIAENGINEWEBRTC_H
#define DEMO_MEDIAENGINEWEBRTC_H
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "api/transport/field_trial_based_config.h"
#include "call/call.h"
#include "media/base/media_engine.h"
#include "pc/rtp_sender.h"
#include "rtc_base/task_queue.h"
#include <memory>
#import "VideoCameraCapturer.h"
#import "VideoMetalView.h"
// Audio+video media engine built directly on webrtc::Call and the cricket
// media channels (no PeerConnection). Packets flow in via Receive() and out
// via the Send signal; the transport is owned elsewhere.
class MediaEngineWebrtc : public sigslot::has_slots<> {
public:
    // Per-network tuning knobs. NOTE: bitrates are uint8_t, so values are
    // capped at 255 kbps — adequate for voice, not for video.
    struct NetworkParams {
        uint8_t min_bitrate_kbps;
        uint8_t max_bitrate_kbps;
        uint8_t start_bitrate_kbps;
        uint8_t ptime_ms;
        bool echo_cancellation;
        bool auto_gain_control;
        bool noise_suppression;
    };

    // `outgoing` selects which fixed SSRC pair this side sends/receives on.
    explicit MediaEngineWebrtc(bool outgoing);
    ~MediaEngineWebrtc();

    // Feeds one incoming packet into webrtc::Call for demuxing.
    void Receive(rtc::CopyOnWriteBuffer);
    void OnSentPacket(const rtc::SentPacket& sent_packet);
    void SetNetworkParams(const NetworkParams& params);
    void SetMute(bool mute);
    // Gates outgoing packets; also drives the Send signal's activity.
    void SetCanSendPackets(bool);
    // Attaches a renderer sink for decoded incoming video frames.
    void AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink);

    // Emits every outgoing RTP/RTCP packet produced by the engine.
    sigslot::signal1<rtc::CopyOnWriteBuffer> Send;

private:
    // NetworkInterface adapter the media channels write into; routes both RTP
    // and RTCP back out through the owning engine's Send signal.
    class Sender final : public cricket::MediaChannel::NetworkInterface {
    public:
        explicit Sender(MediaEngineWebrtc &engine, bool isVideo);
        bool SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) override;
        bool SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) override;
        int SetOption(SocketType type, rtc::Socket::Option opt, int option) override;
    private:
        MediaEngineWebrtc &engine;
        bool isVideo;
    };

    // Fixed SSRCs chosen by call direction in the constructor.
    const uint32_t ssrc_send;
    const uint32_t ssrc_recv;
    const uint32_t ssrc_send_video;
    const uint32_t ssrc_recv_video;

    std::unique_ptr<webrtc::Call> call;
    std::unique_ptr<cricket::MediaEngineInterface> media_engine;
    std::unique_ptr<webrtc::RtcEventLogNull> event_log;
    std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory;
    webrtc::FieldTrialBasedConfig field_trials;
    webrtc::LocalAudioSinkAdapter audio_source;
    Sender audio_sender;
    Sender video_sender;
    std::unique_ptr<cricket::VoiceMediaChannel> voice_channel;
    std::unique_ptr<cricket::VideoMediaChannel> video_channel;
    std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> video_bitrate_allocator_factory;
    std::unique_ptr<rtc::Thread> signaling_thread;
    std::unique_ptr<rtc::Thread> worker_thread;
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
    VideoCameraCapturer *_videoCapturer;
};
#endif //DEMO_MEDIAENGINEWEBRTC_H

View File

@@ -1,424 +0,0 @@
#include "MediaEngineWebrtc.h"
#include "absl/strings/match.h"
#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_encoder_factory_template.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/engine/webrtc_media_engine.h"
#include "modules/audio_device/include/audio_device_default.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/field_trial.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/video_bitrate_allocation.h"
#include "sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
#include "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
#include "sdk/objc/native/api/video_encoder_factory.h"
#include "sdk/objc/native/api/video_decoder_factory.h"
#include "sdk/objc/native/src/objc_video_track_source.h"
#include "api/video_track_source_proxy.h"
#include "sdk/objc/api/RTCVideoRendererAdapter.h"
#include "sdk/objc/native/api/video_frame.h"
#include "api/media_types.h"
namespace {
// Audio framing: 480 samples of mono 16-bit PCM per frame at 48 kHz (10 ms).
const size_t frame_samples = 480;
const uint8_t channels = 1;
const uint8_t sample_bytes = 2;
const uint32_t clockrate = 48000;
// SDP description of the audio codec (Opus, dynamic payload type 111;
// declared 2-channel in SDP per the Opus convention even with mono capture).
const uint16_t sdp_payload = 111;
const char* sdp_name = "opus";
const uint8_t sdp_channels = 2;
const uint32_t sdp_bitrate = 0;
// Fixed SSRC pairs; call direction decides which one a peer sends on.
const uint32_t caller_ssrc = 1;
const uint32_t called_ssrc = 2;
const uint32_t caller_ssrc_video = 3;
const uint32_t called_ssrc_video = 4;
// RTP header-extension IDs. NOTE(review): audio and video share id 1 here.
const int extension_sequence = 1;
const int extension_sequence_video = 1;
}
// Attaches the standard RTCP feedback parameters (REMB, transport-cc, CCM FIR,
// NACK, NACK/PLI, and optionally LNTF for VP8) to `codec`, mirroring what
// WebRTC's built-in video engine does during codec negotiation.
static void AddDefaultFeedbackParams(cricket::VideoCodec* codec) {
    // Don't add any feedback params for RED and ULPFEC.
    if (codec->name == cricket::kRedCodecName || codec->name == cricket::kUlpfecCodecName)
        return;
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
    codec->AddFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
    // Don't add any more feedback params for FLEXFEC.
    if (codec->name == cricket::kFlexfecCodecName)
        return;
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
    // Loss-notification feedback is gated behind a field trial and VP8-only.
    if (codec->name == cricket::kVp8CodecName &&
        webrtc::field_trial::IsEnabled("WebRTC-RtcpLossNotification")) {
        codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty));
    }
}
// Assigns dynamic RTP payload types (96..127) to the supplied video formats
// plus RED/ULPFEC, adds default feedback params and a paired RTX codec for
// each non-FEC codec, and reports via `outCodecId` the payload type of the
// preferred codec (VP9, since `useVP9` is hard-coded true below — the H264
// branch is currently dead). Mirrors WebRTC's internal payload assignment.
static std::vector<cricket::VideoCodec> AssignPayloadTypesAndDefaultCodecs(std::vector<webrtc::SdpVideoFormat> input_formats, int32_t &outCodecId) {
    if (input_formats.empty())
        return std::vector<cricket::VideoCodec>();
    static const int kFirstDynamicPayloadType = 96;
    static const int kLastDynamicPayloadType = 127;
    int payload_type = kFirstDynamicPayloadType;
    input_formats.push_back(webrtc::SdpVideoFormat(cricket::kRedCodecName));
    input_formats.push_back(webrtc::SdpVideoFormat(cricket::kUlpfecCodecName));
    /*if (IsFlexfecAdvertisedFieldTrialEnabled()) {
        webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName);
        // This value is currently arbitrarily set to 10 seconds. (The unit
        // is microseconds.) This parameter MUST be present in the SDP, but
        // we never use the actual value anywhere in our code however.
        // TODO(brandtr): Consider honouring this value in the sender and receiver.
        flexfec_format.parameters = {{kFlexfecFmtpRepairWindow, "10000000"}};
        input_formats.push_back(flexfec_format);
    }*/
    bool found = false;
    bool useVP9 = true;  // codec preference switch; flips the two blocks below
    std::vector<cricket::VideoCodec> output_codecs;
    for (const webrtc::SdpVideoFormat& format : input_formats) {
        cricket::VideoCodec codec(format);
        codec.id = payload_type;
        AddDefaultFeedbackParams(&codec);
        output_codecs.push_back(codec);
        // Remember the first matching preferred codec's payload type.
        if (useVP9 && codec.name == cricket::kVp9CodecName) {
            if (!found) {
                outCodecId = codec.id;
                found = true;
            }
        }
        if (!useVP9 && codec.name == cricket::kH264CodecName) {
            if (!found) {
                outCodecId = codec.id;
                found = true;
            }
        }
        // Increment payload type.
        ++payload_type;
        if (payload_type > kLastDynamicPayloadType) {
            RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
            break;
        }
        // Add associated RTX codec for non-FEC codecs.
        if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) &&
            !absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) {
            output_codecs.push_back(
                cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id));
            // Increment payload type.
            ++payload_type;
            if (payload_type > kLastDynamicPayloadType) {
                RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
                break;
            }
        }
    }
    return output_codecs;
}
// Builds the complete media stack for a single 1:1 call:
//  - starts the signaling/worker threads and installs audio field trials,
//  - creates the cricket media engine and a webrtc::Call,
//  - configures one audio and one video channel for both send and receive.
// `outgoing` only selects which fixed SSRC pair is ours vs. the peer's, so the
// caller's outgoing SSRC matches the callee's incoming one and vice versa.
MediaEngineWebrtc::MediaEngineWebrtc(bool outgoing)
: ssrc_send(outgoing ? caller_ssrc : called_ssrc)
, ssrc_recv(outgoing ? called_ssrc : caller_ssrc)
, ssrc_send_video(outgoing ? caller_ssrc_video : called_ssrc_video)
, ssrc_recv_video(outgoing ? called_ssrc_video : caller_ssrc_video)
, event_log(std::make_unique<webrtc::RtcEventLogNull>())
, task_queue_factory(webrtc::CreateDefaultTaskQueueFactory())
, audio_sender(*this, false)
, video_sender(*this, true)
, signaling_thread(rtc::Thread::Create())
, worker_thread(rtc::Thread::Create()) {
    signaling_thread->Start();
    worker_thread->Start();
    // Audio bandwidth trials: send-side BWE, 6-32 kbps allocation window,
    // minimum Opus packet-loss rate of 1%.
    webrtc::field_trial::InitFieldTrialsFromString(
        "WebRTC-Audio-SendSideBwe/Enabled/"
        "WebRTC-Audio-Allocation/min:6kbps,max:32kbps/"
        "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
    );
    video_bitrate_allocator_factory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
    cricket::MediaEngineDependencies media_deps;
    media_deps.task_queue_factory = task_queue_factory.get();
    media_deps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
    media_deps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
    // NOTE(review): this factory instance is used only to enumerate supported
    // formats and pick the preferred codec id; a second, identical factory is
    // handed to media_deps below. The first one could be reused.
    auto video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]);
    int32_t outCodecId = 96;
    std::vector<cricket::VideoCodec> videoCodecs = AssignPayloadTypesAndDefaultCodecs(video_encoder_factory->GetSupportedFormats(), outCodecId);
    media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]);
    media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory([[RTCDefaultVideoDecoderFactory alloc] init]);
    media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
    media_engine = cricket::CreateMediaEngine(std::move(media_deps));
    media_engine->Init();
    webrtc::Call::Config call_config(event_log.get());
    call_config.task_queue_factory = task_queue_factory.get();
    call_config.trials = &field_trials;
    call_config.audio_state = media_engine->voice().GetAudioState();
    call.reset(webrtc::Call::Create(call_config));
    voice_channel.reset(media_engine->voice().CreateMediaChannel(
        call.get(), cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions::NoGcm()));
    video_channel.reset(media_engine->video().CreateMediaChannel(call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), video_bitrate_allocator_factory.get()));
    // --- Audio send path ---
    if (true) {
        voice_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send));
        SetNetworkParams({6, 32, 6, 120, false, false, false});
        SetMute(false);
        voice_channel->SetInterface(&audio_sender, webrtc::MediaTransportConfig());
    }
    // --- Video send path (configures only the preferred codec from
    //     AssignPayloadTypesAndDefaultCodecs, then breaks) ---
    if (true) {
        video_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send_video));
        for (auto codec : videoCodecs) {
            if (codec.id == outCodecId) {
                rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
                _nativeVideoSource = webrtc::VideoTrackSourceProxy::Create(signaling_thread.get(), worker_thread.get(), objCVideoTrackSource);
                // Send-side bitrate window, kbps.
                codec.SetParam(cricket::kCodecParamMinBitrate, 64);
                codec.SetParam(cricket::kCodecParamStartBitrate, 256);
                codec.SetParam(cricket::kCodecParamMaxBitrate, 2500);
                // Camera capture is started asynchronously on the main queue;
                // there is no capture at all on the simulator.
                dispatch_async(dispatch_get_main_queue(), ^{
#if TARGET_IPHONE_SIMULATOR
#else
                    _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:_nativeVideoSource];
                    // Pick the front camera...
                    AVCaptureDevice *frontCamera = nil;
                    for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
                        if (device.position == AVCaptureDevicePositionFront) {
                            frontCamera = device;
                            break;
                        }
                    }
                    if (frontCamera == nil) {
                        assert(false);
                        return;
                    }
                    // ...then, among formats sorted by ascending width, the
                    // smallest one that is at least 1000px in either dimension.
                    NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
                        int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
                        int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
                        return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
                    }];
                    AVCaptureDeviceFormat *bestFormat = nil;
                    for (AVCaptureDeviceFormat *format in sortedFormats) {
                        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
                        if (dimensions.width >= 1000 || dimensions.height >= 1000) {
                            bestFormat = format;
                            break;
                        }
                    }
                    if (bestFormat == nil) {
                        assert(false);
                        return;
                    }
                    // The highest supported frame-rate range is located but only
                    // used as a sanity check; capture starts at a fixed 27 fps.
                    AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
                        if (lhs.maxFrameRate < rhs.maxFrameRate) {
                            return NSOrderedAscending;
                        } else {
                            return NSOrderedDescending;
                        }
                    }] lastObject];
                    if (frameRateRange == nil) {
                        assert(false);
                        return;
                    }
                    [_videoCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27];
#endif
                });
                cricket::VideoSendParameters send_parameters;
                send_parameters.codecs.push_back(codec);
                send_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence_video);
                //send_parameters.options.echo_cancellation = params.echo_cancellation;
                //send_parameters.options.noise_suppression = params.noise_suppression;
                //send_parameters.options.auto_gain_control = params.auto_gain_control;
                //send_parameters.options.highpass_filter = false;
                //send_parameters.options.typing_detection = false;
                //send_parameters.max_bandwidth_bps = 800000;
                //send_parameters.rtcp.reduced_size = true;
                send_parameters.rtcp.remote_estimate = true;
                video_channel->SetSendParameters(send_parameters);
                video_channel->SetVideoSend(ssrc_send_video, NULL, _nativeVideoSource.get());
                video_channel->SetInterface(&video_sender, webrtc::MediaTransportConfig());
                break;
            }
        }
    }
    // --- Audio receive path ---
    if (true) {
        cricket::AudioRecvParameters recv_parameters;
        recv_parameters.codecs.emplace_back(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
        recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
        recv_parameters.rtcp.reduced_size = true;
        recv_parameters.rtcp.remote_estimate = true;
        voice_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv));
        voice_channel->SetRecvParameters(recv_parameters);
        voice_channel->SetPlayout(true);
    }
    // --- Video receive path (same preferred codec, narrower bitrate window) ---
    if (true) {
        for (auto codec : videoCodecs) {
            if (codec.id == outCodecId) {
                codec.SetParam(cricket::kCodecParamMinBitrate, 32);
                codec.SetParam(cricket::kCodecParamStartBitrate, 300);
                codec.SetParam(cricket::kCodecParamMaxBitrate, 1000);
                cricket::VideoRecvParameters recv_parameters;
                recv_parameters.codecs.emplace_back(codec);
                recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence_video);
                //recv_parameters.rtcp.reduced_size = true;
                recv_parameters.rtcp.remote_estimate = true;
                video_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv_video));
                video_channel->SetRecvParameters(recv_parameters);
                break;
            }
        }
    }
}
// Tears down the media stack: stops camera capture, detaches sinks/streams,
// and disables playout before the channels and Sender members are destroyed.
MediaEngineWebrtc::~MediaEngineWebrtc() {
    [_videoCapturer stopCapture];
    video_channel->SetSink(ssrc_recv_video, nullptr);
    video_channel->RemoveSendStream(ssrc_send_video);
    video_channel->RemoveRecvStream(ssrc_recv_video);
    // Detach the network interface so the channel cannot call into the
    // video_sender member while this object is being destroyed (mirrors the
    // teardown in MediaManager::~MediaManager).
    video_channel->SetInterface(nullptr, webrtc::MediaTransportConfig());
    voice_channel->SetPlayout(false);
    voice_channel->RemoveSendStream(ssrc_send);
    voice_channel->RemoveRecvStream(ssrc_recv);
    // Same for the audio_sender member.
    voice_channel->SetInterface(nullptr, webrtc::MediaTransportConfig());
}
// Demultiplexes an incoming transport packet. Each wrapped packet carries a
// 1-byte header prepended by Sender::SendPacket/SendRtcp on the remote side:
// 0xba = audio, 0xbf = video. The header is stripped and the payload handed
// to the matching media channel; anything else is logged and dropped.
void MediaEngineWebrtc::Receive(rtc::CopyOnWriteBuffer packet) {
    if (packet.size() < 1) {
        return;
    }
    uint8_t header = ((uint8_t *)packet.data())[0];
    rtc::CopyOnWriteBuffer unwrappedPacket = packet.Slice(1, packet.size() - 1);
    if (header == 0xba) {
        if (voice_channel) {
            voice_channel->OnPacketReceived(unwrappedPacket, -1);
        }
    } else if (header == 0xbf) {
        if (video_channel) {
            video_channel->OnPacketReceived(unwrappedPacket, -1);
        }
    } else {
        // Report through the WebRTC log like the rest of this file
        // (the original printf lacked a trailing newline and bypassed rtc logging).
        RTC_LOG(LS_ERROR) << "Unknown packet header: " << (int)header;
    }
}
// Forwards a transport send notification into webrtc::Call so congestion
// control sees when packets actually left the wrapper transport.
void MediaEngineWebrtc::OnSentPacket(const rtc::SentPacket& sent_packet) {
    call->OnSentPacket(sent_packet);
}
// Reconfigures the audio send path for the given network conditions:
// rebuilds the Opus send codec with the requested bitrate window / ptime and
// applies the audio-processing toggles carried in `params`.
void MediaEngineWebrtc::SetNetworkParams(const MediaEngineWebrtc::NetworkParams& params) {
    cricket::AudioCodec opusCodec(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
    opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
    opusCodec.SetParam(cricket::kCodecParamMinBitrate, params.min_bitrate_kbps);
    opusCodec.SetParam(cricket::kCodecParamStartBitrate, params.start_bitrate_kbps);
    opusCodec.SetParam(cricket::kCodecParamMaxBitrate, params.max_bitrate_kbps);
    opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1);
    opusCodec.SetParam(cricket::kCodecParamPTime, params.ptime_ms);

    cricket::AudioSendParameters sendParameters;
    sendParameters.codecs.push_back(opusCodec);
    sendParameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
    sendParameters.options.echo_cancellation = params.echo_cancellation;
    sendParameters.options.noise_suppression = params.noise_suppression;
    sendParameters.options.auto_gain_control = params.auto_gain_control;
    sendParameters.options.highpass_filter = false;
    sendParameters.options.typing_detection = false;
    // sendParameters.options.experimental_ns = false;
    // sendParameters.max_bandwidth_bps = 16000;
    sendParameters.rtcp.reduced_size = true;
    sendParameters.rtcp.remote_estimate = true;
    voice_channel->SetSendParameters(sendParameters);
}
// Intentionally a no-op: muting is not implemented in this engine yet,
// although callers still invoke it (e.g. SetMute(false) in the constructor).
void MediaEngineWebrtc::SetMute(bool mute) {
}
void MediaEngineWebrtc::SetCanSendPackets(bool canSendPackets) {
if (canSendPackets) {
call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp);
call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
} else {
call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown);
call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown);
}
if (voice_channel) {
voice_channel->OnReadyToSend(canSendPackets);
voice_channel->SetSend(canSendPackets);
voice_channel->SetAudioSend(ssrc_send, true, nullptr, &audio_source);
}
if (video_channel) {
video_channel->OnReadyToSend(canSendPackets);
video_channel->SetSend(canSendPackets);
}
}
// Routes decoded incoming video frames (stream ssrc_recv_video) to the given
// sink; pass nullptr to detach.
void MediaEngineWebrtc::AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink) {
    video_channel->SetSink(ssrc_recv_video, sink);
}
// RTP egress: prepends the 1-byte demux header (0xbf = video, 0xba = audio),
// hands the wrapped packet to the engine's transport, and immediately reports
// it as sent so congestion control stays informed.
bool MediaEngineWebrtc::Sender::SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    const uint8_t channelTag = isVideo ? 0xbf : 0xba;
    rtc::CopyOnWriteBuffer framedPacket;
    framedPacket.AppendData(&channelTag, 1);
    framedPacket.AppendData(*packet);
    engine.Send(framedPacket);
    engine.OnSentPacket(rtc::SentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent));
    return true;
}
// RTCP egress takes the same path as RTP: a 1-byte demux header
// (0xbf = video, 0xba = audio) followed by the original packet bytes.
bool MediaEngineWebrtc::Sender::SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    const uint8_t channelTag = isVideo ? 0xbf : 0xba;
    rtc::CopyOnWriteBuffer framedPacket;
    framedPacket.AppendData(&channelTag, 1);
    framedPacket.AppendData(*packet);
    engine.Send(framedPacket);
    engine.OnSentPacket(rtc::SentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent));
    return true;
}
// Socket options are not supported on this virtual transport; always reports
// failure. Per the original author, the result is not important yet.
int MediaEngineWebrtc::Sender::SetOption(cricket::MediaChannel::NetworkInterface::SocketType, rtc::Socket::Option, int) {
    return -1; // in general, the result is not important yet
}
// Binds this network-interface shim to its owning engine and records whether
// it carries the video stream (header 0xbf) or the audio stream (header 0xba).
MediaEngineWebrtc::Sender::Sender(MediaEngineWebrtc &engine, bool isVideo)
    : engine(engine)
    , isVideo(isVideo) {
}

View File

@@ -137,6 +137,19 @@ static absl::optional<cricket::VideoCodec> selectVideoCodec(std::vector<cricket:
return absl::optional<cricket::VideoCodec>();
}
// Lazily creates the single, process-wide worker thread shared by all
// MediaManager instances (this commit replaces the per-instance _workerThread).
// The thread is intentionally never destroyed: the static unique_ptr keeps it
// alive for the lifetime of the process.
static rtc::Thread *makeWorkerThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::Create();
    value->SetName("WebRTC-Worker", nullptr);
    value->Start();
    return value.get();
}

// Returns the shared worker thread; first use triggers creation, which is
// thread-safe via the C++11 magic-static initialization guarantee.
static rtc::Thread *getWorkerThread() {
    static rtc::Thread *value = makeWorkerThread();
    return value;
}
MediaManager::MediaManager(
rtc::Thread *thread,
bool isOutgoing,
@@ -145,8 +158,7 @@ MediaManager::MediaManager(
_packetEmitted(packetEmitted),
_thread(thread),
_eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
_workerThread(rtc::Thread::Create()) {
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcVideo.incoming = isOutgoing ? ssrcVideoIncoming : ssrcVideoOutgoing;
@@ -155,8 +167,6 @@ _workerThread(rtc::Thread::Create()) {
_audioNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, false));
_videoNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, true));
_workerThread->Start();
webrtc::field_trial::InitFieldTrialsFromString(
"WebRTC-Audio-SendSideBwe/Enabled/"
"WebRTC-Audio-Allocation/min:6kbps,max:32kbps/"
@@ -238,7 +248,7 @@ _workerThread(rtc::Thread::Create()) {
auto videoCodec = selectVideoCodec(videoCodecs);
if (videoCodec.has_value()) {
_nativeVideoSource = makeVideoSource(_thread, _workerThread.get());
_nativeVideoSource = makeVideoSource(_thread, getWorkerThread());
auto codec = videoCodec.value();
@@ -285,6 +295,14 @@ MediaManager::~MediaManager() {
_audioChannel->RemoveRecvStream(_ssrcAudio.incoming);
_audioChannel->RemoveSendStream(_ssrcAudio.outgoing);
_audioChannel->SetInterface(nullptr, webrtc::MediaTransportConfig());
_videoChannel->RemoveRecvStream(_ssrcVideo.incoming);
_videoChannel->RemoveSendStream(_ssrcVideo.outgoing);
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr);
_videoChannel->SetInterface(nullptr, webrtc::MediaTransportConfig());
}
void MediaManager::setIsConnected(bool isConnected) {

View File

@@ -72,7 +72,6 @@ private:
rtc::Thread *_thread;
std::unique_ptr<webrtc::RtcEventLogNull> _eventLog;
std::unique_ptr<webrtc::TaskQueueFactory> _taskQueueFactory;
std::unique_ptr<rtc::Thread> _workerThread;
SSRC _ssrcAudio;
SSRC _ssrcVideo;

View File

@@ -9,13 +9,151 @@
#include "p2p/base/ice_credentials_iterator.h"
#include "api/jsep_ice_candidate.h"
extern "C" {
#include <openssl/sha.h>
#include <openssl/aes.h>
#include <openssl/modes.h>
#include <openssl/rand.h>
#include <openssl/crypto.h>
}
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
static void KDF2(unsigned char *encryptionKey, unsigned char *msgKey, size_t x, unsigned char *aesKey, unsigned char *aesIv) {
uint8_t sA[32], sB[32];
uint8_t buf[16 + 36];
memcpy(buf, msgKey, 16);
memcpy(buf + 16, encryptionKey + x, 36);
SHA256(buf, 16 + 36, sA);
memcpy(buf, encryptionKey + 40 + x, 36);
memcpy(buf + 36, msgKey, 16);
SHA256(buf, 36 + 16, sB);
memcpy(aesKey, sA, 8);
memcpy(aesKey + 8, sB + 8, 16);
memcpy(aesKey + 8 + 16, sA + 24, 8);
memcpy(aesIv, sB, 8);
memcpy(aesIv + 8, sA + 8, 16);
memcpy(aesIv + 8 + 16, sB + 24, 8);
}
// AES-256-IGE encryption wrapper. NOTE: OpenSSL's AES_ige_encrypt updates the
// IV buffer in place, so callers must pass a scratch copy of the IV.
static void aesIgeEncrypt(uint8_t *in, uint8_t *out, size_t length, uint8_t *key, uint8_t *iv) {
    AES_KEY akey;
    AES_set_encrypt_key(key, 32*8, &akey);
    AES_ige_encrypt(in, out, length, &akey, iv, AES_ENCRYPT);
}
// AES-256-IGE decryption wrapper (AES_ige_encrypt with AES_DECRYPT, which is
// how OpenSSL exposes IGE decryption). The IV buffer is updated in place.
static void aesIgeDecrypt(uint8_t *in, uint8_t *out, size_t length, uint8_t *key, uint8_t *iv) {
    AES_KEY akey;
    AES_set_decrypt_key(key, 32*8, &akey);
    AES_ige_encrypt(in, out, length, &akey, iv, AES_DECRYPT);
}
// Decrypts and authenticates one wire packet.
// Wire layout: [16-byte msgKey][AES-IGE ciphertext], where the plaintext is a
// 2-byte inner length followed by the payload and random padding.
// Returns nullopt on any size/authentication failure.
static absl::optional<rtc::CopyOnWriteBuffer> decryptPacket(const rtc::CopyOnWriteBuffer &packet, const TgVoipEncryptionKey &encryptionKey) {
    if (packet.size() < 16 + 16) {
        return absl::nullopt;
    }
    unsigned char msgKey[16];
    memcpy(msgKey, packet.data(), 16);

    // Direction offset is mirrored relative to encryptPacket so that each
    // side decrypts with the peer's sending keys.
    int x = encryptionKey.isOutgoing ? 8 : 0;
    unsigned char aesKey[32];
    unsigned char aesIv[32];
    KDF2((unsigned char *)encryptionKey.value.data(), msgKey, x, aesKey, aesIv);

    size_t decryptedSize = packet.size() - 16;
    // decryptedSize is unsigned, so the original `< 0` check was dead code;
    // the size() guard above already guarantees it is at least 16.
    if (decryptedSize > 128 * 1024) {
        return absl::nullopt;
    }
    if (decryptedSize % 16 != 0) {
        return absl::nullopt;
    }
    rtc::Buffer decryptionBuffer(decryptedSize);
    aesIgeDecrypt(((uint8_t *)packet.data()) + 16, decryptionBuffer.begin(), decryptionBuffer.size(), aesKey, aesIv);

    // Recompute the message key over (key fragment || plaintext) and verify it
    // against the received one. Use a constant-time comparison so the check
    // does not leak where the two MACs diverge.
    rtc::ByteBufferWriter msgKeyData;
    msgKeyData.WriteBytes((const char *)encryptionKey.value.data() + 88 + x, 32);
    msgKeyData.WriteBytes((const char *)decryptionBuffer.data(), decryptionBuffer.size());
    unsigned char msgKeyLarge[32];
    SHA256((uint8_t *)msgKeyData.Data(), msgKeyData.Length(), msgKeyLarge);
    if (CRYPTO_memcmp(msgKeyLarge + 8, msgKey, 16) != 0) {
        return absl::nullopt;
    }

    uint16_t innerSize;
    memcpy(&innerSize, decryptionBuffer.data(), 2);
    // innerSize is unsigned, so the original `< 0` check was dead code; only
    // the upper bound matters (decryptionBuffer.size() >= 16, so no underflow).
    if (innerSize > decryptionBuffer.size() - 2) {
        return absl::nullopt;
    }
    rtc::CopyOnWriteBuffer decryptedPacket;
    decryptedPacket.AppendData((const char *)decryptionBuffer.data() + 2, innerSize);
    return decryptedPacket;
}
// Encrypts a packet for the wire as [16-byte msgKey][AES-IGE ciphertext],
// where the plaintext is: 2-byte length, payload, then 1..16 random padding
// bytes to reach a multiple of the AES block size.
// Returns nullopt if the payload cannot be described by a 2-byte length.
static absl::optional<rtc::Buffer> encryptPacket(const rtc::CopyOnWriteBuffer &packet, const TgVoipEncryptionKey &encryptionKey) {
    if (packet.size() > UINT16_MAX) {
        return absl::nullopt;
    }
    // Inner plaintext: 2-byte length prefix, then the raw payload.
    rtc::ByteBufferWriter innerData;
    uint16_t packetSize = (uint16_t)packet.size();
    innerData.WriteBytes((const char *)&packetSize, 2);
    innerData.WriteBytes((const char *)packet.data(), packet.size());
    // Random padding; when the length is already block-aligned a full extra
    // block (16 bytes) is appended, so padding is always 1..16 bytes.
    size_t innerPadding = 16 - innerData.Length() % 16;
    uint8_t paddingData[16];
    RAND_bytes(paddingData, (int)innerPadding);
    innerData.WriteBytes((const char *)paddingData, innerPadding);
    if (innerData.Length() % 16 != 0) {
        assert(false);
        return absl::nullopt;
    }
    // Direction offset mirrors decryptPacket: the sender's 0/8 choice matches
    // the receiver's 8/0 choice for the same packet.
    int x = encryptionKey.isOutgoing ? 0 : 8;
    // msgKey = middle 16 bytes of SHA256(key fragment || plaintext); it both
    // authenticates the packet and seeds the KDF below.
    rtc::ByteBufferWriter msgKeyData;
    msgKeyData.WriteBytes((const char *)encryptionKey.value.data() + 88 + x, 32);
    msgKeyData.WriteBytes(innerData.Data(), innerData.Length());
    unsigned char msgKeyLarge[32];
    SHA256((uint8_t *)msgKeyData.Data(), msgKeyData.Length(), msgKeyLarge);
    unsigned char msgKey[16];
    memcpy(msgKey, msgKeyLarge + 8, 16);
    unsigned char aesKey[32];
    unsigned char aesIv[32];
    KDF2((unsigned char *)encryptionKey.value.data(), msgKey, x, aesKey, aesIv);
    rtc::Buffer encryptedPacket;
    encryptedPacket.AppendData((const char *)msgKey, 16);
    rtc::Buffer encryptionBuffer(innerData.Length());
    aesIgeEncrypt((uint8_t *)innerData.Data(), encryptionBuffer.begin(), innerData.Length(), aesKey, aesIv);
    encryptedPacket.AppendData(encryptionBuffer.begin(), encryptionBuffer.size());
    // Round-trip self-test, kept for debugging:
    /*rtc::CopyOnWriteBuffer testBuffer;
    testBuffer.AppendData(encryptedPacket.data(), encryptedPacket.size());
    TgVoipEncryptionKey testKey;
    testKey.value = encryptionKey.value;
    testKey.isOutgoing = !encryptionKey.isOutgoing;
    decryptPacket(testBuffer, testKey);*/
    return encryptedPacket;
}
NetworkManager::NetworkManager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::function<void (const NetworkManager::State &)> stateUpdated,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetReceived,
std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
@@ -33,6 +171,10 @@ _signalingDataEmitted(signalingDataEmitted) {
_portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr));
uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
if (!enableP2P) {
flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
flags |= cricket::PORTALLOCATOR_DISABLE_STUN;
}
//flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
_portAllocator->set_flags(_portAllocator->flags() | flags);
_portAllocator->Initialize();
@@ -120,8 +262,11 @@ void NetworkManager::receiveSignalingData(const std::vector<uint8_t> &data) {
}
void NetworkManager::sendPacket(const rtc::CopyOnWriteBuffer &packet) {
rtc::PacketOptions packetOptions;
_transportChannel->SendPacket((const char *)packet.data(), packet.size(), packetOptions, 0);
auto encryptedPacket = encryptPacket(packet, _encryptionKey);
if (encryptedPacket.has_value()) {
rtc::PacketOptions packetOptions;
_transportChannel->SendPacket((const char *)encryptedPacket->data(), encryptedPacket->size(), packetOptions, 0);
}
}
void NetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
@@ -177,7 +322,11 @@ void NetworkManager::transportPacketReceived(rtc::PacketTransportInternal *trans
assert(_thread->IsCurrent());
rtc::CopyOnWriteBuffer packet;
packet.AppendData(bytes, size);
_packetReceived(packet);
auto decryptedPacket = decryptPacket(packet, _encryptionKey);
if (decryptedPacket.has_value()) {
_packetReceived(decryptedPacket.value());
}
}
#ifdef TGVOIP_NAMESPACE

View File

@@ -40,6 +40,7 @@ public:
NetworkManager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::function<void (const NetworkManager::State &)> stateUpdated,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetReceived,
std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted

View File

@@ -11,6 +11,8 @@
#import <Foundation/Foundation.h>
#include <sys/time.h>
#ifndef TGVOIP_USE_CUSTOM_CRYPTO
/*extern "C" {
#include <openssl/sha.h>
@@ -79,6 +81,58 @@ CryptoFunctions Layer92::crypto={
namespace TGVOIP_NAMESPACE {
#endif
// rtc::LogSink implementation that accumulates every WebRTC log line into an
// in-memory string stream; the buffer is later read out as the call's debug
// log (TgVoipImpl::stop() returns _logSink._data.str()).
class LogSinkImpl : public rtc::LogSink {
public:
    LogSinkImpl() {
    }

    virtual ~LogSinkImpl() {
    }

    // Tagged overload: folds the tag into the message and reuses the plain one.
    virtual void OnLogMessage(const std::string &msg, rtc::LoggingSeverity severity, const char *tag) override {
        OnLogMessage(std::string(tag) + ": " + msg);
    }

    // Severity is intentionally dropped; filtering happens at registration
    // time (AddLogToStream with rtc::LS_INFO).
    virtual void OnLogMessage(const std::string &message, rtc::LoggingSeverity severity) override {
        OnLogMessage(message);
    }

    virtual void OnLogMessage(const std::string &message) override {
        // Prefix each entry with a local-time timestamp at millisecond
        // resolution. NOTE(review): fields are not zero-padded (e.g.
        // "2020-6-1 9:5:7:3") and no newline is appended here — presumably the
        // incoming message already ends with one; confirm against
        // rtc::LogMessage output.
        time_t rawTime;
        time(&rawTime);
        struct tm timeinfo;
        localtime_r(&rawTime, &timeinfo);
        timeval curTime;
        gettimeofday(&curTime, nullptr);
        int32_t milliseconds = curTime.tv_usec / 1000;
        _data << (timeinfo.tm_year + 1900);
        _data << "-" << (timeinfo.tm_mon + 1);
        _data << "-" << (timeinfo.tm_mday);
        _data << " " << timeinfo.tm_hour;
        _data << ":" << timeinfo.tm_min;
        _data << ":" << timeinfo.tm_sec;
        _data << ":" << milliseconds;
        _data << " " << message;
    }

public:
    // Accumulated log text. NOTE(review): appended from whichever thread logs,
    // with no synchronization — confirm all rtc logging reaches this sink from
    // a single thread before relying on the contents.
    std::ostringstream _data;
};
// Lazily creates the single, process-wide manager thread (this commit replaces
// the per-instance _managerThread in TgVoipImpl). The static unique_ptr keeps
// the thread alive for the lifetime of the process.
static rtc::Thread *makeManagerThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::Create();
    value->SetName("WebRTC-Manager", nullptr);
    value->Start();
    return value.get();
}

// Returns the shared manager thread; creation is thread-safe via the C++11
// magic-static initialization guarantee.
static rtc::Thread *getManagerThread() {
    static rtc::Thread *value = makeManagerThread();
    return value;
}
class TgVoipImpl : public TgVoip, public sigslot::has_slots<> {
public:
TgVoipImpl(
@@ -98,13 +152,15 @@ public:
rtc::LogMessage::LogToDebug(rtc::LS_INFO);
rtc::LogMessage::SetLogToStderr(true);
});
rtc::LogMessage::AddLogToStream(&_logSink, rtc::LS_INFO);
_managerThread = rtc::Thread::Create();
_managerThread->Start();
_manager.reset(new ThreadLocalObject<Manager>(_managerThread.get(), [managerThreadPtr = _managerThread.get(), encryptionKey = encryptionKey, stateUpdated, signalingDataEmitted](){
bool enableP2P = config.enableP2P;
_manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, signalingDataEmitted](){
return new Manager(
managerThreadPtr,
getManagerThread(),
encryptionKey,
enableP2P,
[stateUpdated](const TgVoipState &state) {
stateUpdated(state);
},
@@ -119,6 +175,7 @@ public:
}
~TgVoipImpl() override {
rtc::LogMessage::RemoveLogToStream(&_logSink);
}
void receiveSignalingData(const std::vector<uint8_t> &data) override {
@@ -179,13 +236,13 @@ public:
//controller_->SetMute(muteMicrophone);
}
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
_manager->perform([sink](Manager *manager) {
manager->setIncomingVideoOutput(sink);
});
}
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
_manager->perform([sink](Manager *manager) {
manager->setOutgoingVideoOutput(sink);
});
@@ -218,8 +275,9 @@ public:
}
TgVoipFinalState stop() override {
TgVoipFinalState finalState = {
};
TgVoipFinalState finalState;
finalState.debugLog = _logSink._data.str();
finalState.isRatingSuggested = false;
return finalState;
}
@@ -253,10 +311,11 @@ public:
}*/
private:
std::unique_ptr<rtc::Thread> _managerThread;
std::unique_ptr<ThreadLocalObject<Manager>> _manager;
std::function<void(TgVoipState)> _stateUpdated;
std::function<void(const std::vector<uint8_t> &)> _signalingDataEmitted;
LogSinkImpl _logSink;
};
std::function<void(std::string const &)> globalLoggingFunction;

View File

@@ -52,6 +52,14 @@ public:
});
}
template <class FunctorT>
void performSync(FunctorT&& functor) {
_thread->Invoke<void>(RTC_FROM_HERE, [this, f = std::forward<FunctorT>(functor)](){
assert(_valueHolder->_value != nullptr);
f(_valueHolder->_value.get());
});
}
private:
rtc::Thread *_thread;
ValueHolder<T> *_valueHolder;