Merge branch 'webrtc-fork'

This commit is contained in:
Ali 2020-06-16 21:33:04 +04:00
commit c79bbad083
32 changed files with 1712 additions and 1171 deletions

View File

@ -47,7 +47,8 @@ public protocol PresentationCall: class {
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func debugInfo() -> Signal<(String, String), NoError>
func getVideoView(completion: @escaping (UIView?) -> Void)
func makeIncomingVideoView(completion: @escaping (UIView?) -> Void)
func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void)
}
public protocol PresentationCallManager: class {

View File

@ -45,6 +45,8 @@ public final class CallController: ViewController {
private var audioOutputStateDisposable: Disposable?
private var audioOutputState: ([AudioSessionOutput], AudioSessionOutput?)?
private let idleTimerExtensionDisposable = MetaDisposable()
public init(sharedContext: SharedAccountContext, account: Account, call: PresentationCall, easyDebugAccess: Bool) {
self.sharedContext = sharedContext
self.account = account
@ -97,6 +99,7 @@ public final class CallController: ViewController {
self.disposable?.dispose()
self.callMutedDisposable?.dispose()
self.audioOutputStateDisposable?.dispose()
self.idleTimerExtensionDisposable.dispose()
}
private func callStateUpdated(_ callState: PresentationCallState) {
@ -260,6 +263,14 @@ public final class CallController: ViewController {
self.controllerNode.animateIn()
}
self.idleTimerExtensionDisposable.set(self.sharedContext.applicationBindings.pushIdleTimerExtension())
}
/// Releases the idle-timer extension when the call UI goes off screen, so the
/// device is allowed to auto-lock again.
override public func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated)
// Setting nil disposes the previously-installed extension (the matching
// `pushIdleTimerExtension()` is set when the controller becomes visible).
self.idleTimerExtensionDisposable.set(nil)
}
override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {

View File

@ -31,8 +31,9 @@ final class CallControllerNode: ASDisplayNode {
private let imageNode: TransformImageNode
private let dimNode: ASDisplayNode
private var videoView: UIView?
private var videoViewRequested: Bool = false
private var incomingVideoView: UIView?
private var outgoingVideoView: UIView?
private var videoViewsRequested: Bool = false
private let backButtonArrowNode: ASImageNode
private let backButtonNode: HighlightableButtonNode
private let statusNode: CallControllerStatusNode
@ -265,16 +266,37 @@ final class CallControllerNode: ASDisplayNode {
}
}
statusReception = reception
if !self.videoViewRequested {
self.videoViewRequested = true
self.call.getVideoView(completion: { [weak self] videoView in
if !self.videoViewsRequested {
self.videoViewsRequested = true
self.call.makeIncomingVideoView(completion: { [weak self] incomingVideoView in
guard let strongSelf = self else {
return
}
if let videoView = videoView {
if let incomingVideoView = incomingVideoView {
strongSelf.setCurrentAudioOutput?(.speaker)
strongSelf.videoView = videoView
strongSelf.containerNode.view.insertSubview(videoView, aboveSubview: strongSelf.dimNode.view)
strongSelf.incomingVideoView = incomingVideoView
strongSelf.containerNode.view.insertSubview(incomingVideoView, aboveSubview: strongSelf.dimNode.view)
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
})
self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
guard let strongSelf = self else {
return
}
if let outgoingVideoView = outgoingVideoView {
outgoingVideoView.backgroundColor = .black
outgoingVideoView.clipsToBounds = true
outgoingVideoView.layer.cornerRadius = 16.0
strongSelf.setCurrentAudioOutput?(.speaker)
strongSelf.outgoingVideoView = outgoingVideoView
if let incomingVideoView = strongSelf.incomingVideoView {
strongSelf.containerNode.view.insertSubview(outgoingVideoView, aboveSubview: incomingVideoView)
} else {
strongSelf.containerNode.view.insertSubview(outgoingVideoView, aboveSubview: strongSelf.dimNode.view)
}
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
@ -388,10 +410,6 @@ final class CallControllerNode: ASDisplayNode {
transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
if let videoView = self.videoView {
videoView.frame = CGRect(origin: CGPoint(), size: layout.size)
}
if let keyPreviewNode = self.keyPreviewNode {
transition.updateFrame(node: keyPreviewNode, frame: CGRect(origin: CGPoint(), size: layout.size))
keyPreviewNode.updateLayout(size: layout.size, transition: .immediate)
@ -445,7 +463,16 @@ final class CallControllerNode: ASDisplayNode {
transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
self.buttonsNode.updateLayout(constrainedWidth: layout.size.width, transition: transition)
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom), size: CGSize(width: layout.size.width, height: buttonsHeight)))
let buttonsOriginY: CGFloat = layout.size.height - (buttonsOffset - 40.0) - buttonsHeight - layout.intrinsicInsets.bottom
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
if let incomingVideoView = self.incomingVideoView {
incomingVideoView.frame = CGRect(origin: CGPoint(), size: layout.size)
}
if let outgoingVideoView = self.outgoingVideoView {
let outgoingSize = layout.size.aspectFitted(CGSize(width: 200.0, height: 200.0))
outgoingVideoView.frame = CGRect(origin: CGPoint(x: layout.size.width - 16.0 - outgoingSize.width, y: buttonsOriginY - 32.0 - outgoingSize.height), size: outgoingSize)
}
let keyTextSize = self.keyButtonNode.frame.size
transition.updateFrame(node: self.keyButtonNode, frame: CGRect(origin: CGPoint(x: layout.size.width - keyTextSize.width - 8.0, y: navigationOffset + 8.0), size: keyTextSize))
@ -462,7 +489,8 @@ final class CallControllerNode: ASDisplayNode {
self?.backPressed()
}
})
self.containerNode.insertSubnode(keyPreviewNode, aboveSubnode: self.dimNode)
self.containerNode.insertSubnode(keyPreviewNode, belowSubnode: self.statusNode)
self.keyPreviewNode = keyPreviewNode
if let (validLayout, _) = self.validLayout {

View File

@ -673,7 +673,11 @@ public final class PresentationCallImpl: PresentationCall {
return self.debugInfoValue.get()
}
public func getVideoView(completion: @escaping (UIView?) -> Void) {
self.ongoingContext?.getVideoView(completion: completion)
/// Requests the incoming (remote) video view from the active call context.
///
/// - Parameter completion: Invoked exactly once — with the view, or with
///   `nil` when there is no active ongoing context to provide one.
public func makeIncomingVideoView(completion: @escaping (UIView?) -> Void) {
    if let ongoingContext = self.ongoingContext {
        ongoingContext.makeIncomingVideoView(completion: completion)
    } else {
        // Previously the completion was silently dropped when `ongoingContext`
        // was nil; always complete so callers are never left hanging.
        completion(nil)
    }
}
/// Requests the outgoing (local camera) video view from the active call context.
///
/// - Parameter completion: Invoked exactly once — with the view, or with
///   `nil` when there is no active ongoing context to provide one.
public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
    if let ongoingContext = self.ongoingContext {
        ongoingContext.makeOutgoingVideoView(completion: completion)
    } else {
        // Previously the completion was silently dropped when `ongoingContext`
        // was nil; always complete so callers are never left hanging.
        completion(nil)
    }
}
}

View File

@ -409,12 +409,34 @@ func contextMenuForChatPresentationIntefaceState(chatPresentationInterfaceState:
if let action = media as? TelegramMediaAction, case let .phoneCall(id, discardReason, _) = action.action {
if discardReason != .busy && discardReason != .missed {
if let logName = callLogNameForId(id: id, account: context.account) {
let logsPath = callLogsPath(account: context.account)
let logPath = logsPath + "/" + logName
let start = logName.index(logName.startIndex, offsetBy: "\(id)".count + 1)
let end = logName.index(logName.endIndex, offsetBy: -4)
let accessHash = logName[start..<end]
if let accessHash = Int64(accessHash) {
callId = CallId(id: id, accessHash: accessHash)
}
actions.append(.action(ContextMenuActionItem(text: "Share Statistics", icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/Forward"), color: theme.actionSheet.primaryTextColor)
}, action: { _, f in
f(.dismissWithoutContent)
let controller = context.sharedContext.makePeerSelectionController(PeerSelectionControllerParams(context: context, filter: [.onlyWriteable, .excludeDisabled]))
controller.peerSelected = { [weak controller] peerId in
if let strongController = controller {
strongController.dismiss()
let id = arc4random64()
let file = TelegramMediaFile(fileId: MediaId(namespace: Namespaces.Media.LocalFile, id: id), partialReference: nil, resource: LocalFileReferenceMediaResource(localFilePath: logPath, randomId: id), previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "application/text", size: nil, attributes: [.FileName(fileName: "CallStats.log")])
let message: EnqueueMessage = .message(text: "", attributes: [], mediaReference: .standalone(media: file), replyToMessageId: nil, localGroupingKey: nil)
let _ = enqueueMessages(account: context.account, peerId: peerId, messages: [message]).start()
}
}
controllerInteraction.navigationController()?.pushViewController(controller)
})))
}
}
break

View File

@ -375,6 +375,7 @@ public final class OngoingCallContext {
private let queue = Queue()
private let account: Account
private let callSessionManager: CallSessionManager
private let logPath: String
private var contextRef: Unmanaged<OngoingCallThreadLocalContextHolder>?
@ -415,12 +416,13 @@ public final class OngoingCallContext {
self.internalId = internalId
self.account = account
self.callSessionManager = callSessionManager
self.logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
let logPath = self.logPath
let queue = self.queue
cleanupCallLogs(account: account)
let logPath = logName.isEmpty ? "" : callLogsPath(account: self.account) + "/" + logName + ".log"
self.audioSessionDisposable.set((audioSessionActive
|> filter { $0 }
|> take(1)
@ -542,6 +544,9 @@ public final class OngoingCallContext {
}
public func stop(callId: CallId? = nil, sendDebugLogs: Bool = false, debugLogValue: Promise<String?>) {
let account = self.account
let logPath = self.logPath
self.withContext { context in
context.nativeStop { debugLog, bytesSentWifi, bytesReceivedWifi, bytesSentMobile, bytesReceivedMobile in
debugLogValue.set(.single(debugLog))
@ -554,8 +559,18 @@ public final class OngoingCallContext {
outgoing: bytesSentWifi))
updateAccountNetworkUsageStats(account: self.account, category: .call, delta: delta)
if let callId = callId, let debugLog = debugLog, sendDebugLogs {
let _ = saveCallDebugLog(network: self.account.network, callId: callId, log: debugLog).start()
if !logPath.isEmpty, let debugLog = debugLog {
let logsPath = callLogsPath(account: account)
let _ = try? FileManager.default.createDirectory(atPath: logsPath, withIntermediateDirectories: true, attributes: nil)
if let data = debugLog.data(using: .utf8) {
let _ = try? data.write(to: URL(fileURLWithPath: logPath))
}
}
if let callId = callId, let debugLog = debugLog {
if sendDebugLogs {
let _ = saveCallDebugLog(network: self.account.network, callId: callId, log: debugLog).start()
}
}
}
let derivedState = context.nativeGetDerivedState()
@ -585,12 +600,23 @@ public final class OngoingCallContext {
return (poll |> then(.complete() |> delay(0.5, queue: Queue.concurrentDefaultQueue()))) |> restart
}
public func getVideoView(completion: @escaping (UIView?) -> Void) {
/// Creates the remote (incoming) video view and hands it to `completion`.
///
/// The completion is invoked exactly once: with a `UIView` when the underlying
/// native context is a WebRTC context, or with `nil` otherwise.
public func makeIncomingVideoView(completion: @escaping (UIView?) -> Void) {
    self.withContext { context in
        if let context = context as? OngoingCallThreadLocalContextWebrtc {
            // Fixed: the span previously contained both the superseded
            // `getRemoteCameraView(completion)` call and its replacement,
            // which would invoke the completion twice. Only the new call stays.
            context.makeIncomingVideoView(completion)
        } else {
            // Non-WebRTC contexts cannot provide incoming video.
            completion(nil)
        }
    }
}
/// Creates the local (outgoing) camera preview view and hands it to `completion`.
///
/// The completion is invoked exactly once: with a `UIView` when the underlying
/// native context is a WebRTC context, or with `nil` otherwise.
public func makeOutgoingVideoView(completion: @escaping (UIView?) -> Void) {
    self.withContext { context in
        if let context = context as? OngoingCallThreadLocalContextWebrtc {
            context.makeOutgoingVideoView(completion)
        } else {
            // Non-WebRTC contexts cannot provide outgoing video.
            completion(nil)
        }
        // Fixed: removed a stray unconditional `completion(nil)` that followed
        // the if/else and fired the callback a second time on every path.
    }
}
}

View File

@ -12,10 +12,10 @@ objc_library(
"Sources/**/*.m",
"Sources/**/*.mm",
"Sources/**/*.h",
"Impl/*.h",
"Impl/*.cpp",
"Impl/*.mm",
"Impl/*.m",
"Impl/**/*.h",
"Impl/**/*.cpp",
"Impl/**/*.mm",
"Impl/**/*.m",
]),
hdrs = glob([
"PublicHeaders/**/*.h",
@ -30,6 +30,8 @@ objc_library(
"-DWEBRTC_IOS",
"-DWEBRTC_MAC",
"-DWEBRTC_POSIX",
"-DRTC_ENABLE_VP9",
"-DTGVOIP_NAMESPACE=tgvoip_webrtc",
"-std=c++14",
],
includes = [

View File

@ -0,0 +1,27 @@
#ifndef CODECS_APPLE_H
#define CODECS_APPLE_H
#include "rtc_base/thread.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/media_stream_interface.h"
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
class VideoCapturerInterface {
public:
virtual ~VideoCapturerInterface();
};
std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory();
std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory();
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread);
std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source);
#ifdef TGVOIP_NAMESPACE
}
#endif
#endif

View File

@ -0,0 +1,171 @@
#import "CodecsApple.h"
#include "absl/strings/match.h"
#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_encoder_factory_template.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/engine/webrtc_media_engine.h"
#include "modules/audio_device/include/audio_device_default.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/field_trial.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/video_bitrate_allocation.h"
#include "sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
#include "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
#include "sdk/objc/native/api/video_encoder_factory.h"
#include "sdk/objc/native/api/video_decoder_factory.h"
#include "sdk/objc/native/src/objc_video_track_source.h"
#include "api/video_track_source_proxy.h"
#include "sdk/objc/api/RTCVideoRendererAdapter.h"
#include "sdk/objc/native/api/video_frame.h"
#include "api/media_types.h"
#import "VideoCameraCapturer.h"
// Objective-C wrapper that owns a VideoCameraCapturer: capture starts in
// -initWithSource: and stops in -dealloc. Both must run on the main thread
// (asserted below).
@interface VideoCapturerInterfaceImplReference : NSObject {
VideoCameraCapturer *_videoCapturer;
}
@end
@implementation VideoCapturerInterfaceImplReference
- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source {
self = [super init];
if (self != nil) {
assert([NSThread isMainThread]);
_videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source];
// Select the front camera; initialization fails (returns nil) if none exists.
AVCaptureDevice *frontCamera = nil;
for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
if (device.position == AVCaptureDevicePositionFront) {
frontCamera = device;
break;
}
}
if (frontCamera == nil) {
return nil;
}
// Sort the device's formats by ascending frame width...
NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
}];
// ...then take the smallest format with at least one dimension >= 1000 px.
AVCaptureDeviceFormat *bestFormat = nil;
for (AVCaptureDeviceFormat *format in sortedFormats) {
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
if (dimensions.width >= 1000 || dimensions.height >= 1000) {
bestFormat = format;
break;
}
}
if (bestFormat == nil) {
assert(false);
return nil;
}
// Highest-max-frame-rate range supported by the chosen format.
AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
if (lhs.maxFrameRate < rhs.maxFrameRate) {
return NSOrderedAscending;
} else {
return NSOrderedDescending;
}
}] lastObject];
if (frameRateRange == nil) {
assert(false);
return nil;
}
// NOTE(review): frameRateRange is validated but otherwise unused — fps is
// hard-coded to 27; confirm whether it should be bounded by maxFrameRate.
[_videoCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27];
}
return self;
}
- (void)dealloc {
assert([NSThread isMainThread]);
[_videoCapturer stopCapture];
}
@end
// Mutable box shared across threads to carry a CFBridgingRetain-ed
// VideoCapturerInterfaceImplReference as a raw pointer (see the C++ adapter
// below, which fills and drains it on the main queue).
@interface VideoCapturerInterfaceImplHolder : NSObject
@property (nonatomic) void *reference;
@end
@implementation VideoCapturerInterfaceImplHolder
@end
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// C++ adapter that creates and destroys the Objective-C capturer wrapper on
// the main queue (the wrapper asserts main-thread usage).
class VideoCapturerInterfaceImpl: public VideoCapturerInterface {
public:
VideoCapturerInterfaceImpl(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source) :
_source(source) {
_implReference = [[VideoCapturerInterfaceImplHolder alloc] init];
VideoCapturerInterfaceImplHolder *implReference = _implReference;
// The capturer must be constructed on the main thread; the holder is filled
// asynchronously once construction succeeds (it stays NULL on failure).
dispatch_async(dispatch_get_main_queue(), ^{
VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source];
if (value != nil) {
implReference.reference = (void *)CFBridgingRetain(value);
}
});
}
virtual ~VideoCapturerInterfaceImpl() {
VideoCapturerInterfaceImplHolder *implReference = _implReference;
// Balance the CFBridgingRetain above, also on the main queue; since the main
// queue is serial this is ordered after the construction block, and releasing
// the wrapper triggers its -dealloc, which stops capture.
dispatch_async(dispatch_get_main_queue(), ^{
if (implReference.reference != nil) {
CFBridgingRelease(implReference.reference);
}
});
}
private:
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
VideoCapturerInterfaceImplHolder *_implReference;
};
// Out-of-line definition of the abstract interface's virtual destructor.
VideoCapturerInterface::~VideoCapturerInterface() {
}
// Builds the native encoder factory on top of the default Objective-C video
// encoder factory.
std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() {
    RTCDefaultVideoEncoderFactory *objcFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
    return webrtc::ObjCToNativeVideoEncoderFactory(objcFactory);
}
// Builds the native decoder factory on top of the default Objective-C video
// decoder factory.
std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() {
    RTCDefaultVideoDecoderFactory *objcFactory = [[RTCDefaultVideoDecoderFactory alloc] init];
    return webrtc::ObjCToNativeVideoDecoderFactory(objcFactory);
}
// Creates an Objective-C-backed video track source and wraps it in a proxy
// that marshals calls between the signaling and worker threads.
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) {
    rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> trackSource(new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
    return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, trackSource);
}
// Wraps the concrete main-queue capturer implementation behind the abstract
// VideoCapturerInterface handle.
std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source) {
    auto capturer = std::make_unique<VideoCapturerInterfaceImpl>(source);
    return capturer;
}
#ifdef TGVOIP_NAMESPACE
}
#endif

View File

@ -1,50 +0,0 @@
#ifndef DEMO_CONNECTOR_H
#define DEMO_CONNECTOR_H
#include "p2p/base/basic_packet_socket_factory.h"
#include "rtc_base/proxy_info.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
#include "p2p/base/p2p_transport_channel.h"
#include "p2p/client/basic_port_allocator.h"
#include "p2p/base/basic_async_resolver_factory.h"
#include <memory>
#include <map>
class Connector : public sigslot::has_slots<> {
public:
explicit Connector(bool isOutgoing);
~Connector() override;
void Start();
sigslot::signal1<const std::vector<std::string>&> SignalCandidatesGathered;
sigslot::signal1<bool> SignalReadyToSendStateChanged;
sigslot::signal1<const rtc::CopyOnWriteBuffer&> SignalPacketReceived;
void AddRemoteCandidates(const std::vector<std::string> &candidates);
void SendPacket(const rtc::CopyOnWriteBuffer& data);
private:
void CandidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate);
void CandidateGatheringState(cricket::IceTransportInternal *transport);
void TransportStateChanged(cricket::IceTransportInternal *transport);
void TransportRoleConflict(cricket::IceTransportInternal *transport);
void TransportReadyToSend(cricket::IceTransportInternal *transport);
void TransportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);
std::unique_ptr<rtc::Thread> networkThread;
bool isOutgoing;
std::unique_ptr<rtc::BasicPacketSocketFactory> socketFactory;
std::unique_ptr<rtc::BasicNetworkManager> networkManager;
std::unique_ptr<cricket::BasicPortAllocator> portAllocator;
std::unique_ptr<webrtc::BasicAsyncResolverFactory> asyncResolverFactory;
std::unique_ptr<cricket::P2PTransportChannel> transportChannel;
std::vector<std::string> collectedLocalCandidates;
};
#endif //DEMO_CONNECTOR_H

View File

@ -1,158 +0,0 @@
#include "Connector.h"
#include "MediaEngineWebrtc.h"
#include "api/packet_socket_factory.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "p2p/base/ice_credentials_iterator.h"
#include "api/jsep_ice_candidate.h"
#include <memory>
Connector::Connector(bool isOutgoing) {
networkThread = rtc::Thread::CreateWithSocketServer();
this->isOutgoing = isOutgoing;
}
Connector::~Connector() {
networkThread->Invoke<void>(RTC_FROM_HERE, [this]() {
transportChannel = nullptr;
asyncResolverFactory = nullptr;
portAllocator = nullptr;
networkManager = nullptr;
socketFactory = nullptr;
});
}
void Connector::Start() {
NSLog(@"Started %d", (int)[[NSDate date] timeIntervalSince1970]);
networkThread->Start();
networkThread->Invoke<void>(RTC_FROM_HERE, [this] {
socketFactory.reset(new rtc::BasicPacketSocketFactory(networkThread.get()));
networkManager = std::make_unique<rtc::BasicNetworkManager>();
portAllocator.reset(new cricket::BasicPortAllocator(networkManager.get(), socketFactory.get(), /*turn_customizer=*/ nullptr, /*relay_port_factory=*/ nullptr));
uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
//flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
portAllocator->set_flags(portAllocator->flags() | flags);
portAllocator->Initialize();
rtc::SocketAddress defaultStunAddress = rtc::SocketAddress("hlgkfjdrtjfykgulhijkljhulyo.uksouth.cloudapp.azure.com", 3478);
cricket::ServerAddresses stunServers;
stunServers.insert(defaultStunAddress);
std::vector<cricket::RelayServerConfig> turnServers;
turnServers.push_back(cricket::RelayServerConfig(
rtc::SocketAddress("hlgkfjdrtjfykgulhijkljhulyo.uksouth.cloudapp.azure.com", 3478),
"user",
"root",
cricket::PROTO_UDP
));
portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE);
asyncResolverFactory = std::make_unique<webrtc::BasicAsyncResolverFactory>();
transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, portAllocator.get(), asyncResolverFactory.get(), /*event_log=*/ nullptr));
cricket::IceConfig iceConfig;
iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY;
transportChannel->SetIceConfig(iceConfig);
cricket::IceParameters localIceParameters(
"gcp3",
"zWDKozH8/3JWt8he3M/CMj5R",
false
);
cricket::IceParameters remoteIceParameters(
"acp3",
"aWDKozH8/3JWt8he3M/CMj5R",
false
);
transportChannel->SetIceParameters(isOutgoing ? localIceParameters : remoteIceParameters);
transportChannel->SetIceRole(isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED);
transportChannel->SignalCandidateGathered.connect(this, &Connector::CandidateGathered);
transportChannel->SignalGatheringState.connect(this, &Connector::CandidateGatheringState);
transportChannel->SignalIceTransportStateChanged.connect(this, &Connector::TransportStateChanged);
transportChannel->SignalRoleConflict.connect(this, &Connector::TransportRoleConflict);
transportChannel->SignalReadPacket.connect(this, &Connector::TransportPacketReceived);
transportChannel->MaybeStartGathering();
transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL);
transportChannel->SetRemoteIceParameters((!isOutgoing) ? localIceParameters : remoteIceParameters);
});
}
void Connector::AddRemoteCandidates(const std::vector<std::string> &candidates) {
networkThread->Invoke<void>(RTC_FROM_HERE, [this, candidates] {
for (auto &serializedCandidate : candidates) {
webrtc::JsepIceCandidate parseCandidate("", 0);
if (parseCandidate.Initialize(serializedCandidate, nullptr)) {
auto candidate = parseCandidate.candidate();
printf("Add remote candidate %s\n", serializedCandidate.c_str());
transportChannel->AddRemoteCandidate(candidate);
}
}
});
}
void Connector::CandidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
assert(networkThread->IsCurrent());
webrtc::JsepIceCandidate iceCandidate("", 0);
iceCandidate.SetCandidate(candidate);
std::string serializedCandidate;
if (iceCandidate.ToString(&serializedCandidate)) {
std::vector<std::string> arrayOfOne;
arrayOfOne.push_back(serializedCandidate);
SignalCandidatesGathered(arrayOfOne);
webrtc::JsepIceCandidate parseCandidate("", 0);
if (parseCandidate.Initialize(serializedCandidate, nullptr)) {
auto candidate = parseCandidate.candidate();
}
}
}
void Connector::CandidateGatheringState(cricket::IceTransportInternal *transport) {
if (transport->gathering_state() == cricket::IceGatheringState::kIceGatheringComplete) {
/*if (collectedLocalCandidates.size() != 0) {
SignalCandidatesGathered(collectedLocalCandidates);
}*/
}
}
void Connector::TransportStateChanged(cricket::IceTransportInternal *transport) {
auto state = transport->GetIceTransportState();
switch (state) {
case webrtc::IceTransportState::kConnected:
case webrtc::IceTransportState::kCompleted:
SignalReadyToSendStateChanged(true);
printf("===== State: Connected\n");
break;
default:
SignalReadyToSendStateChanged(false);
printf("===== State: Disconnected\n");
break;
}
}
void Connector::TransportRoleConflict(cricket::IceTransportInternal *transport) {
printf("===== Role conflict\n");
}
void Connector::TransportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, __unused int unused) {
rtc::CopyOnWriteBuffer data;
data.AppendData(bytes, size);
SignalPacketReceived(data);
}
void Connector::SendPacket(const rtc::CopyOnWriteBuffer& data) {
networkThread->Invoke<void>(RTC_FROM_HERE, [this, data] {
rtc::PacketOptions options;
transportChannel->SendPacket((const char *)data.data(), data.size(), options, 0);
});
}

View File

@ -1,75 +0,0 @@
#ifndef DEMO_CONTROLLER_H
#define DEMO_CONTROLLER_H
#include "Connector.h"
#include "MediaEngineWebrtc.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/socket_address.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#import "VideoMetalView.h"
class Controller : public sigslot::has_slots<> {
public:
enum EndpointType {
UDP,
TCP,
P2P,
};
enum State {
Starting,
WaitInit,
WaitInitAck,
Established,
Failed,
Reconnecting,
};
explicit Controller(bool is_outgoing, size_t init_timeout, size_t reconnect_timeout);
~Controller() override;
void Start();
//void SetNetworkType(message::NetworkType network_type);
void SetDataSaving(bool data_saving);
void SetMute(bool mute);
void AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink);
void SetProxy(rtc::ProxyType type, const rtc::SocketAddress& addr, const std::string& username, const std::string& password);
void AddRemoteCandidates(const std::vector<std::string> &candidates);
//static std::map<message::NetworkType, MediaEngineWebrtc::NetworkParams> network_params;
static MediaEngineWebrtc::NetworkParams default_network_params;
static MediaEngineWebrtc::NetworkParams datasaving_network_params;
sigslot::signal1<State> SignalNewState;
sigslot::signal1<const std::vector<std::string>&> SignalCandidatesGathered;
private:
std::unique_ptr<rtc::Thread> thread;
std::unique_ptr<Connector> connector;
std::unique_ptr<MediaEngineWebrtc> media;
State state;
webrtc::RepeatingTaskHandle repeatable;
int64_t last_recv_time;
int64_t last_send_time;
const bool is_outgoing;
const size_t init_timeout;
const size_t reconnect_timeout;
bool local_datasaving;
bool final_datasaving;
//message::NetworkType local_network_type;
//message::NetworkType final_network_type;
void PacketReceived(const rtc::CopyOnWriteBuffer &);
void WriteableStateChanged(bool);
void CandidatesGathered(const std::vector<std::string> &);
void SetFail();
void Play(const int16_t *data, size_t size);
void Record(int16_t *data, size_t size);
void SendRtp(rtc::CopyOnWriteBuffer packet);
//void UpdateNetworkParams(const message::RtpStream& rtp);
};
#endif //DEMO_CONTROLLER_H

View File

@ -1,126 +0,0 @@
#include "Controller.h"
#include "modules/rtp_rtcp/source/rtp_utility.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/message_handler.h"
#include <memory>
/*std::map<message::NetworkType, MediaEngineWebrtc::NetworkParams> Controller::network_params = {
{message::NetworkType::nGprs, {6, 8, 6, 120, false, false, false}},
{message::NetworkType::nEdge, {6, 16, 6, 120, false, false, false}},
{message::NetworkType::n3gOrAbove, {6, 32, 16, 60, false, false, false}},
};
MediaEngineWebrtc::NetworkParams Controller::default_network_params = {6, 32, 16, 30, false, false, false};
MediaEngineWebrtc::NetworkParams Controller::datasaving_network_params = {6, 8, 6, 120, false, false, true};*/
Controller::Controller(bool is_outgoing, size_t init_timeout, size_t reconnect_timeout)
: thread(rtc::Thread::Create())
, connector(std::make_unique<Connector>(is_outgoing))
, state(State::Starting)
, last_recv_time(rtc::TimeMillis())
, last_send_time(rtc::TimeMillis())
, is_outgoing(is_outgoing)
, init_timeout(init_timeout * 1000)
, reconnect_timeout(reconnect_timeout * 1000)
, local_datasaving(false)
, final_datasaving(false)
{
connector->SignalReadyToSendStateChanged.connect(this, &Controller::WriteableStateChanged);
connector->SignalPacketReceived.connect(this, &Controller::PacketReceived);
connector->SignalCandidatesGathered.connect(this, &Controller::CandidatesGathered);
thread->Start();
thread->Invoke<void>(RTC_FROM_HERE, [this, is_outgoing]() {
media.reset(new MediaEngineWebrtc(is_outgoing));
media->Send.connect(this, &Controller::SendRtp);
});
}
Controller::~Controller() {
thread->Invoke<void>(RTC_FROM_HERE, [this]() {
media = nullptr;
connector = nullptr;
});
}
void Controller::Start() {
last_recv_time = rtc::TimeMillis();
connector->Start();
}
void Controller::PacketReceived(const rtc::CopyOnWriteBuffer &data) {
thread->PostTask(RTC_FROM_HERE, [this, data]() {
if (media) {
media->Receive(data);
}
});
}
void Controller::WriteableStateChanged(bool isWriteable) {
if (isWriteable) {
SignalNewState(State::Established);
} else {
SignalNewState(State::Reconnecting);
}
thread->PostTask(RTC_FROM_HERE, [this, isWriteable]() {
if (media) {
media->SetCanSendPackets(isWriteable);
}
});
}
void Controller::SendRtp(rtc::CopyOnWriteBuffer packet) {
connector->SendPacket(packet);
}
/*void Controller::UpdateNetworkParams(const message::RtpStream& rtp) {
bool new_datasaving = local_datasaving || rtp.data_saving;
if (!new_datasaving) {
final_datasaving = false;
message::NetworkType new_network_type = std::min(local_network_type, rtp.network_type);
if (new_network_type != final_network_type) {
final_network_type = new_network_type;
auto it = network_params.find(rtp.network_type);
if (it == network_params.end())
media->SetNetworkParams(default_network_params);
else
media->SetNetworkParams(it->second);
}
} else if (new_datasaving != final_datasaving) {
final_datasaving = true;
media->SetNetworkParams(datasaving_network_params);
}
}*/
void Controller::AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink) {
thread->PostTask(RTC_FROM_HERE, [this, sink]() {
media->AttachVideoView(sink);
});
}
/*void Controller::SetNetworkType(message::NetworkType network_type) {
local_network_type = network_type;
}*/
void Controller::SetDataSaving(bool data_saving) {
local_datasaving = data_saving;
}
void Controller::SetMute(bool mute) {
thread->Invoke<void>(RTC_FROM_HERE, [this, mute]() {
if (media)
media->SetMute(mute);
});
}
void Controller::SetProxy(rtc::ProxyType type, const rtc::SocketAddress& addr, const std::string& username, const std::string& password) {
}
// Relays locally gathered ICE candidates to whoever is connected to the
// signal (signaling layer).
void Controller::CandidatesGathered(const std::vector<std::string> &candidates) {
    SignalCandidatesGathered(candidates);
}
// Feeds ICE candidates received from the remote peer into the connector.
void Controller::AddRemoteCandidates(const std::vector<std::string> &candidates) {
    connector->AddRemoteCandidates(candidates);
}

View File

@ -0,0 +1,140 @@
#include "Manager.h"
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// Lazily creates the process-wide networking thread (with a socket server so
// it can own sockets). The static unique_ptr keeps it alive for the process
// lifetime; callers receive a non-owning pointer.
static rtc::Thread *makeNetworkThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::CreateWithSocketServer();
    value->SetName("WebRTC-Network", nullptr);
    value->Start();
    return value.get();
}
// Returns the shared networking thread, creating it on first use
// (thread-safe via C++11 static initialization).
static rtc::Thread *getNetworkThread() {
    static rtc::Thread *value = makeNetworkThread();
    return value;
}
// Lazily creates the process-wide media thread (no socket server needed).
// Kept alive for the process lifetime by the static unique_ptr.
static rtc::Thread *makeMediaThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::Create();
    value->SetName("WebRTC-Media", nullptr);
    value->Start();
    return value.get();
}
// Returns the shared media thread, creating it on first use.
static rtc::Thread *getMediaThread() {
    static rtc::Thread *value = makeMediaThread();
    return value;
}
// Stores the call configuration and client callbacks; no real work happens
// until start(). Must be constructed on `thread` (asserted), which all
// cross-thread callbacks later re-enter.
Manager::Manager(
    rtc::Thread *thread,
    TgVoipEncryptionKey encryptionKey,
    bool enableP2P,
    std::function<void (const TgVoipState &)> stateUpdated,
    std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
) :
_thread(thread),
_encryptionKey(encryptionKey),
_enableP2P(enableP2P),
_stateUpdated(stateUpdated),
_signalingDataEmitted(signalingDataEmitted) {
    assert(_thread->IsCurrent());
}
// Must be destroyed on the same thread it was created on; the owned
// ThreadLocalObject members tear down their targets on their own threads.
Manager::~Manager() {
    assert(_thread->IsCurrent());
}
// Instantiates the network and media managers, each on its dedicated shared
// thread. Every cross-thread callback captures a weak_ptr to this Manager and
// re-enters through `thread`, so callbacks fired after destruction are
// silently dropped.
void Manager::start() {
    auto weakThis = std::weak_ptr<Manager>(shared_from_this());
    _networkManager.reset(new ThreadLocalObject<NetworkManager>(getNetworkThread(), [encryptionKey = _encryptionKey, enableP2P = _enableP2P, thread = _thread, weakThis]() {
        return new NetworkManager(
            getNetworkThread(),
            encryptionKey,
            enableP2P,
            // Connectivity changes: map to the public TgVoipState and mirror
            // the flag into the media manager. Uses a blocking Invoke (not
            // PostTask) — NOTE(review): presumably to keep state callbacks
            // ordered with respect to the network thread; confirm.
            [thread, weakThis](const NetworkManager::State &state) {
                thread->Invoke<void>(RTC_FROM_HERE, [weakThis, state]() {
                    auto strongThis = weakThis.lock();
                    if (strongThis == nullptr) {
                        return;
                    }
                    TgVoipState mappedState;
                    if (state.isReadyToSendData) {
                        mappedState = TgVoipState::Estabilished;  // (sic) spelling matches the TgVoipState enum
                    } else {
                        mappedState = TgVoipState::Reconnecting;
                    }
                    strongThis->_stateUpdated(mappedState);
                    strongThis->_mediaManager->perform([state](MediaManager *mediaManager) {
                        mediaManager->setIsConnected(state.isReadyToSendData);
                    });
                });
            },
            // Media packets arriving from the network: hand to the media manager.
            [thread, weakThis](const rtc::CopyOnWriteBuffer &packet) {
                thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() {
                    auto strongThis = weakThis.lock();
                    if (strongThis == nullptr) {
                        return;
                    }
                    strongThis->_mediaManager->perform([packet](MediaManager *mediaManager) {
                        mediaManager->receivePacket(packet);
                    });
                });
            },
            // Locally produced signaling data: surface to the client callback.
            [thread, weakThis](const std::vector<uint8_t> &data) {
                thread->PostTask(RTC_FROM_HERE, [weakThis, data]() {
                    auto strongThis = weakThis.lock();
                    if (strongThis == nullptr) {
                        return;
                    }
                    strongThis->_signalingDataEmitted(data);
                });
            }
        );
    }));
    bool isOutgoing = _encryptionKey.isOutgoing;
    _mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [isOutgoing, thread = _thread, weakThis]() {
        return new MediaManager(
            getMediaThread(),
            isOutgoing,
            // Outgoing media packets: forward to the network manager to send.
            [thread, weakThis](const rtc::CopyOnWriteBuffer &packet) {
                thread->PostTask(RTC_FROM_HERE, [weakThis, packet]() {
                    auto strongThis = weakThis.lock();
                    if (strongThis == nullptr) {
                        return;
                    }
                    strongThis->_networkManager->perform([packet](NetworkManager *networkManager) {
                        networkManager->sendPacket(packet);
                    });
                });
            }
        );
    }));
}
// Forwards signaling data received from the remote side to the network
// manager on its thread.
void Manager::receiveSignalingData(const std::vector<uint8_t> &data) {
    _networkManager->perform([data](NetworkManager *networkManager) {
        networkManager->receiveSignalingData(data);
    });
}
// Installs a renderer sink for remote (incoming) video frames via the media
// manager's thread.
void Manager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
    _mediaManager->perform([sink](MediaManager *mediaManager) {
        mediaManager->setIncomingVideoOutput(sink);
    });
}
// Installs a renderer sink for local (outgoing) video frames via the media
// manager's thread.
void Manager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
    _mediaManager->perform([sink](MediaManager *mediaManager) {
        mediaManager->setOutgoingVideoOutput(sink);
    });
}
#ifdef TGVOIP_NAMESPACE
}
#endif

View File

@ -0,0 +1,45 @@
#ifndef TGVOIP_WEBRTC_MANAGER_H
#define TGVOIP_WEBRTC_MANAGER_H
#include "ThreadLocalObject.h"
#include "NetworkManager.h"
#include "MediaManager.h"
#include "TgVoip.h"
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// Top-level per-call coordinator: owns a NetworkManager and a MediaManager,
// each confined to its own thread via ThreadLocalObject. Inherits
// enable_shared_from_this so thread-hopping callbacks can hold weak
// references instead of raw `this`.
class Manager : public std::enable_shared_from_this<Manager> {
public:
    Manager(
        rtc::Thread *thread,
        TgVoipEncryptionKey encryptionKey,
        bool enableP2P,
        std::function<void (const TgVoipState &)> stateUpdated,
        std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
    );
    ~Manager();

    // Instantiates the network/media managers; call once after construction.
    void start();
    // Feeds remotely produced signaling data into the network manager.
    void receiveSignalingData(const std::vector<uint8_t> &data);
    // Routes a sink for remote video frames.
    void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
    // Routes a sink for local (camera) video frames.
    void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);

private:
    rtc::Thread *_thread;                 // owner thread; all callbacks re-enter here
    TgVoipEncryptionKey _encryptionKey;
    bool _enableP2P;
    std::function<void (const TgVoipState &)> _stateUpdated;
    std::function<void (const std::vector<uint8_t> &)> _signalingDataEmitted;
    std::unique_ptr<ThreadLocalObject<NetworkManager>> _networkManager;
    std::unique_ptr<ThreadLocalObject<MediaManager>> _mediaManager;

private:
};
#ifdef TGVOIP_NAMESPACE
}
#endif
#endif

View File

@ -1,19 +0,0 @@
#ifndef DEMO_MEDIAENGINEBASE_H
#define DEMO_MEDIAENGINEBASE_H
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include <cstdint>
// Minimal abstract interface for a media engine: implementations consume raw
// incoming transport packets via Receive().
class MediaEngineBase {
public:
    MediaEngineBase() = default;
    virtual ~MediaEngineBase() = default;
    // Handle one incoming transport packet (audio or video, framing is
    // implementation-defined).
    virtual void Receive(rtc::CopyOnWriteBuffer) = 0;
};
#endif //DEMO_MEDIAENGINEBASE_H

View File

@ -1,76 +0,0 @@
#ifndef DEMO_MEDIAENGINEWEBRTC_H
#define DEMO_MEDIAENGINEWEBRTC_H
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "api/transport/field_trial_based_config.h"
#include "call/call.h"
#include "media/base/media_engine.h"
#include "pc/rtp_sender.h"
#include "rtc_base/task_queue.h"
#include <memory>
#import "VideoCameraCapturer.h"
#import "VideoMetalView.h"
// WebRTC-backed media engine for one call: owns the webrtc::Call, one voice
// and one video channel, and an iOS camera capturer. Outbound packets are
// surfaced through the `Send` signal; inbound packets enter via Receive().
class MediaEngineWebrtc : public sigslot::has_slots<> {
public:
    // Audio tuning knobs applied through SetNetworkParams().
    struct NetworkParams {
        uint8_t min_bitrate_kbps;
        uint8_t max_bitrate_kbps;
        uint8_t start_bitrate_kbps;
        uint8_t ptime_ms;
        bool echo_cancellation;
        bool auto_gain_control;
        bool noise_suppression;
    };
    // `outgoing` selects which side of the fixed caller/called SSRC pairs
    // this engine sends on.
    explicit MediaEngineWebrtc(bool outgoing);
    ~MediaEngineWebrtc();
    // Demultiplex one incoming packet (1-byte header: audio vs video).
    void Receive(rtc::CopyOnWriteBuffer);
    // Notify the Call that a packet left the wire (for BWE bookkeeping).
    void OnSentPacket(const rtc::SentPacket& sent_packet);
    // Reconfigure audio send parameters (bitrate, ptime, DSP options).
    void SetNetworkParams(const NetworkParams& params);
    void SetMute(bool mute);
    // Toggle network up/down for both media types.
    void SetCanSendPackets(bool);
    // Attach a renderer sink for the incoming video stream.
    void AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink);
    // Emitted for every outgoing (already framed) packet.
    sigslot::signal1<rtc::CopyOnWriteBuffer> Send;
private:
    // Network interface handed to the channels; prefixes each packet with a
    // 1-byte audio/video marker and forwards it to the engine's Send signal.
    class Sender final : public cricket::MediaChannel::NetworkInterface {
    public:
        explicit Sender(MediaEngineWebrtc &engine, bool isVideo);
        bool SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) override;
        bool SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) override;
        int SetOption(SocketType type, rtc::Socket::Option opt, int option) override;
    private:
        MediaEngineWebrtc &engine;
        bool isVideo;
    };
    const uint32_t ssrc_send;
    const uint32_t ssrc_recv;
    const uint32_t ssrc_send_video;
    const uint32_t ssrc_recv_video;
    std::unique_ptr<webrtc::Call> call;
    std::unique_ptr<cricket::MediaEngineInterface> media_engine;
    std::unique_ptr<webrtc::RtcEventLogNull> event_log;
    std::unique_ptr<webrtc::TaskQueueFactory> task_queue_factory;
    webrtc::FieldTrialBasedConfig field_trials;
    webrtc::LocalAudioSinkAdapter audio_source;
    Sender audio_sender;
    Sender video_sender;
    std::unique_ptr<cricket::VoiceMediaChannel> voice_channel;
    std::unique_ptr<cricket::VideoMediaChannel> video_channel;
    std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> video_bitrate_allocator_factory;
    std::unique_ptr<rtc::Thread> signaling_thread;
    std::unique_ptr<rtc::Thread> worker_thread;
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
    VideoCameraCapturer *_videoCapturer;
};
#endif //DEMO_MEDIAENGINEWEBRTC_H

View File

@ -1,425 +0,0 @@
#include "MediaEngineWebrtc.h"
#include "absl/strings/match.h"
#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_encoder_factory_template.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/engine/webrtc_media_engine.h"
#include "modules/audio_device/include/audio_device_default.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/field_trial.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/video_bitrate_allocation.h"
#include "sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.h"
#include "sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.h"
#include "sdk/objc/native/api/video_encoder_factory.h"
#include "sdk/objc/native/api/video_decoder_factory.h"
#include "sdk/objc/native/src/objc_video_track_source.h"
#include "api/video_track_source_proxy.h"
#include "sdk/objc/api/RTCVideoRendererAdapter.h"
#include "sdk/objc/native/api/video_frame.h"
#include "api/media_types.h"
namespace {
// Opus audio framing constants used for the SDP-less hardcoded codec setup.
const size_t frame_samples = 480;
const uint8_t channels = 1;
const uint8_t sample_bytes = 2;
const uint32_t clockrate = 48000;
const uint16_t sdp_payload = 111;
const char* sdp_name = "opus";
const uint8_t sdp_channels = 2;
const uint32_t sdp_bitrate = 0;
// Fixed SSRCs: caller/called swap send/recv roles based on the `outgoing`
// flag in the constructor.
const uint32_t caller_ssrc = 1;
const uint32_t called_ssrc = 2;
const uint32_t caller_ssrc_video = 3;
const uint32_t called_ssrc_video = 4;
// RTP header-extension ids for transport-cc sequence numbers.
const int extension_sequence = 1;
const int extension_sequence_video = 1;
}
// Adds the standard RTCP feedback parameters (REMB, transport-cc, CCM FIR,
// NACK, NACK PLI) to a video codec, skipping FEC codecs that must not carry
// them. Mirrors WebRtcVideoEngine's defaults.
static void AddDefaultFeedbackParams(cricket::VideoCodec* codec) {
    // Don't add any feedback params for RED and ULPFEC.
    if (codec->name == cricket::kRedCodecName || codec->name == cricket::kUlpfecCodecName)
        return;
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
    codec->AddFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
    // Don't add any more feedback params for FLEXFEC.
    if (codec->name == cricket::kFlexfecCodecName)
        return;
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
    // Loss notification is only enabled for VP8 behind its field trial.
    if (codec->name == cricket::kVp8CodecName &&
        webrtc::field_trial::IsEnabled("WebRTC-RtcpLossNotification")) {
        codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty));
    }
}
// Assigns dynamic payload types (96..127) and default feedback params to the
// encoder-advertised video formats (plus RED/ULPFEC), appending an RTX codec
// after every non-FEC codec. The payload id of the first preferred codec
// (VP9 in this build) is reported through outCodecId.
static std::vector<cricket::VideoCodec> AssignPayloadTypesAndDefaultCodecs(std::vector<webrtc::SdpVideoFormat> input_formats, int32_t &outCodecId) {
    if (input_formats.empty()) {
        return std::vector<cricket::VideoCodec>();
    }
    static const int kFirstDynamicPayloadType = 96;
    static const int kLastDynamicPayloadType = 127;
    input_formats.push_back(webrtc::SdpVideoFormat(cricket::kRedCodecName));
    input_formats.push_back(webrtc::SdpVideoFormat(cricket::kUlpfecCodecName));
    const bool preferVP9 = true;
    const char *preferredName = preferVP9 ? cricket::kVp9CodecName : cricket::kH264CodecName;
    bool preferredFound = false;
    int payload_type = kFirstDynamicPayloadType;
    std::vector<cricket::VideoCodec> output_codecs;
    for (const webrtc::SdpVideoFormat &format : input_formats) {
        cricket::VideoCodec codec(format);
        codec.id = payload_type;
        AddDefaultFeedbackParams(&codec);
        output_codecs.push_back(codec);
        // Remember the first occurrence of the preferred codec.
        if (!preferredFound && codec.name == preferredName) {
            outCodecId = codec.id;
            preferredFound = true;
        }
        if (++payload_type > kLastDynamicPayloadType) {
            RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
            break;
        }
        // Pair every non-FEC codec with an RTX retransmission codec.
        if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) &&
            !absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) {
            output_codecs.push_back(cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id));
            if (++payload_type > kLastDynamicPayloadType) {
                RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
                break;
            }
        }
    }
    return output_codecs;
}
// Builds the full audio+video stack for one call: media engine, webrtc::Call,
// a voice and a video channel wired to the packet-framing Senders, and (on a
// real device) a front-camera capturer feeding the outgoing video track.
// `outgoing` picks which side of the fixed caller/called SSRC pairs to use.
MediaEngineWebrtc::MediaEngineWebrtc(bool outgoing)
    : ssrc_send(outgoing ? caller_ssrc : called_ssrc)
    , ssrc_recv(outgoing ? called_ssrc : caller_ssrc)
    , ssrc_send_video(outgoing ? caller_ssrc_video : called_ssrc_video)
    , ssrc_recv_video(outgoing ? called_ssrc_video : caller_ssrc_video)
    , event_log(std::make_unique<webrtc::RtcEventLogNull>())
    , task_queue_factory(webrtc::CreateDefaultTaskQueueFactory())
    , audio_sender(*this, false)
    , video_sender(*this, true)
    , signaling_thread(rtc::Thread::Create())
    , worker_thread(rtc::Thread::Create()) {
    signaling_thread->Start();
    worker_thread->Start();
    // Audio bandwidth-estimation / allocation field trials used by this fork.
    webrtc::field_trial::InitFieldTrialsFromString(
        "WebRTC-Audio-SendSideBwe/Enabled/"
        "WebRTC-Audio-Allocation/min:6kbps,max:32kbps/"
        "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
    );
    video_bitrate_allocator_factory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
    cricket::MediaEngineDependencies media_deps;
    media_deps.task_queue_factory = task_queue_factory.get();
    media_deps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
    media_deps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
    //auto video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
    // NOTE(review): this factory is only used to enumerate supported formats;
    // a second identical factory is created below for media_deps.
    auto video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]);
    int32_t outCodecId = 96;
    std::vector<cricket::VideoCodec> videoCodecs = AssignPayloadTypesAndDefaultCodecs(video_encoder_factory->GetSupportedFormats(), outCodecId);
    media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCDefaultVideoEncoderFactory alloc] init]);
    media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory([[RTCDefaultVideoDecoderFactory alloc] init]);
    media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
    media_engine = cricket::CreateMediaEngine(std::move(media_deps));
    media_engine->Init();
    webrtc::Call::Config call_config(event_log.get());
    call_config.task_queue_factory = task_queue_factory.get();
    call_config.trials = &field_trials;
    call_config.audio_state = media_engine->voice().GetAudioState();
    call.reset(webrtc::Call::Create(call_config));
    voice_channel.reset(media_engine->voice().CreateMediaChannel(
        call.get(), cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions::NoGcm()));
    video_channel.reset(media_engine->video().CreateMediaChannel(call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), video_bitrate_allocator_factory.get()));
    // --- Audio send path ---
    if (true) {
        voice_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send));
        // 6..32 kbps Opus, 120ms ptime, DSP options off (see NetworkParams).
        SetNetworkParams({6, 32, 6, 120, false, false, false});
        SetMute(false);
        voice_channel->SetInterface(&audio_sender, webrtc::MediaTransportConfig());
    }
    // --- Video send path: pick the preferred codec, start the camera ---
    if (true) {
        video_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send_video));
        for (auto codec : videoCodecs) {
            if (codec.id == outCodecId) {
                rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
                _nativeVideoSource = webrtc::VideoTrackSourceProxy::Create(signaling_thread.get(), worker_thread.get(), objCVideoTrackSource);
                codec.SetParam(cricket::kCodecParamMinBitrate, 64);
                codec.SetParam(cricket::kCodecParamStartBitrate, 256);
                codec.SetParam(cricket::kCodecParamMaxBitrate, 2500);
                // Camera setup must run on the main queue; skipped entirely on
                // the simulator (no camera).
                dispatch_async(dispatch_get_main_queue(), ^{
#if TARGET_IPHONE_SIMULATOR
#else
                    _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:_nativeVideoSource];
                    AVCaptureDevice *frontCamera = nil;
                    for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
                        if (device.position == AVCaptureDevicePositionFront) {
                            frontCamera = device;
                            break;
                        }
                    }
                    if (frontCamera == nil) {
                        assert(false);
                        return;
                    }
                    // Sort formats by width ascending, pick the first >=1000px.
                    NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
                        int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
                        int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
                        return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
                    }];
                    AVCaptureDeviceFormat *bestFormat = nil;
                    for (AVCaptureDeviceFormat *format in sortedFormats) {
                        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
                        if (dimensions.width >= 1000 || dimensions.height >= 1000) {
                            bestFormat = format;
                            break;
                        }
                    }
                    if (bestFormat == nil) {
                        assert(false);
                        return;
                    }
                    AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
                        if (lhs.maxFrameRate < rhs.maxFrameRate) {
                            return NSOrderedAscending;
                        } else {
                            return NSOrderedDescending;
                        }
                    }] lastObject];
                    if (frameRateRange == nil) {
                        assert(false);
                        return;
                    }
                    // NOTE(review): frameRateRange is validated but not used —
                    // fps is hard-coded to 27; confirm intent.
                    [_videoCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27];
#endif
                });
                cricket::VideoSendParameters send_parameters;
                send_parameters.codecs.push_back(codec);
                send_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence_video);
                //send_parameters.options.echo_cancellation = params.echo_cancellation;
                //send_parameters.options.noise_suppression = params.noise_suppression;
                //send_parameters.options.auto_gain_control = params.auto_gain_control;
                //send_parameters.options.highpass_filter = false;
                //send_parameters.options.typing_detection = false;
                //send_parameters.max_bandwidth_bps = 800000;
                //send_parameters.rtcp.reduced_size = true;
                send_parameters.rtcp.remote_estimate = true;
                video_channel->SetSendParameters(send_parameters);
                video_channel->SetVideoSend(ssrc_send_video, NULL, _nativeVideoSource.get());
                video_channel->SetInterface(&video_sender, webrtc::MediaTransportConfig());
                break;
            }
        }
    }
    // --- Audio receive path ---
    if (true) {
        cricket::AudioRecvParameters recv_parameters;
        recv_parameters.codecs.emplace_back(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
        recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
        recv_parameters.rtcp.reduced_size = true;
        recv_parameters.rtcp.remote_estimate = true;
        voice_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv));
        voice_channel->SetRecvParameters(recv_parameters);
        voice_channel->SetPlayout(true);
    }
    // --- Video receive path: same preferred codec, different bitrate caps ---
    if (true) {
        for (auto codec : videoCodecs) {
            if (codec.id == outCodecId) {
                codec.SetParam(cricket::kCodecParamMinBitrate, 32);
                codec.SetParam(cricket::kCodecParamStartBitrate, 300);
                codec.SetParam(cricket::kCodecParamMaxBitrate, 1000);
                cricket::VideoRecvParameters recv_parameters;
                recv_parameters.codecs.emplace_back(codec);
                recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence_video);
                //recv_parameters.rtcp.reduced_size = true;
                recv_parameters.rtcp.remote_estimate = true;
                video_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv_video));
                video_channel->SetRecvParameters(recv_parameters);
                break;
            }
        }
    }
}
// Tears down the call: stops the camera capturer, detaches the remote-video
// sink, then removes all send/recv streams before the channels are destroyed.
// Fix: removed the stray `;` after the function body.
MediaEngineWebrtc::~MediaEngineWebrtc() {
    [_videoCapturer stopCapture];
    // Detach the sink before removing the recv stream it renders.
    video_channel->SetSink(ssrc_recv_video, nullptr);
    video_channel->RemoveSendStream(ssrc_send_video);
    video_channel->RemoveRecvStream(ssrc_recv_video);
    voice_channel->SetPlayout(false);
    voice_channel->RemoveSendStream(ssrc_send);
    voice_channel->RemoveRecvStream(ssrc_recv);
}
// Demultiplexes one incoming packet by its 1-byte framing header (0xba =
// audio, 0xbf = video, matching Sender::SendPacket) and feeds the remainder
// to the corresponding channel.
// Fix: the unknown-header branch used printf with no trailing newline; route
// it through RTC_LOG (already used in this file) and include the byte value.
void MediaEngineWebrtc::Receive(rtc::CopyOnWriteBuffer packet) {
    // Need at least the framing byte.
    if (packet.size() < 1) {
        return;
    }
    uint8_t header = ((uint8_t *)packet.data())[0];
    rtc::CopyOnWriteBuffer unwrappedPacket = packet.Slice(1, packet.size() - 1);
    if (header == 0xba) {
        if (voice_channel) {
            voice_channel->OnPacketReceived(unwrappedPacket, -1);
        }
    } else if (header == 0xbf) {
        if (video_channel) {
            video_channel->OnPacketReceived(unwrappedPacket, -1);
        }
    } else {
        RTC_LOG(LS_ERROR) << "Unknown packet header: " << static_cast<int>(header);
    }
}
// Reports a completed send to the Call so congestion control / BWE can
// account for it.
void MediaEngineWebrtc::OnSentPacket(const rtc::SentPacket& sent_packet) {
    call->OnSentPacket(sent_packet);
}
// Rebuilds and applies the audio send parameters: an Opus codec configured
// with the given bitrate window / ptime, transport-cc feedback, in-band FEC,
// and the requested audio-processing options.
void MediaEngineWebrtc::SetNetworkParams(const MediaEngineWebrtc::NetworkParams& params) {
    cricket::AudioCodec opus_codec(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
    opus_codec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
    opus_codec.SetParam(cricket::kCodecParamMinBitrate, params.min_bitrate_kbps);
    opus_codec.SetParam(cricket::kCodecParamStartBitrate, params.start_bitrate_kbps);
    opus_codec.SetParam(cricket::kCodecParamMaxBitrate, params.max_bitrate_kbps);
    opus_codec.SetParam(cricket::kCodecParamUseInbandFec, 1);
    opus_codec.SetParam(cricket::kCodecParamPTime, params.ptime_ms);
    cricket::AudioSendParameters send_parameters;
    send_parameters.codecs.push_back(opus_codec);
    send_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
    send_parameters.options.echo_cancellation = params.echo_cancellation;
    // send_parameters.options.experimental_ns = false;
    send_parameters.options.noise_suppression = params.noise_suppression;
    send_parameters.options.auto_gain_control = params.auto_gain_control;
    send_parameters.options.highpass_filter = false;
    send_parameters.options.typing_detection = false;
    // send_parameters.max_bandwidth_bps = 16000;
    send_parameters.rtcp.reduced_size = true;
    send_parameters.rtcp.remote_estimate = true;
    voice_channel->SetSendParameters(send_parameters);
}
// Intentionally a no-op in this revision: muting is not implemented here.
void MediaEngineWebrtc::SetMute(bool mute) {
}
void MediaEngineWebrtc::SetCanSendPackets(bool canSendPackets) {
if (canSendPackets) {
call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp);
call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
} else {
call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown);
call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown);
}
if (voice_channel) {
voice_channel->OnReadyToSend(canSendPackets);
voice_channel->SetSend(canSendPackets);
voice_channel->SetAudioSend(ssrc_send, true, nullptr, &audio_source);
}
if (video_channel) {
video_channel->OnReadyToSend(canSendPackets);
video_channel->SetSend(canSendPackets);
}
}
// Attaches a renderer sink to the incoming video stream. Pass nullptr to
// detach (as the destructor does).
void MediaEngineWebrtc::AttachVideoView(rtc::VideoSinkInterface<webrtc::VideoFrame> *sink) {
    video_channel->SetSink(ssrc_recv_video, sink);
}
// Frames an outgoing RTP packet with the 1-byte audio/video marker, emits it
// through the engine's Send signal, and reports the send for BWE bookkeeping.
// Always reports success to the channel.
bool MediaEngineWebrtc::Sender::SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    const uint8_t marker = isVideo ? 0xbf : 0xba;
    rtc::CopyOnWriteBuffer framed;
    framed.AppendData(&marker, 1);
    framed.AppendData(*packet);
    engine.Send(framed);
    engine.OnSentPacket(rtc::SentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent));
    return true;
}
// RTCP takes exactly the same framing-and-emit path as RTP: a 1-byte
// audio/video marker, the Send signal, and a sent-packet report.
bool MediaEngineWebrtc::Sender::SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    const uint8_t marker = isVideo ? 0xbf : 0xba;
    rtc::CopyOnWriteBuffer framed;
    framed.AppendData(&marker, 1);
    framed.AppendData(*packet);
    engine.Send(framed);
    engine.OnSentPacket(rtc::SentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent));
    return true;
}
// Socket options are not applicable to this custom transport.
int MediaEngineWebrtc::Sender::SetOption(cricket::MediaChannel::NetworkInterface::SocketType, rtc::Socket::Option, int) {
    return -1; // in general, the result is not important yet
}
// `isVideo` selects the framing byte (0xbf video / 0xba audio) used when
// wrapping outgoing packets.
MediaEngineWebrtc::Sender::Sender(MediaEngineWebrtc &engine, bool isVideo) :
    engine(engine),
    isVideo(isVideo) {
}

View File

@ -0,0 +1,395 @@
#include "MediaManager.h"
#include "absl/strings/match.h"
#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_encoder_factory_template.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/engine/webrtc_media_engine.h"
#include "modules/audio_device/include/audio_device_default.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/field_trial.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"
#include "api/video/video_bitrate_allocation.h"
#include "call/call.h"
#if TARGET_OS_IPHONE
#include "CodecsApple.h"
#else
#error "Unsupported platform"
#endif
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// Fixed SSRC pairs; the MediaManager constructor swaps incoming/outgoing
// roles based on whether this side initiated the call (isOutgoing).
static const uint32_t ssrcAudioIncoming = 1;
static const uint32_t ssrcAudioOutgoing = 2;
static const uint32_t ssrcVideoIncoming = 3;
static const uint32_t ssrcVideoOutgoing = 4;
// Adds the standard RTCP feedback parameters (REMB, transport-cc, CCM FIR,
// NACK, NACK PLI) to a video codec, skipping FEC codecs that must not carry
// them. Mirrors WebRtcVideoEngine's defaults.
static void AddDefaultFeedbackParams(cricket::VideoCodec *codec) {
    // Don't add any feedback params for RED and ULPFEC.
    if (codec->name == cricket::kRedCodecName || codec->name == cricket::kUlpfecCodecName)
        return;
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
    codec->AddFeedbackParam(
        cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
    // Don't add any more feedback params for FLEXFEC.
    if (codec->name == cricket::kFlexfecCodecName)
        return;
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
    codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
    // Loss notification is only enabled for VP8 behind its field trial.
    if (codec->name == cricket::kVp8CodecName &&
        webrtc::field_trial::IsEnabled("WebRTC-RtcpLossNotification")) {
        codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty));
    }
}
// Assigns dynamic payload types (96..127) and default feedback params to the
// encoder-advertised video formats (plus RED/ULPFEC), appending an RTX codec
// after every non-FEC codec. The payload id of the first preferred codec
// (VP9 in this build) is reported through outCodecId.
static std::vector<cricket::VideoCodec> AssignPayloadTypesAndDefaultCodecs(std::vector<webrtc::SdpVideoFormat> input_formats, int32_t &outCodecId) {
    if (input_formats.empty()) {
        return std::vector<cricket::VideoCodec>();
    }
    static const int kFirstDynamicPayloadType = 96;
    static const int kLastDynamicPayloadType = 127;
    input_formats.push_back(webrtc::SdpVideoFormat(cricket::kRedCodecName));
    input_formats.push_back(webrtc::SdpVideoFormat(cricket::kUlpfecCodecName));
    const bool preferVP9 = true;
    const char *preferredName = preferVP9 ? cricket::kVp9CodecName : cricket::kH264CodecName;
    bool preferredFound = false;
    int payload_type = kFirstDynamicPayloadType;
    std::vector<cricket::VideoCodec> output_codecs;
    for (const webrtc::SdpVideoFormat &format : input_formats) {
        cricket::VideoCodec codec(format);
        codec.id = payload_type;
        AddDefaultFeedbackParams(&codec);
        output_codecs.push_back(codec);
        // Remember the first occurrence of the preferred codec.
        if (!preferredFound && codec.name == preferredName) {
            outCodecId = codec.id;
            preferredFound = true;
        }
        if (++payload_type > kLastDynamicPayloadType) {
            RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
            break;
        }
        // Pair every non-FEC codec with an RTX retransmission codec.
        if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) &&
            !absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) {
            output_codecs.push_back(cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id));
            if (++payload_type > kLastDynamicPayloadType) {
                RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
                break;
            }
        }
    }
    return output_codecs;
}
// Picks the first codec matching the compile-time preference. Note this
// function prefers H264 (useVP9 = false) while AssignPayloadTypes... above
// prefers VP9 — NOTE(review): confirm the mismatch is intentional.
static absl::optional<cricket::VideoCodec> selectVideoCodec(std::vector<cricket::VideoCodec> &codecs) {
    const bool useVP9 = false;
    const char *wantedName = useVP9 ? cricket::kVp9CodecName : cricket::kH264CodecName;
    for (const auto &candidate : codecs) {
        if (candidate.name == wantedName) {
            return absl::optional<cricket::VideoCodec>(candidate);
        }
    }
    return absl::optional<cricket::VideoCodec>();
}
// Lazily creates the process-wide media worker thread; the static unique_ptr
// keeps it alive for the process lifetime.
static rtc::Thread *makeWorkerThread() {
    static std::unique_ptr<rtc::Thread> value = rtc::Thread::Create();
    value->SetName("WebRTC-Worker", nullptr);
    value->Start();
    return value.get();
}
// Returns the shared worker thread, creating it on first use.
static rtc::Thread *getWorkerThread() {
    static rtc::Thread *value = makeWorkerThread();
    return value;
}
MediaManager::MediaManager(
rtc::Thread *thread,
bool isOutgoing,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted
) :
_packetEmitted(packetEmitted),
_thread(thread),
_eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()) {
_ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcVideo.incoming = isOutgoing ? ssrcVideoIncoming : ssrcVideoOutgoing;
_ssrcVideo.outgoing = (!isOutgoing) ? ssrcVideoIncoming : ssrcVideoOutgoing;
_audioNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, false));
_videoNetworkInterface = std::unique_ptr<MediaManager::NetworkInterfaceImpl>(new MediaManager::NetworkInterfaceImpl(this, true));
webrtc::field_trial::InitFieldTrialsFromString(
"WebRTC-Audio-SendSideBwe/Enabled/"
"WebRTC-Audio-Allocation/min:6kbps,max:32kbps/"
"WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
);
_videoBitrateAllocatorFactory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
cricket::MediaEngineDependencies mediaDeps;
mediaDeps.task_queue_factory = _taskQueueFactory.get();
mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
auto videoEncoderFactory = makeVideoEncoderFactory();
int32_t outCodecId = 96;
std::vector<cricket::VideoCodec> videoCodecs = AssignPayloadTypesAndDefaultCodecs(videoEncoderFactory->GetSupportedFormats(), outCodecId);
mediaDeps.video_encoder_factory = makeVideoEncoderFactory();
mediaDeps.video_decoder_factory = makeVideoDecoderFactory();
mediaDeps.audio_processing = webrtc::AudioProcessingBuilder().Create();
_mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps));
_mediaEngine->Init();
webrtc::Call::Config callConfig(_eventLog.get());
callConfig.task_queue_factory = _taskQueueFactory.get();
callConfig.trials = &_fieldTrials;
callConfig.audio_state = _mediaEngine->voice().GetAudioState();
_call.reset(webrtc::Call::Create(callConfig));
_audioChannel.reset(_mediaEngine->voice().CreateMediaChannel(_call.get(), cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions::NoGcm()));
_videoChannel.reset(_mediaEngine->video().CreateMediaChannel(_call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), _videoBitrateAllocatorFactory.get()));
_audioChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcAudio.outgoing));
const uint32_t opusClockrate = 48000;
const uint16_t opusSdpPayload = 111;
const char *opusSdpName = "opus";
const uint8_t opusSdpChannels = 2;
const uint32_t opusSdpBitrate = 0;
const uint8_t opusMinBitrateKbps = 6;
const uint8_t opusMaxBitrateKbps = 32;
const uint8_t opusStartBitrateKbps = 6;
const uint8_t opusPTimeMs = 120;
const int opusExtensionSequence = 1;
cricket::AudioCodec opusCodec(opusSdpPayload, opusSdpName, opusClockrate, opusSdpBitrate, opusSdpChannels);
opusCodec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
opusCodec.SetParam(cricket::kCodecParamMinBitrate, opusMinBitrateKbps);
opusCodec.SetParam(cricket::kCodecParamStartBitrate, opusStartBitrateKbps);
opusCodec.SetParam(cricket::kCodecParamMaxBitrate, opusMaxBitrateKbps);
opusCodec.SetParam(cricket::kCodecParamUseInbandFec, 1);
opusCodec.SetParam(cricket::kCodecParamPTime, opusPTimeMs);
cricket::AudioSendParameters audioSendPrameters;
audioSendPrameters.codecs.push_back(opusCodec);
audioSendPrameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, opusExtensionSequence);
audioSendPrameters.options.echo_cancellation = false;
//audioSendPrameters.options.experimental_ns = false;
audioSendPrameters.options.noise_suppression = false;
audioSendPrameters.options.auto_gain_control = false;
audioSendPrameters.options.highpass_filter = false;
audioSendPrameters.options.typing_detection = false;
//audioSendPrameters.max_bandwidth_bps = 16000;
audioSendPrameters.rtcp.reduced_size = true;
audioSendPrameters.rtcp.remote_estimate = true;
_audioChannel->SetSendParameters(audioSendPrameters);
_audioChannel->SetInterface(_audioNetworkInterface.get(), webrtc::MediaTransportConfig());
cricket::AudioRecvParameters audioRecvParameters;
audioRecvParameters.codecs.emplace_back(opusSdpPayload, opusSdpName, opusClockrate, opusSdpBitrate, opusSdpChannels);
audioRecvParameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, opusExtensionSequence);
audioRecvParameters.rtcp.reduced_size = true;
audioRecvParameters.rtcp.remote_estimate = true;
_audioChannel->AddRecvStream(cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming));
_audioChannel->SetRecvParameters(audioRecvParameters);
_audioChannel->SetPlayout(true);
_videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing));
auto videoCodec = selectVideoCodec(videoCodecs);
if (videoCodec.has_value()) {
_nativeVideoSource = makeVideoSource(_thread, getWorkerThread());
auto codec = videoCodec.value();
codec.SetParam(cricket::kCodecParamMinBitrate, 64);
codec.SetParam(cricket::kCodecParamStartBitrate, 256);
codec.SetParam(cricket::kCodecParamMaxBitrate, 2500);
_videoCapturer = makeVideoCapturer(_nativeVideoSource);
cricket::VideoSendParameters videoSendParameters;
videoSendParameters.codecs.push_back(codec);
const int videoExtensionSequence = 1;
videoSendParameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, videoExtensionSequence);
//send_parameters.max_bandwidth_bps = 800000;
//send_parameters.rtcp.reduced_size = true;
videoSendParameters.rtcp.remote_estimate = true;
_videoChannel->SetSendParameters(videoSendParameters);
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, _nativeVideoSource.get());
_videoChannel->SetInterface(_videoNetworkInterface.get(), webrtc::MediaTransportConfig());
cricket::VideoRecvParameters videoRecvParameters;
videoRecvParameters.codecs.emplace_back(codec);
videoRecvParameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, videoExtensionSequence);
//recv_parameters.rtcp.reduced_size = true;
videoRecvParameters.rtcp.remote_estimate = true;
_videoChannel->AddRecvStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.incoming));
_videoChannel->SetRecvParameters(videoRecvParameters);
}
}
// Tears down the media stack. Must run on the manager thread.
// The sequence matters: first signal the network as down and stop sending,
// then detach streams and the packet interfaces, so the channels never try
// to emit through a half-destroyed MediaManager. The channels and _call
// themselves are destroyed afterwards by the member destructors.
MediaManager::~MediaManager() {
assert(_thread->IsCurrent());
_call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown);
_call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown);
_audioChannel->OnReadyToSend(false);
_audioChannel->SetSend(false);
_audioChannel->SetAudioSend(_ssrcAudio.outgoing, false, nullptr, &_audioSource);
_audioChannel->SetPlayout(false);
_audioChannel->RemoveRecvStream(_ssrcAudio.incoming);
_audioChannel->RemoveSendStream(_ssrcAudio.outgoing);
// Detach the custom packet interface before the channel goes away.
_audioChannel->SetInterface(nullptr, webrtc::MediaTransportConfig());
_videoChannel->RemoveRecvStream(_ssrcVideo.incoming);
_videoChannel->RemoveSendStream(_ssrcVideo.outgoing);
_videoChannel->SetVideoSend(_ssrcVideo.outgoing, NULL, nullptr);
_videoChannel->SetInterface(nullptr, webrtc::MediaTransportConfig());
}
void MediaManager::setIsConnected(bool isConnected) {
if (isConnected) {
_call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkUp);
_call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
} else {
_call->SignalChannelNetworkState(webrtc::MediaType::AUDIO, webrtc::kNetworkDown);
_call->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkDown);
}
if (_audioChannel) {
_audioChannel->OnReadyToSend(isConnected);
_audioChannel->SetSend(isConnected);
_audioChannel->SetAudioSend(_ssrcAudio.outgoing, isConnected, nullptr, &_audioSource);
}
if (_videoChannel) {
_videoChannel->OnReadyToSend(isConnected);
_videoChannel->SetSend(isConnected);
}
}
// Demultiplexes an incoming transport packet by its one-byte prefix and
// forwards the payload to the matching media channel (0xba = audio,
// 0xbf = video, mirroring NetworkInterfaceImpl framing). Empty or
// unknown-tagged packets are dropped silently.
void MediaManager::receivePacket(const rtc::CopyOnWriteBuffer &packet) {
    if (packet.size() < 1) {
        return;
    }
    const uint8_t channelTag = ((uint8_t *)packet.data())[0];
    rtc::CopyOnWriteBuffer payload = packet.Slice(1, packet.size() - 1);
    switch (channelTag) {
        case 0xba:
            if (_audioChannel) {
                _audioChannel->OnPacketReceived(payload, -1);
            }
            break;
        case 0xbf:
            if (_videoChannel) {
                _videoChannel->OnPacketReceived(payload, -1);
            }
            break;
        default:
            break;
    }
}
// Reports a sent packet to webrtc::Call so bandwidth estimation and
// congestion control account for it.
void MediaManager::notifyPacketSent(const rtc::SentPacket &sentPacket) {
_call->OnSentPacket(sentPacket);
}
// Attaches a sink that receives decoded remote video frames. The shared_ptr
// is retained in _currentIncomingVideoSink so the sink outlives the raw
// pointer handed to the channel.
void MediaManager::setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
_currentIncomingVideoSink = sink;
_videoChannel->SetSink(_ssrcVideo.incoming, sink.get());
}
// Attaches a sink that receives frames from the local capturer (self-view).
// The shared_ptr is retained so the sink outlives the raw pointer handed to
// the source.
// Fix: _nativeVideoSource is only created in the constructor when a
// supported video codec was negotiated; guard against the codec-less case,
// where the unconditional deref would crash. The sink is still remembered
// so callers observe the same stored state either way.
void MediaManager::setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) {
    _currentOutgoingVideoSink = sink;
    if (_nativeVideoSource) {
        _nativeVideoSource->AddOrUpdateSink(sink.get(), rtc::VideoSinkWants());
    }
}
// Binds this pseudo network interface to its owning MediaManager (non-owning
// back-pointer; the MediaManager outlives its interfaces). isVideo selects
// the one-byte channel tag used when framing outgoing packets.
MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) :
_mediaManager(mediaManager),
_isVideo(isVideo) {
}
// Sends an RTP packet through the encrypted signaling transport instead of a
// real socket: prefixes a one-byte channel tag (0xbf = video, 0xba = audio,
// matching MediaManager::receivePacket), emits the framed packet, and
// reports it as sent so congestion control sees it.
// Always returns true; delivery is the transport's concern.
bool MediaManager::NetworkInterfaceImpl::SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
rtc::CopyOnWriteBuffer wrappedPacket;
uint8_t header = _isVideo ? 0xbf : 0xba;
wrappedPacket.AppendData(&header, 1);
wrappedPacket.AppendData(*packet);
_mediaManager->_packetEmitted(wrappedPacket);
rtc::SentPacket sentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent);
_mediaManager->notifyPacketSent(sentPacket);
return true;
}
// RTCP uses exactly the same external framing as RTP here (one-byte channel
// tag + body through the shared packet emitter). The previous implementation
// was a byte-for-byte duplicate of SendPacket; delegate to it so the two
// paths cannot drift apart.
bool MediaManager::NetworkInterfaceImpl::SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    return SendPacket(packet, options);
}
// Socket options are meaningless on this virtual transport; always report
// failure (-1), which callers treat as a no-op.
int MediaManager::NetworkInterfaceImpl::SetOption(cricket::MediaChannel::NetworkInterface::SocketType, rtc::Socket::Option, int) {
return -1;
}
#ifdef TGVOIP_NAMESPACE
}
#endif

View File

@ -0,0 +1,99 @@
#ifndef TGVOIP_WEBRTC_MEDIA_MANAGER_H
#define TGVOIP_WEBRTC_MEDIA_MANAGER_H
#include "rtc_base/thread.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "api/transport/field_trial_based_config.h"
#include "pc/rtp_sender.h"
#include <functional>
#include <memory>
namespace webrtc {
class Call;
class RtcEventLogNull;
class TaskQueueFactory;
class VideoBitrateAllocatorFactory;
class VideoTrackSourceInterface;
};
namespace cricket {
class MediaEngineInterface;
class VoiceMediaChannel;
class VideoMediaChannel;
};
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
class VideoCapturerInterface;
// Owns the WebRTC media stack for a single call: the media engine, the
// webrtc::Call, one voice and one video channel, and the pseudo network
// interfaces that bridge media packets to the encrypted transport instead
// of real sockets. Methods assert/expect the manager thread.
class MediaManager : public sigslot::has_slots<>, public std::enable_shared_from_this<MediaManager> {
private:
// Incoming/outgoing SSRC pair for one media stream.
struct SSRC {
uint32_t incoming;
uint32_t outgoing;
};
// cricket NetworkInterface that frames each outgoing packet with a one-byte
// channel tag and hands it to MediaManager's packet emitter.
class NetworkInterfaceImpl : public cricket::MediaChannel::NetworkInterface {
public:
NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo);
bool SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) override;
bool SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) override;
int SetOption(SocketType type, rtc::Socket::Option opt, int option) override;
private:
// Non-owning back-pointer; the MediaManager outlives its interfaces.
MediaManager *_mediaManager;
bool _isVideo;
};
friend class MediaManager::NetworkInterfaceImpl;
public:
MediaManager(
rtc::Thread *thread,
bool isOutgoing,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetEmitted
);
~MediaManager();
// Signals transport connectivity; enables/disables sending on both channels.
void setIsConnected(bool isConnected);
// Feeds a received (already decrypted) framed packet into the right channel.
void receivePacket(const rtc::CopyOnWriteBuffer &packet);
// Reports a sent packet to webrtc::Call for congestion control.
void notifyPacketSent(const rtc::SentPacket &sentPacket);
// Attach sinks for remote frames and for local (self-view) frames.
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
protected:
// Delivers framed outgoing packets to the owner (NetworkManager side).
std::function<void (const rtc::CopyOnWriteBuffer &)> _packetEmitted;
private:
rtc::Thread *_thread;
std::unique_ptr<webrtc::RtcEventLogNull> _eventLog;
std::unique_ptr<webrtc::TaskQueueFactory> _taskQueueFactory;
SSRC _ssrcAudio;
SSRC _ssrcVideo;
std::unique_ptr<cricket::MediaEngineInterface> _mediaEngine;
std::unique_ptr<webrtc::Call> _call;
webrtc::FieldTrialBasedConfig _fieldTrials;
webrtc::LocalAudioSinkAdapter _audioSource;
std::unique_ptr<cricket::VoiceMediaChannel> _audioChannel;
std::unique_ptr<cricket::VideoMediaChannel> _videoChannel;
std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> _videoBitrateAllocatorFactory;
// Only set when a supported video codec was negotiated; may stay null.
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
std::unique_ptr<VideoCapturerInterface> _videoCapturer;
// Retained so sinks outlive the raw pointers handed to channel/source.
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentIncomingVideoSink;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> _currentOutgoingVideoSink;
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;
};
#ifdef TGVOIP_NAMESPACE
}
#endif
#endif

View File

@ -0,0 +1,334 @@
#include "NetworkManager.h"
#include "p2p/base/basic_packet_socket_factory.h"
#include "p2p/client/basic_port_allocator.h"
#include "p2p/base/p2p_transport_channel.h"
#include "p2p/base/basic_async_resolver_factory.h"
#include "api/packet_socket_factory.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "p2p/base/ice_credentials_iterator.h"
#include "api/jsep_ice_candidate.h"
extern "C" {
#include <openssl/sha.h>
#include <openssl/aes.h>
#include <openssl/modes.h>
#include <openssl/rand.h>
#include <openssl/crypto.h>
}
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// Derives the 32-byte AES key and 32-byte IGE IV from the shared 256-byte
// encryption key and the 16-byte message key by interleaving halves of two
// SHA-256 digests. `x` (0 or 8) selects the direction-dependent key slice.
// NOTE(review): this looks like the MTProto 2.0 KDF layout — verify offsets
// against the protocol spec before changing anything here.
static void KDF2(unsigned char *encryptionKey, unsigned char *msgKey, size_t x, unsigned char *aesKey, unsigned char *aesIv) {
uint8_t sA[32], sB[32];
uint8_t buf[16 + 36];
// sA = SHA256(msgKey || key[x .. x+36])
memcpy(buf, msgKey, 16);
memcpy(buf + 16, encryptionKey + x, 36);
SHA256(buf, 16 + 36, sA);
// sB = SHA256(key[40+x .. 40+x+36] || msgKey)
memcpy(buf, encryptionKey + 40 + x, 36);
memcpy(buf + 36, msgKey, 16);
SHA256(buf, 36 + 16, sB);
// aesKey = sA[0:8] || sB[8:24] || sA[24:32]
memcpy(aesKey, sA, 8);
memcpy(aesKey + 8, sB + 8, 16);
memcpy(aesKey + 8 + 16, sA + 24, 8);
// aesIv = sB[0:8] || sA[8:24] || sB[24:32]
memcpy(aesIv, sB, 8);
memcpy(aesIv + 8, sA + 8, 16);
memcpy(aesIv + 8 + 16, sB + 24, 8);
}
// AES-256-IGE encryption helper. `length` must be a multiple of 16; `iv` is
// the 32-byte IGE IV (two chained blocks) and is advanced in place by
// OpenSSL as the chain progresses.
static void aesIgeEncrypt(uint8_t *in, uint8_t *out, size_t length, uint8_t *key, uint8_t *iv) {
AES_KEY akey;
AES_set_encrypt_key(key, 32*8, &akey);
AES_ige_encrypt(in, out, length, &akey, iv, AES_ENCRYPT);
}
// AES-256-IGE decryption helper. OpenSSL exposes IGE decryption through
// AES_ige_encrypt with AES_DECRYPT and a decrypt key schedule; `iv` is the
// 32-byte IGE IV and is advanced in place.
static void aesIgeDecrypt(uint8_t *in, uint8_t *out, size_t length, uint8_t *key, uint8_t *iv) {
AES_KEY akey;
AES_set_decrypt_key(key, 32*8, &akey);
AES_ige_encrypt(in, out, length, &akey, iv, AES_DECRYPT);
}
// Decrypts and authenticates a transport packet: the first 16 bytes are the
// message key, the remainder AES-256-IGE ciphertext whose plaintext starts
// with a 2-byte inner length. Returns absl::nullopt on any validation
// failure (short packet, oversized, misaligned, bad authenticator, bad
// inner length).
// Fixes vs. the previous version: the always-false unsigned comparisons
// (`decryptedSize < 0` on a size_t, `innerSize < 0` on a uint16_t) are
// removed, and the message-key check uses constant-time CRYPTO_memcmp
// instead of memcmp to avoid a timing side channel on the authenticator.
static absl::optional<rtc::CopyOnWriteBuffer> decryptPacket(const rtc::CopyOnWriteBuffer &packet, const TgVoipEncryptionKey &encryptionKey) {
    if (packet.size() < 16 + 16) {
        return absl::nullopt;
    }
    unsigned char msgKey[16];
    memcpy(msgKey, packet.data(), 16);

    // Incoming packets were produced by the remote side, so use the
    // opposite key offset of the one encryptPacket uses locally.
    int x = encryptionKey.isOutgoing ? 8 : 0;

    unsigned char aesKey[32];
    unsigned char aesIv[32];
    KDF2((unsigned char *)encryptionKey.value.data(), msgKey, x, aesKey, aesIv);

    size_t decryptedSize = packet.size() - 16;
    if (decryptedSize > 128 * 1024) {
        return absl::nullopt;
    }
    if (decryptedSize % 16 != 0) {
        return absl::nullopt;
    }

    rtc::Buffer decryptionBuffer(decryptedSize);
    aesIgeDecrypt(((uint8_t *)packet.data()) + 16, decryptionBuffer.begin(), decryptionBuffer.size(), aesKey, aesIv);

    // Recompute the message key over key slice + plaintext and compare it
    // in constant time; a mismatch means tampering or a wrong key.
    rtc::ByteBufferWriter msgKeyData;
    msgKeyData.WriteBytes((const char *)encryptionKey.value.data() + 88 + x, 32);
    msgKeyData.WriteBytes((const char *)decryptionBuffer.data(), decryptionBuffer.size());
    unsigned char msgKeyLarge[32];
    SHA256((uint8_t *)msgKeyData.Data(), msgKeyData.Length(), msgKeyLarge);
    if (CRYPTO_memcmp(msgKeyLarge + 8, msgKey, 16) != 0) {
        return absl::nullopt;
    }

    // The first two plaintext bytes carry the inner payload length; the
    // rest after it is random padding added by encryptPacket.
    uint16_t innerSize;
    memcpy(&innerSize, decryptionBuffer.data(), 2);
    if (innerSize > decryptionBuffer.size() - 2) {
        return absl::nullopt;
    }

    rtc::CopyOnWriteBuffer decryptedPacket;
    decryptedPacket.AppendData((const char *)decryptionBuffer.data() + 2, innerSize);
    return decryptedPacket;
}
// Encrypts a media packet for the transport:
// output = msgKey(16 bytes) || AES-256-IGE(lengthPrefix || payload || padding).
// Returns absl::nullopt if the payload cannot be length-prefixed in 16 bits
// or if the CSPRNG fails.
// Fix: the RAND_bytes return value was previously ignored, which could have
// encrypted uninitialized stack bytes as padding if the CSPRNG ever failed.
static absl::optional<rtc::Buffer> encryptPacket(const rtc::CopyOnWriteBuffer &packet, const TgVoipEncryptionKey &encryptionKey) {
    if (packet.size() > UINT16_MAX) {
        return absl::nullopt;
    }

    // Inner plaintext: 2-byte (host-endian) length, payload, then 1..16
    // random padding bytes to reach a whole AES block.
    rtc::ByteBufferWriter innerData;
    uint16_t packetSize = (uint16_t)packet.size();
    innerData.WriteBytes((const char *)&packetSize, 2);
    innerData.WriteBytes((const char *)packet.data(), packet.size());

    size_t innerPadding = 16 - innerData.Length() % 16;
    uint8_t paddingData[16];
    // RAND_bytes returns 1 on success.
    if (RAND_bytes(paddingData, (int)innerPadding) != 1) {
        return absl::nullopt;
    }
    innerData.WriteBytes((const char *)paddingData, innerPadding);

    if (innerData.Length() % 16 != 0) {
        assert(false);
        return absl::nullopt;
    }

    // Message key: middle 16 bytes of SHA-256 over a direction-dependent
    // key slice plus the plaintext; it doubles as the authenticator that
    // decryptPacket verifies.
    int x = encryptionKey.isOutgoing ? 0 : 8;
    rtc::ByteBufferWriter msgKeyData;
    msgKeyData.WriteBytes((const char *)encryptionKey.value.data() + 88 + x, 32);
    msgKeyData.WriteBytes(innerData.Data(), innerData.Length());
    unsigned char msgKeyLarge[32];
    SHA256((uint8_t *)msgKeyData.Data(), msgKeyData.Length(), msgKeyLarge);
    unsigned char msgKey[16];
    memcpy(msgKey, msgKeyLarge + 8, 16);

    unsigned char aesKey[32];
    unsigned char aesIv[32];
    KDF2((unsigned char *)encryptionKey.value.data(), msgKey, x, aesKey, aesIv);

    rtc::Buffer encryptedPacket;
    encryptedPacket.AppendData((const char *)msgKey, 16);

    rtc::Buffer encryptionBuffer(innerData.Length());
    aesIgeEncrypt((uint8_t *)innerData.Data(), encryptionBuffer.begin(), innerData.Length(), aesKey, aesIv);
    encryptedPacket.AppendData(encryptionBuffer.begin(), encryptionBuffer.size());
    return encryptedPacket;
}
// Builds the full ICE stack for one call: port allocator, STUN/TURN
// configuration, and a P2PTransportChannel whose role (controlling vs.
// controlled) follows the call direction. Must run on the network thread.
NetworkManager::NetworkManager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::function<void (const NetworkManager::State &)> stateUpdated,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetReceived,
std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
) :
_thread(thread),
_encryptionKey(encryptionKey),
_stateUpdated(stateUpdated),
_packetReceived(packetReceived),
_signalingDataEmitted(signalingDataEmitted) {
assert(_thread->IsCurrent());
_socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread));
_networkManager = std::make_unique<rtc::BasicNetworkManager>();
_portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr));
// TCP is always disabled; without P2P, direct UDP and STUN are disabled
// too, forcing all traffic through the relay.
uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP;
if (!enableP2P) {
flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
flags |= cricket::PORTALLOCATOR_DISABLE_STUN;
}
//flags |= cricket::PORTALLOCATOR_DISABLE_UDP;
_portAllocator->set_flags(_portAllocator->flags() | flags);
_portAllocator->Initialize();
// NOTE(review): hardcoded development STUN/TURN server and trivial
// credentials ("user"/"root") — these must come from call configuration
// before shipping.
rtc::SocketAddress defaultStunAddress = rtc::SocketAddress("hlgkfjdrtjfykgulhijkljhulyo.uksouth.cloudapp.azure.com", 3478);
cricket::ServerAddresses stunServers;
stunServers.insert(defaultStunAddress);
std::vector<cricket::RelayServerConfig> turnServers;
turnServers.push_back(cricket::RelayServerConfig(
rtc::SocketAddress("hlgkfjdrtjfykgulhijkljhulyo.uksouth.cloudapp.azure.com", 3478),
"user",
"root",
cricket::PROTO_UDP
));
_portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE);
_asyncResolverFactory = std::make_unique<webrtc::BasicAsyncResolverFactory>();
_transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr));
cricket::IceConfig iceConfig;
iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY;
_transportChannel->SetIceConfig(iceConfig);
// NOTE(review): static, hardcoded ICE ufrag/pwd shared by every call —
// both sides pick local/remote by call direction. These should be
// per-call random credentials exchanged via signaling.
cricket::IceParameters localIceParameters(
"gcp3",
"zWDKozH8/3JWt8he3M/CMj5R",
false
);
cricket::IceParameters remoteIceParameters(
"acp3",
"aWDKozH8/3JWt8he3M/CMj5R",
false
);
_transportChannel->SetIceParameters(_encryptionKey.isOutgoing ? localIceParameters : remoteIceParameters);
_transportChannel->SetIceRole(_encryptionKey.isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED);
_transportChannel->SignalCandidateGathered.connect(this, &NetworkManager::candidateGathered);
_transportChannel->SignalGatheringState.connect(this, &NetworkManager::candidateGatheringState);
_transportChannel->SignalIceTransportStateChanged.connect(this, &NetworkManager::transportStateChanged);
_transportChannel->SignalReadPacket.connect(this, &NetworkManager::transportPacketReceived);
_transportChannel->MaybeStartGathering();
_transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL);
// The remote side uses the opposite parameter set of the local one.
_transportChannel->SetRemoteIceParameters((!_encryptionKey.isOutgoing) ? localIceParameters : remoteIceParameters);
}
// Destroys the ICE stack explicitly in reverse construction order so the
// transport channel is gone before the allocator, network manager, and
// socket factory it depends on. Must run on the network thread.
NetworkManager::~NetworkManager() {
assert(_thread->IsCurrent());
_transportChannel.reset();
_asyncResolverFactory.reset();
_portAllocator.reset();
_networkManager.reset();
_socketFactory.reset();
}
void NetworkManager::receiveSignalingData(const std::vector<uint8_t> &data) {
rtc::ByteBufferReader reader((const char *)data.data(), data.size());
uint32_t candidateCount = 0;
if (!reader.ReadUInt32(&candidateCount)) {
return;
}
std::vector<std::string> candidates;
for (uint32_t i = 0; i < candidateCount; i++) {
uint32_t candidateLength = 0;
if (!reader.ReadUInt32(&candidateLength)) {
return;
}
std::string candidate;
if (!reader.ReadString(&candidate, candidateLength)) {
return;
}
candidates.push_back(candidate);
}
for (auto &serializedCandidate : candidates) {
webrtc::JsepIceCandidate parseCandidate("", 0);
if (parseCandidate.Initialize(serializedCandidate, nullptr)) {
auto parsedCandidate = parseCandidate.candidate();
_transportChannel->AddRemoteCandidate(parsedCandidate);
}
}
}
// Encrypts an outgoing media packet and pushes it through the ICE transport.
// Packets that fail to encrypt (oversized payload or CSPRNG failure) are
// dropped silently.
void NetworkManager::sendPacket(const rtc::CopyOnWriteBuffer &packet) {
    const auto encrypted = encryptPacket(packet, _encryptionKey);
    if (!encrypted.has_value()) {
        return;
    }
    rtc::PacketOptions options;
    _transportChannel->SendPacket((const char *)encrypted->data(), encrypted->size(), options, 0);
}
// Signal handler: the ICE transport gathered a new local candidate.
// Serializes it to SDP and packs it into the length-prefixed wire format
// consumed by receiveSignalingData on the remote side, then emits the blob
// as signaling data. Must run on the network thread.
// Idiom fixes: the serialization loop previously copied each std::string by
// value (and named the loop variable `string`); iterate by const reference.
void NetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) {
    assert(_thread->IsCurrent());
    webrtc::JsepIceCandidate iceCandidate("", 0);
    iceCandidate.SetCandidate(candidate);
    std::string serializedCandidate;
    if (!iceCandidate.ToString(&serializedCandidate)) {
        return;
    }
    // Kept as a vector so the wire format supports batching even though we
    // currently emit one candidate per callback.
    std::vector<std::string> candidates;
    candidates.push_back(serializedCandidate);
    rtc::ByteBufferWriter writer;
    writer.WriteUInt32((uint32_t)candidates.size());
    for (const auto &serialized : candidates) {
        writer.WriteUInt32((uint32_t)serialized.size());
        writer.WriteString(serialized);
    }
    std::vector<uint8_t> data;
    data.resize(writer.Length());
    memcpy(data.data(), writer.Data(), writer.Length());
    _signalingDataEmitted(data);
}
// Gathering-state change notifications are not acted upon beyond the
// thread assertion; kept connected for completeness/debugging.
void NetworkManager::candidateGatheringState(cricket::IceTransportInternal *transport) {
assert(_thread->IsCurrent());
}
// Maps the detailed ICE transport state onto the single coarse flag the
// Manager cares about: only kConnected and kCompleted count as ready to
// send; every other state (new, checking, disconnected, failed, closed)
// reports not-ready.
void NetworkManager::transportStateChanged(cricket::IceTransportInternal *transport) {
    assert(_thread->IsCurrent());
    const auto iceState = transport->GetIceTransportState();
    const bool isConnected =
        iceState == webrtc::IceTransportState::kConnected ||
        iceState == webrtc::IceTransportState::kCompleted;
    NetworkManager::State emitState;
    emitState.isReadyToSendData = isConnected;
    _stateUpdated(emitState);
}
// Ready-to-send notifications are unused; connectivity is tracked via
// transportStateChanged instead.
void NetworkManager::transportReadyToSend(cricket::IceTransportInternal *transport) {
assert(_thread->IsCurrent());
}
// Signal handler for raw bytes arriving from the ICE transport: wraps them
// in a buffer, attempts authenticated decryption, and forwards valid
// plaintext upward. Packets failing decryption (tampered, truncated, or
// wrong key) are dropped silently.
void NetworkManager::transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused) {
    assert(_thread->IsCurrent());
    rtc::CopyOnWriteBuffer encrypted;
    encrypted.AppendData(bytes, size);
    const auto decrypted = decryptPacket(encrypted, _encryptionKey);
    if (!decrypted.has_value()) {
        return;
    }
    _packetReceived(decrypted.value());
}
#ifdef TGVOIP_NAMESPACE
}
#endif

View File

@ -0,0 +1,78 @@
#ifndef TGVOIP_WEBRTC_NETWORK_MANAGER_H
#define TGVOIP_WEBRTC_NETWORK_MANAGER_H
#include "rtc_base/thread.h"
#include <functional>
#include <memory>
#include "rtc_base/copy_on_write_buffer.h"
#include "api/candidate.h"
#include "TgVoip.h"
namespace rtc {
class BasicPacketSocketFactory;
class BasicNetworkManager;
class PacketTransportInternal;
}
namespace cricket {
class BasicPortAllocator;
class P2PTransportChannel;
class IceTransportInternal;
}
namespace webrtc {
class BasicAsyncResolverFactory;
}
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// Owns the ICE/P2P transport for one call and the packet-level encryption
// layer on top of it. Converts between raw transport bytes and
// encrypted/authenticated media packets, and surfaces connectivity through
// the stateUpdated callback. All methods run on the network thread.
class NetworkManager: public sigslot::has_slots<> {
public:
// Coarse connectivity snapshot delivered via stateUpdated.
struct State {
bool isReadyToSendData;
};
public:
NetworkManager(
rtc::Thread *thread,
TgVoipEncryptionKey encryptionKey,
bool enableP2P,
std::function<void (const NetworkManager::State &)> stateUpdated,
std::function<void (const rtc::CopyOnWriteBuffer &)> packetReceived,
std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
);
~NetworkManager();
// Parses serialized remote ICE candidates and feeds them to the transport.
void receiveSignalingData(const std::vector<uint8_t> &data);
// Encrypts and sends one media packet over the ICE transport.
void sendPacket(const rtc::CopyOnWriteBuffer &packet);
private:
rtc::Thread *_thread;
TgVoipEncryptionKey _encryptionKey;
std::function<void (const NetworkManager::State &)> _stateUpdated;
std::function<void (const rtc::CopyOnWriteBuffer &)> _packetReceived;
std::function<void (const std::vector<uint8_t> &)> _signalingDataEmitted;
// Declaration order matters: members below are torn down in reverse, so
// the transport channel dies before the allocator/factories it uses.
std::unique_ptr<rtc::BasicPacketSocketFactory> _socketFactory;
std::unique_ptr<rtc::BasicNetworkManager> _networkManager;
std::unique_ptr<cricket::BasicPortAllocator> _portAllocator;
std::unique_ptr<webrtc::BasicAsyncResolverFactory> _asyncResolverFactory;
std::unique_ptr<cricket::P2PTransportChannel> _transportChannel;
private:
// sigslot handlers connected to the transport channel's signals.
void candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate);
void candidateGatheringState(cricket::IceTransportInternal *transport);
void transportStateChanged(cricket::IceTransportInternal *transport);
void transportReadyToSend(cricket::IceTransportInternal *transport);
void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t &timestamp, int unused);
};
#ifdef TGVOIP_NAMESPACE
}
#endif
#endif

View File

@ -1,14 +1,19 @@
#ifndef __TGVOIP_H
#define __TGVOIP_H
#define TGVOIP_NAMESPACE tgvoip_webrtc
#include <functional>
#include <vector>
#include <string>
#include <memory>
#import "VideoMetalView.h"
namespace rtc {
template <typename VideoFrameT>
class VideoSinkInterface;
}
namespace webrtc {
class VideoFrame;
}
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
@ -131,7 +136,9 @@ public:
std::vector<TgVoipEndpoint> const &endpoints,
std::unique_ptr<TgVoipProxy> const &proxy,
TgVoipNetworkType initialNetworkType,
TgVoipEncryptionKey const &encryptionKey
TgVoipEncryptionKey const &encryptionKey,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted
);
virtual ~TgVoip();
@ -141,19 +148,16 @@ public:
virtual void setAudioOutputGainControlEnabled(bool enabled) = 0;
virtual void setEchoCancellationStrength(int strength) = 0;
virtual void AttachVideoView(VideoMetalView *videoView) = 0;
virtual void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual std::string getLastError() = 0;
virtual std::string getDebugInfo() = 0;
virtual int64_t getPreferredRelayId() = 0;
virtual TgVoipTrafficStats getTrafficStats() = 0;
virtual TgVoipPersistentState getPersistentState() = 0;
virtual void setOnStateUpdated(std::function<void(TgVoipState)> onStateUpdated) = 0;
virtual void setOnSignalBarsUpdated(std::function<void(int)> onSignalBarsUpdated) = 0;
virtual void setOnCandidatesGathered(std::function<void(const std::vector<std::string> &)> onCandidatesGathered) = 0;
virtual void addRemoteCandidates(const std::vector<std::string> &candidates) = 0;
virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
virtual TgVoipFinalState stop() = 0;
};

View File

@ -2,11 +2,17 @@
#include "TgVoip.h"
#include "Controller.h"
#include "rtc_base/logging.h"
#include "Manager.h"
#include <stdarg.h>
#include <iostream>
#import <Foundation/Foundation.h>
#include <sys/time.h>
#ifndef TGVOIP_USE_CUSTOM_CRYPTO
/*extern "C" {
#include <openssl/sha.h>
@ -75,10 +81,59 @@ CryptoFunctions Layer92::crypto={
namespace TGVOIP_NAMESPACE {
#endif
// rtc::LogSink that accumulates all WebRTC log output into an in-memory
// stream; the collected text is returned as the call's debug log when the
// call stops.
class LogSinkImpl : public rtc::LogSink {
public:
LogSinkImpl() {
}
virtual ~LogSinkImpl() {
}
// Tagged variant folds the tag into the message; severity is dropped.
virtual void OnLogMessage(const std::string &msg, rtc::LoggingSeverity severity, const char *tag) override {
OnLogMessage(std::string(tag) + ": " + msg);
}
virtual void OnLogMessage(const std::string &message, rtc::LoggingSeverity severity) override {
OnLogMessage(message);
}
// Prefixes each entry with a local-time timestamp at millisecond
// resolution. Fields are deliberately written unpadded (e.g. "2020-6-3"),
// and no separator is appended here — NOTE(review): this assumes messages
// arrive newline-terminated from rtc::LogMessage; verify against the
// logging implementation in use.
virtual void OnLogMessage(const std::string &message) override {
time_t rawTime;
time(&rawTime);
struct tm timeinfo;
localtime_r(&rawTime, &timeinfo);
timeval curTime;
gettimeofday(&curTime, nullptr);
int32_t milliseconds = curTime.tv_usec / 1000;
_data << (timeinfo.tm_year + 1900);
_data << "-" << (timeinfo.tm_mon + 1);
_data << "-" << (timeinfo.tm_mday);
_data << " " << timeinfo.tm_hour;
_data << ":" << timeinfo.tm_min;
_data << ":" << timeinfo.tm_sec;
_data << ":" << milliseconds;
_data << " " << message;
}
public:
// Accumulated log text; read out via _data.str() at call teardown.
std::ostringstream _data;
};
// Creates the single process-wide manager thread. The unique_ptr is a
// function-local static, so the thread is created exactly once and
// intentionally lives for the remainder of the process.
static rtc::Thread *makeManagerThread() {
static std::unique_ptr<rtc::Thread> value = rtc::Thread::Create();
value->SetName("WebRTC-Manager", nullptr);
value->Start();
return value.get();
}
// Returns the shared, lazily created manager thread (see makeManagerThread).
static rtc::Thread *getManagerThread() {
static rtc::Thread *value = makeManagerThread();
return value;
}
class TgVoipImpl : public TgVoip, public sigslot::has_slots<> {
private:
public:
TgVoipImpl(
std::vector<TgVoipEndpoint> const &endpoints,
@ -86,88 +141,48 @@ public:
std::unique_ptr<TgVoipProxy> const &proxy,
TgVoipConfig const &config,
TgVoipEncryptionKey const &encryptionKey,
TgVoipNetworkType initialNetworkType
) {
TgVoipNetworkType initialNetworkType,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted
) :
_stateUpdated(stateUpdated),
_signalingDataEmitted(signalingDataEmitted) {
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
rtc::LogMessage::LogToDebug(rtc::LS_INFO);
rtc::LogMessage::SetLogToStderr(true);
});
/*EncryptionKey encryptionKeyValue;
memcpy(encryptionKeyValue, encryptionKey.value.data(), 256);*/
controller_ = new Controller(encryptionKey.isOutgoing, 5, 3);
if (proxy != nullptr) {
controller_->SetProxy(rtc::ProxyType::PROXY_SOCKS5, rtc::SocketAddress(proxy->host, proxy->port),
proxy->login, proxy->password);
}
controller_->SignalNewState.connect(this, &TgVoipImpl::controllerStateCallback);
controller_->SignalCandidatesGathered.connect(this, &TgVoipImpl::candidatesGathered);
controller_->Start();
for (const auto &endpoint : endpoints) {
rtc::SocketAddress addr(endpoint.host.ipv4, endpoint.port);
Controller::EndpointType type;
switch (endpoint.type) {
case TgVoipEndpointType::UdpRelay:
type = Controller::EndpointType::UDP;
break;
case TgVoipEndpointType::Lan:
case TgVoipEndpointType::Inet:
type = Controller::EndpointType::P2P;
break;
case TgVoipEndpointType::TcpRelay:
type = Controller::EndpointType::TCP;
break;
default:
type = Controller::EndpointType::UDP;
break;
}
//controller_->AddEndpoint(addr, endpoint.peerTag, type);
}
/*rtc::SocketAddress addr("192.168.8.118", 7325);
unsigned char peerTag[16];
controller_->AddEndpoint(addr, peerTag, Controller::EndpointType::P2P);*/
setNetworkType(initialNetworkType);
switch (config.dataSaving) {
case TgVoipDataSaving::Mobile:
controller_->SetDataSaving(true);
break;
case TgVoipDataSaving::Always:
controller_->SetDataSaving(true);
break;
default:
controller_->SetDataSaving(false);
break;
}
rtc::LogMessage::AddLogToStream(&_logSink, rtc::LS_INFO);
bool enableP2P = config.enableP2P;
_manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, signalingDataEmitted](){
return new Manager(
getManagerThread(),
encryptionKey,
enableP2P,
[stateUpdated](const TgVoipState &state) {
stateUpdated(state);
},
[signalingDataEmitted](const std::vector<uint8_t> &data) {
signalingDataEmitted(data);
}
);
}));
_manager->perform([](Manager *manager) {
manager->start();
});
}
~TgVoipImpl() override {
stop();
}
void setOnStateUpdated(std::function<void(TgVoipState)> onStateUpdated) override {
std::lock_guard<std::mutex> lock(m_onStateUpdated);
onStateUpdated_ = onStateUpdated;
}
void setOnSignalBarsUpdated(std::function<void(int)> onSignalBarsUpdated) override {
std::lock_guard<std::mutex> lock(m_onSignalBarsUpdated);
onSignalBarsUpdated_ = onSignalBarsUpdated;
rtc::LogMessage::RemoveLogToStream(&_logSink);
}
void setOnCandidatesGathered(std::function<void(const std::vector<std::string> &)> onCandidatesGathered) override {
onCandidatesGathered_ = onCandidatesGathered;
}
void addRemoteCandidates(const std::vector<std::string> &candidates) override {
controller_->AddRemoteCandidates(candidates);
}
void receiveSignalingData(const std::vector<uint8_t> &data) override {
_manager->perform([data](Manager *manager) {
manager->receiveSignalingData(data);
});
};
void setNetworkType(TgVoipNetworkType networkType) override {
/*message::NetworkType mappedType;
@ -218,11 +233,19 @@ public:
}
void setMuteMicrophone(bool muteMicrophone) override {
controller_->SetMute(muteMicrophone);
//controller_->SetMute(muteMicrophone);
}
void AttachVideoView(VideoMetalView *videoView) override {
controller_->AttachVideoView([videoView getSink]);
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
_manager->perform([sink](Manager *manager) {
manager->setIncomingVideoOutput(sink);
});
}
void setOutgoingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override {
_manager->perform([sink](Manager *manager) {
manager->setOutgoingVideoOutput(sink);
});
}
void setAudioOutputGainControlEnabled(bool enabled) override {
@ -252,16 +275,14 @@ public:
}
TgVoipFinalState stop() override {
TgVoipFinalState finalState = {
};
delete controller_;
controller_ = nullptr;
TgVoipFinalState finalState;
finalState.debugLog = _logSink._data.str();
finalState.isRatingSuggested = false;
return finalState;
}
void controllerStateCallback(Controller::State state) {
/*void controllerStateCallback(Controller::State state) {
if (onStateUpdated_) {
TgVoipState mappedState;
switch (state) {
@ -287,44 +308,14 @@ public:
onStateUpdated_(mappedState);
}
}
}*/
private:
std::unique_ptr<ThreadLocalObject<Manager>> _manager;
std::function<void(TgVoipState)> _stateUpdated;
std::function<void(const std::vector<uint8_t> &)> _signalingDataEmitted;
void candidatesGathered(const std::vector<std::string> &candidates) {
onCandidatesGathered_(candidates);
}
private:
#ifdef TGVOIP_USE_CALLBACK_AUDIO_IO
TgVoipAudioDataCallbacks audioCallbacks;
void play(const int16_t *data, size_t size) {
if (!audioCallbacks.output)
return;
int16_t buf[size];
memcpy(buf, data, size * 2);
audioCallbacks.output(buf, size);
}
void record(int16_t *data, size_t size) {
if (audioCallbacks.input)
audioCallbacks.input(data, size);
}
void preprocessed(const int16_t *data, size_t size) {
if (!audioCallbacks.preprocessed)
return;
int16_t buf[size];
memcpy(buf, data, size * 2);
audioCallbacks.preprocessed(buf, size);
}
#endif
private:
Controller *controller_;
std::function<void(TgVoipState)> onStateUpdated_;
std::function<void(int)> onSignalBarsUpdated_;
std::function<void(const std::vector<std::string> &)> onCandidatesGathered_;
std::mutex m_onStateUpdated, m_onSignalBarsUpdated;
LogSinkImpl _logSink;
};
std::function<void(std::string const &)> globalLoggingFunction;
@ -368,7 +359,9 @@ TgVoip *TgVoip::makeInstance(
std::vector<TgVoipEndpoint> const &endpoints,
std::unique_ptr<TgVoipProxy> const &proxy,
TgVoipNetworkType initialNetworkType,
TgVoipEncryptionKey const &encryptionKey
TgVoipEncryptionKey const &encryptionKey,
std::function<void(TgVoipState)> stateUpdated,
std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted
) {
return new TgVoipImpl(
endpoints,
@ -376,7 +369,9 @@ TgVoip *TgVoip::makeInstance(
proxy,
config,
encryptionKey,
initialNetworkType
initialNetworkType,
stateUpdated,
signalingDataEmitted
);
}

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,72 @@
#ifndef TGVOIP_WEBRTC_THREAD_LOCAL_OBJECT_H
#define TGVOIP_WEBRTC_THREAD_LOCAL_OBJECT_H
#include "rtc_base/thread.h"
#include <functional>
#include <memory>
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
// Owns a single instance of T that is created, used, and destroyed
// exclusively on one rtc::Thread. All access goes through perform()/
// performSync(), which marshal a functor onto that thread, so callers
// never touch the T directly from their own thread.
template<class T>
class ThreadLocalObject {
private:
    // Indirection holding the owned value. Kept as a separately
    // heap-allocated object so the destructor can release the value on
    // the owning thread before freeing the holder itself.
    template<class TV>
    class ValueHolder {
    public:
        std::shared_ptr<TV> _value;
    };
public:
    // Creates the wrapped object by running `generator` on `thread`.
    // Blocks the calling thread until construction completes
    // (rtc::Thread::Invoke is synchronous).
    ThreadLocalObject(rtc::Thread *thread, std::function<T *()> generator) :
    _thread(thread) {
        assert(_thread != nullptr);
        _valueHolder = new ThreadLocalObject::ValueHolder<T>();
        //ValueHolder<T> *valueHolder = _valueHolder;
        _thread->Invoke<void>(RTC_FROM_HERE, [this, generator](){
            // reset() takes ownership of the raw pointer returned by the
            // generator; T is destroyed later via the same shared_ptr.
            this->_valueHolder->_value.reset(generator());
        });
    }
    // Destroys the wrapped T on the owning thread (blocking the caller),
    // then frees the holder on the calling thread. The local copy of the
    // holder pointer is taken before Invoke so the delete below does not
    // depend on `this` after the value has been released.
    ~ThreadLocalObject() {
        ValueHolder<T> *valueHolder = _valueHolder;
        _thread->Invoke<void>(RTC_FROM_HERE, [this](){
            this->_valueHolder->_value.reset();
        });
        delete valueHolder;
    }
    // Runs `functor(T *)` on the owning thread.
    // NOTE(review): despite the name, this is currently SYNCHRONOUS —
    // the commented-out PostTask code below suggests an asynchronous
    // fire-and-forget was intended, but as written this blocks the
    // caller exactly like performSync(). Confirm before relying on
    // either blocking or non-blocking behavior.
    template <class FunctorT>
    void perform(FunctorT&& functor) {
        //ValueHolder<T> *valueHolder = _valueHolder;
        /*_thread->PostTask(RTC_FROM_HERE, [valueHolder, f = std::forward<std::function<void(T &)>>(f)](){
            T *value = valueHolder->_value;
            assert(value != nullptr);
            f(*value);
        });*/
        _thread->Invoke<void>(RTC_FROM_HERE, [this, f = std::forward<FunctorT>(functor)](){
            assert(_valueHolder->_value != nullptr);
            // The raw pointer stays valid for the duration of the call:
            // the shared_ptr owner lives in _valueHolder on this thread.
            f(_valueHolder->_value.get());
        });
    }
    // Runs `functor(T *)` on the owning thread and blocks the calling
    // thread until it returns. Currently identical in behavior to
    // perform() (see note above).
    template <class FunctorT>
    void performSync(FunctorT&& functor) {
        _thread->Invoke<void>(RTC_FROM_HERE, [this, f = std::forward<FunctorT>(functor)](){
            assert(_valueHolder->_value != nullptr);
            f(_valueHolder->_value.get());
        });
    }
private:
    rtc::Thread *_thread;       // owning thread; not owned by this object
    ValueHolder<T> *_valueHolder; // heap-allocated; freed in the destructor
};
#ifdef TGVOIP_NAMESPACE
}
#endif
#endif

View File

@ -6,6 +6,8 @@
#import "api/media_stream_interface.h"
#include <memory>
@class RTCVideoFrame;
@interface VideoMetalView : UIView
@ -17,9 +19,7 @@
- (void)setSize:(CGSize)size;
- (void)renderFrame:(nullable RTCVideoFrame *)frame;
- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track;
- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)getSink;
- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink;
@end

View File

@ -23,26 +23,22 @@
class VideoRendererAdapterImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRendererAdapterImpl(VideoMetalView *adapter) {
adapter_ = adapter;
size_ = CGSizeZero;
VideoRendererAdapterImpl(void (^frameReceived)(CGSize, RTCVideoFrame *)) {
_frameReceived = [frameReceived copy];
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
CGSize current_size = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);
CGSize currentSize = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);
if (!CGSizeEqualToSize(size_, current_size)) {
size_ = current_size;
[adapter_ setSize:size_];
if (_frameReceived) {
_frameReceived(currentSize, videoFrame);
}
[adapter_ renderFrame:videoFrame];
}
private:
__weak VideoMetalView *adapter_;
CGSize size_;
void (^_frameReceived)(CGSize, RTCVideoFrame *);
};
@interface VideoMetalView () <MTKViewDelegate> {
@ -54,7 +50,8 @@ private:
CGSize _videoFrameSize;
int64_t _lastFrameTimeNs;
std::unique_ptr<VideoRendererAdapterImpl> _sink;
CGSize _currentSize;
std::shared_ptr<VideoRendererAdapterImpl> _sink;
}
@end
@ -66,7 +63,23 @@ private:
if (self) {
[self configure];
_sink.reset(new VideoRendererAdapterImpl(self));
_currentSize = CGSizeZero;
__weak VideoMetalView *weakSelf = self;
_sink.reset(new VideoRendererAdapterImpl(^(CGSize size, RTCVideoFrame *videoFrame) {
dispatch_async(dispatch_get_main_queue(), ^{
__strong VideoMetalView *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
if (!CGSizeEqualToSize(size, strongSelf->_currentSize)) {
strongSelf->_currentSize = size;
[strongSelf setSize:size];
}
[strongSelf renderFrame:videoFrame];
});
}));
}
return self;
}
@ -239,23 +252,19 @@ private:
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
__weak VideoMetalView *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
__strong VideoMetalView *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
strongSelf->_videoFrameSize = size;
CGSize drawableSize = [strongSelf drawableSize];
strongSelf->_metalView.drawableSize = drawableSize;
[strongSelf setNeedsLayout];
//[strongSelf.delegate videoView:self didChangeVideoSize:size];
});
assert([NSThread isMainThread]);
_videoFrameSize = size;
CGSize drawableSize = [self drawableSize];
_metalView.drawableSize = drawableSize;
[self setNeedsLayout];
//[strongSelf.delegate videoView:self didChangeVideoSize:size];
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
assert([NSThread isMainThread]);
if (!self.isEnabled) {
return;
}
@ -267,12 +276,10 @@ private:
_videoFrame = frame;
}
- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track {
track->AddOrUpdateSink(_sink.get(), rtc::VideoSinkWants());
}
- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)getSink {
return _sink.get();
- (std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>>)getSink {
assert([NSThread isMainThread]);
return _sink;
}
@end

View File

@ -76,7 +76,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)setIsMuted:(bool)isMuted;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion;
- (void)addSignalingData:(NSData * _Nonnull)data;
@end

View File

@ -1,6 +1,7 @@
#import <TgVoip/OngoingCallThreadLocalContext.h>
#import "TgVoip.h"
#import "VideoMetalView.h"
using namespace TGVOIP_NAMESPACE;
@ -189,41 +190,35 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
.isOutgoing = isOutgoing,
};
__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
_tgVoip = TgVoip::makeInstance(
config,
{ derivedStateValue },
endpoints,
proxyValue,
callControllerNetworkTypeForType(networkType),
encryptionKey
encryptionKey,
[weakSelf, queue](TgVoipState state) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
[strongSelf controllerStateChanged:state];
}
}];
},
[weakSelf, queue](const std::vector<uint8_t> &data) {
NSData *mappedData = [[NSData alloc] initWithBytes:data.data() length:data.size()];
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
[strongSelf signalingDataEmitted:mappedData];
}
}];
}
);
_state = OngoingCallStateInitializing;
_signalBars = -1;
__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
_tgVoip->setOnStateUpdated([weakSelf](TgVoipState state) {
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
[strongSelf controllerStateChanged:state];
}
});
_tgVoip->setOnSignalBarsUpdated([weakSelf](int signalBars) {
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
[strongSelf signalBarsChanged:signalBars];
}
});
_tgVoip->setOnCandidatesGathered([weakSelf](const std::vector<std::string> &candidates) {
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
NSMutableArray *mappedCandidates = [[NSMutableArray alloc] init];
for (auto &candidate : candidates) {
[mappedCandidates addObject:[[NSString alloc] initWithCString:candidate.c_str() encoding:NSUTF8StringEncoding]];
}
[strongSelf candidatesGathered:mappedCandidates];
}
});
}
return self;
}
@ -320,27 +315,18 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)candidatesGathered:(NSArray<NSString *> *)candidates {
- (void)signalingDataEmitted:(NSData *)data {
if (_sendSignalingData) {
NSData *data = [NSKeyedArchiver archivedDataWithRootObject:@{
@"type": @"candidates",
@"data": candidates
}];
_sendSignalingData(data);
}
}
- (void)addSignalingData:(NSData *)data {
NSDictionary *dict = [NSKeyedUnarchiver unarchiveObjectWithData:data];
NSString *type = dict[@"type"];
if ([type isEqualToString:@"candidates"]) {
if (_tgVoip) {
std::vector<std::string> candidates;
for (NSString *string in dict[@"data"]) {
candidates.push_back([string UTF8String]);
}
_tgVoip->addRemoteCandidates(candidates);
}
if (_tgVoip) {
std::vector<uint8_t> mappedData;
mappedData.resize(data.length);
[data getBytes:mappedData.data() length:data.length];
_tgVoip->receiveSignalingData(mappedData);
}
}
@ -359,17 +345,38 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
if (_tgVoip) {
__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
_tgVoip->AttachVideoView(remoteRenderer);
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_tgVoip->setIncomingVideoOutput(sink);
}
dispatch_async(dispatch_get_main_queue(), ^{
completion(remoteRenderer);
});
completion(remoteRenderer);
});
}
}
- (void)makeOutgoingVideoView:(void (^_Nonnull)(UIView * _Nullable))completion {
if (_tgVoip) {
__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectZero];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink = [remoteRenderer getSink];
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (strongSelf) {
strongSelf->_tgVoip->setOutgoingVideoOutput(sink);
}
completion(remoteRenderer);
});
}
}