Working video calls

This commit is contained in:
Ali 2020-05-11 16:30:17 +04:00
parent d62fb29488
commit 346b8160c4
10 changed files with 1046 additions and 26 deletions

View File

@ -1,4 +1,5 @@
import Foundation
import UIKit
import Postbox
import TelegramCore
import SyncCore
@ -45,6 +46,8 @@ public protocol PresentationCall: class {
func setIsMuted(_ value: Bool)
func setCurrentAudioOutput(_ output: AudioSessionOutput)
func debugInfo() -> Signal<(String, String), NoError>
func getVideoView(completion: @escaping (UIView?) -> Void)
}
public protocol PresentationCallManager: class {

View File

@ -106,7 +106,7 @@ public final class CallController: ViewController {
}
override public func loadDisplayNode() {
self.displayNode = CallControllerNode(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess)
self.displayNode = CallControllerNode(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)
self.displayNodeDidLoad()
self.controllerNode.toggleMute = { [weak self] in

View File

@ -25,11 +25,14 @@ final class CallControllerNode: ASDisplayNode {
private let debugInfo: Signal<(String, String), NoError>
private var forceReportRating = false
private let easyDebugAccess: Bool
private let call: PresentationCall
private let containerNode: ASDisplayNode
private let imageNode: TransformImageNode
private let dimNode: ASDisplayNode
private var videoView: UIView?
private var videoViewRequested: Bool = false
private let backButtonArrowNode: ASImageNode
private let backButtonNode: HighlightableButtonNode
private let statusNode: CallControllerStatusNode
@ -64,7 +67,7 @@ final class CallControllerNode: ASDisplayNode {
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool) {
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
self.sharedContext = sharedContext
self.account = account
self.presentationData = presentationData
@ -72,6 +75,7 @@ final class CallControllerNode: ASDisplayNode {
self.debugInfo = debugInfo
self.shouldStayHiddenUntilConnection = shouldStayHiddenUntilConnection
self.easyDebugAccess = easyDebugAccess
self.call = call
self.containerNode = ASDisplayNode()
if self.shouldStayHiddenUntilConnection {
@ -81,7 +85,7 @@ final class CallControllerNode: ASDisplayNode {
self.imageNode = TransformImageNode()
self.imageNode.contentAnimations = [.subsequentUpdates]
self.dimNode = ASDisplayNode()
self.dimNode.isLayerBacked = true
self.dimNode.isUserInteractionEnabled = false
self.dimNode.backgroundColor = UIColor(white: 0.0, alpha: 0.4)
self.backButtonArrowNode = ASImageNode()
@ -261,6 +265,21 @@ final class CallControllerNode: ASDisplayNode {
}
}
statusReception = reception
if !self.videoViewRequested {
self.videoViewRequested = true
self.call.getVideoView(completion: { [weak self] videoView in
guard let strongSelf = self else {
return
}
if let videoView = videoView {
strongSelf.videoView = videoView
strongSelf.containerNode.view.insertSubview(videoView, aboveSubview: strongSelf.dimNode.view)
if let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
})
}
}
switch callState {
case .terminated, .terminating:
@ -368,6 +387,10 @@ final class CallControllerNode: ASDisplayNode {
transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: layout.size))
transition.updateFrame(node: self.dimNode, frame: CGRect(origin: CGPoint(), size: layout.size))
if let videoView = self.videoView {
videoView.frame = CGRect(origin: CGPoint(), size: layout.size)
}
if let keyPreviewNode = self.keyPreviewNode {
transition.updateFrame(node: keyPreviewNode, frame: CGRect(origin: CGPoint(), size: layout.size))
keyPreviewNode.updateLayout(size: layout.size, transition: .immediate)

View File

@ -1,4 +1,5 @@
import Foundation
import UIKit
import Postbox
import TelegramCore
import SyncCore
@ -671,4 +672,8 @@ public final class PresentationCallImpl: PresentationCall {
public func debugInfo() -> Signal<(String, String), NoError> {
return self.debugInfoValue.get()
}
/// Forwards the remote-video-view request to the active call context.
/// NOTE(review): when `ongoingContext` is nil the completion is never
/// invoked — confirm callers tolerate a dropped callback.
public func getVideoView(completion: @escaping (UIView?) -> Void) {
    self.ongoingContext?.getVideoView(completion: completion)
}
}

View File

@ -14,6 +14,7 @@ swift_library(
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/TgVoip:TgVoip",
"//submodules/TgVoipWebrtc:TgVoipWebrtc",
"//submodules/TgVoipWebrtcCustom:TgVoipWebrtcCustom",
],
visibility = [
"//visibility:public",

View File

@ -1,4 +1,5 @@
import Foundation
import UIKit
import SwiftSignalKit
import TelegramCore
import SyncCore
@ -7,6 +8,7 @@ import TelegramUIPreferences
import TgVoip
import TgVoipWebrtc
import TgVoipWebrtcCustom
private func callConnectionDescription(_ connection: CallSessionConnection) -> OngoingCallConnectionDescription {
return OngoingCallConnectionDescription(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
@ -16,6 +18,10 @@ private func callConnectionDescriptionWebrtc(_ connection: CallSessionConnection
return OngoingCallConnectionDescriptionWebrtc(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
}
/// Converts a session-level `CallSessionConnection` into the custom-WebRTC
/// context's connection description (same fields, different module type).
private func callConnectionDescriptionWebrtcCustom(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtcCustom {
    return OngoingCallConnectionDescriptionWebrtcCustom(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
}
private let callLogsLimit = 20
public func callLogNameForId(id: Int64, account: Account) -> String? {
@ -79,6 +85,11 @@ private let setupLogs: Bool = {
Logger.shared.log("TGVOIP", value)
}
})
OngoingCallThreadLocalContextWebrtcCustom.setupLoggingFunction({ value in
if let value = value {
Logger.shared.log("TGVOIP", value)
}
})
return true
}()
@ -89,7 +100,7 @@ public enum OngoingCallContextState {
case failed
}
private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue {
private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue, OngoingCallThreadLocalContextQueueWebrtc, OngoingCallThreadLocalContextQueueWebrtcCustom {
private let queue: Queue
init(queue: Queue) {
@ -104,24 +115,8 @@ private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCal
}
}
func isCurrent() -> Bool {
return self.queue.isCurrent()
}
}
private final class OngoingCallThreadLocalContextQueueWebrtcImpl: NSObject, OngoingCallThreadLocalContextQueueWebrtc {
private let queue: Queue
init(queue: Queue) {
self.queue = queue
super.init()
}
func dispatch(_ f: @escaping () -> Void) {
self.queue.async {
f()
}
func dispatch(after seconds: Double, block f: @escaping () -> Void) {
self.queue.after(seconds, f)
}
func isCurrent() -> Bool {
@ -169,6 +164,26 @@ private func ongoingNetworkTypeForTypeWebrtc(_ type: NetworkType) -> OngoingCall
}
}
/// Maps the app-level `NetworkType` onto the custom-WebRTC context's
/// network-type enum.
/// NOTE(review): `.none` deliberately reads as `.wifi` here — confirm
/// this matches the mapping used by the other call backends.
private func ongoingNetworkTypeForTypeWebrtcCustom(_ type: NetworkType) -> OngoingCallNetworkTypeWebrtcCustom {
    switch type {
    case .none, .wifi:
        return .wifi
    case let .cellular(kind):
        // Collapse the cellular sub-kinds onto the context's coarser enum.
        switch kind {
        case .gprs:
            return .cellularGprs
        case .edge:
            return .cellularEdge
        case .thirdG, .unknown:
            return .cellular3g
        case .lte:
            return .cellularLte
        }
    }
}
private func ongoingDataSavingForType(_ type: VoiceCallDataSaving) -> OngoingCallDataSaving {
switch type {
case .never:
@ -195,6 +210,19 @@ private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> Ongo
}
}
/// Maps the user's data-saving preference onto the custom-WebRTC
/// context's equivalent enum. Unrecognized values conservatively map to
/// `.never` (matching the original catch-all).
private func ongoingDataSavingForTypeWebrtcCustom(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtcCustom {
    if case .cellular = type {
        return .cellular
    }
    if case .always = type {
        return .always
    }
    // `.never` and any future cases fall through to "no data saving".
    return .never
}
private protocol OngoingCallThreadLocalContextProtocol: class {
func nativeSetNetworkType(_ type: NetworkType)
func nativeSetIsMuted(_ value: Bool)
@ -264,6 +292,32 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
}
}
// MARK: - OngoingCallThreadLocalContextProtocol
// Adapts the Objective-C custom-WebRTC context to the shared Swift-side
// protocol so OngoingCallContext can drive every backend uniformly.
extension OngoingCallThreadLocalContextWebrtcCustom: OngoingCallThreadLocalContextProtocol {
    // Translates the app-level network type before forwarding.
    func nativeSetNetworkType(_ type: NetworkType) {
        self.setNetworkType(ongoingNetworkTypeForTypeWebrtcCustom(type))
    }

    func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void) {
        self.stop(completion)
    }

    func nativeSetIsMuted(_ value: Bool) {
        self.setIsMuted(value)
    }

    // The Objective-C side returns nullable strings; the protocol does not,
    // so nil collapses to "".
    func nativeDebugInfo() -> String {
        return self.debugInfo() ?? ""
    }

    func nativeVersion() -> String {
        return self.version() ?? ""
    }

    func nativeGetDerivedState() -> Data {
        return self.getDerivedState()
    }
}
private extension OngoingCallContextState {
init(_ state: OngoingCallState) {
switch state {
@ -298,6 +352,23 @@ private extension OngoingCallContextState {
}
}
private extension OngoingCallContextState {
    /// Bridges the custom-WebRTC context's Objective-C state enum to the
    /// shared Swift-side call state.
    init(_ state: OngoingCallStateWebrtcCustom) {
        switch state {
        case .initializing:
            self = .initializing
        case .connected:
            self = .connected
        case .failed:
            self = .failed
        case .reconnecting:
            self = .reconnecting
        default:
            // NS_ENUM values are non-frozen from Swift's perspective, so an
            // unexpected raw value conservatively reads as failed.
            self = .failed
        }
    }
}
public final class OngoingCallContext {
public let internalId: CallSessionInternalId
@ -325,7 +396,7 @@ public final class OngoingCallContext {
}
public static var versions: [String] {
return [OngoingCallThreadLocalContext.version(), OngoingCallThreadLocalContextWebrtc.version()]
return [OngoingCallThreadLocalContext.version(), OngoingCallThreadLocalContextWebrtc.version(), OngoingCallThreadLocalContextWebrtcCustom.version()]
}
public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
@ -347,7 +418,33 @@ public final class OngoingCallContext {
|> take(1)
|> deliverOn(queue)).start(next: { [weak self] _ in
if let strongSelf = self {
if version == OngoingCallThreadLocalContextWebrtc.version() {
if version == OngoingCallThreadLocalContextWebrtcCustom.version() {
var voipProxyServer: VoipProxyServerWebrtcCustom?
if let proxyServer = proxyServer {
switch proxyServer.connection {
case let .socks5(username, password):
voipProxyServer = VoipProxyServerWebrtcCustom(host: proxyServer.host, port: proxyServer.port, username: username, password: password)
case .mtp:
break
}
}
let context = OngoingCallThreadLocalContextWebrtcCustom(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtcCustom(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtcCustom(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtcCustom(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtcCustom), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath)
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { state in
self?.contextState.set(.single(OngoingCallContextState(state)))
}
context.signalBarsChanged = { signalBars in
self?.receptionPromise.set(.single(signalBars))
}
strongSelf.networkTypeDisposable = (updatedNetworkType
|> deliverOn(queue)).start(next: { networkType in
self?.withContext { context in
context.nativeSetNetworkType(networkType)
}
})
} else if version == OngoingCallThreadLocalContextWebrtc.version() {
var voipProxyServer: VoipProxyServerWebrtc?
if let proxyServer = proxyServer {
switch proxyServer.connection {
@ -357,7 +454,7 @@ public final class OngoingCallContext {
break
}
}
let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueWebrtcImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath)
let context = OngoingCallThreadLocalContextWebrtc(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtc(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtc(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtc(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtc), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath)
strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
context.stateChanged = { state in
@ -466,5 +563,13 @@ public final class OngoingCallContext {
}
return (poll |> then(.complete() |> delay(0.5, queue: Queue.concurrentDefaultQueue()))) |> restart
}
/// Asynchronously fetches the remote camera view from the active call
/// context. Only the custom-WebRTC backend supports video; for the other
/// backends the completion is never invoked as written.
/// TODO(review): consider calling completion(nil) in the non-custom case
/// so callers are not left waiting indefinitely.
public func getVideoView(completion: @escaping (UIView?) -> Void) {
    self.withContext { context in
        if let context = context as? OngoingCallThreadLocalContextWebrtcCustom {
            context.getRemoteCameraView(completion)
        }
    }
}
}

View File

@ -0,0 +1,43 @@
# Experimental custom WebRTC call backend, built directly against the
# checked-in webrtc-ios source tree.
objc_library(
    name = "TgVoipWebrtcCustom",
    enable_modules = True,
    module_name = "TgVoipWebrtcCustom",
    # Implementation files plus private headers.
    srcs = glob([
        "Sources/**/*.m",
        "Sources/**/*.mm",
        "Sources/**/*.h",
    ]),
    # Headers exposed to dependents (the Swift call layer).
    hdrs = glob([
        "PublicHeaders/**/*.h",
    ]),
    copts = [
        "-I{}/Impl".format(package_name()),
        # WebRTC and its vendored abseil / ObjC SDK include roots.
        "-Ithird-party/webrtc/webrtc-ios/src",
        "-Ithird-party/webrtc/webrtc-ios/src/third_party/abseil-cpp",
        "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc",
        "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/base",
        # Platform defines expected by the WebRTC headers.
        "-DWEBRTC_IOS",
        "-DWEBRTC_MAC",
        "-DWEBRTC_POSIX",
    ],
    includes = [
        "PublicHeaders",
    ],
    deps = [
        "//third-party/webrtc:webrtc_lib",
        "//submodules/MtProtoKit:MtProtoKit",
    ],
    sdk_frameworks = [
        "Foundation",
        "UIKit",
        "AudioToolbox",
        "VideoToolbox",
        "CoreTelephony",
        "CoreMedia",
        "AVFoundation",
    ],
    visibility = [
        "//visibility:public",
    ],
)

View File

@ -0,0 +1,84 @@
#ifndef OngoingCallContext_h
#define OngoingCallContext_h

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

// One relay/endpoint of a call as handed over from the Swift side
// (mirrors the fields of CallSessionConnection).
@interface OngoingCallConnectionDescriptionWebrtcCustom : NSObject

@property (nonatomic, readonly) int64_t connectionId;
@property (nonatomic, strong, readonly) NSString * _Nonnull ip;
@property (nonatomic, strong, readonly) NSString * _Nonnull ipv6;
@property (nonatomic, readonly) int32_t port;
@property (nonatomic, strong, readonly) NSData * _Nonnull peerTag;

- (instancetype _Nonnull)initWithConnectionId:(int64_t)connectionId ip:(NSString * _Nonnull)ip ipv6:(NSString * _Nonnull)ipv6 port:(int32_t)port peerTag:(NSData * _Nonnull)peerTag;

@end

// NOTE(review): these enum constants carry no "WebrtcCustom" suffix;
// verify they cannot collide with similarly named constants from the
// TgVoip / TgVoipWebrtc headers when imported into the same translation
// unit (the implementation file imports TgVoip's header as well).
typedef NS_ENUM(int32_t, OngoingCallStateWebrtcCustom) {
    OngoingCallStateInitializing,
    OngoingCallStateConnected,
    OngoingCallStateFailed,
    OngoingCallStateReconnecting
};

typedef NS_ENUM(int32_t, OngoingCallNetworkTypeWebrtcCustom) {
    OngoingCallNetworkTypeWifi,
    OngoingCallNetworkTypeCellularGprs,
    OngoingCallNetworkTypeCellularEdge,
    OngoingCallNetworkTypeCellular3g,
    OngoingCallNetworkTypeCellularLte
};

typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtcCustom) {
    OngoingCallDataSavingNever,
    OngoingCallDataSavingCellular,
    OngoingCallDataSavingAlways
};

// Serial execution context supplied by the Swift side; all context
// callbacks are funneled through it.
@protocol OngoingCallThreadLocalContextQueueWebrtcCustom <NSObject>

- (void)dispatch:(void (^ _Nonnull)())f;
- (void)dispatchAfter:(double)seconds block:(void (^ _Nonnull)())f;
- (bool)isCurrent;

@end

// SOCKS5 proxy parameters (MTP proxies are not applicable here).
@interface VoipProxyServerWebrtcCustom : NSObject

@property (nonatomic, strong, readonly) NSString * _Nonnull host;
@property (nonatomic, readonly) int32_t port;
@property (nonatomic, strong, readonly) NSString * _Nullable username;
@property (nonatomic, strong, readonly) NSString * _Nullable password;

- (instancetype _Nonnull)initWithHost:(NSString * _Nonnull)host port:(int32_t)port username:(NSString * _Nullable)username password:(NSString * _Nullable)password;

@end

// The custom-WebRTC call context. Mirrors the interface of the TgVoip /
// TgVoipWebrtc contexts so the Swift layer can switch between backends
// by protocol version.
@interface OngoingCallThreadLocalContextWebrtcCustom : NSObject

+ (void)setupLoggingFunction:(void (* _Nullable)(NSString * _Nullable))loggingFunction;
+ (void)applyServerConfig:(NSString * _Nullable)data;
+ (int32_t)maxLayer;
+ (NSString * _Nonnull)version;

@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtcCustom);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);

- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom> _Nonnull)queue proxy:(VoipProxyServerWebrtcCustom * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtcCustom)networkType dataSaving:(OngoingCallDataSavingWebrtcCustom)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtcCustom * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtcCustom *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath;

- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
- (bool)needRate;
- (NSString * _Nullable)debugInfo;
- (NSString * _Nullable)version;
- (NSData * _Nonnull)getDerivedState;

- (void)setIsMuted:(bool)isMuted;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtcCustom)networkType;
// Asynchronously produces a UIView rendering the remote camera track.
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion;

@end

#endif

View File

@ -0,0 +1,756 @@
#import <TgVoip/OngoingCallThreadLocalContext.h>
#import <Foundation/Foundation.h>
#import "api/peerconnection/RTCPeerConnectionFactory.h"
#import "api/peerconnection/RTCSSLAdapter.h"
#import "api/peerconnection/RTCConfiguration.h"
#import "api/peerconnection/RTCIceServer.h"
#import "api/peerconnection/RTCPeerConnection.h"
#import "api/peerconnection/RTCMediaConstraints.h"
#import "api/peerconnection/RTCMediaStreamTrack.h"
#import "api/peerconnection/RTCAudioTrack.h"
#import "api/peerconnection/RTCVideoTrack.h"
#import "api/peerconnection/RTCRtpTransceiver.h"
#import "api/peerconnection/RTCSessionDescription.h"
#import "api/peerconnection/RTCIceCandidate.h"
#import "api/peerconnection/RTCMediaStream.h"
#import "components/video_codec/RTCDefaultVideoDecoderFactory.h"
#import "components/video_codec/RTCDefaultVideoEncoderFactory.h"
#import "components/audio/RTCAudioSession.h"
#import "base/RTCVideoCapturer.h"
#import "api/peerconnection/RTCVideoSource.h"
#import "components/capturer/RTCFileVideoCapturer.h"
#import "components/capturer/RTCCameraVideoCapturer.h"
#import "components/renderer/metal/RTCMTLVideoView.h"
#import "components/renderer/opengl/RTCEAGLVideoView.h"
static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
// Formats a log line and forwards it to the host-app logging function
// installed via +setupLoggingFunction:. Messages are dropped when no
// logging function has been set.
static void voipLog(NSString* format, ...) {
    va_list args;
    va_start(args, format);
    NSString *string = [[NSString alloc] initWithFormat:format arguments:args];
    va_end(args);

    if (InternalVoipLoggingFunction) {
        InternalVoipLoggingFunction(string);
    }
}
@class NativeWebSocketDelegate;
API_AVAILABLE(ios(13.0))
@interface NativeWebSocket : NSObject {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> _queue;
NativeWebSocketDelegate *_socketDelegate;
void (^_receivedData)(NSData *);
NSURLSession *_session;
NSURLSessionWebSocketTask *_socket;
}
@end
API_AVAILABLE(ios(13.0))
@interface NativeWebSocketDelegate: NSObject <NSURLSessionDelegate, NSURLSessionWebSocketDelegate> {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> _queue;
__weak NativeWebSocket *_target;
}
@end
// Stands between NSURLSession and NativeWebSocket so the socket object
// does not have to conform to the session delegate protocols itself; it
// holds only a weak back-reference (`_target`) to avoid a retain cycle
// through the session.
@implementation NativeWebSocketDelegate

- (instancetype)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom>)queue target:(NativeWebSocket *)target {
    self = [super init];
    if (self != nil) {
        _queue = queue;
        _target = target;
    }
    return self;
}

// Open/close notifications are currently ignored; the explicit receive
// loop in NativeWebSocket drives all socket activity.
- (void)URLSession:(NSURLSession *)session webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask didOpenWithProtocol:(NSString *)protocol {
}

- (void)URLSession:(NSURLSession *)session webSocketTask:(NSURLSessionWebSocketTask *)webSocketTask didCloseWithCode:(NSURLSessionWebSocketCloseCode)closeCode reason:(NSData *)reason {
}

@end
// Minimal WebSocket transport used for out-of-band call signalling.
// All callbacks are re-dispatched onto the supplied context queue.
@implementation NativeWebSocket

// queue: serializes all socket callbacks; receivedData: invoked on that
// queue for every data message that arrives.
- (instancetype)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom>)queue receivedData:(void (^)(NSData *))receivedData {
    self = [super init];
    if (self != nil) {
        _queue = queue;
        _receivedData = [receivedData copy];
        _socketDelegate = [[NativeWebSocketDelegate alloc] initWithQueue:queue target:self];
        _session = [NSURLSession sessionWithConfiguration:[NSURLSessionConfiguration defaultSessionConfiguration] delegate:_socketDelegate delegateQueue:nil];
    }
    return self;
}

// Opens the socket and arms the receive loop.
// NOTE(review): the endpoint is a hard-coded LAN address — clearly debug
// scaffolding for this prototype; must be replaced before shipping.
- (void)connect {
    _socket = [_session webSocketTaskWithURL:[[NSURL alloc] initWithString:@"ws://192.168.8.118:8080"]];
    [_socket resume];

    [self readMessage];
}

// Receives a single message, hops onto the context queue, then re-arms
// itself. On error the loop stops for good — no reconnect is attempted.
- (void)readMessage {
    id<OngoingCallThreadLocalContextQueueWebrtcCustom> queue = _queue;
    __weak NativeWebSocket *weakSelf = self;
    [_socket receiveMessageWithCompletionHandler:^(NSURLSessionWebSocketMessage * _Nullable message, NSError * _Nullable error) {
        [queue dispatch:^{
            __strong NativeWebSocket *strongSelf = weakSelf;
            if (strongSelf == nil) {
                return;
            }
            if (error != nil) {
                voipLog(@"WebSocket error: %@", error);
            } else if (message.data != nil) {
                if (strongSelf->_receivedData) {
                    strongSelf->_receivedData(message.data);
                }
                [strongSelf readMessage];
            } else {
                // Non-data (e.g. string) message: ignore and keep listening.
                [strongSelf readMessage];
            }
        }];
    }];
}

// Sends one binary message; send failures are silently ignored.
- (void)sendData:(NSData *)data {
    [_socket sendMessage:[[NSURLSessionWebSocketMessage alloc] initWithData:data] completionHandler:^(__unused NSError * _Nullable error) {
    }];
}

// Cancels the underlying task.
// NOTE(review): method name is a typo for "disconnect" — rename together
// with any callers (callers may be outside this view).
- (void)disconned {
    [_socket cancel];
}

@end
@protocol NativeWebrtcSignallingClientDelegate <NSObject>
@end
API_AVAILABLE(ios(13.0))
@interface NativeWebrtcSignallingClient : NSObject {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> _queue;
NativeWebSocket *_socket;
void (^_didReceiveSessionDescription)(RTCSessionDescription *);
void (^_didReceiveIceCandidate)(RTCIceCandidate *);
}
@property (nonatomic, weak) id<NativeWebrtcSignallingClientDelegate> delegate;
@end
// JSON-over-WebSocket signalling for the custom WebRTC call. Wire format
// (as read/written below): a top-level object with "messageType" of
// either "sessionDescription" ("sdp" + "type" of offer/prAnswer/answer)
// or "iceCandidate" ("sdp", "mLineIndex", optional "sdpMid").
@implementation NativeWebrtcSignallingClient

- (instancetype)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom>)queue didReceiveSessionDescription:(void (^)(RTCSessionDescription *))didReceiveSessionDescription didReceiveIceCandidate:(void (^)(RTCIceCandidate *))didReceiveIceCandidate {
    self = [super init];
    if (self != nil) {
        _queue = queue;
        _didReceiveSessionDescription = [didReceiveSessionDescription copy];
        _didReceiveIceCandidate = [didReceiveIceCandidate copy];

        // The socket delivers raw frames; parsing happens in didReceiveData:.
        __weak NativeWebrtcSignallingClient *weakSelf = self;
        _socket = [[NativeWebSocket alloc] initWithQueue:queue receivedData:^(NSData *data) {
            __strong NativeWebrtcSignallingClient *strongSelf = weakSelf;
            if (strongSelf == nil) {
                return;
            }
            [strongSelf didReceiveData:data];
        }];
    }
    return self;
}

- (void)connect {
    [_socket connect];
}

// Serializes a local session description and sends it to the peer.
- (void)sendSdp:(RTCSessionDescription *)rtcSdp {
    NSMutableDictionary *json = [[NSMutableDictionary alloc] init];
    json[@"messageType"] = @"sessionDescription";
    json[@"sdp"] = rtcSdp.sdp;
    // Note: rollback (or any future RTCSdpType) produces a message with no
    // "type" key, which the receiving side will reject.
    if (rtcSdp.type == RTCSdpTypeOffer) {
        json[@"type"] = @"offer";
    } else if (rtcSdp.type == RTCSdpTypePrAnswer) {
        json[@"type"] = @"prAnswer";
    } else if (rtcSdp.type == RTCSdpTypeAnswer) {
        json[@"type"] = @"answer";
    }
    NSData *data = [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
    if (data != nil) {
        [_socket sendData:data];
    }
}

// Serializes a locally gathered ICE candidate and sends it to the peer.
- (void)sendCandidate:(RTCIceCandidate *)rtcIceCandidate {
    NSMutableDictionary *json = [[NSMutableDictionary alloc] init];
    json[@"messageType"] = @"iceCandidate";
    json[@"sdp"] = rtcIceCandidate.sdp;
    json[@"mLineIndex"] = @(rtcIceCandidate.sdpMLineIndex);
    if (rtcIceCandidate.sdpMid != nil) {
        json[@"sdpMid"] = rtcIceCandidate.sdpMid;
    }
    NSData *data = [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
    if (data != nil) {
        [_socket sendData:data];
    }
}

// Parses an incoming frame and dispatches to the appropriate handler
// block. Malformed or unknown messages are dropped silently.
- (void)didReceiveData:(NSData *)data {
    NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
    if (![json isKindOfClass:[NSDictionary class]]) {
        return;
    }
    NSString *messageType = json[@"messageType"];
    if (![messageType isKindOfClass:[NSString class]]) {
        return;
    }
    if ([messageType isEqualToString:@"sessionDescription"]) {
        NSString *sdp = json[@"sdp"];
        if (![sdp isKindOfClass:[NSString class]]) {
            return;
        }
        NSString *typeString = json[@"type"];
        if (![typeString isKindOfClass:[NSString class]]) {
            return;
        }
        RTCSdpType type;
        if ([typeString isEqualToString:@"offer"]) {
            type = RTCSdpTypeOffer;
        } else if ([typeString isEqualToString:@"prAnswer"]) {
            type = RTCSdpTypePrAnswer;
        } else if ([typeString isEqualToString:@"answer"]) {
            type = RTCSdpTypeAnswer;
        } else {
            return;
        }
        if (_didReceiveSessionDescription) {
            _didReceiveSessionDescription([[RTCSessionDescription alloc] initWithType:type sdp:sdp]);
        }
    } else if ([messageType isEqualToString:@"iceCandidate"]) {
        NSString *sdp = json[@"sdp"];
        if (![sdp isKindOfClass:[NSString class]]) {
            return;
        }
        NSNumber *mLineIndex = json[@"mLineIndex"];
        if (![mLineIndex isKindOfClass:[NSNumber class]]) {
            return;
        }
        NSString *sdpMidString = json[@"sdpMid"];
        NSString *sdpMid = nil;
        if ([sdpMidString isKindOfClass:[NSString class]]) {
            sdpMid = sdpMidString;
        }
        if (_didReceiveIceCandidate) {
            _didReceiveIceCandidate([[RTCIceCandidate alloc] initWithSdp:sdp sdpMLineIndex:[mLineIndex intValue] sdpMid:sdpMid]);
        }
    }
}

@end
@interface NativePeerConnectionDelegate : NSObject <RTCPeerConnectionDelegate> {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> _queue;
void (^_didGenerateIceCandidate)(RTCIceCandidate *);
void (^_didChangeIceState)(OngoingCallStateWebrtcCustom);
}
@end
// Receives RTCPeerConnectionDelegate callbacks (which WebRTC may deliver
// on arbitrary internal threads) and forwards the interesting ones to the
// owning context via blocks.
@implementation NativePeerConnectionDelegate

- (instancetype)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom>)queue didGenerateIceCandidate:(void (^)(RTCIceCandidate *))didGenerateIceCandidate didChangeIceState:(void (^)(OngoingCallStateWebrtcCustom))didChangeIceState {
    self = [super init];
    if (self != nil) {
        _queue = queue;
        _didGenerateIceCandidate = [didGenerateIceCandidate copy];
        _didChangeIceState = [didChangeIceState copy];
    }
    return self;
}

// NOTE(review): the call's reported state is derived from the *signaling*
// state here (RTCSignalingStateStable => Connected), while the actual ICE
// connection state below is only logged — confirm this is the intended
// source of truth for "connected".
// NOTE(review): unlike didGenerateIceCandidate:, this invokes the block
// directly on the callback thread without hopping to _queue — verify.
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeSignalingState:(RTCSignalingState)stateChanged {
    switch (stateChanged) {
        case RTCSignalingStateStable:
            _didChangeIceState(OngoingCallStateConnected);
            break;
        case RTCSignalingStateHaveLocalOffer:
            _didChangeIceState(OngoingCallStateInitializing);
            break;
        case RTCSignalingStateHaveLocalPrAnswer:
            _didChangeIceState(OngoingCallStateInitializing);
            break;
        case RTCSignalingStateHaveRemoteOffer:
            _didChangeIceState(OngoingCallStateInitializing);
            break;
        case RTCSignalingStateHaveRemotePrAnswer:
            _didChangeIceState(OngoingCallStateInitializing);
            break;
        default:
            break;
    }
    voipLog(@"didChangeSignalingState: %d", stateChanged);
}

- (void)peerConnection:(RTCPeerConnection *)peerConnection didAddStream:(RTCMediaStream *)stream {
    voipLog(@"Added stream: %@", stream.streamId);
}

- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveStream:(RTCMediaStream *)stream {
}

- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection {
}

// ICE connection/gathering state changes are logged only (see note above).
- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceConnectionState:(RTCIceConnectionState)newState {
    voipLog(@"IceConnectionState: %d", newState);
}

- (void)peerConnection:(RTCPeerConnection *)peerConnection didChangeIceGatheringState:(RTCIceGatheringState)newState {
    voipLog(@"didChangeIceGatheringState: %d", newState);
}

// Locally gathered candidates are handed to the context on its queue (it
// forwards them to the signalling client).
- (void)peerConnection:(RTCPeerConnection *)peerConnection didGenerateIceCandidate:(RTCIceCandidate *)candidate {
    [_queue dispatch:^{
        _didGenerateIceCandidate(candidate);
    }];
}

- (void)peerConnection:(RTCPeerConnection *)peerConnection didRemoveIceCandidates:(NSArray<RTCIceCandidate *> *)candidates {
}

- (void)peerConnection:(RTCPeerConnection *)peerConnection
    didOpenDataChannel:(RTCDataChannel *)dataChannel {
}

@end
// Plain immutable value object; all fields are set once at init.
@implementation OngoingCallConnectionDescriptionWebrtcCustom

- (instancetype _Nonnull)initWithConnectionId:(int64_t)connectionId ip:(NSString * _Nonnull)ip ipv6:(NSString * _Nonnull)ipv6 port:(int32_t)port peerTag:(NSData * _Nonnull)peerTag {
    self = [super init];
    if (self != nil) {
        _connectionId = connectionId;
        _ip = ip;
        _ipv6 = ipv6;
        _port = port;
        _peerTag = peerTag;
    }
    return self;
}

@end
@interface OngoingCallThreadLocalContextWebrtcCustom () {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> _queue;
int32_t _contextId;
NativePeerConnectionDelegate *_peerConnectionDelegate;
OngoingCallNetworkTypeWebrtcCustom _networkType;
NSTimeInterval _callReceiveTimeout;
NSTimeInterval _callRingTimeout;
NSTimeInterval _callConnectTimeout;
NSTimeInterval _callPacketTimeout;
OngoingCallStateWebrtcCustom _state;
int32_t _signalBars;
NativeWebrtcSignallingClient *_signallingClient;
RTCPeerConnectionFactory *_peerConnectionFactory;
RTCPeerConnection *_peerConnection;
RTCVideoCapturer *_videoCapturer;
RTCVideoTrack *_localVideoTrack;
RTCVideoTrack *_remoteVideoTrack;
bool _receivedRemoteDescription;
}
@end
// Plain immutable value object carrying SOCKS5 proxy parameters.
@implementation VoipProxyServerWebrtcCustom

- (instancetype _Nonnull)initWithHost:(NSString * _Nonnull)host port:(int32_t)port username:(NSString * _Nullable)username password:(NSString * _Nullable)password {
    self = [super init];
    if (self != nil) {
        _host = host;
        _port = port;
        _username = username;
        _password = password;
    }
    return self;
}

@end
@implementation OngoingCallThreadLocalContextWebrtcCustom
// Protocol version string; the Swift layer selects this backend when the
// negotiated call version equals this value.
+ (NSString *)version {
    return @"2.8.8";
}
// Installs the process-wide logging sink used by voipLog().
+ (void)setupLoggingFunction:(void (*)(NSString *))loggingFunction {
    InternalVoipLoggingFunction = loggingFunction;
}
// Server-pushed configuration is not used by this backend; intentionally
// a no-op (kept for interface parity with the other contexts).
+ (void)applyServerConfig:(NSString * _Nullable)__unused data {
}
// Maximum supported call-protocol layer advertised during negotiation.
+ (int32_t)maxLayer {
    return 80;
}
/// Builds the full WebRTC call pipeline on the provided queue: peer-connection
/// factory, local audio + video tracks, a camera (or simulator file) capturer,
/// and an out-of-band signalling client used to exchange SDP and ICE candidates.
/// The caller (isOutgoing) creates and repeatedly re-advertises an offer; the
/// callee answers on receipt of the remote offer.
/// NOTE(review): the proxy, dataSaving, derivedState, key, primaryConnection,
/// alternativeConnections, maxLayer, allowP2P and logPath parameters are unused
/// here — all signalling goes through NativeWebrtcSignallingClient instead;
/// confirm this is intentional for the prototype.
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom> _Nonnull)queue proxy:(VoipProxyServerWebrtcCustom * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtcCustom)networkType dataSaving:(OngoingCallDataSavingWebrtcCustom)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtcCustom * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtcCustom *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath {
    self = [super init];
    if (self != nil) {
        _queue = queue;
        // Must be constructed on its owning queue; delegate callbacks hop back to it.
        assert([queue isCurrent]);
        
        // Fixed call timeouts, in seconds.
        _callReceiveTimeout = 20.0;
        _callRingTimeout = 90.0;
        _callConnectTimeout = 30.0;
        _callPacketTimeout = 10.0;
        _networkType = networkType;
        _state = OngoingCallStateInitializing;
        // -1 means "no signal-strength estimate yet".
        _signalBars = -1;
        
        // SSL must be initialized exactly once per process before any peer
        // connection is created.
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            RTCInitializeSSL();
        });
        
        // The app manages the AVAudioSession itself (manual audio); audio is
        // enabled immediately.
        [RTCAudioSession sharedInstance].useManualAudio = true;
        [RTCAudioSession sharedInstance].isAudioEnabled = true;
        
        RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init];
        RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
        _peerConnectionFactory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory decoderFactory:decoderFactory];
        
        // NOTE(review): a public Google STUN server is hard-coded; the
        // Telegram-provided connection descriptions are ignored.
        NSArray<NSString *> *iceServers = @[
            @"stun:stun.l.google.com:19302"
        ];
        RTCConfiguration *config = [[RTCConfiguration alloc] init];
        config.iceServers = @[
            [[RTCIceServer alloc] initWithURLStrings:iceServers]
        ];
        config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
        config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually;
        RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:@{ @"DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue }];
        
        __weak OngoingCallThreadLocalContextWebrtcCustom *weakSelf = self;
        // Forwards locally gathered ICE candidates to the peer and surfaces
        // ICE connection-state changes through the stateChanged callback.
        _peerConnectionDelegate = [[NativePeerConnectionDelegate alloc] initWithQueue:_queue didGenerateIceCandidate:^(RTCIceCandidate *iceCandidate) {
            __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
            if (strongSelf == nil) {
                return;
            }
            [strongSelf->_signallingClient sendCandidate:iceCandidate];
        } didChangeIceState: ^(OngoingCallStateWebrtcCustom state) {
            __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
            if (strongSelf == nil) {
                return;
            }
            if (strongSelf.stateChanged) {
                strongSelf.stateChanged(state);
            }
        }];
        _peerConnection = [_peerConnectionFactory peerConnectionWithConfiguration:config constraints:constraints delegate:_peerConnectionDelegate];
        
        // Local media: one audio track and one video track on a shared stream id.
        NSString *streamId = @"stream";
        RTCMediaConstraints *audioConstrains = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:nil];
        RTCAudioSource *audioSource = [_peerConnectionFactory audioSourceWithConstraints:audioConstrains];
        RTCAudioTrack * _Nonnull audioTrack = [_peerConnectionFactory audioTrackWithSource:audioSource trackId:@"audio0"];
        [_peerConnection addTrack:audioTrack streamIds:@[streamId]];
        
        RTCVideoSource *videoSource = [_peerConnectionFactory videoSource];
#if TARGET_OS_SIMULATOR
        // The simulator has no camera; frames would come from a file-based capturer.
        _videoCapturer = [[RTCFileVideoCapturer alloc] initWithDelegate:videoSource];
#else
        _videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
#endif
        _localVideoTrack = [_peerConnectionFactory videoTrackWithSource:videoSource trackId:@"video0"];
        [_peerConnection addTrack:_localVideoTrack streamIds:@[streamId]];
        
        NSDictionary *mediaConstraints = @{
            kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueTrue,
            kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue
        };
        RTCMediaConstraints *connectionConstraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mediaConstraints optionalConstraints:nil];
        // Signalling channel. The first remote SDP wins (_receivedRemoteDescription
        // latches) because the peer re-advertises its SDP until acknowledged; the
        // callee answers immediately after setting the remote offer.
        _signallingClient = [[NativeWebrtcSignallingClient alloc] initWithQueue:queue didReceiveSessionDescription:^(RTCSessionDescription *sessionDescription) {
            __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
            if (strongSelf == nil) {
                return;
            }
            if (strongSelf->_receivedRemoteDescription) {
                // Duplicate advertisement — ignore.
                return;
            }
            strongSelf->_receivedRemoteDescription = true;
            // NOTE(review): errors from setRemoteDescription/answerForConstraints/
            // setLocalDescription are silently dropped throughout this init.
            [strongSelf->_peerConnection setRemoteDescription:sessionDescription completionHandler:^(__unused NSError * _Nullable error) {
            }];
            if (!isOutgoing) {
                [strongSelf->_peerConnection answerForConstraints:connectionConstraints completionHandler:^(RTCSessionDescription * _Nullable sdp, NSError * _Nullable error) {
                    __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
                    if (strongSelf == nil) {
                        return;
                    }
                    [strongSelf->_peerConnection setLocalDescription:sdp completionHandler:^(__unused NSError * _Nullable error) {
                        // Hop back to the context queue before touching the
                        // signalling client.
                        [queue dispatch:^{
                            __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
                            if (strongSelf == nil) {
                                return;
                            }
                            [strongSelf->_signallingClient sendSdp:sdp];
                        }];
                    }];
                }];
            }
        } didReceiveIceCandidate:^(RTCIceCandidate *iceCandidate) {
            __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
            if (strongSelf == nil) {
                return;
            }
            voipLog(@"didReceiveIceCandidate: %@", iceCandidate);
            [strongSelf->_peerConnection addIceCandidate:iceCandidate];
        }];
        [_signallingClient connect];
        
        if (isOutgoing) {
            // Caller side: create the offer and keep re-advertising it (see
            // tryAdvertising:) until the remote description arrives.
            // NOTE(review): queue/mediaConstraints/connectionConstraints/weakSelf
            // below shadow the outer declarations above — harmless, but worth
            // cleaning up.
            id<OngoingCallThreadLocalContextQueueWebrtcCustom> queue = _queue;
            NSDictionary *mediaConstraints = @{
                kRTCMediaConstraintsOfferToReceiveAudio: kRTCMediaConstraintsValueTrue,
                kRTCMediaConstraintsOfferToReceiveVideo: kRTCMediaConstraintsValueTrue
            };
            RTCMediaConstraints *connectionConstraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:mediaConstraints optionalConstraints:nil];
            __weak OngoingCallThreadLocalContextWebrtcCustom *weakSelf = self;
            [_peerConnection offerForConstraints:connectionConstraints completionHandler:^(RTCSessionDescription * _Nullable sdp, NSError * _Nullable error) {
                [queue dispatch:^{
                    __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
                    if (strongSelf == nil) {
                        return;
                    }
                    [strongSelf->_peerConnection setLocalDescription:sdp completionHandler:^(__unused NSError * _Nullable error) {
                        [queue dispatch:^{
                            __strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
                            if (strongSelf == nil) {
                                return;
                            }
                            [strongSelf tryAdvertising:sdp];
                        }];
                    }];
                }];
            }];
        }
        [self startLocalVideo];
    }
    return self;
}
/// The context must be torn down on its owning queue.
- (void)dealloc {
    assert([_queue isCurrent]);
}
/// Re-sends the local session description once per second until a remote
/// description has been received — the signalling channel is fire-and-forget,
/// so the SDP is repeated until the peer acknowledges it.
- (void)tryAdvertising:(RTCSessionDescription *)sessionDescription {
    if (_receivedRemoteDescription) {
        // The peer has answered; stop re-broadcasting.
        return;
    }
    [_signallingClient sendSdp:sessionDescription];
    __weak OngoingCallThreadLocalContextWebrtcCustom *weakSelf = self;
    [_queue dispatchAfter:1.0 block:^{
        // Messaging nil is a no-op, so no explicit strong-self guard is needed.
        [weakSelf tryAdvertising:sessionDescription];
    }];
}
/// Starts capturing front-camera video into the local video track.
/// Chooses the smallest format that is at least ~600px on one side and starts
/// capture at a fixed 27 fps. No-op when there is no camera capturer (e.g. on
/// the simulator, where a file-based capturer is used instead).
- (void)startLocalVideo {
    // nil fails isKindOfClass: too, so this also covers a missing capturer.
    if (![_videoCapturer isKindOfClass:[RTCCameraVideoCapturer class]]) {
        return;
    }
    RTCCameraVideoCapturer *cameraCapturer = (RTCCameraVideoCapturer *)_videoCapturer;
    
    // Locate the front-facing camera.
    AVCaptureDevice *frontCamera = nil;
    for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
        if (device.position == AVCaptureDevicePositionFront) {
            frontCamera = device;
            break;
        }
    }
    // Fix: the original nil-checked the (always non-nil) cast result instead of
    // the camera, so a device without a front camera would pass nil into
    // supportedFormatsForDevice: and startCaptureWithDevice:.
    if (frontCamera == nil) {
        return;
    }
    
    // Sort formats by ascending width, then take the first whose width or
    // height reaches 600 — i.e. the smallest "good enough" resolution.
    NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[RTCCameraVideoCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat *lhs, AVCaptureDeviceFormat *rhs) {
        int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
        int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
        return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
    }];
    AVCaptureDeviceFormat *bestFormat = nil;
    for (AVCaptureDeviceFormat *format in sortedFormats) {
        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        if (dimensions.width >= 600 || dimensions.height >= 600) {
            bestFormat = format;
            break;
        }
    }
    if (bestFormat == nil) {
        return;
    }
    
    // Highest supported frame-rate range of the chosen format.
    // NOTE(review): this is only used as an availability check; capture starts
    // at a fixed 27 fps below — consider using frameRateRange.maxFrameRate.
    AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
        if (lhs.maxFrameRate < rhs.maxFrameRate) {
            return NSOrderedAscending;
        } else {
            return NSOrderedDescending;
        }
    }] lastObject];
    if (frameRateRange == nil) {
        return;
    }
    
    // Capture errors (e.g. missing camera permission) are currently ignored.
    [cameraCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27 completionHandler:^(NSError * _Nonnull error) {
    }];
}
/// This backend never asks the user to rate the call quality.
- (bool)needRate {
    return false;
}
/// Tears the call down: halts camera capture (when a camera capturer is in
/// use), closes the peer connection, and reports empty/zero traffic statistics
/// to the completion handler.
- (void)stop:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
    RTCCameraVideoCapturer *cameraCapturer = [_videoCapturer isKindOfClass:[RTCCameraVideoCapturer class]] ? (RTCCameraVideoCapturer *)_videoCapturer : nil;
    // Messaging nil is a no-op when there is no camera capturer.
    [cameraCapturer stopCapture];
    [_peerConnection close];
    if (completion) {
        completion(@"", 0, 0, 0, 0);
    }
}
/// Short human-readable description of the backend and its version, surfaced
/// by the call debug UI.
- (NSString *)debugInfo {
    return [NSString stringWithFormat:@"WebRTC, Version: %@", [self version]];
}

/// Instance-level accessor mirroring the +version class method.
- (NSString *)version {
    return [OngoingCallThreadLocalContextWebrtcCustom version];
}
/// This backend keeps no persistable negotiation state; always returns empty data.
- (NSData * _Nonnull)getDerivedState {
    return [NSData data];
}
/// Mutes/unmutes the call by toggling every locally sent audio track.
/// Fix: the original tested each transceiver with
/// `isKindOfClass:[RTCAudioTrack class]`, which can never match (the loop
/// iterates RTCRtpTransceiver objects), so muting was a silent no-op. Match
/// audio transceivers by media type and toggle the sender's track instead.
- (void)setIsMuted:(bool)isMuted {
    for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
        if (transceiver.mediaType == RTCRtpMediaTypeAudio) {
            RTCMediaStreamTrack *track = transceiver.sender.track;
            if ([track isKindOfClass:[RTCAudioTrack class]]) {
                RTCAudioTrack *audioTrack = (RTCAudioTrack *)track;
                [audioTrack setIsEnabled:!isMuted];
            }
        }
    }
}
/// Network-type changes are ignored by this backend (WebRTC adapts on its own).
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtcCustom)networkType {
}
/// Creates a Metal-backed renderer view for the remote peer's video and hands
/// it to `completion` on the main queue. The remote track is resolved lazily
/// from the negotiated transceivers and cached in _remoteVideoTrack.
/// Fix: removed the dead `#if false && TARGET_OS_SIMULATOR` branch (its
/// condition could never be true, so the RTCEAGLVideoView path never compiled in).
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
    if (_remoteVideoTrack == nil) {
        for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
            if (transceiver.mediaType == RTCRtpMediaTypeVideo && [transceiver.receiver.track isKindOfClass:[RTCVideoTrack class]]) {
                _remoteVideoTrack = (RTCVideoTrack *)transceiver.receiver.track;
                break;
            }
        }
    }
    RTCVideoTrack *remoteVideoTrack = _remoteVideoTrack;
    dispatch_async(dispatch_get_main_queue(), ^{
        RTCMTLVideoView *remoteRenderer = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
        remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
        // If no remote track has arrived yet this is a no-op (messaging nil);
        // the renderer view is still returned, matching the original behavior.
        [remoteVideoTrack addRenderer:remoteRenderer];
        completion(remoteRenderer);
    });
}
@end

View File

@ -23,7 +23,7 @@ if [ "$ARCH" == "x64" ]; then
OUT_DIR="ios_sim"
fi
gn gen out/$OUT_DIR --args="use_xcode_clang=true "" target_cpu=\"$ARCH\""' target_os="ios" is_debug=false is_component_build=false rtc_include_tests=false use_rtti=true rtc_use_x11=false use_custom_libcxx=false use_custom_libcxx_for_host=false rtc_include_builtin_video_codecs=false rtc_build_ssl=false rtc_build_examples=false rtc_build_tools=false ios_deployment_target="9.0" ios_enable_code_signing=false is_unsafe_developer_build=false rtc_enable_protobuf=false rtc_include_builtin_video_codecs=false rtc_use_gtk=false rtc_use_metal_rendering=false rtc_ssl_root="//openssl"'
gn gen out/$OUT_DIR --args="use_xcode_clang=true "" target_cpu=\"$ARCH\""' target_os="ios" is_debug=true is_component_build=false rtc_include_tests=false use_rtti=true rtc_use_x11=false use_custom_libcxx=false use_custom_libcxx_for_host=false rtc_include_builtin_video_codecs=false rtc_build_ssl=false rtc_build_examples=false rtc_build_tools=false ios_deployment_target="9.0" ios_enable_code_signing=false is_unsafe_developer_build=false rtc_enable_protobuf=false rtc_include_builtin_video_codecs=false rtc_use_gtk=false rtc_use_metal_rendering=true rtc_ssl_root="//openssl"'
ninja -C out/$OUT_DIR framework_objc_static
popd