Build with buck

Ali committed 2020-05-12 22:01:20 +04:00
parent 8c9ecb800e
commit 82e676728a
25 changed files with 1051 additions and 139 deletions

View File

@@ -69,6 +69,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
     case photoPreview(PresentationTheme, Bool)
     case knockoutWallpaper(PresentationTheme, Bool)
     case alternativeFolderTabs(Bool)
+    case videoCalls(Bool)
     case hostInfo(PresentationTheme, String)
     case versionInfo(PresentationTheme)
@@ -82,7 +83,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
         return DebugControllerSection.logging.rawValue
     case .enableRaiseToSpeak, .keepChatNavigationStack, .skipReadHistory, .crashOnSlowQueries:
         return DebugControllerSection.experiments.rawValue
-    case .clearTips, .reimport, .resetData, .resetDatabase, .resetHoles, .reindexUnread, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .alternativeFolderTabs:
+    case .clearTips, .reimport, .resetData, .resetDatabase, .resetHoles, .reindexUnread, .resetBiometricsData, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .alternativeFolderTabs, .videoCalls:
         return DebugControllerSection.experiments.rawValue
     case .hostInfo, .versionInfo:
         return DebugControllerSection.info.rawValue
@@ -137,10 +138,12 @@ private enum DebugControllerEntry: ItemListNodeEntry {
             return 22
         case .alternativeFolderTabs:
             return 23
-        case .hostInfo:
-            return 24
-        case .versionInfo:
-            return 25
+        case .videoCalls:
+            return 24
+        case .hostInfo:
+            return 25
+        case .versionInfo:
+            return 26
         }
     }
@@ -538,6 +541,16 @@ private enum DebugControllerEntry: ItemListNodeEntry {
                     })
                 }).start()
             })
+        case let .videoCalls(value):
+            return ItemListSwitchItem(presentationData: presentationData, title: "Video", value: value, sectionId: self.section, style: .blocks, updated: { value in
+                let _ = arguments.sharedContext.accountManager.transaction ({ transaction in
+                    transaction.updateSharedData(ApplicationSpecificSharedDataKeys.experimentalUISettings, { settings in
+                        var settings = settings as? ExperimentalUISettings ?? ExperimentalUISettings.defaultSettings
+                        settings.videoCalls = value
+                        return settings
+                    })
+                }).start()
+            })
         case let .hostInfo(theme, string):
             return ItemListTextItem(presentationData: presentationData, text: .plain(string), sectionId: self.section)
         case let .versionInfo(theme):
@@ -581,6 +594,7 @@ private func debugControllerEntries(presentationData: PresentationData, loggingS
     entries.append(.photoPreview(presentationData.theme, experimentalSettings.chatListPhotos))
     entries.append(.knockoutWallpaper(presentationData.theme, experimentalSettings.knockoutWallpaper))
     entries.append(.alternativeFolderTabs(experimentalSettings.foldersTabAtBottom))
+    entries.append(.videoCalls(experimentalSettings.videoCalls))
     if let backupHostOverride = networkSettings?.backupHostOverride {
        entries.append(.hostInfo(presentationData.theme, "Host: \(backupHostOverride)"))
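Editorial note: the hunks above add a "Video" switch to the debug menu and persist it through the shared accountManager store, with a nil-coalescing fallback to defaultSettings when nothing was stored yet. A reduced, standalone Swift sketch of that fallback (local stand-in types; the real code reads a PreferencesEntry out of accountManager.sharedData(keys:)):

    // Stand-in for ExperimentalUISettings, reduced to the flag this commit adds.
    struct ExperimentalFlags { var videoCalls: Bool = false }
    // Nothing persisted yet, as on first launch.
    let stored: ExperimentalFlags? = nil
    // Same `?? defaultSettings` fallback pattern as the switch handler above.
    let settings = stored ?? ExperimentalFlags()
    assert(settings.videoCalls == false) // the experiment defaults to off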

View File

@@ -272,6 +272,7 @@ final class CallControllerNode: ASDisplayNode {
                 return
             }
             if let videoView = videoView {
+                strongSelf.setCurrentAudioOutput?(.speaker)
                 strongSelf.videoView = videoView
                 strongSelf.containerNode.view.insertSubview(videoView, aboveSubview: strongSelf.dimNode.view)
                 if let (layout, navigationBarHeight) = strongSelf.validLayout {

View File

@@ -75,8 +75,8 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
         return OngoingCallContext.maxLayer
     }

-    public static var voipVersions: [String] {
-        return OngoingCallContext.versions
+    public static func voipVersions(includeExperimental: Bool) -> [String] {
+        return OngoingCallContext.versions(includeExperimental: includeExperimental)
     }

     public init(accountManager: AccountManager, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), isMediaPlaying: @escaping () -> Bool, resumeMediaPlayback: @escaping () -> Void, audioSession: ManagedAudioSession, activeAccounts: Signal<[Account], NoError>) {

View File

@@ -252,7 +252,7 @@ private final class CallSessionManagerContext {
     private let postbox: Postbox
     private let network: Network
     private let maxLayer: Int32
-    private let versions: [String]
+    private var versions: [String]
     private let addUpdates: (Api.Updates) -> Void

     private let ringingSubscribers = Bag<([CallSessionRingingState]) -> Void>()
@@ -275,6 +275,10 @@ private final class CallSessionManagerContext {
         self.disposables.dispose()
     }

+    func updateVersions(versions: [String]) {
+        self.versions = versions.reversed()
+    }
+
     func ringingStates() -> Signal<[CallSessionRingingState], NoError> {
         let queue = self.queue
         return Signal { [weak self] subscriber in
@@ -917,6 +921,12 @@ public final class CallSessionManager {
         }
     }

+    public func updateVersions(versions: [String]) {
+        self.withContext { context in
+            context.updateVersions(versions: versions)
+        }
+    }
+
     public func ringingStates() -> Signal<[CallSessionRingingState], NoError> {
         return Signal { [weak self] subscriber in
             let disposable = MetaDisposable()
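Editorial note: the context stores the incoming list reversed. A minimal standalone model of updateVersions, with placeholder version strings (the real ones come from the native call contexts):

    // Hedged sketch mirroring CallSessionManagerContext.updateVersions above.
    final class VersionStore {
        private(set) var versions: [String] = []
        func updateVersions(versions: [String]) {
            self.versions = versions.reversed()
        }
    }
    let store = VersionStore()
    store.updateVersions(versions: ["2.4.4", "3.0.0"]) // placeholder version strings
    assert(store.versions == ["3.0.0", "2.4.4"])       // last-listed version is consulted first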

View File

@@ -35,7 +35,6 @@ framework(
         "//submodules/AccountContext:AccountContext",
         "//submodules/LegacyComponents:LegacyComponents",
         "//submodules/TgVoip:TgVoip",
-        "//submodules/TgVoipWebrtc:TgVoipWebrtc",
         "//submodules/lottie-ios:Lottie",
         "//submodules/FFMpegBinding:FFMpegBinding",
         "//submodules/WebPBinding:WebPBinding",

View File

@@ -15,6 +15,9 @@ import WalletCore
 import WalletUI
 #endif
 import PhoneNumberFormat
+import TelegramUIPreferences
+import TelegramVoip
+import TelegramCallsUI

 private final class DeviceSpecificContactImportContext {
     let disposable = MetaDisposable()
@@ -147,6 +150,8 @@ public final class AccountContextImpl: AccountContext {
     private let deviceSpecificContactImportContexts: QueueLocalObject<DeviceSpecificContactImportContexts>
     private var managedAppSpecificContactsDisposable: Disposable?

+    private var experimentalUISettingsDisposable: Disposable?
+
     #if ENABLE_WALLET
     public var hasWallets: Signal<Bool, NoError> {
         return WalletStorageInterfaceImpl(postbox: self.account.postbox).getWalletRecords()
@@ -242,12 +247,20 @@ public final class AccountContextImpl: AccountContext {
                 }
             })
         }
+
+        self.experimentalUISettingsDisposable = (sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.experimentalUISettings])
+        |> deliverOnMainQueue).start(next: { sharedData in
+            if let settings = sharedData.entries[ApplicationSpecificSharedDataKeys.experimentalUISettings] as? ExperimentalUISettings {
+                account.callSessionManager.updateVersions(versions: PresentationCallManagerImpl.voipVersions(includeExperimental: settings.videoCalls))
+            }
+        })
     }

     deinit {
         self.limitsConfigurationDisposable?.dispose()
         self.managedAppSpecificContactsDisposable?.dispose()
         self.contentSettingsDisposable?.dispose()
+        self.experimentalUISettingsDisposable?.dispose()
     }

     public func storeSecureIdPassword(password: String) {

View File

@@ -393,7 +393,7 @@ final class SharedApplicationContext {
         }
     }

-    let networkArguments = NetworkInitializationArguments(apiId: apiId, apiHash: apiHash, languagesCategory: languagesCategory, appVersion: appVersion, voipMaxLayer: PresentationCallManagerImpl.voipMaxLayer, voipVersions: PresentationCallManagerImpl.voipVersions, appData: self.deviceToken.get()
+    let networkArguments = NetworkInitializationArguments(apiId: apiId, apiHash: apiHash, languagesCategory: languagesCategory, appVersion: appVersion, voipMaxLayer: PresentationCallManagerImpl.voipMaxLayer, voipVersions: PresentationCallManagerImpl.voipVersions(includeExperimental: false), appData: self.deviceToken.get()
     |> map { token in
         let data = buildConfig.bundleData(withAppToken: token, signatureDict: signatureDict)
         if let data = data, let jsonString = String(data: data, encoding: .utf8) {

View File

@@ -9,18 +9,20 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
     public var chatListPhotos: Bool
     public var knockoutWallpaper: Bool
     public var foldersTabAtBottom: Bool
+    public var videoCalls: Bool

     public static var defaultSettings: ExperimentalUISettings {
-        return ExperimentalUISettings(keepChatNavigationStack: false, skipReadHistory: false, crashOnLongQueries: false, chatListPhotos: false, knockoutWallpaper: false, foldersTabAtBottom: false)
+        return ExperimentalUISettings(keepChatNavigationStack: false, skipReadHistory: false, crashOnLongQueries: false, chatListPhotos: false, knockoutWallpaper: false, foldersTabAtBottom: false, videoCalls: false)
     }

-    public init(keepChatNavigationStack: Bool, skipReadHistory: Bool, crashOnLongQueries: Bool, chatListPhotos: Bool, knockoutWallpaper: Bool, foldersTabAtBottom: Bool) {
+    public init(keepChatNavigationStack: Bool, skipReadHistory: Bool, crashOnLongQueries: Bool, chatListPhotos: Bool, knockoutWallpaper: Bool, foldersTabAtBottom: Bool, videoCalls: Bool) {
         self.keepChatNavigationStack = keepChatNavigationStack
         self.skipReadHistory = skipReadHistory
         self.crashOnLongQueries = crashOnLongQueries
         self.chatListPhotos = chatListPhotos
         self.knockoutWallpaper = knockoutWallpaper
         self.foldersTabAtBottom = foldersTabAtBottom
+        self.videoCalls = videoCalls
     }

     public init(decoder: PostboxDecoder) {
@@ -30,6 +32,7 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
         self.chatListPhotos = decoder.decodeInt32ForKey("chatListPhotos", orElse: 0) != 0
         self.knockoutWallpaper = decoder.decodeInt32ForKey("knockoutWallpaper", orElse: 0) != 0
         self.foldersTabAtBottom = decoder.decodeInt32ForKey("foldersTabAtBottom", orElse: 0) != 0
+        self.videoCalls = decoder.decodeInt32ForKey("videoCalls", orElse: 0) != 0
     }

     public func encode(_ encoder: PostboxEncoder) {
@@ -39,6 +42,7 @@ public struct ExperimentalUISettings: Equatable, PreferencesEntry {
         encoder.encodeInt32(self.chatListPhotos ? 1 : 0, forKey: "chatListPhotos")
         encoder.encodeInt32(self.knockoutWallpaper ? 1 : 0, forKey: "knockoutWallpaper")
         encoder.encodeInt32(self.foldersTabAtBottom ? 1 : 0, forKey: "foldersTabAtBottom")
+        encoder.encodeInt32(self.videoCalls ? 1 : 0, forKey: "videoCalls")
     }

     public func isEqual(to: PreferencesEntry) -> Bool {
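Editorial note: because the new key is decoded with orElse: 0, settings records written before this commit decode with videoCalls == false instead of failing. A standalone illustration of that default (the Int32-backed Postbox storage is modeled here with a plain dictionary):

    // Hedged model of decodeInt32ForKey(_:orElse:) semantics for a missing key.
    func decodeBool(_ stored: [String: Int32], key: String) -> Bool {
        return (stored[key] ?? 0) != 0
    }
    let preUpgradeRecord: [String: Int32] = ["knockoutWallpaper": 1] // no "videoCalls" key yet
    assert(decodeBool(preUpgradeRecord, key: "videoCalls") == false)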

View File

@@ -12,7 +12,7 @@ static_library(
         "//submodules/Postbox:Postbox#shared",
         "//submodules/TelegramUIPreferences:TelegramUIPreferences",
         "//submodules/TgVoip:TgVoip",
-        "//submodules/TgVoipWebrtc:TgVoipWebrtc",
+        "//submodules/TgVoipWebrtcCustom:TgVoipWebrtcCustom",
     ],
     frameworks = [
         "$SDKROOT/System/Library/Frameworks/Foundation.framework",

View File

@@ -7,16 +7,16 @@ import Postbox
 import TelegramUIPreferences
 import TgVoip
-import TgVoipWebrtc
+//import TgVoipWebrtc
 import TgVoipWebrtcCustom

 private func callConnectionDescription(_ connection: CallSessionConnection) -> OngoingCallConnectionDescription {
     return OngoingCallConnectionDescription(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
 }

-private func callConnectionDescriptionWebrtc(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtc {
+/*private func callConnectionDescriptionWebrtc(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtc {
     return OngoingCallConnectionDescriptionWebrtc(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
-}
+}*/

 private func callConnectionDescriptionWebrtcCustom(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtcCustom {
     return OngoingCallConnectionDescriptionWebrtcCustom(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
@@ -80,11 +80,11 @@ private let setupLogs: Bool = {
         Logger.shared.log("TGVOIP", value)
     }
 })
-OngoingCallThreadLocalContextWebrtc.setupLoggingFunction({ value in
+/*OngoingCallThreadLocalContextWebrtc.setupLoggingFunction({ value in
     if let value = value {
         Logger.shared.log("TGVOIP", value)
     }
-})
+})*/
 OngoingCallThreadLocalContextWebrtcCustom.setupLoggingFunction({ value in
     if let value = value {
         Logger.shared.log("TGVOIP", value)
@@ -100,7 +100,7 @@ public enum OngoingCallContextState {
     case failed
 }

-private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue, OngoingCallThreadLocalContextQueueWebrtc, OngoingCallThreadLocalContextQueueWebrtcCustom {
+private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue, /*OngoingCallThreadLocalContextQueueWebrtc,*/ OngoingCallThreadLocalContextQueueWebrtcCustom {
     private let queue: Queue

     init(queue: Queue) {
@@ -144,7 +144,7 @@ private func ongoingNetworkTypeForType(_ type: NetworkType) -> OngoingCallNetwor
     }
 }

-private func ongoingNetworkTypeForTypeWebrtc(_ type: NetworkType) -> OngoingCallNetworkTypeWebrtc {
+/*private func ongoingNetworkTypeForTypeWebrtc(_ type: NetworkType) -> OngoingCallNetworkTypeWebrtc {
     switch type {
     case .none:
         return .wifi
@@ -162,7 +162,7 @@ private func ongoingNetworkTypeForTypeWebrtc(_ type: NetworkType) -> OngoingCall
             return .cellularLte
         }
     }
-}
+}*/

 private func ongoingNetworkTypeForTypeWebrtcCustom(_ type: NetworkType) -> OngoingCallNetworkTypeWebrtcCustom {
     switch type {
@@ -197,7 +197,7 @@ private func ongoingDataSavingForType(_ type: VoiceCallDataSaving) -> OngoingCal
     }
 }

-private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtc {
+/*private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtc {
     switch type {
     case .never:
         return .never
@@ -208,7 +208,7 @@ private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> Ongo
     default:
         return .never
     }
-}
+}*/

 private func ongoingDataSavingForTypeWebrtcCustom(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtcCustom {
     switch type {
@@ -266,7 +266,7 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
     }
 }

-extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {
+/*extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProtocol {
     func nativeSetNetworkType(_ type: NetworkType) {
         self.setNetworkType(ongoingNetworkTypeForTypeWebrtc(type))
     }
@@ -290,7 +290,7 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
     func nativeGetDerivedState() -> Data {
         return self.getDerivedState()
     }
-}
+}*/

 extension OngoingCallThreadLocalContextWebrtcCustom: OngoingCallThreadLocalContextProtocol {
     func nativeSetNetworkType(_ type: NetworkType) {
@@ -335,7 +335,7 @@ private extension OngoingCallContextState {
     }
 }

-private extension OngoingCallContextState {
+/*private extension OngoingCallContextState {
     init(_ state: OngoingCallStateWebrtc) {
         switch state {
         case .initializing:
@@ -350,7 +350,7 @@ private extension OngoingCallContextState {
             self = .failed
         }
     }
-}
+}*/

 private extension OngoingCallContextState {
     init(_ state: OngoingCallStateWebrtcCustom) {
@@ -394,17 +394,22 @@ public final class OngoingCallContext {
     private var networkTypeDisposable: Disposable?

     public static var maxLayer: Int32 {
-        return max(OngoingCallThreadLocalContext.maxLayer(), OngoingCallThreadLocalContextWebrtc.maxLayer())
+        return OngoingCallThreadLocalContext.maxLayer()
+        //return max(OngoingCallThreadLocalContext.maxLayer(), OngoingCallThreadLocalContextWebrtc.maxLayer())
     }

-    public static var versions: [String] {
-        return [OngoingCallThreadLocalContext.version(), OngoingCallThreadLocalContextWebrtc.version(), OngoingCallThreadLocalContextWebrtcCustom.version()]
+    public static func versions(includeExperimental: Bool) -> [String] {
+        var result: [String] = [OngoingCallThreadLocalContext.version()]
+        if includeExperimental {
+            result.append(OngoingCallThreadLocalContextWebrtcCustom.version())
+        }
+        return result
     }

     public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
         let _ = setupLogs
         OngoingCallThreadLocalContext.applyServerConfig(serializedData)
-        OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData)
+        //OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData)

         self.internalId = internalId
         self.account = account
@@ -448,7 +453,7 @@ public final class OngoingCallContext {
                     context.nativeSetNetworkType(networkType)
                 }
             })
-        } else if version == OngoingCallThreadLocalContextWebrtc.version() {
+        }/* else if version == OngoingCallThreadLocalContextWebrtc.version() {
             var voipProxyServer: VoipProxyServerWebrtc?
             if let proxyServer = proxyServer {
                 switch proxyServer.connection {
@@ -474,7 +479,7 @@ public final class OngoingCallContext {
                     context.nativeSetNetworkType(networkType)
                 }
             })
-        } else {
+        }*/ else {
             var voipProxyServer: VoipProxyServer?
             if let proxyServer = proxyServer {
                 switch proxyServer.connection {
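Editorial note: the net effect of this file is that the stock TgVoip context is always offered, the WebRTC-custom context is offered only behind the debug flag, and the older TgVoipWebrtc path is fenced off entirely. A standalone sketch of the gate with assumed version strings (the real values come from the native version() calls):

    // Hedged sketch of versions(includeExperimental:); "2.4.4" and "3.0.0" are assumptions.
    func contextVersions(includeExperimental: Bool) -> [String] {
        var result = ["2.4.4"]     // stands in for OngoingCallThreadLocalContext.version()
        if includeExperimental {
            result.append("3.0.0") // stands in for OngoingCallThreadLocalContextWebrtcCustom.version()
        }
        return result
    }
    assert(contextVersions(includeExperimental: false) == ["2.4.4"])
    assert(contextVersions(includeExperimental: true) == ["2.4.4", "3.0.0"])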

View File

@@ -0,0 +1,47 @@
load("//Config:buck_rule_macros.bzl", "static_library", "glob_map", "glob_sub_map", "merge_maps")

static_library(
    name = "TgVoipWebrtcCustom",
    srcs = glob([
        "Sources/**/*.m",
        "Sources/**/*.mm",
        "Impl/*.cpp",
    ]),
    has_cpp = True,
    headers = merge_maps([
        glob_sub_map("PublicHeaders/", [
            "PublicHeaders/**/*.h",
        ]),
        glob_sub_map("Impl/", [
            "Impl/*.h",
        ]),
    ]),
    exported_headers = glob([
        "PublicHeaders/**/*.h",
    ]),
    compiler_flags = [
        "-Ithird-party/submodules/TgVoipWebrtcCustom/PublicHeaders",
        "-Ithird-party/webrtc/webrtc-ios/src",
        "-Ithird-party/webrtc/webrtc-ios/src/third_party/abseil-cpp",
        "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc",
        "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/base",
        "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/components/renderer/metal",
        "-DWEBRTC_IOS",
        "-DWEBRTC_MAC",
        "-DWEBRTC_POSIX",
        "-std=c++14",
    ],
    deps = [
        "//third-party/webrtc:webrtc_lib",
    ],
    frameworks = [
        "$SDKROOT/System/Library/Frameworks/Foundation.framework",
        "$SDKROOT/System/Library/Frameworks/UIKit.framework",
        "$SDKROOT/System/Library/Frameworks/AudioToolbox.framework",
        "$SDKROOT/System/Library/Frameworks/VideoToolbox.framework",
        "$SDKROOT/System/Library/Frameworks/CoreTelephony.framework",
        "$SDKROOT/System/Library/Frameworks/CoreMedia.framework",
        "$SDKROOT/System/Library/Frameworks/AVFoundation.framework",
        "$SDKROOT/System/Library/Frameworks/Metal.framework",
    ],
)

View File

@@ -17,6 +17,7 @@ objc_library(
         "-Ithird-party/webrtc/webrtc-ios/src/third_party/abseil-cpp",
         "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc",
         "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/base",
+        "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/components/renderer/metal",
         "-DWEBRTC_IOS",
         "-DWEBRTC_MAC",
         "-DWEBRTC_POSIX",

View File

@@ -74,13 +74,7 @@ static void voipLog(NSString* format, ...) {
     RtcConnection *_connection;
-
-    //RTCVideoCapturer *_videoCapturer;
-    //RTCVideoTrack *_localVideoTrack;
-    //RTCVideoTrack *_remoteVideoTrack;
-
     bool _receivedRemoteDescription;
 }

 @end
@@ -201,8 +195,6 @@ static void voipLog(NSString* format, ...) {
                 }];
             }];
         }
-
-        [self startLocalVideo];
     }
     return self;
 }
@@ -227,67 +219,11 @@ static void voipLog(NSString* format, ...) {
     }];
 }

-- (void)startLocalVideo {
-    /*if (_videoCapturer == nil || ![_videoCapturer isKindOfClass:[RTCCameraVideoCapturer class]]) {
-        return;
-    }
-    RTCCameraVideoCapturer *cameraCapturer = (RTCCameraVideoCapturer *)_videoCapturer;
-
-    AVCaptureDevice *frontCamera = nil;
-    for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
-        if (device.position == AVCaptureDevicePositionFront) {
-            frontCamera = device;
-            break;
-        }
-    }
-
-    if (cameraCapturer == nil) {
-        return;
-    }
-
-    NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[RTCCameraVideoCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
-        int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
-        int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
-        return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
-    }];
-
-    AVCaptureDeviceFormat *bestFormat = nil;
-    for (AVCaptureDeviceFormat *format in sortedFormats) {
-        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
-        if (dimensions.width >= 600 || dimensions.height >= 600) {
-            bestFormat = format;
-            break;
-        }
-    }
-
-    if (bestFormat == nil) {
-        return;
-    }
-
-    AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
-        if (lhs.maxFrameRate < rhs.maxFrameRate) {
-            return NSOrderedAscending;
-        } else {
-            return NSOrderedDescending;
-        }
-    }] lastObject];
-
-    if (frameRateRange == nil) {
-        return;
-    }
-
-    [cameraCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27 completionHandler:^(NSError * _Nonnull error) {
-    }];*/
-}
-
 - (bool)needRate {
     return false;
 }

 - (void)stop:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
-    /*if ([_videoCapturer isKindOfClass:[RTCCameraVideoCapturer class]]) {
-        RTCCameraVideoCapturer *cameraCapturer = (RTCCameraVideoCapturer *)_videoCapturer;
-        [cameraCapturer stopCapture];
-    }*/
     [_connection close];
     if (completion) {
         completion(@"", 0, 0, 0, 0);
@@ -404,40 +340,14 @@ static void voipLog(NSString* format, ...) {
     }
 }

 - (void)setIsMuted:(bool)isMuted {
-    /*for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
-        if ([transceiver isKindOfClass:[RTCAudioTrack class]]) {
-            RTCAudioTrack *audioTrack = (RTCAudioTrack *)transceiver;
-            [audioTrack setIsEnabled:!isMuted];
-        }
-    }*/
+    [_connection setIsMuted:isMuted];
 }

 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtcCustom)networkType {
 }

 - (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
-    /*if (_remoteVideoTrack == nil) {
-        for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
-            if (transceiver.mediaType == RTCRtpMediaTypeVideo && [transceiver.receiver.track isKindOfClass:[RTCVideoTrack class]]) {
-                _remoteVideoTrack = (RTCVideoTrack *)transceiver.receiver.track;
-                break;
-            }
-        }
-    }
-    RTCVideoTrack *remoteVideoTrack = _remoteVideoTrack;
-    dispatch_async(dispatch_get_main_queue(), ^{
-#if false && TARGET_OS_SIMULATOR
-        RTCEAGLVideoView *remoteRenderer = [[RTCEAGLVideoView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
-        [remoteVideoTrack addRenderer:remoteRenderer];
-        completion(remoteRenderer);
-#else
-        RTCMTLVideoView *remoteRenderer = [[RTCMTLVideoView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
-        remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
-        [remoteVideoTrack addRenderer:remoteRenderer];
-        completion(remoteRenderer);
-#endif
-    });*/
+    [_connection getRemoteCameraView:completion];
 }

 @end

View File

@@ -2,18 +2,23 @@
 #define RTCCONNECTION_H

 #import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>

 @interface RtcConnection : NSObject

-- (instancetype)initWithDiscoveredIceCandidate:(void (^)(NSString *, int, NSString *))discoveredIceCandidate connectionStateChanged:(void (^)(bool))connectionStateChanged;
+- (instancetype _Nonnull)initWithDiscoveredIceCandidate:(void (^_Nonnull)(NSString *, int, NSString * _Nonnull))discoveredIceCandidate connectionStateChanged:(void (^_Nonnull)(bool))connectionStateChanged;

 - (void)close;

-- (void)getOffer:(void (^)(NSString *, NSString *))completion;
-- (void)getAnswer:(void (^)(NSString *, NSString *))completion;
-- (void)setLocalDescription:(NSString *)serializedDescription type:(NSString *)type completion:(void (^)())completion;
-- (void)setRemoteDescription:(NSString *)serializedDescription type:(NSString *)type completion:(void (^)())completion;
-- (void)addIceCandidateWithSdp:(NSString *)sdp sdpMLineIndex:(int)sdpMLineIndex sdpMid:(NSString *)sdpMid;
+- (void)setIsMuted:(bool)isMuted;
+
+- (void)getOffer:(void (^_Nonnull)(NSString * _Nonnull, NSString * _Nonnull))completion;
+- (void)getAnswer:(void (^_Nonnull)(NSString * _Nonnull, NSString * _Nonnull))completion;
+- (void)setLocalDescription:(NSString * _Nonnull)serializedDescription type:(NSString * _Nonnull)type completion:(void (^_Nonnull)())completion;
+- (void)setRemoteDescription:(NSString * _Nonnull)serializedDescription type:(NSString * _Nonnull)type completion:(void (^_Nonnull)())completion;
+- (void)addIceCandidateWithSdp:(NSString * _Nonnull)sdp sdpMLineIndex:(int)sdpMLineIndex sdpMid:(NSString * _Nullable)sdpMid;
+- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion;

 @end

View File

@@ -1,5 +1,7 @@
 #import "RtcConnection.h"

+#import <UIKit/UIKit.h>
+
 #include <memory>
 #include "api/scoped_refptr.h"
 #include "rtc_base/thread.h"
@@ -16,6 +18,14 @@
 #include "api/rtc_event_log/rtc_event_log_factory.h"
 #include "sdk/media_constraints.h"
 #include "api/peer_connection_interface.h"
+#include "sdk/objc/native/src/objc_video_track_source.h"
+#include "api/video_track_source_proxy.h"
+#include "sdk/objc/api/RTCVideoRendererAdapter.h"
+#include "sdk/objc/native/api/video_frame.h"
+#include "VideoCameraCapturer.h"
+
+#import "VideoMetalView.h"

 class PeerConnectionObserverImpl : public webrtc::PeerConnectionObserver {
 private:
@@ -175,6 +185,14 @@ public:
     rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
     std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
     bool _hasStartedRtcEventLog;
+
+    rtc::scoped_refptr<webrtc::AudioTrackInterface> _localAudioTrack;
+
+    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> _localVideoTrack;
+    VideoCameraCapturer *_videoCapturer;
+
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> _remoteVideoTrack;
 }

 @end
@@ -228,27 +246,109 @@ public:
         config.continual_gathering_policy = webrtc::PeerConnectionInterface::ContinualGatheringPolicy::GATHER_CONTINUALLY;
         webrtc::PeerConnectionInterface::IceServer iceServer;
         iceServer.uri = "stun:stun.l.google.com:19302";
+        /*iceServer.uri = "stun:rrrtest.uksouth.cloudapp.azure.com:3478";
+        iceServer.username = "user";
+        iceServer.password = "root";*/
        config.servers.push_back(iceServer);
+
+        /*webrtc::PeerConnectionInterface::IceServer turnServer;
+        turnServer.uri = "turn:rrrtest.uksouth.cloudapp.azure.com:3478";
+        turnServer.username = "user";
+        turnServer.password = "root";
+        config.servers.push_back(turnServer);*/
+
+        //config.type = webrtc::PeerConnectionInterface::kRelay;
+
         _observer.reset(new PeerConnectionObserverImpl(_discoveredIceCandidate, _connectionStateChanged));
         _peerConnection = _nativeFactory->CreatePeerConnection(config, nullptr, nullptr, _observer.get());
+        assert(_peerConnection != nullptr);

-        cricket::AudioOptions options;
-        rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSource = _nativeFactory->CreateAudioSource(options);
-        rtc::scoped_refptr<webrtc::AudioTrackInterface> track = _nativeFactory->CreateAudioTrack("audio0", audioSource);
         std::vector<std::string> streamIds;
         streamIds.push_back("stream");
-        _peerConnection->AddTrack(track, streamIds);
+
+        cricket::AudioOptions options;
+        rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSource = _nativeFactory->CreateAudioSource(options);
+        _localAudioTrack = _nativeFactory->CreateAudioTrack("audio0", audioSource);
+        _peerConnection->AddTrack(_localAudioTrack, streamIds);
+
+        rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
+        _nativeVideoSource = webrtc::VideoTrackSourceProxy::Create(_signalingThread.get(), _workerThread.get(), objCVideoTrackSource);
+        _localVideoTrack = _nativeFactory->CreateVideoTrack("video0", _nativeVideoSource);
+        _peerConnection->AddTrack(_localVideoTrack, streamIds);
+
+        [self startLocalVideo];
     }
     return self;
 }

 - (void)close {
+    if (_videoCapturer != nil) {
+        [_videoCapturer stopCapture];
+    }
     _peerConnection->Close();
 }

+- (void)startLocalVideo {
+#if TARGET_OS_SIMULATOR
+    return;
+#endif
+    _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:_nativeVideoSource];
+
+    AVCaptureDevice *frontCamera = nil;
+    for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
+        if (device.position == AVCaptureDevicePositionFront) {
+            frontCamera = device;
+            break;
+        }
+    }
+
+    if (frontCamera == nil) {
+        return;
+    }
+
+    NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
+        int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
+        int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
+        return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
+    }];
+
+    AVCaptureDeviceFormat *bestFormat = nil;
+    for (AVCaptureDeviceFormat *format in sortedFormats) {
+        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+        if (dimensions.width >= 600 || dimensions.height >= 600) {
+            bestFormat = format;
+            break;
+        }
+    }
+
+    if (bestFormat == nil) {
+        return;
+    }
+
+    AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
+        if (lhs.maxFrameRate < rhs.maxFrameRate) {
+            return NSOrderedAscending;
+        } else {
+            return NSOrderedDescending;
+        }
+    }] lastObject];
+
+    if (frameRateRange == nil) {
+        return;
+    }
+
+    [_videoCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27];
+}
+
+- (void)setIsMuted:(bool)isMuted {
+    _localAudioTrack->set_enabled(!isMuted);
+}
+
 - (void)getOffer:(void (^)(NSString *, NSString *))completion {
     webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
     options.offer_to_receive_audio = 1;
@@ -295,4 +395,27 @@ public:
     }
 }

+- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
+    if (_remoteVideoTrack == nullptr) {
+        for (auto &it : _peerConnection->GetTransceivers()) {
+            if (it->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) {
+                _remoteVideoTrack = static_cast<webrtc::VideoTrackInterface *>(it->receiver()->track().get());
+                break;
+            }
+        }
+    }
+
+    rtc::scoped_refptr<webrtc::VideoTrackInterface> remoteVideoTrack = _remoteVideoTrack;
+    dispatch_async(dispatch_get_main_queue(), ^{
+        if (remoteVideoTrack != nullptr) {
+            VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
+            remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
+            [remoteRenderer addToTrack:remoteVideoTrack];
+            completion(remoteRenderer);
+        }
+    });
+}
+
 @end
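Editorial note: startLocalVideo picks the front camera, then the narrowest format that is still at least 600 pixels on a side, then the highest supported frame-rate range. That selection logic, reduced to a runnable Swift sketch (Format and the sample values are stand-ins for AVCaptureDeviceFormat data, and the width-only check simplifies the width-or-height test above):

    // Hedged model of the capture-format choice in startLocalVideo.
    struct Format { let width: Int32; let maxFrameRate: Double }
    let formats: [Format] = [
        Format(width: 480, maxFrameRate: 30),
        Format(width: 1280, maxFrameRate: 60),
        Format(width: 640, maxFrameRate: 60),
    ]
    // Sort ascending by width, take the first format at least 600 px wide...
    let best = formats.sorted { $0.width < $1.width }.first { $0.width >= 600 }
    assert(best?.width == 640)
    // ...then the capturer is started at a fixed 27 fps within that format's range.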

View File

@@ -0,0 +1,23 @@
#ifndef VIDEOCAMERACAPTURER_H
#define VIDEOCAMERACAPTURER_H

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

#include <memory>
#include "api/scoped_refptr.h"
#include "api/media_stream_interface.h"

@interface VideoCameraCapturer : NSObject

+ (NSArray<AVCaptureDevice *> *)captureDevices;
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;

- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source;
- (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps;
- (void)stopCapture;

@end

#endif

View File

@@ -0,0 +1,459 @@
#include "VideoCameraCapturer.h"

#import <AVFoundation/AVFoundation.h>

#import "base/RTCLogging.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#import "sdk/objc/native/src/objc_video_track_source.h"
#import "api/video_track_source_proxy.h"

#import "helpers/UIDevice+RTCDevice.h"
#import "helpers/AVCaptureSession+DevicePosition.h"
#import "helpers/RTCDispatcher+Private.h"
#import "base/RTCVideoFrame.h"

static const int64_t kNanosecondsPerSecond = 1000000000;

static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
    webrtc::VideoTrackSourceProxy *proxy_source =
        static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
    return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
}

@interface VideoCameraCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;

    dispatch_queue_t _frameQueue;
    AVCaptureDevice *_currentDevice;
    BOOL _hasRetriedOnFatalError;
    BOOL _isRunning;
    BOOL _willBeRunning;

    AVCaptureVideoDataOutput *_videoDataOutput;
    AVCaptureSession *_captureSession;
    FourCharCode _preferredOutputPixelFormat;
    FourCharCode _outputPixelFormat;
    RTCVideoRotation _rotation;
    UIDeviceOrientation _orientation;
}
@end

@implementation VideoCameraCapturer

- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source {
    self = [super init];
    if (self != nil) {
        _source = source;
        if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
            return nil;
        }
        NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
        _orientation = UIDeviceOrientationPortrait;
        _rotation = RTCVideoRotation_90;
        [center addObserver:self
                   selector:@selector(deviceOrientationDidChange:)
                       name:UIDeviceOrientationDidChangeNotification
                     object:nil];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionInterruption:)
                       name:AVCaptureSessionWasInterruptedNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionInterruptionEnded:)
                       name:AVCaptureSessionInterruptionEndedNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleApplicationDidBecomeActive:)
                       name:UIApplicationDidBecomeActiveNotification
                     object:[UIApplication sharedApplication]];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionRuntimeError:)
                       name:AVCaptureSessionRuntimeErrorNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionDidStartRunning:)
                       name:AVCaptureSessionDidStartRunningNotification
                     object:_captureSession];
        [center addObserver:self
                   selector:@selector(handleCaptureSessionDidStopRunning:)
                       name:AVCaptureSessionDidStopRunningNotification
                     object:_captureSession];
    }
    return self;
}

- (void)dealloc {
    NSAssert(!_willBeRunning, @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

+ (NSArray<AVCaptureDevice *> *)captureDevices {
    AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
                              mediaType:AVMediaTypeVideo
                               position:AVCaptureDevicePositionUnspecified];
    return session.devices;
}

+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
    // Support opening the device in any format. We make sure it's converted to a format we
    // can handle, if needed, in the method `-setupVideoDataOutput`.
    return device.formats;
}

- (FourCharCode)preferredOutputPixelFormat {
    return _preferredOutputPixelFormat;
}

- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps {
    [self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
}

- (void)stopCapture {
    [self stopCaptureWithCompletionHandler:nil];
}

- (void)startCaptureWithDevice:(AVCaptureDevice *)device
                        format:(AVCaptureDeviceFormat *)format
                           fps:(NSInteger)fps
             completionHandler:(nullable void (^)(NSError *))completionHandler {
    _willBeRunning = YES;
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{
        RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);

        dispatch_async(dispatch_get_main_queue(), ^{
            [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        });

        _currentDevice = device;

        NSError *error = nil;
        if (![_currentDevice lockForConfiguration:&error]) {
            RTCLogError(@"Failed to lock device %@. Error: %@",
                        _currentDevice,
                        error.userInfo);
            if (completionHandler) {
                completionHandler(error);
            }
            _willBeRunning = NO;
            return;
        }
        [self reconfigureCaptureSessionInput];
        [self updateOrientation];
        [self updateDeviceCaptureFormat:format fps:fps];
        [self updateVideoDataOutputPixelFormat:format];
        [_captureSession startRunning];
        [_currentDevice unlockForConfiguration];
        _isRunning = YES;
        if (completionHandler) {
            completionHandler(nil);
        }
    }];
}

- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
    _willBeRunning = NO;
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{
        RTCLogInfo("Stop");
        _currentDevice = nil;
        for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
            [_captureSession removeInput:oldInput];
        }
        [_captureSession stopRunning];

        dispatch_async(dispatch_get_main_queue(), ^{
            [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
        });
        _isRunning = NO;
        if (completionHandler) {
            completionHandler();
        }
    }];
}

#pragma mark iOS notifications

#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        [self updateOrientation];
    }];
}
#endif

#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
    NSParameterAssert(captureOutput == _videoDataOutput);

    if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
        !CMSampleBufferDataIsReady(sampleBuffer)) {
        return;
    }

    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == nil) {
        return;
    }

    // Default to portrait orientation on iPhone.
    BOOL usingFrontCamera = NO;
    // Check the image's EXIF for the camera the image came from as the image could have been
    // delayed as we set alwaysDiscardsLateVideoFrames to NO.
    AVCaptureDevicePosition cameraPosition =
        [AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
    if (cameraPosition != AVCaptureDevicePositionUnspecified) {
        usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
    } else {
        AVCaptureDeviceInput *deviceInput =
            (AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
        usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
    }
    switch (_orientation) {
        case UIDeviceOrientationPortrait:
            _rotation = RTCVideoRotation_90;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            _rotation = RTCVideoRotation_270;
            break;
        case UIDeviceOrientationLandscapeLeft:
            _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
            break;
        case UIDeviceOrientationLandscapeRight:
            _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
            break;
        case UIDeviceOrientationFaceUp:
        case UIDeviceOrientationFaceDown:
        case UIDeviceOrientationUnknown:
            // Ignore.
            break;
    }

    RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
    int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
        kNanosecondsPerSecond;
    RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                                             rotation:_rotation
                                                          timeStampNs:timeStampNs];
    getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
         fromConnection:(AVCaptureConnection *)connection {
    NSString *droppedReason =
        (__bridge NSString *)CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
    RTCLogError(@"Dropped sample buffer. Reason: %@", droppedReason);
}

#pragma mark - AVCaptureSession notifications

- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
    NSString *reasonString = nil;
    NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
    if (reason) {
        switch (reason.intValue) {
            case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
                reasonString = @"VideoDeviceNotAvailableInBackground";
                break;
            case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
                reasonString = @"AudioDeviceInUseByAnotherClient";
                break;
            case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
                reasonString = @"VideoDeviceInUseByAnotherClient";
                break;
            case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
                reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
                break;
        }
    }
    RTCLog(@"Capture session interrupted: %@", reasonString);
}

- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
    RTCLog(@"Capture session interruption ended.");
}

- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
    NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
    RTCLogError(@"Capture session runtime error: %@", error);

    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        if (error.code == AVErrorMediaServicesWereReset) {
            [self handleNonFatalError];
        } else {
            [self handleFatalError];
        }
    }];
}

- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
    RTCLog(@"Capture session started.");

    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        // If we successfully restarted after an unknown error,
        // allow future retries on fatal errors.
        _hasRetriedOnFatalError = NO;
    }];
}

- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
    RTCLog(@"Capture session stopped.");
}

- (void)handleFatalError {
    [RTCDispatcher
        dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                      block:^{
        if (!_hasRetriedOnFatalError) {
            RTCLogWarning(@"Attempting to recover from fatal capture error.");
            [self handleNonFatalError];
            _hasRetriedOnFatalError = YES;
        } else {
            RTCLogError(@"Previous fatal error recovery failed.");
        }
    }];
}

- (void)handleNonFatalError {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        RTCLog(@"Restarting capture session after error.");
        if (_isRunning) {
            [_captureSession startRunning];
        }
    }];
}

#pragma mark - UIApplication notifications

- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
    [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                 block:^{
        if (_isRunning && !_captureSession.isRunning) {
            RTCLog(@"Restarting capture session on active.");
            [_captureSession startRunning];
        }
    }];
}

#pragma mark - Private

- (dispatch_queue_t)frameQueue {
    if (!_frameQueue) {
        _frameQueue =
            dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
        dispatch_set_target_queue(_frameQueue,
                                  dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
    }
    return _frameQueue;
}

- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
    NSAssert(_captureSession == nil, @"Setup capture session called twice.");
    _captureSession = captureSession;
    _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
    _captureSession.usesApplicationAudioSession = NO;
    [self setupVideoDataOutput];
    // Add the output.
    if (![_captureSession canAddOutput:_videoDataOutput]) {
        RTCLogError(@"Video data output unsupported.");
        return NO;
    }
    [_captureSession addOutput:_videoDataOutput];

    return YES;
}

- (void)setupVideoDataOutput {
    NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

    // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
    // device with the most efficient output format first. Find the first format that we support.
    NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
    NSMutableOrderedSet *availablePixelFormats =
        [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
    [availablePixelFormats intersectSet:supportedPixelFormats];
    NSNumber *pixelFormat = availablePixelFormats.firstObject;
    NSAssert(pixelFormat, @"Output device has no supported formats.");

    _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
    _outputPixelFormat = _preferredOutputPixelFormat;
    videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
    _videoDataOutput = videoDataOutput;
}

- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
    FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
    if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
        mediaSubType = _preferredOutputPixelFormat;
    }

    if (mediaSubType != _outputPixelFormat) {
        _outputPixelFormat = mediaSubType;
        _videoDataOutput.videoSettings =
            @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
    }
}

#pragma mark - Private, called inside capture queue

- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
    NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
             @"updateDeviceCaptureFormat must be called on the capture queue.");
    @try {
        _currentDevice.activeFormat = format;
        _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)fps);
    } @catch (NSException *exception) {
        RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
        return;
    }
}

- (void)reconfigureCaptureSessionInput {
    NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
             @"reconfigureCaptureSessionInput must be called on the capture queue.");
    NSError *error = nil;
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
    if (!input) {
        RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
        return;
    }
    [_captureSession beginConfiguration];
    for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
        [_captureSession removeInput:oldInput];
    }
    if ([_captureSession canAddInput:input]) {
        [_captureSession addInput:input];
    } else {
        RTCLogError(@"Cannot add camera as an input to the session.");
    }
    [_captureSession commitConfiguration];
}

- (void)updateOrientation {
    NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
             @"updateOrientation must be called on the capture queue.");
    _orientation = [UIDevice currentDevice].orientation;
}

@end

View File

@@ -0,0 +1,24 @@
#ifndef VIDEOMETALVIEW_H
#define VIDEOMETALVIEW_H

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "api/media_stream_interface.h"

@class RTCVideoFrame;

@interface VideoMetalView : UIView

@property(nonatomic) UIViewContentMode videoContentMode;
@property(nonatomic, getter=isEnabled) BOOL enabled;
@property(nonatomic, nullable) NSValue* rotationOverride;

- (void)setSize:(CGSize)size;
- (void)renderFrame:(nullable RTCVideoFrame *)frame;

- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track;

@end

#endif

View File

@ -0,0 +1,274 @@
#import "VideoMetalView.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "sdk/objc/native/api/video_frame.h"
#import "api/video/video_sink_interface.h"
#import "api/media_stream_interface.h"
#import "RTCMTLI420Renderer.h"
#import "RTCMTLNV12Renderer.h"
#import "RTCMTLRGBRenderer.h"
#define MTKViewClass NSClassFromString(@"MTKView")
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
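// Bridges the C++ webrtc::VideoSinkInterface to the Objective-C view:
// converts each native frame, tracks rotation-aware size changes, and
// forwards frames to the view for rendering.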
class VideoRendererAdapterImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRendererAdapterImpl(VideoMetalView *adapter) {
adapter_ = adapter;
size_ = CGSizeZero;
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
CGSize current_size = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);
if (!CGSizeEqualToSize(size_, current_size)) {
size_ = current_size;
[adapter_ setSize:size_];
}
[adapter_ renderFrame:videoFrame];
}
private:
__weak VideoMetalView *adapter_;
CGSize size_;
};
@interface VideoMetalView () <MTKViewDelegate> {
RTCMTLI420Renderer *_rendererI420;
RTCMTLNV12Renderer *_rendererNV12;
RTCMTLRGBRenderer *_rendererRGB;
MTKView *_metalView;
RTCVideoFrame *_videoFrame;
CGSize _videoFrameSize;
int64_t _lastFrameTimeNs;
std::unique_ptr<VideoRendererAdapterImpl> _sink;
}
@end
@implementation VideoMetalView
- (instancetype)initWithFrame:(CGRect)frameRect {
self = [super initWithFrame:frameRect];
if (self) {
[self configure];
_sink.reset(new VideoRendererAdapterImpl(self));
}
return self;
}
- (BOOL)isEnabled {
return !_metalView.paused;
}
- (void)setEnabled:(BOOL)enabled {
_metalView.paused = !enabled;
}
- (UIViewContentMode)videoContentMode {
return _metalView.contentMode;
}
- (void)setVideoContentMode:(UIViewContentMode)mode {
_metalView.contentMode = mode;
}
#pragma mark - Private
+ (BOOL)isMetalAvailable {
return MTLCreateSystemDefaultDevice() != nil;
}
+ (MTKView *)createMetalView:(CGRect)frame {
return [[MTKViewClass alloc] initWithFrame:frame];
}
+ (RTCMTLNV12Renderer *)createNV12Renderer {
return [[RTCMTLNV12RendererClass alloc] init];
}
+ (RTCMTLI420Renderer *)createI420Renderer {
return [[RTCMTLI420RendererClass alloc] init];
}
+ (RTCMTLRGBRenderer *)createRGBRenderer {
    return [[RTCMTLRGBRendererClass alloc] init];
}
- (void)configure {
    NSAssert([VideoMetalView isMetalAvailable], @"Metal not available on this device");
_metalView = [VideoMetalView createMetalView:self.bounds];
_metalView.delegate = self;
_metalView.contentMode = UIViewContentModeScaleAspectFill;
[self addSubview:_metalView];
_videoFrameSize = CGSizeZero;
}
- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
[super setMultipleTouchEnabled:multipleTouchEnabled];
_metalView.multipleTouchEnabled = multipleTouchEnabled;
}
- (void)layoutSubviews {
[super layoutSubviews];
CGRect bounds = self.bounds;
_metalView.frame = bounds;
if (!CGSizeEqualToSize(_videoFrameSize, CGSizeZero)) {
_metalView.drawableSize = [self drawableSize];
} else {
_metalView.drawableSize = bounds.size;
}
}
#pragma mark - MTKViewDelegate methods
- (void)drawInMTKView:(nonnull MTKView *)view {
NSAssert(view == _metalView, @"Receiving draw callbacks from foreign instance.");
RTCVideoFrame *videoFrame = _videoFrame;
// Skip rendering if we've already rendered this frame.
if (!videoFrame || videoFrame.timeStampNs == _lastFrameTimeNs) {
return;
}
if (CGRectIsEmpty(view.bounds)) {
return;
}
RTCMTLRenderer *renderer;
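    // Lazily create a renderer matched to the buffer: RGB for BGRA/ARGB
    // CVPixelBuffers, NV12 for other CVPixelBuffers, I420 for everything else.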
if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
if (!_rendererRGB) {
_rendererRGB = [VideoMetalView createRGBRenderer];
if (![_rendererRGB addRenderingDestination:_metalView]) {
_rendererRGB = nil;
RTCLogError(@"Failed to create RGB renderer");
return;
}
}
renderer = _rendererRGB;
} else {
if (!_rendererNV12) {
_rendererNV12 = [VideoMetalView createNV12Renderer];
if (![_rendererNV12 addRenderingDestination:_metalView]) {
_rendererNV12 = nil;
RTCLogError(@"Failed to create NV12 renderer");
return;
}
}
renderer = _rendererNV12;
}
} else {
if (!_rendererI420) {
_rendererI420 = [VideoMetalView createI420Renderer];
if (![_rendererI420 addRenderingDestination:_metalView]) {
_rendererI420 = nil;
RTCLogError(@"Failed to create I420 renderer");
return;
}
}
renderer = _rendererI420;
}
renderer.rotationOverride = _rotationOverride;
[renderer drawFrame:videoFrame];
_lastFrameTimeNs = videoFrame.timeStampNs;
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
#pragma mark -
- (void)setRotationOverride:(NSValue *)rotationOverride {
_rotationOverride = rotationOverride;
_metalView.drawableSize = [self drawableSize];
[self setNeedsLayout];
}
- (RTCVideoRotation)frameRotation {
if (_rotationOverride) {
RTCVideoRotation rotation;
if (@available(iOS 11, *)) {
[_rotationOverride getValue:&rotation size:sizeof(rotation)];
} else {
[_rotationOverride getValue:&rotation];
}
return rotation;
}
return _videoFrame.rotation;
}
- (CGSize)drawableSize {
// Flip width/height if the rotations are not the same.
CGSize videoFrameSize = _videoFrameSize;
RTCVideoRotation frameRotation = [self frameRotation];
BOOL useLandscape =
(frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
BOOL sizeIsLandscape = (_videoFrame.rotation == RTCVideoRotation_0) ||
(_videoFrame.rotation == RTCVideoRotation_180);
if (useLandscape == sizeIsLandscape) {
return videoFrameSize;
} else {
return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
}
}
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
__weak VideoMetalView *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
__strong VideoMetalView *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
strongSelf->_videoFrameSize = size;
CGSize drawableSize = [strongSelf drawableSize];
strongSelf->_metalView.drawableSize = drawableSize;
[strongSelf setNeedsLayout];
//[strongSelf.delegate videoView:self didChangeVideoSize:size];
});
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
if (!self.isEnabled) {
return;
}
if (frame == nil) {
RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
return;
}
_videoFrame = frame;
}
- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track {
track->AddOrUpdateSink(_sink.get(), rtc::VideoSinkWants());
}
@end
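
Since -frameRotation unpacks rotationOverride with -getValue:, callers are expected to box an RTCVideoRotation into the NSValue. A sketch follows; the videoView variable and the chosen rotation are illustrative.

// Illustrative: pin rendering to a fixed rotation regardless of the
// incoming frame's own rotation metadata.
RTCVideoRotation rotation = RTCVideoRotation_90;  // example value
videoView.rotationOverride = [NSValue valueWithBytes:&rotation
                                            objCType:@encode(RTCVideoRotation)];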

View File

@ -1,7 +1,7 @@
load("//Config:buck_rule_macros.bzl", "static_library")

webrtc_lib_flags = [
-    "-lwebrtc"
+    "-lframework_objc_static"
]

genrule(
@ -42,7 +42,7 @@ genrule(
    sh $SRCDIR/build-webrtc-buck.sh "$BUILD_DIR" $BUILD_ARCH
    mkdir -p "$OUT"
-    cp "$BUILD_DIR/webrtc-ios/src/out/$OUT_DIR/obj/libwebrtc.a" "$OUT/"
+    cp "$BUILD_DIR/webrtc-ios/src/out/$OUT_DIR/obj/sdk/libframework_objc_static.a" "$OUT/"
    """,
    out = "libwebrtc",
    visibility = ["PUBLIC"]

View File

@ -27,7 +27,7 @@ if [ "$ARCH" == "x64" ]; then
OUT_DIR="ios_sim" OUT_DIR="ios_sim"
fi fi
buildtools/mac/gn gen out/$OUT_DIR --args="use_xcode_clang=true "" target_cpu=\"$ARCH\""' target_os="ios" is_debug=false is_component_build=false rtc_include_tests=false use_rtti=true rtc_use_x11=false use_custom_libcxx=false use_custom_libcxx_for_host=false rtc_include_builtin_video_codecs=false rtc_build_ssl=false rtc_build_examples=false rtc_build_tools=false ios_deployment_target="9.0" ios_enable_code_signing=false is_unsafe_developer_build=false rtc_enable_protobuf=false rtc_include_builtin_video_codecs=false rtc_use_gtk=false rtc_use_metal_rendering=false rtc_ssl_root="//openssl"' buildtools/mac/gn gen out/$OUT_DIR --args="use_xcode_clang=true "" target_cpu=\"$ARCH\""' target_os="ios" is_debug=false is_component_build=false rtc_include_tests=false use_rtti=true rtc_use_x11=false use_custom_libcxx=false use_custom_libcxx_for_host=false rtc_include_builtin_video_codecs=false rtc_build_ssl=false rtc_build_examples=false rtc_build_tools=false ios_deployment_target="9.0" ios_enable_code_signing=false is_unsafe_developer_build=false rtc_enable_protobuf=false rtc_include_builtin_video_codecs=false rtc_use_gtk=false rtc_use_metal_rendering=true rtc_ssl_root="//openssl"'
ninja -C out/$OUT_DIR webrtc ninja -C out/$OUT_DIR framework_objc_static
popd popd