Video call improvements

Ali 2020-07-10 19:57:40 +04:00
parent e1ac3c98df
commit eee4036987
50 changed files with 454 additions and 17960 deletions

View File

@@ -46,9 +46,10 @@ public struct PresentationCallState: Equatable {
     public enum VideoState: Equatable {
         case notAvailable
-        case available(Bool)
+        case possible
+        case outgoingRequested
+        case incomingRequested
         case active
-        case activeOutgoing
     }
     
     public enum RemoteVideoState: Equatable {
@@ -87,6 +88,7 @@ public protocol PresentationCall: class {
     var peerId: PeerId { get }
     var isOutgoing: Bool { get }
     var isVideo: Bool { get }
+    var isVideoPossible: Bool { get }
     var peer: Peer? { get }
     
     var state: Signal<PresentationCallState, NoError> { get }
@@ -103,7 +105,7 @@ public protocol PresentationCall: class {
     func toggleIsMuted()
     func setIsMuted(_ value: Bool)
-    func setEnableVideo(_ value: Bool)
+    func requestVideo()
     func setOutgoingVideoIsPaused(_ isPaused: Bool)
     func switchVideoCamera()
     func setCurrentAudioOutput(_ output: AudioSessionOutput)
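
Taken together, the hunks above replace the old available(Bool)/activeOutgoing flags with an explicit request/answer handshake, and the setEnableVideo(_:) setter with a single requestVideo() entry point. A minimal sketch of what each new state appears to represent, inferred from how the states are used in the rest of this diff (the helper itself is not part of the commit):

    // Hypothetical helper, for illustration only.
    func describe(_ state: PresentationCallState.VideoState) -> String {
        switch state {
        case .notAvailable:
            return "video cannot be used; the legacy audio-only UI is shown"
        case .possible:
            return "no video yet, but either side may still request it"
        case .outgoingRequested:
            return "the local side asked for video and awaits the remote answer"
        case .incomingRequested:
            return "the remote side asked for video"
        case .active:
            return "video is enabled"
        }
    }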

View File

@@ -629,7 +629,7 @@ private func debugControllerEntries(presentationData: PresentationData, loggingS
     entries.append(.knockoutWallpaper(presentationData.theme, experimentalSettings.knockoutWallpaper))
     entries.append(.alternativeFolderTabs(experimentalSettings.foldersTabAtBottom))
     entries.append(.playerEmbedding(experimentalSettings.playerEmbedding))
-    //entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
+    entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
     entries.append(.videoCalls(experimentalSettings.videoCalls))
     entries.append(.videoCallsInfo(presentationData.theme, "Enables experimental transmission of electromagnetic radiation synchronized with pressure waves. Needs to be enabled on both sides."))

View File

@@ -134,7 +134,7 @@ public final class CallController: ViewController {
     }
     
     override public func loadDisplayNode() {
-        if self.call.isVideo {
+        if self.call.isVideoPossible {
             self.displayNode = CallControllerNode(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)
         } else {
             self.displayNode = LegacyCallControllerNode(sharedContext: self.sharedContext, account: self.account, presentationData: self.presentationData, statusBar: self.statusBar, debugInfo: self.call.debugInfo(), shouldStayHiddenUntilConnection: !self.call.isOutgoing && self.call.isIntegratedWithCallKit, easyDebugAccess: self.easyDebugAccess, call: self.call)

View File

@@ -17,7 +17,9 @@ enum CallControllerButtonsSpeakerMode {
 enum CallControllerButtonsMode: Equatable {
     enum VideoState: Equatable {
         case notAvailable
-        case available(Bool)
+        case possible
+        case outgoingRequested
+        case incomingRequested
         case active
     }
@@ -147,9 +149,42 @@ final class CallControllerButtonsNode: ASDisplayNode {
         let height: CGFloat
-        var buttons: [PlacedButton] = []
+        
+        let speakerMode: CallControllerButtonsSpeakerMode
+        var videoState: CallControllerButtonsMode.VideoState
         switch mode {
-        case .incoming(let speakerMode, let videoState), .outgoingRinging(let speakerMode, let videoState):
+        case .incoming(let speakerModeValue, let videoStateValue), .outgoingRinging(let speakerModeValue, let videoStateValue), .active(let speakerModeValue, let videoStateValue):
+            speakerMode = speakerModeValue
+            videoState = videoStateValue
+        }
+        
+        enum MappedState {
+            case incomingRinging
+            case outgoingRinging
+            case active
+        }
+        let mappedState: MappedState
+        switch mode {
+        case .incoming:
+            mappedState = .incomingRinging
+        case .outgoingRinging:
+            mappedState = .outgoingRinging
+        case let .active(_, videoStateValue):
+            switch videoStateValue {
+            case .incomingRequested:
+                mappedState = .incomingRinging
+                videoState = .outgoingRequested
+            case .outgoingRequested:
+                mappedState = .outgoingRinging
+                videoState = .outgoingRequested
+            case .active, .possible, .notAvailable:
+                mappedState = .active
+            }
+        }
+        
+        var buttons: [PlacedButton] = []
+        switch mappedState {
+        case .incomingRinging, .outgoingRinging:
             var topButtons: [ButtonDescription] = []
             var bottomButtons: [ButtonDescription] = []
@ -166,8 +201,14 @@ final class CallControllerButtonsNode: ASDisplayNode {
} }
switch videoState { switch videoState {
case .active, .available: case .active, .possible, .incomingRequested, .outgoingRequested:
topButtons.append(.enableCamera(!self.isCameraPaused)) let isCameraActive: Bool
if case .possible = videoState {
isCameraActive = false
} else {
isCameraActive = !self.isCameraPaused
}
topButtons.append(.enableCamera(isCameraActive))
topButtons.append(.mute(self.isMuted)) topButtons.append(.mute(self.isMuted))
topButtons.append(.switchCamera) topButtons.append(.switchCamera)
case .notAvailable: case .notAvailable:
@@ -185,7 +226,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
                 topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
             }
             
-            if case .incoming = mode {
+            if case .incomingRinging = mappedState {
                 bottomButtons.append(.end(.decline))
                 bottomButtons.append(.accept)
             } else {
@ -203,7 +244,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
} }
height = smallButtonSize + topBottomSpacing + largeButtonSize + max(bottomInset + 32.0, 46.0) height = smallButtonSize + topBottomSpacing + largeButtonSize + max(bottomInset + 32.0, 46.0)
case let .active(speakerMode, videoState): case .active:
var topButtons: [ButtonDescription] = [] var topButtons: [ButtonDescription] = []
let soundOutput: ButtonDescription.SoundOutput let soundOutput: ButtonDescription.SoundOutput
@@ -219,8 +260,14 @@ final class CallControllerButtonsNode: ASDisplayNode {
             }
             
             switch videoState {
-            case .active, .available:
-                topButtons.append(.enableCamera(!self.isCameraPaused))
+            case .active, .incomingRequested, .outgoingRequested, .possible:
+                let isCameraActive: Bool
+                if case .possible = videoState {
+                    isCameraActive = false
+                } else {
+                    isCameraActive = !self.isCameraPaused
+                }
+                topButtons.append(.enableCamera(isCameraActive))
                 topButtons.append(.mute(isMuted))
                 topButtons.append(.switchCamera)
             case .notAvailable:
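
One detail worth calling out in the mapping hunk above: an .active call whose video state is .incomingRequested is rendered with the incoming-ringing button layout, so a mid-call video upgrade request reuses the familiar accept/decline UI. A standalone restatement of that mapping (names mirror the diff; this snippet is illustrative, not commit code):

    enum VideoState { case notAvailable, possible, outgoingRequested, incomingRequested, active }
    enum MappedState { case incomingRinging, outgoingRinging, active }

    func mapActiveCall(_ videoState: VideoState) -> MappedState {
        switch videoState {
        case .incomingRequested:
            return .incomingRinging // remote asked for video: show accept/decline
        case .outgoingRequested:
            return .outgoingRinging // we asked: show the outgoing-ringing layout
        case .active, .possible, .notAvailable:
            return .active
        }
    }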

View File

@@ -22,22 +22,41 @@ private func interpolate(from: CGFloat, to: CGFloat, value: CGFloat) -> CGFloat
     return (1.0 - value) * from + value * to
 }
 
-private final class IncomingVideoNode: ASDisplayNode {
+private final class CallVideoNode: ASDisplayNode {
+    private let videoTransformContainer: ASDisplayNode
     private let videoView: PresentationCallVideoView
     
     private var effectView: UIVisualEffectView?
     private var isBlurred: Bool = false
+    private var currentCornerRadius: CGFloat = 0.0
     
     private let isReadyUpdated: () -> Void
     private(set) var isReady: Bool = false
     private var isReadyTimer: SwiftSignalKit.Timer?
     
     init(videoView: PresentationCallVideoView, isReadyUpdated: @escaping () -> Void) {
-        self.videoView = videoView
         self.isReadyUpdated = isReadyUpdated
+        
+        self.videoTransformContainer = ASDisplayNode()
+        self.videoTransformContainer.clipsToBounds = true
+        self.videoView = videoView
+        self.videoView.view.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
         
         super.init()
         
-        self.view.addSubview(self.videoView.view)
+        self.videoTransformContainer.view.addSubview(self.videoView.view)
+        self.addSubnode(self.videoTransformContainer)
+        
+        self.videoView.setOnFirstFrameReceived { [weak self] in
+            guard let strongSelf = self else {
+                return
+            }
+            if !strongSelf.isReady {
+                strongSelf.isReady = true
+                strongSelf.isReadyTimer?.invalidate()
+                strongSelf.isReadyUpdated()
+            }
+        }
         
         self.isReadyTimer = SwiftSignalKit.Timer(timeout: 3.0, repeat: false, completion: { [weak self] in
             guard let strongSelf = self else {
@@ -49,89 +68,14 @@ private final class IncomingVideoNode: ASDisplayNode {
             }
         }, queue: .mainQueue())
         self.isReadyTimer?.start()
-        
-        videoView.setOnFirstFrameReceived { [weak self] in
-            Queue.mainQueue().async {
-                guard let strongSelf = self else {
-                    return
-                }
-                if !strongSelf.isReady {
-                    strongSelf.isReady = true
-                    strongSelf.isReadyTimer?.invalidate()
-                    strongSelf.isReadyUpdated()
-                }
-            }
-        }
     }
     
     deinit {
         self.isReadyTimer?.invalidate()
     }
     
-    func updateLayout(size: CGSize) {
-        self.videoView.view.frame = CGRect(origin: CGPoint(), size: size)
-    }
-    
-    func updateIsBlurred(isBlurred: Bool) {
-        if self.isBlurred == isBlurred {
-            return
-        }
-        self.isBlurred = isBlurred
-        
-        if isBlurred {
-            if self.effectView == nil {
-                let effectView = UIVisualEffectView()
-                self.effectView = effectView
-                effectView.frame = self.videoView.view.frame
-                self.view.addSubview(effectView)
-            }
-            UIView.animate(withDuration: 0.3, animations: {
-                self.effectView?.effect = UIBlurEffect(style: .dark)
-            })
-        } else if let effectView = self.effectView {
-            UIView.animate(withDuration: 0.3, animations: {
-                effectView.effect = nil
-            })
-        }
-    }
-}
-
-private final class OutgoingVideoNode: ASDisplayNode {
-    private let videoTransformContainer: ASDisplayNode
-    private let videoView: PresentationCallVideoView
-    private let buttonNode: HighlightTrackingButtonNode
-    
-    private var effectView: UIVisualEffectView?
-    private var isBlurred: Bool = false
-    private var currentCornerRadius: CGFloat = 0.0
-    
-    var tapped: (() -> Void)?
-    
-    init(videoView: PresentationCallVideoView) {
-        self.videoTransformContainer = ASDisplayNode()
-        self.videoTransformContainer.clipsToBounds = true
-        self.videoView = videoView
-        self.videoView.view.layer.transform = CATransform3DMakeScale(-1.0, 1.0, 1.0)
-        self.buttonNode = HighlightTrackingButtonNode()
-        
-        super.init()
-        
-        self.videoTransformContainer.view.addSubview(self.videoView.view)
-        self.addSubnode(self.videoTransformContainer)
-        //self.addSubnode(self.buttonNode)
-        
-        self.buttonNode.addTarget(self, action: #selector(self.buttonPressed), forControlEvents: .touchUpInside)
-    }
-    
-    @objc func buttonPressed() {
-        self.tapped?()
-    }
-    
     func updateLayout(size: CGSize, cornerRadius: CGFloat, transition: ContainedViewLayoutTransition) {
         let videoFrame = CGRect(origin: CGPoint(), size: size)
-        self.buttonNode.frame = videoFrame
         
         self.currentCornerRadius = cornerRadius
         
         let previousVideoFrame = self.videoTransformContainer.frame
@@ -168,8 +112,11 @@ private final class OutgoingVideoNode: ASDisplayNode {
                 self.effectView?.effect = UIBlurEffect(style: .dark)
             })
         } else if let effectView = self.effectView {
+            self.effectView = nil
             UIView.animate(withDuration: 0.3, animations: {
                 effectView.effect = nil
+            }, completion: { [weak effectView] _ in
+                effectView?.removeFromSuperview()
             })
         }
     }
@@ -200,11 +147,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
     private let imageNode: TransformImageNode
     private let dimNode: ASDisplayNode
-    private var incomingVideoNode: IncomingVideoNode?
+    
+    private var incomingVideoNodeValue: CallVideoNode?
     private var incomingVideoViewRequested: Bool = false
-    private var outgoingVideoNode: OutgoingVideoNode?
+    private var outgoingVideoNodeValue: CallVideoNode?
     private var outgoingVideoViewRequested: Bool = false
-    private var outgoingVideoExplicitelyFullscreen: Bool = false
+    
+    private var expandedVideoNode: CallVideoNode?
+    private var minimizedVideoNode: CallVideoNode?
+    private var disableAnimationForExpandedVideoOnce: Bool = false
+    
     private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight
     private let backButtonArrowNode: ASImageNode
     private let backButtonNode: HighlightableButtonNode
@@ -352,13 +304,17 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
             guard let strongSelf = self else {
                 return
             }
-            strongSelf.isVideoPaused = !strongSelf.isVideoPaused
-            strongSelf.outgoingVideoNode?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
-            strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
-            strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
-            
-            if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+            if strongSelf.outgoingVideoNodeValue == nil {
+                strongSelf.call.requestVideo()
+            } else {
+                strongSelf.isVideoPaused = !strongSelf.isVideoPaused
+                strongSelf.outgoingVideoNodeValue?.updateIsBlurred(isBlurred: strongSelf.isVideoPaused)
+                strongSelf.buttonsNode.isCameraPaused = strongSelf.isVideoPaused
+                strongSelf.setIsVideoPaused?(strongSelf.isVideoPaused)
+                
+                if let (layout, navigationBarHeight) = strongSelf.validLayout {
+                    strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+                }
             }
         }
@@ -432,7 +388,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                     return
                 }
                 if let incomingVideoView = incomingVideoView {
-                    let incomingVideoNode = IncomingVideoNode(videoView: incomingVideoView, isReadyUpdated: {
+                    let incomingVideoNode = CallVideoNode(videoView: incomingVideoView, isReadyUpdated: {
                         guard let strongSelf = self else {
                             return
                         }
@@ -440,7 +396,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                             strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
                         }
                     })
-                    strongSelf.incomingVideoNode = incomingVideoNode
+                    strongSelf.incomingVideoNodeValue = incomingVideoNode
+                    strongSelf.expandedVideoNode = incomingVideoNode
                     strongSelf.containerNode.insertSubnode(incomingVideoNode, aboveSubnode: strongSelf.dimNode)
                     if let (layout, navigationBarHeight) = strongSelf.validLayout {
                         strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.5, curve: .spring))
@@ -453,7 +410,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         }
        
         switch callState.videoState {
-        case .active, .activeOutgoing:
+        case .active, .outgoingRequested, .incomingRequested:
             if !self.outgoingVideoViewRequested {
                 self.outgoingVideoViewRequested = true
                 self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
@@ -471,25 +428,17 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                             strongSelf.setCurrentAudioOutput?(.speaker)
                         }
                     }
-                    let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView)
-                    strongSelf.outgoingVideoNode = outgoingVideoNode
-                    if let incomingVideoNode = strongSelf.incomingVideoNode {
-                        strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
+                    let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, isReadyUpdated: {})
+                    strongSelf.outgoingVideoNodeValue = outgoingVideoNode
+                    strongSelf.minimizedVideoNode = outgoingVideoNode
+                    if let expandedVideoNode = strongSelf.expandedVideoNode {
+                        strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: expandedVideoNode)
                     } else {
                         strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
                     }
                     if let (layout, navigationBarHeight) = strongSelf.validLayout {
                         strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
                     }
-                    /*outgoingVideoNode.tapped = {
-                        guard let strongSelf = self else {
-                            return
-                        }
-                        strongSelf.outgoingVideoExplicitelyFullscreen = !strongSelf.outgoingVideoExplicitelyFullscreen
-                        if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.4, curve: .spring))
-                        }
-                    }*/
                 }
             })
         }
@@ -497,8 +446,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
             break
         }
        
-        if let incomingVideoNode = self.incomingVideoNode {
-            incomingVideoNode.isHidden = !incomingVideoNode.isReady
+        if let incomingVideoNode = self.incomingVideoNodeValue {
             let isActive: Bool
             switch callState.remoteVideoState {
             case .inactive:
@@ -643,12 +591,14 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         switch callState.videoState {
         case .notAvailable:
             mappedVideoState = .notAvailable
-        case .available:
-            mappedVideoState = .available(true)
+        case .possible:
+            mappedVideoState = .possible
+        case .outgoingRequested:
+            mappedVideoState = .outgoingRequested
+        case .incomingRequested:
+            mappedVideoState = .incomingRequested
         case .active:
             mappedVideoState = .active
-        case .activeOutgoing:
-            mappedVideoState = .active
         }
        
         switch callState.state {
@@ -717,16 +667,24 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         let buttonsHeight: CGFloat = self.buttonsNode.bounds.height
        
-        var insets = layout.insets(options: .statusBar)
-        insets.top += 44.0 + 8.0
-        insets.bottom = buttonsHeight + 27.0
-        insets.left = 20.0
-        insets.right = 20.0
+        var fullInsets = layout.insets(options: .statusBar)
+        
+        var cleanInsets = fullInsets
+        cleanInsets.bottom = layout.intrinsicInsets.bottom
+        cleanInsets.left = 20.0
+        cleanInsets.right = 20.0
+        
+        fullInsets.top += 44.0 + 8.0
+        fullInsets.bottom = buttonsHeight + 27.0
+        fullInsets.left = 20.0
+        fullInsets.right = 20.0
+        
+        var insets: UIEdgeInsets = self.isUIHidden ? cleanInsets : fullInsets
        
         let expandedInset: CGFloat = 16.0
        
-        insets.top = interpolate(from: expandedInset, to: insets.top, value: uiDisplayTransition)
-        insets.bottom = interpolate(from: expandedInset, to: insets.bottom, value: uiDisplayTransition)
+        insets.top = interpolate(from: expandedInset, to: insets.top, value: 1.0 - self.pictureInPictureTransitionFraction)
+        insets.bottom = interpolate(from: expandedInset, to: insets.bottom, value: 1.0 - self.pictureInPictureTransitionFraction)
         insets.left = interpolate(from: expandedInset, to: insets.left, value: 1.0 - self.pictureInPictureTransitionFraction)
         insets.right = interpolate(from: expandedInset, to: insets.right, value: 1.0 - self.pictureInPictureTransitionFraction)
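
With interpolate(from:to:value:) defined at the top of this file as (1.0 - value) * from + value * to, the rewritten inset code pins the layout to the 16-point expandedInset while the call is fully in picture-in-picture, and recovers the normal insets when it is fullscreen. A worked check (standalone sketch, not diff code):

    import CoreGraphics

    func interpolate(from: CGFloat, to: CGFloat, value: CGFloat) -> CGFloat {
        return (1.0 - value) * from + value * to
    }

    let expandedInset: CGFloat = 16.0
    let fullTop: CGFloat = 44.0 + 8.0
    // pictureInPictureTransitionFraction == 1.0 -> value == 0.0 -> expandedInset
    assert(interpolate(from: expandedInset, to: fullTop, value: 0.0) == 16.0)
    // pictureInPictureTransitionFraction == 0.0 -> value == 1.0 -> full top inset
    assert(interpolate(from: expandedInset, to: fullTop, value: 1.0) == 52.0)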
@@ -860,38 +818,30 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
        
         let fullscreenVideoFrame = CGRect(origin: CGPoint(), size: layout.size)
         let previewVideoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationBarHeight)
        
-        if let incomingVideoNode = self.incomingVideoNode {
-            var incomingVideoTransition = transition
-            if incomingVideoNode.frame.isEmpty {
-                incomingVideoTransition = .immediate
+        if let expandedVideoNode = self.expandedVideoNode {
+            var expandedVideoTransition = transition
+            if expandedVideoNode.frame.isEmpty || self.disableAnimationForExpandedVideoOnce {
+                expandedVideoTransition = .immediate
+                self.disableAnimationForExpandedVideoOnce = false
             }
-            if self.outgoingVideoExplicitelyFullscreen {
-                incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: previewVideoFrame)
-            } else {
-                incomingVideoTransition.updateFrame(node: incomingVideoNode, frame: fullscreenVideoFrame)
-            }
-            incomingVideoNode.updateLayout(size: incomingVideoNode.frame.size)
+            expandedVideoTransition.updateFrame(node: expandedVideoNode, frame: fullscreenVideoFrame)
+            expandedVideoNode.updateLayout(size: expandedVideoNode.frame.size, cornerRadius: 0.0, transition: expandedVideoTransition)
         }
-        if let outgoingVideoNode = self.outgoingVideoNode {
-            var outgoingVideoTransition = transition
-            if outgoingVideoNode.frame.isEmpty {
-                outgoingVideoTransition = .immediate
+        if let minimizedVideoNode = self.minimizedVideoNode {
+            var minimizedVideoTransition = transition
+            if minimizedVideoNode.frame.isEmpty {
+                minimizedVideoTransition = .immediate
             }
-            if let incomingVideoNode = self.incomingVideoNode, incomingVideoNode.isReady {
+            if let expandedVideoNode = self.expandedVideoNode, expandedVideoNode.isReady {
                 if self.minimizedVideoDraggingPosition == nil {
-                    if self.outgoingVideoExplicitelyFullscreen {
-                        outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: fullscreenVideoFrame)
-                    } else {
-                        outgoingVideoTransition.updateFrame(node: outgoingVideoNode, frame: previewVideoFrame)
-                    }
-                    outgoingVideoNode.updateLayout(size: outgoingVideoNode.frame.size, cornerRadius: interpolate(from: self.outgoingVideoExplicitelyFullscreen ? 0.0 : 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: outgoingVideoTransition)
+                    minimizedVideoTransition.updateFrame(node: minimizedVideoNode, frame: previewVideoFrame)
+                    minimizedVideoNode.updateLayout(size: minimizedVideoNode.frame.size, cornerRadius: interpolate(from: 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: minimizedVideoTransition)
                 }
             } else {
-                outgoingVideoNode.frame = fullscreenVideoFrame
-                outgoingVideoNode.updateLayout(size: layout.size, cornerRadius: 0.0, transition: outgoingVideoTransition)
+                minimizedVideoNode.frame = fullscreenVideoFrame
+                minimizedVideoNode.updateLayout(size: layout.size, cornerRadius: 0.0, transition: minimizedVideoTransition)
             }
         }
@@ -949,19 +899,32 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         } else if let _ = self.keyPreviewNode {
             self.backPressed()
         } else {
-            if self.incomingVideoNode != nil || self.outgoingVideoNode != nil {
-                var updated = false
-                if let callState = self.callState {
-                    switch callState.state {
-                    case .active, .connecting, .reconnecting:
-                        self.isUIHidden = !self.isUIHidden
-                        updated = true
-                    default:
-                        break
+            if let expandedVideoNode = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode {
+                let point = recognizer.location(in: recognizer.view)
+                if minimizedVideoNode.frame.contains(point) {
+                    self.expandedVideoNode = minimizedVideoNode
+                    self.minimizedVideoNode = expandedVideoNode
+                    if let supernode = expandedVideoNode.supernode {
+                        supernode.insertSubnode(expandedVideoNode, aboveSubnode: minimizedVideoNode)
+                    }
+                    if let (layout, navigationBarHeight) = self.validLayout {
+                        self.disableAnimationForExpandedVideoOnce = true
+                        self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
+                    }
+                } else {
+                    var updated = false
+                    if let callState = self.callState {
+                        switch callState.state {
+                        case .active, .connecting, .reconnecting:
+                            self.isUIHidden = !self.isUIHidden
+                            updated = true
+                        default:
+                            break
+                        }
+                    }
+                    if updated, let (layout, navigationBarHeight) = self.validLayout {
+                        self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
                     }
-                }
-                if updated, let (layout, navigationBarHeight) = self.validLayout {
-                    self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
                 }
             } else {
                 let point = recognizer.location(in: recognizer.view)
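
The tap handler above replaces the old outgoingVideoExplicitelyFullscreen flag with a role swap between the two video nodes. A condensed, self-contained sketch of that logic (Node stands in for CallVideoNode and relayout for containerLayoutUpdated; these names are assumptions, not commit code):

    import CoreGraphics

    func handleTap<Node>(at point: CGPoint,
                         frameOf: (Node) -> CGRect,
                         expanded: inout Node?, minimized: inout Node?,
                         relayout: () -> Void) {
        guard let big = expanded, let small = minimized, frameOf(small).contains(point) else {
            return // taps outside the preview toggle the UI chrome instead
        }
        // The minimized stream becomes fullscreen and vice versa; the real diff
        // also reorders the subnodes and suppresses the expansion animation for
        // one layout pass via disableAnimationForExpandedVideoOnce.
        expanded = small
        minimized = big
        relayout()
    }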
@@ -1128,8 +1091,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
         switch recognizer.state {
         case .began:
             let location = recognizer.location(in: self.view)
-            if self.self.pictureInPictureTransitionFraction.isZero, let _ = self.incomingVideoNode, let outgoingVideoNode = self.outgoingVideoNode, outgoingVideoNode.frame.contains(location) {
-                self.minimizedVideoInitialPosition = outgoingVideoNode.position
+            if self.self.pictureInPictureTransitionFraction.isZero, let _ = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode, minimizedVideoNode.frame.contains(location) {
+                self.minimizedVideoInitialPosition = minimizedVideoNode.position
             } else {
                 self.minimizedVideoInitialPosition = nil
                 if !self.pictureInPictureTransitionFraction.isZero {
@@ -1139,11 +1102,11 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                 }
             }
         case .changed:
-            if let outgoingVideoNode = self.outgoingVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
+            if let minimizedVideoNode = self.minimizedVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
                 let translation = recognizer.translation(in: self.view)
                 let minimizedVideoDraggingPosition = CGPoint(x: minimizedVideoInitialPosition.x + translation.x, y: minimizedVideoInitialPosition.y + translation.y)
                 self.minimizedVideoDraggingPosition = minimizedVideoDraggingPosition
-                outgoingVideoNode.position = minimizedVideoDraggingPosition
+                minimizedVideoNode.position = minimizedVideoDraggingPosition
             } else {
                 switch self.pictureInPictureGestureState {
                 case .none:
@@ -1184,7 +1147,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                 }
             }
         case .cancelled, .ended:
-            if let outgoingVideoNode = self.outgoingVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition {
+            if let minimizedVideoNode = self.minimizedVideoNode, let _ = self.minimizedVideoInitialPosition, let minimizedVideoDraggingPosition = self.minimizedVideoDraggingPosition {
                 self.minimizedVideoInitialPosition = nil
                 self.minimizedVideoDraggingPosition = nil
@@ -1192,8 +1155,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
                     self.outgoingVideoNodeCorner = self.nodeLocationForPosition(layout: layout, position: minimizedVideoDraggingPosition, velocity: recognizer.velocity(in: self.view))
                    
                     let videoFrame = self.calculatePreviewVideoRect(layout: layout, navigationHeight: navigationHeight)
-                    outgoingVideoNode.frame = videoFrame
-                    outgoingVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil)
+                    minimizedVideoNode.frame = videoFrame
+                    minimizedVideoNode.layer.animateSpring(from: NSValue(cgPoint: CGPoint(x: minimizedVideoDraggingPosition.x - videoFrame.midX, y: minimizedVideoDraggingPosition.y - videoFrame.midY)), to: NSValue(cgPoint: CGPoint()), keyPath: "position", duration: 0.5, delay: 0.0, initialVelocity: 0.0, damping: 110.0, removeOnCompletion: true, additive: true, completion: nil)
                 }
             } else {
                 switch self.pictureInPictureGestureState {

View File

@@ -15,13 +15,7 @@ enum LegacyCallControllerButtonsSpeakerMode {
 }
 
 enum LegacyCallControllerButtonsMode: Equatable {
-    enum VideoState: Equatable {
-        case notAvailable
-        case available(Bool)
-        case active
-    }
-    
-    case active(speakerMode: LegacyCallControllerButtonsSpeakerMode, videoState: VideoState)
+    case active(speakerMode: LegacyCallControllerButtonsSpeakerMode)
     case incoming
 }
@@ -142,41 +136,27 @@ final class LegacyCallControllerButtonsNode: ASDisplayNode {
             for button in [self.muteButton, self.endButton, self.speakerButton, self.swichCameraButton] {
                 button.alpha = 0.0
             }
-        case let .active(speakerMode, videoState):
+        case let .active(speakerMode):
             for button in [self.muteButton] {
                 if animated && button.alpha.isZero {
                     button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
                 }
                 button.alpha = 1.0
             }
-            switch videoState {
-            case .active, .available:
-                for button in [self.speakerButton] {
-                    if animated && !button.alpha.isZero {
-                        button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
-                    }
-                    button.alpha = 0.0
-                }
-                for button in [self.swichCameraButton] {
-                    if animated && button.alpha.isZero {
-                        button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
-                    }
-                    button.alpha = 1.0
-                }
-            case .notAvailable:
-                for button in [self.swichCameraButton] {
-                    if animated && !button.alpha.isZero {
-                        button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
-                    }
-                    button.alpha = 0.0
-                }
-                for button in [self.speakerButton] {
-                    if animated && button.alpha.isZero {
-                        button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
-                    }
-                    button.alpha = 1.0
+            
+            for button in [self.swichCameraButton] {
+                if animated && !button.alpha.isZero {
+                    button.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
                 }
+                button.alpha = 0.0
+            }
+            
+            for button in [self.speakerButton] {
+                if animated && button.alpha.isZero {
+                    button.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
+                }
+                button.alpha = 1.0
             }
             
             var animatingAcceptButton = false
             if self.endButton.alpha.isZero {
                 if animated {

View File

@@ -229,10 +229,6 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
             self?.acceptCall?()
         }
         
-        self.buttonsNode.toggleVideo = { [weak self] in
-            self?.toggleVideo?()
-        }
-        
         self.buttonsNode.rotateCamera = { [weak self] in
             self?.call.switchVideoCamera()
         }
@@ -314,35 +310,11 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
                     }
                 })
             }
-            if !self.outgoingVideoViewRequested {
-                self.outgoingVideoViewRequested = true
-                self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
-                    guard let strongSelf = self else {
-                        return
-                    }
-                    if let outgoingVideoView = outgoingVideoView?.view {
-                        outgoingVideoView.backgroundColor = .black
-                        outgoingVideoView.clipsToBounds = true
-                        strongSelf.setCurrentAudioOutput?(.speaker)
-                        let outgoingVideoNode = OutgoingVideoNode(videoView: outgoingVideoView, switchCamera: {
-                            guard let strongSelf = self else {
-                                return
-                            }
-                            strongSelf.call.switchVideoCamera()
-                        })
-                        strongSelf.outgoingVideoNode = outgoingVideoNode
-                        if let incomingVideoNode = strongSelf.incomingVideoNode {
-                            strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: incomingVideoNode)
-                        } else {
-                            strongSelf.containerNode.insertSubnode(outgoingVideoNode, aboveSubnode: strongSelf.dimNode)
-                        }
-                        if let (layout, navigationBarHeight) = strongSelf.validLayout {
-                            strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
-                        }
-                    }
-                })
-            }
-        case .activeOutgoing:
+        default:
+            break
+        }
+        
+        switch callState.videoState {
+        case .active, .outgoingRequested:
             if !self.outgoingVideoViewRequested {
                 self.outgoingVideoViewRequested = true
                 self.call.makeOutgoingVideoView(completion: { [weak self] outgoingVideoView in
@@ -527,18 +499,7 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
                     mode = .none
                 }
             }
-            let mappedVideoState: LegacyCallControllerButtonsMode.VideoState
-            switch callState.videoState {
-            case .notAvailable:
-                mappedVideoState = .notAvailable
-            case .available:
-                mappedVideoState = .available(true)
-            case .active:
-                mappedVideoState = .active
-            case .activeOutgoing:
-                mappedVideoState = .active
-            }
-            self.buttonsNode.updateMode(.active(speakerMode: mode, videoState: mappedVideoState))
+            self.buttonsNode.updateMode(.active(speakerMode: mode))
         }
     }

View File

@@ -168,6 +168,7 @@ public final class PresentationCallImpl: PresentationCall {
     public let peerId: PeerId
     public let isOutgoing: Bool
     public var isVideo: Bool
+    public var isVideoPossible: Bool
     public let peer: Peer?
     
     private let serializedData: String?
@@ -236,7 +237,7 @@ public final class PresentationCallImpl: PresentationCall {
     private var videoCapturer: OngoingCallVideoCapturer?
     
-    init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, startWithVideo: Bool) {
+    init(account: Account, audioSession: ManagedAudioSession, callSessionManager: CallSessionManager, callKitIntegration: CallKitIntegration?, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, getDeviceAccessData: @escaping () -> (presentationData: PresentationData, present: (ViewController, Any?) -> Void, openSettings: () -> Void), initialState: CallSession?, internalId: CallSessionInternalId, peerId: PeerId, isOutgoing: Bool, peer: Peer?, proxyServer: ProxyServerSettings?, auxiliaryServers: [CallAuxiliaryServer], currentNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, startWithVideo: Bool, isVideoPossible: Bool) {
         self.account = account
         self.audioSession = audioSession
         self.callSessionManager = callSessionManager
@@ -261,11 +262,12 @@ public final class PresentationCallImpl: PresentationCall {
         self.peerId = peerId
         self.isOutgoing = isOutgoing
         self.isVideo = initialState?.type == .video
+        self.isVideoPossible = isVideoPossible
         self.peer = peer
         self.isVideo = startWithVideo
         if self.isVideo {
             self.videoCapturer = OngoingCallVideoCapturer()
-            self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .activeOutgoing, remoteVideoState: .inactive))
+            self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .outgoingRequested, remoteVideoState: .inactive))
         } else {
             self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive))
         }
@@ -436,12 +438,14 @@ public final class PresentationCallImpl: PresentationCall {
             switch callContextState.videoState {
             case .notAvailable:
                 mappedVideoState = .notAvailable
-            case let .available(enabled):
-                mappedVideoState = .available(enabled)
+            case .possible:
+                mappedVideoState = .possible
+            case .outgoingRequested:
+                mappedVideoState = .outgoingRequested
+            case .incomingRequested:
+                mappedVideoState = .incomingRequested
             case .active:
                 mappedVideoState = .active
-            case .activeOutgoing:
-                mappedVideoState = .activeOutgoing
             }
             switch callContextState.remoteVideoState {
             case .inactive:
@@ -451,7 +455,9 @@ public final class PresentationCallImpl: PresentationCall {
             }
         } else {
             if self.isVideo {
-                mappedVideoState = .activeOutgoing
+                mappedVideoState = .outgoingRequested
+            } else if self.isVideoPossible {
+                mappedVideoState = .possible
             } else {
                 mappedVideoState = .notAvailable
             }
@@ -729,8 +735,12 @@ public final class PresentationCallImpl: PresentationCall {
         self.ongoingContext?.setIsMuted(self.isMutedValue)
     }
     
-    public func setEnableVideo(_ value: Bool) {
-        self.ongoingContext?.setEnableVideo(value)
+    public func requestVideo() {
+        if self.videoCapturer == nil {
+            let videoCapturer = OngoingCallVideoCapturer()
+            self.videoCapturer = videoCapturer
+            self.ongoingContext?.requestVideo(videoCapturer)
+        }
     }
     
     public func setOutgoingVideoIsPaused(_ isPaused: Bool) {
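
requestVideo() is effectively idempotent: the capturer is created once, and the request is forwarded to the ongoing context only on that first call. A hedged caller-side sketch (enableCameraButtonTapped, hasLocalVideo, and isPaused are illustrative names, not part of the commit):

    func enableCameraButtonTapped(call: PresentationCall, hasLocalVideo: Bool, isPaused: Bool) {
        if !hasLocalVideo {
            // First tap: start the video handshake; state moves to .outgoingRequested.
            call.requestVideo()
        } else {
            // Later taps toggle the already-running camera.
            call.setOutgoingVideoIsPaused(!isPaused)
        }
    }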

View File

@@ -82,6 +82,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
     private let accountManager: AccountManager
     private let audioSession: ManagedAudioSession
     private let callKitIntegration: CallKitIntegration?
+    private var isVideoPossible: Bool
     
     private var currentCallValue: PresentationCallImpl?
     private var currentCall: PresentationCallImpl? {
@@ -124,6 +125,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
         self.getDeviceAccessData = getDeviceAccessData
         self.accountManager = accountManager
         self.audioSession = audioSession
+        self.isVideoPossible = enableVideoCalls
         self.isMediaPlaying = isMediaPlaying
         self.resumeMediaPlayback = resumeMediaPlayback
@@ -212,7 +214,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
         startCallImpl = { [weak self] account, uuid, handle, isVideo in
             if let strongSelf = self, let userId = Int32(handle) {
-                return strongSelf.startCall(account: account, peerId: PeerId(namespace: Namespaces.Peer.CloudUser, id: userId), isVideo: isVideo, internalId: uuid)
+                return strongSelf.startCall(account: account, peerId: PeerId(namespace: Namespaces.Peer.CloudUser, id: userId), isVideo: isVideo, isVideoPossible: strongSelf.isVideoPossible, internalId: uuid)
                 |> take(1)
                 |> map { result -> Bool in
                     return result
@@ -292,7 +294,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
                     let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
                     let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
-                    let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType, startWithVideo: firstState.2.isVideo)
+                    let call = PresentationCallImpl(account: firstState.0, audioSession: strongSelf.audioSession, callSessionManager: firstState.0.callSessionManager, callKitIntegration: enableCallKit ? callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings) : nil, serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: firstState.2.id, peerId: firstState.2.peerId, isOutgoing: false, peer: firstState.1, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: firstState.4, updatedNetworkType: firstState.0.networkType, startWithVideo: firstState.2.isVideo, isVideoPossible: strongSelf.isVideoPossible)
                     strongSelf.updateCurrentCall(call)
                     strongSelf.currentCallPromise.set(.single(call))
                     strongSelf.hasActiveCallsPromise.set(true)
@@ -318,6 +320,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
     }
     
     public func requestCall(account: Account, peerId: PeerId, isVideo: Bool, endCurrentIfAny: Bool) -> RequestCallResult {
+        let isVideoPossible = self.isVideoPossible
         if let call = self.currentCall, !endCurrentIfAny {
             return .alreadyInProgress(call.peerId)
         }
@@ -382,7 +385,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
                 guard let strongSelf = self else {
                     return
                 }
-                let _ = strongSelf.startCall(account: account, peerId: peerId, isVideo: isVideo).start()
+                let _ = strongSelf.startCall(account: account, peerId: peerId, isVideo: isVideo, isVideoPossible: isVideoPossible).start()
             }
             if let currentCall = self.currentCall {
                 self.startCallDisposable.set((currentCall.hangUp()
@@ -396,7 +399,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
         return .requested
     }
     
-    private func startCall(account: Account, peerId: PeerId, isVideo: Bool, internalId: CallSessionInternalId = CallSessionInternalId()) -> Signal<Bool, NoError> {
+    private func startCall(account: Account, peerId: PeerId, isVideo: Bool, isVideoPossible: Bool, internalId: CallSessionInternalId = CallSessionInternalId()) -> Signal<Bool, NoError> {
         let (presentationData, present, openSettings) = self.getDeviceAccessData()
         
         let accessEnabledSignal: Signal<Bool, NoError> = Signal { subscriber in
@@ -445,7 +448,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
             let autodownloadSettings = sharedData.entries[SharedDataKeys.autodownloadSettings] as? AutodownloadSettings ?? .defaultSettings
             let appConfiguration = preferences.values[PreferencesKeys.appConfiguration] as? AppConfiguration ?? AppConfiguration.defaultValue
-            let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType, startWithVideo: isVideo)
+            let call = PresentationCallImpl(account: account, audioSession: strongSelf.audioSession, callSessionManager: account.callSessionManager, callKitIntegration: callKitIntegrationIfEnabled(strongSelf.callKitIntegration, settings: strongSelf.callSettings), serializedData: configuration.serializedData, dataSaving: effectiveDataSaving(for: strongSelf.callSettings, autodownloadSettings: autodownloadSettings), derivedState: derivedState, getDeviceAccessData: strongSelf.getDeviceAccessData, initialState: nil, internalId: internalId, peerId: peerId, isOutgoing: true, peer: nil, proxyServer: strongSelf.proxyServer, auxiliaryServers: auxiliaryServers(appConfiguration: appConfiguration), currentNetworkType: currentNetworkType, updatedNetworkType: account.networkType, startWithVideo: isVideo, isVideoPossible: isVideoPossible)
             strongSelf.updateCurrentCall(call)
             strongSelf.currentCallPromise.set(.single(call))
             strongSelf.hasActiveCallsPromise.set(true)

View File

@@ -68,12 +68,13 @@ private struct ChatControllerNodeDerivedLayoutState {
 private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
     private let context: AccountContext
     private let backgroundNode: ASDisplayNode
+    private let statusBarBackgroundNode: ASDisplayNode
     private let videoNode: OverlayUniversalVideoNode
     private let disableInternalAnimationIn: Bool
     private let isUIHiddenUpdated: () -> Void
     private let unembedWhenPortrait: (OverlayMediaItemNode) -> Bool
-    private var validLayout: (CGSize, CGFloat, CGFloat)?
+    private var validLayout: (CGSize, CGFloat, CGFloat, CGFloat)?
     
     private let dismissed: () -> Void
     private let interactiveExtensionUpdated: (ContainedViewLayoutTransition) -> Void
@@ -83,6 +84,8 @@ private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
     private(set) var isUIHidden: Bool = false
     
+    var unembedOnLeave: Bool = true
+    
     init(context: AccountContext, videoNode: OverlayUniversalVideoNode, disableInternalAnimationIn: Bool, interactiveExtensionUpdated: @escaping (ContainedViewLayoutTransition) -> Void, dismissed: @escaping () -> Void, isUIHiddenUpdated: @escaping () -> Void, unembedWhenPortrait: @escaping (OverlayMediaItemNode) -> Bool) {
         self.dismissed = dismissed
         self.interactiveExtensionUpdated = interactiveExtensionUpdated
@@ -95,6 +98,9 @@ private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
         self.backgroundNode = ASDisplayNode()
         self.backgroundNode.backgroundColor = .black
         
+        self.statusBarBackgroundNode = ASDisplayNode()
+        self.statusBarBackgroundNode.backgroundColor = .black
+        
         self.videoNode = videoNode
         
         super.init()
@@ -102,6 +108,7 @@ private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
         self.clipsToBounds = true
         
         self.addSubnode(self.backgroundNode)
+        self.addSubnode(self.statusBarBackgroundNode)
         
         self.view.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:))))
@@ -151,12 +158,13 @@ private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
         return self.videoNode.content.dimensions.aspectFilled(CGSize(width: width, height: 16.0)).height
     }
     
-    func updateLayout(size: CGSize, topInset: CGFloat, interactiveExtension: CGFloat, transition: ContainedViewLayoutTransition, transitionSurface: ASDisplayNode?, navigationBar: NavigationBar?) {
+    func updateLayout(size: CGSize, actualHeight: CGFloat, topInset: CGFloat, interactiveExtension: CGFloat, transition: ContainedViewLayoutTransition, transitionSurface: ASDisplayNode?, navigationBar: NavigationBar?) {
         let isFirstTime = self.validLayout == nil
-        self.validLayout = (size, topInset, interactiveExtension)
+        self.validLayout = (size, actualHeight, topInset, interactiveExtension)
         
-        let videoFrame = CGRect(origin: CGPoint(x: 0.0, y: topInset + interactiveExtension), size: CGSize(width: size.width, height: size.height - topInset - interactiveExtension))
+        let videoSize = CGSize(width: size.width, height: actualHeight)
+        let videoFrame = CGRect(origin: CGPoint(x: 0.0, y: topInset + interactiveExtension + floor((size.height - actualHeight) / 2.0)), size: CGSize(width: videoSize.width, height: videoSize.height - topInset - interactiveExtension))
         
         if isFirstTime, let transitionSurface = transitionSurface {
             let sourceFrame = self.videoNode.view.convert(self.videoNode.bounds, to: transitionSurface.view)
@@ -204,16 +212,16 @@ private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
             self.videoNode.updateLayout(targetFrame.size, transition: nodeTransition)
             self.videoNode.frame = targetFrame
             
             if self.disableInternalAnimationIn {
-                self.addSubnode(self.videoNode)
+                self.insertSubnode(self.videoNode, belowSubnode: self.statusBarBackgroundNode)
             } else {
                 self.videoNode.layer.animateFrame(from: sourceFrame, to: targetFrame, duration: 0.25, timingFunction: kCAMediaTimingFunctionSpring, completion: { [weak self] _ in
                     guard let strongSelf = self else {
                         return
                     }
                     navigationBarContainer?.removeFromSuperview()
-                    strongSelf.addSubnode(strongSelf.videoNode)
-                    if let (size, topInset, interactiveExtension) = strongSelf.validLayout {
-                        strongSelf.updateLayout(size: size, topInset: topInset, interactiveExtension: interactiveExtension, transition: .immediate, transitionSurface: nil, navigationBar: nil)
+                    strongSelf.insertSubnode(strongSelf.videoNode, belowSubnode: strongSelf.statusBarBackgroundNode)
+                    if let (size, actualHeight, topInset, interactiveExtension) = strongSelf.validLayout {
+                        strongSelf.updateLayout(size: size, actualHeight: actualHeight, topInset: topInset, interactiveExtension: interactiveExtension, transition: .immediate, transitionSurface: nil, navigationBar: nil)
                     }
                 })
                 self.backgroundNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
@ -228,6 +236,7 @@ private final class ChatEmbeddedTitleContentNode: ASDisplayNode {
} }
} }
transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size)) transition.updateFrame(node: self.backgroundNode, frame: CGRect(origin: CGPoint(), size: size))
transition.updateFrame(node: self.statusBarBackgroundNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: size.width, height: topInset)))
if self.videoNode.supernode == self { if self.videoNode.supernode == self {
self.videoNode.layer.transform = CATransform3DIdentity self.videoNode.layer.transform = CATransform3DIdentity
@@ -430,6 +439,7 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
     var hasEmbeddedTitleContent: Bool {
         return self.embeddedTitleContentNode != nil
     }
+    private var didProcessExperimentalEmbedUrl: String?
     init(context: AccountContext, chatLocation: ChatLocation, subject: ChatControllerSubject?, controllerInteraction: ChatControllerInteraction, chatPresentationInterfaceState: ChatPresentationInterfaceState, automaticMediaDownloadSettings: MediaAutoDownloadSettings, navigationBar: NavigationBar?, controller: ChatControllerImpl?) {
         self.context = context
@@ -953,6 +963,64 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
         let statusBarHeight = layout.insets(options: [.statusBar]).top
+        func extractExperimentalPlaylistUrl(_ text: String) -> String? {
+            let prefix = "stream: "
+            if text.hasPrefix(prefix) {
+                if let url = URL(string: String(text[text.index(text.startIndex, offsetBy: prefix.count)...])), url.absoluteString.hasSuffix(".m3u8") {
+                    return url.absoluteString
+                } else {
+                    return nil
+                }
+            } else {
+                return nil
+            }
+        }
+        
+        if let pinnedMessage = self.chatPresentationInterfaceState.pinnedMessage, self.context.sharedContext.immediateExperimentalUISettings.playerEmbedding, self.context.sharedContext.immediateExperimentalUISettings.playlistPlayback, self.embeddedTitleContentNode == nil, let url = extractExperimentalPlaylistUrl(pinnedMessage.text), self.didProcessExperimentalEmbedUrl != url {
+            self.didProcessExperimentalEmbedUrl = url
+            let context = self.context
+            let baseNavigationController = self.controller?.navigationController as? NavigationController
+            let mediaManager = self.context.sharedContext.mediaManager
+            var expandImpl: (() -> Void)?
+            let content = PlatformVideoContent(id: .instantPage(MediaId(namespace: 0, id: 0), MediaId(namespace: 0, id: 0)), content: .url(url), streamVideo: true, loopVideo: false)
+            let overlayNode = OverlayUniversalVideoNode(postbox: self.context.account.postbox, audioSession: self.context.sharedContext.mediaManager.audioSession, manager: self.context.sharedContext.mediaManager.universalVideoManager, content: content, expand: {
+                expandImpl?()
+            }, close: { [weak mediaManager] in
+                mediaManager?.setOverlayVideoNode(nil)
+            })
+            self.embeddedTitleContentNode = ChatEmbeddedTitleContentNode(context: self.context, videoNode: overlayNode, disableInternalAnimationIn: true, interactiveExtensionUpdated: { [weak self] transition in
+                guard let strongSelf = self else {
+                    return
+                }
+                strongSelf.requestLayout(transition)
+            }, dismissed: { [weak self] in
+                guard let strongSelf = self else {
+                    return
+                }
+                if let embeddedTitleContentNode = strongSelf.embeddedTitleContentNode {
+                    strongSelf.embeddedTitleContentNode = nil
+                    strongSelf.dismissedEmbeddedTitleContentNode = embeddedTitleContentNode
+                    strongSelf.requestLayout(.animated(duration: 0.25, curve: .spring))
+                    strongSelf.updateHasEmbeddedTitleContent?()
+                }
+            }, isUIHiddenUpdated: { [weak self] in
+                self?.updateHasEmbeddedTitleContent?()
+            }, unembedWhenPortrait: { [weak self] itemNode in
+                guard let strongSelf = self, let itemNode = itemNode as? OverlayUniversalVideoNode else {
+                    return false
+                }
+                strongSelf.unembedWhenPortrait(contentNode: itemNode)
+                return true
+            })
+            self.embeddedTitleContentNode?.unembedOnLeave = false
+            self.updateHasEmbeddedTitleContent?()
+            overlayNode.controlPlay()
+        }
+        
+        if self.chatPresentationInterfaceState.pinnedMessage == nil {
+            self.didProcessExperimentalEmbedUrl = nil
+        }
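The two hunks above gate the experimental embedded player on a pinned message of the form "stream: <url>", where the URL must end in .m3u8. A minimal sketch of the trigger in isolation (the pinned-message text and URL are made-up examples, not part of the commit):

    // Hypothetical trigger: a pinned message whose text names an HLS playlist.
    let pinnedText = "stream: https://example.com/live/playlist.m3u8"
    if let url = extractExperimentalPlaylistUrl(pinnedText) {
        // url == "https://example.com/live/playlist.m3u8"; the chat node then
        // builds a PlatformVideoContent for it and embeds the player under the
        // navigation bar, as in the hunk above.
        print("would embed stream: \(url)")
    }

Note that both experimental flags (playerEmbedding and playlistPlayback) must be enabled, and didProcessExperimentalEmbedUrl prevents re-embedding the same URL on every layout pass; it is reset once the pinned message disappears.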
         if let embeddedTitleContentNode = self.embeddedTitleContentNode, embeddedTitleContentNode.supernode != nil {
             if layout.size.width > layout.size.height {
                 self.embeddedTitleContentNode = nil
@@ -963,7 +1031,15 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
         }
         if let embeddedTitleContentNode = self.embeddedTitleContentNode {
-            let embeddedSize = CGSize(width: layout.size.width, height: min(400.0, embeddedTitleContentNode.calculateHeight(width: layout.size.width)) + statusBarHeight + embeddedTitleContentNode.interactiveExtension)
+            let defaultEmbeddedSize = CGSize(width: layout.size.width, height: min(400.0, embeddedTitleContentNode.calculateHeight(width: layout.size.width)) + statusBarHeight + embeddedTitleContentNode.interactiveExtension)
+            let embeddedSize: CGSize
+            if let inputHeight = layout.inputHeight, inputHeight > 100.0 {
+                embeddedSize = CGSize(width: defaultEmbeddedSize.width, height: floor(defaultEmbeddedSize.height * 0.6))
+            } else {
+                embeddedSize = defaultEmbeddedSize
+            }
             if embeddedTitleContentNode.supernode == nil {
                 self.insertSubnode(embeddedTitleContentNode, aboveSubnode: self.navigationBarBackroundNode)
@@ -980,10 +1056,10 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
                 embeddedTitleContentNode.frame = CGRect(origin: CGPoint(), size: CGSize(width: layout.size.width, height: previousTopInset))
                 transition.updateFrame(node: embeddedTitleContentNode, frame: CGRect(origin: CGPoint(), size: embeddedSize))
-                embeddedTitleContentNode.updateLayout(size: embeddedSize, topInset: statusBarHeight, interactiveExtension: embeddedTitleContentNode.interactiveExtension, transition: .immediate, transitionSurface: self, navigationBar: self.navigationBar)
+                embeddedTitleContentNode.updateLayout(size: embeddedSize, actualHeight: defaultEmbeddedSize.height, topInset: statusBarHeight, interactiveExtension: embeddedTitleContentNode.interactiveExtension, transition: .immediate, transitionSurface: self, navigationBar: self.navigationBar)
             } else {
                 transition.updateFrame(node: embeddedTitleContentNode, frame: CGRect(origin: CGPoint(), size: embeddedSize))
-                embeddedTitleContentNode.updateLayout(size: embeddedSize, topInset: statusBarHeight, interactiveExtension: embeddedTitleContentNode.interactiveExtension, transition: transition, transitionSurface: self, navigationBar: self.navigationBar)
+                embeddedTitleContentNode.updateLayout(size: embeddedSize, actualHeight: defaultEmbeddedSize.height, topInset: statusBarHeight, interactiveExtension: embeddedTitleContentNode.interactiveExtension, transition: transition, transitionSurface: self, navigationBar: self.navigationBar)
             }
             insets.top += embeddedSize.height
@@ -2817,7 +2893,7 @@ class ChatControllerNode: ASDisplayNode, UIScrollViewDelegate {
     }
     func willNavigateAway() {
-        if let embeddedTitleContentNode = self.embeddedTitleContentNode {
+        if let embeddedTitleContentNode = self.embeddedTitleContentNode, embeddedTitleContentNode.unembedOnLeave {
             self.embeddedTitleContentNode = nil
             self.dismissedEmbeddedTitleContentNode = embeddedTitleContentNode
             embeddedTitleContentNode.expandIntoPiP()
@@ -164,4 +164,8 @@ public final class OverlayUniversalVideoNode: OverlayMediaItemNode {
             self.defaultExpand()
         }
     }
+    
+    public func controlPlay() {
+        self.videoNode.play()
+    }
 }
@@ -103,9 +103,10 @@ public struct OngoingCallContextState: Equatable {
     public enum VideoState: Equatable {
         case notAvailable
-        case available(Bool)
+        case possible
+        case outgoingRequested
+        case incomingRequested
         case active
-        case activeOutgoing
     }
     
     public enum RemoteVideoState: Equatable {
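The reworked VideoState replaces the old available(Bool)/activeOutgoing pair with an explicit request/accept handshake. A rough sketch of the transitions this commit implements further below (the helper function is illustrative only, not part of the codebase):

    // Illustrative summary of the new video state machine.
    enum VideoState: Equatable {
        case notAvailable
        case possible            // video supported, nothing requested yet
        case outgoingRequested   // local side asked the peer to enable video
        case incomingRequested   // remote side asked us
        case active              // both sides agreed, video is flowing
    }

    // Hypothetical transition helper mirroring the Manager logic below:
    // a request is only honored from .possible, and an outgoing request is
    // promoted to .active once the call first connects.
    func advance(_ state: VideoState, localRequest: Bool, remoteRequest: Bool, didConnect: Bool) -> VideoState {
        switch state {
        case .possible where localRequest:
            return .outgoingRequested        // Manager::requestVideo
        case .possible where remoteRequest:
            return .incomingRequested        // signaling mode == 1 received
        case .outgoingRequested where didConnect:
            return .active                   // promoted on first connect
        default:
            return state
        }
    }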
@@ -244,7 +245,7 @@ private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> Ongo
 private protocol OngoingCallThreadLocalContextProtocol: class {
     func nativeSetNetworkType(_ type: NetworkType)
     func nativeSetIsMuted(_ value: Bool)
-    func nativeSetVideoEnabled(_ value: Bool)
+    func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer)
     func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void)
     func nativeDebugInfo() -> String
     func nativeVersion() -> String
@@ -272,7 +273,7 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
         self.setIsMuted(value)
     }
     
-    func nativeSetVideoEnabled(_ value: Bool) {
+    func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer) {
     }
     
     func nativeSwitchVideoCamera() {
@@ -324,8 +325,8 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
         self.setIsMuted(value)
     }
     
-    func nativeSetVideoEnabled(_ value: Bool) {
-        self.setVideoEnabled(value)
+    func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer) {
+        self.requestVideo(capturer.impl)
     }
     
     func nativeDebugInfo() -> String {
@@ -341,32 +342,6 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
     }
 }
 
-/*extension OngoingCallThreadLocalContextWebrtcCustom: OngoingCallThreadLocalContextProtocol {
-    func nativeSetNetworkType(_ type: NetworkType) {
-        self.setNetworkType(ongoingNetworkTypeForTypeWebrtcCustom(type))
-    }
-    
-    func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void) {
-        self.stop(completion)
-    }
-    
-    func nativeSetIsMuted(_ value: Bool) {
-        self.setIsMuted(value)
-    }
-    
-    func nativeDebugInfo() -> String {
-        return self.debugInfo() ?? ""
-    }
-    
-    func nativeVersion() -> String {
-        return self.version() ?? ""
-    }
-    
-    func nativeGetDerivedState() -> Data {
-        return self.getDerivedState()
-    }
-}*/
 private extension OngoingCallContextState.State {
     init(_ state: OngoingCallState) {
         switch state {
@@ -401,23 +376,6 @@ private extension OngoingCallContextState.State {
         }
     }
 }
 
-/*private extension OngoingCallContextState {
-    init(_ state: OngoingCallStateWebrtcCustom) {
-        switch state {
-        case .initializing:
-            self = .initializing
-        case .connected:
-            self = .connected
-        case .failed:
-            self = .failed
-        case .reconnecting:
-            self = .reconnecting
-        default:
-            self = .failed
-        }
-    }
-}*/
 
 public protocol OngoingCallContextPresentationCallVideoView: UIView {
     func setOnFirstFrameReceived(_ onFirstFrameReceived: (() -> Void)?)
 }
@@ -485,7 +443,6 @@ public final class OngoingCallContext {
     public init(account: Account, callSessionManager: CallSessionManager, internalId: CallSessionInternalId, proxyServer: ProxyServerSettings?, auxiliaryServers: [AuxiliaryServer], initialNetworkType: NetworkType, updatedNetworkType: Signal<NetworkType, NoError>, serializedData: String?, dataSaving: VoiceCallDataSaving, derivedState: VoipDerivedState, key: Data, isOutgoing: Bool, video: OngoingCallVideoCapturer?, connections: CallSessionConnectionSet, maxLayer: Int32, version: String, allowP2P: Bool, audioSessionActive: Signal<Bool, NoError>, logName: String) {
         let _ = setupLogs
         OngoingCallThreadLocalContext.applyServerConfig(serializedData)
-        //OngoingCallThreadLocalContextWebrtc.applyServerConfig(serializedData)
         
         self.internalId = internalId
         self.account = account
@@ -502,35 +459,7 @@ public final class OngoingCallContext {
         |> take(1)
         |> deliverOn(queue)).start(next: { [weak self] _ in
             if let strongSelf = self {
-                /*if version == OngoingCallThreadLocalContextWebrtcCustom.version() {
-                    var voipProxyServer: VoipProxyServerWebrtcCustom?
-                    if let proxyServer = proxyServer {
-                        switch proxyServer.connection {
-                        case let .socks5(username, password):
-                            voipProxyServer = VoipProxyServerWebrtcCustom(host: proxyServer.host, port: proxyServer.port, username: username, password: password)
-                        case .mtp:
-                            break
-                        }
-                    }
-                    let context = OngoingCallThreadLocalContextWebrtcCustom(queue: OngoingCallThreadLocalContextQueueImpl(queue: queue), proxy: voipProxyServer, networkType: ongoingNetworkTypeForTypeWebrtcCustom(initialNetworkType), dataSaving: ongoingDataSavingForTypeWebrtcCustom(dataSaving), derivedState: derivedState.data, key: key, isOutgoing: isOutgoing, primaryConnection: callConnectionDescriptionWebrtcCustom(connections.primary), alternativeConnections: connections.alternatives.map(callConnectionDescriptionWebrtcCustom), maxLayer: maxLayer, allowP2P: allowP2P, logPath: logPath, sendSignalingData: { [weak callSessionManager] data in
-                        callSessionManager?.sendSignalingData(internalId: internalId, data: data)
-                    })
-                    strongSelf.contextRef = Unmanaged.passRetained(OngoingCallThreadLocalContextHolder(context))
-                    context.stateChanged = { state in
-                        self?.contextState.set(.single(OngoingCallContextState(state)))
-                    }
-                    context.signalBarsChanged = { signalBars in
-                        self?.receptionPromise.set(.single(signalBars))
-                    }
-                    strongSelf.networkTypeDisposable = (updatedNetworkType
-                    |> deliverOn(queue)).start(next: { networkType in
-                        self?.withContext { context in
-                            context.nativeSetNetworkType(networkType)
-                        }
-                    })
-                } else */if version == OngoingCallThreadLocalContextWebrtc.version() {
+                if version == OngoingCallThreadLocalContextWebrtc.version() {
                     var voipProxyServer: VoipProxyServerWebrtc?
                     if let proxyServer = proxyServer {
                         switch proxyServer.connection {
@@ -574,14 +503,16 @@ public final class OngoingCallContext {
                     let mappedState = OngoingCallContextState.State(state)
                     let mappedVideoState: OngoingCallContextState.VideoState
                     switch videoState {
-                    case .inactive:
-                        mappedVideoState = .available(true)
+                    case .possible:
+                        mappedVideoState = .possible
+                    case .incomingRequested:
+                        mappedVideoState = .incomingRequested
+                    case .outgoingRequested:
+                        mappedVideoState = .outgoingRequested
                     case .active:
                         mappedVideoState = .active
-                    case .activeOutgoing:
-                        mappedVideoState = .activeOutgoing
                     @unknown default:
-                        mappedVideoState = .available(false)
+                        mappedVideoState = .notAvailable
                     }
                     let mappedRemoteVideoState: OngoingCallContextState.RemoteVideoState
                     switch remoteVideoState {
@@ -709,9 +640,9 @@ public final class OngoingCallContext {
         }
     }
     
-    public func setEnableVideo(_ value: Bool) {
+    public func requestVideo(_ capturer: OngoingCallVideoCapturer) {
         self.withContext { context in
-            context.nativeSetVideoEnabled(value)
+            context.nativeRequestVideo(capturer)
         }
     }
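With setEnableVideo(_:) gone, a caller now hands the context an explicit capturer. A sketch of the call site under the new API (the surrounding call-management code is elided, and the capturer's parameterless initializer is an assumption):

    // Sketch: escalating an established audio call to video.
    let capturer = OngoingCallVideoCapturer()  // assumed initializer
    ongoingContext.requestVideo(capturer)
    // requestVideo(_:) forwards to nativeRequestVideo(_:), which only the
    // WebRTC context implements; the legacy OngoingCallThreadLocalContext
    // keeps an empty stub, as the hunks above show.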
@@ -37,8 +37,7 @@ Manager::Manager(
     bool enableP2P,
     std::vector<TgVoipRtcServer> const &rtcServers,
     std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
-    std::function<void (const TgVoipState &)> stateUpdated,
-    std::function<void (bool)> videoStateUpdated,
+    std::function<void (const TgVoipState &, VideoState)> stateUpdated,
     std::function<void (bool)> remoteVideoIsActiveUpdated,
     std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
 ) :
@@ -48,11 +47,14 @@ _enableP2P(enableP2P),
 _rtcServers(rtcServers),
 _videoCapture(videoCapture),
 _stateUpdated(stateUpdated),
-_videoStateUpdated(videoStateUpdated),
 _remoteVideoIsActiveUpdated(remoteVideoIsActiveUpdated),
 _signalingDataEmitted(signalingDataEmitted),
-_isVideoRequested(false) {
+_state(TgVoipState::Reconnecting),
+_videoState(VideoState::possible) {
     assert(_thread->IsCurrent());
+    if (videoCapture != nullptr) {
+        _videoState = VideoState::outgoingRequested;
+    }
 }
 
 Manager::~Manager() {
@@ -60,6 +62,9 @@ Manager::~Manager() {
 }
 
 void Manager::start() {
+    if (_videoCapture != nullptr) {
+        _videoState = VideoState::active;
+    }
     auto weakThis = std::weak_ptr<Manager>(shared_from_this());
     _networkManager.reset(new ThreadLocalObject<NetworkManager>(getNetworkThread(), [encryptionKey = _encryptionKey, enableP2P = _enableP2P, rtcServers = _rtcServers, thread = _thread, weakThis, signalingDataEmitted = _signalingDataEmitted]() {
         return new NetworkManager(
@@ -76,10 +81,17 @@ void Manager::start() {
             TgVoipState mappedState;
             if (state.isReadyToSendData) {
                 mappedState = TgVoipState::Estabilished;
+                if (!strongThis->_didConnectOnce) {
+                    strongThis->_didConnectOnce = true;
+                    if (strongThis->_videoState == VideoState::outgoingRequested) {
+                        strongThis->_videoState = VideoState::active;
+                    }
+                }
             } else {
                 mappedState = TgVoipState::Reconnecting;
             }
-            strongThis->_stateUpdated(mappedState);
+            strongThis->_state = mappedState;
+            strongThis->_stateUpdated(mappedState, strongThis->_videoState);
 
             strongThis->_mediaManager->perform([state](MediaManager *mediaManager) {
                 mediaManager->setIsConnected(state.isReadyToSendData);
@@ -154,10 +166,10 @@ void Manager::receiveSignalingData(const std::vector<uint8_t> &data) {
     }
 
     if (mode == 1) {
-        _mediaManager->perform([](MediaManager *mediaManager) {
-            mediaManager->setSendVideo(true);
-        });
-        _videoStateUpdated(true);
+        if (_videoState == VideoState::possible) {
+            _videoState = VideoState::incomingRequested;
+            _stateUpdated(_state, _videoState);
+        }
     } else if (mode == 2) {
     } else if (mode == 3) {
         auto candidatesData = buffer.Slice(1, buffer.size() - 1);
@@ -172,10 +184,10 @@ void Manager::receiveSignalingData(const std::vector<uint8_t> &data) {
     }
 }
 
-void Manager::setSendVideo(bool sendVideo) {
-    if (sendVideo) {
-        if (!_isVideoRequested) {
-            _isVideoRequested = true;
+void Manager::requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) {
+    if (videoCapture != nullptr) {
+        if (_videoState == VideoState::possible) {
+            _videoState = VideoState::outgoingRequested;
 
             rtc::CopyOnWriteBuffer buffer;
             uint8_t mode = 1;
@@ -187,11 +199,11 @@ void Manager::setSendVideo(bool sendVideo) {
             _signalingDataEmitted(data);
 
-            _mediaManager->perform([](MediaManager *mediaManager) {
-                mediaManager->setSendVideo(true);
-            });
-            _videoStateUpdated(true);
+            /*_mediaManager->perform([](MediaManager *mediaManager) {
+                mediaManager->setSendVideo(true);
+            });*/
+            _stateUpdated(_state, _videoState);
         }
     }
 }
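Both directions reuse the same one-byte signaling message: requestVideo emits mode 1, and receiveSignalingData interprets an incoming mode 1 as the peer's video request (mode 2 is currently ignored and mode 3 carries ICE candidates, per the hunks above). A sketch of that framing, reusing the VideoState sketch from earlier (the Swift helpers are illustrative, not part of the commit):

    // Illustrative framing of the video-request signaling message.
    func makeVideoRequestMessage() -> Data {
        var data = Data()
        data.append(1)  // mode byte: 1 = "please enable video"
        return data
    }

    func handleSignalingMessage(_ data: Data, videoState: inout VideoState) {
        guard let mode = data.first else { return }
        if mode == 1, videoState == .possible {
            // Mirrors Manager::receiveSignalingData: a remote request moves
            // the state machine from .possible to .incomingRequested only.
            videoState = .incomingRequested
        }
    }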
@@ -12,6 +12,13 @@ namespace TGVOIP_NAMESPACE {
 class Manager : public std::enable_shared_from_this<Manager> {
 public:
+    enum class VideoState {
+        possible,
+        outgoingRequested,
+        incomingRequested,
+        active
+    };
+    
     static rtc::Thread *getMediaThread();
     
     Manager(
@@ -20,8 +27,7 @@ public:
         bool enableP2P,
         std::vector<TgVoipRtcServer> const &rtcServers,
         std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
-        std::function<void (const TgVoipState &)> stateUpdated,
-        std::function<void (bool)> videoStateUpdated,
+        std::function<void (const TgVoipState &, VideoState)> stateUpdated,
         std::function<void (bool)> remoteVideoIsActiveUpdated,
         std::function<void (const std::vector<uint8_t> &)> signalingDataEmitted
     );
@@ -29,7 +35,7 @@ public:
     void start();
     void receiveSignalingData(const std::vector<uint8_t> &data);
-    void setSendVideo(bool sendVideo);
+    void requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture);
     void setMuteOutgoingAudio(bool mute);
     void notifyIsLocalVideoActive(bool isActive);
     void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
@@ -40,13 +46,14 @@ private:
     bool _enableP2P;
     std::vector<TgVoipRtcServer> _rtcServers;
     std::shared_ptr<TgVoipVideoCaptureInterface> _videoCapture;
-    std::function<void (const TgVoipState &)> _stateUpdated;
-    std::function<void (bool)> _videoStateUpdated;
+    std::function<void (const TgVoipState &, VideoState)> _stateUpdated;
     std::function<void (bool)> _remoteVideoIsActiveUpdated;
     std::function<void (const std::vector<uint8_t> &)> _signalingDataEmitted;
     std::unique_ptr<ThreadLocalObject<NetworkManager>> _networkManager;
     std::unique_ptr<ThreadLocalObject<MediaManager>> _mediaManager;
-    bool _isVideoRequested;
+    TgVoipState _state;
+    VideoState _videoState;
+    bool _didConnectOnce;
     
 private:
 };
@@ -147,6 +147,13 @@ protected:
     TgVoip() = default;
 
 public:
+    enum class VideoState {
+        possible,
+        outgoingRequested,
+        incomingRequested,
+        active
+    };
+    
     static void setLoggingFunction(std::function<void(std::string const &)> loggingFunction);
     static void setGlobalServerConfig(std::string const &serverConfig);
     static int getConnectionMaxLayer();
@@ -160,8 +167,7 @@ public:
         TgVoipNetworkType initialNetworkType,
         TgVoipEncryptionKey const &encryptionKey,
         std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
-        std::function<void(TgVoipState)> stateUpdated,
-        std::function<void(bool)> videoStateUpdated,
+        std::function<void(TgVoipState, VideoState)> stateUpdated,
         std::function<void(bool)> remoteVideoIsActiveUpdated,
         std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted
     );
@@ -182,7 +188,7 @@ public:
     virtual TgVoipPersistentState getPersistentState() = 0;
     virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
-    virtual void setSendVideo(bool sendVideo) = 0;
+    virtual void requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) = 0;
     
     virtual TgVoipFinalState stop() = 0;
 };
@@ -155,8 +155,7 @@ public:
         TgVoipEncryptionKey const &encryptionKey,
         std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
         TgVoipNetworkType initialNetworkType,
-        std::function<void(TgVoipState)> stateUpdated,
-        std::function<void(bool)> videoStateUpdated,
+        std::function<void(TgVoipState, TgVoip::VideoState)> stateUpdated,
         std::function<void(bool)> remoteVideoIsActiveUpdated,
         std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted
     ) :
@@ -171,18 +170,30 @@ public:
         bool enableP2P = config.enableP2P;
-        _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, videoStateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
+        _manager.reset(new ThreadLocalObject<Manager>(getManagerThread(), [encryptionKey = encryptionKey, enableP2P = enableP2P, stateUpdated, remoteVideoIsActiveUpdated, signalingDataEmitted, rtcServers, videoCapture](){
             return new Manager(
                 getManagerThread(),
                 encryptionKey,
                 enableP2P,
                 rtcServers,
                 videoCapture,
-                [stateUpdated](const TgVoipState &state) {
-                    stateUpdated(state);
-                },
-                [videoStateUpdated](bool isActive) {
-                    videoStateUpdated(isActive);
+                [stateUpdated](const TgVoipState &state, Manager::VideoState videoState) {
+                    TgVoip::VideoState mappedVideoState;
+                    switch (videoState) {
+                        case Manager::VideoState::possible:
+                            mappedVideoState = TgVoip::VideoState::possible;
+                            break;
+                        case Manager::VideoState::outgoingRequested:
+                            mappedVideoState = TgVoip::VideoState::outgoingRequested;
+                            break;
+                        case Manager::VideoState::incomingRequested:
+                            mappedVideoState = TgVoip::VideoState::incomingRequested;
+                            break;
+                        case Manager::VideoState::active:
+                            mappedVideoState = TgVoip::VideoState::active;
+                            break;
+                    }
+                    stateUpdated(state, mappedVideoState);
                 },
                 [remoteVideoIsActiveUpdated](bool isActive) {
                     remoteVideoIsActiveUpdated(isActive);
@@ -207,11 +218,11 @@ public:
         });
     };
     
-    void setSendVideo(bool sendVideo) override {
-        _manager->perform([sendVideo](Manager *manager) {
-            manager->setSendVideo(sendVideo);
+    virtual void requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) override {
+        _manager->perform([videoCapture](Manager *manager) {
+            manager->requestVideo(videoCapture);
         });
-    };
+    }
     
     void setNetworkType(TgVoipNetworkType networkType) override {
         /*message::NetworkType mappedType;
@@ -307,37 +318,9 @@ public:
         return finalState;
     }
 
-    /*void controllerStateCallback(Controller::State state) {
-        if (onStateUpdated_) {
-            TgVoipState mappedState;
-            switch (state) {
-                case Controller::State::WaitInit:
-                    mappedState = TgVoipState::WaitInit;
-                    break;
-                case Controller::State::WaitInitAck:
-                    mappedState = TgVoipState::WaitInitAck;
-                    break;
-                case Controller::State::Established:
-                    mappedState = TgVoipState::Estabilished;
-                    break;
-                case Controller::State::Failed:
-                    mappedState = TgVoipState::Failed;
-                    break;
-                case Controller::State::Reconnecting:
-                    mappedState = TgVoipState::Reconnecting;
-                    break;
-                default:
-                    mappedState = TgVoipState::Estabilished;
-                    break;
-            }
-            onStateUpdated_(mappedState);
-        }
-    }*/
-
 private:
     std::unique_ptr<ThreadLocalObject<Manager>> _manager;
-    std::function<void(TgVoipState)> _stateUpdated;
+    std::function<void(TgVoipState, TgVoip::VideoState)> _stateUpdated;
     std::function<void(const std::vector<uint8_t> &)> _signalingDataEmitted;
     LogSinkImpl _logSink;
@@ -371,11 +354,11 @@ void TgVoip::setGlobalServerConfig(const std::string &serverConfig) {
 }
 
 int TgVoip::getConnectionMaxLayer() {
-    return 92; // TODO: retrieve from LayerBase
+    return 92;
 }
 
 std::string TgVoip::getVersion() {
-    return ""; // TODO: version not known while not released
+    return "";
 }
 TgVoip *TgVoip::makeInstance(
@@ -387,8 +370,7 @@ TgVoip *TgVoip::makeInstance(
     TgVoipNetworkType initialNetworkType,
     TgVoipEncryptionKey const &encryptionKey,
     std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture,
-    std::function<void(TgVoipState)> stateUpdated,
-    std::function<void(bool)> videoStateUpdated,
+    std::function<void(TgVoipState, TgVoip::VideoState)> stateUpdated,
     std::function<void(bool)> remoteVideoIsActiveUpdated,
    std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted
 ) {
@@ -402,7 +384,6 @@ TgVoip *TgVoip::makeInstance(
         videoCapture,
         initialNetworkType,
         stateUpdated,
-        videoStateUpdated,
         remoteVideoIsActiveUpdated,
         signalingDataEmitted
     );
@@ -24,8 +24,9 @@ typedef NS_ENUM(int32_t, OngoingCallStateWebrtc) {
 };
 
 typedef NS_ENUM(int32_t, OngoingCallVideoStateWebrtc) {
-    OngoingCallVideoStateInactive,
-    OngoingCallVideoStateActiveOutgoing,
+    OngoingCallVideoStatePossible,
+    OngoingCallVideoStateOutgoingRequested,
+    OngoingCallVideoStateIncomingRequested,
     OngoingCallVideoStateActive
 };
@@ -115,9 +116,9 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
 - (NSData * _Nonnull)getDerivedState;
 
 - (void)setIsMuted:(bool)isMuted;
-- (void)setVideoEnabled:(bool)videoEnabled;
 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
 - (void)makeIncomingVideoView:(void (^_Nonnull)(OngoingCallThreadLocalContextWebrtcVideoView * _Nullable))completion;
+- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
 - (void)addSignalingData:(NSData * _Nonnull)data;
 
 @end
@@ -78,6 +78,7 @@ using namespace TGVOIP_NAMESPACE;
     OngoingCallStateWebrtc _state;
     OngoingCallVideoStateWebrtc _videoState;
+    bool _connectedOnce;
     OngoingCallRemoteVideoStateWebrtc _remoteVideoState;
     OngoingCallThreadLocalContextVideoCapturer *_videoCapturer;
@@ -87,7 +88,7 @@ using namespace TGVOIP_NAMESPACE;
     void (^_sendSignalingData)(NSData *);
 }
 
-- (void)controllerStateChanged:(TgVoipState)state;
+- (void)controllerStateChanged:(TgVoipState)state videoState:(OngoingCallVideoStateWebrtc)videoState;
 - (void)signalBarsChanged:(int32_t)signalBars;
 
 @end
@@ -192,10 +193,10 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
         _sendSignalingData = [sendSignalingData copy];
         _videoCapturer = videoCapturer;
         if (videoCapturer != nil) {
-            _videoState = OngoingCallVideoStateActiveOutgoing;
+            _videoState = OngoingCallVideoStateOutgoingRequested;
             _remoteVideoState = OngoingCallRemoteVideoStateActive;
         } else {
-            _videoState = OngoingCallVideoStateInactive;
+            _videoState = OngoingCallVideoStatePossible;
             _remoteVideoState = OngoingCallRemoteVideoStateInactive;
         }
@@ -282,30 +283,27 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             callControllerNetworkTypeForType(networkType),
             encryptionKey,
             [_videoCapturer getInterface],
-            [weakSelf, queue](TgVoipState state) {
+            [weakSelf, queue](TgVoipState state, TgVoip::VideoState videoState) {
                 [queue dispatch:^{
                     __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
                     if (strongSelf) {
-                        [strongSelf controllerStateChanged:state];
-                    }
-                }];
-            },
-            [weakSelf, queue](bool isActive) {
-                [queue dispatch:^{
-                    __strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
-                    if (strongSelf) {
-                        OngoingCallVideoStateWebrtc videoState;
-                        if (isActive) {
-                            videoState = OngoingCallVideoStateActive;
-                        } else {
-                            videoState = OngoingCallVideoStateInactive;
-                        }
-                        if (strongSelf->_videoState != videoState) {
-                            strongSelf->_videoState = videoState;
-                            if (strongSelf->_stateChanged) {
-                                strongSelf->_stateChanged(strongSelf->_state, strongSelf->_videoState, strongSelf->_remoteVideoState);
-                            }
-                        }
+                        OngoingCallVideoStateWebrtc mappedVideoState;
+                        switch (videoState) {
+                            case TgVoip::VideoState::possible:
+                                mappedVideoState = OngoingCallVideoStatePossible;
+                                break;
+                            case TgVoip::VideoState::outgoingRequested:
+                                mappedVideoState = OngoingCallVideoStateOutgoingRequested;
+                                break;
+                            case TgVoip::VideoState::incomingRequested:
+                                mappedVideoState = OngoingCallVideoStateIncomingRequested;
+                                break;
+                            case TgVoip::VideoState::active:
+                                mappedVideoState = OngoingCallVideoStateActive;
+                                break;
+                        }
+                        [strongSelf controllerStateChanged:state videoState:mappedVideoState];
                     }
                 }];
             },
@@ -402,7 +400,7 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (void)controllerStateChanged:(TgVoipState)state {
+- (void)controllerStateChanged:(TgVoipState)state videoState:(OngoingCallVideoStateWebrtc)videoState {
     OngoingCallStateWebrtc callState = OngoingCallStateInitializing;
     switch (state) {
         case TgVoipState::Estabilished:
@@ -418,15 +416,11 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
             break;
     }
 
-    if (callState != _state) {
+    if (_state != callState || _videoState != videoState) {
         _state = callState;
+        _videoState = videoState;
 
         if (_stateChanged) {
-            if (_videoState == OngoingCallVideoStateActiveOutgoing) {
-                if (_state == OngoingCallStateConnected) {
-                    _videoState = OngoingCallVideoStateActive;
-                }
-            }
             _stateChanged(_state, _videoState, _remoteVideoState);
         }
     }
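The Objective-C context now carries the video state through the same change check as the call state, firing _stateChanged only when either component actually changed. The same coalescing pattern in a small Swift sketch (types and names here are illustrative):

    // Fire the observer only when something actually changed.
    struct CallStateSnapshot: Equatable {
        var state: Int
        var videoState: VideoState  // the sketch enum from earlier
    }

    final class StateNotifier {
        private var current: CallStateSnapshot
        var onChange: ((CallStateSnapshot) -> Void)?

        init(initial: CallStateSnapshot) {
            self.current = initial
        }

        func update(_ new: CallStateSnapshot) {
            guard new != current else { return }
            current = new
            onChange?(new)
        }
    }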
@@ -463,12 +457,6 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
-- (void)setVideoEnabled:(bool)videoEnabled {
-    if (_tgVoip) {
-        _tgVoip->setSendVideo(videoEnabled);
-    }
-}
-
 - (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType {
     if (_networkType != networkType) {
         _networkType = networkType;
@@ -496,6 +484,13 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
     }
 }
 
+- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
+    if (_tgVoip && _videoCapturer == nil) {
+        _videoCapturer = videoCapturer;
+        _tgVoip->requestVideo([_videoCapturer getInterface]);
+    }
+}
+
 @end
 
 @implementation OngoingCallThreadLocalContextWebrtcVideoView : UIView
@@ -1,47 +0,0 @@
load("//Config:buck_rule_macros.bzl", "static_library", "glob_map", "glob_sub_map", "merge_maps")
static_library(
name = "TgVoipWebrtcCustom",
srcs = glob([
"Sources/**/*.m",
"Sources/**/*.mm",
"Impl/*.cpp",
]),
has_cpp = True,
headers = merge_maps([
glob_sub_map("PublicHeaders/", [
"PublicHeaders/**/*.h",
]),
glob_sub_map("Impl/", [
"Impl/*.h",
]),
]),
exported_headers = glob([
"PublicHeaders/**/*.h",
]),
compiler_flags = [
"-Ithird-party/submodules/TgVoipWebrtcCustom/PublicHeaders",
"-Ithird-party/webrtc/webrtc-ios/src",
"-Ithird-party/webrtc/webrtc-ios/src/third_party/abseil-cpp",
"-Ithird-party/webrtc/webrtc-ios/src/sdk/objc",
"-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/base",
"-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/components/renderer/metal",
"-DWEBRTC_IOS",
"-DWEBRTC_MAC",
"-DWEBRTC_POSIX",
"-std=c++14",
],
deps = [
"//third-party/webrtc:webrtc_lib",
],
frameworks = [
"$SDKROOT/System/Library/Frameworks/Foundation.framework",
"$SDKROOT/System/Library/Frameworks/UIKit.framework",
"$SDKROOT/System/Library/Frameworks/AudioToolbox.framework",
"$SDKROOT/System/Library/Frameworks/VideoToolbox.framework",
"$SDKROOT/System/Library/Frameworks/CoreTelephony.framework",
"$SDKROOT/System/Library/Frameworks/CoreMedia.framework",
"$SDKROOT/System/Library/Frameworks/AVFoundation.framework",
"$SDKROOT/System/Library/Frameworks/Metal.framework",
],
)
@@ -1,47 +0,0 @@
objc_library(
name = "TgVoipWebrtcCustom",
enable_modules = True,
module_name = "TgVoipWebrtcCustom",
srcs = glob([
"Sources/**/*.m",
"Sources/**/*.mm",
"Sources/**/*.h",
"Sources/**/*.cpp",
"Sources/**/*.h",
]),
hdrs = glob([
"PublicHeaders/**/*.h",
]),
copts = [
"-I{}/Impl".format(package_name()),
"-Ithird-party/webrtc/webrtc-ios/src",
"-Ithird-party/webrtc/webrtc-ios/src/third_party/abseil-cpp",
"-Ithird-party/webrtc/webrtc-ios/src/sdk/objc",
"-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/base",
"-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/components/renderer/metal",
"-DWEBRTC_IOS",
"-DWEBRTC_MAC",
"-DWEBRTC_POSIX",
"-std=c++14",
],
includes = [
"PublicHeaders",
],
deps = [
"//third-party/webrtc:webrtc_lib",
"//submodules/MtProtoKit:MtProtoKit",
],
sdk_frameworks = [
"Foundation",
"UIKit",
"AudioToolbox",
"VideoToolbox",
"CoreTelephony",
"CoreMedia",
"AVFoundation",
],
visibility = [
"//visibility:public",
],
)
@@ -1,87 +0,0 @@
#ifndef OngoingCallContext_h
#define OngoingCallContext_h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@interface OngoingCallConnectionDescriptionWebrtcCustom : NSObject
@property (nonatomic, readonly) int64_t connectionId;
@property (nonatomic, strong, readonly) NSString * _Nonnull ip;
@property (nonatomic, strong, readonly) NSString * _Nonnull ipv6;
@property (nonatomic, readonly) int32_t port;
@property (nonatomic, strong, readonly) NSData * _Nonnull peerTag;
- (instancetype _Nonnull)initWithConnectionId:(int64_t)connectionId ip:(NSString * _Nonnull)ip ipv6:(NSString * _Nonnull)ipv6 port:(int32_t)port peerTag:(NSData * _Nonnull)peerTag;
@end
typedef NS_ENUM(int32_t, OngoingCallStateWebrtcCustom) {
OngoingCallStateInitializing,
OngoingCallStateConnected,
OngoingCallStateFailed,
OngoingCallStateReconnecting
};
typedef NS_ENUM(int32_t, OngoingCallNetworkTypeWebrtcCustom) {
OngoingCallNetworkTypeWifi,
OngoingCallNetworkTypeCellularGprs,
OngoingCallNetworkTypeCellularEdge,
OngoingCallNetworkTypeCellular3g,
OngoingCallNetworkTypeCellularLte
};
typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtcCustom) {
OngoingCallDataSavingNever,
OngoingCallDataSavingCellular,
OngoingCallDataSavingAlways
};
@protocol OngoingCallThreadLocalContextQueueWebrtcCustom <NSObject>
- (void)dispatch:(void (^ _Nonnull)())f;
- (void)dispatchAfter:(double)seconds block:(void (^ _Nonnull)())f;
- (bool)isCurrent;
@end
@interface VoipProxyServerWebrtcCustom : NSObject
@property (nonatomic, strong, readonly) NSString * _Nonnull host;
@property (nonatomic, readonly) int32_t port;
@property (nonatomic, strong, readonly) NSString * _Nullable username;
@property (nonatomic, strong, readonly) NSString * _Nullable password;
- (instancetype _Nonnull)initWithHost:(NSString * _Nonnull)host port:(int32_t)port username:(NSString * _Nullable)username password:(NSString * _Nullable)password;
@end
@interface OngoingCallThreadLocalContextWebrtcCustom : NSObject
+ (void)setupLoggingFunction:(void (* _Nullable)(NSString * _Nullable))loggingFunction;
+ (void)applyServerConfig:(NSString * _Nullable)data;
+ (int32_t)maxLayer;
+ (NSString * _Nonnull)version;
@property (nonatomic, copy) void (^ _Nullable stateChanged)(OngoingCallStateWebrtcCustom);
@property (nonatomic, copy) void (^ _Nullable signalBarsChanged)(int32_t);
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom> _Nonnull)queue proxy:(VoipProxyServerWebrtcCustom * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtcCustom)networkType dataSaving:(OngoingCallDataSavingWebrtcCustom)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtcCustom * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtcCustom *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData;
- (void)stop:(void (^_Nullable)(NSString * _Nullable debugLog, int64_t bytesSentWifi, int64_t bytesReceivedWifi, int64_t bytesSentMobile, int64_t bytesReceivedMobile))completion;
- (bool)needRate;
- (NSString * _Nullable)debugInfo;
- (NSString * _Nullable)version;
- (NSData * _Nonnull)getDerivedState;
- (void)receiveSignalingData:(NSData * _Nonnull)data;
- (void)setIsMuted:(bool)isMuted;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtcCustom)networkType;
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion;
@end
#endif
@@ -1,353 +0,0 @@
#import <TgVoip/OngoingCallThreadLocalContext.h>
#import <Foundation/Foundation.h>
#import "api/peerconnection/RTCPeerConnectionFactory.h"
#import "api/peerconnection/RTCSSLAdapter.h"
#import "api/peerconnection/RTCConfiguration.h"
#import "api/peerconnection/RTCIceServer.h"
#import "api/peerconnection/RTCPeerConnection.h"
#import "api/peerconnection/RTCMediaConstraints.h"
#import "api/peerconnection/RTCMediaStreamTrack.h"
#import "api/peerconnection/RTCAudioTrack.h"
#import "api/peerconnection/RTCVideoTrack.h"
#import "api/peerconnection/RTCRtpTransceiver.h"
#import "api/peerconnection/RTCSessionDescription.h"
#import "api/peerconnection/RTCIceCandidate.h"
#import "api/peerconnection/RTCMediaStream.h"
#import "components/video_codec/RTCDefaultVideoDecoderFactory.h"
#import "components/video_codec/RTCDefaultVideoEncoderFactory.h"
#import "components/audio/RTCAudioSession.h"
#import "base/RTCVideoCapturer.h"
#import "api/peerconnection/RTCVideoSource.h"
#import "components/capturer/RTCFileVideoCapturer.h"
#import "components/capturer/RTCCameraVideoCapturer.h"
#import "components/renderer/metal/RTCMTLVideoView.h"
#import "components/renderer/opengl/RTCEAGLVideoView.h"
#import "RtcConnection.h"
static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
static void voipLog(NSString* format, ...) {
va_list args;
va_start(args, format);
NSString *string = [[NSString alloc] initWithFormat:format arguments:args];
va_end(args);
if (InternalVoipLoggingFunction) {
InternalVoipLoggingFunction(string);
}
}
@implementation OngoingCallConnectionDescriptionWebrtcCustom
- (instancetype _Nonnull)initWithConnectionId:(int64_t)connectionId ip:(NSString * _Nonnull)ip ipv6:(NSString * _Nonnull)ipv6 port:(int32_t)port peerTag:(NSData * _Nonnull)peerTag {
self = [super init];
if (self != nil) {
_connectionId = connectionId;
_ip = ip;
_ipv6 = ipv6;
_port = port;
_peerTag = peerTag;
}
return self;
}
@end
@interface OngoingCallThreadLocalContextWebrtcCustom () {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> _queue;
int32_t _contextId;
bool _isOutgoing;
void (^_sendSignalingData)(NSData * _Nonnull);
OngoingCallNetworkTypeWebrtcCustom _networkType;
NSTimeInterval _callReceiveTimeout;
NSTimeInterval _callRingTimeout;
NSTimeInterval _callConnectTimeout;
NSTimeInterval _callPacketTimeout;
OngoingCallStateWebrtcCustom _state;
int32_t _signalBars;
RtcConnection *_connection;
bool _receivedRemoteDescription;
}
@end
@implementation VoipProxyServerWebrtcCustom
- (instancetype _Nonnull)initWithHost:(NSString * _Nonnull)host port:(int32_t)port username:(NSString * _Nullable)username password:(NSString * _Nullable)password {
self = [super init];
if (self != nil) {
_host = host;
_port = port;
_username = username;
_password = password;
}
return self;
}
@end
@implementation OngoingCallThreadLocalContextWebrtcCustom
+ (NSString *)version {
return @"2.8.8";
}
+ (void)setupLoggingFunction:(void (*)(NSString *))loggingFunction {
InternalVoipLoggingFunction = loggingFunction;
}
+ (void)applyServerConfig:(NSString * _Nullable)__unused data {
}
+ (int32_t)maxLayer {
return 80;
}
- (instancetype _Nonnull)initWithQueue:(id<OngoingCallThreadLocalContextQueueWebrtcCustom> _Nonnull)queue proxy:(VoipProxyServerWebrtcCustom * _Nullable)proxy networkType:(OngoingCallNetworkTypeWebrtcCustom)networkType dataSaving:(OngoingCallDataSavingWebrtcCustom)dataSaving derivedState:(NSData * _Nonnull)derivedState key:(NSData * _Nonnull)key isOutgoing:(bool)isOutgoing primaryConnection:(OngoingCallConnectionDescriptionWebrtcCustom * _Nonnull)primaryConnection alternativeConnections:(NSArray<OngoingCallConnectionDescriptionWebrtcCustom *> * _Nonnull)alternativeConnections maxLayer:(int32_t)maxLayer allowP2P:(BOOL)allowP2P logPath:(NSString * _Nonnull)logPath sendSignalingData:(void (^)(NSData * _Nonnull))sendSignalingData {
self = [super init];
if (self != nil) {
_queue = queue;
assert([queue isCurrent]);
_isOutgoing = isOutgoing;
_sendSignalingData = [sendSignalingData copy];
_callReceiveTimeout = 20.0;
_callRingTimeout = 90.0;
_callConnectTimeout = 30.0;
_callPacketTimeout = 10.0;
_networkType = networkType;
_state = OngoingCallStateInitializing;
_signalBars = -1;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
RTCInitializeSSL();
});
[RTCAudioSession sharedInstance].useManualAudio = true;
[RTCAudioSession sharedInstance].isAudioEnabled = true;
__weak OngoingCallThreadLocalContextWebrtcCustom *weakSelf = self;
_connection = [[RtcConnection alloc] initWithDiscoveredIceCandidate:^(NSString *sdp, int mLineIndex, NSString *sdpMid) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf sendCandidateWithSdp:sdp mLineIndex:mLineIndex sdpMid:sdpMid];
}];
} connectionStateChanged:^(bool isConnected) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
if (strongSelf.stateChanged) {
strongSelf.stateChanged(isConnected ? OngoingCallStateConnected : OngoingCallStateInitializing);
}
}];
}];
//RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil optionalConstraints:@{ @"DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue }];
/*RTCVideoSource *videoSource = [_peerConnectionFactory videoSource];
#if TARGET_OS_SIMULATOR
_videoCapturer = [[RTCFileVideoCapturer alloc] initWithDelegate:videoSource];
#else
_videoCapturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:videoSource];
#endif
_localVideoTrack = [_peerConnectionFactory videoTrackWithSource:videoSource trackId:@"video0"];
[_peerConnection addTrack:_localVideoTrack streamIds:@[streamId]];*/
if (isOutgoing) {
id<OngoingCallThreadLocalContextQueueWebrtcCustom> queue = _queue;
[_connection getOffer:^(NSString *sdp, NSString *type) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf->_connection setLocalDescription:sdp type:type completion:^{
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf tryAdvertising:sdp type:type];
}];
}];
}];
}];
}
}
return self;
}
- (void)dealloc {
assert([_queue isCurrent]);
}
- (void)tryAdvertising:(NSString *)sdp type:(NSString *)type {
if (_receivedRemoteDescription) {
return;
}
[self sendSdp:sdp type:type];
__weak OngoingCallThreadLocalContextWebrtcCustom *weakSelf = self;
[_queue dispatchAfter:1.0 block:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf tryAdvertising:sdp type:type];
}];
}
- (bool)needRate {
return false;
}
- (void)stop:(void (^)(NSString *, int64_t, int64_t, int64_t, int64_t))completion {
[_connection close];
if (completion) {
completion(@"", 0, 0, 0, 0);
}
}
- (NSString *)debugInfo {
NSString *version = [self version];
return [NSString stringWithFormat:@"WebRTC, Version: %@", version];
}
- (NSString *)version {
return [OngoingCallThreadLocalContextWebrtcCustom version];
}
- (NSData * _Nonnull)getDerivedState {
return [NSData data];
}
- (void)sendSdp:(NSString *)sdp type:(NSString *)type {
NSMutableDictionary *json = [[NSMutableDictionary alloc] init];
json[@"messageType"] = @"sessionDescription";
json[@"sdp"] = sdp;
json[@"type"] = type;
NSData *data = [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
if (data != nil) {
_sendSignalingData(data);
}
}
- (void)sendCandidateWithSdp:(NSString *)sdp mLineIndex:(int)mLineIndex sdpMid:(NSString *)sdpMid {
NSMutableDictionary *json = [[NSMutableDictionary alloc] init];
json[@"messageType"] = @"iceCandidate";
json[@"sdp"] = sdp;
json[@"mLineIndex"] = @(mLineIndex);
if (sdpMid != nil) {
json[@"sdpMid"] = sdpMid;
}
NSData *data = [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
if (data != nil) {
_sendSignalingData(data);
}
}
- (void)receiveSignalingData:(NSData *)data {
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
if (![json isKindOfClass:[NSDictionary class]]) {
return;
}
NSString *messageType = json[@"messageType"];
if (![messageType isKindOfClass:[NSString class]]) {
return;
}
if ([messageType isEqualToString:@"sessionDescription"]) {
NSString *sdp = json[@"sdp"];
if (![sdp isKindOfClass:[NSString class]]) {
return;
}
NSString *typeString = json[@"type"];
if (![typeString isKindOfClass:[NSString class]]) {
return;
}
if (_receivedRemoteDescription) {
return;
}
_receivedRemoteDescription = true;
[_connection setRemoteDescription:sdp type:typeString completion:^{
}];
if (!_isOutgoing) {
__weak OngoingCallThreadLocalContextWebrtcCustom *weakSelf = self;
id<OngoingCallThreadLocalContextQueueWebrtcCustom> queue = _queue;
[_connection getAnswer:^(NSString *sdp, NSString *type) {
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf->_connection setLocalDescription:sdp type:type completion:^{
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtcCustom *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
[strongSelf sendSdp:sdp type:type];
}];
}];
}];
}];
}
} else if ([messageType isEqualToString:@"iceCandidate"]) {
NSString *sdp = json[@"sdp"];
if (![sdp isKindOfClass:[NSString class]]) {
return;
}
NSNumber *mLineIndex = json[@"mLineIndex"];
if (![mLineIndex isKindOfClass:[NSNumber class]]) {
return;
}
NSString *sdpMidString = json[@"sdpMid"];
NSString *sdpMid = nil;
if ([sdpMidString isKindOfClass:[NSString class]]) {
sdpMid = sdpMidString;
}
[_connection addIceCandidateWithSdp:sdp sdpMLineIndex:[mLineIndex intValue] sdpMid:sdpMid];
}
}
- (void)setIsMuted:(bool)isMuted {
[_connection setIsMuted:isMuted];
}
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtcCustom)networkType {
}
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
[_connection getRemoteCameraView:completion];
}
@end
@@ -1,25 +0,0 @@
#ifndef RTCCONNECTION_H
#define RTCCONNECTION_H
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@interface RtcConnection : NSObject
- (instancetype _Nonnull)initWithDiscoveredIceCandidate:(void (^_Nonnull)(NSString *, int, NSString * _Nonnull))discoveredIceCandidate connectionStateChanged:(void (^_Nonnull)(bool))connectionStateChanged;
- (void)close;
- (void)setIsMuted:(bool)isMuted;
- (void)getOffer:(void (^_Nonnull)(NSString * _Nonnull, NSString * _Nonnull))completion;
- (void)getAnswer:(void (^_Nonnull)(NSString * _Nonnull, NSString * _Nonnull))completion;
- (void)setLocalDescription:(NSString * _Nonnull)serializedDescription type:(NSString * _Nonnull)type completion:(void (^_Nonnull)())completion;
- (void)setRemoteDescription:(NSString * _Nonnull)serializedDescription type:(NSString * _Nonnull)type completion:(void (^_Nonnull)())completion;
- (void)addIceCandidateWithSdp:(NSString * _Nonnull)sdp sdpMLineIndex:(int)sdpMLineIndex sdpMid:(NSString * _Nullable)sdpMid;
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion;
@end
#endif
@@ -1,438 +0,0 @@
#import "RtcConnection.h"
#import <UIKit/UIKit.h>
#include <memory>
#include "api/scoped_refptr.h"
#include "api/proxy.h"
#include "api/peer_connection_factory_proxy.h"
#include "rtc_base/thread.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/engine/webrtc_media_engine.h"
#include "sdk/objc/native/api/audio_device_module.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h"
#include "sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h"
#include "sdk/objc/native/api/video_encoder_factory.h"
#include "sdk/objc/native/api/video_decoder_factory.h"
#include "api/rtc_event_log/rtc_event_log_factory.h"
#include "sdk/media_constraints.h"
#include "api/peer_connection_interface.h"
#include "sdk/objc/native/src/objc_video_track_source.h"
#include "api/video_track_source_proxy.h"
#include "sdk/objc/api/RTCVideoRendererAdapter.h"
#include "sdk/objc/native/api/video_frame.h"
#include "tg_peer_connection.h"
#include "tg_peer_connection_factory.h"
#include "VideoCameraCapturer.h"
#import "VideoMetalView.h"
class PeerConnectionObserverImpl : public webrtc::PeerConnectionObserver {
private:
void (^_discoveredIceCandidate)(NSString *, int, NSString *);
void (^_connectionStateChanged)(bool);
public:
PeerConnectionObserverImpl(void (^discoveredIceCandidate)(NSString *, int, NSString *), void (^connectionStateChanged)(bool)) {
_discoveredIceCandidate = [discoveredIceCandidate copy];
_connectionStateChanged = [connectionStateChanged copy];
}
virtual ~PeerConnectionObserverImpl() {
_discoveredIceCandidate = nil;
_connectionStateChanged = nil;
}
virtual void OnSignalingChange(webrtc::PeerConnectionInterface::SignalingState new_state) {
bool isConnected = false;
if (new_state == webrtc::PeerConnectionInterface::SignalingState::kStable) {
isConnected = true;
}
_connectionStateChanged(isConnected);
}
virtual void OnAddStream(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
}
virtual void OnRemoveStream(rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
}
virtual void OnDataChannel(
rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel) {
}
virtual void OnRenegotiationNeeded() {
}
virtual void OnIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) {
}
virtual void OnStandardizedIceConnectionChange(webrtc::PeerConnectionInterface::IceConnectionState new_state) {
}
virtual void OnConnectionChange(webrtc::PeerConnectionInterface::PeerConnectionState new_state) {
}
virtual void OnIceGatheringChange(webrtc::PeerConnectionInterface::IceGatheringState new_state) {
}
virtual void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) {
std::string sdp;
candidate->ToString(&sdp);
NSString *sdpString = [NSString stringWithUTF8String:sdp.c_str()];
NSString *sdpMidString = [NSString stringWithUTF8String:candidate->sdp_mid().c_str()];
_discoveredIceCandidate(sdpString, candidate->sdp_mline_index(), sdpMidString);
}
virtual void OnIceCandidateError(const std::string& host_candidate, const std::string& url, int error_code, const std::string& error_text) {
}
virtual void OnIceCandidateError(const std::string& address,
int port,
const std::string& url,
int error_code,
const std::string& error_text) {
}
virtual void OnIceCandidatesRemoved(const std::vector<cricket::Candidate>& candidates) {
}
virtual void OnIceConnectionReceivingChange(bool receiving) {
}
virtual void OnIceSelectedCandidatePairChanged(const cricket::CandidatePairChangeEvent& event) {
}
virtual void OnAddTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver, const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface>>& streams) {
}
virtual void OnTrack(rtc::scoped_refptr<webrtc::RtpTransceiverInterface> transceiver) {
}
virtual void OnRemoveTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver) {
}
virtual void OnInterestingUsage(int usage_pattern) {
}
};
class CreateSessionDescriptionObserverImpl : public webrtc::CreateSessionDescriptionObserver {
private:
void (^_completion)(NSString *, NSString *);
public:
CreateSessionDescriptionObserverImpl(void (^completion)(NSString *, NSString *)) {
_completion = [completion copy];
}
~CreateSessionDescriptionObserverImpl() override {
_completion = nil;
}
virtual void OnSuccess(webrtc::SessionDescriptionInterface* desc) override {
if (desc) {
NSString *typeString = [NSString stringWithUTF8String:desc->type().c_str()];
std::string sdp;
desc->ToString(&sdp);
NSString *serializedString = [NSString stringWithUTF8String:sdp.c_str()];
if (_completion && typeString && serializedString) {
_completion(serializedString, typeString);
}
}
_completion = nil;
}
virtual void OnFailure(webrtc::RTCError error) override {
_completion = nil;
}
};
class SetSessionDescriptionObserverImpl : public webrtc::SetSessionDescriptionObserver {
private:
void (^_completion)();
public:
SetSessionDescriptionObserverImpl(void (^completion)()) {
_completion = [completion copy];
}
~SetSessionDescriptionObserverImpl() override {
_completion = nil;
}
virtual void OnSuccess() override {
if (_completion) {
_completion();
}
_completion = nil;
}
virtual void OnFailure(webrtc::RTCError error) override {
_completion = nil;
}
};
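// Note: the three observer classes above are one-shot bridges from WebRTC's
// C++ callback interfaces to Objective-C blocks; each nils out its block after
// the first OnSuccess/OnFailure so the block is not retained afterwards.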
@interface RtcConnection () {
void (^_discoveredIceCandidate)(NSString *, int, NSString *);
void (^_connectionStateChanged)(bool);
std::unique_ptr<rtc::Thread> _networkThread;
std::unique_ptr<rtc::Thread> _workerThread;
std::unique_ptr<rtc::Thread> _signalingThread;
rtc::scoped_refptr<webrtc::TgPeerConnectionFactoryInterface> _nativeFactory;
std::unique_ptr<PeerConnectionObserverImpl> _observer;
rtc::scoped_refptr<webrtc::TgPeerConnectionInterface> _peerConnection;
std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
bool _hasStartedRtcEventLog;
rtc::scoped_refptr<webrtc::AudioTrackInterface> _localAudioTrack;
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
rtc::scoped_refptr<webrtc::VideoTrackInterface> _localVideoTrack;
VideoCameraCapturer *_videoCapturer;
rtc::scoped_refptr<webrtc::VideoTrackInterface> _remoteVideoTrack;
}
@end
@implementation RtcConnection
- (instancetype)initWithDiscoveredIceCandidate:(void (^)(NSString *, int, NSString *))discoveredIceCandidate connectionStateChanged:(void (^)(bool))connectionStateChanged {
self = [super init];
if (self != nil) {
_discoveredIceCandidate = [discoveredIceCandidate copy];
_connectionStateChanged = [connectionStateChanged copy];
_networkThread = rtc::Thread::CreateWithSocketServer();
_networkThread->SetName("network_thread", _networkThread.get());
bool result = _networkThread->Start();
assert(result);
_workerThread = rtc::Thread::Create();
_workerThread->SetName("worker_thread", _workerThread.get());
result = _workerThread->Start();
assert(result);
_signalingThread = rtc::Thread::Create();
_signalingThread->SetName("signaling_thread", _signalingThread.get());
result = _signalingThread->Start();
assert(result);
webrtc::PeerConnectionFactoryDependencies dependencies;
dependencies.network_thread = _networkThread.get();
dependencies.worker_thread = _workerThread.get();
dependencies.signaling_thread = _signalingThread.get();
dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory();
cricket::MediaEngineDependencies media_deps;
media_deps.adm = webrtc::CreateAudioDeviceModule();
media_deps.task_queue_factory = dependencies.task_queue_factory.get();
media_deps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
media_deps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]);
media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
dependencies.media_engine = cricket::CreateMediaEngine(std::move(media_deps));
dependencies.call_factory = webrtc::CreateCallFactory();
dependencies.event_log_factory =
std::make_unique<webrtc::RtcEventLogFactory>(dependencies.task_queue_factory.get());
dependencies.network_controller_factory = nil;
dependencies.media_transport_factory = nil;
rtc::scoped_refptr<webrtc::TgPeerConnectionFactory> pc_factory(
new rtc::RefCountedObject<webrtc::TgPeerConnectionFactory>(
std::move(dependencies)));
// Call Initialize synchronously but make sure it is executed on
// |signaling_thread|.
webrtc::MethodCall<webrtc::TgPeerConnectionFactory, bool> call(pc_factory.get(), &webrtc::TgPeerConnectionFactory::Initialize);
result = call.Marshal(RTC_FROM_HERE, pc_factory->signaling_thread());
if (!result) {
return nil;
}
_nativeFactory = webrtc::TgPeerConnectionFactoryProxy::Create(pc_factory->signaling_thread(), pc_factory);
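// The proxy wrapper marshals subsequent factory calls onto the signaling
// thread, matching WebRTC's threading model.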
webrtc::PeerConnectionInterface::RTCConfiguration config;
config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
config.continual_gathering_policy = webrtc::PeerConnectionInterface::ContinualGatheringPolicy::GATHER_CONTINUALLY;
webrtc::PeerConnectionInterface::IceServer iceServer;
iceServer.uri = "stun:stun.l.google.com:19302";
/*iceServer.uri = "stun:rrrtest.uksouth.cloudapp.azure.com:3478";
iceServer.username = "user";
iceServer.password = "root";*/
config.servers.push_back(iceServer);
/*webrtc::PeerConnectionInterface::IceServer turnServer;
turnServer.uri = "turn:rrrtest.uksouth.cloudapp.azure.com:3478";
turnServer.username = "user";
turnServer.password = "root";
config.servers.push_back(turnServer);*/
//config.type = webrtc::PeerConnectionInterface::kRelay;
_observer.reset(new PeerConnectionObserverImpl(_discoveredIceCandidate, _connectionStateChanged));
_peerConnection = _nativeFactory->CreatePeerConnection(config, nullptr, nullptr, _observer.get());
assert(_peerConnection != nullptr);
std::vector<std::string> streamIds;
streamIds.push_back("stream");
cricket::AudioOptions options;
rtc::scoped_refptr<webrtc::AudioSourceInterface> audioSource = _nativeFactory->CreateAudioSource(options);
_localAudioTrack = _nativeFactory->CreateAudioTrack("audio0", audioSource);
_peerConnection->AddTrack(_localAudioTrack, streamIds);
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
_nativeVideoSource = webrtc::VideoTrackSourceProxy::Create(_signalingThread.get(), _workerThread.get(), objCVideoTrackSource);
_localVideoTrack = _nativeFactory->CreateVideoTrack("video0", _nativeVideoSource);
_peerConnection->AddTrack(_localVideoTrack, streamIds);
[self startLocalVideo];
}
return self;
}
- (void)close {
if (_videoCapturer != nil) {
[_videoCapturer stopCapture];
}
_peerConnection->Close();
}
- (void)startLocalVideo {
#if TARGET_OS_SIMULATOR
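// Camera capture is unavailable in the iOS simulator, so skip local video there.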
return;
#endif
_videoCapturer = [[VideoCameraCapturer alloc] initWithSource:_nativeVideoSource];
AVCaptureDevice *frontCamera = nil;
for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
if (device.position == AVCaptureDevicePositionFront) {
frontCamera = device;
break;
}
}
if (frontCamera == nil) {
return;
}
NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
}];
AVCaptureDeviceFormat *bestFormat = nil;
for (AVCaptureDeviceFormat *format in sortedFormats) {
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
if (dimensions.width >= 1000 || dimensions.height >= 1000) {
bestFormat = format;
break;
}
}
if (bestFormat == nil) {
return;
}
AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
if (lhs.maxFrameRate < rhs.maxFrameRate) {
return NSOrderedAscending;
} else {
return NSOrderedDescending;
}
}] lastObject];
if (frameRateRange == nil) {
return;
}
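// Note: frameRateRange only serves as a sanity check that the format reports
// a frame rate at all; capture below runs at a fixed 27 fps.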
[_videoCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27];
}
- (void)setIsMuted:(bool)isMuted {
_localAudioTrack->set_enabled(!isMuted);
}
- (void)getOffer:(void (^)(NSString *, NSString *))completion {
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
options.offer_to_receive_audio = 1;
options.offer_to_receive_video = 1;
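// These options ask the generated SDP to negotiate receiving audio and video
// from the peer, in addition to the locally added tracks.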
rtc::scoped_refptr<CreateSessionDescriptionObserverImpl> observer(new rtc::RefCountedObject<CreateSessionDescriptionObserverImpl>(completion));
_peerConnection->CreateOffer(observer, options);
}
- (void)getAnswer:(void (^)(NSString *, NSString *))completion {
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
options.offer_to_receive_audio = 1;
options.offer_to_receive_video = 1;
rtc::scoped_refptr<CreateSessionDescriptionObserverImpl> observer(new rtc::RefCountedObject<CreateSessionDescriptionObserverImpl>(completion));
_peerConnection->CreateAnswer(observer, options);
}
- (void)setLocalDescription:(NSString *)serializedDescription type:(NSString *)type completion:(void (^)())completion {
webrtc::SdpParseError error;
webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type.UTF8String, serializedDescription.UTF8String, &error);
if (sessionDescription != nullptr) {
rtc::scoped_refptr<SetSessionDescriptionObserverImpl> observer(new rtc::RefCountedObject<SetSessionDescriptionObserverImpl>(completion));
_peerConnection->SetLocalDescription(observer, sessionDescription);
}
}
- (void)setRemoteDescription:(NSString *)serializedDescription type:(NSString *)type completion:(void (^)())completion {
webrtc::SdpParseError error;
webrtc::SessionDescriptionInterface *sessionDescription = webrtc::CreateSessionDescription(type.UTF8String, serializedDescription.UTF8String, &error);
if (sessionDescription != nullptr) {
rtc::scoped_refptr<SetSessionDescriptionObserverImpl> observer(new rtc::RefCountedObject<SetSessionDescriptionObserverImpl>(completion));
_peerConnection->SetRemoteDescription(observer, sessionDescription);
}
}
- (void)addIceCandidateWithSdp:(NSString *)sdp sdpMLineIndex:(int)sdpMLineIndex sdpMid:(NSString *)sdpMid {
webrtc::SdpParseError error;
webrtc::IceCandidateInterface *iceCandidate = webrtc::CreateIceCandidate(sdpMid == nil ? "" : sdpMid.UTF8String, sdpMLineIndex, sdp.UTF8String, &error);
if (iceCandidate != nullptr) {
std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate = std::unique_ptr<webrtc::IceCandidateInterface>(iceCandidate);
_peerConnection->AddIceCandidate(std::move(nativeCandidate), [](auto error) {
});
}
}
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
if (_remoteVideoTrack == nullptr) {
for (auto &it : _peerConnection->GetTransceivers()) {
if (it->media_type() == cricket::MediaType::MEDIA_TYPE_VIDEO) {
_remoteVideoTrack = static_cast<webrtc::VideoTrackInterface *>(it->receiver()->track().get());
break;
}
}
}
rtc::scoped_refptr<webrtc::VideoTrackInterface> remoteVideoTrack = _remoteVideoTrack;
dispatch_async(dispatch_get_main_queue(), ^{
if (remoteVideoTrack != nullptr) {
VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
[remoteRenderer addToTrack:remoteVideoTrack];
completion(remoteRenderer);
}
});
}
@end
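On the call-UI side, fetching the remote view is a one-shot request; note the completion is only invoked once a remote video track has been negotiated. A hedged sketch, where videoContainerView is an assumed host view:
[connection getRemoteCameraView:^(UIView * _Nullable remoteView) {
    if (remoteView != nil) {
        remoteView.frame = videoContainerView.bounds;
        [videoContainerView addSubview:remoteView];
    }
}];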

View File

@@ -1,23 +0,0 @@
#ifndef VIDEOCAMERACAPTURER_H
#define VIDEOCAMERACAPTURER_H
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#include <memory>
#include "api/scoped_refptr.h"
#include "api/media_stream_interface.h"
@interface VideoCameraCapturer : NSObject
+ (NSArray<AVCaptureDevice *> *)captureDevices;
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;
- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source;
- (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps;
- (void)stopCapture;
@end
#endif

View File

@@ -1,459 +0,0 @@
#include "VideoCameraCapturer.h"
#import <AVFoundation/AVFoundation.h>
#import "base/RTCLogging.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#import "sdk/objc/native/src/objc_video_track_source.h"
#import "api/video_track_source_proxy.h"
#import "helpers/UIDevice+RTCDevice.h"
#import "helpers/AVCaptureSession+DevicePosition.h"
#import "helpers/RTCDispatcher+Private.h"
#import "base/RTCVideoFrame.h"
static const int64_t kNanosecondsPerSecond = 1000000000;
static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
webrtc::VideoTrackSourceProxy *proxy_source =
static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
}
@interface VideoCameraCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate> {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
dispatch_queue_t _frameQueue;
AVCaptureDevice *_currentDevice;
BOOL _hasRetriedOnFatalError;
BOOL _isRunning;
BOOL _willBeRunning;
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureSession *_captureSession;
FourCharCode _preferredOutputPixelFormat;
FourCharCode _outputPixelFormat;
RTCVideoRotation _rotation;
UIDeviceOrientation _orientation;
}
@end
@implementation VideoCameraCapturer
- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source {
self = [super init];
if (self != nil) {
_source = source;
if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
return nil;
}
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
_orientation = UIDeviceOrientationPortrait;
_rotation = RTCVideoRotation_90;
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruption:)
name:AVCaptureSessionWasInterruptedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruptionEnded:)
name:AVCaptureSessionInterruptionEndedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
name:UIApplicationDidBecomeActiveNotification
object:[UIApplication sharedApplication]];
[center addObserver:self
selector:@selector(handleCaptureSessionRuntimeError:)
name:AVCaptureSessionRuntimeErrorNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStartRunning:)
name:AVCaptureSessionDidStartRunningNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStopRunning:)
name:AVCaptureSessionDidStopRunningNotification
object:_captureSession];
}
return self;
}
- (void)dealloc {
NSAssert(!_willBeRunning, @"Session was still running in VideoCameraCapturer dealloc. Forgot to call stopCapture?");
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
+ (NSArray<AVCaptureDevice *> *)captureDevices {
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
return session.devices;
}
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
// Support opening the device in any format. We make sure it's converted to a format we
// can handle, if needed, in the method `-setupVideoDataOutput`.
return device.formats;
}
- (FourCharCode)preferredOutputPixelFormat {
return _preferredOutputPixelFormat;
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps {
[self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
}
- (void)stopCapture {
[self stopCaptureWithCompletionHandler:nil];
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps
completionHandler:(nullable void (^)(NSError *))completionHandler {
_willBeRunning = YES;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
dispatch_async(dispatch_get_main_queue(), ^{
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
});
_currentDevice = device;
NSError *error = nil;
if (![_currentDevice lockForConfiguration:&error]) {
RTCLogError(@"Failed to lock device %@. Error: %@",
_currentDevice,
error.userInfo);
if (completionHandler) {
completionHandler(error);
}
_willBeRunning = NO;
return;
}
[self reconfigureCaptureSessionInput];
[self updateOrientation];
[self updateDeviceCaptureFormat:format fps:fps];
[self updateVideoDataOutputPixelFormat:format];
[_captureSession startRunning];
[_currentDevice unlockForConfiguration];
_isRunning = YES;
if (completionHandler) {
completionHandler(nil);
}
}];
}
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
_willBeRunning = NO;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("Stop");
_currentDevice = nil;
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
}
[_captureSession stopRunning];
dispatch_async(dispatch_get_main_queue(), ^{
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
});
_isRunning = NO;
if (completionHandler) {
completionHandler();
}
}];
}
#pragma mark iOS notifications
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
}
#endif
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
NSParameterAssert(captureOutput == _videoDataOutput);
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
return;
}
// Default to portrait orientation on iPhone.
BOOL usingFrontCamera = NO;
// Check the image's EXIF for the camera the image came from as the image could have been
// delayed as we set alwaysDiscardsLateVideoFrames to NO.
AVCaptureDevicePosition cameraPosition =
[AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
if (cameraPosition != AVCaptureDevicePositionUnspecified) {
usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
} else {
AVCaptureDeviceInput *deviceInput =
(AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
}
switch (_orientation) {
case UIDeviceOrientationPortrait:
_rotation = RTCVideoRotation_90;
break;
case UIDeviceOrientationPortraitUpsideDown:
_rotation = RTCVideoRotation_270;
break;
case UIDeviceOrientationLandscapeLeft:
_rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
break;
case UIDeviceOrientationLandscapeRight:
_rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
case UIDeviceOrientationUnknown:
// Ignore.
break;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
NSString *droppedReason =
(__bridge NSString *)CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
RTCLogError(@"Dropped sample buffer. Reason: %@", droppedReason);
}
#pragma mark - AVCaptureSession notifications
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
NSString *reasonString = nil;
NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
if (reason) {
switch (reason.intValue) {
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
reasonString = @"VideoDeviceNotAvailableInBackground";
break;
case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
reasonString = @"AudioDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
reasonString = @"VideoDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
break;
}
}
RTCLog(@"Capture session interrupted: %@", reasonString);
}
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
RTCLog(@"Capture session interruption ended.");
}
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error);
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
}];
}
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
RTCLog(@"Capture session started.");
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown error,
// allow future retries on fatal errors.
_hasRetriedOnFatalError = NO;
}];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
RTCLog(@"Capture session stopped.");
}
- (void)handleFatalError {
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!_hasRetriedOnFatalError) {
RTCLogWarning(@"Attempting to recover from fatal capture error.");
[self handleNonFatalError];
_hasRetriedOnFatalError = YES;
} else {
RTCLogError(@"Previous fatal error recovery failed.");
}
}];
}
- (void)handleNonFatalError {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (_isRunning) {
[_captureSession startRunning];
}
}];
}
#pragma mark - UIApplication notifications
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (_isRunning && !_captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[_captureSession startRunning];
}
}];
}
#pragma mark - Private
- (dispatch_queue_t)frameQueue {
if (!_frameQueue) {
_frameQueue =
dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_frameQueue,
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
}
return _frameQueue;
}
- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
NSAssert(_captureSession == nil, @"Setup capture session called twice.");
_captureSession = captureSession;
_captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
_captureSession.usesApplicationAudioSession = NO;
[self setupVideoDataOutput];
// Add the output.
if (![_captureSession canAddOutput:_videoDataOutput]) {
RTCLogError(@"Video data output unsupported.");
return NO;
}
[_captureSession addOutput:_videoDataOutput];
return YES;
}
- (void)setupVideoDataOutput {
NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
// device with the most efficient output format first. Find the first format that we support.
NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
NSMutableOrderedSet *availablePixelFormats =
[NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
[availablePixelFormats intersectSet:supportedPixelFormats];
NSNumber *pixelFormat = availablePixelFormats.firstObject;
NSAssert(pixelFormat, @"Output device has no supported formats.");
_preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
_outputPixelFormat = _preferredOutputPixelFormat;
videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
_videoDataOutput = videoDataOutput;
}
- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
mediaSubType = _preferredOutputPixelFormat;
}
if (mediaSubType != _outputPixelFormat) {
_outputPixelFormat = mediaSubType;
_videoDataOutput.videoSettings =
@{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
}
}
#pragma mark - Private, called inside capture queue
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateDeviceCaptureFormat must be called on the capture queue.");
@try {
_currentDevice.activeFormat = format;
_currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)fps);
} @catch (NSException *exception) {
RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
return;
}
}
- (void)reconfigureCaptureSessionInput {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"reconfigureCaptureSessionInput must be called on the capture queue.");
NSError *error = nil;
AVCaptureDeviceInput *input =
[AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
if (!input) {
RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
return;
}
[_captureSession beginConfiguration];
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
}
if ([_captureSession canAddInput:input]) {
[_captureSession addInput:input];
} else {
RTCLogError(@"Cannot add camera as an input to the session.");
}
[_captureSession commitConfiguration];
}
- (void)updateOrientation {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateOrientation must be called on the capture queue.");
_orientation = [UIDevice currentDevice].orientation;
}
@end
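Teardown mirrors startup; a short sketch, assuming capturer is the active VideoCameraCapturer:
[capturer stopCaptureWithCompletionHandler:^{
    // Capture has stopped on the capture-session queue; safe to release the capturer.
}];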

View File

@@ -1,24 +0,0 @@
#ifndef VIDEOMETALVIEW_H
#define VIDEOMETALVIEW_H
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "api/media_stream_interface.h"
@class RTCVideoFrame;
@interface VideoMetalView : UIView
@property(nonatomic) UIViewContentMode videoContentMode;
@property(nonatomic, getter=isEnabled) BOOL enabled;
@property(nonatomic, nullable) NSValue* rotationOverride;
- (void)setSize:(CGSize)size;
- (void)renderFrame:(nullable RTCVideoFrame *)frame;
- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track;
@end
#endif

View File

@@ -1,274 +0,0 @@
#import "VideoMetalView.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "sdk/objc/native/api/video_frame.h"
#import "api/video/video_sink_interface.h"
#import "api/media_stream_interface.h"
#import "RTCMTLI420Renderer.h"
#import "RTCMTLNV12Renderer.h"
#import "RTCMTLRGBRenderer.h"
#define MTKViewClass NSClassFromString(@"MTKView")
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
class VideoRendererAdapterImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRendererAdapterImpl(VideoMetalView *adapter) {
adapter_ = adapter;
size_ = CGSizeZero;
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
CGSize current_size = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);
if (!CGSizeEqualToSize(size_, current_size)) {
size_ = current_size;
[adapter_ setSize:size_];
}
[adapter_ renderFrame:videoFrame];
}
private:
__weak VideoMetalView *adapter_;
CGSize size_;
};
@interface VideoMetalView () <MTKViewDelegate> {
RTCMTLI420Renderer *_rendererI420;
RTCMTLNV12Renderer *_rendererNV12;
RTCMTLRGBRenderer *_rendererRGB;
MTKView *_metalView;
RTCVideoFrame *_videoFrame;
CGSize _videoFrameSize;
int64_t _lastFrameTimeNs;
std::unique_ptr<VideoRendererAdapterImpl> _sink;
}
@end
@implementation VideoMetalView
- (instancetype)initWithFrame:(CGRect)frameRect {
self = [super initWithFrame:frameRect];
if (self) {
[self configure];
_sink.reset(new VideoRendererAdapterImpl(self));
}
return self;
}
- (BOOL)isEnabled {
return !_metalView.paused;
}
- (void)setEnabled:(BOOL)enabled {
_metalView.paused = !enabled;
}
- (UIViewContentMode)videoContentMode {
return _metalView.contentMode;
}
- (void)setVideoContentMode:(UIViewContentMode)mode {
_metalView.contentMode = mode;
}
#pragma mark - Private
+ (BOOL)isMetalAvailable {
return MTLCreateSystemDefaultDevice() != nil;
}
+ (MTKView *)createMetalView:(CGRect)frame {
return [[MTKViewClass alloc] initWithFrame:frame];
}
+ (RTCMTLNV12Renderer *)createNV12Renderer {
return [[RTCMTLNV12RendererClass alloc] init];
}
+ (RTCMTLI420Renderer *)createI420Renderer {
return [[RTCMTLI420RendererClass alloc] init];
}
+ (RTCMTLRGBRenderer *)createRGBRenderer {
return [[RTCMTLRGBRendererClass alloc] init];
}
- (void)configure {
NSAssert([VideoMetalView isMetalAvailable], @"Metal not available on this device");
_metalView = [VideoMetalView createMetalView:self.bounds];
_metalView.delegate = self;
_metalView.contentMode = UIViewContentModeScaleAspectFill;
[self addSubview:_metalView];
_videoFrameSize = CGSizeZero;
}
- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
[super setMultipleTouchEnabled:multipleTouchEnabled];
_metalView.multipleTouchEnabled = multipleTouchEnabled;
}
- (void)layoutSubviews {
[super layoutSubviews];
CGRect bounds = self.bounds;
_metalView.frame = bounds;
if (!CGSizeEqualToSize(_videoFrameSize, CGSizeZero)) {
_metalView.drawableSize = [self drawableSize];
} else {
_metalView.drawableSize = bounds.size;
}
}
#pragma mark - MTKViewDelegate methods
- (void)drawInMTKView:(nonnull MTKView *)view {
NSAssert(view == _metalView, @"Receiving draw callbacks from foreign instance.");
RTCVideoFrame *videoFrame = _videoFrame;
// Skip rendering if we've already rendered this frame.
if (!videoFrame || videoFrame.timeStampNs == _lastFrameTimeNs) {
return;
}
if (CGRectIsEmpty(view.bounds)) {
return;
}
RTCMTLRenderer *renderer;
if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
if (!_rendererRGB) {
_rendererRGB = [VideoMetalView createRGBRenderer];
if (![_rendererRGB addRenderingDestination:_metalView]) {
_rendererRGB = nil;
RTCLogError(@"Failed to create RGB renderer");
return;
}
}
renderer = _rendererRGB;
} else {
if (!_rendererNV12) {
_rendererNV12 = [VideoMetalView createNV12Renderer];
if (![_rendererNV12 addRenderingDestination:_metalView]) {
_rendererNV12 = nil;
RTCLogError(@"Failed to create NV12 renderer");
return;
}
}
renderer = _rendererNV12;
}
} else {
if (!_rendererI420) {
_rendererI420 = [VideoMetalView createI420Renderer];
if (![_rendererI420 addRenderingDestination:_metalView]) {
_rendererI420 = nil;
RTCLogError(@"Failed to create I420 renderer");
return;
}
}
renderer = _rendererI420;
}
renderer.rotationOverride = _rotationOverride;
[renderer drawFrame:videoFrame];
_lastFrameTimeNs = videoFrame.timeStampNs;
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
#pragma mark -
- (void)setRotationOverride:(NSValue *)rotationOverride {
_rotationOverride = rotationOverride;
_metalView.drawableSize = [self drawableSize];
[self setNeedsLayout];
}
- (RTCVideoRotation)frameRotation {
if (_rotationOverride) {
RTCVideoRotation rotation;
if (@available(iOS 11, *)) {
[_rotationOverride getValue:&rotation size:sizeof(rotation)];
} else {
[_rotationOverride getValue:&rotation];
}
return rotation;
}
return _videoFrame.rotation;
}
- (CGSize)drawableSize {
// Flip width/height if the rotations are not the same.
CGSize videoFrameSize = _videoFrameSize;
RTCVideoRotation frameRotation = [self frameRotation];
BOOL useLandscape =
(frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
BOOL sizeIsLandscape = (_videoFrame.rotation == RTCVideoRotation_0) ||
(_videoFrame.rotation == RTCVideoRotation_180);
if (useLandscape == sizeIsLandscape) {
return videoFrameSize;
} else {
return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
}
}
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
__weak VideoMetalView *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
__strong VideoMetalView *strongSelf = weakSelf;
if (strongSelf == nil) {
return;
}
strongSelf->_videoFrameSize = size;
CGSize drawableSize = [strongSelf drawableSize];
strongSelf->_metalView.drawableSize = drawableSize;
[strongSelf setNeedsLayout];
//[strongSelf.delegate videoView:self didChangeVideoSize:size];
});
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
if (!self.isEnabled) {
return;
}
if (frame == nil) {
RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
return;
}
_videoFrame = frame;
}
- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track {
track->AddOrUpdateSink(_sink.get(), rtc::VideoSinkWants());
}
@end

View File

@@ -1,811 +0,0 @@
/*
* Copyright 2011 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_dtls_transport.h"
#include <algorithm>
#include <memory>
#include <utility>
#include "api/rtc_event_log/rtc_event_log.h"
#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h"
#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h"
#include "p2p/base/packet_transport_internal.h"
#include "rtc_base/buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/dscp.h"
#include "rtc_base/logging.h"
#include "rtc_base/message_queue.h"
#include "rtc_base/rtc_certificate.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "rtc_base/stream.h"
#include "rtc_base/thread.h"
namespace cricket {
// We don't pull the RTP constants from rtputils.h, to avoid a layer violation.
static const size_t kDtlsRecordHeaderLen = 13;
static const size_t kMaxDtlsPacketLen = 2048;
static const size_t kMinRtpPacketLen = 12;
// Maximum number of pending packets in the queue. Packets are read immediately
// after they have been written, so a capacity of "1" is sufficient.
static const size_t kMaxPendingPackets = 1;
// Minimum and maximum values for the initial DTLS handshake timeout. We'll pick
// an initial timeout based on ICE RTT estimates, but clamp it to this range.
static const int kMinHandshakeTimeout = 50;
static const int kMaxHandshakeTimeout = 3000;
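// Demultiplexing heuristics (cf. RFC 5764): the first byte of a DTLS record
// falls in [20, 63], while RTP/RTCP packets carry version 2, so their first
// byte has its top two bits set to 0b10 (i.e. lies in [128, 191]).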
static bool IsDtlsPacket(const char* data, size_t len) {
const uint8_t* u = reinterpret_cast<const uint8_t*>(data);
return (len >= kDtlsRecordHeaderLen && (u[0] > 19 && u[0] < 64));
}
static bool IsDtlsClientHelloPacket(const char* data, size_t len) {
if (!IsDtlsPacket(data, len)) {
return false;
}
const uint8_t* u = reinterpret_cast<const uint8_t*>(data);
return len > 17 && u[0] == 22 && u[13] == 1;
}
static bool IsRtpPacket(const char* data, size_t len) {
const uint8_t* u = reinterpret_cast<const uint8_t*>(data);
return (len >= kMinRtpPacketLen && (u[0] & 0xC0) == 0x80);
}
/*StreamInterfaceChannel::StreamInterfaceChannel(
IceTransportInternal* ice_transport)
: ice_transport_(ice_transport),
state_(rtc::SS_OPEN),
packets_(kMaxPendingPackets, kMaxDtlsPacketLen) {}
rtc::StreamResult StreamInterfaceChannel::Read(void* buffer,
size_t buffer_len,
size_t* read,
int* error) {
if (state_ == rtc::SS_CLOSED)
return rtc::SR_EOS;
if (state_ == rtc::SS_OPENING)
return rtc::SR_BLOCK;
if (!packets_.ReadFront(buffer, buffer_len, read)) {
return rtc::SR_BLOCK;
}
return rtc::SR_SUCCESS;
}
rtc::StreamResult StreamInterfaceChannel::Write(const void* data,
size_t data_len,
size_t* written,
int* error) {
// Always succeeds, since this is an unreliable transport anyway.
// TODO(zhihuang): Should this block if ice_transport_'s temporarily
// unwritable?
rtc::PacketOptions packet_options;
ice_transport_->SendPacket(static_cast<const char*>(data), data_len,
packet_options);
if (written) {
*written = data_len;
}
return rtc::SR_SUCCESS;
}
bool StreamInterfaceChannel::OnPacketReceived(const char* data, size_t size) {
// We force a read event here to ensure that we don't overflow our queue.
bool ret = packets_.WriteBack(data, size, NULL);
RTC_CHECK(ret) << "Failed to write packet to queue.";
if (ret) {
SignalEvent(this, rtc::SE_READ, 0);
}
return ret;
}
rtc::StreamState StreamInterfaceChannel::GetState() const {
return state_;
}
void StreamInterfaceChannel::Close() {
packets_.Clear();
state_ = rtc::SS_CLOSED;
}*/
TgDtlsTransport::TgDtlsTransport(IceTransportInternal* ice_transport,
const webrtc::CryptoOptions& crypto_options,
webrtc::RtcEventLog* event_log)
: transport_name_(ice_transport->transport_name()),
component_(ice_transport->component()),
ice_transport_(ice_transport),
downward_(NULL),
srtp_ciphers_(crypto_options.GetSupportedDtlsSrtpCryptoSuites()),
ssl_max_version_(rtc::SSL_PROTOCOL_DTLS_12),
crypto_options_(crypto_options),
event_log_(event_log) {
RTC_DCHECK(ice_transport_);
ConnectToIceTransport();
}
TgDtlsTransport::~TgDtlsTransport() = default;
const webrtc::CryptoOptions& TgDtlsTransport::crypto_options() const {
return crypto_options_;
}
DtlsTransportState TgDtlsTransport::dtls_state() const {
return dtls_state_;
}
const std::string& TgDtlsTransport::transport_name() const {
return transport_name_;
}
int TgDtlsTransport::component() const {
return component_;
}
bool TgDtlsTransport::IsDtlsActive() const {
return dtls_active_;
}
bool TgDtlsTransport::SetLocalCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
if (dtls_active_) {
if (certificate == local_certificate_) {
// This may happen during renegotiation.
RTC_LOG(LS_INFO) << ToString() << ": Ignoring identical DTLS identity";
return true;
} else {
RTC_LOG(LS_ERROR) << ToString()
<< ": Can't change DTLS local identity in this state";
return false;
}
}
if (certificate) {
local_certificate_ = certificate;
dtls_active_ = true;
} else {
RTC_LOG(LS_INFO) << ToString()
<< ": NULL DTLS identity supplied. Not doing DTLS";
}
return true;
}
rtc::scoped_refptr<rtc::RTCCertificate> TgDtlsTransport::GetLocalCertificate()
const {
return local_certificate_;
}
bool TgDtlsTransport::SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) {
if (dtls_active_) {
RTC_LOG(LS_ERROR) << "Not changing max. protocol version "
"while DTLS is negotiating";
return false;
}
ssl_max_version_ = version;
return true;
}
bool TgDtlsTransport::SetDtlsRole(rtc::SSLRole role) {
if (dtls_) {
RTC_DCHECK(dtls_role_);
if (*dtls_role_ != role) {
RTC_LOG(LS_ERROR)
<< "SSL Role can't be reversed after the session is setup.";
return false;
}
return true;
}
dtls_role_ = role;
return true;
}
bool TgDtlsTransport::GetDtlsRole(rtc::SSLRole* role) const {
if (!dtls_role_) {
return false;
}
*role = *dtls_role_;
return true;
}
bool TgDtlsTransport::GetSslCipherSuite(int* cipher) {
if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
return false;
}
return dtls_->GetSslCipherSuite(cipher);
}
bool TgDtlsTransport::SetRemoteFingerprint(const std::string& digest_alg,
const uint8_t* digest,
size_t digest_len) {
rtc::Buffer remote_fingerprint_value(digest, digest_len);
// Once we have the local certificate, the same remote fingerprint can be set
// multiple times.
if (dtls_active_ && remote_fingerprint_value_ == remote_fingerprint_value &&
!digest_alg.empty()) {
// This may happen during renegotiation.
RTC_LOG(LS_INFO) << ToString()
<< ": Ignoring identical remote DTLS fingerprint";
return true;
}
// If the other side doesn't support DTLS, turn off |dtls_active_|.
// TODO(deadbeef): Remove this. It's dangerous, because it relies on higher
// level code to ensure DTLS is actually used, but there are tests that
// depend on it, for the case where an m= section is rejected. In that case
// SetRemoteFingerprint shouldn't even be called though.
if (digest_alg.empty()) {
RTC_DCHECK(!digest_len);
RTC_LOG(LS_INFO) << ToString() << ": Other side didn't support DTLS.";
dtls_active_ = false;
return true;
}
// Otherwise, we must have a local certificate before setting remote
// fingerprint.
if (!dtls_active_) {
RTC_LOG(LS_ERROR) << ToString()
<< ": Can't set DTLS remote settings in this state.";
return false;
}
// At this point we know we are doing DTLS
bool fingerprint_changing = remote_fingerprint_value_.size() > 0u;
remote_fingerprint_value_ = std::move(remote_fingerprint_value);
remote_fingerprint_algorithm_ = digest_alg;
if (dtls_ && !fingerprint_changing) {
// This can occur if DTLS is set up before a remote fingerprint is
// received. For instance, if we set up DTLS due to receiving an early
// ClientHello.
rtc::SSLPeerCertificateDigestError err;
if (!dtls_->SetPeerCertificateDigest(
remote_fingerprint_algorithm_,
reinterpret_cast<unsigned char*>(remote_fingerprint_value_.data()),
remote_fingerprint_value_.size(), &err)) {
RTC_LOG(LS_ERROR) << ToString()
<< ": Couldn't set DTLS certificate digest.";
set_dtls_state(DTLS_TRANSPORT_FAILED);
// If the error is "verification failed", don't return false, because
// this means the fingerprint was formatted correctly but didn't match
// the certificate from the DTLS handshake. Thus the DTLS state should go
// to "failed", but SetRemoteDescription shouldn't fail.
return err == rtc::SSLPeerCertificateDigestError::VERIFICATION_FAILED;
}
return true;
}
// If the fingerprint is changing, we'll tear down the DTLS association and
// create a new one, resetting our state.
if (dtls_ && fingerprint_changing) {
dtls_.reset(nullptr);
set_dtls_state(DTLS_TRANSPORT_NEW);
set_writable(false);
}
if (!SetupDtls()) {
set_dtls_state(DTLS_TRANSPORT_FAILED);
return false;
}
return true;
}
std::unique_ptr<rtc::SSLCertChain> TgDtlsTransport::GetRemoteSSLCertChain()
const {
if (!dtls_) {
return nullptr;
}
return dtls_->GetPeerSSLCertChain();
}
bool TgDtlsTransport::ExportKeyingMaterial(const std::string& label,
const uint8_t* context,
size_t context_len,
bool use_context,
uint8_t* result,
size_t result_len) {
return (dtls_.get())
? dtls_->ExportKeyingMaterial(label, context, context_len,
use_context, result, result_len)
: false;
}
bool TgDtlsTransport::SetupDtls() {
RTC_DCHECK(dtls_role_);
StreamInterfaceChannel* downward = new StreamInterfaceChannel(ice_transport_);
dtls_.reset(rtc::SSLStreamAdapter::Create(downward));
if (!dtls_) {
RTC_LOG(LS_ERROR) << ToString() << ": Failed to create DTLS adapter.";
delete downward;
return false;
}
downward_ = downward;
dtls_->SetIdentity(local_certificate_->identity()->GetReference());
dtls_->SetMode(rtc::SSL_MODE_DTLS);
dtls_->SetMaxProtocolVersion(ssl_max_version_);
dtls_->SetServerRole(*dtls_role_);
dtls_->SignalEvent.connect(this, &TgDtlsTransport::OnDtlsEvent);
dtls_->SignalSSLHandshakeError.connect(this,
&TgDtlsTransport::OnDtlsHandshakeError);
if (remote_fingerprint_value_.size() &&
!dtls_->SetPeerCertificateDigest(
remote_fingerprint_algorithm_,
reinterpret_cast<unsigned char*>(remote_fingerprint_value_.data()),
remote_fingerprint_value_.size())) {
RTC_LOG(LS_ERROR) << ToString()
<< ": Couldn't set DTLS certificate digest.";
return false;
}
// Set up DTLS-SRTP, if it's been enabled.
if (!srtp_ciphers_.empty()) {
if (!dtls_->SetDtlsSrtpCryptoSuites(srtp_ciphers_)) {
RTC_LOG(LS_ERROR) << ToString() << ": Couldn't set DTLS-SRTP ciphers.";
return false;
}
} else {
RTC_LOG(LS_INFO) << ToString() << ": Not using DTLS-SRTP.";
}
RTC_LOG(LS_INFO) << ToString() << ": DTLS setup complete.";
// If the underlying ice_transport is already writable at this point, we may
// be able to start DTLS right away.
MaybeStartDtls();
return true;
}
bool TgDtlsTransport::GetSrtpCryptoSuite(int* cipher) {
if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
return false;
}
return dtls_->GetDtlsSrtpCryptoSuite(cipher);
}
bool TgDtlsTransport::GetSslVersionBytes(int* version) const {
if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
return false;
}
return dtls_->GetSslVersionBytes(version);
}
// Called from upper layers to send a media packet.
int TgDtlsTransport::SendPacket(const char* data,
size_t size,
const rtc::PacketOptions& options,
int flags) {
if (!dtls_active_) {
// Not doing DTLS.
return ice_transport_->SendPacket(data, size, options);
}
switch (dtls_state()) {
case DTLS_TRANSPORT_NEW:
// Can't send data until the connection is active.
// TODO(ekr@rtfm.com): assert here if dtls_ is NULL?
return -1;
case DTLS_TRANSPORT_CONNECTING:
// Can't send data until the connection is active.
return -1;
case DTLS_TRANSPORT_CONNECTED:
if (flags & PF_SRTP_BYPASS) {
RTC_DCHECK(!srtp_ciphers_.empty());
if (!IsRtpPacket(data, size)) {
return -1;
}
return ice_transport_->SendPacket(data, size, options);
} else {
return (dtls_->WriteAll(data, size, NULL, NULL) == rtc::SR_SUCCESS)
? static_cast<int>(size)
: -1;
}
case DTLS_TRANSPORT_FAILED:
case DTLS_TRANSPORT_CLOSED:
// Can't send anything when we're closed.
return -1;
default:
RTC_NOTREACHED();
return -1;
}
}
IceTransportInternal* TgDtlsTransport::ice_transport() {
return ice_transport_;
}
bool TgDtlsTransport::IsDtlsConnected() {
return dtls_ && dtls_->IsTlsConnected();
}
bool TgDtlsTransport::receiving() const {
return receiving_;
}
bool TgDtlsTransport::writable() const {
return writable_;
}
int TgDtlsTransport::GetError() {
return ice_transport_->GetError();
}
absl::optional<rtc::NetworkRoute> TgDtlsTransport::network_route() const {
return ice_transport_->network_route();
}
bool TgDtlsTransport::GetOption(rtc::Socket::Option opt, int* value) {
return ice_transport_->GetOption(opt, value);
}
int TgDtlsTransport::SetOption(rtc::Socket::Option opt, int value) {
return ice_transport_->SetOption(opt, value);
}
void TgDtlsTransport::ConnectToIceTransport() {
RTC_DCHECK(ice_transport_);
ice_transport_->SignalWritableState.connect(this,
&TgDtlsTransport::OnWritableState);
ice_transport_->SignalReadPacket.connect(this, &TgDtlsTransport::OnReadPacket);
ice_transport_->SignalSentPacket.connect(this, &TgDtlsTransport::OnSentPacket);
ice_transport_->SignalReadyToSend.connect(this,
&TgDtlsTransport::OnReadyToSend);
ice_transport_->SignalReceivingState.connect(
this, &TgDtlsTransport::OnReceivingState);
ice_transport_->SignalNetworkRouteChanged.connect(
this, &TgDtlsTransport::OnNetworkRouteChanged);
}
// The state transition logic here is as follows:
// (1) If we're not doing DTLS-SRTP, then the state is just the
// state of the underlying impl()
// (2) If we're doing DTLS-SRTP:
// - Prior to the DTLS handshake, the state is neither receiving nor
// writable
// - When the impl goes writable for the first time we
// start the DTLS handshake
// - Once the DTLS handshake completes, the state is that of the
// impl again
void TgDtlsTransport::OnWritableState(rtc::PacketTransportInternal* transport) {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(transport == ice_transport_);
RTC_LOG(LS_VERBOSE) << ToString()
<< ": ice_transport writable state changed to "
<< ice_transport_->writable();
if (!dtls_active_) {
// Not doing DTLS.
// Note: SignalWritableState fired by set_writable.
set_writable(ice_transport_->writable());
return;
}
switch (dtls_state()) {
case DTLS_TRANSPORT_NEW:
MaybeStartDtls();
break;
case DTLS_TRANSPORT_CONNECTED:
// Note: SignalWritableState fired by set_writable.
set_writable(ice_transport_->writable());
break;
case DTLS_TRANSPORT_CONNECTING:
// Do nothing.
break;
case DTLS_TRANSPORT_FAILED:
case DTLS_TRANSPORT_CLOSED:
// Should not happen. Do nothing.
break;
}
}
void TgDtlsTransport::OnReceivingState(rtc::PacketTransportInternal* transport) {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(transport == ice_transport_);
RTC_LOG(LS_VERBOSE) << ToString()
<< ": ice_transport "
"receiving state changed to "
<< ice_transport_->receiving();
if (!dtls_active_ || dtls_state() == DTLS_TRANSPORT_CONNECTED) {
// Note: SignalReceivingState fired by set_receiving.
set_receiving(ice_transport_->receiving());
}
}
void TgDtlsTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
const char* data,
size_t size,
const int64_t& packet_time_us,
int flags) {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(transport == ice_transport_);
RTC_DCHECK(flags == 0);
if (!dtls_active_) {
// Not doing DTLS.
SignalReadPacket(this, data, size, packet_time_us, 0);
return;
}
switch (dtls_state()) {
case DTLS_TRANSPORT_NEW:
if (dtls_) {
RTC_LOG(LS_INFO) << ToString()
<< ": Packet received before DTLS started.";
} else {
RTC_LOG(LS_WARNING) << ToString()
<< ": Packet received before we know if we are "
"doing DTLS or not.";
}
// Cache a client hello packet received before DTLS has actually started.
if (IsDtlsClientHelloPacket(data, size)) {
RTC_LOG(LS_INFO) << ToString()
<< ": Caching DTLS ClientHello packet until DTLS is "
"started.";
cached_client_hello_.SetData(data, size);
// If we haven't started setting up DTLS yet (because we don't have a
// remote fingerprint/role), we can use the client hello as a clue that
// the peer has chosen the client role, and proceed with the handshake.
// The fingerprint will be verified when it's set.
if (!dtls_ && local_certificate_) {
SetDtlsRole(rtc::SSL_SERVER);
SetupDtls();
}
} else {
RTC_LOG(LS_INFO) << ToString()
<< ": Not a DTLS ClientHello packet; dropping.";
}
break;
case DTLS_TRANSPORT_CONNECTING:
case DTLS_TRANSPORT_CONNECTED:
// We should only get DTLS or SRTP packets; STUN's already been demuxed.
// Is this potentially a DTLS packet?
if (IsDtlsPacket(data, size)) {
if (!HandleDtlsPacket(data, size)) {
RTC_LOG(LS_ERROR) << ToString() << ": Failed to handle DTLS packet.";
return;
}
} else {
// Not a DTLS packet; our handshake should be complete by now.
if (dtls_state() != DTLS_TRANSPORT_CONNECTED) {
RTC_LOG(LS_ERROR) << ToString()
<< ": Received non-DTLS packet before DTLS "
"complete.";
return;
}
// And it had better be a SRTP packet.
if (!IsRtpPacket(data, size)) {
RTC_LOG(LS_ERROR)
<< ToString() << ": Received unexpected non-DTLS packet.";
return;
}
// Sanity check.
RTC_DCHECK(!srtp_ciphers_.empty());
// Signal this upwards as a bypass packet.
SignalReadPacket(this, data, size, packet_time_us, PF_SRTP_BYPASS);
}
break;
case DTLS_TRANSPORT_FAILED:
case DTLS_TRANSPORT_CLOSED:
// This shouldn't be happening. Drop the packet.
break;
}
}
void TgDtlsTransport::OnSentPacket(rtc::PacketTransportInternal* transport,
const rtc::SentPacket& sent_packet) {
RTC_DCHECK_RUN_ON(&thread_checker_);
SignalSentPacket(this, sent_packet);
}
void TgDtlsTransport::OnReadyToSend(rtc::PacketTransportInternal* transport) {
RTC_DCHECK_RUN_ON(&thread_checker_);
if (writable()) {
SignalReadyToSend(this);
}
}
void TgDtlsTransport::OnDtlsEvent(rtc::StreamInterface* dtls, int sig, int err) {
RTC_DCHECK_RUN_ON(&thread_checker_);
RTC_DCHECK(dtls == dtls_.get());
if (sig & rtc::SE_OPEN) {
// This is the first time.
RTC_LOG(LS_INFO) << ToString() << ": DTLS handshake complete.";
if (dtls_->GetState() == rtc::SS_OPEN) {
// The check for OPEN shouldn't be necessary but let's make
// sure we don't accidentally frob the state if it's closed.
set_dtls_state(DTLS_TRANSPORT_CONNECTED);
set_writable(true);
}
}
if (sig & rtc::SE_READ) {
char buf[kMaxDtlsPacketLen];
size_t read;
int read_error;
rtc::StreamResult ret;
// The underlying DTLS stream may have received multiple DTLS records in
// one packet, so read all of them.
do {
ret = dtls_->Read(buf, sizeof(buf), &read, &read_error);
if (ret == rtc::SR_SUCCESS) {
SignalReadPacket(this, buf, read, rtc::TimeMicros(), 0);
} else if (ret == rtc::SR_EOS) {
// Remote peer shut down the association with no error.
RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed by remote";
set_writable(false);
set_dtls_state(DTLS_TRANSPORT_CLOSED);
} else if (ret == rtc::SR_ERROR) {
// Remote peer shut down the association with an error.
RTC_LOG(LS_INFO)
<< ToString()
<< ": Closed by remote with DTLS transport error, code="
<< read_error;
set_writable(false);
set_dtls_state(DTLS_TRANSPORT_FAILED);
}
} while (ret == rtc::SR_SUCCESS);
}
if (sig & rtc::SE_CLOSE) {
RTC_DCHECK(sig == rtc::SE_CLOSE); // SE_CLOSE should be by itself.
set_writable(false);
if (!err) {
RTC_LOG(LS_INFO) << ToString() << ": DTLS transport closed";
set_dtls_state(DTLS_TRANSPORT_CLOSED);
} else {
RTC_LOG(LS_INFO) << ToString() << ": DTLS transport error, code=" << err;
set_dtls_state(DTLS_TRANSPORT_FAILED);
}
}
}
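// For reference, |sig| above is a bitmask of rtc::StreamEvent flags (SE_OPEN,
// SE_READ, SE_WRITE, SE_CLOSE), so a single callback may report, e.g.,
// SE_OPEN | SE_READ when the handshake completes with application data
// already buffered; SE_CLOSE arrives alone, as the RTC_DCHECK above asserts.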
void TgDtlsTransport::OnNetworkRouteChanged(
absl::optional<rtc::NetworkRoute> network_route) {
RTC_DCHECK_RUN_ON(&thread_checker_);
SignalNetworkRouteChanged(network_route);
}
void TgDtlsTransport::MaybeStartDtls() {
if (dtls_ && ice_transport_->writable()) {
ConfigureHandshakeTimeout();
if (dtls_->StartSSL()) {
// This should never fail:
// Because we are operating in a nonblocking mode and all
// incoming packets come in via OnReadPacket(), which rejects
// packets in this state, the incoming queue must be empty. We
// ignore write errors, thus any errors must be because of
// configuration and therefore are our fault.
RTC_NOTREACHED() << "StartSSL failed.";
RTC_LOG(LS_ERROR) << ToString() << ": Couldn't start DTLS handshake";
set_dtls_state(DTLS_TRANSPORT_FAILED);
return;
}
RTC_LOG(LS_INFO) << ToString() << ": DtlsTransport: Started DTLS handshake";
set_dtls_state(DTLS_TRANSPORT_CONNECTING);
// Now that the handshake has started, we can process a cached ClientHello
// (if one exists).
if (cached_client_hello_.size()) {
if (*dtls_role_ == rtc::SSL_SERVER) {
RTC_LOG(LS_INFO) << ToString()
<< ": Handling cached DTLS ClientHello packet.";
if (!HandleDtlsPacket(cached_client_hello_.data<char>(),
cached_client_hello_.size())) {
RTC_LOG(LS_ERROR) << ToString() << ": Failed to handle DTLS packet.";
}
} else {
RTC_LOG(LS_WARNING) << ToString()
<< ": Discarding cached DTLS ClientHello packet "
"because we don't have the server role.";
}
cached_client_hello_.Clear();
}
}
}
// Called from OnReadPacket when a DTLS packet is received.
bool TgDtlsTransport::HandleDtlsPacket(const char* data, size_t size) {
// Sanity check we're not passing junk that
// just looks like DTLS.
const uint8_t* tmp_data = reinterpret_cast<const uint8_t*>(data);
size_t tmp_size = size;
while (tmp_size > 0) {
if (tmp_size < kDtlsRecordHeaderLen)
return false; // Too short for the header
size_t record_len = (tmp_data[11] << 8) | (tmp_data[12]);
if ((record_len + kDtlsRecordHeaderLen) > tmp_size)
return false; // Body too short
tmp_data += record_len + kDtlsRecordHeaderLen;
tmp_size -= record_len + kDtlsRecordHeaderLen;
}
  // Looks good. Pass to the StreamInterfaceChannel, which hands it up to
  // the DTLS stack.
return downward_->OnPacketReceived(data, size);
}
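// For reference, the sanity check above walks DTLS 1.x records, whose
// headers are kDtlsRecordHeaderLen (13) bytes long; bytes 11-12 hold the
// big-endian body length read in the loop:
//
//   byte  0     : ContentType
//   bytes 1-2   : ProtocolVersion
//   bytes 3-4   : epoch
//   bytes 5-10  : sequence_number (48 bits)
//   bytes 11-12 : length of the record body that follows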
void TgDtlsTransport::set_receiving(bool receiving) {
if (receiving_ == receiving) {
return;
}
receiving_ = receiving;
SignalReceivingState(this);
}
void TgDtlsTransport::set_writable(bool writable) {
if (writable_ == writable) {
return;
}
if (event_log_) {
event_log_->Log(
std::make_unique<webrtc::RtcEventDtlsWritableState>(writable));
}
RTC_LOG(LS_VERBOSE) << ToString() << ": set_writable to: " << writable;
writable_ = writable;
if (writable_) {
SignalReadyToSend(this);
}
SignalWritableState(this);
}
void TgDtlsTransport::set_dtls_state(DtlsTransportState state) {
if (dtls_state_ == state) {
return;
}
if (event_log_) {
event_log_->Log(std::make_unique<webrtc::RtcEventDtlsTransportState>(
ConvertDtlsTransportState(state)));
}
RTC_LOG(LS_VERBOSE) << ToString() << ": set_dtls_state from:" << dtls_state_
<< " to " << state;
dtls_state_ = state;
SignalDtlsState(this, state);
}
void TgDtlsTransport::OnDtlsHandshakeError(rtc::SSLHandshakeError error) {
SignalDtlsHandshakeError(error);
}
void TgDtlsTransport::ConfigureHandshakeTimeout() {
RTC_DCHECK(dtls_);
absl::optional<int> rtt = ice_transport_->GetRttEstimate();
if (rtt) {
// Limit the timeout to a reasonable range in case the ICE RTT takes
// extreme values.
int initial_timeout = std::max(kMinHandshakeTimeout,
std::min(kMaxHandshakeTimeout, 2 * (*rtt)));
RTC_LOG(LS_INFO) << ToString() << ": configuring DTLS handshake timeout "
<< initial_timeout << " based on ICE RTT " << *rtt;
dtls_->SetInitialRetransmissionTimeout(initial_timeout);
} else {
RTC_LOG(LS_INFO)
<< ToString()
<< ": no RTT estimate - using default DTLS handshake timeout";
}
}
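// A worked example of the clamping above, assuming the usual WebRTC values
// kMinHandshakeTimeout = 50 ms and kMaxHandshakeTimeout = 3000 ms:
//
//   ICE RTT =    5 ms  ->  2 * 5    =   10 ms  -> clamped up to    50 ms
//   ICE RTT =  200 ms  ->  2 * 200  =  400 ms  -> used as-is:     400 ms
//   ICE RTT = 4000 ms  ->  2 * 4000 = 8000 ms  -> clamped down to 3000 ms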
} // namespace cricket

View File

@ -1,229 +0,0 @@
/*
* Copyright 2011 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_P2P_BASE_DTLS_TRANSPORT_H_
#define TG_P2P_BASE_DTLS_TRANSPORT_H_
#include <memory>
#include <string>
#include <vector>
#include "p2p/base/dtls_transport.h"
#include "api/crypto/crypto_options.h"
#include "p2p/base/dtls_transport_internal.h"
#include "p2p/base/ice_transport_internal.h"
#include "rtc_base/buffer.h"
#include "rtc_base/buffer_queue.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "rtc_base/stream.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/thread_checker.h"
namespace rtc {
class PacketTransportInternal;
}
namespace cricket {
// This class provides a DTLS SSLStreamAdapter inside a TransportChannel-style
// packet-based interface, wrapping an existing TransportChannel instance
// (e.g. a P2PTransportChannel)
// Here's the way this works:
//
// DtlsTransport {
// SSLStreamAdapter* dtls_ {
// StreamInterfaceChannel downward_ {
// IceTransportInternal* ice_transport_;
// }
// }
// }
//
// - Data which comes into DtlsTransport from the underlying
// ice_transport_ via OnReadPacket() is checked for whether it is DTLS
// or not, and if it is, is passed to DtlsTransport::HandleDtlsPacket,
// which pushes it into to downward_. dtls_ is listening for events on
// downward_, so it immediately calls downward_->Read().
//
// - Data written to DtlsTransport is passed either to downward_ or directly
// to ice_transport_, depending on whether DTLS is negotiated and whether
// the flags include PF_SRTP_BYPASS
//
// - The SSLStreamAdapter writes to downward_->Write() which translates it
// into packet writes on ice_transport_.
//
// This class is not thread safe; all methods must be called on the same thread
// as the constructor.
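// A rough usage sketch under the ordering constraints documented on the
// methods below (certificate before fingerprint, role before fingerprint);
// |ice|, |options|, |log|, |cert|, |digest| and |digest_len| are
// placeholders, not names from this file:
//
//   auto dtls = std::make_unique<TgDtlsTransport>(ice, options, log);
//   dtls->SetLocalCertificate(cert);     // makes DTLS "active"
//   dtls->SetDtlsRole(rtc::SSL_CLIENT);
//   dtls->SetRemoteFingerprint("sha-256", digest, digest_len);
//   // The handshake starts once |ice| becomes writable; decrypted
//   // application data is then delivered via SignalReadPacket.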
class TgDtlsTransport : public DtlsTransportInternal {
public:
// |ice_transport| is the ICE transport this DTLS transport is wrapping. It
// must outlive this DTLS transport.
//
// |crypto_options| are the options used for the DTLS handshake. This affects
// whether GCM crypto suites are negotiated.
//
// |event_log| is an optional RtcEventLog for logging state changes. It should
// outlive the DtlsTransport.
explicit TgDtlsTransport(IceTransportInternal* ice_transport,
const webrtc::CryptoOptions& crypto_options,
webrtc::RtcEventLog* event_log);
~TgDtlsTransport() override;
const webrtc::CryptoOptions& crypto_options() const override;
DtlsTransportState dtls_state() const override;
const std::string& transport_name() const override;
int component() const override;
// DTLS is active if a local certificate was set. Otherwise this acts in a
// "passthrough" mode, sending packets directly through the underlying ICE
// transport.
// TODO(deadbeef): Remove this weirdness, and handle it in the upper layers.
bool IsDtlsActive() const override;
// SetLocalCertificate is what makes DTLS active. It must be called before
// SetRemoteFingerprint.
// TODO(deadbeef): Once DtlsTransport no longer has the concept of being
// "active" or not (acting as a passthrough if not active), just require this
// certificate on construction or "Start".
bool SetLocalCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) override;
rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate() const override;
// SetRemoteFingerprint must be called after SetLocalCertificate, and after
// any other setup methods like SetDtlsRole. It's what triggers the actual
// DTLS setup.
// TODO(deadbeef): Rename to "Start" like in ORTC?
bool SetRemoteFingerprint(const std::string& digest_alg,
const uint8_t* digest,
size_t digest_len) override;
// Called to send a packet (via DTLS, if turned on).
int SendPacket(const char* data,
size_t size,
const rtc::PacketOptions& options,
int flags) override;
bool GetOption(rtc::Socket::Option opt, int* value) override;
bool SetSslMaxProtocolVersion(rtc::SSLProtocolVersion version) override;
// Find out which TLS version was negotiated
bool GetSslVersionBytes(int* version) const override;
// Find out which DTLS-SRTP cipher was negotiated
bool GetSrtpCryptoSuite(int* cipher) override;
bool GetDtlsRole(rtc::SSLRole* role) const override;
bool SetDtlsRole(rtc::SSLRole role) override;
// Find out which DTLS cipher was negotiated
bool GetSslCipherSuite(int* cipher) override;
// Once DTLS has been established, this method retrieves the certificate
// chain in use by the remote peer, for use in external identity
// verification.
std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain() const override;
// Once DTLS has been established (i.e., this ice_transport is writable), this
// method extracts the keys negotiated during the DTLS handshake, for use in
// external encryption. DTLS-SRTP uses this to extract the needed SRTP keys.
// See the SSLStreamAdapter documentation for info on the specific parameters.
bool ExportKeyingMaterial(const std::string& label,
const uint8_t* context,
size_t context_len,
bool use_context,
uint8_t* result,
size_t result_len) override;
IceTransportInternal* ice_transport() override;
// For informational purposes. Tells if the DTLS handshake has finished.
// This may be true even if writable() is false, if the remote fingerprint
// has not yet been verified.
bool IsDtlsConnected();
bool receiving() const override;
bool writable() const override;
int GetError() override;
absl::optional<rtc::NetworkRoute> network_route() const override;
int SetOption(rtc::Socket::Option opt, int value) override;
std::string ToString() const {
const absl::string_view RECEIVING_ABBREV[2] = {"_", "R"};
const absl::string_view WRITABLE_ABBREV[2] = {"_", "W"};
rtc::StringBuilder sb;
sb << "DtlsTransport[" << transport_name_ << "|" << component_ << "|"
<< RECEIVING_ABBREV[receiving()] << WRITABLE_ABBREV[writable()] << "]";
return sb.Release();
}
private:
void ConnectToIceTransport();
void OnWritableState(rtc::PacketTransportInternal* transport);
void OnReadPacket(rtc::PacketTransportInternal* transport,
const char* data,
size_t size,
const int64_t& packet_time_us,
int flags);
void OnSentPacket(rtc::PacketTransportInternal* transport,
const rtc::SentPacket& sent_packet);
void OnReadyToSend(rtc::PacketTransportInternal* transport);
void OnReceivingState(rtc::PacketTransportInternal* transport);
void OnDtlsEvent(rtc::StreamInterface* stream_, int sig, int err);
void OnNetworkRouteChanged(absl::optional<rtc::NetworkRoute> network_route);
bool SetupDtls();
void MaybeStartDtls();
bool HandleDtlsPacket(const char* data, size_t size);
void OnDtlsHandshakeError(rtc::SSLHandshakeError error);
void ConfigureHandshakeTimeout();
void set_receiving(bool receiving);
void set_writable(bool writable);
// Sets the DTLS state, signaling if necessary.
void set_dtls_state(DtlsTransportState state);
rtc::ThreadChecker thread_checker_;
std::string transport_name_;
int component_;
DtlsTransportState dtls_state_ = DTLS_TRANSPORT_NEW;
// Underlying ice_transport, not owned by this class.
IceTransportInternal* ice_transport_;
std::unique_ptr<rtc::SSLStreamAdapter> dtls_; // The DTLS stream
StreamInterfaceChannel*
downward_; // Wrapper for ice_transport_, owned by dtls_.
std::vector<int> srtp_ciphers_; // SRTP ciphers to use with DTLS.
bool dtls_active_ = false;
rtc::scoped_refptr<rtc::RTCCertificate> local_certificate_;
absl::optional<rtc::SSLRole> dtls_role_;
rtc::SSLProtocolVersion ssl_max_version_;
webrtc::CryptoOptions crypto_options_;
rtc::Buffer remote_fingerprint_value_;
std::string remote_fingerprint_algorithm_;
// Cached DTLS ClientHello packet that was received before we started the
// DTLS handshake. This could happen if the hello was received before the
// ice transport became writable, or before a remote fingerprint was received.
rtc::Buffer cached_client_hello_;
bool receiving_ = false;
bool writable_ = false;
webrtc::RtcEventLog* const event_log_;
RTC_DISALLOW_COPY_AND_ASSIGN(TgDtlsTransport);
};
} // namespace cricket
#endif // TG_P2P_BASE_DTLS_TRANSPORT_H_

View File

@ -1,868 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_jsep_transport.h"
#include <stddef.h>
#include <stdint.h>
#include <memory>
#include <type_traits>
#include <utility> // for std::pair
#include "api/array_view.h"
#include "api/candidate.h"
#include "p2p/base/p2p_constants.h"
#include "p2p/base/p2p_transport_channel.h"
#include "pc/sctp_data_channel_transport.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
using webrtc::SdpType;
namespace cricket {
static bool VerifyIceParams(const TgJsepTransportDescription& jsep_description) {
// For legacy protocols.
// TODO(zhihuang): Remove this once the legacy protocol is no longer
// supported.
if (jsep_description.transport_desc.ice_ufrag.empty() &&
jsep_description.transport_desc.ice_pwd.empty()) {
return true;
}
if (jsep_description.transport_desc.ice_ufrag.length() <
ICE_UFRAG_MIN_LENGTH ||
jsep_description.transport_desc.ice_ufrag.length() >
ICE_UFRAG_MAX_LENGTH) {
return false;
}
if (jsep_description.transport_desc.ice_pwd.length() < ICE_PWD_MIN_LENGTH ||
jsep_description.transport_desc.ice_pwd.length() > ICE_PWD_MAX_LENGTH) {
return false;
}
return true;
}
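// For context, the bounds above come from the ICE spec (RFC 5245,
// section 15.4): a ufrag of at least 4 and a password of at least 22
// characters. For example, a typical generated pair such as
//
//   ice-ufrag: "4ZcD"                        (4 chars, minimum allowed)
//   ice-pwd:   "2/1muCWoOi3uLifh0NuRHlhw"    (24 chars, >= 22)
//
// passes this check, while an empty ufrag and pwd are treated as a legacy,
// ICE-less description and accepted above.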
TgJsepTransportDescription::TgJsepTransportDescription() {}
TgJsepTransportDescription::TgJsepTransportDescription(
bool rtcp_mux_enabled,
const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_header_extension_ids,
int rtp_abs_sendtime_extn_id,
const TransportDescription& transport_desc,
absl::optional<std::string> media_alt_protocol,
absl::optional<std::string> data_alt_protocol)
: rtcp_mux_enabled(rtcp_mux_enabled),
cryptos(cryptos),
encrypted_header_extension_ids(encrypted_header_extension_ids),
rtp_abs_sendtime_extn_id(rtp_abs_sendtime_extn_id),
transport_desc(transport_desc),
media_alt_protocol(media_alt_protocol),
data_alt_protocol(data_alt_protocol) {}
TgJsepTransportDescription::TgJsepTransportDescription(
const TgJsepTransportDescription& from)
: rtcp_mux_enabled(from.rtcp_mux_enabled),
cryptos(from.cryptos),
encrypted_header_extension_ids(from.encrypted_header_extension_ids),
rtp_abs_sendtime_extn_id(from.rtp_abs_sendtime_extn_id),
transport_desc(from.transport_desc),
media_alt_protocol(from.media_alt_protocol),
data_alt_protocol(from.data_alt_protocol) {}
TgJsepTransportDescription::~TgJsepTransportDescription() = default;
TgJsepTransportDescription& TgJsepTransportDescription::operator=(
const TgJsepTransportDescription& from) {
if (this == &from) {
return *this;
}
rtcp_mux_enabled = from.rtcp_mux_enabled;
cryptos = from.cryptos;
encrypted_header_extension_ids = from.encrypted_header_extension_ids;
rtp_abs_sendtime_extn_id = from.rtp_abs_sendtime_extn_id;
transport_desc = from.transport_desc;
media_alt_protocol = from.media_alt_protocol;
data_alt_protocol = from.data_alt_protocol;
return *this;
}
TgJsepTransport::TgJsepTransport(
const std::string& mid,
const rtc::scoped_refptr<rtc::RTCCertificate>& local_certificate,
rtc::scoped_refptr<webrtc::IceTransportInterface> ice_transport,
rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice_transport,
std::unique_ptr<webrtc::TgRtpTransport> unencrypted_rtp_transport,
std::unique_ptr<webrtc::SrtpTransport> sdes_transport,
std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport,
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport,
std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
std::unique_ptr<SctpTransportInternal> sctp_transport,
std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport,
webrtc::DataChannelTransportInterface* data_channel_transport)
: network_thread_(rtc::Thread::Current()),
mid_(mid),
local_certificate_(local_certificate),
ice_transport_(std::move(ice_transport)),
rtcp_ice_transport_(std::move(rtcp_ice_transport)),
unencrypted_rtp_transport_(std::move(unencrypted_rtp_transport)),
sdes_transport_(std::move(sdes_transport)),
dtls_srtp_transport_(std::move(dtls_srtp_transport)),
rtp_dtls_transport_(
rtp_dtls_transport ? new rtc::RefCountedObject<webrtc::DtlsTransport>(
std::move(rtp_dtls_transport))
: nullptr),
rtcp_dtls_transport_(
rtcp_dtls_transport
? new rtc::RefCountedObject<webrtc::DtlsTransport>(
std::move(rtcp_dtls_transport))
: nullptr),
sctp_data_channel_transport_(
sctp_transport ? std::make_unique<webrtc::SctpDataChannelTransport>(
sctp_transport.get())
: nullptr),
sctp_transport_(sctp_transport
? new rtc::RefCountedObject<webrtc::SctpTransport>(
std::move(sctp_transport))
: nullptr),
datagram_transport_(std::move(datagram_transport)),
datagram_rtp_transport_(std::move(datagram_rtp_transport)),
data_channel_transport_(data_channel_transport) {
RTC_DCHECK(ice_transport_);
RTC_DCHECK(rtp_dtls_transport_);
// |rtcp_ice_transport_| must be present iff |rtcp_dtls_transport_| is
// present.
RTC_DCHECK_EQ((rtcp_ice_transport_ != nullptr),
(rtcp_dtls_transport_ != nullptr));
// Verify the "only one out of these three can be set" invariant.
if (unencrypted_rtp_transport_) {
RTC_DCHECK(!sdes_transport);
RTC_DCHECK(!dtls_srtp_transport);
} else if (sdes_transport_) {
RTC_DCHECK(!unencrypted_rtp_transport);
RTC_DCHECK(!dtls_srtp_transport);
} else {
RTC_DCHECK(dtls_srtp_transport_);
RTC_DCHECK(!unencrypted_rtp_transport);
RTC_DCHECK(!sdes_transport);
}
if (sctp_transport_) {
sctp_transport_->SetDtlsTransport(rtp_dtls_transport_);
}
if (datagram_rtp_transport_ && default_rtp_transport()) {
composite_rtp_transport_ = std::make_unique<webrtc::CompositeRtpTransport>(
std::vector<webrtc::RtpTransportInternal*>{
datagram_rtp_transport_.get(), default_rtp_transport()});
}
if (data_channel_transport_ && sctp_data_channel_transport_) {
composite_data_channel_transport_ =
std::make_unique<webrtc::CompositeDataChannelTransport>(
std::vector<webrtc::DataChannelTransportInterface*>{
data_channel_transport_, sctp_data_channel_transport_.get()});
}
}
TgJsepTransport::~TgJsepTransport() {
if (sctp_transport_) {
sctp_transport_->Clear();
}
// Clear all DtlsTransports. There may be pointers to these from
// other places, so we can't assume they'll be deleted by the destructor.
rtp_dtls_transport_->Clear();
if (rtcp_dtls_transport_) {
rtcp_dtls_transport_->Clear();
}
// ICE will be the last transport to be deleted.
}
webrtc::RTCError TgJsepTransport::SetLocalJsepTransportDescription(
const TgJsepTransportDescription& jsep_description,
SdpType type) {
webrtc::RTCError error;
RTC_DCHECK_RUN_ON(network_thread_);
if (!VerifyIceParams(jsep_description)) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Invalid ice-ufrag or ice-pwd length.");
}
if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type,
ContentSource::CS_LOCAL)) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Failed to setup RTCP mux.");
}
// If doing SDES, setup the SDES crypto parameters.
{
rtc::CritScope scope(&accessor_lock_);
if (sdes_transport_) {
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!dtls_srtp_transport_);
if (!SetSdes(jsep_description.cryptos,
jsep_description.encrypted_header_extension_ids, type,
ContentSource::CS_LOCAL)) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Failed to setup SDES crypto parameters.");
}
} else if (dtls_srtp_transport_) {
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!sdes_transport_);
dtls_srtp_transport_->UpdateRecvEncryptedHeaderExtensionIds(
jsep_description.encrypted_header_extension_ids);
}
}
bool ice_restarting =
local_description_ != nullptr &&
IceCredentialsChanged(local_description_->transport_desc.ice_ufrag,
local_description_->transport_desc.ice_pwd,
jsep_description.transport_desc.ice_ufrag,
jsep_description.transport_desc.ice_pwd);
local_description_.reset(new TgJsepTransportDescription(jsep_description));
rtc::SSLFingerprint* local_fp =
local_description_->transport_desc.identity_fingerprint.get();
if (!local_fp) {
local_certificate_ = nullptr;
} else {
error = VerifyCertificateFingerprint(local_certificate_, local_fp);
if (!error.ok()) {
local_description_.reset();
return error;
}
}
{
rtc::CritScope scope(&accessor_lock_);
RTC_DCHECK(rtp_dtls_transport_->internal());
SetLocalIceParameters(rtp_dtls_transport_->internal()->ice_transport());
if (rtcp_dtls_transport_) {
RTC_DCHECK(rtcp_dtls_transport_->internal());
SetLocalIceParameters(rtcp_dtls_transport_->internal()->ice_transport());
}
}
// If PRANSWER/ANSWER is set, we should decide transport protocol type.
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
error = NegotiateAndSetDtlsParameters(type);
NegotiateDatagramTransport(type);
}
if (!error.ok()) {
local_description_.reset();
return error;
}
{
rtc::CritScope scope(&accessor_lock_);
if (needs_ice_restart_ && ice_restarting) {
needs_ice_restart_ = false;
RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag cleared for transport "
<< mid();
}
}
return webrtc::RTCError::OK();
}
webrtc::RTCError TgJsepTransport::SetRemoteJsepTransportDescription(
const TgJsepTransportDescription& jsep_description,
webrtc::SdpType type) {
webrtc::RTCError error;
RTC_DCHECK_RUN_ON(network_thread_);
if (!VerifyIceParams(jsep_description)) {
remote_description_.reset();
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Invalid ice-ufrag or ice-pwd length.");
}
if (!SetRtcpMux(jsep_description.rtcp_mux_enabled, type,
ContentSource::CS_REMOTE)) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Failed to setup RTCP mux.");
}
// If doing SDES, setup the SDES crypto parameters.
{
rtc::CritScope lock(&accessor_lock_);
if (sdes_transport_) {
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!dtls_srtp_transport_);
if (!SetSdes(jsep_description.cryptos,
jsep_description.encrypted_header_extension_ids, type,
ContentSource::CS_REMOTE)) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Failed to setup SDES crypto parameters.");
}
sdes_transport_->CacheRtpAbsSendTimeHeaderExtension(
jsep_description.rtp_abs_sendtime_extn_id);
} else if (dtls_srtp_transport_) {
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!sdes_transport_);
dtls_srtp_transport_->UpdateSendEncryptedHeaderExtensionIds(
jsep_description.encrypted_header_extension_ids);
dtls_srtp_transport_->CacheRtpAbsSendTimeHeaderExtension(
jsep_description.rtp_abs_sendtime_extn_id);
}
}
remote_description_.reset(new TgJsepTransportDescription(jsep_description));
RTC_DCHECK(rtp_dtls_transport());
SetRemoteIceParameters(rtp_dtls_transport()->ice_transport());
if (rtcp_dtls_transport()) {
SetRemoteIceParameters(rtcp_dtls_transport()->ice_transport());
}
// If PRANSWER/ANSWER is set, we should decide transport protocol type.
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) {
error = NegotiateAndSetDtlsParameters(SdpType::kOffer);
NegotiateDatagramTransport(type);
}
if (!error.ok()) {
remote_description_.reset();
return error;
}
return webrtc::RTCError::OK();
}
webrtc::RTCError TgJsepTransport::AddRemoteCandidates(
const Candidates& candidates) {
RTC_DCHECK_RUN_ON(network_thread_);
if (!local_description_ || !remote_description_) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE,
mid() +
" is not ready to use the remote candidate "
"because the local or remote description is "
"not set.");
}
for (const cricket::Candidate& candidate : candidates) {
auto transport =
candidate.component() == cricket::ICE_CANDIDATE_COMPONENT_RTP
? rtp_dtls_transport_
: rtcp_dtls_transport_;
if (!transport) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Candidate has an unknown component: " +
candidate.ToSensitiveString() + " for mid " +
mid());
}
RTC_DCHECK(transport->internal() && transport->internal()->ice_transport());
transport->internal()->ice_transport()->AddRemoteCandidate(candidate);
}
return webrtc::RTCError::OK();
}
void TgJsepTransport::SetNeedsIceRestartFlag() {
rtc::CritScope scope(&accessor_lock_);
if (!needs_ice_restart_) {
needs_ice_restart_ = true;
RTC_LOG(LS_VERBOSE) << "needs-ice-restart flag set for transport " << mid();
}
}
absl::optional<rtc::SSLRole> TgJsepTransport::GetDtlsRole() const {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
RTC_DCHECK(rtp_dtls_transport_);
RTC_DCHECK(rtp_dtls_transport_->internal());
rtc::SSLRole dtls_role;
if (!rtp_dtls_transport_->internal()->GetDtlsRole(&dtls_role)) {
return absl::optional<rtc::SSLRole>();
}
return absl::optional<rtc::SSLRole>(dtls_role);
}
absl::optional<OpaqueTransportParameters>
TgJsepTransport::GetTransportParameters() const {
rtc::CritScope scope(&accessor_lock_);
if (!datagram_transport()) {
return absl::nullopt;
}
OpaqueTransportParameters params;
params.parameters = datagram_transport()->GetTransportParameters();
return params;
}
bool TgJsepTransport::GetStats(TransportStats* stats) {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
stats->transport_name = mid();
stats->channel_stats.clear();
RTC_DCHECK(rtp_dtls_transport_->internal());
bool ret = GetTransportStats(rtp_dtls_transport_->internal(), stats);
if (rtcp_dtls_transport_) {
RTC_DCHECK(rtcp_dtls_transport_->internal());
ret &= GetTransportStats(rtcp_dtls_transport_->internal(), stats);
}
return ret;
}
webrtc::RTCError TgJsepTransport::VerifyCertificateFingerprint(
const rtc::RTCCertificate* certificate,
const rtc::SSLFingerprint* fingerprint) const {
RTC_DCHECK_RUN_ON(network_thread_);
if (!fingerprint) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"No fingerprint");
}
if (!certificate) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Fingerprint provided but no identity available.");
}
std::unique_ptr<rtc::SSLFingerprint> fp_tmp =
rtc::SSLFingerprint::CreateUnique(fingerprint->algorithm,
*certificate->identity());
RTC_DCHECK(fp_tmp.get() != NULL);
if (*fp_tmp == *fingerprint) {
return webrtc::RTCError::OK();
}
char ss_buf[1024];
rtc::SimpleStringBuilder desc(ss_buf);
desc << "Local fingerprint does not match identity. Expected: ";
desc << fp_tmp->ToString();
desc << " Got: " << fingerprint->ToString();
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
std::string(desc.str()));
}
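// For context, the fingerprint verified here is what appears in SDP as, e.g.
//
//   a=fingerprint:sha-256 7B:8B:F0:65:5F:78:E2:51:3B:AC:6F:F3:3F:46:1B:35:...
//
// i.e. a digest of the certificate computed with |fingerprint->algorithm|
// (the hex digest shown is illustrative only).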
void TgJsepTransport::SetActiveResetSrtpParams(bool active_reset_srtp_params) {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
if (dtls_srtp_transport_) {
    RTC_LOG(LS_INFO)
<< "Setting active_reset_srtp_params of DtlsSrtpTransport to: "
<< active_reset_srtp_params;
dtls_srtp_transport_->SetActiveResetSrtpParams(active_reset_srtp_params);
}
}
void TgJsepTransport::SetLocalIceParameters(IceTransportInternal* ice_transport) {
RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(ice_transport);
RTC_DCHECK(local_description_);
ice_transport->SetIceParameters(
local_description_->transport_desc.GetIceParameters());
}
void TgJsepTransport::SetRemoteIceParameters(
IceTransportInternal* ice_transport) {
RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(ice_transport);
RTC_DCHECK(remote_description_);
ice_transport->SetRemoteIceParameters(
remote_description_->transport_desc.GetIceParameters());
ice_transport->SetRemoteIceMode(remote_description_->transport_desc.ice_mode);
}
webrtc::RTCError TgJsepTransport::SetNegotiatedDtlsParameters(
DtlsTransportInternal* dtls_transport,
absl::optional<rtc::SSLRole> dtls_role,
rtc::SSLFingerprint* remote_fingerprint) {
RTC_DCHECK_RUN_ON(network_thread_);
RTC_DCHECK(dtls_transport);
// Set SSL role. Role must be set before fingerprint is applied, which
// initiates DTLS setup.
if (dtls_role && !dtls_transport->SetDtlsRole(*dtls_role)) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Failed to set SSL role for the transport.");
}
// Apply remote fingerprint.
if (!remote_fingerprint ||
!dtls_transport->SetRemoteFingerprint(
remote_fingerprint->algorithm, remote_fingerprint->digest.cdata(),
remote_fingerprint->digest.size())) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_PARAMETER,
"Failed to apply remote fingerprint.");
}
return webrtc::RTCError::OK();
}
bool TgJsepTransport::SetRtcpMux(bool enable,
webrtc::SdpType type,
ContentSource source) {
RTC_DCHECK_RUN_ON(network_thread_);
bool ret = false;
switch (type) {
case SdpType::kOffer:
ret = rtcp_mux_negotiator_.SetOffer(enable, source);
break;
case SdpType::kPrAnswer:
// This may activate RTCP muxing, but we don't yet destroy the transport
// because the final answer may deactivate it.
ret = rtcp_mux_negotiator_.SetProvisionalAnswer(enable, source);
break;
case SdpType::kAnswer:
ret = rtcp_mux_negotiator_.SetAnswer(enable, source);
if (ret && rtcp_mux_negotiator_.IsActive()) {
ActivateRtcpMux();
}
break;
default:
RTC_NOTREACHED();
}
if (!ret) {
return false;
}
auto transport = rtp_transport();
transport->SetRtcpMuxEnabled(rtcp_mux_negotiator_.IsActive());
return ret;
}
void TgJsepTransport::ActivateRtcpMux() {
{
// Don't hold the network_thread_ lock while calling other functions,
// since they might call other functions that call RTC_DCHECK_RUN_ON.
// TODO(https://crbug.com/webrtc/10318): Simplify when possible.
RTC_DCHECK_RUN_ON(network_thread_);
}
{
rtc::CritScope scope(&accessor_lock_);
if (unencrypted_rtp_transport_) {
RTC_DCHECK(!sdes_transport_);
RTC_DCHECK(!dtls_srtp_transport_);
unencrypted_rtp_transport_->SetRtcpPacketTransport(nullptr);
} else if (sdes_transport_) {
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!dtls_srtp_transport_);
sdes_transport_->SetRtcpPacketTransport(nullptr);
} else if (dtls_srtp_transport_) {
RTC_DCHECK(dtls_srtp_transport_);
RTC_DCHECK(!unencrypted_rtp_transport_);
RTC_DCHECK(!sdes_transport_);
dtls_srtp_transport_->SetDtlsTransports(rtp_dtls_transport(),
/*rtcp_dtls_transport=*/nullptr);
}
rtcp_dtls_transport_ = nullptr; // Destroy this reference.
}
// Notify the JsepTransportController to update the aggregate states.
SignalRtcpMuxActive();
}
bool TgJsepTransport::SetSdes(const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_extension_ids,
webrtc::SdpType type,
ContentSource source) {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
bool ret = false;
ret = sdes_negotiator_.Process(cryptos, type, source);
if (!ret) {
return ret;
}
if (source == ContentSource::CS_LOCAL) {
recv_extension_ids_ = encrypted_extension_ids;
} else {
send_extension_ids_ = encrypted_extension_ids;
}
// If setting an SDES answer succeeded, apply the negotiated parameters
// to the SRTP transport.
if ((type == SdpType::kPrAnswer || type == SdpType::kAnswer) && ret) {
if (sdes_negotiator_.send_cipher_suite() &&
sdes_negotiator_.recv_cipher_suite()) {
RTC_DCHECK(send_extension_ids_);
RTC_DCHECK(recv_extension_ids_);
ret = sdes_transport_->SetRtpParams(
*(sdes_negotiator_.send_cipher_suite()),
sdes_negotiator_.send_key().data(),
static_cast<int>(sdes_negotiator_.send_key().size()),
*(send_extension_ids_), *(sdes_negotiator_.recv_cipher_suite()),
sdes_negotiator_.recv_key().data(),
static_cast<int>(sdes_negotiator_.recv_key().size()),
*(recv_extension_ids_));
} else {
RTC_LOG(LS_INFO) << "No crypto keys are provided for SDES.";
if (type == SdpType::kAnswer) {
// Explicitly reset the |sdes_transport_| if no crypto param is
// provided in the answer. No need to call |ResetParams()| for
// |sdes_negotiator_| because it resets the params inside |SetAnswer|.
sdes_transport_->ResetParams();
}
}
}
return ret;
}
webrtc::RTCError TgJsepTransport::NegotiateAndSetDtlsParameters(
SdpType local_description_type) {
RTC_DCHECK_RUN_ON(network_thread_);
if (!local_description_ || !remote_description_) {
return webrtc::RTCError(webrtc::RTCErrorType::INVALID_STATE,
"Applying an answer transport description "
"without applying any offer.");
}
std::unique_ptr<rtc::SSLFingerprint> remote_fingerprint;
absl::optional<rtc::SSLRole> negotiated_dtls_role;
rtc::SSLFingerprint* local_fp =
local_description_->transport_desc.identity_fingerprint.get();
rtc::SSLFingerprint* remote_fp =
remote_description_->transport_desc.identity_fingerprint.get();
if (remote_fp && local_fp) {
remote_fingerprint = std::make_unique<rtc::SSLFingerprint>(*remote_fp);
webrtc::RTCError error =
NegotiateDtlsRole(local_description_type,
local_description_->transport_desc.connection_role,
remote_description_->transport_desc.connection_role,
&negotiated_dtls_role);
if (!error.ok()) {
return error;
}
} else if (local_fp && (local_description_type == SdpType::kAnswer)) {
return webrtc::RTCError(
webrtc::RTCErrorType::INVALID_PARAMETER,
"Local fingerprint supplied when caller didn't offer DTLS.");
} else {
// We are not doing DTLS
remote_fingerprint = std::make_unique<rtc::SSLFingerprint>(
"", rtc::ArrayView<const uint8_t>());
}
// Now that we have negotiated everything, push it downward.
// Note that we cache the result so that if we have race conditions
// between future SetRemote/SetLocal invocations and new transport
// creation, we have the negotiation state saved until a new
// negotiation happens.
RTC_DCHECK(rtp_dtls_transport());
webrtc::RTCError error = SetNegotiatedDtlsParameters(
rtp_dtls_transport(), negotiated_dtls_role, remote_fingerprint.get());
if (!error.ok()) {
return error;
}
if (rtcp_dtls_transport()) {
error = SetNegotiatedDtlsParameters(
rtcp_dtls_transport(), negotiated_dtls_role, remote_fingerprint.get());
}
return error;
}
webrtc::RTCError TgJsepTransport::NegotiateDtlsRole(
SdpType local_description_type,
ConnectionRole local_connection_role,
ConnectionRole remote_connection_role,
absl::optional<rtc::SSLRole>* negotiated_dtls_role) {
  // From RFC 4145, section 4.1, the following are the values that the
  // 'setup' attribute can take in an offer/answer exchange:
  //
  //      Offer      Answer
  //      ________________
  //      active     passive / holdconn
  //      passive    active / holdconn
  //      actpass    active / passive / holdconn
  //      holdconn   holdconn
//
// Set the role that is most conformant with RFC 5763, Section 5, bullet 1
// The endpoint MUST use the setup attribute defined in [RFC4145].
// The endpoint that is the offerer MUST use the setup attribute
// value of setup:actpass and be prepared to receive a client_hello
// before it receives the answer. The answerer MUST use either a
// setup attribute value of setup:active or setup:passive. Note that
// if the answerer uses setup:passive, then the DTLS handshake will
  // not begin until the answer is received, which adds additional
// latency. setup:active allows the answer and the DTLS handshake to
// occur in parallel. Thus, setup:active is RECOMMENDED. Whichever
// party is active MUST initiate a DTLS handshake by sending a
// ClientHello over each flow (host/port quartet).
// IOW - actpass and passive modes should be treated as server and
// active as client.
bool is_remote_server = false;
if (local_description_type == SdpType::kOffer) {
if (local_connection_role != CONNECTIONROLE_ACTPASS) {
return webrtc::RTCError(
webrtc::RTCErrorType::INVALID_PARAMETER,
"Offerer must use actpass value for setup attribute.");
}
if (remote_connection_role == CONNECTIONROLE_ACTIVE ||
remote_connection_role == CONNECTIONROLE_PASSIVE ||
remote_connection_role == CONNECTIONROLE_NONE) {
is_remote_server = (remote_connection_role == CONNECTIONROLE_PASSIVE);
} else {
return webrtc::RTCError(
webrtc::RTCErrorType::INVALID_PARAMETER,
"Answerer must use either active or passive value "
"for setup attribute.");
}
// If remote is NONE or ACTIVE it will act as client.
} else {
if (remote_connection_role != CONNECTIONROLE_ACTPASS &&
remote_connection_role != CONNECTIONROLE_NONE) {
// Accept a remote role attribute that's not "actpass", but matches the
// current negotiated role. This is allowed by dtls-sdp, though our
// implementation will never generate such an offer as it's not
// recommended.
//
// See https://datatracker.ietf.org/doc/html/draft-ietf-mmusic-dtls-sdp,
// section 5.5.
auto current_dtls_role = GetDtlsRole();
if (!current_dtls_role ||
(*current_dtls_role == rtc::SSL_CLIENT &&
remote_connection_role == CONNECTIONROLE_ACTIVE) ||
(*current_dtls_role == rtc::SSL_SERVER &&
remote_connection_role == CONNECTIONROLE_PASSIVE)) {
return webrtc::RTCError(
webrtc::RTCErrorType::INVALID_PARAMETER,
"Offerer must use actpass value or current negotiated role for "
"setup attribute.");
}
}
if (local_connection_role == CONNECTIONROLE_ACTIVE ||
local_connection_role == CONNECTIONROLE_PASSIVE) {
is_remote_server = (local_connection_role == CONNECTIONROLE_ACTIVE);
} else {
return webrtc::RTCError(
webrtc::RTCErrorType::INVALID_PARAMETER,
"Answerer must use either active or passive value "
"for setup attribute.");
}
// If local is passive, local will act as server.
}
*negotiated_dtls_role =
(is_remote_server ? rtc::SSL_CLIENT : rtc::SSL_SERVER);
return webrtc::RTCError::OK();
}
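// A worked example of the mapping above for the common case where we are the
// offerer (local_description_type == SdpType::kOffer):
//
//   local setup   remote setup   is_remote_server   our negotiated role
//   -----------   ------------   ----------------   -------------------
//   actpass       active         false              rtc::SSL_SERVER
//   actpass       passive        true               rtc::SSL_CLIENT
//   actpass       none           false              rtc::SSL_SERVER
//
// i.e. an "active" answerer initiates the handshake as the DTLS client,
// which is the arrangement RECOMMENDED by RFC 5763.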
bool TgJsepTransport::GetTransportStats(DtlsTransportInternal* dtls_transport,
TransportStats* stats) {
RTC_DCHECK_RUN_ON(network_thread_);
rtc::CritScope scope(&accessor_lock_);
RTC_DCHECK(dtls_transport);
TransportChannelStats substats;
if (rtcp_dtls_transport_) {
substats.component = dtls_transport == rtcp_dtls_transport_->internal()
? ICE_CANDIDATE_COMPONENT_RTCP
: ICE_CANDIDATE_COMPONENT_RTP;
} else {
substats.component = ICE_CANDIDATE_COMPONENT_RTP;
}
dtls_transport->GetSslVersionBytes(&substats.ssl_version_bytes);
dtls_transport->GetSrtpCryptoSuite(&substats.srtp_crypto_suite);
dtls_transport->GetSslCipherSuite(&substats.ssl_cipher_suite);
substats.dtls_state = dtls_transport->dtls_state();
if (!dtls_transport->ice_transport()->GetStats(
&substats.ice_transport_stats)) {
return false;
}
stats->channel_stats.push_back(substats);
return true;
}
void TgJsepTransport::NegotiateDatagramTransport(SdpType type) {
RTC_DCHECK(type == SdpType::kAnswer || type == SdpType::kPrAnswer);
rtc::CritScope lock(&accessor_lock_);
if (!datagram_transport_) {
return; // No need to negotiate the use of datagram transport.
}
bool compatible_datagram_transport =
remote_description_->transport_desc.opaque_parameters &&
remote_description_->transport_desc.opaque_parameters ==
local_description_->transport_desc.opaque_parameters;
bool use_datagram_transport_for_media =
compatible_datagram_transport &&
remote_description_->media_alt_protocol ==
remote_description_->transport_desc.opaque_parameters->protocol &&
remote_description_->media_alt_protocol ==
local_description_->media_alt_protocol;
bool use_datagram_transport_for_data =
compatible_datagram_transport &&
remote_description_->data_alt_protocol ==
remote_description_->transport_desc.opaque_parameters->protocol &&
remote_description_->data_alt_protocol ==
local_description_->data_alt_protocol;
RTC_LOG(LS_INFO)
<< "Negotiating datagram transport, use_datagram_transport_for_media="
<< use_datagram_transport_for_media
<< ", use_datagram_transport_for_data=" << use_datagram_transport_for_data
<< " answer type=" << (type == SdpType::kAnswer ? "answer" : "pr_answer");
  // A provisional or full answer lets the peer start sending on one of the
// transports.
if (composite_rtp_transport_) {
composite_rtp_transport_->SetSendTransport(
use_datagram_transport_for_media ? datagram_rtp_transport_.get()
: default_rtp_transport());
}
if (composite_data_channel_transport_) {
composite_data_channel_transport_->SetSendTransport(
use_datagram_transport_for_data ? data_channel_transport_
: sctp_data_channel_transport_.get());
}
if (type != SdpType::kAnswer) {
return;
}
if (composite_rtp_transport_) {
if (use_datagram_transport_for_media) {
// Negotiated use of datagram transport for RTP, so remove the
// non-datagram RTP transport.
composite_rtp_transport_->RemoveTransport(default_rtp_transport());
if (unencrypted_rtp_transport_) {
unencrypted_rtp_transport_ = nullptr;
} else if (sdes_transport_) {
sdes_transport_ = nullptr;
} else {
dtls_srtp_transport_ = nullptr;
}
} else {
composite_rtp_transport_->RemoveTransport(datagram_rtp_transport_.get());
datagram_rtp_transport_ = nullptr;
}
}
if (composite_data_channel_transport_) {
if (use_datagram_transport_for_data) {
// Negotiated use of datagram transport for data channels, so remove the
// non-datagram data channel transport.
composite_data_channel_transport_->RemoveTransport(
sctp_data_channel_transport_.get());
sctp_data_channel_transport_ = nullptr;
sctp_transport_ = nullptr;
} else {
composite_data_channel_transport_->RemoveTransport(
data_channel_transport_);
data_channel_transport_ = nullptr;
}
} else if (data_channel_transport_ && !use_datagram_transport_for_data) {
// The datagram transport has been rejected without a fallback. We still
// need to inform the application and delete it.
SignalDataChannelTransportNegotiated(this, nullptr);
data_channel_transport_ = nullptr;
}
if (!use_datagram_transport_for_media && !use_datagram_transport_for_data) {
// Datagram transport is not being used for anything, so clean it up.
datagram_transport_ = nullptr;
}
}
} // namespace cricket

View File

@ -1,417 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_PC_JSEP_TRANSPORT_H_
#define TG_PC_JSEP_TRANSPORT_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "api/candidate.h"
#include "api/ice_transport_interface.h"
#include "api/jsep.h"
#include "api/transport/datagram_transport_interface.h"
#include "media/sctp/sctp_transport_internal.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/p2p_constants.h"
#include "p2p/base/transport_info.h"
#include "pc/composite_data_channel_transport.h"
#include "pc/composite_rtp_transport.h"
#include "pc/dtls_srtp_transport.h"
#include "pc/dtls_transport.h"
#include "pc/rtcp_mux_filter.h"
#include "pc/rtp_transport.h"
#include "pc/sctp_transport.h"
#include "pc/session_description.h"
#include "pc/srtp_filter.h"
#include "pc/srtp_transport.h"
#include "pc/transport_stats.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/message_queue.h"
#include "rtc_base/rtc_certificate.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread_checker.h"
#include "tg_rtp_transport.h"
namespace cricket {
class DtlsTransportInternal;
struct TgJsepTransportDescription {
public:
TgJsepTransportDescription();
TgJsepTransportDescription(
bool rtcp_mux_enabled,
const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_header_extension_ids,
int rtp_abs_sendtime_extn_id,
const TransportDescription& transport_description,
absl::optional<std::string> media_alt_protocol,
absl::optional<std::string> data_alt_protocol);
TgJsepTransportDescription(const TgJsepTransportDescription& from);
~TgJsepTransportDescription();
TgJsepTransportDescription& operator=(const TgJsepTransportDescription& from);
bool rtcp_mux_enabled = true;
std::vector<CryptoParams> cryptos;
std::vector<int> encrypted_header_extension_ids;
int rtp_abs_sendtime_extn_id = -1;
// TODO(zhihuang): Add the ICE and DTLS related variables and methods from
// TransportDescription and remove this extra layer of abstraction.
TransportDescription transport_desc;
// Alt-protocols that apply to this TgJsepTransport. Presence indicates a
// request to use an alternative protocol for media and/or data. The
// alt-protocol is handled by a datagram transport. If one or both of these
// values are present, TgJsepTransport will attempt to negotiate use of the
// datagram transport for media and/or data.
absl::optional<std::string> media_alt_protocol;
absl::optional<std::string> data_alt_protocol;
};
// Helper class used by TgJsepTransportController that processes
// TransportDescriptions. A TransportDescription represents the
// transport-specific properties of an SDP m= section, processed according to
// JSEP. Each transport consists of DTLS and ICE transport channels for RTP
// (and possibly RTCP, if rtcp-mux isn't used).
//
// On Threading: TgJsepTransport performs work solely on the network thread, and
// so its methods should only be called on the network thread.
class TgJsepTransport : public sigslot::has_slots<> {
public:
// |mid| is just used for log statements in order to identify the Transport.
// Note that |local_certificate| is allowed to be null since a remote
// description may be set before a local certificate is generated.
TgJsepTransport(
const std::string& mid,
const rtc::scoped_refptr<rtc::RTCCertificate>& local_certificate,
rtc::scoped_refptr<webrtc::IceTransportInterface> ice_transport,
rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice_transport,
std::unique_ptr<webrtc::TgRtpTransport> unencrypted_rtp_transport,
std::unique_ptr<webrtc::SrtpTransport> sdes_transport,
std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport,
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport,
std::unique_ptr<DtlsTransportInternal> rtp_dtls_transport,
std::unique_ptr<DtlsTransportInternal> rtcp_dtls_transport,
std::unique_ptr<SctpTransportInternal> sctp_transport,
std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport,
webrtc::DataChannelTransportInterface* data_channel_transport);
~TgJsepTransport() override;
// Returns the MID of this transport. This is only used for logging.
const std::string& mid() const { return mid_; }
// Must be called before applying local session description.
// Needed in order to verify the local fingerprint.
void SetLocalCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& local_certificate) {
RTC_DCHECK_RUN_ON(network_thread_);
local_certificate_ = local_certificate;
}
// Return the local certificate provided by SetLocalCertificate.
rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate() const {
RTC_DCHECK_RUN_ON(network_thread_);
return local_certificate_;
}
webrtc::RTCError SetLocalJsepTransportDescription(
const TgJsepTransportDescription& jsep_description,
webrtc::SdpType type);
// Set the remote TransportDescription to be used by DTLS and ICE channels
// that are part of this Transport.
webrtc::RTCError SetRemoteJsepTransportDescription(
const TgJsepTransportDescription& jsep_description,
webrtc::SdpType type);
webrtc::RTCError AddRemoteCandidates(const Candidates& candidates);
// Set the "needs-ice-restart" flag as described in JSEP. After the flag is
// set, offers should generate new ufrags/passwords until an ICE restart
// occurs.
//
// This and the below method can be called safely from any thread as long as
// SetXTransportDescription is not in progress.
void SetNeedsIceRestartFlag();
// Returns true if the ICE restart flag above was set, and no ICE restart has
// occurred yet for this transport (by applying a local description with
// changed ufrag/password).
bool needs_ice_restart() const {
rtc::CritScope scope(&accessor_lock_);
return needs_ice_restart_;
}
// Returns role if negotiated, or empty absl::optional if it hasn't been
// negotiated yet.
absl::optional<rtc::SSLRole> GetDtlsRole() const;
absl::optional<OpaqueTransportParameters> GetTransportParameters() const;
// TODO(deadbeef): Make this const. See comment in transportcontroller.h.
bool GetStats(TransportStats* stats);
const TgJsepTransportDescription* local_description() const {
RTC_DCHECK_RUN_ON(network_thread_);
return local_description_.get();
}
const TgJsepTransportDescription* remote_description() const {
RTC_DCHECK_RUN_ON(network_thread_);
return remote_description_.get();
}
webrtc::RtpTransportInternal* rtp_transport() const {
rtc::CritScope scope(&accessor_lock_);
if (composite_rtp_transport_) {
return composite_rtp_transport_.get();
} else if (datagram_rtp_transport_) {
return datagram_rtp_transport_.get();
} else {
return default_rtp_transport();
}
}
const DtlsTransportInternal* rtp_dtls_transport() const {
rtc::CritScope scope(&accessor_lock_);
if (rtp_dtls_transport_) {
return rtp_dtls_transport_->internal();
} else {
return nullptr;
}
}
DtlsTransportInternal* rtp_dtls_transport() {
rtc::CritScope scope(&accessor_lock_);
if (rtp_dtls_transport_) {
return rtp_dtls_transport_->internal();
} else {
return nullptr;
}
}
const DtlsTransportInternal* rtcp_dtls_transport() const {
rtc::CritScope scope(&accessor_lock_);
if (rtcp_dtls_transport_) {
return rtcp_dtls_transport_->internal();
} else {
return nullptr;
}
}
DtlsTransportInternal* rtcp_dtls_transport() {
rtc::CritScope scope(&accessor_lock_);
if (rtcp_dtls_transport_) {
return rtcp_dtls_transport_->internal();
} else {
return nullptr;
}
}
rtc::scoped_refptr<webrtc::DtlsTransport> RtpDtlsTransport() {
rtc::CritScope scope(&accessor_lock_);
return rtp_dtls_transport_;
}
rtc::scoped_refptr<webrtc::SctpTransport> SctpTransport() const {
rtc::CritScope scope(&accessor_lock_);
return sctp_transport_;
}
webrtc::DataChannelTransportInterface* data_channel_transport() const {
rtc::CritScope scope(&accessor_lock_);
if (composite_data_channel_transport_) {
return composite_data_channel_transport_.get();
} else if (sctp_data_channel_transport_) {
return sctp_data_channel_transport_.get();
}
return data_channel_transport_;
}
// Returns datagram transport, if available.
webrtc::DatagramTransportInterface* datagram_transport() const {
rtc::CritScope scope(&accessor_lock_);
return datagram_transport_.get();
}
// This is signaled when RTCP-mux becomes active and
// |rtcp_dtls_transport_| is destroyed. The TgJsepTransportController will
// handle the signal and update the aggregate transport states.
sigslot::signal<> SignalRtcpMuxActive;
  // Signals that a data channel transport was negotiated and may be used to
  // send data. The first parameter is |this|. The second parameter is the
  // transport that was negotiated, or null if negotiation rejected the data
  // channel transport.
sigslot::signal2<TgJsepTransport*, webrtc::DataChannelTransportInterface*>
SignalDataChannelTransportNegotiated;
// TODO(deadbeef): The methods below are only public for testing. Should make
// them utility functions or objects so they can be tested independently from
// this class.
// Returns an error if the certificate's identity does not match the
// fingerprint, or either is NULL.
webrtc::RTCError VerifyCertificateFingerprint(
const rtc::RTCCertificate* certificate,
const rtc::SSLFingerprint* fingerprint) const;
void SetActiveResetSrtpParams(bool active_reset_srtp_params);
private:
bool SetRtcpMux(bool enable, webrtc::SdpType type, ContentSource source);
void ActivateRtcpMux();
bool SetSdes(const std::vector<CryptoParams>& cryptos,
const std::vector<int>& encrypted_extension_ids,
webrtc::SdpType type,
ContentSource source);
// Negotiates and sets the DTLS parameters based on the current local and
// remote transport description, such as the DTLS role to use, and whether
// DTLS should be activated.
//
// Called when an answer TransportDescription is applied.
webrtc::RTCError NegotiateAndSetDtlsParameters(
webrtc::SdpType local_description_type);
  // Negotiates the DTLS role based on the offer and answer as specified by
// RFC 4145, section-4.1. Returns an RTCError if role cannot be determined
// from the local description and remote description.
webrtc::RTCError NegotiateDtlsRole(
webrtc::SdpType local_description_type,
ConnectionRole local_connection_role,
ConnectionRole remote_connection_role,
absl::optional<rtc::SSLRole>* negotiated_dtls_role);
// Pushes down the ICE parameters from the local description, such
// as the ICE ufrag and pwd.
void SetLocalIceParameters(IceTransportInternal* ice);
// Pushes down the ICE parameters from the remote description.
void SetRemoteIceParameters(IceTransportInternal* ice);
// Pushes down the DTLS parameters obtained via negotiation.
webrtc::RTCError SetNegotiatedDtlsParameters(
DtlsTransportInternal* dtls_transport,
absl::optional<rtc::SSLRole> dtls_role,
rtc::SSLFingerprint* remote_fingerprint);
bool GetTransportStats(DtlsTransportInternal* dtls_transport,
TransportStats* stats);
// Deactivates, signals removal, and deletes |composite_rtp_transport_| if the
// current state of negotiation is sufficient to determine which rtp_transport
// and data channel transport to use.
void NegotiateDatagramTransport(webrtc::SdpType type)
RTC_RUN_ON(network_thread_);
// Returns the default (non-datagram) rtp transport, if any.
webrtc::RtpTransportInternal* default_rtp_transport() const
RTC_EXCLUSIVE_LOCKS_REQUIRED(accessor_lock_) {
if (dtls_srtp_transport_) {
return dtls_srtp_transport_.get();
} else if (sdes_transport_) {
return sdes_transport_.get();
} else if (unencrypted_rtp_transport_) {
return unencrypted_rtp_transport_.get();
} else {
return nullptr;
}
}
// Owning thread, for safety checks
const rtc::Thread* const network_thread_;
// Critical scope for fields accessed off-thread
// TODO(https://bugs.webrtc.org/10300): Stop doing this.
rtc::CriticalSection accessor_lock_;
const std::string mid_;
// needs-ice-restart bit as described in JSEP.
bool needs_ice_restart_ RTC_GUARDED_BY(accessor_lock_) = false;
rtc::scoped_refptr<rtc::RTCCertificate> local_certificate_
RTC_GUARDED_BY(network_thread_);
std::unique_ptr<TgJsepTransportDescription> local_description_
RTC_GUARDED_BY(network_thread_);
std::unique_ptr<TgJsepTransportDescription> remote_description_
RTC_GUARDED_BY(network_thread_);
// Ice transport which may be used by any of upper-layer transports (below).
// Owned by TgJsepTransport and guaranteed to outlive the transports below.
const rtc::scoped_refptr<webrtc::IceTransportInterface> ice_transport_;
const rtc::scoped_refptr<webrtc::IceTransportInterface> rtcp_ice_transport_;
  // To avoid downcasting and keep it type safe, hold three unique pointers
  // for the different SRTP modes; only one of them is non-nullptr.
std::unique_ptr<webrtc::TgRtpTransport> unencrypted_rtp_transport_
RTC_GUARDED_BY(accessor_lock_);
std::unique_ptr<webrtc::SrtpTransport> sdes_transport_
RTC_GUARDED_BY(accessor_lock_);
std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_transport_
RTC_GUARDED_BY(accessor_lock_);
// If multiple RTP transports are in use, |composite_rtp_transport_| will be
// passed to callers. This is only valid for offer-only, receive-only
// scenarios, as it is not possible for the composite to correctly choose
// which transport to use for sending.
std::unique_ptr<webrtc::CompositeRtpTransport> composite_rtp_transport_
RTC_GUARDED_BY(accessor_lock_);
rtc::scoped_refptr<webrtc::DtlsTransport> rtp_dtls_transport_
RTC_GUARDED_BY(accessor_lock_);
rtc::scoped_refptr<webrtc::DtlsTransport> rtcp_dtls_transport_
RTC_GUARDED_BY(accessor_lock_);
rtc::scoped_refptr<webrtc::DtlsTransport> datagram_dtls_transport_
RTC_GUARDED_BY(accessor_lock_);
std::unique_ptr<webrtc::DataChannelTransportInterface>
sctp_data_channel_transport_ RTC_GUARDED_BY(accessor_lock_);
rtc::scoped_refptr<webrtc::SctpTransport> sctp_transport_
RTC_GUARDED_BY(accessor_lock_);
SrtpFilter sdes_negotiator_ RTC_GUARDED_BY(network_thread_);
RtcpMuxFilter rtcp_mux_negotiator_ RTC_GUARDED_BY(network_thread_);
  // Cache the encrypted header extension IDs for SDES negotiation.
absl::optional<std::vector<int>> send_extension_ids_
RTC_GUARDED_BY(network_thread_);
absl::optional<std::vector<int>> recv_extension_ids_
RTC_GUARDED_BY(network_thread_);
// Optional datagram transport (experimental).
std::unique_ptr<webrtc::DatagramTransportInterface> datagram_transport_
RTC_GUARDED_BY(accessor_lock_);
std::unique_ptr<webrtc::RtpTransportInternal> datagram_rtp_transport_
RTC_GUARDED_BY(accessor_lock_);
// Non-SCTP data channel transport. Set to |datagram_transport_| if that
// transport should be used for data channels. Unset otherwise.
webrtc::DataChannelTransportInterface* data_channel_transport_
RTC_GUARDED_BY(accessor_lock_) = nullptr;
// Composite data channel transport, used during negotiation.
std::unique_ptr<webrtc::CompositeDataChannelTransport>
composite_data_channel_transport_ RTC_GUARDED_BY(accessor_lock_);
RTC_DISALLOW_COPY_AND_ASSIGN(TgJsepTransport);
};
} // namespace cricket
#endif // TG_PC_JSEP_TRANSPORT_H_
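// Editorial sketch (not part of the original header): default_rtp_transport()
// above relies on the invariant that at most one of the DTLS-SRTP, SDES, and
// unencrypted transports is non-null, and that every access happens under
// |accessor_lock_|. The same "one-of-three under a lock" pattern, with
// illustrative field names (requires rtc_base/critical_section.h), looks like:
class ExampleTransportHolder {
 public:
  webrtc::RtpTransportInternal* active_rtp_transport() {
    rtc::CritScope scope(&lock_);  // Same role as |accessor_lock_| above.
    if (dtls_srtp_) return dtls_srtp_.get();
    if (sdes_) return sdes_.get();
    return unencrypted_.get();  // May be nullptr if nothing is negotiated yet.
  }

 private:
  rtc::CriticalSection lock_;
  std::unique_ptr<webrtc::DtlsSrtpTransport> dtls_srtp_;
  std::unique_ptr<webrtc::SrtpTransport> sdes_;
  std::unique_ptr<webrtc::TgRtpTransport> unencrypted_;
};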

View File

@ -1,478 +0,0 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_PC_JSEP_TRANSPORT_CONTROLLER_H_
#define TG_PC_JSEP_TRANSPORT_CONTROLLER_H_
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "api/candidate.h"
#include "api/crypto/crypto_options.h"
#include "api/ice_transport_factory.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/transport/media/media_transport_config.h"
#include "media/sctp/sctp_transport_internal.h"
#include "p2p/base/dtls_transport.h"
#include "p2p/base/dtls_transport_factory.h"
#include "p2p/base/p2p_transport_channel.h"
#include "pc/channel.h"
#include "pc/dtls_srtp_transport.h"
#include "pc/dtls_transport.h"
#include "pc/rtp_transport.h"
#include "pc/srtp_transport.h"
#include "rtc_base/async_invoker.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "tg_jsep_transport.h"
#include "tg_rtp_transport.h"
namespace rtc {
class Thread;
class PacketTransportInternal;
} // namespace rtc
namespace webrtc {
class TgJsepTransportController : public sigslot::has_slots<> {
public:
// Used when the RtpTransport/DtlsTransport of the m= section is changed
// because the section is rejected or BUNDLE is enabled.
class Observer {
public:
virtual ~Observer() {}
// Returns true if media associated with |mid| was successfully set up to be
// demultiplexed on |rtp_transport|. Could return false if two bundled m=
// sections use the same SSRC, for example.
//
// If a data channel transport must be negotiated, |data_channel_transport|
// and |negotiation_state| indicate negotiation status. If
// |data_channel_transport| is null, the data channel transport should not
// be used. Otherwise, the value is a pointer to the transport to be used
// for data channels on |mid|, if any.
//
// The observer should not send data on |data_channel_transport| until
// |negotiation_state| is provisional or final. It should not delete
// |data_channel_transport| or any fallback transport until
// |negotiation_state| is final.
virtual bool OnTransportChanged(
const std::string& mid,
RtpTransportInternal* rtp_transport,
rtc::scoped_refptr<DtlsTransport> dtls_transport,
DataChannelTransportInterface* data_channel_transport) = 0;
};
struct Config {
// If |redetermine_role_on_ice_restart| is true, ICE role is redetermined
// upon setting a local transport description that indicates an ICE
// restart.
bool redetermine_role_on_ice_restart = true;
rtc::SSLProtocolVersion ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12;
// |crypto_options| is used to determine if created DTLS transports
// negotiate GCM crypto suites or not.
webrtc::CryptoOptions crypto_options;
PeerConnectionInterface::BundlePolicy bundle_policy =
PeerConnectionInterface::kBundlePolicyBalanced;
PeerConnectionInterface::RtcpMuxPolicy rtcp_mux_policy =
PeerConnectionInterface::kRtcpMuxPolicyRequire;
bool disable_encryption = false;
bool enable_external_auth = false;
// Used to inject the ICE/DTLS transports created externally.
webrtc::IceTransportFactory* ice_transport_factory = nullptr;
cricket::DtlsTransportFactory* dtls_transport_factory = nullptr;
Observer* transport_observer = nullptr;
// Must be provided and valid for the lifetime of the
// TgJsepTransportController instance.
std::function<void(const rtc::CopyOnWriteBuffer& packet,
int64_t packet_time_us)>
rtcp_handler;
bool active_reset_srtp_params = false;
RtcEventLog* event_log = nullptr;
// Factory for SCTP transports.
cricket::SctpTransportInternalFactory* sctp_factory = nullptr;
// Whether an RtpMediaTransport should be created as default, when no
// MediaTransportFactory is provided.
bool use_rtp_media_transport = false;
// Use encrypted datagram transport to send packets.
bool use_datagram_transport = false;
// Use datagram transport's implementation of data channels instead of SCTP.
bool use_datagram_transport_for_data_channels = false;
// Restricts |use_datagram_transport_for_data_channels| to incoming calls.
// If true, the flag applies only to incoming calls and is ignored for
// outgoing ones.
bool use_datagram_transport_for_data_channels_receive_only = false;
// Optional media transport factory (experimental). If provided it will be
// used to create datagram_transport (as long as either
// |use_datagram_transport| or
// |use_datagram_transport_for_data_channels| is set to true). However,
// whether it will be used to send / receive audio and video frames instead
// of RTP is determined by |use_datagram_transport|. Note that currently
// datagram_transport co-exists with RTP / RTCP transports and may use the
// same underlying ICE transport.
MediaTransportFactory* media_transport_factory = nullptr;
};
// The ICE related events are signaled on the |signaling_thread|.
// All the transport related methods are called on the |network_thread|.
TgJsepTransportController(rtc::Thread* signaling_thread,
rtc::Thread* network_thread,
cricket::PortAllocator* port_allocator,
AsyncResolverFactory* async_resolver_factory,
Config config);
virtual ~TgJsepTransportController();
// The main method to be called; applies a description at the transport
// level, creating/destroying transport objects as needed and updating their
// properties. This includes RTP, DTLS, and ICE (but not SCTP; folding SCTP
// in may make sense in the future).
RTCError SetLocalDescription(SdpType type,
const cricket::SessionDescription* description);
RTCError SetRemoteDescription(SdpType type,
const cricket::SessionDescription* description);
// Get transports to be used for the provided |mid|. If bundling is enabled,
// calling GetRtpTransport for multiple MIDs may yield the same object.
RtpTransportInternal* GetRtpTransport(const std::string& mid) const;
cricket::DtlsTransportInternal* GetDtlsTransport(const std::string& mid);
const cricket::DtlsTransportInternal* GetRtcpDtlsTransport(
const std::string& mid) const;
// Gets the externally sharable version of the DtlsTransport.
rtc::scoped_refptr<webrtc::DtlsTransport> LookupDtlsTransportByMid(
const std::string& mid);
rtc::scoped_refptr<SctpTransport> GetSctpTransport(
const std::string& mid) const;
MediaTransportConfig GetMediaTransportConfig(const std::string& mid) const;
DataChannelTransportInterface* GetDataChannelTransport(
const std::string& mid) const;
/*********************
* ICE-related methods
********************/
// This method is public to allow PeerConnection to update it from
// SetConfiguration.
void SetIceConfig(const cricket::IceConfig& config);
// Set the "needs-ice-restart" flag as described in JSEP. After the flag is
// set, offers should generate new ufrags/passwords until an ICE restart
// occurs.
void SetNeedsIceRestartFlag();
// Returns true if the ICE restart flag above was set, and no ICE restart has
// occurred yet for this transport (by applying a local description with
// changed ufrag/password). If the transport has been deleted as a result of
// bundling, returns false.
bool NeedsIceRestart(const std::string& mid) const;
// Start gathering candidates for any new transports, or transports doing an
// ICE restart.
void MaybeStartGathering();
RTCError AddRemoteCandidates(
const std::string& mid,
const std::vector<cricket::Candidate>& candidates);
RTCError RemoveRemoteCandidates(
const std::vector<cricket::Candidate>& candidates);
/**********************
* DTLS-related methods
*********************/
// Specifies the identity to use in this session.
// Can only be called once.
bool SetLocalCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
rtc::scoped_refptr<rtc::RTCCertificate> GetLocalCertificate(
const std::string& mid) const;
// Caller owns returned certificate chain. This method mainly exists for
// stats reporting.
std::unique_ptr<rtc::SSLCertChain> GetRemoteSSLCertChain(
const std::string& mid) const;
// Get negotiated role, if one has been negotiated.
absl::optional<rtc::SSLRole> GetDtlsRole(const std::string& mid) const;
// TODO(deadbeef): GetStats isn't const because all the way down to
// OpenSSLStreamAdapter, GetSslCipherSuite and GetDtlsSrtpCryptoSuite are not
// const. Fix this.
bool GetStats(const std::string& mid, cricket::TransportStats* stats);
bool initial_offerer() const { return initial_offerer_ && *initial_offerer_; }
void SetActiveResetSrtpParams(bool active_reset_srtp_params);
// Allows overwriting the settings from |config|. The media transport
// configuration may be set or reset on the jsep transport controller as long
// as neither 'GetMediaTransport' nor 'MaybeCreateJsepTransport' has been
// called. Once a Jsep transport is created, this setting can no longer be
// changed.
void SetMediaTransportSettings(
bool use_datagram_transport,
bool use_datagram_transport_for_data_channels,
bool use_datagram_transport_for_data_channels_receive_only);
// TODO(elrello): For now the rollback only removes mid to transport mappings
// and deletes unused transports, but doesn't consider anything more complex.
void RollbackTransportForMids(const std::vector<std::string>& mids);
// Gets the transport parameters for the transport identified by |mid|.
// If |mid| is bundled, returns the parameters for the bundled transport.
// If the transport for |mid| has not been created yet, it may be allocated in
// order to generate transport parameters.
absl::optional<cricket::OpaqueTransportParameters> GetTransportParameters(
const std::string& mid);
// All of these signals are fired on the signaling thread.
// If any transport failed => failed,
// Else if all completed => completed,
// Else if all connected => connected,
// Else => connecting
sigslot::signal1<cricket::IceConnectionState> SignalIceConnectionState;
sigslot::signal1<PeerConnectionInterface::PeerConnectionState>
SignalConnectionState;
sigslot::signal1<PeerConnectionInterface::IceConnectionState>
SignalStandardizedIceConnectionState;
// If all transports done gathering => complete,
// Else if any are gathering => gathering,
// Else => new
sigslot::signal1<cricket::IceGatheringState> SignalIceGatheringState;
// (mid, candidates)
sigslot::signal2<const std::string&, const std::vector<cricket::Candidate>&>
SignalIceCandidatesGathered;
sigslot::signal1<const cricket::IceCandidateErrorEvent&>
SignalIceCandidateError;
sigslot::signal1<const std::vector<cricket::Candidate>&>
SignalIceCandidatesRemoved;
sigslot::signal1<const cricket::CandidatePairChangeEvent&>
SignalIceCandidatePairChanged;
sigslot::signal1<rtc::SSLHandshakeError> SignalDtlsHandshakeError;
private:
RTCError ApplyDescription_n(bool local,
SdpType type,
const cricket::SessionDescription* description);
RTCError ValidateAndMaybeUpdateBundleGroup(
bool local,
SdpType type,
const cricket::SessionDescription* description);
RTCError ValidateContent(const cricket::ContentInfo& content_info);
void HandleRejectedContent(const cricket::ContentInfo& content_info,
const cricket::SessionDescription* description);
bool HandleBundledContent(const cricket::ContentInfo& content_info);
bool SetTransportForMid(const std::string& mid,
cricket::TgJsepTransport* jsep_transport);
void RemoveTransportForMid(const std::string& mid);
cricket::TgJsepTransportDescription CreateJsepTransportDescription(
const cricket::ContentInfo& content_info,
const cricket::TransportInfo& transport_info,
const std::vector<int>& encrypted_extension_ids,
int rtp_abs_sendtime_extn_id,
absl::optional<std::string> media_alt_protocol,
absl::optional<std::string> data_alt_protocol);
absl::optional<std::string> bundled_mid() const {
absl::optional<std::string> bundled_mid;
if (bundle_group_ && bundle_group_->FirstContentName()) {
bundled_mid = *(bundle_group_->FirstContentName());
}
return bundled_mid;
}
bool IsBundled(const std::string& mid) const {
return bundle_group_ && bundle_group_->HasContentName(mid);
}
bool ShouldUpdateBundleGroup(SdpType type,
const cricket::SessionDescription* description);
std::vector<int> MergeEncryptedHeaderExtensionIdsForBundle(
const cricket::SessionDescription* description);
std::vector<int> GetEncryptedHeaderExtensionIds(
const cricket::ContentInfo& content_info);
// Extracts the alt-protocol settings that apply to the bundle group.
RTCError GetAltProtocolsForBundle(
const cricket::SessionDescription* description,
absl::optional<std::string>* media_alt_protocol,
absl::optional<std::string>* data_alt_protocol);
int GetRtpAbsSendTimeHeaderExtensionId(
const cricket::ContentInfo& content_info);
// This method takes the BUNDLE group into account. If the TgJsepTransport was
// destroyed because of BUNDLE, it returns the transport that the other
// transports are bundled on (in the current implementation, the first
// content in the BUNDLE group).
const cricket::TgJsepTransport* GetJsepTransportForMid(
const std::string& mid) const;
cricket::TgJsepTransport* GetJsepTransportForMid(const std::string& mid);
// Get the JsepTransport without considering the BUNDLE group. Return nullptr
// if the JsepTransport is destroyed.
const cricket::TgJsepTransport* GetJsepTransportByName(
const std::string& transport_name) const;
cricket::TgJsepTransport* GetJsepTransportByName(
const std::string& transport_name);
// Creates jsep transport. Noop if transport is already created.
// Transport is created either during SetLocalDescription (|local| == true) or
// during SetRemoteDescription (|local| == false). Passing |local| helps to
// differentiate initiator (caller) from answerer (callee).
RTCError MaybeCreateJsepTransport(
bool local,
const cricket::ContentInfo& content_info,
const cricket::SessionDescription& description);
// Creates a datagram transport if the config requests it and an a=x-mt line
// is present for the current media transport. The returned
// DatagramTransportInterface is not connected and must still be connected to
// ICE. On the caller side, |GenerateOrGetLastMediaTransportOffer| must be
// called before MaybeCreateDatagramTransport.
std::unique_ptr<webrtc::DatagramTransportInterface>
MaybeCreateDatagramTransport(const cricket::ContentInfo& content_info,
const cricket::SessionDescription& description,
bool local);
void MaybeDestroyJsepTransport(const std::string& mid);
void DestroyAllJsepTransports_n();
void SetIceRole_n(cricket::IceRole ice_role);
cricket::IceRole DetermineIceRole(
cricket::TgJsepTransport* jsep_transport,
const cricket::TransportInfo& transport_info,
SdpType type,
bool local);
std::unique_ptr<cricket::DtlsTransportInternal> CreateDtlsTransport(
const cricket::ContentInfo& content_info,
cricket::IceTransportInternal* ice,
DatagramTransportInterface* datagram_transport);
rtc::scoped_refptr<webrtc::IceTransportInterface> CreateIceTransport(
const std::string& transport_name,
bool rtcp);
std::unique_ptr<webrtc::TgRtpTransport> CreateUnencryptedRtpTransport(
const std::string& transport_name,
rtc::PacketTransportInternal* rtp_packet_transport,
rtc::PacketTransportInternal* rtcp_packet_transport);
std::unique_ptr<webrtc::SrtpTransport> CreateSdesTransport(
const std::string& transport_name,
cricket::DtlsTransportInternal* rtp_dtls_transport,
cricket::DtlsTransportInternal* rtcp_dtls_transport);
std::unique_ptr<webrtc::DtlsSrtpTransport> CreateDtlsSrtpTransport(
const std::string& transport_name,
cricket::DtlsTransportInternal* rtp_dtls_transport,
cricket::DtlsTransportInternal* rtcp_dtls_transport);
// Collect all the DtlsTransports, including RTP and RTCP, from the
// JsepTransports. JsepTransportController can iterate all the DtlsTransports
// and update the aggregate states.
std::vector<cricket::DtlsTransportInternal*> GetDtlsTransports();
// Handlers for signals from Transport.
void OnTransportWritableState_n(rtc::PacketTransportInternal* transport);
void OnTransportReceivingState_n(rtc::PacketTransportInternal* transport);
void OnTransportGatheringState_n(cricket::IceTransportInternal* transport);
void OnTransportCandidateGathered_n(cricket::IceTransportInternal* transport,
const cricket::Candidate& candidate);
void OnTransportCandidateError_n(
cricket::IceTransportInternal* transport,
const cricket::IceCandidateErrorEvent& event);
void OnTransportCandidatesRemoved_n(cricket::IceTransportInternal* transport,
const cricket::Candidates& candidates);
void OnTransportRoleConflict_n(cricket::IceTransportInternal* transport);
void OnTransportStateChanged_n(cricket::IceTransportInternal* transport);
void OnTransportCandidatePairChanged_n(
const cricket::CandidatePairChangeEvent& event);
void OnDataChannelTransportNegotiated_n(
cricket::TgJsepTransport* transport,
DataChannelTransportInterface* data_channel_transport);
void UpdateAggregateStates_n();
void OnRtcpPacketReceived_n(rtc::CopyOnWriteBuffer* packet,
int64_t packet_time_us);
void OnDtlsHandshakeError(rtc::SSLHandshakeError error);
rtc::Thread* const signaling_thread_ = nullptr;
rtc::Thread* const network_thread_ = nullptr;
cricket::PortAllocator* const port_allocator_ = nullptr;
AsyncResolverFactory* const async_resolver_factory_ = nullptr;
std::map<std::string, std::unique_ptr<cricket::TgJsepTransport>>
jsep_transports_by_name_;
// This keeps track of the mapping between media section
// (BaseChannel/SctpTransport) and the TgJsepTransport underneath.
std::map<std::string, cricket::TgJsepTransport*> mid_to_transport_;
// Aggregate states for Transports.
// standardized_ice_connection_state_ is intended to replace
// ice_connection_state, see bugs.webrtc.org/9308
cricket::IceConnectionState ice_connection_state_ =
cricket::kIceConnectionConnecting;
PeerConnectionInterface::IceConnectionState
standardized_ice_connection_state_ =
PeerConnectionInterface::kIceConnectionNew;
PeerConnectionInterface::PeerConnectionState combined_connection_state_ =
PeerConnectionInterface::PeerConnectionState::kNew;
cricket::IceGatheringState ice_gathering_state_ = cricket::kIceGatheringNew;
Config config_;
// Early in the call we don't know whether the datagram transport will be
// used, but we need the server-supported parameters to add to the SDP. This
// server datagram transport is promoted to the active datagram transport
// once the local description is set, and its ownership is transferred to the
// actual TgJsepTransport. This "offer" datagram transport is not created on
// the answering party, and it is created only once, at the beginning of the
// connection, never again.
std::unique_ptr<DatagramTransportInterface> offer_datagram_transport_ =
nullptr;
const cricket::SessionDescription* local_desc_ = nullptr;
const cricket::SessionDescription* remote_desc_ = nullptr;
absl::optional<bool> initial_offerer_;
absl::optional<cricket::ContentGroup> bundle_group_;
cricket::IceConfig ice_config_;
cricket::IceRole ice_role_ = cricket::ICEROLE_CONTROLLING;
uint64_t ice_tiebreaker_ = rtc::CreateRandomId64();
rtc::scoped_refptr<rtc::RTCCertificate> certificate_;
rtc::AsyncInvoker invoker_;
RTC_DISALLOW_COPY_AND_ASSIGN(TgJsepTransportController);
};
} // namespace webrtc
#endif // TG_PC_JSEP_TRANSPORT_CONTROLLER_H_
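// Editorial sketch (not part of the original header): a hedged example of how
// the controller is driven. The observer stub, thread handles, and description
// pointer are illustrative; the constructor, SetLocalDescription(), and
// MaybeStartGathering() signatures are the ones declared above. Per the class
// comment, transport-related methods run on |network_thread|.
class ExampleObserver : public webrtc::TgJsepTransportController::Observer {
 public:
  bool OnTransportChanged(
      const std::string& mid,
      webrtc::RtpTransportInternal* rtp_transport,
      rtc::scoped_refptr<webrtc::DtlsTransport> dtls_transport,
      webrtc::DataChannelTransportInterface* data_channel_transport) override {
    // Re-point the senders/receivers for |mid| at |rtp_transport| here.
    return true;
  }
};

inline void ExampleWiring(rtc::Thread* signaling_thread,
                          rtc::Thread* network_thread,
                          cricket::PortAllocator* port_allocator,
                          ExampleObserver* observer,
                          const cricket::SessionDescription* local_offer) {
  webrtc::TgJsepTransportController::Config config;
  config.transport_observer = observer;
  // |rtcp_handler| must be provided and stay valid for the controller's life.
  config.rtcp_handler = [](const rtc::CopyOnWriteBuffer&, int64_t) {};
  webrtc::TgJsepTransportController controller(
      signaling_thread, network_thread, port_allocator,
      /*async_resolver_factory=*/nullptr, std::move(config));
  // Applying a description creates/updates the underlying transports.
  webrtc::RTCError err =
      controller.SetLocalDescription(webrtc::SdpType::kOffer, local_offer);
  if (err.ok())
    controller.MaybeStartGathering();
}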

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,516 +0,0 @@
/*
* Copyright 2004 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_peer_connection_factory.h"
#include <memory>
#include <utility>
#include <vector>
#include "api/fec_controller.h"
#include "api/media_stream_proxy.h"
#include "api/media_stream_track_proxy.h"
#include "api/network_state_predictor.h"
#include "api/peer_connection_factory_proxy.h"
#include "api/peer_connection_proxy.h"
#include "api/rtc_event_log/rtc_event_log.h"
#include "api/transport/field_trial_based_config.h"
#include "api/transport/media/media_transport_interface.h"
#include "api/turn_customizer.h"
#include "api/units/data_rate.h"
#include "api/video_track_source_proxy.h"
#include "media/sctp/sctp_transport.h"
#include "p2p/base/basic_packet_socket_factory.h"
#include "p2p/base/default_ice_transport_factory.h"
#include "p2p/client/basic_port_allocator.h"
#include "pc/audio_track.h"
#include "pc/local_audio_source.h"
#include "pc/media_stream.h"
#include "pc/peer_connection.h"
#include "pc/rtp_parameters_conversion.h"
#include "pc/video_track.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/experiments/field_trial_units.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/system/file_wrapper.h"
#include "tg_rtp_data_engine.h"
#include "tg_peer_connection.h"
namespace webrtc {
rtc::scoped_refptr<TgPeerConnectionInterface>
TgPeerConnectionFactoryInterface::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
std::unique_ptr<cricket::PortAllocator> allocator,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
PeerConnectionObserver* observer) {
return nullptr;
}
rtc::scoped_refptr<TgPeerConnectionInterface>
TgPeerConnectionFactoryInterface::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
PeerConnectionDependencies dependencies) {
return nullptr;
}
RtpCapabilities TgPeerConnectionFactoryInterface::GetRtpSenderCapabilities(
cricket::MediaType kind) const {
return {};
}
RtpCapabilities TgPeerConnectionFactoryInterface::GetRtpReceiverCapabilities(
cricket::MediaType kind) const {
return {};
}
BEGIN_SIGNALING_PROXY_MAP(TgPeerConnection)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, local_streams)
PROXY_METHOD0(rtc::scoped_refptr<StreamCollectionInterface>, remote_streams)
PROXY_METHOD1(bool, AddStream, MediaStreamInterface*)
PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpSenderInterface>>,
AddTrack,
rtc::scoped_refptr<MediaStreamTrackInterface>,
const std::vector<std::string>&)
PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*)
PROXY_METHOD1(RTCError, RemoveTrackNew, rtc::scoped_refptr<RtpSenderInterface>)
PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver,
rtc::scoped_refptr<MediaStreamTrackInterface>)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver,
rtc::scoped_refptr<MediaStreamTrackInterface>,
const RtpTransceiverInit&)
PROXY_METHOD1(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver,
cricket::MediaType)
PROXY_METHOD2(RTCErrorOr<rtc::scoped_refptr<RtpTransceiverInterface>>,
AddTransceiver,
cricket::MediaType,
const RtpTransceiverInit&)
PROXY_METHOD2(rtc::scoped_refptr<RtpSenderInterface>,
CreateSender,
const std::string&,
const std::string&)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpSenderInterface>>,
GetSenders)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpReceiverInterface>>,
GetReceivers)
PROXY_CONSTMETHOD0(std::vector<rtc::scoped_refptr<RtpTransceiverInterface>>,
GetTransceivers)
PROXY_METHOD0(void, ClearStatsCache)
PROXY_METHOD2(rtc::scoped_refptr<DataChannelInterface>,
CreateDataChannel,
const std::string&,
const DataChannelInit*)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, remote_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
current_local_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
current_remote_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
pending_local_description)
PROXY_CONSTMETHOD0(const SessionDescriptionInterface*,
pending_remote_description)
PROXY_METHOD0(void, RestartIce)
PROXY_METHOD2(void,
CreateOffer,
CreateSessionDescriptionObserver*,
const PeerConnectionInterface::RTCOfferAnswerOptions&)
PROXY_METHOD2(void,
CreateAnswer,
CreateSessionDescriptionObserver*,
const PeerConnectionInterface::RTCOfferAnswerOptions&)
PROXY_METHOD2(void,
SetLocalDescription,
SetSessionDescriptionObserver*,
SessionDescriptionInterface*)
PROXY_METHOD1(void, SetLocalDescription, SetSessionDescriptionObserver*)
PROXY_METHOD2(void,
SetRemoteDescription,
SetSessionDescriptionObserver*,
SessionDescriptionInterface*)
PROXY_METHOD2(void,
SetRemoteDescription,
std::unique_ptr<SessionDescriptionInterface>,
rtc::scoped_refptr<SetRemoteDescriptionObserverInterface>)
PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration)
PROXY_METHOD1(RTCError,
SetConfiguration,
const PeerConnectionInterface::RTCConfiguration&)
PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*)
PROXY_METHOD2(void,
AddIceCandidate,
std::unique_ptr<IceCandidateInterface>,
std::function<void(RTCError)>)
PROXY_METHOD1(bool, RemoveIceCandidates, const std::vector<cricket::Candidate>&)
PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&)
PROXY_METHOD1(void, SetAudioPlayout, bool)
PROXY_METHOD1(void, SetAudioRecording, bool)
PROXY_METHOD1(rtc::scoped_refptr<DtlsTransportInterface>,
LookupDtlsTransportByMid,
const std::string&)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<SctpTransportInterface>, GetSctpTransport)
PROXY_METHOD0(PeerConnectionInterface::SignalingState, signaling_state)
PROXY_METHOD0(PeerConnectionInterface::IceConnectionState, ice_connection_state)
PROXY_METHOD0(PeerConnectionInterface::IceConnectionState, standardized_ice_connection_state)
PROXY_METHOD0(PeerConnectionInterface::PeerConnectionState, peer_connection_state)
PROXY_METHOD0(PeerConnectionInterface::IceGatheringState, ice_gathering_state)
PROXY_METHOD2(bool,
StartRtcEventLog,
std::unique_ptr<RtcEventLogOutput>,
int64_t)
PROXY_METHOD1(bool, StartRtcEventLog, std::unique_ptr<RtcEventLogOutput>)
PROXY_METHOD0(void, StopRtcEventLog)
PROXY_METHOD0(void, Close)
END_PROXY_MAP()
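// Editorial note (not part of the original file): the
// BEGIN_SIGNALING_PROXY_MAP/PROXY_METHODn/END_PROXY_MAP macros above expand
// into a TgPeerConnectionProxy class whose methods marshal every call onto the
// signaling thread before forwarding it to the wrapped TgPeerConnection (see
// WebRTC's proxy macro definitions). This is why CreatePeerConnection() below
// can hand the freshly constructed object to TgPeerConnectionProxy::Create()
// and return the proxy instead of the raw object.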
TgPeerConnectionFactory::TgPeerConnectionFactory(
PeerConnectionFactoryDependencies dependencies)
: wraps_current_thread_(false),
network_thread_(dependencies.network_thread),
worker_thread_(dependencies.worker_thread),
signaling_thread_(dependencies.signaling_thread),
task_queue_factory_(std::move(dependencies.task_queue_factory)),
media_engine_(std::move(dependencies.media_engine)),
call_factory_(std::move(dependencies.call_factory)),
event_log_factory_(std::move(dependencies.event_log_factory)),
fec_controller_factory_(std::move(dependencies.fec_controller_factory)),
network_state_predictor_factory_(
std::move(dependencies.network_state_predictor_factory)),
injected_network_controller_factory_(
std::move(dependencies.network_controller_factory)),
media_transport_factory_(std::move(dependencies.media_transport_factory)),
neteq_factory_(std::move(dependencies.neteq_factory)),
trials_(dependencies.trials ? std::move(dependencies.trials)
: std::make_unique<FieldTrialBasedConfig>()) {
if (!network_thread_) {
owned_network_thread_ = rtc::Thread::CreateWithSocketServer();
owned_network_thread_->SetName("pc_network_thread", nullptr);
owned_network_thread_->Start();
network_thread_ = owned_network_thread_.get();
}
if (!worker_thread_) {
owned_worker_thread_ = rtc::Thread::Create();
owned_worker_thread_->SetName("pc_worker_thread", nullptr);
owned_worker_thread_->Start();
worker_thread_ = owned_worker_thread_.get();
}
if (!signaling_thread_) {
signaling_thread_ = rtc::Thread::Current();
if (!signaling_thread_) {
// If this thread isn't already wrapped by an rtc::Thread, create a
// wrapper and own it in this class.
signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread();
wraps_current_thread_ = true;
}
}
options_.disable_encryption = true;
}
TgPeerConnectionFactory::~TgPeerConnectionFactory() {
RTC_DCHECK(signaling_thread_->IsCurrent());
channel_manager_.reset(nullptr);
// Make sure |worker_thread_| and |signaling_thread_| outlive
// |default_socket_factory_| and |default_network_manager_|.
default_socket_factory_ = nullptr;
default_network_manager_ = nullptr;
if (wraps_current_thread_)
rtc::ThreadManager::Instance()->UnwrapCurrentThread();
}
bool TgPeerConnectionFactory::Initialize() {
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::InitRandom(rtc::Time32());
default_network_manager_.reset(new rtc::BasicNetworkManager());
if (!default_network_manager_) {
return false;
}
default_socket_factory_.reset(
new rtc::BasicPacketSocketFactory(network_thread_));
if (!default_socket_factory_) {
return false;
}
channel_manager_ = std::make_unique<cricket::ChannelManager>(
std::move(media_engine_), std::make_unique<cricket::TgRtpDataEngine>(),
worker_thread_, network_thread_);
channel_manager_->SetVideoRtxEnabled(true);
if (!channel_manager_->Init()) {
return false;
}
return true;
}
void TgPeerConnectionFactory::SetOptions(const PeerConnectionFactory::Options& options) {
options_ = options;
}
RtpCapabilities TgPeerConnectionFactory::GetRtpSenderCapabilities(
cricket::MediaType kind) const {
RTC_DCHECK_RUN_ON(signaling_thread_);
switch (kind) {
case cricket::MEDIA_TYPE_AUDIO: {
cricket::AudioCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedAudioSendCodecs(&cricket_codecs);
channel_manager_->GetSupportedAudioRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilities(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_VIDEO: {
cricket::VideoCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedVideoCodecs(&cricket_codecs);
channel_manager_->GetSupportedVideoRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilities(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_DATA:
return RtpCapabilities();
}
// Not reached; avoids compile warning.
FATAL();
}
RtpCapabilities TgPeerConnectionFactory::GetRtpReceiverCapabilities(
cricket::MediaType kind) const {
RTC_DCHECK_RUN_ON(signaling_thread_);
switch (kind) {
case cricket::MEDIA_TYPE_AUDIO: {
cricket::AudioCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedAudioReceiveCodecs(&cricket_codecs);
channel_manager_->GetSupportedAudioRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilities(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_VIDEO: {
cricket::VideoCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedVideoCodecs(&cricket_codecs);
channel_manager_->GetSupportedVideoRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilities(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_DATA:
return RtpCapabilities();
}
// Not reached; avoids compile warning.
FATAL();
}
rtc::scoped_refptr<AudioSourceInterface>
TgPeerConnectionFactory::CreateAudioSource(const cricket::AudioOptions& options) {
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::scoped_refptr<LocalAudioSource> source(
LocalAudioSource::Create(&options));
return source;
}
bool TgPeerConnectionFactory::StartAecDump(FILE* file, int64_t max_size_bytes) {
RTC_DCHECK(signaling_thread_->IsCurrent());
return channel_manager_->StartAecDump(FileWrapper(file), max_size_bytes);
}
void TgPeerConnectionFactory::StopAecDump() {
RTC_DCHECK(signaling_thread_->IsCurrent());
channel_manager_->StopAecDump();
}
rtc::scoped_refptr<TgPeerConnectionInterface>
TgPeerConnectionFactory::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
std::unique_ptr<cricket::PortAllocator> allocator,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
PeerConnectionObserver* observer) {
// Convert the legacy API into the new dependency structure.
PeerConnectionDependencies dependencies(observer);
dependencies.allocator = std::move(allocator);
dependencies.cert_generator = std::move(cert_generator);
// Pass that into the new API.
return CreatePeerConnection(configuration, std::move(dependencies));
}
rtc::scoped_refptr<TgPeerConnectionInterface>
TgPeerConnectionFactory::CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
PeerConnectionDependencies dependencies) {
RTC_DCHECK(signaling_thread_->IsCurrent());
RTC_DCHECK(!(dependencies.allocator && dependencies.packet_socket_factory))
<< "You can't set both allocator and packet_socket_factory; "
"the former is going away (see bugs.webrtc.org/7447";
// Set internal defaults if optional dependencies are not set.
if (!dependencies.cert_generator) {
dependencies.cert_generator =
std::make_unique<rtc::RTCCertificateGenerator>(signaling_thread_,
network_thread_);
}
if (!dependencies.allocator) {
rtc::PacketSocketFactory* packet_socket_factory;
if (dependencies.packet_socket_factory)
packet_socket_factory = dependencies.packet_socket_factory.get();
else
packet_socket_factory = default_socket_factory_.get();
network_thread_->Invoke<void>(RTC_FROM_HERE, [this, &configuration,
&dependencies,
&packet_socket_factory]() {
dependencies.allocator = std::make_unique<cricket::BasicPortAllocator>(
default_network_manager_.get(), packet_socket_factory,
configuration.turn_customizer);
});
}
if (!dependencies.ice_transport_factory) {
dependencies.ice_transport_factory =
std::make_unique<DefaultIceTransportFactory>();
}
// TODO(zstein): Once chromium injects its own AsyncResolverFactory, set
// |dependencies.async_resolver_factory| to a new
// |rtc::BasicAsyncResolverFactory| if no factory is provided.
network_thread_->Invoke<void>(
RTC_FROM_HERE,
rtc::Bind(&cricket::PortAllocator::SetNetworkIgnoreMask,
dependencies.allocator.get(), options_.network_ignore_mask));
std::unique_ptr<RtcEventLog> event_log =
worker_thread_->Invoke<std::unique_ptr<RtcEventLog>>(
RTC_FROM_HERE,
rtc::Bind(&TgPeerConnectionFactory::CreateRtcEventLog_w, this));
std::unique_ptr<Call> call = worker_thread_->Invoke<std::unique_ptr<Call>>(
RTC_FROM_HERE,
rtc::Bind(&TgPeerConnectionFactory::CreateCall_w, this, event_log.get()));
rtc::scoped_refptr<TgPeerConnection> pc(
new rtc::RefCountedObject<TgPeerConnection>(this, std::move(event_log),
std::move(call)));
if (!pc->Initialize(configuration, std::move(dependencies))) {
return nullptr;
}
return TgPeerConnectionProxy::Create(signaling_thread(), pc);
}
rtc::scoped_refptr<MediaStreamInterface>
TgPeerConnectionFactory::CreateLocalMediaStream(const std::string& stream_id) {
RTC_DCHECK(signaling_thread_->IsCurrent());
return MediaStreamProxy::Create(signaling_thread_,
MediaStream::Create(stream_id));
}
rtc::scoped_refptr<VideoTrackInterface> TgPeerConnectionFactory::CreateVideoTrack(
const std::string& id,
VideoTrackSourceInterface* source) {
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::scoped_refptr<VideoTrackInterface> track(
VideoTrack::Create(id, source, worker_thread_));
return VideoTrackProxy::Create(signaling_thread_, worker_thread_, track);
}
rtc::scoped_refptr<AudioTrackInterface> TgPeerConnectionFactory::CreateAudioTrack(
const std::string& id,
AudioSourceInterface* source) {
RTC_DCHECK(signaling_thread_->IsCurrent());
rtc::scoped_refptr<AudioTrackInterface> track(AudioTrack::Create(id, source));
return AudioTrackProxy::Create(signaling_thread_, track);
}
std::unique_ptr<cricket::SctpTransportInternalFactory>
TgPeerConnectionFactory::CreateSctpTransportInternalFactory() {
#ifdef HAVE_SCTP
return std::make_unique<cricket::SctpTransportFactory>(network_thread());
#else
return nullptr;
#endif
}
cricket::ChannelManager* TgPeerConnectionFactory::channel_manager() {
return channel_manager_.get();
}
std::unique_ptr<RtcEventLog> TgPeerConnectionFactory::CreateRtcEventLog_w() {
RTC_DCHECK_RUN_ON(worker_thread_);
auto encoding_type = RtcEventLog::EncodingType::Legacy;
if (IsTrialEnabled("WebRTC-RtcEventLogNewFormat"))
encoding_type = RtcEventLog::EncodingType::NewFormat;
return event_log_factory_
? event_log_factory_->CreateRtcEventLog(encoding_type)
: std::make_unique<RtcEventLogNull>();
}
std::unique_ptr<Call> TgPeerConnectionFactory::CreateCall_w(
RtcEventLog* event_log) {
RTC_DCHECK_RUN_ON(worker_thread_);
webrtc::Call::Config call_config(event_log);
if (!channel_manager_->media_engine() || !call_factory_) {
return nullptr;
}
call_config.audio_state =
channel_manager_->media_engine()->voice().GetAudioState();
FieldTrialParameter<DataRate> min_bandwidth("min", DataRate::kbps(30));
FieldTrialParameter<DataRate> start_bandwidth("start", DataRate::kbps(300));
FieldTrialParameter<DataRate> max_bandwidth("max", DataRate::kbps(2000));
ParseFieldTrial({&min_bandwidth, &start_bandwidth, &max_bandwidth},
trials_->Lookup("WebRTC-PcFactoryDefaultBitrates"));
call_config.bitrate_config.min_bitrate_bps =
rtc::saturated_cast<int>(min_bandwidth->bps());
call_config.bitrate_config.start_bitrate_bps =
rtc::saturated_cast<int>(start_bandwidth->bps());
call_config.bitrate_config.max_bitrate_bps =
rtc::saturated_cast<int>(max_bandwidth->bps());
call_config.fec_controller_factory = fec_controller_factory_.get();
call_config.task_queue_factory = task_queue_factory_.get();
call_config.network_state_predictor_factory =
network_state_predictor_factory_.get();
call_config.neteq_factory = neteq_factory_.get();
if (IsTrialEnabled("WebRTC-Bwe-InjectedCongestionController")) {
RTC_LOG(LS_INFO) << "Using injected network controller factory";
call_config.network_controller_factory =
injected_network_controller_factory_.get();
} else {
RTC_LOG(LS_INFO) << "Using default network controller factory";
}
call_config.trials = trials_.get();
return std::unique_ptr<Call>(call_factory_->CreateCall(call_config));
}
bool TgPeerConnectionFactory::IsTrialEnabled(absl::string_view key) const {
RTC_DCHECK(trials_);
return trials_->Lookup(key).find("Enabled") == 0;
}
} // namespace webrtc
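// Editorial sketch (not part of the original file): CreateCall_w() above reads
// its default bitrate caps from the "WebRTC-PcFactoryDefaultBitrates" field
// trial via ParseFieldTrial. Assuming the standard FieldTrialParameter
// comma-separated "key:value" syntax, the 30/300/2000 kbps defaults could be
// overridden through the global field-trial string. The string must outlive
// its use, because the field-trial module stores the raw pointer.
#include "system_wrappers/include/field_trial.h"

static const char kExampleFieldTrials[] =
    "WebRTC-PcFactoryDefaultBitrates/min:100kbps,start:1000kbps,max:5000kbps/";

// Call before the factory creates any Call.
void ConfigureDefaultBitrates() {
  webrtc::field_trial::InitFieldTrialsFromString(kExampleFieldTrials);
}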

View File

@ -1,261 +0,0 @@
/*
* Copyright 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_PC_PEER_CONNECTION_FACTORY_H_
#define TG_PC_PEER_CONNECTION_FACTORY_H_
#include <memory>
#include <string>
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
#include "api/transport/media/media_transport_interface.h"
#include "media/sctp/sctp_transport_internal.h"
#include "pc/channel_manager.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/thread.h"
#include "pc/peer_connection_factory.h"
namespace rtc {
class BasicNetworkManager;
class BasicPacketSocketFactory;
} // namespace rtc
namespace webrtc {
class RtcEventLog;
class TgPeerConnection;
class TgPeerConnectionInterface;
class RTC_EXPORT TgPeerConnectionFactoryInterface
: public rtc::RefCountInterface {
public:
// Set the options to be used for subsequently created PeerConnections.
virtual void SetOptions(const PeerConnectionFactoryInterface::Options& options) = 0;
// The preferred way to create a new peer connection. Simply provide the
// configuration and a PeerConnectionDependencies structure.
// TODO(benwright): Make pure virtual once downstream mock PC factory classes
// are updated.
virtual rtc::scoped_refptr<TgPeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
PeerConnectionDependencies dependencies);
// Deprecated; |allocator| and |cert_generator| may be null, in which case
// default implementations will be used.
//
// |observer| must not be null.
//
// Note that this method does not take ownership of |observer|; it's the
// responsibility of the caller to delete it. It can be safely deleted after
// Close has been called on the returned PeerConnection, which ensures no
// more observer callbacks will be invoked.
virtual rtc::scoped_refptr<TgPeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
std::unique_ptr<cricket::PortAllocator> allocator,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
PeerConnectionObserver* observer);
// Returns the capabilities of an RTP sender of type |kind|.
// If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure.
// TODO(orphis): Make pure virtual when all subclasses implement it.
virtual RtpCapabilities GetRtpSenderCapabilities(
cricket::MediaType kind) const;
// Returns the capabilities of an RTP receiver of type |kind|.
// If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure.
// TODO(orphis): Make pure virtual when all subclasses implement it.
virtual RtpCapabilities GetRtpReceiverCapabilities(
cricket::MediaType kind) const;
virtual rtc::scoped_refptr<MediaStreamInterface> CreateLocalMediaStream(
const std::string& stream_id) = 0;
// Creates an AudioSourceInterface.
// |options| decides audio processing settings.
virtual rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
const cricket::AudioOptions& options) = 0;
// Creates a new local VideoTrack. The same |source| can be used in several
// tracks.
virtual rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
const std::string& label,
VideoTrackSourceInterface* source) = 0;
// Creates a new AudioTrack. At the moment |source| can be null.
virtual rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
const std::string& label,
AudioSourceInterface* source) = 0;
// Starts an AEC dump using an existing file. Takes ownership of |file| and
// immediately passes it on to VoiceEngine (via other objects), which assumes
// ownership. If the operation fails, the file will be closed.
// A maximum file size in bytes can be specified. When the file size limit is
// reached, logging is stopped automatically. If max_size_bytes is set to a
// value <= 0, no limit will be used, and logging will continue until the
// StopAecDump function is called.
// TODO(webrtc:6463): Delete default implementation when downstream mocks
// classes are updated.
virtual bool StartAecDump(FILE* file, int64_t max_size_bytes) {
return false;
}
// Stops logging the AEC dump.
virtual void StopAecDump() = 0;
protected:
// Dtor and ctor protected as objects shouldn't be created or deleted via
// this interface.
TgPeerConnectionFactoryInterface() {}
~TgPeerConnectionFactoryInterface() override = default;
};
class TgPeerConnectionFactory: public TgPeerConnectionFactoryInterface {
public:
void SetOptions(const PeerConnectionFactoryInterface::Options& options);
rtc::scoped_refptr<TgPeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
std::unique_ptr<cricket::PortAllocator> allocator,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
PeerConnectionObserver* observer);
rtc::scoped_refptr<TgPeerConnectionInterface> CreatePeerConnection(
const PeerConnectionInterface::RTCConfiguration& configuration,
PeerConnectionDependencies dependencies);
bool Initialize();
RtpCapabilities GetRtpSenderCapabilities(
cricket::MediaType kind) const;
RtpCapabilities GetRtpReceiverCapabilities(
cricket::MediaType kind) const;
rtc::scoped_refptr<MediaStreamInterface> CreateLocalMediaStream(
const std::string& stream_id);
rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
const cricket::AudioOptions& options);
rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
const std::string& id,
VideoTrackSourceInterface* video_source);
rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
const std::string& id,
AudioSourceInterface* audio_source);
bool StartAecDump(FILE* file, int64_t max_size_bytes);
void StopAecDump();
virtual std::unique_ptr<cricket::SctpTransportInternalFactory>
CreateSctpTransportInternalFactory();
virtual cricket::ChannelManager* channel_manager();
rtc::Thread* signaling_thread() {
// This method can be called on a different thread when the factory is
// created in CreatePeerConnectionFactory().
return signaling_thread_;
}
rtc::Thread* worker_thread() { return worker_thread_; }
rtc::Thread* network_thread() { return network_thread_; }
const PeerConnectionFactoryInterface::Options& options() const { return options_; }
MediaTransportFactory* media_transport_factory() {
return media_transport_factory_.get();
}
protected:
// This structure allows simple management of all new dependencies being added
// to the PeerConnectionFactory.
explicit TgPeerConnectionFactory(
PeerConnectionFactoryDependencies dependencies);
// Hook to let testing framework insert actions between
// "new RTCPeerConnection" and "pc.Initialize"
virtual void ActionsBeforeInitializeForTesting(PeerConnectionInterface*) {}
virtual ~TgPeerConnectionFactory();
private:
bool IsTrialEnabled(absl::string_view key) const;
std::unique_ptr<RtcEventLog> CreateRtcEventLog_w();
std::unique_ptr<Call> CreateCall_w(RtcEventLog* event_log);
bool wraps_current_thread_;
rtc::Thread* network_thread_;
rtc::Thread* worker_thread_;
rtc::Thread* signaling_thread_;
std::unique_ptr<rtc::Thread> owned_network_thread_;
std::unique_ptr<rtc::Thread> owned_worker_thread_;
const std::unique_ptr<TaskQueueFactory> task_queue_factory_;
PeerConnectionFactoryInterface::Options options_;
std::unique_ptr<cricket::ChannelManager> channel_manager_;
std::unique_ptr<rtc::BasicNetworkManager> default_network_manager_;
std::unique_ptr<rtc::BasicPacketSocketFactory> default_socket_factory_;
std::unique_ptr<cricket::MediaEngineInterface> media_engine_;
std::unique_ptr<webrtc::CallFactoryInterface> call_factory_;
std::unique_ptr<RtcEventLogFactoryInterface> event_log_factory_;
std::unique_ptr<FecControllerFactoryInterface> fec_controller_factory_;
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory_;
std::unique_ptr<NetworkControllerFactoryInterface>
injected_network_controller_factory_;
std::unique_ptr<MediaTransportFactory> media_transport_factory_;
std::unique_ptr<NetEqFactory> neteq_factory_;
const std::unique_ptr<WebRtcKeyValueConfig> trials_;
};
BEGIN_SIGNALING_PROXY_MAP(TgPeerConnectionFactory)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD1(void, SetOptions, const PeerConnectionFactory::Options&)
PROXY_METHOD4(rtc::scoped_refptr<TgPeerConnectionInterface>,
CreatePeerConnection,
const PeerConnectionInterface::RTCConfiguration&,
std::unique_ptr<cricket::PortAllocator>,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
PeerConnectionObserver*)
PROXY_METHOD2(rtc::scoped_refptr<TgPeerConnectionInterface>,
CreatePeerConnection,
const PeerConnectionInterface::RTCConfiguration&,
PeerConnectionDependencies)
PROXY_CONSTMETHOD1(webrtc::RtpCapabilities,
GetRtpSenderCapabilities,
cricket::MediaType)
PROXY_CONSTMETHOD1(webrtc::RtpCapabilities,
GetRtpReceiverCapabilities,
cricket::MediaType)
PROXY_METHOD1(rtc::scoped_refptr<MediaStreamInterface>,
CreateLocalMediaStream,
const std::string&)
PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
CreateAudioSource,
const cricket::AudioOptions&)
PROXY_METHOD2(rtc::scoped_refptr<VideoTrackInterface>,
CreateVideoTrack,
const std::string&,
VideoTrackSourceInterface*)
PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
CreateAudioTrack,
const std::string&,
AudioSourceInterface*)
PROXY_METHOD2(bool, StartAecDump, FILE*, int64_t)
PROXY_METHOD0(void, StopAecDump)
END_PROXY_MAP()
} // namespace webrtc
#endif // TG_PC_PEER_CONNECTION_FACTORY_H_
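// Editorial sketch (not part of the original header): the constructor is
// protected, so a TgPeerConnectionFactory would be instantiated the way
// upstream WebRTC instantiates its own factory: through rtc::RefCountedObject,
// which derives from the class and can therefore reach the protected
// constructor. The dependency contents are up to the caller; Initialize()
// must run on the signaling thread and can fail.
inline rtc::scoped_refptr<webrtc::TgPeerConnectionFactory> CreateTgFactory(
    webrtc::PeerConnectionFactoryDependencies dependencies) {
  rtc::scoped_refptr<webrtc::TgPeerConnectionFactory> factory(
      new rtc::RefCountedObject<webrtc::TgPeerConnectionFactory>(
          std::move(dependencies)));
  // Builds the channel manager and socket factories.
  if (!factory->Initialize())
    return nullptr;
  return factory;
}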

View File

@ -1,330 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_rtp_data_engine.h"
#include <map>
#include "absl/strings/match.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/base/rtp_utils.h"
#include "media/base/stream_params.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/data_rate_limiter.h"
#include "rtc_base/helpers.h"
#include "rtc_base/logging.h"
#include "rtc_base/sanitizer.h"
namespace cricket {
// We want to avoid IP fragmentation.
static const size_t kDataMaxRtpPacketLen = 1200U;
// We reserve space after the RTP header for future wiggle room.
static const unsigned char kReservedSpace[] = {0x00, 0x00, 0x00, 0x00};
// Amount of overhead SRTP may take. We need to leave room in the
// buffer for it, otherwise SRTP will fail later. If SRTP ever uses
// more than this, we need to increase this number.
static const size_t kMaxSrtpHmacOverhead = 16;
TgRtpDataEngine::TgRtpDataEngine() {
data_codecs_.push_back(
DataCodec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName));
}
DataMediaChannel* TgRtpDataEngine::CreateChannel(const MediaConfig& config) {
return new TgRtpDataMediaChannel(config);
}
static const DataCodec* FindCodecByName(const std::vector<DataCodec>& codecs,
const std::string& name) {
for (const DataCodec& codec : codecs) {
if (absl::EqualsIgnoreCase(name, codec.name))
return &codec;
}
return nullptr;
}
TgRtpDataMediaChannel::TgRtpDataMediaChannel(const MediaConfig& config)
: DataMediaChannel(config) {
Construct();
SetPreferredDscp(rtc::DSCP_AF41);
}
void TgRtpDataMediaChannel::Construct() {
sending_ = false;
receiving_ = false;
send_limiter_.reset(new rtc::DataRateLimiter(kDataMaxBandwidth / 8, 1.0));
}
TgRtpDataMediaChannel::~TgRtpDataMediaChannel() {
std::map<uint32_t, RtpClock*>::const_iterator iter;
for (iter = rtp_clock_by_send_ssrc_.begin();
iter != rtp_clock_by_send_ssrc_.end(); ++iter) {
delete iter->second;
}
}
const DataCodec* TgFindUnknownCodec(const std::vector<DataCodec>& codecs) {
DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
std::vector<DataCodec>::const_iterator iter;
for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
if (!iter->Matches(data_codec)) {
return &(*iter);
}
}
return NULL;
}
const DataCodec* TgFindKnownCodec(const std::vector<DataCodec>& codecs) {
DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
std::vector<DataCodec>::const_iterator iter;
for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
if (iter->Matches(data_codec)) {
return &(*iter);
}
}
return NULL;
}
bool TgRtpDataMediaChannel::SetRecvCodecs(const std::vector<DataCodec>& codecs) {
const DataCodec* unknown_codec = TgFindUnknownCodec(codecs);
if (unknown_codec) {
RTC_LOG(LS_WARNING) << "Failed to SetRecvCodecs because of unknown codec: "
<< unknown_codec->ToString();
return false;
}
recv_codecs_ = codecs;
return true;
}
bool TgRtpDataMediaChannel::SetSendCodecs(const std::vector<DataCodec>& codecs) {
const DataCodec* known_codec = TgFindKnownCodec(codecs);
if (!known_codec) {
RTC_LOG(LS_WARNING)
<< "Failed to SetSendCodecs because there is no known codec.";
return false;
}
send_codecs_ = codecs;
return true;
}
bool TgRtpDataMediaChannel::SetSendParameters(const DataSendParameters& params) {
return (SetSendCodecs(params.codecs) &&
SetMaxSendBandwidth(params.max_bandwidth_bps));
}
bool TgRtpDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) {
return SetRecvCodecs(params.codecs);
}
bool TgRtpDataMediaChannel::AddSendStream(const StreamParams& stream) {
if (!stream.has_ssrcs()) {
return false;
}
if (GetStreamBySsrc(send_streams_, stream.first_ssrc())) {
RTC_LOG(LS_WARNING) << "Not adding data send stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc()
<< " because stream already exists.";
return false;
}
send_streams_.push_back(stream);
// TODO(pthatcher): This should be per-stream, not per-ssrc.
// And we should probably allow more than one per stream.
rtp_clock_by_send_ssrc_[stream.first_ssrc()] =
new RtpClock(kDataCodecClockrate, rtc::CreateRandomNonZeroId(),
rtc::CreateRandomNonZeroId());
RTC_LOG(LS_INFO) << "Added data send stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc();
return true;
}
bool TgRtpDataMediaChannel::RemoveSendStream(uint32_t ssrc) {
if (!GetStreamBySsrc(send_streams_, ssrc)) {
return false;
}
RemoveStreamBySsrc(&send_streams_, ssrc);
delete rtp_clock_by_send_ssrc_[ssrc];
rtp_clock_by_send_ssrc_.erase(ssrc);
return true;
}
bool TgRtpDataMediaChannel::AddRecvStream(const StreamParams& stream) {
if (!stream.has_ssrcs()) {
return false;
}
if (GetStreamBySsrc(recv_streams_, stream.first_ssrc())) {
RTC_LOG(LS_WARNING) << "Not adding data recv stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc()
<< " because stream already exists.";
return false;
}
recv_streams_.push_back(stream);
RTC_LOG(LS_INFO) << "Added data recv stream '" << stream.id
<< "' with ssrc=" << stream.first_ssrc();
return true;
}
bool TgRtpDataMediaChannel::RemoveRecvStream(uint32_t ssrc) {
RemoveStreamBySsrc(&recv_streams_, ssrc);
return true;
}
// Not implemented.
void TgRtpDataMediaChannel::ResetUnsignaledRecvStream() {}
void TgRtpDataMediaChannel::OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t /* packet_time_us */) {
RtpHeader header;
if (!GetRtpHeader(packet.cdata(), packet.size(), &header)) {
return;
}
size_t header_length;
if (!GetRtpHeaderLen(packet.cdata(), packet.size(), &header_length)) {
return;
}
const char* data =
packet.cdata<char>() + header_length + sizeof(kReservedSpace);
size_t data_len = packet.size() - header_length - sizeof(kReservedSpace);
if (!receiving_) {
RTC_LOG(LS_WARNING) << "Not receiving packet " << header.ssrc << ":"
<< header.seq_num << " before SetReceive(true) called.";
return;
}
if (!FindCodecById(recv_codecs_, header.payload_type)) {
return;
}
if (!GetStreamBySsrc(recv_streams_, header.ssrc)) {
RTC_LOG(LS_WARNING) << "Received packet for unknown ssrc: " << header.ssrc;
return;
}
// Uncomment this for easy debugging.
// const auto* found_stream = GetStreamBySsrc(recv_streams_, header.ssrc);
// RTC_LOG(LS_INFO) << "Received packet"
// << " groupid=" << found_stream.groupid
// << ", ssrc=" << header.ssrc
// << ", seqnum=" << header.seq_num
// << ", timestamp=" << header.timestamp
// << ", len=" << data_len;
ReceiveDataParams params;
params.ssrc = header.ssrc;
params.seq_num = header.seq_num;
params.timestamp = header.timestamp;
SignalDataReceived(params, data, data_len);
}
bool TgRtpDataMediaChannel::SetMaxSendBandwidth(int bps) {
if (bps <= 0) {
bps = kDataMaxBandwidth;
}
send_limiter_.reset(new rtc::DataRateLimiter(bps / 8, 1.0));
RTC_LOG(LS_INFO) << "TgRtpDataMediaChannel::SetSendBandwidth to " << bps
<< "bps.";
return true;
}
bool TgRtpDataMediaChannel::SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result) {
if (result) {
// If we return true, we'll set this to SDR_SUCCESS.
*result = SDR_ERROR;
}
if (!sending_) {
RTC_LOG(LS_WARNING) << "Not sending packet with ssrc=" << params.ssrc
<< " len=" << payload.size()
<< " before SetSend(true).";
return false;
}
if (params.type != cricket::DMT_TEXT) {
RTC_LOG(LS_WARNING)
<< "Not sending data because binary type is unsupported.";
return false;
}
const StreamParams* found_stream =
GetStreamBySsrc(send_streams_, params.ssrc);
if (!found_stream) {
RTC_LOG(LS_WARNING) << "Not sending data because ssrc is unknown: "
<< params.ssrc;
return false;
}
const DataCodec* found_codec =
FindCodecByName(send_codecs_, kGoogleRtpDataCodecName);
if (!found_codec) {
RTC_LOG(LS_WARNING) << "Not sending data because codec is unknown: "
<< kGoogleRtpDataCodecName;
return false;
}
size_t packet_len = (kMinRtpPacketLen + sizeof(kReservedSpace) +
payload.size() + kMaxSrtpHmacOverhead);
if (packet_len > kDataMaxRtpPacketLen) {
return false;
}
double now =
rtc::TimeMicros() / static_cast<double>(rtc::kNumMicrosecsPerSec);
if (!send_limiter_->CanUse(packet_len, now)) {
RTC_LOG(LS_VERBOSE) << "Dropped data packet of len=" << packet_len
<< "; already sent " << send_limiter_->used_in_period()
<< "/" << send_limiter_->max_per_period();
return false;
}
RtpHeader header;
header.payload_type = found_codec->id;
header.ssrc = params.ssrc;
rtp_clock_by_send_ssrc_[header.ssrc]->Tick(now, &header.seq_num,
&header.timestamp);
rtc::CopyOnWriteBuffer packet(kMinRtpPacketLen, packet_len);
if (!SetRtpHeader(packet.data(), packet.size(), header)) {
return false;
}
packet.AppendData(kReservedSpace);
packet.AppendData(payload);
RTC_LOG(LS_VERBOSE) << "Sent RTP data packet: "
<< " stream=" << found_stream->id
<< " ssrc=" << header.ssrc
<< ", seqnum=" << header.seq_num
<< ", timestamp=" << header.timestamp
<< ", len=" << payload.size();
rtc::PacketOptions options;
options.info_signaled_after_sent.packet_type = rtc::PacketType::kData;
MediaChannel::SendPacket(&packet, options);
send_limiter_->Use(packet_len, now);
if (result) {
*result = SDR_SUCCESS;
}
return true;
}
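// Illustrative caller sketch for SendData() (hypothetical names; |channel|
// and the ssrc value are assumptions, not part of this file):
//
//   cricket::SendDataParams params;
//   params.ssrc = 0x12345678;         // Must match an added send stream.
//   params.type = cricket::DMT_TEXT;  // Binary payloads are rejected above.
//   rtc::CopyOnWriteBuffer payload("ping", 4);
//   cricket::SendDataResult result;
//   if (!channel->SendData(params, payload, &result)) {
//     // result == cricket::SDR_ERROR: not sending, unknown ssrc/codec,
//     // oversized packet, or rate-limited.
//   }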
} // namespace cricket

View File

@ -1,109 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_MEDIA_BASE_RTP_DATA_ENGINE_H_
#define TG_MEDIA_BASE_RTP_DATA_ENGINE_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "media/base/codec.h"
#include "media/base/media_channel.h"
#include "media/base/media_constants.h"
#include "media/base/media_engine.h"
namespace rtc {
class DataRateLimiter;
}
namespace cricket {
class TgRtpDataEngine : public DataEngineInterface {
public:
TgRtpDataEngine();
virtual DataMediaChannel* CreateChannel(const MediaConfig& config);
virtual const std::vector<DataCodec>& data_codecs() { return data_codecs_; }
private:
std::vector<DataCodec> data_codecs_;
};
// Keep track of sequence number and timestamp of an RTP stream. The
// sequence number starts with a "random" value and increments. The
// timestamp starts with a "random" value and increases monotonically
// according to the clockrate.
class RtpClock {
public:
RtpClock(int clockrate, uint16_t first_seq_num, uint32_t timestamp_offset)
: clockrate_(clockrate),
last_seq_num_(first_seq_num),
timestamp_offset_(timestamp_offset) {}
// Given the current time (in seconds, which must be monotonically
// increasing), return the next sequence number and timestamp.
void Tick(double now, int* seq_num, uint32_t* timestamp);
private:
int clockrate_;
uint16_t last_seq_num_;
uint32_t timestamp_offset_;
};
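// A minimal sketch of the Tick() contract described above (the definition
// lives in the .cc file): per packet, the sequence number increments by one
// and the timestamp advances with wall time scaled by the clockrate.
//
//   void RtpClock::Tick(double now, int* seq_num, uint32_t* timestamp) {
//     *seq_num = ++last_seq_num_;
//     *timestamp = timestamp_offset_ + static_cast<uint32_t>(now * clockrate_);
//   }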
class TgRtpDataMediaChannel : public DataMediaChannel {
public:
explicit TgRtpDataMediaChannel(const MediaConfig& config);
virtual ~TgRtpDataMediaChannel();
virtual bool SetSendParameters(const DataSendParameters& params);
virtual bool SetRecvParameters(const DataRecvParameters& params);
virtual bool AddSendStream(const StreamParams& sp);
virtual bool RemoveSendStream(uint32_t ssrc);
virtual bool AddRecvStream(const StreamParams& sp);
virtual bool RemoveRecvStream(uint32_t ssrc);
virtual void ResetUnsignaledRecvStream();
virtual bool SetSend(bool send) {
sending_ = send;
return true;
}
virtual bool SetReceive(bool receive) {
receiving_ = receive;
return true;
}
virtual void OnPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us);
virtual void OnReadyToSend(bool ready) {}
virtual bool SendData(const SendDataParams& params,
const rtc::CopyOnWriteBuffer& payload,
SendDataResult* result);
private:
void Construct();
bool SetMaxSendBandwidth(int bps);
bool SetSendCodecs(const std::vector<DataCodec>& codecs);
bool SetRecvCodecs(const std::vector<DataCodec>& codecs);
bool sending_;
bool receiving_;
std::vector<DataCodec> send_codecs_;
std::vector<DataCodec> recv_codecs_;
std::vector<StreamParams> send_streams_;
std::vector<StreamParams> recv_streams_;
std::map<uint32_t, RtpClock*> rtp_clock_by_send_ssrc_;
std::unique_ptr<rtc::DataRateLimiter> send_limiter_;
};
} // namespace cricket
#endif // TG_MEDIA_BASE_RTP_DATA_ENGINE_H_

View File

@ -1,357 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_rtp_sender.h"
#include <atomic>
#include <utility>
#include <vector>
#include "api/audio_options.h"
#include "api/media_stream_interface.h"
#include "media/base/media_engine.h"
#include "pc/peer_connection.h"
#include "pc/stats_collector.h"
#include "rtc_base/checks.h"
#include "rtc_base/helpers.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
// This function is only expected to be called on the signaling thread.
// On the other hand, some test or even production setups may use
// several signaling threads.
int GenerateUniqueId() {
static std::atomic<int> g_unique_id{0};
return ++g_unique_id;
}
// Returns true if a "per-sender" encoding parameter contains a value that isn't
// its default. Currently max_bitrate_bps and bitrate_priority both are
// implemented "per-sender," meaning that these encoding parameters
// are used for the RtpSender as a whole, not for a specific encoding layer.
// This is done by setting these encoding parameters at index 0 of
// RtpParameters.encodings. This function can be used to check if these
// parameters are set at any index other than 0 of RtpParameters.encodings,
// because they are currently unimplemented to be used for a specific encoding
// layer.
bool PerSenderRtpEncodingParameterHasValue(
const RtpEncodingParameters& encoding_params) {
if (encoding_params.bitrate_priority != kDefaultBitratePriority ||
encoding_params.network_priority != kDefaultBitratePriority) {
return true;
}
return false;
}
void RemoveEncodingLayers(const std::vector<std::string>& rids,
std::vector<RtpEncodingParameters>* encodings) {
RTC_DCHECK(encodings);
encodings->erase(
std::remove_if(encodings->begin(), encodings->end(),
[&rids](const RtpEncodingParameters& encoding) {
return absl::c_linear_search(rids, encoding.rid);
}),
encodings->end());
}
RtpParameters RestoreEncodingLayers(
const RtpParameters& parameters,
const std::vector<std::string>& removed_rids,
const std::vector<RtpEncodingParameters>& all_layers) {
RTC_DCHECK_EQ(parameters.encodings.size() + removed_rids.size(),
all_layers.size());
RtpParameters result(parameters);
result.encodings.clear();
size_t index = 0;
for (const RtpEncodingParameters& encoding : all_layers) {
if (absl::c_linear_search(removed_rids, encoding.rid)) {
result.encodings.push_back(encoding);
continue;
}
result.encodings.push_back(parameters.encodings[index++]);
}
return result;
}
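// Worked example of the pair above: with all_layers carrying rids
// {"q", "h", "f"} and removed_rids {"h"}, RemoveEncodingLayers() leaves
// {"q", "f"}; RestoreEncodingLayers() then re-inserts the untouched "h"
// layer at its original index, yielding {"q'", "h", "f'"}, where primes mark
// the possibly-modified surviving layers.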
} // namespace
// Returns true if any RtpParameters member that isn't implemented contains a
// value.
bool TgUnimplementedRtpParameterHasValue(const RtpParameters& parameters) {
if (!parameters.mid.empty()) {
return true;
}
for (size_t i = 0; i < parameters.encodings.size(); ++i) {
// Encoding parameters that are per-sender should only contain value at
// index 0.
if (i != 0 &&
PerSenderRtpEncodingParameterHasValue(parameters.encodings[i])) {
return true;
}
}
return false;
}
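// Illustrative sketch of the "per-sender at index 0" convention enforced
// above (hypothetical usage; |sender| is an assumption):
//
//   webrtc::RtpParameters parameters = sender->GetParameters();
//   parameters.encodings[0].bitrate_priority = 2.0;  // OK: index 0 only.
//   // Setting bitrate_priority on encodings[1] would make
//   // TgUnimplementedRtpParameterHasValue() return true.
//   sender->SetParameters(parameters);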
TgLocalAudioSinkAdapter::TgLocalAudioSinkAdapter() : sink_(nullptr) {}
TgLocalAudioSinkAdapter::~TgLocalAudioSinkAdapter() {
rtc::CritScope lock(&lock_);
if (sink_)
sink_->OnClose();
}
void TgLocalAudioSinkAdapter::OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
size_t number_of_channels,
size_t number_of_frames) {
rtc::CritScope lock(&lock_);
if (sink_) {
sink_->OnData(audio_data, bits_per_sample, sample_rate, number_of_channels,
number_of_frames);
}
}
void TgLocalAudioSinkAdapter::SetSink(cricket::AudioSource::Sink* sink) {
rtc::CritScope lock(&lock_);
RTC_DCHECK(!sink || !sink_);
sink_ = sink;
}
rtc::scoped_refptr<TgAudioRtpSender> TgAudioRtpSender::Create(
rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer) {
return rtc::scoped_refptr<TgAudioRtpSender>(
new rtc::RefCountedObject<TgAudioRtpSender>(worker_thread, id,
set_streams_observer));
}
TgAudioRtpSender::TgAudioRtpSender(rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer)
: RtpSenderBase(worker_thread, id, set_streams_observer),
dtmf_sender_proxy_(DtmfSenderProxy::Create(
rtc::Thread::Current(),
DtmfSender::Create(rtc::Thread::Current(), this))),
sink_adapter_(new TgLocalAudioSinkAdapter()) {}
TgAudioRtpSender::~TgAudioRtpSender() {
// For DtmfSender.
SignalDestroyed();
Stop();
}
bool TgAudioRtpSender::CanInsertDtmf() {
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
return false;
}
// Check that this RTP sender is active (description has been applied that
// matches an SSRC to its ID).
if (!ssrc_) {
RTC_LOG(LS_ERROR) << "CanInsertDtmf: Sender does not have SSRC.";
return false;
}
return worker_thread_->Invoke<bool>(
RTC_FROM_HERE, [&] { return voice_media_channel()->CanInsertDtmf(); });
}
bool TgAudioRtpSender::InsertDtmf(int code, int duration) {
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
return false;
}
if (!ssrc_) {
RTC_LOG(LS_ERROR) << "InsertDtmf: Sender does not have SSRC.";
return false;
}
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return voice_media_channel()->InsertDtmf(ssrc_, code, duration);
});
if (!success) {
RTC_LOG(LS_ERROR) << "Failed to insert DTMF to channel.";
}
return success;
}
sigslot::signal0<>* TgAudioRtpSender::GetOnDestroyedSignal() {
return &SignalDestroyed;
}
void TgAudioRtpSender::OnChanged() {
TRACE_EVENT0("webrtc", "TgAudioRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_enabled_ != track_->enabled()) {
cached_track_enabled_ = track_->enabled();
if (can_send_track()) {
SetSend();
}
}
}
void TgAudioRtpSender::DetachTrack() {
RTC_DCHECK(track_);
audio_track()->RemoveSink(sink_adapter_.get());
}
void TgAudioRtpSender::AttachTrack() {
RTC_DCHECK(track_);
cached_track_enabled_ = track_->enabled();
audio_track()->AddSink(sink_adapter_.get());
}
void TgAudioRtpSender::AddTrackToStats() {
}
void TgAudioRtpSender::RemoveTrackFromStats() {
}
rtc::scoped_refptr<DtmfSenderInterface> TgAudioRtpSender::GetDtmfSender() const {
return dtmf_sender_proxy_;
}
void TgAudioRtpSender::SetSend() {
RTC_DCHECK(!stopped_);
RTC_DCHECK(can_send_track());
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "SetAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
#if !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_WEBKIT_BUILD)
// TODO(tommi): Remove this hack when we move CreateAudioSource out of
// PeerConnection. This is a bit of a strange way to apply local audio
// options since it is also applied to all streams/channels, local or remote.
if (track_->enabled() && audio_track()->GetSource() &&
!audio_track()->GetSource()->remote()) {
options = audio_track()->GetSource()->options();
}
#endif
// |track_->enabled()| hops to the signaling thread, so call it before we hop
// to the worker thread or else it will deadlock.
bool track_enabled = track_->enabled();
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return voice_media_channel()->SetAudioSend(ssrc_, track_enabled, &options,
sink_adapter_.get());
});
if (!success) {
RTC_LOG(LS_ERROR) << "SetAudioSend: ssrc is incorrect: " << ssrc_;
}
}
void TgAudioRtpSender::ClearSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!media_channel_) {
RTC_LOG(LS_WARNING) << "ClearAudioSend: No audio channel exists.";
return;
}
cricket::AudioOptions options;
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return voice_media_channel()->SetAudioSend(ssrc_, false, &options, nullptr);
});
if (!success) {
RTC_LOG(LS_WARNING) << "ClearAudioSend: ssrc is incorrect: " << ssrc_;
}
}
rtc::scoped_refptr<TgVideoRtpSender> TgVideoRtpSender::Create(
rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer) {
return rtc::scoped_refptr<TgVideoRtpSender>(
new rtc::RefCountedObject<TgVideoRtpSender>(worker_thread, id,
set_streams_observer));
}
TgVideoRtpSender::TgVideoRtpSender(rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer)
: RtpSenderBase(worker_thread, id, set_streams_observer) {}
TgVideoRtpSender::~TgVideoRtpSender() {
Stop();
}
void TgVideoRtpSender::OnChanged() {
TRACE_EVENT0("webrtc", "TgVideoRtpSender::OnChanged");
RTC_DCHECK(!stopped_);
if (cached_track_content_hint_ != video_track()->content_hint()) {
cached_track_content_hint_ = video_track()->content_hint();
if (can_send_track()) {
SetSend();
}
}
}
void TgVideoRtpSender::AttachTrack() {
RTC_DCHECK(track_);
cached_track_content_hint_ = video_track()->content_hint();
}
rtc::scoped_refptr<DtmfSenderInterface> TgVideoRtpSender::GetDtmfSender() const {
RTC_LOG(LS_ERROR) << "Tried to get DTMF sender from video sender.";
return nullptr;
}
void TgVideoRtpSender::SetSend() {
RTC_DCHECK(!stopped_);
RTC_DCHECK(can_send_track());
if (!media_channel_) {
RTC_LOG(LS_ERROR) << "SetVideoSend: No video channel exists.";
return;
}
cricket::VideoOptions options;
VideoTrackSourceInterface* source = video_track()->GetSource();
if (source) {
options.is_screencast = source->is_screencast();
options.video_noise_reduction = source->needs_denoising();
}
switch (cached_track_content_hint_) {
case VideoTrackInterface::ContentHint::kNone:
break;
case VideoTrackInterface::ContentHint::kFluid:
options.is_screencast = false;
break;
case VideoTrackInterface::ContentHint::kDetailed:
case VideoTrackInterface::ContentHint::kText:
options.is_screencast = true;
break;
}
bool success = worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return video_media_channel()->SetVideoSend(ssrc_, &options, video_track());
});
RTC_DCHECK(success);
}
void TgVideoRtpSender::ClearSend() {
RTC_DCHECK(ssrc_ != 0);
RTC_DCHECK(!stopped_);
if (!media_channel_) {
RTC_LOG(LS_WARNING) << "SetVideoSend: No video channel exists.";
return;
}
// Allow SetVideoSend to fail since |enable| is false and |source| is null.
// This is the normal case when the underlying media channel has already
// been deleted.
worker_thread_->Invoke<bool>(RTC_FROM_HERE, [&] {
return video_media_channel()->SetVideoSend(ssrc_, nullptr, nullptr);
});
}
} // namespace webrtc

View File

@ -1,176 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file contains classes that implement RtpSenderInterface.
// An RtpSender associates a MediaStreamTrackInterface with an underlying
// transport (provided by AudioProviderInterface/VideoProviderInterface)
#ifndef TG_PC_RTP_SENDER_H_
#define TG_PC_RTP_SENDER_H_
#include <memory>
#include <string>
#include <vector>
#include "api/media_stream_interface.h"
#include "api/rtp_sender_interface.h"
#include "media/base/audio_source.h"
#include "media/base/media_channel.h"
#include "pc/dtmf_sender.h"
#include "rtc_base/critical_section.h"
#include "pc/rtp_sender.h"
namespace webrtc {
class StatsCollector;
bool TgUnimplementedRtpParameterHasValue(const RtpParameters& parameters);
// TgLocalAudioSinkAdapter receives data callback as a sink to the local
// AudioTrack, and passes the data to the sink of AudioSource.
class TgLocalAudioSinkAdapter : public AudioTrackSinkInterface,
public cricket::AudioSource {
public:
TgLocalAudioSinkAdapter();
virtual ~TgLocalAudioSinkAdapter();
private:
// AudioSinkInterface implementation.
void OnData(const void* audio_data,
int bits_per_sample,
int sample_rate,
size_t number_of_channels,
size_t number_of_frames) override;
// cricket::AudioSource implementation.
void SetSink(cricket::AudioSource::Sink* sink) override;
cricket::AudioSource::Sink* sink_;
// Critical section protecting |sink_|.
rtc::CriticalSection lock_;
};
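// Assumed data flow for the adapter above, mirroring how TgAudioRtpSender
// wires it up in the .cc file: the local track pushes PCM into OnData(),
// which forwards to whatever cricket sink the media channel installed.
//
//   audio_track->AddSink(adapter.get());  // track   -> adapter (OnData)
//   adapter->SetSink(channel_sink);       // adapter -> media channel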
class TgAudioRtpSender : public DtmfProviderInterface, public RtpSenderBase {
public:
// Construct an RtpSender for audio with the given sender ID.
// The sender is initialized with no track to send and no associated streams.
// AddTrackToStats/RemoveTrackFromStats are no-ops here (this class takes
// no StatsCollector).
// If |set_streams_observer| is not null, it is invoked when SetStreams()
// is called. |set_streams_observer| is not owned by this object. If not
// null, it must be valid at least until this sender becomes stopped.
static rtc::scoped_refptr<TgAudioRtpSender> Create(
rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer);
virtual ~TgAudioRtpSender();
// DtmfSenderProvider implementation.
bool CanInsertDtmf() override;
bool InsertDtmf(int code, int duration) override;
sigslot::signal0<>* GetOnDestroyedSignal() override;
// ObserverInterface implementation.
void OnChanged() override;
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_AUDIO;
}
std::string track_kind() const override {
return MediaStreamTrackInterface::kAudioKind;
}
rtc::scoped_refptr<DtmfSenderInterface> GetDtmfSender() const override;
protected:
TgAudioRtpSender(rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer);
void SetSend() override;
void ClearSend() override;
// Hooks to allow custom logic when tracks are attached and detached.
void AttachTrack() override;
void DetachTrack() override;
void AddTrackToStats() override;
void RemoveTrackFromStats() override;
private:
cricket::VoiceMediaChannel* voice_media_channel() {
return static_cast<cricket::VoiceMediaChannel*>(media_channel_);
}
rtc::scoped_refptr<AudioTrackInterface> audio_track() const {
return rtc::scoped_refptr<AudioTrackInterface>(
static_cast<AudioTrackInterface*>(track_.get()));
}
sigslot::signal0<> SignalDestroyed;
rtc::scoped_refptr<DtmfSenderInterface> dtmf_sender_proxy_;
bool cached_track_enabled_ = false;
// Used to pass the data callback from the |track_| to the other end of
// cricket::AudioSource.
std::unique_ptr<TgLocalAudioSinkAdapter> sink_adapter_;
};
class TgVideoRtpSender : public RtpSenderBase {
public:
// Construct an RtpSender for video with the given sender ID.
// The sender is initialized with no track to send and no associated streams.
// If |set_streams_observer| is not null, it is invoked when SetStreams()
// is called. |set_streams_observer| is not owned by this object. If not
// null, it must be valid at least until this sender becomes stopped.
static rtc::scoped_refptr<TgVideoRtpSender> Create(
rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer);
virtual ~TgVideoRtpSender();
// ObserverInterface implementation
void OnChanged() override;
cricket::MediaType media_type() const override {
return cricket::MEDIA_TYPE_VIDEO;
}
std::string track_kind() const override {
return MediaStreamTrackInterface::kVideoKind;
}
rtc::scoped_refptr<DtmfSenderInterface> GetDtmfSender() const override;
protected:
TgVideoRtpSender(rtc::Thread* worker_thread,
const std::string& id,
SetStreamsObserver* set_streams_observer);
void SetSend() override;
void ClearSend() override;
// Hook to allow custom logic when tracks are attached.
void AttachTrack() override;
private:
cricket::VideoMediaChannel* video_media_channel() {
return static_cast<cricket::VideoMediaChannel*>(media_channel_);
}
rtc::scoped_refptr<VideoTrackInterface> video_track() const {
return rtc::scoped_refptr<VideoTrackInterface>(
static_cast<VideoTrackInterface*>(track_.get()));
}
VideoTrackInterface::ContentHint cached_track_content_hint_ =
VideoTrackInterface::ContentHint::kNone;
};
} // namespace webrtc
#endif // TG_PC_RTP_SENDER_H_

View File

@ -1,292 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_rtp_transport.h"
#include <errno.h>
#include <string>
#include <utility>
#include "api/rtp_headers.h"
#include "api/rtp_parameters.h"
#include "media/base/rtp_utils.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/logging.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/trace_event.h"
namespace webrtc {
void TgRtpTransport::SetRtcpMuxEnabled(bool enable) {
rtcp_mux_enabled_ = enable;
MaybeSignalReadyToSend();
}
const std::string& TgRtpTransport::transport_name() const {
return rtp_packet_transport_->transport_name();
}
int TgRtpTransport::SetRtpOption(rtc::Socket::Option opt, int value) {
return rtp_packet_transport_->SetOption(opt, value);
}
int TgRtpTransport::SetRtcpOption(rtc::Socket::Option opt, int value) {
if (rtcp_packet_transport_) {
return rtcp_packet_transport_->SetOption(opt, value);
}
return -1;
}
void TgRtpTransport::SetRtpPacketTransport(
rtc::PacketTransportInternal* new_packet_transport) {
if (new_packet_transport == rtp_packet_transport_) {
return;
}
if (rtp_packet_transport_) {
rtp_packet_transport_->SignalReadyToSend.disconnect(this);
rtp_packet_transport_->SignalReadPacket.disconnect(this);
rtp_packet_transport_->SignalNetworkRouteChanged.disconnect(this);
rtp_packet_transport_->SignalWritableState.disconnect(this);
rtp_packet_transport_->SignalSentPacket.disconnect(this);
// Reset the network route of the old transport.
SignalNetworkRouteChanged(absl::optional<rtc::NetworkRoute>());
}
if (new_packet_transport) {
new_packet_transport->SignalReadyToSend.connect(
this, &TgRtpTransport::OnReadyToSend);
new_packet_transport->SignalReadPacket.connect(this,
&TgRtpTransport::OnReadPacket);
new_packet_transport->SignalNetworkRouteChanged.connect(
this, &TgRtpTransport::OnNetworkRouteChanged);
new_packet_transport->SignalWritableState.connect(
this, &TgRtpTransport::OnWritableState);
new_packet_transport->SignalSentPacket.connect(this,
&TgRtpTransport::OnSentPacket);
// Set the network route for the new transport.
SignalNetworkRouteChanged(new_packet_transport->network_route());
}
rtp_packet_transport_ = new_packet_transport;
// Assumes the transport is ready to send if it is writable. If we are wrong,
// ready to send will be updated the next time we try to send.
SetReadyToSend(false,
rtp_packet_transport_ && rtp_packet_transport_->writable());
}
void TgRtpTransport::SetRtcpPacketTransport(
rtc::PacketTransportInternal* new_packet_transport) {
if (new_packet_transport == rtcp_packet_transport_) {
return;
}
if (rtcp_packet_transport_) {
rtcp_packet_transport_->SignalReadyToSend.disconnect(this);
rtcp_packet_transport_->SignalReadPacket.disconnect(this);
rtcp_packet_transport_->SignalNetworkRouteChanged.disconnect(this);
rtcp_packet_transport_->SignalWritableState.disconnect(this);
rtcp_packet_transport_->SignalSentPacket.disconnect(this);
// Reset the network route of the old transport.
SignalNetworkRouteChanged(absl::optional<rtc::NetworkRoute>());
}
if (new_packet_transport) {
new_packet_transport->SignalReadyToSend.connect(
this, &TgRtpTransport::OnReadyToSend);
new_packet_transport->SignalReadPacket.connect(this,
&TgRtpTransport::OnReadPacket);
new_packet_transport->SignalNetworkRouteChanged.connect(
this, &TgRtpTransport::OnNetworkRouteChanged);
new_packet_transport->SignalWritableState.connect(
this, &TgRtpTransport::OnWritableState);
new_packet_transport->SignalSentPacket.connect(this,
&TgRtpTransport::OnSentPacket);
// Set the network route for the new transport.
SignalNetworkRouteChanged(new_packet_transport->network_route());
}
rtcp_packet_transport_ = new_packet_transport;
// Assumes the transport is ready to send if it is writable. If we are wrong,
// ready to send will be updated the next time we try to send.
SetReadyToSend(true,
rtcp_packet_transport_ && rtcp_packet_transport_->writable());
}
bool TgRtpTransport::IsWritable(bool rtcp) const {
rtc::PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_
? rtcp_packet_transport_
: rtp_packet_transport_;
return transport && transport->writable();
}
bool TgRtpTransport::SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options,
int flags) {
return SendPacket(false, packet, options, flags);
}
bool TgRtpTransport::SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options,
int flags) {
return SendPacket(true, packet, options, flags);
}
bool TgRtpTransport::SendPacket(bool rtcp,
rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options,
int flags) {
rtc::PacketTransportInternal* transport = rtcp && !rtcp_mux_enabled_
? rtcp_packet_transport_
: rtp_packet_transport_;
int ret = transport->SendPacket(packet->cdata<char>(), packet->size(),
options, flags);
if (ret != static_cast<int>(packet->size())) {
if (transport->GetError() == ENOTCONN) {
RTC_LOG(LS_WARNING) << "Got ENOTCONN from transport.";
SetReadyToSend(rtcp, false);
}
return false;
}
return true;
}
void TgRtpTransport::UpdateRtpHeaderExtensionMap(
const cricket::RtpHeaderExtensions& header_extensions) {
header_extension_map_ = RtpHeaderExtensionMap(header_extensions);
}
bool TgRtpTransport::RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
RtpPacketSinkInterface* sink) {
rtp_demuxer_.RemoveSink(sink);
if (!rtp_demuxer_.AddSink(criteria, sink)) {
RTC_LOG(LS_ERROR) << "Failed to register the sink for RTP demuxer.";
return false;
}
return true;
}
bool TgRtpTransport::UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) {
if (!rtp_demuxer_.RemoveSink(sink)) {
RTC_LOG(LS_ERROR) << "Failed to unregister the sink for RTP demuxer.";
return false;
}
return true;
}
void TgRtpTransport::DemuxPacket(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
webrtc::RtpPacketReceived parsed_packet(&header_extension_map_);
if (!parsed_packet.Parse(std::move(packet))) {
RTC_LOG(LS_ERROR)
<< "Failed to parse the incoming RTP packet before demuxing. Drop it.";
return;
}
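// A packet_time_us of -1 means the arrival time is unknown; otherwise the
// microsecond value is rounded (not truncated) to the nearest millisecond.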
if (packet_time_us != -1) {
parsed_packet.set_arrival_time_ms((packet_time_us + 500) / 1000);
}
if (!rtp_demuxer_.OnRtpPacket(parsed_packet)) {
RTC_LOG(LS_WARNING) << "Failed to demux RTP packet: "
<< RtpDemuxer::DescribePacket(parsed_packet);
}
}
bool TgRtpTransport::IsTransportWritable() {
auto rtcp_packet_transport =
rtcp_mux_enabled_ ? nullptr : rtcp_packet_transport_;
return rtp_packet_transport_ && rtp_packet_transport_->writable() &&
(!rtcp_packet_transport || rtcp_packet_transport->writable());
}
void TgRtpTransport::OnReadyToSend(rtc::PacketTransportInternal* transport) {
SetReadyToSend(transport == rtcp_packet_transport_, true);
}
void TgRtpTransport::OnNetworkRouteChanged(
absl::optional<rtc::NetworkRoute> network_route) {
SignalNetworkRouteChanged(network_route);
}
void TgRtpTransport::OnWritableState(
rtc::PacketTransportInternal* packet_transport) {
RTC_DCHECK(packet_transport == rtp_packet_transport_ ||
packet_transport == rtcp_packet_transport_);
SignalWritableState(IsTransportWritable());
}
void TgRtpTransport::OnSentPacket(rtc::PacketTransportInternal* packet_transport,
const rtc::SentPacket& sent_packet) {
RTC_DCHECK(packet_transport == rtp_packet_transport_ ||
packet_transport == rtcp_packet_transport_);
SignalSentPacket(sent_packet);
}
void TgRtpTransport::OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
DemuxPacket(packet, packet_time_us);
}
void TgRtpTransport::OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us) {
SignalRtcpPacketReceived(&packet, packet_time_us);
}
void TgRtpTransport::OnReadPacket(rtc::PacketTransportInternal* transport,
const char* data,
size_t len,
const int64_t& packet_time_us,
int flags) {
TRACE_EVENT0("webrtc", "TgRtpTransport::OnReadPacket");
// When using RTCP multiplexing we might get RTCP packets on the RTP
// transport. We check the RTP payload type to determine if it is RTCP.
auto array_view = rtc::MakeArrayView(data, len);
cricket::RtpPacketType packet_type = cricket::InferRtpPacketType(array_view);
// Filter out the packet that is neither RTP nor RTCP.
if (packet_type == cricket::RtpPacketType::kUnknown) {
return;
}
// Protect ourselves against crazy data.
if (!cricket::IsValidRtpPacketSize(packet_type, len)) {
RTC_LOG(LS_ERROR) << "Dropping incoming "
<< cricket::RtpPacketTypeToString(packet_type)
<< " packet: wrong size=" << len;
return;
}
rtc::CopyOnWriteBuffer packet(data, len);
if (packet_type == cricket::RtpPacketType::kRtcp) {
OnRtcpPacketReceived(std::move(packet), packet_time_us);
} else {
OnRtpPacketReceived(std::move(packet), packet_time_us);
}
}
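// A minimal sketch of the usual rtcp-mux demux heuristic (this file relies
// on cricket::InferRtpPacketType; the helper below is an illustration, not
// the cricket implementation): RTCP packet types occupy 64-95 in the second
// header byte, a range RTP payload types avoid (RFC 5761).
//
//   bool LooksLikeRtcp(const uint8_t* data, size_t len) {
//     if (len < 2) return false;
//     uint8_t pt = data[1] & 0x7F;  // Strip the RTP marker bit.
//     return pt >= 64 && pt < 96;
//   }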
void TgRtpTransport::SetReadyToSend(bool rtcp, bool ready) {
if (rtcp) {
rtcp_ready_to_send_ = ready;
} else {
rtp_ready_to_send_ = ready;
}
MaybeSignalReadyToSend();
}
void TgRtpTransport::MaybeSignalReadyToSend() {
bool ready_to_send =
rtp_ready_to_send_ && (rtcp_ready_to_send_ || rtcp_mux_enabled_);
if (ready_to_send != ready_to_send_) {
ready_to_send_ = ready_to_send;
SignalReadyToSend(ready_to_send);
}
}
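// Truth table for the edge-triggered signal above: with rtcp-mux enabled
// only the RTP leg matters; otherwise both legs must be ready.
//
//   rtp_ready | rtcp_ready | rtcp_mux || ready_to_send
//       1     |     x      |    1     ||       1
//       1     |     1      |    0     ||       1
//       1     |     0      |    0     ||       0
//       0     |     x      |    x     ||       0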
} // namespace webrtc

View File

@ -1,133 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_PC_RTP_TRANSPORT_H_
#define TG_PC_RTP_TRANSPORT_H_
#include <string>
#include "call/rtp_demuxer.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "pc/rtp_transport_internal.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
namespace rtc {
class CopyOnWriteBuffer;
struct PacketOptions;
class PacketTransportInternal;
} // namespace rtc
namespace webrtc {
class TgRtpTransport : public RtpTransportInternal {
public:
TgRtpTransport(const TgRtpTransport&) = delete;
TgRtpTransport& operator=(const TgRtpTransport&) = delete;
explicit TgRtpTransport(bool rtcp_mux_enabled)
: rtcp_mux_enabled_(rtcp_mux_enabled) {}
bool rtcp_mux_enabled() const override { return rtcp_mux_enabled_; }
void SetRtcpMuxEnabled(bool enable) override;
const std::string& transport_name() const override;
int SetRtpOption(rtc::Socket::Option opt, int value) override;
int SetRtcpOption(rtc::Socket::Option opt, int value) override;
rtc::PacketTransportInternal* rtp_packet_transport() const {
return rtp_packet_transport_;
}
void SetRtpPacketTransport(rtc::PacketTransportInternal* rtp);
rtc::PacketTransportInternal* rtcp_packet_transport() const {
return rtcp_packet_transport_;
}
void SetRtcpPacketTransport(rtc::PacketTransportInternal* rtcp);
bool IsReadyToSend() const override { return ready_to_send_; }
bool IsWritable(bool rtcp) const override;
bool SendRtpPacket(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options,
int flags) override;
bool SendRtcpPacket(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options,
int flags) override;
bool IsSrtpActive() const override { return false; }
void UpdateRtpHeaderExtensionMap(
const cricket::RtpHeaderExtensions& header_extensions) override;
bool RegisterRtpDemuxerSink(const RtpDemuxerCriteria& criteria,
RtpPacketSinkInterface* sink) override;
bool UnregisterRtpDemuxerSink(RtpPacketSinkInterface* sink) override;
protected:
// These methods will be used in the subclasses.
void DemuxPacket(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us);
bool SendPacket(bool rtcp,
rtc::CopyOnWriteBuffer* packet,
const rtc::PacketOptions& options,
int flags);
// Overridden by SrtpTransport.
virtual void OnNetworkRouteChanged(
absl::optional<rtc::NetworkRoute> network_route);
virtual void OnRtpPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us);
virtual void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer packet,
int64_t packet_time_us);
// Overridden by SrtpTransport and DtlsSrtpTransport.
virtual void OnWritableState(rtc::PacketTransportInternal* packet_transport);
private:
void OnReadyToSend(rtc::PacketTransportInternal* transport);
void OnSentPacket(rtc::PacketTransportInternal* packet_transport,
const rtc::SentPacket& sent_packet);
void OnReadPacket(rtc::PacketTransportInternal* transport,
const char* data,
size_t len,
const int64_t& packet_time_us,
int flags);
// Updates "ready to send" for an individual channel and fires
// SignalReadyToSend.
void SetReadyToSend(bool rtcp, bool ready);
void MaybeSignalReadyToSend();
bool IsTransportWritable();
bool rtcp_mux_enabled_;
rtc::PacketTransportInternal* rtp_packet_transport_ = nullptr;
rtc::PacketTransportInternal* rtcp_packet_transport_ = nullptr;
bool ready_to_send_ = false;
bool rtp_ready_to_send_ = false;
bool rtcp_ready_to_send_ = false;
RtpDemuxer rtp_demuxer_;
// Used for identifying the MID for RtpDemuxer.
RtpHeaderExtensionMap header_extension_map_;
};
} // namespace webrtc
#endif // TG_PC_RTP_TRANSPORT_H_

View File

@ -1,501 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "tg_webrtc_session_description_factory.h"
#include <stddef.h>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
#include "absl/types/optional.h"
#include "api/jsep.h"
#include "api/jsep_session_description.h"
#include "api/rtc_error.h"
#include "pc/session_description.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/ssl_identity.h"
#include "rtc_base/ssl_stream_adapter.h"
#include "rtc_base/string_encode.h"
#include "tg_peer_connection.h"
using cricket::MediaSessionOptions;
using rtc::UniqueRandomIdGenerator;
namespace webrtc {
namespace {
static const char kFailedDueToIdentityFailed[] =
" failed because DTLS identity request failed";
static const char kFailedDueToSessionShutdown[] =
" failed because the session was shut down";
static const uint64_t kInitSessionVersion = 2;
// Check that each sender has a unique ID.
static bool ValidMediaSessionOptions(
const cricket::MediaSessionOptions& session_options) {
std::vector<cricket::SenderOptions> sorted_senders;
for (const cricket::MediaDescriptionOptions& media_description_options :
session_options.media_description_options) {
sorted_senders.insert(sorted_senders.end(),
media_description_options.sender_options.begin(),
media_description_options.sender_options.end());
}
absl::c_sort(sorted_senders, [](const cricket::SenderOptions& sender1,
const cricket::SenderOptions& sender2) {
return sender1.track_id < sender2.track_id;
});
return absl::c_adjacent_find(sorted_senders,
[](const cricket::SenderOptions& sender1,
const cricket::SenderOptions& sender2) {
return sender1.track_id == sender2.track_id;
}) == sorted_senders.end();
}
enum {
MSG_CREATE_SESSIONDESCRIPTION_SUCCESS,
MSG_CREATE_SESSIONDESCRIPTION_FAILED,
MSG_USE_CONSTRUCTOR_CERTIFICATE
};
struct CreateSessionDescriptionMsg : public rtc::MessageData {
explicit CreateSessionDescriptionMsg(
webrtc::CreateSessionDescriptionObserver* observer,
RTCError error_in)
: observer(observer), error(std::move(error_in)) {}
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserver> observer;
RTCError error;
std::unique_ptr<webrtc::SessionDescriptionInterface> description;
};
} // namespace
void TgWebRtcCertificateGeneratorCallback::OnFailure() {
SignalRequestFailed();
}
void TgWebRtcCertificateGeneratorCallback::OnSuccess(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
SignalCertificateReady(certificate);
}
// static
void TgWebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription(
const SessionDescriptionInterface* source_desc,
const std::string& content_name,
SessionDescriptionInterface* dest_desc) {
if (!source_desc) {
return;
}
const cricket::ContentInfos& contents =
source_desc->description()->contents();
const cricket::ContentInfo* cinfo =
source_desc->description()->GetContentByName(content_name);
if (!cinfo) {
return;
}
size_t mediasection_index = static_cast<size_t>(cinfo - &contents[0]);
const IceCandidateCollection* source_candidates =
source_desc->candidates(mediasection_index);
const IceCandidateCollection* dest_candidates =
dest_desc->candidates(mediasection_index);
if (!source_candidates || !dest_candidates) {
return;
}
for (size_t n = 0; n < source_candidates->count(); ++n) {
const IceCandidateInterface* new_candidate = source_candidates->at(n);
if (!dest_candidates->HasCandidate(new_candidate)) {
dest_desc->AddCandidate(source_candidates->at(n));
}
}
}
TgWebRtcSessionDescriptionFactory::TgWebRtcSessionDescriptionFactory(
rtc::Thread* signaling_thread,
cricket::ChannelManager* channel_manager,
TgPeerConnection* pc,
const std::string& session_id,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
UniqueRandomIdGenerator* ssrc_generator)
: signaling_thread_(signaling_thread),
session_desc_factory_(channel_manager,
&transport_desc_factory_,
ssrc_generator),
// RFC 4566 suggested a Network Time Protocol (NTP) format timestamp
// as the session id and session version. To simplify, it should be fine
// to just use a random number as session id and start version from
// |kInitSessionVersion|.
session_version_(kInitSessionVersion),
cert_generator_(std::move(cert_generator)),
pc_(pc),
session_id_(session_id),
certificate_request_state_(CERTIFICATE_NOT_NEEDED) {
RTC_DCHECK(signaling_thread_);
RTC_DCHECK(!(cert_generator_ && certificate));
bool dtls_enabled = cert_generator_ || certificate;
// SRTP-SDES is disabled if DTLS is on.
SetSdesPolicy(dtls_enabled ? cricket::SEC_DISABLED : cricket::SEC_REQUIRED);
if (!dtls_enabled) {
RTC_LOG(LS_VERBOSE) << "DTLS-SRTP disabled.";
return;
}
if (certificate) {
// Use |certificate|.
certificate_request_state_ = CERTIFICATE_WAITING;
RTC_LOG(LS_VERBOSE) << "DTLS-SRTP enabled; has certificate parameter.";
// We already have a certificate but we wait to do |SetIdentity|; if we do
// it in the constructor then the caller has not had a chance to connect to
// |SignalCertificateReady|.
signaling_thread_->Post(
RTC_FROM_HERE, this, MSG_USE_CONSTRUCTOR_CERTIFICATE,
new rtc::ScopedRefMessageData<rtc::RTCCertificate>(certificate));
} else {
// Generate certificate.
certificate_request_state_ = CERTIFICATE_WAITING;
rtc::scoped_refptr<TgWebRtcCertificateGeneratorCallback> callback(
new rtc::RefCountedObject<TgWebRtcCertificateGeneratorCallback>());
callback->SignalRequestFailed.connect(
this, &TgWebRtcSessionDescriptionFactory::OnCertificateRequestFailed);
callback->SignalCertificateReady.connect(
this, &TgWebRtcSessionDescriptionFactory::SetCertificate);
rtc::KeyParams key_params = rtc::KeyParams();
RTC_LOG(LS_VERBOSE)
<< "DTLS-SRTP enabled; sending DTLS identity request (key type: "
<< key_params.type() << ").";
// Request certificate. This happens asynchronously, so that the caller gets
// a chance to connect to |SignalCertificateReady|.
cert_generator_->GenerateCertificateAsync(key_params, absl::nullopt,
callback);
}
}
TgWebRtcSessionDescriptionFactory::~TgWebRtcSessionDescriptionFactory() {
RTC_DCHECK(signaling_thread_->IsCurrent());
// Fail any requests that were asked for before identity generation completed.
FailPendingRequests(kFailedDueToSessionShutdown);
// Process all pending notifications in the message queue. If we don't do
// this, requests will linger and not know they succeeded or failed.
rtc::MessageList list;
signaling_thread_->Clear(this, rtc::MQID_ANY, &list);
for (auto& msg : list) {
if (msg.message_id != MSG_USE_CONSTRUCTOR_CERTIFICATE) {
OnMessage(&msg);
} else {
// Skip MSG_USE_CONSTRUCTOR_CERTIFICATE because we don't want to trigger
// SetIdentity-related callbacks in the destructor. This can be a problem
// when WebRtcSession listens to the callback but it was the WebRtcSession
// destructor that caused TgWebRtcSessionDescriptionFactory's destruction.
// The callback is then ignored, leaking memory allocated by OnMessage for
// MSG_USE_CONSTRUCTOR_CERTIFICATE.
delete msg.pdata;
}
}
}
void TgWebRtcSessionDescriptionFactory::CreateOffer(
CreateSessionDescriptionObserver* observer,
const PeerConnectionInterface::RTCOfferAnswerOptions& options,
const cricket::MediaSessionOptions& session_options) {
std::string error = "CreateOffer";
if (certificate_request_state_ == CERTIFICATE_FAILED) {
error += kFailedDueToIdentityFailed;
RTC_LOG(LS_ERROR) << error;
PostCreateSessionDescriptionFailed(observer, error);
return;
}
if (!ValidMediaSessionOptions(session_options)) {
error += " called with invalid session options";
RTC_LOG(LS_ERROR) << error;
PostCreateSessionDescriptionFailed(observer, error);
return;
}
TgCreateSessionDescriptionRequest request(
TgCreateSessionDescriptionRequest::kOffer, observer, session_options);
if (certificate_request_state_ == CERTIFICATE_WAITING) {
create_session_description_requests_.push(request);
} else {
RTC_DCHECK(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
InternalCreateOffer(request);
}
}
void TgWebRtcSessionDescriptionFactory::CreateAnswer(
CreateSessionDescriptionObserver* observer,
const cricket::MediaSessionOptions& session_options) {
std::string error = "CreateAnswer";
if (certificate_request_state_ == CERTIFICATE_FAILED) {
error += kFailedDueToIdentityFailed;
RTC_LOG(LS_ERROR) << error;
PostCreateSessionDescriptionFailed(observer, error);
return;
}
if (!pc_->remote_description()) {
error += " can't be called before SetRemoteDescription.";
RTC_LOG(LS_ERROR) << error;
PostCreateSessionDescriptionFailed(observer, error);
return;
}
if (pc_->remote_description()->GetType() != SdpType::kOffer) {
error += " failed because remote_description is not an offer.";
RTC_LOG(LS_ERROR) << error;
PostCreateSessionDescriptionFailed(observer, error);
return;
}
if (!ValidMediaSessionOptions(session_options)) {
error += " called with invalid session options.";
RTC_LOG(LS_ERROR) << error;
PostCreateSessionDescriptionFailed(observer, error);
return;
}
TgCreateSessionDescriptionRequest request(
TgCreateSessionDescriptionRequest::kAnswer, observer, session_options);
if (certificate_request_state_ == CERTIFICATE_WAITING) {
create_session_description_requests_.push(request);
} else {
RTC_DCHECK(certificate_request_state_ == CERTIFICATE_SUCCEEDED ||
certificate_request_state_ == CERTIFICATE_NOT_NEEDED);
InternalCreateAnswer(request);
}
}
void TgWebRtcSessionDescriptionFactory::SetSdesPolicy(
cricket::SecurePolicy secure_policy) {
session_desc_factory_.set_secure(secure_policy);
}
cricket::SecurePolicy TgWebRtcSessionDescriptionFactory::SdesPolicy() const {
return session_desc_factory_.secure();
}
void TgWebRtcSessionDescriptionFactory::OnMessage(rtc::Message* msg) {
switch (msg->message_id) {
case MSG_CREATE_SESSIONDESCRIPTION_SUCCESS: {
CreateSessionDescriptionMsg* param =
static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
param->observer->OnSuccess(param->description.release());
delete param;
break;
}
case MSG_CREATE_SESSIONDESCRIPTION_FAILED: {
CreateSessionDescriptionMsg* param =
static_cast<CreateSessionDescriptionMsg*>(msg->pdata);
param->observer->OnFailure(std::move(param->error));
delete param;
break;
}
case MSG_USE_CONSTRUCTOR_CERTIFICATE: {
rtc::ScopedRefMessageData<rtc::RTCCertificate>* param =
static_cast<rtc::ScopedRefMessageData<rtc::RTCCertificate>*>(
msg->pdata);
RTC_LOG(LS_INFO) << "Using certificate supplied to the constructor.";
SetCertificate(param->data());
delete param;
break;
}
default:
RTC_NOTREACHED();
break;
}
}
void TgWebRtcSessionDescriptionFactory::InternalCreateOffer(
TgCreateSessionDescriptionRequest request) {
if (pc_->local_description()) {
// If the needs-ice-restart flag is set as described by JSEP, we should
// generate an offer with a new ufrag/password to trigger an ICE restart.
for (cricket::MediaDescriptionOptions& options :
request.options.media_description_options) {
if (pc_->NeedsIceRestart(options.mid)) {
options.transport_options.ice_restart = true;
}
}
}
std::unique_ptr<cricket::SessionDescription> desc =
session_desc_factory_.CreateOffer(
request.options, pc_->local_description()
? pc_->local_description()->description()
: nullptr);
if (!desc) {
PostCreateSessionDescriptionFailed(request.observer,
"Failed to initialize the offer.");
return;
}
// RFC 3264
// When issuing an offer that modifies the session,
// the "o=" line of the new SDP MUST be identical to that in the
// previous SDP, except that the version in the origin field MUST
// increment by one from the previous SDP.
// Just increase the version number by one each time a new offer is
// created, regardless of whether it's identical to the previous one.
// |session_version_| is a uint64_t, so wraparound should not happen.
RTC_DCHECK(session_version_ + 1 > session_version_);
auto offer = std::make_unique<JsepSessionDescription>(
SdpType::kOffer, std::move(desc), session_id_,
rtc::ToString(session_version_++));
if (pc_->local_description()) {
for (const cricket::MediaDescriptionOptions& options :
request.options.media_description_options) {
if (!options.transport_options.ice_restart) {
CopyCandidatesFromSessionDescription(pc_->local_description(),
options.mid, offer.get());
}
}
}
PostCreateSessionDescriptionSucceeded(request.observer, std::move(offer));
}
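// Example of the RFC 3264 rule applied above (illustrative SDP, not output
// of this file): consecutive offers in one session keep the origin line
// identical except for the version field, which increments by one.
//
//   o=- 4962303333179871722 2 IN IP4 127.0.0.1   (previous offer)
//   o=- 4962303333179871722 3 IN IP4 127.0.0.1   (new offer)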
void TgWebRtcSessionDescriptionFactory::InternalCreateAnswer(
TgCreateSessionDescriptionRequest request) {
if (pc_->remote_description()) {
for (cricket::MediaDescriptionOptions& options :
request.options.media_description_options) {
// According to http://tools.ietf.org/html/rfc5245#section-9.2.1.1
// an answer should also contain new ICE ufrag and password if an offer
// has been received with new ufrag and password.
options.transport_options.ice_restart =
pc_->IceRestartPending(options.mid);
// We should pass the current SSL role to the transport description
// factory, if there is already an existing ongoing session.
rtc::SSLRole ssl_role;
if (pc_->GetSslRole(options.mid, &ssl_role)) {
options.transport_options.prefer_passive_role =
(rtc::SSL_SERVER == ssl_role);
}
}
}
std::unique_ptr<cricket::SessionDescription> desc =
session_desc_factory_.CreateAnswer(
pc_->remote_description() ? pc_->remote_description()->description()
: nullptr,
request.options,
pc_->local_description() ? pc_->local_description()->description()
: nullptr);
if (!desc) {
PostCreateSessionDescriptionFailed(request.observer,
"Failed to initialize the answer.");
return;
}
// RFC 3264
// If the answer is different from the offer in any way (different IP
// addresses, ports, etc.), the origin line MUST be different in the answer.
// In that case, the version number in the "o=" line of the answer is
// unrelated to the version number in the o line of the offer.
// Get a new version number by increasing |session_version_|, which is a
// uint64_t, so wraparound should not happen.
RTC_DCHECK(session_version_ + 1 > session_version_);
auto answer = std::make_unique<JsepSessionDescription>(
SdpType::kAnswer, std::move(desc), session_id_,
rtc::ToString(session_version_++));
if (pc_->local_description()) {
// Include all local ICE candidates in the SessionDescription unless
// the remote peer has requested an ICE restart.
for (const cricket::MediaDescriptionOptions& options :
request.options.media_description_options) {
if (!options.transport_options.ice_restart) {
CopyCandidatesFromSessionDescription(pc_->local_description(),
options.mid, answer.get());
}
}
}
PostCreateSessionDescriptionSucceeded(request.observer, std::move(answer));
}
void TgWebRtcSessionDescriptionFactory::FailPendingRequests(
const std::string& reason) {
RTC_DCHECK(signaling_thread_->IsCurrent());
while (!create_session_description_requests_.empty()) {
const TgCreateSessionDescriptionRequest& request =
create_session_description_requests_.front();
PostCreateSessionDescriptionFailed(
request.observer,
((request.type == TgCreateSessionDescriptionRequest::kOffer)
? "CreateOffer"
: "CreateAnswer") +
reason);
create_session_description_requests_.pop();
}
}
void TgWebRtcSessionDescriptionFactory::PostCreateSessionDescriptionFailed(
CreateSessionDescriptionObserver* observer,
const std::string& error) {
CreateSessionDescriptionMsg* msg = new CreateSessionDescriptionMsg(
observer, RTCError(RTCErrorType::INTERNAL_ERROR, std::string(error)));
signaling_thread_->Post(RTC_FROM_HERE, this,
MSG_CREATE_SESSIONDESCRIPTION_FAILED, msg);
RTC_LOG(LS_ERROR) << "Create SDP failed: " << error;
}
void TgWebRtcSessionDescriptionFactory::PostCreateSessionDescriptionSucceeded(
CreateSessionDescriptionObserver* observer,
std::unique_ptr<SessionDescriptionInterface> description) {
CreateSessionDescriptionMsg* msg =
new CreateSessionDescriptionMsg(observer, RTCError::OK());
msg->description = std::move(description);
signaling_thread_->Post(RTC_FROM_HERE, this,
MSG_CREATE_SESSIONDESCRIPTION_SUCCESS, msg);
}
void TgWebRtcSessionDescriptionFactory::OnCertificateRequestFailed() {
RTC_DCHECK(signaling_thread_->IsCurrent());
RTC_LOG(LS_ERROR) << "Asynchronous certificate generation request failed.";
certificate_request_state_ = CERTIFICATE_FAILED;
FailPendingRequests(kFailedDueToIdentityFailed);
}
void TgWebRtcSessionDescriptionFactory::SetCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) {
RTC_DCHECK(certificate);
RTC_LOG(LS_VERBOSE) << "Setting new certificate.";
certificate_request_state_ = CERTIFICATE_SUCCEEDED;
SignalCertificateReady(certificate);
transport_desc_factory_.set_certificate(certificate);
transport_desc_factory_.set_secure(cricket::SEC_ENABLED);
while (!create_session_description_requests_.empty()) {
if (create_session_description_requests_.front().type ==
TgCreateSessionDescriptionRequest::kOffer) {
InternalCreateOffer(create_session_description_requests_.front());
} else {
InternalCreateAnswer(create_session_description_requests_.front());
}
create_session_description_requests_.pop();
}
}
} // namespace webrtc

View File

@ -1,167 +0,0 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TG_PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_
#define TG_PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_
#include <stdint.h>
#include <memory>
#include <queue>
#include <string>
#include "api/jsep.h"
#include "api/peer_connection_interface.h"
#include "api/scoped_refptr.h"
#include "p2p/base/transport_description.h"
#include "p2p/base/transport_description_factory.h"
#include "pc/media_session.h"
#include "pc/peer_connection_internal.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_base/message_handler.h"
#include "rtc_base/message_queue.h"
#include "rtc_base/rtc_certificate.h"
#include "rtc_base/rtc_certificate_generator.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
#include "rtc_base/unique_id_generator.h"
namespace webrtc {
class TgPeerConnection;
// DTLS certificate request callback class.
class TgWebRtcCertificateGeneratorCallback
: public rtc::RTCCertificateGeneratorCallback,
public sigslot::has_slots<> {
public:
// |rtc::RTCCertificateGeneratorCallback| overrides.
void OnSuccess(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate) override;
void OnFailure() override;
sigslot::signal0<> SignalRequestFailed;
sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
SignalCertificateReady;
};
struct TgCreateSessionDescriptionRequest {
enum Type {
kOffer,
kAnswer,
};
TgCreateSessionDescriptionRequest(Type type,
CreateSessionDescriptionObserver* observer,
const cricket::MediaSessionOptions& options)
: type(type), observer(observer), options(options) {}
Type type;
rtc::scoped_refptr<CreateSessionDescriptionObserver> observer;
cricket::MediaSessionOptions options;
};
// This class is used to create offer/answer session descriptions. Certificates
// for WebRtcSession/DTLS are either supplied at construction or generated
// asynchronously. It queues the create offer/answer request until the
// certificate generation has completed, i.e. when OnCertificateRequestFailed or
// OnCertificateReady is called.
class TgWebRtcSessionDescriptionFactory : public rtc::MessageHandler,
public sigslot::has_slots<> {
public:
// Can specify either a |cert_generator| or |certificate| to enable DTLS. If
// a certificate generator is given, starts generating the certificate
// asynchronously. If a certificate is given, will use that for identifying
// over DTLS. If neither is specified, DTLS is disabled.
TgWebRtcSessionDescriptionFactory(
rtc::Thread* signaling_thread,
cricket::ChannelManager* channel_manager,
TgPeerConnection* pc,
const std::string& session_id,
std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator,
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate,
rtc::UniqueRandomIdGenerator* ssrc_generator);
virtual ~TgWebRtcSessionDescriptionFactory();
static void CopyCandidatesFromSessionDescription(
const SessionDescriptionInterface* source_desc,
const std::string& content_name,
SessionDescriptionInterface* dest_desc);
void CreateOffer(
CreateSessionDescriptionObserver* observer,
const PeerConnectionInterface::RTCOfferAnswerOptions& options,
const cricket::MediaSessionOptions& session_options);
void CreateAnswer(CreateSessionDescriptionObserver* observer,
const cricket::MediaSessionOptions& session_options);
void SetSdesPolicy(cricket::SecurePolicy secure_policy);
cricket::SecurePolicy SdesPolicy() const;
void set_enable_encrypted_rtp_header_extensions(bool enable) {
session_desc_factory_.set_enable_encrypted_rtp_header_extensions(enable);
}
void set_is_unified_plan(bool is_unified_plan) {
session_desc_factory_.set_is_unified_plan(is_unified_plan);
}
sigslot::signal1<const rtc::scoped_refptr<rtc::RTCCertificate>&>
SignalCertificateReady;
// For testing.
bool waiting_for_certificate_for_testing() const {
return certificate_request_state_ == CERTIFICATE_WAITING;
}
private:
enum CertificateRequestState {
CERTIFICATE_NOT_NEEDED,
CERTIFICATE_WAITING,
CERTIFICATE_SUCCEEDED,
CERTIFICATE_FAILED,
};
// MessageHandler implementation.
virtual void OnMessage(rtc::Message* msg);
void InternalCreateOffer(TgCreateSessionDescriptionRequest request);
void InternalCreateAnswer(TgCreateSessionDescriptionRequest request);
// Posts failure notifications for all pending session description requests.
void FailPendingRequests(const std::string& reason);
void PostCreateSessionDescriptionFailed(
CreateSessionDescriptionObserver* observer,
const std::string& error);
void PostCreateSessionDescriptionSucceeded(
CreateSessionDescriptionObserver* observer,
std::unique_ptr<SessionDescriptionInterface> description);
void OnCertificateRequestFailed();
void SetCertificate(
const rtc::scoped_refptr<rtc::RTCCertificate>& certificate);
std::queue<TgCreateSessionDescriptionRequest>
create_session_description_requests_;
rtc::Thread* const signaling_thread_;
cricket::TransportDescriptionFactory transport_desc_factory_;
cricket::MediaSessionDescriptionFactory session_desc_factory_;
uint64_t session_version_;
const std::unique_ptr<rtc::RTCCertificateGeneratorInterface> cert_generator_;
// TODO(jiayl): remove the dependency on peer connection once bug 2264 is
// fixed.
TgPeerConnection* const pc_;
const std::string session_id_;
CertificateRequestState certificate_request_state_;
RTC_DISALLOW_COPY_AND_ASSIGN(TgWebRtcSessionDescriptionFactory);
};
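// Assumed usage sketch (observer and option names are placeholders): the
// factory can be driven immediately after construction; while certificate
// generation is pending, requests queue as described above and are served
// from SetCertificate().
//
//   factory->CreateOffer(observer.get(), offer_answer_options,
//                        session_options);
//   // If CERTIFICATE_WAITING, the request waits in
//   // create_session_description_requests_; otherwise it runs right away.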
} // namespace webrtc
#endif // TG_PC_WEBRTC_SESSION_DESCRIPTION_FACTORY_H_