Mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-07-30 15:10:56 +00:00
Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
Commit a38fb8b93f
@@ -106,6 +106,7 @@ public protocol PresentationCall: class {
func toggleIsMuted()
func setIsMuted(_ value: Bool)
func requestVideo()
func acceptVideo()
func setOutgoingVideoIsPaused(_ isPaused: Bool)
func switchVideoCamera()
func setCurrentAudioOutput(_ output: AudioSessionOutput)
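The hunk above splits video upgrades into two protocol calls: requestVideo() starts an outgoing upgrade, and the newly added acceptVideo() confirms an upgrade requested by the remote peer. A minimal conforming stub, assuming only the declarations shown in this hunk (the state enum below is illustrative, not from the diff):

    // Illustrative stub of the request/accept split in the protocol above.
    // VideoUpgradeState is a hypothetical name for this sketch; the real
    // state handling lives in PresentationCallImpl.
    final class StubPresentationCall {
        private enum VideoUpgradeState {
            case none
            case outgoingRequested // we called requestVideo()
            case incomingRequested // remote asked; acceptVideo() confirms
            case active
        }
        private var videoUpgradeState: VideoUpgradeState = .none

        func requestVideo() {
            // Caller side: ask the peer to upgrade the call to video.
            if case .none = self.videoUpgradeState {
                self.videoUpgradeState = .outgoingRequested
            }
        }

        func acceptVideo() {
            // Callee side: confirm a pending incoming upgrade request.
            if case .incomingRequested = self.videoUpgradeState {
                self.videoUpgradeState = .active
            }
        }
    }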
@@ -245,49 +245,89 @@ final class CallControllerButtonsNode: ASDisplayNode {

height = smallButtonSize + topBottomSpacing + largeButtonSize + max(bottomInset + 32.0, 46.0)
case .active:
var topButtons: [ButtonDescription] = []

let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .builtin
case .bluetooth:
soundOutput = .bluetooth
}

switch videoState {
case .active, .incomingRequested, .outgoingRequested, .possible:
case .active, .incomingRequested, .outgoingRequested:
let isCameraActive: Bool
if case .possible = videoState {
isCameraActive = false
} else {
isCameraActive = !self.isCameraPaused
}

var topButtons: [ButtonDescription] = []

let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .builtin
case .bluetooth:
soundOutput = .bluetooth
}

topButtons.append(.enableCamera(isCameraActive))
topButtons.append(.mute(isMuted))
topButtons.append(.switchCamera)
case .notAvailable:
topButtons.append(.mute(isMuted))
topButtons.append(.soundOutput(soundOutput))
topButtons.append(.end(.end))

let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}

height = smallButtonSize + max(bottomInset + 19.0, 46.0)
case .notAvailable, .possible:
var topButtons: [ButtonDescription] = []
var bottomButtons: [ButtonDescription] = []

let soundOutput: ButtonDescription.SoundOutput
switch speakerMode {
case .none, .builtin:
soundOutput = .builtin
case .speaker:
soundOutput = .speaker
case .headphones:
soundOutput = .bluetooth
case .bluetooth:
soundOutput = .bluetooth
}

topButtons.append(.enableCamera(false))
topButtons.append(.mute(self.isMuted))
topButtons.append(.switchCamera)

let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}

bottomButtons.append(.end(.outgoing))

let bottomButtonsContentWidth = CGFloat(bottomButtons.count) * largeButtonSize
let bottomButtonsAvailableSpacingWidth = width - bottomButtonsContentWidth - minLargeButtonSideInset * 2.0
let bottomButtonsSpacing = min(maxLargeButtonSpacing, bottomButtonsAvailableSpacingWidth / CGFloat(bottomButtons.count - 1))
let bottomButtonsWidth = CGFloat(bottomButtons.count) * largeButtonSize + CGFloat(bottomButtons.count - 1) * bottomButtonsSpacing
var bottomButtonsLeftOffset = floor((width - bottomButtonsWidth) / 2.0)
for button in bottomButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: bottomButtonsLeftOffset, y: smallButtonSize + topBottomSpacing), size: CGSize(width: largeButtonSize, height: largeButtonSize))))
bottomButtonsLeftOffset += largeButtonSize + bottomButtonsSpacing
}

height = smallButtonSize + topBottomSpacing + largeButtonSize + max(bottomInset + 32.0, 46.0)
}

topButtons.append(.end(.end))

let topButtonsContentWidth = CGFloat(topButtons.count) * smallButtonSize
let topButtonsAvailableSpacingWidth = width - topButtonsContentWidth - minSmallButtonSideInset * 2.0
let topButtonsSpacing = min(maxSmallButtonSpacing, topButtonsAvailableSpacingWidth / CGFloat(topButtons.count - 1))
let topButtonsWidth = CGFloat(topButtons.count) * smallButtonSize + CGFloat(topButtons.count - 1) * topButtonsSpacing
var topButtonsLeftOffset = floor((width - topButtonsWidth) / 2.0)
for button in topButtons {
buttons.append(PlacedButton(button: button, frame: CGRect(origin: CGPoint(x: topButtonsLeftOffset, y: 0.0), size: CGSize(width: smallButtonSize, height: smallButtonSize))))
topButtonsLeftOffset += smallButtonSize + topButtonsSpacing
}

height = smallButtonSize + max(bottomInset + 19.0, 46.0)
}

let delayIncrement = 0.015
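Every layout pass in the hunk above repeats the same centering arithmetic: cap the spacing, then center the row. A standalone sketch of that computation, with hypothetical function and parameter names:

    import UIKit

    // Computes centered, evenly spaced frames for a row of square buttons,
    // mirroring the arithmetic repeated in the hunk above: spacing is capped
    // at a maximum, then the whole row is centered in the available width.
    func layoutButtonRow(count: Int, buttonSize: CGFloat, width: CGFloat, maxSpacing: CGFloat, sideInset: CGFloat, y: CGFloat) -> [CGRect] {
        guard count > 0 else { return [] }
        let contentWidth = CGFloat(count) * buttonSize
        let availableSpacingWidth = width - contentWidth - sideInset * 2.0
        let spacing = count > 1 ? min(maxSpacing, availableSpacingWidth / CGFloat(count - 1)) : 0.0
        let rowWidth = contentWidth + CGFloat(count - 1) * spacing
        var x = floor((width - rowWidth) / 2.0)
        var frames: [CGRect] = []
        for _ in 0..<count {
            frames.append(CGRect(x: x, y: y, width: buttonSize, height: buttonSize))
            x += buttonSize + spacing
        }
        return frames
    }

Capping the spacing first keeps wide layouts from scattering the buttons across the screen, and the floor keeps the frames on whole-point boundaries.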
@@ -156,6 +156,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private var expandedVideoNode: CallVideoNode?
private var minimizedVideoNode: CallVideoNode?
private var disableAnimationForExpandedVideoOnce: Bool = false
private var animationForExpandedVideoSnapshotView: UIView? = nil

private var outgoingVideoNodeCorner: VideoNodeCorner = .bottomRight
private let backButtonArrowNode: ASImageNode
@@ -297,7 +298,17 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}

self.buttonsNode.accept = { [weak self] in
self?.acceptCall?()
guard let strongSelf = self, let callState = strongSelf.callState else {
return
}
switch callState.state {
case .active, .connecting, .reconnecting:
strongSelf.call.acceptVideo()
case .ringing:
strongSelf.acceptCall?()
default:
break
}
}

self.buttonsNode.toggleVideo = { [weak self] in
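In the handler above the accept button is overloaded: on a ringing call it answers the call, while on an already established call it confirms a pending video upgrade. A reduced sketch of that dispatch, with stand-in types for the real call state and callbacks:

    // Reduced model of the accept-button dispatch in the closure above.
    enum CallState {
        case ringing
        case connecting
        case active
        case reconnecting
        case terminated
    }

    func handleAcceptButton(state: CallState, acceptCall: () -> Void, acceptVideo: () -> Void) {
        switch state {
        case .active, .connecting, .reconnecting:
            // Call already established: the button confirms a video upgrade.
            acceptVideo()
        case .ringing:
            // Incoming call: the button answers it.
            acceptCall()
        default:
            break
        }
    }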
@@ -447,21 +458,26 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}

if let incomingVideoNode = self.incomingVideoNodeValue {
let isActive: Bool
switch callState.remoteVideoState {
case .inactive:
isActive = false
case .active:
isActive = true
}
incomingVideoNode.updateIsBlurred(isBlurred: !isActive)
if isActive != self.videoPausedNode.alpha.isZero {
if isActive {
self.videoPausedNode.alpha = 0.0
self.videoPausedNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
} else {
self.videoPausedNode.alpha = 1.0
self.videoPausedNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
switch callState.state {
case .terminating, .terminated:
break
default:
let isActive: Bool
switch callState.remoteVideoState {
case .inactive:
isActive = false
case .active:
isActive = true
}
incomingVideoNode.updateIsBlurred(isBlurred: !isActive)
if isActive != self.videoPausedNode.alpha.isZero {
if isActive {
self.videoPausedNode.alpha = 0.0
self.videoPausedNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
} else {
self.videoPausedNode.alpha = 1.0
self.videoPausedNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
}
}
}
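The rewritten block above wraps the remote-video paused overlay in a guard on the call state, so the blur and alpha crossfade no longer run while the call is terminating. A compact model of the resulting logic, with plain types standing in for the nodes:

    // Compact model of the guarded overlay update in the hunk above.
    // The overlay alpha stands in for videoPausedNode.alpha.
    func updateRemoteVideoOverlay(callIsEnding: Bool, remoteVideoActive: Bool, overlayAlpha: inout Double) {
        // While terminating, leave the overlay untouched so teardown is not
        // interrupted by a late blur or crossfade.
        if callIsEnding {
            return
        }
        // Otherwise the overlay is visible exactly when remote video is inactive.
        overlayAlpha = remoteVideoActive ? 0.0 : 1.0
    }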
@@ -510,13 +526,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if case .reconnecting = callState.state {
isReconnecting = true
}
statusValue = .timer({ value in
if isReconnecting {
return strings.Call_StatusConnecting
} else {
return value
}
}, timestamp)
if self.keyTextData?.0 != keyVisualHash {
let text = stringForEmojiHashOfData(keyVisualHash, 4)!
self.keyTextData = (keyVisualHash, text)
@@ -531,7 +540,26 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
statusReception = reception
switch callState.videoState {
case .notAvailable, .active, .possible:
statusValue = .timer({ value in
if isReconnecting {
return strings.Call_StatusConnecting
} else {
return value
}
}, timestamp)
statusReception = reception
case .incomingRequested:
var text: String
text = self.presentationData.strings.Call_IncomingVideoCall
if !self.statusNode.subtitle.isEmpty {
text += "\n\(self.statusNode.subtitle)"
}
statusValue = .text(string: text, displayLogo: true)
case .outgoingRequested:
statusValue = .text(string: self.presentationData.strings.Call_StatusRequesting, displayLogo: false)
}
}
if self.shouldStayHiddenUntilConnection {
switch callState.state {
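Together, the @@ -510 and @@ -531 hunks move the status line under a switch on videoState: normal states keep the elapsed-time timer, while pending upgrade requests show a fixed string instead. A pure-function sketch of that selection; the enum and strings below are illustrative stand-ins for the real presentation strings:

    // Illustrative model of the status selection added above.
    enum VideoState {
        case notAvailable
        case possible
        case incomingRequested
        case outgoingRequested
        case active
    }

    enum CallStatus {
        case timer              // keep showing elapsed call time
        case text(String)       // show a fixed status string instead
    }

    func status(for videoState: VideoState, isReconnecting: Bool) -> CallStatus {
        switch videoState {
        case .notAvailable, .possible, .active:
            // Normal states: timer, unless the call is reconnecting.
            return isReconnecting ? .text("Connecting...") : .timer
        case .incomingRequested:
            return .text("Incoming video call")
        case .outgoingRequested:
            return .text("Requesting...")
        }
    }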
@@ -836,6 +864,16 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
if let expandedVideoNode = self.expandedVideoNode, expandedVideoNode.isReady {
if self.minimizedVideoDraggingPosition == nil {
if let animationForExpandedVideoSnapshotView = self.animationForExpandedVideoSnapshotView {
self.containerNode.view.addSubview(animationForExpandedVideoSnapshotView)
transition.updateAlpha(layer: animationForExpandedVideoSnapshotView.layer, alpha: 0.0, completion: { [weak animationForExpandedVideoSnapshotView] _ in
animationForExpandedVideoSnapshotView?.removeFromSuperview()
})
transition.updateTransformScale(layer: animationForExpandedVideoSnapshotView.layer, scale: previewVideoFrame.width / fullscreenVideoFrame.width)

transition.updatePosition(layer: animationForExpandedVideoSnapshotView.layer, position: CGPoint(x: previewVideoFrame.minX + previewVideoFrame.center.x / fullscreenVideoFrame.width * previewVideoFrame.width, y: previewVideoFrame.minY + previewVideoFrame.center.y / fullscreenVideoFrame.height * previewVideoFrame.height))
self.animationForExpandedVideoSnapshotView = nil
}
minimizedVideoTransition.updateFrame(node: minimizedVideoNode, frame: previewVideoFrame)
minimizedVideoNode.updateLayout(size: minimizedVideoNode.frame.size, cornerRadius: interpolate(from: 14.0, to: 24.0, value: self.pictureInPictureTransitionFraction), transition: minimizedVideoTransition)
}
@@ -843,6 +881,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
minimizedVideoNode.frame = fullscreenVideoFrame
minimizedVideoNode.updateLayout(size: layout.size, cornerRadius: 0.0, transition: minimizedVideoTransition)
}
self.animationForExpandedVideoSnapshotView = nil
}

let keyTextSize = self.keyButtonNode.frame.size
@@ -902,6 +941,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if let expandedVideoNode = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode {
let point = recognizer.location(in: recognizer.view)
if minimizedVideoNode.frame.contains(point) {
let copyView = minimizedVideoNode.view.snapshotView(afterScreenUpdates: false)
copyView?.frame = minimizedVideoNode.frame
self.expandedVideoNode = minimizedVideoNode
self.minimizedVideoNode = expandedVideoNode
if let supernode = expandedVideoNode.supernode {
@@ -909,6 +950,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
if let (layout, navigationBarHeight) = self.validLayout {
self.disableAnimationForExpandedVideoOnce = true
self.animationForExpandedVideoSnapshotView = copyView
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
} else {
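The @@ -836 through @@ -909 hunks implement the expanded/minimized video swap: the minimized node is snapshotted before the swap, and after relayout the snapshot is scaled and faded out over the promoted video. A schematic UIKit sketch of that pattern, detached from the ASDisplayNode types used in the diff:

    import UIKit

    // Schematic version of the snapshot-based swap animation above: keep a
    // snapshot of the view being promoted, relayout, then fade the snapshot out.
    final class VideoSwapAnimator {
        private weak var container: UIView?
        private var snapshot: UIView?

        init(container: UIView) {
            self.container = container
        }

        func prepareSwap(of view: UIView) {
            // Take a cheap snapshot before the frames change.
            let copy = view.snapshotView(afterScreenUpdates: false)
            copy?.frame = view.frame
            self.snapshot = copy
        }

        func finishSwap() {
            guard let container = self.container, let snapshot = self.snapshot else { return }
            self.snapshot = nil
            container.addSubview(snapshot)
            UIView.animate(withDuration: 0.3, animations: {
                snapshot.alpha = 0.0
            }, completion: { _ in
                snapshot.removeFromSuperview()
            })
        }
    }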
@@ -189,6 +189,7 @@ public final class PresentationCallImpl: PresentationCall {

private var callWasActive = false
private var shouldPresentCallRating = false
private var videoWasActive = false

private var sessionStateDisposable: Disposable?

@@ -269,7 +270,7 @@ public final class PresentationCallImpl: PresentationCall {
self.videoCapturer = OngoingCallVideoCapturer()
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .outgoingRequested, remoteVideoState: .inactive))
} else {
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: .notAvailable, remoteVideoState: .inactive))
self.statePromise.set(PresentationCallState(state: isOutgoing ? .waiting : .ringing, videoState: self.isVideoPossible ? .possible : .notAvailable, remoteVideoState: .inactive))
}

self.serializedData = serializedData
@@ -446,6 +447,7 @@ public final class PresentationCallImpl: PresentationCall {
mappedVideoState = .incomingRequested
case .active:
mappedVideoState = .active
self.videoWasActive = true
}
switch callContextState.remoteVideoState {
case .inactive:
@@ -461,7 +463,11 @@ public final class PresentationCallImpl: PresentationCall {
} else {
mappedVideoState = .notAvailable
}
mappedRemoteVideoState = .inactive
if videoWasActive {
mappedRemoteVideoState = .active
} else {
mappedRemoteVideoState = .inactive
}
}

switch sessionState.state {
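The two PresentationCallImpl hunks above introduce a sticky videoWasActive flag: once the call context has reported active video, a momentary gap in context state no longer snaps the remote video back to inactive. A minimal sketch of that mapping, with stand-in types:

    // Minimal model of the sticky remote-video mapping above.
    enum RemoteVideoState {
        case inactive
        case active
    }

    struct VideoStateMapper {
        private(set) var videoWasActive = false

        mutating func mapRemoteVideoState(contextState: RemoteVideoState?) -> RemoteVideoState {
            if let contextState = contextState {
                if case .active = contextState {
                    // Remember that video has been active at least once.
                    self.videoWasActive = true
                }
                return contextState
            } else {
                // No context state yet (e.g. during setup or reconnect):
                // keep reporting active video if it was active before.
                return self.videoWasActive ? .active : .inactive
            }
        }
    }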
@@ -739,10 +745,22 @@ public final class PresentationCallImpl: PresentationCall {
if self.videoCapturer == nil {
let videoCapturer = OngoingCallVideoCapturer()
self.videoCapturer = videoCapturer
}
if let videoCapturer = self.videoCapturer {
self.ongoingContext?.requestVideo(videoCapturer)
}
}

public func acceptVideo() {
if self.videoCapturer == nil {
let videoCapturer = OngoingCallVideoCapturer()
self.videoCapturer = videoCapturer
}
if let videoCapturer = self.videoCapturer {
self.ongoingContext?.acceptVideo(videoCapturer)
}
}

public func setOutgoingVideoIsPaused(_ isPaused: Bool) {
self.videoCapturer?.setIsVideoEnabled(!isPaused)
}
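requestVideo(), acceptVideo(), and makeOutgoingVideoView (next hunk) all repeat the same lazy creation of the shared OngoingCallVideoCapturer. One way to express the shared step, sketched with a hypothetical helper that is not part of the diff:

    // Hypothetical refactor of the repeated lazy-creation pattern above.
    // Capturer stands in for the real OngoingCallVideoCapturer type.
    final class LazyCapturerHolder<Capturer> {
        private let make: () -> Capturer
        private(set) var capturer: Capturer?

        init(make: @escaping () -> Capturer) {
            self.make = make
        }

        // Create the capturer on first use and reuse it afterwards, so the
        // request, accept, and preview paths all share one capture pipeline.
        func ensureVideoCapturer() -> Capturer {
            if let capturer = self.capturer {
                return capturer
            }
            let capturer = self.make()
            self.capturer = capturer
            return capturer
        }
    }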
@@ -784,6 +802,11 @@ public final class PresentationCallImpl: PresentationCall {
}

public func makeOutgoingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void) {
if self.videoCapturer == nil {
let videoCapturer = OngoingCallVideoCapturer()
self.videoCapturer = videoCapturer
}

self.videoCapturer?.makeOutgoingVideoView(completion: { view in
if let view = view {
completion(PresentationCallVideoView(
@@ -246,6 +246,7 @@ private protocol OngoingCallThreadLocalContextProtocol: class {
func nativeSetNetworkType(_ type: NetworkType)
func nativeSetIsMuted(_ value: Bool)
func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer)
func nativeAcceptVideo(_ capturer: OngoingCallVideoCapturer)
func nativeStop(_ completion: @escaping (String?, Int64, Int64, Int64, Int64) -> Void)
func nativeDebugInfo() -> String
func nativeVersion() -> String
@@ -276,6 +277,9 @@ extension OngoingCallThreadLocalContext: OngoingCallThreadLocalContextProtocol {
func nativeRequestVideo(_ capturer: OngoingCallVideoCapturer) {
}

func nativeAcceptVideo(_ capturer: OngoingCallVideoCapturer) {
}

func nativeSwitchVideoCamera() {
}

@@ -329,6 +333,10 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
self.requestVideo(capturer.impl)
}

func nativeAcceptVideo(_ capturer: OngoingCallVideoCapturer) {
self.acceptVideo(capturer.impl)
}

func nativeDebugInfo() -> String {
return self.debugInfo() ?? ""
}
@@ -646,6 +654,12 @@ public final class OngoingCallContext {
}
}

public func acceptVideo(_ capturer: OngoingCallVideoCapturer) {
self.withContext { context in
context.nativeAcceptVideo(capturer)
}
}

public func debugInfo() -> Signal<(String, String), NoError> {
let poll = Signal<(String, String), NoError> { subscriber in
self.withContext { context in
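OngoingCallContext funnels the new acceptVideo, like every other native call, through withContext. A plausible reading, sketched below, is that withContext hops onto the context's own serial queue before touching the thread-local instance; the queue and type names here are assumptions, not taken from the diff:

    import Foundation

    // Assumed model of the queue-confined context access used above: all
    // operations on the underlying context run on one serial queue.
    final class QueueConfinedContext<Context> {
        private let queue = DispatchQueue(label: "ongoing-call-context")
        private var context: Context?

        init(makeContext: @escaping () -> Context) {
            self.queue.async {
                self.context = makeContext()
            }
        }

        func withContext(_ f: @escaping (Context) -> Void) {
            self.queue.async {
                if let context = self.context {
                    f(context)
                }
            }
        }
    }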
@@ -171,6 +171,14 @@ void Manager::receiveSignalingData(const std::vector<uint8_t> &data) {
_stateUpdated(_state, _videoState);
}
} else if (mode == 2) {
if (_videoState == VideoState::outgoingRequested) {
_videoState = VideoState::active;
_stateUpdated(_state, _videoState);

_mediaManager->perform([videoCapture = _videoCapture](MediaManager *mediaManager) {
mediaManager->setSendVideo(videoCapture);
});
}
} else if (mode == 3) {
auto candidatesData = buffer.Slice(1, buffer.size() - 1);
_networkManager->perform([candidatesData](NetworkManager *networkManager) {
@@ -186,6 +194,7 @@ void Manager::receiveSignalingData(const std::vector<uint8_t> &data) {

void Manager::requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) {
if (videoCapture != nullptr) {
_videoCapture = videoCapture;
if (_videoState == VideoState::possible) {
_videoState = VideoState::outgoingRequested;

@@ -198,16 +207,35 @@ void Manager::requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCap
memcpy(data.data(), buffer.data(), buffer.size());

_signalingDataEmitted(data);

/*_mediaManager->perform([](MediaManager *mediaManager) {
mediaManager->setSendVideo(true);
});*/

_stateUpdated(_state, _videoState);
}
}
}

void Manager::acceptVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) {
if (videoCapture != nullptr) {
_videoCapture = videoCapture;
if (_videoState == VideoState::incomingRequested) {
_videoState = VideoState::active;

rtc::CopyOnWriteBuffer buffer;
uint8_t mode = 2;
buffer.AppendData(&mode, 1);

std::vector<uint8_t> data;
data.resize(buffer.size());
memcpy(data.data(), buffer.data(), buffer.size());

_signalingDataEmitted(data);
_stateUpdated(_state, _videoState);

_mediaManager->perform([videoCapture](MediaManager *mediaManager) {
mediaManager->setSendVideo(videoCapture);
});
}
}
}

void Manager::setMuteOutgoingAudio(bool mute) {
_mediaManager->perform([mute](MediaManager *mediaManager) {
mediaManager->setMuteOutgoingAudio(mute);
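Manager::acceptVideo answers an incoming upgrade by emitting a signaling packet whose first byte is a mode tag: per the hunks above, mode 2 means the video upgrade was accepted and mode 3 carries connection candidates. A Swift sketch of that one-byte framing, using only the mode values visible in this diff (the enum name is illustrative):

    import Foundation

    // Illustrative encoding of the one-byte mode tag used by the signaling
    // messages in this diff: 2 = remote accepted the video upgrade,
    // 3 = payload carries connection candidates.
    enum SignalingMessage {
        case videoAccepted
        case candidates(Data)

        func encoded() -> Data {
            switch self {
            case .videoAccepted:
                return Data([2])
            case .candidates(let payload):
                return Data([3]) + payload
            }
        }

        static func decode(_ data: Data) -> SignalingMessage? {
            guard let mode = data.first else { return nil }
            switch mode {
            case 2:
                return .videoAccepted
            case 3:
                return .candidates(data.dropFirst())
            default:
                return nil
            }
        }
    }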
@@ -36,6 +36,7 @@ public:
void start();
void receiveSignalingData(const std::vector<uint8_t> &data);
void requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture);
void acceptVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture);
void setMuteOutgoingAudio(bool mute);
void notifyIsLocalVideoActive(bool isActive);
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);
@@ -287,9 +287,7 @@ _videoCapture(videoCapture) {
_videoChannel->SetInterface(_videoNetworkInterface.get(), webrtc::MediaTransportConfig());

if (_videoCapture != nullptr) {
((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->setIsActiveUpdated(this->_localVideoCaptureActiveUpdated);

setSendVideo(true);
setSendVideo(_videoCapture);
}
}

@@ -310,7 +308,7 @@ MediaManager::~MediaManager() {

_audioChannel->SetInterface(nullptr, webrtc::MediaTransportConfig());

setSendVideo(false);
setSendVideo(nullptr);
}

void MediaManager::setIsConnected(bool isConnected) {
@@ -360,11 +358,16 @@ void MediaManager::notifyPacketSent(const rtc::SentPacket &sentPacket) {
_call->OnSentPacket(sentPacket);
}

void MediaManager::setSendVideo(bool sendVideo) {
if (_isSendingVideo == sendVideo) {
void MediaManager::setSendVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) {
if (_isSendingVideo == (videoCapture != nullptr)) {
return;
}
_isSendingVideo = sendVideo;
_isSendingVideo = videoCapture != nullptr;
_videoCapture = videoCapture;

if (_videoCapture != nullptr) {
((TgVoipVideoCaptureInterfaceImpl *)_videoCapture.get())->_impl->getSyncAssumingSameThread()->setIsActiveUpdated(this->_localVideoCaptureActiveUpdated);
}

if (_isSendingVideo) {
auto videoCodec = selectVideoCodec(_videoCodecs);
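The signature change above replaces a boolean with the capture interface itself: passing a capturer both enables sending and identifies the source, while nullptr disables sending, which is why the destructor now passes nullptr instead of false. A Swift rendering of the same nil-means-off API shape, with stand-in types:

    // Swift rendering of the nil-means-off API shape adopted above:
    // one optional parameter carries both the on/off flag and the source.
    protocol VideoCapturing: AnyObject {}

    final class MediaSender {
        private var isSendingVideo = false
        private var videoCapture: VideoCapturing?

        func setSendVideo(_ videoCapture: VideoCapturing?) {
            // No state change: nothing to do.
            if self.isSendingVideo == (videoCapture != nil) {
                return
            }
            self.isSendingVideo = videoCapture != nil
            self.videoCapture = videoCapture
            // A real implementation would (re)configure the video codec and
            // attach or detach the capture source here.
        }
    }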
@@ -70,7 +70,7 @@ public:
void setIsConnected(bool isConnected);
void receivePacket(const rtc::CopyOnWriteBuffer &packet);
void notifyPacketSent(const rtc::SentPacket &sentPacket);
void setSendVideo(bool sendVideo);
void setSendVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture);
void setMuteOutgoingAudio(bool mute);
void setIncomingVideoOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink);

@@ -189,6 +189,7 @@ public:

virtual void receiveSignalingData(const std::vector<uint8_t> &data) = 0;
virtual void requestVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) = 0;
virtual void acceptVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) = 0;

virtual TgVoipFinalState stop() = 0;
};
@@ -223,6 +223,12 @@ public:
manager->requestVideo(videoCapture);
});
}

virtual void acceptVideo(std::shared_ptr<TgVoipVideoCaptureInterface> videoCapture) override {
_manager->perform([videoCapture](Manager *manager) {
manager->acceptVideo(videoCapture);
});
}

void setNetworkType(TgVoipNetworkType networkType) override {
/*message::NetworkType mappedType;
@@ -119,6 +119,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (void)makeIncomingVideoView:(void (^_Nonnull)(OngoingCallThreadLocalContextWebrtcVideoView * _Nullable))completion;
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)acceptVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)addSignalingData:(NSData * _Nonnull)data;

@end
@@ -491,6 +491,13 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}

- (void)acceptVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer {
if (_tgVoip && _videoCapturer == nil) {
_videoCapturer = videoCapturer;
_tgVoip->acceptVideo([_videoCapturer getInterface]);
}
}

@end

@implementation OngoingCallThreadLocalContextWebrtcVideoView : UIView