Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Add call video stream api

Commit: c15971dd8e
Parent: b91671537a
@@ -209,6 +209,7 @@ public final class ManagedAudioSession {
private let isActiveSubscribers = Bag<(Bool) -> Void>()
private let isPlaybackActiveSubscribers = Bag<(Bool) -> Void>()

private var isActiveValue: Bool = false
private var callKitAudioSessionIsActive: Bool = false

public init() {
@@ -392,7 +393,7 @@ public final class ManagedAudioSession {
let queue = self.queue
return Signal { [weak self] subscriber in
if let strongSelf = self {
subscriber.putNext(strongSelf.currentTypeAndOutputMode != nil)
subscriber.putNext(strongSelf.isActiveValue || strongSelf.callKitAudioSessionIsActive)

let index = strongSelf.isActiveSubscribers.add({ value in
subscriber.putNext(value)
@@ -686,7 +687,6 @@ public final class ManagedAudioSession {
self.deactivateTimer?.invalidate()
self.deactivateTimer = nil

let wasActive = self.currentTypeAndOutputMode != nil
let wasPlaybackActive = self.currentTypeAndOutputMode?.0.isPlay ?? false
self.currentTypeAndOutputMode = nil

@@ -709,10 +709,9 @@ public final class ManagedAudioSession {
}
}

if wasActive {
for subscriber in self.isActiveSubscribers.copyItems() {
subscriber(false)
}
self.isActiveValue = false
for subscriber in self.isActiveSubscribers.copyItems() {
subscriber(self.isActiveValue || self.callKitAudioSessionIsActive)
}
if wasPlaybackActive {
for subscriber in self.isPlaybackActiveSubscribers.copyItems() {
@@ -725,7 +724,6 @@ public final class ManagedAudioSession {
self.deactivateTimer?.invalidate()
self.deactivateTimer = nil

let wasActive = self.currentTypeAndOutputMode != nil
let wasPlaybackActive = self.currentTypeAndOutputMode?.0.isPlay ?? false

if self.currentTypeAndOutputMode == nil || self.currentTypeAndOutputMode! != (type, outputMode) {
@@ -782,10 +780,9 @@ public final class ManagedAudioSession {
}
}

if !wasActive {
for subscriber in self.isActiveSubscribers.copyItems() {
subscriber(true)
}
self.isActiveValue = true
for subscriber in self.isActiveSubscribers.copyItems() {
subscriber(self.isActiveValue || self.callKitAudioSessionIsActive)
}
if !wasPlaybackActive && (self.currentTypeAndOutputMode?.0.isPlay ?? false) {
for subscriber in self.isPlaybackActiveSubscribers.copyItems() {
@@ -976,6 +973,10 @@ public final class ManagedAudioSession {
managedAudioSessionLog("ManagedAudioSession callKitActivatedAudioSession")
self.callKitAudioSessionIsActive = true
self.updateHolders()

for subscriber in self.isActiveSubscribers.copyItems() {
subscriber(self.isActiveValue || self.callKitAudioSessionIsActive)
}
}
}

@@ -984,6 +985,10 @@ public final class ManagedAudioSession {
managedAudioSessionLog("ManagedAudioSession callKitDeactivatedAudioSession")
self.callKitAudioSessionIsActive = false
self.updateHolders()

for subscriber in self.isActiveSubscribers.copyItems() {
subscriber(self.isActiveValue || self.callKitAudioSessionIsActive)
}
}
}
}
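The ManagedAudioSession hunks above combine the session's own activity with CallKit activation (`isActiveValue || callKitAudioSessionIsActive`) whenever the `Bag`-stored subscribers are notified. Below is a minimal, self-contained sketch of that notify-subscribers pattern; `SimpleBag` and `AudioSessionStateSketch` are invented stand-ins for illustration, not the actual Telegram types.

```swift
// Minimal sketch of the Bag-based notification pattern used in the diff above.
// SimpleBag stands in for SwiftSignalKit's Bag; all names are illustrative only.
final class SimpleBag<T> {
    private var items: [Int: T] = [:]
    private var nextIndex = 0

    func add(_ item: T) -> Int {
        let index = nextIndex
        nextIndex += 1
        items[index] = item
        return index
    }

    func remove(_ index: Int) {
        items.removeValue(forKey: index)
    }

    func copyItems() -> [T] {
        return Array(items.values)
    }
}

final class AudioSessionStateSketch {
    private let isActiveSubscribers = SimpleBag<(Bool) -> Void>()
    private var isActiveValue = false
    private var callKitAudioSessionIsActive = false

    // Subscribers always receive the OR of both activity sources,
    // mirroring the combined value pushed in the diff.
    private func notify() {
        let combined = isActiveValue || callKitAudioSessionIsActive
        for subscriber in isActiveSubscribers.copyItems() {
            subscriber(combined)
        }
    }

    func callKitActivated() {
        callKitAudioSessionIsActive = true
        notify()
    }

    func callKitDeactivated() {
        callKitAudioSessionIsActive = false
        notify()
    }
}
```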
@@ -888,6 +888,10 @@ public final class PresentationCallImpl: PresentationCall {
return self.debugInfoValue.get()
}

func video(isIncoming: Bool) -> Signal<OngoingGroupCallContext.VideoFrameData, NoError>? {
return self.ongoingContext?.video(isIncoming: isIncoming)
}

public func makeIncomingVideoView(completion: @escaping (PresentationCallVideoView?) -> Void) {
self.ongoingContext?.makeIncomingVideoView(completion: { view in
if let view = view {
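`PresentationCallImpl` simply forwards the new stream to its `OngoingCallContext`. A hedged usage sketch follows; the variable names and the body of the handler are illustrative, and `start(next:)` is the standard SwiftSignalKit subscription call.

```swift
// Illustrative only: consuming the new per-call video stream.
// `call` is assumed to be a PresentationCallImpl obtained elsewhere.
let videoDisposable = call.video(isIncoming: true)?.start(next: { frameData in
    // frameData is an OngoingGroupCallContext.VideoFrameData;
    // hand it to whatever renders remote video.
    print("received incoming video frame")
})

// Later, stop receiving frames; disposing removes the underlying video sink.
videoDisposable?.dispose()
```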
@@ -1192,6 +1192,31 @@ public final class OngoingCallContext {
return (poll |> then(.complete() |> delay(0.5, queue: Queue.concurrentDefaultQueue()))) |> restart
}

public func video(isIncoming: Bool) -> Signal<OngoingGroupCallContext.VideoFrameData, NoError> {
let queue = self.queue
return Signal { [weak self] subscriber in
let disposable = MetaDisposable()

queue.async {
guard let strongSelf = self else {
return
}
strongSelf.withContext { context in
if let context = context as? OngoingCallThreadLocalContextWebrtc {
let innerDisposable = context.addVideoOutput(withIsIncoming: isIncoming, sink: { videoFrameData in
subscriber.putNext(OngoingGroupCallContext.VideoFrameData(frameData: videoFrameData))
})
disposable.set(ActionDisposable {
innerDisposable.dispose()
})
}
}
}

return disposable
}
}

public func makeIncomingVideoView(completion: @escaping (OngoingCallContextPresentationCallVideoView?) -> Void) {
self.withContext { context in
if let context = context as? OngoingCallThreadLocalContextWebrtc {
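`OngoingCallContext.video(isIncoming:)` above bridges the callback-based `addVideoOutput(withIsIncoming:sink:)` API into a `Signal`, tying removal of the sink to the signal's disposable. A generic sketch of that bridging shape, assuming SwiftSignalKit's `Queue`, `Signal`, `MetaDisposable`, and `NoError`; the helper name `bridgeToSignal` is invented for illustration.

```swift
import SwiftSignalKit

// Illustrative helper (not part of the diff): wraps a callback-registration API
// that hands back a Disposable into a Signal, the same shape as video(isIncoming:).
func bridgeToSignal<T>(
    queue: Queue,
    register: @escaping (@escaping (T) -> Void) -> Disposable
) -> Signal<T, NoError> {
    return Signal { subscriber in
        let disposable = MetaDisposable()
        queue.async {
            // Register the sink; keep its disposable so cancelling the outer
            // Signal also unregisters the sink.
            let inner = register { value in
                subscriber.putNext(value)
            }
            disposable.set(inner)
        }
        return disposable
    }
}
```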
@@ -261,6 +261,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
- (void)setIsMuted:(bool)isMuted;
- (void)setIsLowBatteryLevel:(bool)isLowBatteryLevel;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (GroupCallDisposable * _Nonnull)addVideoOutputWithIsIncoming:(bool)isIncoming sink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink;
- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion;
- (void)requestVideo:(OngoingCallThreadLocalContextVideoCapturer * _Nullable)videoCapturer;
- (void)setRequestedVideoAspect:(float)aspect;
@@ -850,6 +850,9 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
bool _useManualAudioSessionControl;
SharedCallAudioDevice *_audioDevice;

int _nextSinkId;
NSMutableDictionary<NSNumber *, GroupCallVideoSink *> *_sinks;

rtc::scoped_refptr<webrtc::tgcalls_ios_adm::AudioDeviceModuleIOS> _currentAudioDeviceModule;
rtc::Thread *_currentAudioDeviceModuleThread;

@@ -1030,6 +1033,8 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;

_audioDevice = audioDevice;

_sinks = [[NSMutableDictionary alloc] init];

_useManualAudioSessionControl = true;
[RTCAudioSession sharedInstance].useManualAudio = true;

@@ -1469,6 +1474,33 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
}
}

- (GroupCallDisposable * _Nonnull)addVideoOutputWithIsIncoming:(bool)isIncoming sink:(void (^_Nonnull)(CallVideoFrameData * _Nonnull))sink {
int sinkId = _nextSinkId;
_nextSinkId += 1;

GroupCallVideoSink *storedSink = [[GroupCallVideoSink alloc] initWithSink:sink];
_sinks[@(sinkId)] = storedSink;

if (_tgVoip) {
if (isIncoming) {
_tgVoip->setIncomingVideoOutput([storedSink sink]);
}
}

__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
id<OngoingCallThreadLocalContextQueueWebrtc> queue = _queue;
return [[GroupCallDisposable alloc] initWithBlock:^{
[queue dispatch:^{
__strong OngoingCallThreadLocalContextWebrtc *strongSelf = weakSelf;
if (!strongSelf) {
return;
}

[strongSelf->_sinks removeObjectForKey:@(sinkId)];
}];
}];
}

- (void)makeIncomingVideoView:(void (^_Nonnull)(UIView<OngoingCallThreadLocalContextWebrtcVideoView> * _Nullable))completion {
if (_tgVoip) {
__weak OngoingCallThreadLocalContextWebrtc *weakSelf = self;
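On the Objective-C side, `addVideoOutputWithIsIncoming:sink:` stores each sink in `_sinks` under an incrementing id and returns a `GroupCallDisposable` that removes it on the context queue. The same register-and-return-a-token shape, sketched in Swift with invented names (like the real context, it assumes registration and frame delivery happen on one serial queue):

```swift
import Dispatch

// Swift sketch of the sink-registry pattern above; SinkRegistry and FrameData
// are illustrative names, not the actual Telegram/tgcalls types.
final class SinkRegistry {
    struct FrameData {}
    typealias Sink = (FrameData) -> Void

    private let queue = DispatchQueue(label: "call-context")
    private var nextSinkId = 0
    private var sinks: [Int: Sink] = [:]

    // Mirrors addVideoOutputWithIsIncoming:sink:, which returns a disposable;
    // here the "disposable" is a plain cancellation closure.
    func addVideoOutput(_ sink: @escaping Sink) -> () -> Void {
        let sinkId = nextSinkId
        nextSinkId += 1
        sinks[sinkId] = sink

        return { [weak self] in
            // Removal hops back to the context queue, as in the diff.
            self?.queue.async {
                self?.sinks.removeValue(forKey: sinkId)
            }
        }
    }

    // Delivery fans a frame out to every registered sink.
    func deliver(_ frame: FrameData) {
        for sink in sinks.values {
            sink(frame)
        }
    }
}
```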