Merge commit '8b434853d1c308deef7ccd2376b0b370e220b387'

Ali 2021-07-03 00:18:15 +04:00
commit 5255fa2e66
3 changed files with 26 additions and 73 deletions


@@ -601,44 +601,15 @@ public final class OngoingGroupCallContext {
mainView?.setOnIsMirroredUpdated { value in
f?(value)
}
},
setIsPaused: { [weak mainView] paused in
mainView?.setIsPaused(paused)
},
renderToSize: { [weak mainView] size, animated in
mainView?.render(to: size, animated: animated)
}
)
var cloneVideoView: OngoingCallContextPresentationCallVideoView?
if let cloneView = cloneView {
cloneVideoView = OngoingCallContextPresentationCallVideoView(
view: cloneView,
setOnFirstFrameReceived: { [weak cloneView] f in
cloneView?.setOnFirstFrameReceived(f)
},
getOrientation: { [weak cloneView] in
if let cloneView = cloneView {
return OngoingCallVideoOrientation(cloneView.orientation)
} else {
return .rotation0
}
},
getAspect: { [weak cloneView] in
if let cloneView = cloneView {
return cloneView.aspect
} else {
return 0.0
}
},
setOnOrientationUpdated: { [weak cloneView] f in
cloneView?.setOnOrientationUpdated { value, aspect in
f?(OngoingCallVideoOrientation(value), aspect)
}
},
setVideoContentMode: { [weak cloneView] mode in
cloneView?.setVideoContentMode(mode)
},
setOnIsMirroredUpdated: { [weak cloneView] f in
cloneView?.setOnIsMirroredUpdated { value in
f?(value)
}
}
)
}
completion(mainVideoView, cloneVideoView)
completion(mainVideoView, nil)
#endif
} else {
completion(nil, nil)
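
This hunk removes the secondary ("clone") video view plumbing, including the setIsPaused/renderToSize forwarders, so completion now always passes nil for the clone slot. The wrapper pattern it leaves in place captures the underlying view weakly in every accessor and falls back to a neutral value once the view is gone. A minimal Swift sketch of that weak-forwarding pattern, with simplified, hypothetical names (VideoView and VideoViewWrapper stand in for the real types, not the actual OngoingCallContextPresentationCallVideoView initializer):

import Foundation

final class VideoView {
    var orientation: Int = 0
    var aspect: CGFloat = 1.0
}

struct VideoViewWrapper {
    let getOrientation: () -> Int
    let getAspect: () -> CGFloat

    init(view: VideoView) {
        // Capture weakly so the wrapper never keeps the platform view alive;
        // return a neutral default once the view has been deallocated.
        self.getOrientation = { [weak view] in
            view?.orientation ?? 0
        }
        self.getAspect = { [weak view] in
            view?.aspect ?? 0.0
        }
    }
}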


@@ -105,6 +105,8 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
- (void)setVideoContentMode:(CALayerContentsGravity _Nonnull )mode;
- (void)setForceMirrored:(bool)forceMirrored;
- (void)setIsPaused:(bool)paused;
- (void)renderToSize:(NSSize)size animated:(bool)animated;
#endif
@end


@@ -50,26 +50,8 @@
@end
@interface IsProcessingCustomSampleBufferFlag : NSObject
@property (nonatomic) bool value;
@end
@implementation IsProcessingCustomSampleBufferFlag
- (instancetype)init {
self = [super init];
if (self != nil) {
}
return self;
}
@end
@interface OngoingCallThreadLocalContextVideoCapturer () {
std::shared_ptr<tgcalls::VideoCaptureInterface> _interface;
IsProcessingCustomSampleBufferFlag *_isProcessingCustomSampleBuffer;
}
@end
@@ -235,7 +217,6 @@
self = [super init];
if (self != nil) {
_interface = interface;
_isProcessingCustomSampleBuffer = [[IsProcessingCustomSampleBufferFlag alloc] init];
_croppingBuffer = std::make_shared<std::vector<uint8_t>>();
}
return self;
@@ -273,6 +254,20 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
}
#if TARGET_OS_IOS
- (void)submitSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer {
if (!sampleBuffer) {
return;
}
tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask(RTC_FROM_HERE, [interface = _interface, sampleBuffer = CFRetain(sampleBuffer)]() {
auto capture = GetVideoCaptureAssumingSameThread(interface.get());
auto source = capture->source();
if (source) {
[CustomExternalCapturer passSampleBuffer:(CMSampleBufferRef)sampleBuffer toSource:source];
}
CFRelease(sampleBuffer);
});
}
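
submitSampleBuffer: must keep the CMSampleBufferRef alive across the hop to the media thread, hence the explicit CFRetain before PostTask and the matching CFRelease inside the task. In Swift, ARC performs that same retain/release pair automatically when a closure captures the buffer; a minimal sketch under that assumption (mediaQueue and process are hypothetical stand-ins for the tgcalls media thread and sink):

import CoreMedia
import Dispatch

let mediaQueue = DispatchQueue(label: "media-thread")

func submit(_ sampleBuffer: CMSampleBuffer, process: @escaping (CMSampleBuffer) -> Void) {
    // Capturing the buffer in the closure retains it until the task completes,
    // which is what the CFRetain/CFRelease pair does explicitly above.
    mediaQueue.async {
        process(sampleBuffer)
    }
}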
- (void)submitPixelBuffer:(CVPixelBufferRef _Nonnull)pixelBuffer rotation:(OngoingCallVideoOrientationWebrtc)rotation {
if (!pixelBuffer) {
return;
@@ -294,19 +289,13 @@ tgcalls::VideoCaptureInterfaceObject *GetVideoCaptureAssumingSameThread(tgcalls:
break;
}
if (_isProcessingCustomSampleBuffer.value) {
return;
}
_isProcessingCustomSampleBuffer.value = true;
tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask(RTC_FROM_HERE, [interface = _interface, pixelBuffer = CFRetain(pixelBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation, isProcessingCustomSampleBuffer = _isProcessingCustomSampleBuffer]() {
tgcalls::StaticThreads::getThreads()->getMediaThread()->PostTask(RTC_FROM_HERE, [interface = _interface, pixelBuffer = CFRetain(pixelBuffer), croppingBuffer = _croppingBuffer, videoRotation = videoRotation]() {
auto capture = GetVideoCaptureAssumingSameThread(interface.get());
auto source = capture->source();
if (source) {
[CustomExternalCapturer passPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:videoRotation toSource:source croppingBuffer:*croppingBuffer];
}
CFRelease(pixelBuffer);
isProcessingCustomSampleBuffer.value = false;
});
}
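
The removed _isProcessingCustomSampleBuffer flag implemented drop-frame back-pressure: while one pixel buffer was still in flight on the media thread, newly captured frames were discarded; after this change every frame is posted. A sketch of the removed gating pattern in Swift (FrameGate and its queues are hypothetical; this sketch resets the flag by hopping back to the capture queue, where the original code flipped the shared flag directly from the media-thread task):

import CoreVideo
import Dispatch

final class FrameGate {
    private var isProcessing = false   // touched only on the capture queue
    private let mediaQueue = DispatchQueue(label: "media-thread")
    private let captureQueue: DispatchQueue

    init(captureQueue: DispatchQueue) {
        self.captureQueue = captureQueue
    }

    func submit(_ pixelBuffer: CVPixelBuffer, process: @escaping (CVPixelBuffer) -> Void) {
        // Drop the frame if the previous one is still being processed.
        guard !isProcessing else { return }
        isProcessing = true
        mediaQueue.async {
            process(pixelBuffer)
            // Reopen the gate once processing finishes.
            self.captureQueue.async { self.isProcessing = false }
        }
    }
}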
@@ -1121,9 +1110,10 @@ private:
}
std::vector<tgcalls::VideoCodecName> videoCodecPreferences;
// videoCodecPreferences.push_back(tgcalls::VideoCodecName::H264);
//videoCodecPreferences.push_back(tgcalls::VideoCodecName::VP9);
int minOutgoingVideoBitrateKbit = 500;
bool disableOutgoingAudioProcessing = false;
tgcalls::GroupConfig config;
config.need_log = false;
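
These hunks thread a disableOutgoingAudioProcessing flag (defaulting to false) through to the group-instance descriptor. The underlying pattern, adding a defaulted field to a config type so existing call sites keep compiling, looks like this in Swift (GroupCallDescriptor is a hypothetical analogue, not the actual tgcalls C++ descriptor):

struct GroupCallDescriptor {
    var outgoingAudioBitrateKbit: Int = 32
    var disableOutgoingAudioProcessing: Bool = false
}

// Callers that don't know about the new flag keep compiling unchanged,
// because the memberwise initializer picks up the default value.
let descriptor = GroupCallDescriptor(outgoingAudioBitrateKbit: 32)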
@@ -1195,7 +1185,6 @@ private:
return std::make_shared<BroadcastPartTaskImpl>(task);
},
.outgoingAudioBitrateKbit = outgoingAudioBitrateKbit,
.disableOutgoingAudioProcessing = disableOutgoingAudioProcessing,
.videoContentType = _videoContentType,
.videoCodecPreferences = videoCodecPreferences,
.initialEnableNoiseSuppression = enableNoiseSuppression,
@@ -1477,15 +1466,6 @@ private:
}
}
- (void)addExternalAudioData:(NSData * _Nonnull)data {
if (_instance) {
std::vector<uint8_t> samples;
samples.resize(data.length);
[data getBytes:samples.data() length:data.length];
_instance->addExternalAudioSamples(std::move(samples));
}
}
@end
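
The removed addExternalAudioData: copied an NSData payload into a std::vector<uint8_t> and moved it into the call instance. The equivalent byte copy in Swift (the sink closure is a hypothetical stand-in for _instance->addExternalAudioSamples):

import Foundation

func addExternalAudioData(_ data: Data, sink: ([UInt8]) -> Void) {
    // Copy the payload into a contiguous byte buffer, as getBytes:length:
    // does into the std::vector<uint8_t> above.
    let samples = [UInt8](data)
    sink(samples)
}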
@implementation OngoingGroupCallMediaChannelDescription