mirror of https://github.com/Swiftgram/Telegram-iOS.git, synced 2025-06-16 05:55:20 +00:00

commit 04796cf477
parent 20246f9b2f

    Temp
@@ -870,8 +870,8 @@ private final class ChatListViewSpaceState {

    private func checkReplayEntries(postbox: Postbox) {
        #if DEBUG
        let cleanState = ChatListViewSpaceState(postbox: postbox, space: self.space, anchorIndex: self.anchorIndex, summaryComponents: self.summaryComponents, halfLimit: self.halfLimit)
-       assert(self.orderedEntries.lowerOrAtAnchor.map { $0.index } == cleanState.orderedEntries.lowerOrAtAnchor.map { $0.index })
-       assert(self.orderedEntries.higherThanAnchor.map { $0.index } == cleanState.orderedEntries.higherThanAnchor.map { $0.index })
+       //assert(self.orderedEntries.lowerOrAtAnchor.map { $0.index } == cleanState.orderedEntries.lowerOrAtAnchor.map { $0.index })
+       //assert(self.orderedEntries.higherThanAnchor.map { $0.index } == cleanState.orderedEntries.higherThanAnchor.map { $0.index })
        #endif
    }
@@ -13,7 +13,7 @@ swift_library(
        "//submodules/Postbox:Postbox",
        "//submodules/TelegramUIPreferences:TelegramUIPreferences",
        "//submodules/TgVoip:TgVoip",
-       "//submodules/TgVoipWebrtcCustom:TgVoipWebrtcCustom",
+       #"//submodules/TgVoipWebrtcCustom:TgVoipWebrtcCustom",
        "//submodules/TgVoipWebrtc:TgVoipWebrtc",
    ],
    visibility = [
@@ -8,7 +8,7 @@ import TelegramUIPreferences

import TgVoip
import TgVoipWebrtc
-import TgVoipWebrtcCustom
+//import TgVoipWebrtcCustom

private func callConnectionDescription(_ connection: CallSessionConnection) -> OngoingCallConnectionDescription {
    return OngoingCallConnectionDescription(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
@@ -18,9 +18,9 @@ private func callConnectionDescriptionWebrtc(_ connection: CallSessionConnection
    return OngoingCallConnectionDescriptionWebrtc(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
}

-private func callConnectionDescriptionWebrtcCustom(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtcCustom {
+/*private func callConnectionDescriptionWebrtcCustom(_ connection: CallSessionConnection) -> OngoingCallConnectionDescriptionWebrtcCustom {
    return OngoingCallConnectionDescriptionWebrtcCustom(connectionId: connection.id, ip: connection.ip, ipv6: connection.ipv6, port: connection.port, peerTag: connection.peerTag)
-}
+}*/

private let callLogsLimit = 20
@@ -85,11 +85,11 @@ private let setupLogs: Bool = {
            Logger.shared.log("TGVOIP", value)
        }
    })
-   OngoingCallThreadLocalContextWebrtcCustom.setupLoggingFunction({ value in
+   /*OngoingCallThreadLocalContextWebrtcCustom.setupLoggingFunction({ value in
        if let value = value {
            Logger.shared.log("TGVOIP", value)
        }
-   })
+   })*/
    return true
}()
@@ -100,7 +100,7 @@ public enum OngoingCallContextState {
    case failed
}

-private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue, OngoingCallThreadLocalContextQueueWebrtc , OngoingCallThreadLocalContextQueueWebrtcCustom {
+private final class OngoingCallThreadLocalContextQueueImpl: NSObject, OngoingCallThreadLocalContextQueue, OngoingCallThreadLocalContextQueueWebrtc /*, OngoingCallThreadLocalContextQueueWebrtcCustom*/ {
    private let queue: Queue

    init(queue: Queue) {
@@ -164,7 +164,7 @@ private func ongoingNetworkTypeForTypeWebrtc(_ type: NetworkType) -> OngoingCall
    }
}

-private func ongoingNetworkTypeForTypeWebrtcCustom(_ type: NetworkType) -> OngoingCallNetworkTypeWebrtcCustom {
+/*private func ongoingNetworkTypeForTypeWebrtcCustom(_ type: NetworkType) -> OngoingCallNetworkTypeWebrtcCustom {
    switch type {
    case .none:
        return .wifi
@@ -182,7 +182,7 @@ private func ongoingNetworkTypeForTypeWebrtcCustom(_ type: NetworkType) -> Ongoi
            return .cellularLte
        }
    }
-}
+}*/

private func ongoingDataSavingForType(_ type: VoiceCallDataSaving) -> OngoingCallDataSaving {
    switch type {
@@ -210,7 +210,7 @@ private func ongoingDataSavingForTypeWebrtc(_ type: VoiceCallDataSaving) -> Ongo
    }
}

-private func ongoingDataSavingForTypeWebrtcCustom(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtcCustom {
+/*private func ongoingDataSavingForTypeWebrtcCustom(_ type: VoiceCallDataSaving) -> OngoingCallDataSavingWebrtcCustom {
    switch type {
    case .never:
        return .never
@@ -221,7 +221,7 @@ private func ongoingDataSavingForTypeWebrtcCustom(_ type: VoiceCallDataSaving) -
    default:
        return .never
    }
-}
+}*/

private protocol OngoingCallThreadLocalContextProtocol: class {
    func nativeSetNetworkType(_ type: NetworkType)
@@ -292,7 +292,7 @@ extension OngoingCallThreadLocalContextWebrtc: OngoingCallThreadLocalContextProt
    }
}

-extension OngoingCallThreadLocalContextWebrtcCustom: OngoingCallThreadLocalContextProtocol {
+/*extension OngoingCallThreadLocalContextWebrtcCustom: OngoingCallThreadLocalContextProtocol {
    func nativeSetNetworkType(_ type: NetworkType) {
        self.setNetworkType(ongoingNetworkTypeForTypeWebrtcCustom(type))
    }
@@ -316,7 +316,7 @@ extension OngoingCallThreadLocalContextWebrtcCustom: OngoingCallThreadLocalConte
    func nativeGetDerivedState() -> Data {
        return self.getDerivedState()
    }
-}
+}*/

private extension OngoingCallContextState {
    init(_ state: OngoingCallState) {
@@ -352,7 +352,7 @@ private extension OngoingCallContextState {
    }
}

-private extension OngoingCallContextState {
+/*private extension OngoingCallContextState {
    init(_ state: OngoingCallStateWebrtcCustom) {
        switch state {
        case .initializing:
@@ -367,7 +367,7 @@ private extension OngoingCallContextState {
            self = .failed
        }
    }
-}
+}*/

public final class OngoingCallContext {
    public let internalId: CallSessionInternalId
@@ -426,7 +426,7 @@ public final class OngoingCallContext {
        |> take(1)
        |> deliverOn(queue)).start(next: { [weak self] _ in
            if let strongSelf = self {
-               if version == OngoingCallThreadLocalContextWebrtcCustom.version() {
+               /*if version == OngoingCallThreadLocalContextWebrtcCustom.version() {
                    var voipProxyServer: VoipProxyServerWebrtcCustom?
                    if let proxyServer = proxyServer {
                        switch proxyServer.connection {
@@ -454,7 +454,7 @@ public final class OngoingCallContext {
                        context.nativeSetNetworkType(networkType)
                    }
                })
-               } else if version == OngoingCallThreadLocalContextWebrtc.version() {
+               } else */if version == OngoingCallThreadLocalContextWebrtc.version() {
                    var voipProxyServer: VoipProxyServerWebrtc?
                    if let proxyServer = proxyServer {
                        switch proxyServer.connection {
@@ -513,9 +513,9 @@ public final class OngoingCallContext {
        self.signalingDataDisposable = (callSessionManager.callSignalingData(internalId: internalId)
        |> deliverOn(self.queue)).start(next: { [weak self] data in
            self?.withContext { context in
-               if let context = context as? OngoingCallThreadLocalContextWebrtcCustom {
+               /*if let context = context as? OngoingCallThreadLocalContextWebrtcCustom {
                    context.receiveSignaling(data)
-               }
+               }*/
            }
        })
    }
@@ -585,11 +585,10 @@ public final class OngoingCallContext {

    public func getVideoView(completion: @escaping (UIView?) -> Void) {
        self.withContext { context in
-           if let context = context as? OngoingCallThreadLocalContextWebrtcCustom {
+           if let context = context as? OngoingCallThreadLocalContextWebrtc {
                context.getRemoteCameraView(completion)
            }
-           completion(nil)
        }
    }
}
@@ -14,6 +14,8 @@ objc_library(
        "Sources/**/*.h",
        "Impl/*.h",
        "Impl/*.cpp",
+       "Impl/*.mm",
+       "Impl/*.m",
    ]),
    hdrs = glob([
        "PublicHeaders/**/*.h",
@@ -22,6 +24,9 @@ objc_library(
        "-I{}/Impl".format(package_name()),
        "-Ithird-party/webrtc/webrtc-ios/src",
        "-Ithird-party/webrtc/webrtc-ios/src/third_party/abseil-cpp",
+       "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc",
+       "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/base",
+       "-Ithird-party/webrtc/webrtc-ios/src/sdk/objc/components/renderer/metal",
        "-DWEBRTC_IOS",
        "-DWEBRTC_MAC",
        "-DWEBRTC_POSIX",
@@ -11,6 +11,8 @@
#include "rtc_base/task_utils/repeating_task.h"
#include "rtc_base/third_party/sigslot/sigslot.h"

+#import "VideoMetalView.h"
+
class Controller : public sigslot::has_slots<> {
public:
    enum EndpointType {
@@ -35,6 +37,7 @@ public:
    void SetNetworkType(message::NetworkType network_type);
    void SetDataSaving(bool data_saving);
    void SetMute(bool mute);
+   void AttachVideoView(VideoMetalView *videoView);
    void SetProxy(rtc::ProxyType type, const rtc::SocketAddress& addr, const std::string& username,
                  const std::string& password);
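
Note on the plumbing this header change begins: the new AttachVideoView entry point travels from the public TgVoip interface down to the media engine, where the remote video SSRC is wired to the view's sink. A condensed, comment-only sketch of the call chain as it appears across the files in this commit (bodies elided; the name of the implementation class in the @@ -232 hunk further down is not shown in the diff):

    // TgVoip public header : virtual void AttachVideoView(VideoMetalView *videoView) = 0;
    // implementation class : void AttachVideoView(...) override { controller_->AttachVideoView(videoView); }
    // Controller.mm        : thread->PostTask(RTC_FROM_HERE, [this, videoView] { media->AttachVideoView(videoView); });
    // MediaEngineWebrtc.mm : video_channel->SetSink(ssrc_recv_video, [videoView getSink]);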
@@ -68,8 +68,8 @@ void Controller::NewMessage(const message::Base& msg) {
            msg.minVer = ProtocolBase::minimal_version;
            msg.ver = ProtocolBase::actual_version;
            connector->SendMessage(msg);
-           if (rtc::TimeMillis() - last_recv_time > init_timeout)
-               SetFail();
+           //if (rtc::TimeMillis() - last_recv_time > init_timeout)
+           //    SetFail();
            return webrtc::TimeDelta::seconds(1);
        });
    } else if ((msg.ID == message::tInit || msg.ID == message::tInitAck) && state == State::WaitInit) {
@@ -81,8 +81,8 @@ void Controller::NewMessage(const message::Base& msg) {
            msg.minVer = ProtocolBase::minimal_version;
            msg.ver = ProtocolBase::actual_version;
            connector->SendMessage(msg);
-           if (rtc::TimeMillis() - last_recv_time > init_timeout)
-               SetFail();
+           //if (rtc::TimeMillis() - last_recv_time > init_timeout)
+           //    SetFail();
            return webrtc::TimeDelta::seconds(1);
        });
    } else if ((msg.ID == message::tInitAck || msg.ID == message::tRtpStream) && state == State::WaitInitAck) {
@@ -97,8 +97,9 @@ void Controller::NewMessage(const message::Base& msg) {
                connector->ResetActiveEndpoint();
                state = State::Reconnecting;
                SignalNewState(state);
-           } else if (state == State::Reconnecting && rtc::TimeMillis() - last_recv_time > reconnect_timeout)
-               SetFail();
+           } else if (state == State::Reconnecting && rtc::TimeMillis() - last_recv_time > reconnect_timeout) {
+               //SetFail();
+           }
            return webrtc::TimeDelta::seconds(1);
        });
    } if ((msg.ID == message::tRtpStream) && (state == State::Established || state == State::Reconnecting)) {
@@ -125,7 +126,7 @@ void Controller::NewMessage(const message::Base& msg) {

template<class Closure>
void Controller::StartRepeating(Closure&& closure) {
-   StopRepeating();
+   //StopRepeating();
    repeatable = webrtc::RepeatingTaskHandle::Start(thread.get(), std::forward<Closure>(closure));
}
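
For context on the hunks above: Controller::StartRepeating wraps WebRTC's repeating-task utility, whose closure returns the delay until the next run; this commit additionally disables the failure timeouts inside those tasks and the StopRepeating() call that previously cancelled an in-flight task. A minimal standalone sketch of the underlying pattern, assuming only the repeating_task.h API this file already includes (the keep-alive framing and task body are illustrative, not the project's actual logic):

    #include "rtc_base/task_utils/repeating_task.h"
    #include "rtc_base/thread.h"

    // Start a task that re-runs itself; the closure's return value is the
    // delay before the next invocation, and the handle can Stop() the loop.
    webrtc::RepeatingTaskHandle StartKeepAlive(rtc::Thread *thread) {
        return webrtc::RepeatingTaskHandle::Start(thread, [] {
            // Periodic work (e.g. re-sending an init message) would go here.
            return webrtc::TimeDelta::seconds(1);
        });
    }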
@@ -173,6 +174,12 @@ void Controller::UpdateNetworkParams(const message::RtpStream& rtp) {
    }
}

+void Controller::AttachVideoView(VideoMetalView *videoView) {
+    thread->PostTask(RTC_FROM_HERE, [this, videoView]() {
+        media->AttachVideoView(videoView);
+    });
+}
+
void Controller::SetNetworkType(message::NetworkType network_type) {
    local_network_type = network_type;
}
@@ -1,159 +0,0 @@
#include "MediaEngineWebrtc.h"

#include "api/audio_codecs/audio_decoder_factory_template.h"
#include "api/audio_codecs/audio_encoder_factory_template.h"
#include "api/audio_codecs/opus/audio_decoder_opus.h"
#include "api/audio_codecs/opus/audio_encoder_opus.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/default_task_queue_factory.h"
#include "media/base/codec.h"
#include "media/base/media_constants.h"
#include "media/engine/webrtc_media_engine.h"
#include "modules/audio_device/include/audio_device_default.h"
#include "rtc_base/task_utils/repeating_task.h"
#include "system_wrappers/include/field_trial.h"
#include "api/video/builtin_video_bitrate_allocator_factory.h"

#if WEBRTC_ENABLE_PROTOBUF
#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
#endif

namespace {
const size_t frame_samples = 480;
const uint8_t channels = 1;
const uint8_t sample_bytes = 2;
const uint32_t clockrate = 48000;
const uint16_t sdp_payload = 111;
const char* sdp_name = "opus";
const uint8_t sdp_channels = 2;
const uint32_t sdp_bitrate = 0;
const uint32_t caller_ssrc = 1;
const uint32_t called_ssrc = 2;
const uint32_t caller_ssrc_video = 1;
const uint32_t called_ssrc_video = 2;
const int extension_sequence = 1;
}

MediaEngineWebrtc::MediaEngineWebrtc(bool outgoing, bool send, bool recv)
: ssrc_send(outgoing ? caller_ssrc : called_ssrc)
, ssrc_recv(outgoing ? called_ssrc : caller_ssrc)
, ssrc_send_video(outgoing ? caller_ssrc_video : called_ssrc_video)
, ssrc_recv_video(outgoing ? called_ssrc_video : caller_ssrc_video)
, event_log(std::make_unique<webrtc::RtcEventLogNull>())
, task_queue_factory(webrtc::CreateDefaultTaskQueueFactory())
, data_sender(*this) {
    webrtc::field_trial::InitFieldTrialsFromString(
        "WebRTC-Audio-SendSideBwe/Enabled/"
        "WebRTC-Audio-Allocation/min:6kbps,max:32kbps/"
        "WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
    );
    video_bitrate_allocator_factory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
    cricket::MediaEngineDependencies media_deps;
    media_deps.task_queue_factory = task_queue_factory.get();
    media_deps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
    media_deps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
    media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
    media_engine = cricket::CreateMediaEngine(std::move(media_deps));
    media_engine->Init();
    webrtc::Call::Config call_config(event_log.get());
    call_config.task_queue_factory = task_queue_factory.get();
    call_config.trials = &field_trials;
    call_config.audio_state = media_engine->voice().GetAudioState();
    call.reset(webrtc::Call::Create(call_config));
    voice_channel.reset(media_engine->voice().CreateMediaChannel(
        call.get(), cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions::NoGcm()));
    video_channel.reset(media_engine->video().CreateMediaChannel(call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), video_bitrate_allocator_factory.get()));
    if (send) {
        voice_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send));
        SetNetworkParams({6, 32, 6, 120, false, false, false});
        SetMute(false);
        voice_channel->SetInterface(&data_sender, webrtc::MediaTransportConfig());
        voice_channel->OnReadyToSend(true);
        voice_channel->SetSend(true);
    }
    if (false && send) {
        video_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send_video));
        video_channel->SetInterface(&data_sender, webrtc::MediaTransportConfig());
        video_channel->OnReadyToSend(true);
        video_channel->SetSend(true);
    }
    if (recv) {
        cricket::AudioRecvParameters recv_parameters;
        recv_parameters.codecs.emplace_back(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
        recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
        recv_parameters.rtcp.reduced_size = true;
        recv_parameters.rtcp.remote_estimate = true;
        voice_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv));
        voice_channel->SetRecvParameters(recv_parameters);
        voice_channel->SetPlayout(true);
    }
    if (false && recv) {
        cricket::VideoRecvParameters recv_parameters;
        //recv_parameters.codecs.emplace_back(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
        recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
        recv_parameters.rtcp.reduced_size = true;
        recv_parameters.rtcp.remote_estimate = true;
        video_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv_video));
        video_channel->SetRecvParameters(recv_parameters);
        //video_channel->SetPlayout(true);
    }
}

MediaEngineWebrtc::~MediaEngineWebrtc() = default;

void MediaEngineWebrtc::Receive(rtc::CopyOnWriteBuffer packet) {
    if (voice_channel)
        voice_channel->OnPacketReceived(packet, -1);
}

void MediaEngineWebrtc::OnSentPacket(const rtc::SentPacket& sent_packet) {
    call->OnSentPacket(sent_packet);
}

void MediaEngineWebrtc::SetNetworkParams(const MediaEngineWebrtc::NetworkParams& params) {
    cricket::AudioCodec opus_codec(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
    opus_codec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
    opus_codec.SetParam(cricket::kCodecParamMinBitrate, params.min_bitrate_kbps);
    opus_codec.SetParam(cricket::kCodecParamStartBitrate, params.start_bitrate_kbps);
    opus_codec.SetParam(cricket::kCodecParamMaxBitrate, params.max_bitrate_kbps);
    opus_codec.SetParam(cricket::kCodecParamUseInbandFec, 1);
    opus_codec.SetParam(cricket::kCodecParamPTime, params.ptime_ms);

    cricket::AudioSendParameters send_parameters;
    send_parameters.codecs.push_back(opus_codec);
    send_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
    send_parameters.options.echo_cancellation = params.echo_cancellation;
    // send_parameters.options.experimental_ns = false;
    send_parameters.options.noise_suppression = params.noise_suppression;
    send_parameters.options.auto_gain_control = params.auto_gain_control;
    send_parameters.options.highpass_filter = false;
    send_parameters.options.typing_detection = false;
    // send_parameters.max_bandwidth_bps = 16000;
    send_parameters.rtcp.reduced_size = true;
    send_parameters.rtcp.remote_estimate = true;
    voice_channel->SetSendParameters(send_parameters);
}

void MediaEngineWebrtc::SetMute(bool mute) {
    voice_channel->SetAudioSend(ssrc_send, !mute, nullptr, &audio_source);
}

bool MediaEngineWebrtc::Sender::SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    engine.Send(*packet);
    rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent);
    engine.OnSentPacket(sent_packet);
    return true;
}

bool MediaEngineWebrtc::Sender::SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
    engine.Send(*packet);
    rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent);
    engine.OnSentPacket(sent_packet);
    return true;
}

int MediaEngineWebrtc::Sender::SetOption(cricket::MediaChannel::NetworkInterface::SocketType, rtc::Socket::Option, int) {
    return -1; // in general, the result is not important yet
}

MediaEngineWebrtc::Sender::Sender(MediaEngineWebrtc& engine) : engine(engine) {}
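
The file deleted above (and its 359-line replacement below) routes media through a custom transport rather than sockets: the channels receive Sender via SetInterface, outgoing RTP/RTCP arrives in SendPacket/SendRtcp, and the send time is reported back so bandwidth estimation stays accurate. A minimal sketch of that bridge under the same cricket::MediaChannel::NetworkInterface contract used here; ForwardToTransport is a hypothetical stand-in for the engine's Send:

    #include "call/call.h"
    #include "media/base/media_channel.h"
    #include "rtc_base/time_utils.h"

    class BridgeSender : public cricket::MediaChannel::NetworkInterface {
    public:
        explicit BridgeSender(webrtc::Call *call) : call_(call) {}

        bool SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions &options) override {
            ForwardToTransport(*packet);  // hypothetical custom transport hop
            // Echo the send event back into the call for congestion control.
            call_->OnSentPacket(rtc::SentPacket(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent));
            return true;
        }
        bool SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions &options) override {
            return SendPacket(packet, options);
        }
        int SetOption(SocketType, rtc::Socket::Option, int) override { return -1; }

    private:
        void ForwardToTransport(const rtc::CopyOnWriteBuffer &) { /* stub */ }
        webrtc::Call *call_;
    };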
@@ -12,6 +12,9 @@

#include <memory>

+#import "VideoCameraCapturer.h"
+#import "VideoMetalView.h"
+
class MediaEngineWebrtc : public MediaEngineBase {
public:
    struct NetworkParams {
@@ -30,6 +33,7 @@ public:
    void OnSentPacket(const rtc::SentPacket& sent_packet);
    void SetNetworkParams(const NetworkParams& params);
    void SetMute(bool mute);
+   void AttachVideoView(VideoMetalView *videoView);

private:
    class Sender final : public cricket::MediaChannel::NetworkInterface {
@@ -73,6 +77,10 @@ private:
    std::unique_ptr<cricket::VoiceMediaChannel> voice_channel;
    std::unique_ptr<cricket::VideoMediaChannel> video_channel;
    std::unique_ptr<webrtc::VideoBitrateAllocatorFactory> video_bitrate_allocator_factory;
+   std::unique_ptr<rtc::Thread> signaling_thread;
+   std::unique_ptr<rtc::Thread> worker_thread;
+   rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
+   VideoCameraCapturer *_videoCapturer;
};
submodules/TgVoipWebrtc/Impl/MediaEngineWebrtc.mm (new file, 359 lines)
@@ -0,0 +1,359 @@
#include "MediaEngineWebrtc.h"
|
||||
|
||||
#include "absl/strings/match.h"
|
||||
#include "api/audio_codecs/audio_decoder_factory_template.h"
|
||||
#include "api/audio_codecs/audio_encoder_factory_template.h"
|
||||
#include "api/audio_codecs/opus/audio_decoder_opus.h"
|
||||
#include "api/audio_codecs/opus/audio_encoder_opus.h"
|
||||
#include "api/rtp_parameters.h"
|
||||
#include "api/task_queue/default_task_queue_factory.h"
|
||||
#include "media/base/codec.h"
|
||||
#include "media/base/media_constants.h"
|
||||
#include "media/engine/webrtc_media_engine.h"
|
||||
#include "modules/audio_device/include/audio_device_default.h"
|
||||
#include "rtc_base/task_utils/repeating_task.h"
|
||||
#include "system_wrappers/include/field_trial.h"
|
||||
#include "api/video/builtin_video_bitrate_allocator_factory.h"
|
||||
#include "api/video/video_bitrate_allocation.h"
|
||||
|
||||
#include "sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.h"
|
||||
#include "sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.h"
|
||||
#include "sdk/objc/native/api/video_encoder_factory.h"
|
||||
#include "sdk/objc/native/api/video_decoder_factory.h"
|
||||
|
||||
#if WEBRTC_ENABLE_PROTOBUF
|
||||
#include "modules/audio_coding/audio_network_adaptor/config.pb.h"
|
||||
#endif
|
||||
|
||||
#include "PlatformCodecs.h"
|
||||
|
||||
#include "sdk/objc/native/src/objc_video_track_source.h"
|
||||
#include "api/video_track_source_proxy.h"
|
||||
#include "sdk/objc/api/RTCVideoRendererAdapter.h"
|
||||
#include "sdk/objc/native/api/video_frame.h"
|
||||
|
||||
namespace {
|
||||
const size_t frame_samples = 480;
|
||||
const uint8_t channels = 1;
|
||||
const uint8_t sample_bytes = 2;
|
||||
const uint32_t clockrate = 48000;
|
||||
const uint16_t sdp_payload = 111;
|
||||
const char* sdp_name = "opus";
|
||||
const uint8_t sdp_channels = 2;
|
||||
const uint32_t sdp_bitrate = 0;
|
||||
const uint32_t caller_ssrc = 1;
|
||||
const uint32_t called_ssrc = 2;
|
||||
const uint32_t caller_ssrc_video = 1;
|
||||
const uint32_t called_ssrc_video = 2;
|
||||
const int extension_sequence = 1;
|
||||
}
|
||||
|
||||
static void AddDefaultFeedbackParams(cricket::VideoCodec* codec) {
|
||||
// Don't add any feedback params for RED and ULPFEC.
|
||||
if (codec->name == cricket::kRedCodecName || codec->name == cricket::kUlpfecCodecName)
|
||||
return;
|
||||
codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamRemb, cricket::kParamValueEmpty));
|
||||
codec->AddFeedbackParam(
|
||||
cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc, cricket::kParamValueEmpty));
|
||||
// Don't add any more feedback params for FLEXFEC.
|
||||
if (codec->name == cricket::kFlexfecCodecName)
|
||||
return;
|
||||
codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir));
|
||||
codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kParamValueEmpty));
|
||||
codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamNack, cricket::kRtcpFbNackParamPli));
|
||||
if (codec->name == cricket::kVp8CodecName &&
|
||||
webrtc::field_trial::IsEnabled("WebRTC-RtcpLossNotification")) {
|
||||
codec->AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamLntf, cricket::kParamValueEmpty));
|
||||
}
|
||||
}
|
||||
|
||||
static std::vector<cricket::VideoCodec> AssignPayloadTypesAndDefaultCodecs(std::vector<webrtc::SdpVideoFormat> input_formats) {
|
||||
if (input_formats.empty())
|
||||
return std::vector<cricket::VideoCodec>();
|
||||
static const int kFirstDynamicPayloadType = 96;
|
||||
static const int kLastDynamicPayloadType = 127;
|
||||
int payload_type = kFirstDynamicPayloadType;
|
||||
|
||||
//input_formats.push_back(webrtc::SdpVideoFormat(cricket::kH264CodecName));
|
||||
input_formats.push_back(webrtc::SdpVideoFormat(cricket::kRedCodecName));
|
||||
input_formats.push_back(webrtc::SdpVideoFormat(cricket::kUlpfecCodecName));
|
||||
|
||||
/*if (IsFlexfecAdvertisedFieldTrialEnabled()) {
|
||||
webrtc::SdpVideoFormat flexfec_format(kFlexfecCodecName);
|
||||
// This value is currently arbitrarily set to 10 seconds. (The unit
|
||||
// is microseconds.) This parameter MUST be present in the SDP, but
|
||||
// we never use the actual value anywhere in our code however.
|
||||
// TODO(brandtr): Consider honouring this value in the sender and receiver.
|
||||
flexfec_format.parameters = {{kFlexfecFmtpRepairWindow, "10000000"}};
|
||||
input_formats.push_back(flexfec_format);
|
||||
}*/
|
||||
|
||||
std::vector<cricket::VideoCodec> output_codecs;
|
||||
for (const webrtc::SdpVideoFormat& format : input_formats) {
|
||||
cricket::VideoCodec codec(format);
|
||||
codec.id = payload_type;
|
||||
AddDefaultFeedbackParams(&codec);
|
||||
output_codecs.push_back(codec);
|
||||
|
||||
// Increment payload type.
|
||||
++payload_type;
|
||||
if (payload_type > kLastDynamicPayloadType) {
|
||||
RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
|
||||
break;
|
||||
}
|
||||
|
||||
// Add associated RTX codec for non-FEC codecs.
|
||||
if (!absl::EqualsIgnoreCase(codec.name, cricket::kUlpfecCodecName) &&
|
||||
!absl::EqualsIgnoreCase(codec.name, cricket::kFlexfecCodecName)) {
|
||||
output_codecs.push_back(
|
||||
cricket::VideoCodec::CreateRtxCodec(payload_type, codec.id));
|
||||
|
||||
// Increment payload type.
|
||||
++payload_type;
|
||||
if (payload_type > kLastDynamicPayloadType) {
|
||||
RTC_LOG(LS_ERROR) << "Out of dynamic payload types, skipping the rest.";
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return output_codecs;
|
||||
}
|
||||
|
||||
MediaEngineWebrtc::MediaEngineWebrtc(bool outgoing, bool send, bool recv)
|
||||
: ssrc_send(outgoing ? caller_ssrc : called_ssrc)
|
||||
, ssrc_recv(outgoing ? called_ssrc : caller_ssrc)
|
||||
, ssrc_send_video(outgoing ? caller_ssrc_video : called_ssrc_video)
|
||||
, ssrc_recv_video(outgoing ? called_ssrc_video : caller_ssrc_video)
|
||||
, event_log(std::make_unique<webrtc::RtcEventLogNull>())
|
||||
, task_queue_factory(webrtc::CreateDefaultTaskQueueFactory())
|
||||
, data_sender(*this)
|
||||
, signaling_thread(rtc::Thread::Create())
|
||||
, worker_thread(rtc::Thread::Create()) {
|
||||
signaling_thread->Start();
|
||||
worker_thread->Start();
|
||||
|
||||
webrtc::field_trial::InitFieldTrialsFromString(
|
||||
"WebRTC-Audio-SendSideBwe/Enabled/"
|
||||
"WebRTC-Audio-Allocation/min:6kbps,max:32kbps/"
|
||||
"WebRTC-Audio-OpusMinPacketLossRate/Enabled-1/"
|
||||
);
|
||||
video_bitrate_allocator_factory = webrtc::CreateBuiltinVideoBitrateAllocatorFactory();
|
||||
cricket::MediaEngineDependencies media_deps;
|
||||
media_deps.task_queue_factory = task_queue_factory.get();
|
||||
media_deps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
|
||||
media_deps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
|
||||
|
||||
auto video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
|
||||
std::vector<cricket::VideoCodec> videoCodecs = AssignPayloadTypesAndDefaultCodecs(video_encoder_factory->GetSupportedFormats());
|
||||
|
||||
media_deps.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
|
||||
media_deps.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]);
|
||||
|
||||
media_deps.audio_processing = webrtc::AudioProcessingBuilder().Create();
|
||||
media_engine = cricket::CreateMediaEngine(std::move(media_deps));
|
||||
media_engine->Init();
|
||||
webrtc::Call::Config call_config(event_log.get());
|
||||
call_config.task_queue_factory = task_queue_factory.get();
|
||||
call_config.trials = &field_trials;
|
||||
call_config.audio_state = media_engine->voice().GetAudioState();
|
||||
call.reset(webrtc::Call::Create(call_config));
|
||||
voice_channel.reset(media_engine->voice().CreateMediaChannel(
|
||||
call.get(), cricket::MediaConfig(), cricket::AudioOptions(), webrtc::CryptoOptions::NoGcm()));
|
||||
video_channel.reset(media_engine->video().CreateMediaChannel(call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), video_bitrate_allocator_factory.get()));
|
||||
|
||||
if (false && send) {
|
||||
voice_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send));
|
||||
SetNetworkParams({6, 32, 6, 120, false, false, false});
|
||||
SetMute(false);
|
||||
voice_channel->SetInterface(&data_sender, webrtc::MediaTransportConfig());
|
||||
voice_channel->OnReadyToSend(true);
|
||||
voice_channel->SetSend(true);
|
||||
}
|
||||
if (send) {
|
||||
video_channel->AddSendStream(cricket::StreamParams::CreateLegacy(ssrc_send_video));
|
||||
|
||||
for (auto codec : videoCodecs) {
|
||||
if (codec.id == 96 && codec.name == cricket::kH264CodecName) {
|
||||
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
|
||||
_nativeVideoSource = webrtc::VideoTrackSourceProxy::Create(signaling_thread.get(), worker_thread.get(), objCVideoTrackSource);
|
||||
|
||||
codec.SetParam(cricket::kCodecParamMinBitrate, 300000);
|
||||
codec.SetParam(cricket::kCodecParamStartBitrate, 300000);
|
||||
codec.SetParam(cricket::kCodecParamMaxBitrate, 600000);
|
||||
|
||||
#if TARGET_IPHONE_SIMULATOR
|
||||
#else
|
||||
_videoCapturer = [[VideoCameraCapturer alloc] initWithSource:_nativeVideoSource];
|
||||
|
||||
AVCaptureDevice *frontCamera = nil;
|
||||
for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) {
|
||||
if (device.position == AVCaptureDevicePositionFront) {
|
||||
frontCamera = device;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (frontCamera == nil) {
|
||||
assert(false);
|
||||
return;
|
||||
}
|
||||
|
||||
NSArray<AVCaptureDeviceFormat *> *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:frontCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) {
|
||||
int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width;
|
||||
int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width;
|
||||
return width1 < width2 ? NSOrderedAscending : NSOrderedDescending;
|
||||
}];
|
||||
|
||||
AVCaptureDeviceFormat *bestFormat = nil;
|
||||
for (AVCaptureDeviceFormat *format in sortedFormats) {
|
||||
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
|
||||
if (dimensions.width >= 1000 || dimensions.height >= 1000) {
|
||||
bestFormat = format;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (bestFormat == nil) {
|
||||
assert(false);
|
||||
return;
|
||||
}
|
||||
|
||||
AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) {
|
||||
if (lhs.maxFrameRate < rhs.maxFrameRate) {
|
||||
return NSOrderedAscending;
|
||||
} else {
|
||||
return NSOrderedDescending;
|
||||
}
|
||||
}] lastObject];
|
||||
|
||||
if (frameRateRange == nil) {
|
||||
assert(false);
|
||||
return;
|
||||
}
|
||||
|
||||
[_videoCapturer startCaptureWithDevice:frontCamera format:bestFormat fps:27];
|
||||
#endif
|
||||
|
||||
cricket::VideoSendParameters send_parameters;
|
||||
send_parameters.codecs.push_back(codec);
|
||||
send_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
|
||||
//send_parameters.options.echo_cancellation = params.echo_cancellation;
|
||||
//send_parameters.options.noise_suppression = params.noise_suppression;
|
||||
//send_parameters.options.auto_gain_control = params.auto_gain_control;
|
||||
//send_parameters.options.highpass_filter = false;
|
||||
//send_parameters.options.typing_detection = false;
|
||||
send_parameters.max_bandwidth_bps = 300000;
|
||||
send_parameters.rtcp.reduced_size = true;
|
||||
send_parameters.rtcp.remote_estimate = true;
|
||||
video_channel->SetSendParameters(send_parameters);
|
||||
|
||||
video_channel->SetVideoSend(ssrc_send_video, NULL, _nativeVideoSource.get());
|
||||
|
||||
video_channel->SetInterface(&data_sender, webrtc::MediaTransportConfig());
|
||||
video_channel->OnReadyToSend(true);
|
||||
video_channel->SetSend(true);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (false && recv) {
|
||||
cricket::AudioRecvParameters recv_parameters;
|
||||
recv_parameters.codecs.emplace_back(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
|
||||
recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
|
||||
recv_parameters.rtcp.reduced_size = true;
|
||||
recv_parameters.rtcp.remote_estimate = true;
|
||||
voice_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv));
|
||||
voice_channel->SetRecvParameters(recv_parameters);
|
||||
voice_channel->SetPlayout(true);
|
||||
}
|
||||
if (recv) {
|
||||
for (auto codec : videoCodecs) {
|
||||
if (codec.id == 96 && codec.name == cricket::kH264CodecName) {
|
||||
codec.SetParam(cricket::kCodecParamMinBitrate, 300000);
|
||||
codec.SetParam(cricket::kCodecParamStartBitrate, 300000);
|
||||
codec.SetParam(cricket::kCodecParamMaxBitrate, 600000);
|
||||
|
||||
cricket::VideoRecvParameters recv_parameters;
|
||||
recv_parameters.codecs.emplace_back(codec);
|
||||
recv_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
|
||||
recv_parameters.rtcp.reduced_size = true;
|
||||
recv_parameters.rtcp.remote_estimate = true;
|
||||
video_channel->AddRecvStream(cricket::StreamParams::CreateLegacy(ssrc_recv_video));
|
||||
video_channel->SetRecvParameters(recv_parameters);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
MediaEngineWebrtc::~MediaEngineWebrtc() = default;
|
||||
|
||||
void MediaEngineWebrtc::Receive(rtc::CopyOnWriteBuffer packet) {
|
||||
if (voice_channel) {
|
||||
//voice_channel->OnPacketReceived(packet, -1);
|
||||
}
|
||||
if (video_channel) {
|
||||
video_channel->OnPacketReceived(packet, -1);
|
||||
}
|
||||
}
|
||||
|
||||
void MediaEngineWebrtc::OnSentPacket(const rtc::SentPacket& sent_packet) {
|
||||
call->OnSentPacket(sent_packet);
|
||||
}
|
||||
|
||||
void MediaEngineWebrtc::SetNetworkParams(const MediaEngineWebrtc::NetworkParams& params) {
|
||||
cricket::AudioCodec opus_codec(sdp_payload, sdp_name, clockrate, sdp_bitrate, sdp_channels);
|
||||
opus_codec.AddFeedbackParam(cricket::FeedbackParam(cricket::kRtcpFbParamTransportCc));
|
||||
opus_codec.SetParam(cricket::kCodecParamMinBitrate, params.min_bitrate_kbps);
|
||||
opus_codec.SetParam(cricket::kCodecParamStartBitrate, params.start_bitrate_kbps);
|
||||
opus_codec.SetParam(cricket::kCodecParamMaxBitrate, params.max_bitrate_kbps);
|
||||
opus_codec.SetParam(cricket::kCodecParamUseInbandFec, 1);
|
||||
opus_codec.SetParam(cricket::kCodecParamPTime, params.ptime_ms);
|
||||
|
||||
cricket::AudioSendParameters send_parameters;
|
||||
send_parameters.codecs.push_back(opus_codec);
|
||||
send_parameters.extensions.emplace_back(webrtc::RtpExtension::kTransportSequenceNumberUri, extension_sequence);
|
||||
send_parameters.options.echo_cancellation = params.echo_cancellation;
|
||||
// send_parameters.options.experimental_ns = false;
|
||||
send_parameters.options.noise_suppression = params.noise_suppression;
|
||||
send_parameters.options.auto_gain_control = params.auto_gain_control;
|
||||
send_parameters.options.highpass_filter = false;
|
||||
send_parameters.options.typing_detection = false;
|
||||
// send_parameters.max_bandwidth_bps = 16000;
|
||||
send_parameters.rtcp.reduced_size = true;
|
||||
send_parameters.rtcp.remote_estimate = true;
|
||||
voice_channel->SetSendParameters(send_parameters);
|
||||
}
|
||||
|
||||
void MediaEngineWebrtc::SetMute(bool mute) {
|
||||
voice_channel->SetAudioSend(ssrc_send, !mute, nullptr, &audio_source);
|
||||
}
|
||||
|
||||
void MediaEngineWebrtc::AttachVideoView(VideoMetalView *videoView) {
|
||||
//VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
|
||||
//remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
|
||||
|
||||
video_channel->SetSink(ssrc_recv_video, [videoView getSink]);
|
||||
}
|
||||
|
||||
bool MediaEngineWebrtc::Sender::SendPacket(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
|
||||
engine.Send(*packet);
|
||||
rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent);
|
||||
engine.OnSentPacket(sent_packet);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MediaEngineWebrtc::Sender::SendRtcp(rtc::CopyOnWriteBuffer *packet, const rtc::PacketOptions& options) {
|
||||
engine.Send(*packet);
|
||||
rtc::SentPacket sent_packet(options.packet_id, rtc::TimeMillis(), options.info_signaled_after_sent);
|
||||
engine.OnSentPacket(sent_packet);
|
||||
return true;
|
||||
}
|
||||
|
||||
int MediaEngineWebrtc::Sender::SetOption(cricket::MediaChannel::NetworkInterface::SocketType, rtc::Socket::Option, int) {
|
||||
return -1; // in general, the result is not important yet
|
||||
}
|
||||
|
||||
MediaEngineWebrtc::Sender::Sender(MediaEngineWebrtc& engine) : engine(engine) {}
|
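
Two details of the new file worth calling out. First, AssignPayloadTypesAndDefaultCodecs implements the standard dynamic payload-type scheme: types run from 96 to 127, and every non-FEC codec is immediately followed by an RTX retransmission codec referencing it. A condensed sketch of that loop (FEC special-casing and feedback params omitted; AssignPayloadTypes is an illustrative name, not the function above):

    #include <vector>
    #include "api/video_codecs/sdp_video_format.h"
    #include "media/base/codec.h"

    std::vector<cricket::VideoCodec> AssignPayloadTypes(const std::vector<webrtc::SdpVideoFormat> &formats) {
        std::vector<cricket::VideoCodec> codecs;
        int payload_type = 96;  // first dynamic payload type
        for (const webrtc::SdpVideoFormat &format : formats) {
            if (payload_type > 127) break;  // dynamic range exhausted
            cricket::VideoCodec codec(format);
            codec.id = payload_type++;
            codecs.push_back(codec);
            if (payload_type <= 127) {
                // RTX codec whose "apt" parameter points back at the codec above.
                codecs.push_back(cricket::VideoCodec::CreateRtxCodec(payload_type++, codec.id));
            }
        }
        return codecs;
    }

Second, note that camera selection and capture setup now happen inline in the constructor under if (send), while the if (false && send) / if (false && recv) branches disable the audio path entirely during this video bring-up.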
submodules/TgVoipWebrtc/Impl/PlatformCodecs.h (new file, 6 lines)
@@ -0,0 +1,6 @@
#ifndef PLATFORM_CODECS_H
#define PLATFORM_CODECS_H



#endif //PLATFORM_CODECS_H
submodules/TgVoipWebrtc/Impl/PlatformCodecs.mm (new file, 2 lines)
@@ -0,0 +1,2 @@
#include "PlatformCodecs.h"
|
||||
|
@@ -8,6 +8,8 @@
#include <string>
#include <memory>

+#import "VideoMetalView.h"
+
#ifdef TGVOIP_NAMESPACE
namespace TGVOIP_NAMESPACE {
#endif
@@ -159,6 +161,8 @@ public:
    virtual void setMuteMicrophone(bool muteMicrophone) = 0;
    virtual void setAudioOutputGainControlEnabled(bool enabled) = 0;
    virtual void setEchoCancellationStrength(int strength) = 0;

+   virtual void AttachVideoView(VideoMetalView *videoView) = 0;
+
    virtual std::string getLastError() = 0;
    virtual std::string getDebugInfo() = 0;
@@ -232,6 +232,10 @@ public:
    void setMuteMicrophone(bool muteMicrophone) override {
        controller_->SetMute(muteMicrophone);
    }

+   void AttachVideoView(VideoMetalView *videoView) override {
+       controller_->AttachVideoView(videoView);
+   }
+
    void setAudioOutputGainControlEnabled(bool enabled) override {
    }
submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.h (new file, 23 lines)
@@ -0,0 +1,23 @@
#ifndef VIDEOCAMERACAPTURER_H
#define VIDEOCAMERACAPTURER_H

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

#include <memory>
#include "api/scoped_refptr.h"
#include "api/media_stream_interface.h"

@interface VideoCameraCapturer : NSObject

+ (NSArray<AVCaptureDevice *> *)captureDevices;
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device;

- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source;

- (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps;
- (void)stopCapture;

@end

#endif
submodules/TgVoipWebrtc/Impl/VideoCameraCapturer.mm (new file, 459 lines)
@@ -0,0 +1,459 @@
#include "VideoCameraCapturer.h"
|
||||
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
|
||||
#import "base/RTCLogging.h"
|
||||
#import "base/RTCVideoFrameBuffer.h"
|
||||
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
|
||||
#import "sdk/objc/native/src/objc_video_track_source.h"
|
||||
#import "api/video_track_source_proxy.h"
|
||||
|
||||
#import "helpers/UIDevice+RTCDevice.h"
|
||||
|
||||
#import "helpers/AVCaptureSession+DevicePosition.h"
|
||||
#import "helpers/RTCDispatcher+Private.h"
|
||||
#import "base/RTCVideoFrame.h"
|
||||
|
||||
static const int64_t kNanosecondsPerSecond = 1000000000;
|
||||
|
||||
static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
|
||||
webrtc::VideoTrackSourceProxy *proxy_source =
|
||||
static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
|
||||
return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
|
||||
}
|
||||
|
||||
@interface VideoCameraCapturer () <AVCaptureVideoDataOutputSampleBufferDelegate> {
|
||||
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _source;
|
||||
|
||||
dispatch_queue_t _frameQueue;
|
||||
AVCaptureDevice *_currentDevice;
|
||||
BOOL _hasRetriedOnFatalError;
|
||||
BOOL _isRunning;
|
||||
BOOL _willBeRunning;
|
||||
|
||||
AVCaptureVideoDataOutput *_videoDataOutput;
|
||||
AVCaptureSession *_captureSession;
|
||||
FourCharCode _preferredOutputPixelFormat;
|
||||
FourCharCode _outputPixelFormat;
|
||||
RTCVideoRotation _rotation;
|
||||
UIDeviceOrientation _orientation;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation VideoCameraCapturer
|
||||
|
||||
- (instancetype)initWithSource:(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)source {
|
||||
self = [super init];
|
||||
if (self != nil) {
|
||||
_source = source;
|
||||
if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) {
|
||||
return nil;
|
||||
}
|
||||
|
||||
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
|
||||
_orientation = UIDeviceOrientationPortrait;
|
||||
_rotation = RTCVideoRotation_90;
|
||||
[center addObserver:self
|
||||
selector:@selector(deviceOrientationDidChange:)
|
||||
name:UIDeviceOrientationDidChangeNotification
|
||||
object:nil];
|
||||
[center addObserver:self
|
||||
selector:@selector(handleCaptureSessionInterruption:)
|
||||
name:AVCaptureSessionWasInterruptedNotification
|
||||
object:_captureSession];
|
||||
[center addObserver:self
|
||||
selector:@selector(handleCaptureSessionInterruptionEnded:)
|
||||
name:AVCaptureSessionInterruptionEndedNotification
|
||||
object:_captureSession];
|
||||
[center addObserver:self
|
||||
selector:@selector(handleApplicationDidBecomeActive:)
|
||||
name:UIApplicationDidBecomeActiveNotification
|
||||
object:[UIApplication sharedApplication]];
|
||||
[center addObserver:self
|
||||
selector:@selector(handleCaptureSessionRuntimeError:)
|
||||
name:AVCaptureSessionRuntimeErrorNotification
|
||||
object:_captureSession];
|
||||
[center addObserver:self
|
||||
selector:@selector(handleCaptureSessionDidStartRunning:)
|
||||
name:AVCaptureSessionDidStartRunningNotification
|
||||
object:_captureSession];
|
||||
[center addObserver:self
|
||||
selector:@selector(handleCaptureSessionDidStopRunning:)
|
||||
name:AVCaptureSessionDidStopRunningNotification
|
||||
object:_captureSession];
|
||||
}
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
NSAssert(!_willBeRunning, @"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self];
|
||||
}
|
||||
|
||||
+ (NSArray<AVCaptureDevice *> *)captureDevices {
|
||||
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
|
||||
discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
|
||||
mediaType:AVMediaTypeVideo
|
||||
position:AVCaptureDevicePositionUnspecified];
|
||||
return session.devices;
|
||||
}
|
||||
|
||||
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
|
||||
// Support opening the device in any format. We make sure it's converted to a format we
|
||||
// can handle, if needed, in the method `-setupVideoDataOutput`.
|
||||
return device.formats;
|
||||
}
|
||||
|
||||
- (FourCharCode)preferredOutputPixelFormat {
|
||||
return _preferredOutputPixelFormat;
|
||||
}
|
||||
|
||||
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
|
||||
format:(AVCaptureDeviceFormat *)format
|
||||
fps:(NSInteger)fps {
|
||||
[self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
|
||||
}
|
||||
|
||||
- (void)stopCapture {
|
||||
[self stopCaptureWithCompletionHandler:nil];
|
||||
}
|
||||
|
||||
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
|
||||
format:(AVCaptureDeviceFormat *)format
|
||||
fps:(NSInteger)fps
|
||||
completionHandler:(nullable void (^)(NSError *))completionHandler {
|
||||
_willBeRunning = YES;
|
||||
[RTCDispatcher
|
||||
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
|
||||
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
|
||||
});
|
||||
|
||||
_currentDevice = device;
|
||||
|
||||
NSError *error = nil;
|
||||
if (![_currentDevice lockForConfiguration:&error]) {
|
||||
RTCLogError(@"Failed to lock device %@. Error: %@",
|
||||
_currentDevice,
|
||||
error.userInfo);
|
||||
if (completionHandler) {
|
||||
completionHandler(error);
|
||||
}
|
||||
_willBeRunning = NO;
|
||||
return;
|
||||
}
|
||||
[self reconfigureCaptureSessionInput];
|
||||
[self updateOrientation];
|
||||
[self updateDeviceCaptureFormat:format fps:fps];
|
||||
[self updateVideoDataOutputPixelFormat:format];
|
||||
[_captureSession startRunning];
|
||||
[_currentDevice unlockForConfiguration];
|
||||
_isRunning = YES;
|
||||
if (completionHandler) {
|
||||
completionHandler(nil);
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
|
||||
_willBeRunning = NO;
|
||||
[RTCDispatcher
|
||||
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
RTCLogInfo("Stop");
|
||||
_currentDevice = nil;
|
||||
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
|
||||
[_captureSession removeInput:oldInput];
|
||||
}
|
||||
[_captureSession stopRunning];
|
||||
|
||||
dispatch_async(dispatch_get_main_queue(), ^{
|
||||
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
|
||||
});
|
||||
_isRunning = NO;
|
||||
if (completionHandler) {
|
||||
completionHandler();
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark iOS notifications
|
||||
|
||||
#if TARGET_OS_IPHONE
|
||||
- (void)deviceOrientationDidChange:(NSNotification *)notification {
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
[self updateOrientation];
|
||||
}];
|
||||
}
|
||||
#endif
|
||||
|
||||
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
fromConnection:(AVCaptureConnection *)connection {
|
||||
NSParameterAssert(captureOutput == _videoDataOutput);
|
||||
|
||||
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
|
||||
!CMSampleBufferDataIsReady(sampleBuffer)) {
|
||||
return;
|
||||
}
|
||||
|
||||
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
|
||||
if (pixelBuffer == nil) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Default to portrait orientation on iPhone.
|
||||
BOOL usingFrontCamera = NO;
|
||||
// Check the image's EXIF for the camera the image came from as the image could have been
|
||||
// delayed as we set alwaysDiscardsLateVideoFrames to NO.
|
||||
AVCaptureDevicePosition cameraPosition =
|
||||
[AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
|
||||
if (cameraPosition != AVCaptureDevicePositionUnspecified) {
|
||||
usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
|
||||
} else {
|
||||
AVCaptureDeviceInput *deviceInput =
|
||||
(AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
|
||||
usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
|
||||
}
|
||||
switch (_orientation) {
|
||||
case UIDeviceOrientationPortrait:
|
||||
_rotation = RTCVideoRotation_90;
|
||||
break;
|
||||
case UIDeviceOrientationPortraitUpsideDown:
|
||||
_rotation = RTCVideoRotation_270;
|
||||
break;
|
||||
case UIDeviceOrientationLandscapeLeft:
|
||||
_rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
|
||||
break;
|
||||
case UIDeviceOrientationLandscapeRight:
|
||||
_rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
|
||||
break;
|
||||
case UIDeviceOrientationFaceUp:
|
||||
case UIDeviceOrientationFaceDown:
|
||||
case UIDeviceOrientationUnknown:
|
||||
// Ignore.
|
||||
break;
|
||||
}
|
||||
|
||||
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
|
||||
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
|
||||
kNanosecondsPerSecond;
|
||||
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
|
||||
rotation:_rotation
|
||||
timeStampNs:timeStampNs];
|
||||
getObjCVideoSource(_source)->OnCapturedFrame(videoFrame);
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
||||
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
fromConnection:(AVCaptureConnection *)connection {
|
||||
NSString *droppedReason =
|
||||
(__bridge NSString *)CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil);
|
||||
RTCLogError(@"Dropped sample buffer. Reason: %@", droppedReason);
|
||||
}
|
||||
|
||||
#pragma mark - AVCaptureSession notifications
|
||||
|
||||
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
|
||||
NSString *reasonString = nil;
|
||||
NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
|
||||
if (reason) {
|
||||
switch (reason.intValue) {
|
||||
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
|
||||
reasonString = @"VideoDeviceNotAvailableInBackground";
|
||||
break;
|
||||
case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
|
||||
reasonString = @"AudioDeviceInUseByAnotherClient";
|
||||
break;
|
||||
case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
|
||||
reasonString = @"VideoDeviceInUseByAnotherClient";
|
||||
break;
|
||||
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
|
||||
reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
|
||||
break;
|
||||
}
|
||||
}
|
||||
RTCLog(@"Capture session interrupted: %@", reasonString);
|
||||
}
|
||||
|
||||
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
|
||||
RTCLog(@"Capture session interruption ended.");
|
||||
}
|
||||
|
||||
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
|
||||
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
|
||||
RTCLogError(@"Capture session runtime error: %@", error);
|
||||
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
if (error.code == AVErrorMediaServicesWereReset) {
|
||||
[self handleNonFatalError];
|
||||
} else {
|
||||
[self handleFatalError];
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
|
||||
RTCLog(@"Capture session started.");
|
||||
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
// If we successfully restarted after an unknown error,
|
||||
// allow future retries on fatal errors.
|
||||
_hasRetriedOnFatalError = NO;
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
|
||||
RTCLog(@"Capture session stopped.");
|
||||
}
|
||||
|
||||
- (void)handleFatalError {
|
||||
[RTCDispatcher
|
||||
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
if (!_hasRetriedOnFatalError) {
|
||||
RTCLogWarning(@"Attempting to recover from fatal capture error.");
|
||||
[self handleNonFatalError];
|
||||
_hasRetriedOnFatalError = YES;
|
||||
} else {
|
||||
RTCLogError(@"Previous fatal error recovery failed.");
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
- (void)handleNonFatalError {
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
RTCLog(@"Restarting capture session after error.");
|
||||
if (_isRunning) {
|
||||
[_captureSession startRunning];
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - UIApplication notifications
|
||||
|
||||
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
|
||||
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
|
||||
block:^{
|
||||
if (_isRunning && !_captureSession.isRunning) {
|
||||
RTCLog(@"Restarting capture session on active.");
|
||||
[_captureSession startRunning];
|
||||
}
|
||||
}];
|
||||
}
|
||||
|
||||
#pragma mark - Private
|
||||
|
||||
- (dispatch_queue_t)frameQueue {
|
||||
if (!_frameQueue) {
|
||||
_frameQueue =
|
||||
dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
|
||||
dispatch_set_target_queue(_frameQueue,
|
||||
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
|
||||
}
|
||||
return _frameQueue;
|
||||
}
- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
  NSAssert(_captureSession == nil, @"Setup capture session called twice.");
  _captureSession = captureSession;
  _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
  _captureSession.usesApplicationAudioSession = NO;
  [self setupVideoDataOutput];
  // Add the output.
  if (![_captureSession canAddOutput:_videoDataOutput]) {
    RTCLogError(@"Video data output unsupported.");
    return NO;
  }
  [_captureSession addOutput:_videoDataOutput];

  return YES;
}

- (void)setupVideoDataOutput {
  NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
  AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

  // `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
  // device with the most efficient output format first. Find the first format that we support.
  NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
  NSMutableOrderedSet *availablePixelFormats =
      [NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
  [availablePixelFormats intersectSet:supportedPixelFormats];
  NSNumber *pixelFormat = availablePixelFormats.firstObject;
  NSAssert(pixelFormat, @"Output device has no supported formats.");

  _preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
  _outputPixelFormat = _preferredOutputPixelFormat;
  videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
  videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
  [videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
  _videoDataOutput = videoDataOutput;
}
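Because the device lists its most efficient format first, this intersection typically selects an NV12 (bi-planar 4:2:0) variant on iOS hardware; the BGRA branch in VideoMetalView further down only comes into play for RGB buffers produced elsewhere in the pipeline.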
- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
  FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
  if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
    mediaSubType = _preferredOutputPixelFormat;
  }

  if (mediaSubType != _outputPixelFormat) {
    _outputPixelFormat = mediaSubType;
    _videoDataOutput.videoSettings =
        @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
  }
}

#pragma mark - Private, called inside capture queue

- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateDeviceCaptureFormat must be called on the capture queue.");
  @try {
    _currentDevice.activeFormat = format;
    _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)fps);
  } @catch (NSException *exception) {
    RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
    return;
  }
}
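updateDeviceCaptureFormat:fps: assumes the caller has already picked a compatible AVCaptureDeviceFormat and, per AVFoundation rules, holds lockForConfiguration: on the device while mutating activeFormat. A minimal sketch of such a picker; the helper name is illustrative, not part of this commit:

// Hypothetical helper: first device format whose frame-rate ranges cover `fps`.
static AVCaptureDeviceFormat *FormatSupportingFps(AVCaptureDevice *device, NSInteger fps) {
  for (AVCaptureDeviceFormat *format in device.formats) {
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
      if (range.minFrameRate <= fps && fps <= range.maxFrameRate) {
        return format;
      }
    }
  }
  return nil;
}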
- (void)reconfigureCaptureSessionInput {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"reconfigureCaptureSessionInput must be called on the capture queue.");
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
  if (!input) {
    RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
    return;
  }
  [_captureSession beginConfiguration];
  for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
    [_captureSession removeInput:oldInput];
  }
  if ([_captureSession canAddInput:input]) {
    [_captureSession addInput:input];
  } else {
    RTCLogError(@"Cannot add camera as an input to the session.");
  }
  [_captureSession commitConfiguration];
}

- (void)updateOrientation {
  NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
           @"updateOrientation must be called on the capture queue.");
  _orientation = [UIDevice currentDevice].orientation;
}

@end
26 submodules/TgVoipWebrtc/Impl/VideoMetalView.h Normal file
@ -0,0 +1,26 @@
#ifndef VIDEOMETALVIEW_H
#define VIDEOMETALVIEW_H

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

#import "api/media_stream_interface.h"

@class RTCVideoFrame;

@interface VideoMetalView : UIView

@property(nonatomic) UIViewContentMode videoContentMode;
@property(nonatomic, getter=isEnabled) BOOL enabled;
@property(nonatomic, nullable) NSValue* rotationOverride;

- (void)setSize:(CGSize)size;
- (void)renderFrame:(nullable RTCVideoFrame *)frame;

- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track;

- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)getSink;

@end

#endif
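A usage sketch for this interface, assuming a live webrtc::VideoTrackInterface obtained elsewhere; containerView and videoTrack are illustrative names, not part of this commit:

VideoMetalView *remoteView = [[VideoMetalView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
remoteView.videoContentMode = UIViewContentModeScaleAspectFill;
[containerView addSubview:remoteView];  // containerView: any UIView already on screen
[remoteView addToTrack:videoTrack];     // videoTrack: rtc::scoped_refptr<webrtc::VideoTrackInterface>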
278 submodules/TgVoipWebrtc/Impl/VideoMetalView.mm Normal file
@ -0,0 +1,278 @@
#import "VideoMetalView.h"

#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>

#import "base/RTCLogging.h"
#import "base/RTCVideoFrame.h"
#import "base/RTCVideoFrameBuffer.h"
#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
#include "sdk/objc/native/api/video_frame.h"

#import "api/video/video_sink_interface.h"
#import "api/media_stream_interface.h"

#import "RTCMTLI420Renderer.h"
#import "RTCMTLNV12Renderer.h"
#import "RTCMTLRGBRenderer.h"

#define MTKViewClass NSClassFromString(@"MTKView")
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")

class VideoRendererAdapterImpl : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
    VideoRendererAdapterImpl(VideoMetalView *adapter) {
        adapter_ = adapter;
        size_ = CGSizeZero;
    }

    void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
        RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);

        CGSize current_size = (videoFrame.rotation % 180 == 0) ? CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width);

        if (!CGSizeEqualToSize(size_, current_size)) {
            size_ = current_size;
            [adapter_ setSize:size_];
        }
        [adapter_ renderFrame:videoFrame];
    }

private:
    __weak VideoMetalView *adapter_;
    CGSize size_;
};
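Because adapter_ is declared __weak, this C++ sink never extends the view's lifetime: once the VideoMetalView deallocates, the setSize: and renderFrame: calls become messages to nil and frames are silently dropped, so WebRTC's worker thread can keep delivering frames safely during teardown.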
@interface VideoMetalView () <MTKViewDelegate> {
    RTCMTLI420Renderer *_rendererI420;
    RTCMTLNV12Renderer *_rendererNV12;
    RTCMTLRGBRenderer *_rendererRGB;
    MTKView *_metalView;
    RTCVideoFrame *_videoFrame;
    CGSize _videoFrameSize;
    int64_t _lastFrameTimeNs;

    std::unique_ptr<VideoRendererAdapterImpl> _sink;
}

@end

@implementation VideoMetalView

- (instancetype)initWithFrame:(CGRect)frameRect {
    self = [super initWithFrame:frameRect];
    if (self) {
        [self configure];

        _sink.reset(new VideoRendererAdapterImpl(self));
    }
    return self;
}

- (BOOL)isEnabled {
    return !_metalView.paused;
}

- (void)setEnabled:(BOOL)enabled {
    _metalView.paused = !enabled;
}

- (UIViewContentMode)videoContentMode {
    return _metalView.contentMode;
}

- (void)setVideoContentMode:(UIViewContentMode)mode {
    _metalView.contentMode = mode;
}

#pragma mark - Private

+ (BOOL)isMetalAvailable {
    return MTLCreateSystemDefaultDevice() != nil;
}

+ (MTKView *)createMetalView:(CGRect)frame {
    return [[MTKViewClass alloc] initWithFrame:frame];
}

+ (RTCMTLNV12Renderer *)createNV12Renderer {
    return [[RTCMTLNV12RendererClass alloc] init];
}

+ (RTCMTLI420Renderer *)createI420Renderer {
    return [[RTCMTLI420RendererClass alloc] init];
}

+ (RTCMTLRGBRenderer *)createRGBRenderer {
    return [[RTCMTLRGBRendererClass alloc] init];
}

- (void)configure {
    NSAssert([VideoMetalView isMetalAvailable], @"Metal not available on this device");

    _metalView = [VideoMetalView createMetalView:self.bounds];
    _metalView.delegate = self;
    _metalView.contentMode = UIViewContentModeScaleAspectFill;
    [self addSubview:_metalView];
    _videoFrameSize = CGSizeZero;
}
- (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled {
    [super setMultipleTouchEnabled:multipleTouchEnabled];
    _metalView.multipleTouchEnabled = multipleTouchEnabled;
}

- (void)layoutSubviews {
    [super layoutSubviews];

    CGRect bounds = self.bounds;
    _metalView.frame = bounds;
    if (!CGSizeEqualToSize(_videoFrameSize, CGSizeZero)) {
        _metalView.drawableSize = [self drawableSize];
    } else {
        _metalView.drawableSize = bounds.size;
    }
}

#pragma mark - MTKViewDelegate methods

- (void)drawInMTKView:(nonnull MTKView *)view {
    NSAssert(view == _metalView, @"Receiving draw callbacks from foreign instance.");
    RTCVideoFrame *videoFrame = _videoFrame;
    // Skip rendering if we've already rendered this frame.
    if (!videoFrame || videoFrame.timeStampNs == _lastFrameTimeNs) {
        return;
    }

    if (CGRectIsEmpty(view.bounds)) {
        return;
    }

    RTCMTLRenderer *renderer;
    if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
        RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
        const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
        if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
            if (!_rendererRGB) {
                _rendererRGB = [VideoMetalView createRGBRenderer];
                if (![_rendererRGB addRenderingDestination:_metalView]) {
                    _rendererRGB = nil;
                    RTCLogError(@"Failed to create RGB renderer");
                    return;
                }
            }
            renderer = _rendererRGB;
        } else {
            if (!_rendererNV12) {
                _rendererNV12 = [VideoMetalView createNV12Renderer];
                if (![_rendererNV12 addRenderingDestination:_metalView]) {
                    _rendererNV12 = nil;
                    RTCLogError(@"Failed to create NV12 renderer");
                    return;
                }
            }
            renderer = _rendererNV12;
        }
    } else {
        if (!_rendererI420) {
            _rendererI420 = [VideoMetalView createI420Renderer];
            if (![_rendererI420 addRenderingDestination:_metalView]) {
                _rendererI420 = nil;
                RTCLogError(@"Failed to create I420 renderer");
                return;
            }
        }
        renderer = _rendererI420;
    }

    renderer.rotationOverride = _rotationOverride;

    [renderer drawFrame:videoFrame];
    _lastFrameTimeNs = videoFrame.timeStampNs;
}

- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}

#pragma mark -

- (void)setRotationOverride:(NSValue *)rotationOverride {
    _rotationOverride = rotationOverride;

    _metalView.drawableSize = [self drawableSize];
    [self setNeedsLayout];
}

- (RTCVideoRotation)frameRotation {
    if (_rotationOverride) {
        RTCVideoRotation rotation;
        if (@available(iOS 11, *)) {
            [_rotationOverride getValue:&rotation size:sizeof(rotation)];
        } else {
            [_rotationOverride getValue:&rotation];
        }
        return rotation;
    }

    return _videoFrame.rotation;
}

- (CGSize)drawableSize {
    // Flip width/height if the rotations are not the same.
    CGSize videoFrameSize = _videoFrameSize;
    RTCVideoRotation frameRotation = [self frameRotation];

    BOOL useLandscape =
        (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
    BOOL sizeIsLandscape = (_videoFrame.rotation == RTCVideoRotation_0) ||
        (_videoFrame.rotation == RTCVideoRotation_180);

    if (useLandscape == sizeIsLandscape) {
        return videoFrameSize;
    } else {
        return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
    }
}
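Worked example: a 640x480 frame delivered with RTCVideoRotation_90 reaches setSize: as 480x640 (the adapter pre-swaps width and height), so with no override both rotation checks agree and drawableSize returns 480x640; overriding the rotation to RTCVideoRotation_0 flips useLandscape and the drawable becomes 640x480.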
#pragma mark - RTCVideoRenderer

- (void)setSize:(CGSize)size {
    __weak VideoMetalView *weakSelf = self;
    dispatch_async(dispatch_get_main_queue(), ^{
        __strong VideoMetalView *strongSelf = weakSelf;
        if (strongSelf == nil) {
            return;
        }

        strongSelf->_videoFrameSize = size;
        CGSize drawableSize = [strongSelf drawableSize];

        strongSelf->_metalView.drawableSize = drawableSize;
        [strongSelf setNeedsLayout];
        //[strongSelf.delegate videoView:self didChangeVideoSize:size];
    });
}

- (void)renderFrame:(nullable RTCVideoFrame *)frame {
    if (!self.isEnabled) {
        return;
    }

    if (frame == nil) {
        RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
        return;
    }
    _videoFrame = frame;
}

- (void)addToTrack:(rtc::scoped_refptr<webrtc::VideoTrackInterface>)track {
    track->AddOrUpdateSink(_sink.get(), rtc::VideoSinkWants());
}

- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)getSink {
    return _sink.get();
}

@end
0 submodules/TgVoipWebrtc/Impl/VideoRendererAdapter.h Normal file
@ -2,6 +2,7 @@
#define OngoingCallContext_h

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

@interface OngoingCallConnectionDescriptionWebrtc : NSObject

@ -75,6 +76,7 @@ typedef NS_ENUM(int32_t, OngoingCallDataSavingWebrtc) {

- (void)setIsMuted:(bool)isMuted;
- (void)setNetworkType:(OngoingCallNetworkTypeWebrtc)networkType;
- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion;

@end
@ -322,5 +322,18 @@ static void (*InternalVoipLoggingFunction)(NSString *) = NULL;
    }
}

- (void)getRemoteCameraView:(void (^_Nonnull)(UIView * _Nullable))completion {
    if (_tgVoip) {
        VideoMetalView *remoteRenderer = [[VideoMetalView alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 320.0f, 240.0f)];
        remoteRenderer.videoContentMode = UIViewContentModeScaleAspectFill;

        _tgVoip->AttachVideoView(remoteRenderer);

        dispatch_async(dispatch_get_main_queue(), ^{
            completion(remoteRenderer);
        });
    }
}

@end
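A call-site sketch for this method, assuming context is the Objective-C call context exposing getRemoteCameraView: and videoContainer is an on-screen view; both names are illustrative, not part of this commit:

[context getRemoteCameraView:^(UIView * _Nullable remoteView) {
    // Completion is delivered on the main queue, so it is safe to touch UIKit here.
    if (remoteView != nil) {
        remoteView.frame = self.videoContainer.bounds;
        [self.videoContainer addSubview:remoteView];
    }
}];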