Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Ilya Laktyushin 2024-07-13 18:14:51 +04:00
commit 57c071fa4e
38 changed files with 1898 additions and 110 deletions

View File

@ -19,6 +19,8 @@ internal:
except:
- tags
script:
- export PATH=/opt/homebrew/opt/ruby/bin:$PATH
- export PATH=`gem environment gemdir`/bin:$PATH
- python3 -u build-system/Make/Make.py remote-build --darwinContainers="$DARWIN_CONTAINERS" --darwinContainersHost="$DARWIN_CONTAINERS_HOST" --cacheHost="$TELEGRAM_BAZEL_CACHE_HOST" --configurationPath="$TELEGRAM_PRIVATE_DATA_PATH/build-configurations/enterprise-configuration.json" --gitCodesigningRepository="$TELEGRAM_GIT_CODESIGNING_REPOSITORY" --gitCodesigningType=enterprise --configuration=release_arm64
- python3 -u build-system/Make/DeployToAppCenter.py --configuration="$TELEGRAM_PRIVATE_DATA_PATH/appcenter-configurations/appcenter-internal.json" --ipa="build/artifacts/Telegram.ipa" --dsyms="build/artifacts/Telegram.DSYMs.zip"
environment:
@ -93,6 +95,8 @@ beta_testflight:
except:
- tags
script:
- export PATH=/opt/homebrew/opt/ruby/bin:$PATH
- export PATH=`gem environment gemdir`/bin:$PATH
- python3 -u build-system/Make/Make.py remote-build --darwinContainers="$DARWIN_CONTAINERS" --darwinContainersHost="$DARWIN_CONTAINERS_HOST" --cacheHost="$TELEGRAM_BAZEL_CACHE_HOST" --configurationPath="build-system/appstore-configuration.json" --gitCodesigningRepository="$TELEGRAM_GIT_CODESIGNING_REPOSITORY" --gitCodesigningType=appstore --configuration=release_arm64
environment:
name: testflight_llc

View File

@ -104,7 +104,7 @@ def decrypt_codesigning_directory_recursively(source_base_path, destination_base
source_path = source_base_path + '/' + file_name
destination_path = destination_base_path + '/' + file_name
if os.path.isfile(source_path):
os.system('openssl aes-256-cbc -md md5 -k "{password}" -in "{source_path}" -out "{destination_path}" -a -d 2>/dev/null'.format(
os.system('ruby build-system/decrypt.rb "{password}" "{source_path}" "{destination_path}"'.format(
password=password,
source_path=source_path,
destination_path=destination_path

build-system/decrypt.rb (new file, 156 lines)
View File

@ -0,0 +1,156 @@
require 'base64'
require 'openssl'
require 'securerandom'
class EncryptionV1
ALGORITHM = 'aes-256-cbc'
def encrypt(data:, password:, salt:, hash_algorithm: "MD5")
cipher = ::OpenSSL::Cipher.new(ALGORITHM)
cipher.encrypt
keyivgen(cipher, password, salt, hash_algorithm)
encrypted_data = cipher.update(data)
encrypted_data << cipher.final
{ encrypted_data: encrypted_data }
end
def decrypt(encrypted_data:, password:, salt:, hash_algorithm: "MD5")
cipher = ::OpenSSL::Cipher.new(ALGORITHM)
cipher.decrypt
keyivgen(cipher, password, salt, hash_algorithm)
data = cipher.update(encrypted_data)
data << cipher.final
end
private
def keyivgen(cipher, password, salt, hash_algorithm)
cipher.pkcs5_keyivgen(password, salt, 1, hash_algorithm)
end
end
# The newer encryption mechanism, which features a more secure key and IV generation.
#
# The IV is randomly generated and provided unencrypted.
# The salt should be randomly generated and provided unencrypted (like in the current implementation).
# The key is generated with OpenSSL::KDF::pbkdf2_hmac with properly chosen parameters.
#
# Short explanation about salt and IV: https://stackoverflow.com/a/1950674/6324550
class EncryptionV2
ALGORITHM = 'aes-256-gcm'
def encrypt(data:, password:, salt:)
cipher = ::OpenSSL::Cipher.new(ALGORITHM)
cipher.encrypt
keyivgen(cipher, password, salt)
encrypted_data = cipher.update(data)
encrypted_data << cipher.final
auth_tag = cipher.auth_tag
{ encrypted_data: encrypted_data, auth_tag: auth_tag }
end
def decrypt(encrypted_data:, password:, salt:, auth_tag:)
cipher = ::OpenSSL::Cipher.new(ALGORITHM)
cipher.decrypt
keyivgen(cipher, password, salt)
cipher.auth_tag = auth_tag
data = cipher.update(encrypted_data)
data << cipher.final
end
private
def keyivgen(cipher, password, salt)
keyIv = ::OpenSSL::KDF.pbkdf2_hmac(password, salt: salt, iterations: 10_000, length: 32 + 12 + 24, hash: "sha256")
key = keyIv[0..31]
iv = keyIv[32..43]
auth_data = keyIv[44..-1]
cipher.key = key
cipher.iv = iv
cipher.auth_data = auth_data
end
end
class MatchDataEncryption
V1_PREFIX = "Salted__"
V2_PREFIX = "match_encrypted_v2__"
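# Serialized layout (the whole payload is Base64-encoded), as implemented by encrypt/decrypt below:
#   v1: "Salted__" (8 bytes) | salt (8 bytes) | AES-256-CBC ciphertext
#   v2: "match_encrypted_v2__" (20 bytes) | salt (8 bytes) | GCM auth tag (16 bytes) | AES-256-GCM ciphertext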
def encrypt(data:, password:, version: 2)
salt = SecureRandom.random_bytes(8)
if version == 2
e = EncryptionV2.new
encryption = e.encrypt(data: data, password: password, salt: salt)
encrypted_data = V2_PREFIX + salt + encryption[:auth_tag] + encryption[:encrypted_data]
else
e = EncryptionV1.new
encryption = e.encrypt(data: data, password: password, salt: salt)
encrypted_data = V1_PREFIX + salt + encryption[:encrypted_data]
end
Base64.encode64(encrypted_data)
end
def decrypt(base64encoded_encrypted:, password:)
stored_data = Base64.decode64(base64encoded_encrypted)
if stored_data.start_with?(V2_PREFIX)
salt = stored_data[20..27]
auth_tag = stored_data[28..43]
data_to_decrypt = stored_data[44..-1]
e = EncryptionV2.new
e.decrypt(encrypted_data: data_to_decrypt, password: password, salt: salt, auth_tag: auth_tag)
else
salt = stored_data[8..15]
data_to_decrypt = stored_data[16..-1]
e = EncryptionV1.new
begin
# Note that we are not guaranteed to catch the decryption errors here if the password or the hash is wrong
# as there are no integrity checks.
# see https://github.com/fastlane/fastlane/issues/21663
e.decrypt(encrypted_data: data_to_decrypt, password: password, salt: salt)
# With the wrong hash_algorithm, there's a 0.4% chance here that the decryption failure will go undetected
rescue => _ex
# With a wrong password, there's a 0.4% chance it will decrypt garbage and not fail
fallback_hash_algorithm = "SHA256"
e.decrypt(encrypted_data: data_to_decrypt, password: password, salt: salt, hash_algorithm: fallback_hash_algorithm)
end
end
end
end
class MatchFileEncryption
def encrypt(file_path:, password:, output_path: nil)
output_path = file_path unless output_path
data_to_encrypt = File.binread(file_path)
e = MatchDataEncryption.new
data = e.encrypt(data: data_to_encrypt, password: password)
File.write(output_path, data)
end
def decrypt(file_path:, password:, output_path: nil)
output_path = file_path unless output_path
content = File.read(file_path)
e = MatchDataEncryption.new
decrypted_data = e.decrypt(base64encoded_encrypted: content, password: password)
File.binwrite(output_path, decrypted_data)
end
end
if ARGV.length != 3
print 'Invalid command line'
else
dec = MatchFileEncryption.new
dec.decrypt(file_path: ARGV[1], password: ARGV[0], output_path: ARGV[2])
end

View File

@ -3991,6 +3991,7 @@ public class ChatListControllerImpl: TelegramBaseController, ChatListController
StoryContainerScreen.openPeerStoriesCustom(
context: self.context,
peerId: peerId,
focusOnId: storyId,
isHidden: false,
singlePeer: true,
parentController: self,

View File

@ -10,4 +10,5 @@
#import <FFMpegBinding/FFMpegPacket.h>
#import <FFMpegBinding/FFMpegAVCodec.h>
#import <FFMpegBinding/FFMpegRemuxer.h>
#import <FFMpegBinding/FFMpegLiveMuxer.h>
#import <FFMpegBinding/FrameConverter.h>

View File

@ -0,0 +1,11 @@
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface FFMpegLiveMuxer : NSObject
+ (bool)remux:(NSString * _Nonnull)path to:(NSString * _Nonnull)outPath offsetSeconds:(double)offsetSeconds;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,340 @@
#import <FFMpegBinding/FFMpegLiveMuxer.h>
#import <FFMpegBinding/FFMpegAVIOContext.h>
#include "libavutil/timestamp.h"
#include "libavformat/avformat.h"
#include "libavcodec/avcodec.h"
#include "libswresample/swresample.h"
#define MOV_TIMESCALE 1000
@implementation FFMpegLiveMuxer
+ (bool)remux:(NSString * _Nonnull)path to:(NSString * _Nonnull)outPath offsetSeconds:(double)offsetSeconds {
AVFormatContext *input_format_context = NULL, *output_format_context = NULL;
AVPacket packet;
const char *in_filename, *out_filename;
int ret, i;
int stream_index = 0;
int *streams_list = NULL;
int number_of_streams = 0;
struct SwrContext *swr_ctx = NULL;
in_filename = [path UTF8String];
out_filename = [outPath UTF8String];
if ((ret = avformat_open_input(&input_format_context, in_filename, av_find_input_format("mp4"), NULL)) < 0) {
fprintf(stderr, "Could not open input file '%s'\n", in_filename);
goto end;
}
if ((ret = avformat_find_stream_info(input_format_context, NULL)) < 0) {
fprintf(stderr, "Failed to retrieve input stream information\n");
goto end;
}
avformat_alloc_output_context2(&output_format_context, NULL, "mpegts", out_filename);
if (!output_format_context) {
fprintf(stderr, "Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
const AVCodec *aac_codec = avcodec_find_encoder(AV_CODEC_ID_AAC);
if (!aac_codec) {
fprintf(stderr, "Could not find AAC encoder\n");
ret = AVERROR_UNKNOWN;
goto end;
}
AVCodecContext *aac_codec_context = avcodec_alloc_context3(aac_codec);
if (!aac_codec_context) {
fprintf(stderr, "Could not allocate AAC codec context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
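// Note: the decoder below is looked up with AV_CODEC_ID_AAC even though the variable names
// and error messages refer to Opus.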
const AVCodec *opus_decoder = avcodec_find_decoder(AV_CODEC_ID_AAC);
if (!opus_decoder) {
fprintf(stderr, "Could not find Opus decoder\n");
ret = AVERROR_UNKNOWN;
goto end;
}
AVCodecContext *opus_decoder_context = avcodec_alloc_context3(opus_decoder);
if (!opus_decoder_context) {
fprintf(stderr, "Could not allocate Opus decoder context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
number_of_streams = input_format_context->nb_streams;
streams_list = av_malloc_array(number_of_streams, sizeof(*streams_list));
if (!streams_list) {
ret = AVERROR(ENOMEM);
goto end;
}
bool hasAudio = false;
for (i = 0; i < input_format_context->nb_streams; i++) {
AVStream *out_stream;
AVStream *in_stream = input_format_context->streams[i];
AVCodecParameters *in_codecpar = in_stream->codecpar;
if (/*in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO && */in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO) {
streams_list[i] = -1;
continue;
}
streams_list[i] = stream_index++;
if (in_codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
out_stream = avformat_new_stream(output_format_context, NULL);
if (!out_stream) {
fprintf(stderr, "Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
if (ret < 0) {
fprintf(stderr, "Failed to copy codec parameters\n");
goto end;
}
out_stream->time_base = in_stream->time_base;
out_stream->duration = in_stream->duration;
} else if (in_codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
out_stream = avformat_new_stream(output_format_context, aac_codec);
if (!out_stream) {
fprintf(stderr, "Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
hasAudio = true;
// Set the codec parameters for the AAC encoder
aac_codec_context->sample_rate = in_codecpar->sample_rate;
aac_codec_context->channel_layout = in_codecpar->channel_layout ? in_codecpar->channel_layout : AV_CH_LAYOUT_STEREO;
aac_codec_context->channels = av_get_channel_layout_nb_channels(aac_codec_context->channel_layout);
aac_codec_context->sample_fmt = aac_codec->sample_fmts ? aac_codec->sample_fmts[0] : AV_SAMPLE_FMT_FLTP; // Use the first supported sample format
aac_codec_context->bit_rate = 128000; // Set a default bitrate; adjust as needed
//aac_codec_context->time_base = (AVRational){1, 90000};
ret = avcodec_open2(aac_codec_context, aac_codec, NULL);
if (ret < 0) {
fprintf(stderr, "Could not open AAC encoder\n");
goto end;
}
ret = avcodec_parameters_from_context(out_stream->codecpar, aac_codec_context);
if (ret < 0) {
fprintf(stderr, "Failed initializing audio output stream\n");
goto end;
}
out_stream->time_base = (AVRational){1, 90000};
out_stream->duration = av_rescale_q(in_stream->duration, in_stream->time_base, out_stream->time_base);
// Set up the Opus decoder context
ret = avcodec_parameters_to_context(opus_decoder_context, in_codecpar);
if (ret < 0) {
fprintf(stderr, "Could not copy codec parameters to decoder context\n");
goto end;
}
if (opus_decoder_context->channel_layout == 0) {
opus_decoder_context->channel_layout = av_get_default_channel_layout(opus_decoder_context->channels);
}
ret = avcodec_open2(opus_decoder_context, opus_decoder, NULL);
if (ret < 0) {
fprintf(stderr, "Could not open Opus decoder\n");
goto end;
}
// Reset the channel layout if it was unset before opening the codec
if (opus_decoder_context->channel_layout == 0) {
opus_decoder_context->channel_layout = av_get_default_channel_layout(opus_decoder_context->channels);
}
}
}
if (hasAudio) {
// Set up the resampling context
swr_ctx = swr_alloc_set_opts(NULL,
aac_codec_context->channel_layout, aac_codec_context->sample_fmt, aac_codec_context->sample_rate,
opus_decoder_context->channel_layout, opus_decoder_context->sample_fmt, opus_decoder_context->sample_rate,
0, NULL);
if (!swr_ctx) {
fprintf(stderr, "Could not allocate resampler context\n");
ret = AVERROR(ENOMEM);
goto end;
}
if ((ret = swr_init(swr_ctx)) < 0) {
fprintf(stderr, "Failed to initialize the resampling context\n");
goto end;
}
}
if (!(output_format_context->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&output_format_context->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0) {
fprintf(stderr, "Could not open output file '%s'\n", out_filename);
goto end;
}
}
AVDictionary* opts = NULL;
ret = avformat_write_header(output_format_context, &opts);
if (ret < 0) {
fprintf(stderr, "Error occurred when opening output file\n");
goto end;
}
while (1) {
AVStream *in_stream, *out_stream;
ret = av_read_frame(input_format_context, &packet);
if (ret < 0)
break;
in_stream = input_format_context->streams[packet.stream_index];
if (packet.stream_index >= number_of_streams || streams_list[packet.stream_index] < 0) {
av_packet_unref(&packet);
continue;
}
packet.stream_index = streams_list[packet.stream_index];
out_stream = output_format_context->streams[packet.stream_index];
if (in_stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
ret = avcodec_send_packet(opus_decoder_context, &packet);
if (ret < 0) {
fprintf(stderr, "Error sending packet to decoder\n");
av_packet_unref(&packet);
continue;
}
AVFrame *frame = av_frame_alloc();
ret = avcodec_receive_frame(opus_decoder_context, frame);
if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
fprintf(stderr, "Error receiving frame from decoder\n");
av_frame_free(&frame);
av_packet_unref(&packet);
continue;
}
if (ret >= 0) {
frame->pts = frame->best_effort_timestamp;
AVFrame *resampled_frame = av_frame_alloc();
resampled_frame->channel_layout = aac_codec_context->channel_layout;
resampled_frame->sample_rate = aac_codec_context->sample_rate;
resampled_frame->format = aac_codec_context->sample_fmt;
resampled_frame->nb_samples = aac_codec_context->frame_size;
if ((ret = av_frame_get_buffer(resampled_frame, 0)) < 0) {
fprintf(stderr, "Could not allocate resampled frame buffer\n");
av_frame_free(&resampled_frame);
av_frame_free(&frame);
av_packet_unref(&packet);
continue;
}
memset(resampled_frame->data[0], 0, resampled_frame->nb_samples * 2 * 2);
//arc4random_buf(resampled_frame->data[0], resampled_frame->nb_samples * 2 * 2);
//memset(frame->data[0], 0, frame->nb_samples * 2 * 2);
if ((ret = swr_convert(swr_ctx,
resampled_frame->data, resampled_frame->nb_samples,
(const uint8_t **)frame->data, frame->nb_samples)) < 0) {
fprintf(stderr, "Error while converting\n");
av_frame_free(&resampled_frame);
av_frame_free(&frame);
av_packet_unref(&packet);
continue;
}
resampled_frame->pts = av_rescale_q(frame->pts, opus_decoder_context->time_base, aac_codec_context->time_base);
ret = avcodec_send_frame(aac_codec_context, resampled_frame);
if (ret < 0) {
fprintf(stderr, "Error sending frame to encoder\n");
av_frame_free(&resampled_frame);
av_frame_free(&frame);
av_packet_unref(&packet);
continue;
}
AVPacket out_packet;
av_init_packet(&out_packet);
out_packet.data = NULL;
out_packet.size = 0;
ret = avcodec_receive_packet(aac_codec_context, &out_packet);
if (ret >= 0) {
out_packet.pts = av_rescale_q_rnd(packet.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
out_packet.dts = av_rescale_q_rnd(packet.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
out_packet.pts += (int64_t)(offsetSeconds * out_stream->time_base.den);
out_packet.dts += (int64_t)(offsetSeconds * out_stream->time_base.den);
out_packet.duration = av_rescale_q(out_packet.duration, aac_codec_context->time_base, out_stream->time_base);
out_packet.stream_index = packet.stream_index;
ret = av_interleaved_write_frame(output_format_context, &out_packet);
if (ret < 0) {
fprintf(stderr, "Error muxing packet\n");
av_packet_unref(&out_packet);
av_frame_free(&resampled_frame);
av_frame_free(&frame);
av_packet_unref(&packet);
break;
}
av_packet_unref(&out_packet);
}
av_frame_free(&resampled_frame);
av_frame_free(&frame);
}
} else {
packet.pts = av_rescale_q_rnd(packet.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
packet.dts = av_rescale_q_rnd(packet.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
packet.pts += (int64_t)(offsetSeconds * out_stream->time_base.den);
packet.dts += (int64_t)(offsetSeconds * out_stream->time_base.den);
packet.duration = av_rescale_q(packet.duration, in_stream->time_base, out_stream->time_base);
packet.pos = -1;
ret = av_interleaved_write_frame(output_format_context, &packet);
if (ret < 0) {
fprintf(stderr, "Error muxing packet\n");
av_packet_unref(&packet);
break;
}
}
av_packet_unref(&packet);
}
av_write_trailer(output_format_context);
end:
avformat_close_input(&input_format_context);
if (output_format_context && !(output_format_context->oformat->flags & AVFMT_NOFILE)) {
avio_closep(&output_format_context->pb);
}
avformat_free_context(output_format_context);
avcodec_free_context(&aac_codec_context);
avcodec_free_context(&opus_decoder_context);
av_freep(&streams_list);
if (swr_ctx) {
swr_free(&swr_ctx);
}
if (ret < 0 && ret != AVERROR_EOF) {
fprintf(stderr, "Error occurred: %s\n", av_err2str(ret));
return false;
}
//printf("Remuxed video into %s\n", outPath.UTF8String);
return true;
}
@end
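
A minimal usage sketch (not part of this commit): calling the new remuxer from Swift, assuming the default bridging of +remux:to:offsetSeconds: declared in FFMpegLiveMuxer.h and that FFMpegBinding is imported as a module.

import FFMpegBinding

// Remuxes a recorded MP4 segment into an MPEG-TS file, shifting all timestamps by offsetSeconds.
func remuxLiveSegment(inputPath: String, outputPath: String, offsetSeconds: Double) -> Bool {
    return FFMpegLiveMuxer.remux(inputPath, to: outputPath, offsetSeconds: offsetSeconds)
}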

View File

@ -6,4 +6,5 @@
- (instancetype)initWithPersistentKey:(MTDatacenterAuthKey *)persistentKey ephemeralKey:(MTDatacenterAuthKey *)ephemeralKey completion:(void (^)(bool))completion;
-(void)complete;
@end

View File

@ -168,4 +168,8 @@
}
}
-(void)complete {
_completion(true);
}
@end

View File

@ -250,7 +250,7 @@ static int32_t fixedTimeDifferenceValue = 0;
_tempKeyExpiration = 24 * 60 * 60;
#if DEBUG
//_tempKeyExpiration = 30;
//_tempKeyExpiration = 30;
#endif
_datacenterSeedAddressSetById = [[NSMutableDictionary alloc] init];

View File

@ -55,7 +55,7 @@
#import <MtProtoKit/MTSignal.h>
#import <MtProtoKit/MTQueue.h>
#import <MtProtoKit/MTBindKeyMessageService.h>
typedef enum {
MTProtoStateAwaitingDatacenterScheme = 1,
MTProtoStateAwaitingDatacenterAuthorization = 2,
@ -885,7 +885,7 @@ static const NSUInteger MTMaxUnacknowledgedMessageCount = 64;
- (MTDatacenterAuthKey *)getAuthKeyForCurrentScheme:(MTTransportScheme *)scheme createIfNeeded:(bool)createIfNeeded authInfoSelector:(MTDatacenterAuthInfoSelector *)authInfoSelector {
if (_useExplicitAuthKey) {
MTDatacenterAuthInfoSelector selector = MTDatacenterAuthInfoSelectorEphemeralMain;
MTDatacenterAuthInfoSelector selector = scheme.media ? MTDatacenterAuthInfoSelectorEphemeralMedia : MTDatacenterAuthInfoSelectorEphemeralMain;
if (authInfoSelector != nil) {
*authInfoSelector = selector;
}
@ -2151,6 +2151,15 @@ static NSString *dumpHexString(NSData *data, int maxLength) {
}
if (_useExplicitAuthKey != nil) {
if (scheme.media) {
for (NSInteger i = (NSInteger)_messageServices.count - 1; i >= 0; i--)
{
MTBindKeyMessageService* messageService = (MTBindKeyMessageService *)_messageServices[(NSUInteger)i];
if ([messageService respondsToSelector:@selector(complete)]) {
[messageService complete];
}
}
}
} else if (_cdn) {
_validAuthInfo = nil;

View File

@ -75,9 +75,9 @@ public final class MediaStreamComponent: CombinedComponent {
var videoStalled: Bool = true
var videoIsPlayable: Bool {
!videoStalled && hasVideo
return true
//!videoStalled && hasVideo
}
// var wantsPiP: Bool = false
let deactivatePictureInPictureIfVisible = StoredActionSlot(Void.self)

View File

@ -6,11 +6,11 @@ import AVKit
import MultilineTextComponent
import Display
import ShimmerEffect
import TelegramCore
import SwiftSignalKit
import AvatarNode
import Postbox
import TelegramVoip
final class MediaStreamVideoComponent: Component {
let call: PresentationGroupCallImpl
@ -157,6 +157,8 @@ final class MediaStreamVideoComponent: Component {
private var lastPresentation: UIView?
private var pipTrackDisplayLink: CADisplayLink?
private var livePlayerView: ProxyVideoView?
override init(frame: CGRect) {
self.blurTintView = UIView()
self.blurTintView.backgroundColor = UIColor(white: 0.0, alpha: 0.55)
@ -211,7 +213,7 @@ final class MediaStreamVideoComponent: Component {
let needsFadeInAnimation = hadVideo
if loadingBlurView.superview == nil {
addSubview(loadingBlurView)
//addSubview(loadingBlurView)
if needsFadeInAnimation {
let anim = CABasicAnimation(keyPath: "opacity")
anim.duration = 0.5
@ -542,6 +544,39 @@ final class MediaStreamVideoComponent: Component {
videoFrameUpdateTransition.setFrame(layer: self.videoBlurGradientMask, frame: videoBlurView.bounds)
videoFrameUpdateTransition.setFrame(layer: self.videoBlurSolidMask, frame: self.videoBlurGradientMask.bounds)
}
if self.livePlayerView == nil {
let livePlayerView = ProxyVideoView(context: component.call.accountContext, call: component.call)
self.livePlayerView = livePlayerView
livePlayerView.layer.masksToBounds = true
self.addSubview(livePlayerView)
livePlayerView.frame = newVideoFrame
livePlayerView.layer.cornerRadius = videoCornerRadius
livePlayerView.update(size: newVideoFrame.size)
var pictureInPictureController: AVPictureInPictureController? = nil
if #available(iOS 15.0, *) {
pictureInPictureController = AVPictureInPictureController(contentSource: AVPictureInPictureController.ContentSource(playerLayer: livePlayerView.playerLayer))
pictureInPictureController?.playerLayer.masksToBounds = false
pictureInPictureController?.playerLayer.cornerRadius = 10
} else if AVPictureInPictureController.isPictureInPictureSupported() {
pictureInPictureController = AVPictureInPictureController.init(playerLayer: AVPlayerLayer(player: AVPlayer()))
}
pictureInPictureController?.delegate = self
if #available(iOS 14.2, *) {
pictureInPictureController?.canStartPictureInPictureAutomaticallyFromInline = true
}
if #available(iOS 14.0, *) {
pictureInPictureController?.requiresLinearPlayback = true
}
self.pictureInPictureController = pictureInPictureController
}
if let livePlayerView = self.livePlayerView {
videoFrameUpdateTransition.setFrame(view: livePlayerView, frame: newVideoFrame, completion: nil)
videoFrameUpdateTransition.setCornerRadius(layer: livePlayerView.layer, cornerRadius: videoCornerRadius)
livePlayerView.update(size: newVideoFrame.size)
}
} else {
videoSize = CGSize(width: 16 / 9 * 100.0, height: 100.0).aspectFitted(.init(width: availableSize.width - videoInset * 2, height: availableSize.height))
}
@ -601,7 +636,7 @@ final class MediaStreamVideoComponent: Component {
}
}
if self.noSignalTimeout {
if self.noSignalTimeout, !"".isEmpty {
var noSignalTransition = transition
let noSignalView: ComponentHostView<Empty>
if let current = self.noSignalView {
@ -769,3 +804,94 @@ private final class CustomIntensityVisualEffectView: UIVisualEffectView {
animator.stopAnimation(true)
}
}
private final class ProxyVideoView: UIView {
private let call: PresentationGroupCallImpl
private let id: Int64
private let player: AVPlayer
private let playerItem: AVPlayerItem
let playerLayer: AVPlayerLayer
private var contextDisposable: Disposable?
private var failureObserverId: AnyObject?
private var errorObserverId: AnyObject?
private var rateObserver: NSKeyValueObservation?
private var isActiveDisposable: Disposable?
init(context: AccountContext, call: PresentationGroupCallImpl) {
self.call = call
self.id = Int64.random(in: Int64.min ... Int64.max)
let assetUrl = "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(call.internalId)/master.m3u8"
Logger.shared.log("MediaStreamVideoComponent", "Initializing HLS asset at \(assetUrl)")
#if DEBUG
print("Initializing HLS asset at \(assetUrl)")
#endif
let asset = AVURLAsset(url: URL(string: assetUrl)!, options: [:])
self.playerItem = AVPlayerItem(asset: asset)
self.player = AVPlayer(playerItem: self.playerItem)
self.player.allowsExternalPlayback = true
self.playerLayer = AVPlayerLayer(player: self.player)
super.init(frame: CGRect())
self.failureObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.failedToPlayToEndTimeNotification, object: playerItem, queue: .main, using: { notification in
print("Player Error: \(notification.description)")
})
self.errorObserverId = NotificationCenter.default.addObserver(forName: AVPlayerItem.newErrorLogEntryNotification, object: playerItem, queue: .main, using: { notification in
print("Player Error: \(notification.description)")
})
self.rateObserver = self.player.observe(\.rate, changeHandler: { [weak self] _, change in
guard let self else {
return
}
print("Player rate: \(self.player.rate)")
})
self.layer.addSublayer(self.playerLayer)
self.isActiveDisposable = (context.sharedContext.applicationBindings.applicationIsActive
|> distinctUntilChanged
|> deliverOnMainQueue).start(next: { [weak self] isActive in
guard let self else {
return
}
if isActive {
self.playerLayer.player = self.player
if self.player.rate == 0.0 {
self.player.play()
}
} else {
self.playerLayer.player = nil
}
})
self.player.play()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
self.contextDisposable?.dispose()
if let failureObserverId = self.failureObserverId {
NotificationCenter.default.removeObserver(failureObserverId)
}
if let errorObserverId = self.errorObserverId {
NotificationCenter.default.removeObserver(errorObserverId)
}
if let rateObserver = self.rateObserver {
rateObserver.invalidate()
}
self.isActiveDisposable?.dispose()
}
func update(size: CGSize) {
self.playerLayer.frame = CGRect(origin: CGPoint(), size: size)
}
}

View File

@ -276,6 +276,7 @@ private extension PresentationGroupCallState {
private enum CurrentImpl {
case call(OngoingGroupCallContext)
case mediaStream(WrappedMediaStreamingContext)
case externalMediaStream(ExternalMediaStreamingContext)
}
private extension CurrentImpl {
@ -283,7 +284,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
return callContext.joinPayload
case .mediaStream:
case .mediaStream, .externalMediaStream:
let ssrcId = UInt32.random(in: 0 ..< UInt32(Int32.max - 1))
let dict: [String: Any] = [
"fingerprints": [] as [Any],
@ -303,7 +304,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
return callContext.networkState
case .mediaStream:
case .mediaStream, .externalMediaStream:
return .single(OngoingGroupCallContext.NetworkState(isConnected: true, isTransitioningFromBroadcastToRtc: false))
}
}
@ -312,7 +313,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
return callContext.audioLevels
case .mediaStream:
case .mediaStream, .externalMediaStream:
return .single([])
}
}
@ -321,7 +322,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
return callContext.isMuted
case .mediaStream:
case .mediaStream, .externalMediaStream:
return .single(true)
}
}
@ -330,7 +331,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
return callContext.isNoiseSuppressionEnabled
case .mediaStream:
case .mediaStream, .externalMediaStream:
return .single(false)
}
}
@ -339,7 +340,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.stop()
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -348,7 +349,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.setIsMuted(isMuted)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -357,7 +358,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.setIsNoiseSuppressionEnabled(isNoiseSuppressionEnabled)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -366,7 +367,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.requestVideo(capturer)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -375,7 +376,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.disableVideo()
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -384,7 +385,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.setVolume(ssrc: ssrc, volume: volume)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -393,7 +394,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.setRequestedVideoChannels(channels)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -402,17 +403,19 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.makeIncomingVideoView(endpointId: endpointId, requestClone: requestClone, completion: completion)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
func video(endpointId: String) -> Signal<OngoingGroupCallContext.VideoFrameData, NoError> {
func video(endpointId: String) -> Signal<OngoingGroupCallContext.VideoFrameData, NoError>? {
switch self {
case let .call(callContext):
return callContext.video(endpointId: endpointId)
case let .mediaStream(mediaStreamContext):
return mediaStreamContext.video()
case .externalMediaStream:
return .never()
}
}
@ -420,7 +423,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.addExternalAudioData(data: data)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -429,7 +432,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.getStats(completion: completion)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -438,7 +441,7 @@ private extension CurrentImpl {
switch self {
case let .call(callContext):
callContext.setTone(tone: tone)
case .mediaStream:
case .mediaStream, .externalMediaStream:
break
}
}
@ -647,6 +650,8 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
private var genericCallContext: CurrentImpl?
private var currentConnectionMode: OngoingGroupCallContext.ConnectionMode = .none
private var didInitializeConnectionMode: Bool = false
let externalMediaStream = Promise<ExternalMediaStreamingContext>()
private var screencastCallContext: OngoingGroupCallContext?
private var screencastBufferServerContext: IpcGroupCallBufferAppContext?
@ -1638,7 +1643,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
genericCallContext = current
} else {
if self.isStream, self.accountContext.sharedContext.immediateExperimentalUISettings.liveStreamV2 {
genericCallContext = .mediaStream(WrappedMediaStreamingContext(rejoinNeeded: { [weak self] in
let externalMediaStream = ExternalMediaStreamingContext(id: self.internalId, rejoinNeeded: { [weak self] in
Queue.mainQueue().async {
guard let strongSelf = self else {
return
@ -1650,7 +1655,9 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
strongSelf.requestCall(movingFromBroadcastToRtc: false)
}
}
}))
})
genericCallContext = .externalMediaStream(externalMediaStream)
self.externalMediaStream.set(.single(externalMediaStream))
} else {
var outgoingAudioBitrateKbit: Int32?
let appConfiguration = self.accountContext.currentAppConfiguration.with({ $0 })
@ -1797,6 +1804,14 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
strongSelf.currentConnectionMode = .broadcast
mediaStreamContext.setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData(engine: strongSelf.accountContext.engine, callId: callInfo.id, accessHash: callInfo.accessHash, isExternalStream: callInfo.isStream))
}
case let .externalMediaStream(externalMediaStream):
switch joinCallResult.connectionMode {
case .rtc:
strongSelf.currentConnectionMode = .rtc
case .broadcast:
strongSelf.currentConnectionMode = .broadcast
externalMediaStream.setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData(engine: strongSelf.accountContext.engine, callId: callInfo.id, accessHash: callInfo.accessHash, isExternalStream: callInfo.isStream))
}
}
}
@ -3199,7 +3214,7 @@ public final class PresentationGroupCallImpl: PresentationGroupCall {
switch genericCallContext {
case let .call(callContext):
callContext.setConnectionMode(.none, keepBroadcastConnectedIfWasEnabled: movingFromBroadcastToRtc, isUnifiedBroadcast: false)
case .mediaStream:
case .mediaStream, .externalMediaStream:
assertionFailure()
break
}

View File

@ -236,6 +236,59 @@ private func mergedResult(_ state: SearchMessagesState) -> SearchMessagesResult
return SearchMessagesResult(messages: messages, readStates: readStates, threadInfo: threadInfo, totalCount: state.main.totalCount + (state.additional?.totalCount ?? 0), completed: state.main.completed && (state.additional?.completed ?? true))
}
func _internal_getSearchMessageCount(account: Account, location: SearchMessagesLocation, query: String) -> Signal<Int?, NoError> {
guard case let .peer(peerId, fromId, _, _, threadId, _, _) = location else {
return .single(nil)
}
return account.postbox.transaction { transaction -> (Api.InputPeer?, Api.InputPeer?) in
var chatPeer = transaction.getPeer(peerId).flatMap(apiInputPeer)
var fromPeer: Api.InputPeer?
if let fromId {
if let value = transaction.getPeer(fromId).flatMap(apiInputPeer) {
fromPeer = value
} else {
chatPeer = nil
}
}
return (chatPeer, fromPeer)
}
|> mapToSignal { inputPeer, fromPeer -> Signal<Int?, NoError> in
guard let inputPeer else {
return .single(nil)
}
var flags: Int32 = 0
if let _ = fromPeer {
flags |= (1 << 0)
}
var topMsgId: Int32?
if let threadId = threadId {
flags |= (1 << 1)
topMsgId = Int32(clamping: threadId)
}
return account.network.request(Api.functions.messages.search(flags: flags, peer: inputPeer, q: query, fromId: fromPeer, savedPeerId: nil, savedReaction: nil, topMsgId: topMsgId, filter: .inputMessagesFilterEmpty, minDate: 0, maxDate: 0, offsetId: 0, addOffset: 0, limit: 1, maxId: 0, minId: 0, hash: 0))
|> map { result -> Int? in
switch result {
case let .channelMessages(_, _, count, _, _, _, _, _):
return Int(count)
case let .messages(messages, _, _):
return messages.count
case let .messagesNotModified(count):
return Int(count)
case let .messagesSlice(_, count, _, _, _, _, _):
return Int(count)
}
}
|> `catch` { _ -> Signal<Int?, NoError> in
return .single(nil)
}
}
}
func _internal_searchMessages(account: Account, location: SearchMessagesLocation, query: String, state: SearchMessagesState?, centerId: MessageId?, limit: Int32 = 100) -> Signal<(SearchMessagesResult, SearchMessagesState), NoError> {
if case let .peer(peerId, fromId, tags, reactions, threadId, minDate, maxDate) = location, fromId == nil, tags == nil, peerId == account.peerId, let reactions, let reaction = reactions.first, (minDate == nil || minDate == 0), (maxDate == nil || maxDate == 0) {
return account.postbox.transaction { transaction -> (SearchMessagesResult, SearchMessagesState) in

View File

@ -76,6 +76,10 @@ public extension TelegramEngine {
return _internal_searchMessages(account: self.account, location: location, query: query, state: state, centerId: centerId, limit: limit)
}
public func getSearchMessageCount(location: SearchMessagesLocation, query: String) -> Signal<Int?, NoError> {
return _internal_getSearchMessageCount(account: self.account, location: location, query: query)
}
public func searchHashtagPosts(hashtag: String, state: SearchMessagesState?, limit: Int32 = 100) -> Signal<(SearchMessagesResult, SearchMessagesState), NoError> {
return _internal_searchHashtagPosts(account: self.account, hashtag: hashtag, state: state, limit: limit)
}
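
A hedged usage sketch (not part of the diff) for the new count API from a call site; the engine access path and the placeholder `location`/`query` values are assumptions, with `location` being the same SearchMessagesLocation value that would be passed to searchMessages:

let countDisposable = (context.engine.messages.getSearchMessageCount(location: location, query: query)
|> deliverOnMainQueue).start(next: { count in
    if let count {
        print("Matching messages: \(count)")
    } else {
        // nil means the peer could not be resolved or the request failed
        print("Matching message count unavailable")
    }
})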

View File

@ -1044,6 +1044,8 @@ public func universalServiceMessageString(presentationData: (PresentationTheme,
attributedString = mutableString
case .unknown:
attributedString = nil
case .paymentRefunded, .giftStars:
attributedString = nil
}
break

View File

@ -194,6 +194,7 @@ private final class AdminUserActionsSheetComponent: Component {
let chatPeer: EnginePeer
let peers: [RenderedChannelParticipant]
let messageCount: Int
let deleteAllMessageCount: Int?
let completion: (AdminUserActionsSheet.Result) -> Void
init(
@ -201,12 +202,14 @@ private final class AdminUserActionsSheetComponent: Component {
chatPeer: EnginePeer,
peers: [RenderedChannelParticipant],
messageCount: Int,
deleteAllMessageCount: Int?,
completion: @escaping (AdminUserActionsSheet.Result) -> Void
) {
self.context = context
self.chatPeer = chatPeer
self.peers = peers
self.messageCount = messageCount
self.deleteAllMessageCount = deleteAllMessageCount
self.completion = completion
}
@ -223,6 +226,9 @@ private final class AdminUserActionsSheetComponent: Component {
if lhs.messageCount != rhs.messageCount {
return false
}
if lhs.deleteAllMessageCount != rhs.deleteAllMessageCount {
return false
}
return true
}
@ -642,7 +648,7 @@ private final class AdminUserActionsSheetComponent: Component {
let sectionId: AnyHashable
let selectedPeers: Set<EnginePeer.Id>
let isExpanded: Bool
let title: String
var title: String
switch section {
case .report:
@ -870,7 +876,14 @@ private final class AdminUserActionsSheetComponent: Component {
)))
}
let titleString: String = environment.strings.Chat_AdminActionSheet_DeleteTitle(Int32(component.messageCount))
var titleString: String = environment.strings.Chat_AdminActionSheet_DeleteTitle(Int32(component.messageCount))
if let deleteAllMessageCount = component.deleteAllMessageCount {
if self.optionDeleteAllSelectedPeers == Set(component.peers.map(\.peer.id)) {
titleString = environment.strings.Chat_AdminActionSheet_DeleteTitle(Int32(deleteAllMessageCount))
}
}
let titleSize = self.title.update(
transition: .immediate,
component: AnyComponent(MultilineTextComponent(
@ -884,7 +897,9 @@ private final class AdminUserActionsSheetComponent: Component {
if titleView.superview == nil {
self.navigationBarContainer.addSubview(titleView)
}
transition.setFrame(view: titleView, frame: titleFrame)
//transition.setPosition(view: titleView, position: titleFrame.center)
titleView.center = titleFrame.center
titleView.bounds = CGRect(origin: CGPoint(), size: titleFrame.size)
}
let navigationBackgroundFrame = CGRect(origin: CGPoint(), size: CGSize(width: availableSize.width, height: 54.0))
@ -1424,10 +1439,10 @@ public class AdminUserActionsSheet: ViewControllerComponentContainer {
private var isDismissed: Bool = false
public init(context: AccountContext, chatPeer: EnginePeer, peers: [RenderedChannelParticipant], messageCount: Int, completion: @escaping (Result) -> Void) {
public init(context: AccountContext, chatPeer: EnginePeer, peers: [RenderedChannelParticipant], messageCount: Int, deleteAllMessageCount: Int?, completion: @escaping (Result) -> Void) {
self.context = context
super.init(context: context, component: AdminUserActionsSheetComponent(context: context, chatPeer: chatPeer, peers: peers, messageCount: messageCount, completion: completion), navigationBarAppearance: .none)
super.init(context: context, component: AdminUserActionsSheetComponent(context: context, chatPeer: chatPeer, peers: peers, messageCount: messageCount, deleteAllMessageCount: deleteAllMessageCount, completion: completion), navigationBarAppearance: .none)
self.statusBar.statusBarStyle = .Ignore
self.navigationPresentation = .flatModal

View File

@ -4749,25 +4749,31 @@ public class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewI
}
if let forwardInfoNode = self.forwardInfoNode, forwardInfoNode.frame.contains(location) {
if let item = self.item, let forwardInfo = item.message.forwardInfo {
let performAction: () -> Void = {
let performAction: () -> Void = { [weak forwardInfoNode] in
if let sourceMessageId = forwardInfo.sourceMessageId {
if let channel = forwardInfo.author as? TelegramChannel, channel.addressName == nil {
if case let .broadcast(info) = channel.info, info.flags.contains(.hasDiscussionGroup) {
} else if case .member = channel.participationStatus {
} else if !item.message.id.peerId.isReplies {
item.controllerInteraction.displayMessageTooltip(item.message.id, item.presentationData.strings.Conversation_PrivateChannelTooltip, false, forwardInfoNode, nil)
if let forwardInfoNode {
item.controllerInteraction.displayMessageTooltip(item.message.id, item.presentationData.strings.Conversation_PrivateChannelTooltip, false, forwardInfoNode, nil)
}
return
}
}
item.controllerInteraction.navigateToMessage(item.message.id, sourceMessageId, NavigateToMessageParams(timestamp: nil, quote: nil))
if let forwardInfoNode {
item.controllerInteraction.navigateToMessage(item.message.id, sourceMessageId, NavigateToMessageParams(timestamp: nil, quote: nil, progress: forwardInfoNode.makeActivate()?()))
}
} else if let peer = forwardInfo.source ?? forwardInfo.author {
item.controllerInteraction.openPeer(EnginePeer(peer), peer is TelegramUser ? .info(nil) : .chat(textInputState: nil, subject: nil, peekData: nil), nil, .default)
} else if let _ = forwardInfo.authorSignature {
var subRect: CGRect?
if let textNode = forwardInfoNode.nameNode {
subRect = textNode.frame
if let forwardInfoNode {
var subRect: CGRect?
if let textNode = forwardInfoNode.nameNode {
subRect = textNode.frame
}
item.controllerInteraction.displayMessageTooltip(item.message.id, item.presentationData.strings.Conversation_ForwardAuthorHiddenTooltip, false, forwardInfoNode, subRect)
}
item.controllerInteraction.displayMessageTooltip(item.message.id, item.presentationData.strings.Conversation_ForwardAuthorHiddenTooltip, false, forwardInfoNode, subRect)
}
}

View File

@ -25,6 +25,7 @@ swift_library(
"//submodules/TelegramUI/Components/TextNodeWithEntities",
"//submodules/TelegramUI/Components/AnimationCache",
"//submodules/TelegramUI/Components/MultiAnimationRenderer",
"//submodules/TelegramUI/Components/TextLoadingEffect",
"//submodules/AvatarNode",
],
visibility = [

View File

@ -8,6 +8,8 @@ import TelegramPresentationData
import LocalizedPeerData
import AccountContext
import AvatarNode
import TextLoadingEffect
import SwiftSignalKit
public enum ChatMessageForwardInfoType: Equatable {
case bubble(incoming: Bool)
@ -85,6 +87,10 @@ public class ChatMessageForwardInfoNode: ASDisplayNode {
private var highlightColor: UIColor?
private var linkHighlightingNode: LinkHighlightingNode?
private var hasLinkProgress: Bool = false
private var linkProgressView: TextLoadingEffectView?
private var linkProgressDisposable: Disposable?
private var previousPeer: Peer?
public var openPsa: ((String, ASDisplayNode) -> Void)?
@ -93,6 +99,10 @@ public class ChatMessageForwardInfoNode: ASDisplayNode {
super.init()
}
deinit {
self.linkProgressDisposable?.dispose()
}
public func hasAction(at point: CGPoint) -> Bool {
if let infoNode = self.infoNode, infoNode.frame.contains(point) {
return true
@ -172,7 +182,6 @@ public class ChatMessageForwardInfoNode: ASDisplayNode {
if isHighlighted, !initialRects.isEmpty, let highlightColor = self.highlightColor {
let rects = initialRects
let linkHighlightingNode: LinkHighlightingNode
if let current = self.linkHighlightingNode {
linkHighlightingNode = current
@ -191,6 +200,85 @@ public class ChatMessageForwardInfoNode: ASDisplayNode {
}
}
public func makeActivate() -> (() -> Promise<Bool>?)? {
return { [weak self] in
guard let self else {
return nil
}
let promise = Promise<Bool>()
self.linkProgressDisposable?.dispose()
if self.hasLinkProgress {
self.hasLinkProgress = false
self.updateLinkProgressState()
}
self.linkProgressDisposable = (promise.get() |> deliverOnMainQueue).startStrict(next: { [weak self] value in
guard let self else {
return
}
if self.hasLinkProgress != value {
self.hasLinkProgress = value
self.updateLinkProgressState()
}
})
return promise
}
}
private func updateLinkProgressState() {
guard let highlightColor = self.highlightColor else {
return
}
if self.hasLinkProgress, let titleNode = self.titleNode, let nameNode = self.nameNode {
var initialRects: [CGRect] = []
let addRects: (TextNode, CGPoint, CGFloat) -> Void = { textNode, offset, additionalWidth in
guard let cachedLayout = textNode.cachedLayout else {
return
}
for rect in cachedLayout.linesRects() {
var rect = rect
rect.size.width += rect.origin.x + additionalWidth
rect.origin.x = 0.0
initialRects.append(rect.offsetBy(dx: offset.x, dy: offset.y))
}
}
let offsetY: CGFloat = -12.0
if let titleNode = self.titleNode {
addRects(titleNode, CGPoint(x: titleNode.frame.minX, y: offsetY + titleNode.frame.minY), 0.0)
if let nameNode = self.nameNode {
addRects(nameNode, CGPoint(x: titleNode.frame.minX, y: offsetY + nameNode.frame.minY), nameNode.frame.minX - titleNode.frame.minX)
}
}
let linkProgressView: TextLoadingEffectView
if let current = self.linkProgressView {
linkProgressView = current
} else {
linkProgressView = TextLoadingEffectView(frame: CGRect())
self.linkProgressView = linkProgressView
self.view.addSubview(linkProgressView)
}
linkProgressView.frame = titleNode.frame
let progressColor: UIColor = highlightColor
linkProgressView.update(color: progressColor, size: CGRectUnion(titleNode.frame, nameNode.frame).size, rects: initialRects)
} else {
if let linkProgressView = self.linkProgressView {
self.linkProgressView = nil
linkProgressView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak linkProgressView] _ in
linkProgressView?.removeFromSuperview()
})
}
}
}
public static func asyncLayout(_ maybeNode: ChatMessageForwardInfoNode?) -> (_ context: AccountContext, _ presentationData: ChatPresentationData, _ strings: PresentationStrings, _ type: ChatMessageForwardInfoType, _ peer: Peer?, _ authorName: String?, _ psaType: String?, _ storyData: StoryData?, _ constrainedSize: CGSize) -> (CGSize, (CGFloat) -> ChatMessageForwardInfoNode) {
let titleNodeLayout = TextNode.asyncLayout(maybeNode?.titleNode)
let nameNodeLayout = TextNode.asyncLayout(maybeNode?.nameNode)

View File

@ -370,6 +370,7 @@ public final class ReplyAccessoryPanelNode: AccessoryPanelNode {
super.didLoad()
self.view.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
self.view.addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(self.longPressGesture(_:))))
}
override public func animateIn() {
@ -491,9 +492,9 @@ public final class ReplyAccessoryPanelNode: AccessoryPanelNode {
}
}
/*@objc func tapGesture(_ recognizer: UITapGestureRecognizer) {
if case .ended = recognizer.state {
self.interfaceInteraction?.navigateToMessage(self.messageId, false, true, .generic)
@objc func longPressGesture(_ recognizer: UILongPressGestureRecognizer) {
if case .began = recognizer.state {
self.interfaceInteraction?.navigateToMessage(self.messageId, false, true, ChatLoadingMessageSubject.generic)
}
}*/
}
}

View File

@ -868,7 +868,7 @@ final class PeerAllowedReactionsScreenComponent: Component {
}
contentHeight += reactionCountSectionSize.height
if "".isEmpty {
if !"".isEmpty {
contentHeight += 32.0
let paidReactionsSection: ComponentView<Empty>

View File

@ -143,10 +143,12 @@ swift_library(
"//submodules/TelegramUI/Components/TextLoadingEffect",
"//submodules/TelegramUI/Components/Settings/BirthdayPickerScreen",
"//submodules/TelegramUI/Components/Settings/PeerSelectionScreen",
"//submodules/TelegramUI/Components/ButtonComponent",
"//submodules/ConfettiEffect",
"//submodules/ContactsPeerItem",
"//submodules/TelegramUI/Components/PeerManagement/OldChannelsController",
"//submodules/TelegramUI/Components/TextNodeWithEntities",
"//submodules/UrlHandling",
],
visibility = [
"//visibility:public",

View File

@ -11,6 +11,9 @@ import ContextUI
import SwiftSignalKit
import TextLoadingEffect
import EmojiTextAttachmentView
import ComponentFlow
import ButtonComponent
import ComponentDisplayAdapters
enum PeerInfoScreenLabeledValueTextColor {
case primary
@ -49,6 +52,16 @@ private struct TextLinkItemSource: Equatable {
}
final class PeerInfoScreenLabeledValueItem: PeerInfoScreenItem {
final class Button {
let title: String
let action: () -> Void
init(title: String, action: @escaping () -> Void) {
self.title = title
self.action = action
}
}
let id: AnyHashable
let context: AccountContext?
let label: String
@ -62,6 +75,7 @@ final class PeerInfoScreenLabeledValueItem: PeerInfoScreenItem {
let longTapAction: ((ASDisplayNode) -> Void)?
let linkItemAction: ((TextLinkItemActionType, TextLinkItem, ASDisplayNode, CGRect?, Promise<Bool>?) -> Void)?
let iconAction: (() -> Void)?
let button: Button?
let contextAction: ((ASDisplayNode, ContextGesture?, CGPoint?) -> Void)?
let requestLayout: () -> Void
@ -79,6 +93,7 @@ final class PeerInfoScreenLabeledValueItem: PeerInfoScreenItem {
longTapAction: ((ASDisplayNode) -> Void)? = nil,
linkItemAction: ((TextLinkItemActionType, TextLinkItem, ASDisplayNode, CGRect?, Promise<Bool>?) -> Void)? = nil,
iconAction: (() -> Void)? = nil,
button: Button? = nil,
contextAction: ((ASDisplayNode, ContextGesture?, CGPoint?) -> Void)? = nil,
requestLayout: @escaping () -> Void
) {
@ -95,6 +110,7 @@ final class PeerInfoScreenLabeledValueItem: PeerInfoScreenItem {
self.longTapAction = longTapAction
self.linkItemAction = linkItemAction
self.iconAction = iconAction
self.button = button
self.contextAction = contextAction
self.requestLayout = requestLayout
}
@ -150,6 +166,8 @@ private final class PeerInfoScreenLabeledValueItemNode: PeerInfoScreenItemNode {
private var linkHighlightingNode: LinkHighlightingNode?
private var actionButton: ComponentView<Empty>?
private let activateArea: AccessibilityAreaNode
private var validLayout: (width: CGFloat, safeInsets: UIEdgeInsets, presentationData: PresentationData, item: PeerInfoScreenItem, topItem: PeerInfoScreenItem?, bottomItem: PeerInfoScreenItem?, hasCorners: Bool)?
@ -654,6 +672,52 @@ private final class PeerInfoScreenLabeledValueItemNode: PeerInfoScreenItemNode {
height += additionalTextSize.height + 3.0
}
if let button = item.button {
height += 3.0
let actionButton: ComponentView<Empty>
if let current = self.actionButton {
actionButton = current
} else {
actionButton = ComponentView()
self.actionButton = actionButton
}
let actionButtonSize = actionButton.update(
transition: ComponentTransition(transition),
component: AnyComponent(ButtonComponent(
background: ButtonComponent.Background(
color: presentationData.theme.list.itemCheckColors.fillColor,
foreground: presentationData.theme.list.itemCheckColors.foregroundColor,
pressedColor: presentationData.theme.list.itemCheckColors.fillColor.withMultipliedAlpha(0.8)
),
content: AnyComponentWithIdentity(id: AnyHashable(0 as Int), component: AnyComponent(Text(text: button.title, font: Font.semibold(17.0), color: presentationData.theme.list.itemCheckColors.foregroundColor))),
isEnabled: true,
allowActionWhenDisabled: false,
displaysProgress: false,
action: {
button.action()
}
)),
environment: {},
containerSize: CGSize(width: width - sideInset * 2.0, height: 50.0)
)
let actionButtonFrame = CGRect(origin: CGPoint(x: sideInset, y: height), size: actionButtonSize)
if let actionButtonView = actionButton.view {
if actionButtonView.superview == nil {
self.contextSourceNode.contentNode.view.addSubview(actionButtonView)
}
transition.updateFrame(view: actionButtonView, frame: actionButtonFrame)
}
height += actionButtonSize.height
height += 16.0
} else {
if let actionButton = self.actionButton {
self.actionButton = nil
actionButton.view?.removeFromSuperview()
}
}
let highlightNodeOffset: CGFloat = topItem == nil ? 0.0 : UIScreenPixel
self.selectionNode.update(size: CGSize(width: width, height: height + highlightNodeOffset), theme: presentationData.theme, transition: transition)
transition.updateFrame(node: self.selectionNode, frame: CGRect(origin: CGPoint(x: 0.0, y: -highlightNodeOffset), size: CGSize(width: width, height: height + highlightNodeOffset)))

View File

@ -109,6 +109,7 @@ import PeerNameColorItem
import PeerSelectionScreen
import UIKitRuntimeUtils
import OldChannelsController
import UrlHandling
public enum PeerInfoAvatarEditingMode {
case generic
@ -1203,6 +1204,7 @@ private enum InfoSection: Int, CaseIterable {
case calls
case personalChannel
case peerInfo
case peerInfoTrailing
case peerMembers
}
@ -1210,6 +1212,8 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
guard let data = data else {
return []
}
var currentPeerInfoSection: InfoSection = .peerInfo
var items: [InfoSection: [PeerInfoScreenItem]] = [:]
for section in InfoSection.allCases {
@ -1265,7 +1269,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
} else {
label = presentationData.strings.ContactInfo_PhoneLabelMobile
}
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: 2, label: label, text: formattedPhone, textColor: .accent, action: { node, progress in
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 2, label: label, text: formattedPhone, textColor: .accent, action: { node, progress in
interaction.openPhone(phone, node, nil, progress)
}, longTapAction: nil, contextAction: { node, gesture, _ in
interaction.openPhone(phone, node, gesture, nil)
@ -1280,7 +1284,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
additionalUsernames = presentationData.strings.Profile_AdditionalUsernames(String(usernames.map { "@\($0.username)" }.joined(separator: ", "))).string
}
items[.peerInfo]!.append(
items[currentPeerInfoSection]!.append(
PeerInfoScreenLabeledValueItem(
id: 1,
label: presentationData.strings.Profile_Username,
@ -1326,30 +1330,70 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
}
}
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: 400, context: context, label: hasBirthdayToday ? presentationData.strings.UserInfo_BirthdayToday : presentationData.strings.UserInfo_Birthday, text: stringForCompactBirthday(birthday, strings: presentationData.strings, showAge: true), textColor: .primary, leftIcon: hasBirthdayToday ? .birthday : nil, icon: hasBirthdayToday ? .premiumGift : nil, action: birthdayAction, longTapAction: nil, iconAction: {
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 400, context: context, label: hasBirthdayToday ? presentationData.strings.UserInfo_BirthdayToday : presentationData.strings.UserInfo_Birthday, text: stringForCompactBirthday(birthday, strings: presentationData.strings, showAge: true), textColor: .primary, leftIcon: hasBirthdayToday ? .birthday : nil, icon: hasBirthdayToday ? .premiumGift : nil, action: birthdayAction, longTapAction: nil, iconAction: {
interaction.openPremiumGift()
}, contextAction: birthdayContextAction, requestLayout: {
}))
}
if user.isFake {
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: "", text: user.botInfo != nil ? presentationData.strings.UserInfo_FakeBotWarning : presentationData.strings.UserInfo_FakeUserWarning, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: user.botInfo != nil ? enabledPrivateBioEntities : []), action: nil, requestLayout: {
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: "", text: user.botInfo != nil ? presentationData.strings.UserInfo_FakeBotWarning : presentationData.strings.UserInfo_FakeUserWarning, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: user.botInfo != nil ? enabledPrivateBioEntities : []), action: nil, requestLayout: {
interaction.requestLayout(false)
}))
} else if user.isScam {
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: user.botInfo == nil ? presentationData.strings.Profile_About : presentationData.strings.Profile_BotInfo, text: user.botInfo != nil ? presentationData.strings.UserInfo_ScamBotWarning : presentationData.strings.UserInfo_ScamUserWarning, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: user.botInfo != nil ? enabledPrivateBioEntities : []), action: nil, requestLayout: {
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: user.botInfo == nil ? presentationData.strings.Profile_About : presentationData.strings.Profile_BotInfo, text: user.botInfo != nil ? presentationData.strings.UserInfo_ScamBotWarning : presentationData.strings.UserInfo_ScamUserWarning, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: user.botInfo != nil ? enabledPrivateBioEntities : []), action: nil, requestLayout: {
interaction.requestLayout(false)
}))
} else if let about = cachedData.about, !about.isEmpty {
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: user.botInfo == nil ? presentationData.strings.Profile_About : presentationData.strings.Profile_BotInfo, text: about, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: user.isPremium ? enabledPublicBioEntities : enabledPrivateBioEntities), action: isMyProfile ? { node, _ in
var actionButton: PeerInfoScreenLabeledValueItem.Button?
if let menuButton = cachedData.botInfo?.menuButton, case let .webView(text, url) = menuButton {
//TODO:localize
actionButton = PeerInfoScreenLabeledValueItem.Button(title: "Open App", action: {
guard let parentController = interaction.getController() else {
return
}
openWebApp(
parentController: parentController,
context: context,
peer: .user(user),
buttonText: text,
url: url,
simple: false,
source: .menu
)
})
}
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: user.botInfo == nil ? presentationData.strings.Profile_About : presentationData.strings.Profile_BotInfo, text: about, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: user.isPremium ? enabledPublicBioEntities : enabledPrivateBioEntities), action: isMyProfile ? { node, _ in
bioContextAction(node, nil, nil)
} : nil, linkItemAction: bioLinkAction, contextAction: bioContextAction, requestLayout: {
} : nil, linkItemAction: bioLinkAction, button: actionButton, contextAction: bioContextAction, requestLayout: {
interaction.requestLayout(false)
}))
if let botInfo = user.botInfo, botInfo.flags.contains(.canEdit) {
//TODO:localize
items[currentPeerInfoSection]!.append(PeerInfoScreenCommentItem(id: 800, text: "By publishing this mini app, you agree to the [Telegram Terms of Service for Developers](https://telegram.org/privacy).", linkAction: { action in
if case let .tap(url) = action {
context.sharedContext.applicationBindings.openUrl(url)
}
}))
currentPeerInfoSection = .peerInfoTrailing
} else if actionButton != nil {
//TODO:localize
items[currentPeerInfoSection]!.append(PeerInfoScreenCommentItem(id: 800, text: "By launching this mini app, you agree to the [Terms of Service for Mini Apps](https://telegram.org/privacy).", linkAction: { action in
if case let .tap(url) = action {
context.sharedContext.applicationBindings.openUrl(url)
}
}))
currentPeerInfoSection = .peerInfoTrailing
}
}
if let businessHours = cachedData.businessHours {
items[.peerInfo]!.append(PeerInfoScreenBusinessHoursItem(id: 300, label: presentationData.strings.PeerInfo_BusinessHours_Label, businessHours: businessHours, requestLayout: { animated in
items[currentPeerInfoSection]!.append(PeerInfoScreenBusinessHoursItem(id: 300, label: presentationData.strings.PeerInfo_BusinessHours_Label, businessHours: businessHours, requestLayout: { animated in
interaction.requestLayout(animated)
}, longTapAction: nil, contextAction: workingHoursContextAction))
}
@ -1357,7 +1401,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
if let businessLocation = cachedData.businessLocation {
if let coordinates = businessLocation.coordinates {
let imageSignal = chatMapSnapshotImage(engine: context.engine, resource: MapSnapshotMediaResource(latitude: coordinates.latitude, longitude: coordinates.longitude, width: 90, height: 90))
items[.peerInfo]!.append(PeerInfoScreenAddressItem(
items[currentPeerInfoSection]!.append(PeerInfoScreenAddressItem(
id: 301,
label: presentationData.strings.PeerInfo_Location_Label,
text: businessLocation.address,
@ -1368,7 +1412,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
contextAction: businessLocationContextAction
))
} else {
items[.peerInfo]!.append(PeerInfoScreenAddressItem(
items[currentPeerInfoSection]!.append(PeerInfoScreenAddressItem(
id: 301,
label: presentationData.strings.PeerInfo_Location_Label,
text: businessLocation.address,
@ -1382,25 +1426,25 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
if !isMyProfile {
if let reactionSourceMessageId = reactionSourceMessageId, !data.isContact {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 3, text: presentationData.strings.UserInfo_SendMessage, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 3, text: presentationData.strings.UserInfo_SendMessage, action: {
interaction.openChat(nil)
}))
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 4, text: presentationData.strings.ReportPeer_BanAndReport, color: .destructive, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 4, text: presentationData.strings.ReportPeer_BanAndReport, color: .destructive, action: {
interaction.openReport(.reaction(reactionSourceMessageId))
}))
} else if let _ = nearbyPeerDistance {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 3, text: presentationData.strings.UserInfo_SendMessage, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 3, text: presentationData.strings.UserInfo_SendMessage, action: {
interaction.openChat(nil)
}))
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 4, text: presentationData.strings.ReportPeer_Report, color: .destructive, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 4, text: presentationData.strings.ReportPeer_Report, color: .destructive, action: {
interaction.openReport(.user)
}))
} else {
if !data.isContact {
if user.botInfo == nil {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 3, text: presentationData.strings.PeerInfo_AddToContacts, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 3, text: presentationData.strings.PeerInfo_AddToContacts, action: {
interaction.openAddContact()
}))
}
@ -1412,14 +1456,14 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
}
if isBlocked {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 4, text: user.botInfo != nil ? presentationData.strings.Bot_Unblock : presentationData.strings.Conversation_Unblock, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 4, text: user.botInfo != nil ? presentationData.strings.Bot_Unblock : presentationData.strings.Conversation_Unblock, action: {
interaction.updateBlocked(false)
}))
} else {
if user.flags.contains(.isSupport) || data.isContact {
} else {
if user.botInfo == nil {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 4, text: presentationData.strings.Conversation_BlockUser, color: .destructive, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 4, text: presentationData.strings.Conversation_BlockUser, color: .destructive, action: {
interaction.updateBlocked(true)
}))
}
@ -1427,22 +1471,22 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
}
if let encryptionKeyFingerprint = data.encryptionKeyFingerprint {
items[.peerInfo]!.append(PeerInfoScreenDisclosureEncryptionKeyItem(id: 5, text: presentationData.strings.Profile_EncryptionKey, fingerprint: encryptionKeyFingerprint, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenDisclosureEncryptionKeyItem(id: 5, text: presentationData.strings.Profile_EncryptionKey, fingerprint: encryptionKeyFingerprint, action: {
interaction.openEncryptionKey()
}))
}
if user.botInfo != nil {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 6, text: presentationData.strings.ReportPeer_Report, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 6, text: presentationData.strings.ReportPeer_Report, action: {
interaction.openReport(.default)
}))
}
if let botInfo = user.botInfo, botInfo.flags.contains(.worksWithGroups) {
items[.peerInfo]!.append(PeerInfoScreenActionItem(id: 7, text: presentationData.strings.Bot_AddToChat, color: .accent, action: {
items[currentPeerInfoSection]!.append(PeerInfoScreenActionItem(id: 7, text: presentationData.strings.Bot_AddToChat, color: .accent, action: {
interaction.openAddBotToGroup()
}))
items[.peerInfo]!.append(PeerInfoScreenCommentItem(id: 8, text: presentationData.strings.Bot_AddToChatInfo))
items[currentPeerInfoSection]!.append(PeerInfoScreenCommentItem(id: 8, text: presentationData.strings.Bot_AddToChatInfo))
}
if let botInfo = user.botInfo, botInfo.flags.contains(.canEdit) {
@ -1498,7 +1542,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
let linkText = "https://t.me/\(mainUsername)/\(threadId)"
items[.peerInfo]!.append(
items[currentPeerInfoSection]!.append(
PeerInfoScreenLabeledValueItem(
id: ItemUsername,
label: presentationData.strings.Channel_LinkItem,
@ -1525,7 +1569,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
if let _ = channel.addressName {
} else {
items[.peerInfo]!.append(PeerInfoScreenCommentItem(id: ItemUsernameInfo, text: presentationData.strings.PeerInfo_PrivateShareLinkInfo))
items[currentPeerInfoSection]!.append(PeerInfoScreenCommentItem(id: ItemUsernameInfo, text: presentationData.strings.PeerInfo_PrivateShareLinkInfo))
}
} else {
if let location = (data.cachedData as? CachedChannelData)?.peerGeoLocation {
@ -1550,7 +1594,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
additionalUsernames = presentationData.strings.Profile_AdditionalUsernames(String(usernames.map { "@\($0.username)" }.joined(separator: ", "))).string
}
items[.peerInfo]!.append(
items[currentPeerInfoSection]!.append(
PeerInfoScreenLabeledValueItem(
id: ItemUsername,
label: presentationData.strings.Channel_LinkItem,
@ -1605,7 +1649,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
if case .group = channel.info {
enabledEntities = enabledPrivateBioEntities
}
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: ItemAbout, label: presentationData.strings.Channel_Info_Description, text: aboutText, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: enabledEntities), action: isMyProfile ? { node, _ in
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: ItemAbout, label: presentationData.strings.Channel_Info_Description, text: aboutText, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: enabledEntities), action: isMyProfile ? { node, _ in
bioContextAction(node, nil, nil)
} : nil, linkItemAction: bioLinkAction, contextAction: bioContextAction, requestLayout: {
interaction.requestLayout(true)
@ -1691,7 +1735,7 @@ private func infoItems(data: PeerInfoScreenData?, context: AccountContext, prese
}
if let aboutText = aboutText {
items[.peerInfo]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: presentationData.strings.Channel_Info_Description, text: aboutText, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: enabledPrivateBioEntities), action: isMyProfile ? { node, _ in
items[currentPeerInfoSection]!.append(PeerInfoScreenLabeledValueItem(id: 0, label: presentationData.strings.Channel_Info_Description, text: aboutText, textColor: .primary, textBehavior: .multiLine(maxLines: 100, enabledEntities: enabledPrivateBioEntities), action: isMyProfile ? { node, _ in
bioContextAction(node, nil, nil)
} : nil, linkItemAction: bioLinkAction, contextAction: bioContextAction, requestLayout: {
interaction.requestLayout(true)
@ -13738,3 +13782,192 @@ private final class HeaderContextReferenceContentSource: ContextReferenceContent
return ContextControllerReferenceViewInfo(referenceView: self.sourceView, contentAreaInScreenSpace: UIScreen.main.bounds)
}
}
private func openWebApp(parentController: ViewController, context: AccountContext, peer: EnginePeer, buttonText: String, url: String, simple: Bool, source: ChatOpenWebViewSource) {
let presentationData = context.sharedContext.currentPresentationData.with({ $0 })
let botName: String
let botAddress: String
let botVerified: Bool
if case let .inline(bot) = source {
botName = bot.compactDisplayTitle
botAddress = bot.addressName ?? ""
botVerified = bot.isVerified
} else {
botName = peer.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
botAddress = peer.addressName ?? ""
botVerified = peer.isVerified
}
let _ = botAddress
let openWebView = { [weak parentController] in
guard let parentController else {
return
}
if source == .menu {
if let navigationController = parentController.navigationController as? NavigationController, let minimizedContainer = navigationController.minimizedContainer {
for controller in minimizedContainer.controllers {
if let controller = controller as? AttachmentController, let mainController = controller.mainController as? WebAppController, mainController.botId == peer.id && mainController.source == .menu {
navigationController.maximizeViewController(controller, animated: true)
return
}
}
}
var fullSize = false
if isTelegramMeLink(url), let internalUrl = parseFullInternalUrl(sharedContext: context.sharedContext, url: url), case .peer(_, .appStart) = internalUrl {
fullSize = !url.contains("?mode=compact")
}
var presentImpl: ((ViewController, Any?) -> Void)?
let params = WebAppParameters(source: .menu, peerId: peer.id, botId: peer.id, botName: botName, botVerified: botVerified, url: url, queryId: nil, payload: nil, buttonText: buttonText, keepAliveSignal: nil, forceHasSettings: false, fullSize: fullSize)
//TODO:localize
//updatedPresentationData
let controller = standaloneWebAppController(context: context, updatedPresentationData: nil, params: params, threadId: nil, openUrl: { [weak parentController] url, concealed, commit in
guard let parentController else {
return
}
let _ = parentController
/*ChatControllerImpl.botOpenUrl(context: context, peerId: peerId, controller: self, url: url, concealed: concealed, present: { c, a in
presentImpl?(c, a)
}, commit: commit)*/
}, requestSwitchInline: { [weak parentController] query, chatTypes, completion in
guard let parentController else {
return
}
let _ = parentController
//ChatControllerImpl.botRequestSwitchInline(context: context, controller: self, peerId: peerId, botAddress: botAddress, query: query, chatTypes: chatTypes, completion: completion)
}, getInputContainerNode: {
return nil
}, completion: {
}, willDismiss: {
}, didDismiss: {
}, getNavigationController: { [weak parentController] () -> NavigationController? in
guard let parentController else {
return nil
}
return parentController.navigationController as? NavigationController ?? context.sharedContext.mainWindow?.viewController as? NavigationController
})
controller.navigationPresentation = .flatModal
parentController.push(controller)
presentImpl = { [weak controller] c, a in
controller?.present(c, in: .window(.root), with: a)
}
let _ = presentImpl
} else if simple {
var isInline = false
var botId = peer.id
var botName = botName
var botAddress = ""
var botVerified = false
if case let .inline(bot) = source {
isInline = true
botId = bot.id
botName = bot.displayTitle(strings: presentationData.strings, displayOrder: presentationData.nameDisplayOrder)
botAddress = bot.addressName ?? ""
botVerified = bot.isVerified
}
let _ = botAddress
let _ = ((context.engine.messages.requestSimpleWebView(botId: botId, url: url, source: isInline ? .inline : .generic, themeParams: generateWebAppThemeParams(presentationData.theme)))
|> deliverOnMainQueue).startStandalone(next: { [weak parentController] result in
guard let parentController else {
return
}
var presentImpl: ((ViewController, Any?) -> Void)?
let params = WebAppParameters(source: isInline ? .inline : .simple, peerId: peer.id, botId: botId, botName: botName, botVerified: botVerified, url: result.url, queryId: nil, payload: nil, buttonText: buttonText, keepAliveSignal: nil, forceHasSettings: false, fullSize: result.flags.contains(.fullSize))
let controller = standaloneWebAppController(context: context, updatedPresentationData: nil, params: params, threadId: nil, openUrl: { [weak parentController] url, concealed, commit in
guard let parentController else {
return
}
let _ = parentController
/*ChatControllerImpl.botOpenUrl(context: context, peerId: peerId, controller: self, url: url, concealed: concealed, present: { c, a in
presentImpl?(c, a)
}, commit: commit)*/
}, requestSwitchInline: { query, chatTypes, completion in
//ChatControllerImpl.botRequestSwitchInline(context: context, controller: self, peerId: peerId, botAddress: botAddress, query: query, chatTypes: chatTypes, completion: completion)
}, getNavigationController: { [weak parentController] in
guard let parentController else {
return nil
}
return parentController.navigationController as? NavigationController ?? context.sharedContext.mainWindow?.viewController as? NavigationController
})
controller.navigationPresentation = .flatModal
parentController.push(controller)
presentImpl = { [weak controller] c, a in
controller?.present(c, in: .window(.root), with: a)
}
let _ = presentImpl
}, error: { [weak parentController] error in
guard let parentController else {
return
}
parentController.present(textAlertController(context: context, updatedPresentationData: nil, title: nil, text: presentationData.strings.Login_UnknownError, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
})
} else {
let _ = ((context.engine.messages.requestWebView(peerId: peer.id, botId: peer.id, url: !url.isEmpty ? url : nil, payload: nil, themeParams: generateWebAppThemeParams(presentationData.theme), fromMenu: false, replyToMessageId: nil, threadId: nil))
|> deliverOnMainQueue).startStandalone(next: { [weak parentController] result in
guard let parentController else {
return
}
var presentImpl: ((ViewController, Any?) -> Void)?
let context = context
let params = WebAppParameters(source: .button, peerId: peer.id, botId: peer.id, botName: botName, botVerified: botVerified, url: result.url, queryId: result.queryId, payload: nil, buttonText: buttonText, keepAliveSignal: result.keepAliveSignal, forceHasSettings: false, fullSize: result.flags.contains(.fullSize))
let controller = standaloneWebAppController(context: context, updatedPresentationData: nil, params: params, threadId: nil, openUrl: { [weak parentController] url, concealed, commit in
guard let parentController else {
return
}
let _ = parentController
/*ChatControllerImpl.botOpenUrl(context: context, peerId: peerId, controller: self, url: url, concealed: concealed, present: { c, a in
presentImpl?(c, a)
}, commit: commit)*/
}, completion: {
}, getNavigationController: { [weak parentController] in
guard let parentController else {
return nil
}
return parentController.navigationController as? NavigationController ?? context.sharedContext.mainWindow?.viewController as? NavigationController
})
controller.navigationPresentation = .flatModal
parentController.push(controller)
presentImpl = { [weak controller] c, a in
controller?.present(c, in: .window(.root), with: a)
}
let _ = presentImpl
}, error: { [weak parentController] error in
guard let parentController else {
return
}
parentController.present(textAlertController(context: context, updatedPresentationData: nil, title: nil, text: presentationData.strings.Login_UnknownError, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {
})]), in: .window(.root))
})
}
}
var botPeer = peer
if case let .inline(bot) = source {
botPeer = bot
}
let _ = (ApplicationSpecificNotice.getBotGameNotice(accountManager: context.sharedContext.accountManager, peerId: botPeer.id)
|> deliverOnMainQueue).startStandalone(next: { [weak parentController] value in
guard let parentController else {
return
}
if value {
openWebView()
} else {
let controller = webAppLaunchConfirmationController(context: context, updatedPresentationData: nil, peer: botPeer, completion: { _ in
let _ = ApplicationSpecificNotice.setBotGameNotice(accountManager: context.sharedContext.accountManager, peerId: botPeer.id).startStandalone()
openWebView()
}, showMore: nil)
parentController.present(controller, in: .window(.root))
}
})
}
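A condensed sketch of how the helper above can be driven from a profile button action; parentController, context, user, and the menu button's text and url are assumed to already be in scope at the call site:

// Illustrative only: launches the bot's mini app the same way the "Open App"
// bio button above does, assuming a .webView menu button was found.
openWebApp(
    parentController: parentController,
    context: context,
    peer: .user(user),
    buttonText: text,   // title carried by the .webView menu button
    url: url,           // URL carried by the .webView menu button
    simple: false,
    source: .menu
)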

View File

@ -163,6 +163,7 @@ public extension StoryContainerScreen {
static func openPeerStoriesCustom(
context: AccountContext,
peerId: EnginePeer.Id,
focusOnId: Int32? = nil,
isHidden: Bool,
initialOrder: [EnginePeer.Id] = [],
singlePeer: Bool,
@ -173,7 +174,7 @@ public extension StoryContainerScreen {
setProgress: @escaping (Signal<Never, NoError>) -> Void,
completion: @escaping (StoryContainerScreen) -> Void = { _ in }
) {
let storyContent = StoryContentContextImpl(context: context, isHidden: isHidden, focusedPeerId: peerId, singlePeer: singlePeer, fixedOrder: initialOrder)
let storyContent = StoryContentContextImpl(context: context, isHidden: isHidden, focusedPeerId: peerId, focusedStoryId: focusOnId, singlePeer: singlePeer, fixedOrder: initialOrder)
let signal = storyContent.state
|> take(1)
|> mapToSignal { state -> Signal<StoryContentContextState, NoError> in

View File

@ -48,6 +48,7 @@ public final class StoryContentContextImpl: StoryContentContext {
self.peerId = peerId
self.currentFocusedId = initialFocusedId
self.storedFocusedId = self.currentFocusedId
self.currentFocusedIdUpdatedPromise.set(.single(Void()))
context.engine.account.viewTracker.refreshCanSendMessagesForPeerIds(peerIds: [peerId])
@ -600,13 +601,14 @@ public final class StoryContentContextImpl: StoryContentContext {
context: AccountContext,
isHidden: Bool,
focusedPeerId: EnginePeer.Id?,
focusedStoryId: Int32? = nil,
singlePeer: Bool,
fixedOrder: [EnginePeer.Id] = []
) {
self.context = context
self.isHidden = isHidden
if let focusedPeerId {
self.focusedItem = (focusedPeerId, nil)
self.focusedItem = (focusedPeerId, focusedStoryId)
}
self.fixedSubscriptionOrder = fixedOrder
@ -858,9 +860,11 @@ public final class StoryContentContextImpl: StoryContentContext {
}
var centralIndex: Int?
if let (focusedPeerId, _) = self.focusedItem {
var centralStoryId: Int32?
if let (focusedPeerId, focusedStoryId) = self.focusedItem {
if let index = subscriptionItems.firstIndex(where: { $0.peer.id == focusedPeerId }) {
centralIndex = index
centralStoryId = focusedStoryId
}
}
if centralIndex == nil {
@ -874,7 +878,7 @@ public final class StoryContentContextImpl: StoryContentContext {
if let currentState = self.currentState, let existingContext = currentState.findPeerContext(id: subscriptionItems[centralIndex].peer.id) {
centralPeerContext = existingContext
} else {
centralPeerContext = PeerContext(context: self.context, peerId: subscriptionItems[centralIndex].peer.id, focusedId: nil, loadIds: loadIds)
centralPeerContext = PeerContext(context: self.context, peerId: subscriptionItems[centralIndex].peer.id, focusedId: centralStoryId, loadIds: loadIds)
}
var previousPeerContext: PeerContext?
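A short sketch of the widened initializer in use; the ids are placeholders and context is assumed to be an AccountContext available at the call site:

// Illustrative only: open the story context focused on one specific story of a peer.
let storyContent = StoryContentContextImpl(
    context: context,
    isHidden: false,
    focusedPeerId: peerId,     // EnginePeer.Id of the story author
    focusedStoryId: 42,        // Int32 id of the story to land on (placeholder)
    singlePeer: true
)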

View File

@ -171,4 +171,34 @@ public final class TextLoadingEffectView: UIView {
self.updateAnimations(size: maskFrame.size)
}
}
public func update(color: UIColor, size: CGSize, rects: [CGRect]) {
let rectsSet: [CGRect] = rects
let maskFrame = CGRect(origin: CGPoint(), size: size).insetBy(dx: -4.0, dy: -4.0)
self.maskContentsView.backgroundColor = color.withAlphaComponent(0.1)
self.maskBorderContentsView.backgroundColor = color.withAlphaComponent(0.12)
self.backgroundView.tintColor = color
self.borderBackgroundView.tintColor = color
self.maskContentsView.frame = maskFrame
self.maskBorderContentsView.frame = maskFrame
self.maskHighlightNode.updateRects(rectsSet)
self.maskHighlightNode.frame = CGRect(origin: CGPoint(x: -maskFrame.minX, y: -maskFrame.minY), size: CGSize())
self.maskBorderHighlightNode.updateRects(rectsSet)
self.maskBorderHighlightNode.frame = CGRect(origin: CGPoint(x: -maskFrame.minX, y: -maskFrame.minY), size: CGSize())
if self.size != maskFrame.size {
self.size = maskFrame.size
self.backgroundView.frame = CGRect(origin: CGPoint(x: -self.gradientWidth, y: 0.0), size: CGSize(width: self.gradientWidth, height: maskFrame.height))
self.borderBackgroundView.frame = CGRect(origin: CGPoint(x: -self.gradientWidth, y: 0.0), size: CGSize(width: self.gradientWidth, height: maskFrame.height))
self.updateAnimations(size: maskFrame.size)
}
}
}
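A minimal sketch of the new overload in use, assuming an existing TextLoadingEffectView instance and placeholder geometry:

// Illustrative only: drive the shimmer over two highlighted text rects.
loadingEffectView.update(
    color: .white,
    size: CGSize(width: 240.0, height: 40.0),
    rects: [
        CGRect(x: 0.0, y: 2.0, width: 180.0, height: 16.0),
        CGRect(x: 0.0, y: 22.0, width: 120.0, height: 16.0)
    ]
)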

View File

@ -29,7 +29,7 @@ extension ChatControllerImpl {
guard let self else {
return
}
self.navigateToMessage(from: fromId, to: .id(id, params), forceInCurrentChat: fromId.peerId == id.peerId && !params.forceNew, forceNew: params.forceNew)
self.navigateToMessage(from: fromId, to: .id(id, params), forceInCurrentChat: fromId.peerId == id.peerId && !params.forceNew, forceNew: params.forceNew, progress: params.progress)
}
let _ = (self.context.engine.data.get(
@ -77,6 +77,7 @@ extension ChatControllerImpl {
animated: Bool = true,
completion: (() -> Void)? = nil,
customPresentProgress: ((ViewController, Any?) -> Void)? = nil,
progress: Promise<Bool>? = nil,
statusSubject: ChatLoadingMessageSubject = .generic
) {
if !self.isNodeLoaded {
@ -160,31 +161,156 @@ extension ChatControllerImpl {
guard let self, let peer = peer else {
return
}
if let navigationController = self.effectiveNavigationController {
var chatLocation: NavigateToChatControllerParams.Location = .peer(peer)
var displayMessageNotFoundToast = false
if case let .channel(channel) = peer, channel.flags.contains(.isForum) {
if let message = message, let threadId = message.threadId {
chatLocation = .replyThread(ChatReplyThreadMessage(peerId: peer.id, threadId: threadId, channelMessageId: nil, isChannelPost: false, isForumPost: true, maxMessage: nil, maxReadIncomingMessageId: nil, maxReadOutgoingMessageId: nil, unreadCount: 0, initialFilledHoles: IndexSet(), initialAnchor: .automatic, isNotAvailable: false))
var quote: ChatControllerSubject.MessageHighlight.Quote?
if case let .id(_, params) = messageLocation {
quote = params.quote.flatMap { quote in ChatControllerSubject.MessageHighlight.Quote(string: quote.string, offset: quote.offset) }
}
var progressValue: Promise<Bool>?
if let value = progress {
progressValue = value
} else if case let .id(_, params) = messageLocation {
progressValue = params.progress
}
self.loadingMessage.set(.single(statusSubject) |> delay(0.1, queue: .mainQueue()))
var chatLocation: NavigateToChatControllerParams.Location = .peer(peer)
var preloadChatLocation: ChatLocation = .peer(id: peer.id)
var displayMessageNotFoundToast = false
if case let .channel(channel) = peer, channel.flags.contains(.isForum) {
if let message = message, let threadId = message.threadId {
let replyThreadMessage = ChatReplyThreadMessage(peerId: peer.id, threadId: threadId, channelMessageId: nil, isChannelPost: false, isForumPost: true, maxMessage: nil, maxReadIncomingMessageId: nil, maxReadOutgoingMessageId: nil, unreadCount: 0, initialFilledHoles: IndexSet(), initialAnchor: .automatic, isNotAvailable: false)
chatLocation = .replyThread(replyThreadMessage)
preloadChatLocation = .replyThread(message: replyThreadMessage)
} else {
displayMessageNotFoundToast = true
}
}
let searchLocation: ChatHistoryInitialSearchLocation
switch messageLocation {
case let .id(id, _):
if case let .replyThread(message) = chatLocation, id == message.effectiveMessageId {
searchLocation = .index(.absoluteLowerBound())
} else {
searchLocation = .id(id)
}
case let .index(index):
searchLocation = .index(index)
case .upperBound:
searchLocation = .index(MessageIndex.upperBound(peerId: chatLocation.peerId))
}
var historyView: Signal<ChatHistoryViewUpdate, NoError>
let subject: ChatControllerSubject = .message(id: .id(messageId), highlight: ChatControllerSubject.MessageHighlight(quote: quote), timecode: nil, setupReply: false)
historyView = preloadedChatHistoryViewForLocation(ChatHistoryLocationInput(content: .InitialSearch(subject: MessageHistoryInitialSearchSubject(location: searchLocation, quote: nil), count: 50, highlight: true, setupReply: false), id: 0), context: self.context, chatLocation: preloadChatLocation, subject: subject, chatLocationContextHolder: Atomic<ChatLocationContextHolder?>(value: nil), fixedCombinedReadStates: nil, tag: nil, additionalData: [])
var signal: Signal<(MessageIndex?, Bool), NoError>
signal = historyView
|> mapToSignal { historyView -> Signal<(MessageIndex?, Bool), NoError> in
switch historyView {
case .Loading:
return .single((nil, true))
case let .HistoryView(view, _, _, _, _, _, _):
for entry in view.entries {
if entry.message.id == messageLocation.messageId {
return .single((entry.message.index, false))
}
}
if case let .index(index) = searchLocation {
return .single((index, false))
}
return .single((nil, false))
}
}
|> take(until: { index in
return SignalTakeAction(passthrough: true, complete: !index.1)
})
/*#if DEBUG
signal = .single((nil, true)) |> then(signal |> delay(2.0, queue: .mainQueue()))
#endif*/
var cancelImpl: (() -> Void)?
let presentationData = self.presentationData
let displayTime = CACurrentMediaTime()
let progressSignal = Signal<Never, NoError> { [weak self] subscriber in
if let progressValue {
progressValue.set(.single(true))
return ActionDisposable {
Queue.mainQueue().async() {
progressValue.set(.single(false))
}
}
} else if case .generic = statusSubject {
let controller = OverlayStatusController(theme: presentationData.theme, type: .loading(cancelled: {
if CACurrentMediaTime() - displayTime > 1.5 {
cancelImpl?()
}
}))
if let customPresentProgress = customPresentProgress {
customPresentProgress(controller, nil)
} else {
displayMessageNotFoundToast = true
self?.present(controller, in: .window(.root))
}
return ActionDisposable { [weak controller] in
Queue.mainQueue().async() {
controller?.dismiss()
}
}
} else {
return EmptyDisposable
}
}
|> runOn(Queue.mainQueue())
|> delay(progressValue == nil ? 0.05 : 0.0, queue: Queue.mainQueue())
let progressDisposable = MetaDisposable()
var progressStarted = false
self.messageIndexDisposable.set((signal
|> afterDisposed {
Queue.mainQueue().async {
progressDisposable.dispose()
}
}
|> deliverOnMainQueue).startStrict(next: { [weak self] index in
guard let self else {
return
}
var quote: ChatControllerSubject.MessageHighlight.Quote?
if case let .id(_, params) = messageLocation {
quote = params.quote.flatMap { quote in ChatControllerSubject.MessageHighlight.Quote(string: quote.string, offset: quote.offset) }
if let index = index.0 {
let _ = index
//strongSelf.chatDisplayNode.historyNode.scrollToMessage(from: scrollFromIndex, to: index, animated: animated, quote: quote, scrollPosition: scrollPosition)
} else if index.1 {
if !progressStarted {
progressStarted = true
progressDisposable.set(progressSignal.start())
}
return
}
let context = self.context
self.context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: self.context, chatLocation: chatLocation, subject: .message(id: .id(messageId), highlight: ChatControllerSubject.MessageHighlight(quote: quote), timecode: nil, setupReply: false), keepStack: .always, chatListCompletion: { chatListController in
if displayMessageNotFoundToast {
let presentationData = context.sharedContext.currentPresentationData.with({ $0 })
chatListController.present(UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: presentationData.strings.Conversation_MessageDoesntExist, timeout: nil, customUndoText: nil), elevatedLayout: false, animateInAsReplacement: false, action: { _ in
return true
}), in: .current)
}
}))
if let navigationController = self.effectiveNavigationController {
let context = self.context
self.context.sharedContext.navigateToChatController(NavigateToChatControllerParams(navigationController: navigationController, context: self.context, chatLocation: chatLocation, subject: subject, keepStack: .always, chatListCompletion: { chatListController in
if displayMessageNotFoundToast {
let presentationData = context.sharedContext.currentPresentationData.with({ $0 })
chatListController.present(UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: presentationData.strings.Conversation_MessageDoesntExist, timeout: nil, customUndoText: nil), elevatedLayout: false, animateInAsReplacement: false, action: { _ in
return true
}), in: .current)
}
}))
}
}, completed: { [weak self] in
if let strongSelf = self {
strongSelf.loadingMessage.set(.single(nil))
}
completion?()
}))
cancelImpl = { [weak self] in
if let strongSelf = self {
strongSelf.loadingMessage.set(.single(nil))
strongSelf.messageIndexDisposable.set(nil)
}
}
completion?()
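The index lookup above leans on take(until:) to keep forwarding loading states while completing on the first definitive answer; a condensed sketch of that pattern, with a placeholder signal name:

// Illustrative only: forward every (index, isLoading) pair downstream, but complete
// the signal as soon as a non-loading result arrives.
let firstResult = lookupSignal   // Signal<(MessageIndex?, Bool), NoError>, placeholder
|> take(until: { pair in
    return SignalTakeAction(passthrough: true, complete: !pair.1)
})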

View File

@ -125,6 +125,14 @@ extension ChatControllerImpl {
return
}
var deleteAllMessageCount: Signal<Int?, NoError> = .single(nil)
if authors.count == 1 {
deleteAllMessageCount = self.context.engine.messages.searchMessages(location: .peer(peerId: peerId, fromId: authors[0].id, tags: nil, reactions: nil, threadId: self.chatLocation.threadId, minDate: nil, maxDate: nil), query: "", state: nil)
|> map { result, _ -> Int? in
return Int(result.totalCount)
}
}
var signal = combineLatest(authors.map { author in
self.context.engine.peers.fetchChannelParticipant(peerId: peerId, participantId: author.id)
|> map { result -> (Peer, ChannelParticipant?) in
@ -161,8 +169,8 @@ extension ChatControllerImpl {
disposables.set(nil)
}
disposables.set((signal
|> deliverOnMainQueue).startStrict(next: { [weak self] authorsAndParticipants in
disposables.set((combineLatest(signal, deleteAllMessageCount)
|> deliverOnMainQueue).startStrict(next: { [weak self] authorsAndParticipants, deleteAllMessageCount in
guard let self else {
return
}
@ -212,6 +220,7 @@ extension ChatControllerImpl {
chatPeer: chatPeer,
peers: renderedParticipants,
messageCount: messageIds.count,
deleteAllMessageCount: deleteAllMessageCount,
completion: { [weak self] result in
guard let self else {
return
@ -259,8 +268,16 @@ extension ChatControllerImpl {
disposables.set(nil)
}
disposables.set((signal
|> deliverOnMainQueue).startStrict(next: { [weak self] maybeParticipant in
var deleteAllMessageCount: Signal<Int?, NoError> = .single(nil)
do {
deleteAllMessageCount = self.context.engine.messages.getSearchMessageCount(location: .peer(peerId: peerId, fromId: author.id, tags: nil, reactions: nil, threadId: self.chatLocation.threadId, minDate: nil, maxDate: nil), query: "")
|> map { result -> Int? in
return result
}
}
disposables.set((combineLatest(signal, deleteAllMessageCount)
|> deliverOnMainQueue).startStrict(next: { [weak self] maybeParticipant, deleteAllMessageCount in
guard let self else {
return
}
@ -310,6 +327,7 @@ extension ChatControllerImpl {
peer: authorPeer._asPeer()
)],
messageCount: messageIds.count,
deleteAllMessageCount: deleteAllMessageCount,
completion: { [weak self] result in
guard let self else {
return
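A compact sketch of the per-author message count lookup added above; peerId, author and context are assumed to be in scope, and the threadId is left nil as a placeholder:

// Illustrative only: count the author's messages in this chat before offering
// "delete all from this user", mirroring the getSearchMessageCount call above.
let deleteAllMessageCount: Signal<Int?, NoError> = context.engine.messages.getSearchMessageCount(
    location: .peer(peerId: peerId, fromId: author.id, tags: nil, reactions: nil, threadId: nil, minDate: nil, maxDate: nil),
    query: ""
)
|> map { result -> Int? in
    return result
}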

View File

@ -257,10 +257,10 @@ func openExternalUrlImpl(context: AccountContext, urlContext: OpenURLContext, ur
if let value = URL(string: "ipfs:/" + parsedUrl.path) {
parsedUrl = value
}
}
} else if let scheme = parsedUrl.scheme, scheme == "https", parsedUrl.host == "t.me", parsedUrl.path.hasPrefix("/ipfs/") {
if let value = URL(string: "ipfs://" + String(parsedUrl.path[parsedUrl.path.index(parsedUrl.path.startIndex, offsetBy: "/ipfs/".count)...])) {
parsedUrl = value
} else if parsedUrl.host == "ton" {
if let value = URL(string: "ton:/" + parsedUrl.path) {
parsedUrl = value
}
}
}
}
@ -1009,7 +1009,7 @@ func openExternalUrlImpl(context: AccountContext, urlContext: OpenURLContext, ur
isInternetUrl = true
}
if context.sharedContext.immediateExperimentalUISettings.browserExperiment {
if parsedUrl.scheme == "ipfs" || parsedUrl.scheme == "ipns" {
if parsedUrl.scheme == "ipfs" || parsedUrl.scheme == "ipns" || parsedUrl.scheme == "ton" {
isInternetUrl = true
}
}

View File

@ -251,6 +251,10 @@ private final class NativeVideoContentNode: ASDisplayNode, UniversalVideoContent
}
if displayImage {
if captureProtected {
setLayerDisableScreenshots(self.imageNode.layer, captureProtected)
}
self.imageNode.setSignal(internalMediaGridMessageVideo(postbox: postbox, userLocation: userLocation, videoReference: fileReference, imageReference: imageReference, onlyFullSize: onlyFullSizeThumbnail, useLargeThumbnail: useLargeThumbnail, autoFetchFullSizeThumbnail: autoFetchFullSizeThumbnail || fileReference.media.isInstantVideo) |> map { [weak self] getSize, getData in
Queue.mainQueue().async {
if let strongSelf = self, strongSelf.dimensions == nil {

View File

@ -17,6 +17,7 @@ swift_library(
"//submodules/TelegramUIPreferences:TelegramUIPreferences",
"//submodules/TgVoip:TgVoip",
"//submodules/TgVoipWebrtc:TgVoipWebrtc",
"//submodules/FFMpegBinding",
],
visibility = [
"//visibility:public",

View File

@ -17,6 +17,7 @@ let package = Package(
// .package(url: /* package url */, from: "1.0.0"),
.package(name: "TgVoipWebrtc", path: "../../../tgcalls"),
.package(name: "SSignalKit", path: "../SSignalKit"),
.package(name: "FFMpegBinding", path: "../FFMpegBinding"),
.package(name: "TelegramCore", path: "../TelegramCore")
],
@ -28,6 +29,7 @@ let package = Package(
dependencies: [
.product(name: "TgVoipWebrtc", package: "TgVoipWebrtc", condition: nil),
.product(name: "SwiftSignalKit", package: "SSignalKit", condition: nil),
.product(name: "FFMpegBinding", package: "FFMpegBinding", condition: nil),
.product(name: "TelegramCore", package: "TelegramCore", condition: nil),
],
path: "Sources",

View File

@ -2,7 +2,12 @@ import Foundation
import SwiftSignalKit
import TgVoipWebrtc
import TelegramCore
import Network
import Postbox
import FFMpegBinding
@available(iOS 12.0, macOS 14.0, *)
public final class WrappedMediaStreamingContext {
private final class Impl {
let queue: Queue
@ -132,3 +137,460 @@ public final class WrappedMediaStreamingContext {
}
}
}
@available(iOS 12.0, macOS 14.0, *)
public final class ExternalMediaStreamingContext {
private final class Impl {
let queue: Queue
private var broadcastPartsSource: BroadcastPartSource?
private let resetPlaylistDisposable = MetaDisposable()
private let updatePlaylistDisposable = MetaDisposable()
let masterPlaylistData = Promise<String>()
let playlistData = Promise<String>()
let mediumPlaylistData = Promise<String>()
init(queue: Queue, rejoinNeeded: @escaping () -> Void) {
self.queue = queue
}
deinit {
self.updatePlaylistDisposable.dispose()
}
func setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData?) {
if let audioStreamData {
let broadcastPartsSource = NetworkBroadcastPartSource(queue: self.queue, engine: audioStreamData.engine, callId: audioStreamData.callId, accessHash: audioStreamData.accessHash, isExternalStream: audioStreamData.isExternalStream)
self.broadcastPartsSource = broadcastPartsSource
self.updatePlaylistDisposable.set(nil)
let queue = self.queue
self.resetPlaylistDisposable.set(broadcastPartsSource.requestTime(completion: { [weak self] timestamp in
queue.async {
guard let self else {
return
}
let segmentDuration: Int64 = 1000
var adjustedTimestamp: Int64 = 0
if timestamp > 0 {
adjustedTimestamp = timestamp / segmentDuration * segmentDuration - 4 * segmentDuration
}
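// For example (illustrative values): a reported head of 123_456 ms is floored to the
// 1 s segment grid (123_000 ms) and then backed off by four segments, giving 119_000 ms,
// so playback starts a safe distance behind the live edge.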
if adjustedTimestamp > 0 {
var masterPlaylistData = "#EXTM3U\n" +
"#EXT-X-VERSION:6\n" +
"#EXT-X-STREAM-INF:BANDWIDTH=3300000,RESOLUTION=1280x720,CODECS=\"avc1.64001f,mp4a.40.2\"\n" +
"hls_level_0.m3u8\n"
masterPlaylistData += "#EXT-X-STREAM-INF:BANDWIDTH=1000000,RESOLUTION=640x360,CODECS=\"avc1.64001f,mp4a.40.2\"\n" +
"hls_level_1.m3u8\n"
self.masterPlaylistData.set(.single(masterPlaylistData))
self.beginUpdatingPlaylist(initialHeadTimestamp: adjustedTimestamp)
}
}
}))
}
}
private func beginUpdatingPlaylist(initialHeadTimestamp: Int64) {
let segmentDuration: Int64 = 1000
var timestamp = initialHeadTimestamp
self.updatePlaylist(headTimestamp: timestamp, quality: 0)
self.updatePlaylist(headTimestamp: timestamp, quality: 1)
self.updatePlaylistDisposable.set((
Signal<Void, NoError>.single(Void())
|> delay(1.0, queue: self.queue)
|> restart
|> deliverOn(self.queue)
).start(next: { [weak self] _ in
guard let self else {
return
}
timestamp += segmentDuration
self.updatePlaylist(headTimestamp: timestamp, quality: 0)
self.updatePlaylist(headTimestamp: timestamp, quality: 1)
}))
}
private func updatePlaylist(headTimestamp: Int64, quality: Int) {
let segmentDuration: Int64 = 1000
let headIndex = headTimestamp / segmentDuration
let minIndex = headIndex - 20
var playlistData = "#EXTM3U\n" +
"#EXT-X-VERSION:6\n" +
"#EXT-X-TARGETDURATION:1\n" +
"#EXT-X-MEDIA-SEQUENCE:\(minIndex)\n" +
"#EXT-X-INDEPENDENT-SEGMENTS\n"
for index in minIndex ... headIndex {
playlistData.append("#EXTINF:1.000000,\n")
playlistData.append("hls_stream\(quality)_\(index).ts\n")
}
//print("Player: updating playlist \(quality) \(minIndex) ... \(headIndex)")
if quality == 0 {
self.playlistData.set(.single(playlistData))
} else {
self.mediumPlaylistData.set(.single(playlistData))
}
}
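// Illustrative output: with headTimestamp = 23_000 (headIndex = 23, minIndex = 3) and
// quality 0 this yields a 21-entry sliding-window playlist of the form:
//
//   #EXTM3U
//   #EXT-X-VERSION:6
//   #EXT-X-TARGETDURATION:1
//   #EXT-X-MEDIA-SEQUENCE:3
//   #EXT-X-INDEPENDENT-SEGMENTS
//   #EXTINF:1.000000,
//   hls_stream0_3.ts
//   ...
//   #EXTINF:1.000000,
//   hls_stream0_23.ts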
func partData(index: Int, quality: Int) -> Signal<Data?, NoError> {
let segmentDuration: Int64 = 1000
let timestamp = Int64(index) * segmentDuration
print("Player: request part(q: \(quality)) \(index) -> \(timestamp)")
guard let broadcastPartsSource = self.broadcastPartsSource else {
return .single(nil)
}
return Signal { subscriber in
return broadcastPartsSource.requestPart(
timestampMilliseconds: timestamp,
durationMilliseconds: segmentDuration,
subject: .video(channelId: 1, quality: quality == 0 ? .full : .medium),
completion: { part in
var data = part.oggData
if data.count > 32 {
data = data.subdata(in: 32 ..< data.count)
}
subscriber.putNext(data)
},
rejoinNeeded: {
//TODO
}
)
}
}
}
private let queue = Queue()
let id: CallSessionInternalId
private let impl: QueueLocalObject<Impl>
private var hlsServerDisposable: Disposable?
public init(id: CallSessionInternalId, rejoinNeeded: @escaping () -> Void) {
self.id = id
let queue = self.queue
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, rejoinNeeded: rejoinNeeded)
})
self.hlsServerDisposable = SharedHLSServer.shared.registerPlayer(streamingContext: self)
}
deinit {
self.hlsServerDisposable?.dispose()
}
public func setAudioStreamData(audioStreamData: OngoingGroupCallContext.AudioStreamData?) {
self.impl.with { impl in
impl.setAudioStreamData(audioStreamData: audioStreamData)
}
}
public func masterPlaylistData() -> Signal<String, NoError> {
return self.impl.signalWith { impl, subscriber in
impl.masterPlaylistData.get().start(next: subscriber.putNext)
}
}
public func playlistData(quality: Int) -> Signal<String, NoError> {
return self.impl.signalWith { impl, subscriber in
if quality == 0 {
impl.playlistData.get().start(next: subscriber.putNext)
} else {
impl.mediumPlaylistData.get().start(next: subscriber.putNext)
}
}
}
public func partData(index: Int, quality: Int) -> Signal<Data?, NoError> {
return self.impl.signalWith { impl, subscriber in
impl.partData(index: index, quality: quality).start(next: subscriber.putNext)
}
}
}
@available(iOS 12.0, macOS 14.0, *)
public final class SharedHLSServer {
public static let shared: SharedHLSServer = {
return SharedHLSServer()
}()
private enum ResponseError {
case badRequest
case notFound
case internalServerError
var httpString: String {
switch self {
case .badRequest:
return "400 Bad Request"
case .notFound:
return "404 Not Found"
case .internalServerError:
return "500 Internal Server Error"
}
}
}
private final class ContextReference {
weak var streamingContext: ExternalMediaStreamingContext?
init(streamingContext: ExternalMediaStreamingContext) {
self.streamingContext = streamingContext
}
}
@available(iOS 12.0, macOS 14.0, *)
private final class Impl {
private let queue: Queue
private let port: NWEndpoint.Port
private var listener: NWListener?
private var contextReferences = Bag<ContextReference>()
init(queue: Queue, port: UInt16) {
self.queue = queue
self.port = NWEndpoint.Port(rawValue: port)!
self.start()
}
func start() {
let listener: NWListener
do {
listener = try NWListener(using: .tcp, on: self.port)
} catch {
Logger.shared.log("SharedHLSServer", "Failed to create listener: \(error)")
return
}
self.listener = listener
listener.newConnectionHandler = { [weak self] connection in
guard let self else {
return
}
self.handleConnection(connection: connection)
}
listener.stateUpdateHandler = { [weak self] state in
guard let self else {
return
}
switch state {
case .ready:
Logger.shared.log("SharedHLSServer", "Server is ready on port \(self.port)")
case let .failed(error):
Logger.shared.log("SharedHLSServer", "Server failed with error: \(error)")
self.listener?.cancel()
self.listener?.start(queue: self.queue.queue)
default:
break
}
}
listener.start(queue: self.queue.queue)
}
private func handleConnection(connection: NWConnection) {
connection.start(queue: self.queue.queue)
connection.receive(minimumIncompleteLength: 1, maximumLength: 1024, completion: { [weak self] data, _, isComplete, error in
guard let self else {
return
}
if let data, !data.isEmpty {
self.handleRequest(data: data, connection: connection)
} else if isComplete {
connection.cancel()
} else if let error = error {
Logger.shared.log("SharedHLSServer", "Error on connection: \(error)")
connection.cancel()
}
})
}
private func handleRequest(data: Data, connection: NWConnection) {
guard let requestString = String(data: data, encoding: .utf8) else {
connection.cancel()
return
}
if !requestString.hasPrefix("GET /") {
self.sendErrorAndClose(connection: connection)
return
}
guard let firstCrLf = requestString.range(of: "\r\n") else {
self.sendErrorAndClose(connection: connection)
return
}
let firstLine = String(requestString[requestString.index(requestString.startIndex, offsetBy: "GET /".count) ..< firstCrLf.lowerBound])
if !(firstLine.hasSuffix(" HTTP/1.0") || firstLine.hasSuffix(" HTTP/1.1")) {
self.sendErrorAndClose(connection: connection)
return
}
let requestPath = String(firstLine[firstLine.startIndex ..< firstLine.index(firstLine.endIndex, offsetBy: -" HTTP/1.1".count)])
guard let firstSlash = requestPath.range(of: "/") else {
self.sendErrorAndClose(connection: connection, error: .notFound)
return
}
guard let streamId = UUID(uuidString: String(requestPath[requestPath.startIndex ..< firstSlash.lowerBound])) else {
self.sendErrorAndClose(connection: connection)
return
}
guard let streamingContext = self.contextReferences.copyItems().first(where: { $0.streamingContext?.id == streamId })?.streamingContext else {
self.sendErrorAndClose(connection: connection)
return
}
let filePath = String(requestPath[firstSlash.upperBound...])
if filePath == "master.m3u8" {
let _ = (streamingContext.masterPlaylistData()
|> deliverOn(self.queue)
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
self.sendResponseAndClose(connection: connection, data: result.data(using: .utf8)!)
})
} else if filePath.hasPrefix("hls_level_") && filePath.hasSuffix(".m3u8") {
guard let levelIndex = Int(String(filePath[filePath.index(filePath.startIndex, offsetBy: "hls_level_".count) ..< filePath.index(filePath.endIndex, offsetBy: -".m3u8".count)])) else {
self.sendErrorAndClose(connection: connection)
return
}
let _ = (streamingContext.playlistData(quality: levelIndex)
|> deliverOn(self.queue)
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
self.sendResponseAndClose(connection: connection, data: result.data(using: .utf8)!)
})
} else if filePath.hasPrefix("hls_stream") && filePath.hasSuffix(".ts") {
let fileId = String(filePath[filePath.index(filePath.startIndex, offsetBy: "hls_stream".count) ..< filePath.index(filePath.endIndex, offsetBy: -".ts".count)])
guard let underscoreRange = fileId.range(of: "_") else {
self.sendErrorAndClose(connection: connection)
return
}
guard let levelIndex = Int(String(fileId[fileId.startIndex ..< underscoreRange.lowerBound])) else {
self.sendErrorAndClose(connection: connection)
return
}
guard let partIndex = Int(String(fileId[underscoreRange.upperBound...])) else {
self.sendErrorAndClose(connection: connection)
return
}
let _ = (streamingContext.partData(index: partIndex, quality: levelIndex)
|> deliverOn(self.queue)
|> take(1)).start(next: { [weak self] result in
guard let self else {
return
}
if let result {
let sourceTempFile = TempBox.shared.tempFile(fileName: "part.mp4")
let tempFile = TempBox.shared.tempFile(fileName: "part.ts")
defer {
TempBox.shared.dispose(sourceTempFile)
TempBox.shared.dispose(tempFile)
}
guard let _ = try? result.write(to: URL(fileURLWithPath: sourceTempFile.path)) else {
self.sendErrorAndClose(connection: connection, error: .internalServerError)
return
}
let sourcePath = sourceTempFile.path
FFMpegLiveMuxer.remux(sourcePath, to: tempFile.path, offsetSeconds: Double(partIndex))
if let data = try? Data(contentsOf: URL(fileURLWithPath: tempFile.path)) {
self.sendResponseAndClose(connection: connection, data: data)
} else {
self.sendErrorAndClose(connection: connection, error: .internalServerError)
}
} else {
self.sendErrorAndClose(connection: connection, error: .notFound)
}
})
} else {
self.sendErrorAndClose(connection: connection, error: .notFound)
}
}
private func sendErrorAndClose(connection: NWConnection, error: ResponseError = .badRequest) {
let errorResponse = "HTTP/1.1 \(error.httpString)\r\nContent-Type: text/html\r\nConnection: close\r\n\r\n"
connection.send(content: errorResponse.data(using: .utf8), completion: .contentProcessed { error in
if let error {
Logger.shared.log("SharedHLSServer", "Failed to send response: \(error)")
}
connection.cancel()
})
}
private func sendResponseAndClose(connection: NWConnection, data: Data) {
let responseHeaders = "HTTP/1.1 200 OK\r\nContent-Type: application/octet-stream\r\nConnection: close\r\n\r\n"
var responseData = Data()
responseData.append(responseHeaders.data(using: .utf8)!)
responseData.append(data)
connection.send(content: responseData, completion: .contentProcessed { error in
if let error {
Logger.shared.log("SharedHLSServer", "Failed to send response: \(error)")
}
connection.cancel()
})
}
func registerPlayer(streamingContext: ExternalMediaStreamingContext) -> Disposable {
let queue = self.queue
let index = self.contextReferences.add(ContextReference(streamingContext: streamingContext))
return ActionDisposable { [weak self] in
queue.async {
guard let self else {
return
}
self.contextReferences.remove(index)
}
}
}
}
private static let queue = Queue(name: "SharedHLSServer")
public let port: UInt16 = 8016
private let impl: QueueLocalObject<Impl>
private init() {
let queue = SharedHLSServer.queue
let port = self.port
self.impl = QueueLocalObject(queue: queue, generate: {
return Impl(queue: queue, port: port)
})
}
fileprivate func registerPlayer(streamingContext: ExternalMediaStreamingContext) -> Disposable {
let disposable = MetaDisposable()
self.impl.with { impl in
disposable.set(impl.registerPlayer(streamingContext: streamingContext))
}
return disposable
}
}
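A minimal sketch of how an on-device player could address this server over the loopback interface; it assumes the code lives in the same module (the id property is module-internal) and that CallSessionInternalId interpolates as its UUID string:

import Foundation

// Illustrative only: the server resolves "<stream UUID>/master.m3u8",
// "<stream UUID>/hls_level_<q>.m3u8" and "<stream UUID>/hls_stream<q>_<index>.ts"
// for every registered streaming context.
func localMasterPlaylistURL(for streamingContext: ExternalMediaStreamingContext) -> URL? {
    return URL(string: "http://127.0.0.1:\(SharedHLSServer.shared.port)/\(streamingContext.id)/master.m3u8")
}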

View File

@ -134,6 +134,9 @@ public func parseInternalUrl(sharedContext: SharedAccountContext, query: String)
if query.hasPrefix("ipfs/") {
return .externalUrl(url: "ipfs://" + String(query[query.index(query.startIndex, offsetBy: "ipfs/".count)...]))
}
if query.hasPrefix("ton/") {
return .externalUrl(url: "ton://" + String(query[query.index(query.startIndex, offsetBy: "ton/".count)...]))
}
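// For example (illustrative): https://t.me/ton/example is rewritten to the external URL ton://example.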
}
if pathComponents[0].hasPrefix("+") || pathComponents[0].hasPrefix("%20") {

View File

@ -47,13 +47,13 @@ CONFIGURE_FLAGS="--enable-cross-compile --disable-programs \
--enable-libopus \
--enable-libvpx \
--enable-audiotoolbox \
--enable-bsf=aac_adtstoasc,vp9_superframe \
--enable-bsf=aac_adtstoasc,vp9_superframe,h264_mp4toannexb \
--enable-decoder=h264,libvpx_vp9,hevc,libopus,mp3,aac,flac,alac_at,pcm_s16le,pcm_s24le,gsm_ms_at \
--enable-encoder=libvpx_vp9 \
--enable-demuxer=aac,mov,m4v,mp3,ogg,libopus,flac,wav,aiff,matroska \
--enable-encoder=libvpx_vp9,aac_at \
--enable-demuxer=aac,mov,m4v,mp3,ogg,libopus,flac,wav,aiff,matroska,mpegts \
--enable-parser=aac,h264,mp3,libopus \
--enable-protocol=file \
--enable-muxer=mp4,matroska \
--enable-muxer=mp4,matroska,mpegts \
"