Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios

Ilya Laktyushin 2023-09-30 22:17:29 +04:00
commit d1f2b29370
30 changed files with 307 additions and 114 deletions

View File

@ -719,14 +719,19 @@ private final class NotificationServiceHandler {
let semaphore = DispatchSemaphore(value: 0)
var loggingSettings = LoggingSettings.defaultSettings
let _ = (self.accountManager.transaction { transaction -> LoggingSettings in
if buildConfig.isInternalBuild {
loggingSettings = LoggingSettings(logToFile: true, logToConsole: false, redactSensitiveData: true)
}
let _ = (self.accountManager.transaction { transaction -> LoggingSettings? in
if let value = transaction.getSharedData(SharedDataKeys.loggingSettings)?.get(LoggingSettings.self) {
return value
} else {
return LoggingSettings.defaultSettings
return nil
}
}).start(next: { value in
loggingSettings = value
if let value {
loggingSettings = value
}
semaphore.signal()
})
semaphore.wait()
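
Annotation: the hunk above replaces the hardcoded default with a read of the stored LoggingSettings, bridged back to synchronous code via a DispatchSemaphore, while keeping the internal-build override as the value used when nothing is stored. A self-contained sketch of the same pattern (the read function and the LoggingSettings type here are stand-ins, not the real AccountManager API):

```swift
import Dispatch

struct LoggingSettings {
    var logToFile = false
    var logToConsole = true
    var redactSensitiveData = true
    static let defaultSettings = LoggingSettings()
}

// Stand-in for the asynchronous accountManager.transaction read;
// completes with nil when no value has been stored yet.
func readStoredLoggingSettings(_ completion: @escaping (LoggingSettings?) -> Void) {
    DispatchQueue.global().async { completion(nil) }
}

func currentLoggingSettings(isInternalBuild: Bool) -> LoggingSettings {
    // Internal builds force file logging before the stored value arrives.
    var settings = LoggingSettings.defaultSettings
    if isInternalBuild {
        settings = LoggingSettings(logToFile: true, logToConsole: false, redactSensitiveData: true)
    }
    let semaphore = DispatchSemaphore(value: 0)
    readStoredLoggingSettings { value in
        if let value {
            settings = value // only override when something was actually stored
        }
        semaphore.signal()
    }
    semaphore.wait() // block the caller until the asynchronous read completes
    return settings
}
```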

View File

@ -70,6 +70,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
case redactSensitiveData(PresentationTheme, Bool)
case keepChatNavigationStack(PresentationTheme, Bool)
case skipReadHistory(PresentationTheme, Bool)
case unidirectionalSwipeToReply(Bool)
case crashOnSlowQueries(PresentationTheme, Bool)
case crashOnMemoryPressure(PresentationTheme, Bool)
case clearTips(PresentationTheme)
@ -118,7 +119,7 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return DebugControllerSection.logs.rawValue
case .logToFile, .logToConsole, .redactSensitiveData:
return DebugControllerSection.logging.rawValue
case .keepChatNavigationStack, .skipReadHistory, .crashOnSlowQueries, .crashOnMemoryPressure:
case .keepChatNavigationStack, .skipReadHistory, .unidirectionalSwipeToReply, .crashOnSlowQueries, .crashOnMemoryPressure:
return DebugControllerSection.experiments.rawValue
case .clearTips, .resetNotifications, .crash, .resetData, .resetDatabase, .resetDatabaseAndCache, .resetHoles, .reindexUnread, .resetCacheIndex, .reindexCache, .resetBiometricsData, .resetWebViewCache, .optimizeDatabase, .photoPreview, .knockoutWallpaper, .storiesExperiment, .storiesJpegExperiment, .playlistPlayback, .enableQuickReactionSwitch, .voiceConference, .experimentalCompatibility, .enableDebugDataDisplay, .acceleratedStickers, .inlineForums, .localTranscription, .enableReactionOverrides, .restorePurchases:
return DebugControllerSection.experiments.rawValue
@ -165,46 +166,48 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return 14
case .skipReadHistory:
return 15
case .crashOnSlowQueries:
case .unidirectionalSwipeToReply:
return 16
case .crashOnMemoryPressure:
case .crashOnSlowQueries:
return 17
case .clearTips:
case .crashOnMemoryPressure:
return 18
case .resetNotifications:
case .clearTips:
return 19
case .crash:
case .resetNotifications:
return 20
case .resetData:
case .crash:
return 21
case .resetDatabase:
case .resetData:
return 22
case .resetDatabaseAndCache:
case .resetDatabase:
return 23
case .resetHoles:
case .resetDatabaseAndCache:
return 24
case .reindexUnread:
case .resetHoles:
return 25
case .resetCacheIndex:
case .reindexUnread:
return 26
case .reindexCache:
case .resetCacheIndex:
return 27
case .resetBiometricsData:
case .reindexCache:
return 28
case .resetWebViewCache:
case .resetBiometricsData:
return 29
case .optimizeDatabase:
case .resetWebViewCache:
return 30
case .photoPreview:
case .optimizeDatabase:
return 31
case .knockoutWallpaper:
case .photoPreview:
return 32
case .experimentalCompatibility:
case .knockoutWallpaper:
return 33
case .enableDebugDataDisplay:
case .experimentalCompatibility:
return 34
case .acceleratedStickers:
case .enableDebugDataDisplay:
return 35
case .acceleratedStickers:
return 36
case .inlineForums:
return 37
case .localTranscription:
@ -928,6 +931,14 @@ private enum DebugControllerEntry: ItemListNodeEntry {
return settings
}).start()
})
case let .unidirectionalSwipeToReply(value):
return ItemListSwitchItem(presentationData: presentationData, title: "Legacy swipe to reply", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = updateExperimentalUISettingsInteractively(accountManager: arguments.sharedContext.accountManager, { settings in
var settings = settings
settings.unidirectionalSwipeToReply = value
return settings
}).start()
})
case let .crashOnSlowQueries(_, value):
return ItemListSwitchItem(presentationData: presentationData, title: "Crash when slow", value: value, sectionId: self.section, style: .blocks, updated: { value in
let _ = updateExperimentalUISettingsInteractively(accountManager: arguments.sharedContext.accountManager, { settings in
@ -1375,6 +1386,7 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
#if DEBUG
entries.append(.skipReadHistory(presentationData.theme, experimentalSettings.skipReadHistory))
#endif
entries.append(.unidirectionalSwipeToReply(experimentalSettings.unidirectionalSwipeToReply))
}
entries.append(.crashOnSlowQueries(presentationData.theme, experimentalSettings.crashOnLongQueries))
entries.append(.crashOnMemoryPressure(presentationData.theme, experimentalSettings.crashOnMemoryPressure))

View File

@ -10,6 +10,9 @@ objc_library(
hdrs = glob([
"Public/**/*.h",
]),
copts = [
"-Werror",
],
includes = [
"Public",
],

View File

@ -4,6 +4,8 @@ NS_ASSUME_NONNULL_BEGIN
#define FFMPEG_AVSEEK_SIZE 0x10000
extern int FFMPEG_CONSTANT_AVERROR_EOF;
@interface FFMpegAVIOContext : NSObject
- (instancetype _Nullable)initWithBufferSize:(int32_t)bufferSize opaqueContext:(void * const)opaqueContext readPacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))readPacket writePacket:(int (* _Nullable)(void * _Nullable opaque, uint8_t * _Nullable buf, int buf_size))writePacket seek:(int64_t (*)(void * _Nullable opaque, int64_t offset, int whence))seek isSeekable:(bool)isSeekable;

View File

@ -3,14 +3,14 @@
#import "libavcodec/avcodec.h"
@interface FFMpegAVCodec () {
AVCodec *_impl;
AVCodec const *_impl;
}
@end
@implementation FFMpegAVCodec
- (instancetype)initWithImpl:(AVCodec *)impl {
- (instancetype)initWithImpl:(AVCodec const *)impl {
self = [super init];
if (self != nil) {
_impl = impl;
@ -19,7 +19,7 @@
}
+ (FFMpegAVCodec * _Nullable)findForId:(int)codecId {
AVCodec *codec = avcodec_find_decoder(codecId);
AVCodec const *codec = avcodec_find_decoder(codecId);
if (codec) {
return [[FFMpegAVCodec alloc] initWithImpl:codec];
} else {
@ -28,7 +28,7 @@
}
- (void *)impl {
return _impl;
return (void *)_impl;
}
@end

View File

@ -35,7 +35,11 @@
}
- (int32_t)channels {
#if LIBAVFORMAT_VERSION_MAJOR >= 59
return (int32_t)_impl->ch_layout.nb_channels;
#else
return (int32_t)_impl->channels;
#endif
}
- (int32_t)sampleRate {

View File

@ -4,6 +4,7 @@
#import <FFMpegBinding/FFMpegPacket.h>
#import <FFMpegBinding/FFMpegAVCodecContext.h>
#import "libavcodec/avcodec.h"
#import "libavformat/avformat.h"
int FFMpegCodecIdH264 = AV_CODEC_ID_H264;
@ -115,9 +116,9 @@ int FFMpegCodecIdVP9 = AV_CODEC_ID_VP9;
if (stream->time_base.den != 0 && stream->time_base.num != 0) {
timebase = CMTimeMake((int64_t)stream->time_base.num, stream->time_base.den);
} else if (stream->codec->time_base.den != 0 && stream->codec->time_base.num != 0) {
}/* else if (stream->codec->time_base.den != 0 && stream->codec->time_base.num != 0) {
timebase = CMTimeMake((int64_t)stream->codec->time_base.num, stream->codec->time_base.den);
} else {
}*/ else {
timebase = defaultTimeBase;
}

View File

@ -20,7 +20,7 @@
- (void)dealloc {
if (_impl) {
av_frame_unref(_impl);
av_frame_free(&_impl);
}
}
@ -45,7 +45,11 @@
}
- (int64_t)duration {
#if LIBAVFORMAT_VERSION_MAJOR >= 59
return _impl->duration;
#else
return _impl->pkt_duration;
#endif
}
- (FFMpegAVFrameColorRange)colorRange {

View File

@ -2,6 +2,8 @@
#import "libavformat/avformat.h"
int FFMPEG_CONSTANT_AVERROR_EOF = AVERROR_EOF;
@interface FFMpegAVIOContext () {
AVIOContext *_impl;
}

View File

@ -10,7 +10,6 @@
#else
av_log_set_level(AV_LOG_QUIET);
#endif
av_register_all();
}
@end

View File

@ -2,10 +2,11 @@
#import <FFMpegBinding/FFMpegAVCodecContext.h>
#import "libavcodec/avcodec.h"
#import "libavformat/avformat.h"
@interface FFMpegPacket () {
AVPacket _impl;
AVPacket *_impl;
}
@end
@ -15,49 +16,49 @@
- (instancetype)init {
self = [super init];
if (self != nil) {
av_init_packet(&_impl);
_impl = av_packet_alloc();
}
return self;
}
- (void)dealloc {
av_packet_unref(&_impl);
av_packet_free(&_impl);
}
- (void *)impl {
return &_impl;
return _impl;
}
- (int64_t)pts {
if (_impl.pts == 0x8000000000000000) {
return _impl.dts;
if (_impl->pts == 0x8000000000000000) {
return _impl->dts;
} else {
return _impl.pts;
return _impl->pts;
}
}
- (int64_t)dts {
return _impl.dts;
return _impl->dts;
}
- (int64_t)duration {
return _impl.duration;
return _impl->duration;
}
- (int32_t)streamIndex {
return (int32_t)_impl.stream_index;
return (int32_t)_impl->stream_index;
}
- (int32_t)size {
return (int32_t)_impl.size;
return (int32_t)_impl->size;
}
- (uint8_t *)data {
return _impl.data;
return _impl->data;
}
- (int32_t)sendToDecoder:(FFMpegAVCodecContext *)codecContext {
return avcodec_send_packet((AVCodecContext *)[codecContext impl], &_impl);
return avcodec_send_packet((AVCodecContext *)[codecContext impl], _impl);
}
@end

View File

@ -34,18 +34,18 @@
@end
static int readPacketImpl(void * _Nullable opaque, uint8_t * _Nullable buffer, int length) {
/*static int readPacketImpl(void * _Nullable opaque, uint8_t * _Nullable buffer, int length) {
FFMpegRemuxerContext *context = (__bridge FFMpegRemuxerContext *)opaque;
context->_offset += length;
printf("read %lld bytes (offset is now %lld)\n", length, context->_offset);
return read(context->_fd, buffer, length);
printf("read %lld bytes (offset is now %lld)\n", (int64_t)length, context->_offset);
return (int)read(context->_fd, buffer, length);
}
static int writePacketImpl(void * _Nullable opaque, uint8_t * _Nullable buffer, int length) {
FFMpegRemuxerContext *context = (__bridge FFMpegRemuxerContext *)opaque;
context->_offset += length;
printf("write %lld bytes (offset is now %lld)\n", length, context->_offset);
return write(context->_fd, buffer, length);
printf("write %lld bytes (offset is now %lld)\n", (int64_t)length, context->_offset);
return (int)write(context->_fd, buffer, length);
}
static int64_t seekImpl(void * _Nullable opaque, int64_t offset, int whence) {
@ -57,7 +57,7 @@ static int64_t seekImpl(void * _Nullable opaque, int64_t offset, int whence) {
context->_offset = offset;
return lseek(context->_fd, offset, SEEK_SET);
}
}
}*/
@implementation FFMpegRemuxer
@ -99,7 +99,7 @@ static int64_t seekImpl(void * _Nullable opaque, int64_t offset, int whence) {
}
number_of_streams = input_format_context->nb_streams;
streams_list = av_mallocz_array(number_of_streams, sizeof(*streams_list));
streams_list = av_malloc_array(number_of_streams, sizeof(*streams_list));
if (!streams_list) {
ret = AVERROR(ENOMEM);

View File

@ -52,7 +52,9 @@
swr_free(&_context);
_context = NULL;
}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
_context = swr_alloc_set_opts(NULL,
av_get_default_channel_layout((int)_destinationChannelCount),
(enum AVSampleFormat)_destinationSampleFormat,
@ -62,6 +64,7 @@
(int)_sourceSampleRate,
0,
NULL);
#pragma clang diagnostic pop
_currentSourceChannelCount = channelCount;
_ratio = MAX(1, _destinationSampleRate / MAX(_sourceSampleRate, 1)) * MAX(1, _destinationChannelCount / channelCount) * 2;
if (_context) {
@ -72,7 +75,11 @@
- (NSData * _Nullable)resample:(FFMpegAVFrame *)frame {
AVFrame *frameImpl = (AVFrame *)[frame impl];
#if LIBAVFORMAT_VERSION_MAJOR >= 59
int numChannels = frameImpl->ch_layout.nb_channels;
#else
int numChannels = frameImpl->channels;
#endif
if (numChannels != _currentSourceChannelCount) {
[self resetContextForChannelCount:numChannels];
}

View File

@ -65,7 +65,7 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
while true {
let result = self.codecContext.receive(into: self.audioFrame)
if case .success = result {
if let convertedFrame = convertAudioFrame(self.audioFrame, pts: frame.pts, duration: frame.duration) {
if let convertedFrame = convertAudioFrame(self.audioFrame, pts: frame.pts) {
self.delayedFrames.append(convertedFrame)
}
} else {
@ -121,7 +121,7 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
}
}
private func convertAudioFrame(_ frame: FFMpegAVFrame, pts: CMTime, duration: CMTime) -> MediaTrackFrame? {
private func convertAudioFrame(_ frame: FFMpegAVFrame, pts: CMTime) -> MediaTrackFrame? {
guard let data = self.swrContext.resample(frame) else {
return nil
}
@ -135,18 +135,12 @@ final class FFMpegAudioFrameDecoder: MediaTrackFrameDecoder {
return nil
}
//var timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: pts, decodeTimeStamp: pts)
var sampleBuffer: CMSampleBuffer?
//var sampleSize = data.count
guard CMAudioSampleBufferCreateReadyWithPacketDescriptions(allocator: nil, dataBuffer: blockBuffer!, formatDescription: self.formatDescription, sampleCount: Int(data.count / 2), presentationTimeStamp: pts, packetDescriptions: nil, sampleBufferOut: &sampleBuffer) == noErr else {
return nil
}
/*guard CMSampleBufferCreate(allocator: nil, dataBuffer: blockBuffer, dataReady: true, makeDataReadyCallback: nil, refcon: nil, formatDescription: self.formatDescription, sampleCount: Int(frame.duration), sampleTimingEntryCount: 1, sampleTimingArray: &timingInfo, sampleSizeEntryCount: 1, sampleSizeArray: &sampleSize, sampleBufferOut: &sampleBuffer) == noErr else {
return nil
}*/
let resetDecoder = self.resetDecoderOnNextFrame
self.resetDecoderOnNextFrame = false

View File

@ -85,7 +85,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
}
if totalCount > maximumFetchSize {
context.readingError = true
return 0
return FFMPEG_CONSTANT_AVERROR_EOF
}
}
@ -118,7 +118,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
disposable.dispose()
if !completedRequest {
context.readingError = true
return 0
return FFMPEG_CONSTANT_AVERROR_EOF
}
}
}
@ -176,7 +176,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
disposable.dispose()
if !completedRequest {
context.readingError = true
return 0
return FFMPEG_CONSTANT_AVERROR_EOF
}
}
}
@ -192,7 +192,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
if context.closed {
context.readingError = true
return 0
return FFMPEG_CONSTANT_AVERROR_EOF
}
return fetchedCount
}
@ -725,7 +725,7 @@ private func videoFrameFromPacket(_ packet: FFMpegPacket, videoStream: StreamCon
if frameDuration != 0 {
duration = CMTimeMake(value: frameDuration * videoStream.timebase.value, timescale: videoStream.timebase.timescale)
} else {
duration = videoStream.fps
duration = CMTimeMake(value: Int64(videoStream.fps.timescale), timescale: Int32(videoStream.fps.value))
}
return MediaTrackDecodableFrame(type: .video, packet: packet, pts: pts, dts: dts, duration: duration)
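
Annotation: the duration fix above replaces `duration = videoStream.fps` (a rate) with its reciprocal. If fps is stored as a CMTime whose value/timescale encode frames per second, as the replacement implies, then a single frame lasts timescale/value seconds. A quick check of that arithmetic:

```swift
import CoreMedia

let fps = CMTimeMake(value: 30, timescale: 1)                 // 30 frames per second
let frameDuration = CMTimeMake(value: Int64(fps.timescale),   // numerator and denominator swapped
                               timescale: Int32(fps.value))
print(CMTimeGetSeconds(frameDuration))                        // 0.0333... ≈ 1/30 s per frame
```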

View File

@ -11,9 +11,13 @@ import FFMpegBinding
private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: UnsafeMutablePointer<UInt8>?, bufferSize: Int32) -> Int32 {
let context = Unmanaged<SoftwareVideoSource>.fromOpaque(userData!).takeUnretainedValue()
if let fd = context.fd {
return Int32(read(fd, buffer, Int(bufferSize)))
let result = read(fd, buffer, Int(bufferSize))
if result == 0 {
return FFMPEG_CONSTANT_AVERROR_EOF
}
return Int32(result)
}
return 0
return FFMPEG_CONSTANT_AVERROR_EOF
}
private func seekCallback(userData: UnsafeMutableRawPointer?, offset: Int64, whence: Int32) -> Int64 {

View File

@ -71,7 +71,7 @@ private func readPacketCallback(userData: UnsafeMutableRawPointer?, buffer: Unsa
context.readingOffset += Int64(fetchedCount)
return fetchedCount
} else {
return 0
return FFMPEG_CONSTANT_AVERROR_EOF
}
}
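
Annotation: the repeated `return 0` → `return FFMPEG_CONSTANT_AVERROR_EOF` changes in these custom read callbacks follow FFmpeg's documented custom-IO convention: a read callback reports end of stream (or failure) with AVERROR_EOF or another negative error code, not 0. A minimal sketch of the shape these callbacks now take; the constant normally comes from FFMpegBinding and is declared locally here only so the sketch compiles on its own:

```swift
import Foundation

// Stand-in for FFMpegBinding's FFMPEG_CONSTANT_AVERROR_EOF (the Int32 value of AVERROR_EOF).
let FFMPEG_CONSTANT_AVERROR_EOF: Int32 = -541478725

func readPacket(fd: Int32, buffer: UnsafeMutablePointer<UInt8>?, capacity: Int32) -> Int32 {
    let bytesRead = read(fd, buffer, Int(capacity))
    if bytesRead <= 0 {
        return FFMPEG_CONSTANT_AVERROR_EOF // end of stream or error: never report 0 bytes
    }
    return Int32(bytesRead)
}
```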

View File

@ -98,13 +98,13 @@ static void MTNetworkAvailabilityContextRelease(const void *info)
MTTimer *timer = _timer;
_timer = nil;
[[MTNetworkAvailability networkAvailabilityQueue] dispatchOnQueue:^
{
[timer invalidate];
SCNetworkReachabilitySetCallback(reachability, NULL, NULL);
SCNetworkReachabilitySetDispatchQueue(reachability, NULL);
}];
[[MTNetworkAvailability networkAvailabilityQueue] dispatchOnQueue:^{
[timer invalidate];
SCNetworkReachabilitySetCallback(reachability, NULL, NULL);
SCNetworkReachabilitySetDispatchQueue(reachability, NULL);
CFRelease(reachability);
}];
}
+ (MTQueue *)networkAvailabilityQueue

View File

@ -270,7 +270,7 @@ public final class MediaStreamComponent: CombinedComponent {
let moreButtonTag = local.moreButtonTag
let moreAnimationTag = local.moreAnimationTag
func makeBody() -> CGSize {
let makeBody: () -> CGSize = { [weak local] in
let canEnforceOrientation = UIDevice.current.model != "iPad"
var forceFullScreenInLandscape: Bool { canEnforceOrientation && true }
let environment = context.environment[ViewControllerComponentContainer.Environment.self].value
@ -289,13 +289,20 @@ public final class MediaStreamComponent: CombinedComponent {
let state = context.state
let controller = environment.controller
context.state.deactivatePictureInPictureIfVisible.connect {
context.state.deactivatePictureInPictureIfVisible.connect { [weak state] in
guard let controller = controller(), controller.view.window != nil else {
return
}
guard let state else {
return
}
state.updated(transition: .easeInOut(duration: 3))
deactivatePictureInPicture.invoke(Void())
guard let local else {
return
}
local.deactivatePictureInPicture.invoke(Void())
}
let isFullscreen: Bool
let isLandscape = context.availableSize.width > context.availableSize.height
@ -347,8 +354,7 @@ public final class MediaStreamComponent: CombinedComponent {
availableSize: CGSize(width: context.availableSize.width, height: dismissTapAreaHeight),
transition: context.transition
)
// (controller() as? MediaStreamComponentController)?.prefersOnScreenNavigationHidden = isFullscreen
// (controller() as? MediaStreamComponentController)?.window?.invalidatePrefersOnScreenNavigationHidden()
let video = video.update(
component: MediaStreamVideoComponent(
call: context.component.call,
@ -832,6 +838,8 @@ public final class MediaStreamComponent: CombinedComponent {
UIColor(red: 0.314, green: 0.161, blue: 0.197, alpha: 1).cgColor
],
image: generateImage(CGSize(width: 44.0 * imageRenderScale, height: 44 * imageRenderScale), opaque: false, rotatedContext: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
context.translateBy(x: size.width / 2, y: size.height / 2)
context.scaleBy(x: 0.4, y: 0.4)
context.translateBy(x: -size.width / 2, y: -size.height / 2)

View File

@ -753,12 +753,18 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
public func joinGroupCall(context: AccountContext, peerId: PeerId, invite: String?, requestJoinAsPeerId: ((@escaping (PeerId?) -> Void) -> Void)?, initialCall: EngineGroupCallDescription, endCurrentIfAny: Bool) -> JoinGroupCallManagerResult {
let begin: () -> Void = { [weak self] in
if let requestJoinAsPeerId = requestJoinAsPeerId {
if let requestJoinAsPeerId = requestJoinAsPeerId, (initialCall.isStream == nil || initialCall.isStream == false) {
requestJoinAsPeerId({ joinAsPeerId in
let _ = self?.startGroupCall(accountContext: context, peerId: peerId, invite: invite, joinAsPeerId: joinAsPeerId, initialCall: initialCall).start()
guard let self else {
return
}
self.startCallDisposable.set(self.startGroupCall(accountContext: context, peerId: peerId, invite: invite, joinAsPeerId: joinAsPeerId, initialCall: initialCall).startStrict())
})
} else {
let _ = self?.startGroupCall(accountContext: context, peerId: peerId, invite: invite, joinAsPeerId: nil, initialCall: initialCall).start()
guard let self else {
return
}
self.startCallDisposable.set(self.startGroupCall(accountContext: context, peerId: peerId, invite: invite, joinAsPeerId: nil, initialCall: initialCall).startStrict())
}
}
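
Annotation: switching from `let _ = … .start()` to `self.startCallDisposable.set(… .startStrict())` keeps a handle to the in-flight join request, so a newer request replaces (and disposes) the previous one instead of firing and forgetting it. A rough standalone analogue of that ownership pattern, using a hypothetical task slot rather than SwiftSignalKit's actual disposable types:

```swift
// Hypothetical stand-in for "keep the latest request, cancel the one it replaces".
final class SingleTaskSlot {
    private var current: Task<Void, Never>?
    func set(_ task: Task<Void, Never>?) {
        current?.cancel() // replacing an in-flight request cancels it
        current = task
    }
}

func startGroupCall(peerId: Int64) -> Task<Void, Never> {
    Task {
        // ...the join request would run here, checking Task.isCancelled along the way...
    }
}

let startCallSlot = SingleTaskSlot()
startCallSlot.set(startGroupCall(peerId: 1))
startCallSlot.set(startGroupCall(peerId: 2)) // the first attempt is cancelled, not leaked
```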
@ -805,6 +811,8 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
let accessEnabledSignal: Signal<Bool, NoError> = Signal { subscriber in
if let isStream = initialCall.isStream, isStream {
subscriber.putNext(true)
subscriber.putCompletion()
return EmptyDisposable
}

View File

@ -1165,37 +1165,37 @@ public class Account {
let extractedExpr1: [Signal<AccountRunningImportantTasks, NoError>] = [
managedSynchronizeChatInputStateOperations(postbox: self.postbox, network: self.network) |> map { inputStates in
if inputStates {
print("inputStates: true")
//print("inputStates: true")
}
return inputStates ? AccountRunningImportantTasks.other : []
},
self.pendingMessageManager.hasPendingMessages |> map { hasPendingMessages in
if !hasPendingMessages.isEmpty {
print("hasPendingMessages: true")
//print("hasPendingMessages: true")
}
return !hasPendingMessages.isEmpty ? AccountRunningImportantTasks.pendingMessages : []
},
(self.pendingStoryManager?.hasPending ?? .single(false)) |> map { hasPending in
if hasPending {
print("hasPending: true")
//print("hasPending: true")
}
return hasPending ? AccountRunningImportantTasks.pendingMessages : []
},
self.pendingUpdateMessageManager.updatingMessageMedia |> map { updatingMessageMedia in
if !updatingMessageMedia.isEmpty {
print("updatingMessageMedia: true")
//print("updatingMessageMedia: true")
}
return !updatingMessageMedia.isEmpty ? AccountRunningImportantTasks.pendingMessages : []
},
self.pendingPeerMediaUploadManager.uploadingPeerMedia |> map { uploadingPeerMedia in
if !uploadingPeerMedia.isEmpty {
print("uploadingPeerMedia: true")
//print("uploadingPeerMedia: true")
}
return !uploadingPeerMedia.isEmpty ? AccountRunningImportantTasks.pendingMessages : []
},
self.accountPresenceManager.isPerformingUpdate() |> map { presenceUpdate in
if presenceUpdate {
print("accountPresenceManager isPerformingUpdate: true")
//print("accountPresenceManager isPerformingUpdate: true")
//return []
}
return presenceUpdate ? AccountRunningImportantTasks.other : []

View File

@ -1757,7 +1757,7 @@ private func finalStateWithUpdatesAndServerTime(accountPeerId: PeerId, postbox:
|> mapToSignal { finalState in
return resolveAssociatedMessages(postbox: postbox, network: network, state: finalState)
|> mapToSignal { resultingState -> Signal<AccountFinalState, NoError> in
return resolveAssociatedStories(postbox: postbox, network: network, accountPeerId: accountPeerId, state: finalState)
return resolveAssociatedStories(postbox: postbox, network: network, accountPeerId: accountPeerId, state: resultingState)
|> mapToSignal { resultingState -> Signal<AccountFinalState, NoError> in
return resolveMissingPeerChatInfos(network: network, state: resultingState)
|> map { resultingState, resolveError -> AccountFinalState in

View File

@ -311,11 +311,12 @@ public final class AccountStateManager {
|> distinctUntilChanged
|> mapToSignal { value -> Signal<Never, NoError> in
if isMaxMessageId {
return network.request(Api.functions.messages.receivedMessages(maxId: value))
return .complete()
/*return network.request(Api.functions.messages.receivedMessages(maxId: value))
|> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .complete()
}
}*/
} else {
if value == 0 {
return .complete()

View File

@ -284,6 +284,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
private var appliedForwardInfo: (Peer?, String?)?
private var replyRecognizer: ChatSwipeToReplyRecognizer?
private var currentSwipeAction: ChatControllerInteractionSwipeAction?
private var wasPending: Bool = false
@ -450,6 +451,10 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
self.view.addGestureRecognizer(recognizer)
let replyRecognizer = ChatSwipeToReplyRecognizer(target: self, action: #selector(self.swipeToReplyGesture(_:)))
if let item = self.item {
replyRecognizer.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
self.view.disablesInteractiveTransitionGestureRecognizer = true
}
replyRecognizer.shouldBegin = { [weak self] in
if let strongSelf = self, let item = strongSelf.item {
if strongSelf.selectionNode != nil {
@ -470,6 +475,7 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
}
return false
}
self.replyRecognizer = replyRecognizer
self.view.addGestureRecognizer(replyRecognizer)
}
@ -510,6 +516,9 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
private var setupTimestamp: Double?
private func setupNode(item: ChatMessageItem) {
self.replyRecognizer?.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
self.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
guard self.animationNode == nil else {
return
}
@ -2419,11 +2428,14 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
private var playedSwipeToReplyHaptic = false
@objc func swipeToReplyGesture(_ recognizer: ChatSwipeToReplyRecognizer) {
var offset: CGFloat = 0.0
var leftOffset: CGFloat = 0.0
var swipeOffset: CGFloat = 45.0
if let item = self.item, item.content.effectivelyIncoming(item.context.account.peerId, associatedData: item.associatedData) {
offset = -24.0
leftOffset = -10.0
} else {
offset = 10.0
leftOffset = -10.0
swipeOffset = 60.0
}
@ -2451,7 +2463,11 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
if translation.x < 0.0 {
translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
if recognizer.allowBothDirections {
translation.x = -max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
}
}
if let item = self.item, self.swipeToReplyNode == nil {
@ -2469,7 +2485,13 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
if let swipeToReplyNode = self.swipeToReplyNode {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
if translation.x < 0.0 {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
} else {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: leftOffset - 33.0 * 0.5, y: self.contentSize.height / 2.0)
}
if let (rect, containerSize) = self.absoluteRect {
let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
@ -2488,7 +2510,15 @@ class ChatMessageAnimatedStickerItemNode: ChatMessageItemView {
self.swipeToReplyFeedback = nil
let translation = recognizer.translation(in: self.view)
if case .ended = recognizer.state, translation.x < -swipeOffset {
let gestureRecognized: Bool
if recognizer.allowBothDirections {
gestureRecognized = abs(translation.x) > swipeOffset
} else {
gestureRecognized = translation.x < -swipeOffset
}
if case .ended = recognizer.state, gestureRecognized {
if let item = self.item {
if let currentSwipeAction = currentSwipeAction {
switch currentSwipeAction {

View File

@ -588,6 +588,7 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
private var tapRecognizer: TapLongTapOrDoubleTapGestureRecognizer?
private var replyRecognizer: ChatSwipeToReplyRecognizer?
private var currentSwipeAction: ChatControllerInteractionSwipeAction?
//private let debugNode: ASDisplayNode
@ -1080,6 +1081,10 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
self.view.isExclusiveTouch = true
let replyRecognizer = ChatSwipeToReplyRecognizer(target: self, action: #selector(self.swipeToReplyGesture(_:)))
if let item = self.item {
replyRecognizer.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
self.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
}
replyRecognizer.shouldBegin = { [weak self] in
if let strongSelf = self, let item = strongSelf.item {
if strongSelf.selectionNode != nil {
@ -1111,6 +1116,7 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
}
return false
}
self.replyRecognizer = replyRecognizer
self.view.addGestureRecognizer(replyRecognizer)
}
@ -2671,6 +2677,9 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
strongSelf.updateAccessibilityData(accessibilityData)
strongSelf.disablesComments = disablesComments
strongSelf.replyRecognizer?.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
strongSelf.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
var animation = animation
if strongSelf.mainContextSourceNode.isExtractedToContextPreview {
animation = .System(duration: 0.25, transition: ControlledTransition(duration: 0.25, curve: .easeInOut, interactive: false))
@ -4389,11 +4398,14 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
private var playedSwipeToReplyHaptic = false
@objc func swipeToReplyGesture(_ recognizer: ChatSwipeToReplyRecognizer) {
var offset: CGFloat = 0.0
var leftOffset: CGFloat = 0.0
var swipeOffset: CGFloat = 45.0
if let item = self.item, item.content.effectivelyIncoming(item.context.account.peerId, associatedData: item.associatedData) {
offset = -24.0
leftOffset = -10.0
} else {
offset = 10.0
leftOffset = -10.0
swipeOffset = 60.0
}
@ -4421,7 +4433,11 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
if translation.x < 0.0 {
translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
if recognizer.allowBothDirections {
translation.x = -max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
}
}
if let item = self.item, self.swipeToReplyNode == nil {
@ -4441,8 +4457,13 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
self.updateAttachedAvatarNodeOffset(offset: translation.x, transition: .immediate)
if let swipeToReplyNode = self.swipeToReplyNode {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
if translation.x < 0.0 {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
} else {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: leftOffset - 33.0 * 0.5, y: self.contentSize.height / 2.0)
}
if let (rect, containerSize) = self.absoluteRect {
let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
@ -4461,7 +4482,13 @@ class ChatMessageBubbleItemNode: ChatMessageItemView, ChatMessagePreviewItemNode
self.swipeToReplyFeedback = nil
let translation = recognizer.translation(in: self.view)
if case .ended = recognizer.state, translation.x < -swipeOffset {
let gestureRecognized: Bool
if recognizer.allowBothDirections {
gestureRecognized = abs(translation.x) > swipeOffset
} else {
gestureRecognized = translation.x < -swipeOffset
}
if case .ended = recognizer.state, gestureRecognized {
if let item = self.item {
if let currentSwipeAction = currentSwipeAction {
switch currentSwipeAction {

View File

@ -57,7 +57,8 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
var currentSwipeToReplyTranslation: CGFloat = 0.0
var recognizer: TapLongTapOrDoubleTapGestureRecognizer?
private var replyRecognizer: ChatSwipeToReplyRecognizer?
var currentSwipeAction: ChatControllerInteractionSwipeAction?
override var visibility: ListViewItemNodeVisibility {
@ -220,7 +221,14 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
}
return false
}
if let item = self.item {
replyRecognizer.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
self.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
}
self.replyRecognizer = replyRecognizer
self.view.addGestureRecognizer(replyRecognizer)
self.view.disablesInteractiveTransitionGestureRecognizer = true
}
override func updateAccessibilityData(_ accessibilityData: ChatMessageAccessibilityData) {
@ -617,6 +625,9 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
strongSelf.appliedCurrentlyPlaying = isPlaying
strongSelf.appliedAutomaticDownload = automaticDownload
strongSelf.replyRecognizer?.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
strongSelf.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
strongSelf.updateAccessibilityData(accessibilityData)
let videoLayoutData: ChatMessageInstantVideoItemLayoutData
@ -1006,11 +1017,14 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
private var playedSwipeToReplyHaptic = false
@objc func swipeToReplyGesture(_ recognizer: ChatSwipeToReplyRecognizer) {
var offset: CGFloat = 0.0
var leftOffset: CGFloat = 0.0
var swipeOffset: CGFloat = 45.0
if let item = self.item, item.content.effectivelyIncoming(item.context.account.peerId, associatedData: item.associatedData) {
offset = -24.0
leftOffset = -10.0
} else {
offset = 10.0
leftOffset = -10.0
swipeOffset = 60.0
}
@ -1024,8 +1038,26 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
}
self.item?.controllerInteraction.cancelInteractiveKeyboardGestures()
case .changed:
func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
let bandedOffset = offset - bandingStart
if offset < bandingStart {
return offset
}
let range: CGFloat = 100.0
let coefficient: CGFloat = 0.4
return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
}
var translation = recognizer.translation(in: self.view)
translation.x = max(-80.0, min(0.0, translation.x))
if translation.x < 0.0 {
translation.x = max(-80.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
if recognizer.allowBothDirections {
translation.x = -max(-80.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
}
}
if let item = self.item, self.swipeToReplyNode == nil {
let swipeToReplyNode = ChatMessageSwipeToReplyNode(fillColor: selectDateFillStaticColor(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), enableBlur: item.controllerInteraction.enableFullTranslucency && dateFillNeedsBlur(theme: item.presentationData.theme.theme, wallpaper: item.presentationData.theme.wallpaper), foregroundColor: bubbleVariableColor(variableColor: item.presentationData.theme.theme.chat.message.shareButtonForegroundColor, wallpaper: item.presentationData.theme.wallpaper), backgroundNode: item.controllerInteraction.presentationContext.backgroundNode, action: ChatMessageSwipeToReplyNode.Action(self.currentSwipeAction))
@ -1042,7 +1074,13 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
if let swipeToReplyNode = self.swipeToReplyNode {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
if translation.x < 0.0 {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
} else {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: leftOffset - 33.0 * 0.5, y: self.contentSize.height / 2.0)
}
if let (rect, containerSize) = self.absoluteRect {
let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
@ -1061,7 +1099,13 @@ class ChatMessageInstantVideoItemNode: ChatMessageItemView, UIGestureRecognizerD
self.swipeToReplyFeedback = nil
let translation = recognizer.translation(in: self.view)
if case .ended = recognizer.state, translation.x < -swipeOffset {
let gestureRecognized: Bool
if recognizer.allowBothDirections {
gestureRecognized = abs(translation.x) > swipeOffset
} else {
gestureRecognized = translation.x < -swipeOffset
}
if case .ended = recognizer.state, gestureRecognized {
if let item = self.item {
if let currentSwipeAction = currentSwipeAction {
switch currentSwipeAction {

View File

@ -55,6 +55,7 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
private var currentSwipeToReplyTranslation: CGFloat = 0.0
private var replyRecognizer: ChatSwipeToReplyRecognizer?
private var currentSwipeAction: ChatControllerInteractionSwipeAction?
private var appliedForwardInfo: (Peer?, String?)?
@ -254,12 +255,22 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
}
return false
}
if let item = self.item {
replyRecognizer.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
self.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
}
self.replyRecognizer = replyRecognizer
self.view.addGestureRecognizer(replyRecognizer)
self.view.disablesInteractiveTransitionGestureRecognizer = true
}
override func setupItem(_ item: ChatMessageItem, synchronousLoad: Bool) {
super.setupItem(item, synchronousLoad: synchronousLoad)
self.replyRecognizer?.allowBothDirections = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
self.view.disablesInteractiveTransitionGestureRecognizer = !item.context.sharedContext.immediateExperimentalUISettings.unidirectionalSwipeToReply
for media in item.message.media {
if let telegramFile = media as? TelegramMediaFile {
if self.telegramFile != telegramFile {
@ -1393,11 +1404,14 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
private var playedSwipeToReplyHaptic = false
@objc func swipeToReplyGesture(_ recognizer: ChatSwipeToReplyRecognizer) {
var offset: CGFloat = 0.0
var leftOffset: CGFloat = 0.0
var swipeOffset: CGFloat = 45.0
if let item = self.item, item.content.effectivelyIncoming(item.context.account.peerId, associatedData: item.associatedData) {
offset = -24.0
leftOffset = -10.0
} else {
offset = 10.0
leftOffset = -10.0
swipeOffset = 60.0
}
@ -1425,7 +1439,11 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
if translation.x < 0.0 {
translation.x = max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
if recognizer.allowBothDirections {
translation.x = -max(-180.0, min(0.0, -rubberBandingOffset(offset: abs(translation.x), bandingStart: swipeOffset)))
} else {
translation.x = 0.0
}
}
if let item = self.item, self.swipeToReplyNode == nil {
@ -1443,7 +1461,13 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
if let swipeToReplyNode = self.swipeToReplyNode {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
if translation.x < 0.0 {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: bounds.size.width + offset + 33.0 * 0.5, y: self.contentSize.height / 2.0)
} else {
swipeToReplyNode.bounds = CGRect(origin: .zero, size: CGSize(width: 33.0, height: 33.0))
swipeToReplyNode.position = CGPoint(x: leftOffset - 33.0 * 0.5, y: self.contentSize.height / 2.0)
}
if let (rect, containerSize) = self.absoluteRect {
let mappedRect = CGRect(origin: CGPoint(x: rect.minX + swipeToReplyNode.frame.minX, y: rect.minY + swipeToReplyNode.frame.minY), size: swipeToReplyNode.frame.size)
@ -1462,7 +1486,13 @@ class ChatMessageStickerItemNode: ChatMessageItemView {
self.swipeToReplyFeedback = nil
let translation = recognizer.translation(in: self.view)
if case .ended = recognizer.state, translation.x < -swipeOffset {
let gestureRecognized: Bool
if recognizer.allowBothDirections {
gestureRecognized = abs(translation.x) > swipeOffset
} else {
gestureRecognized = translation.x < -swipeOffset
}
if case .ended = recognizer.state, gestureRecognized {
if let item = self.item {
if let currentSwipeAction = currentSwipeAction {
switch currentSwipeAction {

View File

@ -4,6 +4,7 @@ import UIKit
class ChatSwipeToReplyRecognizer: UIPanGestureRecognizer {
var validatedGesture = false
var firstLocation: CGPoint = CGPoint()
var allowBothDirections: Bool = true
var shouldBegin: (() -> Bool)?
@ -37,17 +38,17 @@ class ChatSwipeToReplyRecognizer: UIPanGestureRecognizer {
let absTranslationX: CGFloat = abs(translation.x)
let absTranslationY: CGFloat = abs(translation.y)
if !validatedGesture {
if translation.x > 0.0 {
if !self.validatedGesture {
if !self.allowBothDirections && translation.x > 0.0 {
self.state = .failed
} else if absTranslationY > 2.0 && absTranslationY > absTranslationX * 2.0 {
self.state = .failed
} else if absTranslationX > 2.0 && absTranslationY * 2.0 < absTranslationX {
validatedGesture = true
self.validatedGesture = true
}
}
if validatedGesture {
if self.validatedGesture {
super.touchesMoved(touches, with: event)
}
}
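
Annotation: the validation logic above, restated as a standalone predicate for illustration (not the recognizer itself): a mostly vertical drag fails, a mostly horizontal one validates, and when allowBothDirections is false any rightward drag fails immediately.

```swift
import CoreGraphics

enum GestureDecision { case failed, validated, undecided }

func decide(translation: CGPoint, allowBothDirections: Bool) -> GestureDecision {
    let absX = abs(translation.x)
    let absY = abs(translation.y)
    if !allowBothDirections && translation.x > 0.0 {
        return .failed      // rightward swipes are rejected in one-directional mode
    } else if absY > 2.0 && absY > absX * 2.0 {
        return .failed      // predominantly vertical movement
    } else if absX > 2.0 && absY * 2.0 < absX {
        return .validated   // predominantly horizontal movement
    }
    return .undecided
}

print(decide(translation: CGPoint(x: -8, y: 1), allowBothDirections: false)) // validated
print(decide(translation: CGPoint(x: 8, y: 1), allowBothDirections: false))  // failed
print(decide(translation: CGPoint(x: 8, y: 1), allowBothDirections: true))   // validated
```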

View File

@ -53,6 +53,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
public var storiesExperiment: Bool
public var storiesJpegExperiment: Bool
public var crashOnMemoryPressure: Bool
public var unidirectionalSwipeToReply: Bool
public static var defaultSettings: ExperimentalUISettings {
return ExperimentalUISettings(
@ -83,7 +84,8 @@ public struct ExperimentalUISettings: Codable, Equatable {
logLanguageRecognition: false,
storiesExperiment: false,
storiesJpegExperiment: false,
crashOnMemoryPressure: false
crashOnMemoryPressure: false,
unidirectionalSwipeToReply: false
)
}
@ -115,7 +117,8 @@ public struct ExperimentalUISettings: Codable, Equatable {
logLanguageRecognition: Bool,
storiesExperiment: Bool,
storiesJpegExperiment: Bool,
crashOnMemoryPressure: Bool
crashOnMemoryPressure: Bool,
unidirectionalSwipeToReply: Bool
) {
self.keepChatNavigationStack = keepChatNavigationStack
self.skipReadHistory = skipReadHistory
@ -145,6 +148,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.storiesExperiment = storiesExperiment
self.storiesJpegExperiment = storiesJpegExperiment
self.crashOnMemoryPressure = crashOnMemoryPressure
self.unidirectionalSwipeToReply = unidirectionalSwipeToReply
}
public init(from decoder: Decoder) throws {
@ -178,6 +182,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
self.storiesExperiment = try container.decodeIfPresent(Bool.self, forKey: "storiesExperiment") ?? false
self.storiesJpegExperiment = try container.decodeIfPresent(Bool.self, forKey: "storiesJpegExperiment") ?? false
self.crashOnMemoryPressure = try container.decodeIfPresent(Bool.self, forKey: "crashOnMemoryPressure") ?? false
self.unidirectionalSwipeToReply = try container.decodeIfPresent(Bool.self, forKey: "unidirectionalSwipeToReply") ?? false
}
public func encode(to encoder: Encoder) throws {
@ -211,6 +216,7 @@ public struct ExperimentalUISettings: Codable, Equatable {
try container.encode(self.storiesExperiment, forKey: "storiesExperiment")
try container.encode(self.storiesJpegExperiment, forKey: "storiesJpegExperiment")
try container.encode(self.crashOnMemoryPressure, forKey: "crashOnMemoryPressure")
try container.encode(self.unidirectionalSwipeToReply, forKey: "unidirectionalSwipeToReply")
}
}
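
Annotation: the new unidirectionalSwipeToReply flag is decoded with `decodeIfPresent(...) ?? false`, so settings blobs persisted before this commit still decode and the flag simply defaults to off. A self-contained sketch of that backward-compatibility trick (plain synthesized CodingKeys here instead of the string keys the real struct uses):

```swift
import Foundation

struct Settings: Codable, Equatable {
    var crashOnMemoryPressure: Bool
    var unidirectionalSwipeToReply: Bool

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        self.crashOnMemoryPressure = try container.decodeIfPresent(Bool.self, forKey: .crashOnMemoryPressure) ?? false
        // Key absent in old payloads: fall back to false instead of throwing .keyNotFound.
        self.unidirectionalSwipeToReply = try container.decodeIfPresent(Bool.self, forKey: .unidirectionalSwipeToReply) ?? false
    }
}

let legacyJSON = Data(#"{"crashOnMemoryPressure": true}"#.utf8)
let decoded = try! JSONDecoder().decode(Settings.self, from: legacyJSON)
print(decoded.unidirectionalSwipeToReply) // false: the missing key falls back to its default
```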

@ -1 +1 @@
Subproject commit 8031d485599404c7edf51337ab27ade5aaec9724
Subproject commit 272dfe03416f1022dfc5d73875dbe8ace9ec9cfd