Merge commit 'a0a39026475450a47ccbda530cee2cbbaccf9455'

Ali 2020-08-10 16:50:40 +02:00
commit a23b97ea6f
34 changed files with 2595 additions and 2408 deletions

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "Si envieu la vostra ubicació als amics, Telegram requereix accés per a mostra-los un mapa.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Si trieu de compartir la vostra ubicació en directe amb amics en un xat, Telegram requereix accés en segon pla a la vostra ubicació per a actualitzar-la durant la compartició en directe.";
"NSLocationAlwaysUsageDescription" = "Si trieu de compartir la vostra ubicació en directe amb amics en un xat, Telegram requereix de tenir accés en segon pla a la vostra ubicació per a actualitzar-la durant la compartició en directe. També necessiteu això per a enviar ubicacions des d'un Apple Watch.";
"NSCameraUsageDescription" = "Ens cal això perquè pugueu fer i compartir fotos i vídeos.";
"NSCameraUsageDescription" = "Ens cal això perquè pugueu fer i compartir fotos i vídeos, així com fer videotrucades.";
"NSPhotoLibraryUsageDescription" = "Ens cal això perquè pugueu compartir fotos i vídeos de la biblioteca de fotos.";
"NSPhotoLibraryAddUsageDescription" = "Ens cal això perquè així pugueu desar fotos i vídeos a la biblioteca de fotos.";
"NSMicrophoneUsageDescription" = "Ens cal això perquè pugueu enregistrar i compartir missatges de veu i vídeos amb so.";

View File

@ -5733,3 +5733,5 @@ Any member of this group will be able to see messages in the channel.";
"Call.Audio" = "audio";
"Call.AudioRouteMute" = "Mute Yourself";
"AccessDenied.VideoCallCamera" = "Telegram needs access to your camera to make video calls.\n\nPlease go to Settings > Privacy > Camera and set Telegram to ON.";

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "Cuando envías tu ubicación a tus amigos, Telegram necesita acceso para mostrarles un mapa.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Cuando eliges compartir tu ubicación en tiempo real con amigos en un chat, Telegram necesita acceso en segundo plano a tu ubicación para mantenerla actualizada mientras la función esté en uso.";
"NSLocationAlwaysUsageDescription" = "Cuando envías tu ubicación a tus amigos, Telegram necesita acceso para mostrarles un mapa. También es requerido para enviar ubicaciones desde un Apple Watch.";
"NSCameraUsageDescription" = "Es requerido para que puedas hacer fotos y vídeos.";
"NSCameraUsageDescription" = "Necesitamos esto para que puedas tomar y compartir fotos y videos, así como para realizar videollamadas.";
"NSPhotoLibraryUsageDescription" = "Es requerido para que puedas compartir fotos y vídeos desde tu biblioteca de fotos.";
"NSPhotoLibraryAddUsageDescription" = "Necesitamos esto para que puedas guardar fotos y videos en tu biblioteca de fotos.";
"NSMicrophoneUsageDescription" = "Es requerido para que puedas grabar y compartir mensajes de voz y vídeos con sonido.";

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IntentPhrases</key>
<array>
<dict>
<key>IntentName</key>
<string>INSendMessageIntent</string>
<key>IntentExamples</key>
<array>
<string>Send a Telegram message to Alex saying I&apos;ll be there in 10 minutes</string>
</array>
</dict>
</array>
</dict>
</plist>
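
The plist above supplies a localized example phrase for INSendMessageIntent; the same file is added for other locales further down. As a hypothetical illustration of the intent type the phrase refers to (none of this code is in the commit; the helper name and strings are made up), a sketch that constructs and donates such an intent:

import Intents

// Hypothetical helper (not part of this commit): donates a sent message so the
// system can associate the app with phrases like the example declared above.
func donateSendMessage(to recipientName: String, text: String) {
    let recipient = INPerson(
        personHandle: INPersonHandle(value: nil, type: .unknown),
        nameComponents: nil,
        displayName: recipientName,
        image: nil,
        contactIdentifier: nil,
        customIdentifier: nil
    )
    let intent = INSendMessageIntent(
        recipients: [recipient],
        content: text,
        speakableGroupName: nil,
        conversationIdentifier: nil,
        serviceName: nil,
        sender: nil
    )
    INInteraction(intent: intent, response: nil).donate { error in
        if let error = error {
            print("Intent donation failed: \(error)")
        }
    }
}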

View File

@ -0,0 +1,12 @@
/* Localized versions of Info.plist keys */
"NSContactsUsageDescription" = "تلگرام به طور مداوم مخاطبین شما را به سرورهای اَبری و رمزگذاری شده خود آپلود می‌کند تا شما بتوانید در تمامی دستگاه‌های خود با دوستانتان ارتباط برقرار کنید.";
"NSLocationWhenInUseUsageDescription" = "هنگامی که شما موقعیت مکانی خود را برای دوستانتان ارسال می‌کنید، تلگرام جهت نشان دادن نقشه به آن‌ها نیازمند دسترسی است.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "در صورتی که تصمیم بگیرید موقعیت مکانی لحظه‌ای خود را در یک گفتگو با دوستانتان به اشتراک بگذارید، تلگرام جهت به‌روزرسانی این اطلاعات در طول به اشتراک گذاری لحظه‌ای، باید در پس‌زمینه به موقعیت مکانی شما دسترسی داشته باشد.";
"NSLocationAlwaysUsageDescription" = "در صورتی که شما تصمیم بگیرید موقعیت مکانی لحظه‌ای خود را در یک گفتگو با دوستانتان به اشتراک بگذارید، تلگرام جهت به‌روزرسانی این اطلاعات در طول به اشتراک گذاری لحظه‌ای، باید در پس‌زمینه به موقعیت مکانی شما دسترسی داشته باشد. شما همچنین برای ارسال موقعیت مکانی از ساعت اَپل به این دسترسی نیاز دارید.";
"NSCameraUsageDescription" = "ما به این دسترسی نیاز داریم تا شما بتوانید عکس‌ و ویدیو گرفته و آن‌ها را به اشتراک بگذارید.";
"NSPhotoLibraryUsageDescription" = "ما به این دسترسی نیاز داریم تا شما بتوانید از گالری تصاویر خود عکس و ویدیو به اشتراک بگذارید.";
"NSPhotoLibraryAddUsageDescription" = "ما به این دسترسی نیاز داریم تا شما بتوانید عکس‌ها و ویدیوها را در گالری تصاویر خود ذخیره کنید.";
"NSMicrophoneUsageDescription" = "ما به این دسترسی نیاز داریم تا شما بتوانید پیام‌های صوتی و ویدیوهای صدادار را ضبط کرده و به اشتراک بگذارید.";
"NSSiriUsageDescription" = "شما می‌توانید با استفاده از Siri پیام ارسال کنید.";
"NSFaceIDUsageDescription" = "شما می‌توانید با استفاده‌ از Face ID قفل برنامه را باز کنید.";

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "Quando invii la tua posizione ai tuoi amici, Telegram ha bisogno di accedere per mostrare loro la mappa.";
"NSLocationAlwaysUsageDescription" = "Quando invii la tua posizione ai tuoi amici, Telegram ha bisogno di accedere per mostrare loro la mappa. Ti serve anche per inviare posizioni da Apple Watch.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Quando scegli di condividere la tua Posizione Attuale con gli amici in una chat, Telegram ha bisogno dell'accesso in background alla tua posizione per tenerli aggiornati durante la durata della condivisione della posizione.";
"NSCameraUsageDescription" = "Ci serve per farti scattare, registrare e condividere foto e video.";
"NSCameraUsageDescription" = "Ci serve per farti scattare, registrare e condividere foto e video, oltre che per fare videochiamate.";
"NSPhotoLibraryUsageDescription" = "Ci serve per farti condividere foto e video dalla tua libreria foto.";
"NSPhotoLibraryAddUsageDescription" = "Ci serve per farti salvare foto e video nella tua libreria foto.";
"NSMicrophoneUsageDescription" = "Ci serve per farti registrare e condividere messaggi vocali e video con il sonoro.";

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "Bila anda hantar lokasi anda kepada rakan anda, Telegram perlukan akses untuk tunjuk peta.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Bila anda pilih untuk kongsi Lokasi Langsung anda dengan rakan dalam chat, Telegram perlu akses latar belakang ke lokasi anda agar lokasi anda sentiasa dikemaskini ketika perkongsian.";
"NSLocationAlwaysUsageDescription" = "Bila anda pilih untuk kongsi lokasi langsung anda dengan rakan dalam chat, Telegram perlu akses latar belakang agar lokasi anda sentiasa dikemaskini. Anda juga harus hantar lokasi anda ke Jam Apple.";
"NSCameraUsageDescription" = "Kita perlukan ini agar anda boleh ambil dan kongsi foto dan video.";
"NSCameraUsageDescription" = "Kita perlukan ini agar anda boleh ambil dan kongsi foto dan video, dan juga buat panggilan video.";
"NSPhotoLibraryUsageDescription" = "Kita perlu ini agar anda boleh kongsi foto dan video dari librari foto anda.";
"NSPhotoLibraryAddUsageDescription" = "Kita perlu ini agar anda boleh simpan foto dan video ke librari foto anda.";
"NSMicrophoneUsageDescription" = "Kita perlu ini agar anda boleh rekod dan kongsi mesej suara dan video dengan suara.";

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IntentPhrases</key>
<array>
<dict>
<key>IntentName</key>
<string>INSendMessageIntent</string>
<key>IntentExamples</key>
<array>
<string>Send a Telegram message to Alex saying I&apos;ll be there in 10 minutes</string>
</array>
</dict>
</array>
</dict>
</plist>

View File

@ -0,0 +1,12 @@
/* Localized versions of Info.plist keys */
"NSContactsUsageDescription" = "Telegram będzie nieprzerwanie przesyłać kontakty do silnie zaszyfrowanych serwerów w chmurze, aby umożliwić ci połączenie się ze znajomymi na wszystkich urządzeniach.";
"NSLocationWhenInUseUsageDescription" = "Gdy wysyłasz swoją lokalizację znajomym, Telegram potrzebuje dostępu, aby pokazać im mapę.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Gdy zdecydujesz się udostępnić swoją „lokalizację na żywo” znajomym podczas czatu, Telegram potrzebuje dostępu w tle do twojej lokalizacji, aby zapewnić jej aktualizację przez cały czas udostępniania „na żywo”.";
"NSLocationAlwaysUsageDescription" = "Gdy zdecydujesz się udostępnić swoją „lokalizację na żywo” znajomym podczas czatu, Telegram potrzebuje dostępu w tle do twojej lokalizacji, aby zapewnić jej aktualizację przez cały czas udostępniania „na żywo”. Jest to również potrzebne do wysyłania lokalizacji z Apple Watch.";
"NSCameraUsageDescription" = "Potrzebujemy tego, aby można było robić i udostępniać zdjęcia i wideo, a także prowadzić rozmowy wideo.";
"NSPhotoLibraryUsageDescription" = "Potrzebujemy tego, aby można było udostępniać zdjęcia i wideo ze swojej biblioteki zdjęć.";
"NSPhotoLibraryAddUsageDescription" = "Potrzebujemy tego, aby można było zapisywać zdjęcia i wideo w swojej bibliotece zdjęć.";
"NSMicrophoneUsageDescription" = "Potrzebujemy tego, aby można było nagrywać i udostępniać wiadomości głosowe i wideo z dźwiękiem.";
"NSSiriUsageDescription" = "Możesz użyć Siri do wysyłania wiadomości.";
"NSFaceIDUsageDescription" = "Możesz użyć Face ID, aby odblokować aplikację.";

View File

@ -4,7 +4,7 @@
"NSLocationWhenInUseUsageDescription" = "Konumunuzu arkadaşlarınıza gönderdiğinizde, Telegram'ın onlara bir harita göstermesi için erişmesi gerekiyor.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Canlı Konumunuzu arkadaşlarınızla bir sohbette paylaşmayı seçtiğinizde, Telegram'ın canlı paylaşım süresince onları güncel tutmak için konumunuza arka plan erişimi olması gerekir.";
"NSLocationAlwaysUsageDescription" = "Canlı konumunuzu bir sohbette arkadaşlarınızla paylaşmayı seçtiğinizde, Telegram'ın canlı paylaşım süresince konumunuzu güncel tutması için bir arka plan erişimi gerekir. Ayrıca Apple Watch'dan konum göndermek için de buna ihtiyacınız var.";
"NSCameraUsageDescription" = "Fotoğraf ve video çekip paylaşabilmeniz için buna ihtiyacımız var.";
"NSCameraUsageDescription" = "Fotoğraf ve video çekip paylaşabilmeniz ve görüntülü arama yapabilmeniz için buna ihtiyacımız var.";
"NSPhotoLibraryUsageDescription" = "Fotoğraf arşivinizdeki fotoğraf ve videoları paylaşabilmeniz için buna ihtiyacımız var.";
"NSPhotoLibraryAddUsageDescription" = "Fotoğraf arşivine fotoğraf ve video kaydedebilmeniz için buna ihtiyacımız var.";
"NSMicrophoneUsageDescription" = "Sesli mesajları ve videoları ses ile kaydedebilmeniz ve paylaşabilmeniz için buna ihtiyacımız var.";

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IntentPhrases</key>
<array>
<dict>
<key>IntentName</key>
<string>INSendMessageIntent</string>
<key>IntentExamples</key>
<array>
<string>Send a Telegram message to Alex saying I&apos;ll be there in 10 minutes</string>
</array>
</dict>
</array>
</dict>
</plist>

View File

@ -0,0 +1,12 @@
/* Localized versions of Info.plist keys */
"NSContactsUsageDescription" = "Telegram barcha qurilmalaringizdan doʻstlaringiz bilan bogʻlana olishingiz uchun muntazam ravishda kontaktlaringizni kuchli shifrlanadigan bulut serverlariga yuklaydi.";
"NSLocationWhenInUseUsageDescription" = "Joylashuvingizni doʻstlaringizga yuborganingizda Telegram ularga xaritani koʻrsatishi uchun ruxsat kerak boʻladi.";
"NSLocationAlwaysAndWhenInUseUsageDescription" = "Chatda doʻstlarga Jonli joylashuvingizni ulashishni tanlaganingizda, Telegram joylashuvingizni jonli ulashish davomida yangilab turishi uchun undan fonda foydalanishi kerak.";
"NSLocationAlwaysUsageDescription" = "Chatda doʻstlarga Jonli joylashuvingizni ulashishni tanlaganingizda, Telegram joylashuvingizni jonli ulashish davomida yangilab turishi uchun undan fonda foydalanishi kerak. Sizga bu Apple Watchdan joylashuvlarni yuborish uchun ham kerak.";
"NSCameraUsageDescription" = "Bu bizga rasm va videolarga olish, shuningdek, video chaqiruvlar qilish va ularni ulashishingiz uchun kerak.";
"NSPhotoLibraryUsageDescription" = "Bu bizga galereyangizdan rasm va videolaringizni ulasha olishingiz uchun kerak.";
"NSPhotoLibraryAddUsageDescription" = "Bu bizga rasm va videolaringizni galereyangizga saqlay olishingiz uchun kerak.";
"NSMicrophoneUsageDescription" = "Bu bizga ovozli xabarlarni tovush bilan yozishingiz va ulashingiz uchun kerak.";
"NSSiriUsageDescription" = "Siz xabarlar yuborish uchun Siridan foydalanishingiz mumkin.";
"NSFaceIDUsageDescription" = "Siz ilovani Face ID yordamida ochishingiz mumkin.";

View File

@ -112,7 +112,7 @@ public final class AuthDataTransferSplashScreen: ViewController {
return
}
DeviceAccess.authorizeAccess(to: .camera, presentationData: strongSelf.presentationData, present: { c, a in
DeviceAccess.authorizeAccess(to: .camera(.video), presentationData: strongSelf.presentationData, present: { c, a in
guard let strongSelf = self else {
return
}

View File

@ -15,6 +15,12 @@ import TelegramPresentationData
import LegacyComponents
import AccountContext
public enum DeviceAccessCameraSubject {
case video
case videoCall
}
public enum DeviceAccessMicrophoneSubject {
case audio
case video
@ -34,7 +40,7 @@ public enum DeviceAccessLocationSubject {
}
public enum DeviceAccessSubject {
case camera
case camera(DeviceAccessCameraSubject)
case microphone(DeviceAccessMicrophoneSubject)
case mediaLibrary(DeviceAccessMediaLibrarySubject)
case location(DeviceAccessLocationSubject)
@ -246,14 +252,20 @@ public final class DeviceAccess {
public static func authorizeAccess(to subject: DeviceAccessSubject, registerForNotifications: ((@escaping (Bool) -> Void) -> Void)? = nil, requestSiriAuthorization: ((@escaping (Bool) -> Void) -> Void)? = nil, locationManager: LocationManager? = nil, presentationData: PresentationData? = nil, present: @escaping (ViewController, Any?) -> Void = { _, _ in }, openSettings: @escaping () -> Void = { }, displayNotificationFromBackground: @escaping (String) -> Void = { _ in }, _ completion: @escaping (Bool) -> Void = { _ in }) {
switch subject {
case .camera:
case let .camera(cameraSubject):
let status = PGCamera.cameraAuthorizationStatus()
if status == PGCameraAuthorizationStatusNotDetermined {
AVCaptureDevice.requestAccess(for: AVMediaType.video) { response in
Queue.mainQueue().async {
completion(response)
if !response, let presentationData = presentationData {
let text = presentationData.strings.AccessDenied_Camera
let text: String
switch cameraSubject {
case .video:
text = presentationData.strings.AccessDenied_Camera
case .videoCall:
text = presentationData.strings.AccessDenied_VideoCallCamera
}
present(standardTextAlertController(theme: AlertControllerTheme(presentationData: presentationData), title: presentationData.strings.AccessDenied_Title, text: text, actions: [TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_NotNow, action: {}), TextAlertAction(type: .genericAction, title: presentationData.strings.AccessDenied_Settings, action: {
openSettings()
})]), nil)
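
A stand-alone sketch of the pattern this hunk introduces: the AVCaptureDevice permission request is unchanged, but the denial text now depends on a camera subject. The enum below is a simplified stand-in for DeviceAccessCameraSubject; the .videoCall text is the AccessDenied.VideoCallCamera string added in this commit, the .video wording is illustrative, and the PGCamera status check and alert presentation from the real code are omitted.

import AVFoundation

// Simplified stand-in for DeviceAccessCameraSubject.
enum CameraSubject {
    case video      // taking and sharing photos and videos
    case videoCall  // one-on-one video calls
}

func authorizeCamera(for subject: CameraSubject, completion: @escaping (Bool, String?) -> Void) {
    AVCaptureDevice.requestAccess(for: .video) { granted in
        DispatchQueue.main.async {
            if granted {
                completion(true, nil)
                return
            }
            // Pick a subject-specific explanation, mirroring the switch over
            // `cameraSubject` added to DeviceAccess.authorizeAccess above.
            let text: String
            switch subject {
            case .video:
                // Illustrative wording; the real code uses AccessDenied_Camera.
                text = "Telegram needs access to your camera so that you can take and share photos and videos."
            case .videoCall:
                text = "Telegram needs access to your camera to make video calls.\n\nPlease go to Settings > Privacy > Camera and set Telegram to ON."
            }
            completion(false, text)
        }
    }
}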

View File

@ -1482,7 +1482,7 @@
break;
default:
frame = CGRectMake(screenEdges.right - 46 - _safeAreaInset.right - buttonInset, screenEdges.bottom - TGPhotoEditorToolbarSize - [_captionMixin.inputPanel baseHeight] - 45 - _safeAreaInset.bottom - panelInset, 44, 44);
frame = CGRectMake(screenEdges.right - 46 - _safeAreaInset.right - buttonInset, screenEdges.bottom - TGPhotoEditorToolbarSize - [_captionMixin.inputPanel baseHeight] - 45 - _safeAreaInset.bottom - panelInset - (hasHeaderView ? 64.0 : 0.0), 44, 44);
break;
}

View File

@ -127,7 +127,7 @@ public func legacyAttachmentMenu(context: AccountContext, peer: Peer, editMediaO
return
}
DeviceAccess.authorizeAccess(to: .camera, presentationData: context.sharedContext.currentPresentationData.with { $0 }, present: context.sharedContext.presentGlobalController, openSettings: context.sharedContext.applicationBindings.openSettings, { value in
DeviceAccess.authorizeAccess(to: .camera(.video), presentationData: context.sharedContext.currentPresentationData.with { $0 }, present: context.sharedContext.presentGlobalController, openSettings: context.sharedContext.applicationBindings.openSettings, { value in
if value {
openCamera(cameraView, controller)
}

View File

@ -29,6 +29,7 @@
#define IPHONE_8Plus_NAMESTRING @"iPhone 8 Plus"
#define IPHONE_X_NAMESTRING @"iPhone X"
#define IPHONE_SE_NAMESTRING @"iPhone SE"
#define IPHONE_SE2_NAMESTRING @"iPhone SE (2nd gen)"
#define IPHONE_XS_NAMESTRING @"iPhone XS"
#define IPHONE_XSMAX_NAMESTRING @"iPhone XS Max"
#define IPHONE_XR_NAMESTRING @"iPhone XR"
@ -96,7 +97,7 @@ typedef enum {
UIDevice5SiPhone,
UIDevice6iPhone,
UIDevice6PlusiPhone,
UIDevice6siPhone,
UIDevice6SiPhone,
UIDevice6SPlusiPhone,
UIDevice7iPhone,
UIDevice7PlusiPhone,
@ -104,6 +105,7 @@ typedef enum {
UIDevice8PlusiPhone,
UIDeviceXiPhone,
UIDeviceSEPhone,
UIDeviceSE2Phone,
UIDeviceXSiPhone,
UIDeviceXSMaxiPhone,
UIDeviceXRiPhone,
@ -587,7 +589,7 @@ NSString *suffix = @"";
case UIDevice5SiPhone: return IPHONE_5S_NAMESTRING;
case UIDevice6iPhone: return IPHONE_6_NAMESTRING;
case UIDevice6PlusiPhone: return IPHONE_6Plus_NAMESTRING;
case UIDevice6siPhone: return IPHONE_6S_NAMESTRING;
case UIDevice6SiPhone: return IPHONE_6S_NAMESTRING;
case UIDevice6SPlusiPhone: return IPHONE_6SPlus_NAMESTRING;
case UIDevice7iPhone: return IPHONE_7_NAMESTRING;
case UIDevice7PlusiPhone: return IPHONE_7Plus_NAMESTRING;
@ -595,6 +597,7 @@ NSString *suffix = @"";
case UIDevice8PlusiPhone: return IPHONE_8Plus_NAMESTRING;
case UIDeviceXiPhone: return IPHONE_X_NAMESTRING;
case UIDeviceSEPhone: return IPHONE_SE_NAMESTRING;
case UIDeviceSE2Phone: return IPHONE_SE2_NAMESTRING;
case UIDeviceXSiPhone: return IPHONE_XS_NAMESTRING;
case UIDeviceXSMaxiPhone: return IPHONE_XSMAX_NAMESTRING;
case UIDeviceXRiPhone: return IPHONE_XR_NAMESTRING;
@ -675,7 +678,7 @@ NSString *suffix = @"";
if ([platform isEqualToString:@"iPhone7,1"]) return UIDevice6PlusiPhone;
if ([platform isEqualToString:@"iPhone7,2"]) return UIDevice6iPhone;
if ([platform isEqualToString:@"iPhone8,1"]) return UIDevice6siPhone;
if ([platform isEqualToString:@"iPhone8,1"]) return UIDevice6SiPhone;
if ([platform isEqualToString:@"iPhone8,2"]) return UIDevice6SPlusiPhone;
if ([platform isEqualToString:@"iPhone9,1"]) return UIDevice7iPhone;
if ([platform isEqualToString:@"iPhone9,3"]) return UIDevice7iPhone;
@ -696,6 +699,7 @@ NSString *suffix = @"";
if ([platform isEqualToString:@"iPhone12,1"]) return UIDevice11iPhone;
if ([platform isEqualToString:@"iPhone12,3"]) return UIDevice11ProiPhone;
if ([platform isEqualToString:@"iPhone12,5"]) return UIDevice11ProMaxiPhone;
if ([platform isEqualToString:@"iPhone12,8"]) return UIDeviceSE2Phone;
if ([platform isEqualToString:@"iPhone8,4"]) return UIDeviceSEPhone;
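
For context, the identifiers compared above ("iPhone8,4", "iPhone12,8", ...) are the kernel's hardware machine strings. A minimal Swift sketch, not part of this commit, of reading that identifier and mapping the entry added here:

import Foundation

// Reads the raw hardware identifier (e.g. "iPhone12,8"), the same value the
// isEqualToString: checks above compare against.
func machineIdentifier() -> String {
    var systemInfo = utsname()
    _ = uname(&systemInfo)
    return Mirror(reflecting: systemInfo.machine).children.reduce(into: "") { result, element in
        if let value = element.value as? Int8, value != 0 {
            result.append(Character(UnicodeScalar(UInt8(bitPattern: value))))
        }
    }
}

// Illustrative subset of the table; "iPhone12,8" is the entry this commit adds
// for the iPhone SE (2nd gen).
func deviceName(for identifier: String) -> String {
    switch identifier {
    case "iPhone8,4":  return "iPhone SE"
    case "iPhone12,8": return "iPhone SE (2nd gen)"
    default:           return identifier
    }
}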

View File

@ -451,12 +451,14 @@ private final class SemanticStatusNodeTransitionDrawingState {
private final class SemanticStatusNodeDrawingState: NSObject {
let background: UIColor
let foreground: UIColor
let hollow: Bool
let transitionState: SemanticStatusNodeTransitionDrawingState?
let drawingState: SemanticStatusNodeStateDrawingState
init(background: UIColor, foreground: UIColor, transitionState: SemanticStatusNodeTransitionDrawingState?, drawingState: SemanticStatusNodeStateDrawingState) {
init(background: UIColor, foreground: UIColor, hollow: Bool, transitionState: SemanticStatusNodeTransitionDrawingState?, drawingState: SemanticStatusNodeStateDrawingState) {
self.background = background
self.foreground = foreground
self.hollow = hollow
self.transitionState = transitionState
self.drawingState = drawingState
@ -495,6 +497,8 @@ public final class SemanticStatusNode: ASControlNode {
}
}
private let hollow: Bool
private var animator: ConstantDisplayLinkAnimator?
private var hasState: Bool = false
@ -502,9 +506,10 @@ public final class SemanticStatusNode: ASControlNode {
private var transtionContext: SemanticStatusNodeTransitionContext?
private var stateContext: SemanticStatusNodeStateContext
public init(backgroundNodeColor: UIColor, foregroundNodeColor: UIColor) {
public init(backgroundNodeColor: UIColor, foregroundNodeColor: UIColor, hollow: Bool = false) {
self.backgroundNodeColor = backgroundNodeColor
self.foregroundNodeColor = foregroundNodeColor
self.hollow = hollow
self.state = .none
self.stateContext = self.state.context(current: nil)
@ -584,7 +589,7 @@ public final class SemanticStatusNode: ASControlNode {
transitionState = SemanticStatusNodeTransitionDrawingState(transition: t, drawingState: transitionContext.previousStateContext.drawingState(transitionFraction: 1.0 - t))
}
return SemanticStatusNodeDrawingState(background: self.backgroundNodeColor, foreground: self.foregroundNodeColor, transitionState: transitionState, drawingState: self.stateContext.drawingState(transitionFraction: transitionFraction))
return SemanticStatusNodeDrawingState(background: self.backgroundNodeColor, foreground: self.foregroundNodeColor, hollow: self.hollow, transitionState: transitionState, drawingState: self.stateContext.drawingState(transitionFraction: transitionFraction))
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@ -606,5 +611,10 @@ public final class SemanticStatusNode: ASControlNode {
transitionState.drawingState.draw(context: context, size: bounds.size, foregroundColor: parameters.foreground)
}
parameters.drawingState.draw(context: context, size: bounds.size, foregroundColor: parameters.foreground)
if parameters.hollow {
context.setBlendMode(.clear)
context.fillEllipse(in: bounds.insetBy(dx: 8.0, dy: 8.0))
}
}
}
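
The new hollow flag erases the centre of the status circle with the .clear blend mode so the content behind the node stays visible inside the progress ring. A self-contained sketch of the same Core Graphics technique, using the 8-point inset from the code above (the helper name is illustrative):

import UIKit

// Draws a filled circle, then erases an inner ellipse with the .clear blend
// mode, which is what SemanticStatusNode.draw does when `hollow` is true.
func hollowStatusImage(size: CGSize, color: UIColor, inset: CGFloat = 8.0) -> UIImage {
    let renderer = UIGraphicsImageRenderer(size: size)
    return renderer.image { ctx in
        let context = ctx.cgContext
        context.setFillColor(color.cgColor)
        context.fillEllipse(in: CGRect(origin: .zero, size: size))
        context.setBlendMode(.clear)
        context.fillEllipse(in: CGRect(origin: .zero, size: size).insetBy(dx: inset, dy: inset))
    }
}

CallControllerButtonItemNode opts into this in the next file by constructing the node with hollow: true.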

View File

@ -13,6 +13,7 @@ import TelegramAudio
import AccountContext
import TelegramNotices
import AppBundle
import TooltipUI
protocol CallControllerNodeProtocol: class {
var isMuted: Bool { get set }
@ -27,6 +28,7 @@ protocol CallControllerNodeProtocol: class {
var present: ((ViewController) -> Void)? { get set }
var callEnded: ((Bool) -> Void)? { get set }
var dismissedInteractively: (() -> Void)? { get set }
var dismissAllTooltips: (() -> Void)? { get set }
func updateAudioOutputs(availableOutputs: [AudioSessionOutput], currentOutput: AudioSessionOutput?)
func updateCallState(_ callState: PresentationCallState)
@ -255,6 +257,17 @@ public final class CallController: ViewController {
}
}
self.controllerNode.dismissAllTooltips = { [weak self] in
if let strongSelf = self {
strongSelf.forEachController({ controller in
if let controller = controller as? TooltipScreen {
controller.dismiss()
}
return true
})
}
}
self.controllerNode.callEnded = { [weak self] didPresentRating in
if let strongSelf = self, !didPresentRating {
let _ = (combineLatest(strongSelf.sharedContext.accountManager.sharedData(keys: [ApplicationSpecificSharedDataKeys.callListSettings]), ApplicationSpecificNotice.getCallsTabTip(accountManager: strongSelf.sharedContext.accountManager))

View File

@ -134,7 +134,7 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
if content.hasProgress {
if self.statusNode == nil {
let statusNode = SemanticStatusNode(backgroundNodeColor: .white, foregroundNodeColor: .clear)
let statusNode = SemanticStatusNode(backgroundNodeColor: .white, foregroundNodeColor: .clear, hollow: true)
self.statusNode = statusNode
self.contentContainer.insertSubnode(statusNode, belowSubnode: self.contentNode)
statusNode.transitionToState(.progress(value: nil, cancelEnabled: false, appearance: SemanticStatusNodeState.ProgressAppearance(inset: 4.0, lineWidth: 3.0)), animated: false, completion: {})
@ -168,18 +168,19 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
let contentImage = generateImage(CGSize(width: self.largeButtonSize, height: self.largeButtonSize), contextGenerator: { size, context in
context.clear(CGRect(origin: CGPoint(), size: size))
var ellipseRect = CGRect(origin: CGPoint(), size: size)
var fillColor: UIColor = .clear
var imageColor: UIColor = .white
let imageColor: UIColor = .white
var drawOverMask = false
context.setBlendMode(.normal)
var imageScale: CGFloat = 1.0
let imageScale: CGFloat = 1.0
switch content.appearance {
case let .blurred(isFilled):
if content.hasProgress {
fillColor = .clear
imageColor = .black
drawOverMask = false
fillColor = .white
drawOverMask = true
context.setBlendMode(.copy)
ellipseRect = ellipseRect.insetBy(dx: 7.0, dy: 7.0)
} else {
if isFilled {
fillColor = .white
@ -187,8 +188,6 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
context.setBlendMode(.copy)
}
}
// let smallButtonSize: CGFloat = 60.0
// imageScale = self.largeButtonSize / smallButtonSize
case let .color(color):
switch color {
case .red:
@ -199,7 +198,7 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
}
context.setFillColor(fillColor.cgColor)
context.fillEllipse(in: CGRect(origin: CGPoint(), size: size))
context.fillEllipse(in: ellipseRect)
var image: UIImage?
@ -241,11 +240,6 @@ final class CallControllerButtonItemNode: HighlightTrackingButtonNode {
}
})
// if transition.isAnimated, let previousContent = previousContent, content.image == .camera, !previousContent.appearance.isFilled && content.appearance.isFilled {
// self.contentBackgroundNode.image = contentBackgroundImage
// self.contentBackgroundNode.layer.animateSpring(from: 0.01 as NSNumber, to: 1.0 as NSNumber, keyPath: "transform.scale", duration: 1.25, damping: 105.0)
// }
if transition.isAnimated, let contentBackgroundImage = contentBackgroundImage, let previousContent = self.contentBackgroundNode.image {
self.contentBackgroundNode.image = contentBackgroundImage
self.contentBackgroundNode.layer.animate(from: previousContent.cgImage!, to: contentBackgroundImage.cgImage!, keyPath: "contents", timingFunction: CAMediaTimingFunctionName.easeInEaseOut.rawValue, duration: 0.2)

View File

@ -149,7 +149,7 @@ final class CallControllerButtonsNode: ASDisplayNode {
}
}
let minSmallButtonSideInset: CGFloat = 34.0
let minSmallButtonSideInset: CGFloat = width > 320.0 ? 34.0 : 16.0
let maxSmallButtonSpacing: CGFloat = 34.0
let smallButtonSize: CGFloat = 60.0
let topBottomSpacing: CGFloat = 84.0

View File

@ -30,6 +30,8 @@ private final class CallVideoNode: ASDisplayNode {
private let videoView: PresentationCallVideoView
private var effectView: UIVisualEffectView?
private let videoPausedNode: ImmediateTextNode
private var isBlurred: Bool = false
private var currentCornerRadius: CGFloat = 0.0
@ -41,7 +43,7 @@ private final class CallVideoNode: ASDisplayNode {
private(set) var currentOrientation: PresentationCallVideoView.Orientation
init(videoView: PresentationCallVideoView, assumeReadyAfterTimeout: Bool, isReadyUpdated: @escaping () -> Void, orientationUpdated: @escaping () -> Void, isFlippedUpdated: @escaping (CallVideoNode) -> Void) {
init(videoView: PresentationCallVideoView, disabledText: String?, assumeReadyAfterTimeout: Bool, isReadyUpdated: @escaping () -> Void, orientationUpdated: @escaping () -> Void, isFlippedUpdated: @escaping (CallVideoNode) -> Void) {
self.isReadyUpdated = isReadyUpdated
self.isFlippedUpdated = isFlippedUpdated
@ -53,6 +55,10 @@ private final class CallVideoNode: ASDisplayNode {
self.currentOrientation = videoView.getOrientation()
self.videoPausedNode = ImmediateTextNode()
self.videoPausedNode.alpha = 0.0
self.videoPausedNode.maximumNumberOfLines = 3
super.init()
if #available(iOS 13.0, *) {
@ -63,6 +69,11 @@ private final class CallVideoNode: ASDisplayNode {
self.videoTransformContainer.view.addSubview(self.videoView.view)
self.addSubnode(self.videoTransformContainer)
if let disabledText = disabledText {
self.videoPausedNode.attributedText = NSAttributedString(string: disabledText, font: Font.regular(17.0), textColor: .white)
self.addSubnode(self.videoPausedNode)
}
self.videoView.setOnFirstFrameReceived { [weak self] aspectRatio in
Queue.mainQueue().async {
guard let strongSelf = self else {
@ -200,6 +211,9 @@ private final class CallVideoNode: ASDisplayNode {
}
}
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((size.width - videoPausedSize.width) / 2.0), y: floor((size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
let previousVideoFrame = self.videoTransformContainer.frame
self.videoTransformContainer.bounds = CGRect(origin: CGPoint(), size: size)
if transition.isAnimated && !videoFrame.height.isZero && !previousVideoFrame.height.isZero {
@ -244,6 +258,7 @@ private final class CallVideoNode: ASDisplayNode {
}
if animated {
UIView.animate(withDuration: 0.3, animations: {
self.videoPausedNode.alpha = 1.0
self.effectView?.effect = UIBlurEffect(style: light ? .light : .dark)
})
} else {
@ -252,6 +267,7 @@ private final class CallVideoNode: ASDisplayNode {
} else if let effectView = self.effectView {
self.effectView = nil
UIView.animate(withDuration: 0.3, animations: {
self.videoPausedNode.alpha = 0.0
effectView.effect = nil
}, completion: { [weak effectView] _ in
effectView?.removeFromSuperview()
@ -315,6 +331,9 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private var isRequestingVideo: Bool = false
private var animateRequestedVideoOnce: Bool = false
private var hiddenUIForActiveVideoCallOnce: Bool = false
private var hideUIForActiveVideoCallTimer: SwiftSignalKit.Timer?
private var displayedCameraConfirmation: Bool = false
private var displayedCameraTooltip: Bool = false
@ -327,7 +346,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
private let backButtonArrowNode: ASImageNode
private let backButtonNode: HighlightableButtonNode
private let statusNode: CallControllerStatusNode
private let videoPausedNode: ImmediateTextNode
private let toastNode: CallControllerToastContainerNode
private let buttonsNode: CallControllerButtonsNode
private var keyPreviewNode: CallControllerKeyPreviewNode?
@ -343,6 +361,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var isMuted: Bool = false {
didSet {
self.buttonsNode.isMuted = self.isMuted
self.updateToastContent()
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
@ -364,6 +383,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
var present: ((ViewController) -> Void)?
var dismissAllTooltips: (() -> Void)?
private var toastContent: CallControllerToastContent?
private var displayToastsAfterTimestamp: Double?
@ -416,9 +436,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.statusNode = CallControllerStatusNode()
self.videoPausedNode = ImmediateTextNode()
self.videoPausedNode.alpha = 0.0
self.buttonsNode = CallControllerButtonsNode(strings: self.presentationData.strings)
self.toastNode = CallControllerToastContainerNode(strings: self.presentationData.strings)
self.keyButtonNode = CallControllerKeyButton()
@ -451,7 +468,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.containerNode.addSubnode(self.imageNode)
self.containerNode.addSubnode(self.dimNode)
self.containerNode.addSubnode(self.statusNode)
self.containerNode.addSubnode(self.videoPausedNode)
self.containerNode.addSubnode(self.buttonsNode)
self.containerNode.addSubnode(self.toastNode)
self.containerNode.addSubnode(self.keyButtonNode)
@ -460,6 +476,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.buttonsNode.mute = { [weak self] in
self?.toggleMute?()
self?.cancelScheduledUIHiding()
}
self.buttonsNode.speaker = { [weak self] in
@ -467,6 +484,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
return
}
strongSelf.beginAudioOuputSelection?(strongSelf.hasVideoNodes)
strongSelf.cancelScheduledUIHiding()
}
self.buttonsNode.acceptOrEnd = { [weak self] in
@ -476,6 +494,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
switch callState.state {
case .active, .connecting, .reconnecting:
strongSelf.endCall?()
strongSelf.cancelScheduledUIHiding()
case .requesting:
strongSelf.endCall?()
case .ringing:
@ -517,6 +536,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
} else {
strongSelf.call.disableVideo()
strongSelf.cancelScheduledUIHiding()
}
default:
break
@ -537,6 +557,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
}
strongSelf.cancelScheduledUIHiding()
}
self.keyButtonNode.addTarget(self, action: #selector(self.keyPressed), forControlEvents: .touchUpInside)
@ -553,7 +574,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
func displayCameraTooltip() {
guard let location = self.buttonsNode.videoButtonFrame().flatMap({ frame -> CGRect in
guard self.pictureInPictureTransitionFraction.isZero, let location = self.buttonsNode.videoButtonFrame().flatMap({ frame -> CGRect in
return self.buttonsNode.view.convert(frame, to: self.view)
}) else {
return
@ -605,8 +626,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
self.videoPausedNode.attributedText = NSAttributedString(string: self.presentationData.strings.Call_RemoteVideoPaused(peer.compactDisplayTitle).0, font: Font.regular(17.0), textColor: .white)
if let (layout, navigationBarHeight) = self.validLayout {
self.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .immediate)
}
@ -671,9 +690,12 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
strongSelf.expandedVideoNode = incomingVideoNode
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
strongSelf.updateDimVisibility()
strongSelf.maybeScheduleUIHidingForActiveVideoCall()
}
let incomingVideoNode = CallVideoNode(videoView: incomingVideoView, assumeReadyAfterTimeout: false, isReadyUpdated: {
let incomingVideoNode = CallVideoNode(videoView: incomingVideoView, disabledText: strongSelf.presentationData.strings.Call_RemoteVideoPaused(strongSelf.peer?.compactDisplayTitle ?? "").0, assumeReadyAfterTimeout: false, isReadyUpdated: {
if delayUntilInitialized {
Queue.mainQueue().after(0.1, {
applyNode()
@ -752,9 +774,12 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
strongSelf.containerNode.insertSubnode(outgoingVideoNode, belowSubnode: strongSelf.dimNode)
}
strongSelf.updateButtonsMode(transition: .animated(duration: 0.4, curve: .spring))
strongSelf.updateDimVisibility()
strongSelf.maybeScheduleUIHidingForActiveVideoCall()
}
let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, assumeReadyAfterTimeout: true, isReadyUpdated: {
let outgoingVideoNode = CallVideoNode(videoView: outgoingVideoView, disabledText: nil, assumeReadyAfterTimeout: true, isReadyUpdated: {
if delayUntilInitialized {
Queue.mainQueue().after(0.4, {
applyNode()
@ -832,15 +857,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
isActive = true
}
incomingVideoNode.updateIsBlurred(isBlurred: !isActive)
if isActive != self.videoPausedNode.alpha.isZero {
if isActive {
self.videoPausedNode.alpha = 0.0
self.videoPausedNode.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
} else {
self.videoPausedNode.alpha = 1.0
self.videoPausedNode.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
}
}
}
@ -904,8 +920,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
statusValue = .timer({ value in
if isReconnecting {
statusValue = .timer({ value, measure in
if isReconnecting || (self.outgoingVideoViewRequested && value == "00:00" && !measure) {
return strings.Call_StatusConnecting
} else {
return value
@ -935,6 +951,34 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
}
self.updateToastContent()
self.updateButtonsMode()
self.updateDimVisibility()
if self.incomingVideoViewRequested && self.outgoingVideoViewRequested {
self.displayedCameraTooltip = true
self.displayedCameraConfirmation = true
}
if self.incomingVideoViewRequested && !self.outgoingVideoViewRequested && !self.displayedCameraTooltip && (self.toastContent?.isEmpty ?? true) {
self.displayedCameraTooltip = true
Queue.mainQueue().after(2.0) {
self.displayCameraTooltip()
}
}
if case let .terminated(id, _, reportRating) = callState.state, let callId = id {
let presentRating = reportRating || self.forceReportRating
if presentRating {
self.presentCallRating?(callId)
}
self.callEnded?(presentRating)
}
}
private func updateToastContent() {
guard let callState = self.callState else {
return
}
if case .terminating = callState.state {
} else if case .terminated = callState.state {
} else {
@ -961,27 +1005,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
self.toastContent = toastContent
}
self.updateButtonsMode()
self.updateDimVisibility()
if self.incomingVideoViewRequested && self.outgoingVideoViewRequested {
self.displayedCameraTooltip = true
}
if self.incomingVideoViewRequested && !self.outgoingVideoViewRequested && !self.displayedCameraTooltip && (self.toastContent?.isEmpty ?? true) {
self.displayedCameraTooltip = true
Queue.mainQueue().after(2.0) {
self.displayCameraTooltip()
}
}
if case let .terminated(id, _, reportRating) = callState.state, let callId = id {
let presentRating = reportRating || self.forceReportRating
if presentRating {
self.presentCallRating?(callId)
}
self.callEnded?(presentRating)
}
}
private func updateDimVisibility(transition: ContainedViewLayoutTransition = .animated(duration: 0.3, curve: .easeInOut)) {
@ -998,8 +1021,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
if visible != currentVisible {
let color = visible ? UIColor(rgb: 0x000000, alpha: 0.3) : UIColor.clear
let image: UIImage? = visible ? nil : generateGradientImage(size: CGSize(width: 1.0, height: 640.0), colors: [UIColor.black.withAlphaComponent(0.3), UIColor.clear, UIColor.clear, UIColor.black.withAlphaComponent(0.3)], locations: [0.0, 0.22, 0.7, 1.0])
if transition.isAnimated {
UIView.transition(with: self.dimNode.view, duration: 0.3, options: .transitionCrossDissolve, animations: {
if case let .animated(duration, _) = transition {
UIView.transition(with: self.dimNode.view, duration: duration, options: .transitionCrossDissolve, animations: {
self.dimNode.backgroundColor = color
self.dimNode.image = image
}, completion: nil)
@ -1007,8 +1030,41 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.dimNode.backgroundColor = color
self.dimNode.image = image
}
self.statusNode.isHidden = !visible
}
self.statusNode.setVisible(visible || self.keyPreviewNode != nil, transition: transition)
}
private func maybeScheduleUIHidingForActiveVideoCall() {
guard let callState = self.callState, case .active = callState.state, self.incomingVideoNodeValue != nil && self.outgoingVideoNodeValue != nil, !self.hiddenUIForActiveVideoCallOnce && self.keyPreviewNode == nil else {
return
}
let timer = SwiftSignalKit.Timer(timeout: 3.0, repeat: false, completion: { [weak self] in
if let strongSelf = self {
var updated = false
if let callState = strongSelf.callState, !strongSelf.isUIHidden {
switch callState.state {
case .active, .connecting, .reconnecting:
strongSelf.isUIHidden = true
updated = true
default:
break
}
}
if updated, let (layout, navigationBarHeight) = strongSelf.validLayout {
strongSelf.containerLayoutUpdated(layout, navigationBarHeight: navigationBarHeight, transition: .animated(duration: 0.3, curve: .easeInOut))
}
strongSelf.hideUIForActiveVideoCallTimer = nil
}
}, queue: Queue.mainQueue())
timer.start()
self.hideUIForActiveVideoCallTimer = timer
self.hiddenUIForActiveVideoCallOnce = true
}
private func cancelScheduledUIHiding() {
self.hideUIForActiveVideoCallTimer?.invalidate()
self.hideUIForActiveVideoCallTimer = nil
}
private var buttonsTerminationMode: CallControllerButtonsMode?
@ -1299,9 +1355,6 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
transition.updateFrame(node: self.statusNode, frame: CGRect(origin: CGPoint(x: 0.0, y: statusOffset), size: CGSize(width: layout.size.width, height: statusHeight)))
transition.updateAlpha(node: self.statusNode, alpha: overlayAlpha)
let videoPausedSize = self.videoPausedNode.updateLayout(CGSize(width: layout.size.width - 16.0, height: 100.0))
transition.updateFrame(node: self.videoPausedNode, frame: CGRect(origin: CGPoint(x: floor((layout.size.width - videoPausedSize.width) / 2.0), y: floor((layout.size.height - videoPausedSize.height) / 2.0)), size: videoPausedSize))
transition.updateFrame(node: self.toastNode, frame: CGRect(origin: CGPoint(x: 0.0, y: toastOriginY), size: CGSize(width: layout.size.width, height: toastHeight)))
transition.updateFrame(node: self.buttonsNode, frame: CGRect(origin: CGPoint(x: 0.0, y: buttonsOriginY), size: CGSize(width: layout.size.width, height: buttonsHeight)))
transition.updateAlpha(node: self.buttonsNode, alpha: overlayAlpha)
@ -1424,6 +1477,8 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self.keyButtonNode.isHidden = true
keyPreviewNode.animateIn(from: self.keyButtonNode.frame, fromNode: self.keyButtonNode)
}
self.updateDimVisibility()
}
}
@ -1434,6 +1489,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
self?.keyButtonNode.isHidden = false
keyPreviewNode?.removeFromSupernode()
})
self.updateDimVisibility()
} else if self.hasVideoNodes {
if let (layout, navigationHeight) = self.validLayout {
self.pictureInPictureTransitionFraction = 1.0
@ -1669,7 +1725,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
}
if self.pictureInPictureTransitionFraction.isZero, let expandedVideoNode = self.expandedVideoNode, let minimizedVideoNode = self.minimizedVideoNode, minimizedVideoNode.frame.contains(location), expandedVideoNode.frame != minimizedVideoNode.frame {
self.minimizedVideoInitialPosition = minimizedVideoNode.position
} else if let _ = self.minimizedVideoNode {
} else if self.hasVideoNodes {
self.minimizedVideoInitialPosition = nil
if !self.pictureInPictureTransitionFraction.isZero {
self.pictureInPictureGestureState = .dragging(initialPosition: self.containerTransformationNode.position, draggingPosition: self.containerTransformationNode.position)
@ -1679,6 +1735,7 @@ final class CallControllerNode: ViewControllerTracingNode, CallControllerNodePro
} else {
self.pictureInPictureGestureState = .none
}
self.dismissAllTooltips?()
case .changed:
if let minimizedVideoNode = self.minimizedVideoNode, let minimizedVideoInitialPosition = self.minimizedVideoInitialPosition {
let translation = recognizer.translation(in: self.view)

View File

@ -12,7 +12,7 @@ private let regularStatusFont = Font.regular(18.0)
enum CallControllerStatusValue: Equatable {
case text(string: String, displayLogo: Bool)
case timer((String) -> String, Double)
case timer((String, Bool) -> String, Double)
static func ==(lhs: CallControllerStatusValue, rhs: CallControllerStatusValue) -> Bool {
switch lhs {
@ -133,6 +133,12 @@ final class CallControllerStatusNode: ASDisplayNode {
self.statusTimer?.invalidate()
}
func setVisible(_ visible: Bool, transition: ContainedViewLayoutTransition) {
let alpha: CGFloat = visible ? 1.0 : 0.0
transition.updateAlpha(node: self.titleNode, alpha: alpha)
transition.updateAlpha(node: self.statusContainerNode, alpha: alpha)
}
func updateLayout(constrainedWidth: CGFloat, transition: ContainedViewLayoutTransition) -> CGFloat {
self.validLayoutWidth = constrainedWidth
@ -169,8 +175,8 @@ final class CallControllerStatusNode: ASDisplayNode {
durationString = String(format: "%02d:%02d", arguments: [(duration / 60) % 60, duration % 60])
measureDurationString = "00:00"
}
statusText = format(durationString)
statusMeasureText = format(measureDurationString)
statusText = format(durationString, false)
statusMeasureText = format(measureDurationString, true)
if self.reception != nil {
statusOffset += 8.0
}
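
The timer case now takes a (String, Bool) -> String closure; the Bool is true on the measurement pass (where the "00:00" placeholder sizes the node) and false on the display pass, which is how CallControllerNode above shows a connecting status for an outgoing video call without changing the measured width. A small sketch of such a closure ("Connecting..." stands in for Call_StatusConnecting):

// Sketch only; "Connecting..." stands in for presentationData.strings.Call_StatusConnecting.
let format: (String, Bool) -> String = { value, measure in
    // Substitute the placeholder text only for display, never for measurement,
    // so the measured width stays that of a real "00:00" timer.
    (value == "00:00" && !measure) ? "Connecting..." : value
}

let shown = format("00:00", false)  // "Connecting..."
let sized = format("00:00", true)   // "00:00"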

View File

@ -6,6 +6,7 @@ import SwiftSignalKit
import TelegramPresentationData
private let labelFont = Font.regular(17.0)
private let smallLabelFont = Font.regular(15.0)
private enum ToastDescription: Equatable {
enum Key: Hashable {
@ -73,7 +74,6 @@ final class CallControllerToastContainerNode: ASDisplayNode {
transition = .immediate
}
let previousContent = self.appliedContent
self.appliedContent = content
let spacing: CGFloat = 18.0
@ -249,10 +249,12 @@ private class CallControllerToastItemNode: ASDisplayNode {
}
func update(width: CGFloat, content: Content, transition: ContainedViewLayoutTransition) -> CGFloat {
let inset: CGFloat = 32.0
let inset: CGFloat = 30.0
let isNarrowScreen = width <= 320.0
let font = isNarrowScreen ? smallLabelFont : labelFont
let topInset: CGFloat = isNarrowScreen ? 5.0 : 4.0
if self.currentContent != content || self.currentWidth != width {
let previousContent = self.currentContent
self.currentContent = content
self.currentWidth = width
@ -272,25 +274,23 @@ private class CallControllerToastItemNode: ASDisplayNode {
} else {
self.iconNode.image = image
}
self.textNode.attributedText = NSAttributedString(string: content.text, font: font, textColor: .white)
if previousContent?.text != content.text {
self.textNode.attributedText = NSAttributedString(string: content.text, font: Font.regular(17.0), textColor: .white)
let iconSize = CGSize(width: 44.0, height: 28.0)
let iconSpacing: CGFloat = 2.0
let textSize = self.textNode.updateLayout(CGSize(width: width - inset * 2.0 - iconSize.width - iconSpacing, height: 100.0))
let backgroundSize = CGSize(width: iconSize.width + iconSpacing + textSize.width + 6.0 * 2.0, height: max(28.0, textSize.height + 4.0 * 2.0))
let backgroundFrame = CGRect(origin: CGPoint(x: floor((width - backgroundSize.width) / 2.0), y: 0.0), size: backgroundSize)
transition.updateFrame(node: self.clipNode, frame: backgroundFrame)
transition.updateFrame(view: self.effectView, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
self.iconNode.frame = CGRect(origin: CGPoint(), size: iconSize)
self.textNode.frame = CGRect(origin: CGPoint(x: iconSize.width + iconSpacing, y: 4.0), size: textSize)
self.currentHeight = backgroundSize.height
}
let iconSize = CGSize(width: 44.0, height: 28.0)
let iconSpacing: CGFloat = isNarrowScreen ? 0.0 : 1.0
let textSize = self.textNode.updateLayout(CGSize(width: width - inset * 2.0 - iconSize.width - iconSpacing, height: 100.0))
let backgroundSize = CGSize(width: iconSize.width + iconSpacing + textSize.width + 6.0 * 2.0, height: max(28.0, textSize.height + 4.0 * 2.0))
let backgroundFrame = CGRect(origin: CGPoint(x: floor((width - backgroundSize.width) / 2.0), y: 0.0), size: backgroundSize)
transition.updateFrame(node: self.clipNode, frame: backgroundFrame)
transition.updateFrame(view: self.effectView, frame: CGRect(origin: CGPoint(), size: backgroundFrame.size))
self.iconNode.frame = CGRect(origin: CGPoint(), size: iconSize)
self.textNode.frame = CGRect(origin: CGPoint(x: iconSize.width + iconSpacing, y: topInset), size: textSize)
self.currentHeight = backgroundSize.height
}
return self.currentHeight ?? 28.0
}

View File

@ -66,6 +66,7 @@ final class LegacyCallControllerNode: ASDisplayNode, CallControllerNodeProtocol
var callEnded: ((Bool) -> Void)?
var dismissedInteractively: (() -> Void)?
var present: ((ViewController) -> Void)?
var dismissAllTooltips: (() -> Void)?
init(sharedContext: SharedAccountContext, account: Account, presentationData: PresentationData, statusBar: StatusBar, debugInfo: Signal<(String, String), NoError>, shouldStayHiddenUntilConnection: Bool = false, easyDebugAccess: Bool, call: PresentationCall) {
self.sharedContext = sharedContext

View File

@ -641,7 +641,7 @@ public final class PresentationCallImpl: PresentationCall {
var previousBatteryLevelIsLow = false
let timer = SwiftSignalKit.Timer(timeout: 30.0, repeat: true, completion: {
let batteryLevelIsLow = device.batteryLevel < 0.1 && device.batteryState != .charging
let batteryLevelIsLow = device.batteryLevel >= 0.0 && device.batteryLevel < 0.1 && device.batteryState != .charging
if batteryLevelIsLow != previousBatteryLevelIsLow {
previousBatteryLevelIsLow = batteryLevelIsLow
subscriber.putNext(batteryLevelIsLow)
@ -789,7 +789,7 @@ public final class PresentationCallImpl: PresentationCall {
}
if value {
if strongSelf.isVideo {
DeviceAccess.authorizeAccess(to: .camera, presentationData: presentationData, present: { c, a in
DeviceAccess.authorizeAccess(to: .camera(.videoCall), presentationData: presentationData, present: { c, a in
present(c, a)
}, openSettings: {
openSettings()
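
On the battery-level hunk above: UIDevice.batteryLevel returns -1.0 when battery monitoring is disabled or the level is unknown, so without the added >= 0.0 guard an unknown level would have been reported as low. A minimal stand-alone version of the corrected predicate (the helper name is illustrative):

import UIKit

func isBatteryLow(_ device: UIDevice = .current) -> Bool {
    // batteryLevel is -1.0 when unknown; monitoring must be enabled to get a real value.
    device.isBatteryMonitoringEnabled = true
    return device.batteryLevel >= 0.0
        && device.batteryLevel < 0.1
        && device.batteryState != .charging
}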

View File

@ -374,7 +374,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
openSettings()
}, { value in
if isVideo && value {
DeviceAccess.authorizeAccess(to: .camera, presentationData: presentationData, present: { c, a in
DeviceAccess.authorizeAccess(to: .camera(.videoCall), presentationData: presentationData, present: { c, a in
present(c, a)
}, openSettings: {
openSettings()
@ -450,7 +450,7 @@ public final class PresentationCallManagerImpl: PresentationCallManager {
openSettings()
}, { value in
if isVideo && value {
DeviceAccess.authorizeAccess(to: .camera, presentationData: presentationData, present: { c, a in
DeviceAccess.authorizeAccess(to: .camera(.videoCall), presentationData: presentationData, present: { c, a in
present(c, a)
}, openSettings: {
openSettings()

View File

@ -4022,7 +4022,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
return
}
if isVideo {
DeviceAccess.authorizeAccess(to: .camera, presentationData: strongSelf.presentationData, present: { c, a in
DeviceAccess.authorizeAccess(to: .camera(.video), presentationData: strongSelf.presentationData, present: { c, a in
self?.present(c, in: .window(.root), with: a)
}, openSettings: {
self?.context.sharedContext.applicationBindings.openSettings()

View File

@ -259,7 +259,7 @@ final class WalletContextImpl: WalletContext {
func authorizeAccessToCamera(completion: @escaping () -> Void) {
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
DeviceAccess.authorizeAccess(to: .camera, presentationData: presentationData, present: { c, a in
DeviceAccess.authorizeAccess(to: .camera(.video), presentationData: presentationData, present: { c, a in
c.presentationArguments = a
self.context.sharedContext.mainWindow?.present(c, on: .root)
}, openSettings: { [weak self] in