Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit cd4940865d: Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
@ -9343,3 +9343,5 @@ Sorry for the inconvenience.";
"ChatList.PremiumRestoreDiscountTitle" = "Get Premium back with up to %@ off";
"ChatList.PremiumRestoreDiscountText" = "Your Telegram Premium has recently expired. Tap here to extend it.";
"Login.ErrorAppOutdated" = "Please update Telegram to the latest version to log in.";
@ -221,6 +221,13 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
case .phoneLimitExceeded:
text = strongSelf.presentationData.strings.Login_PhoneFloodError
actions.append(TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {}))
case .appOutdated:
text = strongSelf.presentationData.strings.Login_ErrorAppOutdated
let updateUrl = strongSelf.presentationData.strings.InviteText_URL
let sharedContext = strongSelf.sharedContext
actions.append(TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
sharedContext.applicationBindings.openUrl(updateUrl)
}))
case .phoneBanned:
text = strongSelf.presentationData.strings.Login_PhoneBannedError
actions.append(TextAlertAction(type: .genericAction, title: strongSelf.presentationData.strings.Common_OK, action: {}))
@ -581,6 +588,8 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
if let strongSelf = self, let controller = controller {
controller.inProgress = false
var actions: [TextAlertAction] = [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {})]
let text: String
switch error {
case .limitExceeded:
@ -589,6 +598,13 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
text = strongSelf.presentationData.strings.Login_InvalidPhoneError
case .phoneLimitExceeded:
text = strongSelf.presentationData.strings.Login_PhoneFloodError
case .appOutdated:
text = strongSelf.presentationData.strings.Login_ErrorAppOutdated
let updateUrl = strongSelf.presentationData.strings.InviteText_URL
let sharedContext = strongSelf.sharedContext
actions = [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
sharedContext.applicationBindings.openUrl(updateUrl)
})]
case .phoneBanned:
text = strongSelf.presentationData.strings.Login_PhoneBannedError
case .generic:
@ -597,7 +613,7 @@ public final class AuthorizationSequenceController: NavigationController, MFMail
text = strongSelf.presentationData.strings.Login_NetworkError
}
controller.present(standardTextAlertController(theme: AlertControllerTheme(presentationData: strongSelf.presentationData), title: nil, text: text, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {})]), in: .window(.root))
controller.present(standardTextAlertController(theme: AlertControllerTheme(presentationData: strongSelf.presentationData), title: nil, text: text, actions: actions), in: .window(.root))
}
}))
}
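Note: the two hunks above wire the new .appOutdated error into the login alert flow; the OK action now opens the update URL instead of only dismissing. A condensed sketch of the resulting handling, with presentAlert used as a placeholder for the project's alert presenter (the strings accessors are the ones shown in the hunks):

    // Sketch only: map an authorization error to alert text plus actions.
    func handleAuthorizationError(_ error: AuthorizationCodeRequestError) {
        var actions: [TextAlertAction] = [
            TextAlertAction(type: .defaultAction, title: strings.Common_OK, action: {})
        ]
        let text: String
        switch error {
        case .appOutdated:
            text = strings.Login_ErrorAppOutdated
            let updateUrl = strings.InviteText_URL
            // Tapping OK sends the user to the download page.
            actions = [TextAlertAction(type: .defaultAction, title: strings.Common_OK, action: {
                sharedContext.applicationBindings.openUrl(updateUrl)
            })]
        case .phoneBanned:
            text = strings.Login_PhoneBannedError
        default:
            text = strings.Login_NetworkError
        }
        presentAlert(text, actions)   // placeholder presenter
    }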
@ -386,8 +386,15 @@ public struct Transition {
let t = layer.presentation()?.transform ?? layer.transform
let currentScale = sqrt((t.m11 * t.m11) + (t.m12 * t.m12) + (t.m13 * t.m13))
if currentScale == scale {
completion?(true)
return
if let animation = layer.animation(forKey: "transform.scale") as? CABasicAnimation, let toValue = animation.toValue as? NSNumber {
if toValue.doubleValue == scale {
completion?(true)
return
}
} else {
completion?(true)
return
}
}
switch self.animation {
case .none:
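Note: this hunk tightens the early exit for scale animations. Previously the method bailed out whenever the current presentation scale matched the target; now it only bails if there is no in-flight transform.scale animation heading toward a different value. A condensed sketch of the resulting control flow (layer and scale as in the hunk):

    // Sketch: skip the animation only when nothing is animating toward another scale.
    if currentScale == scale {
        if let animation = layer.animation(forKey: "transform.scale") as? CABasicAnimation,
           let toValue = animation.toValue as? NSNumber {
            if toValue.doubleValue == scale {
                completion?(true)   // already animating to the same value
                return
            }
            // otherwise fall through and restart the animation toward `scale`
        } else {
            completion?(true)       // static and already at the target value
            return
        }
    }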
@ -10,12 +10,23 @@ private let nullAction = NullActionClass()
open class HierarchyTrackingLayer: CALayer {
public var didEnterHierarchy: (() -> Void)?
public var didExitHierarchy: (() -> Void)?
public var isInHierarchyUpdated: ((Bool) -> Void)?
public private(set) var isInHierarchy: Bool = false {
didSet {
if self.isInHierarchy != oldValue {
self.isInHierarchyUpdated?(self.isInHierarchy)
}
}
}
override open func action(forKey event: String) -> CAAction? {
if event == kCAOnOrderIn {
self.didEnterHierarchy?()
self.isInHierarchy = true
} else if event == kCAOnOrderOut {
self.didExitHierarchy?()
self.isInHierarchy = false
}
return nullAction
}
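Note: HierarchyTrackingLayer piggybacks on action(forKey:), which Core Animation invokes with kCAOnOrderIn/kCAOnOrderOut when the layer joins or leaves a layer tree, and the hunk additionally exposes that state through isInHierarchy and isInHierarchyUpdated. A minimal usage sketch (the callbacks are placeholders):

    // Sketch: pause expensive work while the layer is detached from the hierarchy.
    let trackingLayer = HierarchyTrackingLayer()
    hostView.layer.addSublayer(trackingLayer)

    trackingLayer.isInHierarchyUpdated = { isInHierarchy in
        if isInHierarchy {
            startAnimations()   // placeholder
        } else {
            stopAnimations()    // placeholder
        }
    }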
@ -7,6 +7,17 @@ import TelegramPresentationData
import AccountContext
import ComponentDisplayAdapters
private func resolveTheme(baseTheme: PresentationTheme, theme: ViewControllerComponentContainer.Theme) -> PresentationTheme {
switch theme {
case .default:
return baseTheme
case let .custom(value):
return value
case .dark:
return customizeDefaultDarkPresentationTheme(theme: defaultDarkPresentationTheme, editing: false, title: nil, accentColor: baseTheme.list.itemAccentColor, backgroundColors: [], bubbleColors: [], animateBubbleColors: false, wallpaper: nil, baseColor: nil)
}
}
open class ViewControllerComponentContainer: ViewController {
public enum NavigationBarAppearance {
case none
@ -25,6 +36,12 @@ open class ViewControllerComponentContainer: ViewController {
case modal
}
public enum Theme {
case `default`
case dark
case custom(PresentationTheme)
}
public final class Environment: Equatable {
public let statusBarHeight: CGFloat
public let navigationHeight: CGFloat
@ -121,19 +138,21 @@ open class ViewControllerComponentContainer: ViewController {
private weak var controller: ViewControllerComponentContainer?
private var component: AnyComponent<ViewControllerComponentContainer.Environment>
var theme: PresentationTheme?
let theme: Theme
var resolvedTheme: PresentationTheme
public let hostView: ComponentHostView<ViewControllerComponentContainer.Environment>
private var currentIsVisible: Bool = false
private var currentLayout: (layout: ContainerViewLayout, navigationHeight: CGFloat)?
init(context: AccountContext, controller: ViewControllerComponentContainer, component: AnyComponent<ViewControllerComponentContainer.Environment>, theme: PresentationTheme?) {
init(context: AccountContext, controller: ViewControllerComponentContainer, component: AnyComponent<ViewControllerComponentContainer.Environment>, theme: Theme) {
self.presentationData = context.sharedContext.currentPresentationData.with { $0 }
self.controller = controller
self.component = component
self.theme = theme
self.resolvedTheme = resolveTheme(baseTheme: self.presentationData.theme, theme: theme)
self.hostView = ComponentHostView()
super.init()
@ -152,7 +171,7 @@ open class ViewControllerComponentContainer: ViewController {
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
isVisible: self.currentIsVisible,
theme: self.theme ?? self.presentationData.theme,
theme: self.resolvedTheme,
strings: self.presentationData.strings,
dateTimeFormat: self.presentationData.dateTimeFormat,
controller: { [weak self] in
@ -197,13 +216,13 @@ open class ViewControllerComponentContainer: ViewController {
}
private let context: AccountContext
private var theme: PresentationTheme?
private var theme: Theme
private let component: AnyComponent<ViewControllerComponentContainer.Environment>
private var presentationDataDisposable: Disposable?
public private(set) var validLayout: ContainerViewLayout?
public init<C: Component>(context: AccountContext, component: C, navigationBarAppearance: NavigationBarAppearance, statusBarStyle: StatusBarStyle = .default, presentationMode: PresentationMode = .default, theme: PresentationTheme? = nil) where C.EnvironmentType == ViewControllerComponentContainer.Environment {
public init<C: Component>(context: AccountContext, component: C, navigationBarAppearance: NavigationBarAppearance, statusBarStyle: StatusBarStyle = .default, presentationMode: PresentationMode = .default, theme: Theme = .default) where C.EnvironmentType == ViewControllerComponentContainer.Environment {
self.context = context
self.component = AnyComponent(component)
self.theme = theme
@ -230,6 +249,7 @@ open class ViewControllerComponentContainer: ViewController {
}
strongSelf.node.presentationData = presentationData.withUpdated(theme: theme)
strongSelf.node.resolvedTheme = resolveTheme(baseTheme: presentationData.theme, theme: strongSelf.theme)
switch statusBarStyle {
case .none:
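Note: the container now takes a small Theme enum (.default, .dark, .custom) instead of an optional PresentationTheme, and resolves it against the current presentation data via resolveTheme, re-resolving whenever presentation data changes. Construction then looks roughly like this (the component type is a placeholder):

    // Sketch: callers pick an abstract theme; the container resolves the
    // concrete PresentationTheme itself.
    let controller = ViewControllerComponentContainer(
        context: context,
        component: MyScreenComponent(context: context),   // placeholder component
        navigationBarAppearance: .transparent,
        theme: .dark
    )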
@ -1384,7 +1384,9 @@ private func debugControllerEntries(sharedContext: SharedAccountContext, present
entries.append(.logTranslationRecognition(experimentalSettings.logLanguageRecognition))
entries.append(.resetTranslationStates)
entries.append(.storiesExperiment(experimentalSettings.storiesExperiment))
if case .internal = sharedContext.applicationBindings.appBuildType {
entries.append(.storiesExperiment(experimentalSettings.storiesExperiment))
}
entries.append(.playlistPlayback(experimentalSettings.playlistPlayback))
entries.append(.enableQuickReactionSwitch(!experimentalSettings.disableQuickReaction))
}
@ -116,7 +116,10 @@ public final class NavigationContainer: ASDisplayNode, UIGestureRecognizerDelega
private var currentKeyboardLeftEdge: CGFloat = 0.0
private var additionalKeyboardLeftEdgeOffset: CGFloat = 0.0
var statusBarStyle: StatusBarStyle = .Ignore
var statusBarStyle: StatusBarStyle = .Ignore {
didSet {
}
}
var statusBarStyleUpdated: ((ContainedViewLayoutTransition) -> Void)?
@ -792,6 +792,18 @@ open class NavigationController: UINavigationController, ContainableController,
} else {
modalContainer.keyboardViewManager = nil
modalContainer.canHaveKeyboardFocus = false
if modalContainer.isFlat {
let controllerStatusBarStyle = modalContainer.container.statusBarStyle
switch controllerStatusBarStyle {
case .Black, .White, .Hide:
if topVisibleModalContainerWithStatusBar == nil {
topVisibleModalContainerWithStatusBar = modalContainer
}
case .Ignore:
break
}
}
}
previousModalContainer = modalContainer
if isStandaloneModal {
@ -2,6 +2,7 @@
#import <CommonCrypto/CommonDigest.h>
#import <sys/stat.h>
#import <VideoToolbox/VideoToolbox.h>
#import "GPUImageContext.h"
@ -1319,13 +1320,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
AVVideoPixelAspectRatioVerticalSpacingKey: @3
};
NSDictionary *codecSettings = @
{
AVVideoAverageBitRateKey: @([self _videoBitrateKbpsForPreset:preset] * 1000),
AVVideoCleanApertureKey: videoCleanApertureSettings,
AVVideoPixelAspectRatioKey: videoAspectRatioSettings,
AVVideoExpectedSourceFrameRateKey: @(frameRate)
};
NSInteger videoBitrate = [self _videoBitrateKbpsForPreset:preset] * 1000;
NSDictionary *hdVideoProperties = @
{
@ -1334,23 +1329,59 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2,
};
#if TARGET_IPHONE_SIMULATOR
return @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height)
};
bool useH265 = false;
#if DEBUG
//videoBitrate = 800 * 1000;
useH265 = false;
#endif
return @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height),
AVVideoColorPropertiesKey: hdVideoProperties
};
if (useH265) {
NSDictionary *codecSettings = @
{
AVVideoAverageBitRateKey: @(videoBitrate),
AVVideoCleanApertureKey: videoCleanApertureSettings,
AVVideoPixelAspectRatioKey: videoAspectRatioSettings,
AVVideoExpectedSourceFrameRateKey: @(frameRate),
AVVideoProfileLevelKey: (__bridge NSString *)kVTProfileLevel_HEVC_Main_AutoLevel
};
return @
{
AVVideoCodecKey: AVVideoCodecTypeHEVC,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height),
AVVideoColorPropertiesKey: hdVideoProperties
};
} else {
NSDictionary *codecSettings = @
{
AVVideoAverageBitRateKey: @(videoBitrate),
AVVideoCleanApertureKey: videoCleanApertureSettings,
AVVideoPixelAspectRatioKey: videoAspectRatioSettings,
AVVideoExpectedSourceFrameRateKey: @(frameRate),
AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
};
#if TARGET_IPHONE_SIMULATOR
return @
{
AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height)
};
#endif
return @
{
AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height),
AVVideoColorPropertiesKey: hdVideoProperties
};
}
}
+ (NSInteger)_videoBitrateKbpsForPreset:(TGMediaVideoConversionPreset)preset
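Note: the rewritten settings builder computes the bitrate once, adds a currently disabled HEVC branch, and otherwise emits H.264 high-profile CABAC settings, omitting the color-properties dictionary only in simulator builds. A hedged Swift equivalent of the selection logic (the key names are the AVFoundation/VideoToolbox constants used above; dimensions, frameRate, and videoBitrate are assumed inputs, and this is not the project's actual Objective-C method):

    import AVFoundation
    import VideoToolbox

    // Sketch of the codec selection performed by the hunk above.
    func videoOutputSettings(useH265: Bool, dimensions: CGSize, frameRate: Int, videoBitrate: Int) -> [String: Any] {
        var compression: [String: Any] = [
            AVVideoAverageBitRateKey: videoBitrate,
            AVVideoExpectedSourceFrameRateKey: frameRate
        ]
        let codec: AVVideoCodecType
        if useH265 {
            codec = .hevc
            compression[AVVideoProfileLevelKey] = kVTProfileLevel_HEVC_Main_AutoLevel as String
        } else {
            codec = .h264
            compression[AVVideoProfileLevelKey] = AVVideoProfileLevelH264HighAutoLevel
            compression[AVVideoH264EntropyModeKey] = AVVideoH264EntropyModeCABAC
        }
        return [
            AVVideoCodecKey: codec,
            AVVideoCompressionPropertiesKey: compression,
            AVVideoWidthKey: Int(dimensions.width),
            AVVideoHeightKey: Int(dimensions.height)
        ]
    }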
@ -448,7 +448,7 @@ public final class CreateExternalMediaStreamScreen: ViewControllerComponentConta
self.peerId = peerId
self.mode = mode
super.init(context: context, component: CreateExternalMediaStreamScreenComponent(context: context, peerId: peerId, mode: mode, credentialsPromise: credentialsPromise), navigationBarAppearance: .transparent, theme: defaultDarkPresentationTheme)
super.init(context: context, component: CreateExternalMediaStreamScreenComponent(context: context, peerId: peerId, mode: mode, credentialsPromise: credentialsPromise), navigationBarAppearance: .transparent, theme: .dark)
self.navigationPresentation = .modal
@ -138,11 +138,17 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext {
self.fullPath = fullPath
self.metaPath = metaPath
do {
self.fileMap = try MediaBoxFileMap.read(manager: self.manager, path: self.metaPath)
} catch {
if !FileManager.default.fileExists(atPath: self.partialPath) {
let _ = try? FileManager.default.removeItem(atPath: self.metaPath)
self.fileMap = MediaBoxFileMap()
self.fileMap.serialize(manager: self.manager, to: self.metaPath)
} else {
do {
self.fileMap = try MediaBoxFileMap.read(manager: self.manager, path: self.metaPath)
} catch {
let _ = try? FileManager.default.removeItem(atPath: self.metaPath)
self.fileMap = MediaBoxFileMap()
}
}
self.destinationFile = self.manager.open(path: self.partialPath, mode: .readwrite)
@ -172,6 +178,17 @@ final class MediaBoxFileContextV2Impl: MediaBoxFileContext {
completed: completed
)
if self.updateRangeRequest(request: request) {
if !self.isComplete, let truncationSize = self.fileMap.truncationSize, truncationSize == self.fileMap.sum {
self.isComplete = true
let linkResult = link(self.partialPath, self.fullPath)
if linkResult != 0 {
postboxLog("MediaBoxFileContextV2Impl: error while linking \(self.partialPath): \(linkResult)")
}
}
self.updateRequests()
return EmptyDisposable
} else {
let index = self.rangeRequests.add(request)
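Note: the second hunk makes range requests check for completion as soon as the downloaded ranges cover the truncated size, and then hard-links the partial file into its final location. The core of that step, reduced to a sketch with the same names as the hunk:

    // Sketch: when the downloaded ranges cover the full (truncated) size,
    // mark the context complete and hard-link the partial file into place.
    if !isComplete, let truncationSize = fileMap.truncationSize, truncationSize == fileMap.sum {
        isComplete = true
        let linkResult = link(partialPath, fullPath)   // POSIX hard link
        if linkResult != 0 {
            postboxLog("MediaBoxFileContextV2Impl: error while linking \(partialPath): \(linkResult)")
        }
    }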
@ -47,7 +47,11 @@ public final class QrCodeScreen: ViewController {
case let .invite(invite, _):
return invite.link ?? ""
case let .chatFolder(slug):
return slug
if slug.hasPrefix("https://") {
return slug
} else {
return "https://t.me/addlist/\(slug)"
}
}
}
@ -25,8 +25,9 @@ extension ReactionsMessageAttribute {
case let .messagePeerReaction(flags, peerId, date, reaction):
let isLarge = (flags & (1 << 0)) != 0
let isUnseen = (flags & (1 << 1)) != 0
let isMy = (flags & (1 << 2)) != 0
if let reaction = MessageReaction.Reaction(apiReaction: reaction) {
return ReactionsMessageAttribute.RecentPeer(value: reaction, isLarge: isLarge, isUnseen: isUnseen, peerId: peerId.peerId, timestamp: date)
return ReactionsMessageAttribute.RecentPeer(value: reaction, isLarge: isLarge, isUnseen: isUnseen, isMy: isMy, peerId: peerId.peerId, timestamp: date)
} else {
return nil
}
@ -117,17 +118,17 @@ private func mergeReactions(reactions: [MessageReaction], recentPeers: [Reaction
let pendingReactionSendAsPeerId = pendingReaction.sendAsPeerId ?? accountPeerId
if let index = recentPeers.firstIndex(where: {
$0.value == pendingReaction.value && $0.peerId == pendingReactionSendAsPeerId
$0.value == pendingReaction.value && ($0.peerId == pendingReactionSendAsPeerId || $0.isMy)
}) {
recentPeers.remove(at: index)
}
recentPeers.append(ReactionsMessageAttribute.RecentPeer(value: pendingReaction.value, isLarge: false, isUnseen: false, peerId: pendingReactionSendAsPeerId, timestamp: Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970)))
recentPeers.append(ReactionsMessageAttribute.RecentPeer(value: pendingReaction.value, isLarge: false, isUnseen: false, isMy: true, peerId: pendingReactionSendAsPeerId, timestamp: Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970)))
}
for i in (0 ..< result.count).reversed() {
if result[i].chosenOrder != nil {
if !pending.contains(where: { $0.value == result[i].value }) {
if let index = recentPeers.firstIndex(where: { $0.value == result[i].value && $0.peerId == accountPeerId }) {
if let index = recentPeers.firstIndex(where: { $0.value == result[i].value && ($0.peerId == accountPeerId || $0.isMy) }) {
recentPeers.remove(at: index)
}
@ -191,8 +192,9 @@ extension ReactionsMessageAttribute {
case let .messagePeerReaction(flags, peerId, date, reaction):
let isLarge = (flags & (1 << 0)) != 0
let isUnseen = (flags & (1 << 1)) != 0
let isMy = (flags & (1 << 2)) != 0
if let reaction = MessageReaction.Reaction(apiReaction: reaction) {
return ReactionsMessageAttribute.RecentPeer(value: reaction, isLarge: isLarge, isUnseen: isUnseen, peerId: peerId.peerId, timestamp: date)
return ReactionsMessageAttribute.RecentPeer(value: reaction, isLarge: isLarge, isUnseen: isUnseen, isMy: isMy, peerId: peerId.peerId, timestamp: date)
} else {
return nil
}
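Note: the parser now reads a third bit from messagePeerReaction.flags and threads it through as isMy, which lets pending local reactions replace the correct recent-peer entry even when they were sent on behalf of another peer. The bit layout assumed by these hunks:

    // Flag bits decoded from the API's messagePeerReaction.flags.
    let isLarge  = (flags & (1 << 0)) != 0   // large reaction animation
    let isUnseen = (flags & (1 << 1)) != 0   // not yet seen by the account
    let isMy     = (flags & (1 << 2)) != 0   // sent by the current account (new)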
@ -12,6 +12,7 @@ public enum AuthorizationCodeRequestError {
case phoneLimitExceeded
case phoneBanned
case timeout
case appOutdated
}
func switchToAuthorizedAccount(transaction: AccountManagerModifier<TelegramAccountManagerTypes>, account: UnauthorizedAccount) {
@ -230,6 +231,8 @@ public func sendAuthorizationCode(accountManager: AccountManager<TelegramAccount
return .fail(.phoneLimitExceeded)
} else if error.errorDescription == "PHONE_NUMBER_BANNED" {
return .fail(.phoneBanned)
} else if error.errorDescription == "UPDATE_APP_TO_LOGIN" {
return .fail(.appOutdated)
} else if error.errorDescription == "SESSION_PASSWORD_NEEDED" {
return account.network.request(Api.functions.account.getPassword(), automaticFloodWait: false)
|> mapError { error -> AuthorizationCodeRequestError in
@ -356,6 +359,8 @@ private func internalResendAuthorizationCode(accountManager: AccountManager<Tele
return .phoneLimitExceeded
} else if error.errorDescription == "PHONE_NUMBER_BANNED" {
return .phoneBanned
} else if error.errorDescription == "UPDATE_APP_TO_LOGIN" {
return .appOutdated
} else {
return .generic(info: (Int(error.errorCode), error.errorDescription))
}
@ -442,6 +447,8 @@ public func resendAuthorizationCode(accountManager: AccountManager<TelegramAccou
return .phoneLimitExceeded
} else if error.errorDescription == "PHONE_NUMBER_BANNED" {
return .phoneBanned
} else if error.errorDescription == "UPDATE_APP_TO_LOGIN" {
return .appOutdated
} else {
return .generic(info: (Int(error.errorCode), error.errorDescription))
}
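Note: all three request paths (send, internal resend, resend) gain the same branch, so the server error string and the new enum case map one-to-one. The shared shape of the mapping, exactly as it appears in each hunk:

    // Sketch: translating the RPC error description into the new enum case.
    if error.errorDescription == "PHONE_NUMBER_BANNED" {
        return .phoneBanned
    } else if error.errorDescription == "UPDATE_APP_TO_LOGIN" {
        return .appOutdated   // new: server asks the client to update before logging in
    } else {
        return .generic(info: (Int(error.errorCode), error.errorDescription))
    }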
@ -94,7 +94,7 @@ private final class MultiplexedRequestManagerContext {
private var nextId: Int32 = 0
private var targetContexts: [MultiplexedRequestTargetKey: [RequestTargetContext]] = [:]
private var emptyTargetTimers: [MultiplexedRequestTargetTimerKey: SignalKitTimer] = [:]
private var emptyTargetDisposables: [MultiplexedRequestTargetTimerKey: Disposable] = [:]
init(queue: Queue, takeWorker: @escaping (MultiplexedRequestTarget, MediaResourceFetchTag?, Bool) -> Download?) {
self.queue = queue
@ -109,8 +109,8 @@ private final class MultiplexedRequestManagerContext {
}
}
}
for timer in emptyTargetTimers.values {
timer.invalidate()
for disposable in emptyTargetDisposables.values {
disposable.dispose()
}
}
@ -243,12 +243,17 @@ private final class MultiplexedRequestManagerContext {
for context in contexts {
let key = MultiplexedRequestTargetTimerKey(key: targetKey, id: context.id)
if context.requests.isEmpty {
if self.emptyTargetTimers[key] == nil {
let timer = SignalKitTimer(timeout: 2.0, repeat: false, completion: { [weak self] in
if self.emptyTargetDisposables[key] == nil {
let disposable = MetaDisposable()
self.emptyTargetDisposables[key] = disposable
disposable.set((Signal<Never, NoError>.complete()
|> delay(20 * 60, queue: self.queue)
|> deliverOn(self.queue)).start(completed: { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.emptyTargetTimers.removeValue(forKey: key)
strongSelf.emptyTargetDisposables.removeValue(forKey: key)
if strongSelf.targetContexts[targetKey] != nil {
for i in 0 ..< strongSelf.targetContexts[targetKey]!.count {
if strongSelf.targetContexts[targetKey]![i].id == key.id {
@ -257,14 +262,12 @@ private final class MultiplexedRequestManagerContext {
}
}
}
}, queue: self.queue)
self.emptyTargetTimers[key] = timer
timer.start()
}))
}
} else {
if let timer = self.emptyTargetTimers[key] {
timer.invalidate()
self.emptyTargetTimers.removeValue(forKey: key)
if let disposable = self.emptyTargetDisposables[key] {
disposable.dispose()
self.emptyTargetDisposables.removeValue(forKey: key)
}
}
}
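Note: the change swaps the 2-second SignalKitTimer for a 20-minute delayed signal held in a MetaDisposable, so idle worker connections stay warm much longer and the pending cleanup can be cancelled by disposing. The pattern, assuming SwiftSignalKit's Signal, delay, and MetaDisposable exactly as used in the hunk, with the removal logic folded into a placeholder:

    // Sketch: schedule cancellable cleanup 20 minutes after a target becomes idle.
    if emptyTargetDisposables[key] == nil {
        let disposable = MetaDisposable()
        emptyTargetDisposables[key] = disposable
        disposable.set((Signal<Never, NoError>.complete()
        |> delay(20 * 60, queue: queue)
        |> deliverOn(queue)).start(completed: { [weak self] in
            self?.cleanUpIdleTarget(forKey: key)   // placeholder for the removal logic
        }))
    }
    // When a request arrives again, dispose to cancel the pending cleanup:
    // emptyTargetDisposables[key]?.dispose()
    // emptyTargetDisposables.removeValue(forKey: key)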
@ -178,7 +178,7 @@ final class NetworkFrameworkTcpConnectionInterface: NSObject, MTTcpConnectionInt
func write(data: Data) {
guard let connection = self.connection else {
assertionFailure("Connection not ready")
Logger.shared.log("NetworkFrameworkTcpConnectionInterface", "write called while connection == nil")
return
}
@ -287,6 +287,12 @@ private func maybePredownloadedFileResource(postbox: Postbox, auxiliaryMethods:
return .single(.none)
}
#if DEBUG
if "".isEmpty {
return .single(.none)
}
#endif
return auxiliaryMethods.fetchResourceMediaReferenceHash(resource)
|> mapToSignal { hash -> Signal<PredownloadedResource, NoError> in
if let hash = hash {
@ -118,13 +118,15 @@ public final class ReactionsMessageAttribute: Equatable, MessageAttribute {
public var value: MessageReaction.Reaction
public var isLarge: Bool
public var isUnseen: Bool
public var isMy: Bool
public var peerId: PeerId
public var timestamp: Int32?
public init(value: MessageReaction.Reaction, isLarge: Bool, isUnseen: Bool, peerId: PeerId, timestamp: Int32?) {
public init(value: MessageReaction.Reaction, isLarge: Bool, isUnseen: Bool, isMy: Bool, peerId: PeerId, timestamp: Int32?) {
self.value = value
self.isLarge = isLarge
self.isUnseen = isUnseen
self.isMy = isMy
self.peerId = peerId
self.timestamp = timestamp
}
@ -137,6 +139,7 @@ public final class ReactionsMessageAttribute: Equatable, MessageAttribute {
}
self.isLarge = decoder.decodeInt32ForKey("l", orElse: 0) != 0
self.isUnseen = decoder.decodeInt32ForKey("u", orElse: 0) != 0
self.isMy = decoder.decodeInt32ForKey("my", orElse: 0) != 0
self.peerId = PeerId(decoder.decodeInt64ForKey("p", orElse: 0))
self.timestamp = decoder.decodeOptionalInt32ForKey("ts")
}
@ -150,6 +153,7 @@ public final class ReactionsMessageAttribute: Equatable, MessageAttribute {
}
encoder.encodeInt32(self.isLarge ? 1 : 0, forKey: "l")
encoder.encodeInt32(self.isUnseen ? 1 : 0, forKey: "u")
encoder.encodeInt32(self.isMy ? 1 : 0, forKey: "my")
encoder.encodeInt64(self.peerId.toInt64(), forKey: "p")
if let timestamp = self.timestamp {
encoder.encodeInt32(timestamp, forKey: "ts")
@ -59,8 +59,8 @@ public struct ExportedChatFolderLink: Equatable {
public extension ExportedChatFolderLink {
var slug: String {
var slug = self.link
if slug.hasPrefix("https://t.me/folder/") {
slug = String(slug[slug.index(slug.startIndex, offsetBy: "https://t.me/folder/".count)...])
if slug.hasPrefix("https://t.me/addlist/") {
slug = String(slug[slug.index(slug.startIndex, offsetBy: "https://t.me/addlist/".count)...])
}
return slug
}
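Note: together with the QrCodeScreen hunk earlier, this moves chat-folder deep links from t.me/folder/ to t.me/addlist/: the QR screen prepends the prefix when it only has a bare slug, and ExportedChatFolderLink.slug strips it when it holds a full link. A small sketch of the two directions (plain String values assumed):

    // Sketch: normalizing between a bare slug and a full chat-folder invite URL.
    let prefix = "https://t.me/addlist/"

    func folderLinkURL(from slug: String) -> String {
        return slug.hasPrefix("https://") ? slug : prefix + slug
    }

    func folderSlug(from link: String) -> String {
        return link.hasPrefix(prefix) ? String(link.dropFirst(prefix.count)) : link
    }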
@ -364,6 +364,9 @@ swift_library(
"//submodules/TelegramUI/Components/ChatScheduleTimeController",
"//submodules/ICloudResources",
"//submodules/TelegramUI/Components/LegacyCamera",
"//submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton",
"//submodules/TelegramUI/Components/ChatSendButtonRadialStatusNode",
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
] + select({
"@build_bazel_rules_apple//apple:ios_armv7": [],
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,
@ -935,7 +935,7 @@ private final class ChatFolderLinkPreviewScreenComponent: Component {
} else if let linkContents = component.linkContents {
actionButtonBadge = max(0, self.selectedItems.count - (linkContents.peers.count - canAddChatCount))
if linkContents.localFilterId != nil {
if self.selectedItems.isEmpty {
if actionButtonBadge == 0 {
actionButtonTitle = environment.strings.FolderLinkPreview_ButtonDoNotJoinChats
} else {
actionButtonTitle = environment.strings.FolderLinkPreview_ButtonJoinChats
@ -0,0 +1,22 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "ChatSendButtonRadialStatusNode",
module_name = "ChatSendButtonRadialStatusNode",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/LegacyComponents",
"//submodules/ChatPresentationInterfaceState",
],
visibility = [
"//visibility:public",
],
)
@ -16,7 +16,7 @@ private final class ChatSendButtonRadialStatusNodeParameters: NSObject {
}
}
final class ChatSendButtonRadialStatusNode: ASDisplayNode {
public final class ChatSendButtonRadialStatusNode: ASDisplayNode {
private let color: UIColor
private var effectiveProgress: CGFloat = 0.0 {
@ -25,7 +25,7 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
}
}
var slowmodeState: ChatSlowmodeState? = nil {
public var slowmodeState: ChatSlowmodeState? = nil {
didSet {
if self.slowmodeState != oldValue {
self.updateProgress()
@ -35,7 +35,7 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
private var updateTimer: SwiftSignalKit.Timer?
init(color: UIColor) {
public init(color: UIColor) {
self.color = color
super.init()
@ -48,11 +48,11 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
self.updateTimer?.invalidate()
}
override func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
return ChatSendButtonRadialStatusNodeParameters(color: self.color, progress: self.effectiveProgress)
}
@objc override class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
let context = UIGraphicsGetCurrentContext()!
if !isRasterizing {
@ -107,7 +107,7 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
}
}
final class ChatSendButtonRadialStatusView: UIView {
public final class ChatSendButtonRadialStatusView: UIView {
private let color: UIColor
private var effectiveProgress: CGFloat = 0.0 {
@ -116,7 +116,7 @@ final class ChatSendButtonRadialStatusView: UIView {
}
}
var slowmodeState: ChatSlowmodeState? = nil {
public var slowmodeState: ChatSlowmodeState? = nil {
didSet {
if self.slowmodeState != oldValue {
self.updateProgress()
@ -126,7 +126,7 @@ final class ChatSendButtonRadialStatusView: UIView {
private var updateTimer: SwiftSignalKit.Timer?
init(color: UIColor) {
public init(color: UIColor) {
self.color = color
super.init(frame: CGRect())
@ -135,7 +135,7 @@ final class ChatSendButtonRadialStatusView: UIView {
self.isOpaque = false
}
required init?(coder aDecoder: NSCoder) {
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@ -143,7 +143,7 @@ final class ChatSendButtonRadialStatusView: UIView {
self.updateTimer?.invalidate()
}
override func draw(_ rect: CGRect) {
override public func draw(_ rect: CGRect) {
if rect.isEmpty {
return
}
@ -0,0 +1,31 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "ChatTextInputMediaRecordingButton",
module_name = "ChatTextInputMediaRecordingButton",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/Display",
"//submodules/AsyncDisplayKit",
"//submodules/TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/TelegramPresentationData",
"//submodules/LegacyComponents",
"//submodules/AccountContext",
"//submodules/ChatInterfaceState",
"//submodules/AudioBlob",
"//submodules/ChatPresentationInterfaceState",
"//submodules/ComponentFlow",
"//submodules/Components/LottieAnimationComponent",
"//submodules/TelegramUI/Components/LottieComponent",
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
],
visibility = [
"//visibility:public",
],
)
@ -13,7 +13,7 @@ import ChatPresentationInterfaceState
import ComponentFlow
import LottieAnimationComponent
import LottieComponent
import AccountContext
import LegacyInstantVideoController
private let offsetThreshold: CGFloat = 10.0
private let dismissOffsetThreshold: CGFloat = 70.0
@ -175,22 +175,23 @@ private final class ChatTextInputMediaRecordingButtonPresenter : NSObject, TGMod
}
}
final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButton, TGModernConversationInputMicButtonDelegate {
public final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButton, TGModernConversationInputMicButtonDelegate {
private let context: AccountContext
private var theme: PresentationTheme
private let useDarkTheme: Bool
private let strings: PresentationStrings
var mode: ChatTextInputMediaRecordingButtonMode = .audio
var statusBarHost: StatusBarHost?
let presentController: (ViewController) -> Void
var recordingDisabled: () -> Void = { }
var beginRecording: () -> Void = { }
var endRecording: (Bool) -> Void = { _ in }
var stopRecording: () -> Void = { }
var offsetRecordingControls: () -> Void = { }
var switchMode: () -> Void = { }
var updateLocked: (Bool) -> Void = { _ in }
var updateCancelTranslation: () -> Void = { }
public var mode: ChatTextInputMediaRecordingButtonMode = .audio
public var statusBarHost: StatusBarHost?
public let presentController: (ViewController) -> Void
public var recordingDisabled: () -> Void = { }
public var beginRecording: () -> Void = { }
public var endRecording: (Bool) -> Void = { _ in }
public var stopRecording: () -> Void = { }
public var offsetRecordingControls: () -> Void = { }
public var switchMode: () -> Void = { }
public var updateLocked: (Bool) -> Void = { _ in }
public var updateCancelTranslation: () -> Void = { }
private var modeTimeoutTimer: SwiftSignalKit.Timer?
@ -199,13 +200,13 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
private var recordingOverlay: ChatTextInputAudioRecordingOverlay?
private var startTouchLocation: CGPoint?
fileprivate var controlsOffset: CGFloat = 0.0
private(set) var cancelTranslation: CGFloat = 0.0
public private(set) var cancelTranslation: CGFloat = 0.0
private var micLevelDisposable: MetaDisposable?
private weak var currentPresenter: UIView?
var contentContainer: (UIView, CGRect)? {
public var contentContainer: (UIView, CGRect)? {
if let _ = self.currentPresenter {
return (self.micDecoration, self.micDecoration.bounds)
} else {
@ -213,7 +214,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
var audioRecorder: ManagedAudioRecorder? {
public var audioRecorder: ManagedAudioRecorder? {
didSet {
if self.audioRecorder !== oldValue {
if self.micLevelDisposable == nil {
@ -235,7 +236,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
var videoRecordingStatus: InstantVideoControllerRecordingStatus? {
public var videoRecordingStatus: InstantVideoControllerRecordingStatus? {
didSet {
if self.videoRecordingStatus !== oldValue {
if self.micLevelDisposable == nil {
@ -293,16 +294,17 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
if let current = self.micLockValue {
return current
} else {
let lockView = LockView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 60.0)), theme: self.theme, strings: self.strings)
let lockView = LockView(frame: CGRect(origin: CGPoint(), size: CGSize(width: 40.0, height: 60.0)), theme: self.theme, useDarkTheme: self.useDarkTheme, strings: self.strings)
lockView.addTarget(self, action: #selector(handleStopTap), for: .touchUpInside)
self.micLockValue = lockView
return lockView
}
}
init(context: AccountContext, theme: PresentationTheme, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) {
public init(context: AccountContext, theme: PresentationTheme, useDarkTheme: Bool = false, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) {
self.context = context
self.theme = theme
self.useDarkTheme = useDarkTheme
self.strings = strings
self.animationView = ComponentView<Empty>()
self.presentController = presentController
@ -323,7 +325,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
self.centerOffset = CGPoint(x: 0.0, y: -1.0 + UIScreenPixel)
}
required init?(coder aDecoder: NSCoder) {
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@ -336,7 +338,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
func updateMode(mode: ChatTextInputMediaRecordingButtonMode, animated: Bool) {
public func updateMode(mode: ChatTextInputMediaRecordingButtonMode, animated: Bool) {
self.updateMode(mode: mode, animated: animated, force: false)
}
@ -382,7 +384,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
transition: .immediate,
component: AnyComponent(LottieComponent(
content: LottieComponent.AppBundleContent(name: animationName),
color: self.theme.chat.inputPanel.panelControlColor.blitOver(self.theme.chat.inputPanel.inputBackgroundColor, alpha: 1.0)
color: self.useDarkTheme ? .white : self.theme.chat.inputPanel.panelControlColor.blitOver(self.theme.chat.inputPanel.inputBackgroundColor, alpha: 1.0)
)),
environment: {},
containerSize: animationFrame.size
@ -401,22 +403,22 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
func updateTheme(theme: PresentationTheme) {
public func updateTheme(theme: PresentationTheme) {
self.theme = theme
self.updateAnimation(previousMode: self.mode)
self.pallete = legacyInputMicPalette(from: theme)
self.micDecorationValue?.setColor(self.theme.chat.inputPanel.actionControlFillColor)
self.micDecorationValue?.setColor( self.theme.chat.inputPanel.actionControlFillColor)
(self.micLockValue as? LockView)?.updateTheme(theme)
}
func cancelRecording() {
public func cancelRecording() {
self.isEnabled = false
self.isEnabled = true
}
func micButtonInteractionBegan() {
public func micButtonInteractionBegan() {
if self.fadeDisabled {
self.recordingDisabled()
} else {
@ -433,13 +435,13 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
func micButtonInteractionCancelled(_ velocity: CGPoint) {
public func micButtonInteractionCancelled(_ velocity: CGPoint) {
//print("\(CFAbsoluteTimeGetCurrent()) cancelled")
self.modeTimeoutTimer?.invalidate()
self.endRecording(false)
}
func micButtonInteractionCompleted(_ velocity: CGPoint) {
public func micButtonInteractionCompleted(_ velocity: CGPoint) {
//print("\(CFAbsoluteTimeGetCurrent()) completed")
if let modeTimeoutTimer = self.modeTimeoutTimer {
//print("\(CFAbsoluteTimeGetCurrent()) switch")
@ -450,43 +452,43 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
self.endRecording(true)
}
func micButtonInteractionUpdate(_ offset: CGPoint) {
public func micButtonInteractionUpdate(_ offset: CGPoint) {
self.controlsOffset = offset.x
self.offsetRecordingControls()
}
func micButtonInteractionUpdateCancelTranslation(_ translation: CGFloat) {
public func micButtonInteractionUpdateCancelTranslation(_ translation: CGFloat) {
self.cancelTranslation = translation
self.updateCancelTranslation()
}
func micButtonInteractionLocked() {
public func micButtonInteractionLocked() {
self.updateLocked(true)
}
func micButtonInteractionRequestedLockedAction() {
public func micButtonInteractionRequestedLockedAction() {
}
func micButtonInteractionStopped() {
public func micButtonInteractionStopped() {
self.stopRecording()
}
func micButtonShouldLock() -> Bool {
public func micButtonShouldLock() -> Bool {
return true
}
func micButtonPresenter() -> TGModernConversationInputMicButtonPresentation! {
public func micButtonPresenter() -> TGModernConversationInputMicButtonPresentation! {
let presenter = ChatTextInputMediaRecordingButtonPresenter(statusBarHost: self.statusBarHost, presentController: self.presentController)
presenter.button = self
self.currentPresenter = presenter.view()
return presenter
}
func micButtonDecoration() -> (UIView & TGModernConversationInputMicButtonDecoration)! {
public func micButtonDecoration() -> (UIView & TGModernConversationInputMicButtonDecoration)! {
return micDecoration
}
func micButtonLock() -> (UIView & TGModernConversationInputMicButtonLock)! {
public func micButtonLock() -> (UIView & TGModernConversationInputMicButtonLock)! {
return micLock
}
@ -494,7 +496,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
micButtonInteractionStopped()
}
override func animateIn() {
override public func animateIn() {
super.animateIn()
if self.context.sharedContext.energyUsageSettings.fullTranslucency {
@ -509,7 +511,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
override func animateOut(_ toSmallSize: Bool) {
override public func animateOut(_ toSmallSize: Bool) {
super.animateOut(toSmallSize)
micDecoration.stopAnimating()
@ -527,7 +529,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
private var previousSize = CGSize()
func layoutItems() {
public func layoutItems() {
let size = self.bounds.size
if size != self.previousSize {
self.previousSize = size
@ -5,7 +5,7 @@ import Lottie
import TelegramPresentationData
final class LockView: UIButton, TGModernConversationInputMicButtonLock {
//private var colorCallbacks = [LOTValueDelegate]()
private let useDarkTheme: Bool
private let idleView: AnimationView = {
guard let url = getAppBundle().url(forResource: "LockWait", withExtension: "json"), let animation = Animation.filepath(url.path)
@ -28,7 +28,9 @@ final class LockView: UIButton, TGModernConversationInputMicButtonLock {
return view
}()
init(frame: CGRect, theme: PresentationTheme, strings: PresentationStrings) {
init(frame: CGRect, theme: PresentationTheme, useDarkTheme: Bool = false, strings: PresentationStrings) {
self.useDarkTheme = useDarkTheme
super.init(frame: frame)
accessibilityLabel = strings.VoiceOver_Recording_StopAndPreview
@ -60,8 +62,6 @@ final class LockView: UIButton, TGModernConversationInputMicButtonLock {
}
func updateTheme(_ theme: PresentationTheme) {
//colorCallbacks.removeAll()
[
"Rectangle.Заливка 1": theme.chat.inputPanel.panelBackgroundColor,
"Rectangle.Rectangle.Обводка 1": theme.chat.inputPanel.panelControlAccentColor,
@ -69,9 +69,6 @@ final class LockView: UIButton, TGModernConversationInputMicButtonLock {
"Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor
].forEach { key, value in
idleView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color"))
/*let colorCallback = LOTColorValueCallback(color: value.cgColor)
self.colorCallbacks.append(colorCallback)
idleView.setValueDelegate(colorCallback, for: LOTKeypath(string: "\(key).Color"))*/
}
[
@ -82,9 +79,6 @@ final class LockView: UIButton, TGModernConversationInputMicButtonLock {
"Path 4.Path 4.Обводка 1": theme.chat.inputPanel.panelControlAccentColor
].forEach { key, value in
lockingView.setValueProvider(ColorValueProvider(value.lottieColorValue), keypath: AnimationKeypath(keypath: "\(key).Color"))
/*let colorCallback = LOTColorValueCallback(color: value.cgColor)
self.colorCallbacks.append(colorCallback)
lockingView.setValueDelegate(colorCallback, for: LOTKeypath(string: "\(key).Color"))*/
}
}
@ -0,0 +1,33 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
swift_library(
name = "LegacyInstantVideoController",
module_name = "LegacyInstantVideoController",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
deps = [
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/TelegramCore",
"//submodules/Postbox",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/TelegramPresentationData",
"//submodules/MediaResources",
"//submodules/LegacyComponents",
"//submodules/AccountContext",
"//submodules/LegacyUI",
"//submodules/ImageCompression",
"//submodules/LocalMediaResources",
"//submodules/AppBundle",
"//submodules/LegacyMediaPickerUI",
"//submodules/ChatPresentationInterfaceState",
"//submodules/TelegramUI/Components/ChatSendButtonRadialStatusNode",
],
visibility = [
"//visibility:public",
],
)
@ -15,21 +15,22 @@ import LocalMediaResources
import AppBundle
import LegacyMediaPickerUI
import ChatPresentationInterfaceState
import ChatSendButtonRadialStatusNode
final class InstantVideoController: LegacyController, StandalonePresentableController {
public final class InstantVideoController: LegacyController, StandalonePresentableController {
private var captureController: TGVideoMessageCaptureController?
var onDismiss: ((Bool) -> Void)?
var onStop: (() -> Void)?
public var onDismiss: ((Bool) -> Void)?
public var onStop: (() -> Void)?
private let micLevelValue = ValuePromise<Float>(0.0)
private let durationValue = ValuePromise<TimeInterval>(0.0)
let audioStatus: InstantVideoControllerRecordingStatus
public let audioStatus: InstantVideoControllerRecordingStatus
private var completed = false
private var dismissed = false
override init(presentation: LegacyControllerPresentation, theme: PresentationTheme?, strings: PresentationStrings? = nil, initialLayout: ContainerViewLayout? = nil) {
override public init(presentation: LegacyControllerPresentation, theme: PresentationTheme?, strings: PresentationStrings? = nil, initialLayout: ContainerViewLayout? = nil) {
self.audioStatus = InstantVideoControllerRecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
super.init(presentation: presentation, theme: theme, initialLayout: initialLayout)
@ -41,7 +42,7 @@ final class InstantVideoController: LegacyController, StandalonePresentableContr
fatalError("init(coder:) has not been implemented")
}
func bindCaptureController(_ captureController: TGVideoMessageCaptureController?) {
public func bindCaptureController(_ captureController: TGVideoMessageCaptureController?) {
self.captureController = captureController
if let captureController = captureController {
captureController.view.disablesInteractiveKeyboardGestureRecognizer = true
@ -66,61 +67,61 @@ final class InstantVideoController: LegacyController, StandalonePresentableContr
}
}
func dismissVideo() {
public func dismissVideo() {
if let captureController = self.captureController, !self.dismissed {
self.dismissed = true
captureController.dismiss(true)
}
}
func extractVideoSnapshot() -> UIView? {
public func extractVideoSnapshot() -> UIView? {
self.captureController?.extractVideoContent()
}
func hideVideoSnapshot() {
public func hideVideoSnapshot() {
self.captureController?.hideVideoContent()
}
func completeVideo() {
public func completeVideo() {
if let captureController = self.captureController, !self.completed {
self.completed = true
captureController.complete()
}
}
func dismissAnimated() {
public func dismissAnimated() {
if let captureController = self.captureController, !self.dismissed {
self.dismissed = true
captureController.dismiss(false)
}
}
func stopVideo() -> Bool {
public func stopVideo() -> Bool {
if let captureController = self.captureController {
return captureController.stop()
}
return false
}
func lockVideo() {
public func lockVideo() {
if let captureController = self.captureController {
return captureController.setLocked()
}
}
func updateRecordButtonInteraction(_ value: CGFloat) {
public func updateRecordButtonInteraction(_ value: CGFloat) {
if let captureController = self.captureController {
captureController.buttonInteractionUpdate(CGPoint(x: value, y: 0.0))
}
}
}
func legacyInputMicPalette(from theme: PresentationTheme) -> TGModernConversationInputMicPallete {
public func legacyInputMicPalette(from theme: PresentationTheme) -> TGModernConversationInputMicPallete {
let inputPanelTheme = theme.chat.inputPanel
return TGModernConversationInputMicPallete(dark: theme.overallDarkAppearance, buttonColor: inputPanelTheme.actionControlFillColor, iconColor: inputPanelTheme.actionControlForegroundColor, backgroundColor: theme.rootController.navigationBar.opaqueBackgroundColor, borderColor: inputPanelTheme.panelSeparatorColor, lock: inputPanelTheme.panelControlAccentColor, textColor: inputPanelTheme.primaryTextColor, secondaryTextColor: inputPanelTheme.secondaryTextColor, recording: inputPanelTheme.mediaRecordingDotColor)
}
func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (UIView, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController {
public func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (UIView, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController {
let isSecretChat = peerId.namespace == Namespaces.Peer.SecretChat
let legacyController = InstantVideoController(presentation: .custom, theme: theme)
@ -55,16 +55,24 @@ public final class LottieComponent: Component {
return EmptyDisposable
}
}
public enum StartingPosition {
case begin
case end
}
public let content: Content
public let color: UIColor
public let startingPosition: StartingPosition
public init(
content: Content,
color: UIColor
color: UIColor,
startingPosition: StartingPosition = .end
) {
self.content = content
self.color = color
self.startingPosition = startingPosition
}
public static func ==(lhs: LottieComponent, rhs: LottieComponent) -> Bool {
@ -74,6 +82,9 @@ public final class LottieComponent: Component {
if lhs.color != rhs.color {
return false
}
if lhs.startingPosition != rhs.startingPosition {
return false
}
return true
}
@ -82,6 +93,7 @@ public final class LottieComponent: Component {
private var component: LottieComponent?
private var scheduledPlayOnce: Bool = false
private var playOnceCompletion: (() -> Void)?
private var animationInstance: LottieInstance?
private var currentDisplaySize: CGSize?
private var currentContentDisposable: Disposable?
@ -147,7 +159,9 @@ public final class LottieComponent: Component {
}
}
public func playOnce(delay: Double = 0.0) {
public func playOnce(delay: Double = 0.0, completion: (() -> Void)? = nil) {
self.playOnceCompletion = completion
guard let _ = self.animationInstance else {
self.scheduledPlayOnce = true
return
@ -194,13 +208,18 @@ public final class LottieComponent: Component {
}
}
private func loadAnimation(data: Data, cacheKey: String?) {
private func loadAnimation(data: Data, cacheKey: String?, startingPosition: StartingPosition) {
self.animationInstance = LottieInstance(data: data, fitzModifier: .none, colorReplacements: nil, cacheKey: cacheKey ?? "")
if self.scheduledPlayOnce {
self.scheduledPlayOnce = false
self.playOnce()
} else if let animationInstance = self.animationInstance {
self.currentFrame = Int(animationInstance.frameCount - 1)
switch startingPosition {
case .begin:
self.currentFrame = 0
case .end:
self.currentFrame = Int(animationInstance.frameCount - 1)
}
self.updateImage()
}
}
@ -222,12 +241,21 @@ public final class LottieComponent: Component {
let timestamp = CACurrentMediaTime()
if currentFrameStartTime + timestamp >= secondsPerFrame * 0.9 {
self.currentFrame += 1
var advanceFrameCount = 1
if animationInstance.frameRate == 360 {
advanceFrameCount = 6
}
self.currentFrame += advanceFrameCount
if self.currentFrame >= Int(animationInstance.frameCount) - 1 {
self.currentFrame = Int(animationInstance.frameCount) - 1
self.updateImage()
self.displayLink?.invalidate()
self.displayLink = nil
if let playOnceCompletion = self.playOnceCompletion {
self.playOnceCompletion = nil
playOnceCompletion()
}
} else {
self.currentFrameStartTime = timestamp
self.updateImage()
@ -271,10 +299,10 @@ public final class LottieComponent: Component {
let content = component.content
self.currentContentDisposable = component.content.load { [weak self, weak content] data, cacheKey in
Queue.mainQueue().async {
guard let self, self.component?.content == content else {
guard let self, let component = self.component, component.content == content else {
return
}
self.loadAnimation(data: data, cacheKey: cacheKey)
self.loadAnimation(data: data, cacheKey: cacheKey, startingPosition: component.startingPosition)
}
}
} else if redrawImage {
@@ -15,6 +15,12 @@ swift_library(
"//submodules/AppBundle",
"//submodules/TelegramUI/Components/TextFieldComponent",
"//submodules/Components/BundleIconComponent",
"//submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton",
"//submodules/TelegramUI/Components/LottieComponent",
"//submodules/AccountContext",
"//submodules/TelegramPresentationData",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/Components/HierarchyTrackingLayer",
],
visibility = [
"//visibility:public",
@@ -0,0 +1,302 @@
import Foundation
import UIKit
import Display
import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
import SwiftSignalKit
import LottieComponent
import HierarchyTrackingLayer

public final class MediaRecordingPanelComponent: Component {
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let cancelFraction: CGFloat

public init(
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
cancelFraction: CGFloat
) {
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
self.cancelFraction = cancelFraction
}

public static func ==(lhs: MediaRecordingPanelComponent, rhs: MediaRecordingPanelComponent) -> Bool {
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
if lhs.cancelFraction != rhs.cancelFraction {
return false
}
return true
}

public final class View: UIView {
private var component: MediaRecordingPanelComponent?
private weak var state: EmptyComponentState?

private let trackingLayer: HierarchyTrackingLayer

private let indicator = ComponentView<Empty>()

private let cancelContainerView: UIView
private let cancelIconView: UIImageView
private let cancelText = ComponentView<Empty>()

private let timerFont: UIFont
private let timerText = ComponentView<Empty>()

private var timerTextDisposable: Disposable?

private var timerTextValue: String = "0:00,00"

override init(frame: CGRect) {
self.trackingLayer = HierarchyTrackingLayer()
self.cancelIconView = UIImageView()

self.timerFont = Font.with(size: 15.0, design: .camera, traits: .monospacedNumbers)

self.cancelContainerView = UIView()

super.init(frame: frame)

self.layer.addSublayer(self.trackingLayer)

self.cancelContainerView.addSubview(self.cancelIconView)
self.addSubview(self.cancelContainerView)

self.trackingLayer.didEnterHierarchy = { [weak self] in
guard let self else {
return
}
self.updateAnimations()
}
}

required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}

deinit {
self.timerTextDisposable?.dispose()
}

private func updateAnimations() {
if let indicatorView = self.indicator.view {
if indicatorView.layer.animation(forKey: "recording") == nil {
let animation = CAKeyframeAnimation(keyPath: "opacity")
animation.values = [1.0 as NSNumber, 1.0 as NSNumber, 0.0 as NSNumber]
animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber]
animation.duration = 0.5
animation.autoreverses = true
animation.repeatCount = Float.infinity

indicatorView.layer.add(animation, forKey: "recording")
}
}
if self.cancelContainerView.layer.animation(forKey: "recording") == nil {
let animation = CAKeyframeAnimation(keyPath: "position.x")
animation.values = [-5.0 as NSNumber, 5.0 as NSNumber, 0.0 as NSNumber]
animation.keyTimes = [0.0 as NSNumber, 0.4546 as NSNumber, 0.9091 as NSNumber, 1 as NSNumber]
animation.duration = 1.5
animation.autoreverses = true
animation.isAdditive = true
animation.repeatCount = Float.infinity

self.cancelContainerView.layer.add(animation, forKey: "recording")
}
}

public func animateIn() {
if let indicatorView = self.indicator.view {
indicatorView.layer.animatePosition(from: CGPoint(x: -20.0, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
if let timerTextView = self.timerText.view {
timerTextView.layer.animatePosition(from: CGPoint(x: -20.0, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
self.cancelContainerView.layer.animatePosition(from: CGPoint(x: self.bounds.width, y: 0.0), to: CGPoint(), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}

public func animateOut(dismissRecording: Bool, completion: @escaping () -> Void) {
if let indicatorView = self.indicator.view as? LottieComponent.View {
if let _ = indicatorView.layer.animation(forKey: "recording") {
let fromAlpha = indicatorView.layer.presentation()?.opacity ?? indicatorView.layer.opacity
indicatorView.layer.removeAnimation(forKey: "recording")
indicatorView.layer.animateAlpha(from: CGFloat(fromAlpha), to: 1.0, duration: 0.2)

indicatorView.playOnce(completion: { [weak indicatorView] in
if let indicatorView {
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
transition.setScale(view: indicatorView, scale: 0.001)
}

completion()
})
}
} else {
completion()
}

let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))
if let timerTextView = self.timerText.view {
transition.setAlpha(view: timerTextView, alpha: 0.0)
transition.setScale(view: timerTextView, scale: 0.001)
}

transition.setAlpha(view: self.cancelContainerView, alpha: 0.0)
}

func update(component: MediaRecordingPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
let previousComponent = self.component
self.component = component
self.state = state

if previousComponent?.audioRecorder !== component.audioRecorder || previousComponent?.videoRecordingStatus !== component.videoRecordingStatus {
self.timerTextDisposable?.dispose()

if let audioRecorder = component.audioRecorder {
var updateNow = false
self.timerTextDisposable = audioRecorder.recordingState.start(next: { [weak self] state in
Queue.mainQueue().async {
guard let self else {
return
}
switch state {
case .paused(let duration), .recording(let duration, _):
let currentAudioDurationSeconds = Int(duration)
let currentAudioDurationMilliseconds = Int(duration * 100.0) % 100
let text: String
if currentAudioDurationSeconds >= 60 * 60 {
text = String(format: "%d:%02d:%02d,%02d", currentAudioDurationSeconds / 3600, currentAudioDurationSeconds / 60 % 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
} else {
text = String(format: "%d:%02d,%02d", currentAudioDurationSeconds / 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
}
if self.timerTextValue != text {
self.timerTextValue = text
}
if updateNow {
self.state?.updated(transition: .immediate)
}
case .stopped:
break
}
}
})
updateNow = true
} else if let videoRecordingStatus = component.videoRecordingStatus {
var updateNow = false
self.timerTextDisposable = videoRecordingStatus.duration.start(next: { [weak self] duration in
Queue.mainQueue().async {
guard let self else {
return
}
let currentAudioDurationSeconds = Int(duration)
let currentAudioDurationMilliseconds = Int(duration * 100.0) % 100
let text: String
if currentAudioDurationSeconds >= 60 * 60 {
text = String(format: "%d:%02d:%02d,%02d", currentAudioDurationSeconds / 3600, currentAudioDurationSeconds / 60 % 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
} else {
text = String(format: "%d:%02d,%02d", currentAudioDurationSeconds / 60, currentAudioDurationSeconds % 60, currentAudioDurationMilliseconds)
}
if self.timerTextValue != text {
self.timerTextValue = text
}
if updateNow {
self.state?.updated(transition: .immediate)
}
}
})
updateNow = true
}
}

let indicatorSize = self.indicator.update(
transition: .immediate,
component: AnyComponent(LottieComponent(
content: LottieComponent.AppBundleContent(name: "BinRed"),
color: UIColor(rgb: 0xFF3B30),
startingPosition: .begin
)),
environment: {},
containerSize: CGSize(width: 40.0, height: 40.0)
)
if let indicatorView = self.indicator.view {
if indicatorView.superview == nil {
self.addSubview(indicatorView)
}
transition.setFrame(view: indicatorView, frame: CGRect(origin: CGPoint(x: 3.0, y: floor((availableSize.height - indicatorSize.height) * 0.5)), size: indicatorSize))
}

let timerTextSize = self.timerText.update(
transition: .immediate,
component: AnyComponent(Text(text: self.timerTextValue, font: self.timerFont, color: .white)),
environment: {},
containerSize: CGSize(width: 100.0, height: 100.0)
)
if let timerTextView = self.timerText.view {
if timerTextView.superview == nil {
self.addSubview(timerTextView)
timerTextView.layer.anchorPoint = CGPoint(x: 0.0, y: 0.5)
}
let timerTextFrame = CGRect(origin: CGPoint(x: 38.0, y: floor((availableSize.height - timerTextSize.height) * 0.5)), size: timerTextSize)
transition.setPosition(view: timerTextView, position: CGPoint(x: timerTextFrame.minX, y: timerTextFrame.midY))
timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
}

if self.cancelIconView.image == nil {
self.cancelIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
}

self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.4)

let cancelTextSize = self.cancelText.update(
transition: .immediate,
component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(white: 1.0, alpha: 0.4))),
environment: {},
containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
)

var textFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelTextSize.width) * 0.5), y: floor((availableSize.height - cancelTextSize.height) * 0.5)), size: cancelTextSize)

let bandingStart: CGFloat = 0.0
let bandedOffset = abs(component.cancelFraction) - bandingStart
let range: CGFloat = 300.0
let coefficient: CGFloat = 0.4
let mappedCancelFraction = bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range

textFrame.origin.x -= mappedCancelFraction * 0.5

if let cancelTextView = self.cancelText.view {
if cancelTextView.superview == nil {
self.cancelContainerView.addSubview(cancelTextView)
}
transition.setFrame(view: cancelTextView, frame: textFrame)
}
if let image = self.cancelIconView.image {
transition.setFrame(view: self.cancelIconView, frame: CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size))
}

self.updateAnimations()

return availableSize
}
}

public func makeView() -> View {
return View(frame: CGRect())
}

public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}
@@ -3,6 +3,10 @@ import UIKit
import Display
import ComponentFlow
import AppBundle
import ChatTextInputMediaRecordingButton
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState

public final class MessageInputActionButtonComponent: Component {
public enum Mode {

@@ -10,45 +14,83 @@ public final class MessageInputActionButtonComponent: Component {
case voiceInput
case videoInput
}

public enum Action {
case down
case up
}

public let mode: Mode
public let action: () -> Void
public let action: (Mode, Action, Bool) -> Void
public let switchMediaInputMode: () -> Void
public let updateMediaCancelFraction: (CGFloat) -> Void
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let presentController: (ViewController) -> Void
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?

public init(
mode: Mode,
action: @escaping () -> Void
action: @escaping (Mode, Action, Bool) -> Void,
switchMediaInputMode: @escaping () -> Void,
updateMediaCancelFraction: @escaping (CGFloat) -> Void,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
presentController: @escaping (ViewController) -> Void,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?
) {
self.mode = mode
self.action = action
self.switchMediaInputMode = switchMediaInputMode
self.updateMediaCancelFraction = updateMediaCancelFraction
self.context = context
self.theme = theme
self.strings = strings
self.presentController = presentController
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
}

public static func ==(lhs: MessageInputActionButtonComponent, rhs: MessageInputActionButtonComponent) -> Bool {
if lhs.mode != rhs.mode {
return false
}
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
return true
}

public final class View: HighlightTrackingButton {
private let microphoneIconView: UIImageView
private let cameraIconView: UIImageView
private var micButton: ChatTextInputMediaRecordingButton?
private let sendIconView: UIImageView

private var component: MessageInputActionButtonComponent?
private weak var componentState: EmptyComponentState?

override init(frame: CGRect) {
self.microphoneIconView = UIImageView()

self.cameraIconView = UIImageView()
self.sendIconView = UIImageView()

super.init(frame: frame)

self.isMultipleTouchEnabled = false

self.addSubview(self.microphoneIconView)
self.addSubview(self.cameraIconView)
self.addSubview(self.sendIconView)

self.highligthedChanged = { [weak self] highlighted in

@@ -62,6 +104,7 @@ public final class MessageInputActionButtonComponent: Component {
transition.setSublayerTransform(view: self, transform: CATransform3DMakeScale(scale, scale, 1.0))
}

self.addTarget(self, action: #selector(self.touchDown), for: .touchDown)
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
}

@@ -69,8 +112,18 @@ public final class MessageInputActionButtonComponent: Component {
fatalError("init(coder:) has not been implemented")
}

@objc private func touchDown() {
guard let component = self.component else {
return
}
component.action(component.mode, .down, false)
}

@objc private func pressed() {
self.component?.action()
guard let component = self.component else {
return
}
component.action(component.mode, .up, false)
}

override public func continueTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {

@@ -78,16 +131,58 @@ public final class MessageInputActionButtonComponent: Component {
}

func update(component: MessageInputActionButtonComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
let previousComponent = self.component
self.component = component
self.componentState = state

if self.microphoneIconView.image == nil {
self.microphoneIconView.image = UIImage(bundleImageName: "Chat/Input/Text/IconMicrophone")?.withRenderingMode(.alwaysTemplate)
self.microphoneIconView.tintColor = .white
}
if self.cameraIconView.image == nil {
self.cameraIconView.image = UIImage(bundleImageName: "Chat/Input/Text/IconVideo")?.withRenderingMode(.alwaysTemplate)
self.cameraIconView.tintColor = .white
let themeUpdated = previousComponent?.theme !== component.theme

if self.micButton == nil {
let micButton = ChatTextInputMediaRecordingButton(
context: component.context,
theme: component.theme,
useDarkTheme: true,
strings: component.strings,
presentController: component.presentController
)
self.micButton = micButton
micButton.statusBarHost = component.context.sharedContext.mainWindow?.statusBarHost
self.addSubview(micButton)

micButton.beginRecording = { [weak self] in
guard let self, let component = self.component else {
return
}
switch component.mode {
case .voiceInput, .videoInput:
component.action(component.mode, .down, false)
default:
break
}
}
micButton.endRecording = { [weak self] sendMedia in
guard let self, let component = self.component else {
return
}
switch component.mode {
case .voiceInput, .videoInput:
component.action(component.mode, .up, sendMedia)
default:
break
}
}
micButton.switchMode = { [weak self] in
guard let self, let component = self.component else {
return
}
component.switchMediaInputMode()
}
micButton.updateCancelTranslation = { [weak self] in
guard let self, let micButton = self.micButton, let component = self.component else {
return
}
component.updateMediaCancelFraction(micButton.cancelTranslation)
}
}

if self.sendIconView.image == nil {

@@ -117,40 +212,55 @@ public final class MessageInputActionButtonComponent: Component {

var sendAlpha: CGFloat = 0.0
var microphoneAlpha: CGFloat = 0.0
var cameraAlpha: CGFloat = 0.0

switch component.mode {
case .send:
sendAlpha = 1.0
case .videoInput:
cameraAlpha = 1.0
case .voiceInput:
case .videoInput, .voiceInput:
microphoneAlpha = 1.0
}

transition.setAlpha(view: self.sendIconView, alpha: sendAlpha)
transition.setScale(view: self.sendIconView, scale: sendAlpha == 0.0 ? 0.01 : 1.0)

transition.setAlpha(view: self.cameraIconView, alpha: cameraAlpha)
transition.setScale(view: self.cameraIconView, scale: cameraAlpha == 0.0 ? 0.01 : 1.0)

transition.setAlpha(view: self.microphoneIconView, alpha: microphoneAlpha)
transition.setScale(view: self.microphoneIconView, scale: microphoneAlpha == 0.0 ? 0.01 : 1.0)

if let image = self.sendIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.sendIconView, position: iconFrame.center)
transition.setBounds(view: self.sendIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
}
if let image = self.cameraIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.cameraIconView, position: iconFrame.center)
transition.setBounds(view: self.cameraIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
}
if let image = self.microphoneIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.microphoneIconView, position: iconFrame.center)
transition.setBounds(view: self.microphoneIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))

if let micButton = self.micButton {
if themeUpdated {
micButton.updateTheme(theme: component.theme)
}

let micButtonFrame = CGRect(origin: CGPoint(), size: availableSize)
let shouldLayoutMicButton = micButton.bounds.size != micButtonFrame.size
transition.setPosition(layer: micButton.layer, position: micButtonFrame.center)
transition.setBounds(layer: micButton.layer, bounds: CGRect(origin: CGPoint(), size: micButtonFrame.size))
if shouldLayoutMicButton {
micButton.layoutItems()
}

if previousComponent?.mode != component.mode {
switch component.mode {
case .send, .voiceInput:
micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate)
case .videoInput:
micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate)
}
}

DispatchQueue.main.async { [weak self, weak micButton] in
guard let self, let component = self.component, let micButton else {
return
}
micButton.audioRecorder = component.audioRecorder
micButton.videoRecordingStatus = component.videoRecordingStatus
}

transition.setAlpha(view: micButton, alpha: microphoneAlpha)
transition.setScale(view: micButton, scale: microphoneAlpha == 0.0 ? 0.01 : 1.0)
}

return availableSize
@@ -5,9 +5,13 @@ import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState

public final class MessageInputPanelComponent: Component {
public final class ExternalState {
public fileprivate(set) var isEditing: Bool = false
public fileprivate(set) var hasText: Bool = false

public init() {

@@ -15,23 +19,59 @@ public final class MessageInputPanelComponent: Component {
}

public let externalState: ExternalState
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let presentController: (ViewController) -> Void
public let sendMessageAction: () -> Void
public let setMediaRecordingActive: (Bool, Bool, Bool) -> Void
public let attachmentAction: () -> Void
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?

public init(
externalState: ExternalState,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
presentController: @escaping (ViewController) -> Void,
sendMessageAction: @escaping () -> Void,
attachmentAction: @escaping () -> Void
setMediaRecordingActive: @escaping (Bool, Bool, Bool) -> Void,
attachmentAction: @escaping () -> Void,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?
) {
self.externalState = externalState
self.context = context
self.theme = theme
self.strings = strings
self.presentController = presentController
self.sendMessageAction = sendMessageAction
self.setMediaRecordingActive = setMediaRecordingActive
self.attachmentAction = attachmentAction
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
}

public static func ==(lhs: MessageInputPanelComponent, rhs: MessageInputPanelComponent) -> Bool {
if lhs.externalState !== rhs.externalState {
return false
}
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
return true
}

@@ -49,7 +89,11 @@ public final class MessageInputPanelComponent: Component {
private let inputActionButton = ComponentView<Empty>()
private let stickerIconView: UIImageView

private var mediaRecordingPanel: ComponentView<Empty>?
private weak var dismissingMediaRecordingPanel: UIView?

private var currentMediaInputIsVoice: Bool = true
private var mediaCancelFraction: CGFloat = 0.0

private var component: MessageInputPanelComponent?
private weak var state: EmptyComponentState?

@@ -106,6 +150,7 @@ public final class MessageInputPanelComponent: Component {
self.stickerIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AccessoryIconStickers")?.withRenderingMode(.alwaysTemplate)
self.stickerIconView.tintColor = .white
}
transition.setAlpha(view: self.stickerIconView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)

let availableTextFieldSize = CGSize(width: availableSize.width - insets.left - insets.right, height: availableSize.height - insets.top - insets.bottom)

@@ -122,6 +167,7 @@ public final class MessageInputPanelComponent: Component {

let fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height))
transition.setFrame(view: self.fieldBackgroundView, frame: fieldFrame)
transition.setAlpha(view: self.fieldBackgroundView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)

let rightFieldInset: CGFloat = 34.0

@@ -132,6 +178,7 @@ public final class MessageInputPanelComponent: Component {
self.addSubview(textFieldView)
}
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldFrame.minX, y: fieldFrame.maxY - textFieldSize.height), size: textFieldSize))
transition.setAlpha(view: textFieldView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
}

let attachmentButtonSize = self.attachmentButton.update(

@@ -162,20 +209,45 @@ public final class MessageInputPanelComponent: Component {
transition: transition,
component: AnyComponent(MessageInputActionButtonComponent(
mode: self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput),
action: { [weak self] in
action: { [weak self] mode, action, sendAction in
guard let self else {
return
}

if case .text("") = self.getSendMessageInput() {
self.currentMediaInputIsVoice = !self.currentMediaInputIsVoice
self.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))

HapticFeedback().impact()
} else {
self.component?.sendMessageAction()
switch mode {
case .send:
if case .up = action {
if case .text("") = self.getSendMessageInput() {
} else {
self.component?.sendMessageAction()
}
}
case .voiceInput, .videoInput:
self.component?.setMediaRecordingActive(action == .down, mode == .videoInput, sendAction)
}
}
},
switchMediaInputMode: { [weak self] in
guard let self else {
return
}
self.currentMediaInputIsVoice = !self.currentMediaInputIsVoice
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
},
updateMediaCancelFraction: { [weak self] mediaCancelFraction in
guard let self else {
return
}
if self.mediaCancelFraction != mediaCancelFraction {
self.mediaCancelFraction = mediaCancelFraction
self.state?.updated(transition: .immediate)
}
},
context: component.context,
theme: component.theme,
strings: component.strings,
presentController: component.presentController,
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus
)),
environment: {},
containerSize: CGSize(width: 33.0, height: 33.0)

@@ -195,8 +267,87 @@ public final class MessageInputPanelComponent: Component {
transition.setScale(view: self.stickerIconView, scale: self.textFieldExternalState.hasText ? 0.1 : 1.0)
}

component.externalState.isEditing = self.textFieldExternalState.isEditing
component.externalState.hasText = self.textFieldExternalState.hasText

if component.audioRecorder != nil || component.videoRecordingStatus != nil {
if let dismissingMediaRecordingPanel = self.dismissingMediaRecordingPanel {
self.dismissingMediaRecordingPanel = nil
transition.setAlpha(view: dismissingMediaRecordingPanel, alpha: 0.0, completion: { [weak dismissingMediaRecordingPanel] _ in
dismissingMediaRecordingPanel?.removeFromSuperview()
})
}

let mediaRecordingPanel: ComponentView<Empty>
var mediaRecordingPanelTransition = transition
if let current = self.mediaRecordingPanel {
mediaRecordingPanel = current
} else {
mediaRecordingPanelTransition = .immediate
mediaRecordingPanel = ComponentView()
self.mediaRecordingPanel = mediaRecordingPanel
}

let _ = mediaRecordingPanel.update(
transition: mediaRecordingPanelTransition,
component: AnyComponent(MediaRecordingPanelComponent(
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus,
cancelFraction: self.mediaCancelFraction
)),
environment: {},
containerSize: size
)
if let mediaRecordingPanelView = mediaRecordingPanel.view as? MediaRecordingPanelComponent.View {
var animateIn = false
if mediaRecordingPanelView.superview == nil {
animateIn = true
self.insertSubview(mediaRecordingPanelView, at: 0)
}
mediaRecordingPanelTransition.setFrame(view: mediaRecordingPanelView, frame: CGRect(origin: CGPoint(), size: size))
if animateIn && !transition.animation.isImmediate {
mediaRecordingPanelView.animateIn()
}
}

if let attachmentButtonView = self.attachmentButton.view {
transition.setAlpha(view: attachmentButtonView, alpha: 0.0)
}
} else {
if let mediaRecordingPanel = self.mediaRecordingPanel {
self.mediaRecordingPanel = nil

if let dismissingMediaRecordingPanel = self.dismissingMediaRecordingPanel {
self.dismissingMediaRecordingPanel = nil
transition.setAlpha(view: dismissingMediaRecordingPanel, alpha: 0.0, completion: { [weak dismissingMediaRecordingPanel] _ in
dismissingMediaRecordingPanel?.removeFromSuperview()
})
}

self.dismissingMediaRecordingPanel = mediaRecordingPanel.view

if let mediaRecordingPanelView = mediaRecordingPanel.view as? MediaRecordingPanelComponent.View {
mediaRecordingPanelView.animateOut(dismissRecording: true, completion: { [weak self, weak mediaRecordingPanelView] in
let transition = Transition(animation: .curve(duration: 0.3, curve: .spring))

if let mediaRecordingPanelView = mediaRecordingPanelView {
transition.setAlpha(view: mediaRecordingPanelView, alpha: 0.0, completion: { [weak mediaRecordingPanelView] _ in
mediaRecordingPanelView?.removeFromSuperview()
})
}

guard let self else {
return
}
if self.mediaRecordingPanel == nil, let attachmentButtonView = self.attachmentButton.view {
transition.setAlpha(view: attachmentButtonView, alpha: 1.0)
transition.animateScale(view: attachmentButtonView, from: 0.001, to: 1.0)
}
})
}
}
}

return size
}
}
@@ -40,7 +40,9 @@ swift_library(
"//submodules/ICloudResources",
"//submodules/LegacyComponents",
"//submodules/TelegramUI/Components/LegacyCamera",
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
"//submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent",
"//submodules/TelegramPresentationData",
],
visibility = [
"//visibility:public",
@@ -120,7 +120,10 @@ final class MediaNavigationStripComponent: Component {
}

let potentiallyVisibleCount = Int(ceil((availableSize.width + spacing) / (itemWidth + spacing)))
for i in (component.index - potentiallyVisibleCount) ... (component.index + potentiallyVisibleCount) {
let overflowDistance: CGFloat = 24.0
let potentialOverflowCount = 10
let _ = overflowDistance
for i in (component.index - potentiallyVisibleCount) ... (component.index + potentiallyVisibleCount + potentialOverflowCount) {
if i < 0 {
continue
}
@ -30,6 +30,8 @@ import ICloudResources
|
||||
import LegacyComponents
|
||||
import LegacyCamera
|
||||
import StoryFooterPanelComponent
|
||||
import TelegramPresentationData
|
||||
import LegacyInstantVideoController
|
||||
|
||||
private func hasFirstResponder(_ view: UIView) -> Bool {
|
||||
if view.isFirstResponder {
|
||||
@ -140,6 +142,16 @@ private final class StoryContainerScreenComponent: Component {
|
||||
private var currentSlice: StoryContentItemSlice?
|
||||
private var currentSliceDisposable: Disposable?
|
||||
|
||||
private var audioRecorderValue: ManagedAudioRecorder?
|
||||
private var audioRecorder = Promise<ManagedAudioRecorder?>()
|
||||
private var audioRecorderDisposable: Disposable?
|
||||
private var audioRecorderStatusDisposable: Disposable?
|
||||
|
||||
private var videoRecorderValue: InstantVideoController?
|
||||
private var tempVideoRecorderValue: InstantVideoController?
|
||||
private var videoRecorder = Promise<InstantVideoController?>()
|
||||
private var videoRecorderDisposable: Disposable?
|
||||
|
||||
private var visibleItems: [AnyHashable: VisibleItem] = [:]
|
||||
|
||||
private var preloadContexts: [AnyHashable: Disposable] = [:]
|
||||
@ -149,7 +161,6 @@ private final class StoryContainerScreenComponent: Component {
|
||||
|
||||
self.contentContainerView = UIView()
|
||||
self.contentContainerView.clipsToBounds = true
|
||||
self.contentContainerView.isUserInteractionEnabled = false
|
||||
|
||||
self.topContentGradientLayer = SimpleGradientLayer()
|
||||
self.bottomContentGradientLayer = SimpleGradientLayer()
|
||||
@ -188,7 +199,107 @@ private final class StoryContainerScreenComponent: Component {
|
||||
self.addSubview(self.closeButton)
|
||||
self.closeButton.addTarget(self, action: #selector(self.closePressed), for: .touchUpInside)
|
||||
|
||||
self.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
|
||||
self.contentContainerView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
|
||||
self.contentContainerView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:))))
|
||||
|
||||
self.audioRecorderDisposable = (self.audioRecorder.get()
|
||||
|> deliverOnMainQueue).start(next: { [weak self] audioRecorder in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if self.audioRecorderValue !== audioRecorder {
|
||||
self.audioRecorderValue = audioRecorder
|
||||
self.environment?.controller()?.lockOrientation = audioRecorder != nil
|
||||
|
||||
/*strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
|
||||
$0.updatedInputTextPanelState { panelState in
|
||||
let isLocked = strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId
|
||||
if let audioRecorder = audioRecorder {
|
||||
if panelState.mediaRecordingState == nil {
|
||||
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: isLocked))
|
||||
}
|
||||
} else {
|
||||
if case .waitingForPreview = panelState.mediaRecordingState {
|
||||
return panelState
|
||||
}
|
||||
return panelState.withUpdatedMediaRecordingState(nil)
|
||||
}
|
||||
return panelState
|
||||
}
|
||||
})*/
|
||||
|
||||
self.audioRecorderStatusDisposable?.dispose()
|
||||
self.audioRecorderStatusDisposable = nil
|
||||
|
||||
if let audioRecorder = audioRecorder {
|
||||
if !audioRecorder.beginWithTone {
|
||||
HapticFeedback().impact(.light)
|
||||
}
|
||||
audioRecorder.start()
|
||||
self.audioRecorderStatusDisposable = (audioRecorder.recordingState
|
||||
|> deliverOnMainQueue).start(next: { [weak self] value in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if case .stopped = value {
|
||||
self.stopMediaRecorder()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
|
||||
}
|
||||
})
|
||||
|
||||
self.videoRecorderDisposable = (self.videoRecorder.get()
|
||||
|> deliverOnMainQueue).start(next: { [weak self] videoRecorder in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
if self.videoRecorderValue !== videoRecorder {
|
||||
let previousVideoRecorderValue = self.videoRecorderValue
|
||||
self.videoRecorderValue = videoRecorder
|
||||
|
||||
if let videoRecorder = videoRecorder {
|
||||
HapticFeedback().impact(.light)
|
||||
|
||||
videoRecorder.onDismiss = { [weak self] isCancelled in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
//self?.chatDisplayNode.updateRecordedMediaDeleted(isCancelled)
|
||||
//self?.beginMediaRecordingRequestId += 1
|
||||
//self?.lockMediaRecordingRequestId = nil
|
||||
self.videoRecorder.set(.single(nil))
|
||||
}
|
||||
videoRecorder.onStop = { [weak self] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
/*if let strongSelf = self {
|
||||
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
|
||||
$0.updatedInputTextPanelState { panelState in
|
||||
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
|
||||
}
|
||||
})
|
||||
}*/
|
||||
let _ = self
|
||||
//TODO:editing
|
||||
}
|
||||
self.environment?.controller()?.present(videoRecorder, in: .window(.root))
|
||||
|
||||
/*if strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId {
|
||||
videoRecorder.lockVideo()
|
||||
}*/
|
||||
}
|
||||
|
||||
if let previousVideoRecorderValue {
|
||||
previousVideoRecorderValue.dismissVideo()
|
||||
}
|
||||
|
||||
self.state?.updated(transition: .immediate)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
required init?(coder: NSCoder) {
|
||||
@ -199,6 +310,8 @@ private final class StoryContainerScreenComponent: Component {
|
||||
self.currentSliceDisposable?.dispose()
|
||||
self.controllerNavigationDisposable.dispose()
|
||||
self.enqueueMediaMessageDisposable.dispose()
|
||||
self.audioRecorderDisposable?.dispose()
|
||||
self.audioRecorderStatusDisposable?.dispose()
|
||||
}
|
||||
|
||||
@objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
|
||||
@ -209,7 +322,7 @@ private final class StoryContainerScreenComponent: Component {
|
||||
let point = recognizer.location(in: self)
|
||||
|
||||
var nextIndex: Int
|
||||
if point.x < itemLayout.size.width * 0.5 {
|
||||
if point.x < itemLayout.size.width * 0.25 {
|
||||
nextIndex = currentIndex + 1
|
||||
} else {
|
||||
nextIndex = currentIndex - 1
|
||||
@ -237,6 +350,19 @@ private final class StoryContainerScreenComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
@objc private func panGesture(_ recognizer: UIPanGestureRecognizer) {
|
||||
switch recognizer.state {
|
||||
case .began:
|
||||
break
|
||||
case .changed:
|
||||
break
|
||||
case .cancelled, .ended:
|
||||
break
|
||||
default:
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
@objc private func closePressed() {
|
||||
guard let environment = self.environment, let controller = environment.controller() else {
|
||||
return
|
||||
@ -329,9 +455,14 @@ private final class StoryContainerScreenComponent: Component {
|
||||
)
|
||||
if let view = visibleItem.view.view {
|
||||
if view.superview == nil {
|
||||
view.isUserInteractionEnabled = false
|
||||
self.contentContainerView.addSubview(view)
|
||||
}
|
||||
itemTransition.setFrame(view: view, frame: CGRect(origin: CGPoint(), size: itemLayout.size))
|
||||
|
||||
if let view = view as? StoryContentItem.View {
|
||||
view.setIsProgressPaused(self.inputPanelExternalState.isEditing || self.attachmentController != nil || self.audioRecorderValue != nil || self.videoRecorderValue != nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -349,6 +480,16 @@ private final class StoryContainerScreenComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
private func updateIsProgressPaused() {
|
||||
for (_, visibleItem) in self.visibleItems {
|
||||
if let view = visibleItem.view.view {
|
||||
if let view = view as? StoryContentItem.View {
|
||||
view.setIsProgressPaused(self.inputPanelExternalState.isEditing || self.attachmentController?.window != nil || self.audioRecorderValue != nil || self.videoRecorderValue != nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func animateIn() {
|
||||
self.layer.allowsGroupOpacity = true
|
||||
self.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15, completion: { [weak self] _ in
|
||||
@ -389,6 +530,7 @@ private final class StoryContainerScreenComponent: Component {
|
||||
content: .text(text)
|
||||
)
|
||||
inputPanelView.clearSendMessageInput()
|
||||
self.endEditing(true)
|
||||
|
||||
if let controller = self.environment?.controller() {
|
||||
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
|
||||
@ -404,6 +546,123 @@ private final class StoryContainerScreenComponent: Component {
|
||||
}
|
||||
}
|
||||
|
||||
private func setMediaRecordingActive(isActive: Bool, isVideo: Bool, sendAction: Bool) {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
}
|
||||
guard let focusedItemId = self.focusedItemId, let focusedItem = self.currentSlice?.items.first(where: { $0.id == focusedItemId }) else {
|
||||
return
|
||||
}
|
||||
guard let targetMessageId = focusedItem.targetMessageId else {
|
||||
return
|
||||
}
|
||||
let _ = (component.context.engine.data.get(
|
||||
TelegramEngine.EngineData.Item.Messages.Message(id: targetMessageId)
|
||||
)
|
||||
|> deliverOnMainQueue).start(next: { [weak self] targetMessage in
|
||||
guard let self, let component = self.component, let environment = self.environment, let targetMessage, let peer = targetMessage.author else {
|
||||
return
|
||||
}
|
||||
|
||||
if isActive {
|
||||
if isVideo {
|
||||
if self.videoRecorderValue == nil {
|
||||
if let currentInputPanelFrame = self.inputPanel.view?.frame {
|
||||
self.videoRecorder.set(.single(legacyInstantVideoController(theme: environment.theme, panelFrame: self.convert(currentInputPanelFrame, to: nil), context: component.context, peerId: peer.id, slowmodeState: nil, hasSchedule: peer.id.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in
|
||||
if let strongSelf = self {
|
||||
guard let message = message else {
|
||||
strongSelf.videoRecorder.set(.single(nil))
|
||||
return
|
||||
}
|
||||
|
||||
let replyMessageId = targetMessageId
|
||||
let correlationId = Int64.random(in: 0 ..< Int64.max)
|
||||
let updatedMessage = message
|
||||
.withUpdatedReplyToMessageId(replyMessageId)
|
||||
.withUpdatedCorrelationId(correlationId)
|
||||
|
||||
strongSelf.videoRecorder.set(.single(nil))
|
||||
|
||||
strongSelf.sendMessages(peer: peer, messages: [updatedMessage])
|
||||
|
||||
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
|
||||
strongSelf.environment?.controller()?.present(UndoOverlayController(
|
||||
presentationData: presentationData,
|
||||
content: .succeed(text: "Message Sent"),
|
||||
elevatedLayout: false,
|
||||
animateInAsReplacement: false,
|
||||
action: { _ in return false }
|
||||
), in: .current)
|
||||
}
|
||||
}, displaySlowmodeTooltip: { [weak self] view, rect in
|
||||
//self?.interfaceInteraction?.displaySlowmodeTooltip(view, rect)
|
||||
let _ = self
|
||||
}, presentSchedulePicker: { [weak self] done in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
self.presentScheduleTimePicker(peer: peer, completion: { time in
|
||||
done(time)
|
||||
})
|
||||
})))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if self.audioRecorderValue == nil {
|
||||
self.audioRecorder.set(component.context.sharedContext.mediaManager.audioRecorder(beginWithTone: false, applicationBindings: component.context.sharedContext.applicationBindings, beganWithTone: { _ in
|
||||
}))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if let audioRecorderValue = self.audioRecorderValue {
|
||||
let _ = (audioRecorderValue.takenRecordedData()
|
||||
|> deliverOnMainQueue).start(next: { [weak self] data in
|
||||
guard let self, let component = self.component else {
|
||||
return
|
||||
}
|
||||
|
||||
self.audioRecorder.set(.single(nil))
|
||||
|
||||
guard let data else {
|
||||
return
|
||||
}
|
||||
|
||||
if data.duration < 0.5 || !sendAction {
|
||||
HapticFeedback().error()
|
||||
} else {
|
||||
let randomId = Int64.random(in: Int64.min ... Int64.max)
|
||||
|
||||
let resource = LocalFileMediaResource(fileId: randomId)
|
||||
component.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
|
||||
|
||||
let waveformBuffer: Data? = data.waveform
|
||||
|
||||
self.sendMessages(peer: peer, messages: [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(data.compressedData.count), attributes: [.Audio(isVoice: true, duration: Int(data.duration), title: nil, performer: nil, waveform: waveformBuffer)])), replyToMessageId: targetMessageId, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])])
|
||||
|
||||
HapticFeedback().tap()
|
||||
|
||||
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
|
||||
self.environment?.controller()?.present(UndoOverlayController(
|
||||
presentationData: presentationData,
|
||||
content: .succeed(text: "Message Sent"),
|
||||
elevatedLayout: false,
|
||||
animateInAsReplacement: false,
|
||||
action: { _ in return false }
|
||||
), in: .current)
|
||||
}
|
||||
})
|
||||
} else if let videoRecorderValue = self.videoRecorderValue {
|
||||
let _ = videoRecorderValue
|
||||
self.videoRecorder.set(.single(nil))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
private func stopMediaRecorder() {
|
||||
|
||||
}
|
||||
|
||||
private func performInlineAction(item: StoryActionsComponent.Item) {
|
||||
guard let component = self.component else {
|
||||
return
|
||||
@ -628,7 +887,7 @@ private final class StoryContainerScreenComponent: Component {
|
||||
}
|
||||
|
||||
let _ = combineLatest(queue: Queue.mainQueue(), buttons, dataSettings).start(next: { [weak self] buttonsAndInitialButton, dataSettings in
|
||||
guard let self, let component = self.component else {
|
||||
guard let self, let component = self.component, let environment = self.environment else {
|
||||
return
|
||||
}
|
||||
|
||||
@ -684,9 +943,10 @@ private final class StoryContainerScreenComponent: Component {
|
||||
let currentFilesController = Atomic<AttachmentFileController?>(value: nil)
|
||||
let currentLocationController = Atomic<LocationPickerController?>(value: nil)
|
||||
|
||||
let theme = environment.theme
|
||||
let attachmentController = AttachmentController(
|
||||
context: component.context,
|
||||
updatedPresentationData: nil,
|
||||
updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }),
|
||||
chatLocation: .peer(id: peer.id),
|
||||
buttons: buttons,
|
||||
initialButton: initialButton,
|
||||
@ -706,6 +966,7 @@ private final class StoryContainerScreenComponent: Component {
|
||||
return
|
||||
}
|
||||
self.attachmentController = nil
|
||||
self.updateIsProgressPaused()
|
||||
}
|
||||
attachmentController.getSourceRect = { [weak self] in
|
||||
guard let self else {
|
||||
@ -720,7 +981,7 @@ private final class StoryContainerScreenComponent: Component {
|
||||
return attachmentButtonView.convert(attachmentButtonView.bounds, to: self)
|
||||
}
|
||||
attachmentController.requestController = { [weak self, weak attachmentController] type, completion in
|
||||
guard let self else {
|
||||
guard let self, let environment = self.environment else {
|
||||
return
|
||||
}
|
||||
switch type {
|
||||
@ -764,7 +1025,8 @@ private final class StoryContainerScreenComponent: Component {
|
||||
controller.prepareForReuse()
|
||||
return
|
||||
}
|
||||
let controller = component.context.sharedContext.makeAttachmentFileController(context: component.context, updatedPresentationData: nil, bannedSendMedia: bannedSendFiles, presentGallery: { [weak self, weak attachmentController] in
|
||||
let theme = environment.theme
|
||||
let controller = component.context.sharedContext.makeAttachmentFileController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), bannedSendMedia: bannedSendFiles, presentGallery: { [weak self, weak attachmentController] in
|
||||
guard let self else {
|
||||
return
|
||||
}
|
||||
@@ -817,11 +1079,12 @@ private final class StoryContainerScreenComponent: Component {
}
let _ = (component.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: selfPeerId))
|> deliverOnMainQueue).start(next: { [weak self] selfPeer in
guard let self, let component = self.component, let selfPeer else {
guard let self, let component = self.component, let environment = self.environment, let selfPeer else {
return
}
let hasLiveLocation = peer.id.namespace != Namespaces.Peer.SecretChat && peer.id != component.context.account.peerId
let controller = LocationPickerController(context: component.context, updatedPresentationData: nil, mode: .share(peer: peer, selfPeer: selfPeer, hasLiveLocation: hasLiveLocation), completion: { [weak self] location, _ in
let theme = environment.theme
let controller = LocationPickerController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), mode: .share(peer: peer, selfPeer: selfPeer, hasLiveLocation: hasLiveLocation), completion: { [weak self] location, _ in
guard let self else {
return
}

@@ -833,7 +1096,8 @@ private final class StoryContainerScreenComponent: Component {
let _ = currentLocationController.swap(controller)
})
case .contact:
let contactsController = component.context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams(context: component.context, updatedPresentationData: nil, title: { $0.Contacts_Title }, displayDeviceContacts: true, multipleSelection: true))
let theme = environment.theme
let contactsController = component.context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), title: { $0.Contacts_Title }, displayDeviceContacts: true, multipleSelection: true))
contactsController.presentScheduleTimePicker = { [weak self] completion in
guard let self else {
return

@@ -1041,7 +1305,8 @@ private final class StoryContainerScreenComponent: Component {
fromAttachMenu = true
let params = WebAppParameters(peerId: peer.id, botId: bot.id, botName: botName, url: nil, queryId: nil, payload: payload, buttonText: nil, keepAliveSignal: nil, fromMenu: false, fromAttachMenu: fromAttachMenu, isInline: false, isSimple: false)
let replyMessageId = targetMessageId
let controller = WebAppController(context: component.context, updatedPresentationData: nil, params: params, replyToMessageId: replyMessageId, threadId: nil)
let theme = environment.theme
let controller = WebAppController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), params: params, replyToMessageId: replyMessageId, threadId: nil)
controller.openUrl = { [weak self] url in
guard let self else {
return

@@ -1080,6 +1345,7 @@ private final class StoryContainerScreenComponent: Component {
attachmentController.navigationPresentation = .flatModal
controller.push(attachmentController)
self.attachmentController = attachmentController
self.updateIsProgressPaused()
}

if inputIsActive {

@@ -1104,10 +1370,11 @@ private final class StoryContainerScreenComponent: Component {
updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void,
completion: @escaping ([Any], Bool, Int32?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void
) {
guard let component = self.component else {
guard let component = self.component, let environment = self.environment else {
return
}
let controller = MediaPickerScreen(context: component.context, updatedPresentationData: nil, peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos)
let theme = environment.theme
let controller = MediaPickerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos)
let mediaPickerContext = controller.mediaPickerContext
controller.openCamera = { [weak self] cameraView in
guard let self else {

@@ -1219,8 +1486,9 @@ private final class StoryContainerScreenComponent: Component {
legacyController.deferScreenEdgeGestures = [.top]

configureLegacyAssetPicker(controller, context: component.context, peer: peer._asPeer(), chatLocation: .peer(id: peer.id), initialCaption: inputText, hasSchedule: peer.id.namespace != Namespaces.Peer.SecretChat, presentWebSearch: editingMedia ? nil : { [weak legacyController] in
if let strongSelf = self, let component = strongSelf.component {
let controller = WebSearchController(context: component.context, updatedPresentationData: nil, peer: peer, chatLocation: .peer(id: peer.id), configuration: searchBotsConfiguration, mode: .media(attachment: false, completion: { results, selectionState, editingState, silentPosting in
if let strongSelf = self, let component = strongSelf.component, let environment = strongSelf.environment {
let theme = environment.theme
let controller = WebSearchController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, chatLocation: .peer(id: peer.id), configuration: searchBotsConfiguration, mode: .media(attachment: false, completion: { results, selectionState, editingState, silentPosting in
if let legacyController = legacyController {
legacyController.dismiss()
}

@@ -1420,6 +1688,10 @@ private final class StoryContainerScreenComponent: Component {
}
if component.context.engine.messages.enqueueOutgoingMessageWithChatContextResult(to: peer.id, threadId: nil, botId: results.botId, result: result, replyToMessageId: replyMessageId, hideVia: hideVia, silentPosting: silentPosting, scheduleTime: scheduleTime) {
}

if let attachmentController = self.attachmentController {
attachmentController.dismiss(animated: true)
}
}

sendMessage(nil)

@@ -1671,7 +1943,7 @@ private final class StoryContainerScreenComponent: Component {
TelegramEngine.EngineData.Item.Peer.Presence(id: peer.id)
)
|> deliverOnMainQueue).start(next: { [weak self] presence in
guard let self, let component = self.component else {
guard let self, let component = self.component, let environment = self.environment else {
return
}

@@ -1689,7 +1961,8 @@ private final class StoryContainerScreenComponent: Component {
} else {
mode = .scheduledMessages(sendWhenOnlineAvailable: sendWhenOnlineAvailable)
}
let controller = ChatScheduleTimeController(context: component.context, updatedPresentationData: nil, peerId: peer.id, mode: mode, style: style, currentTime: selectedTime, minimalTime: nil, dismissByTapOutside: dismissByTapOutside, completion: { time in
let theme = environment.theme
let controller = ChatScheduleTimeController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peerId: peer.id, mode: mode, style: style, currentTime: selectedTime, minimalTime: nil, dismissByTapOutside: dismissByTapOutside, completion: { time in
completion(time)
})
self.endEditing(true)

@@ -1698,10 +1971,11 @@ private final class StoryContainerScreenComponent: Component {
}

private func presentTimerPicker(peer: EnginePeer, style: ChatTimerScreenStyle = .default, selectedTime: Int32? = nil, dismissByTapOutside: Bool = true, completion: @escaping (Int32) -> Void) {
guard let component = self.component else {
guard let component = self.component, let environment = self.environment else {
return
}
let controller = ChatTimerScreen(context: component.context, updatedPresentationData: nil, style: style, currentTime: selectedTime, dismissByTapOutside: dismissByTapOutside, completion: { time in
let theme = environment.theme
let controller = ChatTimerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), style: style, currentTime: selectedTime, dismissByTapOutside: dismissByTapOutside, completion: { time in
completion(time)
})
self.endEditing(true)

@@ -1709,10 +1983,11 @@ private final class StoryContainerScreenComponent: Component {
}

private func configurePollCreation(peer: EnginePeer, targetMessageId: EngineMessage.Id, isQuiz: Bool? = nil) -> CreatePollControllerImpl? {
guard let component = self.component else {
guard let component = self.component, let environment = self.environment else {
return nil
}
return createPollController(context: component.context, updatedPresentationData: nil, peer: peer, isQuiz: isQuiz, completion: { [weak self] poll in
let theme = environment.theme
return createPollController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, isQuiz: isQuiz, completion: { [weak self] poll in
guard let self else {
return
}

@@ -1805,6 +2080,10 @@ private final class StoryContainerScreenComponent: Component {

donateSendMessageIntent(account: component.context.account, sharedContext: component.context.sharedContext, intentContext: .chat, peerIds: [peer.id])

if let attachmentController = self.attachmentController {
attachmentController.dismiss(animated: true)
}

if let controller = self.environment?.controller() {
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
controller.present(UndoOverlayController(

@@ -2013,18 +2292,35 @@ private final class StoryContainerScreenComponent: Component {
transition: transition,
component: AnyComponent(MessageInputPanelComponent(
externalState: self.inputPanelExternalState,
context: component.context,
theme: environment.theme,
strings: environment.strings,
presentController: { [weak self] c in
guard let self, let controller = self.environment?.controller() else {
return
}
controller.present(c, in: .window(.root))
},
sendMessageAction: { [weak self] in
guard let self else {
return
}
self.performSendMessageAction()
},
setMediaRecordingActive: { [weak self] isActive, isVideo, sendAction in
guard let self else {
return
}
self.setMediaRecordingActive(isActive: isActive, isVideo: isVideo, sendAction: sendAction)
},
attachmentAction: { [weak self] in
guard let self else {
return
}
self.presentAttachmentMenu(subject: .default)
}
},
audioRecorder: self.audioRecorderValue,
videoRecordingStatus: self.videoRecorderValue?.audioStatus
)),
environment: {},
containerSize: CGSize(width: availableSize.width, height: 200.0)

@@ -2182,6 +2478,7 @@ private final class StoryContainerScreenComponent: Component {
)
if let navigationStripView = self.navigationStrip.view {
if navigationStripView.superview == nil {
navigationStripView.isUserInteractionEnabled = false
self.addSubview(navigationStripView)
}
transition.setFrame(view: navigationStripView, frame: CGRect(origin: CGPoint(x: contentFrame.minX + navigationStripSideInset, y: contentFrame.minY + navigationStripTopInset), size: CGSize(width: availableSize.width - navigationStripSideInset * 2.0, height: 2.0)))

@@ -2219,7 +2516,13 @@ private final class StoryContainerScreenComponent: Component {
self.addSubview(inlineActionsView)
}
transition.setFrame(view: inlineActionsView, frame: CGRect(origin: CGPoint(x: contentFrame.maxX - 10.0 - inlineActionsSize.width, y: contentFrame.maxY - 20.0 - inlineActionsSize.height), size: inlineActionsSize))
transition.setAlpha(view: inlineActionsView, alpha: inputPanelIsOverlay ? 0.0 : 1.0)

var inlineActionsAlpha: CGFloat = inputPanelIsOverlay ? 0.0 : 1.0
if self.audioRecorderValue != nil {
inlineActionsAlpha = 0.0
}

transition.setAlpha(view: inlineActionsView, alpha: inlineActionsAlpha)
}
}
}

@@ -2269,7 +2572,7 @@ private final class StoryContainerScreenComponent: Component {
}

transition.setFrame(layer: self.contentDimLayer, frame: contentFrame)
transition.setAlpha(layer: self.contentDimLayer, alpha: inputPanelIsOverlay ? 1.0 : 0.0)
transition.setAlpha(layer: self.contentDimLayer, alpha: (inputPanelIsOverlay || self.inputPanelExternalState.isEditing) ? 1.0 : 0.0)

self.ignoreScrolling = true
transition.setFrame(view: self.scrollView, frame: CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: CGSize(width: availableSize.width, height: availableSize.height)))

@@ -12,7 +12,7 @@ public final class StoryContentItem {
}

open class View: UIView {
func setIsProgressPaused(_ isProgressPaused: Bool) {
open func setIsProgressPaused(_ isProgressPaused: Bool) {
}
}

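Note: changing `setIsProgressPaused(_:)` to `open` is what lets content views defined in other modules (such as the video-backed view later in this diff) override it. A hedged sketch of the override point; the subclass name here is made up for illustration:

    import UIKit

    // Illustrative subclass; the real override lives in StoryMessageContentComponent below.
    final class ExampleStoryContentView: StoryContentItem.View {
        private var isPaused = false

        override func setIsProgressPaused(_ isProgressPaused: Bool) {
            // Pause or resume playback and stop reporting presentation progress.
            self.isPaused = isProgressPaused
        }
    }
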
@@ -21,6 +21,7 @@ swift_library(
"//submodules/MediaPlayer:UniversalMediaPlayer",
"//submodules/TelegramUniversalVideoContent",
"//submodules/AvatarNode",
"//submodules/Components/HierarchyTrackingLayer",
],
visibility = [
"//visibility:public",

@@ -10,6 +10,7 @@ import SwiftSignalKit
import UniversalMediaPlayer
import TelegramUniversalVideoContent
import StoryContainerScreen
import HierarchyTrackingLayer

final class StoryMessageContentComponent: Component {
typealias EnvironmentType = StoryContentItem.Environment

@@ -93,16 +94,31 @@ final class StoryMessageContentComponent: Component {
private weak var state: EmptyComponentState?
private var environment: StoryContentItem.Environment?

private var currentProgressStart: Double?
private var isProgressPaused: Bool = false
private var currentProgressTimer: SwiftSignalKit.Timer?
private var currentProgressTimerValue: Double = 0.0
private var videoProgressDisposable: Disposable?

private var videoPlaybackStatus: MediaPlayerStatus?

private let hierarchyTrackingLayer: HierarchyTrackingLayer

override init(frame: CGRect) {
self.hierarchyTrackingLayer = HierarchyTrackingLayer()
self.imageNode = TransformImageNode()

super.init(frame: frame)

self.layer.addSublayer(self.hierarchyTrackingLayer)

self.addSubnode(self.imageNode)

self.hierarchyTrackingLayer.isInHierarchyUpdated = { [weak self] value in
guard let self else {
return
}
self.updateIsProgressPaused()
}
}

required init?(coder: NSCoder) {

@@ -150,7 +166,7 @@ final class StoryMessageContentComponent: Component {
}
if value {
self.videoNode?.seek(0.0)
self.videoNode?.play()
self.videoNode?.playOnceWithSound(playAndRecord: false)
}
}
videoNode.canAttachContent = true

@@ -161,7 +177,100 @@ final class StoryMessageContentComponent: Component {
}
}

func setIsProgressPaused(_ isProgressPaused: Bool) {
override func setIsProgressPaused(_ isProgressPaused: Bool) {
if self.isProgressPaused != isProgressPaused {
self.isProgressPaused = isProgressPaused
self.updateIsProgressPaused()
}
}

private func updateIsProgressPaused() {
if let videoNode = self.videoNode {
if !self.isProgressPaused && self.hierarchyTrackingLayer.isInHierarchy {
videoNode.play()
} else {
videoNode.pause()
}
}

self.updateVideoPlaybackProgress()
self.updateProgressTimer()
}

private func updateProgressTimer() {
let needsTimer = !self.isProgressPaused && self.hierarchyTrackingLayer.isInHierarchy

if needsTimer {
if self.currentProgressTimer == nil {
self.currentProgressTimer = SwiftSignalKit.Timer(
timeout: 1.0 / 60.0,
repeat: true,
completion: { [weak self] in
guard let self, !self.isProgressPaused, self.hierarchyTrackingLayer.isInHierarchy else {
return
}

if self.videoNode != nil {
self.updateVideoPlaybackProgress()
} else {
let currentProgressTimerLimit: Double = 5.0
var currentProgressTimerValue = self.currentProgressTimerValue + 1.0 / 60.0
currentProgressTimerValue = max(0.0, min(currentProgressTimerLimit, currentProgressTimerValue))
self.currentProgressTimerValue = currentProgressTimerValue

self.environment?.presentationProgressUpdated(currentProgressTimerValue / currentProgressTimerLimit)
}
}, queue: .mainQueue()
)
self.currentProgressTimer?.start()
}
} else {
if let currentProgressTimer = self.currentProgressTimer {
self.currentProgressTimer = nil
currentProgressTimer.invalidate()
}
}
}

private func updateVideoPlaybackProgress() {
var isPlaying = false
var timestampAndDuration: (timestamp: Double?, duration: Double)?
if let videoPlaybackStatus = self.videoPlaybackStatus {
switch videoPlaybackStatus.status {
case .playing:
isPlaying = true
default:
break
}
if case .buffering(true, _, _, _) = videoPlaybackStatus.status {
timestampAndDuration = (nil, videoPlaybackStatus.duration)
} else if Double(0.0).isLess(than: videoPlaybackStatus.duration) {
timestampAndDuration = (videoPlaybackStatus.timestamp, videoPlaybackStatus.duration)
}
}

var currentProgress: Double = 0.0

if let (maybeTimestamp, duration) = timestampAndDuration, let timestamp = maybeTimestamp, duration > 0.01, let videoPlaybackStatus = self.videoPlaybackStatus {
var actualTimestamp: Double
if videoPlaybackStatus.generationTimestamp.isZero || !isPlaying {
actualTimestamp = timestamp
} else {
let currentTimestamp = CACurrentMediaTime()
actualTimestamp = timestamp + (currentTimestamp - videoPlaybackStatus.generationTimestamp) * videoPlaybackStatus.baseRate
}

var progress = CGFloat(actualTimestamp / duration)
if progress.isNaN || !progress.isFinite {
progress = 0.0
}
progress = min(1.0, progress)

currentProgress = progress
}

let clippedProgress = max(0.0, min(1.0, currentProgress))
self.environment?.presentationProgressUpdated(clippedProgress)
}

func update(component: StoryMessageContentComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<StoryContentItem.Environment>, transition: Transition) -> CGSize {
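
Note: the new `updateVideoPlaybackProgress` above does not wait for a status callback every frame; it extrapolates the playback position from the last `MediaPlayerStatus` snapshot as `timestamp + (now - generationTimestamp) * baseRate` and only then normalises by the duration. A standalone sketch of that arithmetic, assuming the field names used above; the helper itself is illustrative and not part of the commit:

    import QuartzCore

    // Sketch: extrapolate a 0...1 progress value from a playback status snapshot.
    func extrapolatedProgress(timestamp: Double, generationTimestamp: Double, baseRate: Double, duration: Double, isPlaying: Bool) -> Double {
        guard duration > 0.01 else {
            return 0.0
        }
        var actualTimestamp = timestamp
        if isPlaying && generationTimestamp != 0.0 {
            // Advance by the wall-clock time elapsed since the snapshot, scaled by playback rate.
            actualTimestamp += (CACurrentMediaTime() - generationTimestamp) * baseRate
        }
        let progress = actualTimestamp / duration
        return max(0.0, min(1.0, progress.isFinite ? progress : 0.0))
    }

For example, a snapshot taken 0.5 s ago at t = 2.0 s of a 10 s clip playing at rate 1.0 yields (2.0 + 0.5) / 10.0 = 0.25.
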
@@ -286,31 +395,16 @@ final class StoryMessageContentComponent: Component {
if self.videoProgressDisposable == nil {
self.videoProgressDisposable = (videoNode.status
|> deliverOnMainQueue).start(next: { [weak self] status in
guard let self, let status, status.duration > 0.0 else {
guard let self, let status else {
return
}
let currentProgress = Double(status.timestamp / status.duration)
let clippedProgress = max(0.0, min(1.0, currentProgress))
self.environment?.presentationProgressUpdated(clippedProgress)

self.videoPlaybackStatus = status
self.updateVideoPlaybackProgress()
})
}
} else {
if self.currentProgressTimer == nil {
self.currentProgressStart = CFAbsoluteTimeGetCurrent()
self.currentProgressTimer = SwiftSignalKit.Timer(
timeout: 1.0 / 60.0,
repeat: true,
completion: { [weak self] in
guard let self, let currentProgressStart = self.currentProgressStart else {
return
}
let currentProgress = (CFAbsoluteTimeGetCurrent() - currentProgressStart) / 5.0
let clippedProgress = max(0.0, min(1.0, currentProgress))
self.environment?.presentationProgressUpdated(clippedProgress)
}, queue: .mainQueue())
self.currentProgressTimer?.start()
}
}
self.updateProgressTimer()

return availableSize
}

@@ -5,6 +5,7 @@ import ComponentFlow

public final class TextFieldComponent: Component {
public final class ExternalState {
public fileprivate(set) var isEditing: Bool = false
public fileprivate(set) var hasText: Bool = false

public init() {

@@ -14,6 +15,7 @@ public final class TextFieldComponent: Component {
public final class AnimationHint {
public enum Kind {
case textChanged
case textFocusChanged
}

public let kind: Kind

@@ -102,6 +104,14 @@ public final class TextFieldComponent: Component {
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)).withUserData(AnimationHint(kind: .textChanged)))
}

public func textViewDidBeginEditing(_ textView: UITextView) {
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)).withUserData(AnimationHint(kind: .textFocusChanged)))
}

public func textViewDidEndEditing(_ textView: UITextView) {
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)).withUserData(AnimationHint(kind: .textFocusChanged)))
}

public func scrollViewDidScroll(_ scrollView: UIScrollView) {
//print("didScroll \(scrollView.bounds)")
}

@@ -128,7 +138,6 @@ public final class TextFieldComponent: Component {

let refreshScrolling = self.textView.bounds.size != size
self.textView.frame = CGRect(origin: CGPoint(), size: size)
//transition.setFrame(view: self.textView, frame: )

if refreshScrolling {
self.textView.setContentOffset(CGPoint(x: 0.0, y: max(0.0, self.textView.contentSize.height - self.textView.bounds.height)), animated: false)

@@ -155,6 +164,7 @@ public final class TextFieldComponent: Component {
}

component.externalState.hasText = self.textStorage.length != 0
component.externalState.isEditing = self.textView.isFirstResponder

return size
}

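Note: with `isEditing` exposed on `ExternalState` and a `.textFocusChanged` animation hint emitted from `textViewDidBeginEditing`/`textViewDidEndEditing`, a parent component can animate focus changes differently from text changes. A hedged sketch of one way to consume the hint kind (retrieving the hint from the transition's user data is assumed, mirroring the `withUserData` call above):

    // Sketch: pick an animation duration based on why the text field asked for an update.
    func animationDuration(for hint: TextFieldComponent.AnimationHint?) -> Double {
        switch hint?.kind {
        case .textFocusChanged?:
            return 0.4 // match the spring used when focus changes
        default:
            return 0.2
        }
    }
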
@@ -93,6 +93,7 @@ import AvatarEditorScreen
import ChatScheduleTimeController
import ICloudResources
import LegacyCamera
import LegacyInstantVideoController

#if DEBUG
import os.signpost

@@ -1469,6 +1469,7 @@ final class ChatMessageInteractiveFileNode: ASDisplayNode {
switch fetchStatus {
case let .Fetching(_, progress):
if let size = file.size, size > 0 && size != .max {
let progress = max(0.0, min(1.0, progress))
let compactString = dataSizeString(Int(Float(size) * progress), forceDecimal: true, formatting: DataSizeStringFormatting(chatPresentationData: presentationData))
let descriptionFont = Font.with(size: floor(presentationData.fontSize.baseDisplaySize * 13.0 / 17.0), design: .regular, weight: .regular, traits: [.monospacedNumbers])
downloadingStrings = ("\(compactString) / \(dataSizeString(size, forceDecimal: true, formatting: DataSizeStringFormatting(chatPresentationData: presentationData)))", compactString, descriptionFont)

@@ -11,6 +11,7 @@ import TelegramCore
import ReactionSelectionNode
import ChatControllerInteraction
import FeaturedStickersScreen
import ChatTextInputMediaRecordingButton

private func convertAnimatingSourceRect(_ rect: CGRect, fromView: UIView, toView: UIView?) -> CGRect {
if let presentationLayer = fromView.layer.presentation() {

@@ -12,6 +12,7 @@ import ContextUI
import AnimationUI
import ManagedAnimationNode
import ChatPresentationInterfaceState
import ChatSendButtonRadialStatusNode

extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode {

@@ -10,6 +10,8 @@ import ChatPresentationInterfaceState
import ChatMessageBackground
import ChatControllerInteraction
import AccountContext
import ChatTextInputMediaRecordingButton
import ChatSendButtonRadialStatusNode

final class ChatTextInputActionButtonsNode: ASDisplayNode {
private let presentationContext: ChatPresentationContext?

@@ -36,6 +36,7 @@ import StickerPeekUI
import LottieComponent
import SolidRoundedButtonNode
import TooltipUI
import ChatTextInputMediaRecordingButton

private let accessoryButtonFont = Font.medium(14.0)
private let counterFont = Font.with(size: 14.0, design: .regular, traits: [.monospacedNumbers])

@@ -260,33 +260,47 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr
}
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
var value = stat()
/*var value = stat()
if stat(path, &value) == 0 {
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
var range: Range<Int64>?
let _ = updatedSize.modify { updatedSize in
range = updatedSize ..< value.st_size
return value.st_size
}
//print("size = \(Int(value.st_size)), range: \(range!)")
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
}
}
}), entityRenderer: entityRenderer)!
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4")
if FFMpegRemuxer.remux(path, to: remuxedTempFile.path) {
TempBox.shared.dispose(tempFile)
subscriber.putNext(.moveTempFile(file: remuxedTempFile))
} else {
TempBox.shared.dispose(remuxedTempFile)
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
var range: Range<Int64>?
let _ = updatedSize.modify { updatedSize in
range = updatedSize ..< value.st_size
return value.st_size
}
//print("finish size = \(Int(value.st_size)), range: \(range!)")
//print("size = \(Int(value.st_size)), range: \(range!)")
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
}
}
}*/
}), entityRenderer: entityRenderer)!
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4")
if let size = fileSize(result.fileURL.path), size <= 32 * 1024 * 1024, FFMpegRemuxer.remux(result.fileURL.path, to: remuxedTempFile.path) {
TempBox.shared.dispose(tempFile)
subscriber.putNext(.moveTempFile(file: remuxedTempFile))
} else {
TempBox.shared.dispose(remuxedTempFile)
if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
var range: Range<Int64>?
let _ = updatedSize.modify { updatedSize in
range = updatedSize ..< value.st_size
return value.st_size
}
//print("finish size = \(Int(value.st_size)), range: \(range!)")
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
}
}
} else {
subscriber.putError(.generic)
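
Note: the rewritten completion path above prefers handing over one finished file: if the converted result is at most 32 MiB and FFMpegRemuxer succeeds, it emits `.moveTempFile`; otherwise it falls back to the previous streaming path (`.dataPart` chunks plus a `.replaceHeader`). A small self-contained sketch of the size gate, using Foundation in place of the codebase's own `fileSize(_:)` helper:

    import Foundation

    // Sketch: 32 MiB threshold below which an in-place remux is attempted.
    let remuxSizeLimit: Int64 = 32 * 1024 * 1024

    func isSmallEnoughToRemux(path: String) -> Bool {
        guard let attributes = try? FileManager.default.attributesOfItem(atPath: path),
              let size = (attributes[.size] as? NSNumber)?.int64Value else {
            return false // unknown size: keep the streaming fallback
        }
        return size <= remuxSizeLimit
    }
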
@@ -482,6 +496,11 @@ public func fetchVideoLibraryMediaResourceHash(resource: VideoLibraryMediaResour
if isPassthrough {
updatedData.reverse()
}
#if DEBUG
if "".isEmpty {
subscriber.putNext(nil)
}
#endif
subscriber.putNext(updatedData)
} else {
subscriber.putNext(nil)

@@ -13,6 +13,7 @@ import SettingsUI
import ChatPresentationInterfaceState
import AttachmentUI
import ForumCreateTopicScreen
import LegacyInstantVideoController

public func navigateToChatControllerImpl(_ params: NavigateToChatControllerParams) {
if case let .peer(peer) = params.chatLocation, case let .channel(channel) = peer, channel.flags.contains(.isForum) {

@@ -61,7 +61,7 @@

_dimensions = CGSizeMake(width, height);

if ((_frameRate > 60) || _animation->duration() > 9.0) {
if ((_frameRate > 360) || _animation->duration() > 9.0) {
return nil;
}
}