[WIP] Stories

This commit is contained in:
Ali 2023-04-28 20:20:46 +04:00
parent cb1f40de1a
commit 7dd76ef329
27 changed files with 1062 additions and 190 deletions

View File

@ -7,6 +7,17 @@ import TelegramPresentationData
import AccountContext
import ComponentDisplayAdapters
/// Maps a container-level `Theme` selection onto a concrete `PresentationTheme`.
///
/// - Parameters:
///   - baseTheme: The ambient presentation theme; returned as-is for `.default`
///     and used as the accent-color source for `.dark`.
///   - theme: The container's requested theme mode.
/// - Returns: The resolved `PresentationTheme` to render with.
private func resolveTheme(baseTheme: PresentationTheme, theme: ViewControllerComponentContainer.Theme) -> PresentationTheme {
    switch theme {
    case let .custom(customTheme):
        // An explicit theme always wins.
        return customTheme
    case .dark:
        // Force a dark appearance, but keep the caller's accent color.
        return customizeDefaultDarkPresentationTheme(
            theme: defaultDarkPresentationTheme,
            editing: false,
            title: nil,
            accentColor: baseTheme.list.itemAccentColor,
            backgroundColors: [],
            bubbleColors: [],
            animateBubbleColors: false,
            wallpaper: nil,
            baseColor: nil
        )
    case .default:
        return baseTheme
    }
}
open class ViewControllerComponentContainer: ViewController {
public enum NavigationBarAppearance {
case none
@ -25,6 +36,12 @@ open class ViewControllerComponentContainer: ViewController {
case modal
}
public enum Theme {
case `default`
case dark
case custom(PresentationTheme)
}
public final class Environment: Equatable {
public let statusBarHeight: CGFloat
public let navigationHeight: CGFloat
@ -121,19 +138,21 @@ open class ViewControllerComponentContainer: ViewController {
private weak var controller: ViewControllerComponentContainer?
private var component: AnyComponent<ViewControllerComponentContainer.Environment>
var theme: PresentationTheme?
let theme: Theme
var resolvedTheme: PresentationTheme
public let hostView: ComponentHostView<ViewControllerComponentContainer.Environment>
private var currentIsVisible: Bool = false
private var currentLayout: (layout: ContainerViewLayout, navigationHeight: CGFloat)?
init(context: AccountContext, controller: ViewControllerComponentContainer, component: AnyComponent<ViewControllerComponentContainer.Environment>, theme: PresentationTheme?) {
init(context: AccountContext, controller: ViewControllerComponentContainer, component: AnyComponent<ViewControllerComponentContainer.Environment>, theme: Theme) {
self.presentationData = context.sharedContext.currentPresentationData.with { $0 }
self.controller = controller
self.component = component
self.theme = theme
self.resolvedTheme = resolveTheme(baseTheme: self.presentationData.theme, theme: theme)
self.hostView = ComponentHostView()
super.init()
@ -152,7 +171,7 @@ open class ViewControllerComponentContainer: ViewController {
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
isVisible: self.currentIsVisible,
theme: self.theme ?? self.presentationData.theme,
theme: self.resolvedTheme,
strings: self.presentationData.strings,
dateTimeFormat: self.presentationData.dateTimeFormat,
controller: { [weak self] in
@ -197,13 +216,13 @@ open class ViewControllerComponentContainer: ViewController {
}
private let context: AccountContext
private var theme: PresentationTheme?
private var theme: Theme
private let component: AnyComponent<ViewControllerComponentContainer.Environment>
private var presentationDataDisposable: Disposable?
public private(set) var validLayout: ContainerViewLayout?
public init<C: Component>(context: AccountContext, component: C, navigationBarAppearance: NavigationBarAppearance, statusBarStyle: StatusBarStyle = .default, presentationMode: PresentationMode = .default, theme: PresentationTheme? = nil) where C.EnvironmentType == ViewControllerComponentContainer.Environment {
public init<C: Component>(context: AccountContext, component: C, navigationBarAppearance: NavigationBarAppearance, statusBarStyle: StatusBarStyle = .default, presentationMode: PresentationMode = .default, theme: Theme = .default) where C.EnvironmentType == ViewControllerComponentContainer.Environment {
self.context = context
self.component = AnyComponent(component)
self.theme = theme
@ -230,6 +249,7 @@ open class ViewControllerComponentContainer: ViewController {
}
strongSelf.node.presentationData = presentationData.withUpdated(theme: theme)
strongSelf.node.resolvedTheme = resolveTheme(baseTheme: presentationData.theme, theme: strongSelf.theme)
switch statusBarStyle {
case .none:

View File

@ -2,6 +2,7 @@
#import <CommonCrypto/CommonDigest.h>
#import <sys/stat.h>
#import <VideoToolbox/VideoToolbox.h>
#import "GPUImageContext.h"
@ -1319,13 +1320,7 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
AVVideoPixelAspectRatioVerticalSpacingKey: @3
};
NSDictionary *codecSettings = @
{
AVVideoAverageBitRateKey: @([self _videoBitrateKbpsForPreset:preset] * 1000),
AVVideoCleanApertureKey: videoCleanApertureSettings,
AVVideoPixelAspectRatioKey: videoAspectRatioSettings,
AVVideoExpectedSourceFrameRateKey: @(frameRate)
};
NSInteger videoBitrate = [self _videoBitrateKbpsForPreset:preset] * 1000;
NSDictionary *hdVideoProperties = @
{
@ -1334,23 +1329,59 @@ static CGFloat progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer,
AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2,
};
#if TARGET_IPHONE_SIMULATOR
return @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height)
};
bool useH265 = false;
#if DEBUG
//videoBitrate = 800 * 1000;
useH265 = false;
#endif
return @
{
AVVideoCodecKey: AVVideoCodecH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height),
AVVideoColorPropertiesKey: hdVideoProperties
};
if (useH265) {
NSDictionary *codecSettings = @
{
AVVideoAverageBitRateKey: @(videoBitrate),
AVVideoCleanApertureKey: videoCleanApertureSettings,
AVVideoPixelAspectRatioKey: videoAspectRatioSettings,
AVVideoExpectedSourceFrameRateKey: @(frameRate),
AVVideoProfileLevelKey: (__bridge NSString *)kVTProfileLevel_HEVC_Main_AutoLevel
};
return @
{
AVVideoCodecKey: AVVideoCodecTypeHEVC,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height),
AVVideoColorPropertiesKey: hdVideoProperties
};
} else {
NSDictionary *codecSettings = @
{
AVVideoAverageBitRateKey: @(videoBitrate),
AVVideoCleanApertureKey: videoCleanApertureSettings,
AVVideoPixelAspectRatioKey: videoAspectRatioSettings,
AVVideoExpectedSourceFrameRateKey: @(frameRate),
AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
};
#if TARGET_IPHONE_SIMULATOR
return @
{
AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height)
};
#endif
return @
{
AVVideoCodecKey: AVVideoCodecTypeH264,
AVVideoCompressionPropertiesKey: codecSettings,
AVVideoWidthKey: @((NSInteger)dimensions.width),
AVVideoHeightKey: @((NSInteger)dimensions.height),
AVVideoColorPropertiesKey: hdVideoProperties
};
}
}
+ (NSInteger)_videoBitrateKbpsForPreset:(TGMediaVideoConversionPreset)preset

View File

@ -448,7 +448,7 @@ public final class CreateExternalMediaStreamScreen: ViewControllerComponentConta
self.peerId = peerId
self.mode = mode
super.init(context: context, component: CreateExternalMediaStreamScreenComponent(context: context, peerId: peerId, mode: mode, credentialsPromise: credentialsPromise), navigationBarAppearance: .transparent, theme: defaultDarkPresentationTheme)
super.init(context: context, component: CreateExternalMediaStreamScreenComponent(context: context, peerId: peerId, mode: mode, credentialsPromise: credentialsPromise), navigationBarAppearance: .transparent, theme: .dark)
self.navigationPresentation = .modal

View File

@ -94,7 +94,7 @@ private final class MultiplexedRequestManagerContext {
private var nextId: Int32 = 0
private var targetContexts: [MultiplexedRequestTargetKey: [RequestTargetContext]] = [:]
private var emptyTargetTimers: [MultiplexedRequestTargetTimerKey: SignalKitTimer] = [:]
private var emptyTargetDisposables: [MultiplexedRequestTargetTimerKey: Disposable] = [:]
init(queue: Queue, takeWorker: @escaping (MultiplexedRequestTarget, MediaResourceFetchTag?, Bool) -> Download?) {
self.queue = queue
@ -109,8 +109,8 @@ private final class MultiplexedRequestManagerContext {
}
}
}
for timer in emptyTargetTimers.values {
timer.invalidate()
for disposable in emptyTargetDisposables.values {
disposable.dispose()
}
}
@ -243,12 +243,17 @@ private final class MultiplexedRequestManagerContext {
for context in contexts {
let key = MultiplexedRequestTargetTimerKey(key: targetKey, id: context.id)
if context.requests.isEmpty {
if self.emptyTargetTimers[key] == nil {
let timer = SignalKitTimer(timeout: 2.0, repeat: false, completion: { [weak self] in
if self.emptyTargetDisposables[key] == nil {
let disposable = MetaDisposable()
self.emptyTargetDisposables[key] = disposable
disposable.set((Signal<Never, NoError>.complete()
|> delay(20 * 60, queue: self.queue)
|> deliverOn(self.queue)).start(completed: { [weak self] in
guard let strongSelf = self else {
return
}
strongSelf.emptyTargetTimers.removeValue(forKey: key)
strongSelf.emptyTargetDisposables.removeValue(forKey: key)
if strongSelf.targetContexts[targetKey] != nil {
for i in 0 ..< strongSelf.targetContexts[targetKey]!.count {
if strongSelf.targetContexts[targetKey]![i].id == key.id {
@ -257,14 +262,12 @@ private final class MultiplexedRequestManagerContext {
}
}
}
}, queue: self.queue)
self.emptyTargetTimers[key] = timer
timer.start()
}))
}
} else {
if let timer = self.emptyTargetTimers[key] {
timer.invalidate()
self.emptyTargetTimers.removeValue(forKey: key)
if let disposable = self.emptyTargetDisposables[key] {
disposable.dispose()
self.emptyTargetDisposables.removeValue(forKey: key)
}
}
}

View File

@ -287,6 +287,12 @@ private func maybePredownloadedFileResource(postbox: Postbox, auxiliaryMethods:
return .single(.none)
}
#if DEBUG
if "".isEmpty {
return .single(.none)
}
#endif
return auxiliaryMethods.fetchResourceMediaReferenceHash(resource)
|> mapToSignal { hash -> Signal<PredownloadedResource, NoError> in
if let hash = hash {

View File

@ -363,6 +363,9 @@ swift_library(
"//submodules/TelegramUI/Components/ChatScheduleTimeController",
"//submodules/ICloudResources",
"//submodules/TelegramUI/Components/LegacyCamera",
"//submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton",
"//submodules/TelegramUI/Components/ChatSendButtonRadialStatusNode",
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
] + select({
"@build_bazel_rules_apple//apple:ios_armv7": [],
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,

View File

@ -0,0 +1,22 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
# Standalone module for the radial slowmode-countdown indicator drawn over the
# chat send button (extracted so other targets can depend on it directly).
swift_library(
name = "ChatSendButtonRadialStatusNode",
module_name = "ChatSendButtonRadialStatusNode",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
# Keep the module warning-clean.
"-warnings-as-errors",
],
deps = [
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/LegacyComponents",
"//submodules/ChatPresentationInterfaceState",
],
visibility = [
# Consumable from any package in the workspace.
"//visibility:public",
],
)

View File

@ -16,7 +16,7 @@ private final class ChatSendButtonRadialStatusNodeParameters: NSObject {
}
}
final class ChatSendButtonRadialStatusNode: ASDisplayNode {
public final class ChatSendButtonRadialStatusNode: ASDisplayNode {
private let color: UIColor
private var effectiveProgress: CGFloat = 0.0 {
@ -25,7 +25,7 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
}
}
var slowmodeState: ChatSlowmodeState? = nil {
public var slowmodeState: ChatSlowmodeState? = nil {
didSet {
if self.slowmodeState != oldValue {
self.updateProgress()
@ -35,7 +35,7 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
private var updateTimer: SwiftSignalKit.Timer?
init(color: UIColor) {
public init(color: UIColor) {
self.color = color
super.init()
@ -48,11 +48,11 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
self.updateTimer?.invalidate()
}
override func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
return ChatSendButtonRadialStatusNodeParameters(color: self.color, progress: self.effectiveProgress)
}
@objc override class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
let context = UIGraphicsGetCurrentContext()!
if !isRasterizing {
@ -107,7 +107,7 @@ final class ChatSendButtonRadialStatusNode: ASDisplayNode {
}
}
final class ChatSendButtonRadialStatusView: UIView {
public final class ChatSendButtonRadialStatusView: UIView {
private let color: UIColor
private var effectiveProgress: CGFloat = 0.0 {
@ -116,7 +116,7 @@ final class ChatSendButtonRadialStatusView: UIView {
}
}
var slowmodeState: ChatSlowmodeState? = nil {
public var slowmodeState: ChatSlowmodeState? = nil {
didSet {
if self.slowmodeState != oldValue {
self.updateProgress()
@ -126,7 +126,7 @@ final class ChatSendButtonRadialStatusView: UIView {
private var updateTimer: SwiftSignalKit.Timer?
init(color: UIColor) {
public init(color: UIColor) {
self.color = color
super.init(frame: CGRect())
@ -135,7 +135,7 @@ final class ChatSendButtonRadialStatusView: UIView {
self.isOpaque = false
}
required init?(coder aDecoder: NSCoder) {
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@ -143,7 +143,7 @@ final class ChatSendButtonRadialStatusView: UIView {
self.updateTimer?.invalidate()
}
override func draw(_ rect: CGRect) {
override public func draw(_ rect: CGRect) {
if rect.isEmpty {
return
}

View File

@ -0,0 +1,31 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
# Standalone module for the chat input mic/video-record button (extracted from
# the monolithic TelegramUI target so other components can reuse it).
swift_library(
name = "ChatTextInputMediaRecordingButton",
module_name = "ChatTextInputMediaRecordingButton",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
# Keep the module warning-clean.
"-warnings-as-errors",
],
deps = [
"//submodules/Display",
"//submodules/AsyncDisplayKit",
"//submodules/TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/TelegramPresentationData",
"//submodules/LegacyComponents",
"//submodules/AccountContext",
"//submodules/ChatInterfaceState",
"//submodules/AudioBlob",
"//submodules/ChatPresentationInterfaceState",
"//submodules/ComponentFlow",
"//submodules/Components/LottieAnimationComponent",
"//submodules/TelegramUI/Components/LottieComponent",
# Needed for InstantVideoControllerRecordingStatus used by the button.
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
],
visibility = [
# Consumable from any package in the workspace.
"//visibility:public",
],
)

View File

@ -13,7 +13,7 @@ import ChatPresentationInterfaceState
import ComponentFlow
import LottieAnimationComponent
import LottieComponent
import AccountContext
import LegacyInstantVideoController
private let offsetThreshold: CGFloat = 10.0
private let dismissOffsetThreshold: CGFloat = 70.0
@ -175,22 +175,22 @@ private final class ChatTextInputMediaRecordingButtonPresenter : NSObject, TGMod
}
}
final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButton, TGModernConversationInputMicButtonDelegate {
public final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButton, TGModernConversationInputMicButtonDelegate {
private let context: AccountContext
private var theme: PresentationTheme
private let strings: PresentationStrings
var mode: ChatTextInputMediaRecordingButtonMode = .audio
var statusBarHost: StatusBarHost?
let presentController: (ViewController) -> Void
var recordingDisabled: () -> Void = { }
var beginRecording: () -> Void = { }
var endRecording: (Bool) -> Void = { _ in }
var stopRecording: () -> Void = { }
var offsetRecordingControls: () -> Void = { }
var switchMode: () -> Void = { }
var updateLocked: (Bool) -> Void = { _ in }
var updateCancelTranslation: () -> Void = { }
public var mode: ChatTextInputMediaRecordingButtonMode = .audio
public var statusBarHost: StatusBarHost?
public let presentController: (ViewController) -> Void
public var recordingDisabled: () -> Void = { }
public var beginRecording: () -> Void = { }
public var endRecording: (Bool) -> Void = { _ in }
public var stopRecording: () -> Void = { }
public var offsetRecordingControls: () -> Void = { }
public var switchMode: () -> Void = { }
public var updateLocked: (Bool) -> Void = { _ in }
public var updateCancelTranslation: () -> Void = { }
private var modeTimeoutTimer: SwiftSignalKit.Timer?
@ -199,13 +199,13 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
private var recordingOverlay: ChatTextInputAudioRecordingOverlay?
private var startTouchLocation: CGPoint?
fileprivate var controlsOffset: CGFloat = 0.0
private(set) var cancelTranslation: CGFloat = 0.0
public private(set) var cancelTranslation: CGFloat = 0.0
private var micLevelDisposable: MetaDisposable?
private weak var currentPresenter: UIView?
var contentContainer: (UIView, CGRect)? {
public var contentContainer: (UIView, CGRect)? {
if let _ = self.currentPresenter {
return (self.micDecoration, self.micDecoration.bounds)
} else {
@ -213,7 +213,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
var audioRecorder: ManagedAudioRecorder? {
public var audioRecorder: ManagedAudioRecorder? {
didSet {
if self.audioRecorder !== oldValue {
if self.micLevelDisposable == nil {
@ -235,7 +235,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
var videoRecordingStatus: InstantVideoControllerRecordingStatus? {
public var videoRecordingStatus: InstantVideoControllerRecordingStatus? {
didSet {
if self.videoRecordingStatus !== oldValue {
if self.micLevelDisposable == nil {
@ -300,7 +300,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
init(context: AccountContext, theme: PresentationTheme, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) {
public init(context: AccountContext, theme: PresentationTheme, strings: PresentationStrings, presentController: @escaping (ViewController) -> Void) {
self.context = context
self.theme = theme
self.strings = strings
@ -323,7 +323,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
self.centerOffset = CGPoint(x: 0.0, y: -1.0 + UIScreenPixel)
}
required init?(coder aDecoder: NSCoder) {
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
@ -336,7 +336,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
func updateMode(mode: ChatTextInputMediaRecordingButtonMode, animated: Bool) {
public func updateMode(mode: ChatTextInputMediaRecordingButtonMode, animated: Bool) {
self.updateMode(mode: mode, animated: animated, force: false)
}
@ -401,7 +401,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
func updateTheme(theme: PresentationTheme) {
public func updateTheme(theme: PresentationTheme) {
self.theme = theme
self.updateAnimation(previousMode: self.mode)
@ -411,12 +411,12 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
(self.micLockValue as? LockView)?.updateTheme(theme)
}
func cancelRecording() {
public func cancelRecording() {
self.isEnabled = false
self.isEnabled = true
}
func micButtonInteractionBegan() {
public func micButtonInteractionBegan() {
if self.fadeDisabled {
self.recordingDisabled()
} else {
@ -433,13 +433,13 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
func micButtonInteractionCancelled(_ velocity: CGPoint) {
public func micButtonInteractionCancelled(_ velocity: CGPoint) {
//print("\(CFAbsoluteTimeGetCurrent()) cancelled")
self.modeTimeoutTimer?.invalidate()
self.endRecording(false)
}
func micButtonInteractionCompleted(_ velocity: CGPoint) {
public func micButtonInteractionCompleted(_ velocity: CGPoint) {
//print("\(CFAbsoluteTimeGetCurrent()) completed")
if let modeTimeoutTimer = self.modeTimeoutTimer {
//print("\(CFAbsoluteTimeGetCurrent()) switch")
@ -450,43 +450,43 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
self.endRecording(true)
}
func micButtonInteractionUpdate(_ offset: CGPoint) {
public func micButtonInteractionUpdate(_ offset: CGPoint) {
self.controlsOffset = offset.x
self.offsetRecordingControls()
}
func micButtonInteractionUpdateCancelTranslation(_ translation: CGFloat) {
public func micButtonInteractionUpdateCancelTranslation(_ translation: CGFloat) {
self.cancelTranslation = translation
self.updateCancelTranslation()
}
func micButtonInteractionLocked() {
public func micButtonInteractionLocked() {
self.updateLocked(true)
}
func micButtonInteractionRequestedLockedAction() {
public func micButtonInteractionRequestedLockedAction() {
}
func micButtonInteractionStopped() {
public func micButtonInteractionStopped() {
self.stopRecording()
}
func micButtonShouldLock() -> Bool {
public func micButtonShouldLock() -> Bool {
return true
}
func micButtonPresenter() -> TGModernConversationInputMicButtonPresentation! {
public func micButtonPresenter() -> TGModernConversationInputMicButtonPresentation! {
let presenter = ChatTextInputMediaRecordingButtonPresenter(statusBarHost: self.statusBarHost, presentController: self.presentController)
presenter.button = self
self.currentPresenter = presenter.view()
return presenter
}
func micButtonDecoration() -> (UIView & TGModernConversationInputMicButtonDecoration)! {
public func micButtonDecoration() -> (UIView & TGModernConversationInputMicButtonDecoration)! {
return micDecoration
}
func micButtonLock() -> (UIView & TGModernConversationInputMicButtonLock)! {
public func micButtonLock() -> (UIView & TGModernConversationInputMicButtonLock)! {
return micLock
}
@ -494,7 +494,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
micButtonInteractionStopped()
}
override func animateIn() {
override public func animateIn() {
super.animateIn()
if self.context.sharedContext.energyUsageSettings.fullTranslucency {
@ -509,7 +509,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
}
override func animateOut(_ toSmallSize: Bool) {
override public func animateOut(_ toSmallSize: Bool) {
super.animateOut(toSmallSize)
micDecoration.stopAnimating()
@ -527,7 +527,7 @@ final class ChatTextInputMediaRecordingButton: TGModernConversationInputMicButto
}
private var previousSize = CGSize()
func layoutItems() {
public func layoutItems() {
let size = self.bounds.size
if size != self.previousSize {
self.previousSize = size

View File

@ -0,0 +1,33 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
# Standalone module wrapping the legacy (TGVideoMessageCaptureController-based)
# instant round-video recording UI (extracted so other targets can depend on it).
swift_library(
name = "LegacyInstantVideoController",
module_name = "LegacyInstantVideoController",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
# Keep the module warning-clean.
"-warnings-as-errors",
],
deps = [
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/TelegramCore",
"//submodules/Postbox",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/TelegramPresentationData",
"//submodules/MediaResources",
"//submodules/LegacyComponents",
"//submodules/AccountContext",
"//submodules/LegacyUI",
"//submodules/ImageCompression",
"//submodules/LocalMediaResources",
"//submodules/AppBundle",
"//submodules/LegacyMediaPickerUI",
"//submodules/ChatPresentationInterfaceState",
# Slowmode radial indicator shown on the legacy send button.
"//submodules/TelegramUI/Components/ChatSendButtonRadialStatusNode",
],
visibility = [
# Consumable from any package in the workspace.
"//visibility:public",
],
)

View File

@ -15,21 +15,22 @@ import LocalMediaResources
import AppBundle
import LegacyMediaPickerUI
import ChatPresentationInterfaceState
import ChatSendButtonRadialStatusNode
final class InstantVideoController: LegacyController, StandalonePresentableController {
public final class InstantVideoController: LegacyController, StandalonePresentableController {
private var captureController: TGVideoMessageCaptureController?
var onDismiss: ((Bool) -> Void)?
var onStop: (() -> Void)?
public var onDismiss: ((Bool) -> Void)?
public var onStop: (() -> Void)?
private let micLevelValue = ValuePromise<Float>(0.0)
private let durationValue = ValuePromise<TimeInterval>(0.0)
let audioStatus: InstantVideoControllerRecordingStatus
public let audioStatus: InstantVideoControllerRecordingStatus
private var completed = false
private var dismissed = false
override init(presentation: LegacyControllerPresentation, theme: PresentationTheme?, strings: PresentationStrings? = nil, initialLayout: ContainerViewLayout? = nil) {
override public init(presentation: LegacyControllerPresentation, theme: PresentationTheme?, strings: PresentationStrings? = nil, initialLayout: ContainerViewLayout? = nil) {
self.audioStatus = InstantVideoControllerRecordingStatus(micLevel: self.micLevelValue.get(), duration: self.durationValue.get())
super.init(presentation: presentation, theme: theme, initialLayout: initialLayout)
@ -41,7 +42,7 @@ final class InstantVideoController: LegacyController, StandalonePresentableContr
fatalError("init(coder:) has not been implemented")
}
func bindCaptureController(_ captureController: TGVideoMessageCaptureController?) {
public func bindCaptureController(_ captureController: TGVideoMessageCaptureController?) {
self.captureController = captureController
if let captureController = captureController {
captureController.view.disablesInteractiveKeyboardGestureRecognizer = true
@ -66,61 +67,61 @@ final class InstantVideoController: LegacyController, StandalonePresentableContr
}
}
func dismissVideo() {
public func dismissVideo() {
if let captureController = self.captureController, !self.dismissed {
self.dismissed = true
captureController.dismiss(true)
}
}
func extractVideoSnapshot() -> UIView? {
public func extractVideoSnapshot() -> UIView? {
self.captureController?.extractVideoContent()
}
func hideVideoSnapshot() {
public func hideVideoSnapshot() {
self.captureController?.hideVideoContent()
}
func completeVideo() {
public func completeVideo() {
if let captureController = self.captureController, !self.completed {
self.completed = true
captureController.complete()
}
}
func dismissAnimated() {
public func dismissAnimated() {
if let captureController = self.captureController, !self.dismissed {
self.dismissed = true
captureController.dismiss(false)
}
}
func stopVideo() -> Bool {
public func stopVideo() -> Bool {
if let captureController = self.captureController {
return captureController.stop()
}
return false
}
func lockVideo() {
public func lockVideo() {
if let captureController = self.captureController {
return captureController.setLocked()
}
}
func updateRecordButtonInteraction(_ value: CGFloat) {
public func updateRecordButtonInteraction(_ value: CGFloat) {
if let captureController = self.captureController {
captureController.buttonInteractionUpdate(CGPoint(x: value, y: 0.0))
}
}
}
func legacyInputMicPalette(from theme: PresentationTheme) -> TGModernConversationInputMicPallete {
public func legacyInputMicPalette(from theme: PresentationTheme) -> TGModernConversationInputMicPallete {
let inputPanelTheme = theme.chat.inputPanel
return TGModernConversationInputMicPallete(dark: theme.overallDarkAppearance, buttonColor: inputPanelTheme.actionControlFillColor, iconColor: inputPanelTheme.actionControlForegroundColor, backgroundColor: theme.rootController.navigationBar.opaqueBackgroundColor, borderColor: inputPanelTheme.panelSeparatorColor, lock: inputPanelTheme.panelControlAccentColor, textColor: inputPanelTheme.primaryTextColor, secondaryTextColor: inputPanelTheme.secondaryTextColor, recording: inputPanelTheme.mediaRecordingDotColor)
}
func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (UIView, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController {
public func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (UIView, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController {
let isSecretChat = peerId.namespace == Namespaces.Peer.SecretChat
let legacyController = InstantVideoController(presentation: .custom, theme: theme)

View File

@ -15,6 +15,10 @@ swift_library(
"//submodules/AppBundle",
"//submodules/TelegramUI/Components/TextFieldComponent",
"//submodules/Components/BundleIconComponent",
"//submodules/TelegramUI/Components/ChatTextInputMediaRecordingButton",
"//submodules/AccountContext",
"//submodules/TelegramPresentationData",
"//submodules/SSignalKit/SwiftSignalKit",
],
visibility = [
"//visibility:public",

View File

@ -0,0 +1,204 @@
import Foundation
import UIKit
import Display
import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
import SwiftSignalKit
public final class MediaRecordingPanelComponent: Component {
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public let cancelFraction: CGFloat
/// Creates the recording-panel component state.
///
/// - Parameters:
///   - audioRecorder: Active audio recorder whose state drives the timer text,
///     or `nil` when no audio recording is in progress.
///   - videoRecordingStatus: Active instant-video recording status used for the
///     timer when recording video instead of audio, or `nil`.
///   - cancelFraction: Progress (driven by the drag gesture) toward cancelling
///     the recording.
public init(
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?,
cancelFraction: CGFloat
) {
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
self.cancelFraction = cancelFraction
}
/// Component equality: the recorder and status are reference types compared by
/// identity; the cancel fraction is compared by value.
public static func ==(lhs: MediaRecordingPanelComponent, rhs: MediaRecordingPanelComponent) -> Bool {
    return lhs.audioRecorder === rhs.audioRecorder
        && lhs.videoRecordingStatus === rhs.videoRecordingStatus
        && lhs.cancelFraction == rhs.cancelFraction
}
/// The live view for `MediaRecordingPanelComponent`: a red recording
/// indicator, a running duration timer, and a "Slide to cancel" label that
/// rubber-bands as the user drags.
public final class View: UIView {
    private var component: MediaRecordingPanelComponent?
    private weak var state: EmptyComponentState?

    /// Red dot shown at the left edge while recording.
    private let indicatorView: UIImageView
    /// Arrow rendered to the left of the "Slide to cancel" label.
    private let cancelIconView: UIImageView
    private let cancelText = ComponentView<Empty>()
    private let timerText = ComponentView<Empty>()

    /// Subscription to the active recorder's duration stream; replaced
    /// whenever the recorder instance in the component changes.
    private var timerTextDisposable: Disposable?
    private var timerTextValue: String = "0:00,00"

    override init(frame: CGRect) {
        self.indicatorView = UIImageView()
        self.cancelIconView = UIImageView()

        super.init(frame: frame)

        self.addSubview(self.indicatorView)
        self.addSubview(self.cancelIconView)
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        self.timerTextDisposable?.dispose()
    }

    /// Formats a recording duration (in seconds) as "M:SS,cc", switching to
    /// "H:MM:SS,cc" once the recording passes one hour. The trailing pair is
    /// centiseconds. Previously this logic was duplicated in the audio and
    /// video subscription branches; it is shared here.
    private static func timerString(forDuration duration: Double) -> String {
        let seconds = Int(duration)
        let centiseconds = Int(duration * 100.0) % 100
        if seconds >= 60 * 60 {
            return String(format: "%d:%02d:%02d,%02d", seconds / 3600, seconds / 60 % 60, seconds % 60, centiseconds)
        } else {
            return String(format: "%d:%02d,%02d", seconds / 60, seconds % 60, centiseconds)
        }
    }

    /// Stores the latest timer text and, when `updateState` is true, requests
    /// a re-render. Mirrors the original inline logic exactly: the state
    /// update fires on every emission once the subscription is live, even if
    /// the text did not change.
    private func applyTimerText(_ text: String, updateState: Bool) {
        if self.timerTextValue != text {
            self.timerTextValue = text
        }
        if updateState {
            self.state?.updated(transition: .immediate)
        }
    }

    func update(component: MediaRecordingPanelComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
        let previousComponent = self.component
        self.component = component
        self.state = state

        // Re-subscribe to the duration stream whenever the recorder instance
        // changes (identity comparison — a new recording means new objects).
        if previousComponent?.audioRecorder !== component.audioRecorder || previousComponent?.videoRecordingStatus !== component.videoRecordingStatus {
            self.timerTextDisposable?.dispose()
            self.timerTextDisposable = nil

            if let audioRecorder = component.audioRecorder {
                // `updateNow` is flipped to true only after `start` returns, so
                // any synchronous first emission does not trigger a re-render
                // from inside this layout pass. The callback hops to the main
                // queue asynchronously, by which time the flag is set.
                var updateNow = false
                self.timerTextDisposable = audioRecorder.recordingState.start(next: { [weak self] state in
                    Queue.mainQueue().async {
                        guard let self else {
                            return
                        }
                        switch state {
                        case let .paused(duration), let .recording(duration, _):
                            self.applyTimerText(Self.timerString(forDuration: duration), updateState: updateNow)
                        case .stopped:
                            break
                        }
                    }
                })
                updateNow = true
            } else if let videoRecordingStatus = component.videoRecordingStatus {
                var updateNow = false
                self.timerTextDisposable = videoRecordingStatus.duration.start(next: { [weak self] duration in
                    Queue.mainQueue().async {
                        guard let self else {
                            return
                        }
                        self.applyTimerText(Self.timerString(forDuration: duration), updateState: updateNow)
                    }
                })
                updateNow = true
            }
        }

        // Lazily generate the 10pt recording dot (system red, 0xFF3B30).
        if self.indicatorView.image == nil {
            self.indicatorView.image = generateFilledCircleImage(diameter: 10.0, color: UIColor(rgb: 0xFF3B30))
        }
        if let image = self.indicatorView.image {
            transition.setFrame(view: self.indicatorView, frame: CGRect(origin: CGPoint(x: 10.0, y: floor((availableSize.height - image.size.height) * 0.5)), size: image.size))
        }

        let timerTextSize = self.timerText.update(
            transition: .immediate,
            component: AnyComponent(Text(text: self.timerTextValue, font: Font.regular(15.0), color: .white)),
            environment: {},
            containerSize: CGSize(width: 100.0, height: 100.0)
        )
        if let timerTextView = self.timerText.view {
            if timerTextView.superview == nil {
                self.addSubview(timerTextView)
                // Anchor at top-left so the timer stays pinned while its
                // width grows as digits are added.
                timerTextView.layer.anchorPoint = CGPoint()
            }
            let timerTextFrame = CGRect(origin: CGPoint(x: 28.0, y: floor((availableSize.height - timerTextSize.height) * 0.5)), size: timerTextSize)
            transition.setPosition(view: timerTextView, position: timerTextFrame.origin)
            timerTextView.bounds = CGRect(origin: CGPoint(), size: timerTextFrame.size)
        }

        if self.cancelIconView.image == nil {
            self.cancelIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AudioRecordingCancelArrow")?.withRenderingMode(.alwaysTemplate)
        }
        self.cancelIconView.tintColor = UIColor(white: 1.0, alpha: 0.3)

        // NOTE(review): "Slide to cancel" is hard-coded English — presumably
        // this should come from PresentationStrings once the component is
        // given access to them; confirm before shipping.
        let cancelTextSize = self.cancelText.update(
            transition: .immediate,
            component: AnyComponent(Text(text: "Slide to cancel", font: Font.regular(15.0), color: UIColor(white: 1.0, alpha: 0.3))),
            environment: {},
            containerSize: CGSize(width: max(30.0, availableSize.width - 100.0), height: 44.0)
        )
        var textFrame = CGRect(origin: CGPoint(x: floor((availableSize.width - cancelTextSize.width) * 0.5), y: floor((availableSize.height - cancelTextSize.height) * 0.5)), size: cancelTextSize)

        // Rubber-band the label against the drag distance: the hyperbolic map
        // 1 - 1/(x * c / range + 1) approaches 1 asymptotically, so the label
        // never moves more than `range` points no matter how far the drag goes.
        let bandingStart: CGFloat = 0.0
        let bandedOffset = abs(component.cancelFraction) - bandingStart
        let range: CGFloat = 300.0
        let coefficient: CGFloat = 0.4
        let mappedCancelFraction = bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
        textFrame.origin.x -= mappedCancelFraction * 0.5

        if let cancelTextView = self.cancelText.view {
            if cancelTextView.superview == nil {
                self.addSubview(cancelTextView)
            }
            transition.setFrame(view: cancelTextView, frame: textFrame)
        }
        if let image = self.cancelIconView.image {
            transition.setFrame(view: self.cancelIconView, frame: CGRect(origin: CGPoint(x: textFrame.minX - 4.0 - image.size.width, y: textFrame.minY + floor((textFrame.height - image.size.height) * 0.5)), size: image.size))
        }

        return availableSize
    }
}
/// Creates an empty view instance; all sizing happens in `update(view:...)`.
public func makeView() -> View {
    View(frame: CGRect())
}
/// Forwards the component's layout pass to the concrete view.
public func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
    view.update(component: self, availableSize: availableSize, state: state, environment: environment, transition: transition)
}
}

View File

@ -3,6 +3,10 @@ import UIKit
import Display
import ComponentFlow
import AppBundle
import ChatTextInputMediaRecordingButton
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
public final class MessageInputActionButtonComponent: Component {
public enum Mode {
@ -10,45 +14,83 @@ public final class MessageInputActionButtonComponent: Component {
case voiceInput
case videoInput
}
public enum Action {
case down
case up
}
public let mode: Mode
public let action: () -> Void
public let action: (Mode, Action, Bool) -> Void
public let switchMediaInputMode: () -> Void
public let updateMediaCancelFraction: (CGFloat) -> Void
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let presentController: (ViewController) -> Void
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public init(
mode: Mode,
action: @escaping () -> Void
action: @escaping (Mode, Action, Bool) -> Void,
switchMediaInputMode: @escaping () -> Void,
updateMediaCancelFraction: @escaping (CGFloat) -> Void,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
presentController: @escaping (ViewController) -> Void,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?
) {
self.mode = mode
self.action = action
self.switchMediaInputMode = switchMediaInputMode
self.updateMediaCancelFraction = updateMediaCancelFraction
self.context = context
self.theme = theme
self.strings = strings
self.presentController = presentController
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
}
public static func ==(lhs: MessageInputActionButtonComponent, rhs: MessageInputActionButtonComponent) -> Bool {
if lhs.mode != rhs.mode {
return false
}
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
return true
}
public final class View: HighlightTrackingButton {
private let microphoneIconView: UIImageView
private let cameraIconView: UIImageView
private var micButton: ChatTextInputMediaRecordingButton?
private let sendIconView: UIImageView
private var component: MessageInputActionButtonComponent?
private weak var componentState: EmptyComponentState?
override init(frame: CGRect) {
self.microphoneIconView = UIImageView()
self.cameraIconView = UIImageView()
self.sendIconView = UIImageView()
super.init(frame: frame)
self.isMultipleTouchEnabled = false
self.addSubview(self.microphoneIconView)
self.addSubview(self.cameraIconView)
self.addSubview(self.sendIconView)
self.highligthedChanged = { [weak self] highlighted in
@ -62,6 +104,7 @@ public final class MessageInputActionButtonComponent: Component {
transition.setSublayerTransform(view: self, transform: CATransform3DMakeScale(scale, scale, 1.0))
}
self.addTarget(self, action: #selector(self.touchDown), for: .touchDown)
self.addTarget(self, action: #selector(self.pressed), for: .touchUpInside)
}
@ -69,8 +112,18 @@ public final class MessageInputActionButtonComponent: Component {
fatalError("init(coder:) has not been implemented")
}
@objc private func touchDown() {
guard let component = self.component else {
return
}
component.action(component.mode, .down, false)
}
@objc private func pressed() {
self.component?.action()
guard let component = self.component else {
return
}
component.action(component.mode, .up, false)
}
override public func continueTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
@ -78,16 +131,57 @@ public final class MessageInputActionButtonComponent: Component {
}
func update(component: MessageInputActionButtonComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
let previousComponent = self.component
self.component = component
self.componentState = state
if self.microphoneIconView.image == nil {
self.microphoneIconView.image = UIImage(bundleImageName: "Chat/Input/Text/IconMicrophone")?.withRenderingMode(.alwaysTemplate)
self.microphoneIconView.tintColor = .white
}
if self.cameraIconView.image == nil {
self.cameraIconView.image = UIImage(bundleImageName: "Chat/Input/Text/IconVideo")?.withRenderingMode(.alwaysTemplate)
self.cameraIconView.tintColor = .white
let themeUpdated = previousComponent?.theme !== component.theme
if self.micButton == nil {
let micButton = ChatTextInputMediaRecordingButton(
context: component.context,
theme: component.theme,
strings: component.strings,
presentController: component.presentController
)
self.micButton = micButton
micButton.statusBarHost = component.context.sharedContext.mainWindow?.statusBarHost
self.addSubview(micButton)
micButton.beginRecording = { [weak self] in
guard let self, let component = self.component else {
return
}
switch component.mode {
case .voiceInput, .videoInput:
component.action(component.mode, .down, false)
default:
break
}
}
micButton.endRecording = { [weak self] sendMedia in
guard let self, let component = self.component else {
return
}
switch component.mode {
case .voiceInput, .videoInput:
component.action(component.mode, .up, sendMedia)
default:
break
}
}
micButton.switchMode = { [weak self] in
guard let self, let component = self.component else {
return
}
component.switchMediaInputMode()
}
micButton.updateCancelTranslation = { [weak self] in
guard let self, let micButton = self.micButton, let component = self.component else {
return
}
component.updateMediaCancelFraction(micButton.cancelTranslation)
}
}
if self.sendIconView.image == nil {
@ -117,40 +211,55 @@ public final class MessageInputActionButtonComponent: Component {
var sendAlpha: CGFloat = 0.0
var microphoneAlpha: CGFloat = 0.0
var cameraAlpha: CGFloat = 0.0
switch component.mode {
case .send:
sendAlpha = 1.0
case .videoInput:
cameraAlpha = 1.0
case .voiceInput:
case .videoInput, .voiceInput:
microphoneAlpha = 1.0
}
transition.setAlpha(view: self.sendIconView, alpha: sendAlpha)
transition.setScale(view: self.sendIconView, scale: sendAlpha == 0.0 ? 0.01 : 1.0)
transition.setAlpha(view: self.cameraIconView, alpha: cameraAlpha)
transition.setScale(view: self.cameraIconView, scale: cameraAlpha == 0.0 ? 0.01 : 1.0)
transition.setAlpha(view: self.microphoneIconView, alpha: microphoneAlpha)
transition.setScale(view: self.microphoneIconView, scale: microphoneAlpha == 0.0 ? 0.01 : 1.0)
if let image = self.sendIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.sendIconView, position: iconFrame.center)
transition.setBounds(view: self.sendIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
}
if let image = self.cameraIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.cameraIconView, position: iconFrame.center)
transition.setBounds(view: self.cameraIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
}
if let image = self.microphoneIconView.image {
let iconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - image.size.width) * 0.5), y: floorToScreenPixels((availableSize.height - image.size.height) * 0.5)), size: image.size)
transition.setPosition(view: self.microphoneIconView, position: iconFrame.center)
transition.setBounds(view: self.microphoneIconView, bounds: CGRect(origin: CGPoint(), size: iconFrame.size))
if let micButton = self.micButton {
if themeUpdated {
micButton.updateTheme(theme: component.theme)
}
let micButtonFrame = CGRect(origin: CGPoint(), size: availableSize)
let shouldLayoutMicButton = micButton.bounds.size != micButtonFrame.size
transition.setPosition(layer: micButton.layer, position: micButtonFrame.center)
transition.setBounds(layer: micButton.layer, bounds: CGRect(origin: CGPoint(), size: micButtonFrame.size))
if shouldLayoutMicButton {
micButton.layoutItems()
}
if previousComponent?.mode != component.mode {
switch component.mode {
case .send, .voiceInput:
micButton.updateMode(mode: .audio, animated: !transition.animation.isImmediate)
case .videoInput:
micButton.updateMode(mode: .video, animated: !transition.animation.isImmediate)
}
}
DispatchQueue.main.async { [weak self, weak micButton] in
guard let self, let component = self.component, let micButton else {
return
}
micButton.audioRecorder = component.audioRecorder
micButton.videoRecordingStatus = component.videoRecordingStatus
}
transition.setAlpha(view: micButton, alpha: microphoneAlpha)
transition.setScale(view: micButton, scale: microphoneAlpha == 0.0 ? 0.01 : 1.0)
}
return availableSize

View File

@ -5,6 +5,9 @@ import ComponentFlow
import AppBundle
import TextFieldComponent
import BundleIconComponent
import AccountContext
import TelegramPresentationData
import ChatPresentationInterfaceState
public final class MessageInputPanelComponent: Component {
public final class ExternalState {
@ -16,23 +19,59 @@ public final class MessageInputPanelComponent: Component {
}
public let externalState: ExternalState
public let context: AccountContext
public let theme: PresentationTheme
public let strings: PresentationStrings
public let presentController: (ViewController) -> Void
public let sendMessageAction: () -> Void
public let setMediaRecordingActive: (Bool, Bool, Bool) -> Void
public let attachmentAction: () -> Void
public let audioRecorder: ManagedAudioRecorder?
public let videoRecordingStatus: InstantVideoControllerRecordingStatus?
public init(
externalState: ExternalState,
context: AccountContext,
theme: PresentationTheme,
strings: PresentationStrings,
presentController: @escaping (ViewController) -> Void,
sendMessageAction: @escaping () -> Void,
attachmentAction: @escaping () -> Void
setMediaRecordingActive: @escaping (Bool, Bool, Bool) -> Void,
attachmentAction: @escaping () -> Void,
audioRecorder: ManagedAudioRecorder?,
videoRecordingStatus: InstantVideoControllerRecordingStatus?
) {
self.externalState = externalState
self.context = context
self.theme = theme
self.strings = strings
self.presentController = presentController
self.sendMessageAction = sendMessageAction
self.setMediaRecordingActive = setMediaRecordingActive
self.attachmentAction = attachmentAction
self.audioRecorder = audioRecorder
self.videoRecordingStatus = videoRecordingStatus
}
public static func ==(lhs: MessageInputPanelComponent, rhs: MessageInputPanelComponent) -> Bool {
if lhs.externalState !== rhs.externalState {
return false
}
if lhs.context !== rhs.context {
return false
}
if lhs.theme !== rhs.theme {
return false
}
if lhs.strings !== rhs.strings {
return false
}
if lhs.audioRecorder !== rhs.audioRecorder {
return false
}
if lhs.videoRecordingStatus !== rhs.videoRecordingStatus {
return false
}
return true
}
@ -50,7 +89,10 @@ public final class MessageInputPanelComponent: Component {
private let inputActionButton = ComponentView<Empty>()
private let stickerIconView: UIImageView
private var mediaRecordingPanel: ComponentView<Empty>?
private var currentMediaInputIsVoice: Bool = true
private var mediaCancelFraction: CGFloat = 0.0
private var component: MessageInputPanelComponent?
private weak var state: EmptyComponentState?
@ -107,6 +149,7 @@ public final class MessageInputPanelComponent: Component {
self.stickerIconView.image = UIImage(bundleImageName: "Chat/Input/Text/AccessoryIconStickers")?.withRenderingMode(.alwaysTemplate)
self.stickerIconView.tintColor = .white
}
transition.setAlpha(view: self.stickerIconView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
let availableTextFieldSize = CGSize(width: availableSize.width - insets.left - insets.right, height: availableSize.height - insets.top - insets.bottom)
@ -123,6 +166,7 @@ public final class MessageInputPanelComponent: Component {
let fieldFrame = CGRect(origin: CGPoint(x: insets.left, y: insets.top), size: CGSize(width: availableSize.width - insets.left - insets.right, height: textFieldSize.height))
transition.setFrame(view: self.fieldBackgroundView, frame: fieldFrame)
transition.setAlpha(view: self.fieldBackgroundView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
let rightFieldInset: CGFloat = 34.0
@ -133,6 +177,7 @@ public final class MessageInputPanelComponent: Component {
self.addSubview(textFieldView)
}
transition.setFrame(view: textFieldView, frame: CGRect(origin: CGPoint(x: fieldFrame.minX, y: fieldFrame.maxY - textFieldSize.height), size: textFieldSize))
transition.setAlpha(view: textFieldView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
}
let attachmentButtonSize = self.attachmentButton.update(
@ -157,26 +202,53 @@ public final class MessageInputPanelComponent: Component {
self.addSubview(attachmentButtonView)
}
transition.setFrame(view: attachmentButtonView, frame: CGRect(origin: CGPoint(x: floor((insets.left - attachmentButtonSize.width) * 0.5), y: size.height - baseHeight + floor((baseHeight - attachmentButtonSize.height) * 0.5)), size: attachmentButtonSize))
transition.setAlpha(view: attachmentButtonView, alpha: (component.audioRecorder != nil || component.videoRecordingStatus != nil) ? 0.0 : 1.0)
}
let inputActionButtonSize = self.inputActionButton.update(
transition: transition,
component: AnyComponent(MessageInputActionButtonComponent(
mode: self.textFieldExternalState.hasText ? .send : (self.currentMediaInputIsVoice ? .voiceInput : .videoInput),
action: { [weak self] in
action: { [weak self] mode, action, sendAction in
guard let self else {
return
}
if case .text("") = self.getSendMessageInput() {
self.currentMediaInputIsVoice = !self.currentMediaInputIsVoice
self.state?.updated(transition: Transition(animation: .curve(duration: 0.3, curve: .spring)))
HapticFeedback().impact()
} else {
self.component?.sendMessageAction()
switch mode {
case .send:
if case .up = action {
if case .text("") = self.getSendMessageInput() {
} else {
self.component?.sendMessageAction()
}
}
case .voiceInput, .videoInput:
self.component?.setMediaRecordingActive(action == .down, mode == .videoInput, sendAction)
}
}
},
switchMediaInputMode: { [weak self] in
guard let self else {
return
}
self.currentMediaInputIsVoice = !self.currentMediaInputIsVoice
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
},
updateMediaCancelFraction: { [weak self] mediaCancelFraction in
guard let self else {
return
}
if self.mediaCancelFraction != mediaCancelFraction {
self.mediaCancelFraction = mediaCancelFraction
self.state?.updated(transition: .immediate)
}
},
context: component.context,
theme: component.theme,
strings: component.strings,
presentController: component.presentController,
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus
)),
environment: {},
containerSize: CGSize(width: 33.0, height: 33.0)
@ -199,6 +271,50 @@ public final class MessageInputPanelComponent: Component {
component.externalState.isEditing = self.textFieldExternalState.isEditing
component.externalState.hasText = self.textFieldExternalState.hasText
if component.audioRecorder != nil || component.videoRecordingStatus != nil {
let mediaRecordingPanel: ComponentView<Empty>
var mediaRecordingPanelTransition = transition
if let current = self.mediaRecordingPanel {
mediaRecordingPanel = current
} else {
mediaRecordingPanelTransition = .immediate
mediaRecordingPanel = ComponentView()
self.mediaRecordingPanel = mediaRecordingPanel
}
let _ = mediaRecordingPanel.update(
transition: mediaRecordingPanelTransition,
component: AnyComponent(MediaRecordingPanelComponent(
audioRecorder: component.audioRecorder,
videoRecordingStatus: component.videoRecordingStatus,
cancelFraction: self.mediaCancelFraction
)),
environment: {},
containerSize: size
)
if let mediaRecordingPanelView = mediaRecordingPanel.view {
var animateIn = false
if mediaRecordingPanelView.superview == nil {
animateIn = true
self.insertSubview(mediaRecordingPanelView, at: 0)
}
mediaRecordingPanelTransition.setFrame(view: mediaRecordingPanelView, frame: CGRect(origin: CGPoint(), size: size))
if animateIn && !transition.animation.isImmediate {
transition.animateAlpha(view: mediaRecordingPanelView, from: 0.0, to: 1.0)
}
}
} else {
if let mediaRecordingPanel = self.mediaRecordingPanel {
self.mediaRecordingPanel = nil
if let mediaRecordingPanelView = mediaRecordingPanel.view {
transition.setAlpha(view: mediaRecordingPanelView, alpha: 0.0, completion: { [weak mediaRecordingPanelView] _ in
mediaRecordingPanelView?.removeFromSuperview()
})
}
}
}
return size
}
}

View File

@ -40,6 +40,7 @@ swift_library(
"//submodules/ICloudResources",
"//submodules/LegacyComponents",
"//submodules/TelegramUI/Components/LegacyCamera",
"//submodules/TelegramUI/Components/LegacyInstantVideoController",
"//submodules/TelegramUI/Components/Stories/StoryFooterPanelComponent",
"//submodules/TelegramPresentationData",
],

View File

@ -31,6 +31,7 @@ import LegacyComponents
import LegacyCamera
import StoryFooterPanelComponent
import TelegramPresentationData
import LegacyInstantVideoController
private func hasFirstResponder(_ view: UIView) -> Bool {
if view.isFirstResponder {
@ -141,6 +142,16 @@ private final class StoryContainerScreenComponent: Component {
private var currentSlice: StoryContentItemSlice?
private var currentSliceDisposable: Disposable?
private var audioRecorderValue: ManagedAudioRecorder?
private var audioRecorder = Promise<ManagedAudioRecorder?>()
private var audioRecorderDisposable: Disposable?
private var audioRecorderStatusDisposable: Disposable?
private var videoRecorderValue: InstantVideoController?
private var tempVideoRecorderValue: InstantVideoController?
private var videoRecorder = Promise<InstantVideoController?>()
private var videoRecorderDisposable: Disposable?
private var visibleItems: [AnyHashable: VisibleItem] = [:]
private var preloadContexts: [AnyHashable: Disposable] = [:]
@ -190,6 +201,105 @@ private final class StoryContainerScreenComponent: Component {
self.contentContainerView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(self.tapGesture(_:))))
self.contentContainerView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(self.panGesture(_:))))
self.audioRecorderDisposable = (self.audioRecorder.get()
|> deliverOnMainQueue).start(next: { [weak self] audioRecorder in
guard let self else {
return
}
if self.audioRecorderValue !== audioRecorder {
self.audioRecorderValue = audioRecorder
self.environment?.controller()?.lockOrientation = audioRecorder != nil
/*strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
let isLocked = strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId
if let audioRecorder = audioRecorder {
if panelState.mediaRecordingState == nil {
return panelState.withUpdatedMediaRecordingState(.audio(recorder: audioRecorder, isLocked: isLocked))
}
} else {
if case .waitingForPreview = panelState.mediaRecordingState {
return panelState
}
return panelState.withUpdatedMediaRecordingState(nil)
}
return panelState
}
})*/
self.audioRecorderStatusDisposable?.dispose()
self.audioRecorderStatusDisposable = nil
if let audioRecorder = audioRecorder {
if !audioRecorder.beginWithTone {
HapticFeedback().impact(.light)
}
audioRecorder.start()
self.audioRecorderStatusDisposable = (audioRecorder.recordingState
|> deliverOnMainQueue).start(next: { [weak self] value in
guard let self else {
return
}
if case .stopped = value {
self.stopMediaRecorder()
}
})
}
self.state?.updated(transition: Transition(animation: .curve(duration: 0.4, curve: .spring)))
}
})
self.videoRecorderDisposable = (self.videoRecorder.get()
|> deliverOnMainQueue).start(next: { [weak self] videoRecorder in
guard let self else {
return
}
if self.videoRecorderValue !== videoRecorder {
let previousVideoRecorderValue = self.videoRecorderValue
self.videoRecorderValue = videoRecorder
if let videoRecorder = videoRecorder {
HapticFeedback().impact(.light)
videoRecorder.onDismiss = { [weak self] isCancelled in
guard let self else {
return
}
//self?.chatDisplayNode.updateRecordedMediaDeleted(isCancelled)
//self?.beginMediaRecordingRequestId += 1
//self?.lockMediaRecordingRequestId = nil
self.videoRecorder.set(.single(nil))
}
videoRecorder.onStop = { [weak self] in
guard let self else {
return
}
/*if let strongSelf = self {
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(.video(status: .editing, isLocked: false))
}
})
}*/
let _ = self
//TODO:editing
}
self.environment?.controller()?.present(videoRecorder, in: .window(.root))
/*if strongSelf.lockMediaRecordingRequestId == strongSelf.beginMediaRecordingRequestId {
videoRecorder.lockVideo()
}*/
}
if let previousVideoRecorderValue {
previousVideoRecorderValue.dismissVideo()
}
self.state?.updated(transition: .immediate)
}
})
}
required init?(coder: NSCoder) {
@ -200,6 +310,8 @@ private final class StoryContainerScreenComponent: Component {
self.currentSliceDisposable?.dispose()
self.controllerNavigationDisposable.dispose()
self.enqueueMediaMessageDisposable.dispose()
self.audioRecorderDisposable?.dispose()
self.audioRecorderStatusDisposable?.dispose()
}
@objc private func tapGesture(_ recognizer: UITapGestureRecognizer) {
@ -434,6 +546,123 @@ private final class StoryContainerScreenComponent: Component {
}
}
private func setMediaRecordingActive(isActive: Bool, isVideo: Bool, sendAction: Bool) {
guard let component = self.component else {
return
}
guard let focusedItemId = self.focusedItemId, let focusedItem = self.currentSlice?.items.first(where: { $0.id == focusedItemId }) else {
return
}
guard let targetMessageId = focusedItem.targetMessageId else {
return
}
let _ = (component.context.engine.data.get(
TelegramEngine.EngineData.Item.Messages.Message(id: targetMessageId)
)
|> deliverOnMainQueue).start(next: { [weak self] targetMessage in
guard let self, let component = self.component, let environment = self.environment, let targetMessage, let peer = targetMessage.author else {
return
}
if isActive {
if isVideo {
if self.videoRecorderValue == nil {
if let currentInputPanelFrame = self.inputPanel.view?.frame {
self.videoRecorder.set(.single(legacyInstantVideoController(theme: environment.theme, panelFrame: self.convert(currentInputPanelFrame, to: nil), context: component.context, peerId: peer.id, slowmodeState: nil, hasSchedule: peer.id.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in
if let strongSelf = self {
guard let message = message else {
strongSelf.videoRecorder.set(.single(nil))
return
}
let replyMessageId = targetMessageId
let correlationId = Int64.random(in: 0 ..< Int64.max)
let updatedMessage = message
.withUpdatedReplyToMessageId(replyMessageId)
.withUpdatedCorrelationId(correlationId)
strongSelf.videoRecorder.set(.single(nil))
strongSelf.sendMessages(peer: peer, messages: [updatedMessage])
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
strongSelf.environment?.controller()?.present(UndoOverlayController(
presentationData: presentationData,
content: .succeed(text: "Message Sent"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { _ in return false }
), in: .current)
}
}, displaySlowmodeTooltip: { [weak self] view, rect in
//self?.interfaceInteraction?.displaySlowmodeTooltip(view, rect)
let _ = self
}, presentSchedulePicker: { [weak self] done in
guard let self else {
return
}
self.presentScheduleTimePicker(peer: peer, completion: { time in
done(time)
})
})))
}
}
} else {
if self.audioRecorderValue == nil {
self.audioRecorder.set(component.context.sharedContext.mediaManager.audioRecorder(beginWithTone: false, applicationBindings: component.context.sharedContext.applicationBindings, beganWithTone: { _ in
}))
}
}
} else {
if let audioRecorderValue = self.audioRecorderValue {
let _ = (audioRecorderValue.takenRecordedData()
|> deliverOnMainQueue).start(next: { [weak self] data in
guard let self, let component = self.component else {
return
}
self.audioRecorder.set(.single(nil))
guard let data else {
return
}
if data.duration < 0.5 || !sendAction {
HapticFeedback().error()
} else {
let randomId = Int64.random(in: Int64.min ... Int64.max)
let resource = LocalFileMediaResource(fileId: randomId)
component.context.account.postbox.mediaBox.storeResourceData(resource.id, data: data.compressedData)
let waveformBuffer: Data? = data.waveform
self.sendMessages(peer: peer, messages: [.message(text: "", attributes: [], inlineStickers: [:], mediaReference: .standalone(media: TelegramMediaFile(fileId: EngineMedia.Id(namespace: Namespaces.Media.LocalFile, id: randomId), partialReference: nil, resource: resource, previewRepresentations: [], videoThumbnails: [], immediateThumbnailData: nil, mimeType: "audio/ogg", size: Int64(data.compressedData.count), attributes: [.Audio(isVoice: true, duration: Int(data.duration), title: nil, performer: nil, waveform: waveformBuffer)])), replyToMessageId: targetMessageId, localGroupingKey: nil, correlationId: nil, bubbleUpEmojiOrStickersets: [])])
HapticFeedback().tap()
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
self.environment?.controller()?.present(UndoOverlayController(
presentationData: presentationData,
content: .succeed(text: "Message Sent"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { _ in return false }
), in: .current)
}
})
} else if let videoRecorderValue = self.videoRecorderValue {
let _ = videoRecorderValue
self.videoRecorder.set(.single(nil))
}
}
})
}
private func stopMediaRecorder() {
}
private func performInlineAction(item: StoryActionsComponent.Item) {
guard let component = self.component else {
return
@ -658,7 +887,7 @@ private final class StoryContainerScreenComponent: Component {
}
let _ = combineLatest(queue: Queue.mainQueue(), buttons, dataSettings).start(next: { [weak self] buttonsAndInitialButton, dataSettings in
guard let self, let component = self.component else {
guard let self, let component = self.component, let environment = self.environment else {
return
}
@ -714,9 +943,10 @@ private final class StoryContainerScreenComponent: Component {
let currentFilesController = Atomic<AttachmentFileController?>(value: nil)
let currentLocationController = Atomic<LocationPickerController?>(value: nil)
let theme = environment.theme
let attachmentController = AttachmentController(
context: component.context,
updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }),
updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }),
chatLocation: .peer(id: peer.id),
buttons: buttons,
initialButton: initialButton,
@ -751,7 +981,7 @@ private final class StoryContainerScreenComponent: Component {
return attachmentButtonView.convert(attachmentButtonView.bounds, to: self)
}
attachmentController.requestController = { [weak self, weak attachmentController] type, completion in
guard let self else {
guard let self, let environment = self.environment else {
return
}
switch type {
@ -795,7 +1025,8 @@ private final class StoryContainerScreenComponent: Component {
controller.prepareForReuse()
return
}
let controller = component.context.sharedContext.makeAttachmentFileController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), bannedSendMedia: bannedSendFiles, presentGallery: { [weak self, weak attachmentController] in
let theme = environment.theme
let controller = component.context.sharedContext.makeAttachmentFileController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), bannedSendMedia: bannedSendFiles, presentGallery: { [weak self, weak attachmentController] in
guard let self else {
return
}
@ -848,11 +1079,12 @@ private final class StoryContainerScreenComponent: Component {
}
let _ = (component.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: selfPeerId))
|> deliverOnMainQueue).start(next: { [weak self] selfPeer in
guard let self, let component = self.component, let selfPeer else {
guard let self, let component = self.component, let environment = self.environment, let selfPeer else {
return
}
let hasLiveLocation = peer.id.namespace != Namespaces.Peer.SecretChat && peer.id != component.context.account.peerId
let controller = LocationPickerController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), mode: .share(peer: peer, selfPeer: selfPeer, hasLiveLocation: hasLiveLocation), completion: { [weak self] location, _ in
let theme = environment.theme
let controller = LocationPickerController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), mode: .share(peer: peer, selfPeer: selfPeer, hasLiveLocation: hasLiveLocation), completion: { [weak self] location, _ in
guard let self else {
return
}
@ -864,7 +1096,8 @@ private final class StoryContainerScreenComponent: Component {
let _ = currentLocationController.swap(controller)
})
case .contact:
let contactsController = component.context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), title: { $0.Contacts_Title }, displayDeviceContacts: true, multipleSelection: true))
let theme = environment.theme
let contactsController = component.context.sharedContext.makeContactSelectionController(ContactSelectionControllerParams(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), title: { $0.Contacts_Title }, displayDeviceContacts: true, multipleSelection: true))
contactsController.presentScheduleTimePicker = { [weak self] completion in
guard let self else {
return
@ -1072,7 +1305,8 @@ private final class StoryContainerScreenComponent: Component {
fromAttachMenu = true
let params = WebAppParameters(peerId: peer.id, botId: bot.id, botName: botName, url: nil, queryId: nil, payload: payload, buttonText: nil, keepAliveSignal: nil, fromMenu: false, fromAttachMenu: fromAttachMenu, isInline: false, isSimple: false)
let replyMessageId = targetMessageId
let controller = WebAppController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), params: params, replyToMessageId: replyMessageId, threadId: nil)
let theme = environment.theme
let controller = WebAppController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), params: params, replyToMessageId: replyMessageId, threadId: nil)
controller.openUrl = { [weak self] url in
guard let self else {
return
@ -1136,10 +1370,11 @@ private final class StoryContainerScreenComponent: Component {
updateMediaPickerContext: @escaping (AttachmentMediaPickerContext?) -> Void,
completion: @escaping ([Any], Bool, Int32?, @escaping (String) -> UIView?, @escaping () -> Void) -> Void
) {
guard let component = self.component else {
guard let component = self.component, let environment = self.environment else {
return
}
let controller = MediaPickerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos)
let theme = environment.theme
let controller = MediaPickerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, threadTitle: nil, chatLocation: .peer(id: peer.id), bannedSendPhotos: bannedSendPhotos, bannedSendVideos: bannedSendVideos, subject: subject, saveEditedPhotos: saveEditedPhotos)
let mediaPickerContext = controller.mediaPickerContext
controller.openCamera = { [weak self] cameraView in
guard let self else {
@ -1251,8 +1486,9 @@ private final class StoryContainerScreenComponent: Component {
legacyController.deferScreenEdgeGestures = [.top]
configureLegacyAssetPicker(controller, context: component.context, peer: peer._asPeer(), chatLocation: .peer(id: peer.id), initialCaption: inputText, hasSchedule: peer.id.namespace != Namespaces.Peer.SecretChat, presentWebSearch: editingMedia ? nil : { [weak legacyController] in
if let strongSelf = self, let component = strongSelf.component {
let controller = WebSearchController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), peer: peer, chatLocation: .peer(id: peer.id), configuration: searchBotsConfiguration, mode: .media(attachment: false, completion: { results, selectionState, editingState, silentPosting in
if let strongSelf = self, let component = strongSelf.component, let environment = strongSelf.environment {
let theme = environment.theme
let controller = WebSearchController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, chatLocation: .peer(id: peer.id), configuration: searchBotsConfiguration, mode: .media(attachment: false, completion: { results, selectionState, editingState, silentPosting in
if let legacyController = legacyController {
legacyController.dismiss()
}
@ -1707,7 +1943,7 @@ private final class StoryContainerScreenComponent: Component {
TelegramEngine.EngineData.Item.Peer.Presence(id: peer.id)
)
|> deliverOnMainQueue).start(next: { [weak self] presence in
guard let self, let component = self.component else {
guard let self, let component = self.component, let environment = self.environment else {
return
}
@ -1725,7 +1961,8 @@ private final class StoryContainerScreenComponent: Component {
} else {
mode = .scheduledMessages(sendWhenOnlineAvailable: sendWhenOnlineAvailable)
}
let controller = ChatScheduleTimeController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), peerId: peer.id, mode: mode, style: style, currentTime: selectedTime, minimalTime: nil, dismissByTapOutside: dismissByTapOutside, completion: { time in
let theme = environment.theme
let controller = ChatScheduleTimeController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peerId: peer.id, mode: mode, style: style, currentTime: selectedTime, minimalTime: nil, dismissByTapOutside: dismissByTapOutside, completion: { time in
completion(time)
})
self.endEditing(true)
@ -1734,10 +1971,11 @@ private final class StoryContainerScreenComponent: Component {
}
private func presentTimerPicker(peer: EnginePeer, style: ChatTimerScreenStyle = .default, selectedTime: Int32? = nil, dismissByTapOutside: Bool = true, completion: @escaping (Int32) -> Void) {
guard let component = self.component else {
guard let component = self.component, let environment = self.environment else {
return
}
let controller = ChatTimerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), style: style, currentTime: selectedTime, dismissByTapOutside: dismissByTapOutside, completion: { time in
let theme = environment.theme
let controller = ChatTimerScreen(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), style: style, currentTime: selectedTime, dismissByTapOutside: dismissByTapOutside, completion: { time in
completion(time)
})
self.endEditing(true)
@ -1745,10 +1983,11 @@ private final class StoryContainerScreenComponent: Component {
}
private func configurePollCreation(peer: EnginePeer, targetMessageId: EngineMessage.Id, isQuiz: Bool? = nil) -> CreatePollControllerImpl? {
guard let component = self.component else {
guard let component = self.component, let environment = self.environment else {
return nil
}
return createPollController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkPresentationTheme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: defaultDarkPresentationTheme) }), peer: peer, isQuiz: isQuiz, completion: { [weak self] poll in
let theme = environment.theme
return createPollController(context: component.context, updatedPresentationData: (component.context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: theme), component.context.sharedContext.presentationData |> map { $0.withUpdated(theme: theme) }), peer: peer, isQuiz: isQuiz, completion: { [weak self] poll in
guard let self else {
return
}
@ -2053,18 +2292,35 @@ private final class StoryContainerScreenComponent: Component {
transition: transition,
component: AnyComponent(MessageInputPanelComponent(
externalState: self.inputPanelExternalState,
context: component.context,
theme: environment.theme,
strings: environment.strings,
presentController: { [weak self] c in
guard let self, let controller = self.environment?.controller() else {
return
}
controller.present(c, in: .window(.root))
},
sendMessageAction: { [weak self] in
guard let self else {
return
}
self.performSendMessageAction()
},
setMediaRecordingActive: { [weak self] isActive, isVideo, sendAction in
guard let self else {
return
}
self.setMediaRecordingActive(isActive: isActive, isVideo: isVideo, sendAction: sendAction)
},
attachmentAction: { [weak self] in
guard let self else {
return
}
self.presentAttachmentMenu(subject: .default)
}
},
audioRecorder: self.audioRecorderValue,
videoRecordingStatus: self.videoRecorderValue?.audioStatus
)),
environment: {},
containerSize: CGSize(width: availableSize.width, height: 200.0)
@ -2260,7 +2516,13 @@ private final class StoryContainerScreenComponent: Component {
self.addSubview(inlineActionsView)
}
transition.setFrame(view: inlineActionsView, frame: CGRect(origin: CGPoint(x: contentFrame.maxX - 10.0 - inlineActionsSize.width, y: contentFrame.maxY - 20.0 - inlineActionsSize.height), size: inlineActionsSize))
transition.setAlpha(view: inlineActionsView, alpha: inputPanelIsOverlay ? 0.0 : 1.0)
var inlineActionsAlpha: CGFloat = inputPanelIsOverlay ? 0.0 : 1.0
if self.audioRecorderValue != nil {
inlineActionsAlpha = 0.0
}
transition.setAlpha(view: inlineActionsView, alpha: inlineActionsAlpha)
}
}
}

View File

@ -93,6 +93,7 @@ import AvatarEditorScreen
import ChatScheduleTimeController
import ICloudResources
import LegacyCamera
import LegacyInstantVideoController
#if DEBUG
import os.signpost

View File

@ -11,6 +11,7 @@ import TelegramCore
import ReactionSelectionNode
import ChatControllerInteraction
import FeaturedStickersScreen
import ChatTextInputMediaRecordingButton
private func convertAnimatingSourceRect(_ rect: CGRect, fromView: UIView, toView: UIView?) -> CGRect {
if let presentationLayer = fromView.layer.presentation() {

View File

@ -12,6 +12,7 @@ import ContextUI
import AnimationUI
import ManagedAnimationNode
import ChatPresentationInterfaceState
import ChatSendButtonRadialStatusNode
extension AudioWaveformNode: CustomMediaPlayerScrubbingForegroundNode {

View File

@ -10,6 +10,8 @@ import ChatPresentationInterfaceState
import ChatMessageBackground
import ChatControllerInteraction
import AccountContext
import ChatTextInputMediaRecordingButton
import ChatSendButtonRadialStatusNode
final class ChatTextInputActionButtonsNode: ASDisplayNode {
private let presentationContext: ChatPresentationContext?

View File

@ -36,6 +36,7 @@ import StickerPeekUI
import LottieComponent
import SolidRoundedButtonNode
import TooltipUI
import ChatTextInputMediaRecordingButton
private let accessoryButtonFont = Font.medium(14.0)
private let counterFont = Font.with(size: 14.0, design: .regular, traits: [.monospacedNumbers])

View File

@ -260,33 +260,47 @@ public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibr
}
let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
var value = stat()
/*var value = stat()
if stat(path, &value) == 0 {
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
var range: Range<Int64>?
let _ = updatedSize.modify { updatedSize in
range = updatedSize ..< value.st_size
return value.st_size
}
//print("size = \(Int(value.st_size)), range: \(range!)")
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
}
}
}), entityRenderer: entityRenderer)!
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4")
if FFMpegRemuxer.remux(path, to: remuxedTempFile.path) {
TempBox.shared.dispose(tempFile)
subscriber.putNext(.moveTempFile(file: remuxedTempFile))
} else {
TempBox.shared.dispose(remuxedTempFile)
if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
var range: Range<Int64>?
let _ = updatedSize.modify { updatedSize in
range = updatedSize ..< value.st_size
return value.st_size
}
//print("finish size = \(Int(value.st_size)), range: \(range!)")
//print("size = \(Int(value.st_size)), range: \(range!)")
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
}
}
}*/
}), entityRenderer: entityRenderer)!
let signalDisposable = signal.start(next: { next in
if let result = next as? TGMediaVideoConversionResult {
var value = stat()
if stat(result.fileURL.path, &value) == 0 {
let remuxedTempFile = TempBox.shared.tempFile(fileName: "video.mp4")
if let size = fileSize(result.fileURL.path), size <= 32 * 1024 * 1024, FFMpegRemuxer.remux(result.fileURL.path, to: remuxedTempFile.path) {
TempBox.shared.dispose(tempFile)
subscriber.putNext(.moveTempFile(file: remuxedTempFile))
} else {
TempBox.shared.dispose(remuxedTempFile)
if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
var range: Range<Int64>?
let _ = updatedSize.modify { updatedSize in
range = updatedSize ..< value.st_size
return value.st_size
}
//print("finish size = \(Int(value.st_size)), range: \(range!)")
subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
}
}
} else {
subscriber.putError(.generic)
@ -482,6 +496,11 @@ public func fetchVideoLibraryMediaResourceHash(resource: VideoLibraryMediaResour
if isPassthrough {
updatedData.reverse()
}
#if DEBUG
if "".isEmpty {
subscriber.putNext(nil)
}
#endif
subscriber.putNext(updatedData)
} else {
subscriber.putNext(nil)

View File

@ -13,6 +13,7 @@ import SettingsUI
import ChatPresentationInterfaceState
import AttachmentUI
import ForumCreateTopicScreen
import LegacyInstantVideoController
public func navigateToChatControllerImpl(_ params: NavigateToChatControllerParams) {
if case let .peer(peer) = params.chatLocation, case let .channel(channel) = peer, channel.flags.contains(.isForum) {