Various improvements

commit bbba821145
parent 81c5ce4af9
@@ -806,6 +806,12 @@ final class DrawingStickerEntititySelectionView: DrawingEntitySelectionView {
         switch gestureRecognizer.state {
         case .began:
+            self.tapGestureRecognizer?.isEnabled = false
+            self.tapGestureRecognizer?.isEnabled = true
+            
+            self.longPressGestureRecognizer?.isEnabled = false
+            self.longPressGestureRecognizer?.isEnabled = true
+            
             self.snapTool.maybeSkipFromStart(entityView: entityView, position: entity.position)
             
             let _ = entityView.dismissReactionSelection()
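Flipping a gesture recognizer's `isEnabled` to `false` forces any in-flight recognition into the cancelled state, and setting it back to `true` immediately re-arms it, so the gesture that is beginning cannot also fire the tap or long-press handlers. A minimal sketch of the pattern, using a hypothetical view rather than the class above:

```swift
import UIKit

final class SelectionView: UIView {
    var tapGestureRecognizer: UITapGestureRecognizer?
    var longPressGestureRecognizer: UILongPressGestureRecognizer?

    @objc func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
        switch gestureRecognizer.state {
        case .began:
            // Cancel any tap/long-press that started on the same touch;
            // re-enabling immediately re-arms them for future touches.
            self.tapGestureRecognizer?.isEnabled = false
            self.tapGestureRecognizer?.isEnabled = true
            self.longPressGestureRecognizer?.isEnabled = false
            self.longPressGestureRecognizer?.isEnabled = true
        default:
            break
        }
    }
}
```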
@@ -265,7 +265,7 @@ func presentLegacyMediaPickerGallery(context: AccountContext, peer: EnginePeer?,
         |> take(1)
         |> deliverOnMainQueue).start(next: { sendWhenOnlineAvailable in
             let legacySheetController = LegacyController(presentation: .custom, theme: presentationData.theme, initialLayout: nil)
-            let sheetController = TGMediaPickerSendActionSheetController(context: legacyController.context, isDark: true, sendButtonFrame: model.interfaceView.doneButtonFrame, canSendSilently: hasSilentPosting, canSendWhenOnline: sendWhenOnlineAvailable, canSchedule: effectiveHasSchedule, reminder: reminder, hasTimer: hasTimer)
+            let sheetController = TGMediaPickerSendActionSheetController(context: legacyController.context, isDark: true, sendButtonFrame: model.interfaceView.doneButtonFrame, canSendSilently: hasSilentPosting, canSendWhenOnline: sendWhenOnlineAvailable, canSchedule: effectiveHasSchedule, reminder: reminder, hasTimer: false)
             let dismissImpl = { [weak model] in
                 model?.dismiss(true, false)
                 dismissAll()
@@ -350,9 +350,11 @@ public final class MediaEditor {
+            } else {
+                self.audioPlayer?.removeTimeObserver(timeObserver)
             }
             self.timeObserver = nil
         }
         if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
             NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
             self.didPlayToEndTimeObserver = nil
         }
         
         self.audioDelayTimer?.invalidate()
@@ -890,7 +892,16 @@ public final class MediaEditor {
         
         if self.player == nil, let audioPlayer = self.audioPlayer {
             let itemTime = audioPlayer.currentItem?.currentTime() ?? .invalid
-            audioPlayer.setRate(rate, time: itemTime, atHostTime: futureTime)
+            if audioPlayer.status == .readyToPlay {
+                audioPlayer.setRate(rate, time: itemTime, atHostTime: futureTime)
+            } else {
+                audioPlayer.seek(to: itemTime, toleranceBefore: .zero, toleranceAfter: .zero)
+                if rate > 0.0 {
+                    audioPlayer.play()
+                } else {
+                    audioPlayer.pause()
+                }
+            }
         } else {
             let itemTime = self.player?.currentItem?.currentTime() ?? .invalid
             let audioTime = self.audioTime(for: itemTime)
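The new branch matters because `setRate(_:time:atHostTime:)` only synchronizes precisely when the player is already `.readyToPlay`, and it requires `automaticallyWaitsToMinimizeStalling` to be `false` (AVFoundation raises an exception otherwise). A standalone sketch of the same guard, assuming a bare `AVPlayer`:

```swift
import AVFoundation

func apply(rate: Float, to player: AVPlayer, atHostTime hostTime: CMTime) {
    player.automaticallyWaitsToMinimizeStalling = false
    let itemTime = player.currentItem?.currentTime() ?? .invalid
    if player.status == .readyToPlay {
        // Precise start, aligned to the shared host clock.
        player.setRate(rate, time: itemTime, atHostTime: hostTime)
    } else {
        // Not ready yet: fall back to a plain seek plus play/pause,
        // losing host-time precision but keeping playback responsive.
        player.seek(to: itemTime, toleranceBefore: .zero, toleranceAfter: .zero)
        if rate > 0.0 {
            player.play()
        } else {
            player.pause()
        }
    }
}
```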
@@ -898,13 +909,24 @@ public final class MediaEditor {
             self.player?.setRate(rate, time: itemTime, atHostTime: futureTime)
             self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
             
-            if rate > 0.0, let audioDelay = self.audioDelay(for: itemTime) {
-                self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
-                    self?.audioPlayer?.setRate(rate, time: audioTime, atHostTime: futureTime)
-                }, queue: Queue.mainQueue())
-                self.audioDelayTimer?.start()
-            } else {
-                self.audioPlayer?.setRate(rate, time: audioTime, atHostTime: futureTime)
-            }
+            if let audioPlayer = self.audioPlayer {
+                if rate > 0.0, let audioDelay = self.audioDelay(for: itemTime) {
+                    self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
+                        self?.audioPlayer?.setRate(rate, time: audioTime, atHostTime: futureTime)
+                    }, queue: Queue.mainQueue())
+                    self.audioDelayTimer?.start()
+                } else {
+                    if audioPlayer.status == .readyToPlay {
+                        audioPlayer.setRate(rate, time: audioTime, atHostTime: futureTime)
+                    } else {
+                        audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
+                        if rate > 0.0 {
+                            audioPlayer.play()
+                        } else {
+                            audioPlayer.pause()
+                        }
+                    }
+                }
+            }
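When the music enters partway into the timeline, the video starts immediately and the audio player is armed behind a one-shot timer for `audioDelay` seconds. The same idea with `DispatchQueue` substituted for `SwiftSignalKit.Timer` (type and parameter names here are illustrative, not the editor's real API):

```swift
import AVFoundation

/// Both players are assumed to have automaticallyWaitsToMinimizeStalling = false,
/// which setRate(_:time:atHostTime:) requires.
final class TwoTrackPlayback {
    let videoPlayer = AVPlayer()
    let audioPlayer = AVPlayer()
    private var pendingAudioStart: DispatchWorkItem?

    func play(rate: Float, audioDelay: TimeInterval?, audioTime: CMTime) {
        let hostTime = CMClockGetTime(CMClockGetHostTimeClock())
        let itemTime = videoPlayer.currentItem?.currentTime() ?? .invalid
        videoPlayer.setRate(rate, time: itemTime, atHostTime: hostTime)

        pendingAudioStart?.cancel()
        if rate > 0.0, let audioDelay {
            // The audio track starts later in the timeline: arm a one-shot delay.
            let work = DispatchWorkItem { [weak self] in
                guard let self else { return }
                self.audioPlayer.setRate(rate, time: audioTime, atHostTime: CMClockGetTime(CMClockGetHostTimeClock()))
            }
            pendingAudioStart = work
            DispatchQueue.main.asyncAfter(deadline: .now() + audioDelay, execute: work)
        } else {
            audioPlayer.setRate(rate, time: audioTime, atHostTime: hostTime)
        }
    }
}
```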
@@ -72,7 +72,6 @@ public final class MediaEditorVideoAVAssetWriter: MediaEditorVideoExportWriter {
         var videoSettings = configuration.videoSettings
         if var compressionSettings = videoSettings[AVVideoCompressionPropertiesKey] as? [String: Any] {
             compressionSettings[AVVideoExpectedSourceFrameRateKey] = sourceFrameRate
-            // compressionSettings[AVVideoMaxKeyFrameIntervalKey] = sourceFrameRate
             videoSettings[AVVideoCompressionPropertiesKey] = compressionSettings
         }
@@ -221,12 +220,19 @@ public final class MediaEditorVideoExport {
     }
     
     var audioTimeRange: CMTimeRange? {
-        let offset = self.values.audioTrackOffset ?? 0.0
-        if let range = self.values.audioTrackTrimRange {
-            return CMTimeRange(
-                start: CMTime(seconds: offset + range.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)),
-                end: CMTime(seconds: offset + range.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
-            )
+        if let audioTrack = self.values.audioTrack {
+            let offset = self.values.audioTrackOffset ?? 0.0
+            if let range = self.values.audioTrackTrimRange {
+                return CMTimeRange(
+                    start: CMTime(seconds: offset + range.lowerBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC)),
+                    end: CMTime(seconds: offset + range.upperBound, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
+                )
+            } else {
+                return CMTimeRange(
+                    start: CMTime(seconds: offset, preferredTimescale: CMTimeScale(NSEC_PER_SEC)),
+                    end: CMTime(seconds: offset + min(15.0, audioTrack.duration), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
+                )
+            }
         } else {
             return nil
         }
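The getter now distinguishes a trimmed track (shift the user's trim range by the track offset) from an untrimmed one (play from the offset, capped at 15 seconds). The same arithmetic in isolation, with illustrative parameters instead of the exporter's real state:

```swift
import Foundation
import CoreMedia

func audioTimeRange(offset: Double, trim: ClosedRange<Double>?, trackDuration: Double) -> CMTimeRange {
    let scale = CMTimeScale(NSEC_PER_SEC)
    if let trim {
        // Trimmed: both edges shift by the track's offset into the timeline.
        return CMTimeRange(
            start: CMTime(seconds: offset + trim.lowerBound, preferredTimescale: scale),
            end: CMTime(seconds: offset + trim.upperBound, preferredTimescale: scale)
        )
    } else {
        // Untrimmed: cap at 15 seconds, mirroring the min(15.0, ...) cap above.
        return CMTimeRange(
            start: CMTime(seconds: offset, preferredTimescale: scale),
            end: CMTime(seconds: offset + min(15.0, trackDuration), preferredTimescale: scale)
        )
    }
}
```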
@@ -289,7 +295,7 @@ public final class MediaEditorVideoExport {
     private var additionalReader: AVAssetReader?
     
     private var videoOutput: AVAssetReaderOutput?
-    private var audioOutput: AVAssetReaderAudioMixOutput?
+    private var audioOutput: AVAssetReaderOutput?
    private var textureRotation: TextureRotation = .rotate0Degrees
    
    private var additionalVideoOutput: AVAssetReaderOutput?
@@ -400,8 +406,6 @@ public final class MediaEditorVideoExport {
             return
         }
         
-        
-        
         let timeRange: CMTimeRange = CMTimeRangeMake(start: .zero, duration: duration)
         try? videoTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: .zero)
         if let audioAssetTrack = asset.tracks(withMediaType: .audio).first, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid), !self.configuration.values.videoIsMuted {
@@ -542,12 +546,63 @@ public final class MediaEditorVideoExport {
         
         self.setupComposer()
         
+        var inputAudioMix: AVMutableAudioMix?
+        
         self.writer = MediaEditorVideoAVAssetWriter()
         guard let writer = self.writer else {
             return
         }
         writer.setup(configuration: self.configuration, outputPath: self.outputPath)
         writer.setupVideoInput(configuration: self.configuration, sourceFrameRate: 30.0)
         
+        if let audioData = self.configuration.values.audioTrack {
+            let mixComposition = AVMutableComposition()
+            let audioPath = fullDraftPath(peerId: self.configuration.values.peerId, path: audioData.path)
+            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioPath))
+            
+            if let musicAssetTrack = audioAsset.tracks(withMediaType: .audio).first,
+               let musicTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
+                do {
+                    let reader = try AVAssetReader(asset: mixComposition)
+                    
+                    var musicRange = CMTimeRange(start: .zero, duration: CMTime(seconds: min(15.0, audioData.duration), preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
+                    if let audioTrackRange = self.configuration.audioTimeRange {
+                        musicRange = audioTrackRange
+                    }
+                    try? musicTrack.insertTimeRange(musicRange, of: musicAssetTrack, at: .zero)
+                    
+                    if let volume = self.configuration.values.audioTrackVolume, volume < 1.0 {
+                        let audioMix = AVMutableAudioMix()
+                        var audioMixParam: [AVMutableAudioMixInputParameters] = []
+                        let param: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: musicTrack)
+                        param.trackID = musicTrack.trackID
+                        param.setVolume(Float(volume), at: CMTime.zero)
+                        audioMixParam.append(param)
+                        audioMix.inputParameters = audioMixParam
+                        inputAudioMix = audioMix
+                    }
+                    
+                    let audioTracks = mixComposition.tracks(withMediaType: .audio)
+                    let audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
+                    audioOutput.audioMix = inputAudioMix
+                    audioOutput.alwaysCopiesSampleData = false
+                    if reader.canAdd(audioOutput) {
+                        reader.add(audioOutput)
+                        
+                        self.reader = reader
+                        self.audioOutput = audioOutput
+                        
+                        writer.setupAudioInput(configuration: self.configuration)
+                    } else {
+                        self.internalStatus = .finished
+                        self.statusValue = .failed(.addAudioOutput)
+                    }
+                } catch {
+                    self.internalStatus = .finished
+                    self.statusValue = .failed(.addAudioOutput)
+                }
+            }
+        }
     }
     
     private func finish() {
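The `encodeAudio()` consumer of this new `audioOutput` is not shown in the hunk; its usual shape is a drain loop that copies sample buffers from the reader output into the writer's audio input. A sketch under assumed names:

```swift
import AVFoundation

/// Pulls audio sample buffers from the reader output into the writer input.
/// Returns false once the track is exhausted (or the writer rejects a buffer).
func encodeAudio(from output: AVAssetReaderOutput, into input: AVAssetWriterInput) -> Bool {
    while input.isReadyForMoreMediaData {
        if let sampleBuffer = output.copyNextSampleBuffer() {
            if !input.append(sampleBuffer) {
                return false // writer error; caller should inspect writer.error
            }
        } else {
            input.markAsFinished() // no more samples in the mixed track
            return false
        }
    }
    return true // the writer calls back when it is ready for more data
}
```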
@@ -818,20 +873,53 @@ public final class MediaEditorVideoExport {
             return
         }
         
+        if let _ = self.audioOutput, let reader = self.reader {
+            guard reader.startReading() else {
+                self.statusValue = .failed(.reading(nil))
+                return
+            }
+        }
+        
         self.internalStatus = .exporting
         
         writer.startSession(atSourceTime: .zero)
         
-        self.imageArguments = (5.0, Double(self.configuration.frameRate), CMTime(value: 0, timescale: Int32(self.configuration.frameRate)))
+        var duration: Double = 5.0
+        if let audioDuration = self.configuration.audioTimeRange?.duration.seconds {
+            duration = audioDuration
+        }
+        self.imageArguments = (duration, Double(self.configuration.frameRate), CMTime(value: 0, timescale: Int32(self.configuration.frameRate)))
+        
+        var videoCompleted = false
+        var audioCompleted = false
         
         var exportForVideoOutput: MediaEditorVideoExport? = self
         writer.requestVideoDataWhenReady(on: self.queue.queue) {
             guard let export = exportForVideoOutput else { return }
             if !export.encodeImageVideo() {
+                videoCompleted = true
                 exportForVideoOutput = nil
-                export.finish()
+                if audioCompleted {
+                    export.finish()
+                }
             }
         }
+        
+        if let _ = self.audioOutput {
+            var exportForAudioOutput: MediaEditorVideoExport? = self
+            writer.requestAudioDataWhenReady(on: self.queue.queue) {
+                guard let export = exportForAudioOutput else { return }
+                if !export.encodeAudio() {
+                    audioCompleted = true
+                    exportForAudioOutput = nil
+                    if videoCompleted {
+                        export.finish()
+                    }
+                }
+            }
+        } else {
+            audioCompleted = true
+        }
     }
     
     private func startVideoExport() {
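`finish()` previously ran as soon as the image/video loop drained; with an audio track there are now two independent data callbacks, so each one sets its flag and only the last to complete calls `finish()`. Both callbacks run on the same serial queue, which is what makes the plain `Bool`s safe. A condensed sketch of the handshake with hypothetical parameters:

```swift
import AVFoundation

func startEncoding(on queue: DispatchQueue,
                   videoInput: AVAssetWriterInput,
                   audioInput: AVAssetWriterInput?,
                   encodeVideo: @escaping () -> Bool,
                   encodeAudio: @escaping () -> Bool,
                   finish: @escaping () -> Void) {
    var videoCompleted = false
    var audioCompleted = audioInput == nil // no audio track counts as done

    videoInput.requestMediaDataWhenReady(on: queue) {
        if !encodeVideo() { // false means the video source is exhausted
            videoCompleted = true
            if audioCompleted { finish() }
        }
    }
    audioInput?.requestMediaDataWhenReady(on: queue) {
        if !encodeAudio() { // false means the audio source is exhausted
            audioCompleted = true
            if videoCompleted { finish() }
        }
    }
}
```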
@@ -3086,16 +3086,20 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
     }
     
     func presentAudioPicker() {
+        var isSettingTrack = false
         self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3"], forceDarkTheme: true, dismissed: { [weak self] in
             if let self {
                 Queue.mainQueue().after(0.1) {
-                    self.mediaEditor?.play()
+                    if !isSettingTrack {
+                        self.mediaEditor?.play()
+                    }
                 }
             }
         }, completion: { [weak self] urls in
             guard let self, let mediaEditor = self.mediaEditor, !urls.isEmpty, let url = urls.first else {
                 return
             }
+            isSettingTrack = true
             
             try? FileManager.default.createDirectory(atPath: draftPath(engine: self.context.engine), withIntermediateDirectories: true)
             
@@ -3183,6 +3187,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
             if isScopedResource {
                 url.stopAccessingSecurityScopedResource()
             }
+            
+            mediaEditor.play()
         }
     })
 }
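`legacyICloudFilePicker` can hand back security-scoped URLs, so every `startAccessingSecurityScopedResource()` needs a balanced stop after the file has been copied, which is why the `isSettingTrack` flag and the scoped-resource bookkeeping both live until the end of the completion block. The generic shape of that dance, simplified with `defer`:

```swift
import Foundation

func importAudio(from url: URL, to destination: URL) throws {
    // startAccessing... returns false when the URL was not scoped (e.g. a local file).
    let isScopedResource = url.startAccessingSecurityScopedResource()
    defer {
        if isScopedResource {
            url.stopAccessingSecurityScopedResource()
        }
    }
    try FileManager.default.copyItem(at: url, to: destination)
}
```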
@@ -374,6 +374,7 @@ public final class MessageInputPanelComponent: Component {
     private let counter = ComponentView<Empty>()
     
     private var disabledPlaceholder: ComponentView<Empty>?
+    private var textClippingView = UIView()
     private let textField = ComponentView<Empty>()
     private let textFieldExternalState = TextFieldComponent.ExternalState()
     
@@ -440,12 +441,15 @@ public final class MessageInputPanelComponent: Component {
         self.gradientView = UIImageView()
         self.bottomGradientView = UIView()
         
+        self.textClippingView.clipsToBounds = true
+        
         super.init(frame: frame)
         
         self.addSubview(self.bottomGradientView)
         self.addSubview(self.gradientView)
         self.fieldBackgroundView.addSubview(self.vibrancyEffectView)
         self.addSubview(self.fieldBackgroundView)
+        self.addSubview(self.textClippingView)
         
         self.viewForOverlayContent = ViewForOverlayContent(
             ignoreHit: { [weak self] view, point in
@@ -715,7 +719,7 @@ public final class MessageInputPanelComponent: Component {
                     return value
                 }
             },
-            resetScrollOnFocusChange: component.style == .media,
+            isOneLineWhenUnfocused: component.style == .media,
             formatMenuAvailability: component.isFormattingLocked ? .locked : .available,
             lockedFormatAction: {
                 component.presentTextFormattingTooltip?()
@@ -799,6 +803,12 @@ public final class MessageInputPanelComponent: Component {
             transition.setFrame(view: self.fieldBackgroundView, frame: fieldBackgroundFrame)
             self.fieldBackgroundView.update(size: fieldBackgroundFrame.size, cornerRadius: baseFieldHeight * 0.5, transition: transition.containedViewLayoutTransition)
             
+            var textClippingFrame = fieldBackgroundFrame
+            if component.style == .media, !isEditing {
+                textClippingFrame.size.height -= 10.0
+            }
+            transition.setFrame(view: self.textClippingView, frame: textClippingFrame)
+            
             let gradientFrame = CGRect(origin: CGPoint(x: fieldBackgroundFrame.minX - fieldFrame.minX, y: -topGradientHeight), size: CGSize(width: availableSize.width - (fieldBackgroundFrame.minX - fieldFrame.minX), height: topGradientHeight + fieldBackgroundFrame.maxY + insets.bottom))
             transition.setFrame(view: self.gradientView, frame: gradientFrame)
             transition.setFrame(view: self.bottomGradientView, frame: CGRect(origin: CGPoint(x: 0.0, y: gradientFrame.maxY), size: CGSize(width: availableSize.width, height: component.bottomInset)))
@@ -918,7 +928,7 @@ public final class MessageInputPanelComponent: Component {
             
             if let textFieldView = self.textField.view as? TextFieldComponent.View {
                 if textFieldView.superview == nil {
-                    self.addSubview(textFieldView)
+                    self.textClippingView.addSubview(textFieldView)
                     
                     if let viewForOverlayContent = self.viewForOverlayContent {
                         self.addSubview(viewForOverlayContent)
@@ -932,7 +942,7 @@ public final class MessageInputPanelComponent: Component {
                         }
                     }
                 }
-                let textFieldFrame = CGRect(origin: CGPoint(x: fieldBackgroundFrame.minX, y: fieldBackgroundFrame.maxY - textFieldSize.height), size: textFieldSize)
+                let textFieldFrame = CGRect(origin: CGPoint(x: 0.0, y: 0.0), size: textFieldSize)
                 transition.setFrame(view: textFieldView, frame: textFieldFrame)
                 transition.setAlpha(view: textFieldView, alpha: (hasMediaRecording || hasMediaEditing || component.disabledPlaceholder != nil || component.isChannel) ? 0.0 : 1.0)
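Taken together, these hunks route the text field through `textClippingView`: the field keeps its full multi-line height at origin `.zero`, and the container, now 10 pt shorter in the unfocused media style, crops it to a single visual line. The idea in isolation (sizes are illustrative):

```swift
import UIKit

final class OneLineFieldContainer {
    let clippingView = UIView()
    let textView = UITextView(frame: CGRect(x: 0.0, y: 0.0, width: 320.0, height: 120.0))

    init() {
        clippingView.clipsToBounds = true
        clippingView.addSubview(textView) // full-height text view at origin zero
    }

    func layout(fieldFrame: CGRect, isEditing: Bool) {
        var clippingFrame = fieldFrame
        if !isEditing {
            clippingFrame.size.height -= 10.0 // crop the extra lines when unfocused
        }
        clippingView.frame = clippingFrame
    }
}
```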
@@ -91,7 +91,7 @@ public final class TextFieldComponent: Component {
     public let insets: UIEdgeInsets
     public let hideKeyboard: Bool
     public let resetText: NSAttributedString?
-    public let resetScrollOnFocusChange: Bool
+    public let isOneLineWhenUnfocused: Bool
     public let formatMenuAvailability: FormatMenuAvailability
     public let lockedFormatAction: () -> Void
     public let present: (ViewController) -> Void
@@ -106,7 +106,7 @@ public final class TextFieldComponent: Component {
         insets: UIEdgeInsets,
         hideKeyboard: Bool,
         resetText: NSAttributedString?,
-        resetScrollOnFocusChange: Bool,
+        isOneLineWhenUnfocused: Bool,
         formatMenuAvailability: FormatMenuAvailability,
         lockedFormatAction: @escaping () -> Void,
         present: @escaping (ViewController) -> Void,
@@ -120,7 +120,7 @@ public final class TextFieldComponent: Component {
         self.insets = insets
         self.hideKeyboard = hideKeyboard
         self.resetText = resetText
-        self.resetScrollOnFocusChange = resetScrollOnFocusChange
+        self.isOneLineWhenUnfocused = isOneLineWhenUnfocused
         self.formatMenuAvailability = formatMenuAvailability
         self.lockedFormatAction = lockedFormatAction
         self.present = present
@@ -149,7 +149,7 @@ public final class TextFieldComponent: Component {
         if lhs.resetText != rhs.resetText {
             return false
         }
-        if lhs.resetScrollOnFocusChange != rhs.resetScrollOnFocusChange {
+        if lhs.isOneLineWhenUnfocused != rhs.isOneLineWhenUnfocused {
             return false
         }
         if lhs.formatMenuAvailability != rhs.formatMenuAvailability {
@@ -201,6 +201,8 @@ public final class TextFieldComponent: Component {
         private var customEmojiContainerView: CustomEmojiContainerView?
         private var emojiViewProvider: ((ChatTextInputTextCustomEmojiAttribute) -> UIView)?
         
+        private let ellipsisView = ComponentView<Empty>()
+        
         private var inputState: InputState {
             let selectionRange: Range<Int> = self.textView.selectedRange.location ..< (self.textView.selectedRange.location + self.textView.selectedRange.length)
             return InputState(inputText: stateAttributedStringForText(self.textView.attributedText ?? NSAttributedString()), selectionRange: selectionRange)
@@ -796,6 +798,30 @@ public final class TextFieldComponent: Component {
             component.externalState.hasTrackingView = hasTrackingView
         }
         
+        func rightmostPositionOfFirstLine() -> CGPoint? {
+            let glyphRange = self.layoutManager.glyphRange(for: self.textContainer)
+            
+            if glyphRange.length == 0 { return nil }
+            
+            var lineRect = CGRect.zero
+            var glyphIndexForStringStart = glyphRange.location
+            var lineRange: NSRange = NSRange()
+            
+            repeat {
+                lineRect = self.layoutManager.lineFragmentUsedRect(forGlyphAt: glyphIndexForStringStart, effectiveRange: &lineRange)
+                if NSMaxRange(lineRange) > glyphRange.length {
+                    lineRange.length = glyphRange.length - lineRange.location
+                }
+                glyphIndexForStringStart = NSMaxRange(lineRange)
+            } while glyphIndexForStringStart < NSMaxRange(glyphRange) && !NSLocationInRange(glyphRange.location, lineRange)
+            
+            let padding = self.textView.textContainerInset.left
+            let rightmostX = lineRect.maxX + padding
+            let rightmostY = lineRect.minY + self.textView.textContainerInset.top
+            
+            return CGPoint(x: rightmostX, y: rightmostY)
+        }
+        
         func update(component: TextFieldComponent, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
             self.component = component
             self.state = state
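The helper walks the layout manager's line fragments to find where the first line's glyphs end, which is where the ellipsis overlay gets pinned. A simplified variant that reads only the first fragment (the version above also defensively clamps the effective range):

```swift
import UIKit

func firstLineEnd(of textView: UITextView) -> CGPoint? {
    let layoutManager = textView.layoutManager
    let glyphRange = layoutManager.glyphRange(for: textView.textContainer)
    guard glyphRange.length > 0 else { return nil }

    // The used rect is the tight bounding box of the glyphs actually
    // laid out on the first line fragment.
    var lineRange = NSRange()
    let lineRect = layoutManager.lineFragmentUsedRect(forGlyphAt: glyphRange.location, effectiveRange: &lineRange)
    return CGPoint(x: lineRect.maxX + textView.textContainerInset.left,
                   y: lineRect.minY + textView.textContainerInset.top)
}
```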
@@ -834,7 +860,7 @@ public final class TextFieldComponent: Component {
             let isEditing = self.textView.isFirstResponder
             
             var refreshScrolling = self.textView.bounds.size != size
-            if component.resetScrollOnFocusChange && !isEditing && isEditing != wasEditing {
+            if component.isOneLineWhenUnfocused && !isEditing && isEditing != wasEditing {
                 refreshScrolling = true
             }
             self.textView.frame = CGRect(origin: CGPoint(), size: size)
@@ -844,7 +870,7 @@ public final class TextFieldComponent: Component {
             
             if refreshScrolling {
                 if isEditing {
-                    if wasEditing {
+                    if wasEditing || component.isOneLineWhenUnfocused {
                         self.textView.setContentOffset(CGPoint(x: 0.0, y: max(0.0, self.textView.contentSize.height - self.textView.bounds.height)), animated: false)
                     }
                 } else {
@@ -872,6 +898,35 @@ public final class TextFieldComponent: Component {
                 }
             }
             
+            if component.isOneLineWhenUnfocused, let position = self.rightmostPositionOfFirstLine() {
+                let ellipsisSize = self.ellipsisView.update(
+                    transition: transition,
+                    component: AnyComponent(
+                        Text(
+                            text: "\u{2026}",
+                            font: Font.regular(component.fontSize),
+                            color: component.textColor
+                        )
+                    ),
+                    environment: {},
+                    containerSize: availableSize
+                )
+                if let view = self.ellipsisView.view {
+                    if view.superview == nil {
+                        self.textView.addSubview(view)
+                    }
+                    transition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: position.x - 11.0, y: position.y), size: ellipsisSize))
+                    
+                    let ellipsisTransition: Transition
+                    if isEditing {
+                        ellipsisTransition = .easeInOut(duration: 0.2)
+                    } else {
+                        ellipsisTransition = .easeInOut(duration: 0.3)
+                    }
+                    ellipsisTransition.setAlpha(view: view, alpha: isEditing ? 0.0 : 1.0)
+                }
+            }
+            
             self.updateEntities()
             
             return size
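The ellipsis is a plain "…" glyph overlaid inside the text view at the end of the first line, faded out while editing (when the full text is visible) and faded back in when the field collapses to one line. The fade contract on its own, with a UILabel standing in for the ComponentView:

```swift
import UIKit

func updateEllipsis(_ label: UILabel, at position: CGPoint, isEditing: Bool) {
    label.text = "\u{2026}"
    label.sizeToFit()
    // Tuck the glyph against the last visible character (same offset as above).
    label.frame.origin = CGPoint(x: position.x - 11.0, y: position.y)
    UIView.animate(withDuration: isEditing ? 0.2 : 0.3) {
        label.alpha = isEditing ? 0.0 : 1.0
    }
}
```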