Stories improvements

This commit is contained in:
Ilya Laktyushin 2023-06-24 14:55:39 +02:00
parent a710019021
commit 44fc1b46f1
12 changed files with 229 additions and 99 deletions

View File

@ -839,6 +839,14 @@ public final class Camera {
return disposable
}
}
/// Whether the device can run a dual-camera (multi-cam) capture session.
/// Requires iOS 13+ AND hardware support as reported by AVFoundation.
public static var isDualCamSupported: Bool {
    // Early-exit on OS version; #available cannot appear in a plain
    // boolean expression, so a guard is the flattest equivalent form.
    guard #available(iOS 13.0, *) else {
        return false
    }
    return AVCaptureMultiCamSession.isMultiCamSupported
}
}
public final class CameraHolder {

View File

@ -1122,7 +1122,7 @@ public final class ChatListContainerNode: ASDisplayNode, UIGestureRecognizerDele
self.applyItemNodeAsCurrent(id: .all, itemNode: itemNode)
let panRecognizer = InteractiveTransitionGestureRecognizer(target: self, action: #selector(self.panGesture(_:)), allowedDirections: { [weak self] _ in
guard let strongSelf = self, strongSelf.availableFilters.count > 1 else {
guard let strongSelf = self, strongSelf.availableFilters.count > 1 || strongSelf.controller?.isStoryPostingAvailable == true else {
return []
}
switch strongSelf.currentItemNode.visibleContentOffset() {
@ -1136,8 +1136,11 @@ public final class ChatListContainerNode: ASDisplayNode, UIGestureRecognizerDele
if !strongSelf.currentItemNode.isNavigationInAFinalState {
return []
}
let directions: InteractiveTransitionGestureRecognizerDirections = [.leftCenter, .rightCenter]
return directions
if strongSelf.availableFilters.count > 1 {
return [.leftCenter, .rightCenter]
} else {
return [.rightEdge]
}
}, edgeWidth: .widthMultiplier(factor: 1.0 / 6.0, min: 22.0, max: 80.0))
panRecognizer.delegate = self
panRecognizer.delaysTouchesBegan = false

View File

@ -300,8 +300,7 @@ final class ContactsControllerNode: ASDisplayNode, UIGestureRecognizerDelegate {
super.didLoad()
let panRecognizer = InteractiveTransitionGestureRecognizer(target: self, action: #selector(self.panGesture(_:)), allowedDirections: { _ in
let directions: InteractiveTransitionGestureRecognizerDirections = [.rightCenter, .rightEdge]
return directions
return [.rightCenter, .rightEdge]
}, edgeWidth: .widthMultiplier(factor: 1.0 / 6.0, min: 22.0, max: 80.0))
panRecognizer.delegate = self
panRecognizer.delaysTouchesBegan = false

View File

@ -256,24 +256,41 @@ final class MediaPickerGridItemNode: GridItemNode {
self.backgroundColor = theme.list.mediaPlaceholderColor
if self.currentDraftState == nil || self.currentDraftState?.0.path != draft.path || self.currentDraftState!.1 != index || self.currentState != nil {
self.currentState = nil
let imageSignal: Signal<UIImage?, NoError> = .single(draft.thumbnail)
self.imageNode.setSignal(imageSignal)
self.currentDraftState = (draft, index)
if self.currentState != nil {
self.currentState = nil
self.typeIconNode.removeFromSupernode()
}
if self.draftNode.supernode == nil {
self.draftNode.attributedText = NSAttributedString(string: "Draft", font: Font.semibold(12.0), textColor: .white)
self.addSubnode(self.draftNode)
}
if self.typeIconNode.supernode != nil {
self.typeIconNode.removeFromSupernode()
}
if self.durationNode.supernode != nil {
self.durationNode.removeFromSupernode()
if draft.isVideo {
self.typeIconNode.image = UIImage(bundleImageName: "Media Editor/MediaVideo")
self.durationNode.attributedText = NSAttributedString(string: stringForDuration(Int32(draft.duration ?? 0.0)), font: Font.semibold(12.0), textColor: .white)
if self.typeIconNode.supernode == nil {
self.addSubnode(self.gradientNode)
self.addSubnode(self.typeIconNode)
self.addSubnode(self.durationNode)
self.setNeedsLayout()
}
} else {
if self.typeIconNode.supernode != nil {
self.typeIconNode.removeFromSupernode()
}
if self.durationNode.supernode != nil {
self.durationNode.removeFromSupernode()
}
if self.gradientNode.supernode != nil {
self.gradientNode.removeFromSupernode()
}
}
self.setNeedsLayout()
@ -334,8 +351,6 @@ final class MediaPickerGridItemNode: GridItemNode {
}
if self.currentState == nil || self.currentState!.0 !== fetchResult || self.currentState!.1 != index || self.currentDraftState != nil {
self.currentDraftState = nil
self.backgroundNode.image = nil
let editingContext = interaction.editingState
let asset = fetchResult.object(at: index)
@ -434,6 +449,10 @@ final class MediaPickerGridItemNode: GridItemNode {
strongSelf.updateHasSpoiler(hasSpoiler)
}))
if self.currentDraftState != nil {
self.currentDraftState = nil
}
if asset.isFavorite {
self.typeIconNode.image = generateTintedImage(image: UIImage(bundleImageName: "Media Grid/Favorite"), color: .white)
if self.typeIconNode.supernode == nil {
@ -450,9 +469,9 @@ final class MediaPickerGridItemNode: GridItemNode {
self.typeIconNode.image = UIImage(bundleImageName: "Media Editor/MediaVideo")
}
self.durationNode.attributedText = NSAttributedString(string: stringForDuration(Int32(asset.duration)), font: Font.semibold(12.0), textColor: .white)
if self.typeIconNode.supernode == nil {
self.durationNode.attributedText = NSAttributedString(string: stringForDuration(Int32(asset.duration)), font: Font.semibold(12.0), textColor: .white)
self.addSubnode(self.gradientNode)
self.addSubnode(self.typeIconNode)
self.addSubnode(self.durationNode)

View File

@ -221,7 +221,7 @@ public extension TelegramEngine {
replyTo replyToMessageId: EngineMessage.Id?,
storyId: StoryId? = nil,
content: EngineOutgoingMessageContent
) {
) -> Signal<[MessageId?], NoError> {
var attributes: [MessageAttribute] = []
var text: String = ""
var mediaReference: AnyMediaReference?
@ -246,11 +246,11 @@ public extension TelegramEngine {
correlationId: nil,
bubbleUpEmojiOrStickersets: []
)
let _ = enqueueMessages(
return enqueueMessages(
account: self.account,
peerId: peerId,
messages: [message]
).start()
)
}
public func enqueueOutgoingMessageWithChatContextResult(to peerId: PeerId, threadId: Int64?, botId: PeerId, result: ChatContextResult, replyToMessageId: MessageId? = nil, replyToStoryId: StoryId? = nil, hideVia: Bool = false, silentPosting: Bool = false, scheduleTime: Int32? = nil, correlationId: Int64? = nil) -> Bool {

View File

@ -451,7 +451,7 @@ private final class CameraScreenComponent: CombinedComponent {
.disappear(.default(scale: true))
)
if #available(iOS 13.0, *), !isTablet {
if !isTablet && Camera.isDualCamSupported {
let dualButton = dualButton.update(
component: CameraButton(
content: AnyComponentWithIdentity(

View File

@ -116,6 +116,18 @@ public final class MediaEditor {
}
private let playerPlaybackStatePromise = Promise<(Double, Double, Bool, Bool)>((0.0, 0.0, false, false))
/// Effective playback duration of the current media, or `nil` for still images
/// (i.e. when no `player` is attached).
///
/// - If the user trimmed the video, the trimmed range's length is returned.
/// - Otherwise the raw player duration is used, capped at 60 seconds
///   (the story length limit).
public var duration: Double? {
    // No player means the subject is a photo — duration is undefined.
    guard self.player != nil else {
        return nil
    }
    if let trimRange = self.values.videoTrimRange {
        return trimRange.upperBound - trimRange.lowerBound
    }
    // playerPlaybackState.0 carries the asset's total duration.
    return min(60.0, self.playerPlaybackState.0)
}
public var onFirstDisplay: () -> Void = {}
public func playerState(framesCount: Int) -> Signal<MediaEditorPlayerState?, NoError> {
@ -293,16 +305,51 @@ public final class MediaEditor {
let colors = mediaEditorGetGradientColors(from: image)
textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
case let .draft(draft):
guard let image = UIImage(contentsOfFile: draft.fullPath()) else {
return
}
let colors: (UIColor, UIColor)
if let gradientColors = draft.values.gradientColors {
colors = (gradientColors.first!, gradientColors.last!)
if draft.isVideo {
textureSource = Signal { subscriber in
let url = URL(fileURLWithPath: draft.fullPath())
let asset = AVURLAsset(url: url)
let playerItem = AVPlayerItem(asset: asset)
let player = AVPlayer(playerItem: playerItem)
player.automaticallyWaitsToMinimizeStalling = false
if let gradientColors = draft.values.gradientColors {
let colors = (gradientColors.first!, gradientColors.last!)
subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
subscriber.putCompletion()
return EmptyDisposable
} else {
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
imageGenerator.maximumSize = CGSize(width: 72, height: 128)
imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in
if let image {
let colors = mediaEditorGetGradientColors(from: UIImage(cgImage: image))
subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
} else {
subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, .black, .black))
}
subscriber.putCompletion()
}
return ActionDisposable {
imageGenerator.cancelAllCGImageGeneration()
}
}
}
} else {
colors = mediaEditorGetGradientColors(from: image)
guard let image = UIImage(contentsOfFile: draft.fullPath()) else {
return
}
let colors: (UIColor, UIColor)
if let gradientColors = draft.values.gradientColors {
colors = (gradientColors.first!, gradientColors.last!)
} else {
colors = mediaEditorGetGradientColors(from: image)
}
textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
}
textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
case let .video(path, transitionImage, _):
textureSource = Signal { subscriber in
let url = URL(fileURLWithPath: path)

View File

@ -41,9 +41,9 @@ public struct MediaEditorResultPrivacy: Codable, Equatable {
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(privacy, forKey: .privacy)
try container.encode(Int32(timeout), forKey: .timeout)
try container.encode(archive, forKey: .archive)
try container.encode(self.privacy, forKey: .privacy)
try container.encode(Int32(self.timeout), forKey: .timeout)
try container.encode(self.archive, forKey: .archive)
}
}
@ -58,6 +58,7 @@ public final class MediaEditorDraft: Codable, Equatable {
case thumbnail
case dimensionsWidth
case dimensionsHeight
case duration
case values
case caption
case privacy
@ -67,15 +68,17 @@ public final class MediaEditorDraft: Codable, Equatable {
public let isVideo: Bool
public let thumbnail: UIImage
public let dimensions: PixelDimensions
public let duration: Double?
public let values: MediaEditorValues
public let caption: NSAttributedString
public let privacy: MediaEditorResultPrivacy?
public init(path: String, isVideo: Bool, thumbnail: UIImage, dimensions: PixelDimensions, values: MediaEditorValues, caption: NSAttributedString, privacy: MediaEditorResultPrivacy?) {
public init(path: String, isVideo: Bool, thumbnail: UIImage, dimensions: PixelDimensions, duration: Double?, values: MediaEditorValues, caption: NSAttributedString, privacy: MediaEditorResultPrivacy?) {
self.path = path
self.isVideo = isVideo
self.thumbnail = thumbnail
self.dimensions = dimensions
self.duration = duration
self.values = values
self.caption = caption
self.privacy = privacy
@ -96,6 +99,7 @@ public final class MediaEditorDraft: Codable, Equatable {
width: try container.decode(Int32.self, forKey: .dimensionsWidth),
height: try container.decode(Int32.self, forKey: .dimensionsHeight)
)
self.duration = try container.decodeIfPresent(Double.self, forKey: .duration)
let valuesData = try container.decode(Data.self, forKey: .values)
if let values = try? JSONDecoder().decode(MediaEditorValues.self, from: valuesData) {
self.values = values
@ -103,8 +107,12 @@ public final class MediaEditorDraft: Codable, Equatable {
fatalError()
}
self.caption = ((try? container.decode(ChatTextInputStateText.self, forKey: .caption)) ?? ChatTextInputStateText()).attributedText()
self.privacy = nil
//self.privacy = try container.decode(MediaEditorResultPrivacy.self, forKey: .values)
if let data = try container.decodeIfPresent(Data.self, forKey: .privacy), let privacy = try? JSONDecoder().decode(MediaEditorResultPrivacy.self, from: data) {
self.privacy = privacy
} else {
self.privacy = nil
}
}
public func encode(to encoder: Encoder) throws {
@ -112,20 +120,27 @@ public final class MediaEditorDraft: Codable, Equatable {
try container.encode(self.path, forKey: .path)
try container.encode(self.isVideo, forKey: .isVideo)
if let thumbnailData = self.thumbnail.jpegData(compressionQuality: 0.8) {
if let thumbnailData = self.thumbnail.jpegData(compressionQuality: 0.6) {
try container.encode(thumbnailData, forKey: .thumbnail)
}
try container.encode(self.dimensions.width, forKey: .dimensionsWidth)
try container.encode(self.dimensions.height, forKey: .dimensionsHeight)
try container.encodeIfPresent(self.duration, forKey: .duration)
if let valuesData = try? JSONEncoder().encode(self.values) {
try container.encode(valuesData, forKey: .values)
} else {
fatalError()
}
let chatInputText = ChatTextInputStateText(attributedText: self.caption)
try container.encode(chatInputText, forKey: .caption)
//try container.encode(self.privacy, forKey: .privacy)
if let privacy = self .privacy {
if let data = try? JSONEncoder().encode(privacy) {
try container.encode(data, forKey: .privacy)
} else {
try container.encodeNil(forKey: .privacy)
}
} else {
try container.encodeNil(forKey: .privacy)
}
}
}

View File

@ -603,10 +603,13 @@ final class MediaEditorScreenComponent: Component {
if let controller = environment.controller() as? MediaEditorScreen {
isEditingStory = controller.isEditingStory
if self.component == nil {
self.inputPanelExternalState.initialText = controller.initialCaption
if let initialCaption = controller.initialCaption {
self.inputPanelExternalState.initialText = initialCaption
} else if case let .draft(draft, _) = controller.node.subject {
self.inputPanelExternalState.initialText = draft.caption
}
}
}
self.component = component
self.state = state
@ -1517,7 +1520,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
var state = State() {
didSet {
self.node.requestUpdate()
if self.isNodeLoaded {
self.node.requestUpdate()
}
}
}
@ -1714,6 +1719,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return
}
if case let .draft(draft, _) = subject, let privacy = draft.privacy {
controller.state.privacy = privacy
}
let isSavingAvailable: Bool
switch subject {
case .image, .video:
@ -3110,7 +3119,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
let codableEntities = DrawingEntitiesView.encodeEntities(entities, entitiesView: self.node.entitiesView)
mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities)
if let subject = self.node.subject, case .asset = subject, self.node.mediaEditor?.values.hasChanges == false {
let caption = self.getCaption()
if let subject = self.node.subject, case .asset = subject, self.node.mediaEditor?.values.hasChanges == false && caption.string.isEmpty {
return false
}
return true
@ -3189,16 +3200,18 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
private func saveDraft(id: Int64?) {
guard let subject = self.node.subject, let values = self.node.mediaEditor?.values else {
guard let subject = self.node.subject, let mediaEditor = self.node.mediaEditor else {
return
}
try? FileManager.default.createDirectory(atPath: draftPath(), withIntermediateDirectories: true)
let values = mediaEditor.values
let privacy = self.state.privacy
let caption = self.getCaption()
let duration = mediaEditor.duration ?? 0.0
if let resultImage = self.node.mediaEditor?.resultImage {
self.node.mediaEditor?.seek(0.0, andPlay: false)
if let resultImage = mediaEditor.resultImage {
mediaEditor.seek(0.0, andPlay: false)
makeEditorImageComposition(account: self.context.account, inputImage: resultImage, dimensions: storyDimensions, values: values, time: .zero, completion: { resultImage in
guard let resultImage else {
return
@ -3209,7 +3222,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) {
let path = "\(Int64.random(in: .min ... .max)).jpg"
if let data = image.jpegData(compressionQuality: 0.87) {
let draft = MediaEditorDraft(path: path, isVideo: false, thumbnail: thumbnailImage, dimensions: dimensions, values: values, caption: caption, privacy: privacy)
let draft = MediaEditorDraft(path: path, isVideo: false, thumbnail: thumbnailImage, dimensions: dimensions, duration: nil, values: values, caption: caption, privacy: privacy)
try? data.write(to: URL(fileURLWithPath: draft.fullPath()))
if let id {
saveStorySource(engine: self.context.engine, item: draft, id: id)
@ -3220,10 +3233,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
}
let saveVideoDraft: (String, PixelDimensions) -> Void = { videoPath, dimensions in
let saveVideoDraft: (String, PixelDimensions, Double) -> Void = { videoPath, dimensions, duration in
if let thumbnailImage = generateScaledImage(image: resultImage, size: fittedSize) {
let path = "\(Int64.random(in: .min ... .max)).mp4"
let draft = MediaEditorDraft(path: path, isVideo: true, thumbnail: thumbnailImage, dimensions: dimensions, values: values, caption: caption, privacy: privacy)
let draft = MediaEditorDraft(path: path, isVideo: true, thumbnail: thumbnailImage, dimensions: dimensions, duration: duration, values: values, caption: caption, privacy: privacy)
try? FileManager.default.moveItem(atPath: videoPath, toPath: draft.fullPath())
if let id {
saveStorySource(engine: self.context.engine, item: draft, id: id)
@ -3237,12 +3250,12 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
case let .image(image, dimensions, _, _):
saveImageDraft(image, dimensions)
case let .video(path, _, _, _, dimensions, _):
saveVideoDraft(path, dimensions)
saveVideoDraft(path, dimensions, duration)
case let .asset(asset):
if asset.mediaType == .video {
PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
if let urlAsset = avAsset as? AVURLAsset {
saveVideoDraft(urlAsset.url.absoluteString, PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)))
saveVideoDraft(urlAsset.url.relativePath, PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight)), duration)
}
}
} else {
@ -3256,7 +3269,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
case let .draft(draft, _):
if draft.isVideo {
saveVideoDraft(draft.fullPath(), draft.dimensions)
saveVideoDraft(draft.fullPath(), draft.dimensions, draft.duration ?? 0.0)
} else if let image = UIImage(contentsOfFile: draft.fullPath()) {
saveImageDraft(image, draft.dimensions)
}

View File

@ -2350,7 +2350,7 @@ public final class StoryItemSetContainerComponent: Component {
})
}
private func navigateToPeer(peer: EnginePeer, messageId: EngineMessage.Id? = nil) {
func navigateToPeer(peer: EnginePeer, messageId: EngineMessage.Id? = nil) {
guard let component = self.component else {
return
}

View File

@ -321,29 +321,37 @@ final class StoryItemSetContainerSendMessage {
return
}
let focusedStoryId = StoryId(peerId: peerId, id: focusedItem.storyItem.id)
let peer = component.slice.peer
component.context.engine.messages.enqueueOutgoingMessage(
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
let controller = component.controller()
let _ = (component.context.engine.messages.enqueueOutgoingMessage(
to: peerId,
replyTo: nil,
storyId: focusedStoryId,
content: .file(fileReference)
)
) |> deliverOnMainQueue).start(next: { [weak controller, weak view] messageIds in
if let controller {
Queue.mainQueue().after(0.3) {
controller.present(UndoOverlayController(
presentationData: presentationData,
content: .actionSucceeded(title: "", text: "Message Sent", cancel: "View in Chat"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { [weak view] action in
if case .undo = action, let messageId = messageIds.first {
view?.navigateToPeer(peer: peer, messageId: messageId)
}
return false
}
), in: .current)
}
}
})
self.currentInputMode = .text
view.endEditing(true)
Queue.mainQueue().after(0.66) {
if let controller = component.controller() {
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
controller.present(UndoOverlayController(
presentationData: presentationData,
content: .succeed(text: "Message Sent"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { _ in return false }
), in: .current)
}
}
}
func performSendGifAction(view: StoryItemSetContainerComponent.View, fileReference: FileMediaReference) {
@ -355,29 +363,37 @@ final class StoryItemSetContainerSendMessage {
return
}
let focusedStoryId = StoryId(peerId: peerId, id: focusedItem.storyItem.id)
let peer = component.slice.peer
component.context.engine.messages.enqueueOutgoingMessage(
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
let controller = component.controller()
let _ = (component.context.engine.messages.enqueueOutgoingMessage(
to: peerId,
replyTo: nil,
storyId: focusedStoryId,
content: .file(fileReference)
)
) |> deliverOnMainQueue).start(next: { [weak controller, weak view] messageIds in
if let controller {
Queue.mainQueue().after(0.3) {
controller.present(UndoOverlayController(
presentationData: presentationData,
content: .actionSucceeded(title: "", text: "Message Sent", cancel: "View in Chat"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { [weak view] action in
if case .undo = action, let messageId = messageIds.first {
view?.navigateToPeer(peer: peer, messageId: messageId)
}
return false
}
), in: .current)
}
}
})
self.currentInputMode = .text
view.endEditing(true)
Queue.mainQueue().after(0.66) {
if let controller = component.controller() {
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
controller.present(UndoOverlayController(
presentationData: presentationData,
content: .succeed(text: "Message Sent"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { _ in return false }
), in: .current)
}
}
}
func performSendMessageAction(
@ -394,6 +410,10 @@ final class StoryItemSetContainerSendMessage {
guard let inputPanelView = view.inputPanel.view as? MessageInputPanelComponent.View else {
return
}
let peer = component.slice.peer
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
let controller = component.controller()
if let recordedAudioPreview = self.recordedAudioPreview {
self.recordedAudioPreview = nil
@ -410,28 +430,32 @@ final class StoryItemSetContainerSendMessage {
case let .text(text):
if !text.string.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty {
let entities = generateChatInputTextEntities(text)
component.context.engine.messages.enqueueOutgoingMessage(
let _ = (component.context.engine.messages.enqueueOutgoingMessage(
to: peerId,
replyTo: nil,
storyId: focusedStoryId,
content: .text(text.string, entities)
)
) |> deliverOnMainQueue).start(next: { [weak controller, weak view] messageIds in
if let controller {
Queue.mainQueue().after(0.3) {
controller.present(UndoOverlayController(
presentationData: presentationData,
content: .actionSucceeded(title: "", text: "Message Sent", cancel: "View in Chat"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { [weak view] action in
if case .undo = action, let messageId = messageIds.first {
view?.navigateToPeer(peer: peer, messageId: messageId)
}
return false
}
), in: .current)
}
}
})
inputPanelView.clearSendMessageInput()
self.currentInputMode = .text
view.endEditing(true)
Queue.mainQueue().after(0.66) {
if let controller = component.controller() {
let presentationData = component.context.sharedContext.currentPresentationData.with { $0 }
controller.present(UndoOverlayController(
presentationData: presentationData,
content: .succeed(text: "Message Sent"),
elevatedLayout: false,
animateInAsReplacement: false,
action: { _ in return false }
), in: .current)
}
}
}
}
}

View File

@ -613,6 +613,8 @@ public final class TextFieldComponent: Component {
}
}
self.updateEntities()
return size
}
}