Various fixes

This commit is contained in:
Ilya Laktyushin 2023-12-18 17:14:00 +04:00
parent 758016c638
commit 595dfd18c5
11 changed files with 258 additions and 119 deletions

View File

@ -10761,3 +10761,5 @@ Sorry for the inconvenience.";
"Story.Views.Commented" = " • commented";
"Share.RepostToStory" = "Repost\nto Story";
"Conversation.ReadMore" = "Read More";

View File

@ -837,12 +837,10 @@ public final class DrawingEntitiesView: UIView, TGPhotoDrawingEntitiesView {
selectionView.handlePan(gestureRecognizer)
}
}
}
else if self.autoSelectEntities, gestureRecognizer.numberOfTouches == 1, let viewToSelect = self.entity(at: location) {
} else if self.autoSelectEntities, gestureRecognizer.numberOfTouches == 1, let viewToSelect = self.entity(at: location) {
self.selectEntity(viewToSelect.entity, animate: false)
self.onInteractionUpdated(true)
}
else if gestureRecognizer.numberOfTouches == 2, let mediaEntityView = self.subviews.first(where: { $0 is DrawingEntityMediaView }) as? DrawingEntityMediaView {
} else if gestureRecognizer.numberOfTouches == 2, let mediaEntityView = self.subviews.first(where: { $0 is DrawingEntityMediaView }) as? DrawingEntityMediaView {
mediaEntityView.handlePan(gestureRecognizer)
}
}
@ -963,7 +961,7 @@ public class DrawingEntityView: UIView {
}
selectionView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2, delay: 0.1)
selectionView.layer.animateScale(from: 0.87, to: 1.0, duration: 0.2, delay: 0.1)
selectionView.layer.animateScale(from: 0.88, to: 1.0, duration: 0.23, delay: 0.1)
let values = [self.entity.scale, self.entity.scale * 0.88, self.entity.scale]
let keyTimes = [0.0, 0.33, 1.0]

View File

@ -2486,10 +2486,10 @@ private final class PremiumIntroScreenComponent: CombinedComponent {
self.updateInProgress(false)
self.updated(transition: .immediate)
if case let .waitForExpiration(period) = error {
if case let .waitForExpiration(date) = error {
let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
let dateText = stringForMediumDate(timestamp: Int32(CFAbsoluteTimeGetCurrent() + NSTimeIntervalSince1970) + period, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat)
let dateText = stringForMediumDate(timestamp: date, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat)
self.present(UndoOverlayController(presentationData: presentationData, content: .info(title: presentationData.strings.Premium_Gift_ApplyLink_AlreadyHasPremium_Title, text: presentationData.strings.Premium_Gift_ApplyLink_AlreadyHasPremium_Text(dateText).string, timeout: nil, customUndoText: nil), elevatedLayout: true, position: .bottom, action: { _ in return true }))
}
}, completed: { [weak self] in

View File

@ -197,7 +197,7 @@ public enum ApplyPremiumGiftCodeError {
func _internal_applyPremiumGiftCode(account: Account, slug: String) -> Signal<Never, ApplyPremiumGiftCodeError> {
return account.network.request(Api.functions.payments.applyGiftCode(slug: slug))
|> mapError { error -> ApplyPremiumGiftCodeError in
if error.errorDescription.hasPrefix("FLOOD_WAIT_") {
if error.errorDescription.hasPrefix("PREMIUM_SUB_ACTIVE_UNTIL_") {
if let range = error.errorDescription.range(of: "_", options: .backwards) {
if let value = Int32(error.errorDescription[range.upperBound...]) {
return .waitForExpiration(value)

View File

@ -86,6 +86,7 @@ public class ChatMessageTextBubbleContentNode: ChatMessageBubbleContentNode {
private let textNode: TextNodeWithEntities
private var spoilerTextNode: TextNodeWithEntities?
private var dustNode: InvisibleInkDustNode?
private var moreNode: TextNode?
private let textAccessibilityOverlayNode: TextAccessibilityOverlayNode
public var statusNode: ChatMessageDateAndStatusNode?
@ -166,6 +167,7 @@ public class ChatMessageTextBubbleContentNode: ChatMessageBubbleContentNode {
let textLayout = TextNodeWithEntities.asyncLayout(self.textNode)
let spoilerTextLayout = TextNodeWithEntities.asyncLayout(self.spoilerTextNode)
let statusLayout = ChatMessageDateAndStatusNode.asyncLayout(self.statusNode)
let moreLayout = TextNode.asyncLayout(self.moreNode)
let currentCachedChatMessageText = self.cachedChatMessageText
@ -504,21 +506,29 @@ public class ChatMessageTextBubbleContentNode: ChatMessageBubbleContentNode {
attributedText = updatedString
}
let cutout: TextNodeCutout? = nil
let hideAllAdditionalInfo = item.presentationData.isPreview
var moreLayoutAndApply: (TextNodeLayout, () -> TextNode)?
var cutout: TextNodeCutout? = nil
if item.presentationData.isPreview {
moreLayoutAndApply = moreLayout(TextNodeLayoutArguments(attributedString: NSAttributedString(string: item.presentationData.strings.Conversation_ReadMore, font: textFont, textColor: messageTheme.accentTextColor), maximumNumberOfLines: 1, truncationType: .end, constrainedSize: textConstrainedSize))
cutout = TextNodeCutout(bottomRight: moreLayoutAndApply?.0.size)
}
let textInsets = UIEdgeInsets(top: 2.0, left: 2.0, bottom: 5.0, right: 2.0)
let hideAllAdditionalInfo = item.presentationData.isPreview
let (textLayout, textApply) = textLayout(TextNodeLayoutArguments(attributedString: attributedText, backgroundColor: nil, maximumNumberOfLines: hideAllAdditionalInfo ? 10 : 0, truncationType: .end, constrainedSize: textConstrainedSize, alignment: .natural, cutout: cutout, insets: textInsets, lineColor: messageTheme.accentControlColor))
let (textLayout, textApply) = textLayout(TextNodeLayoutArguments(attributedString: attributedText, backgroundColor: nil, maximumNumberOfLines: hideAllAdditionalInfo ? 12 : 0, truncationType: .end, constrainedSize: textConstrainedSize, alignment: .natural, cutout: cutout, insets: textInsets, lineColor: messageTheme.accentControlColor))
if !textLayout.truncated {
moreLayoutAndApply = nil
}
let spoilerTextLayoutAndApply: (TextNodeLayout, (TextNodeWithEntities.Arguments?) -> TextNodeWithEntities)?
if !textLayout.spoilers.isEmpty {
spoilerTextLayoutAndApply = spoilerTextLayout(TextNodeLayoutArguments(attributedString: attributedText, backgroundColor: nil, maximumNumberOfLines: hideAllAdditionalInfo ? 10 : 0, truncationType: .end, constrainedSize: textConstrainedSize, alignment: .natural, cutout: cutout, insets: textInsets, lineColor: messageTheme.accentControlColor, displaySpoilers: true, displayEmbeddedItemsUnderSpoilers: true))
spoilerTextLayoutAndApply = spoilerTextLayout(TextNodeLayoutArguments(attributedString: attributedText, backgroundColor: nil, maximumNumberOfLines: hideAllAdditionalInfo ? 12 : 0, truncationType: .end, constrainedSize: textConstrainedSize, alignment: .natural, cutout: cutout, insets: textInsets, lineColor: messageTheme.accentControlColor, displaySpoilers: true, displayEmbeddedItemsUnderSpoilers: true))
} else {
spoilerTextLayoutAndApply = nil
}
var statusSuggestedWidthAndContinue: (CGFloat, (CGFloat) -> (CGSize, (ListViewItemUpdateAnimation) -> ChatMessageDateAndStatusNode))?
if let statusType = statusType {
var isReplyThread = false
@ -564,6 +574,14 @@ public class ChatMessageTextBubbleContentNode: ChatMessageBubbleContentNode {
textFrame = textFrame.offsetBy(dx: layoutConstants.text.bubbleInsets.left, dy: topInset)
textFrameWithoutInsets = textFrameWithoutInsets.offsetBy(dx: layoutConstants.text.bubbleInsets.left, dy: topInset)
var readMoreFrame: CGRect = .zero
if let (readMoreLayout, _ ) = moreLayoutAndApply {
let remainingLineWidth = textLayout.size.width - textLayout.trailingLineWidth
if readMoreLayout.size.width < remainingLineWidth {
readMoreFrame = CGRect(origin: CGPoint(x: textFrame.maxX - readMoreLayout.size.width - textInsets.right, y: textFrame.maxY - readMoreLayout.size.height - textInsets.bottom), size: readMoreLayout.size)
}
}
var suggestedBoundingWidth: CGFloat = textFrameWithoutInsets.width
if let statusSuggestedWidthAndContinue = statusSuggestedWidthAndContinue {
suggestedBoundingWidth = max(suggestedBoundingWidth, statusSuggestedWidthAndContinue.0)
@ -654,6 +672,19 @@ public class ChatMessageTextBubbleContentNode: ChatMessageBubbleContentNode {
}
}
if let (_, moreApply) = moreLayoutAndApply {
let moreNode = moreApply()
if strongSelf.moreNode == nil {
moreNode.displaysAsynchronously = false
strongSelf.moreNode = moreNode
strongSelf.containerNode.insertSubnode(moreNode, aboveSubnode: strongSelf.textNode.textNode)
}
moreNode.frame = readMoreFrame
} else if let moreNode = strongSelf.moreNode {
strongSelf.moreNode = nil
moreNode.removeFromSupernode()
}
switch strongSelf.visibility {
case .none:
strongSelf.textNode.visibilityRect = nil

View File

@ -636,20 +636,9 @@ public final class MediaEditor {
}
}
case let .message(messageId):
let context = self.context
textureSource = self.context.account.postbox.transaction { transaction -> TelegramWallpaper? in
return (transaction.getPeerCachedData(peerId: messageId.peerId) as? CachedChannelData)?.wallpaper
}
|> mapToSignal { customWallpaper -> Signal<(UIImage?, UIImage?, AVPlayer?, AVPlayer?, GradientColors), NoError> in
return Signal { subscriber in
Queue.mainQueue().async {
let wallpaperRenderer = DrawingWallpaperRenderer(context: context, customWallpaper: customWallpaper)
wallpaperRenderer.render { size, image, darkImage in
subscriber.putNext((image, darkImage, nil, nil, GradientColors(top: .black, bottom: .black)))
}
}
return EmptyDisposable
}
textureSource = getChatWallpaperImage(context: self.context, messageId: messageId)
|> map { _, image, nightImage in
return (image, nightImage, nil, nil, GradientColors(top: .black, bottom: .black))
}
}

View File

@ -2,6 +2,8 @@ import Foundation
import UIKit
import AVFoundation
import SwiftSignalKit
import TelegramCore
import AccountContext
extension AVPlayer {
func fadeVolume(from: Float, to: Float, duration: Float, completion: (() -> Void)? = nil) -> SwiftSignalKit.Timer? {
@ -129,3 +131,21 @@ func getTextureImage(device: MTLDevice, texture: MTLTexture, mirror: Bool = fals
}
return UIImage(cgImage: cgImage)
}
/// Produces the chat wallpaper images for the peer of the given message.
///
/// Reads the peer's custom wallpaper (if any) from the cached channel data and
/// renders it on the main queue, emitting the rendered size together with the
/// light and dark image variants, then completing.
public func getChatWallpaperImage(context: AccountContext, messageId: EngineMessage.Id) -> Signal<(CGSize, UIImage?, UIImage?), NoError> {
    let cachedWallpaper = context.account.postbox.transaction { transaction -> TelegramWallpaper? in
        (transaction.getPeerCachedData(peerId: messageId.peerId) as? CachedChannelData)?.wallpaper
    }
    return cachedWallpaper
    |> mapToSignal { wallpaper -> Signal<(CGSize, UIImage?, UIImage?), NoError> in
        Signal { subscriber in
            // The renderer works with UIKit, so hop to the main queue before rendering.
            Queue.mainQueue().async {
                let renderer = DrawingWallpaperRenderer(context: context, customWallpaper: wallpaper)
                renderer.render { size, image, darkImage in
                    subscriber.putNext((size, image, darkImage))
                    subscriber.putCompletion()
                }
            }
            return EmptyDisposable
        }
    }
}

View File

@ -712,6 +712,10 @@ public final class MediaEditorValues: Codable, Equatable {
return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: nightTheme, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
}
/// Returns a copy of these values with the drawing entities replaced by `entities`;
/// every other field is carried over unchanged.
public func withUpdatedEntities(_ entities: [CodableDrawingEntity]) -> MediaEditorValues {
    // `self.` is written explicitly on every carried-over field (including
    // `videoTrimRange`, which previously relied on implicit self) for consistency.
    return MediaEditorValues(peerId: self.peerId, originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropRect: self.cropRect, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, cropOrientation: self.cropOrientation, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, videoVolume: self.videoVolume, additionalVideoPath: self.additionalVideoPath, additionalVideoIsDual: self.additionalVideoIsDual, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, additionalVideoTrimRange: self.additionalVideoTrimRange, additionalVideoOffset: self.additionalVideoOffset, additionalVideoVolume: self.additionalVideoVolume, nightTheme: self.nightTheme, drawing: self.drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackOffset: self.audioTrackOffset, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples, qualityPreset: self.qualityPreset)
}
public var resultDimensions: PixelDimensions {
if self.videoIsFullHd {
return PixelDimensions(width: 1080, height: 1920)

View File

@ -21,10 +21,25 @@ extension MediaEditorScreen {
let codableEntities = DrawingEntitiesView.encodeEntities(entities, entitiesView: self.node.entitiesView)
mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities)
let caption = self.getCaption()
let filteredEntities = self.node.entitiesView.entities.filter { entity in
if entity is DrawingMediaEntity {
return false
} else if let entity = entity as? DrawingStickerEntity, case .message = entity.content {
return false
}
return true
}
if let subject = self.node.subject, case .asset = subject, self.node.mediaEditor?.values.hasChanges == false && caption.string.isEmpty {
return false
let values = mediaEditor.values
let filteredValues = values.withUpdatedEntities([])
let caption = self.getCaption()
if let subject = self.node.subject {
if case .asset = subject, !values.hasChanges && caption.string.isEmpty {
return false
} else if case .message = subject, !filteredValues.hasChanges && filteredEntities.isEmpty && caption.string.isEmpty {
return false
}
}
return true
}

View File

@ -1789,10 +1789,12 @@ final class MediaEditorScreenComponent: Component {
})
}
mediaEditor.toggleNightTheme()
controller.node.entitiesView.eachView { view in
if let stickerEntityView = view as? DrawingStickerEntityView {
stickerEntityView.toggleNightTheme()
Queue.mainQueue().after(0.1) {
mediaEditor.toggleNightTheme()
controller.node.entitiesView.eachView { view in
if let stickerEntityView = view as? DrawingStickerEntityView {
stickerEntityView.toggleNightTheme()
}
}
}
}
@ -2077,11 +2079,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
private var isDismissed = false
private var isDismissBySwipeSuppressed = false
fileprivate var hasAnyChanges = false
private (set) var hasAnyChanges = false
private var playbackPositionDisposable: Disposable?
var recording: MediaEditorScreen.Recording
private var presentationData: PresentationData
@ -2195,6 +2196,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
stickerItems
) |> map { emoji, stickers -> StickerPickerInputData in
return StickerPickerInputData(emoji: emoji, stickers: stickers, gifs: nil)
} |> afterNext { [weak self] _ in
if let self {
self.controller?.checkPostingAvailability()
}
}
stickerPickerInputData.set(signal)
@ -2269,6 +2274,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return
}
Queue.mainQueue().justDispatch {
controller.setupAudioSessionIfNeeded()
}
if case let .draft(draft, _) = subject, let privacy = draft.privacy {
controller.state.privacy = privacy
}
@ -2770,14 +2779,23 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
@objc func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
    // For message-based subjects, a two-finger pan with no entity selected is
    // reserved for manipulating the media itself, so it is ignored here.
    if case .message? = self.subject, gestureRecognizer.numberOfTouches == 2, !self.entitiesView.hasSelection {
        return
    }
    self.entitiesView.handlePan(gestureRecognizer)
}
@objc func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) {
    // For message-based subjects, a two-finger pinch with no entity selected is
    // reserved for manipulating the media itself, so it is ignored here.
    if case .message? = self.subject, gestureRecognizer.numberOfTouches == 2, !self.entitiesView.hasSelection {
        return
    }
    self.entitiesView.handlePinch(gestureRecognizer)
}
@objc func handleRotate(_ gestureRecognizer: UIRotationGestureRecognizer) {
    // For message-based subjects, a two-finger rotation with no entity selected is
    // reserved for manipulating the media itself, so it is ignored here.
    if case .message? = self.subject, gestureRecognizer.numberOfTouches == 2, !self.entitiesView.hasSelection {
        return
    }
    self.entitiesView.handleRotate(gestureRecognizer)
}
@ -4307,10 +4325,6 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
updateStorySources(engine: self.context.engine)
updateStoryDrafts(engine: self.context.engine)
if let _ = forwardSource {
self.postingAvailabilityPromise.set(self.context.engine.messages.checkStoriesUploadAvailability(target: .myStories))
}
}
required public init(coder aDecoder: NSCoder) {
@ -4323,6 +4337,91 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.postingAvailabilityDisposable?.dispose()
}
/// Activates the shared audio session and, where relevant, kicks off the story
/// posting-availability check, based on the current editing subject.
///
/// - Forwarded stories and message-based subjects need both the audio session
///   and the availability check; story editing needs only the audio session.
fileprivate func setupAudioSessionIfNeeded() {
    guard let subject = self.node.subject else {
        return
    }
    var subjectIsMessage = false
    if case .message = subject {
        subjectIsMessage = true
    }
    let shouldCheckPostingAvailability = self.forwardSource != nil || subjectIsMessage
    let needsAudioSession = shouldCheckPostingAvailability || self.isEditingStory
    if needsAudioSession {
        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
            if #available(iOS 13.0, *) {
                // Keep haptics and system sounds audible while recording.
                try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
            }
        }, deactivate: { _ in
            return .single(Void())
        })
    }
    if shouldCheckPostingAvailability {
        self.postingAvailabilityPromise.set(self.context.engine.messages.checkStoriesUploadAvailability(target: .myStories))
    }
}
/// Subscribes (once) to the posting-availability promise and, when posting is
/// limited, presents the appropriate premium limit screen; cancelling it
/// dismisses the editor without saving a draft.
fileprivate func checkPostingAvailability() {
// Already subscribed — the disposable doubles as a "started" flag.
guard self.postingAvailabilityDisposable == nil else {
return
}
self.postingAvailabilityDisposable = (self.postingAvailabilityPromise.get()
|> deliverOnMainQueue).start(next: { [weak self] availability in
guard let self, availability != .available else {
return
}
// Map the limit reason to the corresponding premium upsell subject.
let subject: PremiumLimitSubject
switch availability {
case .expiringLimit:
subject = .expiringStories
case .weeklyLimit:
subject = .storiesWeekly
case .monthlyLimit:
subject = .storiesMonthly
default:
subject = .expiringStories
}
let context = self.context
// Assigned below; lets the limit controller replace itself with the intro screen.
var replaceImpl: ((ViewController) -> Void)?
let controller = self.context.sharedContext.makePremiumLimitController(context: self.context, subject: subject, count: 10, forceDark: true, cancel: { [weak self] in
self?.requestDismiss(saveDraft: false, animated: true)
}, action: { [weak self] in
let controller = context.sharedContext.makePremiumIntroController(context: context, source: .stories, forceDark: true, dismissed: { [weak self] in
guard let self else {
return
}
// After the intro screen closes, dismiss the editor unless the user upgraded.
let _ = (self.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: self.context.account.peerId))
|> deliverOnMainQueue).start(next: { [weak self] peer in
guard let self else {
return
}
let isPremium = peer?.isPremium ?? false
if !isPremium {
self.requestDismiss(saveDraft: false, animated: true)
}
})
})
replaceImpl?(controller)
return true
})
replaceImpl = { [weak controller] c in
controller?.replace(with: c)
}
if let navigationController = self.context.sharedContext.mainWindow?.viewController as? NavigationController {
navigationController.pushViewController(controller)
}
})
}
override public func loadDisplayNode() {
self.displayNode = Node(controller: self)
@ -4334,65 +4433,6 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
Queue.mainQueue().after(0.4) {
self.adminedChannels.set(.single([]) |> then(self.context.engine.peers.channelsForStories()))
self.closeFriends.set(self.context.engine.data.get(TelegramEngine.EngineData.Item.Contacts.CloseFriends()))
if self.forwardSource != nil || self.isEditingStory {
self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
if #available(iOS 13.0, *) {
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
}
}, deactivate: { _ in
return .single(Void())
})
}
self.postingAvailabilityDisposable = (self.postingAvailabilityPromise.get()
|> deliverOnMainQueue).start(next: { [weak self] availability in
guard let self, availability != .available else {
return
}
let subject: PremiumLimitSubject
switch availability {
case .expiringLimit:
subject = .expiringStories
case .weeklyLimit:
subject = .storiesWeekly
case .monthlyLimit:
subject = .storiesMonthly
default:
subject = .expiringStories
}
let context = self.context
var replaceImpl: ((ViewController) -> Void)?
let controller = self.context.sharedContext.makePremiumLimitController(context: self.context, subject: subject, count: 10, forceDark: true, cancel: { [weak self] in
self?.requestDismiss(saveDraft: false, animated: true)
}, action: { [weak self] in
let controller = context.sharedContext.makePremiumIntroController(context: context, source: .stories, forceDark: true, dismissed: { [weak self] in
guard let self else {
return
}
let _ = (self.context.engine.data.get(TelegramEngine.EngineData.Item.Peer.Peer(id: self.context.account.peerId))
|> deliverOnMainQueue).start(next: { [weak self] peer in
guard let self else {
return
}
let isPremium = peer?.isPremium ?? false
if !isPremium {
self.requestDismiss(saveDraft: false, animated: true)
}
})
})
replaceImpl?(controller)
return true
})
replaceImpl = { [weak controller] c in
controller?.replace(with: c)
}
if let navigationController = self.context.sharedContext.mainWindow?.viewController as? NavigationController {
navigationController.pushViewController(controller)
}
})
}
}
@ -5002,7 +5042,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
var firstFrame: Signal<(UIImage?, UIImage?), NoError>
let firstFrameTime = CMTime(seconds: mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0, preferredTimescale: CMTimeScale(60))
let videoResult: MediaResult.VideoResult
let videoResult: Signal<MediaResult.VideoResult, NoError>
var videoIsMirrored = false
let duration: Double
switch subject {
@ -5011,13 +5051,13 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .imageFile(path: tempImagePath)
videoResult = .single(.imageFile(path: tempImagePath))
duration = 5.0
firstFrame = .single((image, nil))
case let .video(path, _, mirror, additionalPath, _, _, durationValue, _, _):
videoIsMirrored = mirror
videoResult = .videoFile(path: path)
videoResult = .single(.videoFile(path: path))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
@ -5059,7 +5099,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
}
case let .asset(asset):
videoResult = .asset(localIdentifier: asset.localIdentifier)
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
if asset.mediaType == .video {
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
@ -5134,7 +5174,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
case let .draft(draft, _):
let draftPath = draft.fullPath(engine: context.engine)
if draft.isVideo {
videoResult = .videoFile(path: draftPath)
videoResult = .single(.videoFile(path: draftPath))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
@ -5155,7 +5195,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
}
} else {
videoResult = .imageFile(path: draftPath)
videoResult = .single(.imageFile(path: draftPath))
duration = 5.0
if let image = UIImage(contentsOfFile: draftPath) {
@ -5164,21 +5204,41 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
firstFrame = .single((UIImage(), nil))
}
}
case .message:
let image = generateSingleColorImage(size: CGSize(width: 1080, height: 1920), color: .black, scale: 1.0)!
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
case let .message(messages):
let isNightTheme = mediaEditor.values.nightTheme
let wallpaper = getChatWallpaperImage(context: self.context, messageId: messages.first!)
|> map { _, image, nightImage -> UIImage? in
if isNightTheme {
return nightImage ?? image
} else {
return image
}
}
videoResult = .imageFile(path: tempImagePath)
firstFrame = .single((image, nil))
videoResult = wallpaper
|> mapToSignal { image in
if let image {
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
return .single(.imageFile(path: tempImagePath))
} else {
return .complete()
}
}
firstFrame = wallpaper
|> map { image in
return (image, nil)
}
duration = 5.0
}
let _ = (firstFrame
|> deliverOnMainQueue).start(next: { [weak self] image, additionalImage in
let _ = combineLatest(queue: Queue.mainQueue(), firstFrame, videoResult)
.start(next: { [weak self] images, videoResult in
if let self {
let (image, additionalImage) = images
var currentImage = mediaEditor.resultImage
if let image {
mediaEditor.replaceSource(image, additionalImage: additionalImage, time: firstFrameTime, mirror: true)
@ -5337,8 +5397,17 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
fatalError()
}
}
case .message:
exportSubject = .single(.image(image: generateSingleColorImage(size: CGSize(width: 1080, height: 1920), color: .black, scale: 1.0)!))
case let .message(messages):
let isNightTheme = mediaEditor.values.nightTheme
exportSubject = getChatWallpaperImage(context: self.context, messageId: messages.first!)
|> mapToSignal { _, image, nightImage -> Signal<MediaEditorVideoExport.Subject, NoError> in
if isNightTheme {
let effectiveImage = nightImage ?? image
return effectiveImage.flatMap({ .single(.image(image: $0)) }) ?? .complete()
} else {
return image.flatMap({ .single(.image(image: $0)) }) ?? .complete()
}
}
}
let _ = exportSubject.start(next: { [weak self] exportSubject in

View File

@ -2432,7 +2432,7 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
guard let self else {
return
}
Queue.mainQueue().after(0.05) {
Queue.mainQueue().after(0.15) {
self.openStorySharing(messages: messages)
}
}
@ -18824,7 +18824,18 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
} else {
text = self.presentationData.strings.Story_MessageReposted_Personal
}
self.present(UndoOverlayController(presentationData: self.presentationData, content: .succeed(text: text, timeout: nil, customUndoText: nil), elevatedLayout: false, action: { _ in return false }), in: .current)
Queue.mainQueue().after(0.25) {
self.present(UndoOverlayController(
presentationData: self.presentationData,
content: .forward(savedMessages: false, text: text),
elevatedLayout: false,
action: { _ in return false }
), in: .current)
Queue.mainQueue().after(0.1) {
self.chatDisplayNode.hapticFeedback.success()
}
}
})
}