Various fixes

Ilya Laktyushin 2025-04-25 18:13:30 +04:00
parent 1ed853e255
commit 9e0600edfa
16 changed files with 948 additions and 879 deletions

View File

@@ -14290,3 +14290,6 @@ Sorry for the inconvenience.";
"MediaPicker.CreateStory_1" = "Create %@ Story"; "MediaPicker.CreateStory_1" = "Create %@ Story";
"MediaPicker.CreateStory_any" = "Create %@ Stories"; "MediaPicker.CreateStory_any" = "Create %@ Stories";
"MediaPicker.CombineIntoCollage" = "Combine into Collage"; "MediaPicker.CombineIntoCollage" = "Combine into Collage";
"Gift.Resale.Unavailable.Title" = "Resell Gift";
"Gift.Resale.Unavailable.Text" = "Sorry, you can't list this gift yet.\n\Reselling will be available on %@.";

View File

@@ -2004,7 +2004,7 @@ public final class MediaPickerScreenImpl: ViewController, MediaPickerScreen, Att
var hasSelect = false var hasSelect = false
if forCollage { if forCollage {
hasSelect = true hasSelect = true
} else if case .story = mode { } else if case .story = mode, selectionContext.selectionLimit > 1 {
hasSelect = true hasSelect = true
} }
@@ -3402,7 +3402,7 @@ public func stickerMediaPickerController(
destinationCornerRadius: 0.0 destinationCornerRadius: 0.0
) )
}, },
completion: { result, _, commit in completion: { result, _, _, commit in
completion(result, nil, .zero, nil, true, { _ in return nil }, { completion(result, nil, .zero, nil, true, { _ in return nil }, {
returnToCameraImpl?() returnToCameraImpl?()
}) })
@@ -3520,7 +3520,7 @@ public func avatarMediaPickerController(
destinationCornerRadius: 0.0 destinationCornerRadius: 0.0
) )
}, },
completion: { result, _, commit in completion: { result, _, _, commit in
completion(result, nil, .zero, nil, true, { _ in return nil }, { completion(result, nil, .zero, nil, true, { _ in return nil }, {
returnToCameraImpl?() returnToCameraImpl?()
}) })

View File

@@ -1959,6 +1959,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
} }
}, },
nil, nil,
1,
{} {}
) )
} else { } else {
@@ -1995,6 +1996,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
} }
}, },
nil, nil,
self.controller?.remainingStoryCount,
{} {}
) )
} }
@@ -3374,7 +3376,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
self.transitionOut = transitionOut self.transitionOut = transitionOut
} }
} }
fileprivate let completion: (Signal<CameraScreenImpl.Result, NoError>, ResultTransition?, @escaping () -> Void) -> Void fileprivate let completion: (Signal<CameraScreenImpl.Result, NoError>, ResultTransition?, Int32?, @escaping () -> Void) -> Void
public var transitionedIn: () -> Void = {} public var transitionedIn: () -> Void = {}
public var transitionedOut: () -> Void = {} public var transitionedOut: () -> Void = {}
@@ -3382,6 +3384,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
private let postingAvailabilityPromise = Promise<StoriesUploadAvailability>() private let postingAvailabilityPromise = Promise<StoriesUploadAvailability>()
private var postingAvailabilityDisposable: Disposable? private var postingAvailabilityDisposable: Disposable?
private var remainingStoryCount: Int32?
private var codeDisposable: Disposable? private var codeDisposable: Disposable?
private var resolveCodeDisposable: Disposable? private var resolveCodeDisposable: Disposable?
@@ -3419,7 +3422,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
holder: CameraHolder? = nil, holder: CameraHolder? = nil,
transitionIn: TransitionIn?, transitionIn: TransitionIn?,
transitionOut: @escaping (Bool) -> TransitionOut?, transitionOut: @escaping (Bool) -> TransitionOut?,
completion: @escaping (Signal<CameraScreenImpl.Result, NoError>, ResultTransition?, @escaping () -> Void) -> Void completion: @escaping (Signal<CameraScreenImpl.Result, NoError>, ResultTransition?, Int32?, @escaping () -> Void) -> Void
) { ) {
self.context = context self.context = context
self.mode = mode self.mode = mode
@@ -3473,7 +3476,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
return return
} }
if case let .available(remainingCount) = availability { if case let .available(remainingCount) = availability {
let _ = remainingCount self.remainingStoryCount = remainingCount
return return
} }
self.node.postingAvailable = false self.node.postingAvailable = false
@@ -3639,7 +3642,11 @@ public class CameraScreenImpl: ViewController, CameraScreen {
if self.cameraState.isCollageEnabled { if self.cameraState.isCollageEnabled {
selectionLimit = 6 selectionLimit = 6
} else { } else {
selectionLimit = 10 if let remainingStoryCount = self.remainingStoryCount {
selectionLimit = min(Int(remainingStoryCount), 10)
} else {
selectionLimit = 10
}
} }
} }
controller = self.context.sharedContext.makeStoryMediaPickerScreen( controller = self.context.sharedContext.makeStoryMediaPickerScreen(
@@ -3704,10 +3711,10 @@ public class CameraScreenImpl: ViewController, CameraScreen {
) )
self.present(alertController, in: .window(.root)) self.present(alertController, in: .window(.root))
} else { } else {
self.completion(.single(.asset(asset)), resultTransition, dismissed) self.completion(.single(.asset(asset)), resultTransition, self.remainingStoryCount, dismissed)
} }
} else if let draft = result as? MediaEditorDraft { } else if let draft = result as? MediaEditorDraft {
self.completion(.single(.draft(draft)), resultTransition, dismissed) self.completion(.single(.draft(draft)), resultTransition, self.remainingStoryCount, dismissed)
} }
} }
} }
@@ -3753,7 +3760,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
} }
} else { } else {
if let assets = results as? [PHAsset] { if let assets = results as? [PHAsset] {
self.completion(.single(.assets(assets)), nil, { self.completion(.single(.assets(assets)), nil, self.remainingStoryCount, {
}) })
} }
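The changes in this file thread the user's remaining story quota into the gallery picker so that the multi-select limit never exceeds what can actually be posted. A minimal standalone sketch of that clamping rule (the function name and parameters are illustrative, not the project's API; the constants 6 and 10 mirror the hunk above):

```swift
// Sketch of the selection-limit rule introduced above: clamp the picker's
// multi-select limit by the remaining story quota when it is known.
func selectionLimit(isCollageEnabled: Bool, remainingStoryCount: Int32?) -> Int {
    if isCollageEnabled {
        return 6                                // collage keeps its fixed 6-item limit
    }
    if let remaining = remainingStoryCount {
        return min(Int(remaining), 10)          // never offer more slots than stories left
    }
    return 10                                   // quota unknown: fall back to the default cap
}
```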

View File

@@ -338,8 +338,6 @@ final class GiftStoreScreenComponent: Component {
) )
if let effectiveGifts = self.effectiveGifts, effectiveGifts.isEmpty && self.state?.starGiftsState?.dataState != .loading { if let effectiveGifts = self.effectiveGifts, effectiveGifts.isEmpty && self.state?.starGiftsState?.dataState != .loading {
showClearFilters = true
let emptyAnimationHeight = 148.0 let emptyAnimationHeight = 148.0
let visibleHeight = availableHeight let visibleHeight = availableHeight
let emptyAnimationSpacing: CGFloat = 20.0 let emptyAnimationSpacing: CGFloat = 20.0

View File

@@ -2820,7 +2820,7 @@ public class GiftViewScreen: ViewControllerComponentContainer {
case upgradePreview([StarGift.UniqueGift.Attribute], String) case upgradePreview([StarGift.UniqueGift.Attribute], String)
case wearPreview(StarGift.UniqueGift) case wearPreview(StarGift.UniqueGift)
var arguments: (peerId: EnginePeer.Id?, fromPeerId: EnginePeer.Id?, fromPeerName: String?, messageId: EngineMessage.Id?, reference: StarGiftReference?, incoming: Bool, gift: StarGift, date: Int32, convertStars: Int64?, text: String?, entities: [MessageTextEntity]?, nameHidden: Bool, savedToProfile: Bool, pinnedToTop: Bool?, converted: Bool, upgraded: Bool, refunded: Bool, canUpgrade: Bool, upgradeStars: Int64?, transferStars: Int64?, resellStars: Int64?, canExportDate: Int32?, upgradeMessageId: Int32?)? { var arguments: (peerId: EnginePeer.Id?, fromPeerId: EnginePeer.Id?, fromPeerName: String?, messageId: EngineMessage.Id?, reference: StarGiftReference?, incoming: Bool, gift: StarGift, date: Int32, convertStars: Int64?, text: String?, entities: [MessageTextEntity]?, nameHidden: Bool, savedToProfile: Bool, pinnedToTop: Bool?, converted: Bool, upgraded: Bool, refunded: Bool, canUpgrade: Bool, upgradeStars: Int64?, transferStars: Int64?, resellStars: Int64?, canExportDate: Int32?, upgradeMessageId: Int32?, canTransferDate: Int32?, canResaleDate: Int32?)? {
switch self { switch self {
case let .message(message): case let .message(message):
if let action = message.media.first(where: { $0 is TelegramMediaAction }) as? TelegramMediaAction { if let action = message.media.first(where: { $0 is TelegramMediaAction }) as? TelegramMediaAction {
@@ -2832,8 +2832,8 @@ public class GiftViewScreen: ViewControllerComponentContainer {
} else { } else {
reference = .message(messageId: message.id) reference = .message(messageId: message.id)
} }
return (message.id.peerId, senderId ?? message.author?.id, message.author?.compactDisplayTitle, message.id, reference, message.flags.contains(.Incoming), gift, message.timestamp, convertStars, text, entities, nameHidden, savedToProfile, nil, converted, upgraded, isRefunded, canUpgrade, upgradeStars, nil, nil, nil, upgradeMessageId) return (message.id.peerId, senderId ?? message.author?.id, message.author?.compactDisplayTitle, message.id, reference, message.flags.contains(.Incoming), gift, message.timestamp, convertStars, text, entities, nameHidden, savedToProfile, nil, converted, upgraded, isRefunded, canUpgrade, upgradeStars, nil, nil, nil, upgradeMessageId, nil, nil)
case let .starGiftUnique(gift, isUpgrade, isTransferred, savedToProfile, canExportDate, transferStars, _, peerId, senderId, savedId, _, _, _): case let .starGiftUnique(gift, isUpgrade, isTransferred, savedToProfile, canExportDate, transferStars, _, peerId, senderId, savedId, _, canTransferDate, canResaleDate):
var reference: StarGiftReference var reference: StarGiftReference
if let peerId, let savedId { if let peerId, let savedId {
reference = .peer(peerId: peerId, id: savedId) reference = .peer(peerId: peerId, id: savedId)
@@ -2857,13 +2857,13 @@ public class GiftViewScreen: ViewControllerComponentContainer {
if case let .unique(uniqueGift) = gift { if case let .unique(uniqueGift) = gift {
resellStars = uniqueGift.resellStars resellStars = uniqueGift.resellStars
} }
return (message.id.peerId, senderId ?? message.author?.id, message.author?.compactDisplayTitle, message.id, reference, incoming, gift, message.timestamp, nil, nil, nil, false, savedToProfile, nil, false, false, false, false, nil, transferStars, resellStars, canExportDate, nil) return (message.id.peerId, senderId ?? message.author?.id, message.author?.compactDisplayTitle, message.id, reference, incoming, gift, message.timestamp, nil, nil, nil, false, savedToProfile, nil, false, false, false, false, nil, transferStars, resellStars, canExportDate, nil, canTransferDate, canResaleDate)
default: default:
return nil return nil
} }
} }
case let .uniqueGift(gift, _), let .wearPreview(gift): case let .uniqueGift(gift, _), let .wearPreview(gift):
return (nil, nil, nil, nil, nil, false, .unique(gift), 0, nil, nil, nil, false, false, nil, false, false, false, false, nil, nil, gift.resellStars, nil, nil) return (nil, nil, nil, nil, nil, false, .unique(gift), 0, nil, nil, nil, false, false, nil, false, false, false, false, nil, nil, gift.resellStars, nil, nil, nil, nil)
case let .profileGift(peerId, gift): case let .profileGift(peerId, gift):
var messageId: EngineMessage.Id? var messageId: EngineMessage.Id?
if case let .message(messageIdValue) = gift.reference { if case let .message(messageIdValue) = gift.reference {
@@ -2873,7 +2873,7 @@ public class GiftViewScreen: ViewControllerComponentContainer {
if case let .unique(uniqueGift) = gift.gift { if case let .unique(uniqueGift) = gift.gift {
resellStars = uniqueGift.resellStars resellStars = uniqueGift.resellStars
} }
return (peerId, gift.fromPeer?.id, gift.fromPeer?.compactDisplayTitle, messageId, gift.reference, false, gift.gift, gift.date, gift.convertStars, gift.text, gift.entities, gift.nameHidden, gift.savedToProfile, gift.pinnedToTop, false, false, false, gift.canUpgrade, gift.upgradeStars, gift.transferStars, resellStars, gift.canExportDate, nil) return (peerId, gift.fromPeer?.id, gift.fromPeer?.compactDisplayTitle, messageId, gift.reference, false, gift.gift, gift.date, gift.convertStars, gift.text, gift.entities, gift.nameHidden, gift.savedToProfile, gift.pinnedToTop, false, false, false, gift.canUpgrade, gift.upgradeStars, gift.transferStars, resellStars, gift.canExportDate, nil, gift.canTransferDate, gift.canResaleDate)
case .soldOutGift: case .soldOutGift:
return nil return nil
case .upgradePreview: case .upgradePreview:
@@ -3400,6 +3400,22 @@ public class GiftViewScreen: ViewControllerComponentContainer {
self.dismissAllTooltips() self.dismissAllTooltips()
let currentTime = Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970)
if let canResaleDate = arguments.canResaleDate, currentTime < canResaleDate {
let dateString = stringForFullDate(timestamp: canResaleDate, strings: presentationData.strings, dateTimeFormat: presentationData.dateTimeFormat)
let controller = textAlertController(
context: self.context,
title: presentationData.strings.Gift_Resale_Unavailable_Title,
text: presentationData.strings.Gift_Resale_Unavailable_Text(dateString).string,
actions: [
TextAlertAction(type: .defaultAction, title: presentationData.strings.Common_OK, action: {})
],
parseMarkdown: true
)
self.present(controller, in: .window(.root))
return
}
let presentationData = context.sharedContext.currentPresentationData.with { $0 } let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let giftTitle = "\(gift.title) #\(presentationStringsFormattedNumber(gift.number, presentationData.dateTimeFormat.groupingSeparator))" let giftTitle = "\(gift.title) #\(presentationStringsFormattedNumber(gift.number, presentationData.dateTimeFormat.groupingSeparator))"
let reference = arguments.reference ?? .slug(slug: gift.slug) let reference = arguments.reference ?? .slug(slug: gift.slug)
@@ -3582,7 +3598,7 @@ public class GiftViewScreen: ViewControllerComponentContainer {
if case let .unique(gift) = arguments.gift, let resellStars = gift.resellStars, resellStars > 0 { if case let .unique(gift) = arguments.gift, let resellStars = gift.resellStars, resellStars > 0 {
items.append(.action(ContextMenuActionItem(text: presentationData.strings.Gift_View_Context_ChangePrice, icon: { theme in items.append(.action(ContextMenuActionItem(text: presentationData.strings.Gift_View_Context_ChangePrice, icon: { theme in
return generateTintedImage(image: UIImage(bundleImageName: "Media Grid/Paid"), color: theme.contextMenu.primaryColor) return generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/PriceTag"), color: theme.contextMenu.primaryColor)
}, action: { c, _ in }, action: { c, _ in
c?.dismiss(completion: nil) c?.dismiss(completion: nil)
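The `canResaleDate` branch added to `GiftViewScreen` above compares the gift's unlock timestamp against the current Unix time and shows the new `Gift.Resale.Unavailable` alert instead of the resale flow when listing is still locked. A reduced sketch of that gate, with the alert replaced by a plain return value (the function and its parameter are illustrative):

```swift
import Foundation

// Sketch of the resale time gate: a gift can only be listed once the current
// Unix time has passed its canResaleDate.
func canListForResale(canResaleDate: Int32?) -> Bool {
    let currentTime = Int32(CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970)
    if let canResaleDate, currentTime < canResaleDate {
        // The real flow formats this date and presents Gift.Resale.Unavailable.Text.
        return false
    }
    return true
}
```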

View File

@@ -988,6 +988,8 @@ public final class MediaEditor {
if let trimRange = self.values.videoTrimRange { if let trimRange = self.values.videoTrimRange {
player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
// additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000)) // additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
} else if let duration = player.currentItem?.duration.seconds, duration > self.maxDuration {
player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: self.maxDuration, preferredTimescale: CMTimeScale(1000))
} }
if let initialSeekPosition = self.initialSeekPosition { if let initialSeekPosition = self.initialSeekPosition {
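The `MediaEditor` change above caps playback at `maxDuration` whenever no explicit trim range is set, so overlong videos stop at the allowed length. A self-contained sketch of the same cap (the helper and its optional trim-range parameter are illustrative; the 1000-unit timescale matches the hunk):

```swift
import AVFoundation

// Sketch: bound an AVPlayerItem's playback window either by a user-chosen trim
// range or, failing that, by the editor's maximum allowed duration.
func applyPlaybackCap(to player: AVPlayer, trimRange: ClosedRange<Double>?, maxDuration: Double) {
    if let trimRange {
        player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: 1000)
    } else if let duration = player.currentItem?.duration.seconds, duration > maxDuration {
        player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: maxDuration, preferredTimescale: 1000)
    }
}
```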

View File

@@ -97,7 +97,7 @@ public extension MediaEditorScreenImpl {
var updateProgressImpl: ((Float) -> Void)? var updateProgressImpl: ((Float) -> Void)?
let controller = MediaEditorScreenImpl( let controller = MediaEditorScreenImpl(
context: context, context: context,
mode: .storyEditor, mode: .storyEditor(remainingCount: 1),
subject: subject, subject: subject,
isEditing: !repost, isEditing: !repost,
isEditingCover: cover, isEditingCover: cover,

View File

@@ -2019,13 +2019,17 @@ final class MediaEditorScreenComponent: Component {
} else { } else {
minDuration = 1.0 minDuration = 1.0
if case .avatarEditor = controller.mode { if case .avatarEditor = controller.mode {
maxDuration = 9.9 maxDuration = avatarMaxVideoDuration
} else { } else {
if controller.node.items.count > 0 { if controller.node.items.count > 0 {
maxDuration = storyMaxVideoDuration maxDuration = storyMaxVideoDuration
} else { } else {
maxDuration = storyMaxCombinedVideoDuration if case let .storyEditor(remainingCount) = controller.mode, remainingCount > 1 {
segmentDuration = storyMaxVideoDuration maxDuration = min(storyMaxCombinedVideoDuration, Double(remainingCount) * storyMaxVideoDuration)
segmentDuration = storyMaxVideoDuration
} else {
maxDuration = storyMaxVideoDuration
}
} }
} }
} }
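The duration logic above picks the editor's maximum video length from the mode and, for a single long video, from the remaining story quota. A condensed sketch of that decision (the function is hypothetical; the constant values mirror the definitions added elsewhere in this diff: 60s per story, a 3-story combined cap, 10s for avatars):

```swift
// Condensed recap of the max-duration rules in the hunk above
// (constants copied from the definitions in this diff).
let storyMaxVideoDuration: Double = 60.0
let storyMaxCombinedVideoDuration: Double = storyMaxVideoDuration * 3.0
let avatarMaxVideoDuration: Double = 10.0

func maxVideoDuration(isAvatarEditor: Bool, hasMultipleItems: Bool, remainingCount: Int32) -> Double {
    if isAvatarEditor {
        return avatarMaxVideoDuration
    }
    if hasMultipleItems {
        // With a multi-item selection each item already has the per-story cap.
        return storyMaxVideoDuration
    }
    if remainingCount > 1 {
        // A single long video may later be split into up to remainingCount
        // 60-second segments, but never beyond the combined cap.
        return min(storyMaxCombinedVideoDuration, Double(remainingCount) * storyMaxVideoDuration)
    }
    return storyMaxVideoDuration
}
```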
@@ -2843,6 +2847,8 @@ let storyMaxVideoDuration: Double = 60.0
let storyMaxCombinedVideoCount: Int = 3 let storyMaxCombinedVideoCount: Int = 3
let storyMaxCombinedVideoDuration: Double = storyMaxVideoDuration * Double(storyMaxCombinedVideoCount) let storyMaxCombinedVideoDuration: Double = storyMaxVideoDuration * Double(storyMaxCombinedVideoCount)
let avatarMaxVideoDuration: Double = 10.0
public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UIDropInteractionDelegate { public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UIDropInteractionDelegate {
public enum Mode { public enum Mode {
public enum StickerEditorMode { public enum StickerEditorMode {
@@ -2852,7 +2858,7 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
case businessIntro case businessIntro
} }
case storyEditor case storyEditor(remainingCount: Int32)
case stickerEditor(mode: StickerEditorMode) case stickerEditor(mode: StickerEditorMode)
case botPreview case botPreview
case avatarEditor case avatarEditor
@@ -3510,8 +3516,10 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
values: initialValues, values: initialValues,
hasHistogram: true hasHistogram: true
) )
if case .storyEditor = controller.mode, self.items.isEmpty { if case let .storyEditor(remainingCount) = controller.mode, self.items.isEmpty {
mediaEditor.maxDuration = storyMaxCombinedVideoDuration mediaEditor.maxDuration = min(storyMaxCombinedVideoDuration, Double(remainingCount) * storyMaxVideoDuration)
} else if case .avatarEditor = controller.mode {
mediaEditor.maxDuration = avatarMaxVideoDuration
} }
if case .avatarEditor = controller.mode { if case .avatarEditor = controller.mode {
@@ -6549,15 +6557,17 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
fileprivate let customTarget: EnginePeer.Id? fileprivate let customTarget: EnginePeer.Id?
let forwardSource: (EnginePeer, EngineStoryItem)? let forwardSource: (EnginePeer, EngineStoryItem)?
fileprivate let initialCaption: NSAttributedString? let initialCaption: NSAttributedString?
fileprivate let initialPrivacy: EngineStoryPrivacy? let initialPrivacy: EngineStoryPrivacy?
fileprivate let initialMediaAreas: [MediaArea]? let initialMediaAreas: [MediaArea]?
fileprivate let initialVideoPosition: Double? let initialVideoPosition: Double?
fileprivate let initialLink: (url: String, name: String?)? let initialLink: (url: String, name: String?)?
fileprivate let transitionIn: TransitionIn? fileprivate let transitionIn: TransitionIn?
fileprivate let transitionOut: (Bool, Bool?) -> TransitionOut? fileprivate let transitionOut: (Bool, Bool?) -> TransitionOut?
var didComplete = false
public var cancelled: (Bool) -> Void = { _ in } public var cancelled: (Bool) -> Void = { _ in }
public var willComplete: (UIImage?, Bool, @escaping () -> Void) -> Void public var willComplete: (UIImage?, Bool, @escaping () -> Void) -> Void
public var completion: ([MediaEditorScreenImpl.Result], @escaping (@escaping () -> Void) -> Void) -> Void public var completion: ([MediaEditorScreenImpl.Result], @escaping (@escaping () -> Void) -> Void) -> Void
@@ -6784,7 +6794,7 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
} }
} }
fileprivate var isEmbeddedEditor: Bool { var isEmbeddedEditor: Bool {
return self.isEditingStory || self.isEditingStoryCover || self.forwardSource != nil return self.isEditingStory || self.isEditingStoryCover || self.forwardSource != nil
} }
@@ -7386,825 +7396,6 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
return true return true
} }
private func processMultipleItems(items: [EditingItem], isLongVideo: Bool) {
guard !items.isEmpty else {
return
}
var items = items
if let mediaEditor = self.node.mediaEditor, case let .asset(asset) = self.node.subject, let currentItemIndex = items.firstIndex(where: { $0.asset.localIdentifier == asset.localIdentifier }) {
var updatedCurrentItem = items[currentItemIndex]
updatedCurrentItem.caption = self.node.getCaption()
updatedCurrentItem.values = mediaEditor.values
items[currentItemIndex] = updatedCurrentItem
}
let multipleResults = Atomic<[MediaEditorScreenImpl.Result]>(value: [])
let totalItems = items.count
let dispatchGroup = DispatchGroup()
let privacy = self.state.privacy
if !(self.isEditingStory || self.isEditingStoryCover) {
let _ = updateMediaEditorStoredStateInteractively(engine: self.context.engine, { current in
if let current {
return current.withUpdatedPrivacy(privacy)
} else {
return MediaEditorStoredState(privacy: privacy, textSettings: nil)
}
}).start()
}
var order: [Int64] = []
for (index, item) in items.enumerated() {
guard item.isEnabled else {
continue
}
dispatchGroup.enter()
let randomId = Int64.random(in: .min ... .max)
order.append(randomId)
if item.asset.mediaType == .video {
processVideoItem(item: item, index: index, randomId: randomId, isLongVideo: isLongVideo) { result in
let _ = multipleResults.modify { results in
var updatedResults = results
updatedResults.append(result)
return updatedResults
}
dispatchGroup.leave()
}
} else if item.asset.mediaType == .image {
processImageItem(item: item, index: index, randomId: randomId) { result in
let _ = multipleResults.modify { results in
var updatedResults = results
updatedResults.append(result)
return updatedResults
}
dispatchGroup.leave()
}
} else {
dispatchGroup.leave()
}
}
dispatchGroup.notify(queue: .main) {
let results = multipleResults.with { $0 }
if results.count == totalItems {
var orderedResults: [MediaEditorScreenImpl.Result] = []
for id in order {
if let item = results.first(where: { $0.randomId == id }) {
orderedResults.append(item)
}
}
self.completion(results, { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
}
}
}
private func processVideoItem(item: EditingItem, index: Int, randomId: Int64, isLongVideo: Bool, completion: @escaping (MediaEditorScreenImpl.Result) -> Void) {
let asset = item.asset
let itemMediaEditor = setupMediaEditorForItem(item: item)
var caption = item.caption
caption = convertMarkdownToAttributes(caption)
var mediaAreas: [MediaArea] = []
var stickers: [TelegramMediaFile] = []
if let entities = item.values?.entities {
for entity in entities {
if let mediaArea = entity.mediaArea {
mediaAreas.append(mediaArea)
}
extractStickersFromEntity(entity, into: &stickers)
}
}
let firstFrameTime: CMTime
if let coverImageTimestamp = item.values?.coverImageTimestamp, !isLongVideo || index == 0 {
firstFrameTime = CMTime(seconds: coverImageTimestamp, preferredTimescale: CMTimeScale(60))
} else {
firstFrameTime = CMTime(seconds: item.values?.videoTrimRange?.lowerBound ?? 0.0, preferredTimescale: CMTimeScale(60))
}
PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { [weak self] avAsset, _, _ in
guard let avAsset else {
DispatchQueue.main.async {
if let self {
completion(self.createEmptyResult(randomId: randomId))
}
}
return
}
let duration: Double
if let videoTrimRange = item.values?.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(asset.duration, storyMaxVideoDuration)
}
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)]) { [weak self] _, cgImage, _, _, _ in
guard let self else {
return
}
DispatchQueue.main.async {
if let cgImage {
let image = UIImage(cgImage: cgImage)
itemMediaEditor.replaceSource(image, additionalImage: nil, time: firstFrameTime, mirror: false)
if let resultImage = itemMediaEditor.resultImage {
makeEditorImageComposition(
context: self.node.ciContext,
postbox: self.context.account.postbox,
inputImage: resultImage,
dimensions: storyDimensions,
values: itemMediaEditor.values,
time: firstFrameTime,
textScale: 2.0
) { coverImage in
if let coverImage = coverImage {
let result = MediaEditorScreenImpl.Result(
media: .video(
video: .asset(localIdentifier: asset.localIdentifier),
coverImage: coverImage,
values: itemMediaEditor.values,
duration: duration,
dimensions: itemMediaEditor.values.resultDimensions
),
mediaAreas: mediaAreas,
caption: caption,
coverTimestamp: itemMediaEditor.values.coverImageTimestamp,
options: self.state.privacy,
stickers: stickers,
randomId: randomId
)
completion(result)
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
}
}
}
private func processImageItem(item: EditingItem, index: Int, randomId: Int64, completion: @escaping (MediaEditorScreenImpl.Result) -> Void) {
let asset = item.asset
let itemMediaEditor = setupMediaEditorForItem(item: item)
var caption = item.caption
caption = convertMarkdownToAttributes(caption)
var mediaAreas: [MediaArea] = []
var stickers: [TelegramMediaFile] = []
if let entities = item.values?.entities {
for entity in entities {
if let mediaArea = entity.mediaArea {
mediaAreas.append(mediaArea)
}
extractStickersFromEntity(entity, into: &stickers)
}
}
let options = PHImageRequestOptions()
options.deliveryMode = .highQualityFormat
options.isNetworkAccessAllowed = true
PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { [weak self] image, _ in
guard let self else {
return
}
DispatchQueue.main.async {
if let image {
itemMediaEditor.replaceSource(image, additionalImage: nil, time: .zero, mirror: false)
if let resultImage = itemMediaEditor.resultImage {
makeEditorImageComposition(
context: self.node.ciContext,
postbox: self.context.account.postbox,
inputImage: resultImage,
dimensions: storyDimensions,
values: itemMediaEditor.values,
time: .zero,
textScale: 2.0
) { resultImage in
if let resultImage = resultImage {
let result = MediaEditorScreenImpl.Result(
media: .image(
image: resultImage,
dimensions: PixelDimensions(resultImage.size)
),
mediaAreas: mediaAreas,
caption: caption,
coverTimestamp: nil,
options: self.state.privacy,
stickers: stickers,
randomId: randomId
)
completion(result)
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
}
}
private func setupMediaEditorForItem(item: EditingItem) -> MediaEditor {
var values = item.values
if values?.videoTrimRange == nil {
values = values?.withUpdatedVideoTrimRange(0 ..< storyMaxVideoDuration)
}
return MediaEditor(
context: self.context,
mode: .default,
subject: .asset(item.asset),
values: values,
hasHistogram: false,
isStandalone: true
)
}
private func extractStickersFromEntity(_ entity: CodableDrawingEntity, into stickers: inout [TelegramMediaFile]) {
switch entity {
case let .sticker(stickerEntity):
if case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
case let .text(textEntity):
if let subEntities = textEntity.renderSubEntities {
for entity in subEntities {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
}
}
default:
break
}
}
private func createEmptyResult(randomId: Int64) -> MediaEditorScreenImpl.Result {
let emptyImage = UIImage()
return MediaEditorScreenImpl.Result(
media: .image(
image: emptyImage,
dimensions: PixelDimensions(emptyImage.size)
),
mediaAreas: [],
caption: NSAttributedString(),
coverTimestamp: nil,
options: self.state.privacy,
stickers: [],
randomId: randomId
)
}
private func processSingleItem() {
guard let mediaEditor = self.node.mediaEditor, let subject = self.node.subject, let actualSubject = self.node.actualSubject else {
return
}
var caption = self.node.getCaption()
caption = convertMarkdownToAttributes(caption)
var hasEntityChanges = false
let randomId: Int64
if case let .draft(_, id) = actualSubject, let id {
randomId = id
} else {
randomId = Int64.random(in: .min ... .max)
}
let codableEntities = mediaEditor.values.entities
var mediaAreas: [MediaArea] = []
if case let .draft(draft, _) = actualSubject {
if draft.values.entities != codableEntities {
hasEntityChanges = true
}
} else {
mediaAreas = self.initialMediaAreas ?? []
}
var stickers: [TelegramMediaFile] = []
for entity in codableEntities {
switch entity {
case let .sticker(stickerEntity):
if case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
case let .text(textEntity):
if let subEntities = textEntity.renderSubEntities {
for entity in subEntities {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
}
}
default:
break
}
if let mediaArea = entity.mediaArea {
mediaAreas.append(mediaArea)
}
}
var hasAnyChanges = self.node.hasAnyChanges
if self.isEditingStoryCover {
hasAnyChanges = false
}
if self.isEmbeddedEditor && !(hasAnyChanges || hasEntityChanges) {
self.saveDraft(id: randomId, isEdit: true)
self.completion([MediaEditorScreenImpl.Result(media: nil, mediaAreas: [], caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId)], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
return
}
if !(self.isEditingStory || self.isEditingStoryCover) {
let privacy = self.state.privacy
let _ = updateMediaEditorStoredStateInteractively(engine: self.context.engine, { current in
if let current {
return current.withUpdatedPrivacy(privacy)
} else {
return MediaEditorStoredState(privacy: privacy, textSettings: nil)
}
}).start()
}
if mediaEditor.resultIsVideo {
self.saveDraft(id: randomId)
var firstFrame: Signal<(UIImage?, UIImage?), NoError>
let firstFrameTime: CMTime
if let coverImageTimestamp = mediaEditor.values.coverImageTimestamp {
firstFrameTime = CMTime(seconds: coverImageTimestamp, preferredTimescale: CMTimeScale(60))
} else {
firstFrameTime = CMTime(seconds: mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0, preferredTimescale: CMTimeScale(60))
}
let videoResult: Signal<MediaResult.VideoResult, NoError>
var videoIsMirrored = false
let duration: Double
switch subject {
case let .empty(dimensions):
let image = generateImage(dimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
})!
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .single(.imageFile(path: tempImagePath))
duration = 3.0
firstFrame = .single((image, nil))
case let .image(image, _, _, _):
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .single(.imageFile(path: tempImagePath))
duration = 5.0
firstFrame = .single((image, nil))
case let .video(path, _, mirror, additionalPath, _, _, durationValue, _, _):
videoIsMirrored = mirror
videoResult = .single(.videoFile(path: path))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = durationValue
}
var additionalPath = additionalPath
if additionalPath == nil, let valuesAdditionalPath = mediaEditor.values.additionalVideoPath {
additionalPath = valuesAdditionalPath
}
firstFrame = Signal<(UIImage?, UIImage?), NoError> { subscriber in
let avAsset = AVURLAsset(url: URL(fileURLWithPath: path))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
if let cgImage {
if let additionalPath {
let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
if let additionalCGImage {
subscriber.putNext((UIImage(cgImage: cgImage), UIImage(cgImage: additionalCGImage)))
subscriber.putCompletion()
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
})
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
}
})
return ActionDisposable {
avAssetGenerator.cancelAllCGImageGeneration()
}
}
case let .videoCollage(items):
var maxDurationItem: (Double, Subject.VideoCollageItem)?
for item in items {
switch item.content {
case .image:
break
case let .video(_, duration):
if let (maxDuration, _) = maxDurationItem {
if duration > maxDuration {
maxDurationItem = (duration, item)
}
} else {
maxDurationItem = (duration, item)
}
case let .asset(asset):
if let (maxDuration, _) = maxDurationItem {
if asset.duration > maxDuration {
maxDurationItem = (asset.duration, item)
}
} else {
maxDurationItem = (asset.duration, item)
}
}
}
guard let (maxDuration, mainItem) = maxDurationItem else {
fatalError()
}
switch mainItem.content {
case let .video(path, _):
videoResult = .single(.videoFile(path: path))
case let .asset(asset):
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
default:
fatalError()
}
let image = generateImage(storyDimensions, opaque: false, scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
})!
firstFrame = .single((image, nil))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(maxDuration, storyMaxVideoDuration)
}
case let .asset(asset):
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
if asset.mediaType == .video {
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(asset.duration, storyMaxVideoDuration)
}
} else {
duration = 5.0
}
var additionalPath: String?
if let valuesAdditionalPath = mediaEditor.values.additionalVideoPath {
additionalPath = valuesAdditionalPath
}
firstFrame = Signal<(UIImage?, UIImage?), NoError> { subscriber in
if asset.mediaType == .video {
PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
if let avAsset {
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
if let cgImage {
if let additionalPath {
let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
if let additionalCGImage {
subscriber.putNext((UIImage(cgImage: cgImage), UIImage(cgImage: additionalCGImage)))
subscriber.putCompletion()
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
})
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
}
})
}
}
} else {
let options = PHImageRequestOptions()
options.deliveryMode = .highQualityFormat
PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { image, _ in
if let image {
if let additionalPath {
let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
if let additionalCGImage {
subscriber.putNext((image, UIImage(cgImage: additionalCGImage)))
subscriber.putCompletion()
} else {
subscriber.putNext((image, nil))
subscriber.putCompletion()
}
})
} else {
subscriber.putNext((image, nil))
subscriber.putCompletion()
}
}
}
}
return EmptyDisposable
}
case let .draft(draft, _):
let draftPath = draft.fullPath(engine: context.engine)
if draft.isVideo {
videoResult = .single(.videoFile(path: draftPath))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(draft.duration ?? 5.0, storyMaxVideoDuration)
}
firstFrame = Signal<(UIImage?, UIImage?), NoError> { subscriber in
let avAsset = AVURLAsset(url: URL(fileURLWithPath: draftPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
if let cgImage {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
})
return ActionDisposable {
avAssetGenerator.cancelAllCGImageGeneration()
}
}
} else {
videoResult = .single(.imageFile(path: draftPath))
duration = 5.0
if let image = UIImage(contentsOfFile: draftPath) {
firstFrame = .single((image, nil))
} else {
firstFrame = .single((UIImage(), nil))
}
}
case .message, .gift:
let peerId: EnginePeer.Id
if case let .message(messageIds) = subject {
peerId = messageIds.first!.peerId
} else {
peerId = self.context.account.peerId
}
let isNightTheme = mediaEditor.values.nightTheme
let wallpaper = getChatWallpaperImage(context: self.context, peerId: peerId)
|> map { _, image, nightImage -> UIImage? in
if isNightTheme {
return nightImage ?? image
} else {
return image
}
}
videoResult = wallpaper
|> mapToSignal { image in
if let image {
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
return .single(.imageFile(path: tempImagePath))
} else {
return .complete()
}
}
firstFrame = wallpaper
|> map { image in
return (image, nil)
}
duration = 5.0
case .sticker:
let image = generateImage(storyDimensions, contextGenerator: { size, context in
context.clear(CGRect(origin: .zero, size: size))
}, opaque: false, scale: 1.0)
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).png"
if let data = image?.pngData() {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .single(.imageFile(path: tempImagePath))
duration = 3.0
firstFrame = .single((image, nil))
case .assets:
fatalError()
}
let _ = combineLatest(queue: Queue.mainQueue(), firstFrame, videoResult)
.start(next: { [weak self] images, videoResult in
if let self {
let (image, additionalImage) = images
var currentImage = mediaEditor.resultImage
if let image {
mediaEditor.replaceSource(image, additionalImage: additionalImage, time: firstFrameTime, mirror: true)
if let updatedImage = mediaEditor.getResultImage(mirror: videoIsMirrored) {
currentImage = updatedImage
}
}
var inputImage: UIImage
if let currentImage {
inputImage = currentImage
} else if let image {
inputImage = image
} else {
inputImage = UIImage()
}
makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: inputImage, dimensions: storyDimensions, values: mediaEditor.values, time: firstFrameTime, textScale: 2.0, completion: { [weak self] coverImage in
if let self {
self.willComplete(coverImage, true, { [weak self] in
guard let self else {
return
}
Logger.shared.log("MediaEditor", "Completed with video \(videoResult)")
self.completion([MediaEditorScreenImpl.Result(media: .video(video: videoResult, coverImage: coverImage, values: mediaEditor.values, duration: duration, dimensions: mediaEditor.values.resultDimensions), mediaAreas: mediaAreas, caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId)], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
})
}
})
}
})
if case let .draft(draft, id) = actualSubject, id == nil {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: false)
}
} else if let image = mediaEditor.resultImage {
self.saveDraft(id: randomId)
var values = mediaEditor.values
var outputDimensions: CGSize?
if case .avatarEditor = self.mode {
outputDimensions = CGSize(width: 640.0, height: 640.0)
values = values.withUpdatedQualityPreset(.profile)
}
makeEditorImageComposition(
context: self.node.ciContext,
postbox: self.context.account.postbox,
inputImage: image,
dimensions: storyDimensions,
outputDimensions: outputDimensions,
values: values,
time: .zero,
textScale: 2.0,
completion: { [weak self] resultImage in
if let self, let resultImage {
self.willComplete(resultImage, false, { [weak self] in
guard let self else {
return
}
Logger.shared.log("MediaEditor", "Completed with image \(resultImage)")
self.completion([MediaEditorScreenImpl.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, coverTimestamp: nil, options: self.state.privacy, stickers: stickers, randomId: randomId)], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
if case let .draft(draft, id) = actualSubject, id == nil {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: true)
}
})
}
})
}
}
private func updateMediaEditorEntities() {
guard let mediaEditor = self.node.mediaEditor else {
return
}
let entities = self.node.entitiesView.entities.filter { !($0 is DrawingMediaEntity) }
let codableEntities = DrawingEntitiesView.encodeEntities(entities, entitiesView: self.node.entitiesView)
mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities)
}
private var didComplete = false
func requestStoryCompletion(animated: Bool) {
guard let mediaEditor = self.node.mediaEditor, !self.didComplete else {
return
}
self.didComplete = true
self.updateMediaEditorEntities()
mediaEditor.stop()
mediaEditor.invalidate()
self.node.entitiesView.invalidate()
if let navigationController = self.navigationController as? NavigationController {
navigationController.updateRootContainerTransitionOffset(0.0, transition: .immediate)
}
var multipleItems: [EditingItem] = []
var isLongVideo = false
if self.node.items.count > 1 {
multipleItems = self.node.items.filter({ $0.isEnabled })
} else if case let .asset(asset) = self.node.subject {
let duration: Double
if let playerDuration = mediaEditor.duration {
duration = playerDuration
} else {
duration = asset.duration
}
if duration > storyMaxVideoDuration {
let originalDuration = mediaEditor.originalDuration ?? asset.duration
let values = mediaEditor.values
let storyCount = min(storyMaxCombinedVideoCount, Int(ceil(duration / storyMaxVideoDuration)))
var start = values.videoTrimRange?.lowerBound ?? 0
for i in 0 ..< storyCount {
let trimmedValues = values.withUpdatedVideoTrimRange(start ..< min(start + storyMaxVideoDuration, originalDuration))
var editingItem = EditingItem(asset: asset)
if i == 0 {
editingItem.caption = self.node.getCaption()
}
editingItem.values = trimmedValues
multipleItems.append(editingItem)
start += storyMaxVideoDuration
}
isLongVideo = true
}
}
if multipleItems.count > 1 {
self.processMultipleItems(items: multipleItems, isLongVideo: isLongVideo)
} else {
self.processSingleItem()
}
self.dismissAllTooltips()
}
func requestStickerCompletion(animated: Bool) { func requestStickerCompletion(animated: Bool) {
guard let mediaEditor = self.node.mediaEditor else { guard let mediaEditor = self.node.mediaEditor else {
return return
@@ -8257,13 +7448,6 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
let values = mediaEditor.values.withUpdatedCoverDimensions(dimensions) let values = mediaEditor.values.withUpdatedCoverDimensions(dimensions)
makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: image, dimensions: storyDimensions, outputDimensions: dimensions.aspectFitted(CGSize(width: 1080, height: 1080)), values: values, time: .zero, textScale: 2.0, completion: { [weak self] resultImage in makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: image, dimensions: storyDimensions, outputDimensions: dimensions.aspectFitted(CGSize(width: 1080, height: 1080)), values: values, time: .zero, textScale: 2.0, completion: { [weak self] resultImage in
if let self, let resultImage { if let self, let resultImage {
#if DEBUG
if let data = resultImage.jpegData(compressionQuality: 0.7) {
let path = NSTemporaryDirectory() + "\(Int(Date().timeIntervalSince1970)).jpg"
try? data.write(to: URL(fileURLWithPath: path))
}
#endif
self.completion([MediaEditorScreenImpl.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)))], { [weak self] finished in self.completion([MediaEditorScreenImpl.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)))], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss() self?.dismiss()
@@ -9105,7 +8289,7 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
self.node.updateEditProgress(progress, cancel: cancel) self.node.updateEditProgress(progress, cancel: cancel)
} }
fileprivate func dismissAllTooltips() { func dismissAllTooltips() {
self.window?.forEachController({ controller in self.window?.forEachController({ controller in
if let controller = controller as? TooltipScreen { if let controller = controller as? TooltipScreen {
controller.dismiss() controller.dismiss()

View File

@@ -0,0 +1,837 @@
import Foundation
import UIKit
import Display
import AVFoundation
import SwiftSignalKit
import TelegramCore
import TextFormat
import Photos
import MediaEditor
import DrawingUI
extension MediaEditorScreenImpl {
func requestStoryCompletion(animated: Bool) {
guard let mediaEditor = self.node.mediaEditor, !self.didComplete else {
return
}
self.didComplete = true
self.updateMediaEditorEntities()
mediaEditor.stop()
mediaEditor.invalidate()
self.node.entitiesView.invalidate()
if let navigationController = self.navigationController as? NavigationController {
navigationController.updateRootContainerTransitionOffset(0.0, transition: .immediate)
}
var multipleItems: [EditingItem] = []
var isLongVideo = false
if self.node.items.count > 1 {
multipleItems = self.node.items.filter({ $0.isEnabled })
} else if case let .asset(asset) = self.node.subject {
let duration: Double
if let playerDuration = mediaEditor.duration {
duration = playerDuration
} else {
duration = asset.duration
}
if duration > storyMaxVideoDuration {
let originalDuration = mediaEditor.originalDuration ?? asset.duration
let values = mediaEditor.values
let storyCount = min(storyMaxCombinedVideoCount, Int(ceil(duration / storyMaxVideoDuration)))
var start = values.videoTrimRange?.lowerBound ?? 0
for i in 0 ..< storyCount {
let trimmedValues = values.withUpdatedVideoTrimRange(start ..< min(start + storyMaxVideoDuration, originalDuration))
var editingItem = EditingItem(asset: asset)
if i == 0 {
editingItem.caption = self.node.getCaption()
}
editingItem.values = trimmedValues
multipleItems.append(editingItem)
start += storyMaxVideoDuration
}
isLongVideo = true
}
}
if multipleItems.count > 1 {
self.processMultipleItems(items: multipleItems, isLongVideo: isLongVideo)
} else {
self.processSingleItem()
}
self.dismissAllTooltips()
}
private func processSingleItem() {
guard let mediaEditor = self.node.mediaEditor, let subject = self.node.subject, let actualSubject = self.node.actualSubject else {
return
}
var caption = self.node.getCaption()
caption = convertMarkdownToAttributes(caption)
var hasEntityChanges = false
let randomId: Int64
if case let .draft(_, id) = actualSubject, let id {
randomId = id
} else {
randomId = Int64.random(in: .min ... .max)
}
let codableEntities = mediaEditor.values.entities
var mediaAreas: [MediaArea] = []
if case let .draft(draft, _) = actualSubject {
if draft.values.entities != codableEntities {
hasEntityChanges = true
}
} else {
mediaAreas = self.initialMediaAreas ?? []
}
var stickers: [TelegramMediaFile] = []
for entity in codableEntities {
switch entity {
case let .sticker(stickerEntity):
if case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
case let .text(textEntity):
if let subEntities = textEntity.renderSubEntities {
for entity in subEntities {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
}
}
default:
break
}
if let mediaArea = entity.mediaArea {
mediaAreas.append(mediaArea)
}
}
var hasAnyChanges = self.node.hasAnyChanges
if self.isEditingStoryCover {
hasAnyChanges = false
}
if self.isEmbeddedEditor && !(hasAnyChanges || hasEntityChanges) {
self.saveDraft(id: randomId, isEdit: true)
self.completion([MediaEditorScreenImpl.Result(media: nil, mediaAreas: [], caption: caption, coverTimestamp: mediaEditor.values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId)], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
return
}
if !(self.isEditingStory || self.isEditingStoryCover) {
let privacy = self.state.privacy
let _ = updateMediaEditorStoredStateInteractively(engine: self.context.engine, { current in
if let current {
return current.withUpdatedPrivacy(privacy)
} else {
return MediaEditorStoredState(privacy: privacy, textSettings: nil)
}
}).start()
}
if mediaEditor.resultIsVideo {
self.saveDraft(id: randomId)
var firstFrame: Signal<(UIImage?, UIImage?), NoError>
let firstFrameTime: CMTime
if let coverImageTimestamp = mediaEditor.values.coverImageTimestamp {
firstFrameTime = CMTime(seconds: coverImageTimestamp, preferredTimescale: CMTimeScale(60))
} else {
firstFrameTime = CMTime(seconds: mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0, preferredTimescale: CMTimeScale(60))
}
let videoResult: Signal<MediaResult.VideoResult, NoError>
var videoIsMirrored = false
let duration: Double
switch subject {
case let .empty(dimensions):
let image = generateImage(dimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
})!
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .single(.imageFile(path: tempImagePath))
duration = 3.0
firstFrame = .single((image, nil))
case let .image(image, _, _, _):
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .single(.imageFile(path: tempImagePath))
duration = 5.0
firstFrame = .single((image, nil))
case let .video(path, _, mirror, additionalPath, _, _, durationValue, _, _):
videoIsMirrored = mirror
videoResult = .single(.videoFile(path: path))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = durationValue
}
var additionalPath = additionalPath
if additionalPath == nil, let valuesAdditionalPath = mediaEditor.values.additionalVideoPath {
additionalPath = valuesAdditionalPath
}
firstFrame = Signal<(UIImage?, UIImage?), NoError> { subscriber in
let avAsset = AVURLAsset(url: URL(fileURLWithPath: path))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
if let cgImage {
if let additionalPath {
let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
if let additionalCGImage {
subscriber.putNext((UIImage(cgImage: cgImage), UIImage(cgImage: additionalCGImage)))
subscriber.putCompletion()
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
})
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
}
})
return ActionDisposable {
avAssetGenerator.cancelAllCGImageGeneration()
}
}
case let .videoCollage(items):
var maxDurationItem: (Double, Subject.VideoCollageItem)?
for item in items {
switch item.content {
case .image:
break
case let .video(_, duration):
if let (maxDuration, _) = maxDurationItem {
if duration > maxDuration {
maxDurationItem = (duration, item)
}
} else {
maxDurationItem = (duration, item)
}
case let .asset(asset):
if let (maxDuration, _) = maxDurationItem {
if asset.duration > maxDuration {
maxDurationItem = (asset.duration, item)
}
} else {
maxDurationItem = (asset.duration, item)
}
}
}
guard let (maxDuration, mainItem) = maxDurationItem else {
fatalError()
}
switch mainItem.content {
case let .video(path, _):
videoResult = .single(.videoFile(path: path))
case let .asset(asset):
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
default:
fatalError()
}
let image = generateImage(storyDimensions, opaque: false, scale: 1.0, rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
})!
firstFrame = .single((image, nil))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(maxDuration, storyMaxVideoDuration)
}
case let .asset(asset):
videoResult = .single(.asset(localIdentifier: asset.localIdentifier))
if asset.mediaType == .video {
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(asset.duration, storyMaxVideoDuration)
}
} else {
duration = 5.0
}
var additionalPath: String?
if let valuesAdditionalPath = mediaEditor.values.additionalVideoPath {
additionalPath = valuesAdditionalPath
}
firstFrame = Signal<(UIImage?, UIImage?), NoError> { subscriber in
if asset.mediaType == .video {
PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
if let avAsset {
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
if let cgImage {
if let additionalPath {
let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
if let additionalCGImage {
subscriber.putNext((UIImage(cgImage: cgImage), UIImage(cgImage: additionalCGImage)))
subscriber.putCompletion()
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
})
} else {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
}
})
}
}
} else {
let options = PHImageRequestOptions()
options.deliveryMode = .highQualityFormat
PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { image, _ in
if let image {
if let additionalPath {
let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
if let additionalCGImage {
subscriber.putNext((image, UIImage(cgImage: additionalCGImage)))
subscriber.putCompletion()
} else {
subscriber.putNext((image, nil))
subscriber.putCompletion()
}
})
} else {
subscriber.putNext((image, nil))
subscriber.putCompletion()
}
}
}
}
return EmptyDisposable
}
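// Draft: reuse the stored file; videos get a frame generated from the draft file, photos are loaded directly from disk.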
case let .draft(draft, _):
let draftPath = draft.fullPath(engine: context.engine)
if draft.isVideo {
videoResult = .single(.videoFile(path: draftPath))
if let videoTrimRange = mediaEditor.values.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(draft.duration ?? 5.0, storyMaxVideoDuration)
}
firstFrame = Signal<(UIImage?, UIImage?), NoError> { subscriber in
let avAsset = AVURLAsset(url: URL(fileURLWithPath: draftPath))
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
if let cgImage {
subscriber.putNext((UIImage(cgImage: cgImage), nil))
subscriber.putCompletion()
}
})
return ActionDisposable {
avAssetGenerator.cancelAllCGImageGeneration()
}
}
} else {
videoResult = .single(.imageFile(path: draftPath))
duration = 5.0
if let image = UIImage(contentsOfFile: draftPath) {
firstFrame = .single((image, nil))
} else {
firstFrame = .single((UIImage(), nil))
}
}
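// Message and gift stories are composed over the chat wallpaper, which is written out as a temporary JPEG background.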
case .message, .gift:
let peerId: EnginePeer.Id
if case let .message(messageIds) = subject {
peerId = messageIds.first!.peerId
} else {
peerId = self.context.account.peerId
}
let isNightTheme = mediaEditor.values.nightTheme
let wallpaper = getChatWallpaperImage(context: self.context, peerId: peerId)
|> map { _, image, nightImage -> UIImage? in
if isNightTheme {
return nightImage ?? image
} else {
return image
}
}
videoResult = wallpaper
|> mapToSignal { image in
if let image {
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).jpg"
if let data = image.jpegData(compressionQuality: 0.85) {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
return .single(.imageFile(path: tempImagePath))
} else {
return .complete()
}
}
firstFrame = wallpaper
|> map { image in
return (image, nil)
}
duration = 5.0
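// Stickers are composed over a transparent canvas with a fixed 3-second duration.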
case .sticker:
let image = generateImage(storyDimensions, contextGenerator: { size, context in
context.clear(CGRect(origin: .zero, size: size))
}, opaque: false, scale: 1.0)
let tempImagePath = NSTemporaryDirectory() + "\(Int64.random(in: Int64.min ... Int64.max)).png"
if let data = image?.pngData() {
try? data.write(to: URL(fileURLWithPath: tempImagePath))
}
videoResult = .single(.imageFile(path: tempImagePath))
duration = 3.0
firstFrame = .single((image, nil))
case .assets:
fatalError()
}
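// Once the cover frame and the video source are known, rebuild the composition, render the cover image and hand the final result to the completion handler.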
let _ = combineLatest(queue: Queue.mainQueue(), firstFrame, videoResult)
.start(next: { [weak self] images, videoResult in
if let self {
let (image, additionalImage) = images
var currentImage = mediaEditor.resultImage
if let image {
mediaEditor.replaceSource(image, additionalImage: additionalImage, time: firstFrameTime, mirror: true)
if let updatedImage = mediaEditor.getResultImage(mirror: videoIsMirrored) {
currentImage = updatedImage
}
}
var inputImage: UIImage
if let currentImage {
inputImage = currentImage
} else if let image {
inputImage = image
} else {
inputImage = UIImage()
}
var values = mediaEditor.values
if case .avatarEditor = self.mode, values.videoTrimRange == nil && duration > avatarMaxVideoDuration {
values = values.withUpdatedVideoTrimRange(0 ..< avatarMaxVideoDuration)
}
makeEditorImageComposition(context: self.node.ciContext, postbox: self.context.account.postbox, inputImage: inputImage, dimensions: storyDimensions, values: values, time: firstFrameTime, textScale: 2.0, completion: { [weak self] coverImage in
if let self {
self.willComplete(coverImage, true, { [weak self] in
guard let self else {
return
}
Logger.shared.log("MediaEditor", "Completed with video \(videoResult)")
self.completion([MediaEditorScreenImpl.Result(media: .video(video: videoResult, coverImage: coverImage, values: values, duration: duration, dimensions: values.resultDimensions), mediaAreas: mediaAreas, caption: caption, coverTimestamp: values.coverImageTimestamp, options: self.state.privacy, stickers: stickers, randomId: randomId)], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
})
}
})
}
})
if case let .draft(draft, id) = actualSubject, id == nil {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: false)
}
} else if let image = mediaEditor.resultImage {
self.saveDraft(id: randomId)
var values = mediaEditor.values
var outputDimensions: CGSize?
if case .avatarEditor = self.mode {
outputDimensions = CGSize(width: 640.0, height: 640.0)
values = values.withUpdatedQualityPreset(.profile)
}
makeEditorImageComposition(
context: self.node.ciContext,
postbox: self.context.account.postbox,
inputImage: image,
dimensions: storyDimensions,
outputDimensions: outputDimensions,
values: values,
time: .zero,
textScale: 2.0,
completion: { [weak self] resultImage in
if let self, let resultImage {
self.willComplete(resultImage, false, { [weak self] in
guard let self else {
return
}
Logger.shared.log("MediaEditor", "Completed with image \(resultImage)")
self.completion([MediaEditorScreenImpl.Result(media: .image(image: resultImage, dimensions: PixelDimensions(resultImage.size)), mediaAreas: mediaAreas, caption: caption, coverTimestamp: nil, options: self.state.privacy, stickers: stickers, randomId: randomId)], { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
if case let .draft(draft, id) = actualSubject, id == nil {
removeStoryDraft(engine: self.context.engine, path: draft.path, delete: true)
}
})
}
})
}
}
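// Processes several selected items into story results, preserving the current item's caption and adjustments and keeping the original selection order.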
private func processMultipleItems(items: [EditingItem], isLongVideo: Bool) {
guard !items.isEmpty else {
return
}
var items = items
if let mediaEditor = self.node.mediaEditor, case let .asset(asset) = self.node.subject, let currentItemIndex = items.firstIndex(where: { $0.asset.localIdentifier == asset.localIdentifier }) {
var updatedCurrentItem = items[currentItemIndex]
updatedCurrentItem.caption = self.node.getCaption()
updatedCurrentItem.values = mediaEditor.values
items[currentItemIndex] = updatedCurrentItem
}
let multipleResults = Atomic<[MediaEditorScreenImpl.Result]>(value: [])
let totalItems = items.count
let dispatchGroup = DispatchGroup()
let privacy = self.state.privacy
if !(self.isEditingStory || self.isEditingStoryCover) {
let _ = updateMediaEditorStoredStateInteractively(engine: self.context.engine, { current in
if let current {
return current.withUpdatedPrivacy(privacy)
} else {
return MediaEditorStoredState(privacy: privacy, textSettings: nil)
}
}).start()
}
var order: [Int64] = []
for (index, item) in items.enumerated() {
guard item.isEnabled else {
continue
}
dispatchGroup.enter()
let randomId = Int64.random(in: .min ... .max)
order.append(randomId)
if item.asset.mediaType == .video {
processVideoItem(item: item, index: index, randomId: randomId, isLongVideo: isLongVideo) { result in
let _ = multipleResults.modify { results in
var updatedResults = results
updatedResults.append(result)
return updatedResults
}
dispatchGroup.leave()
}
} else if item.asset.mediaType == .image {
processImageItem(item: item, index: index, randomId: randomId) { result in
let _ = multipleResults.modify { results in
var updatedResults = results
updatedResults.append(result)
return updatedResults
}
dispatchGroup.leave()
}
} else {
dispatchGroup.leave()
}
}
dispatchGroup.notify(queue: .main) {
let results = multipleResults.with { $0 }
if results.count == totalItems {
var orderedResults: [MediaEditorScreenImpl.Result] = []
for id in order {
if let item = results.first(where: { $0.randomId == id }) {
orderedResults.append(item)
}
}
self.completion(orderedResults, { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
}
}
}
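// Renders a single video item: extracts the cover frame, applies the item's adjustments and produces a video result.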
private func processVideoItem(item: EditingItem, index: Int, randomId: Int64, isLongVideo: Bool, completion: @escaping (MediaEditorScreenImpl.Result) -> Void) {
let asset = item.asset
let itemMediaEditor = setupMediaEditorForItem(item: item)
var caption = item.caption
caption = convertMarkdownToAttributes(caption)
var mediaAreas: [MediaArea] = []
var stickers: [TelegramMediaFile] = []
if let entities = item.values?.entities {
for entity in entities {
if let mediaArea = entity.mediaArea {
mediaAreas.append(mediaArea)
}
extractStickersFromEntity(entity, into: &stickers)
}
}
let firstFrameTime: CMTime
if let coverImageTimestamp = item.values?.coverImageTimestamp, !isLongVideo || index == 0 {
firstFrameTime = CMTime(seconds: coverImageTimestamp, preferredTimescale: CMTimeScale(60))
} else {
firstFrameTime = CMTime(seconds: item.values?.videoTrimRange?.lowerBound ?? 0.0, preferredTimescale: CMTimeScale(60))
}
PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { [weak self] avAsset, _, _ in
guard let avAsset else {
DispatchQueue.main.async {
if let self {
completion(self.createEmptyResult(randomId: randomId))
}
}
return
}
let duration: Double
if let videoTrimRange = item.values?.videoTrimRange {
duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
} else {
duration = min(asset.duration, storyMaxVideoDuration)
}
let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
avAssetGenerator.appliesPreferredTrackTransform = true
avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)]) { [weak self] _, cgImage, _, _, _ in
guard let self else {
return
}
DispatchQueue.main.async {
if let cgImage {
let image = UIImage(cgImage: cgImage)
itemMediaEditor.replaceSource(image, additionalImage: nil, time: firstFrameTime, mirror: false)
if let resultImage = itemMediaEditor.resultImage {
makeEditorImageComposition(
context: self.node.ciContext,
postbox: self.context.account.postbox,
inputImage: resultImage,
dimensions: storyDimensions,
values: itemMediaEditor.values,
time: firstFrameTime,
textScale: 2.0
) { coverImage in
if let coverImage = coverImage {
let result = MediaEditorScreenImpl.Result(
media: .video(
video: .asset(localIdentifier: asset.localIdentifier),
coverImage: coverImage,
values: itemMediaEditor.values,
duration: duration,
dimensions: itemMediaEditor.values.resultDimensions
),
mediaAreas: mediaAreas,
caption: caption,
coverTimestamp: itemMediaEditor.values.coverImageTimestamp,
options: self.state.privacy,
stickers: stickers,
randomId: randomId
)
completion(result)
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
}
}
}
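// Renders a single photo item: requests the full-size image, applies the item's adjustments and produces an image result.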
private func processImageItem(item: EditingItem, index: Int, randomId: Int64, completion: @escaping (MediaEditorScreenImpl.Result) -> Void) {
let asset = item.asset
let itemMediaEditor = setupMediaEditorForItem(item: item)
var caption = item.caption
caption = convertMarkdownToAttributes(caption)
var mediaAreas: [MediaArea] = []
var stickers: [TelegramMediaFile] = []
if let entities = item.values?.entities {
for entity in entities {
if let mediaArea = entity.mediaArea {
mediaAreas.append(mediaArea)
}
extractStickersFromEntity(entity, into: &stickers)
}
}
let options = PHImageRequestOptions()
options.deliveryMode = .highQualityFormat
options.isNetworkAccessAllowed = true
PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .default, options: options) { [weak self] image, _ in
guard let self else {
return
}
DispatchQueue.main.async {
if let image {
itemMediaEditor.replaceSource(image, additionalImage: nil, time: .zero, mirror: false)
if let resultImage = itemMediaEditor.resultImage {
makeEditorImageComposition(
context: self.node.ciContext,
postbox: self.context.account.postbox,
inputImage: resultImage,
dimensions: storyDimensions,
values: itemMediaEditor.values,
time: .zero,
textScale: 2.0
) { resultImage in
if let resultImage = resultImage {
let result = MediaEditorScreenImpl.Result(
media: .image(
image: resultImage,
dimensions: PixelDimensions(resultImage.size)
),
mediaAreas: mediaAreas,
caption: caption,
coverTimestamp: nil,
options: self.state.privacy,
stickers: stickers,
randomId: randomId
)
completion(result)
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
} else {
completion(self.createEmptyResult(randomId: randomId))
}
}
}
}
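// Creates a standalone media editor for an item, clamping the trim range to the maximum story duration if none is set.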
private func setupMediaEditorForItem(item: EditingItem) -> MediaEditor {
var values = item.values
if values?.videoTrimRange == nil {
values = values?.withUpdatedVideoTrimRange(0 ..< storyMaxVideoDuration)
}
return MediaEditor(
context: self.context,
mode: .default,
subject: .asset(item.asset),
values: values,
hasHistogram: false,
isStandalone: true
)
}
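// Collects sticker files referenced by a drawing entity, including stickers nested inside text entities.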
private func extractStickersFromEntity(_ entity: CodableDrawingEntity, into stickers: inout [TelegramMediaFile]) {
switch entity {
case let .sticker(stickerEntity):
if case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
case let .text(textEntity):
if let subEntities = textEntity.renderSubEntities {
for entity in subEntities {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file, fileType) = stickerEntity.content, case .sticker = fileType {
stickers.append(file.media)
}
}
}
default:
break
}
}
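// Fallback result used when frame extraction or composition fails for an item.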
private func createEmptyResult(randomId: Int64) -> MediaEditorScreenImpl.Result {
let emptyImage = UIImage()
return MediaEditorScreenImpl.Result(
media: .image(
image: emptyImage,
dimensions: PixelDimensions(emptyImage.size)
),
mediaAreas: [],
caption: NSAttributedString(),
coverTimestamp: nil,
options: self.state.privacy,
stickers: [],
randomId: randomId
)
}
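// Re-encodes the current canvas entities (excluding the base media entity) into the editor values.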
func updateMediaEditorEntities() {
guard let mediaEditor = self.node.mediaEditor else {
return
}
let entities = self.node.entitiesView.entities.filter { !($0 is DrawingMediaEntity) }
let codableEntities = DrawingEntitiesView.encodeEntities(entities, entitiesView: self.node.entitiesView)
mediaEditor.setDrawingAndEntities(data: nil, image: mediaEditor.values.drawing, entities: codableEntities)
}
}

View File

@ -1099,7 +1099,7 @@ private final class StarsTransactionSheetContent: CombinedComponent {
                }
            }
            if let starRefPeerId = transaction.starrefPeerId, let starRefPeer = state.peerMap[starRefPeerId] {
-               if !transaction.flags.contains(.isPaidMessage) {
+               if !transaction.flags.contains(.isPaidMessage) && !transaction.flags.contains(.isStarGiftResale) {
                    tableItems.append(.init(
                        id: "to",
                        title: strings.StarsTransaction_StarRefReason_Affiliate,
@ -1130,7 +1130,7 @@
                ))
            }
-           if let toPeer {
+           if let toPeer, !transaction.flags.contains(.isStarGiftResale) {
                tableItems.append(.init(
                    id: "referred",
                    title: transaction.flags.contains(.isPaidMessage) ? strings.Stars_Transaction_From : strings.StarsTransaction_StarRefReason_Referred,
@ -1162,7 +1162,7 @@
                }
            }
            if let starrefCommissionPermille = transaction.starrefCommissionPermille, transaction.starrefPeerId != nil {
-               if transaction.flags.contains(.isPaidMessage) {
+               if transaction.flags.contains(.isPaidMessage) || transaction.flags.contains(.isStarGiftResale) {
                    var totalStars = transaction.count
                    if let starrefCount = transaction.starrefAmount {
                        totalStars = totalStars + starrefCount

View File

@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "price (2).pdf",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

View File

@ -1946,12 +1946,14 @@ public final class ChatHistoryListNodeImpl: ListView, ChatHistoryNode, ChatHisto
            }
            var audioTranscriptionProvidedByBoost = false
+           var autoTranslate = false
            var isCopyProtectionEnabled: Bool = data.initialData?.peer?.isCopyProtectionEnabled ?? false
            for entry in view.additionalData {
                if case let .peer(_, maybePeer) = entry, let peer = maybePeer {
                    isCopyProtectionEnabled = peer.isCopyProtectionEnabled
-                   if let channel = peer as? TelegramChannel, let boostLevel = channel.approximateBoostLevel {
-                       if boostLevel >= premiumConfiguration.minGroupAudioTranscriptionLevel {
+                   if let channel = peer as? TelegramChannel {
+                       autoTranslate = channel.flags.contains(.autoTranslateEnabled)
+                       if let boostLevel = channel.approximateBoostLevel, boostLevel >= premiumConfiguration.minGroupAudioTranscriptionLevel {
                            audioTranscriptionProvidedByBoost = true
                        }
                    }
@ -1964,7 +1966,7 @@
            )
            var translateToLanguage: (fromLang: String, toLang: String)?
-           if let translationState, isPremium && translationState.isEnabled {
+           if let translationState, (isPremium || autoTranslate) && translationState.isEnabled {
                var languageCode = translationState.toLang ?? chatPresentationData.strings.baseLanguageCode
                let rawSuffix = "-raw"
                if languageCode.hasSuffix(rawSuffix) {

View File

@ -187,19 +187,27 @@ final class ChatTranslationPanelNode: ASDisplayNode {
            }
            let isPremium = self.chatInterfaceState?.isPremium ?? false
-           if isPremium {
+           var translationAvailable = isPremium
+           if let channel = self.chatInterfaceState?.renderedPeer?.chatMainPeer as? TelegramChannel, channel.flags.contains(.autoTranslateEnabled) {
+               translationAvailable = true
+           }
+           if translationAvailable {
                self.interfaceInteraction?.toggleTranslation(translationState.isEnabled ? .original : .translated)
            } else if !translationState.isEnabled {
-               let context = self.context
-               var replaceImpl: ((ViewController) -> Void)?
-               let controller = PremiumDemoScreen(context: context, subject: .translation, action: {
-                   let controller = PremiumIntroScreen(context: context, source: .translation)
-                   replaceImpl?(controller)
-               })
-               replaceImpl = { [weak controller] c in
-                   controller?.replace(with: c)
-               }
-               self.interfaceInteraction?.chatController()?.push(controller)
+               if !isPremium {
+                   let context = self.context
+                   var replaceImpl: ((ViewController) -> Void)?
+                   let controller = PremiumDemoScreen(context: context, subject: .translation, action: {
+                       let controller = PremiumIntroScreen(context: context, source: .translation)
+                       replaceImpl?(controller)
+                   })
+                   replaceImpl = { [weak controller] c in
+                       controller?.replace(with: c)
+                   }
+                   self.interfaceInteraction?.chatController()?.push(controller)
+               }
            }
} }

View File

@ -3551,7 +3551,7 @@ public final class SharedAccountContextImpl: SharedAccountContext {
        }
        let editorController = MediaEditorScreenImpl(
            context: context,
-           mode: .storyEditor,
+           mode: .storyEditor(remainingCount: 1),
            subject: subject,
            customTarget: nil,
            initialCaption: text.flatMap { NSAttributedString(string: $0) },
@ -3716,7 +3716,7 @@
        let presentationData = context.sharedContext.currentPresentationData.with { $0 }
        let controller = MediaEditorScreenImpl(
            context: context,
-           mode: .storyEditor,
+           mode: .storyEditor(remainingCount: 1),
            subject: editorSubject,
            transitionIn: nil,
            transitionOut: { _, _ in

View File

@ -346,7 +346,7 @@ public final class TelegramRootController: NavigationController, TelegramRootCon
                return nil
            }
        },
-       completion: { result, resultTransition, dismissed in
+       completion: { result, resultTransition, storyRemainingCount, dismissed in
            let subject: Signal<MediaEditorScreenImpl.Subject?, NoError> = result
            |> map { value -> MediaEditorScreenImpl.Subject? in
                func editorPIPPosition(_ position: CameraScreenImpl.PIPPosition) -> MediaEditorScreenImpl.PIPPosition {
@ -422,7 +422,7 @@
        let controller = MediaEditorScreenImpl(
            context: context,
-           mode: .storyEditor,
+           mode: .storyEditor(remainingCount: storyRemainingCount ?? 1),
            subject: subject,
            customTarget: mediaEditorCustomTarget,
            transitionIn: transitionIn,