[WIP] Stickers editor

Ilya Laktyushin 2024-04-10 17:37:37 +04:00
parent d4c13120f8
commit 3e3b04e495
17 changed files with 486 additions and 279 deletions

View File

@ -11742,7 +11742,7 @@ Sorry for the inconvenience.";
"ReportAd.Help" = "Learn more about [Telegram Ad Policies and Guidelines]().";
"ReportAd.Help_URL" = "https://ads.telegram.org/guidelines";
"ReportAd.Reported" = "We will review this ad to ensure it matches our [Ad Policies and Guidelines]().";
"ReportAd.Hidden" = "Ads are hidden now.";
"ReportAd.Hidden" = "You will no longer see ads from Telegram.";
"AdsInfo.Title" = "About These Ads";
"AdsInfo.Info" = "Telegram Ads are very different from ads on other platforms. Ads such as this one:";

View File

@ -287,6 +287,7 @@ public final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
private let bytesPerRow: Int
public var frameCount: Int
public let frameRate: Int
public var duration: Double
fileprivate var currentFrame: Int
private let source: SoftwareVideoSource?
@ -316,17 +317,24 @@ public final class VideoStickerDirectFrameSource: AnimatedStickerFrameSource {
self.image = nil
self.frameRate = Int(cache.frameRate)
self.frameCount = Int(cache.frameCount)
if self.frameRate > 0 {
self.duration = Double(self.frameCount) / Double(self.frameRate)
} else {
self.duration = 0.0
}
} else if let data = try? Data(contentsOf: URL(fileURLWithPath: path)), let image = WebP.convert(fromWebP: data) {
self.source = nil
self.image = image
self.frameRate = 1
self.frameCount = 1
self.duration = 0.0
} else {
let source = SoftwareVideoSource(path: path, hintVP9: true, unpremultiplyAlpha: unpremultiplyAlpha)
self.source = source
self.image = nil
self.frameRate = min(30, source.getFramerate())
self.frameCount = 0
self.duration = source.reportedDuration.seconds
}
}
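Editor note: the new `duration` property derives its value from the cached frame count and frame rate when both are known, and otherwise falls back to the container's reported duration. A minimal sketch of that fallback, assuming illustrative names rather than the real initializer:

    // Sketch only; the real code assigns self.duration inside the three init branches above.
    func stickerDuration(frameCount: Int, frameRate: Int, reportedDuration: Double) -> Double {
        if frameCount > 0 && frameRate > 0 {
            // Cached frames: derive the duration from the frame count and rate.
            return Double(frameCount) / Double(frameRate)
        }
        // Direct video source (frameCount == 0): fall back to the container's reported duration.
        return reportedDuration
    }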

View File

@ -43,6 +43,7 @@ extern int FFMpegCodecIdVP9;
- (NSArray<NSNumber *> *)streamIndicesForType:(FFMpegAVFormatStreamType)type;
- (bool)isAttachedPicAtStreamIndex:(int32_t)streamIndex;
- (int)codecIdAtStreamIndex:(int32_t)streamIndex;
- (double)duration;
- (int64_t)durationAtStreamIndex:(int32_t)streamIndex;
- (bool)codecParamsAtStreamIndex:(int32_t)streamIndex toContext:(FFMpegAVCodecContext *)context;
- (FFMpegFpsAndTimebase)fpsAndTimebaseForStreamIndex:(int32_t)streamIndex defaultTimeBase:(CMTime)defaultTimeBase;

View File

@ -99,6 +99,10 @@ int FFMpegCodecIdVP9 = AV_CODEC_ID_VP9;
return _impl->streams[streamIndex]->codecpar->codec_id;
}
- (double)duration {
return (double)_impl->duration / AV_TIME_BASE;
}
- (int64_t)durationAtStreamIndex:(int32_t)streamIndex {
return _impl->streams[streamIndex]->duration;
}
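Editor note: the two accessors use different units — `-duration` divides the format-level value by AV_TIME_BASE (microseconds per second) and returns seconds, while `-durationAtStreamIndex:` stays in the stream's own time base. A worked example with illustrative numbers:

    // 9,600,000 µs at the format level and 288 ticks at 1/30 s per tick both describe 9.6 seconds.
    let avTimeBase = 1_000_000.0
    let formatSeconds = 9_600_000.0 / avTimeBase   // what -duration returns, already in seconds
    let streamSeconds = 288.0 * (1.0 / 30.0)       // stream duration × stream time base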

View File

@ -69,6 +69,8 @@ public final class SoftwareVideoSource {
private var enqueuedFrames: [(MediaTrackFrame, CGFloat, CGFloat, Bool)] = []
private var hasReadToEnd: Bool = false
public private(set) var reportedDuration: CMTime = .invalid
public init(path: String, hintVP9: Bool, unpremultiplyAlpha: Bool) {
let _ = FFMpegMediaFrameSourceContextHelpers.registerFFMpegGlobals
@ -142,6 +144,8 @@ public final class SoftwareVideoSource {
}
}
self.reportedDuration = CMTime(seconds: avFormatContext.duration(), preferredTimescale: CMTimeScale(NSEC_PER_SEC))
self.videoStream = videoStream
if let videoStream = self.videoStream {

View File

@ -1107,6 +1107,77 @@ private final class DemoSheetContent: CombinedComponent {
)
var measuredTextHeight: CGFloat?
var text: String
switch component.subject {
case .moreUpload:
text = strings.Premium_UploadSizeInfo
case .fasterDownload:
text = strings.Premium_FasterSpeedStandaloneInfo
case .voiceToText:
text = strings.Premium_VoiceToTextStandaloneInfo
case .noAds:
text = strings.Premium_NoAdsStandaloneInfo
case .uniqueReactions:
text = strings.Premium_InfiniteReactionsInfo
case .premiumStickers:
text = strings.Premium_StickersInfo
case .emojiStatus:
text = strings.Premium_EmojiStatusInfo
case .advancedChatManagement:
text = strings.Premium_ChatManagementStandaloneInfo
case .profileBadge:
text = strings.Premium_BadgeInfo
case .animatedUserpics:
text = strings.Premium_AvatarInfo
case .appIcons:
text = strings.Premium_AppIconStandaloneInfo
case .animatedEmoji:
text = strings.Premium_AnimatedEmojiStandaloneInfo
case .translation:
text = strings.Premium_TranslationStandaloneInfo
case .colors:
text = strings.Premium_ColorsInfo
case .wallpapers:
text = strings.Premium_WallpapersInfo
case .messageTags:
text = strings.Premium_MessageTagsInfo
case .lastSeen:
text = strings.Premium_LastSeenInfo
case .messagePrivacy:
text = strings.Premium_MessagePrivacyInfo
case .folderTags:
text = strings.Premium_FolderTagsStandaloneInfo
default:
text = ""
}
let textSideInset: CGFloat = 24.0
let textColor = UIColor.black
let textFont = Font.regular(17.0)
let boldTextFont = Font.semibold(17.0)
let markdownAttributes = MarkdownAttributes(
body: MarkdownAttributeSet(font: textFont, textColor: textColor),
bold: MarkdownAttributeSet(font: boldTextFont, textColor: textColor),
link: MarkdownAttributeSet(font: textFont, textColor: textColor),
linkAttribute: { _ in
return nil
}
)
let measureText = measureText.update(
component: MultilineTextComponent(
text: .markdown(text: text, attributes: markdownAttributes),
horizontalAlignment: .center,
maximumNumberOfLines: 0,
lineSpacing: 0.0
),
availableSize: CGSize(width: context.availableSize.width - textSideInset * 2.0, height: context.availableSize.height),
transition: .immediate
)
context.add(measureText
.position(CGPoint(x: 0.0, y: 1000.0))
)
measuredTextHeight = measureText.size.height
let buttonText: String
var buttonAnimationName: String?
@ -1119,7 +1190,6 @@ private final class DemoSheetContent: CombinedComponent {
case let .gift(price):
buttonText = strings.Premium_Gift_GiftSubscription(price ?? "").string
case .other:
var text: String
switch component.subject {
case .fasterDownload:
buttonText = strings.Premium_FasterSpeed_Proceed
@ -1161,77 +1231,6 @@ private final class DemoSheetContent: CombinedComponent {
default:
buttonText = strings.Common_OK
}
switch component.subject {
case .moreUpload:
text = strings.Premium_UploadSizeInfo
case .fasterDownload:
text = strings.Premium_FasterSpeedStandaloneInfo
case .voiceToText:
text = strings.Premium_VoiceToTextStandaloneInfo
case .noAds:
text = strings.Premium_NoAdsStandaloneInfo
case .uniqueReactions:
text = strings.Premium_InfiniteReactionsInfo
case .premiumStickers:
text = strings.Premium_StickersInfo
case .emojiStatus:
text = strings.Premium_EmojiStatusInfo
case .advancedChatManagement:
text = strings.Premium_ChatManagementStandaloneInfo
case .profileBadge:
text = strings.Premium_BadgeInfo
case .animatedUserpics:
text = strings.Premium_AvatarInfo
case .appIcons:
text = strings.Premium_AppIconStandaloneInfo
case .animatedEmoji:
text = strings.Premium_AnimatedEmojiStandaloneInfo
case .translation:
text = strings.Premium_TranslationStandaloneInfo
case .colors:
text = strings.Premium_ColorsInfo
case .wallpapers:
text = strings.Premium_WallpapersInfo
case .messageTags:
text = strings.Premium_MessageTagsInfo
case .lastSeen:
text = strings.Premium_LastSeenInfo
case .messagePrivacy:
text = strings.Premium_MessagePrivacyInfo
case .folderTags:
text = strings.Premium_FolderTagsStandaloneInfo
default:
text = ""
}
let textSideInset: CGFloat = 24.0
let textColor = UIColor.black
let textFont = Font.regular(17.0)
let boldTextFont = Font.semibold(17.0)
let markdownAttributes = MarkdownAttributes(
body: MarkdownAttributeSet(font: textFont, textColor: textColor),
bold: MarkdownAttributeSet(font: boldTextFont, textColor: textColor),
link: MarkdownAttributeSet(font: textFont, textColor: textColor),
linkAttribute: { _ in
return nil
}
)
let measureText = measureText.update(
component: MultilineTextComponent(
text: .markdown(text: text, attributes: markdownAttributes),
horizontalAlignment: .center,
maximumNumberOfLines: 0,
lineSpacing: 0.0
),
availableSize: CGSize(width: context.availableSize.width - textSideInset * 2.0, height: context.availableSize.height),
transition: .immediate
)
context.add(measureText
.position(CGPoint(x: 0.0, y: 1000.0))
)
measuredTextHeight = measureText.size.height
}
}

View File

@ -90,6 +90,9 @@ func _internal_uploadSticker(account: Account, peer: Peer, resource: MediaResour
case let .messageMediaDocument(_, document, _, _):
if let document = document, let file = telegramMediaFileFromApiDocument(document), let uploadedResource = file.resource as? CloudDocumentMediaResource {
account.postbox.mediaBox.copyResourceData(from: resource.id, to: uploadedResource.id, synchronous: true)
if let thumbnail, let previewRepresentation = file.previewRepresentations.first(where: { $0.dimensions == PixelDimensions(width: 320, height: 320) }) {
account.postbox.mediaBox.copyResourceData(from: thumbnail.id, to: previewRepresentation.resource.id, synchronous: true)
}
return .single(.complete(uploadedResource, file.mimeType))
}
default:
@ -338,6 +341,29 @@ public enum AddStickerToSetError {
case generic
}
private func revalidatedSticker<T>(account: Account, sticker: FileMediaReference, signal: @escaping (CloudDocumentMediaResource) -> Signal<T, MTRpcError>) -> Signal<T, MTRpcError> {
guard let resource = sticker.media.resource as? CloudDocumentMediaResource else {
return .fail(MTRpcError(errorCode: 500, errorDescription: "Internal"))
}
return signal(resource)
|> `catch` { error -> Signal<T, MTRpcError> in
if error.errorDescription == "FILE_REFERENCE_EXPIRED" {
return revalidateMediaResourceReference(accountPeerId: account.peerId, postbox: account.postbox, network: account.network, revalidationContext: account.mediaReferenceRevalidationContext, info: TelegramCloudMediaResourceFetchInfo(reference: sticker.resourceReference(resource), preferBackgroundReferenceRevalidation: false, continueInBackground: false), resource: resource)
|> mapError { _ -> MTRpcError in
return MTRpcError(errorCode: 500, errorDescription: "Internal")
}
|> mapToSignal { result -> Signal<T, MTRpcError> in
guard let resource = result.updatedResource as? CloudDocumentMediaResource else {
return .fail(MTRpcError(errorCode: 500, errorDescription: "Internal"))
}
return signal(resource)
}
} else {
return .fail(error)
}
}
}
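Editor note: the helper above runs a sticker RPC once and, if the server answers FILE_REFERENCE_EXPIRED, revalidates the media reference and retries a single time; the reorder, delete, and replace calls below are all rewritten to go through it. A simplified sketch of the same retry-once pattern using async/await instead of SwiftSignalKit (all names here are illustrative, not the real API):

    enum StickerRPCError: Error {
        case fileReferenceExpired
        case other
    }

    // perform runs the RPC with a given file reference; revalidate fetches a fresh reference.
    func withRevalidatedReference<T>(
        reference: Data,
        revalidate: () async throws -> Data,
        perform: (Data) async throws -> T
    ) async throws -> T {
        do {
            return try await perform(reference)
        } catch StickerRPCError.fileReferenceExpired {
            let fresh = try await revalidate()   // reference expired: fetch a new one, once
            return try await perform(fresh)      // and retry the same request
        }
    }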
func _internal_addStickerToStickerSet(account: Account, packReference: StickerPackReference, sticker: ImportSticker) -> Signal<Bool, AddStickerToSetError> {
let uploadSticker: Signal<UploadStickerStatus, AddStickerToSetError>
if let resource = sticker.resource.resource as? CloudDocumentMediaResource {
@ -363,7 +389,6 @@ func _internal_addStickerToStickerSet(account: Account, packReference: StickerPa
flags |= (1 << 1)
}
let inputSticker: Api.InputStickerSetItem = .inputStickerSetItem(flags: flags, document: .inputDocument(id: resource.fileId, accessHash: resource.accessHash, fileReference: Buffer(data: resource.fileReference ?? Data())), emoji: sticker.emojis.joined(), maskCoords: nil, keywords: sticker.keywords)
return account.network.request(Api.functions.stickers.addStickerToSet(stickerset: packReference.apiInputStickerSet, sticker: inputSticker))
|> `catch` { error -> Signal<Api.messages.StickerSet, MTRpcError> in
if error.errorDescription == "FILE_REFERENCE_EXPIRED" {
@ -408,10 +433,9 @@ public enum ReorderStickerError {
}
func _internal_reorderSticker(account: Account, sticker: FileMediaReference, position: Int) -> Signal<Never, ReorderStickerError> {
guard let resource = sticker.media.resource as? CloudDocumentMediaResource else {
return .fail(.generic)
}
return account.network.request(Api.functions.stickers.changeStickerPosition(sticker: .inputDocument(id: resource.fileId, accessHash: resource.accessHash, fileReference: Buffer(data: resource.fileReference)), position: Int32(position)))
return revalidatedSticker(account: account, sticker: sticker, signal: { resource in
return account.network.request(Api.functions.stickers.changeStickerPosition(sticker: .inputDocument(id: resource.fileId, accessHash: resource.accessHash, fileReference: Buffer(data: resource.fileReference)), position: Int32(position)))
})
|> mapError { error -> ReorderStickerError in
return .generic
}
@ -436,10 +460,9 @@ public enum DeleteStickerError {
}
func _internal_deleteStickerFromStickerSet(account: Account, sticker: FileMediaReference) -> Signal<Never, DeleteStickerError> {
guard let resource = sticker.media.resource as? CloudDocumentMediaResource else {
return .fail(.generic)
}
return account.network.request(Api.functions.stickers.removeStickerFromSet(sticker: .inputDocument(id: resource.fileId, accessHash: resource.accessHash, fileReference: Buffer(data: resource.fileReference))))
return revalidatedSticker(account: account, sticker: sticker, signal: { resource in
return account.network.request(Api.functions.stickers.removeStickerFromSet(sticker: .inputDocument(id: resource.fileId, accessHash: resource.accessHash, fileReference: Buffer(data: resource.fileReference))))
})
|> mapError { error -> DeleteStickerError in
return .generic
}
@ -463,10 +486,6 @@ public enum ReplaceStickerError {
}
func _internal_replaceSticker(account: Account, previousSticker: FileMediaReference, sticker: ImportSticker) -> Signal<Never, ReplaceStickerError> {
guard let previousResource = previousSticker.media.resource as? CloudDocumentMediaResource else {
return .fail(.generic)
}
let uploadSticker: Signal<UploadStickerStatus, ReplaceStickerError>
if let resource = sticker.resource.resource as? CloudDocumentMediaResource {
uploadSticker = .single(.complete(resource, sticker.mimeType))
@ -485,14 +504,14 @@ func _internal_replaceSticker(account: Account, previousSticker: FileMediaRefere
guard case let .complete(resource, _) = uploadedSticker else {
return .complete()
}
var flags: Int32 = 0
if sticker.keywords.count > 0 {
flags |= (1 << 1)
}
let inputSticker: Api.InputStickerSetItem = .inputStickerSetItem(flags: flags, document: .inputDocument(id: resource.fileId, accessHash: resource.accessHash, fileReference: Buffer(data: resource.fileReference ?? Data())), emoji: sticker.emojis.joined(), maskCoords: nil, keywords: sticker.keywords)
return account.network.request(Api.functions.stickers.replaceSticker(sticker: .inputDocument(id: previousResource.fileId, accessHash: previousResource.accessHash, fileReference: Buffer(data: previousResource.fileReference ?? Data())), newSticker: inputSticker))
return revalidatedSticker(account: account, sticker: previousSticker, signal: { previousResource in
return account.network.request(Api.functions.stickers.replaceSticker(sticker: .inputDocument(id: previousResource.fileId, accessHash: previousResource.accessHash, fileReference: Buffer(data: previousResource.fileReference)), newSticker: inputSticker))
})
|> mapError { error -> ReplaceStickerError in
return .generic
}

View File

@ -130,11 +130,9 @@ public final class MediaEditor {
private let clock = CMClockGetHostTimeClock()
private var player: AVPlayer? {
didSet {
}
}
private var stickerEntity: MediaEditorComposerStickerEntity?
private var player: AVPlayer?
private var playerAudioMix: AVMutableAudioMix?
private var additionalPlayer: AVPlayer?
@ -209,6 +207,9 @@ public final class MediaEditor {
}
public var resultIsVideo: Bool {
if case let .sticker(file) = self.subject {
return file.isAnimatedSticker || file.isVideoSticker
}
return self.player != nil || self.audioPlayer != nil || self.additionalPlayer != nil || self.values.entities.contains(where: { $0.entity.isAnimated })
}
@ -262,7 +263,9 @@ public final class MediaEditor {
}
public var duration: Double? {
if let _ = self.player {
if let stickerEntity = self.stickerEntity {
return stickerEntity.totalDuration
} else if let _ = self.player {
if let trimRange = self.values.videoTrimRange {
return trimRange.upperBound - trimRange.lowerBound
} else {
@ -506,13 +509,22 @@ public final class MediaEditor {
let image: UIImage?
let nightImage: UIImage?
let player: AVPlayer?
let stickerEntity: MediaEditorComposerStickerEntity?
let playerIsReference: Bool
let gradientColors: GradientColors
init(image: UIImage? = nil, nightImage: UIImage? = nil, player: AVPlayer? = nil, playerIsReference: Bool = false, gradientColors: GradientColors) {
init(
image: UIImage? = nil,
nightImage: UIImage? = nil,
player: AVPlayer? = nil,
stickerEntity: MediaEditorComposerStickerEntity? = nil,
playerIsReference: Bool = false,
gradientColors: GradientColors
) {
self.image = image
self.nightImage = nightImage
self.player = player
self.stickerEntity = stickerEntity
self.playerIsReference = playerIsReference
self.gradientColors = gradientColors
}
@ -661,16 +673,23 @@ public final class MediaEditor {
)
}
}
case .sticker:
let image = generateImage(CGSize(width: 1080, height: 1920), contextGenerator: { size, context in
context.clear(CGRect(origin: .zero, size: size))
}, opaque: false, scale: 1.0)
case let .sticker(file):
let entity = MediaEditorComposerStickerEntity(
postbox: self.context.account.postbox,
content: .file(file),
position: .zero,
scale: 1.0,
rotation: 0.0,
baseSize: CGSize(width: 512.0, height: 512.0),
mirrored: false,
colorSpace: CGColorSpaceCreateDeviceRGB(),
tintColor: nil,
isStatic: false,
highRes: true
)
textureSource = .single(
TextureSourceResult(
image: image,
nightImage: nil,
player: nil,
playerIsReference: false,
stickerEntity: entity,
gradientColors: GradientColors(top: .clear, bottom: .clear)
)
)
@ -693,7 +712,7 @@ public final class MediaEditor {
self.player = textureSourceResult.player
self.playerPromise.set(.single(player))
if let image = textureSourceResult.image {
if self.values.nightTheme, let nightImage = textureSourceResult.nightImage {
textureSource.setMainInput(.image(nightImage))
@ -732,6 +751,11 @@ public final class MediaEditor {
if let additionalPlayer, let playerItem = additionalPlayer.currentItem {
textureSource.setAdditionalInput(.video(playerItem))
}
if let entity = textureSourceResult.stickerEntity {
textureSource.setMainInput(.entity(entity))
}
self.stickerEntity = textureSourceResult.stickerEntity
self.renderer.textureSource = textureSource
switch self.mode {

View File

@ -54,6 +54,7 @@ final class MediaEditorComposer {
enum Input {
case texture(MTLTexture, CMTime, Bool)
case videoBuffer(VideoPixelBuffer)
case ciImage(CIImage, CMTime)
var timestamp: CMTime {
switch self {
@ -61,30 +62,33 @@ final class MediaEditorComposer {
return timestamp
case let .videoBuffer(videoBuffer):
return videoBuffer.timestamp
case let .ciImage(_, timestamp):
return timestamp
}
}
var rendererInput: MediaEditorRenderer.Input {
switch self {
case let .texture(texture, time, hasTransparency):
return .texture(texture, time, hasTransparency)
case let .texture(texture, timestamp, hasTransparency):
return .texture(texture, timestamp, hasTransparency)
case let .videoBuffer(videoBuffer):
return .videoBuffer(videoBuffer)
case let .ciImage(image, timestamp):
return .ciImage(image, timestamp)
}
}
}
let device: MTLDevice?
private let colorSpace: CGColorSpace
let colorSpace: CGColorSpace
let ciContext: CIContext?
private var textureCache: CVMetalTextureCache?
private let values: MediaEditorValues
private let dimensions: CGSize
private let outputDimensions: CGSize
private let textScale: CGFloat
private let ciContext: CIContext?
private var textureCache: CVMetalTextureCache?
private let renderer = MediaEditorRenderer()
private let renderChain = MediaEditorRenderChain()

View File

@ -153,7 +153,7 @@ private class MediaEditorComposerStaticEntity: MediaEditorComposerEntity {
}
}
private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
final class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
public enum Content {
case file(TelegramMediaFile)
case video(TelegramMediaFile)
@ -203,7 +203,7 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
var imagePixelBuffer: CVPixelBuffer?
let imagePromise = Promise<UIImage>()
init(postbox: Postbox, content: Content, position: CGPoint, scale: CGFloat, rotation: CGFloat, baseSize: CGSize, mirrored: Bool, colorSpace: CGColorSpace, tintColor: UIColor?, isStatic: Bool) {
init(postbox: Postbox, content: Content, position: CGPoint, scale: CGFloat, rotation: CGFloat, baseSize: CGSize, mirrored: Bool, colorSpace: CGColorSpace, tintColor: UIColor?, isStatic: Bool, highRes: Bool = false) {
self.postbox = postbox
self.content = content
self.position = position
@ -226,7 +226,9 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
let pathPrefix = postbox.mediaBox.shortLivedResourceCachePathPrefix(file.resource.id)
if let source = self.source {
let fitToSize: CGSize
if self.isStatic {
if highRes {
fitToSize = CGSize(width: 512, height: 512)
} else if self.isStatic {
fitToSize = CGSize(width: 768, height: 768)
} else {
fitToSize = CGSize(width: 384, height: 384)
@ -245,8 +247,13 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
frameSource.syncWith { frameSource in
strongSelf.frameCount = frameSource.frameCount
strongSelf.frameRate = frameSource.frameRate
let duration = Double(frameSource.frameCount) / Double(frameSource.frameRate)
let duration: Double
if frameSource.frameCount > 0 {
duration = Double(frameSource.frameCount) / Double(frameSource.frameRate)
} else {
duration = frameSource.duration
}
strongSelf.totalDuration = duration
strongSelf.durationPromise.set(.single(duration))
}
@ -489,7 +496,7 @@ private class MediaEditorComposerStickerEntity: MediaEditorComposerEntity {
}
completion(strongSelf.image)
} else {
completion(nil)
completion(strongSelf.image)
}
}
}
@ -595,7 +602,6 @@ private func render(context: CIContext, width: Int, height: Int, bytesPerRow: In
let calculatedBytesPerRow = (4 * Int(width) + 31) & (~31)
//assert(bytesPerRow == calculatedBytesPerRow)
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
let dest = CVPixelBufferGetBaseAddress(pixelBuffer)

View File

@ -61,6 +61,7 @@ final class MediaEditorRenderer {
enum Input {
case texture(MTLTexture, CMTime, Bool)
case videoBuffer(VideoPixelBuffer)
case ciImage(CIImage, CMTime)
var timestamp: CMTime {
switch self {
@ -68,6 +69,8 @@ final class MediaEditorRenderer {
return timestamp
case let .videoBuffer(videoBuffer):
return videoBuffer.timestamp
case let .ciImage(_, timestamp):
return timestamp
}
}
}
@ -80,6 +83,7 @@ final class MediaEditorRenderer {
private var renderPasses: [RenderPass] = []
private let ciInputPass = CIInputPass()
private let mainVideoInputPass = VideoInputPass()
private let additionalVideoInputPass = VideoInputPass()
let videoFinishPass = VideoFinishPass()
@ -150,6 +154,7 @@ final class MediaEditorRenderer {
self.commandQueue = device.makeCommandQueue()
self.commandQueue?.label = "Media Editor Command Queue"
self.ciInputPass.setup(device: device, library: library)
self.mainVideoInputPass.setup(device: device, library: library)
self.additionalVideoInputPass.setup(device: device, library: library)
self.videoFinishPass.setup(device: device, library: library)
@ -190,8 +195,14 @@ final class MediaEditorRenderer {
case let .texture(texture, _, hasTransparency):
return (texture, hasTransparency)
case let .videoBuffer(videoBuffer):
if let buffer = videoInputPass.processPixelBuffer(videoBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) {
return (buffer, false)
if let texture = videoInputPass.processPixelBuffer(videoBuffer, textureCache: textureCache, device: device, commandBuffer: commandBuffer) {
return (texture, false)
} else {
return nil
}
case let .ciImage(image, _):
if let texture = self.ciInputPass.processCIImage(image, device: device, commandBuffer: commandBuffer) {
return (texture, true)
} else {
return nil
}

View File

@ -48,6 +48,7 @@ public final class MediaEditorVideoExport {
public enum Subject {
case image(image: UIImage)
case video(asset: AVAsset, isStory: Bool)
case sticker(file: TelegramMediaFile)
}
public struct Configuration {
@ -198,7 +199,10 @@ public final class MediaEditorVideoExport {
private var mainComposeFramerate: Float?
private var audioOutput: AVAssetReaderOutput?
private var stickerEntity: MediaEditorComposerStickerEntity?
private let stickerSemaphore = DispatchSemaphore(value: 0)
private var writer: MediaEditorVideoExportWriter?
private var composer: MediaEditorComposer?
@ -218,7 +222,7 @@ public final class MediaEditorVideoExport {
private var startTimestamp = CACurrentMediaTime()
private let semaphore = DispatchSemaphore(value: 0)
private let composerSemaphore = DispatchSemaphore(value: 0)
public init(postbox: Postbox, subject: Subject, configuration: Configuration, outputPath: String, textScale: CGFloat = 1.0) {
self.postbox = postbox
@ -249,6 +253,7 @@ public final class MediaEditorVideoExport {
enum Input {
case image(UIImage)
case video(AVAsset)
case sticker(TelegramMediaFile)
var isVideo: Bool {
if case .video = self {
@ -283,6 +288,8 @@ public final class MediaEditorVideoExport {
isStory = isStoryValue
case let .image(image):
mainInput = .image(image)
case let .sticker(file):
mainInput = .sticker(file)
}
let duration: CMTime
@ -464,7 +471,7 @@ public final class MediaEditorVideoExport {
self.reader?.timeRange = readerRange
}
}
if self.configuration.isSticker {
self.writer = MediaEditorVideoFFMpegWriter()
} else {
@ -476,6 +483,10 @@ public final class MediaEditorVideoExport {
}
writer.setup(configuration: self.configuration, outputPath: self.outputPath)
self.setupComposer()
if case let .sticker(file) = main, let composer = self.composer {
self.stickerEntity = MediaEditorComposerStickerEntity(postbox: self.postbox, content: .file(file), position: .zero, scale: 1.0, rotation: 0.0, baseSize: CGSize(width: 512.0, height: 512.0), mirrored: false, colorSpace: composer.colorSpace, tintColor: nil, isStatic: false, highRes: true)
}
if let reader {
let colorProperties: [String: Any] = [
@ -657,6 +668,24 @@ public final class MediaEditorVideoExport {
writer.markVideoAsFinished()
return false
}
if let stickerEntity = self.stickerEntity, let ciContext = composer.ciContext {
let imageArguments = self.imageArguments
stickerEntity.image(for: timestamp, frameRate: Float(imageArguments?.frameRate ?? 30.0), context: ciContext, completion: { image in
if let image {
mainInput = .ciImage(image, imageArguments?.position ?? .zero)
}
self.stickerSemaphore.signal()
})
self.stickerSemaphore.wait()
if !updatedProgress, let imageArguments = self.imageArguments, let duration = self.durationValue {
let progress = imageArguments.position.seconds / duration.seconds
self.statusValue = .progress(Float(progress))
updatedProgress = true
}
}
composer.process(
main: mainInput!,
additional: additionalInput,
@ -671,10 +700,10 @@ public final class MediaEditorVideoExport {
} else {
appendFailed = true
}
self.semaphore.signal()
self.composerSemaphore.signal()
}
)
self.semaphore.wait()
self.composerSemaphore.wait()
if let imageArguments = self.imageArguments, let duration = self.durationValue {
let position = imageArguments.position + CMTime(value: 1, timescale: Int32(imageArguments.frameRate))
@ -736,8 +765,13 @@ public final class MediaEditorVideoExport {
return
}
if case .image = self.subject, self.additionalVideoOutput == nil {
self.imageArguments = (Double(self.configuration.frameRate), CMTime(value: 0, timescale: Int32(self.configuration.frameRate)))
if self.additionalVideoOutput == nil {
switch self.subject {
case .image, .sticker:
self.imageArguments = (Double(self.configuration.frameRate), CMTime(value: 0, timescale: Int32(self.configuration.frameRate)))
default:
break
}
}
self.internalStatus = .exporting
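Editor note: the sticker export path above renders each frame asynchronously via MediaEditorComposerStickerEntity but runs inside a synchronous writer loop, so it blocks on the dedicated stickerSemaphore until the frame arrives, mirroring the existing composerSemaphore. A minimal sketch of that bridging, assuming the producer completes on a different queue than the caller:

    import CoreImage
    import Dispatch

    // Blocks the (background) export loop until the asynchronously produced frame is delivered.
    func waitForFrame(produce: (@escaping (CIImage?) -> Void) -> Void) -> CIImage? {
        let semaphore = DispatchSemaphore(value: 0)
        var frame: CIImage?
        produce { image in
            frame = image
            semaphore.signal()
        }
        semaphore.wait()   // do not call this on the queue the producer completes on
        return frame
    }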

View File

@ -3,11 +3,13 @@ import AVFoundation
import Metal
import MetalKit
import ImageTransparency
import SwiftSignalKit
final class UniversalTextureSource: TextureSource {
enum Input {
case image(UIImage)
case video(AVPlayerItem)
case entity(MediaEditorComposerEntity)
fileprivate func createContext(renderTarget: RenderTarget, queue: DispatchQueue, additional: Bool) -> InputContext {
switch self {
@ -15,6 +17,8 @@ final class UniversalTextureSource: TextureSource {
return ImageInputContext(input: self, renderTarget: renderTarget, queue: queue)
case .video:
return VideoInputContext(input: self, renderTarget: renderTarget, queue: queue, additional: additional)
case .entity:
return EntityInputContext(input: self, renderTarget: renderTarget, queue: queue)
}
}
}
@ -76,9 +80,15 @@ final class UniversalTextureSource: TextureSource {
}
private var previousAdditionalOutput: MediaEditorRenderer.Input?
private var readyForMoreData = Atomic<Bool>(value: true)
private func update(forced: Bool) {
let time = CACurrentMediaTime()
var fps: Int = 60
if self.mainInputContext?.useAsyncOutput == true {
fps = 30
}
let needsDisplayLink = (self.mainInputContext?.needsDisplayLink ?? false) || (self.additionalInputContext?.needsDisplayLink ?? false)
if needsDisplayLink {
if self.displayLink == nil {
@ -87,7 +97,7 @@ final class UniversalTextureSource: TextureSource {
self.update(forced: self.forceUpdates)
}
}), selector: #selector(DisplayLinkTarget.handleDisplayLinkUpdate(sender:)))
displayLink.preferredFramesPerSecond = 60
displayLink.preferredFramesPerSecond = fps
displayLink.add(to: .main, forMode: .common)
self.displayLink = displayLink
}
@ -102,19 +112,33 @@ final class UniversalTextureSource: TextureSource {
return
}
let main = self.mainInputContext?.output(time: time)
var additional = self.additionalInputContext?.output(time: time)
if let additional {
self.previousAdditionalOutput = additional
} else if self.additionalInputContext != nil {
additional = self.previousAdditionalOutput
if let mainInputContext = self.mainInputContext, mainInputContext.useAsyncOutput {
guard self.readyForMoreData.with({ $0 }) else {
return
}
let _ = self.readyForMoreData.swap(false)
mainInputContext.asyncOutput(time: time, completion: { [weak self] main in
guard let self else {
return
}
if let main {
self.output?.consume(main: main, additional: nil, render: true)
}
let _ = self.readyForMoreData.swap(true)
})
} else {
let main = self.mainInputContext?.output(time: time)
var additional = self.additionalInputContext?.output(time: time)
if let additional {
self.previousAdditionalOutput = additional
} else if self.additionalInputContext != nil {
additional = self.previousAdditionalOutput
}
guard let main else {
return
}
self.output?.consume(main: main, additional: additional, render: true)
}
guard let main else {
return
}
self.output?.consume(main: main, additional: additional, render: true)
}
func connect(to consumer: MediaEditorRenderer) {
@ -138,18 +162,31 @@ final class UniversalTextureSource: TextureSource {
}
}
private protocol InputContext {
protocol InputContext {
typealias Input = UniversalTextureSource.Input
typealias Output = MediaEditorRenderer.Input
var input: Input { get }
var useAsyncOutput: Bool { get }
func output(time: Double) -> Output?
func asyncOutput(time: Double, completion: @escaping (Output?) -> Void)
var needsDisplayLink: Bool { get }
func invalidate()
}
extension InputContext {
var useAsyncOutput: Bool {
return false
}
func asyncOutput(time: Double, completion: @escaping (Output?) -> Void) {
completion(self.output(time: time))
}
}
private class ImageInputContext: InputContext {
fileprivate var input: Input
private var texture: MTLTexture?
@ -248,3 +285,59 @@ private class VideoInputContext: NSObject, InputContext, AVPlayerItemOutputPullD
return true
}
}
final class EntityInputContext: NSObject, InputContext, AVPlayerItemOutputPullDelegate {
internal var input: Input
private var textureRotation: TextureRotation = .rotate0Degrees
var entity: MediaEditorComposerEntity {
guard case let .entity(entity) = self.input else {
fatalError()
}
return entity
}
private let ciContext: CIContext
private let startTime: Double
init(input: Input, renderTarget: RenderTarget, queue: DispatchQueue) {
guard case .entity = input else {
fatalError()
}
self.input = input
self.ciContext = CIContext(options: [.workingColorSpace : CGColorSpaceCreateDeviceRGB()])
self.startTime = CACurrentMediaTime()
super.init()
self.textureRotation = .rotate0Degrees
}
func output(time: Double) -> Output? {
return nil
}
func asyncOutput(time: Double, completion: @escaping (Output?) -> Void) {
let deltaTime = max(0.0, time - self.startTime)
let timestamp = CMTime(seconds: deltaTime, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
self.entity.image(for: timestamp, frameRate: 30, context: self.ciContext, completion: { image in
Queue.mainQueue().async {
completion(image.flatMap { .ciImage($0, timestamp) })
}
})
}
func invalidate() {
}
var needsDisplayLink: Bool {
if let entity = self.entity as? MediaEditorComposerStickerEntity, entity.isAnimated {
return true
}
return false
}
var useAsyncOutput: Bool {
return true
}
}
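Editor note: because EntityInputContext delivers frames asynchronously (useAsyncOutput), UniversalTextureSource now drops display-link ticks while a frame is still in flight (the readyForMoreData flag) and lowers the display link to 30 fps for async inputs. A minimal sketch of that gate, with illustrative names:

    import Foundation

    final class FrameGate {
        private let lock = NSLock()
        private var busy = false

        // Called from the display link; returns false while the previous frame is still rendering.
        func tryBegin() -> Bool {
            lock.lock(); defer { lock.unlock() }
            if busy { return false }
            busy = true
            return true
        }

        // Called from the async completion once the frame has been consumed.
        func end() {
            lock.lock(); defer { lock.unlock() }
            busy = false
        }
    }

    // Usage sketch: if gate.tryBegin() { renderEntityFrame { output in consume(output); gate.end() } }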

View File

@ -2,6 +2,7 @@ import Foundation
import AVFoundation
import Metal
import MetalKit
import CoreImage
final class VideoInputPass: DefaultRenderPass {
private var cachedTexture: MTLTexture?
@ -84,3 +85,44 @@ final class VideoInputPass: DefaultRenderPass {
return self.cachedTexture
}
}
final class CIInputPass: RenderPass {
private var context: CIContext?
func setup(device: MTLDevice, library: MTLLibrary) {
self.context = CIContext(mtlDevice: device, options: [.workingColorSpace : CGColorSpaceCreateDeviceRGB()])
}
func process(input: MTLTexture, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? {
return nil
}
private var outputTexture: MTLTexture?
func processCIImage(_ ciImage: CIImage, device: MTLDevice, commandBuffer: MTLCommandBuffer) -> MTLTexture? {
if self.outputTexture == nil {
let textureDescriptor = MTLTextureDescriptor()
textureDescriptor.textureType = .type2D
textureDescriptor.width = Int(ciImage.extent.width)
textureDescriptor.height = Int(ciImage.extent.height)
textureDescriptor.pixelFormat = .bgra8Unorm
textureDescriptor.storageMode = .private
textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget]
guard let texture = device.makeTexture(descriptor: textureDescriptor) else {
return nil
}
self.outputTexture = texture
texture.label = "outlineOutputTexture"
}
guard let outputTexture = self.outputTexture, let context = self.context else {
return nil
}
let transformedImage = ciImage.transformed(by: CGAffineTransformMakeScale(1.0, -1.0).translatedBy(x: 0.0, y: -ciImage.extent.height))
let renderDestination = CIRenderDestination(mtlTexture: outputTexture, commandBuffer: commandBuffer)
_ = try? context.startTask(toRender: transformedImage, to: renderDestination)
return outputTexture
}
}
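Editor note: CIInputPass lazily creates a private bgra8 texture sized from the first image's extent and encodes a CIRenderDestination task into the caller's command buffer; the scale(1, -1) plus translate flips the image because Core Image uses a bottom-left origin while Metal textures are top-left. Since the output texture is reused, subsequent frames are expected to share the same dimensions. A hedged usage sketch (the driver below is illustrative; in the editor the renderer owns the command queue):

    import CoreImage
    import Metal

    // pass.setup(device:library:) must already have run before this is called.
    func renderStickerFrame(_ frame: CIImage, pass: CIInputPass,
                            device: MTLDevice, queue: MTLCommandQueue) -> MTLTexture? {
        guard let commandBuffer = queue.makeCommandBuffer() else { return nil }
        let texture = pass.processCIImage(frame, device: device, commandBuffer: commandBuffer)
        commandBuffer.commit()            // the CIRenderDestination work executes with this buffer
        commandBuffer.waitUntilCompleted()
        return texture
    }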

View File

@ -2936,6 +2936,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
mediaEntity.position = mediaEntity.position.offsetBy(dx: initialValues.cropOffset.x, dy: initialValues.cropOffset.y)
mediaEntity.rotation = mediaEntity.rotation + initialValues.cropRotation
mediaEntity.scale = mediaEntity.scale * initialValues.cropScale
} else if case .sticker = subject {
mediaEntity.scale = mediaEntity.scale * 0.97
}
}
@ -3087,13 +3089,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.readyValue.set(.single(true))
})
})
} else if case let .sticker(sticker, emoji) = effectiveSubject {
} else if case let .sticker(_, emoji) = effectiveSubject {
controller.stickerSelectedEmoji = emoji
let stickerEntity = DrawingStickerEntity(content: .file(.standalone(media: sticker), .sticker))
stickerEntity.referenceDrawingSize = storyDimensions
stickerEntity.scale = 4.0 * 0.97
stickerEntity.position = CGPoint(x: storyDimensions.width / 2.0, y: storyDimensions.height / 2.0)
self.entitiesView.add(stickerEntity, announce: false)
}
self.gradientColorsDisposable = mediaEditor.gradientColors.start(next: { [weak self] colors in
@ -6394,12 +6391,25 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
private func effectiveStickerEmoji() -> [String] {
let filtered = self.stickerSelectedEmoji.filter { !$0.isEmpty }
guard !filtered.isEmpty else {
for entity in self.node.entitiesView.entities {
if let stickerEntity = entity as? DrawingStickerEntity, case let .file(file, _) = stickerEntity.content {
for attribute in file.media.attributes {
if case let .Sticker(displayText, _, _) = attribute {
return [displayText]
}
}
break
}
}
return ["🫥"]
}
return filtered
}
private func preferredStickerDuration() -> Double {
if let duration = self.node.mediaEditor?.duration, duration > 0.0 {
return min(3.0, duration)
}
var duration: Double = 3.0
var stickerDurations: [Double] = []
self.node.entitiesView.eachView { entityView in
@ -6412,7 +6422,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if !stickerDurations.isEmpty {
duration = stickerDurations.max() ?? 3.0
}
return duration
return min(3.0, duration)
}
private weak var stickerResultController: PeekController?
@ -6429,13 +6439,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
if mediaEditor.resultIsVideo {
isVideo = true
}
let imagesReady = ValuePromise<Bool>(false, ignoreRepeated: true)
Queue.concurrentDefaultQueue().async {
if !isVideo, let data = try? WebP.convert(toWebP: image, quality: 97.0) {
self.context.account.postbox.mediaBox.storeResourceData(isVideo ? thumbnailResource.id : resource.id, data: data)
self.context.account.postbox.mediaBox.storeResourceData(isVideo ? thumbnailResource.id : resource.id, data: data, synchronous: true)
}
if let thumbnailImage = generateScaledImage(image: image, size: CGSize(width: 320.0, height: 320.0), opaque: false, scale: 1.0), let data = try? WebP.convert(toWebP: thumbnailImage, quality: 90.0) {
self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: data)
self.context.account.postbox.mediaBox.storeResourceData(thumbnailResource.id, data: data, synchronous: true)
}
imagesReady.set(true)
}
var file = stickerFile(resource: resource, thumbnailResource: thumbnailResource, size: Int64(0), dimensions: PixelDimensions(image.size), duration: self.preferredStickerDuration(), isVideo: isVideo)
@ -6448,26 +6460,34 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return
}
if isVideo {
self.uploadSticker(file, action: .send)
} else {
self.stickerResultController?.disappeared = nil
self.completion(MediaEditorScreen.Result(
media: .sticker(file: file, emoji: self.effectiveStickerEmoji()),
mediaAreas: [],
caption: NSAttributedString(),
options: MediaEditorResultPrivacy(sendAsPeerId: nil, privacy: EngineStoryPrivacy(base: .everyone, additionallyIncludePeers: []), timeout: 0, isForwardingDisabled: false, pin: false),
stickers: [],
randomId: 0
), { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
let _ = (imagesReady.get()
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self else {
return
}
if isVideo {
self.uploadSticker(file, action: .send)
} else {
self.stickerResultController?.disappeared = nil
self.completion(MediaEditorScreen.Result(
media: .sticker(file: file, emoji: self.effectiveStickerEmoji()),
mediaAreas: [],
caption: NSAttributedString(),
options: MediaEditorResultPrivacy(sendAsPeerId: nil, privacy: EngineStoryPrivacy(base: .everyone, additionallyIncludePeers: []), timeout: 0, isForwardingDisabled: false, pin: false),
stickers: [],
randomId: 0
), { [weak self] finished in
self?.node.animateOut(finished: true, saveDraft: false, completion: { [weak self] in
self?.dismiss()
Queue.mainQueue().justDispatch {
finished()
}
})
})
})
}
}
})
f(.default)
})))
@ -6476,7 +6496,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
guard let self else {
return
}
self.uploadSticker(file, action: .addToFavorites)
let _ = (imagesReady.get()
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self else {
return
}
self.uploadSticker(file, action: .addToFavorites)
})
})))
menuItems.append(.action(ContextMenuActionItem(text: "Add to Sticker Set", icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/AddSticker"), color: theme.contextMenu.primaryColor) }, action: { [weak self] c, f in
guard let self else {
@ -6518,7 +6546,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
self.present(controller, in: .window(.root))
return false
} else {
self.uploadSticker(file, action: .addToStickerPack(pack: .id(id: pack.id.id, accessHash: pack.accessHash), title: pack.title))
let _ = (imagesReady.get()
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self else {
return
}
self.uploadSticker(file, action: .addToStickerPack(pack: .id(id: pack.id.id, accessHash: pack.accessHash), title: pack.title))
})
return true
}
}), false))
@ -6554,8 +6590,15 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
file = sticker
action = .update
}
self.uploadSticker(file, action: action)
let _ = (imagesReady.get()
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self else {
return
}
self.uploadSticker(file, action: action)
})
})))
case .addingToPack:
menuItems.append(.action(ContextMenuActionItem(text: "Add to Sticker Set", icon: { theme in generateTintedImage(image: UIImage(bundleImageName: "Chat/Context Menu/AddSticker"), color: theme.contextMenu.primaryColor) }, action: { [weak self] c, f in
@ -6563,7 +6606,16 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return
}
f(.default)
self.uploadSticker(file, action: .upload)
let _ = (imagesReady.get()
|> filter { $0 }
|> take(1)
|> deliverOnMainQueue).start(next: { [weak self] _ in
guard let self else {
return
}
self.uploadSticker(file, action: .upload)
})
})))
}
}
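Editor note: every send/upload path above now waits on the imagesReady promise, so the WebP payload and the 320×320 thumbnail are guaranteed to be in the media box before uploadSticker or the completion runs. A simplified equivalent of that gate using async/await instead of SwiftSignalKit's ValuePromise (illustrative only):

    import Foundation

    final class ReadyFlag {
        private let lock = NSLock()
        private var isReady = false
        private var waiters: [CheckedContinuation<Void, Never>] = []

        // Called once the resource data has been written synchronously to the media box.
        func markReady() {
            lock.lock()
            isReady = true
            let pending = waiters
            waiters.removeAll()
            lock.unlock()
            pending.forEach { $0.resume() }
        }

        // Each action (send, add to favorites, add to set, upload) awaits this before proceeding.
        func wait() async {
            await withCheckedContinuation { continuation in
                lock.lock()
                if isReady {
                    lock.unlock()
                    continuation.resume()
                } else {
                    waiters.append(continuation)
                    lock.unlock()
                }
            }
        }
    }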
@ -7028,11 +7080,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
return image.flatMap({ .single(.image(image: $0)) }) ?? .complete()
}
}
case .sticker:
let image = generateImage(CGSize(width: 1080, height: 1920), contextGenerator: { size, context in
context.clear(CGRect(origin: .zero, size: size))
}, opaque: false, scale: 1.0)!
exportSubject = .single(.image(image: image))
case let .sticker(file, _):
exportSubject = .single(.sticker(file: file))
}
let _ = exportSubject.start(next: { [weak self] exportSubject in
@ -7046,7 +7095,10 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
}
if isSticker {
duration = self.preferredStickerDuration()
values = values.withUpdatedMaskDrawing(maskDrawing: self.node.stickerMaskDrawingView?.drawingImage)
if case .sticker = subject {
} else {
values = values.withUpdatedMaskDrawing(maskDrawing: self.node.stickerMaskDrawingView?.drawingImage)
}
}
let configuration = recommendedVideoExportConfiguration(values: values, duration: duration, forceFullHd: true, frameRate: 60.0, isSticker: isSticker)
let outputPath = NSTemporaryDirectory() + "\(Int64.random(in: 0 ..< .max)).\(fileExtension)"

View File

@ -89,7 +89,7 @@ final class StickerCutoutOutlineView: UIView {
lineEmitterCell.contents = UIImage(named: "Media Editor/ParticleDot")?.cgImage
lineEmitterCell.lifetime = 2.2
lineEmitterCell.birthRate = 1700
lineEmitterCell.scale = 0.18
lineEmitterCell.scale = 0.185
lineEmitterCell.alphaSpeed = -0.4
self.outlineLayer.emitterCells = [lineEmitterCell]
@ -157,16 +157,13 @@ final class StickerCutoutOutlineView: UIView {
}
private func getPathFromMaskImage(_ image: CIImage, size: CGSize, values: MediaEditorValues) -> BezierPath? {
// let edges = image.applyingFilter("CILineOverlay", parameters: ["inputEdgeIntensity": 0.1])
guard let pixelBuffer = getEdgesBitmap(image) else {
let extendedImage = image.applyingFilter("CIMorphologyMaximum", parameters: ["inputRadius": 3.0])
guard let pixelBuffer = getEdgesBitmap(extendedImage) else {
return nil
}
let minSide = min(size.width, size.height)
let scaledImageSize = image.extent.size.aspectFilled(CGSize(width: minSide, height: minSide))
let contourImageSize = image.extent.size.aspectFilled(CGSize(width: 256.0, height: 256.0))
// var contour = findContours(pixelBuffer: pixelBuffer)
var contour = findEdgePoints(in: pixelBuffer)
guard !contour.isEmpty else {
@ -285,97 +282,6 @@ outerLoop: for y in 0..<height {
return Array(edgePath.map { $0.cgPoint })
}
private func findContours(pixelBuffer: CVPixelBuffer) -> [CGPoint] {
struct Point: Hashable {
let x: Int
let y: Int
var cgPoint: CGPoint {
return CGPoint(x: x, y: y)
}
}
var contours = [[Point]]()
CVPixelBufferLockBaseAddress(pixelBuffer, [])
defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, []) }
let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
let width = CVPixelBufferGetWidth(pixelBuffer)
let height = CVPixelBufferGetHeight(pixelBuffer)
let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
var visited: [Point: Bool] = [:]
func markVisited(_ point: Point) {
visited[point] = true
}
func getPixelIntensity(_ point: Point) -> UInt8 {
let pixelOffset = point.y * bytesPerRow + point.x
let pixelPtr = baseAddress?.advanced(by: pixelOffset)
return pixelPtr?.load(as: UInt8.self) ?? 0
}
func isBlackPixel(_ point: Point) -> Bool {
if point.x >= 0 && point.x < width && point.y >= 0 && point.y < height {
let value = getPixelIntensity(point)
return value < 225
} else {
return false
}
}
func traceContour(startPoint: Point) -> [Point] {
var contour = [startPoint]
var currentPoint = startPoint
var previousDirection = 7
let dx = [1, 1, 0, -1, -1, -1, 0, 1]
let dy = [0, 1, 1, 1, 0, -1, -1, -1]
repeat {
var found = false
for i in 0 ..< 8 {
let direction = (previousDirection + i) % 8
let newX = currentPoint.x + dx[direction]
let newY = currentPoint.y + dy[direction]
let newPoint = Point(x: newX, y: newY)
if isBlackPixel(newPoint) && !(visited[newPoint] == true) {
contour.append(newPoint)
previousDirection = (direction + 5) % 8
currentPoint = newPoint
found = true
markVisited(newPoint)
break
}
}
if !found {
break
}
} while currentPoint != startPoint
return contour
}
for y in 0 ..< height {
for x in 0 ..< width {
let point = Point(x: x, y: y)
if visited[point] == true {
continue
}
if isBlackPixel(point) {
let contour = traceContour(startPoint: point)
if contour.count > 25 {
contours.append(contour)
}
}
}
}
return (contours.sorted(by: { lhs, rhs in lhs.count > rhs.count }).first ?? []).map { $0.cgPoint }
}
private func getEdgesBitmap(_ ciImage: CIImage) -> CVPixelBuffer? {
let context = CIContext(options: nil)
guard let contourCgImage = context.createCGImage(ciImage, from: ciImage.extent) else {

View File

@ -35,7 +35,7 @@ public func transformOutgoingMessageMedia(postbox: Postbox, network: Network, me
return result
|> mapToSignal { data -> Signal<AnyMediaReference?, NoError> in
if data.complete {
if file.mimeType.hasPrefix("image/") {
if file.mimeType.hasPrefix("image/") && !file.mimeType.hasSuffix("/webp") {
return Signal { subscriber in
if let fullSizeData = try? Data(contentsOf: URL(fileURLWithPath: data.path)) {
let options = NSMutableDictionary()
@ -88,7 +88,7 @@ public func transformOutgoingMessageMedia(postbox: Postbox, network: Network, me
return EmptyDisposable
} |> runOn(opportunistic ? Queue.mainQueue() : Queue.concurrentDefaultQueue())
} else if file.mimeType.hasPrefix("video/") {
} else if file.mimeType.hasPrefix("video/") && !file.mimeType.hasSuffix("/webm") {
return Signal { subscriber in
if let scaledImage = generateVideoFirstFrame(data.path, maxDimensions: CGSize(width: 320.0, height: 320.0)), let thumbnailData = scaledImage.jpegData(compressionQuality: 0.6) {
let thumbnailResource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max))