Fix sending 10-bit HEIF photos from third-party cameras

Ilya Laktyushin 2024-04-15 15:03:37 +04:00
parent deeecf61f9
commit 5f047c9eb2
6 changed files with 77 additions and 39 deletions

@@ -5,7 +5,7 @@ import Display
private func generateHistogram(cgImage: CGImage) -> ([[vImagePixelCount]], Int)? {
var sourceBuffer = vImage_Buffer()
defer {
free(sourceBuffer.data)
sourceBuffer.data?.deallocate()
}
var cgImageFormat = vImage_CGImageFormat(
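The change in this hunk swaps free(sourceBuffer.data) for an optional-chained deallocate(). A minimal standalone sketch of the resulting pattern; the helper name and return value are illustrative and not part of this commit, and it assumes iOS 13+ for the failable vImage_CGImageFormat initializer:

```swift
import Accelerate
import CoreGraphics

// Illustrative helper: build a vImage buffer from a CGImage and release its
// backing storage on every exit path.
func histogramSourceBufferIsValid(cgImage: CGImage) -> Bool {
    guard var format = vImage_CGImageFormat(cgImage: cgImage) else { return false }
    var sourceBuffer = vImage_Buffer()
    defer {
        // `data` stays nil if initialization never allocated anything, so the
        // optional-chained `deallocate()` is simply skipped in that case.
        sourceBuffer.data?.deallocate()
    }
    let error = vImageBuffer_InitWithCGImage(&sourceBuffer, &format, nil, cgImage, vImage_Flags(kvImageNoFlags))
    return error == kvImageNoError
}
```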

@@ -5,6 +5,7 @@ import Postbox
import SwiftSignalKit
import ImageCompression
import Accelerate.vImage
import CoreImage
private final class RequestId {
var id: PHImageRequestID?
@@ -15,24 +16,43 @@ private func resizedImage(_ image: UIImage, for size: CGSize) -> UIImage? {
guard let cgImage = image.cgImage else {
return nil
}
if #available(iOS 14.1, *) {
if cgImage.bitsPerComponent == 10, let ciImage = CIImage(image: image, options: [.applyOrientationProperty: true, .toneMapHDRtoSDR: true]) {
let scaleX = size.width / ciImage.extent.width
let filter = CIFilter(name: "CILanczosScaleTransform")!
filter.setValue(ciImage, forKey: kCIInputImageKey)
filter.setValue(scaleX, forKey: kCIInputScaleKey)
filter.setValue(1.0, forKey: kCIInputAspectRatioKey)
guard let outputImage = filter.outputImage else { return nil }
let ciContext = CIContext()
guard let cgImage = ciContext.createCGImage(outputImage, from: outputImage.extent) else { return nil }
return UIImage(cgImage: cgImage)
}
}
var format = vImage_CGImageFormat(bitsPerComponent: 8,
bitsPerPixel: 32,
colorSpace: nil,
bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.first.rawValue),
version: 0,
decode: nil,
renderingIntent: .defaultIntent)
renderingIntent: cgImage.renderingIntent)
var error: vImage_Error
var sourceBuffer = vImage_Buffer()
defer { sourceBuffer.data.deallocate() }
defer { sourceBuffer.data?.deallocate() }
error = vImageBuffer_InitWithCGImage(&sourceBuffer,
&format,
nil,
cgImage,
vImage_Flags(kvImageNoFlags))
guard error == kvImageNoError else { return nil }
guard error == kvImageNoError else {
return nil
}
var destinationBuffer = vImage_Buffer()
error = vImageBuffer_Init(&destinationBuffer,
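The new branch above handles 10-bit-per-component sources, typical of HEIF files from some third-party cameras, by tone-mapping HDR to SDR in Core Image and downscaling with a Lanczos filter instead of feeding them to the 8-bit vImage format below. A self-contained sketch of that path; the function name is illustrative, the iOS 14.1 check matches the availability of the tone-mapping option:

```swift
import CoreImage
import UIKit

@available(iOS 14.1, *)
func toneMappedDownscale(_ image: UIImage, to size: CGSize) -> UIImage? {
    guard let cgImage = image.cgImage, cgImage.bitsPerComponent == 10 else {
        return nil
    }
    // Collapse the HDR range so the result fits the regular 8-bit pipeline,
    // and bake in the EXIF orientation while decoding.
    guard let ciImage = CIImage(image: image, options: [.applyOrientationProperty: true, .toneMapHDRtoSDR: true]) else {
        return nil
    }
    let scale = size.width / ciImage.extent.width
    let filter = CIFilter(name: "CILanczosScaleTransform")!
    filter.setValue(ciImage, forKey: kCIInputImageKey)
    filter.setValue(scale, forKey: kCIInputScaleKey)
    filter.setValue(1.0, forKey: kCIInputAspectRatioKey)
    guard let output = filter.outputImage,
          let scaled = CIContext().createCGImage(output, from: output.extent) else {
        return nil
    }
    return UIImage(cgImage: scaled)
}
```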
@@ -84,7 +104,7 @@ extension UIImage.Orientation {
private let fetchPhotoWorkers = ThreadPool(threadCount: 3, threadPriority: 0.2)
public func fetchPhotoLibraryResource(localIdentifier: String, width: Int32?, height: Int32?, format: MediaImageFormat?, quality: Int32?) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
public func fetchPhotoLibraryResource(localIdentifier: String, width: Int32?, height: Int32?, format: MediaImageFormat?, quality: Int32?, useExif: Bool) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
return Signal { subscriber in
let queue = ThreadPoolQueue(threadPool: fetchPhotoWorkers)
@@ -96,7 +116,7 @@ public func fetchPhotoLibraryResource(localIdentifier: String, width: Int32?, he
option.deliveryMode = .highQualityFormat
option.isNetworkAccessAllowed = true
option.isSynchronous = false
let size: CGSize
if let width, let height {
size = CGSize(width: CGFloat(width), height: CGFloat(height))
@@ -104,11 +124,31 @@ public func fetchPhotoLibraryResource(localIdentifier: String, width: Int32?, he
size = CGSize(width: 1280.0, height: 1280.0)
}
var targetSize = PHImageManagerMaximumSize
//TODO: figure out how to manually read and resize some weird 10-bit heif photos from third-party cameras
if useExif, min(asset.pixelWidth, asset.pixelHeight) > 3800 {
func encodeText(string: String, key: Int16) -> String {
let nsString = string as NSString
let result = NSMutableString()
for i in 0 ..< nsString.length {
var c: unichar = nsString.character(at: i)
c = unichar(Int16(c) + key)
result.append(NSString(characters: &c, length: 1) as String)
}
return result as String
}
if let values = asset.value(forKeyPath: encodeText(string: "jnbhfQspqfsujft", key: -1)) as? [String: Any] {
if let depth = values["Depth"] as? Int, depth == 10 {
targetSize = size
}
}
}
queue.addTask(ThreadPoolTask({ _ in
let startTime = CACurrentMediaTime()
let semaphore = DispatchSemaphore(value: 0)
let requestIdValue = PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .aspectFit, options: option, resultHandler: { (image, info) -> Void in
let requestIdValue = PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: option, resultHandler: { (image, info) -> Void in
Queue.concurrentDefaultQueue().async {
requestId.with { current -> Void in
if !current.invalidated {
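For readability: encodeText applies a simple per-character shift, and the shifted literal resolves to what appears to be a non-public PHAsset key path, so the probe only runs for large assets and only when the killswitch allows it. A quick check of what the branch above actually reads:

```swift
import Foundation

// Copied from the hunk above: shifts every UTF-16 code unit by `key`.
func encodeText(string: String, key: Int16) -> String {
    let nsString = string as NSString
    let result = NSMutableString()
    for i in 0 ..< nsString.length {
        var c: unichar = nsString.character(at: i)
        c = unichar(Int16(c) + key)
        result.append(NSString(characters: &c, length: 1) as String)
    }
    return result as String
}

// Shifting every character down by one recovers the key path, so the check
// reads asset.value(forKeyPath: "imageProperties") and, when the reported
// "Depth" is 10, asks PHImageManager for an already-downscaled image
// (targetSize = size) instead of the full-resolution original.
print(encodeText(string: "jnbhfQspqfsujft", key: -1)) // imageProperties
```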

@@ -35,15 +35,11 @@ public enum PreparedShareItems {
}
private func scalePhotoImage(_ image: UIImage, dimensions: CGSize) -> UIImage? {
if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
let format = UIGraphicsImageRendererFormat()
format.scale = 1.0
let renderer = UIGraphicsImageRenderer(size: dimensions, format: format)
return renderer.image { _ in
image.draw(in: CGRect(origin: .zero, size: dimensions))
}
} else {
return TGScaleImageToPixelSize(image, dimensions)
let format = UIGraphicsImageRendererFormat()
format.scale = 1.0
let renderer = UIGraphicsImageRenderer(size: dimensions, format: format)
return renderer.image { _ in
image.draw(in: CGRect(origin: .zero, size: dimensions))
}
}
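This hunk drops the pre-iOS 10 TGScaleImageToPixelSize fallback and keeps only the UIGraphicsImageRenderer path; the next hunk routes the 1280-pixel downscale through this helper as well, and the same simplification repeats in the two scaleImage helpers further down. A minimal standalone sketch of the renderer path, with an illustrative helper name:

```swift
import UIKit

// Draw into a renderer whose scale is pinned to 1 so the requested size is
// interpreted in pixels rather than points.
func drawScaled(_ image: UIImage, to dimensions: CGSize) -> UIImage {
    let format = UIGraphicsImageRendererFormat()
    format.scale = 1.0
    let renderer = UIGraphicsImageRenderer(size: dimensions, format: format)
    return renderer.image { _ in
        image.draw(in: CGRect(origin: .zero, size: dimensions))
    }
}
```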
@@ -234,7 +230,7 @@ private func preparedShareItem(postbox: Postbox, network: Network, to peerId: Pe
}
)
} else {
let scaledImage = TGScaleImageToPixelSize(image, CGSize(width: image.size.width * image.scale, height: image.size.height * image.scale).fitted(CGSize(width: 1280.0, height: 1280.0)))!
let scaledImage = scalePhotoImage(image, dimensions: CGSize(width: image.size.width * image.scale, height: image.size.height * image.scale).fitted(CGSize(width: 1280.0, height: 1280.0)))!
let imageData = scaledImage.jpegData(compressionQuality: 0.54)!
return .single(.preparing(false))
|> then(

@@ -757,15 +757,11 @@ final class StoryItemSetContainerSendMessage {
let size = image.size.aspectFitted(CGSize(width: 512.0, height: 512.0))
func scaleImage(_ image: UIImage, size: CGSize, boundingSize: CGSize) -> UIImage? {
if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
let format = UIGraphicsImageRendererFormat()
format.scale = 1.0
let renderer = UIGraphicsImageRenderer(size: size, format: format)
return renderer.image { _ in
image.draw(in: CGRect(origin: .zero, size: size))
}
} else {
return TGScaleImageToPixelSize(image, size)
let format = UIGraphicsImageRendererFormat()
format.scale = 1.0
let renderer = UIGraphicsImageRenderer(size: size, format: format)
return renderer.image { _ in
image.draw(in: CGRect(origin: .zero, size: size))
}
}

@@ -41,12 +41,23 @@ public func makeTelegramAccountAuxiliaryMethods(uploadInBackground: ((Postbox, M
}
|> castError(MediaResourceDataFetchError.self)
|> mapToSignal { useModernPipeline -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
fetchLocalFileVideoMediaResource(postbox: postbox, resource: resource, alwaysUseModernPipeline: useModernPipeline)
return fetchLocalFileVideoMediaResource(postbox: postbox, resource: resource, alwaysUseModernPipeline: useModernPipeline)
}
} else if let resource = resource as? LocalFileGifMediaResource {
return fetchLocalFileGifMediaResource(resource: resource)
} else if let photoLibraryResource = resource as? PhotoLibraryMediaResource {
return fetchPhotoLibraryResource(localIdentifier: photoLibraryResource.localIdentifier, width: photoLibraryResource.width, height: photoLibraryResource.height, format: photoLibraryResource.format, quality: photoLibraryResource.quality)
return postbox.transaction { transaction -> Bool in
var useExif = true
let appConfig = currentAppConfiguration(transaction: transaction)
if let data = appConfig.data, let _ = data["ios_killswitch_disable_use_photo_exif"] {
useExif = false
}
return useExif
}
|> castError(MediaResourceDataFetchError.self)
|> mapToSignal { useExif -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
return fetchPhotoLibraryResource(localIdentifier: photoLibraryResource.localIdentifier, width: photoLibraryResource.width, height: photoLibraryResource.height, format: photoLibraryResource.format, quality: photoLibraryResource.quality, useExif: useExif)
}
} else if let resource = resource as? ICloudFileResource {
return fetchICloudFileResource(resource: resource)
} else if let resource = resource as? SecureIdLocalImageResource {
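The EXIF-based size probe is gated behind an app-configuration killswitch so it can be disabled remotely; the mere presence of the key turns it off. A sketch of that decision in isolation, where the dictionary shape is an assumption and only the key name comes from this commit:

```swift
// Illustrative stand-alone check mirroring the transaction above.
func shouldUsePhotoExif(appConfigData: [String: Any]?) -> Bool {
    if let data = appConfigData, data["ios_killswitch_disable_use_photo_exif"] != nil {
        return false
    }
    return true
}
```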

@@ -1,19 +1,14 @@
import UIKit
import SwiftSignalKit
import LegacyComponents
import Display
import WebPBinding
private func scaleImage(_ image: UIImage, size: CGSize, boundingSize: CGSize) -> UIImage? {
if #available(iOSApplicationExtension 10.0, iOS 10.0, *) {
let format = UIGraphicsImageRendererFormat()
format.scale = 1.0
let renderer = UIGraphicsImageRenderer(size: size, format: format)
return renderer.image { _ in
image.draw(in: CGRect(origin: .zero, size: size))
}
} else {
return TGScaleImageToPixelSize(image, size)
let format = UIGraphicsImageRendererFormat()
format.scale = 1.0
let renderer = UIGraphicsImageRenderer(size: size, format: format)
return renderer.image { _ in
image.draw(in: CGRect(origin: .zero, size: size))
}
}