More photo upload fixes

Ilya Laktyushin
2023-04-24 00:07:11 +04:00
parent 2967795424
commit 060bc61011
3 changed files with 212 additions and 228 deletions

View File

@@ -476,9 +476,19 @@ public class CheckLayer: CALayer {
                 context.strokePath()
             case let .counter(number):
-                let text = NSAttributedString(string: "\(number)", font: Font.with(size: 16.0, design: .round, weight: .regular, traits: []), textColor: parameters.theme.strokeColor.withMultipliedAlpha(parameters.animationProgress))
+                let fontSize: CGFloat
+                let string = "\(number)"
+                switch string.count {
+                case 1:
+                    fontSize = 16.0
+                case 2:
+                    fontSize = 15.0
+                default:
+                    fontSize = 13.0
+                }
+                let text = NSAttributedString(string: string, font: Font.with(size: fontSize, design: .round, weight: .medium, traits: []), textColor: parameters.theme.strokeColor.withMultipliedAlpha(parameters.animationProgress))
                 let textRect = text.boundingRect(with: CGSize(width: 100.0, height: 100.0), options: [.usesLineFragmentOrigin], context: nil)
-                text.draw(at: CGPoint(x: UIScreenPixel + textRect.minX + floor((size.width - textRect.width) * 0.5), y: textRect.minY + floorToScreenPixels((size.height - textRect.height) * 0.5)))
+                text.draw(at: CGPoint(x: textRect.minX + floorToScreenPixels((size.width - textRect.width) * 0.5), y: textRect.minY + floorToScreenPixels((size.height - textRect.height) * 0.5)))
             }
         }
     }
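Note: the counter label now scales its font with the digit count (16 pt for one digit, 15 pt for two, 13 pt for more) and centers the text on the pixel grid instead of nudging it by UIScreenPixel. A self-contained sketch of the same sizing and centering rules, with plain UIKit stand-ins: UIFont replaces the app's rounded Font.with(...) helper, and an explicit scale parameter replaces floorToScreenPixels.

import UIKit

// Sketch: pick a counter font size that keeps the label inside a fixed-size badge.
// UIFont is a stand-in for the app's rounded Font.with(size:design:weight:traits:).
func counterFont(for number: Int) -> UIFont {
    let size: CGFloat
    switch "\(number)".count {
    case 1:
        size = 16.0
    case 2:
        size = 15.0
    default:
        size = 13.0
    }
    return UIFont.systemFont(ofSize: size, weight: .medium)
}

// Sketch: center measured text inside a bounds rect, rounding offsets down to the
// device pixel grid so the glyphs stay sharp (the role floorToScreenPixels plays above).
func centeredTextOrigin(for text: NSAttributedString, in bounds: CGSize, scale: CGFloat = UIScreen.main.scale) -> CGPoint {
    let textRect = text.boundingRect(with: CGSize(width: 100.0, height: 100.0), options: [.usesLineFragmentOrigin], context: nil)
    func floorToPixels(_ value: CGFloat) -> CGFloat {
        return floor(value * scale) / scale
    }
    return CGPoint(
        x: textRect.minX + floorToPixels((bounds.width - textRect.width) * 0.5),
        y: textRect.minY + floorToPixels((bounds.height - textRect.height) * 0.5)
    )
}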

View File

@@ -83,29 +83,25 @@ extension UIImage.Orientation {
 }
 
 private let fetchPhotoWorkers = ThreadPool(threadCount: 3, threadPriority: 0.2)
+private let fetchPhotoQueue = ThreadPoolQueue(threadPool: fetchPhotoWorkers)
 
 public func fetchPhotoLibraryResource(localIdentifier: String) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
     return Signal { subscriber in
-        let queue = ThreadPoolQueue(threadPool: fetchPhotoWorkers)
         let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
         let requestId = Atomic<RequestId>(value: RequestId())
         if fetchResult.count != 0 {
             let asset = fetchResult.object(at: 0)
             let option = PHImageRequestOptions()
-            option.deliveryMode = .opportunistic
+            option.deliveryMode = .highQualityFormat
             option.isNetworkAccessAllowed = true
             option.isSynchronous = false
-            let madeProgress = Atomic<Bool>(value: false)
-            option.progressHandler = { progress, error, _, _ in
-                if !madeProgress.swap(true) {
-                    //subscriber.putNext(.reset)
-                }
-            }
             
             let size = CGSize(width: 1280.0, height: 1280.0)
             
-            let startTime = CACurrentMediaTime()
-            queue.addTask(ThreadPoolTask({ _ in
+            fetchPhotoQueue.addTask(ThreadPoolTask({ _ in
+                let startTime = CACurrentMediaTime()
                 let semaphore = DispatchSemaphore(value: 0)
                 let requestIdValue = PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .aspectFit, options: option, resultHandler: { (image, info) -> Void in
                     Queue.concurrentDefaultQueue().async {
@@ -117,16 +113,12 @@ public func fetchPhotoLibraryResource(localIdentifier: String) -> Signal<MediaRe
                         }
                         if let image = image {
                             if let info = info, let degraded = info[PHImageResultIsDegradedKey], (degraded as AnyObject).boolValue! {
-                                if !madeProgress.swap(true) {
-                                    //subscriber.putNext(.reset)
-                                }
                             } else {
                                 #if DEBUG
                                 print("load completion \((CACurrentMediaTime() - startTime) * 1000.0) ms")
                                 #endif
-                                _ = madeProgress.swap(true)
                                 
                                 let scale = min(1.0, min(size.width / max(1.0, image.size.width), size.height / max(1.0, image.size.height)))
                                 let scaledSize = CGSize(width: floor(image.size.width * scale), height: floor(image.size.height * scale))
                                 let scaledImage = resizedImage(image, for: scaledSize)
@@ -144,13 +136,11 @@ public func fetchPhotoLibraryResource(localIdentifier: String) -> Signal<MediaRe
                                 } else {
                                     subscriber.putCompletion()
                                 }
+                                semaphore.signal()
                             }
                         } else {
-                            if !madeProgress.swap(true) {
-                                //subscriber.putNext(.reset)
-                            }
+                            semaphore.signal()
                         }
-                        semaphore.signal()
                     }
                 })
                 requestId.with { current -> Void in
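Note: the photo fetch now runs on a single shared ThreadPoolQueue, asks Photos for one final high-quality result instead of opportunistic degraded previews, and parks the worker on a DispatchSemaphore until that result arrives, so at most three PHImageManager requests are in flight at once. ThreadPool and ThreadPoolQueue are the app's own types; a rough sketch of the same pattern with an OperationQueue standing in for them:

import Photos
import UIKit

// Sketch: a queue with a fixed worker count, where each task blocks its worker on a
// semaphore until PHImageManager delivers the final image. This caps concurrent requests.
let photoFetchQueue: OperationQueue = {
    let queue = OperationQueue()
    queue.maxConcurrentOperationCount = 3 // mirrors ThreadPool(threadCount: 3, threadPriority: 0.2)
    return queue
}()

func fetchFullSizeImage(localIdentifier: String, completion: @escaping (UIImage?) -> Void) {
    photoFetchQueue.addOperation {
        let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = fetchResult.firstObject else {
            completion(nil)
            return
        }
        let options = PHImageRequestOptions()
        options.deliveryMode = .highQualityFormat // one final callback instead of degraded previews
        options.isNetworkAccessAllowed = true
        options.isSynchronous = false

        let semaphore = DispatchSemaphore(value: 0)
        PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .aspectFit, options: options) { image, info in
            // With .highQualityFormat the degraded flag should not be set, but check anyway.
            let isDegraded = (info?[PHImageResultIsDegradedKey] as? NSNumber)?.boolValue ?? false
            if !isDegraded {
                completion(image)
                semaphore.signal() // release the worker only after the final image arrived
            }
        }
        semaphore.wait()
    }
}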

View File

@@ -207,198 +207,65 @@ private final class FetchVideoLibraryMediaResourceContext {
 private let throttlingContext = FetchVideoLibraryMediaResourceContext()
 
 public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibraryMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
-    return account.postbox.preferencesView(keys: [PreferencesKeys.appConfiguration])
-    |> take(1)
-    |> map { view in
-        return view.values[PreferencesKeys.appConfiguration]?.get(AppConfiguration.self) ?? .defaultValue
-    }
-    |> castError(MediaResourceDataFetchError.self)
-    |> mapToSignal { appConfiguration -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
     let signal = Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> { subscriber in
         subscriber.putNext(.reset)
         let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [resource.localIdentifier], options: nil)
         var requestId: PHImageRequestID?
         let disposable = MetaDisposable()
         if fetchResult.count != 0 {
             let asset = fetchResult.object(at: 0)
             let option = PHVideoRequestOptions()
             option.isNetworkAccessAllowed = true
             option.deliveryMode = .highQualityFormat
             
             let alreadyReceivedAsset = Atomic<Bool>(value: false)
             requestId = PHImageManager.default().requestAVAsset(forVideo: asset, options: option, resultHandler: { avAsset, _, _ in
                 if avAsset == nil {
                     return
                 }
                 
                 if alreadyReceivedAsset.swap(true) {
                     return
                 }
                 
                 var adjustments: TGVideoEditAdjustments?
                 switch resource.conversion {
                 case .passthrough:
                     if let asset = avAsset as? AVURLAsset {
                         var value = stat()
                         if stat(asset.url.path, &value) == 0 {
                             subscriber.putNext(.copyLocalItem(AVURLAssetCopyItem(url: asset.url)))
                             subscriber.putCompletion()
                         } else {
                             subscriber.putError(.generic)
                         }
                         return
                     } else {
                         adjustments = nil
                     }
                 case let .compress(adjustmentsValue):
                     if let adjustmentsValue = adjustmentsValue {
                         if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] {
                             adjustments = TGVideoEditAdjustments(dictionary: dict)
                         }
                     }
                 }
                 let updatedSize = Atomic<Int64>(value: 0)
                 let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
                     if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
                         return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
                     } else {
                         return nil
                     }
                 }
                 let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
                 let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
                     var value = stat()
                     if stat(path, &value) == 0 {
                         if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
                             var range: Range<Int64>?
                             let _ = updatedSize.modify { updatedSize in
                                 range = updatedSize ..< value.st_size
                                 return value.st_size
                             }
                             //print("size = \(Int(value.st_size)), range: \(range!)")
                             subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
                         }
                     }
                 }), entityRenderer: entityRenderer)!
                 let signalDisposable = signal.start(next: { next in
                     if let result = next as? TGMediaVideoConversionResult {
                         var value = stat()
                         if stat(result.fileURL.path, &value) == 0 {
                             if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
                                 var range: Range<Int64>?
                                 let _ = updatedSize.modify { updatedSize in
                                     range = updatedSize ..< value.st_size
                                     return value.st_size
                                 }
                                 //print("finish size = \(Int(value.st_size)), range: \(range!)")
                                 subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
                                 subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
                                 subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
                             }
                         } else {
                             subscriber.putError(.generic)
                         }
                         subscriber.putCompletion()
                         EngineTempBox.shared.dispose(tempFile)
                     }
                 }, error: { _ in
                     subscriber.putError(.generic)
                 }, completed: nil)
                 disposable.set(ActionDisposable {
                     signalDisposable?.dispose()
                 })
             })
         }
         return ActionDisposable {
             if let requestId = requestId {
                 PHImageManager.default().cancelImageRequest(requestId)
             }
             disposable.dispose()
         }
     }
     return throttlingContext.wrap(priority: .default, signal: signal)
-    }
 }
@@ -406,12 +273,130 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
 func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideoMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
-    return account.postbox.preferencesView(keys: [PreferencesKeys.appConfiguration])
-    |> take(1)
-    |> map { view in
-        return view.values[PreferencesKeys.appConfiguration]?.get(AppConfiguration.self) ?? .defaultValue
-    }
-    |> castError(MediaResourceDataFetchError.self)
-    |> mapToSignal { appConfiguration -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
     let signal = Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> { subscriber in
         subscriber.putNext(.reset)
         
         var filteredPath = resource.path
         if filteredPath.hasPrefix("file://") {
             filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
         }
         
         let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
         var adjustments: TGVideoEditAdjustments?
         if let videoAdjustments = resource.adjustments {
             if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? [AnyHashable : Any] {
                 adjustments = TGVideoEditAdjustments(dictionary: dict)
             }
         }
         let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
         let updatedSize = Atomic<Int64>(value: 0)
         let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
             if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
                 return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
             } else {
                 return nil
             }
         }
         let signal: SSignal
         if filteredPath.contains(".jpg"), let entityRenderer = entityRenderer {
             if let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
                 let durationSignal: SSignal = SSignal(generator: { subscriber in
                     let disposable = (entityRenderer.duration()).start(next: { duration in
                         subscriber.putNext(duration)
                         subscriber.putCompletion()
                     })
                     return SBlockDisposable(block: {
                         disposable.dispose()
                     })
                 })
                 signal = durationSignal.map(toSignal: { duration -> SSignal in
                     if let duration = duration as? Double {
                         return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
                             var value = stat()
                             if stat(path, &value) == 0 {
                                 if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
                                     var range: Range<Int64>?
                                     let _ = updatedSize.modify { updatedSize in
                                         range = updatedSize ..< value.st_size
                                         return value.st_size
                                     }
                                     //print("size = \(Int(value.st_size)), range: \(range!)")
                                     subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
                                 }
                             }
                         }), entityRenderer: entityRenderer)!
                     } else {
                         return SSignal.single(nil)
                     }
                 })
             } else {
                 signal = SSignal.single(nil)
             }
         } else {
             signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
                 var value = stat()
                 if stat(path, &value) == 0 {
                     if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
                         var range: Range<Int64>?
                         let _ = updatedSize.modify { updatedSize in
                             range = updatedSize ..< Int64(value.st_size)
                             return value.st_size
                         }
                         //print("size = \(Int(value.st_size)), range: \(range!)")
                         subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
                     }
                 }
             }), entityRenderer: entityRenderer)!
         }
         let signalDisposable = signal.start(next: { next in
             if let result = next as? TGMediaVideoConversionResult {
                 var value = stat()
                 if stat(result.fileURL.path, &value) == 0 {
                     //if config.remuxToFMp4 {
                     //    let tempFile = TempBox.shared.tempFile(fileName: "video.mp4")
                     //    if FFMpegRemuxer.remux(result.fileURL.path, to: tempFile.path) {
@@ -424,35 +409,34 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
                     //    } else {
                     //        subscriber.putNext(.moveLocalFile(path: result.fileURL.path))
                     //    }
                     if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
                         var range: Range<Int64>?
                         let _ = updatedSize.modify { updatedSize in
                             range = updatedSize ..< value.st_size
                             return value.st_size
                         }
                         //print("finish size = \(Int(value.st_size)), range: \(range!)")
                         subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
                         subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
                         subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true))
                         EngineTempBox.shared.dispose(tempFile)
                     }
                 }
                 subscriber.putCompletion()
             }
         }, error: { _ in
         }, completed: nil)
         let disposable = ActionDisposable {
             signalDisposable?.dispose()
         }
         return ActionDisposable {
             disposable.dispose()
         }
     }
     return throttlingContext.wrap(priority: .default, signal: signal)
-    }
 }
 
 public func fetchVideoLibraryMediaResourceHash(resource: VideoLibraryMediaResource) -> Signal<Data?, NoError> {
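Note: both conversion paths stream the output file while it is still being written: the VideoConversionWatcher update closure stats the partially written file, computes the byte range appended since the last report, and emits it as an incomplete dataPart; the completion handler then re-emits the first 1024 bytes as a replacement header and a zero-length terminating part. A minimal sketch of that range bookkeeping, where GrowingFileReporter and its emit callback are illustrative stand-ins for the watcher and the signal subscriber:

import Foundation

// Sketch: as a converter appends to its output file, stat() the file, diff against the
// last reported size, and hand only the newly appended byte range to the consumer.
final class GrowingFileReporter {
    private let path: String
    private var reportedSize: Int64 = 0
    private let emit: (_ offset: Int64, _ data: Data, _ complete: Bool) -> Void

    init(path: String, emit: @escaping (_ offset: Int64, _ data: Data, _ complete: Bool) -> Void) {
        self.path = path
        self.emit = emit
    }

    // Call from the converter's progress callback.
    func reportProgress() {
        var info = stat()
        guard stat(self.path, &info) == 0, Int64(info.st_size) > self.reportedSize else {
            return
        }
        // Memory-map the partially written file instead of copying it.
        guard let data = try? Data(contentsOf: URL(fileURLWithPath: self.path), options: [.mappedIfSafe]) else {
            return
        }
        let range = self.reportedSize ..< Int64(info.st_size)
        self.reportedSize = Int64(info.st_size)
        self.emit(range.lowerBound, data.subdata(in: Int(range.lowerBound) ..< Int(range.upperBound)), false)
    }

    // Call once the converter finishes: re-send the header area that was patched after
    // encoding, then a zero-length part that marks the stream as complete.
    func reportCompletion(headerLength: Int = 1024) {
        guard let data = try? Data(contentsOf: URL(fileURLWithPath: self.path), options: [.mappedIfSafe]) else {
            return
        }
        self.emit(0, data.prefix(min(headerLength, data.count)), false)
        self.emit(Int64(data.count), Data(), true)
    }
}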