diff --git a/submodules/CheckNode/Sources/CheckNode.swift b/submodules/CheckNode/Sources/CheckNode.swift
index 86229c2222..607158d5c1 100644
--- a/submodules/CheckNode/Sources/CheckNode.swift
+++ b/submodules/CheckNode/Sources/CheckNode.swift
@@ -476,9 +476,19 @@ public class CheckLayer: CALayer {
             context.strokePath()
         case let .counter(number):
-            let text = NSAttributedString(string: "\(number)", font: Font.with(size: 16.0, design: .round, weight: .regular, traits: []), textColor: parameters.theme.strokeColor.withMultipliedAlpha(parameters.animationProgress))
+            let fontSize: CGFloat
+            let string = "\(number)"
+            switch string.count {
+            case 1:
+                fontSize = 16.0
+            case 2:
+                fontSize = 15.0
+            default:
+                fontSize = 13.0
+            }
+            let text = NSAttributedString(string: string, font: Font.with(size: fontSize, design: .round, weight: .medium, traits: []), textColor: parameters.theme.strokeColor.withMultipliedAlpha(parameters.animationProgress))
             let textRect = text.boundingRect(with: CGSize(width: 100.0, height: 100.0), options: [.usesLineFragmentOrigin], context: nil)
-            text.draw(at: CGPoint(x: UIScreenPixel + textRect.minX + floor((size.width - textRect.width) * 0.5), y: textRect.minY + floorToScreenPixels((size.height - textRect.height) * 0.5)))
+            text.draw(at: CGPoint(x: textRect.minX + floorToScreenPixels((size.width - textRect.width) * 0.5), y: textRect.minY + floorToScreenPixels((size.height - textRect.height) * 0.5)))
         }
     }
 }
diff --git a/submodules/LocalMediaResources/Sources/FetchPhotoLibraryImageResource.swift b/submodules/LocalMediaResources/Sources/FetchPhotoLibraryImageResource.swift
index 3ee6571835..f2b7aa5631 100644
--- a/submodules/LocalMediaResources/Sources/FetchPhotoLibraryImageResource.swift
+++ b/submodules/LocalMediaResources/Sources/FetchPhotoLibraryImageResource.swift
@@ -83,29 +83,25 @@ extension UIImage.Orientation {
 }
 
 private let fetchPhotoWorkers = ThreadPool(threadCount: 3, threadPriority: 0.2)
-private let fetchPhotoQueue = ThreadPoolQueue(threadPool: fetchPhotoWorkers)
 
 public func fetchPhotoLibraryResource(localIdentifier: String) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
     return Signal { subscriber in
+        let queue = ThreadPoolQueue(threadPool: fetchPhotoWorkers)
+        
         let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
         let requestId = Atomic(value: RequestId())
         if fetchResult.count != 0 {
             let asset = fetchResult.object(at: 0)
             let option = PHImageRequestOptions()
-            option.deliveryMode = .opportunistic
+            option.deliveryMode = .highQualityFormat
             option.isNetworkAccessAllowed = true
             option.isSynchronous = false
-            let madeProgress = Atomic(value: false)
-            option.progressHandler = { progress, error, _, _ in
-                if !madeProgress.swap(true) {
-                    //subscriber.putNext(.reset)
-                }
-            }
+            
             let size = CGSize(width: 1280.0, height: 1280.0)
-            let startTime = CACurrentMediaTime()
-            
-            fetchPhotoQueue.addTask(ThreadPoolTask({ _ in
+            queue.addTask(ThreadPoolTask({ _ in
+                let startTime = CACurrentMediaTime()
+                let semaphore = DispatchSemaphore(value: 0)
                 let requestIdValue = PHImageManager.default().requestImage(for: asset, targetSize: PHImageManagerMaximumSize, contentMode: .aspectFit, options: option, resultHandler: { (image, info) -> Void in
                     Queue.concurrentDefaultQueue().async {
@@ -117,16 +113,12 @@ public func fetchPhotoLibraryResource(localIdentifier: String) -> Signal
Signal Void in
diff --git a/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift b/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift
index 6f35c73e81..13b0a19b55 100644
--- a/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift
+++ b/submodules/TelegramUI/Sources/FetchVideoMediaResource.swift
@@ -207,198 +207,65 @@ private final class FetchVideoLibraryMediaResourceContext {
 private let throttlingContext = FetchVideoLibraryMediaResourceContext()
 
 public func fetchVideoLibraryMediaResource(account: Account, resource: VideoLibraryMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
-    return account.postbox.preferencesView(keys: [PreferencesKeys.appConfiguration])
-    |> take(1)
-    |> map { view in
-        return view.values[PreferencesKeys.appConfiguration]?.get(AppConfiguration.self) ?? .defaultValue
-    }
-    |> castError(MediaResourceDataFetchError.self)
-    |> mapToSignal { appConfiguration -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
-        let signal = Signal { subscriber in
-            subscriber.putNext(.reset)
-            let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [resource.localIdentifier], options: nil)
-            var requestId: PHImageRequestID?
-            let disposable = MetaDisposable()
-            if fetchResult.count != 0 {
-                let asset = fetchResult.object(at: 0)
-                let option = PHVideoRequestOptions()
-                option.isNetworkAccessAllowed = true
-                option.deliveryMode = .highQualityFormat
+    let signal = Signal { subscriber in
+        subscriber.putNext(.reset)
+        let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [resource.localIdentifier], options: nil)
+        var requestId: PHImageRequestID?
+        let disposable = MetaDisposable()
+        if fetchResult.count != 0 {
+            let asset = fetchResult.object(at: 0)
+            let option = PHVideoRequestOptions()
+            option.isNetworkAccessAllowed = true
+            option.deliveryMode = .highQualityFormat
+            
+            let alreadyReceivedAsset = Atomic(value: false)
+            requestId = PHImageManager.default().requestAVAsset(forVideo: asset, options: option, resultHandler: { avAsset, _, _ in
+                if avAsset == nil {
+                    return
+                }
-                let alreadyReceivedAsset = Atomic(value: false)
-                requestId = PHImageManager.default().requestAVAsset(forVideo: asset, options: option, resultHandler: { avAsset, _, _ in
-                    if avAsset == nil {
-                        return
-                    }
-                    
-                    if alreadyReceivedAsset.swap(true) {
-                        return
-                    }
-                    
-                    var adjustments: TGVideoEditAdjustments?
-                    switch resource.conversion {
-                    case .passthrough:
-                        if let asset = avAsset as? AVURLAsset {
-                            var value = stat()
-                            if stat(asset.url.path, &value) == 0 {
-                                subscriber.putNext(.copyLocalItem(AVURLAssetCopyItem(url: asset.url)))
-                                subscriber.putCompletion()
-                            } else {
-                                subscriber.putError(.generic)
-                            }
-                            return
-                        } else {
-                            adjustments = nil
-                        }
-                    case let .compress(adjustmentsValue):
-                        if let adjustmentsValue = adjustmentsValue {
-                            if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] {
-                                adjustments = TGVideoEditAdjustments(dictionary: dict)
-                            }
-                        }
-                    }
-                    let updatedSize = Atomic<Int64>(value: 0)
-                    let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
-                        if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
-                            return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
-                        } else {
-                            return nil
-                        }
-                    }
-                    let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
-                    let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
-                        var value = stat()
-                        if stat(path, &value) == 0 {
-                            if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
-                                var range: Range<Int64>?
-                                let _ = updatedSize.modify { updatedSize in
-                                    range = updatedSize ..< value.st_size
-                                    return value.st_size
-                                }
-                                //print("size = \(Int(value.st_size)), range: \(range!)")
-                                subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
-                            }
-                        }
-                    }), entityRenderer: entityRenderer)!
-                    let signalDisposable = signal.start(next: { next in
-                        if let result = next as? TGMediaVideoConversionResult {
+                if alreadyReceivedAsset.swap(true) {
+                    return
+                }
+                
+                var adjustments: TGVideoEditAdjustments?
+                switch resource.conversion {
+                case .passthrough:
+                    if let asset = avAsset as? AVURLAsset {
                        var value = stat()
-                            if stat(result.fileURL.path, &value) == 0 {
-                                if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
-                                    var range: Range<Int64>?
-                                    let _ = updatedSize.modify { updatedSize in
-                                        range = updatedSize ..< value.st_size
-                                        return value.st_size
-                                    }
-                                    //print("finish size = \(Int(value.st_size)), range: \(range!)")
-                                    subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
-                                    subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
-                                    subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
-                                }
+                        if stat(asset.url.path, &value) == 0 {
+                            subscriber.putNext(.copyLocalItem(AVURLAssetCopyItem(url: asset.url)))
+                            subscriber.putCompletion()
                        } else {
                            subscriber.putError(.generic)
                        }
-                                subscriber.putCompletion()
-                                
-                                EngineTempBox.shared.dispose(tempFile)
-                            }
-                        }, error: { _ in
-                            subscriber.putError(.generic)
-                        }, completed: nil)
-                        disposable.set(ActionDisposable {
-                            signalDisposable?.dispose()
-                        })
-                    })
-                }
-                
-                return ActionDisposable {
-                    if let requestId = requestId {
-                        PHImageManager.default().cancelImageRequest(requestId)
-                    }
-                    disposable.dispose()
-                }
-            }
-            return throttlingContext.wrap(priority: .default, signal: signal)
-        }
-    }
-}
-
-func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideoMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
-    return account.postbox.preferencesView(keys: [PreferencesKeys.appConfiguration])
-    |> take(1)
-    |> map { view in
-        return view.values[PreferencesKeys.appConfiguration]?.get(AppConfiguration.self) ?? .defaultValue
-    }
-    |> castError(MediaResourceDataFetchError.self)
-    |> mapToSignal { appConfiguration -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> in
-        let signal = Signal { subscriber in
-            subscriber.putNext(.reset)
-            
-            var filteredPath = resource.path
-            if filteredPath.hasPrefix("file://") {
-                filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
-            }
-            
-            let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
-            var adjustments: TGVideoEditAdjustments?
-            if let videoAdjustments = resource.adjustments {
-                if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? [AnyHashable : Any] {
-                    adjustments = TGVideoEditAdjustments(dictionary: dict)
-                }
-            }
-            let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
-            let updatedSize = Atomic<Int64>(value: 0)
-            let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
-                if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
-                    return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
-                } else {
-                    return nil
-                }
-            }
-            let signal: SSignal
-            if filteredPath.contains(".jpg"), let entityRenderer = entityRenderer {
-                if let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
-                    let durationSignal: SSignal = SSignal(generator: { subscriber in
-                        let disposable = (entityRenderer.duration()).start(next: { duration in
-                            subscriber.putNext(duration)
-                            subscriber.putCompletion()
-                        })
-                        
-                        return SBlockDisposable(block: {
-                            disposable.dispose()
-                        })
-                    })
-                    
-                    signal = durationSignal.map(toSignal: { duration -> SSignal in
-                        if let duration = duration as? Double {
-                            return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
-                                var value = stat()
-                                if stat(path, &value) == 0 {
-                                    if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
-                                        var range: Range<Int64>?
-                                        let _ = updatedSize.modify { updatedSize in
-                                            range = updatedSize ..< value.st_size
-                                            return value.st_size
-                                        }
-                                        //print("size = \(Int(value.st_size)), range: \(range!)")
-                                        subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
-                                    }
-                                }
-                            }), entityRenderer: entityRenderer)!
+                    } else {
-                            return SSignal.single(nil)
+                        adjustments = nil
+                    }
+                case let .compress(adjustmentsValue):
+                    if let adjustmentsValue = adjustmentsValue {
+                        if let dict = NSKeyedUnarchiver.unarchiveObject(with: adjustmentsValue.data.makeData()) as? [AnyHashable : Any] {
+                            adjustments = TGVideoEditAdjustments(dictionary: dict)
+                        }
-                        })
-                    } else {
-                        signal = SSignal.single(nil)
                     }
-                } else {
-                    signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
+                let updatedSize = Atomic<Int64>(value: 0)
+                let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
+                    if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
+                        return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
+                    } else {
+                        return nil
+                    }
+                }
+                let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
+                let signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
                    var value = stat()
                    if stat(path, &value) == 0 {
                        if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
                            var range: Range<Int64>?
                            let _ = updatedSize.modify { updatedSize in
-                                range = updatedSize ..< Int64(value.st_size)
+                                range = updatedSize ..< value.st_size
                                return value.st_size
                            }
                            //print("size = \(Int(value.st_size)), range: \(range!)")
@@ -406,12 +273,130 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
                        }
                    }
                }), entityRenderer: entityRenderer)!
+                let signalDisposable = signal.start(next: { next in
+                    if let result = next as? TGMediaVideoConversionResult {
+                        var value = stat()
+                        if stat(result.fileURL.path, &value) == 0 {
+                            if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
+                                var range: Range<Int64>?
+                                let _ = updatedSize.modify { updatedSize in
+                                    range = updatedSize ..< value.st_size
+                                    return value.st_size
+                                }
+                                //print("finish size = \(Int(value.st_size)), range: \(range!)")
+                                subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
+                                subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
+                                subscriber.putNext(.dataPart(resourceOffset: Int64(data.count), data: Data(), range: 0 ..< 0, complete: true))
+                            }
+                        } else {
+                            subscriber.putError(.generic)
+                        }
+                        subscriber.putCompletion()
+                        
+                        EngineTempBox.shared.dispose(tempFile)
+                    }
+                }, error: { _ in
+                    subscriber.putError(.generic)
+                }, completed: nil)
+                disposable.set(ActionDisposable {
+                    signalDisposable?.dispose()
+                })
+            })
+        }
+        
+        return ActionDisposable {
+            if let requestId = requestId {
+                PHImageManager.default().cancelImageRequest(requestId)
            }
+            disposable.dispose()
+        }
+    }
+    return throttlingContext.wrap(priority: .default, signal: signal)
+}
+
+func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideoMediaResource) -> Signal<MediaResourceDataFetchResult, MediaResourceDataFetchError> {
+    let signal = Signal { subscriber in
+        subscriber.putNext(.reset)
+        
+        var filteredPath = resource.path
+        if filteredPath.hasPrefix("file://") {
+            filteredPath = String(filteredPath[filteredPath.index(filteredPath.startIndex, offsetBy: "file://".count)])
+        }
+        
+        let avAsset = AVURLAsset(url: URL(fileURLWithPath: filteredPath))
+        var adjustments: TGVideoEditAdjustments?
+        if let videoAdjustments = resource.adjustments {
+            if let dict = NSKeyedUnarchiver.unarchiveObject(with: videoAdjustments.data.makeData()) as? [AnyHashable : Any] {
+                adjustments = TGVideoEditAdjustments(dictionary: dict)
+            }
+        }
+        let tempFile = EngineTempBox.shared.tempFile(fileName: "video.mp4")
+        let updatedSize = Atomic<Int64>(value: 0)
+        let entityRenderer: LegacyPaintEntityRenderer? = adjustments.flatMap { adjustments in
+            if let paintingData = adjustments.paintingData, paintingData.hasAnimation {
+                return LegacyPaintEntityRenderer(account: account, adjustments: adjustments)
+            } else {
+                return nil
+            }
+        }
+        let signal: SSignal
+        if filteredPath.contains(".jpg"), let entityRenderer = entityRenderer {
+            if let data = try? Data(contentsOf: URL(fileURLWithPath: filteredPath), options: [.mappedRead]), let image = UIImage(data: data) {
+                let durationSignal: SSignal = SSignal(generator: { subscriber in
+                    let disposable = (entityRenderer.duration()).start(next: { duration in
+                        subscriber.putNext(duration)
+                        subscriber.putCompletion()
+                    })
+                    
+                    return SBlockDisposable(block: {
+                        disposable.dispose()
+                    })
+                })
-                    let signalDisposable = signal.start(next: { next in
-                        if let result = next as? TGMediaVideoConversionResult {
-                            var value = stat()
-                            if stat(result.fileURL.path, &value) == 0 {
+                signal = durationSignal.map(toSignal: { duration -> SSignal in
+                    if let duration = duration as? Double {
+                        return TGMediaVideoConverter.renderUIImage(image, duration: duration, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
+                            var value = stat()
+                            if stat(path, &value) == 0 {
+                                if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
+                                    var range: Range<Int64>?
+                                    let _ = updatedSize.modify { updatedSize in
+                                        range = updatedSize ..< value.st_size
+                                        return value.st_size
+                                    }
+                                    //print("size = \(Int(value.st_size)), range: \(range!)")
+                                    subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
+                                }
+                            }
+                        }), entityRenderer: entityRenderer)!
+                    } else {
+                        return SSignal.single(nil)
+                    }
+                })
+            } else {
+                signal = SSignal.single(nil)
+            }
+        } else {
+            signal = TGMediaVideoConverter.convert(avAsset, adjustments: adjustments, path: tempFile.path, watcher: VideoConversionWatcher(update: { path, size in
+                var value = stat()
+                if stat(path, &value) == 0 {
+                    if let data = try? Data(contentsOf: URL(fileURLWithPath: path), options: [.mappedRead]) {
+                        var range: Range<Int64>?
+                        let _ = updatedSize.modify { updatedSize in
+                            range = updatedSize ..< Int64(value.st_size)
+                            return value.st_size
+                        }
+                        //print("size = \(Int(value.st_size)), range: \(range!)")
+                        subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
+                    }
+                }
+            }), entityRenderer: entityRenderer)!
+        }
+        
+        let signalDisposable = signal.start(next: { next in
+            if let result = next as? TGMediaVideoConversionResult {
+                var value = stat()
+                if stat(result.fileURL.path, &value) == 0 {
@@ -424,35 +409,34 @@ func fetchLocalFileVideoMediaResource(account: Account, resource: LocalFileVideo
 //                    if config.remuxToFMp4 {
 //                        let tempFile = TempBox.shared.tempFile(fileName: "video.mp4")
 //                        if FFMpegRemuxer.remux(result.fileURL.path, to: tempFile.path) {
 //                        } else {
 //                            subscriber.putNext(.moveLocalFile(path: result.fileURL.path))
 //                        }
-                            if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
-                                var range: Range<Int64>?
-                                let _ = updatedSize.modify { updatedSize in
-                                    range = updatedSize ..< value.st_size
-                                    return value.st_size
-                                }
-                                //print("finish size = \(Int(value.st_size)), range: \(range!)")
-                                subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
-                                subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
-                                subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true))
-                                
-                                EngineTempBox.shared.dispose(tempFile)
+                    if let data = try? Data(contentsOf: result.fileURL, options: [.mappedRead]) {
+                        var range: Range<Int64>?
+                        let _ = updatedSize.modify { updatedSize in
+                            range = updatedSize ..< value.st_size
+                            return value.st_size
                        }
+                        //print("finish size = \(Int(value.st_size)), range: \(range!)")
+                        subscriber.putNext(.dataPart(resourceOffset: range!.lowerBound, data: data, range: range!, complete: false))
+                        subscriber.putNext(.replaceHeader(data: data, range: 0 ..< 1024))
+                        subscriber.putNext(.dataPart(resourceOffset: 0, data: Data(), range: 0 ..< 0, complete: true))
+                        
+                        EngineTempBox.shared.dispose(tempFile)
                    }
-                            subscriber.putCompletion()
                }
-                        }, error: { _ in
-                        }, completed: nil)
-                        
-                        let disposable = ActionDisposable {
-                            signalDisposable?.dispose()
-                        }
-                        
-                        return ActionDisposable {
-                            disposable.dispose()
+                subscriber.putCompletion()
            }
+        }, error: { _ in
+        }, completed: nil)
+        
+        let disposable = ActionDisposable {
+            signalDisposable?.dispose()
+        }
+        
+        return ActionDisposable {
+            disposable.dispose()
        }
-        return throttlingContext.wrap(priority: .default, signal: signal)
    }
+    return throttlingContext.wrap(priority: .default, signal: signal)
 }
 
 public func fetchVideoLibraryMediaResourceHash(resource: VideoLibraryMediaResource) -> Signal {