import Foundation
import UIKit
import AVFoundation
import Metal
import MetalKit
import Vision
import Photos
import SwiftSignalKit
import Display
import TelegramCore
import TelegramPresentationData
import FastBlur

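/// A snapshot of video playback state consumed by the editor UI (e.g. the trim/scrub controls).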
public struct MediaEditorPlayerState {
    public let duration: Double
    public let timeRange: Range<Double>?
    public let position: Double
    public let frames: [UIImage]
    public let framesCount: Int
    public let framesUpdateTimestamp: Double
    public let hasAudio: Bool
}

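/// Coordinates media editing: owns the Metal render chain, the current
/// `MediaEditorValues`, and, for video subjects, the `AVPlayer` driving playback.
///
/// Usage sketch (hypothetical call site; `image`, `dimensions`, and
/// `previewView` are assumed to exist elsewhere):
///
///     let editor = MediaEditor(subject: .image(image, dimensions))
///     editor.attachPreviewView(previewView)
///     editor.setToolValue(.enhance, value: 0.5 as Float)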
public final class MediaEditor {
    public enum Subject {
        case image(UIImage, PixelDimensions)
        case video(String, UIImage?, PixelDimensions)
        case asset(PHAsset)
        case draft(MediaEditorDraft)
        
        var dimensions: PixelDimensions {
            switch self {
            case let .image(_, dimensions), let .video(_, _, dimensions):
                return dimensions
            case let .asset(asset):
                return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight))
            case let .draft(draft):
                return draft.dimensions
            }
        }
    }
    
    private let subject: Subject
    private var player: AVPlayer?
    private var timeObserver: Any?
    private var didPlayToEndTimeObserver: NSObjectProtocol?
    
    private weak var previewView: MediaEditorPreviewView?
    
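    /// The current editing state. Setting this re-renders the preview (unless
    /// `skipRendering` is active) and republishes the values to observers.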
    public var values: MediaEditorValues {
        didSet {
            if !self.skipRendering {
                self.updateRenderChain()
            }
            self.valuesPromise.set(.single(self.values))
        }
    }
    private var valuesPromise = Promise<MediaEditorValues>()
    
    private let renderer = MediaEditorRenderer()
    private let renderChain = MediaEditorRenderChain()
    private let histogramCalculationPass = HistogramCalculationPass()
    
    private var textureSourceDisposable: Disposable?
    
    private let gradientColorsPromise = Promise<(UIColor, UIColor)?>()
    public var gradientColors: Signal<(UIColor, UIColor)?, NoError> {
        return self.gradientColorsPromise.get()
    }
    private var gradientColorsValue: (UIColor, UIColor)? {
        didSet {
            self.gradientColorsPromise.set(.single(self.gradientColorsValue))
        }
    }
    
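    /// Histogram data for the current frame, produced by the histogram
    /// calculation pass while `isHistogramEnabled` is true.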
    private let histogramPromise = Promise<Data>()
    public var histogram: Signal<Data, NoError> {
        return self.histogramPromise.get()
    }
    public var isHistogramEnabled: Bool {
        get {
            return self.histogramCalculationPass.isEnabled
        }
        set {
            self.histogramCalculationPass.isEnabled = newValue
            if newValue {
                Queue.mainQueue().justDispatch {
                    self.updateRenderChain()
                }
            }
        }
    }
    
    private var textureCache: CVMetalTextureCache!
    
    public var hasPortraitMask: Bool {
        return self.renderChain.blurPass.maskTexture != nil
    }
    
    public var resultIsVideo: Bool {
        return self.player != nil || self.values.entities.contains(where: { $0.entity.isAnimated })
    }
    
    public var resultImage: UIImage? {
        return self.renderer.finalRenderedImage()
    }
    
    private let playerPromise = Promise<AVPlayer?>()
    private var playerPlaybackState: (Double, Double, Bool) = (0.0, 0.0, false) {
        didSet {
            self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
        }
    }
    private let playerPlaybackStatePromise = Promise<(Double, Double, Bool)>((0.0, 0.0, false))
    
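    /// Called once after the first frame has been rendered into the preview.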
    public var onFirstDisplay: () -> Void = {}
    
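    /// Emits the combined player state for the editor UI: the trim range from
    /// the current values, playback state as (duration, position, hasAudio),
    /// and `framesCount` thumbnail frames. Emits `nil` for non-video subjects.
    ///
    /// Usage sketch (hypothetical call site):
    ///
    ///     let disposable = editor.playerState(framesCount: 12).start(next: { state in
    ///         // update the scrubber UI with state?.frames
    ///     })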
    public func playerState(framesCount: Int) -> Signal<MediaEditorPlayerState?, NoError> {
        return self.playerPromise.get()
        |> mapToSignal { [weak self] player in
            if let self, let asset = player?.currentItem?.asset {
                return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), self.videoFrames(asset: asset, count: framesCount))
                |> map { values, durationAndPosition, framesAndUpdateTimestamp in
                    let (duration, position, hasAudio) = durationAndPosition
                    let (frames, framesUpdateTimestamp) = framesAndUpdateTimestamp
                    return MediaEditorPlayerState(
                        duration: duration,
                        timeRange: values.videoTrimRange,
                        position: position,
                        frames: frames,
                        framesCount: framesCount,
                        framesUpdateTimestamp: framesUpdateTimestamp,
                        hasAudio: hasAudio
                    )
                }
            } else {
                return .single(nil)
            }
        }
    }
    
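    /// Generates `count` evenly spaced thumbnail frames for the scrubber. The
    /// signal first emits placeholders (a blurred copy of the first frame),
    /// re-emits as real frames arrive, and completes once all are ready.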
    public func videoFrames(asset: AVAsset, count: Int) -> Signal<([UIImage], Double), NoError> {
        func blurredImage(_ image: UIImage) -> UIImage? {
            guard let image = image.cgImage else {
                return nil
            }
            
            let thumbnailSize = CGSize(width: image.width, height: image.height)
            let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 20.0, height: 20.0))
            if let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) {
                thumbnailContext.withFlippedContext { c in
                    c.interpolationQuality = .none
                    c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContextSize))
                }
                imageFastBlur(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes)
                
                let thumbnailContext2Size = thumbnailSize.aspectFitted(CGSize(width: 100.0, height: 100.0))
                if let thumbnailContext2 = DrawingContext(size: thumbnailContext2Size, scale: 1.0) {
                    thumbnailContext2.withFlippedContext { c in
                        c.interpolationQuality = .none
                        if let image = thumbnailContext.generateImage()?.cgImage {
                            c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size))
                        }
                    }
                    imageFastBlur(Int32(thumbnailContext2Size.width), Int32(thumbnailContext2Size.height), Int32(thumbnailContext2.bytesPerRow), thumbnailContext2.bytes)
                    return thumbnailContext2.generateImage()
                }
            }
            return nil
        }
        
        guard count > 0 else {
            return .complete()
        }
        let scale = UIScreen.main.scale
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceBefore = .zero
        imageGenerator.requestedTimeToleranceAfter = .zero
        
        var firstFrame: UIImage
        if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) {
            firstFrame = UIImage(cgImage: cgImage)
            if let blurred = blurredImage(firstFrame) {
                firstFrame = blurred
            }
        } else {
            firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)!
        }
        return Signal { subscriber in
            subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime()))
            
            var timestamps: [NSValue] = []
            let duration = asset.duration.seconds
            let interval = duration / Double(count)
            for i in 0 ..< count {
                timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000))))
            }
            
            var updatedFrames: [UIImage] = []
            imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in
                if let image {
                    updatedFrames.append(UIImage(cgImage: image))
                    if updatedFrames.count == count {
                        subscriber.putNext((updatedFrames, CACurrentMediaTime()))
                        subscriber.putCompletion()
                    } else {
                        var tempFrames = updatedFrames
                        for _ in 0 ..< count - updatedFrames.count {
                            tempFrames.append(firstFrame)
                        }
                        subscriber.putNext((tempFrames, CACurrentMediaTime()))
                    }
                }
            }
            
            return ActionDisposable {
                imageGenerator.cancelAllCGImageGeneration()
            }
        }
    }
    
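    /// Creates an editor for `subject`. When `values` is nil, default
    /// `MediaEditorValues` are derived from the subject's dimensions; pass
    /// `hasHistogram: true` to attach the histogram calculation pass.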
    public init(subject: Subject, values: MediaEditorValues? = nil, hasHistogram: Bool = false) {
        self.subject = subject
        if let values {
            self.values = values
        } else {
            self.values = MediaEditorValues(
                originalDimensions: subject.dimensions,
                cropOffset: .zero,
                cropSize: nil,
                cropScale: 1.0,
                cropRotation: 0.0,
                cropMirroring: false,
                gradientColors: nil,
                videoTrimRange: nil,
                videoIsMuted: false,
                videoIsFullHd: false,
                drawing: nil,
                entities: [],
                toolValues: [:]
            )
        }
        self.valuesPromise.set(.single(self.values))
        
        self.renderer.addRenderChain(self.renderChain)
        if hasHistogram {
            self.renderer.addRenderPass(self.histogramCalculationPass)
        }
        
        self.histogramCalculationPass.updated = { [weak self] data in
            if let self {
                self.histogramPromise.set(.single(data))
            }
        }
    }
    
    deinit {
        self.textureSourceDisposable?.dispose()
        
        if let timeObserver = self.timeObserver {
            self.player?.removeTimeObserver(timeObserver)
        }
        if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
            NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
        }
    }
    
    private var volumeFade: SwiftSignalKit.Timer?
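    /// Builds a texture source for the current subject and hands it to the
    /// renderer; for video subjects this also creates and starts an `AVPlayer`.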
    private func setupSource() {
        guard let renderTarget = self.previewView else {
            return
        }
        
        if let device = renderTarget.mtlDevice, CVMetalTextureCacheCreate(nil, nil, device, nil, &self.textureCache) != kCVReturnSuccess {
            print("error: could not create CVMetalTextureCache")
        }
        
        let textureSource: Signal<(TextureSource, UIImage?, AVPlayer?, UIColor, UIColor), NoError>
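        // Each subject variant yields a (texture source, still image, optional
        // player, top gradient color, bottom gradient color) tuple.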
        switch subject {
        case let .image(image, _):
            let colors = mediaEditorGetGradientColors(from: image)
            textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
        case let .draft(draft):
            guard let image = UIImage(contentsOfFile: draft.path) else {
                return
            }
            let colors: (UIColor, UIColor)
            if let gradientColors = draft.values.gradientColors {
                colors = (gradientColors.first!, gradientColors.last!)
            } else {
                colors = mediaEditorGetGradientColors(from: image)
            }
            textureSource = .single((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
        case let .video(path, transitionImage, _):
            textureSource = Signal { subscriber in
                let url = URL(fileURLWithPath: path)
                let asset = AVURLAsset(url: url)
                
                let playerItem = AVPlayerItem(asset: asset)
                let player = AVPlayer(playerItem: playerItem)
                
                if let transitionImage {
                    let colors = mediaEditorGetGradientColors(from: transitionImage)
                    subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
                    subscriber.putCompletion()
                    
                    return EmptyDisposable
                } else {
                    let imageGenerator = AVAssetImageGenerator(asset: asset)
                    imageGenerator.appliesPreferredTrackTransform = true
                    imageGenerator.maximumSize = CGSize(width: 72, height: 128)
                    imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in
                        if let image {
                            let colors = mediaEditorGetGradientColors(from: UIImage(cgImage: image))
                            subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
                        } else {
                            subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, .black, .black))
                        }
                        subscriber.putCompletion()
                    }
                    return ActionDisposable {
                        imageGenerator.cancelAllCGImageGeneration()
                    }
                }
            }
        case let .asset(asset):
            textureSource = Signal { subscriber in
                if asset.mediaType == .video {
                    let options = PHImageRequestOptions()
                    options.deliveryMode = .fastFormat
                    let requestId = PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 128.0, height: 128.0), contentMode: .aspectFit, options: options, resultHandler: { image, info in
                        if let image {
                            if let info {
                                if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled {
                                    return
                                }
                            }
                            let colors = mediaEditorGetGradientColors(from: image)
                            PHImageManager.default().requestAVAsset(forVideo: asset, options: nil, resultHandler: { asset, _, _ in
                                if let asset {
                                    let playerItem = AVPlayerItem(asset: asset)
                                    let player = AVPlayer(playerItem: playerItem)
                                    subscriber.putNext((VideoTextureSource(player: player, renderTarget: renderTarget), nil, player, colors.0, colors.1))
                                    subscriber.putCompletion()
                                }
                            })
                        }
                    })
                    return ActionDisposable {
                        PHImageManager.default().cancelImageRequest(requestId)
                    }
                } else {
                    let options = PHImageRequestOptions()
                    options.deliveryMode = .highQualityFormat
                    let requestId = PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 1920.0, height: 1920.0), contentMode: .aspectFit, options: options, resultHandler: { image, info in
                        if let image {
                            var degraded = false
                            if let info {
                                if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled {
                                    return
                                }
                                if let degradedValue = info[PHImageResultIsDegradedKey] as? Bool, degradedValue {
                                    degraded = true
                                }
                            }
                            if !degraded {
                                let colors = mediaEditorGetGradientColors(from: image)
                                subscriber.putNext((ImageTextureSource(image: image, renderTarget: renderTarget), image, nil, colors.0, colors.1))
                                subscriber.putCompletion()
                            }
                        }
                    })
                    return ActionDisposable {
                        PHImageManager.default().cancelImageRequest(requestId)
                    }
                }
            }
        }
        
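        // Install the source on the main queue, publish the player and gradient
        // colors, and set up playback observation and trim-aware looping.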
        self.textureSourceDisposable = (textureSource
        |> deliverOnMainQueue).start(next: { [weak self] sourceAndColors in
            if let self {
                let (source, image, player, topColor, bottomColor) = sourceAndColors
                self.renderer.onNextRender = { [weak self] in
                    self?.onFirstDisplay()
                }
                self.renderer.textureSource = source
                self.player = player
                self.playerPromise.set(.single(player))
                self.gradientColorsValue = (topColor, bottomColor)
                self.setGradientColors([topColor, bottomColor])
                
                if player == nil {
                    self.updateRenderChain()
                    self.maybeGeneratePersonSegmentation(image)
                }
                
                if let player {
                    self.timeObserver = player.addPeriodicTimeObserver(forInterval: CMTimeMake(value: 1, timescale: 10), queue: DispatchQueue.main) { [weak self] time in
                        guard let self, let duration = player.currentItem?.duration.seconds else {
                            return
                        }
                        var hasAudio = false
                        if let audioTracks = player.currentItem?.asset.tracks(withMediaType: .audio) {
                            hasAudio = !audioTracks.isEmpty
                        }
                        self.playerPlaybackState = (duration, time.seconds, hasAudio)
                    }
                    self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { [weak self] notification in
                        if let self {
                            let start = self.values.videoTrimRange?.lowerBound ?? 0.0
                            self.player?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
                            self.player?.play()
                        }
                    })
                    self.player?.play()
                    self.volumeFade = self.player?.fadeVolume(from: 0.0, to: 1.0, duration: 0.4)
                }
            }
        })
    }
    
    public func attachPreviewView(_ previewView: MediaEditorPreviewView) {
        self.previewView?.renderer = nil
        
        self.previewView = previewView
        previewView.renderer = self.renderer
        
        self.setupSource()
    }
    
    private var skipRendering = false
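    /// Applies `f` to the current values. With `skipRendering` the update does
    /// not trigger a re-render, for changes the render chain does not consume.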
    private func updateValues(skipRendering: Bool = false, _ f: (MediaEditorValues) -> MediaEditorValues) {
        if skipRendering {
            self.skipRendering = true
        }
        self.values = f(self.values)
        if skipRendering {
            self.skipRendering = false
        }
    }
    
    public func setCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) {
        self.updateValues(skipRendering: true) { values in
            return values.withUpdatedCrop(offset: offset, scale: scale, rotation: rotation, mirroring: mirroring)
        }
    }
    
    public func getToolValue(_ key: EditorToolKey) -> Any? {
        return self.values.toolValues[key]
    }
    
    public func setToolValue(_ key: EditorToolKey, value: Any) {
        self.updateValues { values in
            var updatedToolValues = values.toolValues
            updatedToolValues[key] = value
            return values.withUpdatedToolValues(updatedToolValues)
        }
    }
    
    public func setVideoIsMuted(_ videoIsMuted: Bool) {
        self.player?.isMuted = videoIsMuted
        self.updateValues(skipRendering: true) { values in
            return values.withUpdatedVideoIsMuted(videoIsMuted)
        }
    }
    
    public func setVideoIsFullHd(_ videoIsFullHd: Bool) {
        self.updateValues(skipRendering: true) { values in
            return values.withUpdatedVideoIsFullHd(videoIsFullHd)
        }
    }
    
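    /// Seeks to `position` (in seconds). Seek requests issued while a previous
    /// seek is still in flight are coalesced so that only the most recent
    /// target is applied (see `updateVideoTimePosition`).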
    private var targetTimePosition: (CMTime, Bool)?
    private var updatingTimePosition = false
    public func seek(_ position: Double, andPlay play: Bool) {
        if !play {
            self.player?.pause()
        }
        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(60.0))
        if self.targetTimePosition?.0 != targetPosition {
            self.targetTimePosition = (targetPosition, play)
            if !self.updatingTimePosition {
                self.updateVideoTimePosition()
            }
        }
        if play {
            self.player?.play()
        }
    }
    
    public func play() {
        self.player?.play()
    }
    
    public func stop() {
        self.player?.pause()
    }
    
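    /// Performs the pending seek; if a newer target arrived while the seek was
    /// in flight, it restarts with that target instead of finishing.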
    private func updateVideoTimePosition() {
        guard let (targetPosition, _) = self.targetTimePosition else {
            return
        }
        self.updatingTimePosition = true
        self.player?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
            if let self {
                if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition {
                    self.updatingTimePosition = false
                    self.targetTimePosition = nil
                } else {
                    self.updateVideoTimePosition()
                }
            }
        })
    }
    
    public func setVideoTrimRange(_ trimRange: Range<Double>, apply: Bool) {
        self.updateValues(skipRendering: true) { values in
            return values.withUpdatedVideoTrimRange(trimRange)
        }
        
        if apply {
            self.player?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
        }
    }
    
    public func setDrawingAndEntities(data: Data?, image: UIImage?, entities: [CodableDrawingEntity]) {
        self.updateValues(skipRendering: true) { values in
            return values.withUpdatedDrawingAndEntities(drawing: image, entities: entities)
        }
    }
    
    public func setGradientColors(_ gradientColors: [UIColor]) {
        self.updateValues(skipRendering: true) { values in
            return values.withUpdatedGradientColors(gradientColors: gradientColors)
        }
    }
    
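    /// Re-renders after a values change. While the player is running, new
    /// frames arrive via playback; otherwise renders are throttled to roughly
    /// 30 fps (a 0.03333 s minimum interval).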
    private var previousUpdateTime: Double?
    private var scheduledUpdate = false
    private func updateRenderChain() {
        self.renderChain.update(values: self.values)
        if let player = self.player, player.rate > 0.0 {
        } else {
            let currentTime = CACurrentMediaTime()
            if !self.scheduledUpdate {
                let delay = 0.03333
                if let previousUpdateTime = self.previousUpdateTime, currentTime - previousUpdateTime < delay {
                    self.scheduledUpdate = true
                    Queue.mainQueue().after(delay - (currentTime - previousUpdateTime)) {
                        self.scheduledUpdate = false
                        self.previousUpdateTime = CACurrentMediaTime()
                        self.renderer.willRenderFrame()
                        self.renderer.renderFrame()
                    }
                } else {
                    self.previousUpdateTime = currentTime
                    self.renderer.willRenderFrame()
                    self.renderer.renderFrame()
                }
            }
        }
    }
    
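    /// On iOS 15+, detects whether the image contains a face and, if so,
    /// generates a person segmentation mask that the blur pass uses for the
    /// portrait blur mode.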
    private func maybeGeneratePersonSegmentation(_ image: UIImage?) {
        if #available(iOS 15.0, *), let cgImage = image?.cgImage {
            let faceRequest = VNDetectFaceRectanglesRequest { [weak self] request, _ in
                guard let _ = request.results?.first as? VNFaceObservation else { return }
                
                let personRequest = VNGeneratePersonSegmentationRequest(completionHandler: { [weak self] request, error in
                    if let self, let result = (request as? VNGeneratePersonSegmentationRequest)?.results?.first {
                        Queue.mainQueue().async {
                            self.renderChain.blurPass.maskTexture = pixelBufferToMTLTexture(pixelBuffer: result.pixelBuffer, textureCache: self.textureCache)
                        }
                    }
                })
                personRequest.qualityLevel = .accurate
                personRequest.outputPixelFormat = kCVPixelFormatType_OneComponent8
                
                let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
                do {
                    try handler.perform([personRequest])
                } catch {
                    print(error)
                }
            }
            let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
            do {
                try handler.perform([faceRequest])
            } catch {
                print(error)
            }
        }
    }
}

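/// The fixed pipeline of render passes (enhance, sharpen, blur, adjustments);
/// `update(values:)` maps each tool value onto its pass parameters.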
final class MediaEditorRenderChain {
    fileprivate let enhancePass = EnhanceRenderPass()
    fileprivate let sharpenPass = SharpenRenderPass()
    fileprivate let blurPass = BlurRenderPass()
    fileprivate let adjustmentsPass = AdjustmentsRenderPass()
    
    var renderPasses: [RenderPass] {
        return [
            self.enhancePass,
            self.sharpenPass,
            self.blurPass,
            self.adjustmentsPass
        ]
    }
    
    func update(values: MediaEditorValues) {
        for key in EditorToolKey.allCases {
            let value = values.toolValues[key]
            switch key {
            case .enhance:
                if let value = value as? Float {
                    self.enhancePass.value = value
                } else {
                    self.enhancePass.value = 0.0
                }
            case .brightness:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.exposure = value
                } else {
                    self.adjustmentsPass.adjustments.exposure = 0.0
                }
            case .contrast:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.contrast = value
                } else {
                    self.adjustmentsPass.adjustments.contrast = 0.0
                }
            case .saturation:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.saturation = value
                } else {
                    self.adjustmentsPass.adjustments.saturation = 0.0
                }
            case .warmth:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.warmth = value
                } else {
                    self.adjustmentsPass.adjustments.warmth = 0.0
                }
            case .fade:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.fade = value
                } else {
                    self.adjustmentsPass.adjustments.fade = 0.0
                }
            case .highlights:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.highlights = value
                } else {
                    self.adjustmentsPass.adjustments.highlights = 0.0
                }
            case .shadows:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.shadows = value
                } else {
                    self.adjustmentsPass.adjustments.shadows = 0.0
                }
            case .vignette:
                if let value = value as? Float {
                    self.adjustmentsPass.adjustments.vignette = value
                } else {
                    self.adjustmentsPass.adjustments.vignette = 0.0
                }
            case .grain:
                break
            case .sharpen:
                if let value = value as? Float {
                    self.sharpenPass.value = value
                } else {
                    self.sharpenPass.value = 0.0
                }
            case .shadowsTint:
                if let value = value as? TintValue {
                    if value.color != .clear {
                        let (red, green, blue, _) = value.color.components
                        self.adjustmentsPass.adjustments.shadowsTintColor = simd_float3(Float(red), Float(green), Float(blue))
                        self.adjustmentsPass.adjustments.shadowsTintIntensity = value.intensity
                    } else {
                        self.adjustmentsPass.adjustments.shadowsTintIntensity = 0.0
                    }
                }
            case .highlightsTint:
                if let value = value as? TintValue {
                    if value.color != .clear {
                        let (red, green, blue, _) = value.color.components
                        self.adjustmentsPass.adjustments.highlightsTintColor = simd_float3(Float(red), Float(green), Float(blue))
                        self.adjustmentsPass.adjustments.highlightsTintIntensity = value.intensity
                    } else {
                        self.adjustmentsPass.adjustments.highlightsTintIntensity = 0.0
                    }
                }
            case .blur:
                if let value = value as? BlurValue {
                    switch value.mode {
                    case .off:
                        self.blurPass.mode = .off
                    case .linear:
                        self.blurPass.mode = .linear
                    case .radial:
                        self.blurPass.mode = .radial
                    case .portrait:
                        self.blurPass.mode = .portrait
                    }
                    self.blurPass.intensity = value.intensity
                    self.blurPass.value.size = Float(value.size)
                    self.blurPass.value.position = simd_float2(Float(value.position.x), Float(value.position.y))
                    self.blurPass.value.falloff = Float(value.falloff)
                    self.blurPass.value.rotation = Float(value.rotation)
                }
            case .curves:
                var value = (value as? CurvesValue) ?? CurvesValue.initial
                let allDataPoints = value.all.dataPoints
                let redDataPoints = value.red.dataPoints
                let greenDataPoints = value.green.dataPoints
                let blueDataPoints = value.blue.dataPoints
                
                self.adjustmentsPass.allCurve = allDataPoints
                self.adjustmentsPass.redCurve = redDataPoints
                self.adjustmentsPass.greenCurve = greenDataPoints
                self.adjustmentsPass.blueCurve = blueDataPoints
            }
        }
    }
}

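/// Debug helper: writes `image` as a PNG to a randomly named file in the
/// temporary directory and prints the resulting path.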
public func debugSaveImage(_ image: UIImage, name: String) {
    let path = NSTemporaryDirectory() + "debug_\(name)_\(Int64.random(in: .min ... .max)).png"
    print(path)
    if let data = image.pngData() {
        try? data.write(to: URL(fileURLWithPath: path))
    }
}