// mirror of https://github.com/Swiftgram/Telegram-iOS.git
// synced 2025-06-16 05:55:20 +00:00
// 1604 lines · 67 KiB · Swift
import Foundation
import UIKit
// Required for the AVPlayer, AVAsset and CMTime/CMClock APIs used below.
import AVFoundation
import Metal
import MetalKit
import Vision
import Photos
import SwiftSignalKit
import Display
import TelegramCore
import TelegramPresentationData
import FastBlur
import AccountContext

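/// A snapshot of the editor's playback state: the timeline tracks (video and
/// audio), the position at which the snapshot was generated, and whether
/// playback is currently active.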
public struct MediaEditorPlayerState {
    public struct Track: Equatable {
        public enum Content: Equatable {
            case video(frames: [UIImage], framesUpdateTimestamp: Double)
            case audio(artist: String?, title: String?, samples: Data?, peak: Int32)
            
            public static func ==(lhs: Content, rhs: Content) -> Bool {
                switch lhs {
                case let .video(_, framesUpdateTimestamp):
                    // Video contents compare equal when their frame update timestamps
                    // match; the thumbnail images themselves are not compared.
                    if case .video(_, framesUpdateTimestamp) = rhs {
                        return true
                    } else {
                        return false
                    }
                case let .audio(lhsArtist, lhsTitle, lhsSamples, lhsPeak):
                    if case let .audio(rhsArtist, rhsTitle, rhsSamples, rhsPeak) = rhs {
                        return lhsArtist == rhsArtist && lhsTitle == rhsTitle && lhsSamples == rhsSamples && lhsPeak == rhsPeak
                    } else {
                        return false
                    }
                }
            }
        }
        
        public let id: Int32
        public let content: Content
        public let duration: Double
        public let trimRange: Range<Double>?
        public let offset: Double?
        public let isMain: Bool
        public let visibleInTimeline: Bool
    }
    
    public let generationTimestamp: Double
    public let tracks: [Track]
    public let position: Double
    public let isPlaying: Bool
    
    public var isAudioOnly: Bool {
        var hasVideoTrack = false
        var hasAudioTrack = false
        for track in tracks {
            switch track.content {
            case .video:
                hasVideoTrack = true
            case .audio:
                hasAudioTrack = true
            }
        }
        return !hasVideoTrack && hasAudioTrack
    }
    
    public var hasAudio: Bool {
        return true
    }
}

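/// Coordinates media editing: owns the Metal render chain, the main and
/// additional video players and the audio track player, and exposes the
/// current `MediaEditorValues` together with playback state signals.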
public final class MediaEditor {
    public struct GradientColors {
        public let top: UIColor
        public let bottom: UIColor
        
        public init(top: UIColor, bottom: UIColor) {
            self.top = top
            self.bottom = bottom
        }
        
        public var array: [UIColor] {
            return [self.top, self.bottom]
        }
    }
    
    public enum Subject {
        case image(UIImage, PixelDimensions)
        case video(String, UIImage?, Bool, String?, PixelDimensions, Double)
        case asset(PHAsset)
        case draft(MediaEditorDraft)
        
        var dimensions: PixelDimensions {
            switch self {
            case let .image(_, dimensions), let .video(_, _, _, _, dimensions, _):
                return dimensions
            case let .asset(asset):
                return PixelDimensions(width: Int32(asset.pixelWidth), height: Int32(asset.pixelHeight))
            case let .draft(draft):
                return draft.dimensions
            }
        }
    }
    
    private let context: AccountContext
    private let subject: Subject
    
    private let clock = CMClockGetHostTimeClock()
    
    private var player: AVPlayer?
    private var additionalPlayer: AVPlayer?
    private var audioPlayer: AVPlayer?
    private var volumeFadeIn: SwiftSignalKit.Timer?
    
    private var timeObserver: Any?
    private weak var timeObserverPlayer: AVPlayer?
    private var didPlayToEndTimeObserver: NSObjectProtocol?
    
    private weak var previewView: MediaEditorPreviewView?
    
    public var values: MediaEditorValues {
        didSet {
            if !self.skipRendering {
                self.updateRenderChain()
            }
            self.valuesPromise.set(.single(self.values))
            self.valuesUpdated(self.values)
        }
    }
    public var valuesUpdated: (MediaEditorValues) -> Void = { _ in }
    private var valuesPromise = Promise<MediaEditorValues>()
    
    private let renderer = MediaEditorRenderer()
    private let renderChain = MediaEditorRenderChain()
    private let histogramCalculationPass = HistogramCalculationPass()
    
    private var textureSourceDisposable: Disposable?
    
    private let gradientColorsPromise = Promise<GradientColors?>()
    private var gradientColorsValue: GradientColors? {
        didSet {
            self.gradientColorsPromise.set(.single(self.gradientColorsValue))
        }
    }
    public var gradientColors: Signal<GradientColors?, NoError> {
        return self.gradientColorsPromise.get()
    }
    
    private let histogramPromise = Promise<Data>()
    public var histogram: Signal<Data, NoError> {
        return self.histogramPromise.get()
    }
    public var isHistogramEnabled: Bool {
        get {
            return self.histogramCalculationPass.isEnabled
        }
        set {
            self.histogramCalculationPass.isEnabled = newValue
            if newValue {
                Queue.mainQueue().justDispatch {
                    self.updateRenderChain()
                }
            }
        }
    }
    
    private var textureCache: CVMetalTextureCache!
    
    public var hasPortraitMask: Bool {
        return self.renderChain.blurPass.maskTexture != nil
    }
    
    public var sourceIsVideo: Bool {
        self.player != nil
    }
    
    public var resultIsVideo: Bool {
        return self.player != nil || self.audioPlayer != nil || self.values.entities.contains(where: { $0.entity.isAnimated })
    }
    
    public var resultImage: UIImage? {
        return self.renderer.finalRenderedImage()
    }
    
    public func getResultImage(mirror: Bool) -> UIImage? {
        return self.renderer.finalRenderedImage(mirror: mirror)
    }
    
    private let playerPromise = Promise<AVPlayer?>()
    private let additionalPlayerPromise = Promise<AVPlayer?>(nil)
    
    private struct PlaybackState: Equatable {
        let duration: Double
        let position: Double
        let isPlaying: Bool
        let hasAudio: Bool
        
        init() {
            self.duration = 0.0
            self.position = 0.0
            self.isPlaying = false
            self.hasAudio = false
        }
        
        init(duration: Double, position: Double, isPlaying: Bool, hasAudio: Bool) {
            self.duration = duration
            self.position = position
            self.isPlaying = isPlaying
            self.hasAudio = hasAudio
        }
    }
    
    private var playerPlaybackState: PlaybackState = PlaybackState() {
        didSet {
            self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
        }
    }
    private let playerPlaybackStatePromise = Promise<PlaybackState>(PlaybackState())
    
    public var position: Signal<Double, NoError> {
        return self.playerPlaybackStatePromise.get()
        |> map { state -> Double in
            return state.position
        }
    }
    
    public var duration: Double? {
        if let _ = self.player {
            if let trimRange = self.values.videoTrimRange {
                return trimRange.upperBound - trimRange.lowerBound
            } else {
                return min(60.0, self.playerPlaybackState.duration)
            }
        } else {
            return nil
        }
    }
    
    public var originalDuration: Double? {
        if let _ = self.player {
            return min(60.0, self.playerPlaybackState.duration)
        } else {
            return nil
        }
    }
    
    public var onFirstDisplay: () -> Void = {}

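    /// Builds a signal of the timeline state: one `Track` per active media
    /// source (main video, additional video, audio track), including sampled
    /// thumbnail frames for video and waveform samples for audio.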
    public func playerState(framesCount: Int) -> Signal<MediaEditorPlayerState?, NoError> {
        let additionalFramesAndUpdateTimestamp = self.additionalPlayerPromise.get()
        |> mapToSignal { player -> Signal<([UIImage], Double)?, NoError> in
            if let player, let asset = player.currentItem?.asset {
                return videoFrames(asset: asset, count: framesCount)
                |> map(Optional.init)
            } else {
                return .single(nil)
            }
        }
        
        func artistAndTitleForTrack(_ audioTrack: MediaAudioTrack) -> (artist: String?, title: String?) {
            let artist = audioTrack.artist
            var title = audioTrack.title
            if artist == nil && title == nil {
                if let underscoreIndex = audioTrack.path.firstIndex(of: "_"), let dotIndex = audioTrack.path.lastIndex(of: ".") {
                    title = String(audioTrack.path[audioTrack.path.index(after: underscoreIndex)..<dotIndex])
                } else {
                    title = audioTrack.path
                }
            }
            return (artist: artist, title: title)
        }
        
        return self.playerPromise.get()
        |> mapToSignal { [weak self] player in
            if let self, player != nil {
                if player === self.player, let asset = player?.currentItem?.asset {
                    return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), videoFrames(asset: asset, count: framesCount), additionalFramesAndUpdateTimestamp)
                    |> map { values, playbackState, framesAndUpdateTimestamp, additionalFramesAndUpdateTimestamp in
                        var tracks: [MediaEditorPlayerState.Track] = []
                        tracks.append(MediaEditorPlayerState.Track(
                            id: 0,
                            content: .video(
                                frames: framesAndUpdateTimestamp.0,
                                framesUpdateTimestamp: framesAndUpdateTimestamp.1
                            ),
                            duration: playbackState.duration,
                            trimRange: values.videoTrimRange,
                            offset: nil,
                            isMain: true,
                            visibleInTimeline: true
                        ))
                        
                        if let additionalFramesAndUpdateTimestamp {
                            tracks.append(MediaEditorPlayerState.Track(
                                id: 1,
                                content: .video(
                                    frames: additionalFramesAndUpdateTimestamp.0,
                                    framesUpdateTimestamp: additionalFramesAndUpdateTimestamp.1
                                ),
                                duration: playbackState.duration,
                                trimRange: values.additionalVideoTrimRange,
                                offset: values.additionalVideoOffset,
                                isMain: false,
                                visibleInTimeline: true
                            ))
                        }
                        
                        if let audioTrack = values.audioTrack {
                            let (artist, title) = artistAndTitleForTrack(audioTrack)
                            tracks.append(MediaEditorPlayerState.Track(
                                id: 2,
                                content: .audio(
                                    artist: artist,
                                    title: title,
                                    samples: values.audioTrackSamples?.samples,
                                    peak: values.audioTrackSamples?.peak ?? 0
                                ),
                                duration: audioTrack.duration,
                                trimRange: values.audioTrackTrimRange,
                                offset: values.audioTrackOffset,
                                isMain: false,
                                visibleInTimeline: true
                            ))
                        }
                        
                        return MediaEditorPlayerState(
                            generationTimestamp: CACurrentMediaTime(),
                            tracks: tracks,
                            position: playbackState.position,
                            isPlaying: playbackState.isPlaying
                        )
                    }
                } else if player === self.audioPlayer {
                    return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get())
                    |> map { values, playbackState in
                        var tracks: [MediaEditorPlayerState.Track] = []
                        
                        if let audioTrack = values.audioTrack {
                            let (artist, title) = artistAndTitleForTrack(audioTrack)
                            tracks.append(MediaEditorPlayerState.Track(
                                id: 0,
                                content: .audio(
                                    artist: artist,
                                    title: title,
                                    samples: values.audioTrackSamples?.samples,
                                    peak: values.audioTrackSamples?.peak ?? 0
                                ),
                                duration: audioTrack.duration,
                                trimRange: values.audioTrackTrimRange,
                                offset: values.audioTrackOffset,
                                isMain: true,
                                visibleInTimeline: true
                            ))
                        }
                        
                        return MediaEditorPlayerState(
                            generationTimestamp: CACurrentMediaTime(),
                            tracks: tracks,
                            position: playbackState.position,
                            isPlaying: playbackState.isPlaying
                        )
                    }
                } else {
                    return .single(nil)
                }
            } else {
                return .single(nil)
            }
        }
    }
    
    public init(context: AccountContext, subject: Subject, values: MediaEditorValues? = nil, hasHistogram: Bool = false) {
        self.context = context
        self.subject = subject
        if let values {
            self.values = values
            self.updateRenderChain()
        } else {
            self.values = MediaEditorValues(
                peerId: context.account.peerId,
                originalDimensions: subject.dimensions,
                cropOffset: .zero,
                cropRect: nil,
                cropScale: 1.0,
                cropRotation: 0.0,
                cropMirroring: false,
                cropOrientation: nil,
                gradientColors: nil,
                videoTrimRange: nil,
                videoIsMuted: false,
                videoIsFullHd: false,
                videoIsMirrored: false,
                additionalVideoPath: nil,
                additionalVideoPosition: nil,
                additionalVideoScale: nil,
                additionalVideoRotation: nil,
                additionalVideoPositionChanges: [],
                additionalVideoTrimRange: nil,
                additionalVideoOffset: nil,
                additionalVideoVolume: nil,
                drawing: nil,
                entities: [],
                toolValues: [:],
                audioTrack: nil,
                audioTrackTrimRange: nil,
                audioTrackOffset: nil,
                audioTrackVolume: nil,
                audioTrackSamples: nil,
                qualityPreset: nil
            )
        }
        self.valuesPromise.set(.single(self.values))
        
        self.renderer.addRenderChain(self.renderChain)
        if hasHistogram {
            self.renderer.addRenderPass(self.histogramCalculationPass)
        }
        
        self.histogramCalculationPass.updated = { [weak self] data in
            if let self {
                self.histogramPromise.set(.single(data))
            }
        }
        
        if case let .asset(asset) = subject {
            self.playerPlaybackState = PlaybackState(duration: asset.duration, position: 0.0, isPlaying: false, hasAudio: asset.mediaType == .video)
            self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
        } else if case let .video(_, _, _, _, _, duration) = subject {
            self.playerPlaybackState = PlaybackState(duration: duration, position: 0.0, isPlaying: false, hasAudio: true)
            self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
        }
    }
    
    deinit {
        self.textureSourceDisposable?.dispose()
        self.invalidateTimeObservers()
    }

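    /// Replaces the current frame with the given still image(s), uploading
    /// them as Metal textures and rendering once without presenting on screen.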
    public func replaceSource(_ image: UIImage, additionalImage: UIImage?, time: CMTime) {
        guard let renderTarget = self.previewView, let device = renderTarget.mtlDevice, let texture = loadTexture(image: image, device: device) else {
            return
        }
        let additionalTexture = additionalImage.flatMap { loadTexture(image: $0, device: device) }
        self.renderer.consume(main: .texture(texture, time), additional: additionalTexture.flatMap { .texture($0, time) }, render: true, displayEnabled: false)
    }

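    /// Resolves the subject into a texture source (still image, AVPlayer-backed
    /// video, draft, or Photos asset), derives the background gradient colors,
    /// and starts initial playback once everything is wired up.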
    private func setupSource() {
        guard let renderTarget = self.previewView else {
            return
        }
        
        if let device = renderTarget.mtlDevice, CVMetalTextureCacheCreate(nil, nil, device, nil, &self.textureCache) != kCVReturnSuccess {
            print("error: failed to create Metal texture cache")
        }
        
        let context = self.context
        let clock = self.clock
        let textureSource: Signal<(UIImage?, AVPlayer?, AVPlayer?, GradientColors), NoError>
        switch subject {
        case let .image(image, _):
            let colors = mediaEditorGetGradientColors(from: image)
            textureSource = .single((image, nil, nil, colors))
        case let .draft(draft):
            if draft.isVideo {
                textureSource = Signal { subscriber in
                    let url = URL(fileURLWithPath: draft.fullPath(engine: context.engine))
                    let asset = AVURLAsset(url: url)
                    
                    let playerItem = AVPlayerItem(asset: asset)
                    let player = AVPlayer(playerItem: playerItem)
                    if #available(iOS 15.0, *) {
                        player.sourceClock = clock
                    } else {
                        player.masterClock = clock
                    }
                    player.automaticallyWaitsToMinimizeStalling = false
                    
                    if let gradientColors = draft.values.gradientColors {
                        let colors = GradientColors(top: gradientColors.first!, bottom: gradientColors.last!)
                        subscriber.putNext((nil, player, nil, colors))
                        subscriber.putCompletion()
                        
                        return EmptyDisposable
                    } else {
                        let imageGenerator = AVAssetImageGenerator(asset: asset)
                        imageGenerator.appliesPreferredTrackTransform = true
                        imageGenerator.maximumSize = CGSize(width: 72, height: 128)
                        imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in
                            let colors: GradientColors = image.flatMap({ mediaEditorGetGradientColors(from: UIImage(cgImage: $0)) }) ?? GradientColors(top: .black, bottom: .black)
                            subscriber.putNext((nil, player, nil, colors))
                            subscriber.putCompletion()
                        }
                        return ActionDisposable {
                            imageGenerator.cancelAllCGImageGeneration()
                        }
                    }
                }
            } else {
                guard let image = UIImage(contentsOfFile: draft.fullPath(engine: context.engine)) else {
                    return
                }
                let colors: GradientColors
                if let gradientColors = draft.values.gradientColors {
                    colors = GradientColors(top: gradientColors.first!, bottom: gradientColors.last!)
                } else {
                    colors = mediaEditorGetGradientColors(from: image)
                }
                textureSource = .single((image, nil, nil, colors))
            }
        case let .video(path, transitionImage, mirror, additionalPath, _, _):
            let _ = mirror
            textureSource = Signal { subscriber in
                let asset = AVURLAsset(url: URL(fileURLWithPath: path))
                let player = AVPlayer(playerItem: AVPlayerItem(asset: asset))
                if #available(iOS 15.0, *) {
                    player.sourceClock = clock
                } else {
                    player.masterClock = clock
                }
                player.automaticallyWaitsToMinimizeStalling = false
                
                var additionalPlayer: AVPlayer?
                if let additionalPath {
                    let additionalAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
                    additionalPlayer = AVPlayer(playerItem: AVPlayerItem(asset: additionalAsset))
                    if #available(iOS 15.0, *) {
                        additionalPlayer?.sourceClock = clock
                    } else {
                        additionalPlayer?.masterClock = clock
                    }
                    additionalPlayer?.automaticallyWaitsToMinimizeStalling = false
                }
                
                if let transitionImage {
                    let colors = mediaEditorGetGradientColors(from: transitionImage)
                    //TODO pass mirror
                    subscriber.putNext((nil, player, additionalPlayer, colors))
                    subscriber.putCompletion()
                    
                    return EmptyDisposable
                } else {
                    let imageGenerator = AVAssetImageGenerator(asset: asset)
                    imageGenerator.appliesPreferredTrackTransform = true
                    imageGenerator.maximumSize = CGSize(width: 72, height: 128)
                    imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: 0, preferredTimescale: CMTimeScale(30.0)))]) { _, image, _, _, _ in
                        let colors: GradientColors = image.flatMap({ mediaEditorGetGradientColors(from: UIImage(cgImage: $0)) }) ?? GradientColors(top: .black, bottom: .black)
                        //TODO pass mirror
                        subscriber.putNext((nil, player, additionalPlayer, colors))
                        subscriber.putCompletion()
                    }
                    return ActionDisposable {
                        imageGenerator.cancelAllCGImageGeneration()
                    }
                }
            }
        case let .asset(asset):
            textureSource = Signal { subscriber in
                if asset.mediaType == .video {
                    let options = PHImageRequestOptions()
                    options.deliveryMode = .fastFormat
                    options.isNetworkAccessAllowed = true
                    let requestId = PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 128.0, height: 128.0), contentMode: .aspectFit, options: options, resultHandler: { image, info in
                        if let image {
                            if let info {
                                if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled {
                                    return
                                }
                            }
                            let colors = mediaEditorGetGradientColors(from: image)
                            PHImageManager.default().requestAVAsset(forVideo: asset, options: nil, resultHandler: { asset, _, _ in
                                if let asset {
                                    let playerItem = AVPlayerItem(asset: asset)
                                    let player = AVPlayer(playerItem: playerItem)
                                    player.automaticallyWaitsToMinimizeStalling = false
                                    
                                    #if targetEnvironment(simulator)
                                    let additionalPlayerItem = AVPlayerItem(asset: asset)
                                    let additionalPlayer = AVPlayer(playerItem: additionalPlayerItem)
                                    additionalPlayer.automaticallyWaitsToMinimizeStalling = false
                                    subscriber.putNext((nil, player, additionalPlayer, colors))
                                    #else
                                    subscriber.putNext((nil, player, nil, colors))
                                    #endif
                                    subscriber.putCompletion()
                                }
                            })
                        }
                    })
                    return ActionDisposable {
                        PHImageManager.default().cancelImageRequest(requestId)
                    }
                } else {
                    let options = PHImageRequestOptions()
                    options.deliveryMode = .highQualityFormat
                    options.isNetworkAccessAllowed = true
                    let requestId = PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 1920.0, height: 1920.0), contentMode: .aspectFit, options: options, resultHandler: { image, info in
                        if let image {
                            var degraded = false
                            if let info {
                                if let cancelled = info[PHImageCancelledKey] as? Bool, cancelled {
                                    return
                                }
                                if let degradedValue = info[PHImageResultIsDegradedKey] as? Bool, degradedValue {
                                    degraded = true
                                }
                            }
                            if !degraded {
                                let colors = mediaEditorGetGradientColors(from: image)
                                subscriber.putNext((image, nil, nil, colors))
                                subscriber.putCompletion()
                            }
                        }
                    })
                    return ActionDisposable {
                        PHImageManager.default().cancelImageRequest(requestId)
                    }
                }
            }
        }
        
        self.textureSourceDisposable = (textureSource
        |> deliverOnMainQueue).start(next: { [weak self] sourceAndColors in
            if let self {
                let (image, player, additionalPlayer, colors) = sourceAndColors
                self.renderer.onNextRender = { [weak self] in
                    self?.onFirstDisplay()
                }
                
                let textureSource = UniversalTextureSource(renderTarget: renderTarget)
                
                self.player = player
                self.playerPromise.set(.single(player))
                
                self.additionalPlayer = additionalPlayer
                self.additionalPlayerPromise.set(.single(additionalPlayer))
                
                if let image {
                    textureSource.setMainInput(.image(image))
                }
                if let player, let playerItem = player.currentItem {
                    textureSource.setMainInput(.video(playerItem))
                }
                if let additionalPlayer, let playerItem = additionalPlayer.currentItem {
                    if self.values.additionalVideoPath == nil {
                        self.values = self.values.withUpdatedAdditionalVideo(path: "", positionChanges: [])
                    }
                    textureSource.setAdditionalInput(.video(playerItem))
                }
                self.renderer.textureSource = textureSource
                
                self.gradientColorsValue = colors
                self.setGradientColors(colors.array)
                
                if player == nil {
                    self.updateRenderChain()
                    let _ = image
                    //self.maybeGeneratePersonSegmentation(image)
                }
                
                if let _ = self.values.audioTrack {
                    self.setupAudioPlayback()
                    self.updateAudioPlaybackRange()
                }
                
                if let player {
                    player.isMuted = self.values.videoIsMuted
                    if let trimRange = self.values.videoTrimRange {
                        player.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
                        additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
                    }
                    
                    if let initialSeekPosition = self.initialSeekPosition {
                        self.initialSeekPosition = nil
                        player.seek(to: CMTime(seconds: initialSeekPosition, preferredTimescale: CMTimeScale(1000)), toleranceBefore: .zero, toleranceAfter: .zero)
                    } else if let trimRange = self.values.videoTrimRange {
                        player.seek(to: CMTime(seconds: trimRange.lowerBound, preferredTimescale: CMTimeScale(1000)), toleranceBefore: .zero, toleranceAfter: .zero)
                    }
                    
                    self.setupTimeObservers()
                    Queue.mainQueue().justDispatch {
                        let startPlayback = {
                            player.playImmediately(atRate: 1.0)
                            additionalPlayer?.playImmediately(atRate: 1.0)
                            self.audioPlayer?.playImmediately(atRate: 1.0)
                            self.onPlaybackAction(.play)
                            self.volumeFadeIn = player.fadeVolume(from: 0.0, to: 1.0, duration: 0.4)
                        }
                        if let audioPlayer = self.audioPlayer, audioPlayer.status != .readyToPlay {
                            Queue.mainQueue().after(0.1) {
                                startPlayback()
                            }
                        } else {
                            startPlayback()
                        }
                    }
                } else if let audioPlayer = self.audioPlayer {
                    let offset = self.values.audioTrackOffset ?? 0.0
                    let lowerBound = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
                    
                    let audioTime = CMTime(seconds: offset + lowerBound, preferredTimescale: CMTimeScale(1000))
                    audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
                    if audioPlayer.status != .readyToPlay {
                        Queue.mainQueue().after(0.1) {
                            audioPlayer.play()
                        }
                    } else {
                        audioPlayer.play()
                    }
                }
            }
        })
    }

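    /// Installs a periodic time observer on the active player (the main video
    /// player, or the audio player when editing audio only) and an observer
    /// that loops playback back to the trimmed start when the item ends.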
    private func setupTimeObservers() {
        var observedPlayer = self.player
        if observedPlayer == nil {
            observedPlayer = self.audioPlayer
        }
        guard let observedPlayer else {
            return
        }
        
        if self.timeObserver == nil {
            self.timeObserverPlayer = observedPlayer
            self.timeObserver = observedPlayer.addPeriodicTimeObserver(forInterval: CMTimeMake(value: 1, timescale: 10), queue: DispatchQueue.main) { [weak self, weak observedPlayer] time in
                guard let self, let observedPlayer, let duration = observedPlayer.currentItem?.duration.seconds else {
                    return
                }
                var hasAudio = false
                if let audioTracks = observedPlayer.currentItem?.asset.tracks(withMediaType: .audio) {
                    hasAudio = !audioTracks.isEmpty
                }
                if time.seconds > 20000 {
                    // Skip implausibly large time values.
                } else {
                    self.playerPlaybackState = PlaybackState(duration: duration, position: time.seconds, isPlaying: observedPlayer.rate > 0.0, hasAudio: hasAudio)
                }
            }
        }
        
        if self.didPlayToEndTimeObserver == nil {
            self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: observedPlayer.currentItem, queue: nil, using: { [weak self] notification in
                if let self {
                    var start: Double
                    if self.sourceIsVideo {
                        start = self.values.videoTrimRange?.lowerBound ?? 0.0
                    } else {
                        start = (self.values.audioTrackOffset ?? 0.0) + (self.values.audioTrackTrimRange?.lowerBound ?? 0.0)
                    }
                    let targetTime = CMTime(seconds: start, preferredTimescale: CMTimeScale(1000))
                    self.player?.seek(to: targetTime)
                    //self.additionalPlayer?.seek(to: targetTime)
                    self.onPlaybackAction(.seek(start))
                    
                    self.player?.play()
                    //self.additionalPlayer?.play()
                    
                    if self.sourceIsVideo {
                        let audioTime = self.audioTime(for: targetTime)
                        if let audioDelay = self.audioDelay(for: targetTime) {
                            self.audioPlayer?.pause()
                            self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
                                self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
                                self?.audioPlayer?.play()
                            }, queue: Queue.mainQueue())
                            self.audioDelayTimer?.start()
                        } else {
                            self.audioPlayer?.seek(to: audioTime)
                            self.audioPlayer?.play()
                        }
                        
                        let videoTime = self.videoTime(for: targetTime)
                        if let videoDelay = self.videoDelay(for: targetTime) {
                            self.additionalPlayer?.pause()
                            self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in
                                self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero)
                                self?.additionalPlayer?.play()
                            }, queue: Queue.mainQueue())
                            self.videoDelayTimer?.start()
                        } else {
                            self.additionalPlayer?.seek(to: videoTime)
                            self.additionalPlayer?.play()
                        }
                    } else {
                        self.audioPlayer?.seek(to: targetTime)
                        self.audioPlayer?.play()
                    }
                    
                    Queue.mainQueue().justDispatch {
                        self.onPlaybackAction(.play)
                    }
                }
            })
        }
    }
    
    private func invalidateTimeObservers() {
        if let timeObserver = self.timeObserver {
            self.timeObserverPlayer?.removeTimeObserver(timeObserver)
            
            self.timeObserver = nil
            self.timeObserverPlayer = nil
        }
        if let didPlayToEndTimeObserver = self.didPlayToEndTimeObserver {
            NotificationCenter.default.removeObserver(didPlayToEndTimeObserver)
            self.didPlayToEndTimeObserver = nil
        }
        
        self.videoDelayTimer?.invalidate()
        self.videoDelayTimer = nil
        
        self.audioDelayTimer?.invalidate()
        self.audioDelayTimer = nil
    }
    
    public func attachPreviewView(_ previewView: MediaEditorPreviewView) {
        self.previewView?.renderer = nil
        
        self.previewView = previewView
        previewView.renderer = self.renderer
        
        self.setupSource()
    }
    
    private var skipRendering = false
    private var forceRendering = false
    
    private enum UpdateMode {
        case generic
        case skipRendering
        case forceRendering
    }
    
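    /// Applies a transformation to `values`. `.skipRendering` suppresses the
    /// re-render normally triggered by the `values` observer, while
    /// `.forceRendering` renders immediately even during playback.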
    private func updateValues(mode: UpdateMode = .generic, _ f: (MediaEditorValues) -> MediaEditorValues) {
        if case .skipRendering = mode {
            self.skipRendering = true
        } else if case .forceRendering = mode {
            self.forceRendering = true
        }
        let updatedValues = f(self.values)
        if self.values != updatedValues {
            self.values = updatedValues
        }
        if case .skipRendering = mode {
            self.skipRendering = false
        } else if case .forceRendering = mode {
            self.forceRendering = false
        }
    }
    
    public func setCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedCrop(offset: offset, scale: scale, rotation: rotation, mirroring: mirroring)
        }
    }
    
    public func getToolValue(_ key: EditorToolKey) -> Any? {
        return self.values.toolValues[key]
    }
    
    private var previewUnedited = false
    public func setPreviewUnedited(_ preview: Bool) {
        self.previewUnedited = preview
        self.updateRenderChain()
    }
    
    public func setToolValue(_ key: EditorToolKey, value: Any) {
        self.updateValues { values in
            var updatedToolValues = values.toolValues
            updatedToolValues[key] = value
            return values.withUpdatedToolValues(updatedToolValues)
        }
    }
    
    public func setVideoIsMuted(_ videoIsMuted: Bool) {
        self.player?.isMuted = videoIsMuted
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedVideoIsMuted(videoIsMuted)
        }
    }
    
    public func setVideoIsMirrored(_ videoIsMirrored: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedVideoIsMirrored(videoIsMirrored)
        }
    }
    
    public func setVideoIsFullHd(_ videoIsFullHd: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedVideoIsFullHd(videoIsFullHd)
        }
    }
    
    public enum PlaybackAction {
        case play
        case pause
        case seek(Double)
    }
    
    public var onPlaybackAction: (PlaybackAction) -> Void = { _ in }
    
    private var initialSeekPosition: Double?
    private var targetTimePosition: (CMTime, Bool)?
    private var updatingTimePosition = false
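    /// Seeks all active players to `position`, optionally resuming playback.
    /// If no player exists yet, the position is remembered and applied once
    /// the source has loaded.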
    public func seek(_ position: Double, andPlay play: Bool) {
        if self.player == nil && self.audioPlayer == nil {
            self.initialSeekPosition = position
            return
        }
        if !play {
            self.player?.pause()
            self.additionalPlayer?.pause()
            self.audioPlayer?.pause()
            self.onPlaybackAction(.pause)
        }
        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(1000.0))
        if self.targetTimePosition?.0 != targetPosition {
            self.targetTimePosition = (targetPosition, play)
            if !self.updatingTimePosition {
                self.updateVideoTimePosition()
            }
        }
        if play {
            self.player?.play()
            //self.additionalPlayer?.play()
            
            if self.sourceIsVideo {
                let audioTime = self.audioTime(for: targetPosition)
                if let audioDelay = self.audioDelay(for: targetPosition) {
                    self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
                        self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
                        self?.audioPlayer?.play()
                    }, queue: Queue.mainQueue())
                    self.audioDelayTimer?.start()
                } else {
                    self.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
                    self.audioPlayer?.play()
                }
                
                let videoTime = self.videoTime(for: targetPosition)
                if let videoDelay = self.videoDelay(for: targetPosition) {
                    self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in
                        self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero)
                        self?.additionalPlayer?.play()
                    }, queue: Queue.mainQueue())
                    self.videoDelayTimer?.start()
                } else {
                    self.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero)
                    self.additionalPlayer?.play()
                }
            } else {
                self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
                self.audioPlayer?.play()
                
                self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
                self.additionalPlayer?.play()
            }
            
            self.onPlaybackAction(.play)
        }
    }
    
    public func seek(_ position: Double, completion: @escaping () -> Void) {
        guard let player = self.player else {
            completion()
            return
        }
        player.pause()
        self.additionalPlayer?.pause()
        self.audioPlayer?.pause()
        
        let targetPosition = CMTime(seconds: position, preferredTimescale: CMTimeScale(1000.0))
        player.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { _ in
            Queue.mainQueue().async {
                completion()
            }
        })
        
        if let _ = self.videoDelay(for: targetPosition) {
        } else {
            self.additionalPlayer?.seek(to: self.videoTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero)
        }
        
        if let _ = self.audioDelay(for: targetPosition) {
        } else {
            self.audioPlayer?.seek(to: self.audioTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero)
        }
    }
    
    private var audioDelayTimer: SwiftSignalKit.Timer?
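    /// Returns how long audio playback should be delayed when the audio
    /// track's trimmed start lies beyond the given time, or nil if the audio
    /// can start immediately. `audioTime(for:)` performs the corresponding
    /// main-timeline-to-audio-track time mapping.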
    private func audioDelay(for time: CMTime) -> Double? {
        var time = time
        if time == .invalid {
            time = .zero
        }
        let videoStart = self.values.videoTrimRange?.lowerBound ?? 0.0
        var audioStart = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
        if let offset = self.values.audioTrackOffset, offset < 0.0 {
            audioStart -= offset
        }
        if audioStart - videoStart > 0.0 {
            let delay = audioStart - time.seconds
            if delay > 0 {
                return delay
            }
        }
        return nil
    }
    
    private func audioTime(for time: CMTime) -> CMTime {
        var time = time
        if time == .invalid {
            time = .zero
        }
        let seconds = time.seconds
        
        let offset = self.values.audioTrackOffset ?? 0.0
        let audioOffset = max(0.0, offset)
        let audioStart = self.values.audioTrackTrimRange?.lowerBound ?? 0.0
        if seconds < audioStart - min(0.0, offset) {
            return CMTime(seconds: audioOffset + audioStart, preferredTimescale: CMTimeScale(1000.0))
        } else {
            return CMTime(seconds: audioOffset + seconds + min(0.0, offset), preferredTimescale: CMTimeScale(1000.0))
        }
    }
    
    private var videoDelayTimer: SwiftSignalKit.Timer?
    
    /// Counterpart of `audioDelay(for:)` and `audioTime(for:)` for the
    /// additional video track.
    private func videoDelay(for time: CMTime) -> Double? {
        var time = time
        if time == .invalid {
            time = .zero
        }
        let mainStart = self.values.videoTrimRange?.lowerBound ?? 0.0
        var trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0
        if let offset = self.values.additionalVideoOffset, offset < 0.0 {
            trackStart -= offset
        }
        if trackStart - mainStart > 0.0 {
            let delay = trackStart - time.seconds
            if delay > 0 {
                return delay
            }
        }
        return nil
    }
    
    private func videoTime(for time: CMTime) -> CMTime {
        var time = time
        if time == .invalid {
            time = .zero
        }
        let seconds = time.seconds
        
        let offset = self.values.additionalVideoOffset ?? 0.0
        let trackOffset = max(0.0, offset)
        let trackStart = self.values.additionalVideoTrimRange?.lowerBound ?? 0.0
        if seconds < trackStart - min(0.0, offset) {
            return CMTime(seconds: trackOffset + trackStart, preferredTimescale: CMTimeScale(1000.0))
        } else {
            return CMTime(seconds: trackOffset + seconds + min(0.0, offset), preferredTimescale: CMTimeScale(1000.0))
        }
    }
    
    public var isPlaying: Bool {
        let effectivePlayer = self.player ?? self.audioPlayer
        return (effectivePlayer?.rate ?? 0.0) > 0.0
    }
    
    public func togglePlayback() {
        if self.isPlaying {
            self.stop()
        } else {
            self.play()
        }
    }
    
    public func play() {
        self.setRate(1.0)
    }
    
    public func stop() {
        self.setRate(0.0)
    }

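    /// Starts (rate > 0) or stops (rate == 0) all players, anchoring them to a
    /// shared host time so that the main video, additional video and audio
    /// track begin in sync; tracks that start later are scheduled via timers.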
    private func setRate(_ rate: Float) {
        let hostTime: UInt64 = mach_absolute_time()
        let time: TimeInterval = 0
        let cmHostTime = CMClockMakeHostTimeFromSystemUnits(hostTime)
        let cmVTime = CMTimeMakeWithSeconds(time, preferredTimescale: 1000000)
        let futureTime = CMTimeAdd(cmHostTime, cmVTime)
        
        if self.player == nil, let audioPlayer = self.audioPlayer {
            let itemTime = audioPlayer.currentItem?.currentTime() ?? .invalid
            if audioPlayer.status == .readyToPlay {
                audioPlayer.setRate(rate, time: itemTime, atHostTime: futureTime)
            } else {
                audioPlayer.seek(to: itemTime, toleranceBefore: .zero, toleranceAfter: .zero)
                if rate > 0.0 {
                    audioPlayer.play()
                } else {
                    audioPlayer.pause()
                }
            }
        } else {
            let itemTime = self.player?.currentItem?.currentTime() ?? .invalid
            let videoTime = self.videoTime(for: itemTime)
            let audioTime = self.audioTime(for: itemTime)
            
            self.player?.setRate(rate, time: itemTime, atHostTime: futureTime)
            //self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
            
            if let additionalPlayer = self.additionalPlayer {
                if rate > 0.0 {
                    if let videoDelay = self.videoDelay(for: itemTime) {
                        self.videoDelayTimer = SwiftSignalKit.Timer(timeout: videoDelay, repeat: false, completion: { [weak self] in
                            self?.additionalPlayer?.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero)
                            self?.additionalPlayer?.play()
                        }, queue: Queue.mainQueue())
                        self.videoDelayTimer?.start()
                    } else {
                        if additionalPlayer.status == .readyToPlay {
                            additionalPlayer.setRate(rate, time: videoTime, atHostTime: futureTime)
                            additionalPlayer.play()
                        } else {
                            additionalPlayer.seek(to: videoTime, toleranceBefore: .zero, toleranceAfter: .zero)
                            additionalPlayer.play()
                        }
                    }
                } else {
                    additionalPlayer.pause()
                }
            }
            
            if let audioPlayer = self.audioPlayer {
                if rate > 0.0 {
                    if let audioDelay = self.audioDelay(for: itemTime) {
                        self.audioDelayTimer = SwiftSignalKit.Timer(timeout: audioDelay, repeat: false, completion: { [weak self] in
                            self?.audioPlayer?.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
                            self?.audioPlayer?.play()
                        }, queue: Queue.mainQueue())
                        self.audioDelayTimer?.start()
                    } else {
                        if audioPlayer.status == .readyToPlay {
                            audioPlayer.setRate(rate, time: audioTime, atHostTime: futureTime)
                            audioPlayer.play()
                        } else {
                            audioPlayer.seek(to: audioTime, toleranceBefore: .zero, toleranceAfter: .zero)
                            audioPlayer.play()
                        }
                    }
                } else {
                    audioPlayer.pause()
                }
            }
        }
        
        if rate > 0.0 {
            self.onPlaybackAction(.play)
        } else {
            self.onPlaybackAction(.pause)
            
            self.videoDelayTimer?.invalidate()
            self.videoDelayTimer = nil
            
            self.audioDelayTimer?.invalidate()
            self.audioDelayTimer = nil
        }
    }
    
    public func invalidate() {
        self.player?.pause()
        self.additionalPlayer?.pause()
        self.audioPlayer?.pause()
        self.onPlaybackAction(.pause)
        self.renderer.textureSource?.invalidate()
        
        self.audioDelayTimer?.invalidate()
        self.audioDelayTimer = nil
        
        self.videoDelayTimer?.invalidate()
        self.videoDelayTimer = nil
    }
    
    private func updateVideoTimePosition() {
        guard let (targetPosition, _) = self.targetTimePosition else {
            return
        }
        self.updatingTimePosition = true
        
        if self.player == nil, let audioPlayer = self.audioPlayer {
            audioPlayer.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
                if let self {
                    if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition {
                        self.updatingTimePosition = false
                        self.targetTimePosition = nil
                    } else {
                        self.updateVideoTimePosition()
                    }
                }
            })
        } else {
            self.player?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
                if let self {
                    if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition {
                        self.updatingTimePosition = false
                        self.targetTimePosition = nil
                    } else {
                        self.updateVideoTimePosition()
                    }
                }
            })
            
            if let _ = self.videoDelay(for: targetPosition) {
            } else {
                self.additionalPlayer?.seek(to: self.videoTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero)
            }
            
            if let _ = self.audioDelay(for: targetPosition) {
            } else {
                self.audioPlayer?.seek(to: self.audioTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero)
            }
        }
        self.onPlaybackAction(.seek(targetPosition.seconds))
    }
    
    public func setVideoTrimRange(_ trimRange: Range<Double>, apply: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedVideoTrimRange(trimRange)
        }
        
        if apply {
            self.player?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
            //self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: trimRange.upperBound, preferredTimescale: CMTimeScale(1000))
        }
    }
    
    public func setAdditionalVideo(_ path: String?, positionChanges: [VideoPositionChange]) {
        if self.values.additionalVideoPath == nil, let path {
            let asset = AVURLAsset(url: URL(fileURLWithPath: path))
            let playerItem = AVPlayerItem(asset: asset)
            let player = AVPlayer(playerItem: playerItem)
            if #available(iOS 15.0, *) {
                player.sourceClock = clock
            } else {
                player.masterClock = clock
            }
            player.automaticallyWaitsToMinimizeStalling = false
            self.additionalPlayer = player
            self.additionalPlayerPromise.set(.single(player))
            
            (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInput(.video(playerItem))
        } else if path == nil {
            self.additionalPlayer?.pause()
            self.additionalPlayer = nil
            self.additionalPlayerPromise.set(.single(nil))
            (self.renderer.textureSource as? UniversalTextureSource)?.setAdditionalInput(nil)
        }
        
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedAdditionalVideo(path: path, positionChanges: positionChanges)
        }
    }
    
    public func setAdditionalVideoPosition(_ position: CGPoint, scale: CGFloat, rotation: CGFloat) {
        self.updateValues(mode: .forceRendering) { values in
            return values.withUpdatedAdditionalVideo(position: position, scale: scale, rotation: rotation)
        }
    }
    
    public func setAdditionalVideoTrimRange(_ trimRange: Range<Double>, apply: Bool) {
        self.updateValues(mode: .generic) { values in
            return values.withUpdatedAdditionalVideoTrimRange(trimRange)
        }
        
        if apply {
            self.updateAdditionalVideoPlaybackRange()
        }
    }
    
    public func setAdditionalVideoOffset(_ offset: Double?, apply: Bool) {
        self.updateValues(mode: .generic) { values in
            return values.withUpdatedAdditionalVideoOffset(offset)
        }
        
        if apply {
            self.updateAdditionalVideoPlaybackRange()
        }
    }
    
    public func setDrawingAndEntities(data: Data?, image: UIImage?, entities: [CodableDrawingEntity]) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedDrawingAndEntities(drawing: image, entities: entities)
        }
    }
    
    public func setGradientColors(_ gradientColors: [UIColor]) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedGradientColors(gradientColors: gradientColors)
        }
    }
    
    public func setAudioTrack(_ audioTrack: MediaAudioTrack?, trimRange: Range<Double>? = nil, offset: Double? = nil) {
        self.updateValues(mode: .skipRendering) { values in
            return values
                .withUpdatedAudioTrack(audioTrack)
                .withUpdatedAudioTrackSamples(nil)
                .withUpdatedAudioTrackTrimRange(trimRange)
                .withUpdatedAudioTrackVolume(nil)
                .withUpdatedAudioTrackOffset(offset)
        }
        
        if let audioPlayer = self.audioPlayer {
            audioPlayer.pause()
            
            if self.sourceIsVideo {
                self.audioDelayTimer?.invalidate()
                self.audioDelayTimer = nil
            } else {
                self.invalidateTimeObservers()
                self.playerPromise.set(.single(nil))
            }
            self.audioPlayer = nil
        }
        
        self.setupAudioPlayback()
        self.updateAudioPlaybackRange()
    }
    
    private func setupAudioPlayback() {
        if let audioTrack = self.values.audioTrack {
            let audioPath = fullDraftPath(peerId: self.context.account.peerId, path: audioTrack.path)
            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioPath))
            let audioPlayer = AVPlayer(playerItem: AVPlayerItem(asset: audioAsset))
            audioPlayer.automaticallyWaitsToMinimizeStalling = false
            self.audioPlayer = audioPlayer
            self.maybeGenerateAudioSamples(asset: audioAsset)
            
            if let volume = self.values.audioTrackVolume {
                self.audioPlayer?.volume = Float(volume)
            }
            
            self.setupTimeObservers()
            
            if !self.sourceIsVideo {
                self.playerPromise.set(.single(audioPlayer))
            }
        }
    }
    
    public func setAudioTrackTrimRange(_ trimRange: Range<Double>?, apply: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedAudioTrackTrimRange(trimRange)
        }
        
        if apply, let _ = trimRange {
            self.updateAudioPlaybackRange()
        }
    }
    
    public func setAudioTrackOffset(_ offset: Double?, apply: Bool) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedAudioTrackOffset(offset)
        }
        
        if apply {
            self.updateAudioPlaybackRange()
        }
    }
    
    private func updateAdditionalVideoPlaybackRange() {
        if let upperBound = self.values.additionalVideoTrimRange?.upperBound {
            let offset = max(0.0, self.values.additionalVideoOffset ?? 0.0)
            self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000))
        } else {
            self.additionalPlayer?.currentItem?.forwardPlaybackEndTime = .invalid
        }
    }
    
    private func updateAudioPlaybackRange() {
        if let upperBound = self.values.audioTrackTrimRange?.upperBound {
            let offset = max(0.0, self.values.audioTrackOffset ?? 0.0)
            self.audioPlayer?.currentItem?.forwardPlaybackEndTime = CMTime(seconds: offset + upperBound, preferredTimescale: CMTimeScale(1000))
        } else {
            self.audioPlayer?.currentItem?.forwardPlaybackEndTime = .invalid
        }
    }
    
    public func setAudioTrackVolume(_ volume: CGFloat?) {
        self.updateValues(mode: .skipRendering) { values in
            return values.withUpdatedAudioTrackVolume(volume)
        }
        
        self.audioPlayer?.volume = Float(volume ?? 1.0)
    }
    
    private var previousUpdateTime: Double?
    private var scheduledUpdate = false
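    /// Pushes the current values into the render chain. During playback new
    /// frames arrive from the player, so no explicit render is issued unless
    /// forced; otherwise renders are throttled to roughly 30 per second.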
    private func updateRenderChain() {
        self.renderer.skipEditingPasses = self.previewUnedited
        self.renderChain.update(values: self.values)
        self.renderer.videoFinishPass.update(values: self.values)
        
        if let player = self.player, player.rate > 0.0 && !self.forceRendering {
        } else {
            let currentTime = CACurrentMediaTime()
            if !self.scheduledUpdate {
                let delay = self.forceRendering ? 0.0 : 0.03333
                if let previousUpdateTime = self.previousUpdateTime, delay > 0.0, currentTime - previousUpdateTime < delay {
                    self.scheduledUpdate = true
                    Queue.mainQueue().after(delay - (currentTime - previousUpdateTime)) {
                        self.scheduledUpdate = false
                        self.previousUpdateTime = CACurrentMediaTime()
                        self.renderer.willRenderFrame()
                        self.renderer.renderFrame()
                    }
                } else {
                    self.previousUpdateTime = currentTime
                    self.renderer.willRenderFrame()
                    self.renderer.renderFrame()
                }
            }
        }
    }
    
    public func requestRenderFrame() {
        self.renderer.willRenderFrame()
        self.renderer.renderFrame()
    }
    
    private func maybeGeneratePersonSegmentation(_ image: UIImage?) {
        if #available(iOS 15.0, *), let cgImage = image?.cgImage {
            let faceRequest = VNDetectFaceRectanglesRequest { [weak self] request, _ in
                guard let _ = request.results?.first as? VNFaceObservation else { return }
                
                let personRequest = VNGeneratePersonSegmentationRequest(completionHandler: { [weak self] request, error in
                    if let self, let result = (request as? VNGeneratePersonSegmentationRequest)?.results?.first {
                        Queue.mainQueue().async {
                            self.renderChain.blurPass.maskTexture = pixelBufferToMTLTexture(pixelBuffer: result.pixelBuffer, textureCache: self.textureCache)
                        }
                    }
                })
                personRequest.qualityLevel = .accurate
                personRequest.outputPixelFormat = kCVPixelFormatType_OneComponent8
                
                let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
                do {
                    try handler.perform([personRequest])
                } catch {
                    print(error)
                }
            }
            let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
            do {
                try handler.perform([faceRequest])
            } catch {
                print(error)
            }
        }
    }

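    /// Decodes the audio track to PCM on a background queue and reduces it to
    /// one peak value per sample buffer (plus the overall peak), which the
    /// timeline uses to draw the waveform.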
    private func maybeGenerateAudioSamples(asset: AVAsset) {
        Queue.concurrentDefaultQueue().async {
            guard let audioTrack = asset.tracks(withMediaType: .audio).first else {
                return
            }
            
            do {
                let assetReader = try AVAssetReader(asset: asset)
                
                let settings: [String: Any] = [
                    AVFormatIDKey: kAudioFormatLinearPCM,
                    AVLinearPCMBitDepthKey: 32,
                    AVLinearPCMIsFloatKey: true,
                    AVLinearPCMIsBigEndianKey: false,
                    AVLinearPCMIsNonInterleaved: false
                ]
                
                let assetReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: settings)
                assetReader.add(assetReaderOutput)
                assetReader.startReading()
                
                var samplesData = Data()
                var peak: Int32 = 0
                
                while let sampleBuffer = assetReaderOutput.copyNextSampleBuffer() {
                    if let dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) {
                        let length = CMBlockBufferGetDataLength(dataBuffer)
                        let bytes = UnsafeMutablePointer<Int32>.allocate(capacity: length)
                        CMBlockBufferCopyDataBytes(dataBuffer, atOffset: 0, dataLength: length, destination: bytes)
                        
                        let samples = Array(UnsafeBufferPointer(start: bytes, count: length / MemoryLayout<Int32>.size))
                        if var maxSample = samples.max() {
                            if maxSample > peak {
                                peak = maxSample
                            }
                            samplesData.append(Data(bytesNoCopy: &maxSample, count: 4, deallocator: .none))
                        }
                        
                        bytes.deallocate()
                    }
                }
                Queue.mainQueue().async {
                    self.updateValues(mode: .skipRendering) { values in
                        return values.withUpdatedAudioTrackSamples(MediaAudioTrackSamples(samples: samplesData, peak: peak))
                    }
                }
            } catch {
            }
        }
    }
}

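/// Generates `count` evenly spaced thumbnail frames for the timeline: a blurred
/// first frame is emitted immediately as a placeholder for all slots, then
/// slots are filled in as AVAssetImageGenerator delivers real frames.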
private func videoFrames(asset: AVAsset, count: Int) -> Signal<([UIImage], Double), NoError> {
    func blurredImage(_ image: UIImage) -> UIImage? {
        guard let image = image.cgImage else {
            return nil
        }
        
        let thumbnailSize = CGSize(width: image.width, height: image.height)
        let thumbnailContextSize = thumbnailSize.aspectFilled(CGSize(width: 20.0, height: 20.0))
        if let thumbnailContext = DrawingContext(size: thumbnailContextSize, scale: 1.0) {
            thumbnailContext.withFlippedContext { c in
                c.interpolationQuality = .none
                c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContextSize))
            }
            imageFastBlur(Int32(thumbnailContextSize.width), Int32(thumbnailContextSize.height), Int32(thumbnailContext.bytesPerRow), thumbnailContext.bytes)
            
            let thumbnailContext2Size = thumbnailSize.aspectFitted(CGSize(width: 100.0, height: 100.0))
            if let thumbnailContext2 = DrawingContext(size: thumbnailContext2Size, scale: 1.0) {
                thumbnailContext2.withFlippedContext { c in
                    c.interpolationQuality = .none
                    if let image = thumbnailContext.generateImage()?.cgImage {
                        c.draw(image, in: CGRect(origin: CGPoint(), size: thumbnailContext2Size))
                    }
                }
                imageFastBlur(Int32(thumbnailContext2Size.width), Int32(thumbnailContext2Size.height), Int32(thumbnailContext2.bytesPerRow), thumbnailContext2.bytes)
                return thumbnailContext2.generateImage()
            }
        }
        return nil
    }
    
    guard count > 0 else {
        return .complete()
    }
    let scale = UIScreen.main.scale
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.maximumSize = CGSize(width: 48.0 * scale, height: 36.0 * scale)
    imageGenerator.appliesPreferredTrackTransform = true
    imageGenerator.requestedTimeToleranceBefore = .zero
    imageGenerator.requestedTimeToleranceAfter = .zero
    
    var firstFrame: UIImage
    if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) {
        firstFrame = UIImage(cgImage: cgImage)
        if let blurred = blurredImage(firstFrame) {
            firstFrame = blurred
        }
    } else {
        firstFrame = generateSingleColorImage(size: CGSize(width: 24.0, height: 36.0), color: .black)!
    }
    return Signal { subscriber in
        subscriber.putNext((Array(repeating: firstFrame, count: count), CACurrentMediaTime()))
        
        var timestamps: [NSValue] = []
        let duration = asset.duration.seconds
        let interval = duration / Double(count)
        for i in 0 ..< count {
            timestamps.append(NSValue(time: CMTime(seconds: Double(i) * interval, preferredTimescale: CMTimeScale(1000))))
        }
        
        var updatedFrames: [UIImage] = []
        imageGenerator.generateCGImagesAsynchronously(forTimes: timestamps) { _, image, _, _, _ in
            if let image {
                updatedFrames.append(UIImage(cgImage: image))
                if updatedFrames.count == count {
                    subscriber.putNext((updatedFrames, CACurrentMediaTime()))
                    subscriber.putCompletion()
                } else {
                    var tempFrames = updatedFrames
                    for _ in 0 ..< count - updatedFrames.count {
                        tempFrames.append(firstFrame)
                    }
                    subscriber.putNext((tempFrames, CACurrentMediaTime()))
                }
            }
        }
        
        return ActionDisposable {
            imageGenerator.cancelAllCGImageGeneration()
        }
    }
}