Various improvements

Ilya Laktyushin 2023-08-28 17:22:49 +04:00
parent 7480c3c4f6
commit a280d5841d
6 changed files with 481 additions and 204 deletions

View File

@@ -2360,29 +2360,29 @@ final class StoryStickersContentView: UIView, EmojiCustomContentView {
})
)
),
// AnyComponentWithIdentity(
// id: "audio",
// component: AnyComponent(
// CameraButton(
// content: AnyComponentWithIdentity(
// id: "audio",
// component: AnyComponent(
// InteractiveStickerButtonContent(
// theme: theme,
// title: "AUDIO",
// iconName: "Media Editor/Audio",
// useOpaqueTheme: useOpaqueTheme,
// tintContainerView: self.tintContainerView
// )
// )
// ),
// action: { [weak self] in
// if let self {
// self.audioAction()
// }
// })
// )
// ),
AnyComponentWithIdentity(
id: "audio",
component: AnyComponent(
CameraButton(
content: AnyComponentWithIdentity(
id: "audio",
component: AnyComponent(
InteractiveStickerButtonContent(
theme: theme,
title: "AUDIO",
iconName: "Media Editor/Audio",
useOpaqueTheme: useOpaqueTheme,
tintContainerView: self.tintContainerView
)
)
),
action: { [weak self] in
if let self {
self.audioAction()
}
})
)
),
AnyComponentWithIdentity(
id: "reaction",
component: AnyComponent(

View File

@@ -21,6 +21,7 @@ public struct MediaEditorPlayerState {
public let framesCount: Int
public let framesUpdateTimestamp: Double
public let hasAudio: Bool
public let isAudioPlayerOnly: Bool
}
public final class MediaEditor {
@@ -111,7 +112,7 @@ public final class MediaEditor {
}
public var resultIsVideo: Bool {
return self.player != nil || self.values.entities.contains(where: { $0.entity.isAnimated })
return self.player != nil || self.audioPlayer != nil || self.values.entities.contains(where: { $0.entity.isAnimated })
}
public var resultImage: UIImage? {
@@ -123,16 +124,16 @@
}
private let playerPromise = Promise<AVPlayer?>()
private var playerPlaybackState: (Double, Double, Bool, Bool) = (0.0, 0.0, false, false) {
private var playerPlaybackState: (Double, Double, Bool, Bool, Bool) = (0.0, 0.0, false, false, false) {
didSet {
self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
}
}
private let playerPlaybackStatePromise = Promise<(Double, Double, Bool, Bool)>((0.0, 0.0, false, false))
private let playerPlaybackStatePromise = Promise<(Double, Double, Bool, Bool, Bool)>((0.0, 0.0, false, false, false))
public var position: Signal<Double, NoError> {
return self.playerPlaybackStatePromise.get()
|> map { _, position, _, _ -> Double in
|> map { _, position, _, _, _ -> Double in
return position
}
}
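The playback state above is carried as an unlabeled five-element tuple. As the tuple is destructured later in this diff, the fields are (duration, position, isPlaying, hasAudio, isAudioPlayerOnly); the following named sketch is only an illustration of that reading (PlaybackState is hypothetical, not a type in this codebase):

struct PlaybackState {
    var duration: Double        // total duration of the observed item, in seconds
    var position: Double        // current playback position, in seconds
    var isPlaying: Bool         // true while the observed player's rate > 0.0
    var hasAudio: Bool          // the current item has at least one audio track
    var isAudioPlayerOnly: Bool // state is driven by audioPlayer alone (photo plus music)
}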
@@ -153,22 +154,44 @@ public final class MediaEditor {
public func playerState(framesCount: Int) -> Signal<MediaEditorPlayerState?, NoError> {
return self.playerPromise.get()
|> mapToSignal { [weak self] player in
if let self, let asset = player?.currentItem?.asset {
return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), self.videoFrames(asset: asset, count: framesCount))
|> map { values, durationAndPosition, framesAndUpdateTimestamp in
let (duration, position, isPlaying, hasAudio) = durationAndPosition
let (frames, framesUpdateTimestamp) = framesAndUpdateTimestamp
return MediaEditorPlayerState(
generationTimestamp: CACurrentMediaTime(),
duration: duration,
timeRange: values.videoTrimRange,
position: position,
isPlaying: isPlaying,
frames: frames,
framesCount: framesCount,
framesUpdateTimestamp: framesUpdateTimestamp,
hasAudio: hasAudio
)
if let self, player != nil {
if player === self.player, let asset = player?.currentItem?.asset {
return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), self.videoFrames(asset: asset, count: framesCount))
|> map { values, durationAndPosition, framesAndUpdateTimestamp in
let (duration, position, isPlaying, hasAudio, isAudioPlayerOnly) = durationAndPosition
let (frames, framesUpdateTimestamp) = framesAndUpdateTimestamp
return MediaEditorPlayerState(
generationTimestamp: CACurrentMediaTime(),
duration: duration,
timeRange: values.videoTrimRange,
position: position,
isPlaying: isPlaying,
frames: frames,
framesCount: framesCount,
framesUpdateTimestamp: framesUpdateTimestamp,
hasAudio: hasAudio,
isAudioPlayerOnly: isAudioPlayerOnly
)
}
} else if player === self.audioPlayer {
return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get())
|> map { values, durationAndPosition in
let (duration, position, isPlaying, _, _) = durationAndPosition
return MediaEditorPlayerState(
generationTimestamp: CACurrentMediaTime(),
duration: duration,
timeRange: values.audioTrackTrimRange,
position: position,
isPlaying: isPlaying,
frames: [],
framesCount: 0,
framesUpdateTimestamp: 0,
hasAudio: false,
isAudioPlayerOnly: true
)
}
} else {
return .single(nil)
}
} else {
return .single(nil)
@@ -287,6 +310,8 @@ public final class MediaEditor {
toolValues: [:],
audioTrack: nil,
audioTrackTrimRange: nil,
audioTrackStart: nil,
audioTrackVolume: nil,
audioTrackSamples: nil
)
}
@@ -304,10 +329,10 @@
}
if case let .asset(asset) = subject {
self.playerPlaybackState = (asset.duration, 0.0, false, false)
self.playerPlaybackState = (asset.duration, 0.0, false, false, false)
self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
} else if case let .video(_, _, _, _, _, duration) = subject {
self.playerPlaybackState = (duration, 0.0, false, true)
self.playerPlaybackState = (duration, 0.0, false, true, false)
self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
}
}
@@ -524,6 +549,13 @@ public final class MediaEditor {
// self.maybeGeneratePersonSegmentation(image)
}
if let audioTrack = self.values.audioTrack {
self.setAudioTrack(audioTrack)
self.setAudioTrackVolume(self.values.audioTrackVolume)
self.setAudioTrackTrimRange(self.values.audioTrackTrimRange, apply: true)
self.setAudioTrackStart(self.values.audioTrackStart)
}
if let player {
player.isMuted = self.values.videoIsMuted
if let trimRange = self.values.videoTrimRange {
@@ -535,40 +567,12 @@
self.initialSeekPosition = nil
player.seek(to: CMTime(seconds: initialSeekPosition, preferredTimescale: CMTimeScale(1000)), toleranceBefore: .zero, toleranceAfter: .zero)
}
self.timeObserver = player.addPeriodicTimeObserver(forInterval: CMTimeMake(value: 1, timescale: 10), queue: DispatchQueue.main) { [weak self] time in
guard let self, let duration = player.currentItem?.duration.seconds else {
return
}
var hasAudio = false
if let audioTracks = player.currentItem?.asset.tracks(withMediaType: .audio) {
hasAudio = !audioTracks.isEmpty
}
if time.seconds > 20000 {
} else {
self.playerPlaybackState = (duration, time.seconds, player.rate > 0.0, hasAudio)
}
}
self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { [weak self] notification in
if let self {
let start = self.values.videoTrimRange?.lowerBound ?? 0.0
self.player?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
self.additionalPlayer?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
self.audioPlayer?.seek(to: CMTime(seconds: start, preferredTimescale: CMTimeScale(1000)))
self.onPlaybackAction(.seek(start))
self.player?.play()
self.additionalPlayer?.play()
self.audioPlayer?.play()
Queue.mainQueue().justDispatch {
self.onPlaybackAction(.play)
}
}
})
self.setupTimeObservers()
Queue.mainQueue().justDispatch {
player.playImmediately(atRate: 1.0)
additionalPlayer?.playImmediately(atRate: 1.0)
self.audioPlayer?.playImmediately(atRate: 1.0)
self.onPlaybackAction(.play)
self.volumeFade = self.player?.fadeVolume(from: 0.0, to: 1.0, duration: 0.4)
}
@@ -577,6 +581,62 @@ public final class MediaEditor {
})
}
private func setupTimeObservers() {
var observedPlayer = self.player
var isAudioPlayerOnly = false
if observedPlayer == nil {
observedPlayer = self.audioPlayer
if observedPlayer != nil {
isAudioPlayerOnly = true
}
}
guard let observedPlayer else {
return
}
if self.timeObserver == nil {
self.timeObserver = observedPlayer.addPeriodicTimeObserver(forInterval: CMTimeMake(value: 1, timescale: 10), queue: DispatchQueue.main) { [weak self, weak observedPlayer] time in
guard let self, let observedPlayer, let duration = observedPlayer.currentItem?.duration.seconds else {
return
}
var hasAudio = false
if let audioTracks = observedPlayer.currentItem?.asset.tracks(withMediaType: .audio) {
hasAudio = !audioTracks.isEmpty
}
if time.seconds > 20000 {
} else {
self.playerPlaybackState = (duration, time.seconds, observedPlayer.rate > 0.0, hasAudio, isAudioPlayerOnly)
}
}
}
if self.didPlayToEndTimeObserver == nil {
self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: observedPlayer.currentItem, queue: nil, using: { [weak self] notification in
if let self {
let start = self.values.videoTrimRange?.lowerBound ?? 0.0
let targetTime = CMTime(seconds: start, preferredTimescale: CMTimeScale(1000))
self.player?.seek(to: targetTime)
self.additionalPlayer?.seek(to: targetTime)
self.audioPlayer?.seek(to: self.audioTime(for: targetTime))
self.onPlaybackAction(.seek(start))
self.player?.play()
self.additionalPlayer?.play()
self.audioPlayer?.play()
Queue.mainQueue().justDispatch {
self.onPlaybackAction(.play)
}
}
})
}
}
private func setupDidPlayToEndObserver() {
}
public func attachPreviewView(_ previewView: MediaEditorPreviewView) {
self.previewView?.renderer = nil
@@ -666,12 +726,12 @@ public final class MediaEditor {
private var targetTimePosition: (CMTime, Bool)?
private var updatingTimePosition = false
public func seek(_ position: Double, andPlay play: Bool) {
guard let player = self.player else {
if self.player == nil && self.audioPlayer == nil {
self.initialSeekPosition = position
return
}
if !play {
player.pause()
self.player?.pause()
self.additionalPlayer?.pause()
self.audioPlayer?.pause()
self.onPlaybackAction(.pause)
@@ -684,7 +744,7 @@
}
}
if play {
player.play()
self.player?.play()
self.additionalPlayer?.play()
self.audioPlayer?.play()
self.onPlaybackAction(.play)
@@ -705,12 +765,21 @@
completion()
}
})
self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.seek(to: self.audioTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero)
}
private func audioTime(for time: CMTime) -> CMTime {
let time = time.seconds
let offsettedTime = time - (self.values.videoTrimRange?.lowerBound ?? 0.0) + (self.values.audioTrackTrimRange?.lowerBound ?? 0.0) - (self.values.audioTrackStart ?? 0.0)
return CMTime(seconds: offsettedTime, preferredTimescale: CMTimeScale(1000.0))
}
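// Illustration only, not part of the commit, with hypothetical values:
// videoTrimRange = 2.0..<10.0, audioTrackTrimRange = 1.0..<9.0, audioTrackStart = 0.5
// audioTime(for:) then maps a main-timeline position of 5.0 s to
// 5.0 - 2.0 + 1.0 - 0.5 = 3.5 s in the audio item's own timeline.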
public var isPlaying: Bool {
return (self.player?.rate ?? 0.0) > 0.0
let effectivePlayer = self.player ?? self.audioPlayer
return (effectivePlayer?.rate ?? 0.0) > 0.0
}
public func togglePlayback() {
@@ -736,11 +805,22 @@
let cmVTime = CMTimeMakeWithSeconds(time, preferredTimescale: 1000000)
let futureTime = CMTimeAdd(cmHostTime, cmVTime)
let itemTime = self.player?.currentItem?.currentTime() ?? .invalid
self.player?.setRate(rate, time: itemTime, atHostTime: futureTime)
self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
self.audioPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
if self.player == nil, let audioPlayer = self.audioPlayer {
let itemTime = audioPlayer.currentItem?.currentTime() ?? .invalid
audioPlayer.setRate(rate, time: itemTime, atHostTime: futureTime)
} else {
let itemTime = self.player?.currentItem?.currentTime() ?? .invalid
let audioTime: CMTime
if itemTime == .invalid {
audioTime = .invalid
} else {
audioTime = self.audioTime(for: itemTime)
}
self.player?.setRate(rate, time: itemTime, atHostTime: futureTime)
self.additionalPlayer?.setRate(rate, time: itemTime, atHostTime: futureTime)
self.audioPlayer?.setRate(rate, time: audioTime, atHostTime: futureTime)
}
if rate > 0.0 {
self.onPlaybackAction(.play)
@@ -762,18 +842,33 @@
return
}
self.updatingTimePosition = true
self.player?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
if let self {
if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition {
self.updatingTimePosition = false
self.targetTimePosition = nil
} else {
self.updateVideoTimePosition()
if self.player == nil, let audioPlayer = self.audioPlayer {
audioPlayer.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
if let self {
if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition {
self.updatingTimePosition = false
self.targetTimePosition = nil
} else {
self.updateVideoTimePosition()
}
}
}
})
self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
})
} else {
self.player?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero, completionHandler: { [weak self] _ in
if let self {
if let (currentTargetPosition, _) = self.targetTimePosition, currentTargetPosition == targetPosition {
self.updatingTimePosition = false
self.targetTimePosition = nil
} else {
self.updateVideoTimePosition()
}
}
})
self.additionalPlayer?.seek(to: targetPosition, toleranceBefore: .zero, toleranceAfter: .zero)
self.audioPlayer?.seek(to: self.audioTime(for: targetPosition), toleranceBefore: .zero, toleranceAfter: .zero)
}
self.onPlaybackAction(.seek(targetPosition.seconds))
}
@@ -814,7 +909,7 @@ public final class MediaEditor {
public func setAudioTrack(_ audioTrack: MediaAudioTrack?) {
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedAudioTrack(audioTrack).withUpdatedAudioTrackSamples(nil).withUpdatedAudioTrackTrimRange(nil)
return values.withUpdatedAudioTrack(audioTrack).withUpdatedAudioTrackSamples(nil).withUpdatedAudioTrackTrimRange(nil).withUpdatedAudioTrackVolume(nil).withUpdatedAudioTrackStart(nil)
}
if let audioTrack {
@@ -824,9 +919,19 @@
player.automaticallyWaitsToMinimizeStalling = false
self.audioPlayer = player
self.maybeGenerateAudioSamples(asset: audioAsset)
self.setupTimeObservers()
if !self.sourceIsVideo {
self.playerPromise.set(.single(player))
}
} else if let audioPlayer = self.audioPlayer {
audioPlayer.pause()
self.audioPlayer = nil
if !self.sourceIsVideo {
self.playerPromise.set(.single(nil))
}
}
}
@@ -840,6 +945,20 @@
}
}
public func setAudioTrackStart(_ start: Double?) {
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedAudioTrackStart(start)
}
}
public func setAudioTrackVolume(_ volume: CGFloat?) {
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedAudioTrackVolume(volume)
}
self.audioPlayer?.volume = Float(volume ?? 1.0)
}
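// Illustration only, not part of the commit: setAudioTrackVolume(0.5) stores the
// value and halves the live audio player's volume; passing nil clears the stored
// value and falls back to full volume (1.0).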
private var previousUpdateTime: Double?
private var scheduledUpdate = false
private func updateRenderChain() {

View File

@@ -60,20 +60,24 @@ public struct MediaAudioTrack: Codable, Equatable {
case path
case artist
case title
case duration
}
public let path: String
public let artist: String?
public let title: String?
public let duration: Double
public init(
path: String,
artist: String?,
title: String?
title: String?,
duration: Double
) {
self.path = path
self.artist = artist
self.title = title
self.duration = duration
}
}
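Since duration is now a stored property, callers must supply it when constructing a track; a sketch with hypothetical values (the picker code in this commit passes audioAsset.duration.seconds):

let track = MediaAudioTrack(
    path: "/path/to/track.mp3", // hypothetical
    artist: "Some Artist",      // hypothetical
    title: "Some Title",        // hypothetical
    duration: 213.0             // seconds, e.g. audioAsset.duration.seconds
)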
@@ -220,6 +224,8 @@ public final class MediaEditorValues: Codable, Equatable {
case audioTrack
case audioTrackTrimRange
case audioTrackStart
case audioTrackVolume
}
public let originalDimensions: PixelDimensions
@@ -248,6 +254,8 @@ public final class MediaEditorValues: Codable, Equatable {
public let audioTrack: MediaAudioTrack?
public let audioTrackTrimRange: Range<Double>?
public let audioTrackStart: Double?
public let audioTrackVolume: CGFloat?
public let audioTrackSamples: MediaAudioTrackSamples?
init(
@@ -272,6 +280,8 @@
toolValues: [EditorToolKey: Any],
audioTrack: MediaAudioTrack?,
audioTrackTrimRange: Range<Double>?,
audioTrackStart: Double?,
audioTrackVolume: CGFloat?,
audioTrackSamples: MediaAudioTrackSamples?
) {
self.originalDimensions = originalDimensions
@@ -295,6 +305,8 @@
self.toolValues = toolValues
self.audioTrack = audioTrack
self.audioTrackTrimRange = audioTrackTrimRange
self.audioTrackStart = audioTrackStart
self.audioTrackVolume = audioTrackVolume
self.audioTrackSamples = audioTrackSamples
}
@@ -346,6 +358,8 @@
self.audioTrack = try container.decodeIfPresent(MediaAudioTrack.self, forKey: .audioTrack)
self.audioTrackTrimRange = try container.decodeIfPresent(Range<Double>.self, forKey: .audioTrackTrimRange)
self.audioTrackStart = try container.decodeIfPresent(Double.self, forKey: .audioTrackStart)
self.audioTrackVolume = try container.decodeIfPresent(CGFloat.self, forKey: .audioTrackVolume)
self.audioTrackSamples = nil
}
@@ -393,63 +407,73 @@ public final class MediaEditorValues: Codable, Equatable {
try container.encodeIfPresent(self.audioTrack, forKey: .audioTrack)
try container.encodeIfPresent(self.audioTrackTrimRange, forKey: .audioTrackTrimRange)
try container.encodeIfPresent(self.audioTrackStart, forKey: .audioTrackStart)
try container.encodeIfPresent(self.audioTrackVolume, forKey: .audioTrackVolume)
}
public func makeCopy() -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedGradientColors(gradientColors: [UIColor]) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedVideoIsMuted(_ videoIsMuted: Bool) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedVideoIsFullHd(_ videoIsFullHd: Bool) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedVideoIsMirrored(_ videoIsMirrored: Bool) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAdditionalVideo(path: String, positionChanges: [VideoPositionChange]) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: path, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: positionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAdditionalVideo(position: CGPoint, scale: CGFloat, rotation: CGFloat) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: position, additionalVideoScale: scale, additionalVideoRotation: rotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedVideoTrimRange(_ videoTrimRange: Range<Double>) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedDrawingAndEntities(drawing: UIImage?, entities: [CodableDrawingEntity]) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: drawing, entities: entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedToolValues(_ toolValues: [EditorToolKey: Any]) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAudioTrack(_ audioTrack: MediaAudioTrack?) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAudioTrackTrimRange(_ audioTrackTrimRange: Range<Double>?) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackSamples: self.audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAudioTrackStart(_ audioTrackStart: Double?) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAudioTrackVolume(_ audioTrackVolume: CGFloat?) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: audioTrackVolume, audioTrackSamples: self.audioTrackSamples)
}
func withUpdatedAudioTrackSamples(_ audioTrackSamples: MediaAudioTrackSamples?) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackSamples: audioTrackSamples)
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: videoTrimRange, videoIsMuted: self.videoIsMuted, videoIsFullHd: self.videoIsFullHd, videoIsMirrored: self.videoIsMirrored, additionalVideoPath: self.additionalVideoPath, additionalVideoPosition: self.additionalVideoPosition, additionalVideoScale: self.additionalVideoScale, additionalVideoRotation: self.additionalVideoRotation, additionalVideoPositionChanges: self.additionalVideoPositionChanges, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues, audioTrack: self.audioTrack, audioTrackTrimRange: self.audioTrackTrimRange, audioTrackStart: self.audioTrackStart, audioTrackVolume: self.audioTrackVolume, audioTrackSamples: audioTrackSamples)
}
public var resultDimensions: PixelDimensions {

View File

@@ -397,7 +397,7 @@ public final class MediaEditorVideoExport {
if let audioTrackRange = self.configuration.audioTimeRange {
musicRange = audioTrackRange
}
try? musicTrack.insertTimeRange(musicRange, of: musicAssetTrack, at: .zero)
try? musicTrack.insertTimeRange(musicRange, of: musicAssetTrack, at: CMTime(seconds: self.configuration.values.audioTrackStart ?? 0.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
inputAsset = mixComposition
}

View File

@@ -229,6 +229,7 @@ final class MediaEditorScreenComponent: Component {
}
var muteDidChange = false
var playbackDidChange = false
}
func makeState() -> State {
@@ -921,12 +922,18 @@
let previousAudioData = self.appliedAudioData
var audioData: VideoScrubberComponent.AudioData?
if let audioTrack = mediaEditor?.values.audioTrack {
let trimRange = mediaEditor?.values.audioTrackTrimRange
let offset = mediaEditor?.values.audioTrackStart
let audioSamples = mediaEditor?.values.audioTrackSamples
audioData = VideoScrubberComponent.AudioData(
artist: audioTrack.artist,
title: audioTrack.title,
samples: audioSamples?.samples,
peak: audioSamples?.peak ?? 0
peak: audioSamples?.peak ?? 0,
duration: audioTrack.duration,
start: trimRange?.lowerBound,
end: trimRange?.upperBound,
offset: offset ?? 0.0
)
}
self.appliedAudioData = audioData
@@ -1272,16 +1279,30 @@
if (audioData == nil) != (previousAudioData == nil) {
bottomControlsTransition = .easeInOut(duration: 0.25)
}
let minDuration: Double
let maxDuration: Double
if let mediaEditor, !mediaEditor.sourceIsVideo {
minDuration = 5.0
maxDuration = 15.0
} else {
minDuration = 1.0
maxDuration = storyMaxVideoDuration
}
let isAudioOnly = mediaEditor?.sourceIsVideo == false
let scrubberSize = self.scrubber.update(
transition: transition,
component: AnyComponent(VideoScrubberComponent(
context: component.context,
generationTimestamp: playerState.generationTimestamp,
audioOnly: isAudioOnly,
duration: playerState.duration,
startPosition: playerState.timeRange?.lowerBound ?? 0.0,
endPosition: playerState.timeRange?.upperBound ?? min(playerState.duration, storyMaxVideoDuration),
position: playerState.position,
maxDuration: storyMaxVideoDuration,
minDuration: minDuration,
maxDuration: maxDuration,
isPlaying: playerState.isPlaying,
frames: playerState.frames,
framesUpdateTimestamp: playerState.framesUpdateTimestamp,
@@ -1304,8 +1325,8 @@
audioTrimUpdated: { [weak mediaEditor] start, end, _, done in
if let mediaEditor {
mediaEditor.setAudioTrackTrimRange(start..<end, apply: done)
if done {
if done && isAudioOnly {
mediaEditor.seek(start, andPlay: true)
}
}
},
@@ -1331,7 +1352,7 @@
}
}
bottomControlsTransition.setFrame(view: scrubberView, frame: scrubberFrame)
if !self.animatingButtons {
if !self.animatingButtons && !isAudioOnly {
transition.setAlpha(view: scrubberView, alpha: component.isDisplayingTool || component.isDismissing || component.isInteractingWithEntities || isEditingCaption ? 0.0 : 1.0)
} else if animateIn {
scrubberView.layer.animatePosition(from: CGPoint(x: 0.0, y: 44.0), to: .zero, duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
@@ -1340,7 +1361,16 @@
}
}
} else {
if let scrubberView = self.scrubber.view, scrubberView.superview != nil {
scrubberView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: 44.0), duration: 0.3, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
scrubberView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { _ in
scrubberView.removeFromSuperview()
Queue.mainQueue().after(0.1) {
scrubberView.layer.removeAllAnimations()
}
})
scrubberView.layer.animateScale(from: 1.0, to: 0.6, duration: 0.2)
}
}
let displayTopButtons = !(self.inputPanelExternalState.isEditing || isEditingTextEntity || component.isDisplayingTool)
@@ -1485,28 +1515,30 @@
}
topButtonOffsetX += 50.0
} else if let muteButtonView = self.muteButton.view, muteButtonView.superview != nil {
muteButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak muteButtonView] _ in
muteButtonView?.removeFromSuperview()
})
muteButtonView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.2, removeOnCompletion: false)
} else {
if let muteButtonView = self.muteButton.view, muteButtonView.superview != nil {
muteButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak muteButtonView] _ in
muteButtonView?.removeFromSuperview()
})
muteButtonView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.2, removeOnCompletion: false)
}
}
if let playerState = state.playerState {
let playbackContentComponent: AnyComponentWithIdentity<Empty>
if component.hasAppeared && !"".isEmpty {
if component.hasAppeared {
playbackContentComponent = AnyComponentWithIdentity(
id: "animatedIcon",
component: AnyComponent(
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: "anim_storymute",
mode: state.muteDidChange ? .animating(loop: false) : .still(position: .begin),
range: "".isEmpty ? (0.0, 0.5) : (0.5, 1.0)
name: "anim_storyplayback",
mode: state.playbackDidChange ? .animating(loop: false) : .still(position: .end), // : .still(position: .begin),
range: playerState.isPlaying ? (0.5, 1.0) : (0.0, 0.5)
),
colors: ["__allcolors__": .white],
size: CGSize(width: 30.0, height: 30.0)
).tagged(muteButtonTag)
).tagged(playbackButtonTag)
)
)
} else {
@@ -1525,11 +1557,10 @@
transition: transition,
component: AnyComponent(CameraButton(
content: playbackContentComponent,
action: { [weak mediaEditor] in
action: { [weak mediaEditor, weak state] in
if let mediaEditor {
// state?.muteDidChange = true
state?.playbackDidChange = true
mediaEditor.togglePlayback()
// state?.updated()
}
}
)),
@@ -1556,6 +1587,13 @@
transition.setScale(view: playbackButtonView, scale: displayTopButtons ? 1.0 : 0.01)
transition.setAlpha(view: playbackButtonView, alpha: displayTopButtons && !component.isDismissing && !component.isInteractingWithEntities ? 1.0 : 0.0)
}
} else {
if let playbackButtonView = self.playbackButton.view, playbackButtonView.superview != nil {
playbackButtonView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.2, removeOnCompletion: false, completion: { [weak playbackButtonView] _ in
playbackButtonView?.removeFromSuperview()
})
playbackButtonView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.2, removeOnCompletion: false)
}
}
let textCancelButtonSize = self.textCancelButton.update(
@@ -2989,7 +3027,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
func presentAudioPicker() {
self.controller?.present(legacyICloudFilePicker(theme: self.presentationData.theme, mode: .import, documentTypes: ["public.mp3"], forceDarkTheme: true, completion: { [weak self] urls in
guard let self, !urls.isEmpty, let url = urls.first else {
guard let self, let mediaEditor = self.mediaEditor, !urls.isEmpty, let url = urls.first else {
return
}
@@ -3005,7 +3043,13 @@
title = data.stringValue
}
}
self.mediaEditor?.setAudioTrack(MediaAudioTrack(path: path, artist: artist, title: title))
let duration = audioAsset.duration.seconds
mediaEditor.setAudioTrack(MediaAudioTrack(path: path, artist: artist, title: title, duration: duration))
if !mediaEditor.sourceIsVideo {
mediaEditor.setAudioTrackTrimRange(0 ..< min(15, duration), apply: true)
}
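// Note, not part of the commit: this 15 s cap matches the photo-story scrubber
// bounds computed earlier in this file (minDuration = 5.0, maxDuration = 15.0
// when sourceIsVideo is false).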
self.requestUpdate(transition: .easeInOut(duration: 0.2))
Queue.mainQueue().after(0.1) {
@@ -3016,8 +3060,10 @@
func presentAudioOptions(sourceView: UIView) {
let items: [ContextMenuItem] = [
.custom(VolumeSliderContextItem(minValue: 0.0, value: 0.75, valueChanged: { _, _ in
.custom(VolumeSliderContextItem(minValue: 0.0, value: 0.75, valueChanged: { [weak self] value, _ in
if let self {
self.mediaEditor?.setAudioTrackVolume(value)
}
}), false),
.action(
ContextMenuActionItem(

View File

@@ -16,7 +16,6 @@ private let scrubberHeight: CGFloat = 39.0
private let collapsedScrubberHeight: CGFloat = 26.0
private let borderHeight: CGFloat = 1.0 + UIScreenPixel
private let frameWidth: CGFloat = 24.0
private let minumumDuration: CGFloat = 1.0
private class VideoFrameLayer: SimpleShapeLayer {
private let stripeLayer = SimpleShapeLayer()
@@ -48,14 +47,20 @@ final class VideoScrubberComponent: Component {
let title: String?
let samples: Data?
let peak: Int32
let duration: Double
let start: Double?
let end: Double?
let offset: Double?
}
let context: AccountContext
let generationTimestamp: Double
let audioOnly: Bool
let duration: Double
let startPosition: Double
let endPosition: Double
let position: Double
let minDuration: Double
let maxDuration: Double
let isPlaying: Bool
let frames: [UIImage]
@@ -69,10 +74,12 @@
init(
context: AccountContext,
generationTimestamp: Double,
audioOnly: Bool,
duration: Double,
startPosition: Double,
endPosition: Double,
position: Double,
minDuration: Double,
maxDuration: Double,
isPlaying: Bool,
frames: [UIImage],
@@ -85,10 +92,12 @@
) {
self.context = context
self.generationTimestamp = generationTimestamp
self.audioOnly = audioOnly
self.duration = duration
self.startPosition = startPosition
self.endPosition = endPosition
self.position = position
self.minDuration = minDuration
self.maxDuration = maxDuration
self.isPlaying = isPlaying
self.frames = frames
@@ -107,6 +116,9 @@
if lhs.generationTimestamp != rhs.generationTimestamp {
return false
}
if lhs.audioOnly != rhs.audioOnly {
return false
}
if lhs.duration != rhs.duration {
return false
}
@@ -119,6 +131,9 @@
if lhs.position != rhs.position {
return false
}
if lhs.minDuration != rhs.minDuration {
return false
}
if lhs.maxDuration != rhs.maxDuration {
return false
}
@@ -140,15 +155,16 @@
private let audioBackgroundView: BlurredBackgroundView
private let audioVibrancyView: UIVisualEffectView
private let audioVibrancyContainer: UIView
private let audioTrimView = TrimView(frame: .zero)
private let audioButton = UIButton()
private let audioContentContainerView: UIView
private let audioContentMaskView: UIImageView
private let audioIconView: UIImageView
private let audioTitle = ComponentView<Empty>()
private let audioWaveform = ComponentView<Empty>()
private let videoTrimView = TrimView(frame: .zero)
private let trimView = TrimView(frame: .zero)
private let cursorView = HandleView()
private let transparentFramesContainer = UIView()
@ -187,6 +203,12 @@ final class VideoScrubberComponent: Component {
self.audioVibrancyContainer = UIView()
self.audioVibrancyView.contentView.addSubview(self.audioVibrancyContainer)
self.audioContentContainerView = UIView()
self.audioContentContainerView.clipsToBounds = true
self.audioContentMaskView = UIImageView()
self.audioContentContainerView.mask = self.audioContentMaskView
self.audioIconView = UIImageView(image: UIImage(bundleImageName: "Media Editor/SmallAudio"))
self.audioButton.isUserInteractionEnabled = false
@ -194,6 +216,8 @@ final class VideoScrubberComponent: Component {
super.init(frame: frame)
self.clipsToBounds = false
self.disablesInteractiveModalDismiss = true
self.disablesInteractiveKeyboardGestureRecognizer = true
@ -222,14 +246,12 @@ final class VideoScrubberComponent: Component {
self.audioClippingView.addSubview(self.audioContainerView)
self.audioContainerView.addSubview(self.audioBackgroundView)
self.audioBackgroundView.addSubview(self.audioVibrancyView)
self.addSubview(self.audioTrimView)
self.addSubview(self.audioIconView)
self.addSubview(self.transparentFramesContainer)
self.addSubview(self.opaqueFramesContainer)
self.addSubview(self.videoTrimView)
self.addSubview(self.trimView)
self.addSubview(self.audioButton)
self.addSubview(self.videoButton)
@ -242,13 +264,17 @@ final class VideoScrubberComponent: Component {
}
self.displayLink?.isPaused = true
self.videoTrimView.updated = { [weak self] transition in
self.trimView.updated = { [weak self] transition in
self?.state?.updated(transition: transition)
}
self.videoTrimView.trimUpdated = { [weak self] startValue, endValue, updatedEnd, done in
if let component = self?.component {
component.videoTrimUpdated(startValue, endValue, updatedEnd, done)
self.trimView.trimUpdated = { [weak self] startValue, endValue, updatedEnd, done in
if let self, let component = self.component {
if self.isAudioSelected || component.audioOnly {
component.audioTrimUpdated(startValue, endValue, updatedEnd, done)
} else {
component.videoTrimUpdated(startValue, endValue, updatedEnd, done)
}
}
}
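The separate audioTrimView and videoTrimView are folded into a single trimView here; which track a drag edits is decided by selection state. A sketch of the routing rule the closure above applies (names local to this example):

enum TrimTarget { case audio, video }

// An audio-only editor always trims the audio track; otherwise the user's
// current selection decides which trimUpdated callback fires.
func trimTarget(isAudioSelected: Bool, audioOnly: Bool) -> TrimTarget {
    return (isAudioSelected || audioOnly) ? .audio : .video
}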
@ -258,6 +284,19 @@ final class VideoScrubberComponent: Component {
let longPressGesture = UILongPressGestureRecognizer(target: self, action: #selector(self.longPressed(_:)))
longPressGesture.delegate = self
self.addGestureRecognizer(longPressGesture)
let maskImage = generateImage(CGSize(width: 100.0, height: 50.0), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
var locations: [CGFloat] = [0.0, 0.75, 0.95, 1.0]
let colors: [CGColor] = [UIColor.white.cgColor, UIColor.white.cgColor, UIColor.white.withAlphaComponent(0.0).cgColor, UIColor.white.withAlphaComponent(0.0).cgColor]
let colorSpace = CGColorSpaceCreateDeviceRGB()
let gradient = CGGradient(colorsSpace: colorSpace, colors: colors as CFArray, locations: &locations)!
context.drawLinearGradient(gradient, start: CGPoint(x: 0.0, y: 0.0), end: CGPoint(x: size.width, y: 0.0), options: CGGradientDrawingOptions())
})?.stretchableImage(withLeftCapWidth: 40, topCapHeight: 0)
self.audioContentMaskView.image = maskImage
}
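The stretchable mask above stays solid across its 40 pt left cap and fades out between 75% and 95% of its width, so long titles dissolve at the trailing edge. The same fade could be expressed with a CAGradientLayer; an illustrative alternative, not what the commit uses:

import UIKit

func makeTrailingFadeMask(frame: CGRect) -> CAGradientLayer {
    let mask = CAGradientLayer()
    mask.frame = frame
    mask.colors = [
        UIColor.white.cgColor,
        UIColor.white.cgColor,
        UIColor.white.withAlphaComponent(0.0).cgColor
    ]
    mask.locations = [0.0, 0.75, 0.95]          // solid, then fade to clear
    mask.startPoint = CGPoint(x: 0.0, y: 0.5)   // horizontal gradient
    mask.endPoint = CGPoint(x: 1.0, y: 0.5)
    return mask
}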
required init?(coder: NSCoder) {
@ -280,7 +319,7 @@ final class VideoScrubberComponent: Component {
guard let component = self.component, component.audioData != nil, case .began = gestureRecognizer.state else {
return
}
component.audioLongPressed?(self.audioContainerView)
component.audioLongPressed?(self.audioClippingView)
}
@objc private func audioButtonPressed() {
@ -323,8 +362,8 @@ final class VideoScrubberComponent: Component {
let cursorPositionFraction = duration > 0.0 ? position / duration : 0.0
let cursorPosition = floorToScreenPixels(handleWidth + handleWidth / 2.0 - cursorPadding + (size.width - handleWidth * 3.0 + cursorPadding * 2.0) * cursorPositionFraction)
var cursorFrame = CGRect(origin: CGPoint(x: cursorPosition - handleWidth / 2.0, y: -5.0 - UIScreenPixel), size: CGSize(width: handleWidth, height: height))
cursorFrame.origin.x = max(self.videoTrimView.leftHandleView.frame.maxX - cursorPadding, cursorFrame.origin.x)
cursorFrame.origin.x = min(self.videoTrimView.rightHandleView.frame.minX + cursorPadding, cursorFrame.origin.x)
cursorFrame.origin.x = max(self.trimView.leftHandleView.frame.maxX - cursorPadding, cursorFrame.origin.x)
cursorFrame.origin.x = min(self.trimView.rightHandleView.frame.minX + cursorPadding, cursorFrame.origin.x)
return cursorFrame
}
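The cursor math above maps the playback fraction into the track area between the handles, then clamps so the playhead never escapes the trimmed range. The clamp in isolation (names local to this example):

import CoreGraphics

// Applying max before min matches the two assignments above: the cursor is
// pushed right of the left handle first, then pulled left of the right one.
func clampedCursorX(_ x: CGFloat, leftHandleMaxX: CGFloat, rightHandleMinX: CGFloat, cursorPadding: CGFloat) -> CGFloat {
    return min(max(x, leftHandleMaxX - cursorPadding), rightHandleMinX + cursorPadding)
}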
@ -360,7 +399,9 @@ final class VideoScrubberComponent: Component {
if let previousComponent {
if previousComponent.audioData == nil, component.audioData != nil {
self.positionAnimation = nil
self.isAudioSelected = true
if !component.audioOnly {
self.isAudioSelected = true
}
animateAudioAppearance = true
} else if previousComponent.audioData != nil, component.audioData == nil {
self.positionAnimation = nil
@ -384,40 +425,80 @@ final class VideoScrubberComponent: Component {
videoTransition = .easeInOut(duration: 0.25)
}
let totalWidth = scrubberSize.width - handleWidth
var audioTotalWidth = scrubberSize.width
var originY: CGFloat = 0
var totalHeight = scrubberSize.height
var audioAlpha: CGFloat = 0.0
if let _ = component.audioData {
totalHeight += collapsedScrubberHeight + scrubberSpacing
audioAlpha = 1.0
originY += self.isAudioSelected ? scrubberHeight : collapsedScrubberHeight
originY += scrubberSpacing
if self.isAudioSelected {
if let audioData = component.audioData {
if component.audioOnly {
audioScrubberHeight = scrubberHeight
videoScrubberHeight = collapsedScrubberHeight
audioAlpha = 1.0
} else {
totalHeight += collapsedScrubberHeight + scrubberSpacing
audioAlpha = 1.0
originY += self.isAudioSelected ? scrubberHeight : collapsedScrubberHeight
originY += scrubberSpacing
if self.isAudioSelected {
audioScrubberHeight = scrubberHeight
videoScrubberHeight = collapsedScrubberHeight
}
if component.duration > 0.0 {
let audioFraction = audioData.duration / component.duration
audioTotalWidth = ceil(totalWidth * audioFraction)
}
}
} else {
self.isAudioSelected = false
}
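In the non-audio-only branch the waveform container is sized to the whole track relative to the video, so only the overlapping slice shows through the clipping view. A worked sketch of that proportion (values assumed):

import CoreGraphics

func waveformContainerWidth(totalWidth: CGFloat, audioDuration: Double, videoDuration: Double) -> CGFloat {
    guard videoDuration > 0 else { return totalWidth }
    return ceil(totalWidth * CGFloat(audioDuration / videoDuration))
}

// e.g. a 90 s track over a 30 s video with totalWidth 304 pt:
// waveformContainerWidth(totalWidth: 304, audioDuration: 90, videoDuration: 30) == 912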
audioTransition.setAlpha(view: self.audioClippingView, alpha: audioAlpha)
self.audioButton.isUserInteractionEnabled = !self.isAudioSelected
self.videoButton.isUserInteractionEnabled = self.isAudioSelected
self.audioButton.isUserInteractionEnabled = component.audioData != nil && !component.audioOnly && !self.isAudioSelected
self.videoButton.isUserInteractionEnabled = component.audioData != nil && !component.audioOnly && self.isAudioSelected
let audioClippingFrame = CGRect(origin: .zero, size: CGSize(width: availableSize.width, height: audioScrubberHeight))
var audioClipOrigin: CGFloat = 0.0
var audioClipWidth = availableSize.width + 18.0
if !self.isAudioSelected {
if let audioData = component.audioData, !component.audioOnly {
let duration: Double
if let end = audioData.end, let start = audioData.start, component.duration > 0.0 {
duration = end - start
} else {
duration = component.endPosition - component.startPosition
}
if component.duration > 0.0 {
let fraction = duration / component.duration
audioClipWidth = availableSize.width * fraction
audioClipOrigin = (component.startPosition + (audioData.offset ?? 0.0)) / component.duration * availableSize.width
}
} else {
audioClipWidth = availableSize.width
}
}
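When the audio row is collapsed, the strip above is narrowed to just the part of the clip that plays under the video trim, shifted by the clip's offset. A worked sketch of that mapping (numbers assumed):

import CoreGraphics

func collapsedAudioStrip(availableWidth: CGFloat, clipDuration: Double, startPosition: Double, offset: Double, videoDuration: Double) -> (origin: CGFloat, width: CGFloat) {
    guard videoDuration > 0 else { return (0.0, availableWidth) }
    let width = availableWidth * CGFloat(clipDuration / videoDuration)
    let origin = availableWidth * CGFloat((startPosition + offset) / videoDuration)
    return (origin, width)
}

// e.g. a 15 s clip offset 10 s into a 60 s video (video trim starting at 0)
// on a 320 pt scrubber: width = 320 * 15 / 60 = 80 pt, origin ≈ 53.3 pt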
let audioClippingFrame = CGRect(origin: CGPoint(x: audioClipOrigin, y: 0.0), size: CGSize(width: audioClipWidth, height: audioScrubberHeight))
audioTransition.setFrame(view: self.audioButton, frame: audioClippingFrame)
audioTransition.setFrame(view: self.audioClippingView, frame: audioClippingFrame)
let audioContainerFrame = CGRect(origin: .zero, size: audioClippingFrame.size)
audioTransition.setCornerRadius(layer: self.audioClippingView.layer, cornerRadius: self.isAudioSelected ? 0.0 : 9.0)
let audioContainerFrame = CGRect(origin: .zero, size: CGSize(width: audioTotalWidth, height: audioScrubberHeight))
audioTransition.setFrame(view: self.audioContainerView, frame: audioContainerFrame)
audioTransition.setFrame(view: self.audioBackgroundView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
self.audioBackgroundView.update(size: audioClippingFrame.size, transition: audioTransition.containedViewLayoutTransition)
audioTransition.setFrame(view: self.audioVibrancyView, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
audioTransition.setFrame(view: self.audioVibrancyContainer, frame: CGRect(origin: .zero, size: audioClippingFrame.size))
audioTransition.setFrame(view: self.audioBackgroundView, frame: CGRect(origin: .zero, size: audioContainerFrame.size))
self.audioBackgroundView.update(size: audioContainerFrame.size, transition: audioTransition.containedViewLayoutTransition)
audioTransition.setFrame(view: self.audioVibrancyView, frame: CGRect(origin: .zero, size: audioContainerFrame.size))
audioTransition.setFrame(view: self.audioVibrancyContainer, frame: CGRect(origin: .zero, size: audioContainerFrame.size))
if let audioData = component.audioData {
let containerFrame = CGRect(origin: .zero, size: CGSize(width: audioClipWidth, height: audioContainerFrame.height))
audioTransition.setFrame(view: self.audioContentContainerView, frame: containerFrame)
audioTransition.setFrame(view: self.audioContentMaskView, frame: CGRect(origin: .zero, size: containerFrame.size))
if let audioData = component.audioData, !component.audioOnly {
var components: [String] = []
if let artist = audioData.artist {
components.append(artist)
@ -446,7 +527,7 @@ final class VideoScrubberComponent: Component {
audioTransition.setAlpha(view: self.audioIconView, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioIconFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
let audioIconFrame = CGRect(origin: CGPoint(x: max(8.0, floorToScreenPixels((audioClipWidth - totalWidth) / 2.0)), y: floorToScreenPixels((audioScrubberHeight - iconSize.height) / 2.0)), size: iconSize)
audioTransition.setBounds(view: self.audioIconView, bounds: CGRect(origin: .zero, size: audioIconFrame.size))
audioTransition.setPosition(view: self.audioIconView, position: audioIconFrame.center)
@ -454,12 +535,13 @@ final class VideoScrubberComponent: Component {
if view.superview == nil {
view.alpha = 0.0
view.isUserInteractionEnabled = false
self.audioContainerView.addSubview(self.audioIconView)
self.audioContainerView.addSubview(view)
self.audioContainerView.addSubview(self.audioContentContainerView)
self.audioContentContainerView.addSubview(self.audioIconView)
self.audioContentContainerView.addSubview(view)
}
audioTransition.setAlpha(view: view, alpha: self.isAudioSelected ? 0.0 : 1.0)
let audioTitleFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((availableSize.width - totalWidth) / 2.0) + iconSize.width + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
let audioTitleFrame = CGRect(origin: CGPoint(x: audioIconFrame.maxX + spacing, y: floorToScreenPixels((audioScrubberHeight - audioTitleSize.height) / 2.0)), size: audioTitleSize)
audioTransition.setBounds(view: view, bounds: CGRect(origin: .zero, size: audioTitleFrame.size))
audioTransition.setPosition(view: view, position: audioTitleFrame.center)
}
@ -487,7 +569,7 @@ final class VideoScrubberComponent: Component {
)
),
environment: {},
containerSize: CGSize(width: audioContainerFrame.width * 5.0, height: scrubberHeight)
containerSize: CGSize(width: audioContainerFrame.width, height: scrubberHeight)
)
if let view = self.audioWaveform.view {
if view.superview == nil {
@ -496,26 +578,11 @@ final class VideoScrubberComponent: Component {
view.layer.animateScaleY(from: 0.01, to: 1.0, duration: 0.2)
view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
}
audioTransition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 0.0, y: self.isAudioSelected ? 0.0 : 6.0), size: audioWaveformSize))
audioTransition.setFrame(view: view, frame: CGRect(origin: CGPoint(x: 0.0, y: self.isAudioSelected || component.audioOnly ? 0.0 : 6.0), size: audioWaveformSize))
}
}
let bounds = CGRect(origin: .zero, size: scrubberSize)
let totalWidth = scrubberSize.width - handleWidth
audioTransition.setAlpha(view: self.audioTrimView, alpha: self.isAudioSelected ? 1.0 : 0.0)
audioTransition.setFrame(view: self.audioTrimView, frame: bounds)
let _ = self.audioTrimView.update(
totalWidth: totalWidth,
scrubberSize: scrubberSize,
duration: component.duration,
startPosition: component.startPosition,
endPosition: component.duration,
position: component.position,
maxDuration: component.maxDuration,
transition: transition
)
if component.framesUpdateTimestamp != previousFramesUpdateTimestamp {
for i in 0 ..< component.frames.count {
@ -546,17 +613,36 @@ final class VideoScrubberComponent: Component {
}
}
let (leftHandleFrame, rightHandleFrame) = self.videoTrimView.update(
var startPosition = component.startPosition
var endPosition = component.endPosition
if self.isAudioSelected, let audioData = component.audioData {
if let start = audioData.start {
startPosition = start
}
if let end = audioData.end {
endPosition = end
}
}
let (leftHandleFrame, rightHandleFrame) = self.trimView.update(
totalWidth: totalWidth,
scrubberSize: scrubberSize,
duration: component.duration,
startPosition: component.startPosition,
endPosition: component.endPosition,
startPosition: startPosition,
endPosition: endPosition,
position: component.position,
minDuration: component.minDuration,
maxDuration: component.maxDuration,
transition: transition
)
var containerLeftEdge = leftHandleFrame.maxX
var containerRightEdge = rightHandleFrame.minX
if self.isAudioSelected && component.duration > 0.0 {
containerLeftEdge = floorToScreenPixels(component.startPosition / component.duration * scrubberSize.width)
containerRightEdge = floorToScreenPixels(component.endPosition / component.duration * scrubberSize.width)
}
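While the audio track is selected the shared trim handles display the audio range, so the bright (opaque) frame strip keeps tracking the video trim by duration fraction instead of by handle frame. The choice in isolation (names local to this example; floor stands in for floorToScreenPixels):

import CoreGraphics

func frameStripEdges(isAudioSelected: Bool, videoStart: Double, videoEnd: Double, duration: Double, scrubberWidth: CGFloat, leftHandleMaxX: CGFloat, rightHandleMinX: CGFloat) -> (left: CGFloat, right: CGFloat) {
    if isAudioSelected && duration > 0.0 {
        return (floor(CGFloat(videoStart / duration) * scrubberWidth),
                floor(CGFloat(videoEnd / duration) * scrubberWidth))
    }
    return (leftHandleMaxX, rightHandleMinX)
}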
if self.isPanningPositionHandle || !component.isPlaying {
self.positionAnimation = nil
self.displayLink?.isPaused = true
@ -576,12 +662,13 @@ final class VideoScrubberComponent: Component {
}
// transition.setAlpha(view: self.cursorView, alpha: self.isPanningTrimHandle ? 0.0 : 1.0)
videoTransition.setAlpha(view: self.videoTrimView, alpha: self.isAudioSelected ? 0.0 : 1.0)
videoTransition.setFrame(view: self.videoTrimView, frame: bounds.offsetBy(dx: 0.0, dy: originY))
videoTransition.setFrame(view: self.trimView, frame: bounds.offsetBy(dx: 0.0, dy: self.isAudioSelected ? 0.0 : originY))
let handleInset: CGFloat = 7.0
videoTransition.setFrame(view: self.transparentFramesContainer, frame: CGRect(origin: CGPoint(x: 0.0, y: originY), size: CGSize(width: scrubberSize.width, height: videoScrubberHeight)))
videoTransition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: originY), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
videoTransition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: leftHandleFrame.maxX - handleInset, y: 0.0), size: CGSize(width: rightHandleFrame.minX - leftHandleFrame.maxX + handleInset * 2.0, height: videoScrubberHeight)))
videoTransition.setFrame(view: self.opaqueFramesContainer, frame: CGRect(origin: CGPoint(x: containerLeftEdge - handleInset, y: originY), size: CGSize(width: containerRightEdge - containerLeftEdge + handleInset * 2.0, height: videoScrubberHeight)))
videoTransition.setBounds(view: self.opaqueFramesContainer, bounds: CGRect(origin: CGPoint(x: containerLeftEdge - handleInset, y: 0.0), size: CGSize(width: containerRightEdge - containerLeftEdge + handleInset * 2.0, height: videoScrubberHeight)))
videoTransition.setCornerRadius(layer: self.opaqueFramesContainer.layer, cornerRadius: self.isAudioSelected ? 9.0 : 0.0)
videoTransition.setFrame(view: self.videoButton, frame: bounds.offsetBy(dx: 0.0, dy: originY))
@ -673,7 +760,6 @@ private class TrimView: UIView {
self.rightHandleView.tintColor = .white
self.rightHandleView.hitTestSlop = UIEdgeInsets(top: -8.0, left: -9.0, bottom: -8.0, right: -9.0)
self.borderView.image = generateImage(CGSize(width: 1.0, height: scrubberHeight), rotatedContext: { size, context in
context.clear(CGRect(origin: .zero, size: size))
context.setFillColor(UIColor.white.cgColor)
@ -744,8 +830,8 @@ private class TrimView: UIView {
let fraction = (location.x - start) / length
var startValue = max(0.0, params.duration * fraction)
if startValue > params.endPosition - minumumDuration {
startValue = max(0.0, params.endPosition - minumumDuration)
if startValue > params.endPosition - params.minDuration {
startValue = max(0.0, params.endPosition - params.minDuration)
}
var endValue = params.endPosition
if endValue - startValue > params.maxDuration {
@ -782,8 +868,8 @@ private class TrimView: UIView {
let fraction = (location.x - start) / length
var endValue = min(params.duration, params.duration * fraction)
if endValue < params.startPosition + minumumDuration {
endValue = min(params.duration, params.startPosition + minumumDuration)
if endValue < params.startPosition + params.minDuration {
endValue = min(params.duration, params.startPosition + params.minDuration)
}
var startValue = params.startPosition
if endValue - startValue > params.maxDuration {
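Both pan handlers now clamp against the configurable params.minDuration instead of the old file-local minumumDuration constant, and drag the opposite handle along when maxDuration would be exceeded. A condensed sketch of the rule for either handle, mirroring (not replacing) the handlers above:

func clampTrim(start: Double, end: Double, duration: Double, minDuration: Double, maxDuration: Double, draggingEnd: Bool) -> (start: Double, end: Double) {
    var start = start
    var end = end
    if draggingEnd {
        // Keep at least minDuration of room, never past the track end.
        end = min(duration, max(end, start + minDuration))
        if end - start > maxDuration { start = end - maxDuration }
    } else {
        // Keep at least minDuration of room, never before the track start.
        start = max(0.0, min(start, end - minDuration))
        if end - start > maxDuration { end = start + maxDuration }
    }
    return (start, end)
}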
@ -814,6 +900,7 @@ private class TrimView: UIView {
startPosition: Double,
endPosition: Double,
position: Double,
minDuration: Double,
maxDuration: Double
)?
@ -824,11 +911,12 @@ private class TrimView: UIView {
startPosition: Double,
endPosition: Double,
position: Double,
minDuration: Double,
maxDuration: Double,
transition: Transition
) -> (leftHandleFrame: CGRect, rightHandleFrame: CGRect)
{
self.params = (duration, startPosition, endPosition, position, maxDuration)
self.params = (duration, startPosition, endPosition, position, minDuration, maxDuration)
let trimColor = self.isPanningTrimHandle ? UIColor(rgb: 0xf8d74a) : .white
transition.setTintColor(view: self.leftHandleView, color: trimColor)