Merge commit '2749d3a2feb6518a0bcfaa3ab3a52059729de8f3'

Author: Ali
Date: 2023-05-19 18:10:23 +04:00
Commit: 200523f50e
6 changed files with 55 additions and 43 deletions


@@ -190,13 +190,18 @@ open class ManagedAnimationNode: ASDisplayNode {
self.addSubnode(self.imageNode)
var previousTimestamp = CACurrentMediaTime()
displayLinkUpdate = { [weak self] in
if let strongSelf = self {
let currentTimestamp = CACurrentMediaTime()
strongSelf.delta = currentTimestamp - previousTimestamp
let delta: Double
if let previousTimestamp = strongSelf.previousTimestamp {
delta = currentTimestamp - previousTimestamp
} else {
delta = 1.0 / 60.0
}
strongSelf.delta = delta
strongSelf.updateAnimation()
previousTimestamp = currentTimestamp
strongSelf.previousTimestamp = currentTimestamp
}
}
}
@@ -216,6 +221,7 @@ open class ManagedAnimationNode: ASDisplayNode {
}
self.didTryAdvancingState = false
self.previousTimestamp = CACurrentMediaTime()
self.displayLink.isPaused = false
}

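The two hunks above replace a captured local `previousTimestamp` with an optional property, so the frame delta can be reset whenever the display link is resumed. A minimal sketch of the resulting update closure, assuming a `previousTimestamp: Double?` property on the node (its declaration sits outside the shown hunks):

displayLinkUpdate = { [weak self] in
    guard let strongSelf = self else {
        return
    }
    let currentTimestamp = CACurrentMediaTime()
    let delta: Double
    if let previousTimestamp = strongSelf.previousTimestamp {
        delta = currentTimestamp - previousTimestamp
    } else {
        // No timestamp recorded yet: fall back to a nominal 60 fps step.
        delta = 1.0 / 60.0
    }
    strongSelf.delta = delta
    strongSelf.updateAnimation()
    strongSelf.previousTimestamp = currentTimestamp
}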

@@ -85,8 +85,6 @@ private final class EnhanceLightnessPass: DefaultRenderPass {
renderCommandEncoder.endEncoding()
//saveTexture(self.cachedTexture!, name: "lightness", device: device)
return self.cachedTexture!
}
}


@@ -3,6 +3,7 @@ import UIKit
import Display
import TelegramCore
import AVFoundation
import VideoToolbox
public enum EditorToolKey: Int32 {
case enhance
@@ -173,6 +174,10 @@ public final class MediaEditorValues: Codable {
try container.encode(values, forKey: .toolValues)
}
public func makeCopy() -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: self.cropOffset, cropSize: self.cropSize, cropScale: self.cropScale, cropRotation: self.cropRotation, cropMirroring: self.cropMirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
}
func withUpdatedCrop(offset: CGPoint, scale: CGFloat, rotation: CGFloat, mirroring: Bool) -> MediaEditorValues {
return MediaEditorValues(originalDimensions: self.originalDimensions, cropOffset: offset, cropSize: self.cropSize, cropScale: scale, cropRotation: rotation, cropMirroring: mirroring, gradientColors: self.gradientColors, videoTrimRange: self.videoTrimRange, videoIsMuted: self.videoIsMuted, drawing: self.drawing, entities: self.entities, toolValues: self.toolValues)
}
@@ -913,14 +918,18 @@ extension CodableToolValue: Codable {
public func recommendedVideoExportConfiguration(values: MediaEditorValues) -> MediaEditorVideoExport.Configuration {
let compressionProperties: [String: Any] = [
AVVideoAverageBitRateKey: 2000000
AVVideoAverageBitRateKey: 2000000,
//AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
]
let videoSettings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
//AVVideoCodecKey: AVVideoCodecType.hevc,
AVVideoCompressionPropertiesKey: compressionProperties,
AVVideoWidthKey: 1080,
AVVideoHeightKey: 1920
AVVideoWidthKey: 720,
AVVideoHeightKey: 1280
]
let audioSettings: [String: Any] = [

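The hunk above replaces the hardcoded 1080x1920 H.264 export preset with a 720x1280 one and spells out the profile and entropy settings (the HEVC variants stay commented out). A minimal sketch of the resulting dictionaries, assuming they are handed to `MediaEditorVideoExport.Configuration` unchanged; the audio settings are cut off by the hunk and left out here:

import AVFoundation

let compressionProperties: [String: Any] = [
    AVVideoAverageBitRateKey: 2000000,
    AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
    AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
]
let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecType.h264,
    AVVideoCompressionPropertiesKey: compressionProperties,
    // Story export now targets 720x1280 instead of 1080x1920.
    AVVideoWidthKey: 720,
    AVVideoHeightKey: 1280
]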

@@ -14,7 +14,7 @@ enum ExportWriterStatus {
protocol MediaEditorVideoExportWriter {
func setup(configuration: MediaEditorVideoExport.Configuration, outputPath: String)
func setupVideoInput(configuration: MediaEditorVideoExport.Configuration, inputTransform: CGAffineTransform?)
func setupVideoInput(configuration: MediaEditorVideoExport.Configuration)
func setupAudioInput(configuration: MediaEditorVideoExport.Configuration)
func startWriting() -> Bool
@@ -55,28 +55,17 @@ public final class MediaEditorVideoAVAssetWriter: MediaEditorVideoExportWriter {
writer.shouldOptimizeForNetworkUse = configuration.shouldOptimizeForNetworkUse
}
func setupVideoInput(configuration: MediaEditorVideoExport.Configuration, inputTransform: CGAffineTransform?) {
func setupVideoInput(configuration: MediaEditorVideoExport.Configuration) {
guard let writer = self.writer else {
return
}
let videoInput: AVAssetWriterInput
if let transform = inputTransform {
let size = CGSize(width: configuration.videoSettings[AVVideoWidthKey] as! Int, height: configuration.videoSettings[AVVideoHeightKey] as! Int)
let transformedSize = size.applying(transform.inverted())
var videoSettings = configuration.videoSettings
videoSettings[AVVideoWidthKey] = abs(transformedSize.width)
videoSettings[AVVideoHeightKey] = abs(transformedSize.height)
videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
videoInput.transform = transform
} else {
videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: configuration.videoSettings)
}
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: configuration.videoSettings)
videoInput.expectsMediaDataInRealTime = false
let sourcePixelBufferAttributes = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
kCVPixelBufferWidthKey as String: 1080,
kCVPixelBufferHeightKey as String: 1920
kCVPixelBufferWidthKey as String: UInt32(configuration.dimensions.width),
kCVPixelBufferHeightKey as String: UInt32(configuration.dimensions.height)
]
self.adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: sourcePixelBufferAttributes)
@@ -284,11 +273,7 @@ public final class MediaEditorVideoExport {
} else {
self.duration.set(CMTime(seconds: 3, preferredTimescale: 1))
}
if self.configuration.values.requiresComposing {
self.composer = MediaEditorComposer(account: self.account, values: self.configuration.values, dimensions: self.configuration.dimensions)
}
switch self.subject {
case let .video(asset):
self.setupWithAsset(asset)
@@ -297,6 +282,13 @@ public final class MediaEditorVideoExport {
}
}
private func setupComposer() {
guard self.composer == nil else {
return
}
self.composer = MediaEditorComposer(account: self.account, values: self.configuration.values, dimensions: self.configuration.dimensions)
}
private func setupWithAsset(_ asset: AVAsset) {
self.reader = try? AVAssetReader(asset: asset)
guard let reader = self.reader else {
@@ -315,16 +307,14 @@ public final class MediaEditorVideoExport {
let videoTracks = asset.tracks(withMediaType: .video)
if (videoTracks.count > 0) {
let videoOutput: AVAssetReaderOutput
let inputTransform: CGAffineTransform?
if self.composer == nil {
videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]])
inputTransform = videoTracks.first!.preferredTransform
let outputSettings: [String : Any]
if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]]
} else {
videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA])
inputTransform = nil
outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
self.setupComposer()
}
let videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: outputSettings)
videoOutput.alwaysCopiesSampleData = false
if reader.canAdd(videoOutput) {
reader.add(videoOutput)
@@ -334,7 +324,7 @@ public final class MediaEditorVideoExport {
}
self.videoOutput = videoOutput
writer.setupVideoInput(configuration: self.configuration, inputTransform: inputTransform)
writer.setupVideoInput(configuration: self.configuration)
} else {
self.videoOutput = nil
}
@@ -363,12 +353,14 @@ public final class MediaEditorVideoExport {
}
private func setupWithImage(_ image: UIImage) {
self.setupComposer()
self.writer = MediaEditorVideoAVAssetWriter()
guard let writer = self.writer else {
return
}
writer.setup(configuration: self.configuration, outputPath: self.outputPath)
writer.setupVideoInput(configuration: self.configuration, inputTransform: nil)
writer.setupVideoInput(configuration: self.configuration)
}
private func finish() {

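The hunks above simplify the video export path: `setupVideoInput` no longer takes an input transform, the writer's pixel buffer pool follows `configuration.dimensions` instead of a hardcoded 1080x1920, and the Metal composer is only created when it is actually needed. A condensed sketch of the reader-side choice in `setupWithAsset`, assuming `videoTracks` is non-empty at this point:

let outputSettings: [String: Any]
if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
    // An unrotated, unedited track can be read in its native 4:2:0 format
    // and passed straight through to the writer.
    outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]]
} else {
    // Rotated or edited video is decoded to BGRA and routed through the
    // composer, which is created on demand by setupComposer().
    outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    self.setupComposer()
}
let videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: outputSettings)
videoOutput.alwaysCopiesSampleData = false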

@@ -1534,7 +1534,7 @@ public final class MediaEditorScreen: ViewController {
duration = 5.0
}
}
self.completion(.video(video: videoResult, coverImage: nil, values: mediaEditor.values, duration: duration, dimensions: PixelDimensions(width: 1080, height: 1920), caption: caption), { [weak self] in
self.completion(.video(video: videoResult, coverImage: nil, values: mediaEditor.values, duration: duration, dimensions: PixelDimensions(width: 720, height: 1280), caption: caption), { [weak self] in
self?.node.animateOut(finished: true, completion: { [weak self] in
self?.dismiss()
})


@@ -351,7 +351,7 @@ private final class MediaToolsScreenComponent: Component {
guard let controller = environment.controller() as? MediaToolsScreen else {
return
}
controller.requestDismiss(animated: true)
controller.requestDismiss(reset: true, animated: true)
}
)),
environment: {},
@@ -379,7 +379,7 @@ private final class MediaToolsScreenComponent: Component {
guard let controller = environment.controller() as? MediaToolsScreen else {
return
}
controller.requestDismiss(animated: true)
controller.requestDismiss(reset: false, animated: true)
}
)),
environment: {},
@@ -954,10 +954,13 @@ public final class MediaToolsScreen: ViewController {
fileprivate let mediaEditor: MediaEditor
public var dismissed: () -> Void = {}
private var initialValues: MediaEditorValues
public init(context: AccountContext, mediaEditor: MediaEditor) {
self.context = context
self.mediaEditor = mediaEditor
self.initialValues = mediaEditor.values.makeCopy()
super.init(navigationBarPresentationData: nil)
self.navigationPresentation = .flatModal
@@ -977,7 +980,11 @@ public final class MediaToolsScreen: ViewController {
super.displayNodeDidLoad()
}
func requestDismiss(animated: Bool) {
func requestDismiss(reset: Bool, animated: Bool) {
if reset {
self.mediaEditor.values = self.initialValues
}
self.dismissed()
self.node.animateOutToEditor(completion: {