Merge branch 'master' of gitlab.com:peter-iakovlev/telegram-ios
@@ -2,6 +2,7 @@ import Foundation
 import AVFoundation
 import Metal
 import MetalKit
+import Display

 final class ImageTextureSource: TextureSource {
     weak var output: TextureConsumer?
@@ -10,12 +11,21 @@ final class ImageTextureSource: TextureSource {
     var texture: MTLTexture?

     init(image: UIImage, renderTarget: RenderTarget) {
-        guard let device = renderTarget.mtlDevice, let cgImage = image.cgImage else {
+        guard let device = renderTarget.mtlDevice, var cgImage = image.cgImage else {
             return
         }
         let textureLoader = MTKTextureLoader(device: device)
         self.textureLoader = textureLoader
+
+        if let bitsPerPixel = image.cgImage?.bitsPerPixel, bitsPerPixel > 32 {
+            let updatedImage = generateImage(image.size, contextGenerator: { size, context in
+                context.setFillColor(UIColor.black.cgColor)
+                context.fill(CGRect(origin: .zero, size: size))
+                context.draw(cgImage, in: CGRect(origin: .zero, size: size))
+            }, opaque: false)
+            cgImage = updatedImage?.cgImage ?? cgImage
+        }
+
         self.texture = try? textureLoader.newTexture(cgImage: cgImage, options: [.SRGB : false])
     }
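The new branch above flattens deep-color images before texture upload; generateImage comes from the Display module. A rough UIKit-only sketch of the same flattening step, for illustration (the helper name is hypothetical, not part of the diff):

import UIKit

// Rough UIKit-only sketch of the flattening step above (the real code uses the
// Display module's generateImage): redraw a >32 bpp CGImage over black so the
// result is a plain 8-bit-per-channel image that MTKTextureLoader can ingest.
// `flattenedImage(from:size:)` is an illustrative helper.
func flattenedImage(from cgImage: CGImage, size: CGSize) -> CGImage? {
    let renderer = UIGraphicsImageRenderer(size: size)
    let rect = CGRect(origin: .zero, size: size)
    let image = renderer.image { context in
        UIColor.black.setFill()
        context.fill(rect)
        UIImage(cgImage: cgImage).draw(in: rect)
    }
    return image.cgImage
}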
@@ -17,6 +17,7 @@ public struct MediaEditorPlayerState {
     public let frames: [UIImage]
     public let framesCount: Int
     public let framesUpdateTimestamp: Double
+    public let hasAudio: Bool
 }

 public final class MediaEditor {
@@ -91,20 +92,20 @@ public final class MediaEditor {
     }

     private let playerPromise = Promise<AVPlayer?>()
-    private var playerPosition: (Double, Double) = (0.0, 0.0) {
+    private var playerPlaybackState: (Double, Double, Bool) = (0.0, 0.0, false) {
         didSet {
-            self.playerPositionPromise.set(.single(self.playerPosition))
+            self.playerPlaybackStatePromise.set(.single(self.playerPlaybackState))
         }
     }
-    private let playerPositionPromise = Promise<(Double, Double)>((0.0, 0.0))
+    private let playerPlaybackStatePromise = Promise<(Double, Double, Bool)>((0.0, 0.0, false))

     public func playerState(framesCount: Int) -> Signal<MediaEditorPlayerState?, NoError> {
         return self.playerPromise.get()
         |> mapToSignal { [weak self] player in
             if let self, let asset = player?.currentItem?.asset {
-                return combineLatest(self.valuesPromise.get(), self.playerPositionPromise.get(), self.videoFrames(asset: asset, count: framesCount))
+                return combineLatest(self.valuesPromise.get(), self.playerPlaybackStatePromise.get(), self.videoFrames(asset: asset, count: framesCount))
                 |> map { values, durationAndPosition, framesAndUpdateTimestamp in
-                    let (duration, position) = durationAndPosition
+                    let (duration, position, hasAudio) = durationAndPosition
                     let (frames, framesUpdateTimestamp) = framesAndUpdateTimestamp
                     return MediaEditorPlayerState(
                         duration: duration,
@@ -112,7 +113,8 @@ public final class MediaEditor {
                         position: position,
                         frames: frames,
                         framesCount: framesCount,
-                        framesUpdateTimestamp: framesUpdateTimestamp
+                        framesUpdateTimestamp: framesUpdateTimestamp,
+                        hasAudio: hasAudio
                     )
                 }
             } else {
@@ -334,7 +336,9 @@ public final class MediaEditor {
                 PHImageManager.default().cancelImageRequest(requestId)
             }
         } else {
-            let requestId = PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 1920.0, height: 1920.0), contentMode: .aspectFit, options: nil, resultHandler: { image, info in
+            let options = PHImageRequestOptions()
+            options.deliveryMode = .highQualityFormat
+            let requestId = PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 1920.0, height: 1920.0), contentMode: .aspectFit, options: options, resultHandler: { image, info in
                 if let image {
                     var degraded = false
                     if let info {
@@ -376,7 +380,11 @@ public final class MediaEditor {
             guard let self, let duration = player.currentItem?.duration.seconds else {
                 return
             }
-            self.playerPosition = (duration, time.seconds)
+            var hasAudio = false
+            if let audioTracks = player.currentItem?.asset.tracks(withMediaType: .audio) {
+                hasAudio = !audioTracks.isEmpty
+            }
+            self.playerPlaybackState = (duration, time.seconds, hasAudio)
         }
         self.didPlayToEndTimeObserver = NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: nil, using: { [weak self] notification in
             if let self {
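The periodic time observer above now also reports whether the asset carries audio. A minimal standalone sketch of that check, assuming an arbitrary AVAsset (the free function is illustrative, not part of the diff):

import AVFoundation

// Standalone sketch of the hasAudio check added above: an asset is treated as
// having audio when it exposes at least one audio track.
func assetHasAudio(_ asset: AVAsset) -> Bool {
    return !asset.tracks(withMediaType: .audio).isEmpty
}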
@@ -434,7 +442,7 @@ public final class MediaEditor {
     }

     public func setVideoTrimStart(_ trimStart: Double) {
-        let trimEnd = self.values.videoTrimRange?.upperBound ?? self.playerPosition.0
+        let trimEnd = self.values.videoTrimRange?.upperBound ?? self.playerPlaybackState.0
         let trimRange = trimStart ..< trimEnd
         self.values = self.values.withUpdatedVideoTrimRange(trimRange)
     }
@@ -209,7 +209,7 @@ private func makeEditorImageFrameComposition(inputImage: CIImage, gradientImage:
         resultImage = drawingImage.composited(over: resultImage)
     }

-    let frameRate: Float = 60.0
+    let frameRate: Float = 30.0

     let entitiesCount = Atomic<Int>(value: 1)
     let entitiesImages = Atomic<[(CIImage, Int)]>(value: [])
@@ -920,7 +920,7 @@ extension CodableToolValue: Codable {
     }
 }

-public func recommendedVideoExportConfiguration(values: MediaEditorValues) -> MediaEditorVideoExport.Configuration {
+public func recommendedVideoExportConfiguration(values: MediaEditorValues, frameRate: Float) -> MediaEditorVideoExport.Configuration {
     let compressionProperties: [String: Any] = [
         AVVideoAverageBitRateKey: 2000000,
         AVVideoProfileLevelKey: kVTProfileLevel_HEVC_Main_AutoLevel
@@ -933,7 +933,8 @@ public func recommendedVideoExportConfiguration(values: MediaEditorValues) -> Me
         AVVideoCodecKey: AVVideoCodecType.hevc,
         AVVideoCompressionPropertiesKey: compressionProperties,
         AVVideoWidthKey: 720,
-        AVVideoHeightKey: 1280
+        AVVideoHeightKey: 1280,
+        MediaEditorVideoExport.Configuration.FrameRateKey: frameRate
     ]

     let audioSettings: [String: Any] = [
@@ -14,7 +14,7 @@ enum ExportWriterStatus {

 protocol MediaEditorVideoExportWriter {
     func setup(configuration: MediaEditorVideoExport.Configuration, outputPath: String)
-    func setupVideoInput(configuration: MediaEditorVideoExport.Configuration)
+    func setupVideoInput(configuration: MediaEditorVideoExport.Configuration, sourceFrameRate: Float)
     func setupAudioInput(configuration: MediaEditorVideoExport.Configuration)

     func startWriting() -> Bool
@@ -55,13 +55,16 @@ public final class MediaEditorVideoAVAssetWriter: MediaEditorVideoExportWriter {
         writer.shouldOptimizeForNetworkUse = configuration.shouldOptimizeForNetworkUse
     }

-    func setupVideoInput(configuration: MediaEditorVideoExport.Configuration) {
+    func setupVideoInput(configuration: MediaEditorVideoExport.Configuration, sourceFrameRate: Float) {
         guard let writer = self.writer else {
             return
         }
-        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: configuration.videoSettings)
-        videoInput.expectsMediaDataInRealTime = false
+
+        var videoSettings = configuration.videoSettings
+        videoSettings[AVVideoExpectedSourceFrameRateKey] = sourceFrameRate
+
+        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
+        videoInput.expectsMediaDataInRealTime = false
         let sourcePixelBufferAttributes = [
             kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
             kCVPixelBufferWidthKey as String: UInt32(configuration.dimensions.width),
@@ -172,12 +175,18 @@ public final class MediaEditorVideoExport {
     }

     public struct Configuration {
+        public static let FrameRateKey = "Telegram__FrameRate"
+
         public var shouldOptimizeForNetworkUse: Bool = true
         public var videoSettings: [String: Any]
         public var audioSettings: [String: Any]
         public var values: MediaEditorValues

-        public init(videoSettings: [String: Any], audioSettings: [String: Any], values: MediaEditorValues) {
+        public init(
+            videoSettings: [String: Any],
+            audioSettings: [String: Any],
+            values: MediaEditorValues
+        ) {
             self.videoSettings = videoSettings
             self.audioSettings = audioSettings
             self.values = values
@@ -202,6 +211,14 @@ public final class MediaEditorVideoExport {
                 return CGSize(width: 1920.0, height: 1080.0)
             }
         }
+
+        var frameRate: Float {
+            if let frameRate = self.videoSettings[Configuration.FrameRateKey] as? Float {
+                return frameRate
+            } else {
+                return 30.0
+            }
+        }
     }

     public enum Status {
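The computed frameRate above reads back the value that recommendedVideoExportConfiguration now stores under FrameRateKey. A minimal sketch of that round trip, using a bare dictionary instead of the full Configuration type:

// Sketch of the FrameRateKey round trip shown above: the caller stores the
// desired frame rate under the custom key inside videoSettings, and the export
// configuration reads it back, falling back to 30 fps when the key is absent.
let frameRateKey = "Telegram__FrameRate"

var videoSettings: [String: Any] = [:]
videoSettings[frameRateKey] = Float(60.0)

let frameRate = (videoSettings[frameRateKey] as? Float) ?? 30.0
// frameRate == 60.0; without the key it would fall back to 30.0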
@@ -275,7 +292,7 @@ public final class MediaEditorVideoExport {
                 }
             }
         } else {
-            self.duration.set(CMTime(seconds: 3, preferredTimescale: 1))
+            self.duration.set(CMTime(seconds: 5, preferredTimescale: 1))
         }

         switch self.subject {
@@ -312,6 +329,7 @@ public final class MediaEditorVideoExport {
         let videoTracks = asset.tracks(withMediaType: .video)
         if (videoTracks.count > 0) {
             let outputSettings: [String : Any]
+            var sourceFrameRate: Float = 0.0
             if let videoTrack = videoTracks.first, videoTrack.preferredTransform.isIdentity && !self.configuration.values.requiresComposing {
                 outputSettings = [kCVPixelBufferPixelFormatTypeKey as String: [kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]]
             } else {
@@ -328,7 +346,18 @@ public final class MediaEditorVideoExport {
             }
             self.videoOutput = videoOutput

-            writer.setupVideoInput(configuration: self.configuration)
+            if let videoTrack = videoTracks.first {
+                if videoTrack.nominalFrameRate > 0.0 {
+                    sourceFrameRate = videoTrack.nominalFrameRate
+                } else if videoTrack.minFrameDuration.seconds > 0.0 {
+                    sourceFrameRate = Float(1.0 / videoTrack.minFrameDuration.seconds)
+                } else {
+                    sourceFrameRate = 30.0
+                }
+            } else {
+                sourceFrameRate = 30.0
+            }
+            writer.setupVideoInput(configuration: self.configuration, sourceFrameRate: sourceFrameRate)
         } else {
             self.videoOutput = nil
         }
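The fallback chain above can be read in isolation; a minimal sketch, assuming an optional AVAssetTrack (the free function is illustrative, not part of the diff):

import AVFoundation

// Sketch of the source frame rate fallback used above: prefer the track's
// nominal frame rate, otherwise derive one from minFrameDuration, otherwise
// assume 30 fps.
func sourceFrameRate(for videoTrack: AVAssetTrack?) -> Float {
    guard let videoTrack else {
        return 30.0
    }
    if videoTrack.nominalFrameRate > 0.0 {
        return videoTrack.nominalFrameRate
    } else if videoTrack.minFrameDuration.seconds > 0.0 {
        return Float(1.0 / videoTrack.minFrameDuration.seconds)
    } else {
        return 30.0
    }
}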
@@ -364,7 +393,7 @@ public final class MediaEditorVideoExport {
             return
         }
         writer.setup(configuration: self.configuration, outputPath: self.outputPath)
-        writer.setupVideoInput(configuration: self.configuration)
+        writer.setupVideoInput(configuration: self.configuration, sourceFrameRate: 30.0)
     }

     private func finish() {
@@ -439,8 +468,8 @@ public final class MediaEditorVideoExport {
         }

         let duration: Double = 3.0
-        let frameRate: Double = 60.0
-        var position: CMTime = CMTime(value: 0, timescale: Int32(frameRate))
+        let frameRate: Double = Double(self.configuration.frameRate)
+        var position: CMTime = CMTime(value: 0, timescale: Int32(self.configuration.frameRate))

         var appendFailed = false
         while writer.isReadyForMoreVideoData {
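The still-image export path above now steps its timeline at the configured frame rate instead of a hard-coded 60 fps. A minimal sketch of that stepping, assuming a 30 fps configuration and the 3-second duration shown in the hunk:

import CoreMedia

// Sketch of advancing the export timeline at the configured frame rate: each
// appended frame moves the position forward by 1/frameRate seconds until the
// still-image duration is covered. Values here are illustrative.
let frameRate: Double = 30.0
let duration: Double = 3.0
var position = CMTime(value: 0, timescale: Int32(frameRate))

while position.seconds < duration {
    // ... append the rendered frame at `position` ...
    position = CMTimeAdd(position, CMTime(value: 1, timescale: Int32(frameRate)))
}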