Mirror of https://github.com/Swiftgram/Telegram-iOS.git
Commit 57fde69bf5: Merge commit '6baa5e1c0db413fe9454acc6529c7ced8bb91624' into beta
@@ -76,13 +76,18 @@ func pixelBufferToMTLTexture(pixelBuffer: CVPixelBuffer, textureCache: CVMetalTe
     return nil
 }
 
-func getTextureImage(device: MTLDevice, texture: MTLTexture) -> UIImage? {
+func getTextureImage(device: MTLDevice, texture: MTLTexture, mirror: Bool = false) -> UIImage? {
     let colorSpace = CGColorSpaceCreateDeviceRGB()
     let context = CIContext(mtlDevice: device, options: [:])
     guard var ciImage = CIImage(mtlTexture: texture, options: [.colorSpace: colorSpace]) else {
         return nil
     }
-    let transform = CGAffineTransform(1.0, 0.0, 0.0, -1.0, 0.0, ciImage.extent.height)
+    let transform: CGAffineTransform
+    if mirror {
+        transform = CGAffineTransform(-1.0, 0.0, 0.0, -1.0, ciImage.extent.width, ciImage.extent.height)
+    } else {
+        transform = CGAffineTransform(1.0, 0.0, 0.0, -1.0, 0.0, ciImage.extent.height)
+    }
     ciImage = ciImage.transformed(by: transform)
     guard let cgImage = context.createCGImage(ciImage, from: CGRect(origin: .zero, size: CGSize(width: ciImage.extent.width, height: ciImage.extent.height))) else {
         return nil
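Note: the mirrored branch above composes a horizontal flip with the vertical flip that converts Metal's top-left texture origin to Core Image's bottom-left origin. A minimal standalone sketch of the same transform logic (the flipTransform helper is illustrative, not part of the diff):

import CoreGraphics

// Illustrative helper: builds the transform used above.
// mirror == false: vertical flip only (Metal -> Core Image orientation).
// mirror == true: vertical flip plus horizontal mirror, i.e. a point
// reflection about the image center.
func flipTransform(extent: CGRect, mirror: Bool) -> CGAffineTransform {
    if mirror {
        return CGAffineTransform(a: -1.0, b: 0.0, c: 0.0, d: -1.0, tx: extent.width, ty: extent.height)
    } else {
        return CGAffineTransform(a: 1.0, b: 0.0, c: 0.0, d: -1.0, tx: 0.0, ty: extent.height)
    }
}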
@@ -112,6 +112,10 @@ public final class MediaEditor {
     public var resultImage: UIImage? {
         return self.renderer.finalRenderedImage()
     }
+
+    public func getResultImage(mirror: Bool) -> UIImage? {
+        return self.renderer.finalRenderedImage(mirror: mirror)
+    }
 
     private let playerPromise = Promise<AVPlayer?>()
     private var playerPlaybackState: (Double, Double, Bool, Bool) = (0.0, 0.0, false, false) {
@@ -311,20 +315,9 @@ public final class MediaEditor {
     }
 
     public func replaceSource(_ image: UIImage, additionalImage: UIImage?, time: CMTime) {
-        func fixImageOrientation(_ image: UIImage) -> UIImage {
-            UIGraphicsBeginImageContext(image.size)
-            image.draw(at: .zero)
-            let newImage = UIGraphicsGetImageFromCurrentImageContext()
-            UIGraphicsEndImageContext()
-            return newImage ?? image
-        }
-        let image = fixImageOrientation(image)
-
         guard let renderTarget = self.previewView, let device = renderTarget.mtlDevice, let texture = loadTexture(image: image, device: device) else {
            return
        }
-
-        let additionalImage = additionalImage.flatMap { fixImageOrientation($0) }
         let additionalTexture = additionalImage.flatMap { loadTexture(image: $0, device: device) }
         self.renderer.consumeTexture(texture, additionalTexture: additionalTexture, time: time, render: true)
     }
@@ -204,6 +204,8 @@ final class MediaEditorRenderer: TextureConsumer {
 
         var texture: MTLTexture
         if let currentAdditionalTexture = self.currentAdditionalTexture, let currentTexture = self.currentTexture {
+            self.videoFinishPass.mainTextureRotation = .rotate0Degrees
+            self.videoFinishPass.additionalTextureRotation = .rotate0DegreesMirrored
             if let result = self.videoFinishPass.process(input: currentTexture, secondInput: currentAdditionalTexture, timestamp: self.currentTime, device: device, commandBuffer: commandBuffer) {
                 texture = result
             } else {
@@ -309,6 +311,8 @@ final class MediaEditorRenderer: TextureConsumer {
     }
 
     func consumeTexture(_ texture: MTLTexture, additionalTexture: MTLTexture?, time: CMTime, render: Bool) {
+        self.displayEnabled = false
+
         if render {
             self.willRenderFrame()
         }
@@ -349,9 +353,9 @@ final class MediaEditorRenderer: TextureConsumer {
         self.renderTarget?.redraw()
     }
 
-    func finalRenderedImage() -> UIImage? {
+    func finalRenderedImage(mirror: Bool = false) -> UIImage? {
         if let finalTexture = self.finalTexture, let device = self.renderTarget?.mtlDevice {
-            return getTextureImage(device: device, texture: finalTexture)
+            return getTextureImage(device: device, texture: finalTexture, mirror: mirror)
         } else {
             return nil
         }
@@ -11,6 +11,7 @@ struct VertexData {
 
 enum TextureRotation: Int {
     case rotate0Degrees
+    case rotate0DegreesMirrored
     case rotate90Degrees
     case rotate180Degrees
     case rotate270Degrees
@@ -29,6 +30,11 @@ func verticesDataForRotation(_ rotation: TextureRotation, rect: CGRect = CGRect(
         topRight = simd_float2(1.0, 1.0)
         bottomLeft = simd_float2(0.0, 0.0)
         bottomRight = simd_float2(1.0, 0.0)
+    case .rotate0DegreesMirrored:
+        topLeft = simd_float2(1.0, 1.0)
+        topRight = simd_float2(0.0, 1.0)
+        bottomLeft = simd_float2(1.0, 0.0)
+        bottomRight = simd_float2(0.0, 0.0)
     case .rotate180Degrees:
         topLeft = simd_float2(1.0, 0.0)
         topRight = simd_float2(0.0, 0.0)
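Note: texture rotations here are expressed purely through per-vertex texture coordinates, so the new .rotate0DegreesMirrored case samples the same quad with u reversed (u -> 1 - u) and v unchanged, letting the mirrored variant be drawn without an extra pass. A minimal sketch of that rule (the helper name is illustrative):

import simd

// Illustrative helper: horizontal mirroring of a texture coordinate.
// Applying it to the .rotate0Degrees corners yields the
// .rotate0DegreesMirrored corners added above.
func mirroredU(_ uv: simd_float2) -> simd_float2 {
    return simd_float2(1.0 - uv.x, uv.y)
}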
@@ -319,6 +319,11 @@ private func verticesData(
         topRight = simd_float2(1.0, 1.0)
         bottomLeft = simd_float2(0.0, 0.0)
         bottomRight = simd_float2(1.0, 0.0)
+    case .rotate0DegreesMirrored:
+        topLeft = simd_float2(1.0, 1.0)
+        topRight = simd_float2(0.0, 1.0)
+        bottomLeft = simd_float2(1.0, 0.0)
+        bottomRight = simd_float2(0.0, 0.0)
     case .rotate180Degrees:
         topLeft = simd_float2(1.0, 0.0)
         topRight = simd_float2(0.0, 0.0)
@@ -463,17 +468,6 @@ final class VideoInputScalePass: RenderPass {
         }
     }
 
-    func setupMainVerticesBuffer(device: MTLDevice, rotation: TextureRotation = .rotate0Degrees) {
-        if self.mainVerticesBuffer == nil || rotation != self.mainTextureRotation {
-            self.mainTextureRotation = rotation
-            let vertices = verticesDataForRotation(rotation)
-            self.mainVerticesBuffer = device.makeBuffer(
-                bytes: vertices,
-                length: MemoryLayout<VertexData>.stride * vertices.count,
-                options: [])
-        }
-    }
-
     func encodeVideo(
         using encoder: MTLRenderCommandEncoder,
         containerSize: CGSize,
@@ -516,15 +510,6 @@ final class VideoInputScalePass: RenderPass {
         encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
     }
 
-    func setupAdditionalVerticesBuffer(device: MTLDevice, rotation: TextureRotation = .rotate0Degrees) {
-        self.additionalTextureRotation = rotation
-        let vertices = verticesDataForRotation(rotation, rect: CGRect(x: -0.5, y: -0.5, width: 0.5, height: 0.5), z: 0.5)
-        self.additionalVerticesBuffer = device.makeBuffer(
-            bytes: vertices,
-            length: MemoryLayout<VertexData>.stride * vertices.count,
-            options: [])
-    }
-
     func update(values: MediaEditorValues) {
         if let position = values.additionalVideoPosition, let scale = values.additionalVideoScale, let rotation = values.additionalVideoRotation {
             self.additionalPosition = VideoInputScalePass.VideoPosition(position: position, size: CGSize(width: 1080.0 / 4.0, height: 1440.0 / 4.0), scale: scale, rotation: rotation)
@@ -2381,9 +2381,9 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
                backgroundImage = additionalTransitionImage
                foregroundImage = mainTransitionImage
            }
-           if let combinedTransitionImage = generateImage(backgroundImage.size, scale: 1.0, rotatedContext: { size, context in
+           if let combinedTransitionImage = generateImage(CGSize(width: 1080, height: 1920), scale: 1.0, rotatedContext: { size, context in
                UIGraphicsPushContext(context)
-               backgroundImage.draw(in: CGRect(origin: .zero, size: size))
+               backgroundImage.draw(in: CGRect(origin: CGPoint(x: (size.width - backgroundImage.size.width) / 2.0, y: (size.height - backgroundImage.size.height) / 2.0), size: backgroundImage.size))
 
                let ellipsePosition = pipPosition.getPosition(storyDimensions)
                let ellipseSize = CGSize(width: 439.0, height: 439.0)
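Note: the combined transition image is now rendered on a fixed 1080x1920 canvas (presumably the story dimensions) with the background frame drawn centered at its native size rather than at the canvas origin. The same centering arithmetic, sketched with UIKit instead of the project's generateImage helper (the function name is illustrative):

import UIKit

// Illustrative only: center an image on a fixed 1080x1920 canvas,
// mirroring the origin math in the draw call above.
func centeredOnCanvas(_ image: UIImage) -> UIImage {
    let canvasSize = CGSize(width: 1080.0, height: 1920.0)
    let renderer = UIGraphicsImageRenderer(size: canvasSize)
    return renderer.image { _ in
        let origin = CGPoint(
            x: (canvasSize.width - image.size.width) / 2.0,
            y: (canvasSize.height - image.size.height) / 2.0
        )
        image.draw(in: CGRect(origin: origin, size: image.size))
    }
}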
@@ -3793,6 +3793,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
            let firstFrameTime = CMTime(seconds: mediaEditor.values.videoTrimRange?.lowerBound ?? 0.0, preferredTimescale: CMTimeScale(60))
 
            let videoResult: Result.VideoResult
+           var videoIsMirrored = false
            let duration: Double
            switch subject {
            case let .image(image, _, _, _):
@@ -3804,7 +3805,8 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
                duration = 5.0
 
                firstFrame = .single((image, nil))
-           case let .video(path, _, _, additionalPath, _, _, durationValue, _, _):
+           case let .video(path, _, mirror, additionalPath, _, _, durationValue, _, _):
+               videoIsMirrored = mirror
                videoResult = .videoFile(path: path)
                if let videoTrimRange = mediaEditor.values.videoTrimRange {
                    duration = videoTrimRange.upperBound - videoTrimRange.lowerBound
@@ -3820,8 +3822,23 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
                    avAssetGenerator.appliesPreferredTrackTransform = true
                    avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, cgImage, _, _, _ in
                        if let cgImage {
-                           subscriber.putNext((UIImage(cgImage: cgImage), nil))
-                           subscriber.putCompletion()
+                           if let additionalPath {
+                               let avAsset = AVURLAsset(url: URL(fileURLWithPath: additionalPath))
+                               let avAssetGenerator = AVAssetImageGenerator(asset: avAsset)
+                               avAssetGenerator.appliesPreferredTrackTransform = true
+                               avAssetGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: firstFrameTime)], completionHandler: { _, additionalCGImage, _, _, _ in
+                                   if let additionalCGImage {
+                                       subscriber.putNext((UIImage(cgImage: cgImage), UIImage(cgImage: additionalCGImage)))
+                                       subscriber.putCompletion()
+                                   } else {
+                                       subscriber.putNext((UIImage(cgImage: cgImage), nil))
+                                       subscriber.putCompletion()
+                                   }
+                               })
+                           } else {
+                               subscriber.putNext((UIImage(cgImage: cgImage), nil))
+                               subscriber.putCompletion()
+                           }
                        }
                    })
                    return ActionDisposable {
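Note: the added branch extracts the first frame of the additional (picture-in-picture) video with a second AVAssetImageGenerator, falling back to the main frame alone if that fails. A simplified synchronous sketch of the same extraction (the helper name is illustrative; copyCGImage(at:actualTime:) blocks and should not run on the main thread):

import AVFoundation
import UIKit

// Illustrative helper: grab a single frame from a video file at a given time.
func firstFrame(atPath path: String, time: CMTime) -> UIImage? {
    let asset = AVURLAsset(url: URL(fileURLWithPath: path))
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    guard let cgImage = try? generator.copyCGImage(at: time, actualTime: nil) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}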
@@ -3905,7 +3922,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
            var currentImage = mediaEditor.resultImage
            if let image {
                mediaEditor.replaceSource(image, additionalImage: additionalImage, time: firstFrameTime)
-               if let updatedImage = mediaEditor.resultImage {
+               if let updatedImage = mediaEditor.getResultImage(mirror: videoIsMirrored) {
                    currentImage = updatedImage
                }
            }