Various fixes

This commit is contained in:
Ilya Laktyushin 2024-11-29 13:12:12 +04:00
parent de6f737bc1
commit 07436e4705
8 changed files with 113 additions and 109 deletions

View File

@ -379,18 +379,19 @@ private final class CameraContext {
return
}
var front = false
if #available(iOS 13.0, *) {
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
if sampleBuffer.type == kCMMediaType_Video {
Queue.mainQueue().async {
self.videoOutput?.push(sampleBuffer)
self.videoOutput?.push(sampleBuffer, mirror: front)
}
}
let timestamp = CACurrentMediaTime()
if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
var front = false
if #available(iOS 13.0, *) {
front = connection.inputPorts.first?.sourceDevicePosition == .front
}
self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
self.lastSnapshotTimestamp = timestamp
self.savedSnapshot = true
@ -1140,13 +1141,13 @@ public enum CameraRecordingError {
}
/// Funnels camera sample buffers from the capture pipeline to an
/// externally supplied sink closure.
///
/// NOTE(review): the rendered diff lost its +/- markers, leaving the old
/// single-argument `sink`/`init`/`push` lines interleaved with the new
/// two-argument ones (duplicate declarations, invalid Swift). This is the
/// reconstructed post-commit state, which threads the `mirror` flag through —
/// matching the caller in CameraVideoSource that now receives `(buffer, mirror)`.
public class CameraVideoOutput {
    /// Receives each sample buffer together with a flag that is true when the
    /// buffer originates from the front (mirrored) camera position.
    private let sink: (CMSampleBuffer, Bool) -> Void

    public init(sink: @escaping (CMSampleBuffer, Bool) -> Void) {
        self.sink = sink
    }

    /// Forwards `buffer` to the sink. `mirror` indicates a front-camera frame
    /// (derived upstream from the connection's `sourceDevicePosition`).
    func push(_ buffer: CMSampleBuffer, mirror: Bool) {
        self.sink(buffer, mirror)
    }
}

View File

@ -234,6 +234,13 @@ final class CameraCollage {
self.uniqueIds.append(Int64.random(in: .min ... .max))
}
/// Looks up the capture result stored under the given unique identifier.
/// Returns nil when no slot carries that id.
func getItem(id: Int64) -> CaptureResult? {
    if let slot = self.uniqueIds.firstIndex(of: id) {
        return self.results[slot]
    }
    return nil
}
private func checkResults() {
self.results = self.results.filter { $0.content != nil }
}

View File

@ -130,6 +130,7 @@ enum CameraScreenTransition {
case animateIn
case animateOut
case finishedAnimateIn
case flashModeChanged
}
private let cancelButtonTag = GenericComponentViewTag()
@ -1134,6 +1135,9 @@ private final class CameraScreenComponent: CombinedComponent {
let flashContentComponent: AnyComponentWithIdentity<Empty>
if component.hasAppeared {
let animationHint = context.transition.userData(CameraScreenTransition.self)
let shouldAnimateIcon = component.cameraState.flashModeDidChange && animationHint == .flashModeChanged
let flashIconName: String
switch component.cameraState.flashMode {
case .off:
@ -1157,7 +1161,7 @@ private final class CameraScreenComponent: CombinedComponent {
LottieAnimationComponent(
animation: LottieAnimationComponent.AnimationItem(
name: flashIconName,
mode: !component.cameraState.flashModeDidChange ? .still(position: .end) : .animating(loop: false),
mode: shouldAnimateIcon ? .animating(loop: false) : .still(position: .end),
range: nil,
waitForCompletion: false
),
@ -1318,7 +1322,7 @@ private final class CameraScreenComponent: CombinedComponent {
state?.updateCollageGrid(grid)
}
),
availableSize: CGSize(width: nextButtonX, height: 40.0),
availableSize: CGSize(width: nextButtonX + 4.0, height: 40.0),
transition: .immediate
)
context.add(collageCarousel
@ -2103,7 +2107,11 @@ public class CameraScreenImpl: ViewController, CameraScreen {
let previousState = self.cameraState
self.cameraState = self.cameraState.updatedPosition(position).updatedFlashMode(flashMode)
if !self.animatingDualCameraPositionSwitch {
self.requestUpdateLayout(transition: .easeInOut(duration: 0.2))
var transition: ComponentTransition = .easeInOut(duration: 0.2)
if previousState.flashMode != flashMode {
transition = transition.withUserData(CameraScreenTransition.flashModeChanged)
}
self.requestUpdateLayout(transition: transition)
}
if previousState.position != self.cameraState.position {
@ -2255,15 +2263,15 @@ public class CameraScreenImpl: ViewController, CameraScreen {
case .began:
break
case .changed:
if case .none = self.cameraState.recording, self.cameraState.collageProgress.isZero {
if case .none = self.cameraState.recording {
if case .compact = layout.metrics.widthClass {
switch controller.mode {
case .story:
if (translation.x < -10.0 || self.isDismissing) && self.hasAppeared {
if (translation.x < -10.0 || self.isDismissing) && self.hasAppeared && self.cameraState.collageProgress.isZero {
self.isDismissing = true
let transitionFraction = 1.0 - max(0.0, translation.x * -1.0) / self.frame.width
controller.updateTransitionProgress(transitionFraction, transition: .immediate)
} else if translation.y < -10.0 && abs(translation.y) > abs(translation.x) {
} else if translation.y < -10.0 && abs(translation.y) > abs(translation.x) && self.cameraState.collageProgress < 1.0 {
controller.presentGallery(fromGesture: true)
gestureRecognizer.isEnabled = false
gestureRecognizer.isEnabled = true
@ -2592,7 +2600,7 @@ public class CameraScreenImpl: ViewController, CameraScreen {
view.animateOutToEditor(transition: transition)
}
Queue.mainQueue().after(1.0, {
Queue.mainQueue().after(2.0, {
if self.cameraState.isCollageEnabled {
self.collage = nil
if let collageView = self.collageView {

View File

@ -247,96 +247,66 @@ final class CameraVideoLayer: MetalEngineSubjectLayer, MetalEngineSubject {
computeEncoder.endEncoding()
})
if !self.blurredLayer.isHidden {
guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
return
guard let downscaledTexture = self.downscaledTexture?.get(context: context), let blurredHorizontalTexture = self.blurredHorizontalTexture?.get(context: context), let blurredVerticalTexture = self.blurredVerticalTexture?.get(context: context) else {
return
}
let blurredTexture = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, downscaledTexture.placeholer, blurredHorizontalTexture.placeholer, blurredVerticalTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, downscaledTexture, blurredHorizontalTexture, blurredVerticalTexture -> MTLTexture? in
guard let rgbaTexture, let downscaledTexture, let blurredHorizontalTexture, let blurredVerticalTexture else {
return nil
}
let blurredTexture = context.compute(state: BlurState.self, inputs: rgbaTexture.placeholer, downscaledTexture.placeholer, blurredHorizontalTexture.placeholer, blurredVerticalTexture.placeholer, commands: { commandBuffer, blurState, rgbaTexture, downscaledTexture, blurredHorizontalTexture, blurredVerticalTexture -> MTLTexture? in
guard let rgbaTexture, let downscaledTexture, let blurredHorizontalTexture, let blurredVerticalTexture else {
blurState.downscaleKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: downscaledTexture)
do {
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return nil
}
blurState.downscaleKernel.encode(commandBuffer: commandBuffer, sourceTexture: rgbaTexture, destinationTexture: downscaledTexture)
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (downscaledTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (downscaledTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
do {
guard let computeEncoder = commandBuffer.makeComputeCommandEncoder() else {
return nil
}
let threadgroupSize = MTLSize(width: 16, height: 16, depth: 1)
let threadgroupCount = MTLSize(width: (downscaledTexture.width + threadgroupSize.width - 1) / threadgroupSize.width, height: (downscaledTexture.height + threadgroupSize.height - 1) / threadgroupSize.height, depth: 1)
computeEncoder.setComputePipelineState(blurState.computePipelineStateHorizontal)
computeEncoder.setTexture(downscaledTexture, index: 0)
computeEncoder.setTexture(blurredHorizontalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.setComputePipelineState(blurState.computePipelineStateVertical)
computeEncoder.setTexture(blurredHorizontalTexture, index: 0)
computeEncoder.setTexture(blurredVerticalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
}
computeEncoder.setComputePipelineState(blurState.computePipelineStateHorizontal)
computeEncoder.setTexture(downscaledTexture, index: 0)
computeEncoder.setTexture(blurredHorizontalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
return blurredVerticalTexture
})
computeEncoder.setComputePipelineState(blurState.computePipelineStateVertical)
computeEncoder.setTexture(blurredHorizontalTexture, index: 0)
computeEncoder.setTexture(blurredVerticalTexture, index: 1)
computeEncoder.dispatchThreadgroups(threadgroupCount, threadsPerThreadgroup: threadgroupSize)
computeEncoder.endEncoding()
}
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self.blurredLayer, inputs: blurredTexture, commands: { encoder, placement, blurredTexture in
guard let blurredTexture else {
return
}
let effectiveRect = placement.effectiveRect
var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
var mirror = SIMD2<UInt32>(
videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(blurredTexture, index: 0)
var brightness: Float = 0.85
var saturation: Float = 1.3
var overlay: SIMD4<Float> = SIMD4<Float>()
encoder.setFragmentBytes(&brightness, length: 4, index: 0)
encoder.setFragmentBytes(&saturation, length: 4, index: 1)
encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
})
}
// context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self, inputs: rgbaTexture.placeholer, commands: { encoder, placement, rgbaTexture in
// guard let rgbaTexture else {
// return
// }
//
// let effectiveRect = placement.effectiveRect
//
// var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
// encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
//
// var mirror = SIMD2<UInt32>(
// videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
// videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
// )
// encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
//
// encoder.setFragmentTexture(rgbaTexture, index: 0)
//
// var brightness: Float = 1.0
// var saturation: Float = 1.0
// var overlay: SIMD4<Float> = SIMD4<Float>()
// encoder.setFragmentBytes(&brightness, length: 4, index: 0)
// encoder.setFragmentBytes(&saturation, length: 4, index: 1)
// encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
//
// encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
// })
return blurredVerticalTexture
})
context.renderToLayer(spec: renderSpec, state: RenderState.self, layer: self.blurredLayer, inputs: blurredTexture, commands: { encoder, placement, blurredTexture in
guard let blurredTexture else {
return
}
let effectiveRect = placement.effectiveRect
var rect = SIMD4<Float>(Float(effectiveRect.minX), Float(effectiveRect.minY), Float(effectiveRect.width), Float(effectiveRect.height))
encoder.setVertexBytes(&rect, length: 4 * 4, index: 0)
var mirror = SIMD2<UInt32>(
videoTextures.mirrorDirection.contains(.horizontal) ? 1 : 0,
videoTextures.mirrorDirection.contains(.vertical) ? 1 : 0
)
encoder.setVertexBytes(&mirror, length: 2 * 4, index: 1)
encoder.setFragmentTexture(blurredTexture, index: 0)
var brightness: Float = 0.85
var saturation: Float = 1.3
var overlay: SIMD4<Float> = SIMD4<Float>()
encoder.setFragmentBytes(&brightness, length: 4, index: 0)
encoder.setFragmentBytes(&saturation, length: 4, index: 1)
encoder.setFragmentBytes(&overlay, length: 4 * 4, index: 2)
encoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
})
}
}

View File

@ -21,8 +21,8 @@ final class CameraVideoSource: VideoSource {
public init?() {
self.device = MetalEngine.shared.device
self.cameraVideoOutput = CameraVideoOutput(sink: { [weak self] buffer in
self?.push(buffer)
self.cameraVideoOutput = CameraVideoOutput(sink: { [weak self] buffer, mirror in
self?.push(buffer, mirror: mirror)
})
CVMetalTextureCacheCreate(nil, nil, self.device, nil, &self.textureCache)
@ -41,7 +41,7 @@ final class CameraVideoSource: VideoSource {
}
}
private func push(_ sampleBuffer: CMSampleBuffer) {
private func push(_ sampleBuffer: CMSampleBuffer, mirror: Bool) {
guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
@ -71,7 +71,7 @@ final class CameraVideoSource: VideoSource {
uv: uvTexture
)),
dataBuffer: Output.NativeDataBuffer(pixelBuffer: buffer),
mirrorDirection: [],
mirrorDirection: mirror ? [.vertical] : [],
sourceId: self.sourceId
)

View File

@ -213,7 +213,7 @@ final class CollageIconCarouselComponent: Component {
self.state = state
let inset: CGFloat = 27.0
let spacing: CGFloat = 8.0
let spacing: CGFloat = availableSize.width > 290.0 ? 7.0 : 8.0
var contentWidth: CGFloat = inset
let buttonSize = CGSize(width: 40.0, height: 40.0)
@ -275,7 +275,7 @@ final class CollageIconCarouselComponent: Component {
self.clippingView.frame = CGRect(origin: .zero, size: availableSize)
if self.clippingView.mask == nil {
if let maskImage = generateGradientImage(size: CGSize(width: 42.0, height: 10.0), colors: [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear], locations: [0.0, 0.3, 0.7, 1.0], direction: .horizontal) {
if let maskImage = generateGradientImage(size: CGSize(width: 42.0, height: 10.0), colors: [UIColor.clear, UIColor.black, UIColor.black, UIColor.clear], locations: [0.0, 0.2, 0.8, 1.0], direction: .horizontal) {
let maskView = UIImageView(image: maskImage.stretchableImage(withLeftCapWidth: 13, topCapHeight: 0))
self.clippingView.mask = maskView
}

View File

@ -1149,6 +1149,11 @@ public final class MediaEditor {
public func setVideoIsMuted(_ videoIsMuted: Bool) {
self.player?.isMuted = videoIsMuted
if !self.values.collage.isEmpty {
for player in self.additionalPlayers {
player.isMuted = videoIsMuted
}
}
self.updateValues(mode: .skipRendering) { values in
return values.withUpdatedVideoIsMuted(videoIsMuted)
}
@ -1857,9 +1862,11 @@ public final class MediaEditor {
public func collageItemIndexForTrackId(_ trackId: Int32) -> Int? {
var collageIndex = -1
var trackIndex = 0
var trackIndex = -1
for item in self.values.collage {
if case .videoFile = item.content {
if case .main = item.content {
trackIndex += 1
} else if case .videoFile = item.content {
trackIndex += 1
} else if case .asset(_, true) = item.content {
trackIndex += 1

View File

@ -3208,8 +3208,18 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
if self.controller?.isEmbeddedEditor == true {
} else {
self.previewContainerView.alpha = 1.0
if CACurrentMediaTime() - self.initializationTimestamp > 0.2, case .image = subject {
if case .videoCollage = subject {
Queue.mainQueue().after(0.7) {
self.previewContainerView.alpha = 1.0
self.previewContainerView.layer.allowsGroupOpacity = true
self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
self.previewContainerView.layer.allowsGroupOpacity = false
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
})
}
} else if CACurrentMediaTime() - self.initializationTimestamp > 0.2, case .image = subject {
self.previewContainerView.alpha = 1.0
self.previewContainerView.layer.allowsGroupOpacity = true
self.previewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.25, completion: { _ in
self.previewContainerView.layer.allowsGroupOpacity = false
@ -3217,6 +3227,7 @@ public final class MediaEditorScreenImpl: ViewController, MediaEditorScreen, UID
self.backgroundDimView.isHidden = false
})
} else {
self.previewContainerView.alpha = 1.0
self.backgroundDimView.isHidden = false
}
}