Video message recording improvements

Ilya Laktyushin 2024-01-13 18:05:58 +04:00
parent 999ae1e827
commit bb23f962b8
5 changed files with 110 additions and 47 deletions

View File

@@ -92,6 +92,7 @@ final class CameraOutput: NSObject {
     private var previewConnection: AVCaptureConnection?
     private var roundVideoFilter: CameraRoundVideoFilter?
+    private let semaphore = DispatchSemaphore(value: 1)
     private let queue = DispatchQueue(label: "")
     private let metadataQueue = DispatchQueue(label: "")
@@ -375,7 +376,7 @@ final class CameraOutput: NSObject {
         }
         return Signal { subscriber in
-            let timer = SwiftSignalKit.Timer(timeout: 0.02, repeat: true, completion: { [weak videoRecorder] in
+            let timer = SwiftSignalKit.Timer(timeout: 0.033, repeat: true, completion: { [weak videoRecorder] in
                 let recordingData = CameraRecordingData(duration: videoRecorder?.duration ?? 0.0, filePath: outputFilePath)
                 subscriber.putNext(recordingData)
             }, queue: Queue.mainQueue())
@@ -405,18 +406,32 @@ final class CameraOutput: NSObject {
     }
     private weak var masterOutput: CameraOutput?
     func processVideoRecording(_ sampleBuffer: CMSampleBuffer, fromAdditionalOutput: Bool) {
+        guard let formatDescriptor = CMSampleBufferGetFormatDescription(sampleBuffer) else {
+            return
+        }
+        let type = CMFormatDescriptionGetMediaType(formatDescriptor)
         if let videoRecorder = self.videoRecorder, videoRecorder.isRecording {
-            if case .roundVideo = self.currentMode {
-                if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, mirror: fromAdditionalOutput) {
-                    if case .front = self.currentPosition {
-                        if fromAdditionalOutput {
-                            videoRecorder.appendSampleBuffer(processedSampleBuffer)
-                        }
-                    } else {
-                        if !fromAdditionalOutput {
-                            videoRecorder.appendSampleBuffer(processedSampleBuffer)
-                        }
+            if case .roundVideo = self.currentMode, type == kCMMediaType_Video {
+                var transitionFactor: CGFloat = 0.0
+                let currentTimestamp = CACurrentMediaTime()
+                let duration: Double = 0.2
+                if case .front = self.currentPosition {
+                    transitionFactor = 1.0
+                    if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                        transitionFactor = max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                    }
+                } else {
+                    transitionFactor = 0.0
+                    if self.lastSwitchTimestamp > 0.0, currentTimestamp - self.lastSwitchTimestamp < duration {
+                        transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
+                    }
+                }
+                if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
+                    if (transitionFactor == 1.0 && fromAdditionalOutput) || (transitionFactor == 0.0 && !fromAdditionalOutput) || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                        videoRecorder.appendSampleBuffer(processedSampleBuffer)
+                    }
             } else {
                 videoRecorder.appendSampleBuffer(sampleBuffer)
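Note: the new recording path above crossfades between the two camera feeds for 0.2 s after a position switch. A minimal standalone sketch of that factor computation (assuming a simplified `CameraPosition` enum in place of the project's `Camera.Position`; not part of the commit):

```swift
import QuartzCore

enum CameraPosition { case front, back }

// 0.0 = record the main (back) feed, 1.0 = record the additional (front) feed;
// intermediate values blend the two while the switch animation runs.
func crossfadeFactor(position: CameraPosition,
                     lastSwitchTimestamp: CFTimeInterval,
                     now: CFTimeInterval = CACurrentMediaTime(),
                     duration: CFTimeInterval = 0.2) -> CGFloat {
    let elapsed = now - lastSwitchTimestamp
    guard lastSwitchTimestamp > 0.0, elapsed < duration else {
        // Steady state: fully front or fully back.
        return position == .front ? 1.0 : 0.0
    }
    let progress = CGFloat(max(0.0, elapsed / duration))
    // Ramp towards the front feed, or back down towards the rear one.
    return position == .front ? progress : 1.0 - progress
}
```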
@@ -427,10 +442,12 @@ final class CameraOutput: NSObject {
         }
     }
-    private func processRoundVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, mirror: Bool) -> CMSampleBuffer? {
+    private func processRoundVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer, additional: Bool, transitionFactor: CGFloat) -> CMSampleBuffer? {
         guard let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) else {
             return nil
         }
+        self.semaphore.wait()
         let mediaSubType = CMFormatDescriptionGetMediaSubType(formatDescription)
         let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
@@ -453,7 +470,8 @@ final class CameraOutput: NSObject {
         if !filter.isPrepared {
             filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
         }
-        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, mirror: mirror) else {
+        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, transitionFactor: transitionFactor) else {
+            self.semaphore.signal()
             return nil
         }
@@ -473,8 +491,10 @@ final class CameraOutput: NSObject {
         )
         if status == noErr, let newSampleBuffer {
+            self.semaphore.signal()
             return newSampleBuffer
         }
+        self.semaphore.signal()
         return nil
     }
@@ -483,6 +503,7 @@ final class CameraOutput: NSObject {
     func markPositionChange(position: Camera.Position) {
         self.currentPosition = position
+        self.lastSwitchTimestamp = CACurrentMediaTime()
         if let videoRecorder = self.videoRecorder {
             videoRecorder.markPositionChange(position: position)
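The commit moves the filter's semaphore into `CameraOutput`, so `processRoundVideoSampleBuffer` now has to signal on every exit path (the two `self.semaphore.signal()` insertions above). A sketch of an equivalent scoped helper that makes that invariant automatic with `defer` (hypothetical, not part of the commit):

```swift
import Foundation

// Wait/signal bracketing with `defer` guarantees the semaphore is
// released on every return path, including early returns and throws.
func withSemaphore<T>(_ semaphore: DispatchSemaphore, _ body: () throws -> T) rethrows -> T {
    semaphore.wait()
    defer { semaphore.signal() }
    return try body()
}
```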

View File

@@ -90,7 +90,9 @@ class CameraRoundVideoFilter {
     private let ciContext: CIContext
     private var resizeFilter: CIFilter?
+    private var overlayFilter: CIFilter?
     private var compositeFilter: CIFilter?
+    private var borderFilter: CIFilter?
     private var outputColorSpace: CGColorSpace?
     private var outputPixelBufferPool: CVPixelBufferPool?
@@ -99,8 +101,6 @@ class CameraRoundVideoFilter {
     private(set) var isPrepared = false
-    let semaphore = DispatchSemaphore(value: 1)
     init(ciContext: CIContext) {
         self.ciContext = ciContext
     }
@@ -125,32 +125,39 @@ class CameraRoundVideoFilter {
         })!
         self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")
+        self.overlayFilter = CIFilter(name: "CIColorMatrix")
         self.compositeFilter = CIFilter(name: "CISourceOverCompositing")
-        self.compositeFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)
+        self.borderFilter = CIFilter(name: "CISourceOverCompositing")
+        self.borderFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)
         self.isPrepared = true
     }
     func reset() {
         self.resizeFilter = nil
+        self.overlayFilter = nil
         self.compositeFilter = nil
+        self.borderFilter = nil
         self.outputColorSpace = nil
         self.outputPixelBufferPool = nil
         self.outputFormatDescription = nil
         self.inputFormatDescription = nil
         self.isPrepared = false
+        self.lastMainSourceImage = nil
+        self.lastAdditionalSourceImage = nil
     }
-    func render(pixelBuffer: CVPixelBuffer, mirror: Bool) -> CVPixelBuffer? {
-        self.semaphore.wait()
-        guard let resizeFilter = self.resizeFilter, let compositeFilter = self.compositeFilter, self.isPrepared else {
+    private var lastMainSourceImage: CIImage?
+    private var lastAdditionalSourceImage: CIImage?
+    func render(pixelBuffer: CVPixelBuffer, additional: Bool, transitionFactor: CGFloat) -> CVPixelBuffer? {
+        guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
             return nil
         }
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
-        sourceImage = sourceImage.oriented(mirror ? .leftMirrored : .right)
+        sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
         let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)
         resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
@@ -161,18 +168,46 @@ class CameraRoundVideoFilter {
         } else {
             sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
         }
         sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
         sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
-        compositeFilter.setValue(sourceImage, forKey: kCIInputBackgroundImageKey)
+        if additional {
+            self.lastAdditionalSourceImage = sourceImage
+        } else {
+            self.lastMainSourceImage = sourceImage
+        }
-        let finalImage = compositeFilter.outputImage
+        var effectiveSourceImage: CIImage
+        if transitionFactor == 0.0 {
+            effectiveSourceImage = !additional ? sourceImage : (self.lastMainSourceImage ?? sourceImage)
+        } else if transitionFactor == 1.0 {
+            effectiveSourceImage = additional ? sourceImage : (self.lastAdditionalSourceImage ?? sourceImage)
+        } else {
+            if let mainSourceImage = self.lastMainSourceImage, let additionalSourceImage = self.lastAdditionalSourceImage {
+                let overlayRgba: [CGFloat] = [0, 0, 0, transitionFactor]
+                let alphaVector: CIVector = CIVector(values: overlayRgba, count: 4)
+                overlayFilter.setValue(additionalSourceImage, forKey: kCIInputImageKey)
+                overlayFilter.setValue(alphaVector, forKey: "inputAVector")
+                compositeFilter.setValue(mainSourceImage, forKey: kCIInputBackgroundImageKey)
+                compositeFilter.setValue(overlayFilter.outputImage, forKey: kCIInputImageKey)
+                effectiveSourceImage = compositeFilter.outputImage ?? sourceImage
+            } else {
+                effectiveSourceImage = sourceImage
+            }
+        }
+        borderFilter.setValue(effectiveSourceImage, forKey: kCIInputBackgroundImageKey)
+        let finalImage = borderFilter.outputImage
         guard let finalImage else {
             return nil
         }
         if finalImage.extent.width != 400 {
             print("wtf: \(finalImage)")
         }
         var pbuf: CVPixelBuffer?
         CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
         guard let outputPixelBuffer = pbuf else {
@@ -181,8 +216,6 @@ class CameraRoundVideoFilter {
         self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
-        self.semaphore.signal()
         return outputPixelBuffer
     }
 }
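For reference, the blend introduced in `render` above scales the additional (front) image's alpha with `CIColorMatrix` and source-over composites it onto the main (back) image. A minimal sketch of that stage in isolation (filter names and keys as used above; the function itself is hypothetical):

```swift
import CoreImage

// Crossfade two equally sized CIImages: factor 0.0 shows `main`,
// factor 1.0 shows `additional`, values in between blend the two.
func blend(main: CIImage, additional: CIImage, factor: CGFloat) -> CIImage? {
    guard let overlay = CIFilter(name: "CIColorMatrix"),
          let composite = CIFilter(name: "CISourceOverCompositing") else {
        return nil
    }
    // Multiply the alpha channel of the additional image by `factor`;
    // the RGB vectors keep their identity defaults.
    overlay.setValue(additional, forKey: kCIInputImageKey)
    overlay.setValue(CIVector(x: 0, y: 0, z: 0, w: factor), forKey: "inputAVector")
    // Draw the faded additional image over the main one.
    composite.setValue(overlay.outputImage, forKey: kCIInputImageKey)
    composite.setValue(main, forKey: kCIInputBackgroundImageKey)
    return composite.outputImage
}
```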

View File

@@ -298,8 +298,12 @@ public func mediaContentKind(_ media: EngineMedia, message: EngineMessage? = nil
         switch expiredMedia.data {
         case .image:
             return .expiredImage
-        case .file, .videoMessage, .voiceMessage:
+        case .file:
             return .expiredVideo
+        case .voiceMessage:
+            return .expiredVoiceMessage
+        case .videoMessage:
+            return .expiredVideoMessage
         }
     case .image:
         return .image

View File

@@ -1983,6 +1983,8 @@ public class CameraScreen: ViewController {
                 self.requestUpdateLayout(hasAppeared: self.hasAppeared, transition: .immediate)
                 CATransaction.commit()
+                self.animatingDualCameraPositionSwitch = true
                 self.additionalPreviewContainerView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.2)
                 self.additionalPreviewContainerView.layer.animateScale(from: 0.01, to: 1.0, duration: duration, timingFunction: timingFunction)
@@ -2009,7 +2011,6 @@ public class CameraScreen: ViewController {
                     timingFunction: timingFunction
                 )
-                self.animatingDualCameraPositionSwitch = true
                 self.mainPreviewContainerView.layer.animateBounds(
                     from: CGRect(origin: CGPoint(x: 0.0, y: floorToScreenPixels((self.mainPreviewContainerView.bounds.height - self.mainPreviewContainerView.bounds.width) / 2.0)), size: CGSize(width: self.mainPreviewContainerView.bounds.width, height: self.mainPreviewContainerView.bounds.width)),
                     to: self.mainPreviewContainerView.bounds,
@@ -2533,11 +2534,13 @@ public class CameraScreen: ViewController {
             let additionalPreviewFrame = CGRect(origin: CGPoint(x: origin.x - circleSide / 2.0, y: origin.y - circleSide / 2.0), size: CGSize(width: circleSide, height: circleSide))
-            transition.setPosition(view: self.additionalPreviewContainerView, position: additionalPreviewFrame.center)
-            transition.setBounds(view: self.additionalPreviewContainerView, bounds: CGRect(origin: .zero, size: additionalPreviewFrame.size))
-            self.additionalPreviewContainerView.layer.cornerRadius = additionalPreviewFrame.width / 2.0
+            if !self.animatingDualCameraPositionSwitch {
+                transition.setPosition(view: self.additionalPreviewContainerView, position: additionalPreviewFrame.center)
+                transition.setBounds(view: self.additionalPreviewContainerView, bounds: CGRect(origin: .zero, size: additionalPreviewFrame.size))
+                self.additionalPreviewContainerView.layer.cornerRadius = additionalPreviewFrame.width / 2.0
+                transition.setScale(view: self.additionalPreviewContainerView, scale: isDualCameraEnabled ? 1.0 : 0.1)
+            }
-            transition.setScale(view: self.additionalPreviewContainerView, scale: isDualCameraEnabled ? 1.0 : 0.1)
             transition.setAlpha(view: self.additionalPreviewContainerView, alpha: isDualCameraEnabled ? 1.0 : 0.0)
             if dualCamUpdated && isDualCameraEnabled {

View File

@@ -484,7 +484,6 @@ public class VideoMessageCameraScreen: ViewController {
        private var resultPreviewView: ResultPreviewView?
        private var cameraStateDisposable: Disposable?
-       private var changingPositionDisposable: Disposable?
        private let idleTimerExtensionDisposable = MetaDisposable()
@@ -603,7 +602,6 @@ public class VideoMessageCameraScreen: ViewController {
        deinit {
            self.cameraStateDisposable?.dispose()
-           self.changingPositionDisposable?.dispose()
            self.idleTimerExtensionDisposable.dispose()
        }
@@ -670,13 +668,6 @@ public class VideoMessageCameraScreen: ViewController {
                self.requestUpdateLayout(transition: .easeInOut(duration: 0.2))
            })
-           self.changingPositionDisposable = (camera.modeChange
-           |> deliverOnMainQueue).start(next: { [weak self] modeChange in
-               if let self {
-                   let _ = self
-               }
-           })
            camera.focus(at: CGPoint(x: 0.5, y: 0.5), autoFocus: true)
            camera.startCapture()
@@ -799,8 +790,15 @@ public class VideoMessageCameraScreen: ViewController {
        override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
            let result = super.hitTest(point, with: event)
-           if let controller = self.controller, point.y > self.frame.height - controller.inputPanelFrame.height - 34.0 {
-               return nil
+           if let controller = self.controller, let layout = self.validLayout {
+               if point.y > layout.size.height - controller.inputPanelFrame.height - 34.0 {
+                   if layout.metrics.isTablet {
+                       if point.x < layout.size.width * 0.33 {
+                           return result
+                       }
+                   }
+                   return nil
+               }
            }
            return result
        }
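The reworked `hitTest` swallows touches below the input panel, except on tablets, where the leftmost third of that strip still reaches the camera view. The rule in isolation (a sketch; `panelTop` stands in for `layout.size.height - controller.inputPanelFrame.height - 34.0`):

```swift
import CoreGraphics

// Returns true when the camera view should keep handling the touch.
func cameraHandlesTouch(at point: CGPoint, layoutSize: CGSize, panelTop: CGFloat, isTablet: Bool) -> Bool {
    guard point.y > panelTop else {
        return true // above the input panel: always handled by the camera view
    }
    // Below the panel: pass through to the panel, except the left third on tablets.
    return isTablet && point.x < layoutSize.width * 0.33
}
```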
@@ -987,8 +985,12 @@ public class VideoMessageCameraScreen: ViewController {
            let availableHeight = layout.size.height - (layout.inputHeight ?? 0.0)
            let previewSide = min(369.0, layout.size.width - 24.0)
-           let previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+           let previewFrame: CGRect
+           if layout.metrics.isTablet {
+               previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+           } else {
+               previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+           }
            if !self.animatingIn {
                transition.setFrame(view: self.previewContainerView, frame: previewFrame)
            }
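The preview frame above is pinned to the physical pixel grid with the project's `floorToScreenPixels` helper. A sketch of its assumed behavior (the scale parameter is an assumption for illustration):

```swift
import UIKit

// Assumed behavior of the project's floorToScreenPixels helper:
// round a point value down to the nearest physical-pixel boundary.
func floorToScreenPixels(_ value: CGFloat, scale: CGFloat = UIScreen.main.scale) -> CGFloat {
    return floor(value * scale) / scale
}
```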