Various fixes

Ilya Laktyushin 2024-01-22 17:10:05 +04:00
parent 2bbb9fd1f3
commit 7b8e64ca8a
6 changed files with 75 additions and 19 deletions

View File

@@ -131,7 +131,10 @@ private final class CameraContext {
     var secondaryPreviewView: CameraSimplePreviewView?
     private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedSnapshot = false
     private var lastAdditionalSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedAdditionalSnapshot = false
     private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, front: Bool) {
         Queue.concurrentDefaultQueue().async {
             var ciImage = CIImage(cvImageBuffer: pixelBuffer)
@@ -141,7 +144,7 @@ private final class CameraContext {
                 transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
                 ciImage = ciImage.transformed(by: transform)
             }
-            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 100.0).cropped(to: CGRect(origin: .zero, size: size))
+            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: Camera.isDualCameraSupported ? 100.0 : 40.0).cropped(to: CGRect(origin: .zero, size: size))
             if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
                 let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
                 if front {
@@ -330,13 +333,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         self.additionalDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -344,13 +348,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording {
+            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording || !self.savedAdditionalSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastAdditionalSnapshotTimestamp = timestamp
+                self.savedAdditionalSnapshot = true
             }
         }
     } else {
@@ -370,13 +375,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         if self.initialConfiguration.reportAudioLevel {
@@ -557,7 +563,7 @@ private final class CameraContext {
         let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
         if self.initialConfiguration.isRoundVideo {
-            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: .portrait, additionalOutput: self.additionalDeviceContext?.output)
+            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: DeviceModel.current.isIpad ? orientation : .portrait, additionalOutput: self.additionalDeviceContext?.output)
         } else {
             if let additionalDeviceContext = self.additionalDeviceContext {
                 return combineLatest(

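The CameraContext changes above tighten the preview-snapshot rule: a blurred placeholder is taken once 2.5 seconds have passed and either nothing is recording or no snapshot has been saved yet, and the blur strength now depends on dual-camera support. Below is a standalone sketch of that rule; SnapshotThrottle and blurredPreview are illustrative names, not part of this commit.

import CoreImage
import QuartzCore

// Sketch of the throttling rule used above: snapshot when 2.5s have passed and
// we are either not recording or have never saved a snapshot before.
final class SnapshotThrottle {
    private var lastTimestamp: Double = CACurrentMediaTime()
    private var savedOnce = false

    func shouldSnapshot(isRecording: Bool) -> Bool {
        let timestamp = CACurrentMediaTime()
        if timestamp > self.lastTimestamp + 2.5, !isRecording || !self.savedOnce {
            self.lastTimestamp = timestamp
            self.savedOnce = true
            return true
        }
        return false
    }
}

// Blur strength mirrors the change above: a stronger blur when the dual camera is supported.
func blurredPreview(from image: CIImage, size: CGSize, dualCameraSupported: Bool) -> CIImage {
    return image
        .clampedToExtent()
        .applyingGaussianBlur(sigma: dualCameraSupported ? 100.0 : 40.0)
        .cropped(to: CGRect(origin: .zero, size: size))
}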
View File

@@ -101,6 +101,8 @@ final class CameraOutput: NSObject {
     private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
     private var videoRecorder: VideoRecorder?
+    private var captureOrientation: AVCaptureVideoOrientation = .portrait
     var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)?
     var processAudioBuffer: ((CMSampleBuffer) -> Void)?
     var processCodes: (([CameraCode]) -> Void)?
@@ -305,6 +307,7 @@ final class CameraOutput: NSObject {
         self.currentMode = mode
         self.lastSampleTimestamp = nil
+        self.captureOrientation = orientation
         var orientation = orientation
         let dimensions: CGSize
@@ -538,7 +541,7 @@ final class CameraOutput: NSObject {
         if !filter.isPrepared {
             filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
         }
-        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, transitionFactor: transitionFactor) else {
+        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, captureOrientation: self.captureOrientation, transitionFactor: transitionFactor) else {
             self.semaphore.signal()
             return nil
         }

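CameraOutput now stores the orientation passed to startRecording and hands it to the round-video filter for every frame, so rotating the device mid-recording does not reorient frames. Below is a minimal sketch of that latch-at-start pattern; RoundVideoRecorder is a hypothetical name used only for illustration.

import AVFoundation
import CoreVideo

// Latch the orientation once when recording starts and reuse it for every frame,
// mirroring the captureOrientation property added to CameraOutput above.
final class RoundVideoRecorder {
    private var captureOrientation: AVCaptureVideoOrientation = .portrait

    func startRecording(orientation: AVCaptureVideoOrientation) {
        self.captureOrientation = orientation
    }

    func process(pixelBuffer: CVPixelBuffer, render: (CVPixelBuffer, AVCaptureVideoOrientation) -> CVPixelBuffer?) -> CVPixelBuffer? {
        // Each frame is rendered with the orientation captured at start, not the current one.
        return render(pixelBuffer, self.captureOrientation)
    }
}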
View File

@@ -1,5 +1,6 @@
 import Foundation
 import UIKit
+import AVFoundation
 import CoreImage
 import CoreMedia
 import CoreVideo
@@ -157,13 +158,29 @@ final class CameraRoundVideoFilter {
     private var lastMainSourceImage: CIImage?
     private var lastAdditionalSourceImage: CIImage?
-    func render(pixelBuffer: CVPixelBuffer, additional: Bool, transitionFactor: CGFloat) -> CVPixelBuffer? {
+    func render(pixelBuffer: CVPixelBuffer, additional: Bool, captureOrientation: AVCaptureVideoOrientation, transitionFactor: CGFloat) -> CVPixelBuffer? {
         guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
             return nil
         }
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace])
-        sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
+        var sourceOrientation: CGImagePropertyOrientation
+        var sourceIsLandscape = false
+        switch captureOrientation {
+        case .portrait:
+            sourceOrientation = additional ? .leftMirrored : .right
+        case .landscapeLeft:
+            sourceOrientation = additional ? .upMirrored : .down
+            sourceIsLandscape = true
+        case .landscapeRight:
+            sourceOrientation = additional ? .downMirrored : .up
+            sourceIsLandscape = true
+        case .portraitUpsideDown:
+            sourceOrientation = additional ? .rightMirrored : .left
+        @unknown default:
+            sourceOrientation = additional ? .leftMirrored : .right
+        }
+        sourceImage = sourceImage.oriented(sourceOrientation)
         let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
         if !self.simple {
@@ -179,8 +196,13 @@ final class CameraRoundVideoFilter {
             sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
         }
-        sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
-        sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        if sourceIsLandscape {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(-(sourceImage.extent.width - sourceImage.extent.height) / 2.0, 0.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.height, height: sourceImage.extent.height))
+        } else {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        }
         if additional {
             self.lastAdditionalSourceImage = sourceImage

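The filter change above maps the latched capture orientation to a CGImagePropertyOrientation (mirrored variants for the additional, front-facing feed) and then center-crops landscape frames horizontally instead of vertically. Below is a self-contained sketch of the same mapping and crop; the helper names are illustrative, not from this commit.

import AVFoundation
import CoreImage
import ImageIO

// Map the capture orientation to a CIImage orientation, mirroring for the front feed,
// and report whether the oriented frame is landscape (mirrors the switch added above).
func sourceOrientation(for captureOrientation: AVCaptureVideoOrientation, mirrored: Bool) -> (orientation: CGImagePropertyOrientation, isLandscape: Bool) {
    switch captureOrientation {
    case .portrait:
        return (mirrored ? .leftMirrored : .right, false)
    case .landscapeLeft:
        return (mirrored ? .upMirrored : .down, true)
    case .landscapeRight:
        return (mirrored ? .downMirrored : .up, true)
    case .portraitUpsideDown:
        return (mirrored ? .rightMirrored : .left, false)
    @unknown default:
        return (mirrored ? .leftMirrored : .right, false)
    }
}

// Center-crop an oriented frame to a square: shift horizontally for landscape frames,
// vertically for portrait ones, as in the render() change above.
func centerCroppedToSquare(_ image: CIImage, isLandscape: Bool) -> CIImage {
    if isLandscape {
        let shifted = image.transformed(by: CGAffineTransform(translationX: -(image.extent.width - image.extent.height) / 2.0, y: 0.0))
        return shifted.cropped(to: CGRect(x: 0.0, y: 0.0, width: shifted.extent.height, height: shifted.extent.height))
    } else {
        let shifted = image.transformed(by: CGAffineTransform(translationX: 0.0, y: -(image.extent.height - image.extent.width) / 2.0))
        return shifted.cropped(to: CGRect(x: 0.0, y: 0.0, width: shifted.extent.width, height: shifted.extent.width))
    }
}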
View File

@@ -38,6 +38,7 @@ swift_library(
         "//submodules/DeviceAccess",
         "//submodules/TelegramUI/Components/MediaEditor",
         "//submodules/LegacyMediaPickerUI",
+        "//submodules/TelegramAudio",
     ],
     visibility = [
         "//visibility:public",

View File

@@ -26,6 +26,7 @@ import MediaResources
 import LocalMediaResources
 import ImageCompression
 import LegacyMediaPickerUI
+import TelegramAudio
 struct CameraState: Equatable {
     enum Recording: Equatable {
@@ -694,7 +695,7 @@ public class VideoMessageCameraScreen: ViewController {
     func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
         let previewReady: Signal<Bool, NoError>
         if #available(iOS 13.0, *) {
-            previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing
+            previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.2, queue: Queue.mainQueue())
         } else {
             previewReady = .single(true) |> delay(0.35, queue: Queue.mainQueue())
         }
@@ -1116,9 +1117,22 @@ public class VideoMessageCameraScreen: ViewController {
         let previewSide = min(369.0, layout.size.width - 24.0)
         let previewFrame: CGRect
         if layout.metrics.isTablet {
-            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            let statusBarOrientation: UIInterfaceOrientation
+            if #available(iOS 13.0, *) {
+                statusBarOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .portrait
+            } else {
+                statusBarOrientation = UIApplication.shared.statusBarOrientation
+            }
+            if statusBarOrientation == .landscapeLeft {
+                previewFrame = CGRect(origin: CGPoint(x: layout.size.width - 44.0 - previewSide, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            } else if statusBarOrientation == .landscapeRight {
+                previewFrame = CGRect(origin: CGPoint(x: 44.0, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            } else {
+                previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            }
         } else {
-            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
         }
         if !self.animatingIn {
             transition.setFrame(view: self.previewContainerView, frame: previewFrame)
@@ -1321,7 +1335,7 @@ public class VideoMessageCameraScreen: ViewController {
     public func takenRecordedData() -> Signal<RecordedVideoData?, NoError> {
         let previewState = self.node.previewStatePromise.get()
-        let count = 12
+        let count = 13
         let initialPlaceholder: Signal<UIImage?, NoError>
         if let firstResult = self.node.results.first {
@@ -1657,7 +1671,14 @@ public class VideoMessageCameraScreen: ViewController {
     }
     private func requestAudioSession() {
-        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { [weak self] _ in
+        let audioSessionType: ManagedAudioSessionType
+        if self.context.sharedContext.currentMediaInputSettings.with({ $0 }).pauseMusicOnRecording {
+            audioSessionType = .record(speaker: false, withOthers: false)
+        } else {
+            audioSessionType = .recordWithOthers
+        }
+        self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: audioSessionType, activate: { [weak self] _ in
             if #available(iOS 13.0, *) {
                 try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
             }

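The requestAudioSession change above asks for a plain record session when the user's pauseMusicOnRecording setting is on, and a mix-with-others session otherwise; ManagedAudioSessionType is Telegram's internal wrapper. Expressed directly against AVAudioSession, the intent is roughly the sketch below, which is an illustration rather than the code this commit uses.

import AVFoundation

// Illustration of the audio-session choice above, using AVAudioSession directly
// instead of Telegram's ManagedAudioSession wrapper.
func configureRecordingSession(pauseMusicOnRecording: Bool) throws {
    let session = AVAudioSession.sharedInstance()
    if pauseMusicOnRecording {
        // Interrupt other audio (e.g. music) for the duration of the recording.
        try session.setCategory(.playAndRecord, mode: .default, options: [.allowBluetooth])
    } else {
        // Keep other audio playing and mix the recording with it.
        try session.setCategory(.playAndRecord, mode: .default, options: [.mixWithOthers, .allowBluetooth])
    }
    try session.setActive(true)
    if #available(iOS 13.0, *) {
        // Same call the activate handler above already makes.
        try session.setAllowHapticsAndSystemSoundsDuringRecording(true)
    }
}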
View File

@@ -337,18 +337,21 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                     ),
                     environment: {},
                     forceUpdate: false,
-                    containerSize: CGSize(width: width - leftInset - rightInset - 45.0 * 2.0, height: 33.0)
+                    containerSize: CGSize(width: min(424, width - leftInset - rightInset - 45.0 * 2.0), height: 33.0)
                 )
                 if let view = self.scrubber.view {
                     if view.superview == nil {
                         self.view.addSubview(view)
                     }
-                    view.frame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: scrubberSize)
+                    view.bounds = CGRect(origin: .zero, size: scrubberSize)
                 }
             }
         }
+        if let view = self.scrubber.view {
+            view.frame = CGRect(origin: CGPoint(x: max(leftInset + 45.0, floorToScreenPixels((width - view.bounds.width) / 2.0)), y: 7.0 - UIScreenPixel), size: view.bounds.size)
+        }
         }
         let panelHeight = defaultHeight(metrics: metrics)
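The panel change above caps the scrubber width at 424 points and centers it when the panel is wider (for example on iPad), rather than always pinning it to leftInset + 45. Below is a small sketch of that frame computation; scrubberFrame is an illustrative helper and plain floor stands in for Telegram's floorToScreenPixels.

import UIKit

// Clamp the scrubber to 424pt and center it horizontally in wide panels,
// never starting left of leftInset + 45, mirroring the layout change above.
func scrubberFrame(panelWidth: CGFloat, leftInset: CGFloat, rightInset: CGFloat) -> CGRect {
    let scrubberWidth = min(424.0, panelWidth - leftInset - rightInset - 45.0 * 2.0)
    let originX = max(leftInset + 45.0, floor((panelWidth - scrubberWidth) / 2.0))
    return CGRect(x: originX, y: 7.0, width: scrubberWidth, height: 33.0)
}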