Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2026-01-04 04:05:00 +00:00)
Cherry-pick various fixes
@@ -45,16 +45,18 @@ final class CameraDeviceContext {
     private let exclusive: Bool
     private let additional: Bool
+    private let isRoundVideo: Bool
     
     let device = CameraDevice()
     let input = CameraInput()
     let output: CameraOutput
     
-    init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, use32BGRA: Bool = false) {
+    init(session: CameraSession, exclusive: Bool, additional: Bool, ciContext: CIContext, colorSpace: CGColorSpace, isRoundVideo: Bool = false) {
         self.session = session
         self.exclusive = exclusive
         self.additional = additional
-        self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, use32BGRA: use32BGRA)
+        self.isRoundVideo = isRoundVideo
+        self.output = CameraOutput(exclusive: exclusive, ciContext: ciContext, colorSpace: colorSpace, use32BGRA: isRoundVideo)
     }
     
     func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, switchAudio: Bool = true) {
@@ -63,7 +65,7 @@ final class CameraDeviceContext {
         }
         
         self.previewView = previewView
         
         self.device.configure(for: session, position: position, dual: !self.exclusive || self.additional, switchAudio: switchAudio)
         self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate(useLower: preferLowerFramerate))
         self.input.configure(for: session, device: self.device, audio: audio && switchAudio)
@@ -83,11 +85,19 @@ final class CameraDeviceContext {
     }
     
     private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
-        if additional || preferWide {
-            return CMVideoDimensions(width: 1920, height: 1440)
-        } else {
-            return CMVideoDimensions(width: 1920, height: 1080)
-        }
+//        if self.isRoundVideo {
+//            if additional {
+//                return CMVideoDimensions(width: 640, height: 480)
+//            } else {
+//                return CMVideoDimensions(width: 1280, height: 720)
+//            }
+//        } else {
+        if additional || preferWide {
+            return CMVideoDimensions(width: 1920, height: 1440)
+        } else {
+            return CMVideoDimensions(width: 1920, height: 1080)
+        }
+//        }
     }
     
     private func preferredMaxFrameRate(useLower: Bool) -> Double {
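The CMVideoDimensions cap returned above feeds configureDeviceFormat, which is not part of this diff. As a rough sketch of how such a cap can drive format selection on an AVCaptureDevice (the helper below is hypothetical, not the project's actual implementation):

import AVFoundation

func selectFormat(for device: AVCaptureDevice, maxDimensions: CMVideoDimensions, maxFramerate: Double) -> AVCaptureDevice.Format? {
    // Prefer the largest format that still fits under the cap and supports the framerate.
    return device.formats
        .filter { format in
            let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
            guard dimensions.width <= maxDimensions.width && dimensions.height <= maxDimensions.height else {
                return false
            }
            return format.videoSupportedFrameRateRanges.contains { $0.maxFrameRate >= maxFramerate }
        }
        .max { lhs, rhs in
            let l = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription)
            let r = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription)
            return l.width * l.height < r.width * r.height
        }
}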
@@ -108,14 +118,13 @@ final class CameraDeviceContext {
 private final class CameraContext {
     private let queue: Queue
     
     private let session: CameraSession
+    private let ciContext: CIContext
+    private let colorSpace: CGColorSpace
     
     private var mainDeviceContext: CameraDeviceContext?
     private var additionalDeviceContext: CameraDeviceContext?
     
-    private let ciContext = CIContext()
-    
     private let initialConfiguration: Camera.Configuration
     private var invalidated = false
@@ -139,7 +148,7 @@ private final class CameraContext {
             transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
             ciImage = ciImage.transformed(by: transform)
         }
-        ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 40.0).cropped(to: CGRect(origin: .zero, size: size))
+        ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 100.0).cropped(to: CGRect(origin: .zero, size: size))
         if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
            let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
            if front {
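The clampedToExtent() call before the blur matters: it extends the edge pixels outward so the Gaussian kernel does not pull in transparent black at the borders, and cropped(to:) afterwards restores the original bounds. A minimal standalone example of the same pattern:

import CoreImage

func blurred(_ image: CIImage, sigma: Double, size: CGSize) -> CIImage {
    return image
        .clampedToExtent()                              // avoid a dark halo at the edges
        .applyingGaussianBlur(sigma: sigma)             // blur strength driven by sigma
        .cropped(to: CGRect(origin: .zero, size: size)) // cut back to the original bounds
}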
@@ -156,6 +165,10 @@ private final class CameraContext {
         self.queue = queue
         self.session = session
+        
+        self.colorSpace = CGColorSpaceCreateDeviceRGB()
+        self.ciContext = CIContext(options: [.workingColorSpace : self.colorSpace])
+        
         self.initialConfiguration = configuration
         self.simplePreviewView = previewView
         self.secondaryPreviewView = secondaryPreviewView
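Creating the CIContext once with an explicit device-RGB working color space (instead of the bare CIContext() removed earlier in this commit) keeps every filter pass in the same color space as the buffers that get tagged further down. A minimal sketch of the same setup, independent of this class:

import CoreImage

let colorSpace = CGColorSpaceCreateDeviceRGB()
let ciContext = CIContext(options: [
    .workingColorSpace: colorSpace // filter in device RGB rather than Core Image's default working space
])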
@@ -313,10 +326,10 @@ private final class CameraContext {
         if enabled {
             self.configure {
                 self.mainDeviceContext?.invalidate()
-                self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+                self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
                 self.mainDeviceContext?.configure(position: .back, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata)
                 
-                self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+                self.additionalDeviceContext = CameraDeviceContext(session: self.session, exclusive: false, additional: true, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
                 self.additionalDeviceContext?.configure(position: .front, previewView: self.secondaryPreviewView, audio: false, photo: true, metadata: false)
             }
             self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -356,7 +369,7 @@ private final class CameraContext {
             let preferWide = self.initialConfiguration.preferWide || self.initialConfiguration.isRoundVideo
             let preferLowerFramerate = self.initialConfiguration.preferLowerFramerate || self.initialConfiguration.isRoundVideo
             
-            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, use32BGRA: self.initialConfiguration.isRoundVideo)
+            self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false, ciContext: self.ciContext, colorSpace: self.colorSpace, isRoundVideo: self.initialConfiguration.isRoundVideo)
             self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: preferWide, preferLowerFramerate: preferLowerFramerate)
         }
         self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -80,6 +80,7 @@ public struct CameraCode: Equatable {
 final class CameraOutput: NSObject {
     let exclusive: Bool
     let ciContext: CIContext
+    let colorSpace: CGColorSpace
     let isVideoMessage: Bool
     
     let photoOutput = AVCapturePhotoOutput()
@@ -104,9 +105,10 @@ final class CameraOutput: NSObject {
     var processAudioBuffer: ((CMSampleBuffer) -> Void)?
     var processCodes: (([CameraCode]) -> Void)?
     
-    init(exclusive: Bool, ciContext: CIContext, use32BGRA: Bool = false) {
+    init(exclusive: Bool, ciContext: CIContext, colorSpace: CGColorSpace, use32BGRA: Bool = false) {
         self.exclusive = exclusive
         self.ciContext = ciContext
+        self.colorSpace = colorSpace
         self.isVideoMessage = use32BGRA
         
         super.init()
@@ -530,7 +532,7 @@ final class CameraOutput: NSObject {
         if let current = self.roundVideoFilter {
             filter = current
         } else {
-            filter = CameraRoundVideoFilter(ciContext: self.ciContext)
+            filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace)
             self.roundVideoFilter = filter
         }
         if !filter.isPrepared {
@@ -89,8 +89,9 @@ private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: In
     pixelBuffers.removeAll()
 }
 
-class CameraRoundVideoFilter {
+final class CameraRoundVideoFilter {
     private let ciContext: CIContext
+    private let colorSpace: CGColorSpace
     
     private var resizeFilter: CIFilter?
     private var overlayFilter: CIFilter?
@@ -104,8 +105,9 @@ class CameraRoundVideoFilter {
     
     private(set) var isPrepared = false
     
-    init(ciContext: CIContext) {
+    init(ciContext: CIContext, colorSpace: CGColorSpace) {
         self.ciContext = ciContext
+        self.colorSpace = colorSpace
     }
     
     func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
@@ -158,7 +160,7 @@ class CameraRoundVideoFilter {
            return nil
        }
        
-       var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
+       var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace])
        sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
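Passing .colorSpace when wrapping the pixel buffer tells Core Image how to interpret the buffer's pixels instead of assuming a default, which is what makes the colorSpace plumbing in this commit consistent end to end. Standalone form of the same call:

import CoreImage
import CoreVideo

func makeSourceImage(from pixelBuffer: CVPixelBuffer, colorSpace: CGColorSpace) -> CIImage {
    // Tag the incoming camera buffer with the capture color space so
    // subsequent filters and renders do not apply an unintended conversion.
    return CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: colorSpace])
}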
@@ -112,7 +112,7 @@ private final class VideoRecorderImpl {
            }
        }
    }
    
    public func appendVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        if let _ = self.hasError() {
            return
@@ -129,6 +129,8 @@ private final class VideoRecorderImpl {
            }
            var failed = false
            if self.videoInput == nil {
+               Logger.shared.log("VideoRecorder", "Try adding video input")
+               
                let videoSettings = self.configuration.videoSettings
                if self.assetWriter.canApply(outputSettings: videoSettings, forMediaType: .video) {
                    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings, sourceFormatHint: formatDescription)
@@ -137,6 +139,8 @@ private final class VideoRecorderImpl {
                if self.assetWriter.canAdd(videoInput) {
                    self.assetWriter.add(videoInput)
                    self.videoInput = videoInput
+                   
+                   Logger.shared.log("VideoRecorder", "Successfully added video input")
                } else {
                    failed = true
                }
@@ -146,26 +150,32 @@ private final class VideoRecorderImpl {
            }
            
            if failed {
-               print("append video error")
+               Logger.shared.log("VideoRecorder", "Failed to append video buffer")
                return
            }
            
            if self.assetWriter.status == .unknown {
-               if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
-                   return
-               }
-               if !self.assetWriter.startWriting() {
-                   if let error = self.assetWriter.error {
-                       self.transitionToFailedStatus(error: .avError(error))
-                       return
-                   }
-               }
-               
-               self.assetWriter.startSession(atSourceTime: presentationTime)
-               self.recordingStartSampleTime = presentationTime
-               self.lastVideoSampleTime = presentationTime
+               if self.videoInput != nil && (self.audioInput != nil || !self.configuration.hasAudio) {
+                   if !self.assetWriter.startWriting() {
+                       if let error = self.assetWriter.error {
+                           self.transitionToFailedStatus(error: .avError(error))
+                           return
+                       }
+                   }
+                   
+                   self.assetWriter.startSession(atSourceTime: presentationTime)
+                   self.recordingStartSampleTime = presentationTime
+                   self.lastVideoSampleTime = presentationTime
+               }
            }
            
+           if self.recordingStartSampleTime == .invalid || sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
+               return
+           }
+           
            if self.assetWriter.status == .writing {
                if self.recordingStopSampleTime != .invalid && sampleBuffer.presentationTimestamp > self.recordingStopSampleTime {
                    self.hasAllVideoBuffers = true
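The reworked block only calls startWriting()/startSession(atSourceTime:) once the video input, and the audio input when audio is expected, has been attached, since inputs cannot be added to an AVAssetWriter after writing starts. A condensed sketch of that gating logic (the writer/input parameter names are illustrative, not the recorder's actual fields):

import AVFoundation

func startIfReady(writer: AVAssetWriter, videoInput: AVAssetWriterInput?, audioInput: AVAssetWriterInput?, expectsAudio: Bool, presentationTime: CMTime) {
    guard writer.status == .unknown else { return }
    // Both inputs must exist before startWriting(); adding inputs afterwards fails.
    guard videoInput != nil, audioInput != nil || !expectsAudio else { return }
    if writer.startWriting() {
        writer.startSession(atSourceTime: presentationTime)
    }
}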
@@ -225,6 +235,8 @@ private final class VideoRecorderImpl {
            
            var failed = false
            if self.audioInput == nil {
+               Logger.shared.log("VideoRecorder", "Try adding audio input")
+               
                var audioSettings = self.configuration.audioSettings
                if let currentAudioStreamBasicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription) {
                    audioSettings[AVSampleRateKey] = currentAudioStreamBasicDescription.pointee.mSampleRate
@@ -247,6 +259,8 @@ private final class VideoRecorderImpl {
                if self.assetWriter.canAdd(audioInput) {
                    self.assetWriter.add(audioInput)
                    self.audioInput = audioInput
+                   
+                   Logger.shared.log("VideoRecorder", "Successfully added audio input")
                } else {
                    failed = true
                }
@@ -256,11 +270,11 @@ private final class VideoRecorderImpl {
            }
            
            if failed {
-               print("append audio error")
+               Logger.shared.log("VideoRecorder", "Failed to append audio buffer")
                return
            }
            
-           if self.assetWriter.status == .writing {
+           if self.recordingStartSampleTime != .invalid { //self.assetWriter.status == .writing {
                if sampleBuffer.presentationTimestamp < self.recordingStartSampleTime {
                    return
                }
@@ -584,7 +584,7 @@ public class VideoMessageCameraScreen: ViewController {
                if self.cameraState.isViewOnceEnabled != oldValue.isViewOnceEnabled {
                    if self.cameraState.isViewOnceEnabled {
                        let presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
-                       self.displayViewOnceTooltip(text: presentationData.strings.Chat_PlayVideoMessageOnceTooltip, hasIcon: false)
+                       self.displayViewOnceTooltip(text: presentationData.strings.Chat_PlayVideoMessageOnceTooltip, hasIcon: true)
                        
                        let _ = ApplicationSpecificNotice.incrementVideoMessagesPlayOnceSuggestion(accountManager: self.context.sharedContext.accountManager, count: 3).startStandalone()
                    } else {
@@ -1439,9 +1439,10 @@ public class VideoMessageCameraScreen: ViewController {
                return
            }
            
+           var skipAction = false
            let currentTimestamp = CACurrentMediaTime()
            if let lastActionTimestamp = self.lastActionTimestamp, currentTimestamp - lastActionTimestamp < 0.5 {
-               return
+               skipAction = true
            }
            
            if case .none = self.cameraState.recording, self.node.results.isEmpty {
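Converting the early return into a skipAction flag lets the handler fall through far enough to still stop an in-flight recording (see the following hunk) while debouncing rapid taps. The underlying debounce pattern, in isolation:

import QuartzCore

final class TapDebouncer {
    private var lastActionTimestamp: Double?

    // Returns true when the action arrives within `interval` of the previous one.
    func shouldSkip(interval: Double = 0.5) -> Bool {
        let now = CACurrentMediaTime()
        if let last = lastActionTimestamp, now - last < interval {
            return true
        }
        lastActionTimestamp = now
        return false
    }
}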
@@ -1451,9 +1452,21 @@ public class VideoMessageCameraScreen: ViewController {
            
            if case .none = self.cameraState.recording {
            } else {
-               self.isSendingImmediately = true
-               self.waitingForNextResult = true
-               self.node.stopRecording.invoke(Void())
+               if self.cameraState.duration > 0.5 {
+                   if skipAction {
+                       return
+                   }
+                   self.isSendingImmediately = true
+                   self.waitingForNextResult = true
+                   self.node.stopRecording.invoke(Void())
+               } else {
+                   self.completion(nil, nil, nil)
+                   return
+               }
            }
            
+           guard !skipAction else {
+               return
+           }
+           
            self.didSend = true
@@ -1630,7 +1643,9 @@ public class VideoMessageCameraScreen: ViewController {
                try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
            }
            if let self {
-               self.node.setupCamera()
+               Queue.mainQueue().async {
+                   self.node.setupCamera()
+               }
            }
        }, deactivate: { _ in
            return .single(Void())
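Wrapping setupCamera() in Queue.mainQueue().async defers camera setup by one run-loop hop, so it no longer runs re-entrantly inside the audio-session activation callback (Queue is Telegram's wrapper around dispatch queues). The equivalent pattern with plain GCD, as an illustrative sketch:

import Dispatch

func activateAndSetup(setupCamera: @escaping () -> Void) {
    // Defer one hop so setup does not run inside the activation callback itself.
    DispatchQueue.main.async {
        setupCamera()
    }
}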