Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Various video recording improvements

commit ee745c48a6
parent 12c29616e0
@@ -85,7 +85,7 @@ final class CameraDeviceContext {
     }
     
     private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
-        if self.isRoundVideo && !Camera.isDualCameraSupported {
+        if self.isRoundVideo && self.exclusive {
             return CMVideoDimensions(width: 640, height: 480)
         } else {
             if additional || preferWide {
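Note on the hunk above: the 640x480 round-video cap is now keyed on whether the camera session is exclusive (a single camera) rather than on device-wide dual-camera support. A minimal sketch of the rule as a free function; only the 640x480 branch appears in this diff, so the fallback dimensions below are illustrative assumptions:

import CoreMedia

// Sketch of the selection rule above, lifted out of CameraDeviceContext.
// The wide/default values are assumptions; the diff shows only the 4:3 case.
func maxDimensions(isRoundVideo: Bool, exclusive: Bool, additional: Bool, preferWide: Bool) -> CMVideoDimensions {
    if isRoundVideo && exclusive {
        // A single-camera round video session records at a modest 4:3 size.
        return CMVideoDimensions(width: 640, height: 480)
    } else if additional || preferWide {
        return CMVideoDimensions(width: 1920, height: 1440) // assumed wide format
    } else {
        return CMVideoDimensions(width: 1920, height: 1080) // assumed default
    }
}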
@@ -307,6 +307,8 @@ final class CameraOutput: NSObject {
             return .complete()
         }
         
+        Logger.shared.log("CameraOutput", "startRecording")
+        
         self.currentMode = mode
         self.lastSampleTimestamp = nil
         self.captureOrientation = orientation
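The added log line marks the entry point of every recording attempt, making stalled startRecording calls visible in the log. Logger is the app's own logging facade; a hypothetical stand-in with the same call shape, for illustration only:

import Foundation

// Hypothetical stand-in for the Telegram-iOS Logger facade, only to show
// the call shape used in the hunk above; the real implementation differs.
final class Logger {
    static let shared = Logger()
    func log(_ tag: String, _ message: String) {
        print("[\(tag)] \(message)")
    }
}

Logger.shared.log("CameraOutput", "startRecording")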
@@ -451,20 +453,19 @@ final class CameraOutput: NSObject {
                     transitionFactor = 1.0 - max(0.0, (currentTimestamp - self.lastSwitchTimestamp) / duration)
                 }
             }
-            if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
-                let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
-                if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
-                    
-                } else {
-                    if (transitionFactor == 1.0 && fromAdditionalOutput)
-                        || (transitionFactor == 0.0 && !fromAdditionalOutput)
-                        || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+            if (transitionFactor == 1.0 && fromAdditionalOutput)
+                || (transitionFactor == 0.0 && !fromAdditionalOutput)
+                || (transitionFactor > 0.0 && transitionFactor < 1.0) {
+                if let processedSampleBuffer = self.processRoundVideoSampleBuffer(sampleBuffer, additional: fromAdditionalOutput, transitionFactor: transitionFactor) {
+                    let presentationTime = CMSampleBufferGetPresentationTimeStamp(processedSampleBuffer)
+                    if let lastSampleTimestamp = self.lastSampleTimestamp, lastSampleTimestamp > presentationTime {
+                    } else {
                         videoRecorder.appendSampleBuffer(processedSampleBuffer)
                         self.lastSampleTimestamp = presentationTime
                     }
                 }
             } else {
                 videoRecorder.appendSampleBuffer(sampleBuffer)
             }
         } else {
             var additional = self.currentPosition == .front
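This rewrite hoists the camera-selection check above processRoundVideoSampleBuffer, so frames from the camera that is not currently visible are discarded before the round-video filter renders them rather than after. The hoisted predicate as a standalone sketch (transitionFactor is assumed to run from 0.0, main camera fully shown, to 1.0, additional camera fully shown):

import CoreGraphics

// Gate from the hunk above: keep a frame only if it comes from the
// currently visible camera, or if a front/back transition is mid-flight
// (both streams are blended while 0 < transitionFactor < 1).
func shouldProcessFrame(transitionFactor: CGFloat, fromAdditionalOutput: Bool) -> Bool {
    return (transitionFactor == 1.0 && fromAdditionalOutput)
        || (transitionFactor == 0.0 && !fromAdditionalOutput)
        || (transitionFactor > 0.0 && transitionFactor < 1.0)
}

The lastSampleTimestamp comparison that stays inside the branch still drops out-of-order frames, keeping presentation timestamps strictly increasing for the recorder.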
@@ -544,8 +545,9 @@ final class CameraOutput: NSObject {
             self.roundVideoFilter = filter
         }
         if !filter.isPrepared {
-            filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
+            filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 4)
         }
+        
         guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, captureOrientation: self.captureOrientation, transitionFactor: transitionFactor) else {
             self.semaphore.signal()
             return nil
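Raising outputRetainedBufferCountHint from 3 to 4 gives the round-video filter one more pixel buffer in flight before its pool starves. Assuming the filter sizes a CVPixelBufferPool from this hint, in the style of Apple's sample code (an assumption; the actual filter implementation lives elsewhere in the repository), the hint would feed the pool roughly like this:

import AVFoundation

// Sketch: size a pixel buffer pool from a retained-buffer-count hint.
// This mirrors the common pattern behind prepare(with:outputRetainedBufferCountHint:);
// it is an assumption, not the repository's actual filter code.
func makePool(formatDescription: CMFormatDescription, retainedBufferCountHint: Int) -> CVPixelBufferPool? {
    let dims = CMVideoFormatDescriptionGetDimensions(formatDescription)
    let pixelBufferAttrs: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        kCVPixelBufferWidthKey as String: Int(dims.width),
        kCVPixelBufferHeightKey as String: Int(dims.height),
        kCVPixelBufferIOSurfacePropertiesKey as String: [String: Any]()
    ]
    let poolAttrs: [String: Any] = [
        kCVPixelBufferPoolMinimumBufferCountKey as String: retainedBufferCountHint
    ]
    var pool: CVPixelBufferPool?
    let status = CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttrs as CFDictionary, pixelBufferAttrs as CFDictionary, &pool)
    return status == kCVReturnSuccess ? pool : nil
}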
@@ -165,14 +165,18 @@ private final class VideoRecorderImpl {
                return
            }
            if self.videoInput != nil && (self.audioInput != nil || !self.configuration.hasAudio) {
+                print("startWriting")
+                let start = CACurrentMediaTime()
                if !self.assetWriter.startWriting() {
                    if let error = self.assetWriter.error {
                        self.transitionToFailedStatus(error: .avError(error))
                    }
                }
+                print("started In \(CACurrentMediaTime() - start)")
                return
            }
        } else if self.assetWriter.status == .writing && !self.startedSession {
+            print("Started session at \(presentationTime)")
            self.assetWriter.startSession(atSourceTime: presentationTime)
            self.recordingStartSampleTime = presentationTime
            self.lastVideoSampleTime = presentationTime
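The added prints time the two phases of the AVAssetWriter lifecycle that this hunk instruments: startWriting() moves the writer from .unknown to .writing, and the session clock only begins once startSession(atSourceTime:) is called with the first sample's timestamp. A minimal, self-contained sketch of that lifecycle (the URL, settings, and error handling here are placeholders, not VideoRecorderImpl's actual configuration):

import AVFoundation

// Minimal AVAssetWriter lifecycle, matching the sequence instrumented above.
func startRecording(to url: URL, firstSampleTime: CMTime) throws {
    let writer = try AVAssetWriter(outputURL: url, fileType: .mp4)
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: nil) // nil = passthrough
    input.expectsMediaDataInRealTime = true
    if writer.canAdd(input) {
        writer.add(input)
    }
    guard writer.startWriting() else {
        // Mirrors the error path above: surface writer.error to the caller.
        throw writer.error ?? NSError(domain: "VideoRecorder", code: -1, userInfo: nil)
    }
    // Nothing is written until the session starts at the first timestamp.
    writer.startSession(atSourceTime: firstSampleTime)
    // ...append sample buffers, then writer.finishWriting(completionHandler:).
}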
@@ -695,7 +695,7 @@ public class VideoMessageCameraScreen: ViewController {
         func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
             let previewReady: Signal<Bool, NoError>
             if #available(iOS 13.0, *) {
-                previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.2, queue: Queue.mainQueue())
+                previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.25, queue: Queue.mainQueue())
             } else {
                 previewReady = .single(true) |> delay(0.35, queue: Queue.mainQueue())
             }
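Bumping the delay from 0.2 to 0.25 seconds gives the single-camera preview a little more time to stabilize before the ready callback fires; isPreviewing and delay come from SwiftSignalKit. A rough plain-GCD equivalent of the pre-iOS 13 fallback branch, for illustration only:

import Foundation

// Plain-GCD stand-in for `.single(true) |> delay(...)`: treat the preview
// as ready after a fixed grace period on the main queue.
func notifyPreviewReady(after delay: TimeInterval, _ f: @escaping () -> Void) {
    DispatchQueue.main.asyncAfter(deadline: .now() + delay) {
        f()
    }
}

notifyPreviewReady(after: 0.35) {
    print("camera preview assumed ready")
}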
@@ -740,7 +740,7 @@ public class VideoMessageCameraScreen: ViewController {
                     position: self.cameraState.position,
                     isDualEnabled: self.cameraState.isDualCameraEnabled,
                     audio: true,
-                    photo: true,
+                    photo: false,
                     metadata: false,
                     isRoundVideo: true
                 ),
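Turning off photo capture for the round-video camera drops an output the video-message screen never uses. Assuming Camera.Configuration gates outputs the usual AVFoundation way (an assumption; the real wiring is inside the Camera module), photo: false simply means no AVCapturePhotoOutput is attached:

import AVFoundation

// Illustration of what `photo: false` plausibly means at session level:
// the photo output is never added, so the session has less to configure.
func configureSession(_ session: AVCaptureSession, photo: Bool) {
    session.beginConfiguration()
    if photo {
        let photoOutput = AVCapturePhotoOutput()
        if session.canAddOutput(photoOutput) {
            session.addOutput(photoOutput)
        }
    }
    session.commitConfiguration()
}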