Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)

Commit: 6fc47bed25 "Various fixes"
Parent: a1df141c67
@@ -57,7 +57,7 @@ final class CameraDeviceContext {
         self.output = CameraOutput(exclusive: exclusive)
     }
 
-    func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false) {
+    func configure(position: Camera.Position, previewView: CameraSimplePreviewView?, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false) {
         guard let session = self.session else {
             return
         }
@@ -65,7 +65,7 @@ final class CameraDeviceContext {
         self.previewView = previewView
 
         self.device.configure(for: session, position: position, dual: !exclusive || additional)
-        self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate)
+        self.device.configureDeviceFormat(maxDimensions: self.maxDimensions(additional: self.additional, preferWide: preferWide), maxFramerate: self.preferredMaxFrameRate(useLower: preferLowerFramerate))
         self.input.configure(for: session, device: self.device, audio: audio)
         self.output.configure(for: session, device: self.device, input: self.input, previewView: previewView, audio: audio, photo: photo, metadata: metadata)
 
@@ -90,10 +90,13 @@ final class CameraDeviceContext {
         }
     }
 
-    private var preferredMaxFrameRate: Double {
+    private func preferredMaxFrameRate(useLower: Bool) -> Double {
         if !self.exclusive {
             return 30.0
         }
+        if useLower {
+            return 30.0
+        }
         switch DeviceModel.current {
         case .iPhone15ProMax, .iPhone14ProMax, .iPhone13ProMax:
             return 60.0
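
Note: the frame-rate cap moves from a computed property to a function so that callers can force the lower cap per configuration. For orientation, the full method after this change plausibly reads as follows; this is a sketch, and the trailing default branch is an assumption since the hunk cuts off after the 60 fps case:

    private func preferredMaxFrameRate(useLower: Bool) -> Double {
        if !self.exclusive {
            return 30.0
        }
        if useLower {
            return 30.0
        }
        switch DeviceModel.current {
        case .iPhone15ProMax, .iPhone14ProMax, .iPhone13ProMax:
            return 60.0
        default:
            return 30.0 // assumption: remaining models fall back to 30 fps
        }
    }
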
@@ -257,7 +260,7 @@ private final class CameraContext {
         self._positionPromise.set(targetPosition)
         self.modeChange = .position
 
-        mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide)
+        mainDeviceContext.configure(position: targetPosition, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
 
         self.queue.after(0.5) {
             self.modeChange = .none
@@ -341,7 +344,7 @@ private final class CameraContext {
             self.additionalDeviceContext = nil
 
             self.mainDeviceContext = CameraDeviceContext(session: self.session, exclusive: true, additional: false)
-            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide)
+            self.mainDeviceContext?.configure(position: self.positionValue, previewView: self.simplePreviewView, audio: self.initialConfiguration.audio, photo: self.initialConfiguration.photo, metadata: self.initialConfiguration.metadata, preferWide: self.initialConfiguration.preferWide, preferLowerFramerate: self.initialConfiguration.preferLowerFramerate)
         }
         self.mainDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
             guard let self, let mainDeviceContext = self.mainDeviceContext else {
@@ -613,19 +616,19 @@ public final class Camera {
         let audio: Bool
         let photo: Bool
         let metadata: Bool
-        let preferredFps: Double
         let preferWide: Bool
+        let preferLowerFramerate: Bool
         let reportAudioLevel: Bool
 
-        public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferredFps: Double, preferWide: Bool = false, reportAudioLevel: Bool = false) {
+        public init(preset: Preset, position: Position, isDualEnabled: Bool = false, audio: Bool, photo: Bool, metadata: Bool, preferWide: Bool = false, preferLowerFramerate: Bool = false, reportAudioLevel: Bool = false) {
             self.preset = preset
             self.position = position
             self.isDualEnabled = isDualEnabled
             self.audio = audio
             self.photo = photo
             self.metadata = metadata
-            self.preferredFps = preferredFps
             self.preferWide = preferWide
+            self.preferLowerFramerate = preferLowerFramerate
             self.reportAudioLevel = reportAudioLevel
         }
     }
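
Note: Configuration drops the numeric preferredFps in favor of the boolean preferLowerFramerate; the effective cap is now decided inside CameraDeviceContext.preferredMaxFrameRate(useLower:). A hypothetical call site under the new signature (a sketch, not taken from the commit):

    // Request a 30 fps session even on devices that default to 60 fps.
    let configuration = Camera.Configuration(
        preset: .hd1920x1080,
        position: .back,
        audio: true,
        photo: false,
        metadata: false,
        preferWide: true,
        preferLowerFramerate: true,
        reportAudioLevel: true
    )
    let camera = Camera(configuration: configuration)
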
@@ -637,7 +640,7 @@ public final class Camera {
 
     public let metrics: Camera.Metrics
 
-    public init(configuration: Camera.Configuration = Configuration(preset: .hd1920x1080, position: .back, audio: true, photo: false, metadata: false, preferredFps: 60.0), previewView: CameraSimplePreviewView? = nil, secondaryPreviewView: CameraSimplePreviewView? = nil) {
+    public init(configuration: Camera.Configuration = Configuration(preset: .hd1920x1080, position: .back, audio: true, photo: false, metadata: false), previewView: CameraSimplePreviewView? = nil, secondaryPreviewView: CameraSimplePreviewView? = nil) {
         Logger.shared.log("Camera", "Init")
 
         self.metrics = Camera.Metrics(model: DeviceModel.current)
@@ -41,6 +41,14 @@ public final class EntityVideoRecorder {
 
         self.entity = DrawingStickerEntity(content: .dualVideoReference(true))
 
+        var preferLowerFramerate = false
+        if let mainFramerate = mediaEditor.mainFramerate {
+            let frameRate = Int(round(mainFramerate / 30.0) * 30.0)
+            if frameRate == 30 {
+                preferLowerFramerate = true
+            }
+        }
+
         self.camera = Camera(
             configuration: Camera.Configuration(
                 preset: .hd1920x1080,
@@ -49,8 +57,8 @@ public final class EntityVideoRecorder {
                 audio: true,
                 photo: false,
                 metadata: false,
-                preferredFps: 60.0,
                 preferWide: true,
+                preferLowerFramerate: preferLowerFramerate,
                 reportAudioLevel: true
             ),
             previewView: self.previewView,
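
Note: the recorder matches the dual-camera capture rate to the main video being edited. round(mainFramerate / 30.0) * 30.0 snaps the measured rate to the nearest multiple of 30, so NTSC-style rates classify cleanly:

    Int(round(29.97 / 30.0) * 30.0)  // 30 -> preferLowerFramerate = true
    Int(round(24.00 / 30.0) * 30.0)  // 30 -> preferLowerFramerate = true
    Int(round(59.94 / 30.0) * 30.0)  // 60 -> preferLowerFramerate = false
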
@@ -513,7 +513,7 @@ private final class QrCodeScanScreenNode: ViewControllerTracingNode, UIScrollVie
         self.errorTextNode.textAlignment = .center
         self.errorTextNode.isHidden = true
 
-        self.camera = Camera(configuration: .init(preset: .hd1920x1080, position: .back, audio: false, photo: true, metadata: true, preferredFps: 60), previewView: self.previewView)
+        self.camera = Camera(configuration: .init(preset: .hd1920x1080, position: .back, audio: false, photo: true, metadata: true), previewView: self.previewView)
 
         super.init()
 
@@ -1745,8 +1745,7 @@ public class CameraScreen: ViewController {
                 isDualEnabled: self.cameraState.isDualCameraEnabled,
                 audio: true,
                 photo: true,
-                metadata: false,
-                preferredFps: 60.0
+                metadata: false
             ),
             previewView: self.mainPreviewView,
             secondaryPreviewView: self.additionalPreviewView
@@ -1149,6 +1149,17 @@ public final class MediaEditor {
         self.setRate(0.0)
     }
 
+    public var mainFramerate: Float? {
+        if let player = self.player, let asset = player.currentItem?.asset, let track = asset.tracks(withMediaType: .video).first {
+            if track.nominalFrameRate > 0.0 {
+                return track.nominalFrameRate
+            } else if track.minFrameDuration.seconds > 0.0 {
+                return Float(1.0 / track.minFrameDuration.seconds)
+            }
+        }
+        return nil
+    }
+
     private func setRate(_ rate: Float) {
         let hostTime: UInt64 = mach_absolute_time()
         let time: TimeInterval = 0
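
Note: mainFramerate reports the frame rate of the currently loaded video track, preferring nominalFrameRate and falling back to the reciprocal of minFrameDuration (nominalFrameRate can be 0 for some assets). The same fallback chain as a standalone sketch for an arbitrary AVAsset (a hypothetical helper, not part of this commit):

    import AVFoundation

    func framerate(of asset: AVAsset) -> Float? {
        guard let track = asset.tracks(withMediaType: .video).first else {
            return nil
        }
        if track.nominalFrameRate > 0.0 {
            return track.nominalFrameRate
        } else if track.minFrameDuration.seconds > 0.0 {
            // Derive the rate from the shortest frame duration instead.
            return Float(1.0 / track.minFrameDuration.seconds)
        }
        return nil
    }
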
@@ -332,9 +332,15 @@ public final class MediaEditorVideoExport {
     private var reader: AVAssetReader?
     private var videoOutput: AVAssetReaderOutput?
     private var textureRotation: TextureRotation = .rotate0Degrees
+    private var frameRate: Float?
 
     private var additionalVideoOutput: AVAssetReaderOutput?
     private var additionalTextureRotation: TextureRotation = .rotate0Degrees
+    private var additionalFrameRate: Float?
+    private var additionalVideoDuration: Double?
+
+    private var mainComposeFramerate: Float?
 
     private var audioOutput: AVAssetReaderOutput?
 
     private var writer: MediaEditorVideoExportWriter?
@@ -368,7 +374,6 @@ public final class MediaEditorVideoExport {
         if FileManager.default.fileExists(atPath: outputPath) {
             try? FileManager.default.removeItem(atPath: outputPath)
         }
 
-        self.setup()
 
         let _ = NotificationCenter.default.addObserver(forName: UIApplication.willEnterForegroundNotification, object: nil, queue: nil, using: { [weak self] _ in
@@ -644,19 +649,23 @@ public final class MediaEditorVideoExport {
             }
         }
 
-        var sourceFrameRate: Float = 0.0
-        let effectiveVideoTrack = mainVideoTrack ?? additionalVideoTrack
-        if let effectiveVideoTrack {
-            if effectiveVideoTrack.nominalFrameRate > 0.0 {
-                sourceFrameRate = effectiveVideoTrack.nominalFrameRate
-            } else if effectiveVideoTrack.minFrameDuration.seconds > 0.0 {
-                sourceFrameRate = Float(1.0 / effectiveVideoTrack.minFrameDuration.seconds)
-            } else {
-                sourceFrameRate = 30.0
-            }
-        } else {
-            sourceFrameRate = 30.0
-        }
+        func frameRate(for track: AVCompositionTrack) -> Float {
+            if track.nominalFrameRate > 0.0 {
+                return track.nominalFrameRate
+            } else if track.minFrameDuration.seconds > 0.0 {
+                return Float(1.0 / track.minFrameDuration.seconds)
+            }
+            return 30.0
+        }
+
+        if let mainVideoTrack {
+            self.frameRate = frameRate(for: mainVideoTrack)
+        }
+        if let additionalVideoTrack {
+            self.additionalFrameRate = frameRate(for: additionalVideoTrack)
+        }
+        let sourceFrameRate: Float = (self.frameRate ?? self.additionalFrameRate) ?? 30.0
+        self.mainComposeFramerate = round(sourceFrameRate / 30.0) * 30.0
         writer.setupVideoInput(configuration: self.configuration, preferredTransform: nil, sourceFrameRate: sourceFrameRate)
 
         if let reader {
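
Note: the export now records the main and additional track rates separately and snaps the compose rate to the nearest multiple of 30 (59.94 -> 60, 29.97 -> 30, 25 -> 30). This sets up the frame-skipping decision in the encode loop below; a sketch of that comparison, assuming a 60 fps main track over a 30 fps additional track:

    let mainComposeFramerate: Float = round(59.94 / 30.0) * 30.0  // 60.0
    let additionalFramerate: Float = round(29.97 / 30.0) * 30.0   // 30.0
    let skipEveryOtherCopy = Int(mainComposeFramerate) == Int(additionalFramerate) * 2  // true
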
@@ -678,252 +687,7 @@ public final class MediaEditorVideoExport {
         }
     }
 
-//    private func setupWithAsset(_ asset: AVAsset, additionalAsset: AVAsset?, isStory: Bool) {
-//        var inputAsset = asset
-//
-//        var inputAudioMix: AVMutableAudioMix?
-//        if let audioData = self.configuration.values.audioTrack {
-//            let mixComposition = AVMutableComposition()
-//            let audioPath = fullDraftPath(peerId: self.configuration.values.peerId, path: audioData.path)
-//            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioPath))
-//
-//            guard
-//                let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
-//                let musicTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid),
-//                let videoAssetTrack = asset.tracks(withMediaType: .video).first,
-//                let musicAssetTrack = audioAsset.tracks(withMediaType: .audio).first,
-//                let duration = self.durationValue
-//            else {
-//                print("error")
-//                return
-//            }
-//            videoTrack.preferredTransform = videoAssetTrack.preferredTransform
-//
-//            let timeRange: CMTimeRange = CMTimeRangeMake(start: .zero, duration: duration)
-//            try? videoTrack.insertTimeRange(timeRange, of: videoAssetTrack, at: .zero)
-//
-//            if let audioAssetTrack = asset.tracks(withMediaType: .audio).first, let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid), !self.configuration.values.videoIsMuted {
-//                try? audioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
-//            }
-//
-//            var musicRange = timeRange
-//            let musicStartTime = self.configuration.audioStartTime
-//            if let audioTrackRange = self.configuration.audioTimeRange {
-//                musicRange = audioTrackRange
-//            }
-//            if musicStartTime + musicRange.duration > duration {
-//                musicRange = CMTimeRange(start: musicRange.start, end: duration - musicStartTime)
-//            }
-//            try? musicTrack.insertTimeRange(musicRange, of: musicAssetTrack, at: musicStartTime)
-//
-//            if let volume = self.configuration.values.audioTrackVolume, volume < 1.0 {
-//                let audioMix = AVMutableAudioMix()
-//                var audioMixParam: [AVMutableAudioMixInputParameters] = []
-//                let param: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: musicTrack)
-//                param.trackID = musicTrack.trackID
-//                param.setVolume(Float(volume), at: CMTime.zero)
-//                audioMixParam.append(param)
-//                audioMix.inputParameters = audioMixParam
-//                inputAudioMix = audioMix
-//            }
-//
-//            inputAsset = mixComposition
-//        }
-//
-//        self.reader = try? AVAssetReader(asset: inputAsset)
-//
-//        var mirror = false
-//        if additionalAsset == nil, self.configuration.values.videoIsMirrored {
-//            mirror = true
-//        }
-//
-//        self.textureRotation = textureRotatonForAVAsset(asset, mirror: mirror)
-//
-//        if let additionalAsset {
-//            self.additionalReader = try? AVAssetReader(asset: additionalAsset)
-//            self.additionalTextureRotation = textureRotatonForAVAsset(additionalAsset, mirror: true)
-//        }
-//        guard let reader = self.reader else {
-//            return
-//        }
-//        if let timeRange = self.configuration.timeRange {
-//            reader.timeRange = timeRange
-//            if let additionalTimeRange = self.configuration.additionalVideoTimeRange {
-//                self.additionalReader?.timeRange = additionalTimeRange
-//            } else {
-//                self.additionalReader?.timeRange = timeRange
-//            }
-//        } else if asset.duration.seconds > 60.0 && isStory {
-//            let trimmedRange = CMTimeRange(start: CMTime(seconds: 0.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC)), end: CMTime(seconds: 60.0, preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
-//            reader.timeRange = trimmedRange
-//            if let additionalTimeRange = self.configuration.additionalVideoTimeRange {
-//                self.additionalReader?.timeRange = additionalTimeRange
-//            } else {
-//                self.additionalReader?.timeRange = trimmedRange
-//            }
-//        }
-//
-//        self.writer = MediaEditorVideoAVAssetWriter()
-//        guard let writer = self.writer else {
-//            return
-//        }
-//        writer.setup(configuration: self.configuration, outputPath: self.outputPath)
-//
-//        let videoTracks = inputAsset.tracks(withMediaType: .video)
-//        let additionalVideoTracks = additionalAsset?.tracks(withMediaType: .video)
-//        if videoTracks.count > 0 {
-//            var sourceFrameRate: Float = 0.0
-//            let colorProperties: [String: Any] = [
-//                AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
-//                AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
-//                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2
-//            ]
-//
-//            let outputSettings: [String: Any] = [
-//                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
-//                kCVPixelBufferMetalCompatibilityKey as String: true,
-//                AVVideoColorPropertiesKey: colorProperties
-//            ]
-//
-//            let originalDimensions = self.configuration.values.originalDimensions
-//            var isNotFullscreen = false
-//            var hasNonIdentityTransform = false
-//            if case .video(_, true) = self.subject {
-//                if originalDimensions.width > 0 && abs((Double(originalDimensions.height) / Double(originalDimensions.width)) - 1.7777778) > 0.001 {
-//                    isNotFullscreen = true
-//                }
-//                if let videoTrack = videoTracks.first {
-//                    hasNonIdentityTransform = !videoTrack.preferredTransform.isIdentity
-//                }
-//            }
-//            var preferredTransform: CGAffineTransform?
-//            if let videoTrack = videoTracks.first, !self.configuration.values.requiresComposing && !isNotFullscreen && !hasNonIdentityTransform {
-//                preferredTransform = videoTrack.preferredTransform
-//            } else {
-//                self.setupComposer()
-//            }
-//            let videoOutput = AVAssetReaderTrackOutput(track: videoTracks.first!, outputSettings: outputSettings)
-//            videoOutput.alwaysCopiesSampleData = true
-//            if reader.canAdd(videoOutput) {
-//                reader.add(videoOutput)
-//            } else {
-//                self.internalStatus = .finished
-//                self.statusValue = .failed(.addVideoOutput)
-//            }
-//            self.videoOutput = videoOutput
-//
-//            if let additionalReader = self.additionalReader, let additionalVideoTrack = additionalVideoTracks?.first {
-//                let additionalVideoOutput = AVAssetReaderTrackOutput(track: additionalVideoTrack, outputSettings: outputSettings)
-//                additionalVideoOutput.alwaysCopiesSampleData = true
-//                if additionalReader.canAdd(additionalVideoOutput) {
-//                    additionalReader.add(additionalVideoOutput)
-//                }
-//                self.additionalVideoOutput = additionalVideoOutput
-//            }
-//
-//            if let videoTrack = videoTracks.first {
-//                if videoTrack.nominalFrameRate > 0.0 {
-//                    sourceFrameRate = videoTrack.nominalFrameRate
-//                } else if videoTrack.minFrameDuration.seconds > 0.0 {
-//                    sourceFrameRate = Float(1.0 / videoTrack.minFrameDuration.seconds)
-//                } else {
-//                    sourceFrameRate = 30.0
-//                }
-//            } else {
-//                sourceFrameRate = 30.0
-//            }
-//            writer.setupVideoInput(configuration: self.configuration, preferredTransform: preferredTransform, sourceFrameRate: sourceFrameRate)
-//        } else {
-//            self.videoOutput = nil
-//        }
-//
-//        let audioTracks = inputAsset.tracks(withMediaType: .audio)
-//        if audioTracks.count > 0, !self.configuration.values.videoIsMuted || self.configuration.values.audioTrack != nil {
-//            let audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
-//            audioOutput.audioMix = inputAudioMix
-//            audioOutput.alwaysCopiesSampleData = false
-//            if reader.canAdd(audioOutput) {
-//                reader.add(audioOutput)
-//            } else {
-//                self.internalStatus = .finished
-//                self.statusValue = .failed(.addAudioOutput)
-//            }
-//            self.audioOutput = audioOutput
-//
-//            writer.setupAudioInput(configuration: self.configuration)
-//        } else {
-//            self.audioOutput = nil
-//        }
-//
-//        if videoTracks.count == 0 && audioTracks.count == 0 {
-//            self.internalStatus = .finished
-//            self.statusValue = .failed(.noTracksFound)
-//        }
-//    }
-//
-//    private func setupWithImage(_ image: UIImage) {
-//        Logger.shared.log("VideoExport", "Setup with image")
-//
-//        self.setupComposer()
-//
-//        var inputAudioMix: AVMutableAudioMix?
-//
-//        self.writer = MediaEditorVideoAVAssetWriter()
-//        guard let writer = self.writer else {
-//            return
-//        }
-//        writer.setup(configuration: self.configuration, outputPath: self.outputPath)
-//        writer.setupVideoInput(configuration: self.configuration, preferredTransform: nil, sourceFrameRate: 30.0)
-//
-//        if let audioData = self.configuration.values.audioTrack {
-//            let mixComposition = AVMutableComposition()
-//            let audioPath = fullDraftPath(peerId: self.configuration.values.peerId, path: audioData.path)
-//            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: audioPath))
-//
-//            if let musicAssetTrack = audioAsset.tracks(withMediaType: .audio).first,
-//                let musicTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
-//                do {
-//                    let reader = try AVAssetReader(asset: mixComposition)
-//
-//                    var musicRange = CMTimeRange(start: .zero, duration: CMTime(seconds: min(15.0, audioData.duration), preferredTimescale: CMTimeScale(NSEC_PER_SEC)))
-//                    if let audioTrackRange = self.configuration.audioTimeRange {
-//                        musicRange = audioTrackRange
-//                    }
-//                    try? musicTrack.insertTimeRange(musicRange, of: musicAssetTrack, at: .zero)
-//
-//                    if let volume = self.configuration.values.audioTrackVolume, volume < 1.0 {
-//                        let audioMix = AVMutableAudioMix()
-//                        var audioMixParam: [AVMutableAudioMixInputParameters] = []
-//                        let param: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: musicTrack)
-//                        param.trackID = musicTrack.trackID
-//                        param.setVolume(Float(volume), at: CMTime.zero)
-//                        audioMixParam.append(param)
-//                        audioMix.inputParameters = audioMixParam
-//                        inputAudioMix = audioMix
-//                    }
-//
-//                    let audioTracks = mixComposition.tracks(withMediaType: .audio)
-//                    let audioOutput = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
-//                    audioOutput.audioMix = inputAudioMix
-//                    audioOutput.alwaysCopiesSampleData = false
-//                    if reader.canAdd(audioOutput) {
-//                        reader.add(audioOutput)
-//
-//                        self.reader = reader
-//                        self.audioOutput = audioOutput
-//
-//                        writer.setupAudioInput(configuration: self.configuration)
-//                    } else {
-//                        self.internalStatus = .finished
-//                        self.statusValue = .failed(.addAudioOutput)
-//                    }
-//                } catch {
-//                    self.internalStatus = .finished
-//                    self.statusValue = .failed(.addAudioOutput)
-//                }
-//            }
-//        }
-//    }
+    private var skippingAdditionalCopyUpdate = false
 
     private func encodeVideo() -> Bool {
         guard let writer = self.writer else {
@@ -977,20 +741,30 @@ public final class MediaEditorVideoExport {
         if let additionalVideoOutput = self.additionalVideoOutput {
             if let mainTimestamp, mainTimestamp < self.configuration.additionalVideoStartTime {
 
-            } else if let sampleBuffer = additionalVideoOutput.copyNextSampleBuffer() {
-                if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
-                    let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
-                    additionalInput = .videoBuffer(VideoPixelBuffer(
-                        pixelBuffer: pixelBuffer,
-                        rotation: self.additionalTextureRotation,
-                        timestamp: timestamp
-                    ))
-
-                    if !updatedProgress, let duration = self.durationValue {
-                        let startTime = self.reader?.timeRange.start.seconds ?? 0.0
-                        let progress = (timestamp.seconds - startTime) / duration.seconds
-                        self.statusValue = .progress(Float(progress))
-                        updatedProgress = true
-                    }
-                }
-            }
+            } else {
+                if self.skippingAdditionalCopyUpdate {
+                    self.skippingAdditionalCopyUpdate = false
+                } else if let sampleBuffer = additionalVideoOutput.copyNextSampleBuffer() {
+                    if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
+                        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
+                        additionalInput = .videoBuffer(VideoPixelBuffer(
+                            pixelBuffer: pixelBuffer,
+                            rotation: self.additionalTextureRotation,
+                            timestamp: timestamp
+                        ))
+
+                        if !updatedProgress, let duration = self.durationValue {
+                            let startTime = self.reader?.timeRange.start.seconds ?? 0.0
+                            let progress = (timestamp.seconds - startTime) / duration.seconds
+                            self.statusValue = .progress(Float(progress))
+                            updatedProgress = true
+                        }
+                    }
+                    if let additionalFrameRate = self.additionalFrameRate, let mainComposeFramerate = self.mainComposeFramerate {
+                        let additionalFrameRate = round(additionalFrameRate / 30.0) * 30.0
+                        if Int(mainComposeFramerate) == Int(additionalFrameRate) * 2 {
+                            self.skippingAdditionalCopyUpdate = true
+                        }
+                    }
+                }
+            }
         }
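
Note: skippingAdditionalCopyUpdate decimates the additional stream: whenever the main compose rate is exactly double the additional track's snapped rate, the flag is raised after each copied sample, so the next pass through the loop reuses the previous frame rather than pulling a new one. A minimal standalone model of the resulting cadence (an illustrative sketch, not library code):

    // 60 fps main loop over a 30 fps secondary source: copy, reuse, copy, reuse, ...
    var skippingCopy = false
    for mainFrame in 0..<6 {
        if skippingCopy {
            skippingCopy = false
            print("frame \(mainFrame): reuse previous secondary frame")
        } else {
            print("frame \(mainFrame): copy next secondary sample")
            skippingCopy = true // main rate == 2 x secondary rate
        }
    }
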
@@ -1026,8 +800,6 @@ public final class MediaEditorVideoExport {
                 pool: writer.pixelBufferPool,
                 completion: { pixelBuffer in
                     if let pixelBuffer {
-                        print("writing: \(timestamp)")
-
                         if !writer.appendPixelBuffer(pixelBuffer, at: timestamp) {
                             writer.markVideoAsFinished()
                             appendFailed = true
@@ -4161,7 +4161,7 @@ public final class MediaEditorScreen: ViewController, UIDropInteractionDelegate
         self.adminedChannels.set(.single([]) |> then(self.context.engine.peers.channelsForStories()))
         self.closeFriends.set(self.context.engine.data.get(TelegramEngine.EngineData.Item.Contacts.CloseFriends()))
 
-        if let _ = self.forwardSource {
+        if self.forwardSource != nil || self.isEditingStory {
             self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { _ in
                 if #available(iOS 13.0, *) {
                     try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)