Various fixes

Ilya Laktyushin 2024-01-17 17:43:04 +04:00
parent 3dc5f6b48a
commit 346790b650
4 changed files with 40 additions and 36 deletions


@@ -304,31 +304,35 @@ final class CameraOutput: NSObject {
         self.currentMode = mode
         self.lastSampleTimestamp = nil
         
-        let codecType: AVVideoCodecType
-        if case .roundVideo = mode {
-            codecType = .h264
-        } else {
-            if hasHEVCHardwareEncoder {
-                codecType = .hevc
-            } else {
-                codecType = .h264
-            }
-        }
-        
-        guard var videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
-            return .complete()
-        }
-        
-        var dimensions: CGSize = CGSize(width: 1080, height: 1920)
-        if orientation == .landscapeLeft || orientation == .landscapeRight {
-            dimensions = CGSize(width: 1920, height: 1080)
-        }
         var orientation = orientation
+        let dimensions: CGSize
+        let videoSettings: [String: Any]
         if case .roundVideo = mode {
-            videoSettings[AVVideoWidthKey] = 400
-            videoSettings[AVVideoHeightKey] = 400
-            dimensions = CGSize(width: 400, height: 400)
+            dimensions = videoMessageDimensions.cgSize
             orientation = .landscapeRight
+            
+            let compressionProperties: [String: Any] = [
+                AVVideoAverageBitRateKey: 1000 * 1000,
+                AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel,
+                AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCABAC
+            ]
+            videoSettings = [
+                AVVideoCodecKey: AVVideoCodecType.h264,
+                AVVideoCompressionPropertiesKey: compressionProperties,
+                AVVideoWidthKey: Int(dimensions.width),
+                AVVideoHeightKey: Int(dimensions.height)
+            ]
+        } else {
+            let codecType: AVVideoCodecType = hasHEVCHardwareEncoder ? .hevc : .h264
+            if orientation == .landscapeLeft || orientation == .landscapeRight {
+                dimensions = CGSize(width: 1920, height: 1080)
+            } else {
+                dimensions = CGSize(width: 1080, height: 1920)
+            }
+            guard let settings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: codecType, assetWriterOutputFileType: .mp4) else {
+                return .complete()
+            }
+            videoSettings = settings
         }
         
         let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) ?? [:]
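Note: hasHEVCHardwareEncoder is defined elsewhere in this file. As a rough sketch (an assumption, not the repo's actual helper), such a flag can be derived from the same capture output:

    // Codecs reported here are the ones the device can encode for an
    // AVAssetWriter; .hevc generally only appears on devices with a
    // hardware HEVC encoder.
    let hasHEVCHardwareEncoder = videoOutput
        .availableVideoCodecTypesForAssetWriter(writingTo: .mp4)
        .contains(.hevc)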
@@ -514,10 +518,10 @@ final class CameraOutput: NSObject {
         let extensions = CMFormatDescriptionGetExtensions(formatDescription) as! [String: Any]
         var updatedExtensions = extensions
-        updatedExtensions["CVBytesPerRow"] = 400 * 4
+        updatedExtensions["CVBytesPerRow"] = videoMessageDimensions.width * 4
         
         var newFormatDescription: CMFormatDescription?
-        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: 400, height: 400, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
+        var status = CMVideoFormatDescriptionCreate(allocator: nil, codecType: mediaSubType, width: videoMessageDimensions.width, height: videoMessageDimensions.height, extensions: updatedExtensions as CFDictionary, formatDescriptionOut: &newFormatDescription)
         guard status == noErr, let newFormatDescription else {
             return nil
         }


@@ -5,6 +5,9 @@ import CoreMedia
 import CoreVideo
 import Metal
 import Display
+import TelegramCore
+
+let videoMessageDimensions = PixelDimensions(width: 400, height: 400)
 
 func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
     outputBufferPool: CVPixelBufferPool?,
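Note: the new videoMessageDimensions constant replaces the hard-coded 400x400 literals across both camera files; PixelDimensions comes from TelegramCore, hence the added import. For reading this diff in isolation, a simplified stand-in (an assumption, not the real declaration) looks like:

    // Minimal stand-in for TelegramCore's PixelDimensions, for illustration only.
    struct PixelDimensions: Equatable {
        var width: Int32
        var height: Int32
        // Bridge the integer pixel size to CGSize for CoreGraphics/CoreImage calls.
        var cgSize: CGSize {
            return CGSize(width: CGFloat(self.width), height: CGFloat(self.height))
        }
    }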
@@ -114,8 +117,7 @@ class CameraRoundVideoFilter {
         }
         self.inputFormatDescription = formatDescription
         
-        let diameter: CGFloat = 400.0
-        let circleImage = generateImage(CGSize(width: diameter, height: diameter), opaque: false, scale: 1.0, rotatedContext: { size, context in
+        let circleImage = generateImage(videoMessageDimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
             let bounds = CGRect(origin: .zero, size: size)
             context.clear(bounds)
             context.setFillColor(UIColor.white.cgColor)
@@ -158,7 +160,7 @@ class CameraRoundVideoFilter {
         
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer)
         sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
-        let scale = 400.0 / min(sourceImage.extent.width, sourceImage.extent.height)
+        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
         
         resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
         resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
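Note: resizeFilter is created elsewhere in CameraRoundVideoFilter; given the kCIInputScaleKey usage it is presumably a Lanczos scale filter, along the lines of:

    // CILanczosScaleTransform resamples the input image by inputScale.
    let resizeFilter = CIFilter(name: "CILanczosScaleTransform")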
@@ -203,18 +205,14 @@ class CameraRoundVideoFilter {
         guard let finalImage else {
             return nil
         }
         
-        if finalImage.extent.width != 400 {
-            print("wtf: \(finalImage)")
-        }
-        
         var pbuf: CVPixelBuffer?
         CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
         guard let outputPixelBuffer = pbuf else {
             return nil
         }
         
-        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: CGSize(width: 400, height: 400)), colorSpace: outputColorSpace)
+        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: videoMessageDimensions.cgSize), colorSpace: outputColorSpace)
         
         return outputPixelBuffer
     }
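Note: the deleted block was a leftover debug probe for frames whose extent drifted from 400 pixels; rendering into fixed videoMessageDimensions bounds now pins the output size. If oversized frames remain possible, an explicit crop would state that intent (a sketch, not part of this commit):

    // Clamp the composited image to the exact round-video rectangle before rendering.
    let targetRect = CGRect(origin: .zero, size: videoMessageDimensions.cgSize)
    let croppedImage = finalImage.cropped(to: targetRect)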


@@ -327,7 +327,7 @@ public class ChatMessageContactBubbleContentNode: ChatMessageBubbleContentNode {
         let lineWidth: CGFloat = 3.0
         var buttonCount = 1
-        if canMessage {
+        if canMessage && canAdd {
             buttonCount += 1
         }
         var buttonWidth = floor((boundingWidth - layoutConstants.text.bubbleInsets.right * 2.0 - lineWidth))


@@ -1484,11 +1484,13 @@ public class VideoMessageCameraScreen: ViewController {
             finalDuration = duration
         }
         
+        let dimensions = PixelDimensions(width: 400, height: 400)
+        
         var thumbnailImage = video.thumbnail
         if startTime > 0.0 {
             let composition = composition(with: results)
             let imageGenerator = AVAssetImageGenerator(asset: composition)
-            imageGenerator.maximumSize = CGSize(width: 400, height: 400)
+            imageGenerator.maximumSize = dimensions.cgSize
             imageGenerator.appliesPreferredTrackTransform = true
             if let cgImage = try? imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) {
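Note: copyCGImage(at:actualTime:) is deprecated on recent SDKs; in an async context the same thumbnail could be produced with the newer API (a sketch, assuming iOS 16+):

    // image(at:) returns the CGImage together with the actual sample time used.
    let result = try await imageGenerator.image(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale))
    thumbnailImage = UIImage(cgImage: result.image)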
@@ -1496,7 +1498,7 @@ public class VideoMessageCameraScreen: ViewController {
             }
         }
         
-        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: PixelDimensions(width: 400, height: 400), cropOffset: .zero, cropRect: CGRect(origin: .zero, size: CGSize(width: 400.0, height: 400.0)), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
         var resourceAdjustments: VideoMediaResourceAdjustments? = nil
         if let valuesData = try? JSONEncoder().encode(values) {