Mirror of https://github.com/Swiftgram/Telegram-iOS.git (synced 2025-06-16 05:55:20 +00:00)
Cherry-pick various fixes

commit 1fb7544b52 (parent cdcda05f95)
@@ -85,26 +85,19 @@ final class CameraDeviceContext {
     }
     
     private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
-//        if self.isRoundVideo {
-//            if additional {
-//                return CMVideoDimensions(width: 640, height: 480)
-//            } else {
-//                return CMVideoDimensions(width: 1280, height: 720)
-//            }
-//        } else {
+        if self.isRoundVideo && !Camera.isDualCameraSupported {
+            return CMVideoDimensions(width: 640, height: 480)
+        } else {
             if additional || preferWide {
                 return CMVideoDimensions(width: 1920, height: 1440)
             } else {
                 return CMVideoDimensions(width: 1920, height: 1080)
             }
-//        }
+        }
     }
     
     private func preferredMaxFrameRate(useLower: Bool) -> Double {
-        if !self.exclusive {
-            return 30.0
-        }
-        if useLower {
+        if !self.exclusive || self.isRoundVideo || useLower {
            return 30.0
        }
        switch DeviceModel.current {
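In plain terms, the hunk above changes two decisions in CameraDeviceContext. Below is a minimal standalone sketch of the new behavior; the free functions and the 60 fps fallback are illustrative only (the real code keeps a switch over DeviceModel.current), while the flag names and dimension values come straight from the diff.

import CoreMedia

// Round video now only drops to 640x480 on devices without dual-camera support;
// everywhere else the regular full-size dimensions are used (4:3 when an additional
// or wide output is requested, 16:9 otherwise).
func maxDimensionsSketch(isRoundVideo: Bool, isDualCameraSupported: Bool,
                         additional: Bool, preferWide: Bool) -> CMVideoDimensions {
    if isRoundVideo && !isDualCameraSupported {
        return CMVideoDimensions(width: 640, height: 480)
    } else if additional || preferWide {
        return CMVideoDimensions(width: 1920, height: 1440)  // 4:3
    } else {
        return CMVideoDimensions(width: 1920, height: 1080)  // 16:9
    }
}

// The frame-rate cap is now also lowered to 30 fps for round video, in addition to
// non-exclusive sessions and explicit lower-rate requests.
func preferredMaxFrameRateSketch(exclusive: Bool, isRoundVideo: Bool, useLower: Bool) -> Double {
    if !exclusive || isRoundVideo || useLower {
        return 30.0
    }
    return 60.0  // placeholder; the commit keeps the per-device-model switch here
}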
@@ -532,7 +532,7 @@ final class CameraOutput: NSObject {
         if let current = self.roundVideoFilter {
             filter = current
         } else {
-            filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace)
+            filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace, simple: self.exclusive)
             self.roundVideoFilter = filter
         }
         if !filter.isPrepared {
@@ -92,6 +92,7 @@ private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: In
 final class CameraRoundVideoFilter {
     private let ciContext: CIContext
     private let colorSpace: CGColorSpace
+    private let simple: Bool
     
     private var resizeFilter: CIFilter?
     private var overlayFilter: CIFilter?
@@ -105,9 +106,10 @@ final class CameraRoundVideoFilter {
     
     private(set) var isPrepared = false
     
-    init(ciContext: CIContext, colorSpace: CGColorSpace) {
+    init(ciContext: CIContext, colorSpace: CGColorSpace, simple: Bool) {
         self.ciContext = ciContext
         self.colorSpace = colorSpace
+        self.simple = simple
     }
     
     func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
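For reference, a brief usage sketch of the changed initializer. CameraRoundVideoFilter comes from the diff above; the CIContext and color-space setup shown here are illustrative, not taken from the commit. In the commit itself, CameraOutput passes its own exclusive flag as the simple parameter.

import CoreGraphics
import CoreImage

// Constructing the filter with the new `simple` parameter (illustrative setup).
let ciContext = CIContext(options: [.workingColorSpace: NSNull()])
let colorSpace = CGColorSpaceCreateDeviceRGB()

// `simple: true` corresponds to the exclusive-session case in CameraOutput,
// which skips the resize-filter path during rendering (see the next hunk).
let roundVideoFilter = CameraRoundVideoFilter(ciContext: ciContext, colorSpace: colorSpace, simple: true)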
@@ -164,14 +166,19 @@ final class CameraRoundVideoFilter {
         sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
         let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
         
-        resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
-        resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
-        
-        if let resizedImage = resizeFilter.outputImage {
-            sourceImage = resizedImage
+        if !self.simple {
+            resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
+            resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
+            
+            if let resizedImage = resizeFilter.outputImage {
+                sourceImage = resizedImage
+            } else {
+                sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
+            }
         } else {
             sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
         }
         
         sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
         sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
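A minimal sketch of the two scaling paths in the hunk above, assuming the cached resizeFilter is a Lanczos scale filter (the diff itself does not show how it is created); the helper function name is hypothetical.

import CoreImage

// Non-"simple" path: a dedicated scale filter, falling back to an affine transform
// if the filter produces no output. "Simple" path: the affine transform directly.
func scaledSketch(_ image: CIImage, by scale: CGFloat, simple: Bool) -> CIImage {
    if !simple, let resizeFilter = CIFilter(name: "CILanczosScaleTransform") {
        resizeFilter.setValue(image, forKey: kCIInputImageKey)
        resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
        if let resized = resizeFilter.outputImage {
            return resized
        }
    }
    return image.transformed(by: CGAffineTransform(scaleX: scale, y: scale), highQualityDownsample: true)
}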