Various fixes

This commit is contained in:
Ilya Laktyushin 2025-05-25 18:57:37 +02:00
parent ce3af41ebe
commit 7b5883c115
8 changed files with 247 additions and 39 deletions

View File

@@ -1161,7 +1161,7 @@ public protocol SharedAccountContext: AnyObject {
func makeMediaPickerScreen(context: AccountContext, hasSearch: Bool, completion: @escaping (Any) -> Void) -> ViewController
func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, completion: @escaping ([MediaEditorScreenResult], @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController
func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, remainingCount: Int32, completion: @escaping ([MediaEditorScreenResult], MediaEditorTransitionOutExternalState, @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController
func makeBotPreviewEditorScreen(context: AccountContext, source: Any?, target: Stories.PendingTarget, transitionArguments: (UIView, CGRect, UIImage?)?, transitionOut: @escaping () -> BotPreviewEditorTransitionOut?, externalState: MediaEditorTransitionOutExternalState, completion: @escaping (MediaEditorScreenResult, @escaping (@escaping () -> Void) -> Void) -> Void, cancelled: @escaping () -> Void) -> ViewController

View File

@@ -12,8 +12,8 @@ final class CameraSession {
let hasMultiCam: Bool
init() {
if #available(iOS 13.0, *), AVCaptureMultiCamSession.isMultiCamSupported {
init(forRoundVideo: Bool) {
if #available(iOS 13.0, *), Camera.isDualCameraSupported(forRoundVideo: forRoundVideo) {
self.multiSession = AVCaptureMultiCamSession()
self.singleSession = nil
self.hasMultiCam = true
@@ -765,7 +765,7 @@ public final class Camera {
self.metrics = Camera.Metrics(model: DeviceModel.current)
let session = CameraSession()
let session = CameraSession(forRoundVideo: configuration.isRoundVideo)
session.session.automaticallyConfiguresApplicationAudioSession = false
session.session.automaticallyConfiguresCaptureDeviceForWideColor = false
session.session.usesApplicationAudioSession = true

View File

@@ -0,0 +1,168 @@
import Foundation
import UIKit
import AVFoundation
import CoreImage
import CoreMedia
import CoreVideo
import Metal
import Display
import TelegramCore
/// CPU/CoreImage-based filter that turns raw camera frames into round video-message
/// frames: each frame is oriented, scaled to `videoMessageDimensions`, center-cropped
/// to a square, optionally cross-faded between the main and additional camera feeds,
/// and composited under a circular mask border.
final class CameraRoundLegacyVideoFilter {
    // Context used to render the final CIImage into the pooled output pixel buffer.
    private let ciContext: CIContext
    // Color space applied when wrapping incoming pixel buffers and when rendering output.
    private let colorSpace: CGColorSpace
    // When true, skips the Lanczos resize filter and uses a plain affine scale instead
    // (presumably a cheaper path for lower-end devices — TODO confirm with callers).
    private let simple: Bool

    // Cached CIFilters, created once in prepare() and reused per frame.
    private var resizeFilter: CIFilter?     // CILanczosScaleTransform — high-quality downscale
    private var overlayFilter: CIFilter?    // CIColorMatrix — applies transition alpha to the additional feed
    private var compositeFilter: CIFilter?  // CISourceOverCompositing — blends additional over main
    private var borderFilter: CIFilter?     // CISourceOverCompositing — draws the circular mask on top

    private var outputColorSpace: CGColorSpace?
    private var outputPixelBufferPool: CVPixelBufferPool?
    private(set) var outputFormatDescription: CMFormatDescription?
    private(set) var inputFormatDescription: CMFormatDescription?

    // True once prepare() has allocated the pool and built the filter chain.
    private(set) var isPrepared = false

    init(ciContext: CIContext, colorSpace: CGColorSpace, simple: Bool) {
        self.ciContext = ciContext
        self.colorSpace = colorSpace
        self.simple = simple
    }

    /// Allocates the output pixel buffer pool for the given input format and builds the
    /// cached filter chain. Must be called (and succeed) before `render` produces output.
    /// - Parameters:
    ///   - formatDescription: Format of the incoming frames.
    ///   - outputRetainedBufferCountHint: Hint for how many output buffers may be held
    ///     concurrently by downstream consumers; sizes the pool.
    func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
        self.reset()

        (self.outputPixelBufferPool, self.outputColorSpace, self.outputFormatDescription) = allocateOutputBufferPool(with: formatDescription, outputRetainedBufferCountHint: outputRetainedBufferCountHint)
        if self.outputPixelBufferPool == nil {
            // Pool allocation failed; leave isPrepared false so render() bails out.
            return
        }
        self.inputFormatDescription = formatDescription

        // Mask image: a white square with a transparent circle punched out of the middle
        // (the ellipse is inset by -2 so the clear region slightly overshoots the bounds).
        // Composited over the video it leaves only the white corners visible, producing
        // the round video-message look.
        let circleImage = generateImage(videoMessageDimensions.cgSize, opaque: false, scale: 1.0, rotatedContext: { size, context in
            let bounds = CGRect(origin: .zero, size: size)
            context.clear(bounds)
            context.setFillColor(UIColor.white.cgColor)
            context.fill(bounds)
            context.setBlendMode(.clear)
            context.fillEllipse(in: bounds.insetBy(dx: -2.0, dy: -2.0))
        })!

        self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")
        self.overlayFilter = CIFilter(name: "CIColorMatrix")
        self.compositeFilter = CIFilter(name: "CISourceOverCompositing")

        self.borderFilter = CIFilter(name: "CISourceOverCompositing")
        // The mask never changes, so it is set once here rather than per frame.
        self.borderFilter?.setValue(CIImage(image: circleImage), forKey: kCIInputImageKey)

        self.isPrepared = true
    }

    /// Releases the filter chain, pool, format descriptions, and cached frames,
    /// returning the filter to its unprepared state.
    func reset() {
        self.resizeFilter = nil
        self.overlayFilter = nil
        self.compositeFilter = nil
        self.borderFilter = nil
        self.outputColorSpace = nil
        self.outputPixelBufferPool = nil
        self.outputFormatDescription = nil
        self.inputFormatDescription = nil
        self.isPrepared = false
        self.lastMainSourceImage = nil
        self.lastAdditionalSourceImage = nil
    }

    // Last processed frame from each feed, kept so that during a transition (or when the
    // inactive feed's frame arrives) the other feed's most recent image can be reused.
    private var lastMainSourceImage: CIImage?
    private var lastAdditionalSourceImage: CIImage?

    /// Processes one camera frame into a round video-message frame.
    /// - Parameters:
    ///   - pixelBuffer: The incoming camera frame.
    ///   - additional: True when the frame comes from the additional (secondary) camera;
    ///     its orientations are mirrored below — presumably the front camera, TODO confirm.
    ///   - captureOrientation: Capture orientation used to upright the frame.
    ///   - transitionFactor: 0.0 shows the main feed, 1.0 the additional feed, in-between
    ///     cross-fades the two via the alpha column of a CIColorMatrix.
    /// - Returns: A pooled pixel buffer with the rendered frame, or nil if not prepared
    ///   or a buffer could not be obtained.
    func render(pixelBuffer: CVPixelBuffer, additional: Bool, captureOrientation: AVCaptureVideoOrientation, transitionFactor: CGFloat) -> CVPixelBuffer? {
        guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
            return nil
        }

        var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace])
        var sourceOrientation: CGImagePropertyOrientation
        var sourceIsLandscape = false
        // Map capture orientation to an EXIF orientation; the additional feed uses the
        // mirrored counterpart of each case.
        switch captureOrientation {
        case .portrait:
            sourceOrientation = additional ? .leftMirrored : .right
        case .landscapeLeft:
            sourceOrientation = additional ? .upMirrored : .down
            sourceIsLandscape = true
        case .landscapeRight:
            sourceOrientation = additional ? .downMirrored : .up
            sourceIsLandscape = true
        case .portraitUpsideDown:
            sourceOrientation = additional ? .rightMirrored : .left
        @unknown default:
            // Fall back to the portrait mapping for future orientation cases.
            sourceOrientation = additional ? .leftMirrored : .right
        }
        sourceImage = sourceImage.oriented(sourceOrientation)

        // Scale so the shorter side matches the round-message width.
        let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
        if !self.simple {
            // High-quality Lanczos downscale; fall back to an affine scale if it fails.
            resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
            resizeFilter.setValue(scale, forKey: kCIInputScaleKey)

            if let resizedImage = resizeFilter.outputImage {
                sourceImage = resizedImage
            } else {
                sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
            }
        } else {
            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
        }

        // Center-crop to a square along the longer axis.
        if sourceIsLandscape {
            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(-(sourceImage.extent.width - sourceImage.extent.height) / 2.0, 0.0))
            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.height, height: sourceImage.extent.height))
        } else {
            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
        }

        // Remember this frame so the other feed's render calls can reuse it.
        if additional {
            self.lastAdditionalSourceImage = sourceImage
        } else {
            self.lastMainSourceImage = sourceImage
        }

        var effectiveSourceImage: CIImage
        if transitionFactor == 0.0 {
            // Fully on the main feed: an additional-feed frame falls back to the cached main frame.
            effectiveSourceImage = !additional ? sourceImage : (self.lastMainSourceImage ?? sourceImage)
        } else if transitionFactor == 1.0 {
            // Fully on the additional feed: a main-feed frame falls back to the cached additional frame.
            effectiveSourceImage = additional ? sourceImage : (self.lastAdditionalSourceImage ?? sourceImage)
        } else {
            // Mid-transition: fade the additional feed over the main feed by scaling its
            // alpha channel with transitionFactor (inputAVector of CIColorMatrix).
            if let mainSourceImage = self.lastMainSourceImage, let additionalSourceImage = self.lastAdditionalSourceImage {
                let overlayRgba: [CGFloat] = [0, 0, 0, transitionFactor]
                let alphaVector: CIVector = CIVector(values: overlayRgba, count: 4)
                overlayFilter.setValue(additionalSourceImage, forKey: kCIInputImageKey)
                overlayFilter.setValue(alphaVector, forKey: "inputAVector")

                compositeFilter.setValue(mainSourceImage, forKey: kCIInputBackgroundImageKey)
                compositeFilter.setValue(overlayFilter.outputImage, forKey: kCIInputImageKey)
                effectiveSourceImage = compositeFilter.outputImage ?? sourceImage
            } else {
                // One of the feeds has not produced a frame yet; show the current one as-is.
                effectiveSourceImage = sourceImage
            }
        }

        // Lay the circular mask (set once in prepare()) over the blended frame.
        borderFilter.setValue(effectiveSourceImage, forKey: kCIInputBackgroundImageKey)

        let finalImage = borderFilter.outputImage
        guard let finalImage else {
            return nil
        }

        var pbuf: CVPixelBuffer?
        // Pool is non-nil whenever isPrepared is true (checked by the guard above).
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
        guard let outputPixelBuffer = pbuf else {
            return nil
        }

        self.ciContext.render(finalImage, to: outputPixelBuffer, bounds: CGRect(origin: .zero, size: videoMessageDimensions.cgSize), colorSpace: outputColorSpace)
        return outputPixelBuffer
    }
}

View File

@@ -78,7 +78,7 @@ func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription,
return (pixelBufferPool, cgColorSpace, outputFormatDescription)
}
private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
var pixelBuffers = [CVPixelBuffer]()
var error: CVReturn = kCVReturnSuccess
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary

View File

@@ -297,12 +297,13 @@ extension ChatControllerImpl {
if data.duration < 0.5 {
strongSelf.recorderFeedback?.error()
strongSelf.recorderFeedback = nil
strongSelf.audioRecorder.set(.single(nil))
strongSelf.recorderDataDisposable.set(nil)
strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
$0.updatedInputTextPanelState { panelState in
return panelState.withUpdatedMediaRecordingState(nil)
}
})
strongSelf.recorderDataDisposable.set(nil)
} else if let waveform = data.waveform {
if resource == nil {
resource = LocalFileMediaResource(fileId: Int64.random(in: Int64.min ... Int64.max), size: Int64(data.compressedData.count))
@@ -351,6 +352,7 @@ extension ChatControllerImpl {
strongSelf.recorderFeedback?.error()
strongSelf.recorderFeedback = nil
strongSelf.audioRecorder.set(.single(nil))
strongSelf.recorderDataDisposable.set(nil)
} else {
let randomId = Int64.random(in: Int64.min ... Int64.max)

View File

@@ -926,24 +926,50 @@ func openResolvedUrlImpl(
source = subject
}
}
let externalState = MediaEditorTransitionOutExternalState(
storyTarget: nil,
isForcedTarget: false,
isPeerArchived: false,
transitionOut: nil
)
let controller = context.sharedContext.makeStoryMediaEditorScreen(context: context, source: source, text: nil, link: nil, completion: { results, commit in
let target: Stories.PendingTarget = results.first!.target
externalState.storyTarget = target
if let rootController = context.sharedContext.mainWindow?.viewController as? TelegramRootControllerInterface {
rootController.proceedWithStoryUpload(target: target, results: results, existingMedia: nil, forwardInfo: nil, externalState: externalState, commit: commit)
let _ = (context.engine.messages.checkStoriesUploadAvailability(target: .myStories)
|> deliverOnMainQueue).start(next: { availability in
if case let .available(remainingCount) = availability {
let controller = context.sharedContext.makeStoryMediaEditorScreen(context: context, source: source, text: nil, link: nil, remainingCount: remainingCount, completion: { results, externalState, commit in
let target: Stories.PendingTarget = results.first!.target
externalState.storyTarget = target
if let rootController = context.sharedContext.mainWindow?.viewController as? TelegramRootControllerInterface {
rootController.popToRoot(animated: false)
rootController.proceedWithStoryUpload(target: target, results: results, existingMedia: nil, forwardInfo: nil, externalState: externalState, commit: commit)
}
})
if let navigationController {
navigationController.pushViewController(controller)
}
} else {
let subject: PremiumLimitSubject
switch availability {
case .expiringLimit:
subject = .expiringStories
case .weeklyLimit:
subject = .storiesWeekly
case .monthlyLimit:
subject = .storiesMonthly
default:
subject = .expiringStories
}
var replaceImpl: ((ViewController) -> Void)?
let controller = context.sharedContext.makePremiumLimitController(context: context, subject: subject, count: 10, forceDark: false, cancel: {
}, action: {
let controller = context.sharedContext.makePremiumIntroController(context: context, source: .stories, forceDark: true, dismissed: {
})
replaceImpl?(controller)
return true
})
replaceImpl = { [weak controller] c in
controller?.replace(with: c)
}
if let navigationController {
navigationController.pushViewController(controller)
}
}
})
if let navigationController {
navigationController.pushViewController(controller)
}
case let .startAttach(peerId, payload, choose):
let presentError: (String) -> Void = { errorText in
present(UndoOverlayController(presentationData: presentationData, content: .info(title: nil, text: errorText, timeout: nil, customUndoText: nil), elevatedLayout: true, animateInAsReplacement: false, action: { _ in

View File

@@ -3552,31 +3552,49 @@ public final class SharedAccountContextImpl: SharedAccountContext {
return editorController
}
public func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, completion: @escaping ([MediaEditorScreenResult], @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController {
let subject: Signal<MediaEditorScreenImpl.Subject?, NoError>
public func makeStoryMediaEditorScreen(context: AccountContext, source: Any?, text: String?, link: (url: String, name: String?)?, remainingCount: Int32, completion: @escaping ([MediaEditorScreenResult], MediaEditorTransitionOutExternalState, @escaping (@escaping () -> Void) -> Void) -> Void) -> ViewController {
let editorSubject: Signal<MediaEditorScreenImpl.Subject?, NoError>
if let image = source as? UIImage {
subject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight, fromCamera: false))
editorSubject = .single(.image(image: image, dimensions: PixelDimensions(image.size), additionalImage: nil, additionalImagePosition: .bottomRight, fromCamera: false))
} else if let path = source as? String {
subject = .single(.video(videoPath: path, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 1080, height: 1920), duration: 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight, fromCamera: false))
editorSubject = .single(.video(videoPath: path, thumbnail: nil, mirror: false, additionalVideoPath: nil, additionalThumbnail: nil, dimensions: PixelDimensions(width: 1080, height: 1920), duration: 0.0, videoPositionChanges: [], additionalVideoPosition: .bottomRight, fromCamera: false))
} else if let subjects = source as? [MediaEditorScreenImpl.Subject] {
subject = .single(.multiple(subjects))
editorSubject = .single(.multiple(subjects))
} else if let subjectValue = source as? MediaEditorScreenImpl.Subject {
subject = .single(subjectValue)
editorSubject = .single(subjectValue)
} else {
subject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
editorSubject = .single(.empty(PixelDimensions(width: 1080, height: 1920)))
}
let externalState = MediaEditorTransitionOutExternalState(
storyTarget: nil,
isForcedTarget: false,
isPeerArchived: false,
transitionOut: nil
)
let editorController = MediaEditorScreenImpl(
context: context,
mode: .storyEditor(remainingCount: 1),
subject: subject,
mode: .storyEditor(remainingCount: remainingCount),
subject: editorSubject,
customTarget: nil,
initialCaption: text.flatMap { NSAttributedString(string: $0) },
initialLink: link,
transitionIn: nil,
transitionOut: { finished, isNew in
if let externalTransitionOut = externalState.transitionOut {
if finished, let transitionOut = externalTransitionOut(externalState.storyTarget, false), let destinationView = transitionOut.destinationView {
return MediaEditorScreenImpl.TransitionOut(
destinationView: destinationView,
destinationRect: transitionOut.destinationRect,
destinationCornerRadius: transitionOut.destinationCornerRadius,
completion: transitionOut.completion
)
}
}
return nil
}, completion: { results, commit in
completion(results, commit)
completion(results, externalState, commit)
} as ([MediaEditorScreenImpl.Result], @escaping (@escaping () -> Void) -> Void) -> Void
)
return editorController

View File

@@ -1487,13 +1487,7 @@ public final class WebAppController: ViewController, AttachmentContainable {
}
}
if let source {
let externalState = MediaEditorTransitionOutExternalState(
storyTarget: nil,
isForcedTarget: false,
isPeerArchived: false,
transitionOut: nil
)
let controller = self.context.sharedContext.makeStoryMediaEditorScreen(context: self.context, source: source, text: text, link: linkUrl.flatMap { ($0, linkName) }, completion: { results, commit in
let controller = self.context.sharedContext.makeStoryMediaEditorScreen(context: self.context, source: source, text: text, link: linkUrl.flatMap { ($0, linkName) }, remainingCount: 1, completion: { results, externalState, commit in
let target: Stories.PendingTarget = results.first!.target
externalState.storyTarget = target