Merge commit '73c87ca361777edc9759758fefe9795185249a2b'

Isaac 2024-01-23 17:52:25 +01:00
commit f4d586f41d
8 changed files with 202 additions and 114 deletions

View File

@@ -85,26 +85,19 @@ final class CameraDeviceContext {
     }
     
     private func maxDimensions(additional: Bool, preferWide: Bool) -> CMVideoDimensions {
-//        if self.isRoundVideo {
-//            if additional {
-//                return CMVideoDimensions(width: 640, height: 480)
-//            } else {
-//                return CMVideoDimensions(width: 1280, height: 720)
-//            }
-//        } else {
+        if self.isRoundVideo && !Camera.isDualCameraSupported {
+            return CMVideoDimensions(width: 640, height: 480)
+        } else {
            if additional || preferWide {
                return CMVideoDimensions(width: 1920, height: 1440)
            } else {
                return CMVideoDimensions(width: 1920, height: 1080)
            }
-//        }
+        }
    }
    
    private func preferredMaxFrameRate(useLower: Bool) -> Double {
-        if !self.exclusive {
-            return 30.0
-        }
-        if useLower {
+        if !self.exclusive || self.isRoundVideo || useLower {
            return 30.0
        }
        switch DeviceModel.current {
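
Note: the new dimension policy above can be restated as a standalone sketch (the free-function form and parameter names are hypothetical; in the diff this logic lives on CameraDeviceContext):

import CoreMedia

// Round video on devices without dual-camera support records at 640x480;
// otherwise the additional/wide camera path uses 4:3 1920x1440 and the
// default path uses 16:9 1920x1080.
func sketchMaxDimensions(isRoundVideo: Bool, isDualCameraSupported: Bool, additional: Bool, preferWide: Bool) -> CMVideoDimensions {
    if isRoundVideo && !isDualCameraSupported {
        return CMVideoDimensions(width: 640, height: 480)
    } else if additional || preferWide {
        return CMVideoDimensions(width: 1920, height: 1440)
    } else {
        return CMVideoDimensions(width: 1920, height: 1080)
    }
}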
@@ -138,7 +131,10 @@ private final class CameraContext {
     var secondaryPreviewView: CameraSimplePreviewView?
     
     private var lastSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedSnapshot = false
     private var lastAdditionalSnapshotTimestamp: Double = CACurrentMediaTime()
+    private var savedAdditionalSnapshot = false
+    
     private func savePreviewSnapshot(pixelBuffer: CVPixelBuffer, front: Bool) {
         Queue.concurrentDefaultQueue().async {
             var ciImage = CIImage(cvImageBuffer: pixelBuffer)
@@ -148,7 +144,7 @@ private final class CameraContext {
                 transform = CGAffineTransformTranslate(transform, 0.0, -size.height)
                 ciImage = ciImage.transformed(by: transform)
             }
-            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: 100.0).cropped(to: CGRect(origin: .zero, size: size))
+            ciImage = ciImage.clampedToExtent().applyingGaussianBlur(sigma: Camera.isDualCameraSupported ? 100.0 : 40.0).cropped(to: CGRect(origin: .zero, size: size))
             if let cgImage = self.ciContext.createCGImage(ciImage, from: ciImage.extent) {
                 let uiImage = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
                 if front {
@@ -337,13 +333,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         self.additionalDeviceContext?.output.processSampleBuffer = { [weak self] sampleBuffer, pixelBuffer, connection in
@@ -351,13 +348,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording {
+            if timestamp > self.lastAdditionalSnapshotTimestamp + 2.5, !additionalDeviceContext.output.isRecording || !self.savedAdditionalSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastAdditionalSnapshotTimestamp = timestamp
+                self.savedAdditionalSnapshot = true
             }
         }
     } else {
@@ -377,13 +375,14 @@ private final class CameraContext {
                 return
             }
             let timestamp = CACurrentMediaTime()
-            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording {
+            if timestamp > self.lastSnapshotTimestamp + 2.5, !mainDeviceContext.output.isRecording || !self.savedSnapshot {
                 var front = false
                 if #available(iOS 13.0, *) {
                     front = connection.inputPorts.first?.sourceDevicePosition == .front
                 }
                 self.savePreviewSnapshot(pixelBuffer: pixelBuffer, front: front)
                 self.lastSnapshotTimestamp = timestamp
+                self.savedSnapshot = true
             }
         }
         if self.initialConfiguration.reportAudioLevel {
@@ -564,7 +563,7 @@ private final class CameraContext {
         
         let orientation = self.simplePreviewView?.videoPreviewLayer.connection?.videoOrientation ?? .portrait
         if self.initialConfiguration.isRoundVideo {
-            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: .portrait, additionalOutput: self.additionalDeviceContext?.output)
+            return mainDeviceContext.output.startRecording(mode: .roundVideo, orientation: DeviceModel.current.isIpad ? orientation : .portrait, additionalOutput: self.additionalDeviceContext?.output)
         } else {
             if let additionalDeviceContext = self.additionalDeviceContext {
                 return combineLatest(
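
Note: the savedSnapshot/savedAdditionalSnapshot flags introduced above turn the plain 2.5-second throttle into a throttle-with-guarantee: snapshots are suppressed while recording, except that at least one blurred placeholder is always captured. A self-contained sketch of the pattern (the Throttler type is hypothetical):

import QuartzCore

// Emit at most once per `interval`, but never suppress the first emission,
// even while the `suppressed` condition (recording, in the diff) holds.
final class Throttler {
    private var lastTimestamp = CACurrentMediaTime()
    private var firedOnce = false
    private let interval: Double

    init(interval: Double) {
        self.interval = interval
    }

    func shouldFire(suppressed: Bool) -> Bool {
        let now = CACurrentMediaTime()
        guard now > self.lastTimestamp + self.interval, !suppressed || !self.firedOnce else {
            return false
        }
        self.lastTimestamp = now
        self.firedOnce = true
        return true
    }
}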

View File

@@ -101,6 +101,8 @@ final class CameraOutput: NSObject {
     private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
     private var videoRecorder: VideoRecorder?
     
+    private var captureOrientation: AVCaptureVideoOrientation = .portrait
+    
     var processSampleBuffer: ((CMSampleBuffer, CVImageBuffer, AVCaptureConnection) -> Void)?
     var processAudioBuffer: ((CMSampleBuffer) -> Void)?
     var processCodes: (([CameraCode]) -> Void)?
@@ -305,6 +307,7 @@ final class CameraOutput: NSObject {
         
         self.currentMode = mode
         self.lastSampleTimestamp = nil
+        self.captureOrientation = orientation
         
         var orientation = orientation
         let dimensions: CGSize
@@ -532,13 +535,13 @@ final class CameraOutput: NSObject {
         if let current = self.roundVideoFilter {
             filter = current
         } else {
-            filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace)
+            filter = CameraRoundVideoFilter(ciContext: self.ciContext, colorSpace: self.colorSpace, simple: self.exclusive)
             self.roundVideoFilter = filter
         }
         if !filter.isPrepared {
             filter.prepare(with: newFormatDescription, outputRetainedBufferCountHint: 3)
         }
-        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, transitionFactor: transitionFactor) else {
+        guard let newPixelBuffer = filter.render(pixelBuffer: videoPixelBuffer, additional: additional, captureOrientation: self.captureOrientation, transitionFactor: transitionFactor) else {
             self.semaphore.signal()
             return nil
         }

View File

@@ -1,5 +1,6 @@
 import Foundation
 import UIKit
+import AVFoundation
 import CoreImage
 import CoreMedia
 import CoreVideo
@@ -92,6 +93,7 @@ private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
 final class CameraRoundVideoFilter {
     private let ciContext: CIContext
     private let colorSpace: CGColorSpace
+    private let simple: Bool
     
     private var resizeFilter: CIFilter?
     private var overlayFilter: CIFilter?
@@ -105,9 +107,10 @@ final class CameraRoundVideoFilter {
     
     private(set) var isPrepared = false
     
-    init(ciContext: CIContext, colorSpace: CGColorSpace) {
+    init(ciContext: CIContext, colorSpace: CGColorSpace, simple: Bool) {
         self.ciContext = ciContext
         self.colorSpace = colorSpace
+        self.simple = simple
     }
     
     func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
@@ -155,25 +158,51 @@ final class CameraRoundVideoFilter {
     private var lastMainSourceImage: CIImage?
     private var lastAdditionalSourceImage: CIImage?
     
-    func render(pixelBuffer: CVPixelBuffer, additional: Bool, transitionFactor: CGFloat) -> CVPixelBuffer? {
+    func render(pixelBuffer: CVPixelBuffer, additional: Bool, captureOrientation: AVCaptureVideoOrientation, transitionFactor: CGFloat) -> CVPixelBuffer? {
         guard let resizeFilter = self.resizeFilter, let overlayFilter = self.overlayFilter, let compositeFilter = self.compositeFilter, let borderFilter = self.borderFilter, self.isPrepared else {
             return nil
         }
         
         var sourceImage = CIImage(cvImageBuffer: pixelBuffer, options: [.colorSpace: self.colorSpace])
-        sourceImage = sourceImage.oriented(additional ? .leftMirrored : .right)
+        var sourceOrientation: CGImagePropertyOrientation
+        var sourceIsLandscape = false
+        switch captureOrientation {
+        case .portrait:
+            sourceOrientation = additional ? .leftMirrored : .right
+        case .landscapeLeft:
+            sourceOrientation = additional ? .upMirrored : .down
+            sourceIsLandscape = true
+        case .landscapeRight:
+            sourceOrientation = additional ? .downMirrored : .up
+            sourceIsLandscape = true
+        case .portraitUpsideDown:
+            sourceOrientation = additional ? .rightMirrored : .left
+        @unknown default:
+            sourceOrientation = additional ? .leftMirrored : .right
+        }
+        sourceImage = sourceImage.oriented(sourceOrientation)
         let scale = CGFloat(videoMessageDimensions.width) / min(sourceImage.extent.width, sourceImage.extent.height)
-        resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
-        resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
-        
-        if !"".isEmpty, let resizedImage = resizeFilter.outputImage {
-            sourceImage = resizedImage
-        } else {
-            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
+        if !self.simple {
+            resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
+            resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
+            if let resizedImage = resizeFilter.outputImage {
+                sourceImage = resizedImage
+            } else {
+                sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
+            }
+        } else {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeScale(scale, scale), highQualityDownsample: true)
         }
-        sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
-        sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        
+        if sourceIsLandscape {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(-(sourceImage.extent.width - sourceImage.extent.height) / 2.0, 0.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.height, height: sourceImage.extent.height))
+        } else {
+            sourceImage = sourceImage.transformed(by: CGAffineTransformMakeTranslation(0.0, -(sourceImage.extent.height - sourceImage.extent.width) / 2.0))
+            sourceImage = sourceImage.cropped(to: CGRect(x: 0.0, y: 0.0, width: sourceImage.extent.width, height: sourceImage.extent.width))
+        }
         
         if additional {
             self.lastAdditionalSourceImage = sourceImage
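
Note: a self-contained sketch of the centered square crop performed after orienting above (assumes the image extent has a zero origin at this point, as in the filter):

import CoreImage

// Landscape frames are centered horizontally before cropping to a square;
// portrait frames are centered vertically, matching the two branches above.
func centeredSquare(_ image: CIImage, isLandscape: Bool) -> CIImage {
    let side = min(image.extent.width, image.extent.height)
    if isLandscape {
        let dx = -(image.extent.width - side) / 2.0
        return image
            .transformed(by: CGAffineTransform(translationX: dx, y: 0.0))
            .cropped(to: CGRect(x: 0.0, y: 0.0, width: side, height: side))
    } else {
        let dy = -(image.extent.height - side) / 2.0
        return image
            .transformed(by: CGAffineTransform(translationX: 0.0, y: dy))
            .cropped(to: CGRect(x: 0.0, y: 0.0, width: side, height: side))
    }
}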

View File

@@ -3,22 +3,27 @@
 import TelegramApi
 
 internal func _internal_updateIsPremiumRequiredToContact(account: Account, peerIds: [EnginePeer.Id]) -> Signal<[EnginePeer.Id], NoError> {
-    return account.postbox.transaction { transaction -> ([Api.InputUser], [PeerId]) in
+    return account.postbox.transaction { transaction -> ([Api.InputUser], [PeerId], [PeerId]) in
         var inputUsers: [Api.InputUser] = []
         var premiumRequired:[EnginePeer.Id] = []
+        var ids:[PeerId] = []
         for id in peerIds {
             if let peer = transaction.getPeer(id), let inputUser = apiInputUser(peer) {
+                if peer.isPremium {
                 if let cachedData = transaction.getPeerCachedData(peerId: id) as? CachedUserData {
                     if cachedData.flags.contains(.premiumRequired) {
-                        premiumRequired.append(id)
-                    }
-                } else {
                         inputUsers.append(inputUser)
+                        ids.append(id)
+                    }
+                } else if let peer = peer as? TelegramUser, peer.flags.contains(.requirePremium), !peer.flags.contains(.mutualContact) {
+                    inputUsers.append(inputUser)
+                    ids.append(id)
+                }
                 }
             }
         }
-        return (inputUsers, premiumRequired)
-    } |> mapToSignal { inputUsers, premiumRequired -> Signal<[EnginePeer.Id], NoError> in
+        }
+        return (inputUsers, premiumRequired, ids)
+    } |> mapToSignal { inputUsers, premiumRequired, reqIds -> Signal<[EnginePeer.Id], NoError> in
         if !inputUsers.isEmpty {
             return account.network.request(Api.functions.users.getIsPremiumRequiredToContact(id: inputUsers))
@@ -27,7 +32,7 @@ internal func _internal_updateIsPremiumRequiredToContact(account: Account, peerIds: [EnginePeer.Id]) -> Signal<[EnginePeer.Id], NoError> {
                 return account.postbox.transaction { transaction in
                     var requiredPeerIds: [EnginePeer.Id] = []
                     for (i, req) in result.enumerated() {
-                        let peerId = peerIds[i]
+                        let peerId = reqIds[i]
                         let required = req == .boolTrue
                         transaction.updatePeerCachedData(peerIds: Set([peerId]), update: { _, cachedData in
                             let data = cachedData as? CachedUserData ?? CachedUserData()
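
Note: the peerIds[i] -> reqIds[i] fix matters because the server's Bool vector is positionally aligned with the subset of users actually sent in the request, not with the caller's full peerIds list (peers resolved from cache are filtered out first). A minimal illustration with hypothetical placeholder values:

// reqIds is the queried subset; the result arrives in the same order.
let reqIds = ["peerB", "peerD"]
let result = [true, false]
for (peerId, required) in zip(reqIds, result) {
    print("\(peerId) requiresPremium=\(required)")
}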

View File

@ -38,6 +38,7 @@ swift_library(
"//submodules/DeviceAccess", "//submodules/DeviceAccess",
"//submodules/TelegramUI/Components/MediaEditor", "//submodules/TelegramUI/Components/MediaEditor",
"//submodules/LegacyMediaPickerUI", "//submodules/LegacyMediaPickerUI",
"//submodules/TelegramAudio",
], ],
visibility = [ visibility = [
"//visibility:public", "//visibility:public",

View File

@@ -26,6 +26,7 @@ import MediaResources
 import LocalMediaResources
 import ImageCompression
 import LegacyMediaPickerUI
+import TelegramAudio
 
 struct CameraState: Equatable {
     enum Recording: Equatable {
@@ -694,7 +695,7 @@ public class VideoMessageCameraScreen: ViewController {
         func withReadyCamera(isFirstTime: Bool = false, _ f: @escaping () -> Void) {
             let previewReady: Signal<Bool, NoError>
             if #available(iOS 13.0, *) {
-                previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing
+                previewReady = self.cameraState.isDualCameraEnabled ? self.additionalPreviewView.isPreviewing : self.mainPreviewView.isPreviewing |> delay(0.2, queue: Queue.mainQueue())
             } else {
                 previewReady = .single(true) |> delay(0.35, queue: Queue.mainQueue())
             }
@@ -1116,9 +1117,22 @@ public class VideoMessageCameraScreen: ViewController {
         let previewSide = min(369.0, layout.size.width - 24.0)
         let previewFrame: CGRect
         if layout.metrics.isTablet {
-            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            let statusBarOrientation: UIInterfaceOrientation
+            if #available(iOS 13.0, *) {
+                statusBarOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? .portrait
+            } else {
+                statusBarOrientation = UIApplication.shared.statusBarOrientation
+            }
+            if statusBarOrientation == .landscapeLeft {
+                previewFrame = CGRect(origin: CGPoint(x: layout.size.width - 44.0 - previewSide, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            } else if statusBarOrientation == .landscapeRight {
+                previewFrame = CGRect(origin: CGPoint(x: 44.0, y: floorToScreenPixels((layout.size.height - previewSide) / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            } else {
+                previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.2 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            }
         } else {
-            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 16.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
+            previewFrame = CGRect(origin: CGPoint(x: floorToScreenPixels((layout.size.width - previewSide) / 2.0), y: max(layout.statusBarHeight ?? 0.0 + 24.0, availableHeight * 0.4 - previewSide / 2.0)), size: CGSize(width: previewSide, height: previewSide))
         }
         
         if !self.animatingIn {
             transition.setFrame(view: self.previewContainerView, frame: previewFrame)
@@ -1321,7 +1335,7 @@ public class VideoMessageCameraScreen: ViewController {
     public func takenRecordedData() -> Signal<RecordedVideoData?, NoError> {
         let previewState = self.node.previewStatePromise.get()
         
-        let count = 12
+        let count = 13
         
         let initialPlaceholder: Signal<UIImage?, NoError>
         if let firstResult = self.node.results.first {
@@ -1508,18 +1522,36 @@ public class VideoMessageCameraScreen: ViewController {
         let dimensions = PixelDimensions(width: 400, height: 400)
         
-        var thumbnailImage = video.thumbnail
+        let thumbnailImage: Signal<UIImage, NoError>
         if startTime > 0.0 {
-            let composition = composition(with: results)
-            let imageGenerator = AVAssetImageGenerator(asset: composition)
-            imageGenerator.maximumSize = dimensions.cgSize
-            imageGenerator.appliesPreferredTrackTransform = true
-            if let cgImage = try? imageGenerator.copyCGImage(at: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale), actualTime: nil) {
-                thumbnailImage = UIImage(cgImage: cgImage)
+            thumbnailImage = Signal { subscriber in
+                let composition = composition(with: results)
+                let imageGenerator = AVAssetImageGenerator(asset: composition)
+                imageGenerator.maximumSize = dimensions.cgSize
+                imageGenerator.appliesPreferredTrackTransform = true
+                imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue(time: CMTime(seconds: startTime, preferredTimescale: composition.duration.timescale))], completionHandler: { _, image, _, _, _ in
+                    if let image {
+                        subscriber.putNext(UIImage(cgImage: image))
+                    } else {
+                        subscriber.putNext(video.thumbnail)
+                    }
+                    subscriber.putCompletion()
+                })
+                return ActionDisposable {
+                    imageGenerator.cancelAllCGImageGeneration()
+                }
             }
+        } else {
+            thumbnailImage = .single(video.thumbnail)
         }
         
-        let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
+        let _ = (thumbnailImage
+        |> deliverOnMainQueue).startStandalone(next: { [weak self] thumbnailImage in
+            guard let self else {
+                return
+            }
+            
+            let values = MediaEditorValues(peerId: self.context.account.peerId, originalDimensions: dimensions, cropOffset: .zero, cropRect: CGRect(origin: .zero, size: dimensions.cgSize), cropScale: 1.0, cropRotation: 0.0, cropMirroring: false, cropOrientation: nil, gradientColors: nil, videoTrimRange: self.node.previewState?.trimRange, videoIsMuted: false, videoIsFullHd: false, videoIsMirrored: false, videoVolume: nil, additionalVideoPath: nil, additionalVideoIsDual: false, additionalVideoPosition: nil, additionalVideoScale: nil, additionalVideoRotation: nil, additionalVideoPositionChanges: [], additionalVideoTrimRange: nil, additionalVideoOffset: nil, additionalVideoVolume: nil, nightTheme: false, drawing: nil, entities: [], toolValues: [:], audioTrack: nil, audioTrackTrimRange: nil, audioTrackOffset: nil, audioTrackVolume: nil, audioTrackSamples: nil, qualityPreset: .videoMessage)
         
         var resourceAdjustments: VideoMediaResourceAdjustments? = nil
@@ -1581,6 +1613,7 @@ public class VideoMessageCameraScreen: ViewController {
                     bubbleUpEmojiOrStickersets: []
                 ), silentPosting, scheduleTime)
             })
+        })
     }
     
     private var waitingForNextResult = false
@ -1638,7 +1671,14 @@ public class VideoMessageCameraScreen: ViewController {
} }
private func requestAudioSession() { private func requestAudioSession() {
self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: .recordWithOthers, activate: { [weak self] _ in let audioSessionType: ManagedAudioSessionType
if self.context.sharedContext.currentMediaInputSettings.with({ $0 }).pauseMusicOnRecording {
audioSessionType = .record(speaker: false, withOthers: false)
} else {
audioSessionType = .recordWithOthers
}
self.audioSessionDisposable = self.context.sharedContext.mediaManager.audioSession.push(audioSessionType: audioSessionType, activate: { [weak self] _ in
if #available(iOS 13.0, *) { if #available(iOS 13.0, *) {
try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true) try? AVAudioSession.sharedInstance().setAllowHapticsAndSystemSoundsDuringRecording(true)
} }
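
Note: the thumbnail hunk above replaces the blocking copyCGImage(at:) call with AVAssetImageGenerator's asynchronous API, wrapped in a Signal whose disposal cancels generation. The same pattern without the Signal machinery, as a sketch (the function shape and `fallback` parameter are hypothetical):

import AVFoundation
import UIKit

// Returns a cancellation closure; `completion` receives the generated frame,
// or the fallback image if generation fails.
func generateThumbnail(asset: AVAsset, at seconds: Double, fallback: UIImage, completion: @escaping (UIImage) -> Void) -> () -> Void {
    let generator = AVAssetImageGenerator(asset: asset)
    generator.maximumSize = CGSize(width: 400.0, height: 400.0)
    generator.appliesPreferredTrackTransform = true
    let time = CMTime(seconds: seconds, preferredTimescale: 30)
    generator.generateCGImagesAsynchronously(forTimes: [NSValue(time: time)]) { _, image, _, _, _ in
        if let image {
            completion(UIImage(cgImage: image))
        } else {
            completion(fallback)
        }
    }
    return { generator.cancelAllCGImageGeneration() }
}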

View File

@@ -337,18 +337,21 @@ final class ChatRecordingPreviewInputPanelNode: ChatInputPanelNode {
                     ),
                     environment: {},
                     forceUpdate: false,
-                    containerSize: CGSize(width: width - leftInset - rightInset - 45.0 * 2.0, height: 33.0)
+                    containerSize: CGSize(width: min(424, width - leftInset - rightInset - 45.0 * 2.0), height: 33.0)
                 )
                 if let view = self.scrubber.view {
                     if view.superview == nil {
                         self.view.addSubview(view)
                     }
-                    view.frame = CGRect(origin: CGPoint(x: leftInset + 45.0, y: 7.0 - UIScreenPixel), size: scrubberSize)
+                    view.bounds = CGRect(origin: .zero, size: scrubberSize)
                 }
             }
         }
+        
+        if let view = self.scrubber.view {
+            view.frame = CGRect(origin: CGPoint(x: max(leftInset + 45.0, floorToScreenPixels((width - view.bounds.width) / 2.0)), y: 7.0 - UIScreenPixel), size: view.bounds.size)
+        }
         
         let panelHeight = defaultHeight(metrics: metrics)

View File

@@ -377,6 +377,8 @@ public final class OngoingGroupCallContext {
         }
         
         public enum Buffer {
+            case argb(NativeBuffer)
+            case bgra(NativeBuffer)
             case native(NativeBuffer)
             case nv12(NV12Buffer)
             case i420(I420Buffer)
@@ -392,7 +394,13 @@ public final class OngoingGroupCallContext {
         
         init(frameData: CallVideoFrameData) {
             if let nativeBuffer = frameData.buffer as? CallVideoFrameNativePixelBuffer {
-                self.buffer = .native(NativeBuffer(pixelBuffer: nativeBuffer.pixelBuffer))
+                if CVPixelBufferGetPixelFormatType(nativeBuffer.pixelBuffer) == kCVPixelFormatType_32ARGB {
+                    self.buffer = .argb(NativeBuffer(pixelBuffer: nativeBuffer.pixelBuffer))
+                } else if CVPixelBufferGetPixelFormatType(nativeBuffer.pixelBuffer) == kCVPixelFormatType_32BGRA {
+                    self.buffer = .bgra(NativeBuffer(pixelBuffer: nativeBuffer.pixelBuffer))
+                } else {
+                    self.buffer = .native(NativeBuffer(pixelBuffer: nativeBuffer.pixelBuffer))
+                }
             } else if let nv12Buffer = frameData.buffer as? CallVideoFrameNV12Buffer {
                 self.buffer = .nv12(NV12Buffer(wrapped: nv12Buffer))
             } else if let i420Buffer = frameData.buffer as? CallVideoFrameI420Buffer {
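
Note: the new argb/bgra cases let consumers branch on byte order without re-inspecting the CVPixelBuffer. A sketch of the same dispatch as a helper (the BufferKind enum is hypothetical):

import CoreVideo

enum BufferKind {
    case argb, bgra, native
}

// Mirrors the init(frameData:) branching above.
func kind(of pixelBuffer: CVPixelBuffer) -> BufferKind {
    switch CVPixelBufferGetPixelFormatType(pixelBuffer) {
    case kCVPixelFormatType_32ARGB:
        return .argb
    case kCVPixelFormatType_32BGRA:
        return .bgra
    default:
        return .native
    }
}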