Camera improvements

Ilya Laktyushin 2023-04-25 18:38:09 +04:00
parent 385c65d908
commit da421229fd
52 changed files with 4219 additions and 455 deletions

View File

@ -13,6 +13,7 @@ import TemporaryCachedPeerDataManager
import InAppPurchaseManager
import AnimationCache
import MultiAnimationRenderer
import Photos
public final class TelegramApplicationOpenUrlCompletion {
public let completion: (Bool) -> Void
@ -831,7 +832,9 @@ public protocol SharedAccountContext: AnyObject {
func makePremiumLimitController(context: AccountContext, subject: PremiumLimitSubject, count: Int32, action: @escaping () -> Void) -> ViewController
func makeStickerPackScreen(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, mainStickerPack: StickerPackReference, stickerPacks: [StickerPackReference], loadedStickerPacks: [LoadedStickerPack], parentNavigationController: NavigationController?, sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?) -> ViewController
func makeMediaPickerScreen(context: AccountContext, completion: @escaping (PHAsset) -> Void) -> ViewController
func makeProxySettingsController(sharedContext: SharedAccountContext, account: UnauthorizedAccount) -> ViewController
func makeInstalledStickerPacksController(context: AccountContext, mode: InstalledStickerPacksControllerMode) -> ViewController

View File

@ -1,4 +1,44 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load(
"@build_bazel_rules_apple//apple:resources.bzl",
"apple_resource_bundle",
"apple_resource_group",
)
load("//build-system/bazel-utils:plist_fragment.bzl",
"plist_fragment",
)
filegroup(
name = "CameraMetalResources",
srcs = glob([
"MetalResources/**/*.*",
]),
visibility = ["//visibility:public"],
)
plist_fragment(
name = "CameraBundleInfoPlist",
extension = "plist",
template =
"""
<key>CFBundleIdentifier</key>
<string>org.telegram.Camera</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleName</key>
<string>Camera</string>
"""
)
apple_resource_bundle(
name = "CameraBundle",
infoplists = [
":CameraBundleInfoPlist",
],
resources = [
":CameraMetalResources",
],
)
swift_library(
name = "Camera",
@ -9,6 +49,9 @@ swift_library(
copts = [
"-warnings-as-errors",
],
data = [
":CameraBundle",
],
deps = [
"//submodules/SSignalKit/SwiftSignalKit:SwiftSignalKit",
"//submodules/AsyncDisplayKit:AsyncDisplayKit",

View File

@ -0,0 +1,30 @@
#include <metal_stdlib>
using namespace metal;
// Vertex input/output structure for passing results from vertex shader to fragment shader
struct VertexIO
{
float4 position [[position]];
float2 textureCoord [[user(texturecoord)]];
};
// Vertex shader for a textured quad
vertex VertexIO vertexPassThrough(const device packed_float4 *pPosition [[ buffer(0) ]],
const device packed_float2 *pTexCoords [[ buffer(1) ]],
uint vid [[ vertex_id ]])
{
VertexIO outVertex;
outVertex.position = pPosition[vid];
outVertex.textureCoord = pTexCoords[vid];
return outVertex;
}
// Fragment shader for a textured quad
fragment half4 fragmentPassThrough(VertexIO inputFragment [[ stage_in ]],
texture2d<half> inputTexture [[ texture(0) ]],
sampler samplr [[ sampler(0) ]])
{
return inputTexture.sample(samplr, inputFragment.textureCoord);
}
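For reference, a minimal sketch of the CPU-side geometry these pass-through shaders expect — four packed positions in buffer(0) and four texture coordinates in buffer(1), drawn as a triangle strip. The `device` and `encoder` names here are assumptions (any valid MTLDevice and render encoder); CameraPreviewView later in this commit builds the same buffers with aspect-ratio scaling applied.

    // Sketch: full-screen quad feeding vertexPassThrough/fragmentPassThrough.
    let vertexData: [Float] = [          // packed_float4 positions, buffer(0)
        -1.0, -1.0, 0.0, 1.0,
         1.0, -1.0, 0.0, 1.0,
        -1.0,  1.0, 0.0, 1.0,
         1.0,  1.0, 0.0, 1.0
    ]
    let texCoords: [Float] = [           // packed_float2 coordinates, buffer(1)
        0.0, 1.0,
        1.0, 1.0,
        0.0, 0.0,
        1.0, 0.0
    ]
    let vertexBuffer = device.makeBuffer(bytes: vertexData, length: vertexData.count * MemoryLayout<Float>.size, options: [])
    let texCoordBuffer = device.makeBuffer(bytes: texCoords, length: texCoords.count * MemoryLayout<Float>.size, options: [])
    // encoder.setVertexBuffer(vertexBuffer, offset: 0, index: 0)
    // encoder.setVertexBuffer(texCoordBuffer, offset: 0, index: 1)
    // encoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)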

View File

@ -12,10 +12,8 @@ private final class CameraContext {
private let initialConfiguration: Camera.Configuration
private var invalidated = false
private let detectedCodesPipe = ValuePipe<[CameraCode]>()
fileprivate let changingPositionPromise = ValuePromise<Bool>(false)
var previewNode: CameraPreviewNode? {
didSet {
@ -23,27 +21,45 @@ private final class CameraContext {
}
}
var previewView: CameraPreviewView? {
didSet {
}
}
private let filter = CameraTestFilter()
private var videoOrientation: AVCaptureVideoOrientation?
init(queue: Queue, configuration: Camera.Configuration, metrics: Camera.Metrics) {
self.queue = queue
self.initialConfiguration = configuration
self.device = CameraDevice()
self.device.configure(for: self.session, position: configuration.position)
self.configure {
self.session.sessionPreset = configuration.preset
self.input.configure(for: self.session, device: self.device, audio: configuration.audio)
self.output.configure(for: self.session, configuration: configuration)
}
self.output.processSampleBuffer = { [weak self] pixelBuffer, connection in
guard let self else {
return
}
if let previewView = self.previewView, !self.changingPosition {
let videoOrientation = connection.videoOrientation
if #available(iOS 13.0, *) {
previewView.mirroring = connection.inputPorts.first?.sourceDevicePosition == .front
}
if let rotation = CameraPreviewView.Rotation(with: .portrait, videoOrientation: videoOrientation, cameraPosition: self.device.position) {
previewView.rotation = rotation
}
previewView.pixelBuffer = pixelBuffer
Queue.mainQueue().async {
self.videoOrientation = videoOrientation
}
}
}
@ -56,16 +72,15 @@ private final class CameraContext {
guard !self.session.isRunning else {
return
}
self.session.startRunning()
}
func stopCapture(invalidate: Bool = false) {
if invalidate {
self.configure {
self.input.invalidate(for: self.session)
self.output.invalidate(for: self.session)
}
}
self.session.stopRunning()
@ -75,32 +90,97 @@ private final class CameraContext {
self.device.setFocusPoint(point, focusMode: .continuousAutoFocus, exposureMode: .continuousAutoExposure, monitorSubjectAreaChange: true)
}
func setFps(_ fps: Float64) {
self.device.fps = fps
}
private var changingPosition = false {
didSet {
if oldValue != self.changingPosition {
self.changingPositionPromise.set(self.changingPosition)
}
}
}
func togglePosition() {
self.configure {
self.input.invalidate(for: self.session)
let targetPosition: Camera.Position
if case .back = self.device.position {
targetPosition = .front
} else {
targetPosition = .back
}
self.changingPosition = true
self.device.configure(for: self.session, position: targetPosition)
self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
self.queue.after(0.7) {
self.changingPosition = false
}
}
}
public func setPosition(_ position: Camera.Position) {
self.configure {
self.input.invalidate(for: self.session)
self.device.configure(for: self.session, position: position)
self.input.configure(for: self.session, device: self.device, audio: self.initialConfiguration.audio)
}
}
private func configure(_ f: () -> Void) {
self.session.beginConfiguration()
f()
self.session.commitConfiguration()
}
var hasTorch: Signal<Bool, NoError> {
return self.device.isTorchAvailable
}
func setTorchActive(_ active: Bool) {
self.device.setTorchActive(active)
}
var isFlashActive: Signal<Bool, NoError> {
return self.output.isFlashActive
}
private var _flashMode: Camera.FlashMode = .off {
didSet {
self._flashModePromise.set(self._flashMode)
}
}
private var _flashModePromise = ValuePromise<Camera.FlashMode>(.off)
var flashMode: Signal<Camera.FlashMode, NoError> {
return self._flashModePromise.get()
}
func setFlashMode(_ mode: Camera.FlashMode) {
self._flashMode = mode
if mode == .on {
self.output.activeFilter = self.filter
} else if mode == .off {
self.output.activeFilter = nil
}
}
func setZoomLevel(_ zoomLevel: CGFloat) {
self.device.setZoomLevel(zoomLevel)
}
func takePhoto() -> Signal<PhotoCaptureResult, NoError> {
return self.output.takePhoto(orientation: self.videoOrientation ?? .portrait, flashMode: .off) // TODO: pass self._flashMode once flash capture is enabled
}
public func startRecording() -> Signal<Double, NoError> {
return self.output.startRecording()
}
public func stopRecording() -> Signal<String?, NoError> {
return self.output.stopRecording()
}
var detectedCodes: Signal<[CameraCode], NoError> {
return self.detectedCodesPipe.signal()
}
@ -111,25 +191,36 @@ public final class Camera {
public typealias Position = AVCaptureDevice.Position
public typealias FocusMode = AVCaptureDevice.FocusMode
public typealias ExposureMode = AVCaptureDevice.ExposureMode
public typealias FlashMode = AVCaptureDevice.FlashMode
public struct Configuration {
let preset: Preset
let position: Position
let audio: Bool
let photo: Bool
let metadata: Bool
public init(preset: Preset, position: Position, audio: Bool) {
public init(preset: Preset, position: Position, audio: Bool, photo: Bool, metadata: Bool) {
self.preset = preset
self.position = position
self.audio = audio
self.photo = photo
self.metadata = metadata
}
}
private let queue = Queue()
private var contextRef: Unmanaged<CameraContext>?
private weak var previewView: CameraPreviewView?
public let metrics: Camera.Metrics
public init(configuration: Camera.Configuration = Configuration(preset: .hd1920x1080, position: .back, audio: true, photo: false, metadata: false)) {
self.metrics = Camera.Metrics(model: DeviceModel.current)
self.queue.async {
let context = CameraContext(queue: self.queue, configuration: configuration, metrics: self.metrics)
self.contextRef = Unmanaged.passRetained(context)
}
}
@ -165,8 +256,60 @@ public final class Camera {
}
}
public func setPosition(_ position: Camera.Position) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setPosition(position)
}
}
}
public func takePhoto() -> Signal<PhotoCaptureResult, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.takePhoto().start(next: { value in
subscriber.putNext(value)
}, completed: {
subscriber.putCompletion()
}))
}
}
return disposable
}
}
public func startRecording() -> Signal<Double, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.startRecording().start(next: { value in
subscriber.putNext(value)
}, completed: {
subscriber.putCompletion()
}))
}
}
return disposable
}
}
public func stopRecording() -> Signal<String?, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.stopRecording().start(next: { value in
subscriber.putNext(value)
}, completed: {
subscriber.putCompletion()
}))
}
}
return disposable
}
}
public func focus(at point: CGPoint) {
@ -177,10 +320,26 @@ public final class Camera {
}
}
public func setFps(_ fps: Double) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setFps(fps)
}
}
}
public func setFlashMode(_ flashMode: FlashMode) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setFlashMode(flashMode)
}
}
}
public func setZoomLevel(_ zoomLevel: CGFloat) {
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.setZoomLevel(zoomLevel)
}
}
}
@ -200,6 +359,39 @@ public final class Camera {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.hasTorch.start(next: { hasTorch in
subscriber.putNext(hasTorch)
}, completed: {
subscriber.putCompletion()
}))
}
}
return disposable
}
}
public var isFlashActive: Signal<Bool, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.isFlashActive.start(next: { isFlashActive in
subscriber.putNext(isFlashActive)
}, completed: {
subscriber.putCompletion()
}))
}
}
return disposable
}
}
public var flashMode: Signal<Camera.FlashMode, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.flashMode.start(next: { flashMode in
subscriber.putNext(flashMode)
}, completed: {
subscriber.putCompletion()
}))
}
@ -222,14 +414,21 @@ public final class Camera {
}
}
public func attachPreviewView(_ view: CameraPreviewView) {
self.previewView = view
let viewRef: Unmanaged<CameraPreviewView> = Unmanaged.passRetained(view)
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
context.previewView = viewRef.takeUnretainedValue()
viewRef.release()
} else {
Queue.mainQueue().async {
viewRef.release()
}
}
}
}
public var detectedCodes: Signal<[CameraCode], NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
@ -243,4 +442,28 @@ public final class Camera {
return disposable
}
}
public var changingPosition: Signal<Bool, NoError> {
return Signal { subscriber in
let disposable = MetaDisposable()
self.queue.async {
if let context = self.contextRef?.takeUnretainedValue() {
disposable.set(context.changingPositionPromise.get().start(next: { value in
subscriber.putNext(value)
}))
}
}
return disposable
}
}
}
public final class CameraHolder {
public let camera: Camera
public let previewView: CameraPreviewView
public init(camera: Camera, previewView: CameraPreviewView) {
self.camera = camera
self.previewView = previewView
}
}
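A minimal usage sketch of the public API above, assuming a caller that owns the camera and preview view; in real code the disposables would be stored and disposed with their owner.

    // Sketch: driving the new Camera API (call sites hypothetical).
    let camera = Camera(configuration: Camera.Configuration(preset: .hd1920x1080, position: .back, audio: true, photo: true, metadata: false))
    if let previewView = CameraPreviewView(test: false) {
        camera.attachPreviewView(previewView)   // Metal-backed preview
    }
    camera.setFlashMode(.off)
    camera.setZoomLevel(2.0)
    let photoDisposable = camera.takePhoto().start(next: { result in
        if case let .finished(image) = result {
            print("captured \(image.size)")
        }
    })
    let recordingDisposable = camera.startRecording().start(next: { duration in
        print("recording for \(duration)s")
    })
    let stopDisposable = camera.stopRecording().start(next: { path in
        print("saved to \(String(describing: path))")
    })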

View File

@ -5,23 +5,21 @@ import SwiftSignalKit
private let defaultFPS: Double = 30.0
final class CameraDevice {
init() {
}
var position: Camera.Position = .back
public private(set) var videoDevice: AVCaptureDevice? = nil {
didSet {
self.videoDevicePromise.set(.single(self.videoDevice))
}
}
private var videoDevicePromise = Promise<AVCaptureDevice?>()
public private(set) var audioDevice: AVCaptureDevice? = nil
func configure(for session: AVCaptureSession, position: Camera.Position) {
self.position = position
if let videoDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera, .builtInTelephotoCamera], mediaType: .video, position: position).devices.first {
self.videoDevice = videoDevice
self.videoDevicePromise.set(.single(videoDevice))
}
self.audioDevice = AVCaptureDevice.default(for: .audio)
@ -43,7 +41,7 @@ final class CameraDevice {
}
@objc private func subjectAreaChanged() {
self.setFocusPoint(CGPoint(x: 0.5, y: 0.5), focusMode: .continuousAutoFocus, exposureMode: .continuousAutoExposure, monitorSubjectAreaChange: false)
}
var fps: Double = defaultFPS {
@ -61,26 +59,13 @@ final class CameraDevice {
}
}
var isTorchAvailable: Signal<Bool, NoError> {
    return self.videoDevicePromise.get()
    |> mapToSignal { device -> Signal<Bool, NoError> in
        return Signal { subscriber in
            guard let device else {
                return EmptyDisposable
            }
            subscriber.putNext(device.isTorchAvailable)
            let observer = device.observe(\.isTorchAvailable, options: [.new], changeHandler: { device, _ in
                subscriber.putNext(device.isTorchAvailable)
            })
            return ActionDisposable {
                observer.invalidate()
            }
        }
        |> distinctUntilChanged
    }
}
@ -97,6 +82,9 @@ final class CameraDevice {
return self.videoDevicePromise.get()
|> mapToSignal { device -> Signal<Bool, NoError> in
return Signal { subscriber in
guard let device else {
return EmptyDisposable
}
subscriber.putNext(device.isAdjustingFocus)
let observer = device.observe(\.isAdjustingFocus, options: [.new], changeHandler: { device, _ in
subscriber.putNext(device.isAdjustingFocus)
@ -144,4 +132,13 @@ final class CameraDevice {
device.torchMode = active ? .on : .off
}
}
func setZoomLevel(_ zoomLevel: CGFloat) {
guard let device = self.videoDevice else {
return
}
self.transaction(device) { device in
device.videoZoomFactor = max(1.0, min(10.0, zoomLevel))
}
}
}
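setZoomLevel and setTorchActive route through a `transaction` helper that is not part of this hunk; presumably it wraps lockForConfiguration/unlockForConfiguration, which AVFoundation requires around any device mutation. A sketch under that assumption:

    // Sketch: the lock/unlock wrapper setZoomLevel presumably relies on (assumed implementation).
    private func transaction(_ device: AVCaptureDevice, update: (AVCaptureDevice) -> Void) {
        do {
            try device.lockForConfiguration()   // required before mutating zoom, torch, fps, etc.
            update(device)
            device.unlockForConfiguration()
        } catch {
            print("failed to lock device for configuration: \(error)")
        }
    }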

View File

@ -0,0 +1,178 @@
import Foundation
import CoreImage
import CoreMedia
import CoreVideo
import Metal
protocol CameraFilter: AnyObject {
var isPrepared: Bool { get }
func prepare(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int)
func reset()
var outputFormatDescription: CMFormatDescription? { get }
var inputFormatDescription: CMFormatDescription? { get }
func render(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer?
}
func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) ->(
outputBufferPool: CVPixelBufferPool?,
outputColorSpace: CGColorSpace?,
outputFormatDescription: CMFormatDescription?) {
let inputMediaSubType = CMFormatDescriptionGetMediaSubType(inputFormatDescription)
if inputMediaSubType != kCVPixelFormatType_32BGRA {
assertionFailure("Invalid input pixel buffer type \(inputMediaSubType)")
return (nil, nil, nil)
}
let inputDimensions = CMVideoFormatDescriptionGetDimensions(inputFormatDescription)
var pixelBufferAttributes: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: UInt(inputMediaSubType),
kCVPixelBufferWidthKey as String: Int(inputDimensions.width),
kCVPixelBufferHeightKey as String: Int(inputDimensions.height),
kCVPixelBufferIOSurfacePropertiesKey as String: [:] as NSDictionary
]
var cgColorSpace = CGColorSpaceCreateDeviceRGB()
if let inputFormatDescriptionExtension = CMFormatDescriptionGetExtensions(inputFormatDescription) as Dictionary? {
let colorPrimaries = inputFormatDescriptionExtension[kCVImageBufferColorPrimariesKey]
if let colorPrimaries = colorPrimaries {
var colorSpaceProperties: [String: AnyObject] = [kCVImageBufferColorPrimariesKey as String: colorPrimaries]
if let yCbCrMatrix = inputFormatDescriptionExtension[kCVImageBufferYCbCrMatrixKey] {
colorSpaceProperties[kCVImageBufferYCbCrMatrixKey as String] = yCbCrMatrix
}
if let transferFunction = inputFormatDescriptionExtension[kCVImageBufferTransferFunctionKey] {
colorSpaceProperties[kCVImageBufferTransferFunctionKey as String] = transferFunction
}
pixelBufferAttributes[kCVBufferPropagatedAttachmentsKey as String] = colorSpaceProperties
}
if let cvColorspace = inputFormatDescriptionExtension[kCVImageBufferCGColorSpaceKey] {
cgColorSpace = cvColorspace as! CGColorSpace
} else if (colorPrimaries as? String) == (kCVImageBufferColorPrimaries_P3_D65 as String) {
cgColorSpace = CGColorSpace(name: CGColorSpace.displayP3)!
}
}
let poolAttributes = [kCVPixelBufferPoolMinimumBufferCountKey as String: outputRetainedBufferCountHint]
var cvPixelBufferPool: CVPixelBufferPool?
CVPixelBufferPoolCreate(kCFAllocatorDefault, poolAttributes as NSDictionary?, pixelBufferAttributes as NSDictionary?, &cvPixelBufferPool)
guard let pixelBufferPool = cvPixelBufferPool else {
assertionFailure("Allocation failure: Could not allocate pixel buffer pool.")
return (nil, nil, nil)
}
preallocateBuffers(pool: pixelBufferPool, allocationThreshold: outputRetainedBufferCountHint)
// Get the output format description.
var pixelBuffer: CVPixelBuffer?
var outputFormatDescription: CMFormatDescription?
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: outputRetainedBufferCountHint] as NSDictionary
CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pixelBufferPool, auxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault,
imageBuffer: pixelBuffer,
formatDescriptionOut: &outputFormatDescription)
}
pixelBuffer = nil
return (pixelBufferPool, cgColorSpace, outputFormatDescription)
}
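A sketch of how a filter consumes this allocator: call it once from prepare(with:outputRetainedBufferCountHint:), keep the pool, and dequeue one buffer per rendered frame. `inputFormatDescription` is an assumed name and must describe 32BGRA video, as the guard above requires.

    // Sketch: consuming allocateOutputBufferPool (inputFormatDescription assumed).
    let (pool, colorSpace, outputFormat) = allocateOutputBufferPool(with: inputFormatDescription, outputRetainedBufferCountHint: 3)
    guard let pool = pool else {
        return // allocation failed; leave the filter unprepared
    }
    // keep colorSpace/outputFormat around for rendering, then per frame:
    var renderTarget: CVPixelBuffer?
    CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &renderTarget)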
private func preallocateBuffers(pool: CVPixelBufferPool, allocationThreshold: Int) {
var pixelBuffers = [CVPixelBuffer]()
var error: CVReturn = kCVReturnSuccess
let auxAttributes = [kCVPixelBufferPoolAllocationThresholdKey as String: allocationThreshold] as NSDictionary
var pixelBuffer: CVPixelBuffer?
while error == kCVReturnSuccess {
error = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes(kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer)
if let pixelBuffer = pixelBuffer {
pixelBuffers.append(pixelBuffer)
}
pixelBuffer = nil
}
pixelBuffers.removeAll()
}
class CameraTestFilter: CameraFilter {
var isPrepared = false
private var ciContext: CIContext?
private var rosyFilter: CIFilter?
private var outputColorSpace: CGColorSpace?
private var outputPixelBufferPool: CVPixelBufferPool?
private(set) var outputFormatDescription: CMFormatDescription?
private(set) var inputFormatDescription: CMFormatDescription?
/// - Tag: FilterCoreImageRosy
func prepare(with formatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) {
reset()
(outputPixelBufferPool,
outputColorSpace,
outputFormatDescription) = allocateOutputBufferPool(with: formatDescription,
outputRetainedBufferCountHint: outputRetainedBufferCountHint)
if outputPixelBufferPool == nil {
return
}
inputFormatDescription = formatDescription
ciContext = CIContext()
rosyFilter = CIFilter(name: "CIColorControls")
rosyFilter!.setValue(0.0, forKey: kCIInputBrightnessKey)
rosyFilter!.setValue(0.0, forKey: kCIInputSaturationKey)
rosyFilter!.setValue(1.1, forKey: kCIInputContrastKey)
isPrepared = true
}
func reset() {
ciContext = nil
rosyFilter = nil
outputColorSpace = nil
outputPixelBufferPool = nil
outputFormatDescription = nil
inputFormatDescription = nil
isPrepared = false
}
func render(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
guard let rosyFilter = rosyFilter,
let ciContext = ciContext,
isPrepared else {
assertionFailure("Invalid state: Not prepared")
return nil
}
let sourceImage = CIImage(cvImageBuffer: pixelBuffer)
rosyFilter.setValue(sourceImage, forKey: kCIInputImageKey)
guard let filteredImage = rosyFilter.value(forKey: kCIOutputImageKey) as? CIImage else {
print("CIFilter failed to render image")
return nil
}
var pbuf: CVPixelBuffer?
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &pbuf)
guard let outputPixelBuffer = pbuf else {
print("Allocation failure")
return nil
}
// Render the filtered image out to a pixel buffer (no locking needed, as CIContext's render method will do that)
ciContext.render(filteredImage, to: outputPixelBuffer, bounds: filteredImage.extent, colorSpace: outputColorSpace)
return outputPixelBuffer
}
}
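Despite the "rosy" name inherited from Apple's sample code, this filter is a CIColorControls pass that desaturates the image and slightly boosts contrast (used in this commit as the fake-flash effect). The per-frame contract, as CameraOutput exercises it below — `formatDescription` and `cameraPixelBuffer` are assumed inputs:

    // Sketch: prepare once, then render every frame.
    let filter = CameraTestFilter()
    if !filter.isPrepared {
        filter.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
    }
    if let filtered = filter.render(pixelBuffer: cameraPixelBuffer) {
        // hand the desaturated buffer to the preview or recorder
    }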

View File

@ -0,0 +1,171 @@
import Foundation
public extension Camera {
enum Metrics {
case singleCamera
case iPhone14
case iPhone14Plus
case iPhone14Pro
case iPhone14ProMax
case unknown
init(model: DeviceModel) {
switch model {
case .iPodTouch1, .iPodTouch2, .iPodTouch3, .iPodTouch4, .iPodTouch5, .iPodTouch6, .iPodTouch7:
self = .singleCamera
case .iPhone14:
self = .iPhone14
case .iPhone14Plus:
self = .iPhone14Plus
case .iPhone14Pro:
self = .iPhone14Pro
case .iPhone14ProMax:
self = .iPhone14ProMax
case .unknown:
self = .unknown
}
}
public var zoomLevels: [Float] {
switch self {
case .singleCamera:
return [1.0]
case .iPhone14:
return [0.5, 1.0, 2.0]
case .iPhone14Plus:
return [0.5, 1.0, 2.0]
case .iPhone14Pro:
return [0.5, 1.0, 2.0, 3.0]
case .iPhone14ProMax:
return [0.5, 1.0, 2.0, 3.0]
case .unknown:
return [1.0, 2.0]
}
}
}
}
enum DeviceModel: CaseIterable {
static var allCases: [DeviceModel] {
return [
.iPodTouch1,
.iPodTouch2,
.iPodTouch3,
.iPodTouch4,
.iPodTouch5,
.iPodTouch6,
.iPodTouch7,
.iPhone14,
.iPhone14Plus,
.iPhone14Pro,
.iPhone14ProMax
]
}
case iPodTouch1
case iPodTouch2
case iPodTouch3
case iPodTouch4
case iPodTouch5
case iPodTouch6
case iPodTouch7
case iPhone14
case iPhone14Plus
case iPhone14Pro
case iPhone14ProMax
case unknown(String)
var modelId: String? {
switch self {
case .iPodTouch1:
return "iPod1,1"
case .iPodTouch2:
return "iPod2,1"
case .iPodTouch3:
return "iPod3,1"
case .iPodTouch4:
return "iPod4,1"
case .iPodTouch5:
return "iPod5,1"
case .iPodTouch6:
return "iPod7,1"
case .iPodTouch7:
return "iPod9,1"
case .iPhone14:
return "iPhone14,7"
case .iPhone14Plus:
return "iPhone14,8"
case .iPhone14Pro:
return "iPhone15,2"
case .iPhone14ProMax:
return "iPhone15,3"
case let .unknown(modelId):
return modelId
}
}
var modelName: String {
switch self {
case .iPodTouch1:
return "iPod touch 1G"
case .iPodTouch2:
return "iPod touch 2G"
case .iPodTouch3:
return "iPod touch 3G"
case .iPodTouch4:
return "iPod touch 4G"
case .iPodTouch5:
return "iPod touch 5G"
case .iPodTouch6:
return "iPod touch 6G"
case .iPodTouch7:
return "iPod touch 7G"
case .iPhone14:
return "iPhone 14"
case .iPhone14Plus:
return "iPhone 14 Plus"
case .iPhone14Pro:
return "iPhone 14 Pro"
case .iPhone14ProMax:
return "iPhone 14 Pro Max"
case let .unknown(modelId):
if modelId.hasPrefix("iPhone") {
return "Unknown iPhone"
} else if modelId.hasPrefix("iPod") {
return "Unknown iPod"
} else if modelId.hasPrefix("iPad") {
return "Unknown iPad"
} else {
return "Unknown Device"
}
}
}
static let current = DeviceModel()
private init() {
var systemInfo = utsname()
uname(&systemInfo)
let modelCode = withUnsafePointer(to: &systemInfo.machine) {
$0.withMemoryRebound(to: CChar.self, capacity: 1) {
ptr in String.init(validatingUTF8: ptr)
}
}
var result: DeviceModel?
if let modelCode {
for model in DeviceModel.allCases {
if model.modelId == modelCode {
result = model
break
}
}
}
if let result {
self = result
} else {
self = .unknown(modelCode ?? "")
}
}
}
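A short sketch of how the two types combine: detect the running hardware once, then ask for its camera zoom stops.

    // Sketch: mapping the current device to camera zoom stops.
    let metrics = Camera.Metrics(model: DeviceModel.current)
    print(metrics.zoomLevels) // e.g. [0.5, 1.0, 2.0, 3.0] on an iPhone 14 Pro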

View File

@ -1,4 +1,5 @@
import AVFoundation
import SwiftSignalKit
public struct CameraCode: Equatable {
public enum CodeType {
@ -39,7 +40,7 @@ public struct CameraCode: Equatable {
}
final class CameraOutput: NSObject {
private let photoOutput = AVCapturePhotoOutput()
private let videoOutput = AVCaptureVideoDataOutput()
private let audioOutput = AVCaptureAudioDataOutput()
private let metadataOutput = AVCaptureMetadataOutput()
@ -47,14 +48,20 @@ final class CameraOutput: NSObject {
private let queue = DispatchQueue(label: "")
private let metadataQueue = DispatchQueue(label: "")
private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
private var videoRecorder: VideoRecorder?
var activeFilter: CameraFilter?
var processSampleBuffer: ((CVImageBuffer, AVCaptureConnection) -> Void)?
var processCodes: (([CameraCode]) -> Void)?
override init() {
super.init()
self.videoOutput.alwaysDiscardsLateVideoFrames = true
self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as [String : Any]
//[kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]
}
deinit {
@ -62,16 +69,19 @@ final class CameraOutput: NSObject {
self.audioOutput.setSampleBufferDelegate(nil, queue: nil)
}
func configure(for session: AVCaptureSession, configuration: Camera.Configuration) {
if session.canAddOutput(self.videoOutput) {
session.addOutput(self.videoOutput)
self.videoOutput.setSampleBufferDelegate(self, queue: self.queue)
}
if configuration.audio, session.canAddOutput(self.audioOutput) {
session.addOutput(self.audioOutput)
self.audioOutput.setSampleBufferDelegate(self, queue: self.queue)
}
if configuration.photo, session.canAddOutput(self.photoOutput) {
session.addOutput(self.photoOutput)
}
if configuration.metadata, session.canAddOutput(self.metadataOutput) {
session.addOutput(self.metadataOutput)
self.metadataOutput.setMetadataObjectsDelegate(self, queue: self.metadataQueue)
@ -86,6 +96,100 @@ final class CameraOutput: NSObject {
session.removeOutput(output)
}
}
var isFlashActive: Signal<Bool, NoError> {
return Signal { [weak self] subscriber in
guard let self else {
return EmptyDisposable
}
subscriber.putNext(self.photoOutput.isFlashScene)
let observer = self.photoOutput.observe(\.isFlashScene, options: [.new], changeHandler: { device, _ in
subscriber.putNext(self.photoOutput.isFlashScene)
})
return ActionDisposable {
observer.invalidate()
}
}
|> distinctUntilChanged
}
func takePhoto(orientation: AVCaptureVideoOrientation, flashMode: AVCaptureDevice.FlashMode) -> Signal<PhotoCaptureResult, NoError> {
if let connection = self.photoOutput.connection(with: .video) {
connection.videoOrientation = orientation
}
// var settings = AVCapturePhotoSettings()
// if self.photoOutput.availablePhotoCodecTypes.contains(.hevc) {
// settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
// }
let settings = AVCapturePhotoSettings(format: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)])
settings.flashMode = flashMode
if let previewPhotoPixelFormatType = settings.availablePreviewPhotoPixelFormatTypes.first {
settings.previewPhotoFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPhotoPixelFormatType]
}
if #available(iOS 13.0, *) {
settings.photoQualityPrioritization = .balanced
}
let uniqueId = settings.uniqueID
let photoCapture = PhotoCaptureContext(settings: settings, filter: self.activeFilter)
self.photoCaptureRequests[uniqueId] = photoCapture
self.photoOutput.capturePhoto(with: settings, delegate: photoCapture)
return photoCapture.signal
|> afterDisposed { [weak self] in
self?.photoCaptureRequests.removeValue(forKey: uniqueId)
}
}
private var recordingCompletionPipe = ValuePipe<String?>()
func startRecording() -> Signal<Double, NoError> {
guard self.videoRecorder == nil else {
return .complete()
}
guard let videoSettings = self.videoOutput.recommendedVideoSettings(forVideoCodecType: .h264, assetWriterOutputFileType: .mp4) else {
return .complete()
}
guard let audioSettings = self.audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mp4) else {
return .complete()
}
let outputFileName = NSUUID().uuidString
let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent(outputFileName).appendingPathExtension("mp4")
let outputFilePath = outputFileURL.absoluteString
let videoRecorder = VideoRecorder(preset: MediaPreset(videoSettings: videoSettings, audioSettings: audioSettings), videoTransform: CGAffineTransform(rotationAngle: .pi / 2.0), fileUrl: outputFileURL, completion: { [weak self] result in
if case .success = result {
self?.recordingCompletionPipe.putNext(outputFilePath)
} else {
self?.recordingCompletionPipe.putNext(nil)
}
})
videoRecorder.start()
self.videoRecorder = videoRecorder
return Signal { subscriber in
let timer = SwiftSignalKit.Timer(timeout: 0.33, repeat: true, completion: { [weak videoRecorder] in
subscriber.putNext(videoRecorder?.duration ?? 0.0)
}, queue: Queue.mainQueue())
timer.start()
return ActionDisposable {
timer.invalidate()
}
}
}
func stopRecording() -> Signal<String?, NoError> {
self.videoRecorder?.stop()
return self.recordingCompletionPipe.signal()
|> take(1)
|> afterDisposed {
self.videoRecorder = nil
}
}
}
extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
@ -93,8 +197,31 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
guard CMSampleBufferDataIsReady(sampleBuffer) else {
return
}
let finalSampleBuffer: CMSampleBuffer = sampleBuffer
if let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) {
var finalVideoPixelBuffer = videoPixelBuffer
if let filter = self.activeFilter {
if !filter.isPrepared {
filter.prepare(with: formatDescription, outputRetainedBufferCountHint: 3)
}
guard let filteredBuffer = filter.render(pixelBuffer: finalVideoPixelBuffer) else {
return
}
finalVideoPixelBuffer = filteredBuffer
}
self.processSampleBuffer?(finalVideoPixelBuffer, connection)
}
if let videoRecorder = self.videoRecorder, videoRecorder.isRecording || videoRecorder.isStopping {
let mediaType = sampleBuffer.type
if mediaType == kCMMediaType_Video {
videoRecorder.appendVideo(sampleBuffer: finalSampleBuffer)
} else if mediaType == kCMMediaType_Audio {
videoRecorder.appendAudio(sampleBuffer: sampleBuffer)
}
}
}
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

View File

@ -0,0 +1,305 @@
import Foundation
import UIKit
import Display
import AVFoundation
import SwiftSignalKit
import Metal
import MetalKit
import CoreMedia
public class CameraPreviewView: MTKView {
private let queue = DispatchQueue(label: "CameraPreview", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
private let commandQueue: MTLCommandQueue
private var textureCache: CVMetalTextureCache?
private var sampler: MTLSamplerState!
private var renderPipelineState: MTLRenderPipelineState!
private var vertexCoordBuffer: MTLBuffer!
private var texCoordBuffer: MTLBuffer!
private var textureWidth: Int = 0
private var textureHeight: Int = 0
private var textureMirroring = false
private var textureRotation: Rotation = .rotate0Degrees
private var textureTransform: CGAffineTransform?
private var _bounds = CGRectNull
public enum Rotation: Int {
case rotate0Degrees
case rotate90Degrees
case rotate180Degrees
case rotate270Degrees
}
private var _mirroring: Bool?
private var _scheduledMirroring: Bool?
public var mirroring = false {
didSet {
self.queue.sync {
if self._mirroring != nil {
self._scheduledMirroring = self.mirroring
} else {
self._mirroring = self.mirroring
}
}
}
}
private var _rotation: Rotation = .rotate0Degrees
public var rotation: Rotation = .rotate0Degrees {
didSet {
self.queue.sync {
self._rotation = rotation
}
}
}
private var _pixelBuffer: CVPixelBuffer?
var pixelBuffer: CVPixelBuffer? {
didSet {
self.queue.sync {
if let scheduledMirroring = self._scheduledMirroring {
self._scheduledMirroring = nil
self._mirroring = scheduledMirroring
}
self._pixelBuffer = pixelBuffer
}
}
}
public init?(test: Bool) {
let mainBundle = Bundle(for: CameraPreviewView.self)
guard let path = mainBundle.path(forResource: "CameraBundle", ofType: "bundle") else {
return nil
}
guard let bundle = Bundle(path: path) else {
return nil
}
guard let device = MTLCreateSystemDefaultDevice() else {
return nil
}
guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: bundle) else {
return nil
}
guard let commandQueue = device.makeCommandQueue() else {
return nil
}
self.commandQueue = commandQueue
super.init(frame: .zero, device: device)
self.colorPixelFormat = .bgra8Unorm
let pipelineDescriptor = MTLRenderPipelineDescriptor()
pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough")
pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough")
let samplerDescriptor = MTLSamplerDescriptor()
samplerDescriptor.sAddressMode = .clampToEdge
samplerDescriptor.tAddressMode = .clampToEdge
samplerDescriptor.minFilter = .linear
samplerDescriptor.magFilter = .linear
self.sampler = device.makeSamplerState(descriptor: samplerDescriptor)
do {
self.renderPipelineState = try device.makeRenderPipelineState(descriptor: pipelineDescriptor)
} catch {
fatalError("\(error)")
}
self.setupTextureCache()
}
required public init(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func setupTextureCache() {
var newTextureCache: CVMetalTextureCache?
if CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device!, nil, &newTextureCache) == kCVReturnSuccess {
self.textureCache = newTextureCache
} else {
assertionFailure("Unable to allocate texture cache")
}
}
private func setupTransform(width: Int, height: Int, rotation: Rotation, mirroring: Bool) {
var scaleX: Float = 1.0
var scaleY: Float = 1.0
var resizeAspect: Float = 1.0
self._bounds = self.bounds
self.textureWidth = width
self.textureHeight = height
self.textureMirroring = mirroring
self.textureRotation = rotation
if self.textureWidth > 0 && self.textureHeight > 0 {
switch self.textureRotation {
case .rotate0Degrees, .rotate180Degrees:
scaleX = Float(self._bounds.width / CGFloat(self.textureWidth))
scaleY = Float(self._bounds.height / CGFloat(self.textureHeight))
case .rotate90Degrees, .rotate270Degrees:
scaleX = Float(self._bounds.width / CGFloat(self.textureHeight))
scaleY = Float(self._bounds.height / CGFloat(self.textureWidth))
}
}
resizeAspect = min(scaleX, scaleY)
if scaleX < scaleY {
scaleY = scaleX / scaleY
scaleX = 1.0
} else {
scaleX = scaleY / scaleX
scaleY = 1.0
}
if self.textureMirroring {
scaleX *= -1.0
}
let vertexData: [Float] = [
-scaleX, -scaleY, 0.0, 1.0,
scaleX, -scaleY, 0.0, 1.0,
-scaleX, scaleY, 0.0, 1.0,
scaleX, scaleY, 0.0, 1.0
]
self.vertexCoordBuffer = device!.makeBuffer(bytes: vertexData, length: vertexData.count * MemoryLayout<Float>.size, options: [])
var texCoordBufferData: [Float]
switch self.textureRotation {
case .rotate0Degrees:
texCoordBufferData = [
0.0, 1.0,
1.0, 1.0,
0.0, 0.0,
1.0, 0.0
]
case .rotate180Degrees:
texCoordBufferData = [
1.0, 0.0,
0.0, 0.0,
1.0, 1.0,
0.0, 1.0
]
case .rotate90Degrees:
texCoordBufferData = [
1.0, 1.0,
1.0, 0.0,
0.0, 1.0,
0.0, 0.0
]
case .rotate270Degrees:
texCoordBufferData = [
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0
]
}
self.texCoordBuffer = device?.makeBuffer(bytes: texCoordBufferData, length: texCoordBufferData.count * MemoryLayout<Float>.size, options: [])
var transform = CGAffineTransform.identity
if self.textureMirroring {
transform = transform.concatenating(CGAffineTransform(scaleX: -1, y: 1))
transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(self.textureWidth), y: 0))
}
switch self.textureRotation {
case .rotate0Degrees:
transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(0)))
case .rotate180Degrees:
transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(Double.pi)))
transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(self.textureWidth), y: CGFloat(self.textureHeight)))
case .rotate90Degrees:
transform = transform.concatenating(CGAffineTransform(rotationAngle: CGFloat(Double.pi) / 2))
transform = transform.concatenating(CGAffineTransform(translationX: CGFloat(self.textureHeight), y: 0))
case .rotate270Degrees:
transform = transform.concatenating(CGAffineTransform(rotationAngle: 3 * CGFloat(Double.pi) / 2))
transform = transform.concatenating(CGAffineTransform(translationX: 0, y: CGFloat(self.textureWidth)))
}
transform = transform.concatenating(CGAffineTransform(scaleX: CGFloat(resizeAspect), y: CGFloat(resizeAspect)))
let transformRect = CGRect(origin: .zero, size: CGSize(width: self.textureWidth, height: self.textureHeight)).applying(transform)
let xShift = (self._bounds.size.width - transformRect.size.width) / 2
let yShift = (self._bounds.size.height - transformRect.size.height) / 2
transform = transform.concatenating(CGAffineTransform(translationX: xShift, y: yShift))
self.textureTransform = transform.inverted()
}
public override func draw(_ rect: CGRect) {
var pixelBuffer: CVPixelBuffer?
var mirroring = false
var rotation: Rotation = .rotate0Degrees
self.queue.sync {
pixelBuffer = self._pixelBuffer
if let mirroringValue = self._mirroring {
mirroring = mirroringValue
}
rotation = self._rotation
}
guard let drawable = currentDrawable, let currentRenderPassDescriptor = currentRenderPassDescriptor, let previewPixelBuffer = pixelBuffer else {
return
}
let width = CVPixelBufferGetWidth(previewPixelBuffer)
let height = CVPixelBufferGetHeight(previewPixelBuffer)
if self.textureCache == nil {
self.setupTextureCache()
}
var cvTextureOut: CVMetalTexture?
CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault,
textureCache!,
previewPixelBuffer,
nil,
.bgra8Unorm,
width,
height,
0,
&cvTextureOut)
guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else {
CVMetalTextureCacheFlush(self.textureCache!, 0)
return
}
if texture.width != self.textureWidth ||
texture.height != self.textureHeight ||
self.bounds != self._bounds ||
rotation != self.textureRotation ||
mirroring != self.textureMirroring {
self.setupTransform(width: texture.width, height: texture.height, rotation: rotation, mirroring: mirroring)
}
guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
CVMetalTextureCacheFlush(self.textureCache!, 0)
return
}
guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
CVMetalTextureCacheFlush(self.textureCache!, 0)
return
}
commandEncoder.setRenderPipelineState(self.renderPipelineState!)
commandEncoder.setVertexBuffer(self.vertexCoordBuffer, offset: 0, index: 0)
commandEncoder.setVertexBuffer(self.texCoordBuffer, offset: 0, index: 1)
commandEncoder.setFragmentTexture(texture, index: 0)
commandEncoder.setFragmentSamplerState(self.sampler, index: 0)
commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
commandEncoder.endEncoding()
commandBuffer.present(drawable)
commandBuffer.commit()
}
}

View File

@ -1,3 +1,4 @@
import UIKit
import AVFoundation
extension AVFrameRateRange {
@ -31,7 +32,6 @@ extension AVCaptureDevice {
}
let diff = frameRates.map { abs($0 - fps) }
if let minElement: Float64 = diff.min() {
for i in 0..<diff.count where diff[i] == minElement {
return (frameRates[i], durations[i])
@ -41,3 +41,137 @@ extension AVCaptureDevice {
return nil
}
}
extension CMSampleBuffer {
var presentationTimestamp: CMTime {
return CMSampleBufferGetPresentationTimeStamp(self)
}
var type: CMMediaType {
if let formatDescription = CMSampleBufferGetFormatDescription(self) {
return CMFormatDescriptionGetMediaType(formatDescription)
} else {
return kCMMediaType_Video
}
}
}
public extension AVCaptureVideoOrientation {
init?(interfaceOrientation: UIInterfaceOrientation) {
switch interfaceOrientation {
case .portrait: self = .portrait
case .portraitUpsideDown: self = .portraitUpsideDown
case .landscapeLeft: self = .landscapeLeft
case .landscapeRight: self = .landscapeRight
default: return nil
}
}
}
public extension CameraPreviewView.Rotation {
init?(with interfaceOrientation: UIInterfaceOrientation, videoOrientation: AVCaptureVideoOrientation, cameraPosition: AVCaptureDevice.Position) {
switch videoOrientation {
case .portrait:
switch interfaceOrientation {
case .landscapeRight:
if cameraPosition == .front {
self = .rotate90Degrees
} else {
self = .rotate270Degrees
}
case .landscapeLeft:
if cameraPosition == .front {
self = .rotate270Degrees
} else {
self = .rotate90Degrees
}
case .portrait:
self = .rotate0Degrees
case .portraitUpsideDown:
self = .rotate180Degrees
default: return nil
}
case .portraitUpsideDown:
switch interfaceOrientation {
case .landscapeRight:
if cameraPosition == .front {
self = .rotate270Degrees
} else {
self = .rotate90Degrees
}
case .landscapeLeft:
if cameraPosition == .front {
self = .rotate90Degrees
} else {
self = .rotate270Degrees
}
case .portrait:
self = .rotate180Degrees
case .portraitUpsideDown:
self = .rotate0Degrees
default: return nil
}
case .landscapeRight:
switch interfaceOrientation {
case .landscapeRight:
self = .rotate0Degrees
case .landscapeLeft:
self = .rotate180Degrees
case .portrait:
if cameraPosition == .front {
self = .rotate270Degrees
} else {
self = .rotate90Degrees
}
case .portraitUpsideDown:
if cameraPosition == .front {
self = .rotate90Degrees
} else {
self = .rotate270Degrees
}
default: return nil
}
case .landscapeLeft:
switch interfaceOrientation {
case .landscapeLeft:
self = .rotate0Degrees
case .landscapeRight:
self = .rotate180Degrees
case .portrait:
if cameraPosition == .front {
self = .rotate90Degrees
} else {
self = .rotate270Degrees
}
case .portraitUpsideDown:
if cameraPosition == .front {
self = .rotate270Degrees
} else {
self = .rotate90Degrees
}
default: return nil
}
@unknown default:
fatalError("Unknown orientation.")
}
}
}
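A sketch of the rotation lookup in use — this is the same call Camera.swift makes per frame, with the interface orientation pinned to portrait:

    // Sketch: front camera delivering landscapeRight frames into a portrait UI.
    let rotation = CameraPreviewView.Rotation(with: .portrait, videoOrientation: .landscapeRight, cameraPosition: .front)
    // rotation == .rotate270Degrees per the table above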

View File

@ -0,0 +1,85 @@
import Foundation
import AVFoundation
import UIKit
import SwiftSignalKit
public enum PhotoCaptureResult {
case began
case finished(UIImage)
case failed
}
final class PhotoCaptureContext: NSObject, AVCapturePhotoCaptureDelegate {
private let pipe = ValuePipe<PhotoCaptureResult>()
private let filter: CameraFilter?
init(settings: AVCapturePhotoSettings, filter: CameraFilter?) {
self.filter = filter
super.init()
}
func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
self.pipe.putNext(.began)
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let _ = error {
self.pipe.putNext(.failed)
} else {
guard let photoPixelBuffer = photo.pixelBuffer else {
print("Error occurred while capturing photo: Missing pixel buffer (\(String(describing: error)))")
return
}
var photoFormatDescription: CMFormatDescription?
CMVideoFormatDescriptionCreateForImageBuffer(allocator: kCFAllocatorDefault, imageBuffer: photoPixelBuffer, formatDescriptionOut: &photoFormatDescription)
var finalPixelBuffer = photoPixelBuffer
if let filter = self.filter {
if !filter.isPrepared {
if let unwrappedPhotoFormatDescription = photoFormatDescription {
filter.prepare(with: unwrappedPhotoFormatDescription, outputRetainedBufferCountHint: 2)
}
}
guard let filteredPixelBuffer = filter.render(pixelBuffer: finalPixelBuffer) else {
print("Unable to filter photo buffer")
return
}
finalPixelBuffer = filteredPixelBuffer
}
let ciContext = CIContext()
let renderedCIImage = CIImage(cvImageBuffer: finalPixelBuffer)
if let cgImage = ciContext.createCGImage(renderedCIImage, from: renderedCIImage.extent) {
var image = UIImage(cgImage: cgImage, scale: 1.0, orientation: .right)
if image.imageOrientation != .up {
UIGraphicsBeginImageContextWithOptions(image.size, true, image.scale)
image.draw(in: CGRect(origin: .zero, size: image.size))
if let currentImage = UIGraphicsGetImageFromCurrentImageContext() {
image = currentImage
}
UIGraphicsEndImageContext()
}
self.pipe.putNext(.finished(image))
} else {
self.pipe.putNext(.failed)
}
}
}
var signal: Signal<PhotoCaptureResult, NoError> {
return self.pipe.signal()
|> take(until: { next in
let complete: Bool
switch next {
case .finished, .failed:
complete = true
default:
complete = false
}
return SignalTakeAction(passthrough: true, complete: complete)
})
}
}
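The take(until:) above means a subscriber sees .began as a passthrough event and then exactly one terminal .finished or .failed before the signal completes. A consumption sketch, with `photoCaptureContext` assumed to be built as in CameraOutput.takePhoto:

    // Sketch: observing a single capture.
    let disposable = photoCaptureContext.signal.start(next: { result in
        switch result {
        case .began:
            print("shutter") // e.g. flash the preview
        case let .finished(image):
            print("captured \(image.size)")
        case .failed:
            print("capture failed")
        }
    })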

View File

@ -0,0 +1,204 @@
import Foundation
import AVFoundation
import SwiftSignalKit
struct MediaPreset {
var videoSettings: [String: Any]
var audioSettings: [String: Any]
init(videoSettings: [String: Any], audioSettings: [String: Any]) {
self.videoSettings = videoSettings
self.audioSettings = audioSettings
}
var hasAudio: Bool {
return !self.audioSettings.isEmpty
}
}
final class VideoRecorder {
enum Result {
enum Error {
case generic
}
case success
case writeError(Error)
case finishError(Error)
}
private let completion: (Result) -> Void
private let queue = Queue()
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private let preset: MediaPreset
private let videoTransform: CGAffineTransform
private let fileUrl: URL
private (set) var isRecording = false
private (set) var isStopping = false
private var finishedWriting = false
private var captureStartTimestamp: Double?
private var firstVideoTimestamp: CMTime?
private var lastVideoTimestamp: CMTime?
private var lastAudioTimestamp: CMTime?
private var pendingAudioBuffers: [CMSampleBuffer] = []
init(preset: MediaPreset, videoTransform: CGAffineTransform, fileUrl: URL, completion: @escaping (Result) -> Void) {
self.preset = preset
self.videoTransform = videoTransform
self.fileUrl = fileUrl
self.completion = completion
}
func start() {
self.queue.async {
guard self.assetWriter == nil else {
return
}
self.captureStartTimestamp = CFAbsoluteTimeGetCurrent()
guard let assetWriter = try? AVAssetWriter(url: self.fileUrl, fileType: .mp4) else {
return
}
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: self.preset.videoSettings)
videoInput.expectsMediaDataInRealTime = true
videoInput.transform = self.videoTransform
if assetWriter.canAdd(videoInput) {
assetWriter.add(videoInput)
}
let audioInput: AVAssetWriterInput?
if self.preset.hasAudio {
audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: self.preset.audioSettings)
audioInput!.expectsMediaDataInRealTime = true
if assetWriter.canAdd(audioInput!) {
assetWriter.add(audioInput!)
}
} else {
audioInput = nil
}
self.assetWriter = assetWriter
self.videoInput = videoInput
self.audioInput = audioInput
self.isRecording = true
assetWriter.startWriting()
}
}
func stop() {
self.queue.async {
guard let captureStartTimestamp = self.captureStartTimestamp, abs(CFAbsoluteTimeGetCurrent() - captureStartTimestamp) > 0.5 else {
return
}
self.isStopping = true
if self.audioInput == nil {
self.finish()
}
}
}
private func finish() {
guard let assetWriter = self.assetWriter else {
return
}
self.queue.async {
self.isRecording = false
self.isStopping = false
assetWriter.finishWriting {
self.finishedWriting = true
if case .completed = assetWriter.status {
self.completion(.success)
} else {
self.completion(.finishError(.generic))
}
}
}
}
func appendVideo(sampleBuffer: CMSampleBuffer) {
self.queue.async {
guard let assetWriter = self.assetWriter, let videoInput = self.videoInput, (self.isRecording || self.isStopping) && !self.finishedWriting else {
return
}
let timestamp = sampleBuffer.presentationTimestamp
switch assetWriter.status {
case .unknown:
break
case .writing:
if self.firstVideoTimestamp == nil {
self.firstVideoTimestamp = timestamp
assetWriter.startSession(atSourceTime: timestamp)
}
while !videoInput.isReadyForMoreMediaData {
RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.1))
}
if videoInput.append(sampleBuffer) {
self.lastVideoTimestamp = timestamp
}
if self.audioInput != nil && self.isStopping, let lastVideoTimestamp = self.lastVideoTimestamp, let lastAudioTimestamp = self.lastAudioTimestamp, lastVideoTimestamp >= lastAudioTimestamp {
self.finish()
}
case .failed:
self.isRecording = false
self.completion(.writeError(.generic))
default:
break
}
}
}
func appendAudio(sampleBuffer: CMSampleBuffer) {
self.queue.async {
guard let _ = self.assetWriter, let audioInput = self.audioInput, !self.isStopping && !self.finishedWriting else {
return
}
let timestamp = sampleBuffer.presentationTimestamp
if let _ = self.firstVideoTimestamp {
if !self.pendingAudioBuffers.isEmpty {
for buffer in self.pendingAudioBuffers {
audioInput.append(buffer)
}
self.pendingAudioBuffers.removeAll()
}
while !audioInput.isReadyForMoreMediaData {
RunLoop.current.run(until: Date(timeIntervalSinceNow: 0.1))
}
if audioInput.append(sampleBuffer) {
self.lastAudioTimestamp = timestamp
}
} else {
self.pendingAudioBuffers.append(sampleBuffer)
}
}
}
var duration: Double? {
guard let firstTimestamp = self.firstVideoTimestamp, let lastTimestamp = self.lastVideoTimestamp else {
return nil
}
return (lastTimestamp - firstTimestamp).seconds
}
}
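A standalone usage sketch; `videoSettings` and `audioSettings` are assumed to come from the capture outputs' recommended-settings calls, exactly as CameraOutput.startRecording does above.

    // Sketch: driving VideoRecorder directly (settings dictionaries assumed).
    let recorder = VideoRecorder(
        preset: MediaPreset(videoSettings: videoSettings, audioSettings: audioSettings),
        videoTransform: CGAffineTransform(rotationAngle: .pi / 2.0),
        fileUrl: URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("clip.mp4"),
        completion: { result in
            if case .success = result {
                print("recording finished")
            }
        }
    )
    recorder.start()
    // from the AVCapture delegate queue:
    // recorder.appendVideo(sampleBuffer: videoSampleBuffer)
    // recorder.appendAudio(sampleBuffer: audioSampleBuffer)
    recorder.stop()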

View File

@ -1,11 +0,0 @@
#import <UIKit/UIKit.h>
//! Project version number for Camera.
FOUNDATION_EXPORT double CameraVersionNumber;
//! Project version string for Camera.
FOUNDATION_EXPORT const unsigned char CameraVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <Camera/PublicHeader.h>

View File

@ -1,21 +0,0 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
import SwiftSignalKit
final class CameraModeNode: ASDisplayNode {
enum Mode {
case photo
case video
case scan
}
override init() {
super.init()
}
func update(mode: Mode, transition: ContainedViewLayoutTransition) {
}
}

View File

@ -1,227 +0,0 @@
import Foundation
import UIKit
import AsyncDisplayKit
import Display
private final class ZoomWheelNodeDrawingState: NSObject {
let transition: CGFloat
let reverse: Bool
init(transition: CGFloat, reverse: Bool) {
self.transition = transition
self.reverse = reverse
super.init()
}
}
final class ZoomWheelNode: ASDisplayNode {
class State: Equatable {
let active: Bool
init(active: Bool) {
self.active = active
}
static func ==(lhs: State, rhs: State) -> Bool {
if lhs.active != rhs.active {
return false
}
return true
}
}
private class TransitionContext {
let startTime: Double
let duration: Double
let previousState: State
init(startTime: Double, duration: Double, previousState: State) {
self.startTime = startTime
self.duration = duration
self.previousState = previousState
}
}
private var animator: ConstantDisplayLinkAnimator?
private var hasState = false
private var state: State = State(active: false)
private var transitionContext: TransitionContext?
override init() {
super.init()
self.isOpaque = false
}
func update(state: State, animated: Bool) {
var animated = animated
if !self.hasState {
self.hasState = true
animated = false
}
if self.state != state {
let previousState = self.state
self.state = state
if animated {
self.transitionContext = TransitionContext(startTime: CACurrentMediaTime(), duration: 0.18, previousState: previousState)
}
self.updateAnimations()
self.setNeedsDisplay()
}
}
private func updateAnimations() {
var animate = false
let timestamp = CACurrentMediaTime()
if let transitionContext = self.transitionContext {
if transitionContext.startTime + transitionContext.duration < timestamp {
self.transitionContext = nil
} else {
animate = true
}
}
if animate {
let animator: ConstantDisplayLinkAnimator
if let current = self.animator {
animator = current
} else {
animator = ConstantDisplayLinkAnimator(update: { [weak self] in
self?.updateAnimations()
})
self.animator = animator
}
animator.isPaused = false
} else {
self.animator?.isPaused = true
}
self.setNeedsDisplay()
}
override public func drawParameters(forAsyncLayer layer: _ASDisplayLayer) -> NSObjectProtocol? {
var transitionFraction: CGFloat = self.state.active ? 1.0 : 0.0
var reverse = false
if let transitionContext = self.transitionContext {
let timestamp = CACurrentMediaTime()
var t = CGFloat((timestamp - transitionContext.startTime) / transitionContext.duration)
t = min(1.0, max(0.0, t))
if transitionContext.previousState.active != self.state.active {
transitionFraction = self.state.active ? t : 1.0 - t
reverse = transitionContext.previousState.active
}
}
return ZoomWheelNodeDrawingState(transition: transitionFraction, reverse: reverse)
}
@objc override public class func draw(_ bounds: CGRect, withParameters parameters: Any?, isCancelled: () -> Bool, isRasterizing: Bool) {
let context = UIGraphicsGetCurrentContext()!
if !isRasterizing {
context.setBlendMode(.copy)
context.setFillColor(UIColor.clear.cgColor)
context.fill(bounds)
}
guard let parameters = parameters as? ZoomWheelNodeDrawingState else {
return
}
let color = UIColor(rgb: 0xffffff)
context.setFillColor(color.cgColor)
let clearLineWidth: CGFloat = 4.0
let lineWidth: CGFloat = 1.0 + UIScreenPixel
context.scaleBy(x: 2.5, y: 2.5)
context.translateBy(x: 4.0, y: 3.0)
let _ = try? drawSvgPath(context, path: "M14,8.335 C14.36727,8.335 14.665,8.632731 14.665,9 C14.665,11.903515 12.48064,14.296846 9.665603,14.626311 L9.665,16 C9.665,16.367269 9.367269,16.665 9,16.665 C8.666119,16.665 8.389708,16.418942 8.34221,16.098269 L8.335,16 L8.3354,14.626428 C5.519879,14.297415 3.335,11.90386 3.335,9 C3.335,8.632731 3.632731,8.335 4,8.335 C4.367269,8.335 4.665,8.632731 4.665,9 C4.665,11.394154 6.605846,13.335 9,13.335 C11.39415,13.335 13.335,11.394154 13.335,9 C13.335,8.632731 13.63273,8.335 14,8.335 Z ")
let _ = try? drawSvgPath(context, path: "M9,2.5 C10.38071,2.5 11.5,3.61929 11.5,5 L11.5,9 C11.5,10.380712 10.38071,11.5 9,11.5 C7.619288,11.5 6.5,10.380712 6.5,9 L6.5,5 C6.5,3.61929 7.619288,2.5 9,2.5 Z ")
context.translateBy(x: -4.0, y: -3.0)
if parameters.transition > 0.0 {
let startPoint: CGPoint
let endPoint: CGPoint
let origin = CGPoint(x: 9.0, y: 10.0 - UIScreenPixel)
let length: CGFloat = 17.0
if parameters.reverse {
startPoint = CGPoint(x: origin.x + length * (1.0 - parameters.transition), y: origin.y + length * (1.0 - parameters.transition))
endPoint = CGPoint(x: origin.x + length, y: origin.y + length)
} else {
startPoint = origin
endPoint = CGPoint(x: origin.x + length * parameters.transition, y: origin.y + length * parameters.transition)
}
context.setBlendMode(.clear)
context.setLineWidth(clearLineWidth)
context.move(to: startPoint)
context.addLine(to: endPoint)
context.strokePath()
context.setBlendMode(.normal)
context.setStrokeColor(color.cgColor)
context.setLineWidth(lineWidth)
context.setLineCap(.round)
context.setLineJoin(.round)
context.move(to: startPoint)
context.addLine(to: endPoint)
context.strokePath()
}
}
}
private class ButtonNode: HighlightTrackingButtonNode {
private let backgroundNode: ASDisplayNode
private let textNode: ImmediateTextNode
init() {
self.backgroundNode = ASDisplayNode()
self.textNode = ImmediateTextNode()
super.init()
self.addSubnode(self.backgroundNode)
self.addSubnode(self.textNode)
self.highligthedChanged = { [weak self] highlight in
if let strongSelf = self {
}
}
}
func update() {
}
}
final class CameraZoomNode: ASDisplayNode {
private let wheelNode: ZoomWheelNode
private let backgroundNode: ASDisplayNode
override init() {
self.wheelNode = ZoomWheelNode()
self.backgroundNode = ASDisplayNode()
super.init()
self.addSubnode(self.wheelNode)
}
}

View File

@ -628,7 +628,7 @@ private final class ChatListMediaPreviewNode: ASDisplayNode {
}
}
private let loginCodeRegex = try? NSRegularExpression(pattern: "[0-9]{5,6}", options: [])
private let loginCodeRegex = try? NSRegularExpression(pattern: "[\\d\\-]{5,7}", options: [])
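// Widened from plain 5-6 digit codes to also allow hyphens (e.g. "123-456") and up to 7 characters.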
class ChatListItemNode: ItemListRevealOptionsItemNode {
final class TopicItemNode: ASDisplayNode {

View File

@ -9,20 +9,24 @@ public final class Button: Component {
public let isEnabled: Bool
public let action: () -> Void
public let holdAction: (() -> Void)?
public let highlightedAction: ActionSlot<Bool>?
convenience public init(
content: AnyComponent<Empty>,
isEnabled: Bool = true,
action: @escaping () -> Void
automaticHighlight: Bool = true,
action: @escaping () -> Void,
highlightedAction: ActionSlot<Bool>? = nil
) {
self.init(
content: content,
minSize: nil,
tag: nil,
automaticHighlight: true,
automaticHighlight: automaticHighlight,
isEnabled: isEnabled,
action: action,
holdAction: nil
holdAction: nil,
highlightedAction: highlightedAction
)
}
@ -33,7 +37,8 @@ public final class Button: Component {
automaticHighlight: Bool = true,
isEnabled: Bool = true,
action: @escaping () -> Void,
holdAction: (() -> Void)?
holdAction: (() -> Void)?,
highlightedAction: ActionSlot<Bool>?
) {
self.content = content
self.minSize = minSize
@ -42,6 +47,7 @@ public final class Button: Component {
self.isEnabled = isEnabled
self.action = action
self.holdAction = holdAction
self.highlightedAction = highlightedAction
}
public func minSize(_ minSize: CGSize?) -> Button {
@ -52,7 +58,8 @@ public final class Button: Component {
automaticHighlight: self.automaticHighlight,
isEnabled: self.isEnabled,
action: self.action,
holdAction: self.holdAction
holdAction: self.holdAction,
highlightedAction: self.highlightedAction
)
}
@ -64,7 +71,8 @@ public final class Button: Component {
automaticHighlight: self.automaticHighlight,
isEnabled: self.isEnabled,
action: self.action,
holdAction: holdAction
holdAction: holdAction,
highlightedAction: self.highlightedAction
)
}
@ -76,7 +84,8 @@ public final class Button: Component {
automaticHighlight: self.automaticHighlight,
isEnabled: self.isEnabled,
action: self.action,
holdAction: self.holdAction
holdAction: self.holdAction,
highlightedAction: self.highlightedAction
)
}
@ -105,11 +114,14 @@ public final class Button: Component {
private var component: Button?
private var currentIsHighlighted: Bool = false {
didSet {
guard let component = self.component, component.automaticHighlight else {
guard let component = self.component else {
return
}
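// Alpha feedback stays gated by automaticHighlight, but highlight changes are now always forwarded to highlightedAction observers.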
if self.currentIsHighlighted != oldValue {
self.updateAlpha(transition: .immediate)
if component.automaticHighlight {
self.updateAlpha(transition: .immediate)
}
component.highlightedAction?.invoke(self.currentIsHighlighted)
}
}
}

View File

@ -4,13 +4,16 @@ import UIKit
public final class Image: Component {
public let image: UIImage?
public let tintColor: UIColor?
public let size: CGSize?
public init(
image: UIImage?,
tintColor: UIColor? = nil
tintColor: UIColor? = nil,
size: CGSize? = nil
) {
self.image = image
self.tintColor = tintColor
self.size = size
}
public static func ==(lhs: Image, rhs: Image) -> Bool {
@ -20,6 +23,9 @@ public final class Image: Component {
if lhs.tintColor != rhs.tintColor {
return false
}
if lhs.size != rhs.size {
return false
}
return true
}
@ -36,7 +42,7 @@ public final class Image: Component {
self.image = component.image
self.tintColor = component.tintColor
return availableSize
return component.size ?? availableSize
}
}

View File

@ -37,81 +37,8 @@ public final class BlurredBackgroundComponent: Component {
private var vibrancyEffectView: UIVisualEffectView?
public func update(component: BlurredBackgroundComponent, availableSize: CGSize, transition: Transition) -> CGSize {
/*if self.tintContainerView !== component.tintContainerView {
if let tintContainerView = self.tintContainerView {
self.tintContainerView = nil
if tintContainerView.superview === self {
tintContainerView.removeFromSuperview()
}
}
self.tintContainerView = component.tintContainerView
if let tintContainerView = self.tintContainerView {
let vibrancyEffectView: UIVisualEffectView
if let current = self.vibrancyEffectView {
vibrancyEffectView = current
} else {
let blurEffect = UIBlurEffect(style: .extraLight)
let vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect)
vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
self.vibrancyEffectView = vibrancyEffectView
self.addSubview(vibrancyEffectView)
}
tintContainerView.backgroundColor = .white
vibrancyEffectView.contentView.addSubview(tintContainerView)
} else {
if let vibrancyEffectView = self.vibrancyEffectView {
self.vibrancyEffectView = nil
vibrancyEffectView.removeFromSuperview()
}
}
}*/
self.updateColor(color: component.color, transition: transition.containedViewLayoutTransition)
/*if let _ = self.viewWithTag(123) {
} else {
let blurEffect = UIBlurEffect(style: .extraLight)
/*let segmentedControl = UISegmentedControl(items: ["First Item", "Second Item"])
segmentedControl.sizeToFit()
segmentedControl.center = CGPoint(x: 250.0, y: 250.0)*/
let testView = UIView(frame: CGRect(origin: CGPoint(x: 50.0, y: 100.0), size: CGSize(width: 250.0, height: 50.0)))
testView.backgroundColor = .white
let testView2 = UILabel()
testView2.text = "Test 13245"
testView2.font = Font.semibold(17.0)
testView2.textColor = .black
testView2.sizeToFit()
testView2.center = CGPoint(x: 250.0 - testView.frame.minX, y: 490.0 - testView.frame.minY)
let vibrancyEffect = UIVibrancyEffect(blurEffect: blurEffect)
let vibrancyEffectView = UIVisualEffectView(effect: vibrancyEffect)
//vibrancyEffectView.frame = CGRect(origin: CGPoint(), size: CGSize(width: 400.0, height: 300.0))
vibrancyEffectView.tag = 123
vibrancyEffectView.contentView.addSubview(testView)
testView.addSubview(testView2)
//vibrancyEffectView.contentView.addSubview(testView2)
self.addSubview(vibrancyEffectView)
/*let view = UIView()
view.tag = 123
view.layer.compositingFilter = "sourceOverCompositing"
view.backgroundColor = .white
view.frame = CGRect(origin: CGPoint(), size: CGSize(width: 100.0, height: 200.0))
self.addSubview(view)*/
}
if let view = self.viewWithTag(123) {
view.frame = CGRect(origin: CGPoint(), size: availableSize)
}*/
self.update(size: availableSize, cornerRadius: component.cornerRadius, transition: transition.containedViewLayoutTransition)
if let tintContainerView = self.tintContainerView {

View File

@ -13,6 +13,8 @@ public protocol TabBarController: ViewController {
var controllers: [ViewController] { get }
var selectedIndex: Int { get set }
var cameraItem: UITabBarItem? { get set }
func setControllers(_ controllers: [ViewController], selectedIndex: Int?)
func updateBackgroundAlpha(_ alpha: CGFloat, transition: ContainedViewLayoutTransition)

View File

@ -98,6 +98,48 @@ public func legacyWallpaperEditor(context: AccountContext, item: TGMediaEditable
})
}
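// legacyFullMediaEditor presents the legacy TGPhotoVideoEditor immediately over an
// empty controller, with the paint and adjustments tools disabled and "Story" as
// the recipient name.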
public func legacyFullMediaEditor(context: AccountContext, item: TGMediaEditableItem & TGMediaSelectableItem, getCaptionPanelView: @escaping () -> TGCaptionPanelView?, sendMessagesWithSignals: @escaping ([Any]?, Bool, Int32) -> Void, present: @escaping (ViewController, Any?) -> Void) {
let paintStickersContext = LegacyPaintStickersContext(context: context)
paintStickersContext.captionPanelView = {
return getCaptionPanelView()
}
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let legacyController = LegacyController(presentation: .custom, theme: presentationData.theme, initialLayout: nil)
legacyController.blocksBackgroundWhenInOverlay = true
legacyController.acceptsFocusWhenInOverlay = true
legacyController.statusBar.statusBarStyle = .Ignore
legacyController.controllerLoaded = { [weak legacyController] in
legacyController?.view.disablesInteractiveTransitionGestureRecognizer = true
}
let emptyController = LegacyEmptyController(context: legacyController.context)!
emptyController.navigationBarShouldBeHidden = true
let navigationController = makeLegacyNavigationController(rootController: emptyController)
navigationController.setNavigationBarHidden(true, animated: false)
legacyController.bind(controller: navigationController)
legacyController.enableSizeClassSignal = true
present(legacyController, nil)
TGPhotoVideoEditor.present(with: legacyController.context, controller: emptyController, caption: NSAttributedString(), withItem: item, paint: false, adjustments: false, recipientName: "Story", stickersContext: paintStickersContext, from: .zero, mainSnapshot: nil, snapshots: [] as [Any], immediate: true, appeared: {
}, completion: { result, editingContext in
let nativeGenerator = legacyAssetPickerItemGenerator()
var selectableResult: TGMediaSelectableItem?
if let result = result {
selectableResult = unsafeDowncast(result, to: TGMediaSelectableItem.self)
}
let signals = TGCameraController.resultSignals(for: nil, editingContext: editingContext, currentItem: selectableResult, storeAssets: false, saveEditedPhotos: false, descriptionGenerator: { _1, _2, _3 in
nativeGenerator(_1, _2, _3, nil)
})
sendMessagesWithSignals(signals, false, 0)
}, dismissed: { [weak legacyController] in
legacyController?.dismiss()
})
}
public func legacyMediaEditor(context: AccountContext, peer: Peer, threadTitle: String?, media: AnyMediaReference, mode: LegacyMediaEditorMode, initialCaption: NSAttributedString, snapshots: [UIView], transitionCompletion: (() -> Void)?, getCaptionPanelView: @escaping () -> TGCaptionPanelView?, sendMessagesWithSignals: @escaping ([Any]?, Bool, Int32) -> Void, present: @escaping (ViewController, Any?) -> Void) {
let _ = (fetchMediaData(context: context, postbox: context.account.postbox, userLocation: .other, mediaReference: media)
|> deliverOnMainQueue).start(next: { (value, isImage) in

View File

@ -41,7 +41,8 @@ swift_library(
"//submodules/SparseItemGrid:SparseItemGrid",
"//submodules/UndoUI:UndoUI",
"//submodules/MoreButtonNode:MoreButtonNode",
"//submodules/InvisibleInkDustNode:InvisibleInkDustNode",
"//submodules/InvisibleInkDustNode:InvisibleInkDustNode",
"//submodules/TelegramUI/Components/CameraScreen",
],
visibility = [
"//visibility:public",

View File

@ -13,6 +13,7 @@ import Photos
import LegacyComponents
import AttachmentUI
import ItemListUI
import CameraScreen
private enum MediaGroupsEntry: Comparable, Identifiable {
enum StableId: Hashable {

View File

@ -21,6 +21,7 @@ import SparseItemGrid
import UndoUI
import PresentationDataUtils
import MoreButtonNode
import CameraScreen
final class MediaPickerInteraction {
let openMedia: (PHFetchResult<PHAsset>, Int, UIImage?) -> Void
@ -131,6 +132,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
public enum AssetsMode: Equatable {
case `default`
case wallpaper
case story
}
case assets(PHAssetCollection?, AssetsMode)
@ -392,7 +394,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self.gridNode.scrollView.alwaysBounceVertical = true
self.gridNode.scrollView.showsVerticalScrollIndicator = false
if case let .assets(_, mode) = controller.subject, case .wallpaper = mode {
if case let .assets(_, mode) = controller.subject, [.wallpaper, .story].contains(mode) {
} else {
let selectionGesture = MediaPickerGridSelectionGesture<TGMediaSelectableItem>()
@ -608,6 +610,16 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
updateLayout = true
}
#if DEBUG
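// Debug convenience: automatically select every asset in an album named "BulkTest".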
if case let .assets(collection, _) = controller.subject, collection?.localizedTitle == "BulkTest" {
for i in 0 ..< totalCount {
let backingAsset = fetchResult.object(at: i)
let asset = TGMediaAsset(phAsset: backingAsset)
controller.interaction?.selectionState?.setItem(asset, selected: true)
}
}
#endif
if case .notDetermined = cameraAccess, !self.requestedCameraAccess {
self.requestedCameraAccess = true
self.mediaAssetsContext.requestCameraAccess()
@ -1292,7 +1304,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
self.titleView.title = collection.localizedTitle ?? presentationData.strings.Attachment_Gallery
} else {
switch mode {
case .default:
case .default, .story:
self.titleView.title = presentationData.strings.Attachment_Gallery
case .wallpaper:
self.titleView.title = presentationData.strings.Conversation_Theme_ChooseWallpaperTitle
@ -2032,7 +2044,7 @@ public func wallpaperMediaPickerController(
controller.animateAppearance = animateAppearance
controller.requestController = { [weak controller] _, present in
let presentationData = context.sharedContext.currentPresentationData.with { $0 }
let mediaPickerController = MediaPickerScreen(context: context, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper), mainButtonState: AttachmentMainButtonState(text: presentationData.strings.Conversation_Theme_SetColorWallpaper, font: .regular, background: .color(.clear), textColor: presentationData.theme.actionSheet.controlAccentColor, isVisible: true, progress: .none, isEnabled: true), mainButtonAction: {
let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .wallpaper), mainButtonState: AttachmentMainButtonState(text: presentationData.strings.Conversation_Theme_SetColorWallpaper, font: .regular, background: .color(.clear), textColor: presentationData.theme.actionSheet.controlAccentColor, isVisible: true, progress: .none, isEnabled: true), mainButtonAction: {
controller?.dismiss(animated: true)
openColors()
})
@ -2042,3 +2054,22 @@ public func wallpaperMediaPickerController(
controller.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
return controller
}
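// storyMediaPickerController wraps MediaPickerScreen in a standalone AttachmentController,
// forces a dark theme and routes the picked PHAsset through a custom selection callback
// instead of the regular send path.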
public func storyMediaPickerController(
context: AccountContext,
completion: @escaping (PHAsset) -> Void = { _ in }
) -> ViewController {
let presentationData = context.sharedContext.currentPresentationData.with({ $0 }).withUpdated(theme: defaultDarkColorPresentationTheme)
let updatedPresentationData: (PresentationData, Signal<PresentationData, NoError>) = (presentationData, .single(presentationData))
let controller = AttachmentController(context: context, updatedPresentationData: updatedPresentationData, chatLocation: nil, buttons: [.standalone], initialButton: .standalone, fromMenu: false, hasTextInput: false, makeEntityInputView: {
return nil
})
controller.requestController = { _, present in
let mediaPickerController = MediaPickerScreen(context: context, updatedPresentationData: updatedPresentationData, peer: nil, threadTitle: nil, chatLocation: nil, bannedSendPhotos: nil, bannedSendVideos: nil, subject: .assets(nil, .story), mainButtonState: nil, mainButtonAction: nil)
mediaPickerController.customSelection = completion
present(mediaPickerController, mediaPickerController.mediaPickerContext)
}
controller.navigationPresentation = .flatModal
controller.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
return controller
}

View File

@ -189,6 +189,5 @@ public final class MatrixView: MTKView, MTKViewDelegate, PhoneDemoDecorationView
commandBuffer.present(drawable)
commandBuffer.commit()
}
}

View File

@ -130,6 +130,8 @@ open class TabBarControllerImpl: ViewController, TabBarController {
private var navigationBarPresentationData: NavigationBarPresentationData
private var theme: TabBarControllerTheme
public var middleItemAction: () -> Void = {}
public init(navigationBarPresentationData: NavigationBarPresentationData, theme: TabBarControllerTheme) {
self.navigationBarPresentationData = navigationBarPresentationData
self.theme = theme
@ -199,11 +201,20 @@ open class TabBarControllerImpl: ViewController, TabBarController {
override open func loadDisplayNode() {
self.displayNode = TabBarControllerNode(theme: self.theme, navigationBarPresentationData: self.navigationBarPresentationData, itemSelected: { [weak self] index, longTap, itemNodes in
if let strongSelf = self {
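// With the synthetic camera item inserted, the bar shows 5 items for 4 real
// controllers: a tap on the middle slot invokes middleItemAction, and indices
// past it are shifted back down to controller indices.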
var index = index
if strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.count == 5 {
if index == 2 {
strongSelf.middleItemAction()
return
} else if index > 2 {
index -= 1
}
}
if longTap, let controller = strongSelf.controllers[index] as? TabBarContainedController {
controller.presentTabBarPreviewingController(sourceNodes: itemNodes)
return
}
let timestamp = CACurrentMediaTime()
if strongSelf.debugTapCounter.0 < timestamp - 0.4 {
strongSelf.debugTapCounter.0 = timestamp
@ -297,7 +308,15 @@ open class TabBarControllerImpl: ViewController, TabBarController {
return
}
self.tabBarControllerNode.tabBarNode.selectedIndex = self.selectedIndex
if self.tabBarControllerNode.tabBarNode.tabBarItems.count == 5 {
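// Map the selected controller index back to a visual index, skipping the camera slot.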
var selectedIndex = self.selectedIndex
if selectedIndex >= 2 {
selectedIndex += 1
}
self.tabBarControllerNode.tabBarNode.selectedIndex = selectedIndex
} else {
self.tabBarControllerNode.tabBarNode.selectedIndex = self.selectedIndex
}
if let currentController = self.currentController {
currentController.willMove(toParent: nil)
@ -394,6 +413,8 @@ open class TabBarControllerImpl: ViewController, TabBarController {
}
}
public var cameraItem: UITabBarItem?
public func setControllers(_ controllers: [ViewController], selectedIndex: Int?) {
var updatedSelectedIndex: Int? = selectedIndex
if updatedSelectedIndex == nil, let selectedIndex = self._selectedIndex, selectedIndex < self.controllers.count {
@ -404,7 +425,13 @@ open class TabBarControllerImpl: ViewController, TabBarController {
}
}
self.controllers = controllers
self.tabBarControllerNode.tabBarNode.tabBarItems = self.controllers.map({ TabBarNodeItem(item: $0.tabBarItem, contextActionType: $0.tabBarItemContextActionType) })
var tabBarItems = self.controllers.map({ TabBarNodeItem(item: $0.tabBarItem, contextActionType: $0.tabBarItemContextActionType) })
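// Inject the camera item into the middle position when the four standard tabs are present.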
if tabBarItems.count == 4, let cameraItem = self.cameraItem {
tabBarItems.insert(TabBarNodeItem(item: cameraItem, contextActionType: .none), at: 2)
}
self.tabBarControllerNode.tabBarNode.tabBarItems = tabBarItems
let signals = combineLatest(self.controllers.map({ $0.tabBarItem }).map { tabBarItem -> Signal<Bool, NoError> in
if let tabBarItem = tabBarItem, tabBarItem.image == nil {

View File

@ -316,7 +316,7 @@ final class TabBarNodeItem {
}
}
class TabBarNode: ASDisplayNode {
class TabBarNode: ASDisplayNode, UIGestureRecognizerDelegate {
var tabBarItems: [TabBarNodeItem] = [] {
didSet {
self.reloadTabBarItems()
@ -378,6 +378,8 @@ class TabBarNode: ASDisplayNode {
self.isOpaque = false
self.backgroundColor = nil
self.isExclusiveTouch = true
self.addSubnode(self.backgroundNode)
self.addSubnode(self.separatorNode)
@ -387,6 +389,7 @@ class TabBarNode: ASDisplayNode {
super.didLoad()
let recognizer = TapLongTapOrDoubleTapGestureRecognizer(target: self, action: #selector(self.tapLongTapOrDoubleTapGesture(_:)))
recognizer.delegate = self
recognizer.tapActionAtPoint = { _ in
return .keepWithSingleTap
}
@ -394,6 +397,13 @@ class TabBarNode: ASDisplayNode {
self.view.addGestureRecognizer(recognizer)
}
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
if otherGestureRecognizer is UIPanGestureRecognizer {
return false
}
return true
}
@objc private func tapLongTapOrDoubleTapGesture(_ recognizer: TapLongTapOrDoubleTapGestureRecognizer) {
switch recognizer.state {
case .ended:

View File

@ -360,6 +360,7 @@ swift_library(
"//submodules/TelegramUI/Components/SliderContextItem:SliderContextItem",
"//submodules/TelegramUI/Components/Stories/StoryContainerScreen",
"//submodules/TelegramUI/Components/Stories/StoryContentComponent",
"//submodules/TelegramUI/Components/CameraScreen",
] + select({
"@build_bazel_rules_apple//apple:ios_armv7": [],
"@build_bazel_rules_apple//apple:ios_arm64": appcenter_targets,

View File

@ -0,0 +1,76 @@
load("@build_bazel_rules_swift//swift:swift.bzl", "swift_library")
load(
"@build_bazel_rules_apple//apple:resources.bzl",
"apple_resource_bundle",
"apple_resource_group",
)
load("//build-system/bazel-utils:plist_fragment.bzl",
"plist_fragment",
)
filegroup(
name = "CameraScreenMetalResources",
srcs = glob([
"MetalResources/**/*.*",
]),
visibility = ["//visibility:public"],
)
plist_fragment(
name = "CameraScreenBundleInfoPlist",
extension = "plist",
template =
"""
<key>CFBundleIdentifier</key>
<string>org.telegram.CameraScreen</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleName</key>
<string>CameraScreen</string>
"""
)
apple_resource_bundle(
name = "CameraScreenBundle",
infoplists = [
":CameraScreenBundleInfoPlist",
],
resources = [
":CameraScreenMetalResources",
],
)
swift_library(
name = "CameraScreen",
module_name = "CameraScreen",
srcs = glob([
"Sources/**/*.swift",
]),
copts = [
"-warnings-as-errors",
],
data = [
":CameraScreenBundle",
],
deps = [
"//submodules/AsyncDisplayKit",
"//submodules/Display",
"//submodules/TelegramCore",
"//submodules/SSignalKit/SwiftSignalKit",
"//submodules/ComponentFlow",
"//submodules/Components/ViewControllerComponent",
"//submodules/Components/ComponentDisplayAdapters",
"//submodules/TelegramPresentationData",
"//submodules/AccountContext",
"//submodules/AppBundle",
"//submodules/TelegramStringFormatting",
"//submodules/PresentationDataUtils",
"//submodules/LocalMediaResources",
"//submodules/Camera",
"//submodules/Components/MultilineTextComponent",
"//submodules/Components/BlurredBackgroundComponent",
],
visibility = [
"//visibility:public",
],
)

View File

@ -0,0 +1,76 @@
#include <metal_stdlib>
using namespace metal;
typedef struct {
packed_float2 position;
} Vertex;
struct RasterizerData
{
float4 position [[position]];
};
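// Vertex shader: passes precomputed 2D quad positions through to the rasterizer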
vertex RasterizerData cameraBlobVertex
(
constant Vertex *vertexArray[[buffer(0)]],
uint vertexID [[ vertex_id ]]
) {
RasterizerData out;
out.position = vector_float4(vertexArray[vertexID].position[0], vertexArray[vertexID].position[1], 0.0, 1.0);
return out;
}
#define BINDING_DIST .15
#define AA_RADIUS 2.
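// Polynomial smooth minimum: blends two signed distances so the shapes merge
// with a soft metaball-style junction of width k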
float smin(float a, float b, float k) {
float h = clamp(0.5 + 0.5 * (a - b) / k, 0.0, 1.0);
return mix(a, b, h) - k * h * (1.0 - h);
}
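// Signed distance functions for a disk and a rounded rectangle (negative inside)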
float sdist_disk(float2 uv, float2 position, float radius) {
return length(uv - position) - radius;
}
float sdist_rect(float2 uv, float2 position, float size, float radius){
float2 q = abs(uv - position) - size + radius;
return length(max(q, 0.0)) + min(max(q.x, q.y), 0.0) - radius;
}
float map(float2 uv, float2 diskPos, float2 rectPos) {
float disk = sdist_disk(uv, diskPos, 0.2);
float rect = sdist_rect(uv, rectPos, 0.15, 0.15);
float metaballs = 1.0;
metaballs = smin(metaballs, disk, BINDING_DIST);
metaballs = smin(metaballs, rect, BINDING_DIST);
return metaballs;
}
float mod(float x, float y) {
return x - y * floor(x / y);
}
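// The scene loops every 3 time units: the rounded rect slides away from the disk,
// smoothstep over the combined field antialiases the edge, and cAlpha fades the
// blob from red toward white during the second half of the loop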
fragment half4 cameraBlobFragment(RasterizerData in[[stage_in]],
constant uint2 &resolution[[buffer(0)]],
constant float &time[[buffer(1)]])
{
float finalTime = mod(time * 1.5, 3.0);
float2 R = float2(resolution.x, resolution.y);
float2 uv = (2.0 * in.position.xy - R.xy) / R.y;
float t = AA_RADIUS / resolution.y;
float2 diskPos = float2(0.1, 0.4);
float2 rectPos = float2(0.2 - 0.3 * finalTime, 0.4);
float cAlpha = 0.0;
if (finalTime > 1.5) {
cAlpha = min(1.0, (finalTime - 1.5) * 1.75);
}
float c = smoothstep(t, -t, map(uv, diskPos, rectPos));
return half4(c, cAlpha * c, cAlpha * c, c);
}

View File

@ -0,0 +1,746 @@
import Foundation
import UIKit
import Display
import AsyncDisplayKit
import ComponentFlow
import SwiftSignalKit
import ViewControllerComponent
import ComponentDisplayAdapters
import TelegramPresentationData
import AccountContext
import TelegramCore
import PresentationDataUtils
import Camera
import MultilineTextComponent
import BlurredBackgroundComponent
import Photos
let videoRedColor = UIColor(rgb: 0xff3b30)
enum CameraMode: Equatable {
case photo
case video
}
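// Immutable camera UI state; the updated* helpers return modified copies so the
// component tree can be re-rendered from a single value.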
private struct CameraState {
enum Recording: Equatable {
case none
case holding
case handsFree
}
let mode: CameraMode
let flashMode: Camera.FlashMode
let recording: Recording
let duration: Double
func updatedMode(_ mode: CameraMode) -> CameraState {
return CameraState(mode: mode, flashMode: self.flashMode, recording: self.recording, duration: self.duration)
}
func updatedFlashMode(_ flashMode: Camera.FlashMode) -> CameraState {
return CameraState(mode: self.mode, flashMode: flashMode, recording: self.recording, duration: self.duration)
}
func updatedRecording(_ recording: Recording) -> CameraState {
return CameraState(mode: self.mode, flashMode: self.flashMode, recording: recording, duration: self.duration)
}
func updatedDuration(_ duration: Double) -> CameraState {
return CameraState(mode: self.mode, flashMode: self.flashMode, recording: self.recording, duration: duration)
}
}
enum CameraScreenTransition {
case animateIn
case animateOut
}
private let cancelButtonTag = GenericComponentViewTag()
private let flashButtonTag = GenericComponentViewTag()
private let shutterButtonTag = GenericComponentViewTag()
private let flipButtonTag = GenericComponentViewTag()
private let zoomControlTag = GenericComponentViewTag()
private final class CameraScreenComponent: CombinedComponent {
typealias EnvironmentType = ViewControllerComponentContainer.Environment
let context: AccountContext
let camera: Camera
let present: (ViewController) -> Void
let push: (ViewController) -> Void
let completion: (CameraScreen.Result) -> Void
init(
context: AccountContext,
camera: Camera,
present: @escaping (ViewController) -> Void,
push: @escaping (ViewController) -> Void,
completion: @escaping (CameraScreen.Result) -> Void
) {
self.context = context
self.camera = camera
self.present = present
self.push = push
self.completion = completion
}
static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool {
if lhs.context !== rhs.context {
return false
}
return true
}
final class State: ComponentState {
enum ImageKey: Hashable {
case cancel
case flip
case flash
}
private var cachedImages: [ImageKey: UIImage] = [:]
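// Bundle icons are loaded lazily and cached per key to avoid repeated lookups.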
func image(_ key: ImageKey) -> UIImage {
if let image = self.cachedImages[key] {
return image
} else {
var image: UIImage
switch key {
case .cancel:
image = UIImage(bundleImageName: "Camera/CloseIcon")!
case .flip:
image = UIImage(bundleImageName: "Camera/FlipIcon")!
case .flash:
image = UIImage(bundleImageName: "Camera/FlashIcon")!
}
cachedImages[key] = image
return image
}
}
private let context: AccountContext
fileprivate let camera: Camera
private let present: (ViewController) -> Void
private let completion: (CameraScreen.Result) -> Void
private var cameraStateDisposable: Disposable?
private var resultDisposable = MetaDisposable()
private var mediaAssetsContext: MediaAssetsContext
fileprivate var lastGalleryAsset: PHAsset?
private var lastGalleryAssetsDisposable: Disposable?
var cameraState = CameraState(mode: .photo, flashMode: .off, recording: .none, duration: 0.0)
var swipeHint: CaptureControlsComponent.SwipeHint = .none
init(context: AccountContext, camera: Camera, present: @escaping (ViewController) -> Void, completion: @escaping (CameraScreen.Result) -> Void) {
self.context = context
self.camera = camera
self.present = present
self.completion = completion
self.mediaAssetsContext = MediaAssetsContext()
super.init()
self.cameraStateDisposable = (camera.flashMode
|> deliverOnMainQueue).start(next: { [weak self] flashMode in
guard let self else {
return
}
self.cameraState = self.cameraState.updatedFlashMode(flashMode)
self.updated(transition: .easeInOut(duration: 0.2))
})
self.lastGalleryAssetsDisposable = (self.mediaAssetsContext.recentAssets()
|> map { fetchResult in
return fetchResult?.lastObject
}
|> deliverOnMainQueue).start(next: { [weak self] asset in
guard let self else {
return
}
self.lastGalleryAsset = asset
self.updated(transition: .easeInOut(duration: 0.2))
})
}
deinit {
self.cameraStateDisposable?.dispose()
self.lastGalleryAssetsDisposable?.dispose()
self.resultDisposable.dispose()
}
func updateCameraMode(_ mode: CameraMode) {
self.cameraState = self.cameraState.updatedMode(mode)
self.updated(transition: .spring(duration: 0.3))
}
func updateSwipeHint(_ hint: CaptureControlsComponent.SwipeHint) {
self.swipeHint = hint
self.updated(transition: .easeInOut(duration: 0.2))
}
func takePhoto() {
self.resultDisposable.set((self.camera.takePhoto()
|> deliverOnMainQueue).start(next: { [weak self] value in
if let self {
switch value {
case .began:
print("blink")
case let .finished(image):
self.completion(.image(image))
case .failed:
print("failed")
}
}
}))
}
func startVideoRecording(pressing: Bool) {
self.cameraState = self.cameraState.updatedDuration(0.0).updatedRecording(pressing ? .holding : .handsFree)
self.resultDisposable.set((self.camera.startRecording()
|> deliverOnMainQueue).start(next: { [weak self] duration in
if let self {
self.cameraState = self.cameraState.updatedDuration(duration)
self.updated(transition: .immediate)
}
}))
self.updated(transition: .spring(duration: 0.4))
}
func stopVideoRecording() {
self.cameraState = self.cameraState.updatedRecording(.none).updatedDuration(0.0)
self.resultDisposable.set((self.camera.stopRecording()
|> deliverOnMainQueue).start(next: { [weak self] path in
if let self, let path {
self.completion(.video(path))
}
}))
self.updated(transition: .spring(duration: 0.4))
}
func lockVideoRecording() {
self.cameraState = self.cameraState.updatedRecording(.handsFree)
self.updated(transition: .spring(duration: 0.4))
}
}
func makeState() -> State {
return State(context: self.context, camera: self.camera, present: self.present, completion: self.completion)
}
static var body: Body {
let cancelButton = Child(Button.self)
let captureControls = Child(CaptureControlsComponent.self)
let zoomControl = Child(ZoomComponent.self)
let flashButton = Child(Button.self)
let modeControl = Child(ModeComponent.self)
let hintLabel = Child(MultilineTextComponent.self)
let timeBackground = Child(RoundedRectangle.self)
let timeLabel = Child(MultilineTextComponent.self)
return { context in
let environment = context.environment[ViewControllerComponentContainer.Environment.self].value
let component = context.component
let state = context.state
let controller = environment.controller
let availableSize = context.availableSize
let accountContext = component.context
let push = component.push
let completion = component.completion
let topControlInset: CGFloat = 20.0
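// The cancel and flash buttons are only shown while not recording.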
if case .none = state.cameraState.recording {
let cancelButton = cancelButton.update(
component: Button(
content: AnyComponent(Image(
image: state.image(.cancel),
size: CGSize(width: 40.0, height: 40.0)
)),
action: {
guard let controller = controller() as? CameraScreen else {
return
}
controller.dismiss(animated: true)
}
).tagged(cancelButtonTag),
availableSize: CGSize(width: 40.0, height: 40.0),
transition: .immediate
)
context.add(cancelButton
.position(CGPoint(x: topControlInset + cancelButton.size.width / 2.0, y: environment.safeInsets.top + topControlInset + cancelButton.size.height / 2.0))
.appear(.default(scale: true))
.disappear(.default(scale: true))
.cornerRadius(20.0)
)
let flashButton = flashButton.update(
component: Button(
content: AnyComponent(Image(image: state.image(.flash))),
action: { [weak state] in
guard let state else {
return
}
if state.cameraState.flashMode == .off {
state.camera.setFlashMode(.on)
} else {
state.camera.setFlashMode(.off)
}
}
).tagged(flashButtonTag),
availableSize: CGSize(width: 40.0, height: 40.0),
transition: .immediate
)
context.add(flashButton
.position(CGPoint(x: availableSize.width - topControlInset - flashButton.size.width / 2.0, y: environment.safeInsets.top + topControlInset + flashButton.size.height / 2.0))
.appear(.default(scale: true))
.disappear(.default(scale: true))
.cornerRadius(20.0)
)
}
let zoomControl = zoomControl.update(
component: ZoomComponent(
availableValues: state.camera.metrics.zoomLevels,
value: 1.0,
tag: zoomControlTag
),
availableSize: context.availableSize,
transition: context.transition
)
context.add(zoomControl
.position(CGPoint(x: context.availableSize.width / 2.0, y: availableSize.height - zoomControl.size.height / 2.0 - 187.0 - environment.safeInsets.bottom))
)
let shutterState: ShutterButtonState
switch state.cameraState.recording {
case .handsFree:
shutterState = .stopRecording
case .holding:
shutterState = .holdRecording(progress: min(1.0, Float(state.cameraState.duration / 60.0)))
case .none:
switch state.cameraState.mode {
case .photo:
shutterState = .generic
case .video:
shutterState = .video
}
}
let captureControls = captureControls.update(
component: CaptureControlsComponent(
shutterState: shutterState,
lastGalleryAsset: state.lastGalleryAsset,
tag: shutterButtonTag,
shutterTapped: { [weak state] in
guard let state else {
return
}
if case .none = state.cameraState.recording {
if state.cameraState.mode == .photo {
state.takePhoto()
} else if state.cameraState.mode == .video {
state.startVideoRecording(pressing: false)
}
} else {
state.stopVideoRecording()
}
},
shutterPressed: { [weak state] in
guard let state, case .none = state.cameraState.recording else {
return
}
state.startVideoRecording(pressing: true)
},
shutterReleased: { [weak state] in
guard let state, state.cameraState.recording != .none else {
return
}
state.stopVideoRecording()
},
lockRecording: { [weak state] in
guard let state, state.cameraState.recording != .none else {
return
}
state.lockVideoRecording()
},
flipTapped: { [weak state] in
guard let state else {
return
}
state.camera.togglePosition()
},
galleryTapped: {
let controller = accountContext.sharedContext.makeMediaPickerScreen(context: accountContext, completion: { asset in
completion(.asset(asset))
})
push(controller)
},
swipeHintUpdated: { hint in
state.updateSwipeHint(hint)
}
),
availableSize: context.availableSize,
transition: context.transition
)
context.add(captureControls
.position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height - captureControls.size.height / 2.0 - 77.0 - environment.safeInsets.bottom))
)
var isVideoRecording = false
if case .video = state.cameraState.mode {
isVideoRecording = true
} else if state.cameraState.recording != .none {
isVideoRecording = true
}
if isVideoRecording {
let duration = Int(state.cameraState.duration)
let durationString = String(format: "%02d:%02d", (duration / 60) % 60, duration % 60)
let timeLabel = timeLabel.update(
component: MultilineTextComponent(
text: .plain(NSAttributedString(string: durationString, font: Font.with(size: 21.0, design: .camera), textColor: .white)),
horizontalAlignment: .center,
textShadowColor: UIColor(rgb: 0x000000, alpha: 0.2)
),
availableSize: context.availableSize,
transition: context.transition
)
if state.cameraState.recording != .none {
let timeBackground = timeBackground.update(
component: RoundedRectangle(color: videoRedColor, cornerRadius: 4.0),
availableSize: CGSize(width: timeLabel.size.width + 8.0, height: 28.0),
transition: context.transition
)
context.add(timeBackground
.position(CGPoint(x: context.availableSize.width / 2.0, y: environment.safeInsets.top + 40.0))
.appear(.default(alpha: true))
.disappear(.default(alpha: true))
)
}
context.add(timeLabel
.position(CGPoint(x: context.availableSize.width / 2.0, y: environment.safeInsets.top + 40.0))
.appear(.default(alpha: true))
.disappear(.default(alpha: true))
)
if case .holding = state.cameraState.recording {
let hintText: String?
switch state.swipeHint {
case .none:
hintText = nil
case .zoom:
hintText = "Swipe up to zoom"
case .lock:
hintText = "Swipe left to lock"
case .releaseLock:
hintText = "Release to lock"
case .flip:
hintText = "Swipe right to flip"
}
if let hintText {
let hintLabel = hintLabel.update(
component: MultilineTextComponent(
text: .plain(NSAttributedString(string: hintText.uppercased(), font: Font.with(size: 14.0, design: .camera, weight: .semibold), textColor: .white)),
horizontalAlignment: .center
),
availableSize: context.availableSize,
transition: .immediate
)
context.add(hintLabel
.position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height - 35.0 - hintLabel.size.height - environment.safeInsets.bottom))
.appear(.default(alpha: true))
.disappear(.default(alpha: true))
)
}
}
}
if case .none = state.cameraState.recording {
let modeControl = modeControl.update(
component: ModeComponent(
availableModes: [.photo, .video],
currentMode: state.cameraState.mode,
updatedMode: { [weak state] mode in
if let state {
state.updateCameraMode(mode)
}
}
),
availableSize: context.availableSize,
transition: context.transition
)
context.add(modeControl
.position(CGPoint(x: context.availableSize.width / 2.0, y: context.availableSize.height - 7.0 - modeControl.size.height - environment.safeInsets.bottom))
.appear(.default(alpha: true))
.disappear(.default(alpha: true))
)
}
return context.availableSize
}
}
}
public class CameraScreen: ViewController {
public enum Mode {
case generic
case story
case instantVideo
}
public enum Result {
case image(UIImage)
case video(String)
case asset(PHAsset)
}
fileprivate final class Node: ViewControllerTracingNode {
private weak var controller: CameraScreen?
private let context: AccountContext
private let updateState: ActionSlot<CameraState>
fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
private let previewContainerView: UIView
fileprivate let previewView: CameraPreviewView
fileprivate let blurView: UIVisualEffectView
fileprivate let camera: Camera
private var presentationData: PresentationData
private let hapticFeedback = HapticFeedback()
private var validLayout: ContainerViewLayout?
private var changingPositionDisposable: Disposable?
init(controller: CameraScreen) {
self.controller = controller
self.context = controller.context
self.updateState = ActionSlot<CameraState>()
self.presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
self.componentHost = ComponentView<ViewControllerComponentContainer.Environment>()
self.previewContainerView = UIView()
self.previewContainerView.clipsToBounds = true
self.previewContainerView.layer.cornerRadius = 12.0
self.blurView = UIVisualEffectView(effect: nil)
self.blurView.isUserInteractionEnabled = false
if let holder = controller.holder {
self.previewView = holder.previewView
self.camera = holder.camera
} else {
self.previewView = CameraPreviewView(test: false)!
self.camera = Camera(configuration: Camera.Configuration(preset: .hd1920x1080, position: .back, audio: true, photo: true, metadata: false))
self.camera.attachPreviewView(self.previewView)
}
self.previewView.clipsToBounds = true
super.init()
self.backgroundColor = .black
self.view.addSubview(self.previewContainerView)
self.previewContainerView.addSubview(self.previewView)
self.previewContainerView.addSubview(self.blurView)
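// While the camera switches between the front and back positions, the preview
// is covered with a blur that is removed once the switch completes.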
self.changingPositionDisposable = (self.camera.changingPosition
|> deliverOnMainQueue).start(next: { [weak self] value in
if let self {
UIView.animate(withDuration: 0.5) {
if value {
if #available(iOS 13.0, *) {
self.blurView.effect = UIBlurEffect(style: .systemThinMaterialDark)
}
} else {
self.blurView.effect = nil
}
}
}
})
}
deinit {
self.changingPositionDisposable?.dispose()
}
override func didLoad() {
super.didLoad()
self.view.disablesInteractiveModalDismiss = true
self.view.disablesInteractiveKeyboardGestureRecognizer = true
let pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(self.handlePinch(_:)))
self.previewView.addGestureRecognizer(pinchGestureRecognizer)
}
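// Pinch-to-zoom: the recognizer's scale is reset to 1.0 on .began and then
// forwarded directly to the camera on every change.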
@objc private func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) {
switch gestureRecognizer.state {
case .began:
gestureRecognizer.scale = 1.0
case .changed:
let scale = gestureRecognizer.scale
self.camera.setZoomLevel(scale)
default:
break
}
}
func animateIn() {
guard let layout = self.validLayout else {
return
}
// if let view = self.componentHost.findTaggedView(tag: topGradientTag) {
// view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
// }
self.camera.focus(at: CGPoint(x: 0.5, y: 0.5))
self.camera.startCapture()
self.layer.animatePosition(from: CGPoint(x: 0.0, y: layout.size.height), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
}
func animateOut(completion: @escaping () -> Void) {
// if let (layout, orientation) = self.validLayout {
// self.containerLayoutUpdated(layout: layout, orientation: orientation, animateOut: true, transition: .easeInOut(duration: 0.2))
// }
//
// if let view = self.componentHost.findTaggedView(tag: topGradientTag) {
// view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
// }
//self.layer.animatePosition(from: CGPoint(x: 0.0, y: self.frame.height), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
}
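// Touches that land on the component host itself fall through to the camera
// preview, so focus and zoom gestures keep working underneath the overlay UI.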
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
let result = super.hitTest(point, with: event)
if result == self.componentHost.view {
return self.previewView
}
return result
}
func containerLayoutUpdated(layout: ContainerViewLayout, forceUpdate: Bool = false, animateOut: Bool = false, transition: Transition) {
guard let controller = self.controller else {
return
}
let isFirstTime = self.validLayout == nil
self.validLayout = layout
let topInset: CGFloat = 60.0 //layout.intrinsicInsets.top + layout.safeInsets.top
let environment = ViewControllerComponentContainer.Environment(
statusBarHeight: layout.statusBarHeight ?? 0.0,
navigationHeight: 0.0,
safeInsets: UIEdgeInsets(
top: topInset,
left: layout.safeInsets.left,
bottom: layout.intrinsicInsets.bottom + layout.safeInsets.bottom,
right: layout.safeInsets.right
),
inputHeight: layout.inputHeight ?? 0.0,
metrics: layout.metrics,
deviceMetrics: layout.deviceMetrics,
orientation: nil,
isVisible: true,
theme: self.presentationData.theme,
strings: self.presentationData.strings,
dateTimeFormat: self.presentationData.dateTimeFormat,
controller: { [weak self] in
return self?.controller
}
)
var transition = transition
if isFirstTime {
transition = transition.withUserData(CameraScreenTransition.animateIn)
} else if animateOut {
transition = transition.withUserData(CameraScreenTransition.animateOut)
}
let componentSize = self.componentHost.update(
transition: transition,
component: AnyComponent(
CameraScreenComponent(
context: self.context,
camera: self.camera,
present: { [weak self] c in
self?.controller?.present(c, in: .window(.root))
},
push: { [weak self] c in
self?.controller?.push(c)
},
completion: controller.completion
)
),
environment: {
environment
},
forceUpdate: forceUpdate || animateOut,
containerSize: layout.size
)
if let componentView = self.componentHost.view {
if componentView.superview == nil {
self.view.insertSubview(componentView, at: 1)
componentView.clipsToBounds = true
}
let componentFrame = CGRect(origin: .zero, size: componentSize)
transition.setFrame(view: componentView, frame: CGRect(origin: componentFrame.origin, size: CGSize(width: componentFrame.width, height: componentFrame.height)))
if isFirstTime {
self.animateIn()
}
}
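// The live preview is pinned below the top inset at a fixed 16:9 aspect ratio
// (width * 1.77778), matching the hd1920x1080 capture preset.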
let previewSize = CGSize(width: layout.size.width, height: floorToScreenPixels(layout.size.width * 1.77778))
let previewFrame = CGRect(origin: CGPoint(x: 0.0, y: 60.0), size: previewSize)
transition.setFrame(view: self.previewContainerView, frame: previewFrame)
transition.setFrame(view: self.previewView, frame: CGRect(origin: .zero, size: previewFrame.size))
transition.setFrame(view: self.blurView, frame: CGRect(origin: .zero, size: previewFrame.size))
}
}
fileprivate var node: Node {
return self.displayNode as! Node
}
private let context: AccountContext
fileprivate let mode: Mode
fileprivate let holder: CameraHolder?
fileprivate let completion: (CameraScreen.Result) -> Void
public init(context: AccountContext, mode: Mode, holder: CameraHolder? = nil, completion: @escaping (CameraScreen.Result) -> Void) {
self.context = context
self.mode = mode
self.holder = holder
self.completion = completion
super.init(navigationBarPresentationData: nil)
self.statusBar.statusBarStyle = .White
self.supportedOrientations = ViewControllerSupportedOrientations(regularSize: .all, compactSize: .portrait)
self.navigationPresentation = .flatModal
}
required public init(coder: NSCoder) {
preconditionFailure()
}
override public func loadDisplayNode() {
self.displayNode = Node(controller: self)
super.displayNodeDidLoad()
}
override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
super.containerLayoutUpdated(layout, transition: transition)
(self.displayNode as! Node).containerLayoutUpdated(layout: layout, transition: Transition(transition))
}
}

View File

@ -0,0 +1,570 @@
import Foundation
import UIKit
import Display
import ComponentFlow
import SwiftSignalKit
import Photos
import LocalMediaResources
enum ShutterButtonState: Equatable {
case generic
case video
case stopRecording
case holdRecording(progress: Float)
}
private let maximumShutterSize = CGSize(width: 96.0, height: 96.0)
private extension SimpleShapeLayer {
func animateStrokeStart(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: Bool = true, completion: ((Bool) -> ())? = nil) {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "strokeStart", timingFunction: timingFunction, duration: duration, delay: delay, removeOnCompletion: removeOnCompletion, completion: completion)
}
func animateStrokeEnd(from: CGFloat, to: CGFloat, duration: Double, delay: Double = 0.0, timingFunction: String = CAMediaTimingFunctionName.easeInEaseOut.rawValue, removeOnCompletion: Bool = true, completion: ((Bool) -> ())? = nil) {
self.animate(from: NSNumber(value: Float(from)), to: NSNumber(value: Float(to)), keyPath: "strokeEnd", timingFunction: timingFunction, duration: duration, delay: delay, removeOnCompletion: removeOnCompletion, completion: completion)
}
}
private final class ShutterButtonContentComponent: Component {
let shutterState: ShutterButtonState
let highlightedAction: ActionSlot<Bool>
init(
shutterState: ShutterButtonState,
highlightedAction: ActionSlot<Bool>
) {
self.shutterState = shutterState
self.highlightedAction = highlightedAction
}
static func ==(lhs: ShutterButtonContentComponent, rhs: ShutterButtonContentComponent) -> Bool {
if lhs.shutterState != rhs.shutterState {
return false
}
return true
}
final class View: UIView {
private var component: ShutterButtonContentComponent?
private let ringLayer = SimpleShapeLayer()
private let innerLayer = SimpleLayer()
private let progressLayer = SimpleShapeLayer()
init() {
super.init(frame: CGRect())
self.layer.allowsGroupOpacity = true
self.layer.addSublayer(self.ringLayer)
self.layer.addSublayer(self.innerLayer)
self.layer.addSublayer(self.progressLayer)
}
required init?(coder aDecoder: NSCoder) {
preconditionFailure()
}
func updateIsHighlighted(_ isHighlighted: Bool) {
let scale: CGFloat = isHighlighted ? 0.8 : 1.0
let transition = Transition(animation: .curve(duration: 0.3, curve: .easeInOut))
transition.setTransform(layer: self.innerLayer, transform: CATransform3DMakeScale(scale, scale, 1.0))
}
func update(component: ShutterButtonContentComponent, availableSize: CGSize, transition: Transition) -> CGSize {
self.component = component
component.highlightedAction.connect { [weak self] highlighted in
self?.updateIsHighlighted(highlighted)
}
let innerColor: UIColor
let innerSize: CGSize
let innerCornerRadius: CGFloat
let ringSize: CGSize
let ringWidth: CGFloat = 3.0
var recordingProgress: Float?
switch component.shutterState {
case .generic:
innerColor = .white
innerSize = CGSize(width: 60.0, height: 60.0)
innerCornerRadius = innerSize.height / 2.0
ringSize = CGSize(width: 68.0, height: 68.0)
case .video:
innerColor = videoRedColor
innerSize = CGSize(width: 60.0, height: 60.0)
innerCornerRadius = innerSize.height / 2.0
ringSize = CGSize(width: 68.0, height: 68.0)
case .stopRecording:
innerColor = videoRedColor
innerSize = CGSize(width: 26.0, height: 26.0)
innerCornerRadius = 9.0
ringSize = CGSize(width: 68.0, height: 68.0)
case let .holdRecording(progress):
innerColor = videoRedColor
innerSize = CGSize(width: 60.0, height: 60.0)
innerCornerRadius = innerSize.height / 2.0
ringSize = CGSize(width: 92.0, height: 92.0)
recordingProgress = progress
}
self.ringLayer.fillColor = UIColor.clear.cgColor
self.ringLayer.strokeColor = UIColor.white.cgColor
self.ringLayer.lineWidth = ringWidth
let ringPath = CGPath(
ellipseIn: CGRect(
origin: CGPoint(
x: (maximumShutterSize.width - ringSize.width) / 2.0,
y: (maximumShutterSize.height - ringSize.height) / 2.0),
size: ringSize
),
transform: nil
)
transition.setShapeLayerPath(layer: self.ringLayer, path: ringPath)
self.ringLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
self.ringLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
transition.setBackgroundColor(layer: self.innerLayer, color: innerColor)
transition.setCornerRadius(layer: self.innerLayer, cornerRadius: innerCornerRadius)
transition.setPosition(layer: self.innerLayer, position: CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0))
transition.setBounds(layer: self.innerLayer, bounds: CGRect(origin: .zero, size: innerSize))
self.progressLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
self.progressLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
transition.setShapeLayerPath(layer: self.progressLayer, path: ringPath)
self.progressLayer.fillColor = UIColor.clear.cgColor
self.progressLayer.strokeColor = videoRedColor.cgColor
self.progressLayer.lineWidth = ringWidth + UIScreenPixel
self.progressLayer.lineCap = .round
self.progressLayer.transform = CATransform3DMakeRotation(-.pi / 2.0, 0.0, 0.0, 1.0)
let previousValue = self.progressLayer.strokeEnd
self.progressLayer.strokeEnd = CGFloat(recordingProgress ?? 0.0)
self.progressLayer.animateStrokeEnd(from: previousValue, to: self.progressLayer.strokeEnd, duration: 0.33)
return maximumShutterSize
}
}
func makeView() -> View {
return View()
}
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
private final class FlipButtonContentComponent: Component {
private let action: ActionSlot<Void>
init(action: ActionSlot<Void>) {
self.action = action
}
static func ==(lhs: FlipButtonContentComponent, rhs: FlipButtonContentComponent) -> Bool {
return true
}
final class View: UIView {
private var component: FlipButtonContentComponent?
private let icon = SimpleLayer()
init() {
super.init(frame: CGRect())
self.layer.addSublayer(self.icon)
self.icon.contents = UIImage(bundleImageName: "Camera/FlipIcon")?.cgImage
}
required init?(coder aDecoder: NSCoder) {
preconditionFailure()
}
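// Spins the flip icon by pi with a spring animation; on iOS 15+ the preferred
// frame rate range is raised to the display's maximum.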
func playAnimation() {
let animation = CASpringAnimation(keyPath: "transform.rotation.z")
animation.fromValue = 0.0 as NSNumber
animation.toValue = CGFloat.pi as NSNumber
animation.mass = 5.0
animation.stiffness = 900.0
animation.damping = 90.0
animation.duration = animation.settlingDuration
if #available(iOS 15.0, *) {
let maxFps = Float(UIScreen.main.maximumFramesPerSecond)
animation.preferredFrameRateRange = CAFrameRateRange(minimum: 30.0, maximum: maxFps, preferred: maxFps)
}
self.icon.add(animation, forKey: "transform.rotation.z")
}
func update(component: FlipButtonContentComponent, availableSize: CGSize, transition: Transition) -> CGSize {
self.component = component
component.action.connect { [weak self] _ in
self?.playAnimation()
}
let size = CGSize(width: 48.0, height: 48.0)
self.icon.position = CGPoint(x: size.width / 2.0, y: size.height / 2.0)
self.icon.bounds = CGRect(origin: .zero, size: size)
return size
}
}
func makeView() -> View {
return View()
}
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
final class CaptureControlsComponent: Component {
enum SwipeHint {
case none
case zoom
case lock
case releaseLock
case flip
}
let shutterState: ShutterButtonState
let lastGalleryAsset: PHAsset?
let tag: AnyObject?
let shutterTapped: () -> Void
let shutterPressed: () -> Void
let shutterReleased: () -> Void
let lockRecording: () -> Void
let flipTapped: () -> Void
let galleryTapped: () -> Void
let swipeHintUpdated: (SwipeHint) -> Void
init(
shutterState: ShutterButtonState,
lastGalleryAsset: PHAsset?,
tag: AnyObject?,
shutterTapped: @escaping () -> Void,
shutterPressed: @escaping () -> Void,
shutterReleased: @escaping () -> Void,
lockRecording: @escaping () -> Void,
flipTapped: @escaping () -> Void,
galleryTapped: @escaping () -> Void,
swipeHintUpdated: @escaping (SwipeHint) -> Void
) {
self.shutterState = shutterState
self.lastGalleryAsset = lastGalleryAsset
self.tag = tag
self.shutterTapped = shutterTapped
self.shutterPressed = shutterPressed
self.shutterReleased = shutterReleased
self.lockRecording = lockRecording
self.flipTapped = flipTapped
self.galleryTapped = galleryTapped
self.swipeHintUpdated = swipeHintUpdated
}
static func ==(lhs: CaptureControlsComponent, rhs: CaptureControlsComponent) -> Bool {
if lhs.shutterState != rhs.shutterState {
return false
}
if lhs.lastGalleryAsset?.localIdentifier != rhs.lastGalleryAsset?.localIdentifier {
return false
}
return true
}
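// State caches a thumbnail for the most recent gallery asset, keyed by
// localIdentifier, so the gallery button can display it without refetching.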
final class State: ComponentState {
var cachedAssetImage: (String, UIImage)?
private let assetDisposable = MetaDisposable()
var lastGalleryAsset: PHAsset? {
didSet {
if self.cachedAssetImage?.0 != self.lastGalleryAsset?.localIdentifier {
self.cachedAssetImage = nil
if let lastGalleryAsset = self.lastGalleryAsset {
self.assetDisposable.set((fetchPhotoLibraryImage(localIdentifier: lastGalleryAsset.localIdentifier, thumbnail: true)
|> deliverOnMainQueue).start(next: { [weak self] imageAndDegraded in
if let self, let (image, _) = imageAndDegraded {
self.cachedAssetImage = (lastGalleryAsset.localIdentifier, image)
self.updated(transition: .easeInOut(duration: 0.2))
}
}))
}
}
}
}
deinit {
self.assetDisposable.dispose()
}
}
func makeState() -> State {
return State()
}
final class View: UIView, ComponentTaggedView, UIGestureRecognizerDelegate {
private var component: CaptureControlsComponent?
private let lockView = ComponentView<Empty>()
private let galleryButtonView = ComponentView<Empty>()
private let shutterButtonView = ComponentView<Empty>()
private let flipButtonView = ComponentView<Empty>()
private let leftGuide = SimpleLayer()
private let rightGuide = SimpleLayer()
private let shutterHighlightedAction = ActionSlot<Bool>()
private let flipAnimationAction = ActionSlot<Void>()
private let lockImage = UIImage(bundleImageName: "Camera/LockIcon")
public func matches(tag: Any) -> Bool {
if let component = self.component, let componentTag = component.tag {
let tag = tag as AnyObject
if componentTag === tag {
return true
}
}
return false
}
init() {
super.init(frame: CGRect())
self.leftGuide.backgroundColor = UIColor(rgb: 0xffffff, alpha: 0.2).cgColor
self.rightGuide.backgroundColor = UIColor(rgb: 0xffffff, alpha: 0.2).cgColor
self.layer.addSublayer(self.leftGuide)
self.layer.addSublayer(self.rightGuide)
}
required init?(coder aDecoder: NSCoder) {
preconditionFailure()
}
@objc private func handlePress(_ gestureRecognizer: UILongPressGestureRecognizer) {
let location = gestureRecognizer.location(in: self)
switch gestureRecognizer.state {
case .began:
self.component?.shutterPressed()
self.component?.swipeHintUpdated(.zoom)
case .ended, .cancelled:
    // Releasing over the far-left lock area locks the recording; anywhere else stops it.
    if location.x < 75.0 {
self.component?.lockRecording()
} else {
self.component?.shutterReleased()
}
default:
break
}
}
@objc private func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
let location = gestureRecognizer.location(in: self)
switch gestureRecognizer.state {
case .changed:
    // Map the drag position to a hint: far left suggests release-to-lock, left of center suggests lock, right of center suggests flip, the middle suggests zoom.
    if location.x < self.frame.width / 2.0 - 40.0 {
if location.x < 75.0 {
self.component?.swipeHintUpdated(.releaseLock)
} else {
self.component?.swipeHintUpdated(.lock)
}
} else if location.x > self.frame.width / 2.0 + 40.0 {
self.component?.swipeHintUpdated(.flip)
} else {
self.component?.swipeHintUpdated(.zoom)
}
default:
break
}
}
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
func animateIn() {
}
func animateOut() {
}
func update(component: CaptureControlsComponent, state: State, availableSize: CGSize, transition: Transition) -> CGSize {
self.component = component
state.lastGalleryAsset = component.lastGalleryAsset
let size = CGSize(width: availableSize.width, height: maximumShutterSize.height)
let buttonSideInset: CGFloat = 28.0
//let buttonMaxOffset: CGFloat = 100.0
var isRecording = false
var isHolding = false
if case .stopRecording = component.shutterState {
isRecording = true
} else if case .holdRecording = component.shutterState {
isRecording = true
isHolding = true
}
let galleryButtonSize = self.galleryButtonView.update(
transition: .immediate,
component: AnyComponent(
Button(
content: AnyComponent(
Image(
image: state.cachedAssetImage?.1,
size: CGSize(width: 50.0, height: 50.0)
)
),
action: {
component.galleryTapped()
}
)
),
environment: {},
containerSize: CGSize(width: 50.0, height: 50.0)
)
let galleryButtonFrame = CGRect(origin: CGPoint(x: buttonSideInset, y: (size.height - galleryButtonSize.height) / 2.0), size: galleryButtonSize)
if let galleryButtonView = self.galleryButtonView.view {
galleryButtonView.clipsToBounds = true
galleryButtonView.layer.cornerRadius = 10.0
if galleryButtonView.superview == nil {
self.addSubview(galleryButtonView)
}
transition.setBounds(view: galleryButtonView, bounds: CGRect(origin: .zero, size: galleryButtonFrame.size))
transition.setPosition(view: galleryButtonView, position: galleryButtonFrame.center)
transition.setScale(view: galleryButtonView, scale: isRecording ? 0.1 : 1.0)
transition.setAlpha(view: galleryButtonView, alpha: isRecording ? 0.0 : 1.0)
}
let _ = self.lockView.update(
transition: .immediate,
component: AnyComponent(
Image(
image: self.lockImage,
size: CGSize(width: 30.0, height: 30.0)
)
),
environment: {},
containerSize: CGSize(width: 30.0, height: 30.0)
)
let lockFrame = galleryButtonFrame.insetBy(dx: 10.0, dy: 10.0)
if let lockView = self.lockView.view {
if lockView.superview == nil {
self.addSubview(lockView)
}
transition.setBounds(view: lockView, bounds: CGRect(origin: .zero, size: lockFrame.size))
transition.setPosition(view: lockView, position: lockFrame.center)
transition.setScale(view: lockView, scale: isHolding ? 1.0 : 0.1)
transition.setAlpha(view: lockView, alpha: isHolding ? 1.0 : 0.0)
}
let shutterButtonSize = self.shutterButtonView.update(
transition: transition,
component: AnyComponent(
Button(
content: AnyComponent(
ShutterButtonContentComponent(
shutterState: component.shutterState,
highlightedAction: self.shutterHighlightedAction
)
),
automaticHighlight: false,
action: {
component.shutterTapped()
},
highlightedAction: self.shutterHighlightedAction
).minSize(maximumShutterSize)
),
environment: {},
containerSize: availableSize
)
let shutterButtonFrame = CGRect(origin: CGPoint(x: (availableSize.width - shutterButtonSize.width) / 2.0, y: (size.height - shutterButtonSize.height) / 2.0), size: shutterButtonSize)
if let shutterButtonView = self.shutterButtonView.view {
if shutterButtonView.superview == nil {
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
panGestureRecognizer.delegate = self
shutterButtonView.addGestureRecognizer(panGestureRecognizer)
let pressGestureRecognizer = UILongPressGestureRecognizer(target: self, action: #selector(self.handlePress(_:)))
pressGestureRecognizer.delegate = self
shutterButtonView.addGestureRecognizer(pressGestureRecognizer)
self.addSubview(shutterButtonView)
}
transition.setFrame(view: shutterButtonView, frame: shutterButtonFrame)
}
let flipAnimationAction = self.flipAnimationAction
let flipButtonSize = self.flipButtonView.update(
transition: .immediate,
component: AnyComponent(
Button(
content: AnyComponent(
FlipButtonContentComponent(action: flipAnimationAction)
),
action: {
component.flipTapped()
flipAnimationAction.invoke(Void())
}
).minSize(CGSize(width: 44.0, height: 44.0))
),
environment: {},
containerSize: availableSize
)
let flipButtonFrame = CGRect(origin: CGPoint(x: availableSize.width - flipButtonSize.width - buttonSideInset, y: (size.height - flipButtonSize.height) / 2.0), size: flipButtonSize)
if let flipButtonView = self.flipButtonView.view {
if flipButtonView.superview == nil {
self.addSubview(flipButtonView)
}
transition.setFrame(view: flipButtonView, frame: flipButtonFrame)
}
let guideSpacing: CGFloat = 9.0
let guideSize = CGSize(width: isHolding ? 60.0 : 0.0, height: 1.0 + UIScreenPixel)
let guideAlpha = isHolding ? 1.0 : 0.0
let leftGuideFrame = CGRect(origin: CGPoint(x: shutterButtonFrame.minX - guideSpacing - guideSize.width, y: (size.height - guideSize.height) / 2.0), size: guideSize)
transition.setFrame(layer: self.leftGuide, frame: leftGuideFrame)
transition.setAlpha(layer: self.leftGuide, alpha: guideAlpha)
self.leftGuide.cornerRadius = guideSize.height / 2.0
let rightGuideFrame = CGRect(origin: CGPoint(x: shutterButtonFrame.maxX + guideSpacing, y: (size.height - guideSize.height) / 2.0), size: guideSize)
transition.setFrame(layer: self.rightGuide, frame: rightGuideFrame)
transition.setAlpha(layer: self.rightGuide, alpha: guideAlpha)
self.rightGuide.cornerRadius = guideSize.height / 2.0
if let screenTransition = transition.userData(CameraScreenTransition.self) {
switch screenTransition {
case .animateIn:
self.animateIn()
case .animateOut:
self.animateOut()
}
}
return size
}
}
func makeView() -> View {
return View()
}
func update(view: View, availableSize: CGSize, state: State, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, state: state, availableSize: availableSize, transition: transition)
}
}
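
For reference, a minimal wiring sketch of the component above — the parameter order and the .stopRecording case come from this diff; the closures are placeholder actions:

let controls = AnyComponent(CaptureControlsComponent(
    shutterState: .stopRecording,
    lastGalleryAsset: nil,
    tag: nil,
    shutterTapped: { print("shutter tapped") },
    shutterPressed: { print("hold began") },
    shutterReleased: { print("hold ended") },
    lockRecording: { print("recording locked") },
    flipTapped: { print("flip camera") },
    galleryTapped: { print("open gallery") },
    swipeHintUpdated: { hint in print("swipe hint: \(hint)") }
))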

View File

@@ -4,7 +4,7 @@ import SwiftSignalKit
import Photos
import AVFoundation
class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
public final class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
private let assetType: PHAssetMediaType?
private var registeredChangeObserver = false
@@ -12,7 +12,7 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
private let mediaAccessSink = ValuePipe<PHAuthorizationStatus>()
private let cameraAccessSink = ValuePipe<AVAuthorizationStatus?>()
init(assetType: PHAssetMediaType?) {
public init(assetType: PHAssetMediaType? = nil) {
self.assetType = assetType
super.init()
@@ -29,11 +29,11 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
}
}
func photoLibraryDidChange(_ changeInstance: PHChange) {
public func photoLibraryDidChange(_ changeInstance: PHChange) {
self.changeSink.putNext(changeInstance)
}
func fetchAssets(_ collection: PHAssetCollection) -> Signal<PHFetchResult<PHAsset>, NoError> {
public func fetchAssets(_ collection: PHAssetCollection) -> Signal<PHFetchResult<PHAsset>, NoError> {
let options = PHFetchOptions()
if let assetType = self.assetType {
options.predicate = NSPredicate(format: "mediaType = %d", assetType.rawValue)
@@ -55,7 +55,7 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
)
}
func fetchAssetsCollections(_ type: PHAssetCollectionType) -> Signal<PHFetchResult<PHAssetCollection>, NoError> {
public func fetchAssetsCollections(_ type: PHAssetCollectionType) -> Signal<PHFetchResult<PHAssetCollection>, NoError> {
let initialFetchResult = PHAssetCollection.fetchAssetCollections(with: type, subtype: .any, options: nil)
let fetchResult = Atomic<PHFetchResult<PHAssetCollection>>(value: initialFetchResult)
return .single(initialFetchResult)
@@ -72,7 +72,7 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
)
}
func recentAssets() -> Signal<PHFetchResult<PHAsset>?, NoError> {
public func recentAssets() -> Signal<PHFetchResult<PHAsset>?, NoError> {
let collections = PHAssetCollection.fetchAssetCollections(with: .smartAlbum, subtype: .smartAlbumUserLibrary, options: nil)
if let collection = collections.firstObject {
return fetchAssets(collection)
@@ -82,7 +82,7 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
}
}
func mediaAccess() -> Signal<PHAuthorizationStatus, NoError> {
public func mediaAccess() -> Signal<PHAuthorizationStatus, NoError> {
let initialStatus: PHAuthorizationStatus
if #available(iOS 14.0, *) {
initialStatus = PHPhotoLibrary.authorizationStatus(for: .readWrite)
@@ -95,13 +95,13 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
)
}
func requestMediaAccess() -> Void {
public func requestMediaAccess() -> Void {
PHPhotoLibrary.requestAuthorization { [weak self] status in
self?.mediaAccessSink.putNext(status)
}
}
func cameraAccess() -> Signal<AVAuthorizationStatus?, NoError> {
public func cameraAccess() -> Signal<AVAuthorizationStatus?, NoError> {
#if targetEnvironment(simulator)
return .single(.authorized)
#else
@@ -116,7 +116,7 @@ class MediaAssetsContext: NSObject, PHPhotoLibraryChangeObserver {
#endif
}
func requestCameraAccess() -> Void {
public func requestCameraAccess() -> Void {
AVCaptureDevice.requestAccess(for: .video, completionHandler: { [weak self] result in
if result {
self?.cameraAccessSink.putNext(.authorized)
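
With MediaAssetsContext and its accessors now public, callers outside MediaPickerUI can observe the photo library directly. A minimal sketch using the SwiftSignalKit conventions seen elsewhere in this diff (the assumption that the newest asset is the fetch result's lastObject is ours):

let assetsContext = MediaAssetsContext() // assetType defaults to nil, i.e. all media types
let disposable = (assetsContext.recentAssets()
|> deliverOnMainQueue).start(next: { fetchResult in
    if let fetchResult, let asset = fetchResult.lastObject {
        print("latest asset: \(asset.localIdentifier)")
    }
})
// disposable.dispose() when the observer goes away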

View File

@@ -0,0 +1,134 @@
import Foundation
import UIKit
import Display
import ComponentFlow
extension CameraMode {
var title: String {
switch self {
case .photo:
return "Photo"
case .video:
return "Video"
}
}
}
final class ModeComponent: Component {
let availableModes: [CameraMode]
let currentMode: CameraMode
let updatedMode: (CameraMode) -> Void
init(
availableModes: [CameraMode],
currentMode: CameraMode,
updatedMode: @escaping (CameraMode) -> Void
) {
self.availableModes = availableModes
self.currentMode = currentMode
self.updatedMode = updatedMode
}
static func ==(lhs: ModeComponent, rhs: ModeComponent) -> Bool {
if lhs.availableModes != rhs.availableModes {
return false
}
if lhs.currentMode != rhs.currentMode {
return false
}
return true
}
final class View: UIView {
private var component: ModeComponent?
final class ItemView: HighlightTrackingButton {
var pressed: () -> Void = {
}
init() {
super.init(frame: .zero)
self.addTarget(self, action: #selector(self.buttonPressed), for: .touchUpInside)
}
required init(coder: NSCoder) {
preconditionFailure()
}
@objc func buttonPressed() {
self.pressed()
}
func update(value: String, selected: Bool) {
self.setAttributedTitle(NSAttributedString(string: value.uppercased(), font: Font.with(size: 14.0, design: .camera, weight: .semibold), textColor: selected ? UIColor(rgb: 0xf8d74a) : .white, paragraphAlignment: .center), for: .normal)
}
}
private var containerView = UIView()
private var itemViews: [ItemView] = []
init() {
super.init(frame: CGRect())
self.layer.allowsGroupOpacity = true
self.addSubview(self.containerView)
}
required init?(coder aDecoder: NSCoder) {
preconditionFailure()
}
func update(component: ModeComponent, availableSize: CGSize, transition: Transition) -> CGSize {
self.component = component
let updatedMode = component.updatedMode
let spacing: CGFloat = 14.0
let buttonSize = CGSize(width: 55.0, height: 44.0)
var i = 0
var itemFrame = CGRect(origin: .zero, size: buttonSize)
var selectedCenter = itemFrame.minX
for mode in component.availableModes {
let itemView: ItemView
if self.itemViews.count == i {
itemView = ItemView()
self.containerView.addSubview(itemView)
self.itemViews.append(itemView)
} else {
itemView = self.itemViews[i]
}
itemView.pressed = {
updatedMode(mode)
}
itemView.update(value: mode.title, selected: mode == component.currentMode)
itemView.bounds = CGRect(origin: .zero, size: itemFrame.size)
itemView.center = CGPoint(x: itemFrame.midX, y: itemFrame.midY)
if mode == component.currentMode {
selectedCenter = itemFrame.midX
}
i += 1
itemFrame = itemFrame.offsetBy(dx: buttonSize.width + spacing, dy: 0.0)
}
let totalSize = CGSize(width: buttonSize.width * CGFloat(component.availableModes.count) + spacing * CGFloat(component.availableModes.count - 1), height: buttonSize.height)
// Shift the strip horizontally so the selected mode's label sits at the view's center.
transition.setFrame(view: self.containerView, frame: CGRect(origin: CGPoint(x: availableSize.width / 2.0 - selectedCenter, y: 0.0), size: totalSize))
return CGSize(width: availableSize.width, height: buttonSize.height)
}
}
func makeView() -> View {
return View()
}
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}

View File

@@ -0,0 +1,152 @@
import Foundation
import UIKit
import Metal
import MetalKit
import simd
final class ShutterBlobView: MTKView, MTKViewDelegate {
// MTKViewDelegate requirement; actual rendering is driven manually via draw(_:) below.
public func draw(in view: MTKView) {
}
private let commandQueue: MTLCommandQueue
private let drawPassthroughPipelineState: MTLRenderPipelineState
private var displayLink: CADisplayLink?
private var viewportDimensions = CGSize(width: 1, height: 1)
private var startTimestamp = CACurrentMediaTime()
public init?(test: Bool) {
let mainBundle = Bundle(for: ShutterBlobView.self)
guard let device = MTLCreateSystemDefaultDevice() else {
return nil
}
guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: mainBundle) else {
return nil
}
guard let commandQueue = device.makeCommandQueue() else {
return nil
}
self.commandQueue = commandQueue
guard let loadedVertexProgram = defaultLibrary.makeFunction(name: "cameraBlobVertex") else {
return nil
}
guard let loadedFragmentProgram = defaultLibrary.makeFunction(name: "cameraBlobFragment") else {
return nil
}
let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
pipelineStateDescriptor.vertexFunction = loadedVertexProgram
pipelineStateDescriptor.fragmentFunction = loadedFragmentProgram
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
pipelineStateDescriptor.colorAttachments[0].isBlendingEnabled = true
pipelineStateDescriptor.colorAttachments[0].rgbBlendOperation = .add
pipelineStateDescriptor.colorAttachments[0].alphaBlendOperation = .add
pipelineStateDescriptor.colorAttachments[0].sourceRGBBlendFactor = .sourceAlpha
pipelineStateDescriptor.colorAttachments[0].sourceAlphaBlendFactor = .sourceAlpha
pipelineStateDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha
pipelineStateDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha
guard let drawPassthroughPipelineState = try? device.makeRenderPipelineState(descriptor: pipelineStateDescriptor) else {
    return nil
}
self.drawPassthroughPipelineState = drawPassthroughPipelineState
super.init(frame: CGRect(), device: device)
self.delegate = self
self.isOpaque = false
self.backgroundColor = .clear
self.framebufferOnly = true
// CADisplayLink retains its target strongly; this proxy holds the view weakly to break the cycle.
class DisplayLinkProxy: NSObject {
weak var target: ShutterBlobView?
init(target: ShutterBlobView) {
self.target = target
}
@objc func displayLinkEvent() {
self.target?.displayLinkEvent()
}
}
self.displayLink = CADisplayLink(target: DisplayLinkProxy(target: self), selector: #selector(DisplayLinkProxy.displayLinkEvent))
if #available(iOS 15.0, *) {
let maxFps = Float(UIScreen.main.maximumFramesPerSecond)
self.displayLink?.preferredFrameRateRange = CAFrameRateRange(minimum: 60.0, maximum: maxFps, preferred: maxFps)
}
self.displayLink?.add(to: .main, forMode: .common)
self.displayLink?.isPaused = false
// Pause MTKView's internal render loop; frames are driven by the display link above.
self.isPaused = true
}
public func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
self.viewportDimensions = size
}
required public init(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit {
self.displayLink?.invalidate()
}
@objc private func displayLinkEvent() {
self.draw()
}
override public func draw(_ rect: CGRect) {
    guard let drawable = self.currentDrawable else {
        return
    }
    self.redraw(drawable: drawable)
}
private func redraw(drawable: MTLDrawable) {
guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
return
}
guard let renderPassDescriptor = self.currentRenderPassDescriptor else {
    return
}
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0.0)
guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
return
}
let viewportDimensions = self.viewportDimensions
renderEncoder.setViewport(MTLViewport(originX: 0.0, originY: 0.0, width: viewportDimensions.width, height: viewportDimensions.height, znear: -1.0, zfar: 1.0))
renderEncoder.setRenderPipelineState(self.drawPassthroughPipelineState)
let w = Float(1)
let h = Float(1)
var vertices: [Float] = [
w, -h,
-w, -h,
-w, h,
w, -h,
-w, h,
w, h
]
renderEncoder.setVertexBytes(&vertices, length: MemoryLayout<Float>.size * vertices.count, index: 0)
var resolution = simd_uint2(UInt32(viewportDimensions.width), UInt32(viewportDimensions.height))
renderEncoder.setFragmentBytes(&resolution, length: MemoryLayout<simd_uint2>.size, index: 0)
var time = Float(CACurrentMediaTime() - self.startTimestamp) * 0.5
renderEncoder.setFragmentBytes(&time, length: MemoryLayout<Float>.size, index: 1)
renderEncoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6, instanceCount: 1)
renderEncoder.endEncoding()
commandBuffer.present(drawable)
commandBuffer.commit()
}
}
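
The pipeline above loads cameraBlobVertex and cameraBlobFragment from the module's default Metal library; the actual shader ships in the MetalResources bundle and is not part of this hunk. A minimal sketch that matches the buffer layout used by redraw(drawable:) — packed float2 positions at vertex buffer 0, a uint2 resolution at fragment buffer 0, a float time at fragment buffer 1 — with a placeholder blob effect:

#include <metal_stdlib>
using namespace metal;

struct BlobVarying {
    float4 position [[position]];
};

vertex BlobVarying cameraBlobVertex(const device packed_float2 *positions [[ buffer(0) ]],
                                    uint vid [[ vertex_id ]]) {
    BlobVarying out;
    out.position = float4(float2(positions[vid]), 0.0, 1.0);
    return out;
}

fragment half4 cameraBlobFragment(BlobVarying input [[ stage_in ]],
                                  constant uint2 &resolution [[ buffer(0) ]],
                                  constant float &time [[ buffer(1) ]]) {
    // Placeholder effect: a soft white disc that pulses with time.
    float2 uv = input.position.xy / float2(resolution);
    float d = distance(uv, float2(0.5, 0.5));
    float alpha = (1.0 - smoothstep(0.40, 0.48, d)) * (0.75 + 0.25 * sin(time));
    return half4(1.0, 1.0, 1.0, half(alpha));
}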

View File

@@ -0,0 +1,181 @@
import Foundation
import UIKit
import Display
import ComponentFlow
final class ZoomComponent: Component {
let availableValues: [Float]
let value: Float
let tag: AnyObject?
init(
availableValues: [Float],
value: Float,
tag: AnyObject?
) {
self.availableValues = availableValues
self.value = value
self.tag = tag
}
static func ==(lhs: ZoomComponent, rhs: ZoomComponent) -> Bool {
if lhs.availableValues != rhs.availableValues {
return false
}
if lhs.value != rhs.value {
return false
}
return true
}
final class View: UIView, UIGestureRecognizerDelegate, ComponentTaggedView {
final class ItemView: HighlightTrackingButton {
init() {
super.init(frame: .zero)
self.backgroundColor = UIColor(rgb: 0x000000, alpha: 0.3)
if #available(iOS 13.0, *) {
self.layer.cornerCurve = .circular
}
self.layer.cornerRadius = 18.5
}
required init(coder: NSCoder) {
preconditionFailure()
}
func update(value: String, selected: Bool) {
self.setAttributedTitle(NSAttributedString(string: value, font: Font.with(size: 13.0, design: .round, weight: selected ? .semibold : .regular), textColor: selected ? UIColor(rgb: 0xf8d74a) : .white, paragraphAlignment: .center), for: .normal)
}
}
private let backgroundView: BlurredBackgroundView
private var itemViews: [ItemView] = []
private var component: ZoomComponent?
public func matches(tag: Any) -> Bool {
if let component = self.component, let componentTag = component.tag {
let tag = tag as AnyObject
if componentTag === tag {
return true
}
}
return false
}
init() {
self.backgroundView = BlurredBackgroundView(color: UIColor(rgb: 0x222222, alpha: 0.3))
self.backgroundView.clipsToBounds = true
self.backgroundView.layer.cornerRadius = 43.0 / 2.0
super.init(frame: CGRect())
self.layer.allowsGroupOpacity = true
self.addSubview(self.backgroundView)
let pressGestureRecognizer = UILongPressGestureRecognizer(target: self, action: #selector(self.handlePress(_:)))
pressGestureRecognizer.minimumPressDuration = 0.01
pressGestureRecognizer.delegate = self
self.addGestureRecognizer(pressGestureRecognizer)
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
panGestureRecognizer.delegate = self
self.addGestureRecognizer(panGestureRecognizer)
}
required init?(coder aDecoder: NSCoder) {
preconditionFailure()
}
@objc func handlePress(_ gestureRecognizer: UILongPressGestureRecognizer) {
}
@objc func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
}
override func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer) -> Bool {
return true
}
func animateIn() {
self.backgroundView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
}
func animateOut() {
self.backgroundView.alpha = 0.0
self.backgroundView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3)
}
func update(component: ZoomComponent, availableSize: CGSize, transition: Transition) -> CGSize {
self.component = component
let sideInset: CGFloat = 3.0
let spacing: CGFloat = 3.0
let buttonSize = CGSize(width: 37.0, height: 37.0)
let size: CGSize = CGSize(width: buttonSize.width * CGFloat(component.availableValues.count) + spacing * CGFloat(component.availableValues.count - 1) + sideInset * 2.0, height: 43.0)
if let screenTransition = transition.userData(CameraScreenTransition.self) {
switch screenTransition {
case .animateIn:
self.animateIn()
case .animateOut:
self.animateOut()
}
}
var i = 0
var itemFrame = CGRect(origin: CGPoint(x: sideInset, y: 3.0), size: buttonSize)
for value in component.availableValues {
let itemView: ItemView
if self.itemViews.count == i {
itemView = ItemView()
self.addSubview(itemView)
self.itemViews.append(itemView)
} else {
itemView = self.itemViews[i]
}
let text: String
if value > 0.5 {
if value == 1.0 {
text = "1×"
} else {
text = "\(Int(value))"
}
} else {
text = String(format: "%0.1f", value)
}
itemView.update(value: text, selected: value == 1.0)
itemView.bounds = CGRect(origin: .zero, size: itemFrame.size)
itemView.center = CGPoint(x: itemFrame.midX, y: itemFrame.midY)
if value == 1.0 {
itemView.transform = CGAffineTransformIdentity
} else {
itemView.transform = CGAffineTransform(scaleX: 0.7, y: 0.7)
}
i += 1
itemFrame = itemFrame.offsetBy(dx: buttonSize.width + spacing, dy: 0.0)
}
transition.setFrame(view: self.backgroundView, frame: CGRect(origin: .zero, size: size))
self.backgroundView.update(size: size, transition: transition.containedViewLayoutTransition)
return size
}
}
func makeView() -> View {
return View()
}
func update(view: View, availableSize: CGSize, state: EmptyComponentState, environment: Environment<Empty>, transition: Transition) -> CGSize {
return view.update(component: self, availableSize: availableSize, transition: transition)
}
}
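
The label formatting in the update loop above, extracted as a pure function for clarity (a hypothetical helper, not part of this diff):

func zoomLabel(for value: Float) -> String {
    if value > 0.5 {
        // Integer zooms: 1.0 gets the multiplier sign, larger steps are shown bare.
        return value == 1.0 ? "1×" : "\(Int(value))"
    } else {
        // Fractional zooms keep one decimal, e.g. 0.5 -> "0.5".
        return String(format: "%0.1f", value)
    }
}
// zoomLabel(for: 0.5) == "0.5"; zoomLabel(for: 1.0) == "1×"; zoomLabel(for: 2.0) == "2"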

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "close.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,9 @@
{
"info" : {
"author" : "xcode",
"version" : 1
},
"properties" : {
"provides-namespace" : true
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "flash.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "flip.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "lock_30.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,134 @@
[binary asset: lock_30.pdf — 30×30 pt vector lock icon; raw PDF stream omitted]

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "locked_30.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,135 @@
[binary asset: locked_30.pdf — 30×30 pt vector locked-state icon; raw PDF stream omitted]

View File

@@ -0,0 +1,12 @@
{
"images" : [
{
"filename" : "camera_30.pdf",
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,111 @@
[binary asset: camera_30.pdf — 30×30 pt vector camera icon; raw PDF stream omitted]

View File

@@ -11119,15 +11119,37 @@ public final class ChatControllerImpl: TelegramBaseController, ChatController, G
if !self.checkedPeerChatServiceActions {
self.checkedPeerChatServiceActions = true
if case let .peer(peerId) = self.chatLocation, peerId.namespace == Namespaces.Peer.SecretChat, self.screenCaptureManager == nil {
self.screenCaptureManager = ScreenCaptureDetectionManager(check: { [weak self] in
if let strongSelf = self, strongSelf.canReadHistoryValue, strongSelf.traceVisibility() {
let _ = strongSelf.context.engine.messages.addSecretChatMessageScreenshot(peerId: peerId).start()
return true
} else {
return false
}
})
if case let .peer(peerId) = self.chatLocation, self.screenCaptureManager == nil {
if peerId.namespace == Namespaces.Peer.SecretChat {
self.screenCaptureManager = ScreenCaptureDetectionManager(check: { [weak self] in
if let strongSelf = self, strongSelf.traceVisibility() {
if strongSelf.canReadHistoryValue {
let _ = strongSelf.context.engine.messages.addSecretChatMessageScreenshot(peerId: peerId).start()
}
return true
} else {
return false
}
})
} else if peerId.namespace == Namespaces.Peer.CloudUser && peerId.id._internalGetInt64Value() == 777000 {
self.screenCaptureManager = ScreenCaptureDetectionManager(check: { [weak self] in
if let strongSelf = self, strongSelf.traceVisibility() {
let loginCodeRegex = try? NSRegularExpression(pattern: "[\\d\\-]{5,7}", options: [])
var leakingLoginCode: String?
strongSelf.chatDisplayNode.historyNode.forEachVisibleMessageItemNode({ itemNode in
if let text = itemNode.item?.message.text, let matches = loginCodeRegex?.matches(in: text, options: [], range: NSMakeRange(0, (text as NSString).length)), let match = matches.first {
leakingLoginCode = (text as NSString).substring(with: match.range)
}
})
if let _ = leakingLoginCode {
}
return true
} else {
return false
}
})
}
}
if case let .peer(peerId) = self.chatLocation {
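
The detection above scans visible messages for a 5-to-7 character run of digits and dashes. The same match as a standalone, testable sketch (hypothetical helper name):

func firstLoginCode(in text: String) -> String? {
    guard let regex = try? NSRegularExpression(pattern: "[\\d\\-]{5,7}", options: []) else {
        return nil
    }
    let nsText = text as NSString
    guard let match = regex.firstMatch(in: text, options: [], range: NSMakeRange(0, nsText.length)) else {
        return nil
    }
    return nsText.substring(with: match.range)
}
// firstLoginCode(in: "Your login code: 12345") == "12345"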

View File

@@ -31,6 +31,8 @@ import ChatControllerInteraction
import ChatPresentationInterfaceState
import StorageUsageScreen
import DebugSettingsUI
import MediaPickerUI
import Photos
private final class AccountUserInterfaceInUseContext {
let subscribers = Bag<(Bool) -> Void>()
@@ -1720,6 +1722,10 @@ public final class SharedAccountContextImpl: SharedAccountContext {
public func makeStickerPackScreen(context: AccountContext, updatedPresentationData: (initial: PresentationData, signal: Signal<PresentationData, NoError>)?, mainStickerPack: StickerPackReference, stickerPacks: [StickerPackReference], loadedStickerPacks: [LoadedStickerPack], parentNavigationController: NavigationController?, sendSticker: ((FileMediaReference, UIView, CGRect) -> Bool)?) -> ViewController {
return StickerPackScreen(context: context, updatedPresentationData: updatedPresentationData, mainStickerPack: mainStickerPack, stickerPacks: stickerPacks, loadedStickerPacks: loadedStickerPacks, parentNavigationController: parentNavigationController, sendSticker: sendSticker)
}
public func makeMediaPickerScreen(context: AccountContext, completion: @escaping (PHAsset) -> Void) -> ViewController {
return storyMediaPickerController(context: context, completion: completion)
}
public func makeProxySettingsController(sharedContext: SharedAccountContext, account: UnauthorizedAccount) -> ViewController {
return proxySettingsController(accountManager: sharedContext.accountManager, postbox: account.postbox, network: account.network, mode: .modal, presentationData: sharedContext.currentPresentationData.with { $0 }, updatedPresentationData: sharedContext.presentationData)

View File

@@ -17,6 +17,9 @@ import DebugSettingsUI
import TabBarUI
import WallpaperBackgroundNode
import ChatPresentationInterfaceState
import CameraScreen
import LegacyComponents
import LegacyMediaPickerUI
private class DetailsChatPlaceholderNode: ASDisplayNode, NavigationDetailsPlaceholderNode {
private var presentationData: PresentationData
@@ -181,6 +184,8 @@ public final class TelegramRootController: NavigationController {
accountSettingsController.parentController = self
controllers.append(accountSettingsController)
tabBarController.cameraItem = UITabBarItem(title: "Camera", image: UIImage(bundleImageName: "Chat List/Tabs/IconCamera"), tag: 2)
tabBarController.setControllers(controllers, selectedIndex: restoreSettignsController != nil ? (controllers.count - 1) : (controllers.count - 2))
self.contactsController = contactsController
@@ -189,6 +194,10 @@
self.accountSettingsController = accountSettingsController
self.rootTabController = tabBarController
self.pushViewController(tabBarController, animated: false)
tabBarController.middleItemAction = { [weak self] in
self?.openStoryCamera()
}
}
public func updateRootControllers(showCallsTab: Bool) {
@@ -236,6 +245,37 @@
presentedLegacyShortcutCamera(context: self.context, saveCapturedMedia: false, saveEditedPhotos: false, mediaGrouping: true, parentController: controller)
}
public func openStoryCamera() {
guard let controller = self.viewControllers.last as? ViewController else {
return
}
controller.view.endEditing(true)
var presentImpl: ((ViewController) -> Void)?
let cameraController = CameraScreen(context: self.context, mode: .story, completion: { [weak self] result in
if let self {
let item: TGMediaEditableItem & TGMediaSelectableItem
switch result {
case let .image(image):
item = TGCameraCapturedPhoto(existing: image)
case let .video(path):
item = TGCameraCapturedVideo(url: URL(fileURLWithPath: path))
case let .asset(asset):
item = TGMediaAsset(phAsset: asset)
}
legacyFullMediaEditor(context: self.context, item: item, getCaptionPanelView: { return nil }, sendMessagesWithSignals: { _, _, _ in
}, present: { c, a in
presentImpl?(c)
})
}
})
controller.push(cameraController)
presentImpl = { [weak cameraController] c in
cameraController?.present(c, in: .window(.root))
}
}
public func openSettings() {
guard let rootTabController = self.rootTabController else {
return