Camera improvements

Ilya Laktyushin 2023-05-01 15:51:23 +04:00
parent a87079fff8
commit b9dc5d99cd
21 changed files with 1203 additions and 233 deletions

View File

@ -55,7 +55,11 @@ private final class CameraContext {
if let rotation = CameraPreviewView.Rotation(with: .portrait, videoOrientation: videoOrientation, cameraPosition: self.device.position) {
previewView.rotation = rotation
}
if #available(iOS 13.0, *), connection.inputPorts.first?.sourceDevicePosition == .front {
let width = CVPixelBufferGetWidth(pixelBuffer)
let height = CVPixelBufferGetHeight(pixelBuffer)
previewView.captureDeviceResolution = CGSize(width: width, height: height)
}
previewView.pixelBuffer = pixelBuffer
Queue.mainQueue().async {
self.videoOrientation = videoOrientation
@ -63,6 +67,15 @@ private final class CameraContext {
}
}
self.output.processFaceLandmarks = { [weak self] observations in
guard let self else {
return
}
if let previewView = self.previewView {
previewView.drawFaceObservations(observations)
}
}
self.output.processCodes = { [weak self] codes in
self?.detectedCodesPipe.putNext(codes)
}
@ -158,11 +171,13 @@ private final class CameraContext {
func setFlashMode(_ mode: Camera.FlashMode) {
self._flashMode = mode
- if mode == .on {
- self.output.activeFilter = self.filter
- } else if mode == .off {
- self.output.activeFilter = nil
- }
+ // if mode == .on {
+ // self.output.faceLandmarks = true
+ // //self.output.activeFilter = self.filter
+ // } else if mode == .off {
+ // self.output.faceLandmarks = false
+ // //self.output.activeFilter = nil
+ // }
}
func setZoomLevel(_ zoomLevel: CGFloat) {

View File

@ -16,11 +16,10 @@ protocol CameraFilter: AnyObject {
func render(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer?
}
- func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) ->(
+ func allocateOutputBufferPool(with inputFormatDescription: CMFormatDescription, outputRetainedBufferCountHint: Int) -> (
outputBufferPool: CVPixelBufferPool?,
outputColorSpace: CGColorSpace?,
outputFormatDescription: CMFormatDescription?) {
let inputMediaSubType = CMFormatDescriptionGetMediaSubType(inputFormatDescription)
if inputMediaSubType != kCVPixelFormatType_32BGRA {
assertionFailure("Invalid input pixel buffer type \(inputMediaSubType)")
@ -109,9 +108,7 @@ class CameraTestFilter: CameraFilter {
private var rosyFilter: CIFilter?
private var outputColorSpace: CGColorSpace?
private var outputPixelBufferPool: CVPixelBufferPool?
private(set) var outputFormatDescription: CMFormatDescription?
private(set) var inputFormatDescription: CMFormatDescription?

View File

@ -1,5 +1,6 @@
import AVFoundation
import SwiftSignalKit
import Vision
public struct CameraCode: Equatable {
public enum CodeType {
@ -44,17 +45,21 @@ final class CameraOutput: NSObject {
private let videoOutput = AVCaptureVideoDataOutput()
private let audioOutput = AVCaptureAudioDataOutput()
private let metadataOutput = AVCaptureMetadataOutput()
private let faceLandmarksOutput = FaceLandmarksDataOutput()
private let queue = DispatchQueue(label: "")
private let metadataQueue = DispatchQueue(label: "")
private let faceLandmarksQueue = DispatchQueue(label: "")
private var photoCaptureRequests: [Int64: PhotoCaptureContext] = [:]
private var videoRecorder: VideoRecorder?
var activeFilter: CameraFilter?
var faceLandmarks: Bool = false
var processSampleBuffer: ((CVImageBuffer, AVCaptureConnection) -> Void)?
var processCodes: (([CameraCode]) -> Void)?
var processFaceLandmarks: (([VNFaceObservation]) -> Void)?
override init() {
super.init()
@ -62,6 +67,12 @@ final class CameraOutput: NSObject {
self.videoOutput.alwaysDiscardsLateVideoFrames = true;
self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as [String : Any]
//[kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String : Any]
self.faceLandmarksOutput.outputFaceObservations = { [weak self] observations in
if let self {
self.processFaceLandmarks?(observations)
}
}
}
deinit {
@ -170,7 +181,7 @@ final class CameraOutput: NSObject {
self.videoRecorder = videoRecorder
return Signal { subscriber in
- let timer = SwiftSignalKit.Timer(timeout: 0.33, repeat: true, completion: { [weak videoRecorder] in
+ let timer = SwiftSignalKit.Timer(timeout: 0.1, repeat: true, completion: { [weak videoRecorder] in
subscriber.putNext(videoRecorder?.duration ?? 0.0)
}, queue: Queue.mainQueue())
timer.start()
@ -197,6 +208,13 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
guard CMSampleBufferDataIsReady(sampleBuffer) else {
return
}
if self.faceLandmarks {
self.faceLandmarksQueue.async {
self.faceLandmarksOutput.process(sampleBuffer: sampleBuffer)
}
}
let finalSampleBuffer: CMSampleBuffer = sampleBuffer
if let videoPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer), let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) {
var finalVideoPixelBuffer = videoPixelBuffer
@ -210,7 +228,6 @@ extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureA
}
finalVideoPixelBuffer = filteredBuffer
}
self.processSampleBuffer?(finalVideoPixelBuffer, connection)
}

View File

@ -6,6 +6,7 @@ import SwiftSignalKit
import Metal
import MetalKit
import CoreMedia
import Vision
public class CameraPreviewView: MTKView {
private let queue = DispatchQueue(label: "CameraPreview", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
@ -302,4 +303,198 @@ public class CameraPreviewView: MTKView {
commandBuffer.present(drawable)
commandBuffer.commit()
}
var captureDeviceResolution: CGSize = CGSize() {
didSet {
if oldValue.width.isZero, !self.captureDeviceResolution.width.isZero {
Queue.mainQueue().async {
self.setupVisionDrawingLayers()
}
}
}
}
var detectionOverlayLayer: CALayer?
var detectedFaceRectangleShapeLayer: CAShapeLayer?
var detectedFaceLandmarksShapeLayer: CAShapeLayer?
func drawFaceObservations(_ faceObservations: [VNFaceObservation]) {
guard let faceRectangleShapeLayer = self.detectedFaceRectangleShapeLayer,
let faceLandmarksShapeLayer = self.detectedFaceLandmarksShapeLayer
else {
return
}
CATransaction.begin()
CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
self.detectionOverlayLayer?.isHidden = faceObservations.isEmpty
let faceRectanglePath = CGMutablePath()
let faceLandmarksPath = CGMutablePath()
for faceObservation in faceObservations {
self.addIndicators(to: faceRectanglePath,
faceLandmarksPath: faceLandmarksPath,
for: faceObservation)
}
faceRectangleShapeLayer.path = faceRectanglePath
faceLandmarksShapeLayer.path = faceLandmarksPath
self.updateLayerGeometry()
CATransaction.commit()
}
fileprivate func addPoints(in landmarkRegion: VNFaceLandmarkRegion2D, to path: CGMutablePath, applying affineTransform: CGAffineTransform, closingWhenComplete closePath: Bool) {
let pointCount = landmarkRegion.pointCount
if pointCount > 1 {
let points: [CGPoint] = landmarkRegion.normalizedPoints
path.move(to: points[0], transform: affineTransform)
path.addLines(between: points, transform: affineTransform)
if closePath {
path.addLine(to: points[0], transform: affineTransform)
path.closeSubpath()
}
}
}
fileprivate func addIndicators(to faceRectanglePath: CGMutablePath, faceLandmarksPath: CGMutablePath, for faceObservation: VNFaceObservation) {
let displaySize = self.captureDeviceResolution
let faceBounds = VNImageRectForNormalizedRect(faceObservation.boundingBox, Int(displaySize.width), Int(displaySize.height))
faceRectanglePath.addRect(faceBounds)
if let landmarks = faceObservation.landmarks {
let affineTransform = CGAffineTransform(translationX: faceBounds.origin.x, y: faceBounds.origin.y)
.scaledBy(x: faceBounds.size.width, y: faceBounds.size.height)
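// Note: Vision's landmark points are normalized to the face bounding box, so
// this transform maps them into the capture-resolution coordinate space.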
let openLandmarkRegions: [VNFaceLandmarkRegion2D?] = [
landmarks.leftEyebrow,
landmarks.rightEyebrow,
landmarks.faceContour,
landmarks.noseCrest,
landmarks.medianLine
]
for openLandmarkRegion in openLandmarkRegions where openLandmarkRegion != nil {
self.addPoints(in: openLandmarkRegion!, to: faceLandmarksPath, applying: affineTransform, closingWhenComplete: false)
}
let closedLandmarkRegions: [VNFaceLandmarkRegion2D?] = [
landmarks.leftEye,
landmarks.rightEye,
landmarks.outerLips,
landmarks.innerLips,
landmarks.nose
]
for closedLandmarkRegion in closedLandmarkRegions where closedLandmarkRegion != nil {
self.addPoints(in: closedLandmarkRegion!, to: faceLandmarksPath, applying: affineTransform, closingWhenComplete: true)
}
}
}
fileprivate func radiansForDegrees(_ degrees: CGFloat) -> CGFloat {
return CGFloat(Double(degrees) * Double.pi / 180.0)
}
fileprivate func updateLayerGeometry() {
guard let overlayLayer = self.detectionOverlayLayer else {
return
}
CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
let videoPreviewRect = self.bounds
var rotation: CGFloat
var scaleX: CGFloat
var scaleY: CGFloat
// Rotate the layer into screen orientation.
switch UIDevice.current.orientation {
case .portraitUpsideDown:
rotation = 180
scaleX = videoPreviewRect.width / captureDeviceResolution.width
scaleY = videoPreviewRect.height / captureDeviceResolution.height
case .landscapeLeft:
rotation = 90
scaleX = videoPreviewRect.height / captureDeviceResolution.width
scaleY = scaleX
case .landscapeRight:
rotation = -90
scaleX = videoPreviewRect.height / captureDeviceResolution.width
scaleY = scaleX
default:
rotation = 0
scaleX = videoPreviewRect.width / captureDeviceResolution.width
scaleY = videoPreviewRect.height / captureDeviceResolution.height
}
// Scale and mirror the image to ensure upright presentation.
let affineTransform = CGAffineTransform(rotationAngle: radiansForDegrees(rotation))
.scaledBy(x: scaleX, y: -scaleY)
overlayLayer.setAffineTransform(affineTransform)
// Cover entire screen UI.
let rootLayerBounds = self.bounds
overlayLayer.position = CGPoint(x: rootLayerBounds.midX, y: rootLayerBounds.midY)
}
fileprivate func setupVisionDrawingLayers() {
let captureDeviceResolution = self.captureDeviceResolution
let rootLayer = self.layer
let captureDeviceBounds = CGRect(x: 0,
y: 0,
width: captureDeviceResolution.width,
height: captureDeviceResolution.height)
let captureDeviceBoundsCenterPoint = CGPoint(x: captureDeviceBounds.midX,
y: captureDeviceBounds.midY)
let normalizedCenterPoint = CGPoint(x: 0.5, y: 0.5)
let overlayLayer = CALayer()
overlayLayer.name = "DetectionOverlay"
overlayLayer.masksToBounds = true
overlayLayer.anchorPoint = normalizedCenterPoint
overlayLayer.bounds = captureDeviceBounds
overlayLayer.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY)
let faceRectangleShapeLayer = CAShapeLayer()
faceRectangleShapeLayer.name = "RectangleOutlineLayer"
faceRectangleShapeLayer.bounds = captureDeviceBounds
faceRectangleShapeLayer.anchorPoint = normalizedCenterPoint
faceRectangleShapeLayer.position = captureDeviceBoundsCenterPoint
faceRectangleShapeLayer.fillColor = nil
faceRectangleShapeLayer.strokeColor = UIColor.green.withAlphaComponent(0.2).cgColor
faceRectangleShapeLayer.lineWidth = 2
let faceLandmarksShapeLayer = CAShapeLayer()
faceLandmarksShapeLayer.name = "FaceLandmarksLayer"
faceLandmarksShapeLayer.bounds = captureDeviceBounds
faceLandmarksShapeLayer.anchorPoint = normalizedCenterPoint
faceLandmarksShapeLayer.position = captureDeviceBoundsCenterPoint
faceLandmarksShapeLayer.fillColor = nil
faceLandmarksShapeLayer.strokeColor = UIColor.white.withAlphaComponent(0.7).cgColor
faceLandmarksShapeLayer.lineWidth = 2
faceLandmarksShapeLayer.shadowOpacity = 0.7
faceLandmarksShapeLayer.shadowRadius = 2
overlayLayer.addSublayer(faceRectangleShapeLayer)
faceRectangleShapeLayer.addSublayer(faceLandmarksShapeLayer)
self.layer.addSublayer(overlayLayer)
self.detectionOverlayLayer = overlayLayer
self.detectedFaceRectangleShapeLayer = faceRectangleShapeLayer
self.detectedFaceLandmarksShapeLayer = faceLandmarksShapeLayer
self.updateLayerGeometry()
}
}

View File

@ -1,5 +1,8 @@
import UIKit
import AVFoundation
import Foundation
import Accelerate
import CoreImage
extension AVFrameRateRange {
func clamp(rate: Float64) -> Float64 {
@ -56,7 +59,7 @@ extension CMSampleBuffer {
}
}
- public extension AVCaptureVideoOrientation {
+ extension AVCaptureVideoOrientation {
init?(interfaceOrientation: UIInterfaceOrientation) {
switch interfaceOrientation {
case .portrait: self = .portrait
@ -68,7 +71,7 @@ public extension AVCaptureVideoOrientation {
}
}
- public extension CameraPreviewView.Rotation {
+ extension CameraPreviewView.Rotation {
init?(with interfaceOrientation: UIInterfaceOrientation, videoOrientation: AVCaptureVideoOrientation, cameraPosition: AVCaptureDevice.Position) {
switch videoOrientation {
case .portrait:
@ -175,3 +178,106 @@ public extension CameraPreviewView.Rotation {
}
}
func exifOrientationForDeviceOrientation(_ deviceOrientation: UIDeviceOrientation) -> CGImagePropertyOrientation {
switch deviceOrientation {
case .portraitUpsideDown:
return .rightMirrored
case .landscapeLeft:
return .downMirrored
case .landscapeRight:
return .upMirrored
default:
return .leftMirrored
}
}
/**
First crops the pixel buffer, then resizes it.
This function requires the caller to pass in both the source and destination
pixel buffers. The dimensions of the destination pixel buffer should be at least
`scaleWidth` x `scaleHeight` pixels.
*/
func resizePixelBuffer(from srcPixelBuffer: CVPixelBuffer,
to dstPixelBuffer: CVPixelBuffer,
cropX: Int,
cropY: Int,
cropWidth: Int,
cropHeight: Int,
scaleWidth: Int,
scaleHeight: Int) {
assert(CVPixelBufferGetWidth(dstPixelBuffer) >= scaleWidth)
assert(CVPixelBufferGetHeight(dstPixelBuffer) >= scaleHeight)
let srcFlags = CVPixelBufferLockFlags.readOnly
let dstFlags = CVPixelBufferLockFlags(rawValue: 0)
guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(srcPixelBuffer, srcFlags) else {
print("Error: could not lock source pixel buffer")
return
}
defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, srcFlags) }
guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(dstPixelBuffer, dstFlags) else {
print("Error: could not lock destination pixel buffer")
return
}
defer { CVPixelBufferUnlockBaseAddress(dstPixelBuffer, dstFlags) }
guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer),
let dstData = CVPixelBufferGetBaseAddress(dstPixelBuffer) else {
print("Error: could not get pixel buffer base address")
return
}
let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
let offset = cropY*srcBytesPerRow + cropX*4
var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
height: vImagePixelCount(cropHeight),
width: vImagePixelCount(cropWidth),
rowBytes: srcBytesPerRow)
let dstBytesPerRow = CVPixelBufferGetBytesPerRow(dstPixelBuffer)
var dstBuffer = vImage_Buffer(data: dstData,
height: vImagePixelCount(scaleHeight),
width: vImagePixelCount(scaleWidth),
rowBytes: dstBytesPerRow)
let error = vImageScale_ARGB8888(&srcBuffer, &dstBuffer, nil, vImage_Flags(0))
if error != kvImageNoError {
print("Error:", error)
}
}
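A minimal usage sketch of the vImage path above, assuming a 32BGRA source frame; the 480x480 destination is created ad hoc here (the commit itself draws buffers from a CVPixelBufferPool): center-crop to a square, then scale down.
import CoreVideo

func makeSquareThumbnail(from source: CVPixelBuffer) -> CVPixelBuffer? {
    var destination: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault, 480, 480, kCVPixelFormatType_32BGRA, nil, &destination)
    guard status == kCVReturnSuccess, let destination = destination else {
        return nil
    }
    let width = CVPixelBufferGetWidth(source)
    let height = CVPixelBufferGetHeight(source)
    let side = min(width, height)
    // Center-crop to a square region, then scale it into the 480x480 buffer.
    resizePixelBuffer(from: source, to: destination,
                      cropX: (width - side) / 2, cropY: (height - side) / 2,
                      cropWidth: side, cropHeight: side,
                      scaleWidth: 480, scaleHeight: 480)
    return destination
}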
/**
Resizes a CVPixelBuffer to a new width and height.
This function requires the caller to pass in both the source and destination
pixel buffers. The dimensions of the destination pixel buffer should be at least
`width` x `height` pixels.
*/
func resizePixelBuffer(from srcPixelBuffer: CVPixelBuffer,
to dstPixelBuffer: CVPixelBuffer,
width: Int, height: Int) {
resizePixelBuffer(from: srcPixelBuffer, to: dstPixelBuffer,
cropX: 0, cropY: 0,
cropWidth: CVPixelBufferGetWidth(srcPixelBuffer),
cropHeight: CVPixelBufferGetHeight(srcPixelBuffer),
scaleWidth: width, scaleHeight: height)
}
/**
Resizes a CVPixelBuffer to a new width and height, using Core Image.
*/
func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
width: Int, height: Int,
output: CVPixelBuffer, context: CIContext) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let sx = CGFloat(width) / CGFloat(CVPixelBufferGetWidth(pixelBuffer))
let sy = CGFloat(height) / CGFloat(CVPixelBufferGetHeight(pixelBuffer))
let scaleTransform = CGAffineTransform(scaleX: sx, y: sy)
let scaledImage = ciImage.transformed(by: scaleTransform)
context.render(scaledImage, to: output)
}

View File

@ -0,0 +1,163 @@
import Foundation
import AVKit
import Vision
final class FaceLandmarksDataOutput {
private var ciContext: CIContext?
private var detectionRequests: [VNDetectFaceRectanglesRequest]?
private var trackingRequests: [VNTrackObjectRequest]?
lazy var sequenceRequestHandler = VNSequenceRequestHandler()
var outputFaceObservations: (([VNFaceObservation]) -> Void)?
private var outputColorSpace: CGColorSpace?
private var outputPixelBufferPool: CVPixelBufferPool?
private(set) var outputFormatDescription: CMFormatDescription?
init() {
self.ciContext = CIContext()
self.prepareVisionRequest()
}
fileprivate func prepareVisionRequest() {
var requests = [VNTrackObjectRequest]()
let faceDetectionRequest = VNDetectFaceRectanglesRequest(completionHandler: { (request, error) in
if error != nil {
print("FaceDetection error: \(String(describing: error)).")
}
guard let faceDetectionRequest = request as? VNDetectFaceRectanglesRequest, let results = faceDetectionRequest.results else {
return
}
DispatchQueue.main.async {
for observation in results {
let faceTrackingRequest = VNTrackObjectRequest(detectedObjectObservation: observation)
requests.append(faceTrackingRequest)
}
self.trackingRequests = requests
}
})
self.detectionRequests = [faceDetectionRequest]
self.sequenceRequestHandler = VNSequenceRequestHandler()
}
func exifOrientationForCurrentDeviceOrientation() -> CGImagePropertyOrientation {
return exifOrientationForDeviceOrientation(UIDevice.current.orientation)
}
func process(sampleBuffer: CMSampleBuffer) {
var requestHandlerOptions: [VNImageOption: AnyObject] = [:]
let cameraIntrinsicData = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, attachmentModeOut: nil)
if cameraIntrinsicData != nil {
requestHandlerOptions[VNImageOption.cameraIntrinsics] = cameraIntrinsicData
}
guard let inputPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
print("Failed to obtain a CVPixelBuffer for the current output frame.")
return
}
let width = CGFloat(CVPixelBufferGetWidth(inputPixelBuffer))
let height = CGFloat(CVPixelBufferGetHeight(inputPixelBuffer))
if #available(iOS 13.0, *), outputPixelBufferPool == nil, let formatDescription = try? CMFormatDescription(videoCodecType: .pixelFormat_32BGRA, width: Int(width / 3.0), height: Int(height / 3.0)) {
(outputPixelBufferPool,
outputColorSpace,
outputFormatDescription) = allocateOutputBufferPool(with: formatDescription, outputRetainedBufferCountHint: 3)
}
var pbuf: CVPixelBuffer?
// The pool is only allocated on iOS 13+ above, so guard rather than force-unwrap it.
if let outputPixelBufferPool = self.outputPixelBufferPool {
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool, &pbuf)
}
guard let pixelBuffer = pbuf, let ciContext = self.ciContext else {
print("Allocation failure")
return
}
resizePixelBuffer(inputPixelBuffer, width: Int(width / 3.0), height: Int(height / 3.0), output: pixelBuffer, context: ciContext)
let exifOrientation = self.exifOrientationForCurrentDeviceOrientation()
guard let requests = self.trackingRequests, !requests.isEmpty else {
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: exifOrientation, options: requestHandlerOptions)
do {
guard let detectRequests = self.detectionRequests else {
return
}
try imageRequestHandler.perform(detectRequests)
} catch let error as NSError {
print("Failed to perform FaceRectangleRequest: \(String(describing: error)).")
}
return
}
do {
try self.sequenceRequestHandler.perform(requests, on: pixelBuffer, orientation: exifOrientation)
} catch let error as NSError {
print("Failed to perform SequenceRequest: \(String(describing: error)).")
}
var newTrackingRequests = [VNTrackObjectRequest]()
for trackingRequest in requests {
guard let results = trackingRequest.results else {
return
}
guard let observation = results[0] as? VNDetectedObjectObservation else {
return
}
if !trackingRequest.isLastFrame {
if observation.confidence > 0.3 {
trackingRequest.inputObservation = observation
} else {
trackingRequest.isLastFrame = true
}
newTrackingRequests.append(trackingRequest)
}
}
self.trackingRequests = newTrackingRequests
if newTrackingRequests.isEmpty {
DispatchQueue.main.async {
self.outputFaceObservations?([])
}
return
}
var faceLandmarkRequests = [VNDetectFaceLandmarksRequest]()
for trackingRequest in newTrackingRequests {
let faceLandmarksRequest = VNDetectFaceLandmarksRequest(completionHandler: { (request, error) in
if error != nil {
print("FaceLandmarks error: \(String(describing: error)).")
}
guard let landmarksRequest = request as? VNDetectFaceLandmarksRequest, let results = landmarksRequest.results else {
return
}
DispatchQueue.main.async {
self.outputFaceObservations?(results)
}
})
guard let trackingResults = trackingRequest.results else {
return
}
guard let observation = trackingResults[0] as? VNDetectedObjectObservation else {
return
}
let faceObservation = VNFaceObservation(boundingBox: observation.boundingBox)
faceLandmarksRequest.inputFaceObservations = [faceObservation]
faceLandmarkRequests.append(faceLandmarksRequest)
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: exifOrientation, options: requestHandlerOptions)
do {
try imageRequestHandler.perform(faceLandmarkRequests)
} catch let error as NSError {
print("Failed to perform FaceLandmarkRequest: \(String(describing: error)).")
}
}
}
}
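The wiring mirrors what CameraOutput does earlier in this commit: sample buffers go in on a dedicated background queue, and observations come back on the main queue. A condensed sketch, with the queue and preview-view names assumed:
let faceLandmarksOutput = FaceLandmarksDataOutput()
let faceLandmarksQueue = DispatchQueue(label: "face-landmarks")

faceLandmarksOutput.outputFaceObservations = { observations in
    // Delivered on the main queue; hand the observations to the preview.
    previewView.drawFaceObservations(observations)
}

// Inside captureOutput(_:didOutput:from:), when face landmarks are enabled:
faceLandmarksQueue.async {
    faceLandmarksOutput.process(sampleBuffer: sampleBuffer)
}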

View File

@ -13,7 +13,7 @@ public protocol TabBarController: ViewController {
var controllers: [ViewController] { get }
var selectedIndex: Int { get set }
- var cameraItem: UITabBarItem? { get set }
+ var cameraItemAndAction: (item: UITabBarItem, action: () -> Void)? { get set }
func setControllers(_ controllers: [ViewController], selectedIndex: Int?)

View File

@ -7,6 +7,7 @@
@interface TGCameraCapturedPhoto : NSObject <TGMediaEditableItem, TGMediaSelectableItem>
@property (nonatomic, readonly) UIImage *existingImage;
@property (nonatomic, readonly) NSURL *url;
@property (nonatomic, readonly) PGCameraShotMetadata *metadata;
@property (nonatomic, readonly) PGRectangle *rectangle;

View File

@ -9,7 +9,6 @@
NSString *_identifier;
CGSize _dimensions;
UIImage *_existingImage;
SVariable *_thumbnail;
UIImage *_thumbImage;
}

View File

@ -98,7 +98,13 @@ public func legacyWallpaperEditor(context: AccountContext, item: TGMediaEditable
})
}
- public func legacyFullMediaEditor(context: AccountContext, item: TGMediaEditableItem & TGMediaSelectableItem, getCaptionPanelView: @escaping () -> TGCaptionPanelView?, sendMessagesWithSignals: @escaping ([Any]?, Bool, Int32) -> Void, present: @escaping (ViewController, Any?) -> Void) {
+ public enum StoryMediaEditorResult {
+ case image(UIImage)
+ case video(String)
+ case asset(PHAsset)
+ }
+ public func legacyStoryMediaEditor(context: AccountContext, item: TGMediaEditableItem & TGMediaSelectableItem, getCaptionPanelView: @escaping () -> TGCaptionPanelView?, completion: @escaping (StoryMediaEditorResult) -> Void, present: @escaping (ViewController, Any?) -> Void) {
let paintStickersContext = LegacyPaintStickersContext(context: context)
paintStickersContext.captionPanelView = {
return getCaptionPanelView()
@ -123,18 +129,27 @@ public func legacyFullMediaEditor(context: AccountContext, item: TGMediaEditable
present(legacyController, nil)
- TGPhotoVideoEditor.present(with: legacyController.context, controller: emptyController, caption: NSAttributedString(), withItem: item, paint: false, adjustments: false, recipientName: "Story", stickersContext: paintStickersContext, from: .zero, mainSnapshot: nil, snapshots: [] as [Any], immediate: true, appeared: {
+ TGPhotoVideoEditor.present(with: legacyController.context, controller: emptyController, caption: NSAttributedString(), withItem: item, paint: false, adjustments: false, recipientName: "", stickersContext: paintStickersContext, from: .zero, mainSnapshot: nil, snapshots: [] as [Any], immediate: true, appeared: {
}, completion: { result, editingContext in
- let nativeGenerator = legacyAssetPickerItemGenerator()
- var selectableResult: TGMediaSelectableItem?
- if let result = result {
- selectableResult = unsafeDowncast(result, to: TGMediaSelectableItem.self)
+ var completionResult: Signal<StoryMediaEditorResult, NoError>
+ if let photo = result as? TGCameraCapturedPhoto {
+ if let _ = editingContext?.adjustments(for: result) {
+ completionResult = .single(.image(photo.existingImage))
+ } else {
+ completionResult = .single(.image(photo.existingImage))
+ }
+ } else if let video = result as? TGCameraCapturedVideo {
+ completionResult = .single(.video(video.immediateAVAsset.url.absoluteString))
+ } else if let asset = result as? TGMediaAsset {
+ completionResult = .single(.asset(asset.backingAsset))
+ } else {
+ completionResult = .complete()
+ }
- let signals = TGCameraController.resultSignals(for: nil, editingContext: editingContext, currentItem: selectableResult, storeAssets: false, saveEditedPhotos: false, descriptionGenerator: { _1, _2, _3 in
- nativeGenerator(_1, _2, _3, nil)
+ let _ = (completionResult
+ |> deliverOnMainQueue).start(next: { value in
+ completion(value)
+ })
- sendMessagesWithSignals(signals, false, 0)
}, dismissed: { [weak legacyController] in
legacyController?.dismiss()
})

View File

@ -25,6 +25,7 @@ func assetImage(asset: PHAsset, targetSize: CGSize, exact: Bool, deliveryMode: P
options.resizeMode = .exact
}
options.isSynchronous = synchronous
options.isNetworkAccessAllowed = true
let token = imageManager.requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFill, options: options) { (image, info) in
var degraded = false

View File

@ -1030,6 +1030,9 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
}
func containerLayoutUpdated(_ layout: ContainerViewLayout, navigationBarHeight: CGFloat, transition: ContainedViewLayoutTransition) {
guard let controller = self.controller else {
return
}
let firstTime = self.validLayout == nil
self.validLayout = (layout, navigationBarHeight)
@ -1067,7 +1070,7 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
}
var bannedSendMedia: (Int32, Bool)?
- if let bannedSendPhotos = self.controller?.bannedSendPhotos, let bannedSendVideos = self.controller?.bannedSendVideos {
+ if let bannedSendPhotos = controller.bannedSendPhotos, let bannedSendVideos = controller.bannedSendVideos {
bannedSendMedia = (max(bannedSendPhotos.0, bannedSendVideos.0), bannedSendPhotos.1 || bannedSendVideos.1)
}
@ -1151,7 +1154,12 @@ public final class MediaPickerScreen: ViewController, AttachmentContainable {
transition.updateFrame(node: self.containerNode, frame: CGRect(origin: CGPoint(), size: CGSize(width: bounds.width, height: bounds.height)))
- self.gridNode.transaction(GridNodeTransaction(deleteItems: [], insertItems: [], updateItems: [], scrollToItem: nil, updateLayout: GridNodeUpdateLayout(layout: GridNodeLayout(size: bounds.size, insets: gridInsets, scrollIndicatorInsets: nil, preloadSize: itemWidth, type: .fixed(itemSize: CGSize(width: itemWidth, height: itemWidth), fillWidth: true, lineSpacing: itemSpacing, itemSpacing: itemSpacing), cutout: cameraRect), transition: transition), itemTransition: .immediate, stationaryItems: .none, updateFirstIndexInSectionOffset: nil, updateOpaqueState: nil, synchronousLoads: false), completion: { [weak self] _ in
+ var itemHeight = itemWidth
+ if case let .assets(_, mode) = controller.subject, case .story = mode {
+ itemHeight = 180.0
+ }
+ self.gridNode.transaction(GridNodeTransaction(deleteItems: [], insertItems: [], updateItems: [], scrollToItem: nil, updateLayout: GridNodeUpdateLayout(layout: GridNodeLayout(size: bounds.size, insets: gridInsets, scrollIndicatorInsets: nil, preloadSize: itemHeight * 3.0, type: .fixed(itemSize: CGSize(width: itemWidth, height: itemHeight), fillWidth: true, lineSpacing: itemSpacing, itemSpacing: itemSpacing), cutout: cameraRect), transition: transition), itemTransition: .immediate, stationaryItems: .none, updateFirstIndexInSectionOffset: nil, updateOpaqueState: nil, synchronousLoads: false), completion: { [weak self] _ in
guard let strongSelf = self else {
return
}

View File

@ -11,14 +11,14 @@ public final class MatrixView: MTKView, MTKViewDelegate, PhoneDemoDecorationView
private let commandQueue: MTLCommandQueue
private let drawPassthroughPipelineState: MTLRenderPipelineState
- private var displayLink: CADisplayLink?
private let symbolTexture: MTLTexture
private let randomTexture: MTLTexture
private var viewportDimensions = CGSize(width: 1, height: 1)
+ private var displayLink: SharedDisplayLinkDriver.Link?
private var startTimestamp = CACurrentMediaTime()
public init?(test: Bool) {
@ -86,24 +86,11 @@ public final class MatrixView: MTKView, MTKViewDelegate, PhoneDemoDecorationView
self.backgroundColor = .clear
self.framebufferOnly = true
- class DisplayLinkProxy: NSObject {
- weak var target: MatrixView?
- init(target: MatrixView) {
- self.target = target
- }
- @objc func displayLinkEvent() {
- self.target?.displayLinkEvent()
- }
+ self.displayLink = SharedDisplayLinkDriver.shared.add { [weak self] in
+ self?.tick()
+ }
- self.displayLink = CADisplayLink(target: DisplayLinkProxy(target: self), selector: #selector(DisplayLinkProxy.displayLinkEvent))
- if #available(iOS 15.0, *) {
- self.displayLink?.preferredFrameRateRange = CAFrameRateRange(minimum: 60.0, maximum: 60.0, preferred: 60.0)
- }
- self.displayLink?.add(to: .main, forMode: .common)
- self.displayLink?.isPaused = false
+ self.displayLink?.isPaused = true
self.isPaused = true
}
@ -137,7 +124,7 @@ public final class MatrixView: MTKView, MTKViewDelegate, PhoneDemoDecorationView
}
- @objc private func displayLinkEvent() {
+ private func tick() {
self.draw()
}
@ -145,7 +132,6 @@ public final class MatrixView: MTKView, MTKViewDelegate, PhoneDemoDecorationView
self.redraw(drawable: self.currentDrawable!)
}
private func redraw(drawable: MTLDrawable) {
guard let commandBuffer = self.commandQueue.makeCommandBuffer() else {
return

View File

@ -130,7 +130,7 @@ open class TabBarControllerImpl: ViewController, TabBarController {
private var navigationBarPresentationData: NavigationBarPresentationData
private var theme: TabBarControllerTheme
- public var middleItemAction: () -> Void = {}
+ public var cameraItemAndAction: (item: UITabBarItem, action: () -> Void)?
public init(navigationBarPresentationData: NavigationBarPresentationData, theme: TabBarControllerTheme) {
self.navigationBarPresentationData = navigationBarPresentationData
@ -202,12 +202,14 @@ open class TabBarControllerImpl: ViewController, TabBarController {
self.displayNode = TabBarControllerNode(theme: self.theme, navigationBarPresentationData: self.navigationBarPresentationData, itemSelected: { [weak self] index, longTap, itemNodes in
if let strongSelf = self {
var index = index
- if strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.count == 5 {
- if index == 2 {
- strongSelf.middleItemAction()
- return
- } else if index > 2 {
- index -= 1
+ if let (cameraItem, cameraAction) = strongSelf.cameraItemAndAction {
+ if let cameraItemIndex = strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.firstIndex(where: { $0.item === cameraItem }) {
+ if index == cameraItemIndex {
+ cameraAction()
+ return
+ } else if index > cameraItemIndex {
+ index -= 1
+ }
+ }
+ }
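// Example: with four controllers and the camera item occupying tab index 2,
// taps on tab indices 3 and 4 resolve to controllers[2] and controllers[3].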
if longTap, let controller = strongSelf.controllers[index] as? TabBarContainedController {
@ -277,14 +279,34 @@ open class TabBarControllerImpl: ViewController, TabBarController {
guard let strongSelf = self else {
return
}
- if index >= 0 && index < strongSelf.controllers.count {
+ if index >= 0 && index < strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.count {
+ var index = index
+ if let (cameraItem, _) = strongSelf.cameraItemAndAction {
+ if let cameraItemIndex = strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.firstIndex(where: { $0.item === cameraItem }) {
+ if index == cameraItemIndex {
+ return
+ } else if index > cameraItemIndex {
+ index -= 1
+ }
+ }
+ }
strongSelf.controllers[index].tabBarItemContextAction(sourceNode: node, gesture: gesture)
}
}, swipeAction: { [weak self] index, direction in
guard let strongSelf = self else {
return
}
- if index >= 0 && index < strongSelf.controllers.count {
+ if index >= 0 && index < strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.count {
+ var index = index
+ if let (cameraItem, _) = strongSelf.cameraItemAndAction {
+ if let cameraItemIndex = strongSelf.tabBarControllerNode.tabBarNode.tabBarItems.firstIndex(where: { $0.item === cameraItem }) {
+ if index == cameraItemIndex {
+ return
+ } else if index > cameraItemIndex {
+ index -= 1
+ }
+ }
+ }
strongSelf.controllers[index].tabBarItemSwipeAction(direction: direction)
}
}, toolbarActionSelected: { [weak self] action in
@ -308,15 +330,15 @@ open class TabBarControllerImpl: ViewController, TabBarController {
return
}
- if self.tabBarControllerNode.tabBarNode.tabBarItems.count == 5 {
- var selectedIndex = self.selectedIndex
- if selectedIndex >= 2 {
- selectedIndex += 1
+ var tabBarSelectedIndex = self.selectedIndex
+ if let (cameraItem, _) = self.cameraItemAndAction {
+ if let cameraItemIndex = self.tabBarControllerNode.tabBarNode.tabBarItems.firstIndex(where: { $0.item === cameraItem }) {
+ if tabBarSelectedIndex >= cameraItemIndex {
+ tabBarSelectedIndex += 1
+ }
+ }
- self.tabBarControllerNode.tabBarNode.selectedIndex = selectedIndex
- } else {
- self.tabBarControllerNode.tabBarNode.selectedIndex = self.selectedIndex
- }
+ self.tabBarControllerNode.tabBarNode.selectedIndex = tabBarSelectedIndex
if let currentController = self.currentController {
currentController.willMove(toParent: nil)
@ -412,9 +434,7 @@ open class TabBarControllerImpl: ViewController, TabBarController {
currentController.viewDidDisappear(animated)
}
}
- public var cameraItem: UITabBarItem?
public func setControllers(_ controllers: [ViewController], selectedIndex: Int?) {
var updatedSelectedIndex: Int? = selectedIndex
if updatedSelectedIndex == nil, let selectedIndex = self._selectedIndex, selectedIndex < self.controllers.count {
@ -427,8 +447,8 @@ open class TabBarControllerImpl: ViewController, TabBarController {
self.controllers = controllers
var tabBarItems = self.controllers.map({ TabBarNodeItem(item: $0.tabBarItem, contextActionType: $0.tabBarItemContextActionType) })
- if tabBarItems.count == 4, let cameraItem = self.cameraItem {
- tabBarItems.insert(TabBarNodeItem(item: cameraItem, contextActionType: .none), at: 2)
+ if let (cameraItem, _) = self.cameraItemAndAction {
+ tabBarItems.insert(TabBarNodeItem(item: cameraItem, contextActionType: .none), at: Int(floor(CGFloat(controllers.count) / 2)))
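// With the usual four tab controllers this resolves to index 2, i.e. the
// center slot of the resulting five-item bar.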
}
self.tabBarControllerNode.tabBarNode.tabBarItems = tabBarItems

View File

@ -1137,13 +1137,11 @@ func _internal_performPasswordRecovery(network: Network, code: String, updatedPa
}
func _internal_invalidateLoginCodes(network: Network, codes: [String]) -> Signal<Never, NoError> {
- return network.request(Api.functions.account.invalidateLoginCodes(codes: codes))
+ return network.request(Api.functions.account.invalidateSignInCodes(codes: codes))
+ |> ignoreValues
|> `catch` { _ -> Signal<Never, NoError> in
return .never()
}
- |> mapToSignal { _ in
- return .complete()
- }
}
public enum AccountResetError {

View File

@ -198,6 +198,10 @@ public extension TelegramEngine {
public func requestCancelAccountReset(phoneCodeHash: String, phoneCode: String) -> Signal<Never, CancelAccountResetError> {
return _internal_requestCancelAccountReset(network: self.account.network, phoneCodeHash: phoneCodeHash, phoneCode: phoneCode)
}
public func invalidateLoginCodes(codes: [String]) -> Signal<Never, NoError> {
return _internal_invalidateLoginCodes(network: self.account.network, codes: codes)
}
}
}

View File

@ -16,61 +16,54 @@ vertex RasterizerData cameraBlobVertex
uint vertexID [[ vertex_id ]]
) {
RasterizerData out;
out.position = vector_float4(vertexArray[vertexID].position[0], vertexArray[vertexID].position[1], 0.0, 1.0);
return out;
}
- #define BINDING_DIST .15
- #define AA_RADIUS 2.
+ #define BindingDistance 0.25
+ #define AARadius 2.0
float smin(float a, float b, float k) {
float h = clamp(0.5 + 0.5 * (a - b) / k, 0.0, 1.0);
return mix(a, b, h) - k * h * (1.0 - h);
}
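// Polynomial smooth minimum (the well-known Inigo Quilez formulation): unlike
// a hard min(a, b), the two distance fields blend across a band of width k,
// which is what makes the blob shapes visually merge as they approach.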
- float sdist_disk(float2 uv, float2 position, float radius) {
- return length(uv - position) - radius;
- }
- float sdist_rect(float2 uv, float2 position, float size, float radius){
+ float sdfRoundedRectangle(float2 uv, float2 position, float size, float radius) {
float2 q = abs(uv - position) - size + radius;
return length(max(q, 0.0)) + min(max(q.x, q.y), 0.0) - radius;
}
- float map(float2 uv, float2 diskPos, float2 rectPos) {
- float disk = sdist_disk(uv, diskPos, 0.2);
- float rect = sdist_rect(uv, rectPos, 0.15, 0.15);
- float metaballs = 1.0;
- metaballs = smin(metaballs, disk, BINDING_DIST);
- metaballs = smin(metaballs, rect, BINDING_DIST);
- return metaballs;
+ float sdfCircle(float2 uv, float2 position, float radius) {
+ return length(uv - position) - radius;
+ }
- float mod(float x, float y) {
- return x - y * floor(x / y);
+ float map(float2 uv, float4 primaryParameters, float2 secondaryParameters) {
+ float primary = sdfRoundedRectangle(uv, float2(primaryParameters.y, 0.0), primaryParameters.x, primaryParameters.w);
+ float secondary = sdfCircle(uv, float2(secondaryParameters.y, 0.0), secondaryParameters.x);
+ float metaballs = 1.0;
+ metaballs = smin(metaballs, primary, BindingDistance);
+ metaballs = smin(metaballs, secondary, BindingDistance);
+ return metaballs;
}
fragment half4 cameraBlobFragment(RasterizerData in[[stage_in]],
constant uint2 &resolution[[buffer(0)]],
- constant float &time[[buffer(1)]])
+ constant float4 &primaryParameters[[buffer(1)]],
+ constant float2 &secondaryParameters[[buffer(2)]])
{
- float finalTime = mod(time * 1.5, 3.0);
float2 R = float2(resolution.x, resolution.y);
float2 uv = (2.0 * in.position.xy - R.xy) / R.y;
- float t = AA_RADIUS / resolution.y;
+ float t = AARadius / resolution.y;
- float2 diskPos = float2(0.1, 0.4);
- float2 rectPos = float2(0.2 - 0.3 * finalTime, 0.4);
- float cAlpha = 0.0;
- if (finalTime > 1.5) {
- cAlpha = min(1.0, (finalTime - 1.5) * 1.75);
+ float cAlpha = 1.0 - primaryParameters.z;
+ float bound = primaryParameters.x + 0.05;
+ if (abs(uv.x) > bound) {
+ cAlpha = mix(0.0, 1.0, min(1.0, (abs(uv.x) - bound) * 2.4));
+ }
+ float c = smoothstep(t, -t, map(uv, primaryParameters, secondaryParameters));
- float c = smoothstep(t, -t, map(uv, diskPos, rectPos));
- return half4(c, cAlpha * c, cAlpha * c, c);
+ return half4(c, max(cAlpha, 0.231), max(cAlpha, 0.188), c);
}
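For reference, a sketch of how the Swift side of the renderer might bind these buffers; the encoder and the parameter packing are assumptions (the ShutterBlobView renderer itself is not part of this diff), and only the buffer indices 0, 1 and 2 come from the shader above.
var resolution = SIMD2<UInt32>(UInt32(drawableSize.width), UInt32(drawableSize.height))
// Assumed packing: (size, offsetX, transition, cornerRadius) for the primary shape.
var primaryParameters = SIMD4<Float>(0.63, 0.0, 0.0, 0.63)
// Assumed packing: (radius, offsetX) for the secondary circle.
var secondaryParameters = SIMD2<Float>(0.34, 0.0)
encoder.setFragmentBytes(&resolution, length: MemoryLayout<SIMD2<UInt32>>.stride, index: 0)
encoder.setFragmentBytes(&primaryParameters, length: MemoryLayout<SIMD4<Float>>.stride, index: 1)
encoder.setFragmentBytes(&secondaryParameters, length: MemoryLayout<SIMD2<Float>>.stride, index: 2)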

View File

@ -69,19 +69,22 @@ private final class CameraScreenComponent: CombinedComponent {
let present: (ViewController) -> Void
let push: (ViewController) -> Void
let completion: (CameraScreen.Result) -> Void
let shootAction: ActionSlot<Void>
init(
context: AccountContext,
camera: Camera,
present: @escaping (ViewController) -> Void,
push: @escaping (ViewController) -> Void,
- completion: @escaping (CameraScreen.Result) -> Void
+ completion: @escaping (CameraScreen.Result) -> Void,
+ shootAction: ActionSlot<Void>
) {
self.context = context
self.camera = camera
self.present = present
self.push = push
self.completion = completion
self.shootAction = shootAction
}
static func ==(lhs: CameraScreenComponent, rhs: CameraScreenComponent) -> Bool {
@ -120,6 +123,7 @@ private final class CameraScreenComponent: CombinedComponent {
fileprivate let camera: Camera
private let present: (ViewController) -> Void
private let completion: (CameraScreen.Result) -> Void
private let shootAction: ActionSlot<Void>
private var cameraStateDisposable: Disposable?
private var resultDisposable = MetaDisposable()
@ -131,11 +135,12 @@ private final class CameraScreenComponent: CombinedComponent {
var cameraState = CameraState(mode: .photo, flashMode: .off, recording: .none, duration: 0.0)
var swipeHint: CaptureControlsComponent.SwipeHint = .none
- init(context: AccountContext, camera: Camera, present: @escaping (ViewController) -> Void, completion: @escaping (CameraScreen.Result) -> Void) {
+ init(context: AccountContext, camera: Camera, present: @escaping (ViewController) -> Void, completion: @escaping (CameraScreen.Result) -> Void, shootAction: ActionSlot<Void>) {
self.context = context
self.camera = camera
self.present = present
self.completion = completion
self.shootAction = shootAction
self.mediaAssetsContext = MediaAssetsContext()
@ -185,11 +190,11 @@ private final class CameraScreenComponent: CombinedComponent {
if let self {
switch value {
case .began:
- print("blink")
+ self.shootAction.invoke(Void())
case let .finished(image):
self.completion(.image(image))
case .failed:
- print("failed")
+ break
}
}
}))
@ -201,7 +206,7 @@ private final class CameraScreenComponent: CombinedComponent {
|> deliverOnMainQueue).start(next: { [weak self] duration in
if let self {
self.cameraState = self.cameraState.updatedDuration(duration)
- self.updated(transition: .immediate)
+ self.updated(transition: .easeInOut(duration: 0.1))
}
}))
self.updated(transition: .spring(duration: 0.4))
@ -225,7 +230,7 @@ private final class CameraScreenComponent: CombinedComponent {
}
func makeState() -> State {
- return State(context: self.context, camera: self.camera, present: self.present, completion: self.completion)
+ return State(context: self.context, camera: self.camera, present: self.present, completion: self.completion, shootAction: self.shootAction)
}
static var body: Body {
@ -263,7 +268,7 @@ private final class CameraScreenComponent: CombinedComponent {
guard let controller = controller() as? CameraScreen else {
return
}
- controller.dismiss(animated: true)
+ controller.requestDismiss(animated: true)
}
).tagged(cancelButtonTag),
availableSize: CGSize(width: 40.0, height: 40.0),
@ -301,18 +306,25 @@ private final class CameraScreenComponent: CombinedComponent {
)
}
- let zoomControl = zoomControl.update(
- component: ZoomComponent(
- availableValues: state.camera.metrics.zoomLevels,
- value: 1.0,
- tag: zoomControlTag
- ),
- availableSize: context.availableSize,
- transition: context.transition
- )
- context.add(zoomControl
- .position(CGPoint(x: context.availableSize.width / 2.0, y: availableSize.height - zoomControl.size.height / 2.0 - 187.0 - environment.safeInsets.bottom))
- )
+ if case .holding = state.cameraState.recording {
+ } else {
+ let _ = zoomControl
+ // let zoomControl = zoomControl.update(
+ // component: ZoomComponent(
+ // availableValues: state.camera.metrics.zoomLevels,
+ // value: 1.0,
+ // tag: zoomControlTag
+ // ),
+ // availableSize: context.availableSize,
+ // transition: context.transition
+ // )
+ // context.add(zoomControl
+ // .position(CGPoint(x: context.availableSize.width / 2.0, y: availableSize.height - zoomControl.size.height / 2.0 - 187.0 - environment.safeInsets.bottom))
+ // .appear(.default(alpha: true))
+ // .disappear(.default(alpha: true))
+ // )
+ }
let shutterState: ShutterButtonState
switch state.cameraState.recording {
@ -373,9 +385,14 @@ private final class CameraScreenComponent: CombinedComponent {
state.camera.togglePosition()
},
galleryTapped: {
+ var dismissGalleryControllerImpl: (() -> Void)?
let controller = accountContext.sharedContext.makeMediaPickerScreen(context: accountContext, completion: { asset in
+ dismissGalleryControllerImpl?()
completion(.asset(asset))
})
+ dismissGalleryControllerImpl = { [weak controller] in
+ controller?.dismiss(animated: false)
+ }
push(controller)
},
swipeHintUpdated: { hint in
@ -503,10 +520,12 @@ public class CameraScreen: ViewController {
private let context: AccountContext
private let updateState: ActionSlot<CameraState>
private let backgroundEffectView: UIVisualEffectView
private let backgroundDimView: UIView
fileprivate let componentHost: ComponentView<ViewControllerComponentContainer.Environment>
private let previewContainerView: UIView
fileprivate let previewView: CameraPreviewView
fileprivate let blurView: UIVisualEffectView
fileprivate let previewBlurView: UIVisualEffectView
fileprivate let camera: Camera
private var presentationData: PresentationData
@ -515,21 +534,28 @@ public class CameraScreen: ViewController {
private var changingPositionDisposable: Disposable?
private let shootAction: ActionSlot<Void>
init(controller: CameraScreen) {
self.controller = controller
self.context = controller.context
self.updateState = ActionSlot<CameraState>()
self.shootAction = ActionSlot<Void>()
self.presentationData = self.context.sharedContext.currentPresentationData.with { $0 }
self.backgroundEffectView = UIVisualEffectView(effect: nil)
self.backgroundDimView = UIView()
self.backgroundDimView.backgroundColor = UIColor(rgb: 0x000000)
self.componentHost = ComponentView<ViewControllerComponentContainer.Environment>()
self.previewContainerView = UIView()
self.previewContainerView.clipsToBounds = true
self.previewContainerView.layer.cornerRadius = 12.0
self.blurView = UIVisualEffectView(effect: nil)
self.blurView.isUserInteractionEnabled = false
self.previewBlurView = UIVisualEffectView(effect: nil)
self.previewBlurView.isUserInteractionEnabled = false
if let holder = controller.holder {
self.previewView = holder.previewView
@ -543,11 +569,14 @@ public class CameraScreen: ViewController {
super.init()
- self.backgroundColor = .black
+ self.backgroundColor = .clear
self.view.addSubview(self.backgroundEffectView)
self.view.addSubview(self.backgroundDimView)
self.view.addSubview(self.previewContainerView)
self.previewContainerView.addSubview(self.previewView)
self.previewContainerView.addSubview(self.blurView)
self.previewContainerView.addSubview(self.previewBlurView)
self.changingPositionDisposable = (self.camera.changingPosition
|> deliverOnMainQueue).start(next: { [weak self] value in
@ -555,14 +584,18 @@ public class CameraScreen: ViewController {
UIView.animate(withDuration: 0.5) {
if value {
if #available(iOS 13.0, *) {
- self.blurView.effect = UIBlurEffect(style: .systemThinMaterialDark)
+ self.previewBlurView.effect = UIBlurEffect(style: .systemThinMaterialDark)
}
} else {
- self.blurView.effect = nil
+ self.previewBlurView.effect = nil
}
}
}
})
self.shootAction.connect { [weak self] _ in
self?.previewView.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.15)
}
}
deinit {
@ -577,6 +610,9 @@ public class CameraScreen: ViewController {
let pinchGestureRecognizer = UIPinchGestureRecognizer(target: self, action: #selector(self.handlePinch(_:)))
self.previewView.addGestureRecognizer(pinchGestureRecognizer)
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
self.previewView.addGestureRecognizer(panGestureRecognizer)
}
@objc private func handlePinch(_ gestureRecognizer: UIPinchGestureRecognizer) {
@ -590,30 +626,68 @@ public class CameraScreen: ViewController {
break
}
}
private var panStartLocation: CGPoint?
@objc private func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
let location = gestureRecognizer.location(in: self.view)
switch gestureRecognizer.state {
case .began:
self.panStartLocation = location
case .changed:
guard let _ = self.panStartLocation else {
return
}
// let translation = location.y - panStartLocation.y
default:
break
}
}
func animateIn() {
guard let layout = self.validLayout else {
return
}
// if let view = self.componentHost.findTaggedView(tag: topGradientTag) {
// view.layer.animateAlpha(from: 0.0, to: 1.0, duration: 0.3)
// }
self.backgroundDimView.alpha = 0.0
UIView.animate(withDuration: 0.4, animations: {
self.backgroundEffectView.effect = UIBlurEffect(style: .dark)
self.backgroundDimView.alpha = 1.0
}, completion: { _ in
self.backgroundEffectView.isHidden = true
})
self.camera.focus(at: CGPoint(x: 0.5, y: 0.5))
self.camera.startCapture()
self.layer.animatePosition(from: CGPoint(x: 0.0, y: layout.size.height), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
self.previewContainerView.layer.animatePosition(from: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 22.0), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
self.componentHost.view?.layer.animatePosition(from: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 22.0), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, additive: true)
self.previewContainerView.layer.animateScale(from: 0.2, to: 1.0, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
self.componentHost.view?.layer.animateScale(from: 0.2, to: 1.0, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
}
func animateOut(completion: @escaping () -> Void) {
// if let (layout, orientation) = self.validLayout {
// self.containerLayoutUpdated(layout: layout, orientation: orientation, animateOut: true, transition: .easeInOut(duration: 0.2))
// }
//
// if let view = self.componentHost.findTaggedView(tag: topGradientTag) {
// view.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.3, removeOnCompletion: false)
// }
guard let layout = self.validLayout else {
return
}
self.backgroundEffectView.isHidden = false
//self.layer.animatePosition(from: CGPoint(x: 0.0, y: self.frame.height), to: .zero, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring)
UIView.animate(withDuration: 0.25, animations: {
self.backgroundEffectView.effect = nil
self.backgroundDimView.alpha = 0.0
})
self.previewContainerView.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 8.0), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true)
self.componentHost.view?.layer.animatePosition(from: .zero, to: CGPoint(x: 0.0, y: layout.size.height / 2.0 - layout.intrinsicInsets.bottom - 8.0), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false, additive: true, completion: { _ in
completion()
})
self.previewContainerView.layer.animateScale(from: 1.0, to: 0.01, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
self.previewContainerView.layer.animateBounds(from: self.previewContainerView.bounds, to: CGRect(origin: .zero, size: CGSize(width: self.previewContainerView.bounds.width, height: self.previewContainerView.bounds.width)), duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
let transition = ContainedViewLayoutTransition.animated(duration: 0.4, curve: .spring)
transition.updateCornerRadius(layer: self.previewContainerView.layer, cornerRadius: self.previewContainerView.bounds.width / 2.0)
self.componentHost.view?.layer.animateScale(from: 1.0, to: 0.2, duration: 0.4, timingFunction: kCAMediaTimingFunctionSpring, removeOnCompletion: false)
self.componentHost.view?.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.15, removeOnCompletion: false)
self.previewContainerView.layer.animateAlpha(from: 1.0, to: 0.0, duration: 0.35, removeOnCompletion: false)
}
override func hitTest(_ point: CGPoint, with event: UIEvent?) -> UIView? {
@ -674,7 +748,8 @@ public class CameraScreen: ViewController {
push: { [weak self] c in
self?.controller?.push(c)
},
- completion: controller.completion
+ completion: controller.completion,
+ shootAction: self.shootAction
)
),
environment: {
@ -685,7 +760,7 @@ public class CameraScreen: ViewController {
)
if let componentView = self.componentHost.view {
if componentView.superview == nil {
- self.view.insertSubview(componentView, at: 1)
+ self.view.insertSubview(componentView, at: 3)
componentView.clipsToBounds = true
}
@ -697,11 +772,14 @@ public class CameraScreen: ViewController {
}
}
transition.setFrame(view: self.backgroundDimView, frame: CGRect(origin: .zero, size: layout.size))
transition.setFrame(view: self.backgroundEffectView, frame: CGRect(origin: .zero, size: layout.size))
let previewSize = CGSize(width: layout.size.width, height: floorToScreenPixels(layout.size.width * 1.77778))
let previewFrame = CGRect(origin: CGPoint(x: 0.0, y: 60.0), size: previewSize)
transition.setFrame(view: self.previewContainerView, frame: previewFrame)
transition.setFrame(view: self.previewView, frame: CGRect(origin: .zero, size: previewFrame.size))
- transition.setFrame(view: self.blurView, frame: CGRect(origin: .zero, size: previewFrame.size))
+ transition.setFrame(view: self.previewBlurView, frame: CGRect(origin: .zero, size: previewFrame.size))
}
}
@ -738,6 +816,22 @@ public class CameraScreen: ViewController {
super.displayNodeDidLoad()
}
private var isDismissed = false
fileprivate func requestDismiss(animated: Bool) {
guard !self.isDismissed else {
return
}
self.statusBar.statusBarStyle = .Ignore
self.isDismissed = true
if animated {
self.node.animateOut(completion: {
self.dismiss(animated: false)
})
} else {
self.dismiss(animated: false)
}
}
override public func containerLayoutUpdated(_ layout: ContainerViewLayout, transition: ContainedViewLayoutTransition) {
super.containerLayoutUpdated(layout, transition: transition)

View File

@ -27,20 +27,29 @@ private extension SimpleShapeLayer {
private final class ShutterButtonContentComponent: Component {
let shutterState: ShutterButtonState
let blobState: ShutterBlobView.BlobState
let highlightedAction: ActionSlot<Bool>
let updateOffset: ActionSlot<(CGFloat, Transition)>
init(
shutterState: ShutterButtonState,
- highlightedAction: ActionSlot<Bool>
+ blobState: ShutterBlobView.BlobState,
+ highlightedAction: ActionSlot<Bool>,
+ updateOffset: ActionSlot<(CGFloat, Transition)>
) {
self.shutterState = shutterState
self.blobState = blobState
self.highlightedAction = highlightedAction
self.updateOffset = updateOffset
}
static func ==(lhs: ShutterButtonContentComponent, rhs: ShutterButtonContentComponent) -> Bool {
if lhs.shutterState != rhs.shutterState {
return false
}
if lhs.blobState != rhs.blobState {
return false
}
return true
}
@ -48,17 +57,22 @@ private final class ShutterButtonContentComponent: Component {
private var component: ShutterButtonContentComponent?
private let ringLayer = SimpleShapeLayer()
- private let innerLayer = SimpleLayer()
+ var blobView: ShutterBlobView!
+ //private let innerLayer = SimpleLayer()
private let progressLayer = SimpleShapeLayer()
init() {
super.init(frame: CGRect())
self.blobView = ShutterBlobView(test: false)
self.layer.allowsGroupOpacity = true
self.progressLayer.strokeEnd = 0.0
self.layer.addSublayer(self.ringLayer)
- self.layer.addSublayer(self.innerLayer)
self.layer.addSublayer(self.progressLayer)
+ self.addSubview(self.blobView)
}
required init?(coder aDecoder: NSCoder) {
@ -68,7 +82,7 @@ private final class ShutterButtonContentComponent: Component {
func updateIsHighlighted(_ isHighlighted: Bool) {
let scale: CGFloat = isHighlighted ? 0.8 : 1.0
let transition = Transition(animation: .curve(duration: 0.3, curve: .easeInOut))
- transition.setTransform(layer: self.innerLayer, transform: CATransform3DMakeScale(scale, scale, 1.0))
+ transition.setTransform(view: self.blobView, transform: CATransform3DMakeScale(scale, scale, 1.0))
}
func update(component: ShutterButtonContentComponent, availableSize: CGSize, transition: Transition) -> CGSize {
@ -78,6 +92,27 @@ private final class ShutterButtonContentComponent: Component {
self?.updateIsHighlighted(highlighted)
}
component.updateOffset.connect { [weak self] offset, transition in
if let self {
self.blobView.updateSecondaryOffset(offset, transition: transition)
if abs(offset) < 60.0 {
func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
let bandedOffset = offset - bandingStart
let range: CGFloat = 60.0
let coefficient: CGFloat = 0.1
return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
}
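// With coefficient 0.1 the banding is strong: a 60pt offset maps to
// 60 * (1 - 1 / 1.1) ≈ 5.5pt, so the primary blob only eases toward the drag.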
var bandedOffset = rubberBandingOffset(offset: abs(offset), bandingStart: 0.0)
if offset < 0.0 {
bandedOffset *= -1.0
}
self.blobView.updatePrimaryOffset(bandedOffset, transition: transition)
} else {
self.blobView.updatePrimaryOffset(0.0, transition: .spring(duration: 0.15))
}
}
}
let innerColor: UIColor
let innerSize: CGSize
let innerCornerRadius: CGFloat
@ -124,10 +159,13 @@ private final class ShutterButtonContentComponent: Component {
self.ringLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
self.ringLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
- transition.setBackgroundColor(layer: self.innerLayer, color: innerColor)
- transition.setCornerRadius(layer: self.innerLayer, cornerRadius: innerCornerRadius)
- transition.setPosition(layer: self.innerLayer, position: CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0))
- transition.setBounds(layer: self.innerLayer, bounds: CGRect(origin: .zero, size: innerSize))
+ self.blobView.updateState(component.blobState, transition: transition)
+ self.blobView.bounds = CGRect(origin: .zero, size: CGSize(width: availableSize.width, height: maximumShutterSize.height))
+ self.blobView.center = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
+ let _ = innerColor
+ let _ = innerSize
+ let _ = innerCornerRadius
self.progressLayer.bounds = CGRect(origin: .zero, size: maximumShutterSize)
self.progressLayer.position = CGPoint(x: maximumShutterSize.width / 2.0, y: maximumShutterSize.height / 2.0)
@ -309,6 +347,8 @@ final class CaptureControlsComponent: Component {
final class View: UIView, ComponentTaggedView, UIGestureRecognizerDelegate {
private var component: CaptureControlsComponent?
private var state: State?
private var availableSize: CGSize?
private let lockView = ComponentView<Empty>()
private let galleryButtonView = ComponentView<Empty>()
@ -318,6 +358,7 @@ final class CaptureControlsComponent: Component {
private let leftGuide = SimpleLayer()
private let rightGuide = SimpleLayer()
private let shutterUpdateOffset = ActionSlot<(CGFloat, Transition)>()
private let shutterHightlightedAction = ActionSlot<Bool>()
private let flipAnimationAction = ActionSlot<Void>()
@ -353,34 +394,99 @@ final class CaptureControlsComponent: Component {
case .began:
self.component?.shutterPressed()
self.component?.swipeHintUpdated(.zoom)
self.shutterUpdateOffset.invoke((0.0, .immediate))
case .ended, .cancelled:
if location.x < 75.0 {
if location.x < self.frame.width / 2.0 - 60.0 {
self.component?.lockRecording()
var blobOffset: CGFloat = 0.0
if let galleryButton = self.galleryButtonView.view {
blobOffset = galleryButton.center.x - self.frame.width / 2.0
}
self.shutterUpdateOffset.invoke((blobOffset, .spring(duration: 0.5)))
} else {
self.component?.shutterReleased()
self.shutterUpdateOffset.invoke((0.0, .spring(duration: 0.3)))
}
default:
break
}
}
private let hapticFeedback = HapticFeedback()
private var didFlip = false
private var wasBanding: Bool?
private var panBlobState: ShutterBlobView.BlobState?
@objc private func handlePan(_ gestureRecognizer: UIPanGestureRecognizer) {
func rubberBandingOffset(offset: CGFloat, bandingStart: CGFloat) -> CGFloat {
let bandedOffset = offset - bandingStart
let range: CGFloat = 60.0
let coefficient: CGFloat = 0.4
return bandingStart + (1.0 - (1.0 / ((bandedOffset * coefficient / range) + 1.0))) * range
}
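// Same banding curve as in ShutterButtonContentComponent above, but with a
// stiffer coefficient (0.4 vs. 0.1): 30pt of pan travel maps to ≈ 10pt and
// 60pt to ≈ 17pt, so the blob visibly follows the drag before snapping.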
let previousPanBlobState = self.panBlobState
let location = gestureRecognizer.location(in: self)
switch gestureRecognizer.state {
case .changed:
if location.x < self.frame.width / 2.0 - 40.0 {
if location.x < 75.0 {
var blobOffset: CGFloat = 0.0
if let galleryButton = self.galleryButtonView.view, let flipButton = self.flipButtonView.view {
blobOffset = max(galleryButton.center.x, min(flipButton.center.x, location.x))
}
blobOffset -= self.frame.width / 2.0
var isBanding = false
if location.x < self.frame.width / 2.0 - 20.0 {
if location.x < self.frame.width / 2.0 - 60.0 {
self.component?.swipeHintUpdated(.releaseLock)
if location.x < 75.0 {
self.panBlobState = .lock
} else {
self.panBlobState = .transientToLock
}
} else {
self.component?.swipeHintUpdated(.lock)
self.panBlobState = .video
blobOffset = rubberBandingOffset(offset: blobOffset, bandingStart: 0.0)
isBanding = true
}
} else if location.x > self.frame.width / 2.0 + 40.0 {
} else if location.x > self.frame.width / 2.0 + 20.0 {
self.component?.swipeHintUpdated(.flip)
if location.x > self.frame.width / 2.0 + 60.0 {
self.panBlobState = .transientToFlip
if !self.didFlip && location.x > self.frame.width - 80.0 {
self.didFlip = true
self.hapticFeedback.impact(.light)
self.flipAnimationAction.invoke(Void())
self.component?.flipTapped()
}
} else {
self.didFlip = false
self.panBlobState = .video
blobOffset = rubberBandingOffset(offset: -blobOffset, bandingStart: 0.0) * -1.0
isBanding = true
}
} else {
blobOffset = rubberBandingOffset(offset: blobOffset, bandingStart: 0.0)
self.component?.swipeHintUpdated(.zoom)
self.panBlobState = .video
isBanding = true
}
var transition: Transition = .immediate
if let wasBanding = self.wasBanding, wasBanding != isBanding {
self.hapticFeedback.impact(.light)
transition = .spring(duration: 0.3)
}
self.wasBanding = isBanding
self.shutterUpdateOffset.invoke((blobOffset, transition))
default:
break
self.panBlobState = nil
self.wasBanding = nil
self.didFlip = false
}
if previousPanBlobState != self.panBlobState, let component = self.component, let state = self.state, let availableSize = self.availableSize {
let _ = self.update(component: component, state: state, availableSize: availableSize, transition: .spring(duration: 0.5))
}
}
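// The wasBanding flip above is what produces the single haptic tick when the
// blob crosses between the damped center zone and free tracking, and the
// 0.3s spring transition hides the discontinuity between the two offset
// curves at that boundary.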
@ -401,7 +507,10 @@ final class CaptureControlsComponent: Component {
}
func update(component: CaptureControlsComponent, state: State, availableSize: CGSize, transition: Transition) -> CGSize {
let previousShutterState = self.component?.shutterState ?? .generic
self.component = component
self.state = state
self.availableSize = availableSize
state.lastGalleryAsset = component.lastGalleryAsset
let size = CGSize(width: availableSize.width, height: maximumShutterSize.height)
@ -472,42 +581,6 @@ final class CaptureControlsComponent: Component {
transition.setAlpha(view: lockView, alpha: isHolding ? 1.0 : 0.0)
}
let shutterButtonSize = self.shutterButtonView.update(
transition: transition,
component: AnyComponent(
Button(
content: AnyComponent(
ShutterButtonContentComponent(
shutterState: component.shutterState,
highlightedAction: self.shutterHightlightedAction
)
),
automaticHighlight: false,
action: {
component.shutterTapped()
},
highlightedAction: self.shutterHightlightedAction
).minSize(maximumShutterSize)
),
environment: {},
containerSize: availableSize
)
let shutterButtonFrame = CGRect(origin: CGPoint(x: (availableSize.width - shutterButtonSize.width) / 2.0, y: (size.height - shutterButtonSize.height) / 2.0), size: shutterButtonSize)
if let shutterButtonView = self.shutterButtonView.view {
if shutterButtonView.superview == nil {
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
panGestureRecognizer.delegate = self
shutterButtonView.addGestureRecognizer(panGestureRecognizer)
let pressGestureRecognizer = UILongPressGestureRecognizer(target: self, action: #selector(self.handlePress(_:)))
pressGestureRecognizer.delegate = self
shutterButtonView.addGestureRecognizer(pressGestureRecognizer)
self.addSubview(shutterButtonView)
}
transition.setFrame(view: shutterButtonView, frame: shutterButtonFrame)
}
let flipAnimationAction = self.flipAnimationAction
let flipButtonSize = self.flipButtonView.update(
transition: .immediate,
@ -533,18 +606,77 @@ final class CaptureControlsComponent: Component {
transition.setFrame(view: flipButtonView, frame: flipButtonFrame)
}
var blobState: ShutterBlobView.BlobState
switch component.shutterState {
case .generic:
blobState = .generic
case .video:
blobState = .video
case .stopRecording:
blobState = .stopVideo
case .holdRecording:
blobState = self.panBlobState ?? .video
}
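// While a hold-to-record gesture is active, the pan handler owns the blob
// shape via panBlobState (video, transientToLock, lock or transientToFlip);
// panBlobState is reset to nil when the gesture ends, so .video is the
// fallback for .holdRecording.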
let shutterButtonSize = self.shutterButtonView.update(
transition: transition,
component: AnyComponent(
Button(
content: AnyComponent(
ShutterButtonContentComponent(
shutterState: component.shutterState,
blobState: blobState,
highlightedAction: self.shutterHightlightedAction,
updateOffset: self.shutterUpdateOffset
)
),
automaticHighlight: false,
action: { [weak self] in
self?.shutterUpdateOffset.invoke((0.0, .immediate))
component.shutterTapped()
},
highlightedAction: self.shutterHightlightedAction
).minSize(maximumShutterSize)
),
environment: {},
containerSize: availableSize
)
let shutterButtonFrame = CGRect(origin: CGPoint(x: (availableSize.width - shutterButtonSize.width) / 2.0, y: (size.height - shutterButtonSize.height) / 2.0), size: shutterButtonSize)
if let shutterButtonView = self.shutterButtonView.view {
if shutterButtonView.superview == nil {
let panGestureRecognizer = UIPanGestureRecognizer(target: self, action: #selector(self.handlePan(_:)))
panGestureRecognizer.delegate = self
shutterButtonView.addGestureRecognizer(panGestureRecognizer)
let pressGestureRecognizer = UILongPressGestureRecognizer(target: self, action: #selector(self.handlePress(_:)))
pressGestureRecognizer.minimumPressDuration = 0.3
pressGestureRecognizer.delegate = self
shutterButtonView.addGestureRecognizer(pressGestureRecognizer)
self.addSubview(shutterButtonView)
}
transition.setFrame(view: shutterButtonView, frame: shutterButtonFrame)
}
let guideSpacing: CGFloat = 9.0
let guideSize = CGSize(width: isHolding ? 60.0 : 0.0, height: 1.0 + UIScreenPixel)
let guideAlpha = isHolding ? 1.0 : 0.0
let guideAlpha: CGFloat = isHolding ? 1.0 : 0.0
let leftGuideFrame = CGRect(origin: CGPoint(x: shutterButtonFrame.minX - guideSpacing - guideSize.width, y: (size.height - guideSize.height) / 2.0), size: guideSize)
transition.setFrame(layer: self.leftGuide, frame: leftGuideFrame)
transition.setAlpha(layer: self.leftGuide, alpha: guideAlpha)
self.leftGuide.cornerRadius = guideSize.height / 2.0
let rightGuideFrame = CGRect(origin: CGPoint(x: shutterButtonFrame.maxX + guideSpacing, y: (size.height - guideSize.height) / 2.0), size: guideSize)
transition.setFrame(layer: self.leftGuide, frame: leftGuideFrame)
transition.setFrame(layer: self.rightGuide, frame: rightGuideFrame)
transition.setAlpha(layer: self.rightGuide, alpha: guideAlpha)
if previousShutterState == .generic || previousShutterState == .video {
self.leftGuide.opacity = Float(guideAlpha)
self.rightGuide.opacity = Float(guideAlpha)
} else {
transition.setAlpha(layer: self.leftGuide, alpha: guideAlpha)
transition.setAlpha(layer: self.rightGuide, alpha: guideAlpha)
}
self.leftGuide.cornerRadius = guideSize.height / 2.0
self.rightGuide.cornerRadius = guideSize.height / 2.0
if let screenTransition = transition.userData(CameraScreenTransition.self) {

View File

@ -1,29 +1,177 @@
import Foundation
import Metal
import MetalKit
import ComponentFlow
import Display
private final class PropertyAnimation<T: Interpolatable> {
let from: T
let to: T
let animation: Transition.Animation
let startTimestamp: Double
private let interpolator: (Interpolatable, Interpolatable, CGFloat) -> Interpolatable
init(fromValue: T, toValue: T, animation: Transition.Animation, startTimestamp: Double) {
self.from = fromValue
self.to = toValue
self.animation = animation
self.startTimestamp = startTimestamp
self.interpolator = T.interpolator()
}
func valueAt(_ t: CGFloat) -> Interpolatable {
if t <= 0.0 {
return self.from
} else if t >= 1.0 {
return self.to
} else {
return self.interpolator(self.from, self.to, t)
}
}
}
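// valueAt clamps t to [0.0, 1.0], so the final animation tick (where the
// elapsed time can slightly exceed the duration) lands exactly on the target
// value instead of extrapolating past it.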
private final class AnimatableProperty<T: Interpolatable> {
var presentationValue: T
var value: T
private var animation: PropertyAnimation<T>?
init(value: T) {
self.value = value
self.presentationValue = value
}
func update(value: T, transition: Transition = .immediate) {
if case .none = transition.animation {
if let animation = self.animation, case let .curve(duration, curve) = animation.animation {
self.value = value
// time left on the interrupted animation, measured from now
let remainingDuration = duration - (CACurrentMediaTime() - animation.startTimestamp)
if remainingDuration < 0.1 {
self.presentationValue = value
self.animation = nil
} else {
self.animation = PropertyAnimation(fromValue: self.presentationValue, toValue: value, animation: .curve(duration: remainingDuration, curve: curve), startTimestamp: CACurrentMediaTime())
}
} else {
self.value = value
self.presentationValue = value
self.animation = nil
}
} else {
self.value = value
self.animation = PropertyAnimation(fromValue: self.presentationValue, toValue: value, animation: transition.animation, startTimestamp: CACurrentMediaTime())
}
}
func tick(timestamp: Double) -> Bool {
guard let animation = self.animation, case let .curve(duration, curve) = animation.animation else {
return false
}
let timeFromStart = timestamp - animation.startTimestamp
var t = max(0.0, timeFromStart / duration)
switch curve {
case .easeInOut:
t = listViewAnimationCurveEaseInOut(t)
case .spring:
t = listViewAnimationCurveSystem(t)
case let .custom(x1, y1, x2, y2):
t = bezierPoint(CGFloat(x1), CGFloat(y1), CGFloat(x2), CGFloat(y2), t)
}
self.presentationValue = animation.valueAt(t) as! T
return timeFromStart <= duration
}
}
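// A minimal usage sketch (illustrative, not part of this file): update()
// sets the model value and captures an in-flight animation from the current
// presentation value; tick() advances it once per display-link frame.
//
//   let radius = AnimatableProperty<CGFloat>(value: 0.63)
//   radius.update(value: 0.185, transition: .spring(duration: 0.3))
//   // each frame:
//   let animating = radius.tick(timestamp: CACurrentMediaTime())
//   render(cornerRadius: radius.presentationValue) // render() is hypothetical
//
// The value/presentationValue split mirrors Core Animation's model and
// presentation layers, which is what lets a new transition retarget smoothly
// mid-animation.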
final class ShutterBlobView: MTKView, MTKViewDelegate {
public func draw(in view: MTKView) {
enum BlobState {
case generic
case video
case transientToLock
case lock
case transientToFlip
case stopVideo
var primarySize: CGFloat {
switch self {
case .generic, .video, .transientToFlip:
return 0.63
case .transientToLock, .lock, .stopVideo:
return 0.275
}
}
var primaryRedness: CGFloat {
switch self {
case .generic:
return 0.0
default:
return 1.0
}
}
var primaryCornerRadius: CGFloat {
switch self {
case .generic, .video, .transientToFlip:
return 0.63
case .transientToLock, .lock, .stopVideo:
return 0.185
}
}
var secondarySize: CGFloat {
switch self {
case .generic, .video, .transientToFlip, .transientToLock:
return 0.335
case .lock:
return 0.5
case .stopVideo:
return 0.0
}
}
var secondaryRedness: CGFloat {
switch self {
case .generic, .lock, .transientToLock, .transientToFlip:
return 0.0
default:
return 1.0
}
}
}
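// These constants appear to be normalized units consumed by the fragment
// shader: sizes and corner radii as fractions of the viewport height, and
// redness as a 0.0-1.0 blend from the idle white toward the recording red.
// That reading is an assumption from the parameter packing below, not
// something this file spells out.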
private let commandQueue: MTLCommandQueue
private let drawPassthroughPipelineState: MTLRenderPipelineState
private var displayLink: CADisplayLink?
private var viewportDimensions = CGSize(width: 1, height: 1)
private var startTimestamp = CACurrentMediaTime()
private var displayLink: SharedDisplayLinkDriver.Link?
private var primarySize = AnimatableProperty<CGFloat>(value: 0.63)
private var primaryOffset = AnimatableProperty<CGFloat>(value: 0.0)
private var primaryRedness = AnimatableProperty<CGFloat>(value: 0.0)
private var primaryCornerRadius = AnimatableProperty<CGFloat>(value: 0.63)
private var secondarySize = AnimatableProperty<CGFloat>(value: 0.335)
private var secondaryOffset = AnimatableProperty<CGFloat>(value: 0.0)
private var secondaryRedness = AnimatableProperty<CGFloat>(value: 0.0)
private(set) var state: BlobState = .generic
public init?(test: Bool) {
let mainBundle = Bundle(for: ShutterBlobView.self)
guard let path = mainBundle.path(forResource: "CameraScreenBundle", ofType: "bundle") else {
return nil
}
guard let bundle = Bundle(path: path) else {
return nil
}
guard let device = MTLCreateSystemDefaultDevice() else {
return nil
}
guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: mainBundle) else {
guard let defaultLibrary = try? device.makeDefaultLibrary(bundle: bundle) else {
return nil
}
@ -62,25 +210,11 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
self.backgroundColor = .clear
self.framebufferOnly = true
class DisplayLinkProxy: NSObject {
weak var target: ShutterBlobView?
init(target: ShutterBlobView) {
self.target = target
}
@objc func displayLinkEvent() {
self.target?.displayLinkEvent()
}
self.displayLink = SharedDisplayLinkDriver.shared.add { [weak self] in
self?.tick()
}
self.displayLink = CADisplayLink(target: DisplayLinkProxy(target: self), selector: #selector(DisplayLinkProxy.displayLinkEvent))
if #available(iOS 15.0, *) {
let maxFps = Float(UIScreen.main.maximumFramesPerSecond)
self.displayLink?.preferredFrameRateRange = CAFrameRateRange(minimum: 60.0, maximum: maxFps, preferred: maxFps)
}
self.displayLink?.add(to: .main, forMode: .common)
self.displayLink?.isPaused = false
self.displayLink?.isPaused = true
self.isPaused = true
}
@ -96,8 +230,63 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
deinit {
self.displayLink?.invalidate()
}
func updateState(_ state: BlobState, transition: Transition = .immediate) {
guard self.state != state else {
return
}
self.state = state
@objc private func displayLinkEvent() {
self.primarySize.update(value: state.primarySize, transition: transition)
self.primaryRedness.update(value: state.primaryRedness, transition: transition)
self.primaryCornerRadius.update(value: state.primaryCornerRadius, transition: transition)
self.secondarySize.update(value: state.secondarySize, transition: transition)
self.secondaryRedness.update(value: state.secondaryRedness, transition: transition)
self.tick()
}
func updatePrimaryOffset(_ offset: CGFloat, transition: Transition = .immediate) {
guard self.frame.height > 0.0 else {
return
}
let mappedOffset = offset / self.frame.height * 2.0
self.primaryOffset.update(value: mappedOffset, transition: transition)
self.tick()
}
func updateSecondaryOffset(_ offset: CGFloat, transition: Transition = .immediate) {
guard self.frame.height > 0.0 else {
return
}
let mappedOffset = offset / self.frame.height * 2.0
self.secondaryOffset.update(value: mappedOffset, transition: transition)
self.tick()
}
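// Both offset setters map view points into the shader's coordinate space on
// the assumption that the visible height spans 2.0 units (clip-space style,
// -1.0...1.0): dividing by the view height and doubling keeps the blob
// offsets in the same normalized units as the BlobState sizes above.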
private func updateAnimations() {
let properties = [
self.primarySize,
self.primaryOffset,
self.primaryRedness,
self.primaryCornerRadius,
self.secondarySize,
self.secondaryOffset,
self.secondaryRedness
]
let timestamp = CACurrentMediaTime()
var hasAnimations = false
for property in properties {
if property.tick(timestamp: timestamp) {
hasAnimations = true
}
}
self.displayLink?.isPaused = !hasAnimations
}
private func tick() {
self.updateAnimations()
self.draw()
}
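// Rendering is fully on-demand: the MTKView stays paused and draw() is
// invoked manually from tick(), while updateAnimations() pauses the shared
// display link as soon as no property reports an active animation, so an
// idle shutter blob costs no per-frame work.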
@ -138,9 +327,21 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
var resolution = simd_uint2(UInt32(viewportDimensions.width), UInt32(viewportDimensions.height))
renderEncoder.setFragmentBytes(&resolution, length: MemoryLayout<simd_uint2>.size, index: 0)
var primaryParameters = simd_float4(
Float(self.primarySize.presentationValue),
Float(self.primaryOffset.presentationValue),
Float(self.primaryRedness.presentationValue),
Float(self.primaryCornerRadius.presentationValue)
)
renderEncoder.setFragmentBytes(&primaryParameters, length: MemoryLayout<simd_float4>.size, index: 1)
var time = Float(CACurrentMediaTime() - self.startTimestamp) * 0.5
renderEncoder.setFragmentBytes(&time, length: 4, index: 1)
var secondaryParameters = simd_float3(
Float(self.secondarySize.presentationValue),
Float(self.secondaryOffset.presentationValue),
Float(self.secondaryRedness.presentationValue)
)
renderEncoder.setFragmentBytes(&secondaryParameters, length: MemoryLayout<simd_float3>.size, index: 2)
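// Fragment buffer layout as bound above: index 0 carries the viewport
// resolution, index 1 the primary blob parameters (size, offset, redness,
// corner radius), index 2 the secondary blob parameters (size, offset,
// redness).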
renderEncoder.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6, instanceCount: 1)
@ -149,4 +350,14 @@ final class ShutterBlobView: MTKView, MTKViewDelegate {
commandBuffer.present(drawable)
commandBuffer.commit()
}
override func layoutSubviews() {
super.layoutSubviews()
self.tick()
}
func draw(in view: MTKView) {
}
}

View File

@ -185,7 +185,14 @@ public final class TelegramRootController: NavigationController {
accountSettingsController.parentController = self
controllers.append(accountSettingsController)
tabBarController.cameraItem = UITabBarItem(title: "Camera", image: UIImage(bundleImageName: "Chat List/Tabs/IconCamera"), tag: 2)
if self.context.sharedContext.immediateExperimentalUISettings.storiesExperiment {
tabBarController.cameraItemAndAction = (
UITabBarItem(title: "Camera", image: UIImage(bundleImageName: "Chat List/Tabs/IconCamera"), tag: 2),
{ [weak self] in
self?.openStoryCamera()
}
)
}
tabBarController.setControllers(controllers, selectedIndex: restoreSettignsController != nil ? (controllers.count - 1) : (controllers.count - 2))
@ -195,10 +202,6 @@ public final class TelegramRootController: NavigationController {
self.accountSettingsController = accountSettingsController
self.rootTabController = tabBarController
self.pushViewController(tabBarController, animated: false)
tabBarController.middleItemAction = { [weak self] in
self?.openStoryCamera()
}
}
public func updateRootControllers(showCallsTab: Bool) {
@ -253,6 +256,7 @@ public final class TelegramRootController: NavigationController {
controller.view.endEditing(true)
var presentImpl: ((ViewController) -> Void)?
var dismissCameraImpl: (() -> Void)?
let cameraController = CameraScreen(context: self.context, mode: .story, completion: { [weak self] result in
if let self {
let item: TGMediaEditableItem & TGMediaSelectableItem
@ -264,8 +268,16 @@ public final class TelegramRootController: NavigationController {
case let .asset(asset):
item = TGMediaAsset(phAsset: asset)
}
legacyFullMediaEditor(context: self.context, item: item, getCaptionPanelView: { return nil }, sendMessagesWithSignals: { _, _, _ in
legacyStoryMediaEditor(context: self.context, item: item, getCaptionPanelView: { return nil }, completion: { result in
dismissCameraImpl?()
switch result {
case let .image(image):
_ = image
case let .video(path):
_ = path
case let .asset(asset):
_ = asset
}
}, present: { c, a in
presentImpl?(c)
})
@ -275,6 +287,9 @@ public final class TelegramRootController: NavigationController {
presentImpl = { [weak cameraController] c in
cameraController?.present(c, in: .window(.root))
}
dismissCameraImpl = { [weak cameraController] in
cameraController?.dismiss(animated: false)
}
}
public func openSettings() {