// Swiftgram/submodules/Camera/Sources/CameraOutput.swift
import Foundation
import AVFoundation
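
/// A machine-readable code detected by the camera (currently only QR codes),
/// carrying the decoded string and the corner points reported by AVFoundation.
/// `boundingBox` is the axis-aligned rectangle enclosing `corners`.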
public struct CameraCode: Equatable {
    public enum CodeType {
        case qr
    }
    
    public let type: CodeType
    public let message: String
    public let corners: [CGPoint]
    
    public init(type: CameraCode.CodeType, message: String, corners: [CGPoint]) {
        self.type = type
        self.message = message
        self.corners = corners
    }
    
    public var boundingBox: CGRect {
        let x = self.corners.map { $0.x }
        let y = self.corners.map { $0.y }
        if let minX = x.min(), let minY = y.min(), let maxX = x.max(), let maxY = y.max() {
            return CGRect(x: minX, y: minY, width: abs(maxX - minX), height: abs(maxY - minY))
        }
        return CGRect.null
    }
    
    public static func == (lhs: CameraCode, rhs: CameraCode) -> Bool {
        if lhs.type != rhs.type {
            return false
        }
        if lhs.message != rhs.message {
            return false
        }
        if lhs.corners != rhs.corners {
            return false
        }
        return true
    }
}
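
/// Owns the video, audio and metadata capture outputs, attaches them to an
/// `AVCaptureSession` via `configure(for:)`, and forwards incoming sample
/// buffers and detected QR codes through the `processSampleBuffer` and
/// `processCodes` callbacks.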
final class CameraOutput: NSObject {
    //private let photoOutput = CameraPhotoOutput()
    private let videoOutput = AVCaptureVideoDataOutput()
    private let audioOutput = AVCaptureAudioDataOutput()
    private let metadataOutput = AVCaptureMetadataOutput()
    
    // Serial queues on which the sample buffer and metadata delegates are called.
    private let queue = DispatchQueue(label: "")
    private let metadataQueue = DispatchQueue(label: "")
    
    var processSampleBuffer: ((CMSampleBuffer, AVCaptureConnection) -> Void)?
    var processCodes: (([CameraCode]) -> Void)?
    
    override init() {
        super.init()
        
        self.videoOutput.alwaysDiscardsLateVideoFrames = true
        self.videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] as [String: Any]
    }
    
    deinit {
        self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
        self.audioOutput.setSampleBufferDelegate(nil, queue: nil)
    }
    
    // Adds the outputs to the given session and installs the delegates.
    func configure(for session: AVCaptureSession) {
        if session.canAddOutput(self.videoOutput) {
            session.addOutput(self.videoOutput)
            self.videoOutput.setSampleBufferDelegate(self, queue: self.queue)
        }
        if session.canAddOutput(self.audioOutput) {
            session.addOutput(self.audioOutput)
            self.audioOutput.setSampleBufferDelegate(self, queue: self.queue)
        }
        if session.canAddOutput(self.metadataOutput) {
            session.addOutput(self.metadataOutput)
            self.metadataOutput.setMetadataObjectsDelegate(self, queue: self.metadataQueue)
            
            if self.metadataOutput.availableMetadataObjectTypes.contains(.qr) {
                self.metadataOutput.metadataObjectTypes = [.qr]
            }
        }
    }
    
    // Removes all outputs from the session.
    func invalidate(for session: AVCaptureSession) {
        for output in session.outputs {
            session.removeOutput(output)
        }
    }
}
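
// Video and audio sample buffers are delivered on the shared delegate queue;
// buffers whose data is not yet ready are ignored.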
extension CameraOutput: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard CMSampleBufferDataIsReady(sampleBuffer) else {
            return
        }
        self.processSampleBuffer?(sampleBuffer, connection)
    }
    
    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    }
}
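
// Detected metadata objects are filtered down to QR codes and forwarded as
// `CameraCode` values.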
extension CameraOutput: AVCaptureMetadataOutputObjectsDelegate {
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        let codes: [CameraCode] = metadataObjects.filter { $0.type == .qr }.compactMap { object in
            if let object = object as? AVMetadataMachineReadableCodeObject, let stringValue = object.stringValue, !stringValue.isEmpty {
                return CameraCode(type: .qr, message: stringValue, corners: object.corners)
            } else {
                return nil
            }
        }
        self.processCodes?(codes)
    }
}
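
// Usage sketch (illustration only, not part of the original file): how a caller
// might wire `CameraOutput` into a capture session. The function name
// `makeExampleCameraOutput` and the configuration steps around it are
// assumptions made for this example.
private func makeExampleCameraOutput(for session: AVCaptureSession) -> CameraOutput {
    let output = CameraOutput()
    
    // Attach the video, audio and metadata outputs while the session is being configured.
    session.beginConfiguration()
    output.configure(for: session)
    session.commitConfiguration()
    
    output.processSampleBuffer = { sampleBuffer, connection in
        // Called on the output's delegate queue for every ready video/audio buffer.
    }
    output.processCodes = { codes in
        // Called when QR codes are detected, e.g. codes.first?.message.
    }
    
    return output
}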