Camera Manager
import AVFoundation
import CoreImage
import UIKit
class CameraManager: NSObject {

    private let captureSession = AVCaptureSession()
    private var deviceInput: AVCaptureDeviceInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private let systemPreferredCamera = AVCaptureDevice.default(for: .video)
    private let ciContext = CIContext()
    private let sessionQueue = DispatchQueue(label: "video.preview.session")

    // Called from the capture delegate to push rendered frames into `previewStream`.
    private var addToPreviewStream: ((CGImage) -> Void)?

    // Stream of frames for the UI; keeps only the newest frame so the preview never lags behind the camera.
    lazy var previewStream: AsyncStream<CGImage> = {
        AsyncStream(bufferingPolicy: .bufferingNewest(1)) { continuation in
            addToPreviewStream = { cgImage in
                continuation.yield(cgImage)
            }
        }
    }()

    private let classificationQueue = DispatchQueue(
        label: "classification.queue",
        qos: .userInitiated,
        attributes: .concurrent
    )

    // Called from the capture delegate to push pixel buffers into `classificationStream`.
    private var addToClassificationStream: ((CVPixelBuffer) -> Void)?
    private var frameCounter = 0

    // Stream of pixel buffers for a downstream classifier; also buffers only the newest frame.
    lazy var classificationStream: AsyncStream<CVPixelBuffer> = {
        AsyncStream(bufferingPolicy: .bufferingNewest(1)) { continuation in
            addToClassificationStream = { pixelBuffer in
                continuation.yield(pixelBuffer)
            }
        }
    }()

    private var isAuthorized: Bool {
        get async {
            let status = AVCaptureDevice.authorizationStatus(for: .video)
            var isAuthorized = status == .authorized
            if status == .notDetermined {
                isAuthorized = await AVCaptureDevice.requestAccess(for: .video)
            }
            return isAuthorized
        }
    }

    override init() {
        super.init()

        Task {
            await configureSession()
            await startSession()
        }
    }

    private func configureSession() async {
        guard await isAuthorized,
              let systemPreferredCamera,
              let deviceInput = try? AVCaptureDeviceInput(device: systemPreferredCamera)
        else { return }

        captureSession.beginConfiguration()

        defer {
            self.captureSession.commitConfiguration()
        }

        if captureSession.canSetSessionPreset(.hd1920x1080) {
            captureSession.sessionPreset = .hd1920x1080
        }

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)

        guard captureSession.canAddInput(deviceInput) else {
            return
        }

        guard captureSession.canAddOutput(videoOutput) else {
            return
        }

        captureSession.addInput(deviceInput)
        captureSession.addOutput(videoOutput)

        // Keep references to the configured input and output.
        self.deviceInput = deviceInput
        self.videoOutput = videoOutput

        // Rotate the stream to a vertical (portrait) orientation.
        if let connection = videoOutput.connection(with: .video) {
            connection.videoRotationAngle = 90
        }
    }

    private func startSession() async {
        guard await isAuthorized else { return }
        captureSession.startRunning()
    }
}
extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
              let cgImage = ciContext.createCGImage(
                  CIImage(cvPixelBuffer: pixelBuffer),
                  from: CGRect(x: 0, y: 0,
                               width: CVPixelBufferGetWidth(pixelBuffer),
                               height: CVPixelBufferGetHeight(pixelBuffer)))
        else { return }

        addToPreviewStream?(cgImage)

        frameCounter += 1
        // `% 1` forwards every frame; raise the divisor (e.g. `% 3`) to throttle classification.
        if frameCounter % 1 == 0 {
            addToClassificationStream?(pixelBuffer)
        }
    }
}
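
Both streams are meant to be consumed with `for await` loops from the UI layer. Below is a minimal sketch of one possible consumer, assuming a SwiftUI app; the `CameraViewModel` type and its `classify(_:)` placeholder are illustrative names, not part of the sample above.

import AVFoundation
import SwiftUI

@MainActor
final class CameraViewModel: ObservableObject {

    // The most recent preview frame, ready to be drawn by a SwiftUI view.
    @Published var currentFrame: CGImage?

    private let cameraManager = CameraManager()

    func start() {
        // Drive the on-screen preview from the preview stream.
        Task {
            for await image in cameraManager.previewStream {
                currentFrame = image
            }
        }

        // Hand pixel buffers from the classification stream to the app's classifier.
        Task {
            for await pixelBuffer in cameraManager.classificationStream {
                await classify(pixelBuffer)
            }
        }
    }

    // Placeholder for whatever Vision / Core ML work the app performs per frame.
    private func classify(_ pixelBuffer: CVPixelBuffer) async {
    }
}

Because both streams use `.bufferingNewest(1)`, a slow consumer only ever sees the most recent frame; stale frames are dropped rather than queued up.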