|
| 1 | +import AVFoundation |
| 2 | +import Combine |
| 3 | +import CoreImage |
| 4 | +import Shared |
| 5 | +import UIKit |
| 6 | + |
// MARK: - Camera Motion Detector

/// Detects motion using the device camera for wake-on-motion functionality.
///
/// Low-resolution frames are captured from the front camera at 5 fps,
/// consecutive frames are differenced with Core Image, and the mean luminance
/// of the difference image is published as `motionLevel`. When that level
/// exceeds a sensitivity-dependent threshold, `onMotionDetected` fires and a
/// 2-second cooldown suppresses rapid re-triggering.
@MainActor
public final class CameraMotionDetector: NSObject, ObservableObject {
    // MARK: - Singleton

    public static let shared = CameraMotionDetector()

    // MARK: - Published State

    /// Whether motion detection is currently active.
    @Published public private(set) var isActive: Bool = false

    /// Whether motion was detected recently (reset when the cooldown ends).
    @Published public private(set) var motionDetected: Bool = false

    /// Current motion level (0.0 - 1.0): mean luminance of the frame difference.
    @Published public private(set) var motionLevel: Float = 0

    /// Camera authorization status.
    @Published public private(set) var authorizationStatus: AVAuthorizationStatus = .notDetermined

    /// Error message if detection failed.
    @Published public private(set) var errorMessage: String?

    // MARK: - Callbacks

    /// Called when motion is detected.
    public var onMotionDetected: (() -> Void)?

    /// Called when motion level changes (for debugging/visualization).
    public var onMotionLevelChanged: ((Float) -> Void)?

    // MARK: - Private

    private var settings: KioskSettings { KioskModeManager.shared.settings }
    private var captureSession: AVCaptureSession?
    private var videoOutput: AVCaptureVideoDataOutput?
    private let processingQueue = DispatchQueue(label: "com.haframe.motion", qos: .userInitiated)

    // Reused for every frame: CIContext creation is expensive, so allocate it
    // once instead of per `calculateDifference` call. Null working color space
    // because we only need raw channel averages, not color-managed output.
    private let ciContext = CIContext(options: [.workingColorSpace: kCFNull as Any])

    private var previousFrame: CIImage?
    private var motionThreshold: Float = 0.02 // Adjustable based on sensitivity
    private var cooldownTimer: Timer?
    private var isInCooldown: Bool = false

    // MARK: - Initialization

    private override init() {
        super.init()
        checkAuthorizationStatus()
    }

    deinit {
        captureSession?.stopRunning()
        captureSession = nil
        cooldownTimer?.invalidate()
        cooldownTimer = nil
    }

    // MARK: - Public Methods

    /// Start motion detection.
    ///
    /// No-op when already active or when camera access is not authorized.
    /// Sets `errorMessage` (and leaves `isActive` false) when the capture
    /// session cannot be configured.
    public func start() {
        guard !isActive else { return }

        // Re-check authorization status before starting
        checkAuthorizationStatus()

        guard authorizationStatus == .authorized else {
            Current.Log.warning("Camera not authorized for motion detection (status: \(authorizationStatus.rawValue))")
            return
        }

        Current.Log.info("Starting camera motion detection")

        updateSensitivity()
        setupCaptureSession()

        // If setup failed (no camera / configuration error) do not flip
        // `isActive` on — previously this reported active even on failure.
        guard captureSession != nil else { return }

        // startRunning() blocks, so keep it off the main thread.
        processingQueue.async { [weak self] in
            self?.captureSession?.startRunning()
            DispatchQueue.main.async {
                self?.isActive = true
                self?.errorMessage = nil
            }
        }
    }

    /// Stop motion detection and reset all published motion state.
    public func stop() {
        guard isActive else { return }

        Current.Log.info("Stopping camera motion detection")

        processingQueue.async { [weak self] in
            self?.captureSession?.stopRunning()
            DispatchQueue.main.async {
                self?.isActive = false
                self?.motionDetected = false
                self?.motionLevel = 0
                self?.previousFrame = nil
            }
        }

        cooldownTimer?.invalidate()
        cooldownTimer = nil
    }

    /// Request camera authorization.
    /// - Returns: `true` when access was granted.
    public func requestAuthorization() async -> Bool {
        let status = await AVCaptureDevice.requestAccess(for: .video)
        authorizationStatus = AVCaptureDevice.authorizationStatus(for: .video)
        return status
    }

    /// Update the motion threshold from the kiosk settings' sensitivity level.
    /// A higher sensitivity means a lower threshold (less motion required).
    public func updateSensitivity() {
        switch settings.cameraMotionSensitivity {
        case .low:
            motionThreshold = 0.05
        case .medium:
            motionThreshold = 0.02
        case .high:
            motionThreshold = 0.008
        }

        Current.Log.info("Motion sensitivity set to \(settings.cameraMotionSensitivity.rawValue), threshold: \(motionThreshold)")
    }

    // MARK: - Private Methods

    /// Refresh `authorizationStatus` from the system.
    private func checkAuthorizationStatus() {
        authorizationStatus = AVCaptureDevice.authorizationStatus(for: .video)
    }

    /// Build the capture session (front camera, low preset, 5 fps, BGRA
    /// output). On any failure, sets `errorMessage` and leaves
    /// `captureSession` nil so `start()` can bail out.
    private func setupCaptureSession() {
        let session = AVCaptureSession()
        session.sessionPreset = .low // Low resolution is plenty for frame differencing

        // Get front camera (facing user for wall-mounted display)
        guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
            errorMessage = "Front camera not available"
            Current.Log.error("Front camera not available for motion detection")
            return
        }

        do {
            let input = try AVCaptureDeviceInput(device: camera)
            // Previously a canAddInput failure was silently ignored, leaving a
            // session that runs but never delivers frames — surface it instead.
            guard session.canAddInput(input) else {
                errorMessage = "Unable to add camera input"
                Current.Log.error("Capture session rejected camera input")
                return
            }
            session.addInput(input)

            // Configure low frame rate to save power
            try camera.lockForConfiguration()
            camera.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 5) // 5 fps
            camera.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 5)
            camera.unlockForConfiguration()
        } catch {
            errorMessage = "Failed to configure camera: \(error.localizedDescription)"
            Current.Log.error("Camera configuration error: \(error)")
            return
        }

        // Setup video output
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        ]
        output.alwaysDiscardsLateVideoFrames = true
        output.setSampleBufferDelegate(self, queue: processingQueue)

        guard session.canAddOutput(output) else {
            errorMessage = "Unable to add video output"
            Current.Log.error("Capture session rejected video output")
            return
        }
        session.addOutput(output)

        captureSession = session
        videoOutput = output
    }

    /// Compare the incoming frame against the previous one and publish the
    /// resulting motion level. The very first frame only seeds `previousFrame`.
    ///
    /// NOTE(review): this runs on the main actor, including the Core Image
    /// render in `calculateDifference` — consider moving the differencing onto
    /// `processingQueue` if main-thread load becomes a problem.
    private func processFrame(_ pixelBuffer: CVPixelBuffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)

        guard let previous = previousFrame else {
            previousFrame = ciImage
            return
        }

        // Calculate difference between frames
        let difference = calculateDifference(current: ciImage, previous: previous)

        motionLevel = difference
        onMotionLevelChanged?(difference)

        if difference > motionThreshold, !isInCooldown {
            handleMotionDetected()
        }

        previousFrame = ciImage
    }

    /// Mean luminance (0.0 - 1.0) of the pixel-wise difference between two
    /// frames: difference blend → area average → 1x1 readback → channel mean.
    private func calculateDifference(current: CIImage, previous: CIImage) -> Float {
        // Create difference image
        let differenceFilter = CIFilter(name: "CIDifferenceBlendMode")
        differenceFilter?.setValue(current, forKey: kCIInputImageKey)
        differenceFilter?.setValue(previous, forKey: kCIInputBackgroundImageKey)

        guard let differenceImage = differenceFilter?.outputImage else { return 0 }

        // Calculate average luminance of difference
        let extentVector = CIVector(
            x: differenceImage.extent.origin.x,
            y: differenceImage.extent.origin.y,
            z: differenceImage.extent.size.width,
            w: differenceImage.extent.size.height
        )

        let averageFilter = CIFilter(name: "CIAreaAverage")
        averageFilter?.setValue(differenceImage, forKey: kCIInputImageKey)
        averageFilter?.setValue(extentVector, forKey: kCIInputExtentKey)

        guard let outputImage = averageFilter?.outputImage else { return 0 }

        // Read back the single averaged pixel using the shared context
        // (previously a new CIContext was created on every frame).
        var bitmap = [UInt8](repeating: 0, count: 4)
        ciContext.render(
            outputImage,
            toBitmap: &bitmap,
            rowBytes: 4,
            bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
            format: .RGBA8,
            colorSpace: nil
        )

        // Mean of the three color channels; channel order doesn't matter for
        // an unweighted average, so any BGRA/RGBA mismatch is harmless here.
        let r = Float(bitmap[0]) / 255.0
        let g = Float(bitmap[1]) / 255.0
        let b = Float(bitmap[2]) / 255.0

        return (r + g + b) / 3.0
    }

    /// Publish a detection, fire the callback, and start the 2-second
    /// cooldown that prevents rapid re-triggering.
    private func handleMotionDetected() {
        motionDetected = true
        isInCooldown = true

        Current.Log.info("Motion detected (level: \(motionLevel))")
        onMotionDetected?()

        // Start cooldown to prevent rapid re-triggering. The Timer closure is
        // not main-actor isolated, so hop back explicitly.
        cooldownTimer?.invalidate()
        cooldownTimer = Timer.scheduledTimer(withTimeInterval: 2.0, repeats: false) { [weak self] _ in
            DispatchQueue.main.async {
                self?.isInCooldown = false
                self?.motionDetected = false
            }
        }
    }
}
| 270 | + |
// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

extension CameraMotionDetector: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Delivered on `processingQueue` for each captured frame; extracts the
    /// pixel buffer and forwards it to the main actor for differencing.
    nonisolated public func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection
    ) {
        // Frames without an image buffer carry nothing we can analyze.
        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }

        // Hop to the main actor, where all detector state lives.
        Task { @MainActor in
            self.processFrame(buffer)
        }
    }
}
0 commit comments