@camstack/addon-vision 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109) hide show
  1. package/dist/addons/animal-classifier/index.d.mts +25 -0
  2. package/dist/addons/animal-classifier/index.d.ts +25 -0
  3. package/dist/addons/animal-classifier/index.js +652 -0
  4. package/dist/addons/animal-classifier/index.js.map +1 -0
  5. package/dist/addons/animal-classifier/index.mjs +10 -0
  6. package/dist/addons/animal-classifier/index.mjs.map +1 -0
  7. package/dist/addons/audio-classification/index.d.mts +31 -0
  8. package/dist/addons/audio-classification/index.d.ts +31 -0
  9. package/dist/addons/audio-classification/index.js +572 -0
  10. package/dist/addons/audio-classification/index.js.map +1 -0
  11. package/dist/addons/audio-classification/index.mjs +8 -0
  12. package/dist/addons/audio-classification/index.mjs.map +1 -0
  13. package/dist/addons/bird-global-classifier/index.d.mts +26 -0
  14. package/dist/addons/bird-global-classifier/index.d.ts +26 -0
  15. package/dist/addons/bird-global-classifier/index.js +658 -0
  16. package/dist/addons/bird-global-classifier/index.js.map +1 -0
  17. package/dist/addons/bird-global-classifier/index.mjs +10 -0
  18. package/dist/addons/bird-global-classifier/index.mjs.map +1 -0
  19. package/dist/addons/bird-nabirds-classifier/index.d.mts +28 -0
  20. package/dist/addons/bird-nabirds-classifier/index.d.ts +28 -0
  21. package/dist/addons/bird-nabirds-classifier/index.js +700 -0
  22. package/dist/addons/bird-nabirds-classifier/index.js.map +1 -0
  23. package/dist/addons/bird-nabirds-classifier/index.mjs +10 -0
  24. package/dist/addons/bird-nabirds-classifier/index.mjs.map +1 -0
  25. package/dist/addons/camera-native-detection/index.d.mts +32 -0
  26. package/dist/addons/camera-native-detection/index.d.ts +32 -0
  27. package/dist/addons/camera-native-detection/index.js +99 -0
  28. package/dist/addons/camera-native-detection/index.js.map +1 -0
  29. package/dist/addons/camera-native-detection/index.mjs +7 -0
  30. package/dist/addons/camera-native-detection/index.mjs.map +1 -0
  31. package/dist/addons/face-detection/index.d.mts +24 -0
  32. package/dist/addons/face-detection/index.d.ts +24 -0
  33. package/dist/addons/face-detection/index.js +720 -0
  34. package/dist/addons/face-detection/index.js.map +1 -0
  35. package/dist/addons/face-detection/index.mjs +10 -0
  36. package/dist/addons/face-detection/index.mjs.map +1 -0
  37. package/dist/addons/face-recognition/index.d.mts +24 -0
  38. package/dist/addons/face-recognition/index.d.ts +24 -0
  39. package/dist/addons/face-recognition/index.js +603 -0
  40. package/dist/addons/face-recognition/index.js.map +1 -0
  41. package/dist/addons/face-recognition/index.mjs +9 -0
  42. package/dist/addons/face-recognition/index.mjs.map +1 -0
  43. package/dist/addons/motion-detection/index.d.mts +26 -0
  44. package/dist/addons/motion-detection/index.d.ts +26 -0
  45. package/dist/addons/motion-detection/index.js +273 -0
  46. package/dist/addons/motion-detection/index.js.map +1 -0
  47. package/dist/addons/motion-detection/index.mjs +8 -0
  48. package/dist/addons/motion-detection/index.mjs.map +1 -0
  49. package/dist/addons/object-detection/index.d.mts +26 -0
  50. package/dist/addons/object-detection/index.d.ts +26 -0
  51. package/dist/addons/object-detection/index.js +1214 -0
  52. package/dist/addons/object-detection/index.js.map +1 -0
  53. package/dist/addons/object-detection/index.mjs +10 -0
  54. package/dist/addons/object-detection/index.mjs.map +1 -0
  55. package/dist/addons/plate-detection/index.d.mts +25 -0
  56. package/dist/addons/plate-detection/index.d.ts +25 -0
  57. package/dist/addons/plate-detection/index.js +646 -0
  58. package/dist/addons/plate-detection/index.js.map +1 -0
  59. package/dist/addons/plate-detection/index.mjs +10 -0
  60. package/dist/addons/plate-detection/index.mjs.map +1 -0
  61. package/dist/addons/plate-recognition/index.d.mts +25 -0
  62. package/dist/addons/plate-recognition/index.d.ts +25 -0
  63. package/dist/addons/plate-recognition/index.js +648 -0
  64. package/dist/addons/plate-recognition/index.js.map +1 -0
  65. package/dist/addons/plate-recognition/index.mjs +9 -0
  66. package/dist/addons/plate-recognition/index.mjs.map +1 -0
  67. package/dist/chunk-3MQFUDRU.mjs +260 -0
  68. package/dist/chunk-3MQFUDRU.mjs.map +1 -0
  69. package/dist/chunk-5AIQSN32.mjs +227 -0
  70. package/dist/chunk-5AIQSN32.mjs.map +1 -0
  71. package/dist/chunk-5JJZGKL7.mjs +186 -0
  72. package/dist/chunk-5JJZGKL7.mjs.map +1 -0
  73. package/dist/chunk-6OR5TE7A.mjs +101 -0
  74. package/dist/chunk-6OR5TE7A.mjs.map +1 -0
  75. package/dist/chunk-AYBFB7ID.mjs +763 -0
  76. package/dist/chunk-AYBFB7ID.mjs.map +1 -0
  77. package/dist/chunk-B3R66MPF.mjs +219 -0
  78. package/dist/chunk-B3R66MPF.mjs.map +1 -0
  79. package/dist/chunk-DTOAB2CE.mjs +79 -0
  80. package/dist/chunk-DTOAB2CE.mjs.map +1 -0
  81. package/dist/chunk-ISOIDU4U.mjs +54 -0
  82. package/dist/chunk-ISOIDU4U.mjs.map +1 -0
  83. package/dist/chunk-J4WRYHHY.mjs +212 -0
  84. package/dist/chunk-J4WRYHHY.mjs.map +1 -0
  85. package/dist/chunk-KUO2BVFY.mjs +90 -0
  86. package/dist/chunk-KUO2BVFY.mjs.map +1 -0
  87. package/dist/chunk-LPI42WL6.mjs +324 -0
  88. package/dist/chunk-LPI42WL6.mjs.map +1 -0
  89. package/dist/chunk-MEVASN3P.mjs +305 -0
  90. package/dist/chunk-MEVASN3P.mjs.map +1 -0
  91. package/dist/chunk-PDSHDDPV.mjs +255 -0
  92. package/dist/chunk-PDSHDDPV.mjs.map +1 -0
  93. package/dist/chunk-Q3SQOYG6.mjs +218 -0
  94. package/dist/chunk-Q3SQOYG6.mjs.map +1 -0
  95. package/dist/chunk-QIMDG34B.mjs +229 -0
  96. package/dist/chunk-QIMDG34B.mjs.map +1 -0
  97. package/dist/index.d.mts +171 -0
  98. package/dist/index.d.ts +171 -0
  99. package/dist/index.js +3463 -0
  100. package/dist/index.js.map +1 -0
  101. package/dist/index.mjs +111 -0
  102. package/dist/index.mjs.map +1 -0
  103. package/package.json +49 -0
  104. package/python/__pycache__/coreml_inference.cpython-313.pyc +0 -0
  105. package/python/__pycache__/openvino_inference.cpython-313.pyc +0 -0
  106. package/python/__pycache__/pytorch_inference.cpython-313.pyc +0 -0
  107. package/python/coreml_inference.py +319 -0
  108. package/python/openvino_inference.py +247 -0
  109. package/python/pytorch_inference.py +255 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/shared/node-engine.ts","../src/shared/python-engine.ts","../src/shared/engine-resolver.ts"],"sourcesContent":["import type { IInferenceEngine, DetectionRuntime, DetectionDevice } from '@camstack/types'\nimport * as path from 'node:path'\n\nconst BACKEND_TO_PROVIDER: Readonly<Record<string, string>> = {\n cpu: 'cpu',\n coreml: 'coreml',\n cuda: 'cuda',\n tensorrt: 'tensorrt',\n dml: 'dml',\n} as const\n\nconst BACKEND_TO_DEVICE: Readonly<Record<string, DetectionDevice>> = {\n cpu: 'cpu',\n coreml: 'gpu-mps',\n cuda: 'gpu-cuda',\n tensorrt: 'tensorrt',\n} as const\n\nexport class NodeInferenceEngine implements IInferenceEngine {\n readonly runtime: DetectionRuntime = 'onnx'\n readonly device: DetectionDevice\n private session: unknown = null\n\n constructor(\n private readonly modelPath: string,\n private readonly backend: string,\n ) {\n this.device = (BACKEND_TO_DEVICE[backend] ?? 'cpu') as DetectionDevice\n }\n\n async initialize(): Promise<void> {\n const ort = await import('onnxruntime-node')\n const provider = BACKEND_TO_PROVIDER[this.backend] ?? 'cpu'\n\n // Resolve absolute path\n const absModelPath = path.isAbsolute(this.modelPath)\n ? 
this.modelPath\n : path.resolve(process.cwd(), this.modelPath)\n\n const sessionOptions: Record<string, unknown> = {\n executionProviders: [provider],\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n this.session = await (ort as any).InferenceSession.create(absModelPath, sessionOptions)\n }\n\n async run(input: Float32Array, inputShape: readonly number[]): Promise<Float32Array> {\n if (!this.session) {\n throw new Error('NodeInferenceEngine: not initialized — call initialize() first')\n }\n\n const ort = await import('onnxruntime-node')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const sess = this.session as any\n\n // Get the first input name\n const inputName: string = sess.inputNames[0]\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const tensor = new (ort as any).Tensor('float32', input, [...inputShape])\n const feeds: Record<string, unknown> = { [inputName]: tensor }\n\n const results = await sess.run(feeds)\n const outputName: string = sess.outputNames[0]\n const outputTensor = results[outputName]\n\n return outputTensor.data as Float32Array\n }\n\n async runMultiOutput(\n input: Float32Array,\n inputShape: readonly number[],\n ): Promise<Record<string, Float32Array>> {\n if (!this.session) {\n throw new Error('NodeInferenceEngine: not initialized — call initialize() first')\n }\n\n const ort = await import('onnxruntime-node')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const sess = this.session as any\n\n const inputName: string = sess.inputNames[0]\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const tensor = new (ort as any).Tensor('float32', input, [...inputShape])\n const feeds: Record<string, unknown> = { [inputName]: tensor }\n\n const results = await sess.run(feeds)\n\n const out: Record<string, Float32Array> = {}\n for (const name of sess.outputNames as string[]) {\n out[name] = results[name].data as Float32Array\n }\n return out\n }\n\n 
async dispose(): Promise<void> {\n // onnxruntime-node sessions don't have explicit close in all versions\n // but we clear the reference\n this.session = null\n }\n}\n","import type { IInferenceEngine, DetectionRuntime, DetectionDevice } from '@camstack/types'\nimport { spawn, type ChildProcess } from 'node:child_process'\n\nexport class PythonInferenceEngine implements IInferenceEngine {\n readonly runtime: DetectionRuntime\n readonly device: DetectionDevice\n private process: ChildProcess | null = null\n private receiveBuffer: Buffer = Buffer.alloc(0)\n private pendingResolve: ((value: Record<string, unknown>) => void) | null = null\n private pendingReject: ((reason: Error) => void) | null = null\n\n constructor(\n private readonly pythonPath: string,\n private readonly scriptPath: string,\n runtime: DetectionRuntime,\n private readonly modelPath: string,\n private readonly extraArgs: readonly string[] = [],\n ) {\n this.runtime = runtime\n // Determine device from runtime\n const runtimeDeviceMap: Readonly<Record<DetectionRuntime, DetectionDevice>> = {\n onnx: 'cpu',\n coreml: 'gpu-mps',\n pytorch: 'cpu',\n openvino: 'cpu',\n tflite: 'cpu',\n }\n this.device = runtimeDeviceMap[runtime]\n }\n\n async initialize(): Promise<void> {\n const args = [this.scriptPath, this.modelPath, ...this.extraArgs]\n this.process = spawn(this.pythonPath, args, {\n stdio: ['pipe', 'pipe', 'pipe'],\n })\n\n if (!this.process.stdout || !this.process.stdin) {\n throw new Error('PythonInferenceEngine: failed to create process pipes')\n }\n\n this.process.stderr?.on('data', (chunk: Buffer) => {\n // Log stderr from python process for debugging\n process.stderr.write(`[python-engine] ${chunk.toString()}`)\n })\n\n this.process.on('error', (err) => {\n this.pendingReject?.(err)\n this.pendingReject = null\n this.pendingResolve = null\n })\n\n this.process.on('exit', (code) => {\n if (code !== 0) {\n const err = new Error(`PythonInferenceEngine: process exited with code ${code}`)\n 
this.pendingReject?.(err)\n this.pendingReject = null\n this.pendingResolve = null\n }\n })\n\n this.process.stdout.on('data', (chunk: Buffer) => {\n this.receiveBuffer = Buffer.concat([this.receiveBuffer, chunk])\n this._tryReceive()\n })\n\n // Give the process a moment to start up and load the model\n await new Promise<void>((resolve, reject) => {\n const timeout = setTimeout(() => resolve(), 2000)\n this.process?.on('error', (err) => {\n clearTimeout(timeout)\n reject(err)\n })\n this.process?.on('exit', (code) => {\n clearTimeout(timeout)\n if (code !== 0) {\n reject(new Error(`PythonInferenceEngine: process exited early with code ${code}`))\n }\n })\n })\n }\n\n private _tryReceive(): void {\n // Binary IPC: [4 bytes LE uint32 length][JSON bytes]\n if (this.receiveBuffer.length < 4) return\n\n const length = this.receiveBuffer.readUInt32LE(0)\n if (this.receiveBuffer.length < 4 + length) return\n\n const jsonBytes = this.receiveBuffer.subarray(4, 4 + length)\n this.receiveBuffer = this.receiveBuffer.subarray(4 + length)\n\n const resolve = this.pendingResolve\n const reject = this.pendingReject\n this.pendingResolve = null\n this.pendingReject = null\n\n if (!resolve) return\n\n try {\n const parsed = JSON.parse(jsonBytes.toString('utf8')) as Record<string, unknown>\n resolve(parsed)\n } catch (err) {\n reject?.(err instanceof Error ? 
err : new Error(String(err)))\n }\n }\n\n /** Send JPEG buffer, receive JSON detection results */\n async runJpeg(jpeg: Buffer): Promise<Record<string, unknown>> {\n if (!this.process?.stdin) {\n throw new Error('PythonInferenceEngine: process not initialized')\n }\n\n return new Promise<Record<string, unknown>>((resolve, reject) => {\n this.pendingResolve = resolve\n this.pendingReject = reject\n\n // Binary IPC: [4 bytes LE uint32 length][JPEG bytes]\n const lengthBuf = Buffer.allocUnsafe(4)\n lengthBuf.writeUInt32LE(jpeg.length, 0)\n this.process!.stdin!.write(Buffer.concat([lengthBuf, jpeg]))\n })\n }\n\n /** IInferenceEngine.run — wraps runJpeg for compatibility */\n async run(_input: Float32Array, _inputShape: readonly number[]): Promise<Float32Array> {\n throw new Error(\n 'PythonInferenceEngine: use runJpeg() directly — this engine operates on JPEG input',\n )\n }\n\n /** IInferenceEngine.runMultiOutput — not supported by Python engine (operates on JPEG input) */\n async runMultiOutput(\n _input: Float32Array,\n _inputShape: readonly number[],\n ): Promise<Record<string, Float32Array>> {\n throw new Error(\n 'PythonInferenceEngine: runMultiOutput() is not supported — this engine operates on JPEG input',\n )\n }\n\n async dispose(): Promise<void> {\n if (this.process) {\n this.process.stdin?.end()\n this.process.kill('SIGTERM')\n this.process = null\n }\n }\n}\n","// TODO: Wire PythonInferenceEngine for PyTorch/OpenVINO/TFLite runtimes\n// Currently falls back to ONNX CPU when non-ONNX runtime is requested.\n// See: packages/addon-vision/python/ for stub implementations.\n//\n// WHY THIS FILE USES RAW FILESYSTEM PATHS (modelsDir: string):\n//\n// Model files must be loaded via absolute filesystem paths because inference\n// engines (ONNX Runtime, CoreML, etc.) require direct file access — they do\n// not accept Buffer or stream inputs. This is fundamentally different from\n// user data (recordings, media) where IAddonFileStorage abstraction makes\n// sense. 
Models are closer to binaries or compiled artifacts that must live\n// on the local filesystem at runtime.\n//\n// IAddonFileStorage.readFile() returns a Buffer, but onnxruntime-node's\n// InferenceSession.create() only accepts a file path string or a Uint8Array\n// loaded into memory. For large models (hundreds of MB), loading into a\n// Uint8Array is impractical. Therefore, modelsDir stays as a raw string path\n// and is intentionally NOT replaced with IAddonFileStorage here.\n\nimport type {\n IInferenceEngine,\n DetectionRuntime,\n ModelCatalogEntry,\n ModelFormat,\n} from '@camstack/types'\nimport * as fs from 'node:fs'\nimport * as path from 'node:path'\nimport { NodeInferenceEngine } from './node-engine.js'\nimport { PythonInferenceEngine } from './python-engine.js'\n\nexport interface EngineResolverOptions {\n readonly runtime: DetectionRuntime | 'auto'\n readonly backend: string\n readonly modelEntry: ModelCatalogEntry\n readonly modelsDir: string\n readonly pythonPath?: string\n readonly downloadModel?: (url: string, destDir: string) => Promise<string>\n}\n\nexport interface ResolvedEngine {\n readonly engine: IInferenceEngine\n readonly format: ModelFormat\n readonly modelPath: string\n}\n\n/** Priority order for auto-selection of ONNX backends */\nconst AUTO_BACKEND_PRIORITY = ['coreml', 'cuda', 'tensorrt', 'cpu'] as const\n\n/** Map backend names to the model format they require */\nconst BACKEND_TO_FORMAT: Readonly<Record<string, ModelFormat>> = {\n cpu: 'onnx',\n coreml: 'coreml',\n cuda: 'onnx',\n tensorrt: 'onnx',\n} as const\n\n/** Map DetectionRuntime to ModelFormat */\nconst RUNTIME_TO_FORMAT: Readonly<Partial<Record<DetectionRuntime, ModelFormat>>> = {\n onnx: 'onnx',\n coreml: 'coreml',\n openvino: 'openvino',\n tflite: 'tflite',\n pytorch: 'pt',\n} as const\n\nfunction modelFilePath(modelsDir: string, modelEntry: ModelCatalogEntry, format: ModelFormat): string {\n const formatEntry = modelEntry.formats[format]\n if (!formatEntry) {\n throw 
new Error(`Model ${modelEntry.id} has no ${format} format`)\n }\n // Derive filename from URL\n const urlParts = formatEntry.url.split('/')\n const filename = urlParts[urlParts.length - 1] ?? `${modelEntry.id}.${format}`\n return path.join(modelsDir, filename)\n}\n\nfunction modelExists(filePath: string): boolean {\n try {\n return fs.existsSync(filePath)\n } catch {\n return false\n }\n}\n\nexport async function resolveEngine(options: EngineResolverOptions): Promise<ResolvedEngine> {\n const { runtime, backend, modelEntry, modelsDir, downloadModel } = options\n\n let selectedFormat: ModelFormat\n let selectedBackend: string\n\n if (runtime === 'auto') {\n // Probe available ONNX backends and pick best\n const available = await probeOnnxBackends()\n\n // Pick first priority backend that has a corresponding model format available\n let chosen: { backend: string; format: ModelFormat } | null = null\n\n for (const b of AUTO_BACKEND_PRIORITY) {\n if (!available.includes(b)) continue\n const fmt = BACKEND_TO_FORMAT[b]\n if (!fmt) continue\n if (!modelEntry.formats[fmt]) continue\n chosen = { backend: b, format: fmt }\n break\n }\n\n if (!chosen) {\n throw new Error(\n `resolveEngine: no compatible backend found for model ${modelEntry.id}. Available backends: ${available.join(', ')}`,\n )\n }\n\n selectedFormat = chosen.format\n selectedBackend = chosen.backend\n } else {\n // Explicit runtime requested\n const fmt = RUNTIME_TO_FORMAT[runtime]\n if (!fmt) {\n throw new Error(`resolveEngine: unsupported runtime \"${runtime}\"`)\n }\n if (!modelEntry.formats[fmt]) {\n throw new Error(\n `resolveEngine: model ${modelEntry.id} has no ${fmt} format for runtime ${runtime}`,\n )\n }\n selectedFormat = fmt\n // For onnx runtime, use the provided backend; otherwise use the runtime name\n selectedBackend = runtime === 'onnx' ? 
(backend || 'cpu') : runtime\n }\n\n // Resolve model path\n let modelPath = modelFilePath(modelsDir, modelEntry, selectedFormat)\n\n if (!modelExists(modelPath)) {\n if (downloadModel) {\n const formatEntry = modelEntry.formats[selectedFormat]!\n modelPath = await downloadModel(formatEntry.url, modelsDir)\n } else {\n throw new Error(\n `resolveEngine: model file not found at ${modelPath} and no downloadModel function provided`,\n )\n }\n }\n\n // Only ONNX runtime is handled by NodeInferenceEngine currently\n if (selectedFormat === 'onnx' || selectedFormat === 'coreml') {\n const engine = new NodeInferenceEngine(modelPath, selectedBackend)\n await engine.initialize()\n return { engine, format: selectedFormat, modelPath }\n }\n\n // For non-ONNX/CoreML formats, try PythonInferenceEngine when a python binary is available\n const { pythonPath } = options\n const PYTHON_SCRIPT_MAP: Readonly<Record<string, string>> = {\n coreml: 'coreml_inference.py',\n pytorch: 'pytorch_inference.py',\n openvino: 'openvino_inference.py',\n } as const\n\n const effectiveRuntime = runtime === 'auto' ? 
selectedBackend : runtime\n const scriptName = PYTHON_SCRIPT_MAP[effectiveRuntime]\n\n if (scriptName && pythonPath) {\n const scriptPath = path.join(__dirname, '../../python', scriptName)\n const inputSize = Math.max(modelEntry.inputSize.width, modelEntry.inputSize.height)\n const engine = new PythonInferenceEngine(pythonPath, scriptPath, effectiveRuntime as DetectionRuntime, modelPath, [\n `--input-size=${inputSize}`,\n `--confidence=0.25`,\n ])\n await engine.initialize()\n return { engine, format: selectedFormat, modelPath }\n }\n\n // Final fallback: use ONNX CPU if available\n const fallbackPath = modelFilePath(modelsDir, modelEntry, 'onnx')\n if (modelEntry.formats['onnx'] && modelExists(fallbackPath)) {\n const engine = new NodeInferenceEngine(fallbackPath, 'cpu')\n await engine.initialize()\n return { engine, format: 'onnx', modelPath: fallbackPath }\n }\n\n throw new Error(\n `resolveEngine: format ${selectedFormat} is not yet supported by NodeInferenceEngine, ` +\n `no Python runtime is available, and no ONNX fallback exists`,\n )\n}\n\n/** Probe which ONNX execution providers are available on this system */\nexport async function probeOnnxBackends(): Promise<string[]> {\n const available: string[] = ['cpu'] // CPU is always available\n\n try {\n const ort = await import('onnxruntime-node')\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n const providers: string[] = (ort as any).env?.webgl?.disabled !== undefined\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n ? ((ort as any).InferenceSession?.getAvailableProviders?.() ?? 
[])\n : []\n\n for (const p of providers) {\n const normalized = p.toLowerCase().replace('executionprovider', '')\n if (normalized === 'coreml') available.push('coreml')\n else if (normalized === 'cuda') available.push('cuda')\n else if (normalized === 'tensorrt') available.push('tensorrt')\n }\n } catch {\n // onnxruntime-node may not be installed; CPU only\n }\n\n // Platform-specific hints when getAvailableProviders isn't exposed\n if (process.platform === 'darwin' && !available.includes('coreml')) {\n available.push('coreml')\n }\n\n return [...new Set(available)]\n}\n"],"mappings":";AACA,YAAY,UAAU;AAEtB,IAAM,sBAAwD;AAAA,EAC5D,KAAK;AAAA,EACL,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,UAAU;AAAA,EACV,KAAK;AACP;AAEA,IAAM,oBAA+D;AAAA,EACnE,KAAK;AAAA,EACL,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,UAAU;AACZ;AAEO,IAAM,sBAAN,MAAsD;AAAA,EAK3D,YACmB,WACA,SACjB;AAFiB;AACA;AAEjB,SAAK,SAAU,kBAAkB,OAAO,KAAK;AAAA,EAC/C;AAAA,EATS,UAA4B;AAAA,EAC5B;AAAA,EACD,UAAmB;AAAA,EAS3B,MAAM,aAA4B;AAChC,UAAM,MAAM,MAAM,OAAO,kBAAkB;AAC3C,UAAM,WAAW,oBAAoB,KAAK,OAAO,KAAK;AAGtD,UAAM,eAAoB,gBAAW,KAAK,SAAS,IAC/C,KAAK,YACA,aAAQ,QAAQ,IAAI,GAAG,KAAK,SAAS;AAE9C,UAAM,iBAA0C;AAAA,MAC9C,oBAAoB,CAAC,QAAQ;AAAA,IAC/B;AAGA,SAAK,UAAU,MAAO,IAAY,iBAAiB,OAAO,cAAc,cAAc;AAAA,EACxF;AAAA,EAEA,MAAM,IAAI,OAAqB,YAAsD;AACnF,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,qEAAgE;AAAA,IAClF;AAEA,UAAM,MAAM,MAAM,OAAO,kBAAkB;AAE3C,UAAM,OAAO,KAAK;AAGlB,UAAM,YAAoB,KAAK,WAAW,CAAC;AAE3C,UAAM,SAAS,IAAK,IAAY,OAAO,WAAW,OAAO,CAAC,GAAG,UAAU,CAAC;AACxE,UAAM,QAAiC,EAAE,CAAC,SAAS,GAAG,OAAO;AAE7D,UAAM,UAAU,MAAM,KAAK,IAAI,KAAK;AACpC,UAAM,aAAqB,KAAK,YAAY,CAAC;AAC7C,UAAM,eAAe,QAAQ,UAAU;AAEvC,WAAO,aAAa;AAAA,EACtB;AAAA,EAEA,MAAM,eACJ,OACA,YACuC;AACvC,QAAI,CAAC,KAAK,SAAS;AACjB,YAAM,IAAI,MAAM,qEAAgE;AAAA,IAClF;AAEA,UAAM,MAAM,MAAM,OAAO,kBAAkB;AAE3C,UAAM,OAAO,KAAK;AAElB,UAAM,YAAoB,KAAK,WAAW,CAAC;AAE3C,UAAM,SAAS,IAAK,IAAY,OAAO,WAAW,OAAO,CAAC,GAAG,UAAU,CAAC;AACxE,UAAM,QAAiC,EAAE,CAAC,SAAS,GAAG,OAAO;AAE7D,UAAM,UAAU,MAAM,KAAK,IAAI,KAAK;AAEpC,UAAM,MAAoC,CAAC;AAC3C,eAAW,QAAQ,KAAK,aAAy
B;AAC/C,UAAI,IAAI,IAAI,QAAQ,IAAI,EAAE;AAAA,IAC5B;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAyB;AAG7B,SAAK,UAAU;AAAA,EACjB;AACF;;;ACnGA,SAAS,aAAgC;AAElC,IAAM,wBAAN,MAAwD;AAAA,EAQ7D,YACmB,YACA,YACjB,SACiB,WACA,YAA+B,CAAC,GACjD;AALiB;AACA;AAEA;AACA;AAEjB,SAAK,UAAU;AAEf,UAAM,mBAAwE;AAAA,MAC5E,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA,IACV;AACA,SAAK,SAAS,iBAAiB,OAAO;AAAA,EACxC;AAAA,EAxBS;AAAA,EACA;AAAA,EACD,UAA+B;AAAA,EAC/B,gBAAwB,OAAO,MAAM,CAAC;AAAA,EACtC,iBAAoE;AAAA,EACpE,gBAAkD;AAAA,EAqB1D,MAAM,aAA4B;AAChC,UAAM,OAAO,CAAC,KAAK,YAAY,KAAK,WAAW,GAAG,KAAK,SAAS;AAChE,SAAK,UAAU,MAAM,KAAK,YAAY,MAAM;AAAA,MAC1C,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,IAChC,CAAC;AAED,QAAI,CAAC,KAAK,QAAQ,UAAU,CAAC,KAAK,QAAQ,OAAO;AAC/C,YAAM,IAAI,MAAM,uDAAuD;AAAA,IACzE;AAEA,SAAK,QAAQ,QAAQ,GAAG,QAAQ,CAAC,UAAkB;AAEjD,cAAQ,OAAO,MAAM,mBAAmB,MAAM,SAAS,CAAC,EAAE;AAAA,IAC5D,CAAC;AAED,SAAK,QAAQ,GAAG,SAAS,CAAC,QAAQ;AAChC,WAAK,gBAAgB,GAAG;AACxB,WAAK,gBAAgB;AACrB,WAAK,iBAAiB;AAAA,IACxB,CAAC;AAED,SAAK,QAAQ,GAAG,QAAQ,CAAC,SAAS;AAChC,UAAI,SAAS,GAAG;AACd,cAAM,MAAM,IAAI,MAAM,mDAAmD,IAAI,EAAE;AAC/E,aAAK,gBAAgB,GAAG;AACxB,aAAK,gBAAgB;AACrB,aAAK,iBAAiB;AAAA,MACxB;AAAA,IACF,CAAC;AAED,SAAK,QAAQ,OAAO,GAAG,QAAQ,CAAC,UAAkB;AAChD,WAAK,gBAAgB,OAAO,OAAO,CAAC,KAAK,eAAe,KAAK,CAAC;AAC9D,WAAK,YAAY;AAAA,IACnB,CAAC;AAGD,UAAM,IAAI,QAAc,CAACA,UAAS,WAAW;AAC3C,YAAM,UAAU,WAAW,MAAMA,SAAQ,GAAG,GAAI;AAChD,WAAK,SAAS,GAAG,SAAS,CAAC,QAAQ;AACjC,qBAAa,OAAO;AACpB,eAAO,GAAG;AAAA,MACZ,CAAC;AACD,WAAK,SAAS,GAAG,QAAQ,CAAC,SAAS;AACjC,qBAAa,OAAO;AACpB,YAAI,SAAS,GAAG;AACd,iBAAO,IAAI,MAAM,yDAAyD,IAAI,EAAE,CAAC;AAAA,QACnF;AAAA,MACF,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA,EAEQ,cAAoB;AAE1B,QAAI,KAAK,cAAc,SAAS,EAAG;AAEnC,UAAM,SAAS,KAAK,cAAc,aAAa,CAAC;AAChD,QAAI,KAAK,cAAc,SAAS,IAAI,OAAQ;AAE5C,UAAM,YAAY,KAAK,cAAc,SAAS,GAAG,IAAI,MAAM;AAC3D,SAAK,gBAAgB,KAAK,cAAc,SAAS,IAAI,MAAM;AAE3D,UAAMA,WAAU,KAAK;AACrB,UAAM,SAAS,KAAK;AACpB,SAAK,iBAAiB;AACtB,SAAK,gBAAgB;AAErB,QAAI,CAACA,SAAS;AAEd,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,UAAU,SAAS,MAAM,CAAC;AACpD,MAAAA,SAAQ,MAAM;A
AAA,IAChB,SAAS,KAAK;AACZ,eAAS,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,IAC9D;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,QAAQ,MAAgD;AAC5D,QAAI,CAAC,KAAK,SAAS,OAAO;AACxB,YAAM,IAAI,MAAM,gDAAgD;AAAA,IAClE;AAEA,WAAO,IAAI,QAAiC,CAACA,UAAS,WAAW;AAC/D,WAAK,iBAAiBA;AACtB,WAAK,gBAAgB;AAGrB,YAAM,YAAY,OAAO,YAAY,CAAC;AACtC,gBAAU,cAAc,KAAK,QAAQ,CAAC;AACtC,WAAK,QAAS,MAAO,MAAM,OAAO,OAAO,CAAC,WAAW,IAAI,CAAC,CAAC;AAAA,IAC7D,CAAC;AAAA,EACH;AAAA;AAAA,EAGA,MAAM,IAAI,QAAsB,aAAuD;AACrF,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAGA,MAAM,eACJ,QACA,aACuC;AACvC,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,UAAyB;AAC7B,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,OAAO,IAAI;AACxB,WAAK,QAAQ,KAAK,SAAS;AAC3B,WAAK,UAAU;AAAA,IACjB;AAAA,EACF;AACF;;;AC1HA,YAAY,QAAQ;AACpB,YAAYC,WAAU;AAoBtB,IAAM,wBAAwB,CAAC,UAAU,QAAQ,YAAY,KAAK;AAGlE,IAAM,oBAA2D;AAAA,EAC/D,KAAK;AAAA,EACL,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,UAAU;AACZ;AAGA,IAAM,oBAA8E;AAAA,EAClF,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,QAAQ;AAAA,EACR,SAAS;AACX;AAEA,SAAS,cAAc,WAAmB,YAA+B,QAA6B;AACpG,QAAM,cAAc,WAAW,QAAQ,MAAM;AAC7C,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI,MAAM,SAAS,WAAW,EAAE,WAAW,MAAM,SAAS;AAAA,EAClE;AAEA,QAAM,WAAW,YAAY,IAAI,MAAM,GAAG;AAC1C,QAAM,WAAW,SAAS,SAAS,SAAS,CAAC,KAAK,GAAG,WAAW,EAAE,IAAI,MAAM;AAC5E,SAAY,WAAK,WAAW,QAAQ;AACtC;AAEA,SAAS,YAAY,UAA2B;AAC9C,MAAI;AACF,WAAU,cAAW,QAAQ;AAAA,EAC/B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAsB,cAAc,SAAyD;AAC3F,QAAM,EAAE,SAAS,SAAS,YAAY,WAAW,cAAc,IAAI;AAEnE,MAAI;AACJ,MAAI;AAEJ,MAAI,YAAY,QAAQ;AAEtB,UAAM,YAAY,MAAM,kBAAkB;AAG1C,QAAI,SAA0D;AAE9D,eAAW,KAAK,uBAAuB;AACrC,UAAI,CAAC,UAAU,SAAS,CAAC,EAAG;AAC5B,YAAM,MAAM,kBAAkB,CAAC;AAC/B,UAAI,CAAC,IAAK;AACV,UAAI,CAAC,WAAW,QAAQ,GAAG,EAAG;AAC9B,eAAS,EAAE,SAAS,GAAG,QAAQ,IAAI;AACnC;AAAA,IACF;AAEA,QAAI,CAAC,QAAQ;AACX,YAAM,IAAI;AAAA,QACR,wDAAwD,WAAW,EAAE,yBAAyB,UAAU,KAAK,IAAI,CAAC;AAAA,MACpH;AAAA,IACF;AAEA,qBAAiB,OAAO;AACxB,sBAAkB,OAAO;AAAA,EAC3B,OAAO;AAEL,UAAM,MAAM,kBAAkB,OAAO;AACrC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,uCAAuC,OAAO,GAAG;AAAA,IACnE;AACA,QAAI,CAAC,WAAW,QAAQ,GAAG,GAAG;AAC
5B,YAAM,IAAI;AAAA,QACR,wBAAwB,WAAW,EAAE,WAAW,GAAG,uBAAuB,OAAO;AAAA,MACnF;AAAA,IACF;AACA,qBAAiB;AAEjB,sBAAkB,YAAY,SAAU,WAAW,QAAS;AAAA,EAC9D;AAGA,MAAI,YAAY,cAAc,WAAW,YAAY,cAAc;AAEnE,MAAI,CAAC,YAAY,SAAS,GAAG;AAC3B,QAAI,eAAe;AACjB,YAAM,cAAc,WAAW,QAAQ,cAAc;AACrD,kBAAY,MAAM,cAAc,YAAY,KAAK,SAAS;AAAA,IAC5D,OAAO;AACL,YAAM,IAAI;AAAA,QACR,0CAA0C,SAAS;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAGA,MAAI,mBAAmB,UAAU,mBAAmB,UAAU;AAC5D,UAAM,SAAS,IAAI,oBAAoB,WAAW,eAAe;AACjE,UAAM,OAAO,WAAW;AACxB,WAAO,EAAE,QAAQ,QAAQ,gBAAgB,UAAU;AAAA,EACrD;AAGA,QAAM,EAAE,WAAW,IAAI;AACvB,QAAM,oBAAsD;AAAA,IAC1D,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,EACZ;AAEA,QAAM,mBAAmB,YAAY,SAAS,kBAAkB;AAChE,QAAM,aAAa,kBAAkB,gBAAgB;AAErD,MAAI,cAAc,YAAY;AAC5B,UAAM,aAAkB,WAAK,WAAW,gBAAgB,UAAU;AAClE,UAAM,YAAY,KAAK,IAAI,WAAW,UAAU,OAAO,WAAW,UAAU,MAAM;AAClF,UAAM,SAAS,IAAI,sBAAsB,YAAY,YAAY,kBAAsC,WAAW;AAAA,MAChH,gBAAgB,SAAS;AAAA,MACzB;AAAA,IACF,CAAC;AACD,UAAM,OAAO,WAAW;AACxB,WAAO,EAAE,QAAQ,QAAQ,gBAAgB,UAAU;AAAA,EACrD;AAGA,QAAM,eAAe,cAAc,WAAW,YAAY,MAAM;AAChE,MAAI,WAAW,QAAQ,MAAM,KAAK,YAAY,YAAY,GAAG;AAC3D,UAAM,SAAS,IAAI,oBAAoB,cAAc,KAAK;AAC1D,UAAM,OAAO,WAAW;AACxB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,WAAW,aAAa;AAAA,EAC3D;AAEA,QAAM,IAAI;AAAA,IACR,yBAAyB,cAAc;AAAA,EAEzC;AACF;AAGA,eAAsB,oBAAuC;AAC3D,QAAM,YAAsB,CAAC,KAAK;AAElC,MAAI;AACF,UAAM,MAAM,MAAM,OAAO,kBAAkB;AAE3C,UAAM,YAAuB,IAAY,KAAK,OAAO,aAAa,SAE5D,IAAY,kBAAkB,wBAAwB,KAAK,CAAC,IAC9D,CAAC;AAEL,eAAW,KAAK,WAAW;AACzB,YAAM,aAAa,EAAE,YAAY,EAAE,QAAQ,qBAAqB,EAAE;AAClE,UAAI,eAAe,SAAU,WAAU,KAAK,QAAQ;AAAA,eAC3C,eAAe,OAAQ,WAAU,KAAK,MAAM;AAAA,eAC5C,eAAe,WAAY,WAAU,KAAK,UAAU;AAAA,IAC/D;AAAA,EACF,QAAQ;AAAA,EAER;AAGA,MAAI,QAAQ,aAAa,YAAY,CAAC,UAAU,SAAS,QAAQ,GAAG;AAClE,cAAU,KAAK,QAAQ;AAAA,EACzB;AAEA,SAAO,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC;AAC/B;","names":["resolve","path"]}
@@ -0,0 +1,305 @@
1
+ import {
2
+ nms
3
+ } from "./chunk-KUO2BVFY.mjs";
4
+ import {
5
+ cropRegion,
6
+ letterbox
7
+ } from "./chunk-6OR5TE7A.mjs";
8
+ import {
9
+ resolveEngine
10
+ } from "./chunk-LPI42WL6.mjs";
11
+
12
+ // src/catalogs/face-detection-models.ts
13
+ import { hfModelUrl } from "@camstack/types";
14
+ var HF_REPO = "camstack/camstack-models";
15
+ var FACE_LABELS = [
16
+ { id: "face", name: "Face" }
17
+ ];
18
+ var FACE_DETECTION_MODELS = [
19
+ {
20
+ id: "scrfd-500m",
21
+ name: "SCRFD 500M",
22
+ description: "SCRFD 500M \u2014 ultra-lightweight face detector",
23
+ inputSize: { width: 640, height: 640 },
24
+ labels: FACE_LABELS,
25
+ formats: {
26
+ onnx: {
27
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/onnx/camstack-scrfd-500m.onnx"),
28
+ sizeMB: 2.2
29
+ },
30
+ coreml: {
31
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/coreml/camstack-scrfd-500m.mlpackage"),
32
+ sizeMB: 1.2
33
+ },
34
+ openvino: {
35
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/openvino/camstack-scrfd-500m.xml"),
36
+ sizeMB: 1.3
37
+ }
38
+ }
39
+ },
40
+ {
41
+ id: "scrfd-2.5g",
42
+ name: "SCRFD 2.5G",
43
+ description: "SCRFD 2.5G \u2014 balanced face detection model",
44
+ inputSize: { width: 640, height: 640 },
45
+ labels: FACE_LABELS,
46
+ formats: {
47
+ onnx: {
48
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/onnx/camstack-scrfd-2.5g.onnx"),
49
+ sizeMB: 3.1
50
+ },
51
+ coreml: {
52
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/coreml/camstack-scrfd-2.5g.mlpackage"),
53
+ sizeMB: 1.7
54
+ },
55
+ openvino: {
56
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/openvino/camstack-scrfd-2.5g.xml"),
57
+ sizeMB: 1.8
58
+ }
59
+ }
60
+ },
61
+ {
62
+ id: "scrfd-10g",
63
+ name: "SCRFD 10G",
64
+ description: "SCRFD 10G \u2014 high-accuracy face detector",
65
+ inputSize: { width: 640, height: 640 },
66
+ labels: FACE_LABELS,
67
+ formats: {
68
+ onnx: {
69
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/onnx/camstack-scrfd-10g.onnx"),
70
+ sizeMB: 16
71
+ },
72
+ coreml: {
73
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/coreml/camstack-scrfd-10g.mlpackage"),
74
+ sizeMB: 8.2
75
+ },
76
+ openvino: {
77
+ url: hfModelUrl(HF_REPO, "faceDetection/scrfd/openvino/camstack-scrfd-10g.xml"),
78
+ sizeMB: 8.3
79
+ }
80
+ }
81
+ }
82
+ ];
83
+
84
// src/shared/postprocess/scrfd.ts

// SCRFD emits one (score, bbox, kps) tensor triple per feature-map stride.
var STRIDES = [8, 16, 32];
// The detector predicts two boxes per feature-map cell.
var NUM_ANCHORS_PER_STRIDE = 2;

/**
 * Build the flat list of anchor centers for one feature-map stride.
 *
 * @param {number} stride - Feature-map stride in input pixels (8, 16 or 32).
 * @param {number} inputSize - Side length of the (square) model input.
 * @returns {Array<{cx: number, cy: number}>} Anchor centers in input-pixel
 *   coordinates, row-major, duplicated NUM_ANCHORS_PER_STRIDE times per cell
 *   so that anchor index i lines up with row i of the output tensors.
 */
function generateAnchors(stride, inputSize) {
  const cells = Math.ceil(inputSize / stride);
  const centers = [];
  for (let row = 0; row < cells; row++) {
    const cy = (row + 0.5) * stride;
    for (let col = 0; col < cells; col++) {
      const cx = (col + 0.5) * stride;
      // One entry per predicted box in this cell.
      for (let dup = 0; dup < NUM_ANCHORS_PER_STRIDE; dup++) {
        centers.push({ cx, cy });
      }
    }
  }
  return centers;
}

/**
 * Decode raw SCRFD output tensors into face detections, mapped back to the
 * original (pre-letterbox) image space, with NMS applied.
 *
 * @param {Object<string, Float32Array>} outputs - Named output tensors; per
 *   stride S the decoder looks for keys containing "score_S"/"_S_score",
 *   "bbox_S"/"_S_bbox" and optionally "kps_S"/"_S_kps". Strides with no
 *   matching score+bbox pair are skipped.
 * @param {number} confidence - Minimum score to keep a candidate.
 * @param {number} inputSize - Side length of the square model input.
 * @param {number} originalWidth - Width of the pre-letterbox crop.
 * @param {number} originalHeight - Height of the pre-letterbox crop.
 * @returns {Array} detections: { class: "face", originalClass, score, bbox,
 *   landmarks? } — landmarks only present when a kps tensor was found.
 */
function scrfdPostprocess(outputs, confidence, inputSize, originalWidth, originalHeight) {
  // Letterboxed crop is square; each axis gets its own back-scale factor.
  const scaleX = originalWidth / inputSize;
  const scaleY = originalHeight / inputSize;
  const tensorNames = Object.keys(outputs);
  const findTensor = (tag, stride) =>
    tensorNames.find((name) => name.includes(`${tag}_${stride}`) || name.includes(`_${stride}_${tag}`));
  const candidates = [];
  for (const stride of STRIDES) {
    const scoreName = findTensor("score", stride);
    const bboxName = findTensor("bbox", stride);
    const kpsName = findTensor("kps", stride);
    if (!scoreName || !bboxName) continue;
    const scores = outputs[scoreName];
    const deltas = outputs[bboxName];
    const kps = kpsName ? outputs[kpsName] : void 0;
    const anchors = generateAnchors(stride, inputSize);
    for (let i = 0; i < anchors.length; i++) {
      const score = scores[i];
      if (score < confidence) continue;
      const { cx, cy } = anchors[i];
      // Box deltas are (left, top, right, bottom) distances from the anchor
      // center, expressed in stride units.
      const left = cx - deltas[i * 4] * stride;
      const top = cy - deltas[i * 4 + 1] * stride;
      const right = cx + deltas[i * 4 + 2] * stride;
      const bottom = cy + deltas[i * 4 + 3] * stride;
      const bbox = {
        x: left * scaleX,
        y: top * scaleY,
        w: (right - left) * scaleX,
        h: (bottom - top) * scaleY
      };
      let landmarks;
      if (kps) {
        // 5 keypoints per face, packed as 10 floats per anchor row.
        landmarks = [];
        for (let p = 0; p < 5; p++) {
          landmarks.push({
            x: (cx + kps[i * 10 + p * 2] * stride) * scaleX,
            y: (cy + kps[i * 10 + p * 2 + 1] * stride) * scaleY
          });
        }
      }
      candidates.push({ bbox, score, landmarks });
    }
  }
  if (candidates.length === 0) return [];
  // Standard NMS at IoU 0.45, then shape the survivors as detections.
  return nms(candidates, 0.45).map((idx) => {
    const { bbox, score, landmarks } = candidates[idx];
    return {
      class: "face",
      originalClass: "face",
      score,
      bbox,
      ...landmarks ? { landmarks } : {}
    };
  });
}
157
+
158
// src/addons/face-detection/index.ts

// The face detector emits a single label.
var FACE_LABEL = { id: "face", name: "Face" };
var FACE_LABELS2 = [FACE_LABEL];
// Empty mapping with preserveOriginal: detected classes pass through unchanged.
var FACE_CLASS_MAP = { mapping: {}, preserveOriginal: true };
162
/**
 * Face detection addon (SCRFD family).
 *
 * Runs in the "cropper" slot: takes "person" detections from an upstream
 * detector and emits "face" crops (with optional 5-point landmarks) for
 * downstream pipeline steps.
 */
var FaceDetectionAddon = class {
  id = "face-detection";
  slot = "cropper";
  inputClasses = ["person"];
  outputClasses = ["face"];
  slotPriority = 0;
  manifest = {
    id: "face-detection",
    name: "Face Detection",
    version: "0.1.0",
    description: "SCRFD-based face detector \u2014 crops face regions from person detections",
    packageName: "@camstack/addon-vision",
    slot: "cropper",
    inputClasses: ["person"],
    outputClasses: ["face"],
    supportsCustomModels: false,
    mayRequirePython: false,
    defaultConfig: {
      modelId: "scrfd-500m",
      runtime: "auto",
      backend: "cpu",
      confidence: 0.5
    }
  };
  engine;
  modelEntry;
  confidence = 0.5;
  /**
   * Resolve the configured catalog model and spin up an inference engine.
   * @throws {Error} when the configured modelId is not in the catalog.
   */
  async initialize(ctx) {
    const config = ctx.addonConfig;
    const modelId = config["modelId"] ?? "scrfd-500m";
    this.confidence = config["confidence"] ?? 0.5;
    const catalogEntry = FACE_DETECTION_MODELS.find((m) => m.id === modelId);
    if (!catalogEntry) {
      throw new Error(`FaceDetectionAddon: unknown modelId "${modelId}"`);
    }
    this.modelEntry = catalogEntry;
    const { engine } = await resolveEngine({
      runtime: config["runtime"] ?? "auto",
      backend: config["backend"] ?? "cpu",
      modelEntry: catalogEntry,
      modelsDir: ctx.locationPaths.models
    });
    this.engine = engine;
  }
  /**
   * Detect faces inside one person ROI.
   * Crops the ROI, letterboxes it to the model's square input, runs
   * inference (multi-output when the engine supports it), and decodes the
   * SCRFD tensors back into ROI-space face crops.
   */
  async crop(input) {
    const startedAt = Date.now();
    const { width, height } = this.modelEntry.inputSize;
    const side = Math.max(width, height);
    const region = await cropRegion(input.frame.data, input.roi);
    const boxed = await letterbox(region, side);
    const shape = [1, 3, side, side];
    const engine = this.engine;
    // SCRFD produces several named output tensors; engines without
    // multi-output support fall back to a single wrapped tensor.
    const outputs = typeof engine.runMultiOutput === "function"
      ? await engine.runMultiOutput(boxed.data, shape)
      : { output0: await engine.run(boxed.data, shape) };
    const crops = scrfdPostprocess(
      outputs,
      this.confidence,
      side,
      boxed.originalWidth,
      boxed.originalHeight
    );
    return {
      crops,
      inferenceMs: Date.now() - startedAt,
      modelId: this.modelEntry.id
    };
  }
  /** Release the inference engine, if one was created. */
  async shutdown() {
    await this.engine?.dispose();
  }
  /** UI schema: model picker plus a confidence slider. */
  getConfigSchema() {
    const modelSection = {
      id: "model",
      title: "Model",
      columns: 1,
      fields: [
        {
          key: "modelId",
          label: "Model",
          type: "model-selector",
          catalog: [...FACE_DETECTION_MODELS],
          allowCustom: false,
          allowConversion: false,
          acceptFormats: ["onnx", "coreml", "openvino"],
          requiredMetadata: ["inputSize", "labels", "outputFormat"],
          outputFormatHint: "ssd"
        }
      ]
    };
    const thresholdSection = {
      id: "thresholds",
      title: "Detection Thresholds",
      columns: 1,
      fields: [
        {
          key: "confidence",
          label: "Confidence Threshold",
          type: "slider",
          min: 0.1,
          max: 1,
          step: 0.05,
          default: 0.5
        }
      ]
    };
    return { sections: [modelSection, thresholdSection] };
  }
  getClassMap() {
    return FACE_CLASS_MAP;
  }
  getModelCatalog() {
    return [...FACE_DETECTION_MODELS];
  }
  /** No user-installed models; only the built-in catalog is offered. */
  getAvailableModels() {
    return [];
  }
  getActiveLabels() {
    return FACE_LABELS2;
  }
  /** Report runtime/device; defaults apply until initialize() has run. */
  async probe() {
    return {
      available: true,
      runtime: this.engine?.runtime ?? "onnx",
      device: this.engine?.device ?? "cpu",
      capabilities: ["fp32"]
    };
  }
};
299
+
300
+ export {
301
+ scrfdPostprocess,
302
+ FACE_DETECTION_MODELS,
303
+ FaceDetectionAddon
304
+ };
305
+ //# sourceMappingURL=chunk-MEVASN3P.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/catalogs/face-detection-models.ts","../src/shared/postprocess/scrfd.ts","../src/addons/face-detection/index.ts"],"sourcesContent":["import type { ModelCatalogEntry, LabelDefinition } from '@camstack/types'\nimport { hfModelUrl } from '@camstack/types'\n\nconst HF_REPO = 'camstack/camstack-models'\n\nconst FACE_LABELS: readonly LabelDefinition[] = [\n { id: 'face', name: 'Face' },\n] as const\n\nexport const FACE_DETECTION_MODELS: readonly ModelCatalogEntry[] = [\n {\n id: 'scrfd-500m',\n name: 'SCRFD 500M',\n description: 'SCRFD 500M — ultra-lightweight face detector',\n inputSize: { width: 640, height: 640 },\n labels: FACE_LABELS,\n formats: {\n onnx: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/onnx/camstack-scrfd-500m.onnx'),\n sizeMB: 2.2,\n },\n coreml: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/coreml/camstack-scrfd-500m.mlpackage'),\n sizeMB: 1.2,\n },\n openvino: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/openvino/camstack-scrfd-500m.xml'),\n sizeMB: 1.3,\n },\n },\n },\n {\n id: 'scrfd-2.5g',\n name: 'SCRFD 2.5G',\n description: 'SCRFD 2.5G — balanced face detection model',\n inputSize: { width: 640, height: 640 },\n labels: FACE_LABELS,\n formats: {\n onnx: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/onnx/camstack-scrfd-2.5g.onnx'),\n sizeMB: 3.1,\n },\n coreml: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/coreml/camstack-scrfd-2.5g.mlpackage'),\n sizeMB: 1.7,\n },\n openvino: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/openvino/camstack-scrfd-2.5g.xml'),\n sizeMB: 1.8,\n },\n },\n },\n {\n id: 'scrfd-10g',\n name: 'SCRFD 10G',\n description: 'SCRFD 10G — high-accuracy face detector',\n inputSize: { width: 640, height: 640 },\n labels: FACE_LABELS,\n formats: {\n onnx: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/onnx/camstack-scrfd-10g.onnx'),\n sizeMB: 16,\n },\n coreml: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/coreml/camstack-scrfd-10g.mlpackage'),\n sizeMB: 8.2,\n 
},\n openvino: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/openvino/camstack-scrfd-10g.xml'),\n sizeMB: 8.3,\n },\n },\n },\n] as const\n","import type { SpatialDetection, BoundingBox, Landmark } from '@camstack/types'\nimport { iou, nms } from './yolo.js'\n\nexport interface ScrfdRawOutputs {\n readonly [key: string]: Float32Array\n}\n\nconst STRIDES = [8, 16, 32] as const\nconst NUM_ANCHORS_PER_STRIDE = 2\n\n/** Generate anchor points for a given stride and input size */\nfunction generateAnchors(stride: number, inputSize: number): Array<{ cx: number; cy: number }> {\n const featureSize = Math.ceil(inputSize / stride)\n const anchors: Array<{ cx: number; cy: number }> = []\n for (let y = 0; y < featureSize; y++) {\n for (let x = 0; x < featureSize; x++) {\n for (let k = 0; k < NUM_ANCHORS_PER_STRIDE; k++) {\n anchors.push({\n cx: (x + 0.5) * stride,\n cy: (y + 0.5) * stride,\n })\n }\n }\n }\n return anchors\n}\n\nexport function scrfdPostprocess(\n outputs: ScrfdRawOutputs,\n confidence: number,\n inputSize: number,\n originalWidth: number,\n originalHeight: number,\n): SpatialDetection[] {\n // Scale factor from letterbox (assume square crop, so same scale both axes)\n const scaleX = originalWidth / inputSize\n const scaleY = originalHeight / inputSize\n\n interface Candidate {\n readonly bbox: BoundingBox\n readonly score: number\n readonly landmarks?: readonly Landmark[]\n }\n\n const candidates: Candidate[] = []\n\n for (const stride of STRIDES) {\n const scoreKey = Object.keys(outputs).find((k) => k.includes(`score_${stride}`) || k.includes(`_${stride}_score`))\n const bboxKey = Object.keys(outputs).find((k) => k.includes(`bbox_${stride}`) || k.includes(`_${stride}_bbox`))\n const kpsKey = Object.keys(outputs).find((k) => k.includes(`kps_${stride}`) || k.includes(`_${stride}_kps`))\n\n if (!scoreKey || !bboxKey) continue\n\n const scores = outputs[scoreKey]!\n const bboxes = outputs[bboxKey]!\n const kps = kpsKey ? 
outputs[kpsKey] : undefined\n const anchors = generateAnchors(stride, inputSize)\n\n const n = anchors.length\n\n for (let i = 0; i < n; i++) {\n const score = scores[i]!\n if (score < confidence) continue\n\n const anchor = anchors[i]!\n\n // Bboxes are relative to the anchor center in stride units, scaled by stride\n const x1 = anchor.cx - bboxes[i * 4]! * stride\n const y1 = anchor.cy - bboxes[i * 4 + 1]! * stride\n const x2 = anchor.cx + bboxes[i * 4 + 2]! * stride\n const y2 = anchor.cy + bboxes[i * 4 + 3]! * stride\n\n const bbox: BoundingBox = {\n x: x1 * scaleX,\n y: y1 * scaleY,\n w: (x2 - x1) * scaleX,\n h: (y2 - y1) * scaleY,\n }\n\n let landmarks: readonly Landmark[] | undefined\n if (kps) {\n const pts: Landmark[] = []\n for (let p = 0; p < 5; p++) {\n pts.push({\n x: (anchor.cx + kps[i * 10 + p * 2]! * stride) * scaleX,\n y: (anchor.cy + kps[i * 10 + p * 2 + 1]! * stride) * scaleY,\n })\n }\n landmarks = pts\n }\n\n candidates.push({ bbox, score, landmarks })\n }\n }\n\n if (candidates.length === 0) return []\n\n const keptIndices = nms(candidates, 0.45)\n\n return keptIndices.map((idx) => {\n const { bbox, score, landmarks } = candidates[idx]!\n return {\n class: 'face',\n originalClass: 'face',\n score,\n bbox,\n ...(landmarks ? 
{ landmarks } : {}),\n } satisfies SpatialDetection\n })\n}\n","import type {\n ICropperProvider,\n IDetectionAddon,\n AddonManifest,\n AddonContext,\n CropInput,\n CropperOutput,\n ConfigUISchema,\n ClassMapDefinition,\n ProbeResult,\n ModelCatalogEntry,\n DetectionModel,\n LabelDefinition,\n IInferenceEngine,\n} from '@camstack/types'\nimport { FACE_DETECTION_MODELS } from '../../catalogs/face-detection-models.js'\nimport { cropRegion, letterbox } from '../../shared/image-utils.js'\nimport { scrfdPostprocess } from '../../shared/postprocess/scrfd.js'\nimport { resolveEngine } from '../../shared/engine-resolver.js'\n\nconst FACE_LABEL: LabelDefinition = { id: 'face', name: 'Face' }\nconst FACE_LABELS: readonly LabelDefinition[] = [FACE_LABEL]\nconst FACE_CLASS_MAP: ClassMapDefinition = { mapping: {}, preserveOriginal: true }\n\nexport default class FaceDetectionAddon implements ICropperProvider, IDetectionAddon {\n readonly id = 'face-detection'\n readonly slot = 'cropper' as const\n readonly inputClasses = ['person'] as const\n readonly outputClasses = ['face'] as const\n readonly slotPriority = 0\n readonly manifest: AddonManifest = {\n id: 'face-detection',\n name: 'Face Detection',\n version: '0.1.0',\n description: 'SCRFD-based face detector — crops face regions from person detections',\n packageName: '@camstack/addon-vision',\n slot: 'cropper',\n inputClasses: ['person'],\n outputClasses: ['face'],\n supportsCustomModels: false,\n mayRequirePython: false,\n defaultConfig: {\n modelId: 'scrfd-500m',\n runtime: 'auto',\n backend: 'cpu',\n confidence: 0.5,\n },\n }\n\n private engine!: IInferenceEngine\n private modelEntry!: ModelCatalogEntry\n private confidence = 0.5\n\n async initialize(ctx: AddonContext): Promise<void> {\n const cfg = ctx.addonConfig\n const modelId = (cfg['modelId'] as string | undefined) ?? 'scrfd-500m'\n const runtime = (cfg['runtime'] as string | undefined) ?? 'auto'\n const backend = (cfg['backend'] as string | undefined) ?? 
'cpu'\n this.confidence = (cfg['confidence'] as number | undefined) ?? 0.5\n\n const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId)\n if (!entry) {\n throw new Error(`FaceDetectionAddon: unknown modelId \"${modelId}\"`)\n }\n this.modelEntry = entry\n\n const resolved = await resolveEngine({\n runtime: runtime as 'auto',\n backend,\n modelEntry: entry,\n modelsDir: ctx.locationPaths.models,\n })\n this.engine = resolved.engine\n }\n\n async crop(input: CropInput): Promise<CropperOutput> {\n const start = Date.now()\n const { width: inputW, height: inputH } = this.modelEntry.inputSize\n const targetSize = Math.max(inputW, inputH)\n\n // Crop the person region from the full frame\n const personCrop = await cropRegion(input.frame.data, input.roi)\n\n // Letterbox resize to model input size\n const lb = await letterbox(personCrop, targetSize)\n\n // SCRFD has multiple output tensors\n const engineWithMulti = this.engine as IInferenceEngine & {\n runMultiOutput?: (input: Float32Array, shape: readonly number[]) => Promise<Record<string, Float32Array>>\n }\n\n let outputs: Record<string, Float32Array>\n if (typeof engineWithMulti.runMultiOutput === 'function') {\n outputs = await engineWithMulti.runMultiOutput(lb.data, [1, 3, targetSize, targetSize])\n } else {\n // Fallback: wrap single output\n const single = await this.engine.run(lb.data, [1, 3, targetSize, targetSize])\n outputs = { output0: single }\n }\n\n const crops = scrfdPostprocess(\n outputs,\n this.confidence,\n targetSize,\n lb.originalWidth,\n lb.originalHeight,\n )\n\n return {\n crops,\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n async shutdown(): Promise<void> {\n await this.engine?.dispose()\n }\n\n getConfigSchema(): ConfigUISchema {\n return {\n sections: [\n {\n id: 'model',\n title: 'Model',\n columns: 1,\n fields: [\n {\n key: 'modelId',\n label: 'Model',\n type: 'model-selector',\n catalog: [...FACE_DETECTION_MODELS],\n allowCustom: false,\n 
allowConversion: false,\n acceptFormats: ['onnx', 'coreml', 'openvino'],\n requiredMetadata: ['inputSize', 'labels', 'outputFormat'],\n outputFormatHint: 'ssd',\n },\n ],\n },\n {\n id: 'thresholds',\n title: 'Detection Thresholds',\n columns: 1,\n fields: [\n {\n key: 'confidence',\n label: 'Confidence Threshold',\n type: 'slider',\n min: 0.1,\n max: 1.0,\n step: 0.05,\n default: 0.5,\n },\n ],\n },\n ],\n }\n }\n\n getClassMap(): ClassMapDefinition {\n return FACE_CLASS_MAP\n }\n\n getModelCatalog(): ModelCatalogEntry[] {\n return [...FACE_DETECTION_MODELS]\n }\n\n getAvailableModels(): DetectionModel[] {\n return []\n }\n\n getActiveLabels(): readonly LabelDefinition[] {\n return FACE_LABELS\n }\n\n async probe(): Promise<ProbeResult> {\n return {\n available: true,\n runtime: this.engine?.runtime ?? 'onnx',\n device: this.engine?.device ?? 'cpu',\n capabilities: ['fp32'],\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;AACA,SAAS,kBAAkB;AAE3B,IAAM,UAAU;AAEhB,IAAM,cAA0C;AAAA,EAC9C,EAAE,IAAI,QAAQ,MAAM,OAAO;AAC7B;AAEO,IAAM,wBAAsD;AAAA,EACjE;AAAA,IACE,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,WAAW,EAAE,OAAO,KAAK,QAAQ,IAAI;AAAA,IACrC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,MAAM;AAAA,QACJ,KAAK,WAAW,SAAS,mDAAmD;AAAA,QAC5E,QAAQ;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,QACN,KAAK,WAAW,SAAS,0DAA0D;AAAA,QACnF,QAAQ;AAAA,MACV;AAAA,MACA,UAAU;AAAA,QACR,KAAK,WAAW,SAAS,sDAAsD;AAAA,QAC/E,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,WAAW,EAAE,OAAO,KAAK,QAAQ,IAAI;AAAA,IACrC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,MAAM;AAAA,QACJ,KAAK,WAAW,SAAS,mDAAmD;AAAA,QAC5E,QAAQ;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,QACN,KAAK,WAAW,SAAS,0DAA0D;AAAA,QACnF,QAAQ;AAAA,MACV;AAAA,MACA,UAAU;AAAA,QACR,KAAK,WAAW,SAAS,sDAAsD;AAAA,QAC/E,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,WAAW,EAAE,OAAO,KAAK,QAAQ,IAAI;AAAA,IACrC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,MAAM;AAAA,QACJ,KAAK,WAAW,SAAS,kDAAkD;AAAA,QAC3E,QAAQ;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,QACN,KAAK,WAA
W,SAAS,yDAAyD;AAAA,QAClF,QAAQ;AAAA,MACV;AAAA,MACA,UAAU;AAAA,QACR,KAAK,WAAW,SAAS,qDAAqD;AAAA,QAC9E,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AACF;;;AClEA,IAAM,UAAU,CAAC,GAAG,IAAI,EAAE;AAC1B,IAAM,yBAAyB;AAG/B,SAAS,gBAAgB,QAAgB,WAAsD;AAC7F,QAAM,cAAc,KAAK,KAAK,YAAY,MAAM;AAChD,QAAM,UAA6C,CAAC;AACpD,WAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,aAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,eAAS,IAAI,GAAG,IAAI,wBAAwB,KAAK;AAC/C,gBAAQ,KAAK;AAAA,UACX,KAAK,IAAI,OAAO;AAAA,UAChB,KAAK,IAAI,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,iBACd,SACA,YACA,WACA,eACA,gBACoB;AAEpB,QAAM,SAAS,gBAAgB;AAC/B,QAAM,SAAS,iBAAiB;AAQhC,QAAM,aAA0B,CAAC;AAEjC,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAW,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,QAAQ,CAAC;AACjH,UAAM,UAAU,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,OAAO,CAAC;AAC9G,UAAM,SAAS,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,OAAO,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,MAAM,CAAC;AAE3G,QAAI,CAAC,YAAY,CAAC,QAAS;AAE3B,UAAM,SAAS,QAAQ,QAAQ;AAC/B,UAAM,SAAS,QAAQ,OAAO;AAC9B,UAAM,MAAM,SAAS,QAAQ,MAAM,IAAI;AACvC,UAAM,UAAU,gBAAgB,QAAQ,SAAS;AAEjD,UAAM,IAAI,QAAQ;AAElB,aAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,YAAM,QAAQ,OAAO,CAAC;AACtB,UAAI,QAAQ,WAAY;AAExB,YAAM,SAAS,QAAQ,CAAC;AAGxB,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,CAAC,IAAK;AACxC,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAC5C,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAC5C,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAE5C,YAAM,OAAoB;AAAA,QACxB,GAAG,KAAK;AAAA,QACR,GAAG,KAAK;AAAA,QACR,IAAI,KAAK,MAAM;AAAA,QACf,IAAI,KAAK,MAAM;AAAA,MACjB;AAEA,UAAI;AACJ,UAAI,KAAK;AACP,cAAM,MAAkB,CAAC;AACzB,iBAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,cAAI,KAAK;AAAA,YACP,IAAI,OAAO,KAAK,IAAI,IAAI,KAAK,IAAI,CAAC,IAAK,UAAU;AAAA,YACjD,IAAI,OAAO,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,CAAC,IAAK,UAAU;AAAA,UACvD,CAAC;AAAA,QACH;AACA,oBAAY;AAAA,MACd;AAEA,iBAAW,KAAK,EAAE,MAAM,OAAO,UAAU,CAAC;AAAA,IAC5C;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,cAAc,IAAI,YAAY,IAAI;AAExC,SAAO,YAA
Y,IAAI,CAAC,QAAQ;AAC9B,UAAM,EAAE,MAAM,OAAO,UAAU,IAAI,WAAW,GAAG;AACjD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,eAAe;AAAA,MACf;AAAA,MACA;AAAA,MACA,GAAI,YAAY,EAAE,UAAU,IAAI,CAAC;AAAA,IACnC;AAAA,EACF,CAAC;AACH;;;ACzFA,IAAM,aAA8B,EAAE,IAAI,QAAQ,MAAM,OAAO;AAC/D,IAAMA,eAA0C,CAAC,UAAU;AAC3D,IAAM,iBAAqC,EAAE,SAAS,CAAC,GAAG,kBAAkB,KAAK;AAEjF,IAAqB,qBAArB,MAAqF;AAAA,EAC1E,KAAK;AAAA,EACL,OAAO;AAAA,EACP,eAAe,CAAC,QAAQ;AAAA,EACxB,gBAAgB,CAAC,MAAM;AAAA,EACvB,eAAe;AAAA,EACf,WAA0B;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,aAAa;AAAA,IACb,MAAM;AAAA,IACN,cAAc,CAAC,QAAQ;AAAA,IACvB,eAAe,CAAC,MAAM;AAAA,IACtB,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,eAAe;AAAA,MACb,SAAS;AAAA,MACT,SAAS;AAAA,MACT,SAAS;AAAA,MACT,YAAY;AAAA,IACd;AAAA,EACF;AAAA,EAEQ;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EAErB,MAAM,WAAW,KAAkC;AACjD,UAAM,MAAM,IAAI;AAChB,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,SAAK,aAAc,IAAI,YAAY,KAA4B;AAE/D,UAAM,QAAQ,sBAAsB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO;AAChE,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,wCAAwC,OAAO,GAAG;AAAA,IACpE;AACA,SAAK,aAAa;AAElB,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ,WAAW,IAAI,cAAc;AAAA,IAC/B,CAAC;AACD,SAAK,SAAS,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,KAAK,OAA0C;AACnD,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,EAAE,OAAO,QAAQ,QAAQ,OAAO,IAAI,KAAK,WAAW;AAC1D,UAAM,aAAa,KAAK,IAAI,QAAQ,MAAM;AAG1C,UAAM,aAAa,MAAM,WAAW,MAAM,MAAM,MAAM,MAAM,GAAG;AAG/D,UAAM,KAAK,MAAM,UAAU,YAAY,UAAU;AAGjD,UAAM,kBAAkB,KAAK;AAI7B,QAAI;AACJ,QAAI,OAAO,gBAAgB,mBAAmB,YAAY;AACxD,gBAAU,MAAM,gBAAgB,eAAe,GAAG,MAAM,CAAC,GAAG,GAAG,YAAY,UAAU,CAAC;AAAA,IACxF,OAAO;AAEL,YAAM,SAAS,MAAM,KAAK,OAAO,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG,YAAY,UAAU,CAAC;AAC5E,gBAAU,EAAE,SAAS,OAAO;AAAA,IAC9B;AAEA,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AAEA,WAAO;AAAA,MACL;AAAA,MACA,aAAa,KAAK,IAAI,IAAI;AAAA,MAC1B,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WAA0B;AAC9B,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC7B;AAAA,EAEA,kBAAkC;AAChC,WAAO;AAAA,MACL,UAAU;AAAA,Q
ACR;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS,CAAC,GAAG,qBAAqB;AAAA,cAClC,aAAa;AAAA,cACb,iBAAiB;AAAA,cACjB,eAAe,CAAC,QAAQ,UAAU,UAAU;AAAA,cAC5C,kBAAkB,CAAC,aAAa,UAAU,cAAc;AAAA,cACxD,kBAAkB;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,KAAK;AAAA,cACL,KAAK;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,YACX;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,cAAkC;AAChC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAuC;AACrC,WAAO,CAAC,GAAG,qBAAqB;AAAA,EAClC;AAAA,EAEA,qBAAuC;AACrC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,kBAA8C;AAC5C,WAAOA;AAAA,EACT;AAAA,EAEA,MAAM,QAA8B;AAClC,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,KAAK,QAAQ,WAAW;AAAA,MACjC,QAAQ,KAAK,QAAQ,UAAU;AAAA,MAC/B,cAAc,CAAC,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":["FACE_LABELS"]}
@@ -0,0 +1,255 @@
1
+ import {
2
+ cropRegion,
3
+ resizeAndNormalize
4
+ } from "./chunk-6OR5TE7A.mjs";
5
+ import {
6
+ resolveEngine
7
+ } from "./chunk-LPI42WL6.mjs";
8
+
9
// src/catalogs/plate-recognition-models.ts
import { hfModelUrl } from "@camstack/types";

// Hugging Face repo hosting the prebuilt CamStack model artifacts.
var HF_REPO = "camstack/camstack-models";
// OCR output is free text, so a single pseudo-label stands in for a class set.
var PLATE_TEXT_LABELS = [
  { id: "text", name: "Plate Text" }
];
15
// Catalog of downloadable PaddleOCR recognition models. Each entry records
// the model's fixed input size and its available runtime formats with their
// approximate download sizes in MB.
var PLATE_RECOGNITION_MODELS = [
  {
    id: "paddleocr-latin",
    name: "PaddleOCR Latin",
    description: "PaddleOCR recognition model for Latin-script license plates",
    // Recognizer input is a wide, short strip matching plate aspect ratio.
    inputSize: { width: 320, height: 48 },
    labels: PLATE_TEXT_LABELS,
    formats: {
      onnx: {
        url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/onnx/camstack-paddleocr-latin-rec.onnx"),
        sizeMB: 7.5
      },
      openvino: {
        url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/openvino/camstack-paddleocr-latin.xml"),
        sizeMB: 4
      }
    }
  },
  {
    id: "paddleocr-en",
    name: "PaddleOCR English",
    description: "PaddleOCR recognition model optimized for English license plates",
    inputSize: { width: 320, height: 48 },
    labels: PLATE_TEXT_LABELS,
    formats: {
      onnx: {
        url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/onnx/camstack-paddleocr-en-rec.onnx"),
        sizeMB: 7.5
      },
      openvino: {
        url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/openvino/camstack-paddleocr-en.xml"),
        sizeMB: 4
      }
    }
  }
];
51
+
52
// src/shared/postprocess/paddleocr.ts
/**
 * Greedy CTC decoding of a recognition model's output tensor.
 *
 * @param {Float32Array|number[]} output - Flattened [seqLen, numChars] scores.
 * @param {number} seqLen - Number of timesteps in the sequence.
 * @param {number} numChars - Charset size, including the blank at index 0.
 * @param {string[]} charset - Index-to-character table; index 0 is blank.
 * @returns {{text: string, confidence: number}} decoded text plus the mean of
 *   the per-timestep winning activations (0 when seqLen is 0).
 */
function ctcDecode(output, seqLen, numChars, charset) {
  // Pass 1: argmax per timestep, accumulating each winning activation.
  const best = [];
  let scoreSum = 0;
  for (let t = 0; t < seqLen; t++) {
    const base = t * numChars;
    let argmax = 0;
    let max = output[base];
    for (let c = 1; c < numChars; c++) {
      if (output[base + c] > max) {
        max = output[base + c];
        argmax = c;
      }
    }
    best.push(argmax);
    scoreSum += max;
  }
  // Pass 2: CTC collapse — drop repeated indices, then drop blanks (index 0),
  // mapping the survivors through the charset.
  const chars = [];
  for (let i = 0; i < best.length; i++) {
    if (i > 0 && best[i] === best[i - 1]) continue;
    if (best[i] === 0) continue;
    chars.push(charset[best[i]] ?? "");
  }
  return {
    text: chars.join(""),
    confidence: seqLen > 0 ? scoreSum / seqLen : 0
  };
}
82
+
83
+ // src/addons/plate-recognition/index.ts
84
+ import * as fs from "fs";
85
+ import * as path from "path";
86
// Single pseudo-label for decoded plate text.
var PLATE_TEXT_LABEL = { id: "plate-text", name: "Plate Text" };
var PLATE_TEXT_LABELS2 = [PLATE_TEXT_LABEL];
// Empty mapping with preserveOriginal: classifications pass through unchanged.
var PLATE_REC_CLASS_MAP = { mapping: {}, preserveOriginal: true };
89
/**
 * Load the CTC character dictionary for a recognition model.
 *
 * Tries a model-specific dict file first, then falls back to the bundled
 * latin/en dictionaries. The returned array maps CTC indices to characters:
 * index 0 is the blank token (""), followed by one entry per dictionary
 * line, with a trailing space entry.
 *
 * @param {string} modelsDir - Directory containing downloaded model assets.
 * @param {string} modelId - Active model id, used to derive the dict filename.
 * @returns {string[]} charset indexed by CTC class id.
 * @throws {Error} when no dictionary file exists in `modelsDir`.
 */
function loadCharset(modelsDir, modelId) {
  const dictNames = [
    `camstack-${modelId}-dict.txt`,
    `camstack-paddleocr-latin-dict.txt`,
    `camstack-paddleocr-en-dict.txt`
  ];
  for (const name of dictNames) {
    const dictPath = path.join(modelsDir, name);
    if (!fs.existsSync(dictPath)) continue;
    const lines = fs.readFileSync(dictPath, "utf-8")
      // Split on LF or CRLF: a dictionary saved with Windows line endings
      // would otherwise leave a stray "\r" on every charset entry.
      .split(/\r?\n/)
      .filter((l) => l.length > 0);
    return ["", ...lines, " "];
  }
  throw new Error(`PlateRecognitionAddon: dict.txt not found in ${modelsDir}`);
}
104
// Pipeline prerequisite: an upstream cropper must emit "plate" regions.
var REQUIRED_STEPS = [
  { slot: "cropper", outputClasses: ["plate"], description: "Requires a plate detector" }
];

/**
 * License plate text recognition addon (PaddleOCR CTC recognizer).
 *
 * Runs in the "classifier" slot on "plate" crops and emits a single
 * free-text "plate-text" classification carrying the decoded string.
 *
 * The CTC charset is held per instance (`this.charset`). It was previously a
 * module-level variable, which meant two addon instances initialized with
 * different models would overwrite each other's dictionary.
 */
var PlateRecognitionAddon = class {
  id = "plate-recognition";
  slot = "classifier";
  inputClasses = ["plate"];
  outputClasses = ["plate-text:*"];
  slotPriority = 0;
  requiredSteps = REQUIRED_STEPS;
  manifest = {
    id: "plate-recognition",
    name: "License Plate Recognition (OCR)",
    version: "0.1.0",
    description: "PaddleOCR-based license plate text recognition",
    packageName: "@camstack/addon-vision",
    slot: "classifier",
    inputClasses: ["plate"],
    outputClasses: ["plate-text:*"],
    requiredSteps: REQUIRED_STEPS,
    supportsCustomModels: false,
    mayRequirePython: false,
    defaultConfig: {
      modelId: "paddleocr-latin",
      runtime: "auto",
      backend: "cpu",
      minConfidence: 0.5
    }
  };
  engine;
  modelEntry;
  minConfidence = 0.5;
  // Per-instance CTC charset, populated in initialize(); index 0 is blank.
  charset = [];
  /**
   * Resolve the configured catalog model, load its dictionary, and spin up
   * an inference engine.
   * @throws {Error} when the configured modelId is not in the catalog, or
   *   when no dictionary file is found.
   */
  async initialize(ctx) {
    const cfg = ctx.addonConfig;
    const modelId = cfg["modelId"] ?? "paddleocr-latin";
    const runtime = cfg["runtime"] ?? "auto";
    const backend = cfg["backend"] ?? "cpu";
    this.minConfidence = cfg["minConfidence"] ?? 0.5;
    const entry = PLATE_RECOGNITION_MODELS.find((m) => m.id === modelId);
    if (!entry) {
      throw new Error(`PlateRecognitionAddon: unknown modelId "${modelId}"`);
    }
    this.modelEntry = entry;
    // Instance-scoped charset (see class doc) instead of shared module state.
    this.charset = loadCharset(ctx.locationPaths.models, modelId);
    const resolved = await resolveEngine({
      runtime,
      backend,
      modelEntry: entry,
      modelsDir: ctx.locationPaths.models
    });
    this.engine = resolved.engine;
  }
  /**
   * Run OCR on a single plate crop.
   * @returns empty `classifications` when the decoded text is blank or its
   *   confidence is below `minConfidence`.
   */
  async classify(input) {
    const start = Date.now();
    const { width: inputW, height: inputH } = this.modelEntry.inputSize;
    const plateCrop = await cropRegion(input.frame.data, input.roi);
    const normalized = await resizeAndNormalize(plateCrop, inputW, inputH, "zero-one", "nchw");
    const output = await this.engine.run(normalized, [1, 3, inputH, inputW]);
    const numChars = this.charset.length;
    // Decode only complete timesteps; an output length that is not an exact
    // multiple of the charset size must not read a partial trailing frame.
    const seqLen = numChars > 0 ? Math.floor(output.length / numChars) : 0;
    const { text, confidence } = ctcDecode(output, seqLen, numChars, this.charset);
    if (confidence < this.minConfidence || text.trim().length === 0) {
      return {
        classifications: [],
        inferenceMs: Date.now() - start,
        modelId: this.modelEntry.id
      };
    }
    return {
      classifications: [
        {
          class: "plate-text",
          score: confidence,
          text
        }
      ],
      inferenceMs: Date.now() - start,
      modelId: this.modelEntry.id
    };
  }
  /** Release the inference engine, if one was created. */
  async shutdown() {
    await this.engine?.dispose();
  }
  /** UI schema: model picker plus a minimum-confidence slider. */
  getConfigSchema() {
    return {
      sections: [
        {
          id: "model",
          title: "Model",
          columns: 1,
          fields: [
            {
              key: "modelId",
              label: "Model",
              type: "model-selector",
              catalog: [...PLATE_RECOGNITION_MODELS],
              allowCustom: false,
              allowConversion: false,
              acceptFormats: ["onnx", "openvino"],
              requiredMetadata: ["inputSize", "labels", "outputFormat"],
              outputFormatHint: "ocr"
            }
          ]
        },
        {
          id: "thresholds",
          title: "Recognition Settings",
          columns: 1,
          fields: [
            {
              key: "minConfidence",
              label: "Minimum Confidence",
              type: "slider",
              min: 0.1,
              max: 1,
              step: 0.05,
              default: 0.5
            }
          ]
        }
      ]
    };
  }
  getClassMap() {
    return PLATE_REC_CLASS_MAP;
  }
  getModelCatalog() {
    return [...PLATE_RECOGNITION_MODELS];
  }
  /** No user-installed models; only the built-in catalog is offered. */
  getAvailableModels() {
    return [];
  }
  getActiveLabels() {
    return PLATE_TEXT_LABELS2;
  }
  /** Report runtime/device; defaults apply until initialize() has run. */
  async probe() {
    return {
      available: true,
      runtime: this.engine?.runtime ?? "onnx",
      device: this.engine?.device ?? "cpu",
      capabilities: ["fp32"]
    };
  }
};
249
+
250
+ export {
251
+ ctcDecode,
252
+ PLATE_RECOGNITION_MODELS,
253
+ PlateRecognitionAddon
254
+ };
255
+ //# sourceMappingURL=chunk-PDSHDDPV.mjs.map