@camstack/addon-vision 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111) hide show
  1. package/dist/addons/animal-classifier/index.js +999 -823
  2. package/dist/addons/animal-classifier/index.js.map +1 -1
  3. package/dist/addons/animal-classifier/index.mjs +242 -7
  4. package/dist/addons/animal-classifier/index.mjs.map +1 -1
  5. package/dist/addons/audio-classification/index.js +501 -379
  6. package/dist/addons/audio-classification/index.js.map +1 -1
  7. package/dist/addons/audio-classification/index.mjs +224 -4
  8. package/dist/addons/audio-classification/index.mjs.map +1 -1
  9. package/dist/addons/bird-global-classifier/index.js +1002 -826
  10. package/dist/addons/bird-global-classifier/index.js.map +1 -1
  11. package/dist/addons/bird-global-classifier/index.mjs +248 -7
  12. package/dist/addons/bird-global-classifier/index.mjs.map +1 -1
  13. package/dist/addons/bird-nabirds-classifier/index.js +1002 -826
  14. package/dist/addons/bird-nabirds-classifier/index.js.map +1 -1
  15. package/dist/addons/bird-nabirds-classifier/index.mjs +289 -7
  16. package/dist/addons/bird-nabirds-classifier/index.mjs.map +1 -1
  17. package/dist/addons/face-detection/index.js +1196 -935
  18. package/dist/addons/face-detection/index.js.map +1 -1
  19. package/dist/addons/face-detection/index.mjs +227 -7
  20. package/dist/addons/face-detection/index.mjs.map +1 -1
  21. package/dist/addons/face-recognition/index.js +1003 -808
  22. package/dist/addons/face-recognition/index.js.map +1 -1
  23. package/dist/addons/face-recognition/index.mjs +197 -6
  24. package/dist/addons/face-recognition/index.mjs.map +1 -1
  25. package/dist/addons/motion-detection/index.js +214 -111
  26. package/dist/addons/motion-detection/index.js.map +1 -1
  27. package/dist/addons/motion-detection/index.mjs +12 -9
  28. package/dist/addons/motion-detection/index.mjs.map +1 -1
  29. package/dist/addons/object-detection/index.js +1287 -1083
  30. package/dist/addons/object-detection/index.js.map +1 -1
  31. package/dist/addons/object-detection/index.mjs +373 -7
  32. package/dist/addons/object-detection/index.mjs.map +1 -1
  33. package/dist/addons/plate-detection/index.js +1075 -869
  34. package/dist/addons/plate-detection/index.js.map +1 -1
  35. package/dist/addons/plate-detection/index.mjs +230 -7
  36. package/dist/addons/plate-detection/index.mjs.map +1 -1
  37. package/dist/addons/plate-recognition/index.js +684 -506
  38. package/dist/addons/plate-recognition/index.js.map +1 -1
  39. package/dist/addons/plate-recognition/index.mjs +244 -5
  40. package/dist/addons/plate-recognition/index.mjs.map +1 -1
  41. package/dist/addons/segmentation-refiner/index.js +967 -791
  42. package/dist/addons/segmentation-refiner/index.js.map +1 -1
  43. package/dist/addons/segmentation-refiner/index.mjs +21 -17
  44. package/dist/addons/segmentation-refiner/index.mjs.map +1 -1
  45. package/dist/addons/vehicle-classifier/index.js +581 -411
  46. package/dist/addons/vehicle-classifier/index.js.map +1 -1
  47. package/dist/addons/vehicle-classifier/index.mjs +20 -16
  48. package/dist/addons/vehicle-classifier/index.mjs.map +1 -1
  49. package/dist/chunk-2YMA6QOV.mjs +193 -0
  50. package/dist/chunk-2YMA6QOV.mjs.map +1 -0
  51. package/dist/chunk-3IIFBJCD.mjs +45 -0
  52. package/dist/chunk-BS4DKYGN.mjs +48 -0
  53. package/dist/{chunk-7DYHXUPZ.mjs.map → chunk-BS4DKYGN.mjs.map} +1 -1
  54. package/dist/chunk-DE7I3VHO.mjs +106 -0
  55. package/dist/{chunk-KUO2BVFY.mjs.map → chunk-DE7I3VHO.mjs.map} +1 -1
  56. package/dist/chunk-F6D2OZ36.mjs +89 -0
  57. package/dist/chunk-F6D2OZ36.mjs.map +1 -0
  58. package/dist/chunk-GAOIFQDX.mjs +59 -0
  59. package/dist/chunk-GAOIFQDX.mjs.map +1 -0
  60. package/dist/chunk-HUIX2XVR.mjs +159 -0
  61. package/dist/chunk-HUIX2XVR.mjs.map +1 -0
  62. package/dist/chunk-K36R6HWY.mjs +51 -0
  63. package/dist/{chunk-XZ6ZMXXU.mjs.map → chunk-K36R6HWY.mjs.map} +1 -1
  64. package/dist/chunk-MBTAI3WE.mjs +78 -0
  65. package/dist/chunk-MBTAI3WE.mjs.map +1 -0
  66. package/dist/chunk-MGT6RUVX.mjs +423 -0
  67. package/dist/{chunk-BP7H4NFS.mjs.map → chunk-MGT6RUVX.mjs.map} +1 -1
  68. package/dist/chunk-PIFS7AIT.mjs +446 -0
  69. package/dist/chunk-PIFS7AIT.mjs.map +1 -0
  70. package/dist/chunk-WG66JYYW.mjs +116 -0
  71. package/dist/{chunk-22BHCDT5.mjs.map → chunk-WG66JYYW.mjs.map} +1 -1
  72. package/dist/chunk-XD7WGXHZ.mjs +82 -0
  73. package/dist/{chunk-DUN6XU3N.mjs.map → chunk-XD7WGXHZ.mjs.map} +1 -1
  74. package/dist/chunk-YYDM6V2F.mjs +113 -0
  75. package/dist/{chunk-BR2FPGOX.mjs.map → chunk-YYDM6V2F.mjs.map} +1 -1
  76. package/dist/chunk-ZK7P3TZN.mjs +286 -0
  77. package/dist/chunk-ZK7P3TZN.mjs.map +1 -0
  78. package/dist/index.js +4443 -3925
  79. package/dist/index.js.map +1 -1
  80. package/dist/index.mjs +2698 -250
  81. package/dist/index.mjs.map +1 -1
  82. package/package.json +2 -3
  83. package/dist/chunk-22BHCDT5.mjs +0 -101
  84. package/dist/chunk-6DJZZR64.mjs +0 -336
  85. package/dist/chunk-6DJZZR64.mjs.map +0 -1
  86. package/dist/chunk-7DYHXUPZ.mjs +0 -36
  87. package/dist/chunk-BJTO5JO5.mjs +0 -11
  88. package/dist/chunk-BP7H4NFS.mjs +0 -412
  89. package/dist/chunk-BR2FPGOX.mjs +0 -98
  90. package/dist/chunk-DNQNGDR4.mjs +0 -256
  91. package/dist/chunk-DNQNGDR4.mjs.map +0 -1
  92. package/dist/chunk-DUN6XU3N.mjs +0 -72
  93. package/dist/chunk-EPNWLSCG.mjs +0 -387
  94. package/dist/chunk-EPNWLSCG.mjs.map +0 -1
  95. package/dist/chunk-G32RCIUI.mjs +0 -645
  96. package/dist/chunk-G32RCIUI.mjs.map +0 -1
  97. package/dist/chunk-GR65KM6X.mjs +0 -289
  98. package/dist/chunk-GR65KM6X.mjs.map +0 -1
  99. package/dist/chunk-H7LMBTS5.mjs +0 -276
  100. package/dist/chunk-H7LMBTS5.mjs.map +0 -1
  101. package/dist/chunk-IK4XIQPC.mjs +0 -242
  102. package/dist/chunk-IK4XIQPC.mjs.map +0 -1
  103. package/dist/chunk-J6VNIIYX.mjs +0 -269
  104. package/dist/chunk-J6VNIIYX.mjs.map +0 -1
  105. package/dist/chunk-KUO2BVFY.mjs +0 -90
  106. package/dist/chunk-ML2JX43J.mjs +0 -248
  107. package/dist/chunk-ML2JX43J.mjs.map +0 -1
  108. package/dist/chunk-WUMV524J.mjs +0 -379
  109. package/dist/chunk-WUMV524J.mjs.map +0 -1
  110. package/dist/chunk-XZ6ZMXXU.mjs +0 -39
  111. package/dist/{chunk-BJTO5JO5.mjs.map → chunk-3IIFBJCD.mjs.map} +0 -0
@@ -1,387 +0,0 @@
1
- import {
2
- nms
3
- } from "./chunk-KUO2BVFY.mjs";
4
- import {
5
- MLPACKAGE_FILES
6
- } from "./chunk-BP7H4NFS.mjs";
7
- import {
8
- cropRegion,
9
- letterbox
10
- } from "./chunk-22BHCDT5.mjs";
11
- import {
12
- resolveEngine
13
- } from "./chunk-6DJZZR64.mjs";
14
-
15
- // src/catalogs/face-detection-models.ts
16
- import { hfModelUrl } from "@camstack/types";
17
- var HF_REPO = "camstack/camstack-models";
18
- var FACE_LABELS = [
19
- { id: "face", name: "Face" }
20
- ];
21
- var FACE_DETECTION_MODELS = [
22
- {
23
- id: "scrfd-500m",
24
- name: "SCRFD 500M",
25
- description: "SCRFD 500M \u2014 ultra-lightweight face detector",
26
- inputSize: { width: 640, height: 640 },
27
- labels: FACE_LABELS,
28
- formats: {
29
- onnx: {
30
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/onnx/camstack-scrfd-500m.onnx"),
31
- sizeMB: 2.2
32
- },
33
- coreml: {
34
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/coreml/camstack-scrfd-500m.mlpackage"),
35
- sizeMB: 1.2,
36
- isDirectory: true,
37
- files: MLPACKAGE_FILES,
38
- runtimes: ["python"]
39
- },
40
- openvino: {
41
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/openvino/camstack-scrfd-500m.xml"),
42
- sizeMB: 1.3,
43
- runtimes: ["python"]
44
- }
45
- }
46
- },
47
- {
48
- id: "scrfd-2.5g",
49
- name: "SCRFD 2.5G",
50
- description: "SCRFD 2.5G \u2014 balanced face detection model",
51
- inputSize: { width: 640, height: 640 },
52
- labels: FACE_LABELS,
53
- formats: {
54
- onnx: {
55
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/onnx/camstack-scrfd-2.5g.onnx"),
56
- sizeMB: 3.1
57
- },
58
- coreml: {
59
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/coreml/camstack-scrfd-2.5g.mlpackage"),
60
- sizeMB: 1.7,
61
- isDirectory: true,
62
- files: MLPACKAGE_FILES,
63
- runtimes: ["python"]
64
- },
65
- openvino: {
66
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/openvino/camstack-scrfd-2.5g.xml"),
67
- sizeMB: 1.8,
68
- runtimes: ["python"]
69
- }
70
- }
71
- },
72
- {
73
- id: "scrfd-10g",
74
- name: "SCRFD 10G",
75
- description: "SCRFD 10G \u2014 high-accuracy face detector",
76
- inputSize: { width: 640, height: 640 },
77
- labels: FACE_LABELS,
78
- formats: {
79
- onnx: {
80
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/onnx/camstack-scrfd-10g.onnx"),
81
- sizeMB: 16
82
- },
83
- coreml: {
84
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/coreml/camstack-scrfd-10g.mlpackage"),
85
- sizeMB: 8.2,
86
- isDirectory: true,
87
- files: MLPACKAGE_FILES,
88
- runtimes: ["python"]
89
- },
90
- openvino: {
91
- url: hfModelUrl(HF_REPO, "faceDetection/scrfd/openvino/camstack-scrfd-10g.xml"),
92
- sizeMB: 8.3,
93
- runtimes: ["python"]
94
- }
95
- }
96
- }
97
- ];
98
-
99
- // src/shared/postprocess/scrfd.ts
100
- var STRIDES = [8, 16, 32];
101
- var NUM_ANCHORS_PER_STRIDE = 2;
102
- function generateAnchors(stride, inputSize) {
103
- const featureSize = Math.ceil(inputSize / stride);
104
- const anchors = [];
105
- for (let y = 0; y < featureSize; y++) {
106
- for (let x = 0; x < featureSize; x++) {
107
- for (let k = 0; k < NUM_ANCHORS_PER_STRIDE; k++) {
108
- anchors.push({
109
- cx: (x + 0.5) * stride,
110
- cy: (y + 0.5) * stride
111
- });
112
- }
113
- }
114
- }
115
- return anchors;
116
- }
117
- function scrfdPostprocess(outputs, confidence, inputSize, originalWidth, originalHeight) {
118
- const scaleX = originalWidth / inputSize;
119
- const scaleY = originalHeight / inputSize;
120
- const candidates = [];
121
- for (const stride of STRIDES) {
122
- const scoreKey = Object.keys(outputs).find((k) => k.includes(`score_${stride}`) || k.includes(`_${stride}_score`));
123
- const bboxKey = Object.keys(outputs).find((k) => k.includes(`bbox_${stride}`) || k.includes(`_${stride}_bbox`));
124
- const kpsKey = Object.keys(outputs).find((k) => k.includes(`kps_${stride}`) || k.includes(`_${stride}_kps`));
125
- if (!scoreKey || !bboxKey) continue;
126
- const scores = outputs[scoreKey];
127
- const bboxes = outputs[bboxKey];
128
- const kps = kpsKey ? outputs[kpsKey] : void 0;
129
- const anchors = generateAnchors(stride, inputSize);
130
- const n = anchors.length;
131
- for (let i = 0; i < n; i++) {
132
- const score = scores[i];
133
- if (score < confidence) continue;
134
- const anchor = anchors[i];
135
- const x1 = anchor.cx - bboxes[i * 4] * stride;
136
- const y1 = anchor.cy - bboxes[i * 4 + 1] * stride;
137
- const x2 = anchor.cx + bboxes[i * 4 + 2] * stride;
138
- const y2 = anchor.cy + bboxes[i * 4 + 3] * stride;
139
- const bbox = {
140
- x: x1 * scaleX,
141
- y: y1 * scaleY,
142
- w: (x2 - x1) * scaleX,
143
- h: (y2 - y1) * scaleY
144
- };
145
- let landmarks;
146
- if (kps) {
147
- const pts = [];
148
- for (let p = 0; p < 5; p++) {
149
- pts.push({
150
- x: (anchor.cx + kps[i * 10 + p * 2] * stride) * scaleX,
151
- y: (anchor.cy + kps[i * 10 + p * 2 + 1] * stride) * scaleY
152
- });
153
- }
154
- landmarks = pts;
155
- }
156
- candidates.push({ bbox, score, landmarks });
157
- }
158
- }
159
- if (candidates.length === 0) return [];
160
- const keptIndices = nms(candidates, 0.45);
161
- return keptIndices.map((idx) => {
162
- const { bbox, score, landmarks } = candidates[idx];
163
- return {
164
- class: "face",
165
- originalClass: "face",
166
- score,
167
- bbox,
168
- ...landmarks ? { landmarks } : {}
169
- };
170
- });
171
- }
172
-
173
- // src/addons/face-detection/index.ts
174
- var FACE_LABEL = { id: "face", name: "Face" };
175
- var FACE_LABELS2 = [FACE_LABEL];
176
- var FACE_CLASS_MAP = { mapping: {}, preserveOriginal: true };
177
- var RAM_ESTIMATES = {
178
- "scrfd-500m": 50,
179
- "scrfd-2.5g": 80,
180
- "scrfd-10g": 200
181
- };
182
- var ACCURACY_SCORES = {
183
- "scrfd-500m": 70,
184
- "scrfd-2.5g": 82,
185
- "scrfd-10g": 92
186
- };
187
- var FaceDetectionAddon = class {
188
- id = "face-detection";
189
- slot = "cropper";
190
- inputClasses = ["person"];
191
- outputClasses = ["face"];
192
- slotPriority = 0;
193
- manifest = {
194
- id: "face-detection",
195
- name: "Face Detection",
196
- version: "0.1.0",
197
- description: "SCRFD-based face detector \u2014 crops face regions from person detections",
198
- slot: "cropper",
199
- inputClasses: ["person"],
200
- outputClasses: ["face"],
201
- supportsCustomModels: false,
202
- mayRequirePython: false,
203
- defaultConfig: {
204
- modelId: "scrfd-500m",
205
- runtime: "node",
206
- backend: "cpu",
207
- confidence: 0.5
208
- }
209
- };
210
- engine = null;
211
- modelEntry;
212
- confidence = 0.5;
213
- resolvedConfig = null;
214
- ctx = null;
215
- getModelRequirements() {
216
- return FACE_DETECTION_MODELS.map((m) => ({
217
- modelId: m.id,
218
- name: m.name,
219
- minRAM_MB: RAM_ESTIMATES[m.id] ?? 50,
220
- accuracyScore: ACCURACY_SCORES[m.id] ?? 70,
221
- formats: Object.keys(m.formats)
222
- }));
223
- }
224
- configure(config) {
225
- this.resolvedConfig = config;
226
- }
227
- async initialize(ctx) {
228
- this.ctx = ctx;
229
- const cfg = ctx.addonConfig;
230
- const modelId = cfg["modelId"] ?? this.resolvedConfig?.modelId ?? "scrfd-500m";
231
- this.confidence = cfg["confidence"] ?? 0.5;
232
- const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId);
233
- if (!entry) {
234
- throw new Error(`FaceDetectionAddon: unknown modelId "${modelId}"`);
235
- }
236
- this.modelEntry = entry;
237
- }
238
- async crop(input) {
239
- if (!this.engine) await this.ensureEngine();
240
- const start = Date.now();
241
- const { width: inputW, height: inputH } = this.modelEntry.inputSize;
242
- const targetSize = Math.max(inputW, inputH);
243
- const personCrop = await cropRegion(input.frame.data, input.roi);
244
- const lb = await letterbox(personCrop, targetSize);
245
- const engineWithMulti = this.engine;
246
- let outputs;
247
- if (typeof engineWithMulti.runMultiOutput === "function") {
248
- outputs = await engineWithMulti.runMultiOutput(lb.data, [1, 3, targetSize, targetSize]);
249
- } else {
250
- const single = await this.engine.run(lb.data, [1, 3, targetSize, targetSize]);
251
- outputs = { output0: single };
252
- }
253
- const crops = scrfdPostprocess(
254
- outputs,
255
- this.confidence,
256
- targetSize,
257
- lb.originalWidth,
258
- lb.originalHeight
259
- );
260
- return {
261
- crops,
262
- inferenceMs: Date.now() - start,
263
- modelId: this.modelEntry.id
264
- };
265
- }
266
- async ensureEngine() {
267
- const config = this.resolvedConfig;
268
- const modelId = config?.modelId ?? this.modelEntry.id;
269
- const runtime = config?.runtime === "python" ? "coreml" : config?.runtime === "node" ? "onnx" : "auto";
270
- const backend = config?.backend ?? "cpu";
271
- const format = config?.format ?? "onnx";
272
- const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId) ?? this.modelEntry;
273
- this.modelEntry = entry;
274
- const modelsDir = this.ctx.models?.getModelsDir() ?? this.ctx.locationPaths.models;
275
- if (this.ctx.models) {
276
- await this.ctx.models.ensure(modelId, format);
277
- }
278
- const resolved = await resolveEngine({
279
- runtime,
280
- backend,
281
- modelEntry: entry,
282
- modelsDir,
283
- models: this.ctx.models
284
- });
285
- this.engine = resolved.engine;
286
- }
287
- async shutdown() {
288
- await this.engine?.dispose();
289
- }
290
- getConfigSchema() {
291
- return {
292
- sections: [
293
- {
294
- id: "model",
295
- title: "Model",
296
- columns: 1,
297
- fields: [
298
- {
299
- key: "modelId",
300
- label: "Model",
301
- type: "model-selector",
302
- catalog: [...FACE_DETECTION_MODELS],
303
- allowCustom: false,
304
- allowConversion: false,
305
- acceptFormats: ["onnx", "coreml", "openvino"],
306
- requiredMetadata: ["inputSize", "labels", "outputFormat"],
307
- outputFormatHint: "ssd"
308
- }
309
- ]
310
- },
311
- {
312
- id: "runtime",
313
- title: "Runtime",
314
- columns: 2,
315
- fields: [
316
- {
317
- key: "runtime",
318
- label: "Runtime",
319
- type: "select",
320
- options: [
321
- { value: "auto", label: "Auto" },
322
- { value: "onnx", label: "ONNX Runtime" },
323
- { value: "coreml", label: "CoreML (Apple)" },
324
- { value: "openvino", label: "OpenVINO (Intel)" }
325
- ]
326
- },
327
- {
328
- key: "backend",
329
- label: "Backend",
330
- type: "select",
331
- showWhen: { field: "runtime", equals: "onnx" },
332
- options: [
333
- { value: "auto", label: "Auto" },
334
- { value: "cpu", label: "CPU" },
335
- { value: "coreml", label: "CoreML" },
336
- { value: "cuda", label: "CUDA (NVIDIA)" }
337
- ]
338
- }
339
- ]
340
- },
341
- {
342
- id: "thresholds",
343
- title: "Detection Thresholds",
344
- columns: 1,
345
- fields: [
346
- {
347
- key: "confidence",
348
- label: "Confidence Threshold",
349
- type: "slider",
350
- min: 0.1,
351
- max: 1,
352
- step: 0.05,
353
- default: 0.5
354
- }
355
- ]
356
- }
357
- ]
358
- };
359
- }
360
- getClassMap() {
361
- return FACE_CLASS_MAP;
362
- }
363
- getModelCatalog() {
364
- return [...FACE_DETECTION_MODELS];
365
- }
366
- getAvailableModels() {
367
- return [];
368
- }
369
- getActiveLabels() {
370
- return FACE_LABELS2;
371
- }
372
- async probe() {
373
- return {
374
- available: true,
375
- runtime: this.engine?.runtime ?? "onnx",
376
- device: this.engine?.device ?? "cpu",
377
- capabilities: ["fp32"]
378
- };
379
- }
380
- };
381
-
382
- export {
383
- scrfdPostprocess,
384
- FACE_DETECTION_MODELS,
385
- FaceDetectionAddon
386
- };
387
- //# sourceMappingURL=chunk-EPNWLSCG.mjs.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/catalogs/face-detection-models.ts","../src/shared/postprocess/scrfd.ts","../src/addons/face-detection/index.ts"],"sourcesContent":["import type { ModelCatalogEntry, LabelDefinition } from '@camstack/types'\nimport { hfModelUrl } from '@camstack/types'\nimport { MLPACKAGE_FILES } from './object-detection-models.js'\n\nconst HF_REPO = 'camstack/camstack-models'\n\nconst FACE_LABELS: readonly LabelDefinition[] = [\n { id: 'face', name: 'Face' },\n] as const\n\nexport const FACE_DETECTION_MODELS: readonly ModelCatalogEntry[] = [\n {\n id: 'scrfd-500m',\n name: 'SCRFD 500M',\n description: 'SCRFD 500M — ultra-lightweight face detector',\n inputSize: { width: 640, height: 640 },\n labels: FACE_LABELS,\n formats: {\n onnx: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/onnx/camstack-scrfd-500m.onnx'),\n sizeMB: 2.2,\n },\n coreml: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/coreml/camstack-scrfd-500m.mlpackage'),\n sizeMB: 1.2,\n isDirectory: true,\n files: MLPACKAGE_FILES,\n runtimes: ['python'],\n },\n openvino: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/openvino/camstack-scrfd-500m.xml'),\n sizeMB: 1.3,\n runtimes: ['python'],\n },\n },\n },\n {\n id: 'scrfd-2.5g',\n name: 'SCRFD 2.5G',\n description: 'SCRFD 2.5G — balanced face detection model',\n inputSize: { width: 640, height: 640 },\n labels: FACE_LABELS,\n formats: {\n onnx: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/onnx/camstack-scrfd-2.5g.onnx'),\n sizeMB: 3.1,\n },\n coreml: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/coreml/camstack-scrfd-2.5g.mlpackage'),\n sizeMB: 1.7,\n isDirectory: true,\n files: MLPACKAGE_FILES,\n runtimes: ['python'],\n },\n openvino: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/openvino/camstack-scrfd-2.5g.xml'),\n sizeMB: 1.8,\n runtimes: ['python'],\n },\n },\n },\n {\n id: 'scrfd-10g',\n name: 'SCRFD 10G',\n description: 'SCRFD 10G — high-accuracy face detector',\n inputSize: { width: 640, height: 640 },\n labels: 
FACE_LABELS,\n formats: {\n onnx: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/onnx/camstack-scrfd-10g.onnx'),\n sizeMB: 16,\n },\n coreml: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/coreml/camstack-scrfd-10g.mlpackage'),\n sizeMB: 8.2,\n isDirectory: true,\n files: MLPACKAGE_FILES,\n runtimes: ['python'],\n },\n openvino: {\n url: hfModelUrl(HF_REPO, 'faceDetection/scrfd/openvino/camstack-scrfd-10g.xml'),\n sizeMB: 8.3,\n runtimes: ['python'],\n },\n },\n },\n] as const\n","import type { SpatialDetection, BoundingBox, Landmark } from '@camstack/types'\nimport { iou, nms } from './yolo.js'\n\nexport interface ScrfdRawOutputs {\n readonly [key: string]: Float32Array\n}\n\nconst STRIDES = [8, 16, 32] as const\nconst NUM_ANCHORS_PER_STRIDE = 2\n\n/** Generate anchor points for a given stride and input size */\nfunction generateAnchors(stride: number, inputSize: number): Array<{ cx: number; cy: number }> {\n const featureSize = Math.ceil(inputSize / stride)\n const anchors: Array<{ cx: number; cy: number }> = []\n for (let y = 0; y < featureSize; y++) {\n for (let x = 0; x < featureSize; x++) {\n for (let k = 0; k < NUM_ANCHORS_PER_STRIDE; k++) {\n anchors.push({\n cx: (x + 0.5) * stride,\n cy: (y + 0.5) * stride,\n })\n }\n }\n }\n return anchors\n}\n\nexport function scrfdPostprocess(\n outputs: ScrfdRawOutputs,\n confidence: number,\n inputSize: number,\n originalWidth: number,\n originalHeight: number,\n): SpatialDetection[] {\n // Scale factor from letterbox (assume square crop, so same scale both axes)\n const scaleX = originalWidth / inputSize\n const scaleY = originalHeight / inputSize\n\n interface Candidate {\n readonly bbox: BoundingBox\n readonly score: number\n readonly landmarks?: readonly Landmark[]\n }\n\n const candidates: Candidate[] = []\n\n for (const stride of STRIDES) {\n const scoreKey = Object.keys(outputs).find((k) => k.includes(`score_${stride}`) || k.includes(`_${stride}_score`))\n const bboxKey = Object.keys(outputs).find((k) => 
k.includes(`bbox_${stride}`) || k.includes(`_${stride}_bbox`))\n const kpsKey = Object.keys(outputs).find((k) => k.includes(`kps_${stride}`) || k.includes(`_${stride}_kps`))\n\n if (!scoreKey || !bboxKey) continue\n\n const scores = outputs[scoreKey]!\n const bboxes = outputs[bboxKey]!\n const kps = kpsKey ? outputs[kpsKey] : undefined\n const anchors = generateAnchors(stride, inputSize)\n\n const n = anchors.length\n\n for (let i = 0; i < n; i++) {\n const score = scores[i]!\n if (score < confidence) continue\n\n const anchor = anchors[i]!\n\n // Bboxes are relative to the anchor center in stride units, scaled by stride\n const x1 = anchor.cx - bboxes[i * 4]! * stride\n const y1 = anchor.cy - bboxes[i * 4 + 1]! * stride\n const x2 = anchor.cx + bboxes[i * 4 + 2]! * stride\n const y2 = anchor.cy + bboxes[i * 4 + 3]! * stride\n\n const bbox: BoundingBox = {\n x: x1 * scaleX,\n y: y1 * scaleY,\n w: (x2 - x1) * scaleX,\n h: (y2 - y1) * scaleY,\n }\n\n let landmarks: readonly Landmark[] | undefined\n if (kps) {\n const pts: Landmark[] = []\n for (let p = 0; p < 5; p++) {\n pts.push({\n x: (anchor.cx + kps[i * 10 + p * 2]! * stride) * scaleX,\n y: (anchor.cy + kps[i * 10 + p * 2 + 1]! * stride) * scaleY,\n })\n }\n landmarks = pts\n }\n\n candidates.push({ bbox, score, landmarks })\n }\n }\n\n if (candidates.length === 0) return []\n\n const keptIndices = nms(candidates, 0.45)\n\n return keptIndices.map((idx) => {\n const { bbox, score, landmarks } = candidates[idx]!\n return {\n class: 'face',\n originalClass: 'face',\n score,\n bbox,\n ...(landmarks ? 
{ landmarks } : {}),\n } satisfies SpatialDetection\n })\n}\n","import type {\n ICropperProvider,\n IDetectionAddon,\n AddonManifest,\n AddonContext,\n CropInput,\n CropperOutput,\n ConfigUISchema,\n ClassMapDefinition,\n ProbeResult,\n ModelCatalogEntry,\n DetectionModel,\n LabelDefinition,\n IInferenceEngine,\n ModelRequirement,\n ResolvedInferenceConfig,\n} from '@camstack/types'\nimport { FACE_DETECTION_MODELS } from '../../catalogs/face-detection-models.js'\nimport { cropRegion, letterbox } from '../../shared/image-utils.js'\nimport { scrfdPostprocess } from '../../shared/postprocess/scrfd.js'\nimport { resolveEngine } from '../../shared/engine-resolver.js'\n\nconst FACE_LABEL: LabelDefinition = { id: 'face', name: 'Face' }\nconst FACE_LABELS: readonly LabelDefinition[] = [FACE_LABEL]\nconst FACE_CLASS_MAP: ClassMapDefinition = { mapping: {}, preserveOriginal: true }\n\n/** RAM estimates per model (MB) */\nconst RAM_ESTIMATES: Record<string, number> = {\n 'scrfd-500m': 50,\n 'scrfd-2.5g': 80,\n 'scrfd-10g': 200,\n}\n\n/** Accuracy scores per model (0-100) */\nconst ACCURACY_SCORES: Record<string, number> = {\n 'scrfd-500m': 70,\n 'scrfd-2.5g': 82,\n 'scrfd-10g': 92,\n}\n\nexport default class FaceDetectionAddon implements ICropperProvider, IDetectionAddon {\n readonly id = 'face-detection'\n readonly slot = 'cropper' as const\n readonly inputClasses = ['person'] as const\n readonly outputClasses = ['face'] as const\n readonly slotPriority = 0\n readonly manifest: AddonManifest = {\n id: 'face-detection',\n name: 'Face Detection',\n version: '0.1.0',\n\n description: 'SCRFD-based face detector — crops face regions from person detections',\n\n slot: 'cropper',\n inputClasses: ['person'],\n outputClasses: ['face'],\n supportsCustomModels: false,\n mayRequirePython: false,\n defaultConfig: {\n modelId: 'scrfd-500m',\n runtime: 'node',\n backend: 'cpu',\n confidence: 0.5,\n },\n }\n\n private engine: IInferenceEngine | null = null\n private modelEntry!: 
ModelCatalogEntry\n private confidence = 0.5\n private resolvedConfig: ResolvedInferenceConfig | null = null\n private ctx: AddonContext | null = null\n\n getModelRequirements(): ModelRequirement[] {\n return FACE_DETECTION_MODELS.map((m) => ({\n modelId: m.id,\n name: m.name,\n minRAM_MB: RAM_ESTIMATES[m.id] ?? 50,\n accuracyScore: ACCURACY_SCORES[m.id] ?? 70,\n formats: Object.keys(m.formats) as readonly string[],\n }))\n }\n\n configure(config: ResolvedInferenceConfig): void {\n this.resolvedConfig = config\n }\n\n async initialize(ctx: AddonContext): Promise<void> {\n this.ctx = ctx\n const cfg = ctx.addonConfig\n const modelId = (cfg['modelId'] as string | undefined) ?? this.resolvedConfig?.modelId ?? 'scrfd-500m'\n this.confidence = (cfg['confidence'] as number | undefined) ?? 0.5\n\n const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId)\n if (!entry) {\n throw new Error(`FaceDetectionAddon: unknown modelId \"${modelId}\"`)\n }\n this.modelEntry = entry\n }\n\n async crop(input: CropInput): Promise<CropperOutput> {\n if (!this.engine) await this.ensureEngine()\n const start = Date.now()\n const { width: inputW, height: inputH } = this.modelEntry.inputSize\n const targetSize = Math.max(inputW, inputH)\n\n // Crop the person region from the full frame\n const personCrop = await cropRegion(input.frame.data, input.roi)\n\n // Letterbox resize to model input size\n const lb = await letterbox(personCrop, targetSize)\n\n // SCRFD has multiple output tensors\n const engineWithMulti = this.engine as IInferenceEngine & {\n runMultiOutput?: (input: Float32Array, shape: readonly number[]) => Promise<Record<string, Float32Array>>\n }\n\n let outputs: Record<string, Float32Array>\n if (typeof engineWithMulti.runMultiOutput === 'function') {\n outputs = await engineWithMulti.runMultiOutput(lb.data, [1, 3, targetSize, targetSize])\n } else {\n // Fallback: wrap single output\n const single = await this.engine!.run(lb.data, [1, 3, targetSize, targetSize])\n 
outputs = { output0: single }\n }\n\n const crops = scrfdPostprocess(\n outputs,\n this.confidence,\n targetSize,\n lb.originalWidth,\n lb.originalHeight,\n )\n\n return {\n crops,\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n private async ensureEngine(): Promise<void> {\n const config = this.resolvedConfig\n const modelId = config?.modelId ?? this.modelEntry.id\n const runtime = config?.runtime === 'python' ? 'coreml' : (config?.runtime === 'node' ? 'onnx' : 'auto')\n const backend = config?.backend ?? 'cpu'\n const format = config?.format ?? 'onnx'\n\n const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId) ?? this.modelEntry\n this.modelEntry = entry\n\n const modelsDir = this.ctx!.models?.getModelsDir() ?? this.ctx!.locationPaths.models\n\n if (this.ctx!.models) {\n await this.ctx!.models.ensure(modelId, format as any)\n }\n\n const resolved = await resolveEngine({\n runtime: runtime as 'auto',\n backend,\n modelEntry: entry,\n modelsDir,\n models: this.ctx!.models,\n })\n this.engine = resolved.engine\n }\n\n async shutdown(): Promise<void> {\n await this.engine?.dispose()\n }\n\n getConfigSchema(): ConfigUISchema {\n return {\n sections: [\n {\n id: 'model',\n title: 'Model',\n columns: 1,\n fields: [\n {\n key: 'modelId',\n label: 'Model',\n type: 'model-selector',\n catalog: [...FACE_DETECTION_MODELS],\n allowCustom: false,\n allowConversion: false,\n acceptFormats: ['onnx', 'coreml', 'openvino'],\n requiredMetadata: ['inputSize', 'labels', 'outputFormat'],\n outputFormatHint: 'ssd',\n },\n ],\n },\n {\n id: 'runtime',\n title: 'Runtime',\n columns: 2,\n fields: [\n {\n key: 'runtime',\n label: 'Runtime',\n type: 'select',\n options: [\n { value: 'auto', label: 'Auto' },\n { value: 'onnx', label: 'ONNX Runtime' },\n { value: 'coreml', label: 'CoreML (Apple)' },\n { value: 'openvino', label: 'OpenVINO (Intel)' },\n ],\n },\n {\n key: 'backend',\n label: 'Backend',\n type: 'select',\n showWhen: { field: 'runtime', 
equals: 'onnx' },\n options: [\n { value: 'auto', label: 'Auto' },\n { value: 'cpu', label: 'CPU' },\n { value: 'coreml', label: 'CoreML' },\n { value: 'cuda', label: 'CUDA (NVIDIA)' },\n ],\n },\n ],\n },\n {\n id: 'thresholds',\n title: 'Detection Thresholds',\n columns: 1,\n fields: [\n {\n key: 'confidence',\n label: 'Confidence Threshold',\n type: 'slider',\n min: 0.1,\n max: 1.0,\n step: 0.05,\n default: 0.5,\n },\n ],\n },\n ],\n }\n }\n\n getClassMap(): ClassMapDefinition {\n return FACE_CLASS_MAP\n }\n\n getModelCatalog(): ModelCatalogEntry[] {\n return [...FACE_DETECTION_MODELS]\n }\n\n getAvailableModels(): DetectionModel[] {\n return []\n }\n\n getActiveLabels(): readonly LabelDefinition[] {\n return FACE_LABELS\n }\n\n async probe(): Promise<ProbeResult> {\n return {\n available: true,\n runtime: this.engine?.runtime ?? 'onnx',\n device: this.engine?.device ?? 'cpu',\n capabilities: ['fp32'],\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;AACA,SAAS,kBAAkB;AAG3B,IAAM,UAAU;AAEhB,IAAM,cAA0C;AAAA,EAC9C,EAAE,IAAI,QAAQ,MAAM,OAAO;AAC7B;AAEO,IAAM,wBAAsD;AAAA,EACjE;AAAA,IACE,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,WAAW,EAAE,OAAO,KAAK,QAAQ,IAAI;AAAA,IACrC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,MAAM;AAAA,QACJ,KAAK,WAAW,SAAS,mDAAmD;AAAA,QAC5E,QAAQ;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,QACN,KAAK,WAAW,SAAS,0DAA0D;AAAA,QACnF,QAAQ;AAAA,QACR,aAAa;AAAA,QACb,OAAO;AAAA,QACP,UAAU,CAAC,QAAQ;AAAA,MACrB;AAAA,MACA,UAAU;AAAA,QACR,KAAK,WAAW,SAAS,sDAAsD;AAAA,QAC/E,QAAQ;AAAA,QACR,UAAU,CAAC,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,IACb,WAAW,EAAE,OAAO,KAAK,QAAQ,IAAI;AAAA,IACrC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,MAAM;AAAA,QACJ,KAAK,WAAW,SAAS,mDAAmD;AAAA,QAC5E,QAAQ;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,QACN,KAAK,WAAW,SAAS,0DAA0D;AAAA,QACnF,QAAQ;AAAA,QACR,aAAa;AAAA,QACb,OAAO;AAAA,QACP,UAAU,CAAC,QAAQ;AAAA,MACrB;AAAA,MACA,UAAU;AAAA,QACR,KAAK,WAAW,SAAS,sDAAsD;AAAA,QAC/E,QAAQ;AAAA,QACR,UAAU,CAAC,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,IAAI;AAAA,IACJ,
MAAM;AAAA,IACN,aAAa;AAAA,IACb,WAAW,EAAE,OAAO,KAAK,QAAQ,IAAI;AAAA,IACrC,QAAQ;AAAA,IACR,SAAS;AAAA,MACP,MAAM;AAAA,QACJ,KAAK,WAAW,SAAS,kDAAkD;AAAA,QAC3E,QAAQ;AAAA,MACV;AAAA,MACA,QAAQ;AAAA,QACN,KAAK,WAAW,SAAS,yDAAyD;AAAA,QAClF,QAAQ;AAAA,QACR,aAAa;AAAA,QACb,OAAO;AAAA,QACP,UAAU,CAAC,QAAQ;AAAA,MACrB;AAAA,MACA,UAAU;AAAA,QACR,KAAK,WAAW,SAAS,qDAAqD;AAAA,QAC9E,QAAQ;AAAA,QACR,UAAU,CAAC,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AACF;;;AC/EA,IAAM,UAAU,CAAC,GAAG,IAAI,EAAE;AAC1B,IAAM,yBAAyB;AAG/B,SAAS,gBAAgB,QAAgB,WAAsD;AAC7F,QAAM,cAAc,KAAK,KAAK,YAAY,MAAM;AAChD,QAAM,UAA6C,CAAC;AACpD,WAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,aAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,eAAS,IAAI,GAAG,IAAI,wBAAwB,KAAK;AAC/C,gBAAQ,KAAK;AAAA,UACX,KAAK,IAAI,OAAO;AAAA,UAChB,KAAK,IAAI,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,iBACd,SACA,YACA,WACA,eACA,gBACoB;AAEpB,QAAM,SAAS,gBAAgB;AAC/B,QAAM,SAAS,iBAAiB;AAQhC,QAAM,aAA0B,CAAC;AAEjC,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAW,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,QAAQ,CAAC;AACjH,UAAM,UAAU,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,OAAO,CAAC;AAC9G,UAAM,SAAS,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,OAAO,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,MAAM,CAAC;AAE3G,QAAI,CAAC,YAAY,CAAC,QAAS;AAE3B,UAAM,SAAS,QAAQ,QAAQ;AAC/B,UAAM,SAAS,QAAQ,OAAO;AAC9B,UAAM,MAAM,SAAS,QAAQ,MAAM,IAAI;AACvC,UAAM,UAAU,gBAAgB,QAAQ,SAAS;AAEjD,UAAM,IAAI,QAAQ;AAElB,aAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,YAAM,QAAQ,OAAO,CAAC;AACtB,UAAI,QAAQ,WAAY;AAExB,YAAM,SAAS,QAAQ,CAAC;AAGxB,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,CAAC,IAAK;AACxC,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAC5C,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAC5C,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAE5C,YAAM,OAAoB;AAAA,QACxB,GAAG,KAAK;AAAA,QACR,GAAG,KAAK;AAAA,QACR,IAAI,KAAK,MAAM;AAAA,QACf,IAAI,KAAK,MAAM;AAAA,MACjB;AAEA,UAAI;AACJ,UAAI,KAAK;AACP,cAAM,MAAkB,CAAC;AACzB,iBAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,cAAI,KAAK;AAAA,YACP,IAAI,OAAO,KAAK,IAAI,IAAI,K
AAK,IAAI,CAAC,IAAK,UAAU;AAAA,YACjD,IAAI,OAAO,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,CAAC,IAAK,UAAU;AAAA,UACvD,CAAC;AAAA,QACH;AACA,oBAAY;AAAA,MACd;AAEA,iBAAW,KAAK,EAAE,MAAM,OAAO,UAAU,CAAC;AAAA,IAC5C;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,cAAc,IAAI,YAAY,IAAI;AAExC,SAAO,YAAY,IAAI,CAAC,QAAQ;AAC9B,UAAM,EAAE,MAAM,OAAO,UAAU,IAAI,WAAW,GAAG;AACjD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,eAAe;AAAA,MACf;AAAA,MACA;AAAA,MACA,GAAI,YAAY,EAAE,UAAU,IAAI,CAAC;AAAA,IACnC;AAAA,EACF,CAAC;AACH;;;ACvFA,IAAM,aAA8B,EAAE,IAAI,QAAQ,MAAM,OAAO;AAC/D,IAAMA,eAA0C,CAAC,UAAU;AAC3D,IAAM,iBAAqC,EAAE,SAAS,CAAC,GAAG,kBAAkB,KAAK;AAGjF,IAAM,gBAAwC;AAAA,EAC5C,cAAc;AAAA,EACd,cAAc;AAAA,EACd,aAAa;AACf;AAGA,IAAM,kBAA0C;AAAA,EAC9C,cAAc;AAAA,EACd,cAAc;AAAA,EACd,aAAa;AACf;AAEA,IAAqB,qBAArB,MAAqF;AAAA,EAC1E,KAAK;AAAA,EACL,OAAO;AAAA,EACP,eAAe,CAAC,QAAQ;AAAA,EACxB,gBAAgB,CAAC,MAAM;AAAA,EACvB,eAAe;AAAA,EACf,WAA0B;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IAET,aAAa;AAAA,IAEb,MAAM;AAAA,IACN,cAAc,CAAC,QAAQ;AAAA,IACvB,eAAe,CAAC,MAAM;AAAA,IACtB,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,eAAe;AAAA,MACb,SAAS;AAAA,MACT,SAAS;AAAA,MACT,SAAS;AAAA,MACT,YAAY;AAAA,IACd;AAAA,EACF;AAAA,EAEQ,SAAkC;AAAA,EAClC;AAAA,EACA,aAAa;AAAA,EACb,iBAAiD;AAAA,EACjD,MAA2B;AAAA,EAEnC,uBAA2C;AACzC,WAAO,sBAAsB,IAAI,CAAC,OAAO;AAAA,MACvC,SAAS,EAAE;AAAA,MACX,MAAM,EAAE;AAAA,MACR,WAAW,cAAc,EAAE,EAAE,KAAK;AAAA,MAClC,eAAe,gBAAgB,EAAE,EAAE,KAAK;AAAA,MACxC,SAAS,OAAO,KAAK,EAAE,OAAO;AAAA,IAChC,EAAE;AAAA,EACJ;AAAA,EAEA,UAAU,QAAuC;AAC/C,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEA,MAAM,WAAW,KAAkC;AACjD,SAAK,MAAM;AACX,UAAM,MAAM,IAAI;AAChB,UAAM,UAAW,IAAI,SAAS,KAA4B,KAAK,gBAAgB,WAAW;AAC1F,SAAK,aAAc,IAAI,YAAY,KAA4B;AAE/D,UAAM,QAAQ,sBAAsB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO;AAChE,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,wCAAwC,OAAO,GAAG;AAAA,IACpE;AACA,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,MAAM,KAAK,OAA0C;AACnD,QAAI,CAAC,KAAK,OAAQ,OAAM,KAAK,aAAa;AAC1C,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,EAAE,OAAO,QAAQ,QAAQ,OAAO,IAAI,KAAK,WAAW;AAC1D,UAAM,aAAa,KAAK,IAAI,QAAQ,MAAM;AAG1C,UAAM,aAAa,MAAM,WAAW,MAAM,MAAM,MAAM,MAA
M,GAAG;AAG/D,UAAM,KAAK,MAAM,UAAU,YAAY,UAAU;AAGjD,UAAM,kBAAkB,KAAK;AAI7B,QAAI;AACJ,QAAI,OAAO,gBAAgB,mBAAmB,YAAY;AACxD,gBAAU,MAAM,gBAAgB,eAAe,GAAG,MAAM,CAAC,GAAG,GAAG,YAAY,UAAU,CAAC;AAAA,IACxF,OAAO;AAEL,YAAM,SAAS,MAAM,KAAK,OAAQ,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG,YAAY,UAAU,CAAC;AAC7E,gBAAU,EAAE,SAAS,OAAO;AAAA,IAC9B;AAEA,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AAEA,WAAO;AAAA,MACL;AAAA,MACA,aAAa,KAAK,IAAI,IAAI;AAAA,MAC1B,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,SAAS,KAAK;AACpB,UAAM,UAAU,QAAQ,WAAW,KAAK,WAAW;AACnD,UAAM,UAAU,QAAQ,YAAY,WAAW,WAAY,QAAQ,YAAY,SAAS,SAAS;AACjG,UAAM,UAAU,QAAQ,WAAW;AACnC,UAAM,SAAS,QAAQ,UAAU;AAEjC,UAAM,QAAQ,sBAAsB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO,KAAK,KAAK;AAC1E,SAAK,aAAa;AAElB,UAAM,YAAY,KAAK,IAAK,QAAQ,aAAa,KAAK,KAAK,IAAK,cAAc;AAE9E,QAAI,KAAK,IAAK,QAAQ;AACpB,YAAM,KAAK,IAAK,OAAO,OAAO,SAAS,MAAa;AAAA,IACtD;AAEA,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA,QAAQ,KAAK,IAAK;AAAA,IACpB,CAAC;AACD,SAAK,SAAS,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,WAA0B;AAC9B,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC7B;AAAA,EAEA,kBAAkC;AAChC,WAAO;AAAA,MACL,UAAU;AAAA,QACR;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS,CAAC,GAAG,qBAAqB;AAAA,cAClC,aAAa;AAAA,cACb,iBAAiB;AAAA,cACjB,eAAe,CAAC,QAAQ,UAAU,UAAU;AAAA,cAC5C,kBAAkB,CAAC,aAAa,UAAU,cAAc;AAAA,cACxD,kBAAkB;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS;AAAA,gBACP,EAAE,OAAO,QAAQ,OAAO,OAAO;AAAA,gBAC/B,EAAE,OAAO,QAAQ,OAAO,eAAe;AAAA,gBACvC,EAAE,OAAO,UAAU,OAAO,iBAAiB;AAAA,gBAC3C,EAAE,OAAO,YAAY,OAAO,mBAAmB;AAAA,cACjD;AAAA,YACF;AAAA,YACA;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,UAAU,EAAE,OAAO,WAAW,QAAQ,OAAO;AAAA,cAC7C,SAAS;AAAA,gBACP,EAAE,OAAO,QAAQ,OAAO,OAAO;AAAA,gBAC/B,EAAE,OAAO,OAAO,OAAO,MAAM;AAAA,gBAC7B,EAAE,OAAO,UAAU,OAAO,SAAS;AAAA,gBACnC,EAA
E,OAAO,QAAQ,OAAO,gBAAgB;AAAA,cAC1C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,KAAK;AAAA,cACL,KAAK;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,YACX;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,cAAkC;AAChC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAuC;AACrC,WAAO,CAAC,GAAG,qBAAqB;AAAA,EAClC;AAAA,EAEA,qBAAuC;AACrC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,kBAA8C;AAC5C,WAAOA;AAAA,EACT;AAAA,EAEA,MAAM,QAA8B;AAClC,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,KAAK,QAAQ,WAAW;AAAA,MACjC,QAAQ,KAAK,QAAQ,UAAU;AAAA,MAC/B,cAAc,CAAC,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":["FACE_LABELS"]}