@camstack/vision 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/dist/addons/animal-classifier/index.d.mts +25 -0
  2. package/dist/addons/animal-classifier/index.d.ts +25 -0
  3. package/dist/addons/animal-classifier/index.js +469 -0
  4. package/dist/addons/animal-classifier/index.js.map +1 -0
  5. package/dist/addons/animal-classifier/index.mjs +9 -0
  6. package/dist/addons/animal-classifier/index.mjs.map +1 -0
  7. package/dist/addons/audio-classification/index.d.mts +31 -0
  8. package/dist/addons/audio-classification/index.d.ts +31 -0
  9. package/dist/addons/audio-classification/index.js +411 -0
  10. package/dist/addons/audio-classification/index.js.map +1 -0
  11. package/dist/addons/audio-classification/index.mjs +8 -0
  12. package/dist/addons/audio-classification/index.mjs.map +1 -0
  13. package/dist/addons/bird-global-classifier/index.d.mts +26 -0
  14. package/dist/addons/bird-global-classifier/index.d.ts +26 -0
  15. package/dist/addons/bird-global-classifier/index.js +475 -0
  16. package/dist/addons/bird-global-classifier/index.js.map +1 -0
  17. package/dist/addons/bird-global-classifier/index.mjs +9 -0
  18. package/dist/addons/bird-global-classifier/index.mjs.map +1 -0
  19. package/dist/addons/bird-nabirds-classifier/index.d.mts +28 -0
  20. package/dist/addons/bird-nabirds-classifier/index.d.ts +28 -0
  21. package/dist/addons/bird-nabirds-classifier/index.js +517 -0
  22. package/dist/addons/bird-nabirds-classifier/index.js.map +1 -0
  23. package/dist/addons/bird-nabirds-classifier/index.mjs +9 -0
  24. package/dist/addons/bird-nabirds-classifier/index.mjs.map +1 -0
  25. package/dist/addons/camera-native-detection/index.d.mts +32 -0
  26. package/dist/addons/camera-native-detection/index.d.ts +32 -0
  27. package/dist/addons/camera-native-detection/index.js +99 -0
  28. package/dist/addons/camera-native-detection/index.js.map +1 -0
  29. package/dist/addons/camera-native-detection/index.mjs +7 -0
  30. package/dist/addons/camera-native-detection/index.mjs.map +1 -0
  31. package/dist/addons/face-detection/index.d.mts +24 -0
  32. package/dist/addons/face-detection/index.d.ts +24 -0
  33. package/dist/addons/face-detection/index.js +513 -0
  34. package/dist/addons/face-detection/index.js.map +1 -0
  35. package/dist/addons/face-detection/index.mjs +10 -0
  36. package/dist/addons/face-detection/index.mjs.map +1 -0
  37. package/dist/addons/face-recognition/index.d.mts +24 -0
  38. package/dist/addons/face-recognition/index.d.ts +24 -0
  39. package/dist/addons/face-recognition/index.js +437 -0
  40. package/dist/addons/face-recognition/index.js.map +1 -0
  41. package/dist/addons/face-recognition/index.mjs +9 -0
  42. package/dist/addons/face-recognition/index.mjs.map +1 -0
  43. package/dist/addons/motion-detection/index.d.mts +26 -0
  44. package/dist/addons/motion-detection/index.d.ts +26 -0
  45. package/dist/addons/motion-detection/index.js +273 -0
  46. package/dist/addons/motion-detection/index.js.map +1 -0
  47. package/dist/addons/motion-detection/index.mjs +8 -0
  48. package/dist/addons/motion-detection/index.mjs.map +1 -0
  49. package/dist/addons/object-detection/index.d.mts +25 -0
  50. package/dist/addons/object-detection/index.d.ts +25 -0
  51. package/dist/addons/object-detection/index.js +673 -0
  52. package/dist/addons/object-detection/index.js.map +1 -0
  53. package/dist/addons/object-detection/index.mjs +10 -0
  54. package/dist/addons/object-detection/index.mjs.map +1 -0
  55. package/dist/addons/plate-detection/index.d.mts +25 -0
  56. package/dist/addons/plate-detection/index.d.ts +25 -0
  57. package/dist/addons/plate-detection/index.js +477 -0
  58. package/dist/addons/plate-detection/index.js.map +1 -0
  59. package/dist/addons/plate-detection/index.mjs +10 -0
  60. package/dist/addons/plate-detection/index.mjs.map +1 -0
  61. package/dist/addons/plate-recognition/index.d.mts +25 -0
  62. package/dist/addons/plate-recognition/index.d.ts +25 -0
  63. package/dist/addons/plate-recognition/index.js +470 -0
  64. package/dist/addons/plate-recognition/index.js.map +1 -0
  65. package/dist/addons/plate-recognition/index.mjs +9 -0
  66. package/dist/addons/plate-recognition/index.mjs.map +1 -0
  67. package/dist/chunk-3BKYLBBH.mjs +229 -0
  68. package/dist/chunk-3BKYLBBH.mjs.map +1 -0
  69. package/dist/chunk-4PC262GU.mjs +203 -0
  70. package/dist/chunk-4PC262GU.mjs.map +1 -0
  71. package/dist/chunk-6OR5TE7A.mjs +101 -0
  72. package/dist/chunk-6OR5TE7A.mjs.map +1 -0
  73. package/dist/chunk-7SZAISGP.mjs +210 -0
  74. package/dist/chunk-7SZAISGP.mjs.map +1 -0
  75. package/dist/chunk-AD2TFYZA.mjs +235 -0
  76. package/dist/chunk-AD2TFYZA.mjs.map +1 -0
  77. package/dist/chunk-CGYSSHHM.mjs +363 -0
  78. package/dist/chunk-CGYSSHHM.mjs.map +1 -0
  79. package/dist/chunk-IYHMGYGP.mjs +79 -0
  80. package/dist/chunk-IYHMGYGP.mjs.map +1 -0
  81. package/dist/chunk-J3IUBPRE.mjs +187 -0
  82. package/dist/chunk-J3IUBPRE.mjs.map +1 -0
  83. package/dist/chunk-KFZDJPYL.mjs +190 -0
  84. package/dist/chunk-KFZDJPYL.mjs.map +1 -0
  85. package/dist/chunk-KUO2BVFY.mjs +90 -0
  86. package/dist/chunk-KUO2BVFY.mjs.map +1 -0
  87. package/dist/chunk-PXBY3QOA.mjs +152 -0
  88. package/dist/chunk-PXBY3QOA.mjs.map +1 -0
  89. package/dist/chunk-XUKDL23Y.mjs +216 -0
  90. package/dist/chunk-XUKDL23Y.mjs.map +1 -0
  91. package/dist/chunk-Z26BVC7S.mjs +214 -0
  92. package/dist/chunk-Z26BVC7S.mjs.map +1 -0
  93. package/dist/chunk-Z5AHZQEZ.mjs +258 -0
  94. package/dist/chunk-Z5AHZQEZ.mjs.map +1 -0
  95. package/dist/index.d.mts +152 -0
  96. package/dist/index.d.ts +152 -0
  97. package/dist/index.js +2775 -0
  98. package/dist/index.js.map +1 -0
  99. package/dist/index.mjs +205 -0
  100. package/dist/index.mjs.map +1 -0
  101. package/package.json +43 -0
  102. package/python/coreml_inference.py +67 -0
  103. package/python/openvino_inference.py +76 -0
  104. package/python/pytorch_inference.py +74 -0
@@ -0,0 +1,101 @@
1
+ // src/shared/image-utils.ts
2
+ import sharp from "sharp";
3
/** Decode a JPEG buffer into raw interleaved RGB pixels plus its dimensions. */
async function jpegToRgb(jpeg) {
  const decoded = await sharp(jpeg)
    .removeAlpha()
    .raw()
    .toBuffer({ resolveWithObject: true });
  const { width, height } = decoded.info;
  return { data: decoded.data, width, height };
}
7
/** Crop the given bounding box (x/y/w/h) out of a JPEG, returning a new JPEG buffer. */
async function cropRegion(jpeg, roi) {
  const region = {
    left: Math.round(roi.x),
    top: Math.round(roi.y),
    width: Math.round(roi.w),
    height: Math.round(roi.h)
  };
  return sharp(jpeg).extract(region).jpeg().toBuffer();
}
15
/**
 * Letterbox-resize a JPEG for a square model input (YOLO-style):
 * scale to fit while preserving aspect ratio, then pad to
 * targetSize x targetSize with the conventional gray (114, 114, 114) fill.
 *
 * Returns planar CHW float32 pixels scaled to [0, 1], plus the scale factor
 * and padding offsets needed to map detections back onto the original image.
 *
 * Throws an Error when the image dimensions cannot be read — previously the
 * `?? 0` fallbacks made `scale` Infinity and the rounded sizes NaN, which
 * surfaced only as an opaque sharp failure.
 */
async function letterbox(jpeg, targetSize) {
  const meta = await sharp(jpeg).metadata();
  const originalWidth = meta.width ?? 0;
  const originalHeight = meta.height ?? 0;
  // Fail fast on missing/zero dimensions instead of propagating NaN geometry.
  if (originalWidth <= 0 || originalHeight <= 0) {
    throw new Error(`letterbox: unable to determine image dimensions (${originalWidth}x${originalHeight})`);
  }
  const scale = Math.min(targetSize / originalWidth, targetSize / originalHeight);
  const scaledWidth = Math.round(originalWidth * scale);
  const scaledHeight = Math.round(originalHeight * scale);
  // Center the scaled image; any odd remainder goes to the bottom/right edge.
  const padX = Math.floor((targetSize - scaledWidth) / 2);
  const padY = Math.floor((targetSize - scaledHeight) / 2);
  const { data } = await sharp(jpeg).resize(scaledWidth, scaledHeight).extend({
    top: padY,
    bottom: targetSize - scaledHeight - padY,
    left: padX,
    right: targetSize - scaledWidth - padX,
    background: { r: 114, g: 114, b: 114 }
  }).removeAlpha().raw().toBuffer({ resolveWithObject: true });
  // Repack interleaved HWC uint8 into planar CHW float32 in [0, 1].
  const numPixels = targetSize * targetSize;
  const float32 = new Float32Array(3 * numPixels);
  for (let i = 0; i < numPixels; i++) {
    const srcBase = i * 3;
    float32[0 * numPixels + i] = data[srcBase] / 255;
    float32[1 * numPixels + i] = data[srcBase + 1] / 255;
    float32[2 * numPixels + i] = data[srcBase + 2] / 255;
  }
  return { data: float32, scale, padX, padY, originalWidth, originalHeight };
}
41
/**
 * Resize a JPEG to targetWidth x targetHeight and convert to a Float32Array.
 *
 * normalization:
 *   - "zero-one":  pixel / 255
 *   - "imagenet":  (pixel / 255 - mean[c]) / std[c] with standard ImageNet stats
 *   - anything else: raw 0-255 byte values
 * layout:
 *   - "nchw": planar, channel-major (all R, then G, then B)
 *   - anything else: interleaved NHWC
 *
 * The original carried two near-identical loops for the two layouts; they are
 * merged here — only the destination index differs, so the layout choice is
 * hoisted and applied per element. Behavior is unchanged.
 */
async function resizeAndNormalize(jpeg, targetWidth, targetHeight, normalization, layout) {
  const { data } = await sharp(jpeg)
    .resize(targetWidth, targetHeight)
    .removeAlpha()
    .raw()
    .toBuffer({ resolveWithObject: true });
  const numPixels = targetWidth * targetHeight;
  const float32 = new Float32Array(3 * numPixels);
  // Per-channel ImageNet statistics (RGB order).
  const mean = [0.485, 0.456, 0.406];
  const std = [0.229, 0.224, 0.225];
  const isNchw = layout === "nchw";
  for (let i = 0; i < numPixels; i++) {
    const srcBase = i * 3;
    for (let c = 0; c < 3; c++) {
      let val;
      if (normalization === "zero-one") {
        val = data[srcBase + c] / 255;
      } else if (normalization === "imagenet") {
        val = (data[srcBase + c] / 255 - mean[c]) / std[c];
      } else {
        val = data[srcBase + c];
      }
      float32[isNchw ? c * numPixels + i : srcBase + c] = val;
    }
  }
  return float32;
}
82
/**
 * Convert interleaved RGB bytes to a grayscale Uint8Array using
 * BT.601 luma weights (0.299 R + 0.587 G + 0.114 B), rounded per pixel.
 */
function rgbToGrayscale(rgb, width, height) {
  const numPixels = width * height;
  const gray = new Uint8Array(numPixels);
  for (let i = 0, src = 0; i < numPixels; i++, src += 3) {
    gray[i] = Math.round(0.299 * rgb[src] + 0.587 * rgb[src + 1] + 0.114 * rgb[src + 2]);
  }
  return gray;
}
93
+
94
+ export {
95
+ jpegToRgb,
96
+ cropRegion,
97
+ letterbox,
98
+ resizeAndNormalize,
99
+ rgbToGrayscale
100
+ };
101
+ //# sourceMappingURL=chunk-6OR5TE7A.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/shared/image-utils.ts"],"sourcesContent":["import sharp from 'sharp'\nimport type { BoundingBox } from '@camstack/types'\n\n/** Decode JPEG to raw RGB pixels */\nexport async function jpegToRgb(\n jpeg: Buffer,\n): Promise<{ data: Buffer; width: number; height: number }> {\n const { data, info } = await sharp(jpeg)\n .removeAlpha()\n .raw()\n .toBuffer({ resolveWithObject: true })\n return { data, width: info.width, height: info.height }\n}\n\n/** Crop a region from a JPEG buffer */\nexport async function cropRegion(jpeg: Buffer, roi: BoundingBox): Promise<Buffer> {\n return sharp(jpeg)\n .extract({\n left: Math.round(roi.x),\n top: Math.round(roi.y),\n width: Math.round(roi.w),\n height: Math.round(roi.h),\n })\n .jpeg()\n .toBuffer()\n}\n\n/** Letterbox resize for YOLO: resize preserving aspect ratio, pad to square */\nexport async function letterbox(\n jpeg: Buffer,\n targetSize: number,\n): Promise<{\n data: Float32Array\n scale: number\n padX: number\n padY: number\n originalWidth: number\n originalHeight: number\n}> {\n const meta = await sharp(jpeg).metadata()\n const originalWidth = meta.width ?? 0\n const originalHeight = meta.height ?? 
0\n\n const scale = Math.min(targetSize / originalWidth, targetSize / originalHeight)\n const scaledWidth = Math.round(originalWidth * scale)\n const scaledHeight = Math.round(originalHeight * scale)\n\n const padX = Math.floor((targetSize - scaledWidth) / 2)\n const padY = Math.floor((targetSize - scaledHeight) / 2)\n\n const { data } = await sharp(jpeg)\n .resize(scaledWidth, scaledHeight)\n .extend({\n top: padY,\n bottom: targetSize - scaledHeight - padY,\n left: padX,\n right: targetSize - scaledWidth - padX,\n background: { r: 114, g: 114, b: 114 },\n })\n .removeAlpha()\n .raw()\n .toBuffer({ resolveWithObject: true })\n\n // Convert HWC uint8 to CHW float [0,1]\n const numPixels = targetSize * targetSize\n const float32 = new Float32Array(3 * numPixels)\n for (let i = 0; i < numPixels; i++) {\n const srcBase = i * 3\n float32[0 * numPixels + i] = (data[srcBase]! / 255)\n float32[1 * numPixels + i] = (data[srcBase + 1]! / 255)\n float32[2 * numPixels + i] = (data[srcBase + 2]! / 255)\n }\n\n return { data: float32, scale, padX, padY, originalWidth, originalHeight }\n}\n\n/** Resize and normalize to Float32Array */\nexport async function resizeAndNormalize(\n jpeg: Buffer,\n targetWidth: number,\n targetHeight: number,\n normalization: 'zero-one' | 'imagenet' | 'none',\n layout: 'nchw' | 'nhwc',\n): Promise<Float32Array> {\n const { data } = await sharp(jpeg)\n .resize(targetWidth, targetHeight)\n .removeAlpha()\n .raw()\n .toBuffer({ resolveWithObject: true })\n\n const numPixels = targetWidth * targetHeight\n const float32 = new Float32Array(3 * numPixels)\n\n // ImageNet mean and std per channel\n const mean = [0.485, 0.456, 0.406]\n const std = [0.229, 0.224, 0.225]\n\n if (layout === 'nchw') {\n for (let i = 0; i < numPixels; i++) {\n const srcBase = i * 3\n for (let c = 0; c < 3; c++) {\n const raw = data[srcBase + c]! 
/ 255\n let val: number\n if (normalization === 'zero-one') {\n val = raw\n } else if (normalization === 'imagenet') {\n val = (raw - mean[c]!) / std[c]!\n } else {\n val = data[srcBase + c]!\n }\n float32[c * numPixels + i] = val\n }\n }\n } else {\n // nhwc\n for (let i = 0; i < numPixels; i++) {\n const srcBase = i * 3\n for (let c = 0; c < 3; c++) {\n const raw = data[srcBase + c]! / 255\n let val: number\n if (normalization === 'zero-one') {\n val = raw\n } else if (normalization === 'imagenet') {\n val = (raw - mean[c]!) / std[c]!\n } else {\n val = data[srcBase + c]!\n }\n float32[i * 3 + c] = val\n }\n }\n }\n\n return float32\n}\n\n/** Convert raw RGB to grayscale Uint8Array */\nexport function rgbToGrayscale(rgb: Buffer, width: number, height: number): Uint8Array {\n const numPixels = width * height\n const gray = new Uint8Array(numPixels)\n for (let i = 0; i < numPixels; i++) {\n const r = rgb[i * 3]!\n const g = rgb[i * 3 + 1]!\n const b = rgb[i * 3 + 2]!\n // BT.601 luma\n gray[i] = Math.round(0.299 * r + 0.587 * g + 0.114 * b)\n }\n return 
gray\n}\n"],"mappings":";AAAA,OAAO,WAAW;AAIlB,eAAsB,UACpB,MAC0D;AAC1D,QAAM,EAAE,MAAM,KAAK,IAAI,MAAM,MAAM,IAAI,EACpC,YAAY,EACZ,IAAI,EACJ,SAAS,EAAE,mBAAmB,KAAK,CAAC;AACvC,SAAO,EAAE,MAAM,OAAO,KAAK,OAAO,QAAQ,KAAK,OAAO;AACxD;AAGA,eAAsB,WAAW,MAAc,KAAmC;AAChF,SAAO,MAAM,IAAI,EACd,QAAQ;AAAA,IACP,MAAM,KAAK,MAAM,IAAI,CAAC;AAAA,IACtB,KAAK,KAAK,MAAM,IAAI,CAAC;AAAA,IACrB,OAAO,KAAK,MAAM,IAAI,CAAC;AAAA,IACvB,QAAQ,KAAK,MAAM,IAAI,CAAC;AAAA,EAC1B,CAAC,EACA,KAAK,EACL,SAAS;AACd;AAGA,eAAsB,UACpB,MACA,YAQC;AACD,QAAM,OAAO,MAAM,MAAM,IAAI,EAAE,SAAS;AACxC,QAAM,gBAAgB,KAAK,SAAS;AACpC,QAAM,iBAAiB,KAAK,UAAU;AAEtC,QAAM,QAAQ,KAAK,IAAI,aAAa,eAAe,aAAa,cAAc;AAC9E,QAAM,cAAc,KAAK,MAAM,gBAAgB,KAAK;AACpD,QAAM,eAAe,KAAK,MAAM,iBAAiB,KAAK;AAEtD,QAAM,OAAO,KAAK,OAAO,aAAa,eAAe,CAAC;AACtD,QAAM,OAAO,KAAK,OAAO,aAAa,gBAAgB,CAAC;AAEvD,QAAM,EAAE,KAAK,IAAI,MAAM,MAAM,IAAI,EAC9B,OAAO,aAAa,YAAY,EAChC,OAAO;AAAA,IACN,KAAK;AAAA,IACL,QAAQ,aAAa,eAAe;AAAA,IACpC,MAAM;AAAA,IACN,OAAO,aAAa,cAAc;AAAA,IAClC,YAAY,EAAE,GAAG,KAAK,GAAG,KAAK,GAAG,IAAI;AAAA,EACvC,CAAC,EACA,YAAY,EACZ,IAAI,EACJ,SAAS,EAAE,mBAAmB,KAAK,CAAC;AAGvC,QAAM,YAAY,aAAa;AAC/B,QAAM,UAAU,IAAI,aAAa,IAAI,SAAS;AAC9C,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,UAAM,UAAU,IAAI;AACpB,YAAQ,IAAI,YAAY,CAAC,IAAK,KAAK,OAAO,IAAK;AAC/C,YAAQ,IAAI,YAAY,CAAC,IAAK,KAAK,UAAU,CAAC,IAAK;AACnD,YAAQ,IAAI,YAAY,CAAC,IAAK,KAAK,UAAU,CAAC,IAAK;AAAA,EACrD;AAEA,SAAO,EAAE,MAAM,SAAS,OAAO,MAAM,MAAM,eAAe,eAAe;AAC3E;AAGA,eAAsB,mBACpB,MACA,aACA,cACA,eACA,QACuB;AACvB,QAAM,EAAE,KAAK,IAAI,MAAM,MAAM,IAAI,EAC9B,OAAO,aAAa,YAAY,EAChC,YAAY,EACZ,IAAI,EACJ,SAAS,EAAE,mBAAmB,KAAK,CAAC;AAEvC,QAAM,YAAY,cAAc;AAChC,QAAM,UAAU,IAAI,aAAa,IAAI,SAAS;AAG9C,QAAM,OAAO,CAAC,OAAO,OAAO,KAAK;AACjC,QAAM,MAAM,CAAC,OAAO,OAAO,KAAK;AAEhC,MAAI,WAAW,QAAQ;AACrB,aAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,YAAM,UAAU,IAAI;AACpB,eAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,cAAM,MAAM,KAAK,UAAU,CAAC,IAAK;AACjC,YAAI;AACJ,YAAI,kBAAkB,YAAY;AAChC,gBAAM;AAAA,QACR,WAAW,kBAAkB,YAAY;AACvC,iBAAO,MAAM,KAAK,CAAC,KAAM,IAAI,CAAC;AAAA,QAChC,OAAO;AACL,gBAAM,KAAK,UAAU,CAAC;AAAA,QACxB;AA
CA,gBAAQ,IAAI,YAAY,CAAC,IAAI;AAAA,MAC/B;AAAA,IACF;AAAA,EACF,OAAO;AAEL,aAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,YAAM,UAAU,IAAI;AACpB,eAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,cAAM,MAAM,KAAK,UAAU,CAAC,IAAK;AACjC,YAAI;AACJ,YAAI,kBAAkB,YAAY;AAChC,gBAAM;AAAA,QACR,WAAW,kBAAkB,YAAY;AACvC,iBAAO,MAAM,KAAK,CAAC,KAAM,IAAI,CAAC;AAAA,QAChC,OAAO;AACL,gBAAM,KAAK,UAAU,CAAC;AAAA,QACxB;AACA,gBAAQ,IAAI,IAAI,CAAC,IAAI;AAAA,MACvB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGO,SAAS,eAAe,KAAa,OAAe,QAA4B;AACrF,QAAM,YAAY,QAAQ;AAC1B,QAAM,OAAO,IAAI,WAAW,SAAS;AACrC,WAAS,IAAI,GAAG,IAAI,WAAW,KAAK;AAClC,UAAM,IAAI,IAAI,IAAI,CAAC;AACnB,UAAM,IAAI,IAAI,IAAI,IAAI,CAAC;AACvB,UAAM,IAAI,IAAI,IAAI,IAAI,CAAC;AAEvB,SAAK,CAAC,IAAI,KAAK,MAAM,QAAQ,IAAI,QAAQ,IAAI,QAAQ,CAAC;AAAA,EACxD;AACA,SAAO;AACT;","names":[]}
@@ -0,0 +1,210 @@
1
+ import {
2
+ cropRegion,
3
+ resizeAndNormalize
4
+ } from "./chunk-6OR5TE7A.mjs";
5
+ import {
6
+ resolveEngine
7
+ } from "./chunk-J3IUBPRE.mjs";
8
+
9
+ // src/addons/animal-classifier/index.ts
10
+ import { ANIMAL_TYPE_MODELS } from "@camstack/types";
11
// Label metadata exposed by the animal classifier addon.
const ANIMAL_TYPE_LABEL = { id: "animal-type", name: "Animal Type" };
const ANIMAL_TYPE_LABELS = [ANIMAL_TYPE_LABEL];
// No remapping: classifications keep their original class names.
const ANIMAL_CLASS_MAP = { mapping: {}, preserveOriginal: true };
// Output labels in model output-index order — ordering must match the model head.
const ANIMAL_10_CLASSES = [
  "cat", "cow", "dog", "dolphin", "eagle",
  "giant panda", "horse", "monkey", "sheep", "spider"
];
26
/**
 * Numerically stable softmax over a Float32Array of logits:
 * subtracts the max before exponentiating so large logits do not overflow.
 */
function softmax(logits) {
  let max = -Infinity;
  for (const v of logits) {
    if (v > max) max = v;
  }
  const out = new Float32Array(logits.length);
  let sum = 0;
  for (let i = 0; i < logits.length; i++) {
    out[i] = Math.exp(logits[i] - max);
    sum += out[i];
  }
  for (let i = 0; i < out.length; i++) {
    out[i] /= sum;
  }
  return out;
}
32
/**
 * Classifier addon that labels "animal" detections with one of ten common
 * species. Crops each detection, feeds it to the resolved inference engine
 * with ImageNet normalization (NCHW), and reports the argmax class when it
 * clears the configured confidence threshold.
 */
class AnimalClassifierAddon {
  id = "animal-classifier";
  slot = "classifier";
  inputClasses = ["animal"];
  outputClasses = ["animal-type:*"];
  slotPriority = 0;
  requiredSteps = [];
  manifest = {
    id: "animal-classifier",
    name: "Animal Classifier",
    version: "0.1.0",
    description: "ViT-based animal type classifier \u2014 10 common species",
    packageName: "@camstack/vision",
    slot: "classifier",
    inputClasses: ["animal"],
    outputClasses: ["animal-type:*"],
    supportsCustomModels: false,
    mayRequirePython: false,
    defaultConfig: {
      modelId: "animals-10",
      runtime: "auto",
      backend: "cpu",
      minConfidence: 0.3
    }
  };
  engine;      // set by initialize()
  modelEntry;  // set by initialize()
  minConfidence = 0.3;

  /**
   * Look up the configured model in ANIMAL_TYPE_MODELS and resolve an
   * inference engine for it. Throws on an unknown modelId.
   */
  async initialize(ctx) {
    const config = ctx.addonConfig;
    const modelId = config["modelId"] ?? "animals-10";
    this.minConfidence = config["minConfidence"] ?? 0.3;
    const catalogEntry = ANIMAL_TYPE_MODELS.find((model) => model.id === modelId);
    if (!catalogEntry) {
      throw new Error(`AnimalClassifierAddon: unknown modelId "${modelId}"`);
    }
    this.modelEntry = catalogEntry;
    const resolution = await resolveEngine({
      runtime: config["runtime"] ?? "auto",
      backend: config["backend"] ?? "cpu",
      modelEntry: catalogEntry,
      modelsDir: ctx.locationPaths.models
    });
    this.engine = resolution.engine;
  }

  /**
   * Classify one cropped detection: crop the ROI, resize + normalize to the
   * model's input size, run inference, softmax, then pick the top class.
   * Returns an empty classification list when the top score is below the
   * confidence threshold.
   */
  async classify(input) {
    const start = Date.now();
    const { width: inputW, height: inputH } = this.modelEntry.inputSize;
    const crop = await cropRegion(input.frame.data, input.roi);
    const tensor = await resizeAndNormalize(crop, inputW, inputH, "imagenet", "nchw");
    const logits = await this.engine.run(tensor, [1, 3, inputH, inputW]);
    const probs = softmax(logits);
    // Argmax over class probabilities.
    let bestIdx = 0;
    let bestScore = probs[0] ?? 0;
    for (let i = 1; i < probs.length; i++) {
      const score = probs[i] ?? 0;
      if (score > bestScore) {
        bestScore = score;
        bestIdx = i;
      }
    }
    if (bestScore < this.minConfidence) {
      return {
        classifications: [],
        inferenceMs: Date.now() - start,
        modelId: this.modelEntry.id
      };
    }
    const label = ANIMAL_10_CLASSES[bestIdx] ?? `animal_${bestIdx}`;
    return {
      classifications: [{ class: label, score: bestScore }],
      inferenceMs: Date.now() - start,
      modelId: this.modelEntry.id
    };
  }

  /** Dispose of the inference engine, if one was created. */
  async shutdown() {
    await this.engine?.dispose();
  }

  /** UI schema describing the addon's configuration panel. */
  getConfigSchema() {
    const modelSection = {
      id: "model",
      title: "Model",
      columns: 1,
      fields: [
        {
          key: "modelId",
          label: "Model",
          type: "model-selector",
          catalog: [...ANIMAL_TYPE_MODELS],
          allowCustom: false,
          allowConversion: false,
          acceptFormats: ["onnx", "coreml", "openvino"],
          requiredMetadata: ["inputSize", "labels"],
          outputFormatHint: "classification"
        }
      ]
    };
    const thresholdSection = {
      id: "thresholds",
      title: "Classification Settings",
      columns: 1,
      fields: [
        {
          key: "minConfidence",
          label: "Minimum Confidence",
          type: "slider",
          min: 0.05,
          max: 1,
          step: 0.05,
          default: 0.3
        }
      ]
    };
    const runtimeSection = {
      id: "runtime",
      title: "Runtime",
      columns: 2,
      fields: [
        {
          key: "runtime",
          label: "Runtime",
          type: "select",
          options: [
            { value: "auto", label: "Auto (recommended)" },
            { value: "onnx", label: "ONNX Runtime" },
            { value: "coreml", label: "CoreML (Apple)" }
          ]
        },
        {
          key: "backend",
          label: "Backend",
          type: "select",
          dependsOn: { runtime: "onnx" },
          options: [
            { value: "cpu", label: "CPU" },
            { value: "coreml", label: "CoreML" },
            { value: "cuda", label: "CUDA (NVIDIA)" }
          ]
        }
      ]
    };
    return { sections: [modelSection, thresholdSection, runtimeSection] };
  }

  getClassMap() {
    return ANIMAL_CLASS_MAP;
  }

  getModelCatalog() {
    return [...ANIMAL_TYPE_MODELS];
  }

  getAvailableModels() {
    return [];
  }

  getActiveLabels() {
    return ANIMAL_TYPE_LABELS;
  }

  /** Report availability; falls back to onnx/cpu before initialize() runs. */
  async probe() {
    return {
      available: true,
      runtime: this.engine?.runtime ?? "onnx",
      device: this.engine?.device ?? "cpu",
      capabilities: ["fp32"]
    };
  }
}
206
+
207
+ export {
208
+ AnimalClassifierAddon
209
+ };
210
+ //# sourceMappingURL=chunk-7SZAISGP.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/addons/animal-classifier/index.ts"],"sourcesContent":["import type {\n IClassifierProvider,\n IDetectionAddon,\n AddonManifest,\n AddonContext,\n CropInput,\n ClassifierOutput,\n ConfigUISchema,\n ClassMapDefinition,\n ProbeResult,\n ModelCatalogEntry,\n DetectionModel,\n LabelDefinition,\n IInferenceEngine,\n} from '@camstack/types'\nimport { ANIMAL_TYPE_MODELS } from '@camstack/types'\nimport { cropRegion, resizeAndNormalize } from '../../shared/image-utils.js'\nimport { resolveEngine } from '../../shared/engine-resolver.js'\n\nconst ANIMAL_TYPE_LABEL: LabelDefinition = { id: 'animal-type', name: 'Animal Type' }\nconst ANIMAL_TYPE_LABELS: readonly LabelDefinition[] = [ANIMAL_TYPE_LABEL]\nconst ANIMAL_CLASS_MAP: ClassMapDefinition = { mapping: {}, preserveOriginal: true }\n\nconst ANIMAL_10_CLASSES = [\n 'cat',\n 'cow',\n 'dog',\n 'dolphin',\n 'eagle',\n 'giant panda',\n 'horse',\n 'monkey',\n 'sheep',\n 'spider',\n] as const\n\nfunction softmax(logits: Float32Array): Float32Array {\n const max = logits.reduce((a, b) => Math.max(a, b), -Infinity)\n const exps = logits.map((v) => Math.exp(v - max))\n const sum = exps.reduce((a, b) => a + b, 0)\n return exps.map((v) => v / sum) as unknown as Float32Array\n}\n\nexport default class AnimalClassifierAddon implements IClassifierProvider, IDetectionAddon {\n readonly id = 'animal-classifier'\n readonly slot = 'classifier' as const\n readonly inputClasses = ['animal'] as const\n readonly outputClasses = ['animal-type:*'] as const\n readonly slotPriority = 0\n readonly requiredSteps = [] as const\n readonly manifest: AddonManifest = {\n id: 'animal-classifier',\n name: 'Animal Classifier',\n version: '0.1.0',\n description: 'ViT-based animal type classifier — 10 common species',\n packageName: '@camstack/vision',\n slot: 'classifier',\n inputClasses: ['animal'],\n outputClasses: ['animal-type:*'],\n supportsCustomModels: false,\n mayRequirePython: false,\n defaultConfig: {\n modelId: 
'animals-10',\n runtime: 'auto',\n backend: 'cpu',\n minConfidence: 0.3,\n },\n }\n\n private engine!: IInferenceEngine\n private modelEntry!: ModelCatalogEntry\n private minConfidence = 0.3\n\n async initialize(ctx: AddonContext): Promise<void> {\n const cfg = ctx.addonConfig\n const modelId = (cfg['modelId'] as string | undefined) ?? 'animals-10'\n const runtime = (cfg['runtime'] as string | undefined) ?? 'auto'\n const backend = (cfg['backend'] as string | undefined) ?? 'cpu'\n this.minConfidence = (cfg['minConfidence'] as number | undefined) ?? 0.3\n\n const entry = ANIMAL_TYPE_MODELS.find((m) => m.id === modelId)\n if (!entry) {\n throw new Error(`AnimalClassifierAddon: unknown modelId \"${modelId}\"`)\n }\n this.modelEntry = entry\n\n const resolved = await resolveEngine({\n runtime: runtime as 'auto',\n backend,\n modelEntry: entry,\n modelsDir: ctx.locationPaths.models,\n })\n this.engine = resolved.engine\n }\n\n async classify(input: CropInput): Promise<ClassifierOutput> {\n const start = Date.now()\n const { width: inputW, height: inputH } = this.modelEntry.inputSize\n\n // Crop the animal region\n const animalCrop = await cropRegion(input.frame.data, input.roi)\n\n // Resize to 224x224, ImageNet normalization, NCHW\n const normalized = await resizeAndNormalize(animalCrop, inputW, inputH, 'imagenet', 'nchw')\n\n // Run inference — output shape: [1, 10]\n const rawOutput = await this.engine.run(normalized, [1, 3, inputH, inputW])\n\n // Softmax to get probabilities\n const probs = softmax(rawOutput)\n\n // Find argmax\n let maxIdx = 0\n let maxScore = probs[0] ?? 0\n for (let i = 1; i < probs.length; i++) {\n const score = probs[i] ?? 0\n if (score > maxScore) {\n maxScore = score\n maxIdx = i\n }\n }\n\n if (maxScore < this.minConfidence) {\n return {\n classifications: [],\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n const label = ANIMAL_10_CLASSES[maxIdx] ?? 
`animal_${maxIdx}`\n\n return {\n classifications: [\n {\n class: label,\n score: maxScore,\n },\n ],\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n async shutdown(): Promise<void> {\n await this.engine?.dispose()\n }\n\n getConfigSchema(): ConfigUISchema {\n return {\n sections: [\n {\n id: 'model',\n title: 'Model',\n columns: 1,\n fields: [\n {\n key: 'modelId',\n label: 'Model',\n type: 'model-selector',\n catalog: [...ANIMAL_TYPE_MODELS],\n allowCustom: false,\n allowConversion: false,\n acceptFormats: ['onnx', 'coreml', 'openvino'],\n requiredMetadata: ['inputSize', 'labels'],\n outputFormatHint: 'classification',\n },\n ],\n },\n {\n id: 'thresholds',\n title: 'Classification Settings',\n columns: 1,\n fields: [\n {\n key: 'minConfidence',\n label: 'Minimum Confidence',\n type: 'slider',\n min: 0.05,\n max: 1.0,\n step: 0.05,\n default: 0.3,\n },\n ],\n },\n {\n id: 'runtime',\n title: 'Runtime',\n columns: 2,\n fields: [\n {\n key: 'runtime',\n label: 'Runtime',\n type: 'select',\n options: [\n { value: 'auto', label: 'Auto (recommended)' },\n { value: 'onnx', label: 'ONNX Runtime' },\n { value: 'coreml', label: 'CoreML (Apple)' },\n ],\n },\n {\n key: 'backend',\n label: 'Backend',\n type: 'select',\n dependsOn: { runtime: 'onnx' },\n options: [\n { value: 'cpu', label: 'CPU' },\n { value: 'coreml', label: 'CoreML' },\n { value: 'cuda', label: 'CUDA (NVIDIA)' },\n ],\n },\n ],\n },\n ],\n }\n }\n\n getClassMap(): ClassMapDefinition {\n return ANIMAL_CLASS_MAP\n }\n\n getModelCatalog(): ModelCatalogEntry[] {\n return [...ANIMAL_TYPE_MODELS]\n }\n\n getAvailableModels(): DetectionModel[] {\n return []\n }\n\n getActiveLabels(): readonly LabelDefinition[] {\n return ANIMAL_TYPE_LABELS\n }\n\n async probe(): Promise<ProbeResult> {\n return {\n available: true,\n runtime: this.engine?.runtime ?? 'onnx',\n device: this.engine?.device ?? 
'cpu',\n capabilities: ['fp32'],\n }\n }\n}\n"],"mappings":";;;;;;;;;AAeA,SAAS,0BAA0B;AAInC,IAAM,oBAAqC,EAAE,IAAI,eAAe,MAAM,cAAc;AACpF,IAAM,qBAAiD,CAAC,iBAAiB;AACzE,IAAM,mBAAuC,EAAE,SAAS,CAAC,GAAG,kBAAkB,KAAK;AAEnF,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,SAAS,QAAQ,QAAoC;AACnD,QAAM,MAAM,OAAO,OAAO,CAAC,GAAG,MAAM,KAAK,IAAI,GAAG,CAAC,GAAG,SAAS;AAC7D,QAAM,OAAO,OAAO,IAAI,CAAC,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAChD,QAAM,MAAM,KAAK,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAC1C,SAAO,KAAK,IAAI,CAAC,MAAM,IAAI,GAAG;AAChC;AAEA,IAAqB,wBAArB,MAA2F;AAAA,EAChF,KAAK;AAAA,EACL,OAAO;AAAA,EACP,eAAe,CAAC,QAAQ;AAAA,EACxB,gBAAgB,CAAC,eAAe;AAAA,EAChC,eAAe;AAAA,EACf,gBAAgB,CAAC;AAAA,EACjB,WAA0B;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,aAAa;AAAA,IACb,MAAM;AAAA,IACN,cAAc,CAAC,QAAQ;AAAA,IACvB,eAAe,CAAC,eAAe;AAAA,IAC/B,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,eAAe;AAAA,MACb,SAAS;AAAA,MACT,SAAS;AAAA,MACT,SAAS;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,EACF;AAAA,EAEQ;AAAA,EACA;AAAA,EACA,gBAAgB;AAAA,EAExB,MAAM,WAAW,KAAkC;AACjD,UAAM,MAAM,IAAI;AAChB,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,SAAK,gBAAiB,IAAI,eAAe,KAA4B;AAErE,UAAM,QAAQ,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO;AAC7D,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,2CAA2C,OAAO,GAAG;AAAA,IACvE;AACA,SAAK,aAAa;AAElB,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ,WAAW,IAAI,cAAc;AAAA,IAC/B,CAAC;AACD,SAAK,SAAS,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,SAAS,OAA6C;AAC1D,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,EAAE,OAAO,QAAQ,QAAQ,OAAO,IAAI,KAAK,WAAW;AAG1D,UAAM,aAAa,MAAM,WAAW,MAAM,MAAM,MAAM,MAAM,GAAG;AAG/D,UAAM,aAAa,MAAM,mBAAmB,YAAY,QAAQ,QAAQ,YAAY,MAAM;AAG1F,UAAM,YAAY,MAAM,KAAK,OAAO,IAAI,YAAY,CAAC,GAAG,GAAG,QAAQ,MAAM,CAAC;AAG1E,UAAM,QAAQ,QAAQ,SAAS;AAG/B,QAAI,SAAS;AACb,QAAI,WAAW,MAAM,CAAC,KAAK;AAC3B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,QAAQ,MAAM,CAAC,KAAK;AAC1B,UAAI,QAAQ,UAAU;AACpB,mBAAW;AACX,iBAAS;AAAA,MACX;AAAA,IACF;A
AEA,QAAI,WAAW,KAAK,eAAe;AACjC,aAAO;AAAA,QACL,iBAAiB,CAAC;AAAA,QAClB,aAAa,KAAK,IAAI,IAAI;AAAA,QAC1B,SAAS,KAAK,WAAW;AAAA,MAC3B;AAAA,IACF;AAEA,UAAM,QAAQ,kBAAkB,MAAM,KAAK,UAAU,MAAM;AAE3D,WAAO;AAAA,MACL,iBAAiB;AAAA,QACf;AAAA,UACE,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AAAA,MACA,aAAa,KAAK,IAAI,IAAI;AAAA,MAC1B,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WAA0B;AAC9B,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC7B;AAAA,EAEA,kBAAkC;AAChC,WAAO;AAAA,MACL,UAAU;AAAA,QACR;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS,CAAC,GAAG,kBAAkB;AAAA,cAC/B,aAAa;AAAA,cACb,iBAAiB;AAAA,cACjB,eAAe,CAAC,QAAQ,UAAU,UAAU;AAAA,cAC5C,kBAAkB,CAAC,aAAa,QAAQ;AAAA,cACxC,kBAAkB;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,KAAK;AAAA,cACL,KAAK;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,YACX;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS;AAAA,gBACP,EAAE,OAAO,QAAQ,OAAO,qBAAqB;AAAA,gBAC7C,EAAE,OAAO,QAAQ,OAAO,eAAe;AAAA,gBACvC,EAAE,OAAO,UAAU,OAAO,iBAAiB;AAAA,cAC7C;AAAA,YACF;AAAA,YACA;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,WAAW,EAAE,SAAS,OAAO;AAAA,cAC7B,SAAS;AAAA,gBACP,EAAE,OAAO,OAAO,OAAO,MAAM;AAAA,gBAC7B,EAAE,OAAO,UAAU,OAAO,SAAS;AAAA,gBACnC,EAAE,OAAO,QAAQ,OAAO,gBAAgB;AAAA,cAC1C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,cAAkC;AAChC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAuC;AACrC,WAAO,CAAC,GAAG,kBAAkB;AAAA,EAC/B;AAAA,EAEA,qBAAuC;AACrC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,kBAA8C;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QAA8B;AAClC,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,KAAK,QAAQ,WAAW;AAAA,MACjC,QAAQ,KAAK,QAAQ,UAAU;AAAA,MAC/B,cAAc,CAAC,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,235 @@
1
+ import {
2
+ nms
3
+ } from "./chunk-KUO2BVFY.mjs";
4
+ import {
5
+ cropRegion,
6
+ letterbox
7
+ } from "./chunk-6OR5TE7A.mjs";
8
+ import {
9
+ resolveEngine
10
+ } from "./chunk-J3IUBPRE.mjs";
11
+
12
+ // src/addons/face-detection/index.ts
13
+ import { FACE_DETECTION_MODELS } from "@camstack/types";
14
+
15
+ // src/shared/postprocess/scrfd.ts
16
+ var STRIDES = [8, 16, 32];
17
+ var NUM_ANCHORS_PER_STRIDE = 2;
18
+ function generateAnchors(stride, inputSize) {
19
+ const featureSize = Math.ceil(inputSize / stride);
20
+ const anchors = [];
21
+ for (let y = 0; y < featureSize; y++) {
22
+ for (let x = 0; x < featureSize; x++) {
23
+ for (let k = 0; k < NUM_ANCHORS_PER_STRIDE; k++) {
24
+ anchors.push({
25
+ cx: (x + 0.5) * stride,
26
+ cy: (y + 0.5) * stride
27
+ });
28
+ }
29
+ }
30
+ }
31
+ return anchors;
32
+ }
33
+ function scrfdPostprocess(outputs, confidence, inputSize, originalWidth, originalHeight) {
34
+ const scaleX = originalWidth / inputSize;
35
+ const scaleY = originalHeight / inputSize;
36
+ const candidates = [];
37
+ for (const stride of STRIDES) {
38
+ const scoreKey = Object.keys(outputs).find((k) => k.includes(`score_${stride}`) || k.includes(`_${stride}_score`));
39
+ const bboxKey = Object.keys(outputs).find((k) => k.includes(`bbox_${stride}`) || k.includes(`_${stride}_bbox`));
40
+ const kpsKey = Object.keys(outputs).find((k) => k.includes(`kps_${stride}`) || k.includes(`_${stride}_kps`));
41
+ if (!scoreKey || !bboxKey) continue;
42
+ const scores = outputs[scoreKey];
43
+ const bboxes = outputs[bboxKey];
44
+ const kps = kpsKey ? outputs[kpsKey] : void 0;
45
+ const anchors = generateAnchors(stride, inputSize);
46
+ const n = anchors.length;
47
+ for (let i = 0; i < n; i++) {
48
+ const score = scores[i];
49
+ if (score < confidence) continue;
50
+ const anchor = anchors[i];
51
+ const x1 = anchor.cx - bboxes[i * 4] * stride;
52
+ const y1 = anchor.cy - bboxes[i * 4 + 1] * stride;
53
+ const x2 = anchor.cx + bboxes[i * 4 + 2] * stride;
54
+ const y2 = anchor.cy + bboxes[i * 4 + 3] * stride;
55
+ const bbox = {
56
+ x: x1 * scaleX,
57
+ y: y1 * scaleY,
58
+ w: (x2 - x1) * scaleX,
59
+ h: (y2 - y1) * scaleY
60
+ };
61
+ let landmarks;
62
+ if (kps) {
63
+ const pts = [];
64
+ for (let p = 0; p < 5; p++) {
65
+ pts.push({
66
+ x: (anchor.cx + kps[i * 10 + p * 2] * stride) * scaleX,
67
+ y: (anchor.cy + kps[i * 10 + p * 2 + 1] * stride) * scaleY
68
+ });
69
+ }
70
+ landmarks = pts;
71
+ }
72
+ candidates.push({ bbox, score, landmarks });
73
+ }
74
+ }
75
+ if (candidates.length === 0) return [];
76
+ const keptIndices = nms(candidates, 0.45);
77
+ return keptIndices.map((idx) => {
78
+ const { bbox, score, landmarks } = candidates[idx];
79
+ return {
80
+ class: "face",
81
+ originalClass: "face",
82
+ score,
83
+ bbox,
84
+ ...landmarks ? { landmarks } : {}
85
+ };
86
+ });
87
+ }
88
+
89
+ // src/addons/face-detection/index.ts
90
+ var FACE_LABEL = { id: "face", name: "Face" };
91
+ var FACE_LABELS = [FACE_LABEL];
92
+ var FACE_CLASS_MAP = { mapping: {}, preserveOriginal: true };
93
+ var FaceDetectionAddon = class {
94
+ id = "face-detection";
95
+ slot = "cropper";
96
+ inputClasses = ["person"];
97
+ outputClasses = ["face"];
98
+ slotPriority = 0;
99
+ manifest = {
100
+ id: "face-detection",
101
+ name: "Face Detection",
102
+ version: "0.1.0",
103
+ description: "SCRFD-based face detector \u2014 crops face regions from person detections",
104
+ packageName: "@camstack/vision",
105
+ slot: "cropper",
106
+ inputClasses: ["person"],
107
+ outputClasses: ["face"],
108
+ supportsCustomModels: false,
109
+ mayRequirePython: false,
110
+ defaultConfig: {
111
+ modelId: "scrfd-500m",
112
+ runtime: "auto",
113
+ backend: "cpu",
114
+ confidence: 0.5
115
+ }
116
+ };
117
+ engine;
118
+ modelEntry;
119
+ confidence = 0.5;
120
+ async initialize(ctx) {
121
+ const cfg = ctx.addonConfig;
122
+ const modelId = cfg["modelId"] ?? "scrfd-500m";
123
+ const runtime = cfg["runtime"] ?? "auto";
124
+ const backend = cfg["backend"] ?? "cpu";
125
+ this.confidence = cfg["confidence"] ?? 0.5;
126
+ const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId);
127
+ if (!entry) {
128
+ throw new Error(`FaceDetectionAddon: unknown modelId "${modelId}"`);
129
+ }
130
+ this.modelEntry = entry;
131
+ const resolved = await resolveEngine({
132
+ runtime,
133
+ backend,
134
+ modelEntry: entry,
135
+ modelsDir: ctx.locationPaths.models
136
+ });
137
+ this.engine = resolved.engine;
138
+ }
139
+ async crop(input) {
140
+ const start = Date.now();
141
+ const { width: inputW, height: inputH } = this.modelEntry.inputSize;
142
+ const targetSize = Math.max(inputW, inputH);
143
+ const personCrop = await cropRegion(input.frame.data, input.roi);
144
+ const lb = await letterbox(personCrop, targetSize);
145
+ const engineWithMulti = this.engine;
146
+ let outputs;
147
+ if (typeof engineWithMulti.runMultiOutput === "function") {
148
+ outputs = await engineWithMulti.runMultiOutput(lb.data, [1, 3, targetSize, targetSize]);
149
+ } else {
150
+ const single = await this.engine.run(lb.data, [1, 3, targetSize, targetSize]);
151
+ outputs = { output0: single };
152
+ }
153
+ const crops = scrfdPostprocess(
154
+ outputs,
155
+ this.confidence,
156
+ targetSize,
157
+ lb.originalWidth,
158
+ lb.originalHeight
159
+ );
160
+ return {
161
+ crops,
162
+ inferenceMs: Date.now() - start,
163
+ modelId: this.modelEntry.id
164
+ };
165
+ }
166
+ async shutdown() {
167
+ await this.engine?.dispose();
168
+ }
169
+ getConfigSchema() {
170
+ return {
171
+ sections: [
172
+ {
173
+ id: "model",
174
+ title: "Model",
175
+ columns: 1,
176
+ fields: [
177
+ {
178
+ key: "modelId",
179
+ label: "Model",
180
+ type: "model-selector",
181
+ catalog: [...FACE_DETECTION_MODELS],
182
+ allowCustom: false,
183
+ allowConversion: false,
184
+ acceptFormats: ["onnx", "coreml", "openvino"],
185
+ requiredMetadata: ["inputSize", "labels", "outputFormat"],
186
+ outputFormatHint: "ssd"
187
+ }
188
+ ]
189
+ },
190
+ {
191
+ id: "thresholds",
192
+ title: "Detection Thresholds",
193
+ columns: 1,
194
+ fields: [
195
+ {
196
+ key: "confidence",
197
+ label: "Confidence Threshold",
198
+ type: "slider",
199
+ min: 0.1,
200
+ max: 1,
201
+ step: 0.05,
202
+ default: 0.5
203
+ }
204
+ ]
205
+ }
206
+ ]
207
+ };
208
+ }
209
+ getClassMap() {
210
+ return FACE_CLASS_MAP;
211
+ }
212
+ getModelCatalog() {
213
+ return [...FACE_DETECTION_MODELS];
214
+ }
215
+ getAvailableModels() {
216
+ return [];
217
+ }
218
+ getActiveLabels() {
219
+ return FACE_LABELS;
220
+ }
221
+ async probe() {
222
+ return {
223
+ available: true,
224
+ runtime: this.engine?.runtime ?? "onnx",
225
+ device: this.engine?.device ?? "cpu",
226
+ capabilities: ["fp32"]
227
+ };
228
+ }
229
+ };
230
+
231
+ export {
232
+ scrfdPostprocess,
233
+ FaceDetectionAddon
234
+ };
235
+ //# sourceMappingURL=chunk-AD2TFYZA.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/addons/face-detection/index.ts","../src/shared/postprocess/scrfd.ts"],"sourcesContent":["import type {\n ICropperProvider,\n IDetectionAddon,\n AddonManifest,\n AddonContext,\n CropInput,\n CropperOutput,\n ConfigUISchema,\n ClassMapDefinition,\n ProbeResult,\n ModelCatalogEntry,\n DetectionModel,\n LabelDefinition,\n IInferenceEngine,\n} from '@camstack/types'\nimport { FACE_DETECTION_MODELS } from '@camstack/types'\nimport { cropRegion, letterbox } from '../../shared/image-utils.js'\nimport { scrfdPostprocess } from '../../shared/postprocess/scrfd.js'\nimport { resolveEngine } from '../../shared/engine-resolver.js'\n\nconst FACE_LABEL: LabelDefinition = { id: 'face', name: 'Face' }\nconst FACE_LABELS: readonly LabelDefinition[] = [FACE_LABEL]\nconst FACE_CLASS_MAP: ClassMapDefinition = { mapping: {}, preserveOriginal: true }\n\nexport default class FaceDetectionAddon implements ICropperProvider, IDetectionAddon {\n readonly id = 'face-detection'\n readonly slot = 'cropper' as const\n readonly inputClasses = ['person'] as const\n readonly outputClasses = ['face'] as const\n readonly slotPriority = 0\n readonly manifest: AddonManifest = {\n id: 'face-detection',\n name: 'Face Detection',\n version: '0.1.0',\n description: 'SCRFD-based face detector — crops face regions from person detections',\n packageName: '@camstack/vision',\n slot: 'cropper',\n inputClasses: ['person'],\n outputClasses: ['face'],\n supportsCustomModels: false,\n mayRequirePython: false,\n defaultConfig: {\n modelId: 'scrfd-500m',\n runtime: 'auto',\n backend: 'cpu',\n confidence: 0.5,\n },\n }\n\n private engine!: IInferenceEngine\n private modelEntry!: ModelCatalogEntry\n private confidence = 0.5\n\n async initialize(ctx: AddonContext): Promise<void> {\n const cfg = ctx.addonConfig\n const modelId = (cfg['modelId'] as string | undefined) ?? 'scrfd-500m'\n const runtime = (cfg['runtime'] as string | undefined) ?? 
'auto'\n const backend = (cfg['backend'] as string | undefined) ?? 'cpu'\n this.confidence = (cfg['confidence'] as number | undefined) ?? 0.5\n\n const entry = FACE_DETECTION_MODELS.find((m) => m.id === modelId)\n if (!entry) {\n throw new Error(`FaceDetectionAddon: unknown modelId \"${modelId}\"`)\n }\n this.modelEntry = entry\n\n const resolved = await resolveEngine({\n runtime: runtime as 'auto',\n backend,\n modelEntry: entry,\n modelsDir: ctx.locationPaths.models,\n })\n this.engine = resolved.engine\n }\n\n async crop(input: CropInput): Promise<CropperOutput> {\n const start = Date.now()\n const { width: inputW, height: inputH } = this.modelEntry.inputSize\n const targetSize = Math.max(inputW, inputH)\n\n // Crop the person region from the full frame\n const personCrop = await cropRegion(input.frame.data, input.roi)\n\n // Letterbox resize to model input size\n const lb = await letterbox(personCrop, targetSize)\n\n // SCRFD has multiple output tensors\n const engineWithMulti = this.engine as IInferenceEngine & {\n runMultiOutput?: (input: Float32Array, shape: readonly number[]) => Promise<Record<string, Float32Array>>\n }\n\n let outputs: Record<string, Float32Array>\n if (typeof engineWithMulti.runMultiOutput === 'function') {\n outputs = await engineWithMulti.runMultiOutput(lb.data, [1, 3, targetSize, targetSize])\n } else {\n // Fallback: wrap single output\n const single = await this.engine.run(lb.data, [1, 3, targetSize, targetSize])\n outputs = { output0: single }\n }\n\n const crops = scrfdPostprocess(\n outputs,\n this.confidence,\n targetSize,\n lb.originalWidth,\n lb.originalHeight,\n )\n\n return {\n crops,\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n async shutdown(): Promise<void> {\n await this.engine?.dispose()\n }\n\n getConfigSchema(): ConfigUISchema {\n return {\n sections: [\n {\n id: 'model',\n title: 'Model',\n columns: 1,\n fields: [\n {\n key: 'modelId',\n label: 'Model',\n type: 'model-selector',\n 
catalog: [...FACE_DETECTION_MODELS],\n allowCustom: false,\n allowConversion: false,\n acceptFormats: ['onnx', 'coreml', 'openvino'],\n requiredMetadata: ['inputSize', 'labels', 'outputFormat'],\n outputFormatHint: 'ssd',\n },\n ],\n },\n {\n id: 'thresholds',\n title: 'Detection Thresholds',\n columns: 1,\n fields: [\n {\n key: 'confidence',\n label: 'Confidence Threshold',\n type: 'slider',\n min: 0.1,\n max: 1.0,\n step: 0.05,\n default: 0.5,\n },\n ],\n },\n ],\n }\n }\n\n getClassMap(): ClassMapDefinition {\n return FACE_CLASS_MAP\n }\n\n getModelCatalog(): ModelCatalogEntry[] {\n return [...FACE_DETECTION_MODELS]\n }\n\n getAvailableModels(): DetectionModel[] {\n return []\n }\n\n getActiveLabels(): readonly LabelDefinition[] {\n return FACE_LABELS\n }\n\n async probe(): Promise<ProbeResult> {\n return {\n available: true,\n runtime: this.engine?.runtime ?? 'onnx',\n device: this.engine?.device ?? 'cpu',\n capabilities: ['fp32'],\n }\n }\n}\n","import type { SpatialDetection, BoundingBox, Landmark } from '@camstack/types'\nimport { iou, nms } from './yolo.js'\n\nexport interface ScrfdRawOutputs {\n readonly [key: string]: Float32Array\n}\n\nconst STRIDES = [8, 16, 32] as const\nconst NUM_ANCHORS_PER_STRIDE = 2\n\n/** Generate anchor points for a given stride and input size */\nfunction generateAnchors(stride: number, inputSize: number): Array<{ cx: number; cy: number }> {\n const featureSize = Math.ceil(inputSize / stride)\n const anchors: Array<{ cx: number; cy: number }> = []\n for (let y = 0; y < featureSize; y++) {\n for (let x = 0; x < featureSize; x++) {\n for (let k = 0; k < NUM_ANCHORS_PER_STRIDE; k++) {\n anchors.push({\n cx: (x + 0.5) * stride,\n cy: (y + 0.5) * stride,\n })\n }\n }\n }\n return anchors\n}\n\nexport function scrfdPostprocess(\n outputs: ScrfdRawOutputs,\n confidence: number,\n inputSize: number,\n originalWidth: number,\n originalHeight: number,\n): SpatialDetection[] {\n // Scale factor from letterbox (assume square crop, so same 
scale both axes)\n const scaleX = originalWidth / inputSize\n const scaleY = originalHeight / inputSize\n\n interface Candidate {\n readonly bbox: BoundingBox\n readonly score: number\n readonly landmarks?: readonly Landmark[]\n }\n\n const candidates: Candidate[] = []\n\n for (const stride of STRIDES) {\n const scoreKey = Object.keys(outputs).find((k) => k.includes(`score_${stride}`) || k.includes(`_${stride}_score`))\n const bboxKey = Object.keys(outputs).find((k) => k.includes(`bbox_${stride}`) || k.includes(`_${stride}_bbox`))\n const kpsKey = Object.keys(outputs).find((k) => k.includes(`kps_${stride}`) || k.includes(`_${stride}_kps`))\n\n if (!scoreKey || !bboxKey) continue\n\n const scores = outputs[scoreKey]!\n const bboxes = outputs[bboxKey]!\n const kps = kpsKey ? outputs[kpsKey] : undefined\n const anchors = generateAnchors(stride, inputSize)\n\n const n = anchors.length\n\n for (let i = 0; i < n; i++) {\n const score = scores[i]!\n if (score < confidence) continue\n\n const anchor = anchors[i]!\n\n // Bboxes are relative to the anchor center in stride units, scaled by stride\n const x1 = anchor.cx - bboxes[i * 4]! * stride\n const y1 = anchor.cy - bboxes[i * 4 + 1]! * stride\n const x2 = anchor.cx + bboxes[i * 4 + 2]! * stride\n const y2 = anchor.cy + bboxes[i * 4 + 3]! * stride\n\n const bbox: BoundingBox = {\n x: x1 * scaleX,\n y: y1 * scaleY,\n w: (x2 - x1) * scaleX,\n h: (y2 - y1) * scaleY,\n }\n\n let landmarks: readonly Landmark[] | undefined\n if (kps) {\n const pts: Landmark[] = []\n for (let p = 0; p < 5; p++) {\n pts.push({\n x: (anchor.cx + kps[i * 10 + p * 2]! * stride) * scaleX,\n y: (anchor.cy + kps[i * 10 + p * 2 + 1]! 
* stride) * scaleY,\n })\n }\n landmarks = pts\n }\n\n candidates.push({ bbox, score, landmarks })\n }\n }\n\n if (candidates.length === 0) return []\n\n const keptIndices = nms(candidates, 0.45)\n\n return keptIndices.map((idx) => {\n const { bbox, score, landmarks } = candidates[idx]!\n return {\n class: 'face',\n originalClass: 'face',\n score,\n bbox,\n ...(landmarks ? { landmarks } : {}),\n } satisfies SpatialDetection\n })\n}\n"],"mappings":";;;;;;;;;;;;AAeA,SAAS,6BAA6B;;;ACRtC,IAAM,UAAU,CAAC,GAAG,IAAI,EAAE;AAC1B,IAAM,yBAAyB;AAG/B,SAAS,gBAAgB,QAAgB,WAAsD;AAC7F,QAAM,cAAc,KAAK,KAAK,YAAY,MAAM;AAChD,QAAM,UAA6C,CAAC;AACpD,WAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,aAAS,IAAI,GAAG,IAAI,aAAa,KAAK;AACpC,eAAS,IAAI,GAAG,IAAI,wBAAwB,KAAK;AAC/C,gBAAQ,KAAK;AAAA,UACX,KAAK,IAAI,OAAO;AAAA,UAChB,KAAK,IAAI,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEO,SAAS,iBACd,SACA,YACA,WACA,eACA,gBACoB;AAEpB,QAAM,SAAS,gBAAgB;AAC/B,QAAM,SAAS,iBAAiB;AAQhC,QAAM,aAA0B,CAAC;AAEjC,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAW,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,QAAQ,CAAC;AACjH,UAAM,UAAU,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,OAAO,CAAC;AAC9G,UAAM,SAAS,OAAO,KAAK,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,SAAS,OAAO,MAAM,EAAE,KAAK,EAAE,SAAS,IAAI,MAAM,MAAM,CAAC;AAE3G,QAAI,CAAC,YAAY,CAAC,QAAS;AAE3B,UAAM,SAAS,QAAQ,QAAQ;AAC/B,UAAM,SAAS,QAAQ,OAAO;AAC9B,UAAM,MAAM,SAAS,QAAQ,MAAM,IAAI;AACvC,UAAM,UAAU,gBAAgB,QAAQ,SAAS;AAEjD,UAAM,IAAI,QAAQ;AAElB,aAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,YAAM,QAAQ,OAAO,CAAC;AACtB,UAAI,QAAQ,WAAY;AAExB,YAAM,SAAS,QAAQ,CAAC;AAGxB,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,CAAC,IAAK;AACxC,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAC5C,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAC5C,YAAM,KAAK,OAAO,KAAK,OAAO,IAAI,IAAI,CAAC,IAAK;AAE5C,YAAM,OAAoB;AAAA,QACxB,GAAG,KAAK;AAAA,QACR,GAAG,KAAK;AAAA,QACR,IAAI,KAAK,MAAM;AAAA,QACf,IAAI,KAAK,MAAM;AAAA,MACjB;AAEA,UAAI;AACJ,UAAI,KAAK;AACP,cAAM,MAAkB,CAAC;AACzB,iBAAS,IAAI,GAAG,IAA
I,GAAG,KAAK;AAC1B,cAAI,KAAK;AAAA,YACP,IAAI,OAAO,KAAK,IAAI,IAAI,KAAK,IAAI,CAAC,IAAK,UAAU;AAAA,YACjD,IAAI,OAAO,KAAK,IAAI,IAAI,KAAK,IAAI,IAAI,CAAC,IAAK,UAAU;AAAA,UACvD,CAAC;AAAA,QACH;AACA,oBAAY;AAAA,MACd;AAEA,iBAAW,KAAK,EAAE,MAAM,OAAO,UAAU,CAAC;AAAA,IAC5C;AAAA,EACF;AAEA,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAErC,QAAM,cAAc,IAAI,YAAY,IAAI;AAExC,SAAO,YAAY,IAAI,CAAC,QAAQ;AAC9B,UAAM,EAAE,MAAM,OAAO,UAAU,IAAI,WAAW,GAAG;AACjD,WAAO;AAAA,MACL,OAAO;AAAA,MACP,eAAe;AAAA,MACf;AAAA,MACA;AAAA,MACA,GAAI,YAAY,EAAE,UAAU,IAAI,CAAC;AAAA,IACnC;AAAA,EACF,CAAC;AACH;;;ADzFA,IAAM,aAA8B,EAAE,IAAI,QAAQ,MAAM,OAAO;AAC/D,IAAM,cAA0C,CAAC,UAAU;AAC3D,IAAM,iBAAqC,EAAE,SAAS,CAAC,GAAG,kBAAkB,KAAK;AAEjF,IAAqB,qBAArB,MAAqF;AAAA,EAC1E,KAAK;AAAA,EACL,OAAO;AAAA,EACP,eAAe,CAAC,QAAQ;AAAA,EACxB,gBAAgB,CAAC,MAAM;AAAA,EACvB,eAAe;AAAA,EACf,WAA0B;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,aAAa;AAAA,IACb,MAAM;AAAA,IACN,cAAc,CAAC,QAAQ;AAAA,IACvB,eAAe,CAAC,MAAM;AAAA,IACtB,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,eAAe;AAAA,MACb,SAAS;AAAA,MACT,SAAS;AAAA,MACT,SAAS;AAAA,MACT,YAAY;AAAA,IACd;AAAA,EACF;AAAA,EAEQ;AAAA,EACA;AAAA,EACA,aAAa;AAAA,EAErB,MAAM,WAAW,KAAkC;AACjD,UAAM,MAAM,IAAI;AAChB,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,SAAK,aAAc,IAAI,YAAY,KAA4B;AAE/D,UAAM,QAAQ,sBAAsB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO;AAChE,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,wCAAwC,OAAO,GAAG;AAAA,IACpE;AACA,SAAK,aAAa;AAElB,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ,WAAW,IAAI,cAAc;AAAA,IAC/B,CAAC;AACD,SAAK,SAAS,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,KAAK,OAA0C;AACnD,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,EAAE,OAAO,QAAQ,QAAQ,OAAO,IAAI,KAAK,WAAW;AAC1D,UAAM,aAAa,KAAK,IAAI,QAAQ,MAAM;AAG1C,UAAM,aAAa,MAAM,WAAW,MAAM,MAAM,MAAM,MAAM,GAAG;AAG/D,UAAM,KAAK,MAAM,UAAU,YAAY,UAAU;AAGjD,UAAM,kBAAkB,KAAK;AAI7B,QAAI;AACJ,QAAI,OAAO,gBAAgB,mBAAmB,YAAY;AACxD,gBAAU,MAAM,gBAAgB,eAAe,GAAG,MAAM,CAAC,GAAG,GAAG,YAAY,UAAU,CAAC;AAAA,IACxF,OAAO;AAEL,YAAM,SAAS,MAAM,KAAK,OAAO,IAAI,GAAG,MAAM,CAAC,GAAG,GAAG,YAAY,
UAAU,CAAC;AAC5E,gBAAU,EAAE,SAAS,OAAO;AAAA,IAC9B;AAEA,UAAM,QAAQ;AAAA,MACZ;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,GAAG;AAAA,IACL;AAEA,WAAO;AAAA,MACL;AAAA,MACA,aAAa,KAAK,IAAI,IAAI;AAAA,MAC1B,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WAA0B;AAC9B,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC7B;AAAA,EAEA,kBAAkC;AAChC,WAAO;AAAA,MACL,UAAU;AAAA,QACR;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS,CAAC,GAAG,qBAAqB;AAAA,cAClC,aAAa;AAAA,cACb,iBAAiB;AAAA,cACjB,eAAe,CAAC,QAAQ,UAAU,UAAU;AAAA,cAC5C,kBAAkB,CAAC,aAAa,UAAU,cAAc;AAAA,cACxD,kBAAkB;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,KAAK;AAAA,cACL,KAAK;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,YACX;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,cAAkC;AAChC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAuC;AACrC,WAAO,CAAC,GAAG,qBAAqB;AAAA,EAClC;AAAA,EAEA,qBAAuC;AACrC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,kBAA8C;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QAA8B;AAClC,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,KAAK,QAAQ,WAAW;AAAA,MACjC,QAAQ,KAAK,QAAQ,UAAU;AAAA,MAC/B,cAAc,CAAC,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":[]}