@camstack/vision 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. package/dist/addons/animal-classifier/index.d.mts +25 -0
  2. package/dist/addons/animal-classifier/index.d.ts +25 -0
  3. package/dist/addons/animal-classifier/index.js +469 -0
  4. package/dist/addons/animal-classifier/index.js.map +1 -0
  5. package/dist/addons/animal-classifier/index.mjs +9 -0
  6. package/dist/addons/animal-classifier/index.mjs.map +1 -0
  7. package/dist/addons/audio-classification/index.d.mts +31 -0
  8. package/dist/addons/audio-classification/index.d.ts +31 -0
  9. package/dist/addons/audio-classification/index.js +411 -0
  10. package/dist/addons/audio-classification/index.js.map +1 -0
  11. package/dist/addons/audio-classification/index.mjs +8 -0
  12. package/dist/addons/audio-classification/index.mjs.map +1 -0
  13. package/dist/addons/bird-global-classifier/index.d.mts +26 -0
  14. package/dist/addons/bird-global-classifier/index.d.ts +26 -0
  15. package/dist/addons/bird-global-classifier/index.js +475 -0
  16. package/dist/addons/bird-global-classifier/index.js.map +1 -0
  17. package/dist/addons/bird-global-classifier/index.mjs +9 -0
  18. package/dist/addons/bird-global-classifier/index.mjs.map +1 -0
  19. package/dist/addons/bird-nabirds-classifier/index.d.mts +28 -0
  20. package/dist/addons/bird-nabirds-classifier/index.d.ts +28 -0
  21. package/dist/addons/bird-nabirds-classifier/index.js +517 -0
  22. package/dist/addons/bird-nabirds-classifier/index.js.map +1 -0
  23. package/dist/addons/bird-nabirds-classifier/index.mjs +9 -0
  24. package/dist/addons/bird-nabirds-classifier/index.mjs.map +1 -0
  25. package/dist/addons/camera-native-detection/index.d.mts +32 -0
  26. package/dist/addons/camera-native-detection/index.d.ts +32 -0
  27. package/dist/addons/camera-native-detection/index.js +99 -0
  28. package/dist/addons/camera-native-detection/index.js.map +1 -0
  29. package/dist/addons/camera-native-detection/index.mjs +7 -0
  30. package/dist/addons/camera-native-detection/index.mjs.map +1 -0
  31. package/dist/addons/face-detection/index.d.mts +24 -0
  32. package/dist/addons/face-detection/index.d.ts +24 -0
  33. package/dist/addons/face-detection/index.js +513 -0
  34. package/dist/addons/face-detection/index.js.map +1 -0
  35. package/dist/addons/face-detection/index.mjs +10 -0
  36. package/dist/addons/face-detection/index.mjs.map +1 -0
  37. package/dist/addons/face-recognition/index.d.mts +24 -0
  38. package/dist/addons/face-recognition/index.d.ts +24 -0
  39. package/dist/addons/face-recognition/index.js +437 -0
  40. package/dist/addons/face-recognition/index.js.map +1 -0
  41. package/dist/addons/face-recognition/index.mjs +9 -0
  42. package/dist/addons/face-recognition/index.mjs.map +1 -0
  43. package/dist/addons/motion-detection/index.d.mts +26 -0
  44. package/dist/addons/motion-detection/index.d.ts +26 -0
  45. package/dist/addons/motion-detection/index.js +273 -0
  46. package/dist/addons/motion-detection/index.js.map +1 -0
  47. package/dist/addons/motion-detection/index.mjs +8 -0
  48. package/dist/addons/motion-detection/index.mjs.map +1 -0
  49. package/dist/addons/object-detection/index.d.mts +25 -0
  50. package/dist/addons/object-detection/index.d.ts +25 -0
  51. package/dist/addons/object-detection/index.js +673 -0
  52. package/dist/addons/object-detection/index.js.map +1 -0
  53. package/dist/addons/object-detection/index.mjs +10 -0
  54. package/dist/addons/object-detection/index.mjs.map +1 -0
  55. package/dist/addons/plate-detection/index.d.mts +25 -0
  56. package/dist/addons/plate-detection/index.d.ts +25 -0
  57. package/dist/addons/plate-detection/index.js +477 -0
  58. package/dist/addons/plate-detection/index.js.map +1 -0
  59. package/dist/addons/plate-detection/index.mjs +10 -0
  60. package/dist/addons/plate-detection/index.mjs.map +1 -0
  61. package/dist/addons/plate-recognition/index.d.mts +25 -0
  62. package/dist/addons/plate-recognition/index.d.ts +25 -0
  63. package/dist/addons/plate-recognition/index.js +470 -0
  64. package/dist/addons/plate-recognition/index.js.map +1 -0
  65. package/dist/addons/plate-recognition/index.mjs +9 -0
  66. package/dist/addons/plate-recognition/index.mjs.map +1 -0
  67. package/dist/chunk-3BKYLBBH.mjs +229 -0
  68. package/dist/chunk-3BKYLBBH.mjs.map +1 -0
  69. package/dist/chunk-4PC262GU.mjs +203 -0
  70. package/dist/chunk-4PC262GU.mjs.map +1 -0
  71. package/dist/chunk-6OR5TE7A.mjs +101 -0
  72. package/dist/chunk-6OR5TE7A.mjs.map +1 -0
  73. package/dist/chunk-7SZAISGP.mjs +210 -0
  74. package/dist/chunk-7SZAISGP.mjs.map +1 -0
  75. package/dist/chunk-AD2TFYZA.mjs +235 -0
  76. package/dist/chunk-AD2TFYZA.mjs.map +1 -0
  77. package/dist/chunk-CGYSSHHM.mjs +363 -0
  78. package/dist/chunk-CGYSSHHM.mjs.map +1 -0
  79. package/dist/chunk-IYHMGYGP.mjs +79 -0
  80. package/dist/chunk-IYHMGYGP.mjs.map +1 -0
  81. package/dist/chunk-J3IUBPRE.mjs +187 -0
  82. package/dist/chunk-J3IUBPRE.mjs.map +1 -0
  83. package/dist/chunk-KFZDJPYL.mjs +190 -0
  84. package/dist/chunk-KFZDJPYL.mjs.map +1 -0
  85. package/dist/chunk-KUO2BVFY.mjs +90 -0
  86. package/dist/chunk-KUO2BVFY.mjs.map +1 -0
  87. package/dist/chunk-PXBY3QOA.mjs +152 -0
  88. package/dist/chunk-PXBY3QOA.mjs.map +1 -0
  89. package/dist/chunk-XUKDL23Y.mjs +216 -0
  90. package/dist/chunk-XUKDL23Y.mjs.map +1 -0
  91. package/dist/chunk-Z26BVC7S.mjs +214 -0
  92. package/dist/chunk-Z26BVC7S.mjs.map +1 -0
  93. package/dist/chunk-Z5AHZQEZ.mjs +258 -0
  94. package/dist/chunk-Z5AHZQEZ.mjs.map +1 -0
  95. package/dist/index.d.mts +152 -0
  96. package/dist/index.d.ts +152 -0
  97. package/dist/index.js +2775 -0
  98. package/dist/index.js.map +1 -0
  99. package/dist/index.mjs +205 -0
  100. package/dist/index.mjs.map +1 -0
  101. package/package.json +43 -0
  102. package/python/coreml_inference.py +67 -0
  103. package/python/openvino_inference.py +76 -0
  104. package/python/pytorch_inference.py +74 -0
@@ -0,0 +1,258 @@
1
+ import {
2
+ cropRegion,
3
+ resizeAndNormalize
4
+ } from "./chunk-6OR5TE7A.mjs";
5
+ import {
6
+ resolveEngine
7
+ } from "./chunk-J3IUBPRE.mjs";
8
+
9
+ // src/addons/bird-nabirds-classifier/index.ts
10
+ import { BIRD_NABIRDS_MODELS } from "@camstack/types";
11
+ import * as fs from "fs";
12
+ import * as path from "path";
13
// Single logical label: concrete species names are produced dynamically at
// classify time, so only one "species" label definition is exposed.
const SPECIES_LABEL = { id: "species", name: "Bird Species" };
const SPECIES_LABELS = [SPECIES_LABEL];

// Empty mapping with preserveOriginal set: species class names are emitted
// unchanged rather than remapped.
const BIRD_CLASS_MAP = { mapping: {}, preserveOriginal: true };
16
/**
 * Locate and parse the species-label JSON for a model.
 *
 * Searches `modelsDir` for a model-specific file first
 * (`camstack-<modelId>-labels.json`), then falls back to the canonical
 * NABirds-404 labels file. Throws when neither file exists.
 */
function loadLabels(modelsDir, modelId) {
  const candidates = [
    `camstack-${modelId}-labels.json`,
    `camstack-bird-nabirds-404-labels.json`
  ];
  for (const fileName of candidates) {
    const fullPath = path.join(modelsDir, fileName);
    if (!fs.existsSync(fullPath)) continue;
    return JSON.parse(fs.readFileSync(fullPath, "utf-8"));
  }
  throw new Error(`BirdNABirdsClassifierAddon: labels JSON not found in ${modelsDir}`);
}
30
/**
 * Numerically stable softmax: the maximum logit is subtracted before
 * exponentiating so large inputs cannot overflow to Infinity.
 * Works on plain arrays and Float32Array alike (both provide .map).
 */
function softmax(logits) {
  let peak = -Infinity;
  for (const value of logits) {
    peak = Math.max(peak, value);
  }
  const exps = logits.map((value) => Math.exp(value - peak));
  let total = 0;
  for (const e of exps) {
    total += e;
  }
  return exps.map((value) => value / total);
}
36
// src/addons/bird-nabirds-classifier/index.ts

/**
 * Classifier addon: ResNet50 over the NABirds dataset (404 North American
 * species). Consumes "animal" crops and emits "species:*" classifications.
 */
var BirdNABirdsClassifierAddon = class {
  id = "bird-nabirds-classifier";
  slot = "classifier";
  inputClasses = ["animal"];
  outputClasses = ["species:*"];
  slotPriority = 0;
  requiredSteps = [];
  manifest = {
    id: "bird-nabirds-classifier",
    name: "Bird Classifier (NABirds, 404 species)",
    version: "0.1.0",
    description: "ResNet50 \u2014 404 North American bird species (NABirds dataset, ONNX + CoreML + OpenVINO)",
    packageName: "@camstack/vision",
    slot: "classifier",
    inputClasses: ["animal"],
    outputClasses: ["species:*"],
    supportsCustomModels: false,
    mayRequirePython: false,
    defaultConfig: {
      modelId: "bird-nabirds-404",
      runtime: "auto",
      backend: "cpu",
      minConfidence: 0.3
    }
  };
  engine;
  modelEntry;
  labels = [];
  minConfidence = 0.3;
  allowedSpecies;

  /**
   * Resolve configuration, look up the model catalog entry, load its label
   * list from disk, and construct the inference engine.
   */
  async initialize(ctx) {
    const config = ctx.addonConfig;
    const modelId = config["modelId"] ?? "bird-nabirds-404";
    const runtime = config["runtime"] ?? "auto";
    const backend = config["backend"] ?? "cpu";
    this.minConfidence = config["minConfidence"] ?? 0.3;
    // NOTE(review): only "allowedSpecies" is read here; the "regionFilter"
    // preset exposed in getConfigSchema() is never consumed — confirm intended.
    this.allowedSpecies = config["allowedSpecies"];

    const catalogEntry = BIRD_NABIRDS_MODELS.find((model) => model.id === modelId);
    if (!catalogEntry) {
      throw new Error(`BirdNABirdsClassifierAddon: unknown modelId "${modelId}"`);
    }
    this.modelEntry = catalogEntry;

    this.labels = loadLabels(ctx.locationPaths.models, modelId);

    const resolved = await resolveEngine({
      runtime,
      backend,
      modelEntry: catalogEntry,
      modelsDir: ctx.locationPaths.models
    });
    this.engine = resolved.engine;
  }

  /**
   * Zero out scores whose label is not in the configured allow-list
   * (case-insensitive). No-op when the allow-list is absent or empty.
   * Mutates `scores` in place.
   */
  applyRegionFilter(scores, labels) {
    const allowed = this.allowedSpecies;
    if (!allowed || allowed.length === 0) return;
    const allowedSet = new Set(allowed.map((name) => name.toLowerCase()));
    for (let index = 0; index < scores.length; index++) {
      if (!allowedSet.has(labels[index].toLowerCase())) {
        scores[index] = 0;
      }
    }
  }

  /**
   * Classify one animal crop. Returns at most one classification; the list
   * is empty when the best score falls below `minConfidence`.
   */
  async classify(input) {
    const startedAt = Date.now();
    const { width: inputW, height: inputH } = this.modelEntry.inputSize;

    // Crop the detected animal, then resize + ImageNet-normalize to NCHW.
    const crop = await cropRegion(input.frame.data, input.roi);
    const tensor = await resizeAndNormalize(crop, inputW, inputH, "imagenet", "nchw");

    // Run the model and convert raw logits to probabilities.
    const logits = await this.engine.run(tensor, [1, 3, inputH, inputW]);
    const scores = softmax(logits);

    // Regional allow-list: zero out excluded species, then re-normalize the
    // remaining mass so probabilities still sum to 1.
    this.applyRegionFilter(scores, this.labels);
    let remaining = 0;
    for (const s of scores) {
      remaining += s;
    }
    if (remaining > 0) {
      for (let i = 0; i < scores.length; i++) {
        scores[i] = (scores[i] ?? 0) / remaining;
      }
    }

    // Argmax over the (possibly re-normalized) probabilities.
    let bestIndex = 0;
    let bestScore = scores[0] ?? 0;
    for (let i = 1; i < scores.length; i++) {
      const candidate = scores[i] ?? 0;
      if (candidate > bestScore) {
        bestScore = candidate;
        bestIndex = i;
      }
    }

    if (bestScore < this.minConfidence) {
      return {
        classifications: [],
        inferenceMs: Date.now() - startedAt,
        modelId: this.modelEntry.id
      };
    }

    // Fall back to a synthetic name if the label list is shorter than the
    // model's output vector.
    const speciesName = this.labels[bestIndex] ?? `species_${bestIndex}`;
    return {
      classifications: [
        {
          class: speciesName,
          score: bestScore
        }
      ],
      inferenceMs: Date.now() - startedAt,
      modelId: this.modelEntry.id
    };
  }

  /** Release the inference engine, if one was created. */
  async shutdown() {
    await this.engine?.dispose();
  }

  /** UI schema: model selection, confidence threshold, region filter, runtime. */
  getConfigSchema() {
    const modelSection = {
      id: "model",
      title: "Model",
      columns: 1,
      fields: [
        {
          key: "modelId",
          label: "Model",
          type: "model-selector",
          catalog: [...BIRD_NABIRDS_MODELS],
          allowCustom: false,
          allowConversion: false,
          acceptFormats: ["onnx", "coreml", "openvino"],
          requiredMetadata: ["inputSize", "labels"],
          outputFormatHint: "classification"
        }
      ]
    };
    const thresholdSection = {
      id: "thresholds",
      title: "Classification Settings",
      columns: 1,
      fields: [
        {
          key: "minConfidence",
          label: "Minimum Confidence",
          type: "slider",
          min: 0.05,
          max: 1,
          step: 0.05,
          default: 0.3
        }
      ]
    };
    const regionSection = {
      id: "region",
      title: "Regional Filter",
      columns: 1,
      fields: [
        {
          key: "regionFilter",
          label: "Region Preset",
          type: "select",
          options: [
            { value: "", label: "None (all 404 species)" },
            { value: "north-america-east", label: "North America \u2014 Eastern" },
            { value: "north-america-west", label: "North America \u2014 Western" },
            { value: "north-america-south", label: "North America \u2014 Southern" }
          ]
        },
        {
          key: "allowedSpecies",
          label: "Custom Allowed Species (comma-separated)",
          type: "text"
        }
      ]
    };
    const runtimeSection = {
      id: "runtime",
      title: "Runtime",
      columns: 2,
      fields: [
        {
          key: "runtime",
          label: "Runtime",
          type: "select",
          options: [
            { value: "auto", label: "Auto (recommended)" },
            { value: "onnx", label: "ONNX Runtime" },
            { value: "coreml", label: "CoreML (Apple)" },
            { value: "openvino", label: "OpenVINO (Intel)" }
          ]
        },
        {
          key: "backend",
          label: "Backend",
          type: "select",
          dependsOn: { runtime: "onnx" },
          options: [
            { value: "cpu", label: "CPU" },
            { value: "coreml", label: "CoreML" },
            { value: "cuda", label: "CUDA (NVIDIA)" }
          ]
        }
      ]
    };
    return {
      sections: [modelSection, thresholdSection, regionSection, runtimeSection]
    };
  }

  getClassMap() {
    return BIRD_CLASS_MAP;
  }

  /** Defensive copy of the static model catalog. */
  getModelCatalog() {
    return [...BIRD_NABIRDS_MODELS];
  }

  getAvailableModels() {
    return [];
  }

  getActiveLabels() {
    return SPECIES_LABELS;
  }

  /** Report engine availability; defaults to onnx/cpu before initialize(). */
  async probe() {
    return {
      available: true,
      runtime: this.engine?.runtime ?? "onnx",
      device: this.engine?.device ?? "cpu",
      capabilities: ["fp32"]
    };
  }
};

export {
  BirdNABirdsClassifierAddon
};
258
+ //# sourceMappingURL=chunk-Z5AHZQEZ.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/addons/bird-nabirds-classifier/index.ts"],"sourcesContent":["import type {\n IClassifierProvider,\n IDetectionAddon,\n AddonManifest,\n AddonContext,\n CropInput,\n ClassifierOutput,\n ConfigUISchema,\n ClassMapDefinition,\n ProbeResult,\n ModelCatalogEntry,\n DetectionModel,\n LabelDefinition,\n IInferenceEngine,\n} from '@camstack/types'\nimport { BIRD_NABIRDS_MODELS } from '@camstack/types'\nimport { cropRegion, resizeAndNormalize } from '../../shared/image-utils.js'\nimport { resolveEngine } from '../../shared/engine-resolver.js'\n\nimport * as fs from 'node:fs'\nimport * as path from 'node:path'\n\nconst SPECIES_LABEL: LabelDefinition = { id: 'species', name: 'Bird Species' }\nconst SPECIES_LABELS: readonly LabelDefinition[] = [SPECIES_LABEL]\nconst BIRD_CLASS_MAP: ClassMapDefinition = { mapping: {}, preserveOriginal: true }\n\n/** Load NABirds labels from JSON file in modelsDir */\nfunction loadLabels(modelsDir: string, modelId: string): readonly string[] {\n const labelNames = [\n `camstack-${modelId}-labels.json`,\n `camstack-bird-nabirds-404-labels.json`,\n ]\n for (const name of labelNames) {\n const labelPath = path.join(modelsDir, name)\n if (fs.existsSync(labelPath)) {\n const raw = fs.readFileSync(labelPath, 'utf-8')\n return JSON.parse(raw) as string[]\n }\n }\n throw new Error(`BirdNABirdsClassifierAddon: labels JSON not found in ${modelsDir}`)\n}\n\nfunction softmax(logits: Float32Array): Float32Array {\n const max = logits.reduce((a, b) => Math.max(a, b), -Infinity)\n const exps = logits.map((v) => Math.exp(v - max))\n const sum = exps.reduce((a, b) => a + b, 0)\n return exps.map((v) => v / sum) as unknown as Float32Array\n}\n\nexport default class BirdNABirdsClassifierAddon implements IClassifierProvider, IDetectionAddon {\n readonly id = 'bird-nabirds-classifier'\n readonly slot = 'classifier' as const\n readonly inputClasses = ['animal'] as const\n readonly outputClasses = ['species:*'] as const\n readonly 
slotPriority = 0\n readonly requiredSteps = [] as const\n readonly manifest: AddonManifest = {\n id: 'bird-nabirds-classifier',\n name: 'Bird Classifier (NABirds, 404 species)',\n version: '0.1.0',\n description: 'ResNet50 — 404 North American bird species (NABirds dataset, ONNX + CoreML + OpenVINO)',\n packageName: '@camstack/vision',\n slot: 'classifier',\n inputClasses: ['animal'],\n outputClasses: ['species:*'],\n supportsCustomModels: false,\n mayRequirePython: false,\n defaultConfig: {\n modelId: 'bird-nabirds-404',\n runtime: 'auto',\n backend: 'cpu',\n minConfidence: 0.3,\n },\n }\n\n private engine!: IInferenceEngine\n private modelEntry!: ModelCatalogEntry\n private labels: readonly string[] = []\n private minConfidence = 0.3\n private allowedSpecies: string[] | undefined\n\n async initialize(ctx: AddonContext): Promise<void> {\n const cfg = ctx.addonConfig\n const modelId = (cfg['modelId'] as string | undefined) ?? 'bird-nabirds-404'\n const runtime = (cfg['runtime'] as string | undefined) ?? 'auto'\n const backend = (cfg['backend'] as string | undefined) ?? 'cpu'\n this.minConfidence = (cfg['minConfidence'] as number | undefined) ?? 
0.3\n this.allowedSpecies = (cfg['allowedSpecies'] as string[] | undefined)\n\n const entry = BIRD_NABIRDS_MODELS.find((m) => m.id === modelId)\n if (!entry) {\n throw new Error(`BirdNABirdsClassifierAddon: unknown modelId \"${modelId}\"`)\n }\n this.modelEntry = entry\n\n // Load labels from JSON file\n this.labels = loadLabels(ctx.locationPaths.models, modelId)\n\n const resolved = await resolveEngine({\n runtime: runtime as 'auto',\n backend,\n modelEntry: entry,\n modelsDir: ctx.locationPaths.models,\n })\n this.engine = resolved.engine\n }\n\n private applyRegionFilter(scores: Float32Array, labels: string[]): void {\n if (!this.allowedSpecies || this.allowedSpecies.length === 0) return\n const allowedSet = new Set(this.allowedSpecies.map(s => s.toLowerCase()))\n for (let i = 0; i < scores.length; i++) {\n if (!allowedSet.has(labels[i]!.toLowerCase())) {\n scores[i] = 0\n }\n }\n }\n\n async classify(input: CropInput): Promise<ClassifierOutput> {\n const start = Date.now()\n const { width: inputW, height: inputH } = this.modelEntry.inputSize\n\n // Crop the animal region\n const animalCrop = await cropRegion(input.frame.data, input.roi)\n\n // Resize to 224x224, ImageNet normalization, NCHW\n const normalized = await resizeAndNormalize(animalCrop, inputW, inputH, 'imagenet', 'nchw')\n\n // Run inference — output shape: [1, 404]\n const rawOutput = await this.engine.run(normalized, [1, 3, inputH, inputW])\n\n // Softmax to get probabilities\n const probs = softmax(rawOutput)\n\n // Apply regional filter (zeroes out non-allowed species, then re-normalize)\n this.applyRegionFilter(probs, this.labels as string[])\n const filteredSum = probs.reduce((a, b) => a + b, 0)\n if (filteredSum > 0) {\n for (let i = 0; i < probs.length; i++) {\n probs[i] = (probs[i] ?? 0) / filteredSum\n }\n }\n\n // Find argmax\n let maxIdx = 0\n let maxScore = probs[0] ?? 0\n for (let i = 1; i < probs.length; i++) {\n const score = probs[i] ?? 
0\n if (score > maxScore) {\n maxScore = score\n maxIdx = i\n }\n }\n\n if (maxScore < this.minConfidence) {\n return {\n classifications: [],\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n const label = this.labels[maxIdx] ?? `species_${maxIdx}`\n\n return {\n classifications: [\n {\n class: label,\n score: maxScore,\n },\n ],\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n async shutdown(): Promise<void> {\n await this.engine?.dispose()\n }\n\n getConfigSchema(): ConfigUISchema {\n return {\n sections: [\n {\n id: 'model',\n title: 'Model',\n columns: 1,\n fields: [\n {\n key: 'modelId',\n label: 'Model',\n type: 'model-selector',\n catalog: [...BIRD_NABIRDS_MODELS],\n allowCustom: false,\n allowConversion: false,\n acceptFormats: ['onnx', 'coreml', 'openvino'],\n requiredMetadata: ['inputSize', 'labels'],\n outputFormatHint: 'classification',\n },\n ],\n },\n {\n id: 'thresholds',\n title: 'Classification Settings',\n columns: 1,\n fields: [\n {\n key: 'minConfidence',\n label: 'Minimum Confidence',\n type: 'slider',\n min: 0.05,\n max: 1.0,\n step: 0.05,\n default: 0.3,\n },\n ],\n },\n {\n id: 'region',\n title: 'Regional Filter',\n columns: 1,\n fields: [\n {\n key: 'regionFilter',\n label: 'Region Preset',\n type: 'select',\n options: [\n { value: '', label: 'None (all 404 species)' },\n { value: 'north-america-east', label: 'North America — Eastern' },\n { value: 'north-america-west', label: 'North America — Western' },\n { value: 'north-america-south', label: 'North America — Southern' },\n ],\n },\n {\n key: 'allowedSpecies',\n label: 'Custom Allowed Species (comma-separated)',\n type: 'text',\n },\n ],\n },\n {\n id: 'runtime',\n title: 'Runtime',\n columns: 2,\n fields: [\n {\n key: 'runtime',\n label: 'Runtime',\n type: 'select',\n options: [\n { value: 'auto', label: 'Auto (recommended)' },\n { value: 'onnx', label: 'ONNX Runtime' },\n { value: 'coreml', label: 'CoreML (Apple)' },\n { value: 
'openvino', label: 'OpenVINO (Intel)' },\n ],\n },\n {\n key: 'backend',\n label: 'Backend',\n type: 'select',\n dependsOn: { runtime: 'onnx' },\n options: [\n { value: 'cpu', label: 'CPU' },\n { value: 'coreml', label: 'CoreML' },\n { value: 'cuda', label: 'CUDA (NVIDIA)' },\n ],\n },\n ],\n },\n ],\n }\n }\n\n getClassMap(): ClassMapDefinition {\n return BIRD_CLASS_MAP\n }\n\n getModelCatalog(): ModelCatalogEntry[] {\n return [...BIRD_NABIRDS_MODELS]\n }\n\n getAvailableModels(): DetectionModel[] {\n return []\n }\n\n getActiveLabels(): readonly LabelDefinition[] {\n return SPECIES_LABELS\n }\n\n async probe(): Promise<ProbeResult> {\n return {\n available: true,\n runtime: this.engine?.runtime ?? 'onnx',\n device: this.engine?.device ?? 'cpu',\n capabilities: ['fp32'],\n }\n }\n}\n"],"mappings":";;;;;;;;;AAeA,SAAS,2BAA2B;AAIpC,YAAY,QAAQ;AACpB,YAAY,UAAU;AAEtB,IAAM,gBAAiC,EAAE,IAAI,WAAW,MAAM,eAAe;AAC7E,IAAM,iBAA6C,CAAC,aAAa;AACjE,IAAM,iBAAqC,EAAE,SAAS,CAAC,GAAG,kBAAkB,KAAK;AAGjF,SAAS,WAAW,WAAmB,SAAoC;AACzE,QAAM,aAAa;AAAA,IACjB,YAAY,OAAO;AAAA,IACnB;AAAA,EACF;AACA,aAAW,QAAQ,YAAY;AAC7B,UAAM,YAAiB,UAAK,WAAW,IAAI;AAC3C,QAAO,cAAW,SAAS,GAAG;AAC5B,YAAM,MAAS,gBAAa,WAAW,OAAO;AAC9C,aAAO,KAAK,MAAM,GAAG;AAAA,IACvB;AAAA,EACF;AACA,QAAM,IAAI,MAAM,wDAAwD,SAAS,EAAE;AACrF;AAEA,SAAS,QAAQ,QAAoC;AACnD,QAAM,MAAM,OAAO,OAAO,CAAC,GAAG,MAAM,KAAK,IAAI,GAAG,CAAC,GAAG,SAAS;AAC7D,QAAM,OAAO,OAAO,IAAI,CAAC,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAChD,QAAM,MAAM,KAAK,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAC1C,SAAO,KAAK,IAAI,CAAC,MAAM,IAAI,GAAG;AAChC;AAEA,IAAqB,6BAArB,MAAgG;AAAA,EACrF,KAAK;AAAA,EACL,OAAO;AAAA,EACP,eAAe,CAAC,QAAQ;AAAA,EACxB,gBAAgB,CAAC,WAAW;AAAA,EAC5B,eAAe;AAAA,EACf,gBAAgB,CAAC;AAAA,EACjB,WAA0B;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,aAAa;AAAA,IACb,MAAM;AAAA,IACN,cAAc,CAAC,QAAQ;AAAA,IACvB,eAAe,CAAC,WAAW;AAAA,IAC3B,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,eAAe;AAAA,MACb,SAAS;AAAA,MACT,SAAS;AAAA,MACT,SAAS;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,EACF;AAAA,EAEQ;AAAA,EACA;AAAA,EACA,S
AA4B,CAAC;AAAA,EAC7B,gBAAgB;AAAA,EAChB;AAAA,EAER,MAAM,WAAW,KAAkC;AACjD,UAAM,MAAM,IAAI;AAChB,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,UAAM,UAAW,IAAI,SAAS,KAA4B;AAC1D,SAAK,gBAAiB,IAAI,eAAe,KAA4B;AACrE,SAAK,iBAAkB,IAAI,gBAAgB;AAE3C,UAAM,QAAQ,oBAAoB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO;AAC9D,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,gDAAgD,OAAO,GAAG;AAAA,IAC5E;AACA,SAAK,aAAa;AAGlB,SAAK,SAAS,WAAW,IAAI,cAAc,QAAQ,OAAO;AAE1D,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ,WAAW,IAAI,cAAc;AAAA,IAC/B,CAAC;AACD,SAAK,SAAS,SAAS;AAAA,EACzB;AAAA,EAEQ,kBAAkB,QAAsB,QAAwB;AACtE,QAAI,CAAC,KAAK,kBAAkB,KAAK,eAAe,WAAW,EAAG;AAC9D,UAAM,aAAa,IAAI,IAAI,KAAK,eAAe,IAAI,OAAK,EAAE,YAAY,CAAC,CAAC;AACxE,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,UAAI,CAAC,WAAW,IAAI,OAAO,CAAC,EAAG,YAAY,CAAC,GAAG;AAC7C,eAAO,CAAC,IAAI;AAAA,MACd;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,OAA6C;AAC1D,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,EAAE,OAAO,QAAQ,QAAQ,OAAO,IAAI,KAAK,WAAW;AAG1D,UAAM,aAAa,MAAM,WAAW,MAAM,MAAM,MAAM,MAAM,GAAG;AAG/D,UAAM,aAAa,MAAM,mBAAmB,YAAY,QAAQ,QAAQ,YAAY,MAAM;AAG1F,UAAM,YAAY,MAAM,KAAK,OAAO,IAAI,YAAY,CAAC,GAAG,GAAG,QAAQ,MAAM,CAAC;AAG1E,UAAM,QAAQ,QAAQ,SAAS;AAG/B,SAAK,kBAAkB,OAAO,KAAK,MAAkB;AACrD,UAAM,cAAc,MAAM,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AACnD,QAAI,cAAc,GAAG;AACnB,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAM,CAAC,KAAK,MAAM,CAAC,KAAK,KAAK;AAAA,MAC/B;AAAA,IACF;AAGA,QAAI,SAAS;AACb,QAAI,WAAW,MAAM,CAAC,KAAK;AAC3B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,QAAQ,MAAM,CAAC,KAAK;AAC1B,UAAI,QAAQ,UAAU;AACpB,mBAAW;AACX,iBAAS;AAAA,MACX;AAAA,IACF;AAEA,QAAI,WAAW,KAAK,eAAe;AACjC,aAAO;AAAA,QACL,iBAAiB,CAAC;AAAA,QAClB,aAAa,KAAK,IAAI,IAAI;AAAA,QAC1B,SAAS,KAAK,WAAW;AAAA,MAC3B;AAAA,IACF;AAEA,UAAM,QAAQ,KAAK,OAAO,MAAM,KAAK,WAAW,MAAM;AAEtD,WAAO;AAAA,MACL,iBAAiB;AAAA,QACf;AAAA,UACE,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AAAA,MACA,aAAa,KAAK,IAAI,IAAI;AAAA,MAC1B,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAM,WAA0B;AAC9B,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC7B;AAAA,EAEA,kBAAkC;AAChC,WAAO;AAAA,MACL,UAAU;AAAA,
QACR;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS,CAAC,GAAG,mBAAmB;AAAA,cAChC,aAAa;AAAA,cACb,iBAAiB;AAAA,cACjB,eAAe,CAAC,QAAQ,UAAU,UAAU;AAAA,cAC5C,kBAAkB,CAAC,aAAa,QAAQ;AAAA,cACxC,kBAAkB;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,KAAK;AAAA,cACL,KAAK;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,YACX;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS;AAAA,gBACP,EAAE,OAAO,IAAI,OAAO,yBAAyB;AAAA,gBAC7C,EAAE,OAAO,sBAAsB,OAAO,+BAA0B;AAAA,gBAChE,EAAE,OAAO,sBAAsB,OAAO,+BAA0B;AAAA,gBAChE,EAAE,OAAO,uBAAuB,OAAO,gCAA2B;AAAA,cACpE;AAAA,YACF;AAAA,YACA;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS;AAAA,gBACP,EAAE,OAAO,QAAQ,OAAO,qBAAqB;AAAA,gBAC7C,EAAE,OAAO,QAAQ,OAAO,eAAe;AAAA,gBACvC,EAAE,OAAO,UAAU,OAAO,iBAAiB;AAAA,gBAC3C,EAAE,OAAO,YAAY,OAAO,mBAAmB;AAAA,cACjD;AAAA,YACF;AAAA,YACA;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,WAAW,EAAE,SAAS,OAAO;AAAA,cAC7B,SAAS;AAAA,gBACP,EAAE,OAAO,OAAO,OAAO,MAAM;AAAA,gBAC7B,EAAE,OAAO,UAAU,OAAO,SAAS;AAAA,gBACnC,EAAE,OAAO,QAAQ,OAAO,gBAAgB;AAAA,cAC1C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,cAAkC;AAChC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAuC;AACrC,WAAO,CAAC,GAAG,mBAAmB;AAAA,EAChC;AAAA,EAEA,qBAAuC;AACrC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,kBAA8C;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QAA8B;AAClC,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,KAAK,QAAQ,WAAW;AAAA,MACjC,QAAQ,KAAK,QAAQ,UAAU;AAAA,MAC/B,cAAc,CAAC,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,152 @@
1
+ import { BoundingBox, SpatialDetection, IInferenceEngine, DetectionRuntime, DetectionDevice, ModelCatalogEntry, ModelFormat } from '@camstack/types';
2
+ export { default as ObjectDetectionAddon } from './addons/object-detection/index.mjs';
3
+ export { default as MotionDetectionAddon } from './addons/motion-detection/index.mjs';
4
+ export { default as FaceDetectionAddon } from './addons/face-detection/index.mjs';
5
+ export { default as FaceRecognitionAddon } from './addons/face-recognition/index.mjs';
6
+ export { default as PlateDetectionAddon } from './addons/plate-detection/index.mjs';
7
+ export { default as PlateRecognitionAddon } from './addons/plate-recognition/index.mjs';
8
+ export { default as AudioClassificationAddon } from './addons/audio-classification/index.mjs';
9
+ export { default as CameraNativeDetectionAddon } from './addons/camera-native-detection/index.mjs';
10
+ export { default as BirdGlobalClassifierAddon } from './addons/bird-global-classifier/index.mjs';
11
+ export { default as BirdNABirdsClassifierAddon } from './addons/bird-nabirds-classifier/index.mjs';
12
+ export { default as AnimalClassifierAddon } from './addons/animal-classifier/index.mjs';
13
+
14
+ /** Decode JPEG to raw RGB pixels */
15
+ declare function jpegToRgb(jpeg: Buffer): Promise<{
16
+ data: Buffer;
17
+ width: number;
18
+ height: number;
19
+ }>;
20
+ /** Crop a region from a JPEG buffer */
21
+ declare function cropRegion(jpeg: Buffer, roi: BoundingBox): Promise<Buffer>;
22
+ /** Letterbox resize for YOLO: resize preserving aspect ratio, pad to square */
23
+ declare function letterbox(jpeg: Buffer, targetSize: number): Promise<{
24
+ data: Float32Array;
25
+ scale: number;
26
+ padX: number;
27
+ padY: number;
28
+ originalWidth: number;
29
+ originalHeight: number;
30
+ }>;
31
+ /** Resize and normalize to Float32Array */
32
+ declare function resizeAndNormalize(jpeg: Buffer, targetWidth: number, targetHeight: number, normalization: 'zero-one' | 'imagenet' | 'none', layout: 'nchw' | 'nhwc'): Promise<Float32Array>;
33
+ /** Convert raw RGB to grayscale Uint8Array */
34
+ declare function rgbToGrayscale(rgb: Buffer, width: number, height: number): Uint8Array;
35
+
36
+ interface YoloPostprocessOptions {
37
+ readonly confidence: number;
38
+ readonly iouThreshold: number;
39
+ readonly labels: readonly string[];
40
+ readonly scale: number;
41
+ readonly padX: number;
42
+ readonly padY: number;
43
+ readonly originalWidth: number;
44
+ readonly originalHeight: number;
45
+ }
46
+ /** Calculate IoU between two bounding boxes */
47
+ declare function iou(a: BoundingBox, b: BoundingBox): number;
48
+ /** Non-maximum suppression — returns indices of kept boxes (sorted by score desc) */
49
+ declare function nms(boxes: ReadonlyArray<{
50
+ readonly bbox: BoundingBox;
51
+ readonly score: number;
52
+ }>, iouThreshold: number): number[];
53
+ /** Full YOLO v8/v9 postprocessing: filter → NMS → scale back to original coords */
54
+ declare function yoloPostprocess(output: Float32Array, numClasses: number, numBoxes: number, options: YoloPostprocessOptions): SpatialDetection[];
55
+
56
+ interface ScrfdRawOutputs {
57
+ readonly [key: string]: Float32Array;
58
+ }
59
+ declare function scrfdPostprocess(outputs: ScrfdRawOutputs, confidence: number, inputSize: number, originalWidth: number, originalHeight: number): SpatialDetection[];
60
+
61
+ /** L2 normalize a vector in-place, returning a new Float32Array */
62
+ declare function l2Normalize(vec: Float32Array): Float32Array;
63
+ /** Cosine similarity between two embeddings (assumes they are already L2-normalized) */
64
+ declare function cosineSimilarity(a: Float32Array, b: Float32Array): number;
65
+
66
+ /** Decode CTC output to text.
67
+ *
68
+ * Output shape: [1, seqLen, numChars]
69
+ * Algorithm: argmax per timestep → collapse consecutive duplicates → remove blank (index 0) → join
70
+ */
71
+ declare function ctcDecode(output: Float32Array, seqLen: number, numChars: number, charset: readonly string[]): {
72
+ text: string;
73
+ confidence: number;
74
+ };
75
+
76
+ interface AudioClassification {
77
+ readonly className: string;
78
+ readonly score: number;
79
+ }
80
+ /** Average YAMNET scores across frames, return top classes above threshold */
81
+ declare function yamnetPostprocess(output: Float32Array, numFrames: number, numClasses: number, classNames: readonly string[], minScore: number): AudioClassification[];
82
+
83
+ declare class NodeInferenceEngine implements IInferenceEngine {
84
+ private readonly modelPath;
85
+ private readonly backend;
86
+ readonly runtime: DetectionRuntime;
87
+ readonly device: DetectionDevice;
88
+ private session;
89
+ constructor(modelPath: string, backend: string);
90
+ initialize(): Promise<void>;
91
+ run(input: Float32Array, inputShape: readonly number[]): Promise<Float32Array>;
92
+ runMultiOutput(input: Float32Array, inputShape: readonly number[]): Promise<Record<string, Float32Array>>;
93
+ dispose(): Promise<void>;
94
+ }
95
+
96
+ declare class PythonInferenceEngine implements IInferenceEngine {
97
+ private readonly pythonPath;
98
+ private readonly modelPath;
99
+ private readonly extraArgs;
100
+ readonly runtime: DetectionRuntime;
101
+ readonly device: DetectionDevice;
102
+ private process;
103
+ private receiveBuffer;
104
+ private pendingResolve;
105
+ private pendingReject;
106
+ constructor(pythonPath: string, runtime: DetectionRuntime, modelPath: string, extraArgs?: readonly string[]);
107
+ initialize(): Promise<void>;
108
+ private _tryReceive;
109
+ /** Send JPEG buffer, receive JSON detection results */
110
+ runJpeg(jpeg: Buffer): Promise<Record<string, unknown>>;
111
+ /** IInferenceEngine.run — wraps runJpeg for compatibility */
112
+ run(_input: Float32Array, _inputShape: readonly number[]): Promise<Float32Array>;
113
+ /** IInferenceEngine.runMultiOutput — not supported by Python engine (operates on JPEG input) */
114
+ runMultiOutput(_input: Float32Array, _inputShape: readonly number[]): Promise<Record<string, Float32Array>>;
115
+ dispose(): Promise<void>;
116
+ }
117
+
118
+ interface EngineResolverOptions {
119
+ readonly runtime: DetectionRuntime | 'auto';
120
+ readonly backend: string;
121
+ readonly modelEntry: ModelCatalogEntry;
122
+ readonly modelsDir: string;
123
+ readonly pythonPath?: string;
124
+ readonly downloadModel?: (url: string, destDir: string) => Promise<string>;
125
+ }
126
+ interface ResolvedEngine {
127
+ readonly engine: IInferenceEngine;
128
+ readonly format: ModelFormat;
129
+ readonly modelPath: string;
130
+ }
131
+ declare function resolveEngine(options: EngineResolverOptions): Promise<ResolvedEngine>;
132
+ /** Probe which ONNX execution providers are available on this system */
133
+ declare function probeOnnxBackends(): Promise<string[]>;
134
+
135
+ interface MotionRegion {
136
+ readonly bbox: BoundingBox;
137
+ readonly pixelCount: number;
138
+ readonly intensity: number;
139
+ }
140
+ /**
141
+ * Detect motion by frame differencing.
142
+ *
143
+ * @param current - Grayscale pixel array for the current frame (Uint8Array, length = width * height)
144
+ * @param previous - Grayscale pixel array for the previous frame
145
+ * @param width - Frame width in pixels
146
+ * @param height - Frame height in pixels
147
+ * @param threshold - Pixel diff threshold 0-255; differences below this are ignored
148
+ * @param minArea - Minimum number of changed pixels for a region to be reported
149
+ */
150
+ declare function detectMotion(current: Uint8Array, previous: Uint8Array, width: number, height: number, threshold: number, minArea: number): MotionRegion[];
151
+
152
+ export { type EngineResolverOptions, type MotionRegion, NodeInferenceEngine, PythonInferenceEngine, type ResolvedEngine, cosineSimilarity, cropRegion, ctcDecode, detectMotion, iou, jpegToRgb, l2Normalize, letterbox, nms, probeOnnxBackends, resizeAndNormalize, resolveEngine, rgbToGrayscale, scrfdPostprocess, yamnetPostprocess, yoloPostprocess };
@@ -0,0 +1,152 @@
1
+ import { BoundingBox, SpatialDetection, IInferenceEngine, DetectionRuntime, DetectionDevice, ModelCatalogEntry, ModelFormat } from '@camstack/types';
2
+ export { default as ObjectDetectionAddon } from './addons/object-detection/index.js';
3
+ export { default as MotionDetectionAddon } from './addons/motion-detection/index.js';
4
+ export { default as FaceDetectionAddon } from './addons/face-detection/index.js';
5
+ export { default as FaceRecognitionAddon } from './addons/face-recognition/index.js';
6
+ export { default as PlateDetectionAddon } from './addons/plate-detection/index.js';
7
+ export { default as PlateRecognitionAddon } from './addons/plate-recognition/index.js';
8
+ export { default as AudioClassificationAddon } from './addons/audio-classification/index.js';
9
+ export { default as CameraNativeDetectionAddon } from './addons/camera-native-detection/index.js';
10
+ export { default as BirdGlobalClassifierAddon } from './addons/bird-global-classifier/index.js';
11
+ export { default as BirdNABirdsClassifierAddon } from './addons/bird-nabirds-classifier/index.js';
12
+ export { default as AnimalClassifierAddon } from './addons/animal-classifier/index.js';
13
+
14
+ /** Decode JPEG to raw RGB pixels */
15
+ declare function jpegToRgb(jpeg: Buffer): Promise<{
16
+ data: Buffer;
17
+ width: number;
18
+ height: number;
19
+ }>;
20
+ /** Crop a region from a JPEG buffer */
21
+ declare function cropRegion(jpeg: Buffer, roi: BoundingBox): Promise<Buffer>;
22
+ /** Letterbox resize for YOLO: resize preserving aspect ratio, pad to square */
23
+ declare function letterbox(jpeg: Buffer, targetSize: number): Promise<{
24
+ data: Float32Array;
25
+ scale: number;
26
+ padX: number;
27
+ padY: number;
28
+ originalWidth: number;
29
+ originalHeight: number;
30
+ }>;
31
+ /** Resize and normalize to Float32Array */
32
+ declare function resizeAndNormalize(jpeg: Buffer, targetWidth: number, targetHeight: number, normalization: 'zero-one' | 'imagenet' | 'none', layout: 'nchw' | 'nhwc'): Promise<Float32Array>;
33
+ /** Convert raw RGB to grayscale Uint8Array */
34
+ declare function rgbToGrayscale(rgb: Buffer, width: number, height: number): Uint8Array;
35
+
36
+ interface YoloPostprocessOptions {
37
+ readonly confidence: number;
38
+ readonly iouThreshold: number;
39
+ readonly labels: readonly string[];
40
+ readonly scale: number;
41
+ readonly padX: number;
42
+ readonly padY: number;
43
+ readonly originalWidth: number;
44
+ readonly originalHeight: number;
45
+ }
46
+ /** Calculate IoU between two bounding boxes */
47
+ declare function iou(a: BoundingBox, b: BoundingBox): number;
48
+ /** Non-maximum suppression — returns indices of kept boxes (sorted by score desc) */
49
+ declare function nms(boxes: ReadonlyArray<{
50
+ readonly bbox: BoundingBox;
51
+ readonly score: number;
52
+ }>, iouThreshold: number): number[];
53
+ /** Full YOLO v8/v9 postprocessing: filter → NMS → scale back to original coords */
54
+ declare function yoloPostprocess(output: Float32Array, numClasses: number, numBoxes: number, options: YoloPostprocessOptions): SpatialDetection[];
55
+
56
+ interface ScrfdRawOutputs {
57
+ readonly [key: string]: Float32Array;
58
+ }
59
+ declare function scrfdPostprocess(outputs: ScrfdRawOutputs, confidence: number, inputSize: number, originalWidth: number, originalHeight: number): SpatialDetection[];
60
+
61
+ /** L2 normalize a vector in-place, returning a new Float32Array */
62
+ declare function l2Normalize(vec: Float32Array): Float32Array;
63
+ /** Cosine similarity between two embeddings (assumes they are already L2-normalized) */
64
+ declare function cosineSimilarity(a: Float32Array, b: Float32Array): number;
65
+
66
+ /** Decode CTC output to text.
67
+ *
68
+ * Output shape: [1, seqLen, numChars]
69
+ * Algorithm: argmax per timestep → collapse consecutive duplicates → remove blank (index 0) → join
70
+ */
71
+ declare function ctcDecode(output: Float32Array, seqLen: number, numChars: number, charset: readonly string[]): {
72
+ text: string;
73
+ confidence: number;
74
+ };
75
+
76
+ interface AudioClassification {
77
+ readonly className: string;
78
+ readonly score: number;
79
+ }
80
+ /** Average YAMNET scores across frames, return top classes above threshold */
81
+ declare function yamnetPostprocess(output: Float32Array, numFrames: number, numClasses: number, classNames: readonly string[], minScore: number): AudioClassification[];
82
+
83
+ declare class NodeInferenceEngine implements IInferenceEngine {
84
+ private readonly modelPath;
85
+ private readonly backend;
86
+ readonly runtime: DetectionRuntime;
87
+ readonly device: DetectionDevice;
88
+ private session;
89
+ constructor(modelPath: string, backend: string);
90
+ initialize(): Promise<void>;
91
+ run(input: Float32Array, inputShape: readonly number[]): Promise<Float32Array>;
92
+ runMultiOutput(input: Float32Array, inputShape: readonly number[]): Promise<Record<string, Float32Array>>;
93
+ dispose(): Promise<void>;
94
+ }
95
+
96
+ declare class PythonInferenceEngine implements IInferenceEngine {
97
+ private readonly pythonPath;
98
+ private readonly modelPath;
99
+ private readonly extraArgs;
100
+ readonly runtime: DetectionRuntime;
101
+ readonly device: DetectionDevice;
102
+ private process;
103
+ private receiveBuffer;
104
+ private pendingResolve;
105
+ private pendingReject;
106
+ constructor(pythonPath: string, runtime: DetectionRuntime, modelPath: string, extraArgs?: readonly string[]);
107
+ initialize(): Promise<void>;
108
+ private _tryReceive;
109
+ /** Send JPEG buffer, receive JSON detection results */
110
+ runJpeg(jpeg: Buffer): Promise<Record<string, unknown>>;
111
+ /** IInferenceEngine.run — wraps runJpeg for compatibility */
112
+ run(_input: Float32Array, _inputShape: readonly number[]): Promise<Float32Array>;
113
+ /** IInferenceEngine.runMultiOutput — not supported by Python engine (operates on JPEG input) */
114
+ runMultiOutput(_input: Float32Array, _inputShape: readonly number[]): Promise<Record<string, Float32Array>>;
115
+ dispose(): Promise<void>;
116
+ }
117
+
118
+ interface EngineResolverOptions {
119
+ readonly runtime: DetectionRuntime | 'auto';
120
+ readonly backend: string;
121
+ readonly modelEntry: ModelCatalogEntry;
122
+ readonly modelsDir: string;
123
+ readonly pythonPath?: string;
124
+ readonly downloadModel?: (url: string, destDir: string) => Promise<string>;
125
+ }
126
+ interface ResolvedEngine {
127
+ readonly engine: IInferenceEngine;
128
+ readonly format: ModelFormat;
129
+ readonly modelPath: string;
130
+ }
131
+ declare function resolveEngine(options: EngineResolverOptions): Promise<ResolvedEngine>;
132
+ /** Probe which ONNX execution providers are available on this system */
133
+ declare function probeOnnxBackends(): Promise<string[]>;
134
+
135
+ interface MotionRegion {
136
+ readonly bbox: BoundingBox;
137
+ readonly pixelCount: number;
138
+ readonly intensity: number;
139
+ }
140
+ /**
141
+ * Detect motion by frame differencing.
142
+ *
143
+ * @param current - Grayscale pixel array for the current frame (Uint8Array, length = width * height)
144
+ * @param previous - Grayscale pixel array for the previous frame
145
+ * @param width - Frame width in pixels
146
+ * @param height - Frame height in pixels
147
+ * @param threshold - Pixel diff threshold 0-255; differences below this are ignored
148
+ * @param minArea - Minimum number of changed pixels for a region to be reported
149
+ */
150
+ declare function detectMotion(current: Uint8Array, previous: Uint8Array, width: number, height: number, threshold: number, minArea: number): MotionRegion[];
151
+
152
+ export { type EngineResolverOptions, type MotionRegion, NodeInferenceEngine, PythonInferenceEngine, type ResolvedEngine, cosineSimilarity, cropRegion, ctcDecode, detectMotion, iou, jpegToRgb, l2Normalize, letterbox, nms, probeOnnxBackends, resizeAndNormalize, resolveEngine, rgbToGrayscale, scrfdPostprocess, yamnetPostprocess, yoloPostprocess };