@camstack/addon-vision 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110) hide show
  1. package/dist/addons/animal-classifier/index.js +999 -822
  2. package/dist/addons/animal-classifier/index.js.map +1 -1
  3. package/dist/addons/animal-classifier/index.mjs +242 -7
  4. package/dist/addons/animal-classifier/index.mjs.map +1 -1
  5. package/dist/addons/audio-classification/index.js +501 -378
  6. package/dist/addons/audio-classification/index.js.map +1 -1
  7. package/dist/addons/audio-classification/index.mjs +224 -4
  8. package/dist/addons/audio-classification/index.mjs.map +1 -1
  9. package/dist/addons/bird-global-classifier/index.js +1002 -825
  10. package/dist/addons/bird-global-classifier/index.js.map +1 -1
  11. package/dist/addons/bird-global-classifier/index.mjs +248 -7
  12. package/dist/addons/bird-global-classifier/index.mjs.map +1 -1
  13. package/dist/addons/bird-nabirds-classifier/index.js +1002 -825
  14. package/dist/addons/bird-nabirds-classifier/index.js.map +1 -1
  15. package/dist/addons/bird-nabirds-classifier/index.mjs +289 -7
  16. package/dist/addons/bird-nabirds-classifier/index.mjs.map +1 -1
  17. package/dist/addons/face-detection/index.js +1196 -934
  18. package/dist/addons/face-detection/index.js.map +1 -1
  19. package/dist/addons/face-detection/index.mjs +227 -7
  20. package/dist/addons/face-detection/index.mjs.map +1 -1
  21. package/dist/addons/face-recognition/index.js +1003 -807
  22. package/dist/addons/face-recognition/index.js.map +1 -1
  23. package/dist/addons/face-recognition/index.mjs +197 -6
  24. package/dist/addons/face-recognition/index.mjs.map +1 -1
  25. package/dist/addons/motion-detection/index.js +214 -111
  26. package/dist/addons/motion-detection/index.js.map +1 -1
  27. package/dist/addons/motion-detection/index.mjs +12 -9
  28. package/dist/addons/motion-detection/index.mjs.map +1 -1
  29. package/dist/addons/object-detection/index.js +1287 -1082
  30. package/dist/addons/object-detection/index.js.map +1 -1
  31. package/dist/addons/object-detection/index.mjs +373 -7
  32. package/dist/addons/object-detection/index.mjs.map +1 -1
  33. package/dist/addons/plate-detection/index.js +1075 -868
  34. package/dist/addons/plate-detection/index.js.map +1 -1
  35. package/dist/addons/plate-detection/index.mjs +230 -7
  36. package/dist/addons/plate-detection/index.mjs.map +1 -1
  37. package/dist/addons/plate-recognition/index.js +684 -505
  38. package/dist/addons/plate-recognition/index.js.map +1 -1
  39. package/dist/addons/plate-recognition/index.mjs +244 -5
  40. package/dist/addons/plate-recognition/index.mjs.map +1 -1
  41. package/dist/addons/segmentation-refiner/index.js +967 -790
  42. package/dist/addons/segmentation-refiner/index.js.map +1 -1
  43. package/dist/addons/segmentation-refiner/index.mjs +21 -17
  44. package/dist/addons/segmentation-refiner/index.mjs.map +1 -1
  45. package/dist/addons/vehicle-classifier/index.js +581 -410
  46. package/dist/addons/vehicle-classifier/index.js.map +1 -1
  47. package/dist/addons/vehicle-classifier/index.mjs +20 -16
  48. package/dist/addons/vehicle-classifier/index.mjs.map +1 -1
  49. package/dist/chunk-2YMA6QOV.mjs +193 -0
  50. package/dist/chunk-2YMA6QOV.mjs.map +1 -0
  51. package/dist/chunk-3IIFBJCD.mjs +45 -0
  52. package/dist/chunk-BS4DKYGN.mjs +48 -0
  53. package/dist/{chunk-7DYHXUPZ.mjs.map → chunk-BS4DKYGN.mjs.map} +1 -1
  54. package/dist/chunk-DE7I3VHO.mjs +106 -0
  55. package/dist/{chunk-KUO2BVFY.mjs.map → chunk-DE7I3VHO.mjs.map} +1 -1
  56. package/dist/chunk-F6D2OZ36.mjs +89 -0
  57. package/dist/chunk-F6D2OZ36.mjs.map +1 -0
  58. package/dist/chunk-GAOIFQDX.mjs +59 -0
  59. package/dist/chunk-GAOIFQDX.mjs.map +1 -0
  60. package/dist/chunk-HUIX2XVR.mjs +159 -0
  61. package/dist/chunk-HUIX2XVR.mjs.map +1 -0
  62. package/dist/chunk-K36R6HWY.mjs +51 -0
  63. package/dist/{chunk-XZ6ZMXXU.mjs.map → chunk-K36R6HWY.mjs.map} +1 -1
  64. package/dist/chunk-MBTAI3WE.mjs +78 -0
  65. package/dist/chunk-MBTAI3WE.mjs.map +1 -0
  66. package/dist/chunk-MGT6RUVX.mjs +423 -0
  67. package/dist/{chunk-BP7H4NFS.mjs.map → chunk-MGT6RUVX.mjs.map} +1 -1
  68. package/dist/chunk-PIFS7AIT.mjs +446 -0
  69. package/dist/{chunk-2IOKI4ES.mjs.map → chunk-PIFS7AIT.mjs.map} +1 -1
  70. package/dist/chunk-WG66JYYW.mjs +116 -0
  71. package/dist/{chunk-22BHCDT5.mjs.map → chunk-WG66JYYW.mjs.map} +1 -1
  72. package/dist/chunk-XD7WGXHZ.mjs +82 -0
  73. package/dist/{chunk-DUN6XU3N.mjs.map → chunk-XD7WGXHZ.mjs.map} +1 -1
  74. package/dist/chunk-YYDM6V2F.mjs +113 -0
  75. package/dist/{chunk-BR2FPGOX.mjs.map → chunk-YYDM6V2F.mjs.map} +1 -1
  76. package/dist/chunk-ZK7P3TZN.mjs +286 -0
  77. package/dist/chunk-ZK7P3TZN.mjs.map +1 -0
  78. package/dist/index.js +4443 -3924
  79. package/dist/index.js.map +1 -1
  80. package/dist/index.mjs +2698 -250
  81. package/dist/index.mjs.map +1 -1
  82. package/package.json +1 -1
  83. package/dist/chunk-22BHCDT5.mjs +0 -101
  84. package/dist/chunk-2IOKI4ES.mjs +0 -335
  85. package/dist/chunk-7DYHXUPZ.mjs +0 -36
  86. package/dist/chunk-BJTO5JO5.mjs +0 -11
  87. package/dist/chunk-BP7H4NFS.mjs +0 -412
  88. package/dist/chunk-BR2FPGOX.mjs +0 -98
  89. package/dist/chunk-D6WEHN33.mjs +0 -276
  90. package/dist/chunk-D6WEHN33.mjs.map +0 -1
  91. package/dist/chunk-DRYFGARD.mjs +0 -289
  92. package/dist/chunk-DRYFGARD.mjs.map +0 -1
  93. package/dist/chunk-DUN6XU3N.mjs +0 -72
  94. package/dist/chunk-ESLHNWWE.mjs +0 -387
  95. package/dist/chunk-ESLHNWWE.mjs.map +0 -1
  96. package/dist/chunk-JUQEW6ON.mjs +0 -256
  97. package/dist/chunk-JUQEW6ON.mjs.map +0 -1
  98. package/dist/chunk-KUO2BVFY.mjs +0 -90
  99. package/dist/chunk-R5J3WAUI.mjs +0 -645
  100. package/dist/chunk-R5J3WAUI.mjs.map +0 -1
  101. package/dist/chunk-XZ6ZMXXU.mjs +0 -39
  102. package/dist/chunk-YPU4WTXZ.mjs +0 -269
  103. package/dist/chunk-YPU4WTXZ.mjs.map +0 -1
  104. package/dist/chunk-YUCD2TFH.mjs +0 -242
  105. package/dist/chunk-YUCD2TFH.mjs.map +0 -1
  106. package/dist/chunk-ZTJENCFC.mjs +0 -379
  107. package/dist/chunk-ZTJENCFC.mjs.map +0 -1
  108. package/dist/chunk-ZWYXXCXP.mjs +0 -248
  109. package/dist/chunk-ZWYXXCXP.mjs.map +0 -1
  110. /package/dist/{chunk-BJTO5JO5.mjs.map → chunk-3IIFBJCD.mjs.map} +0 -0
@@ -1,242 +0,0 @@
1
- import {
2
- ANIMAL_TYPE_MODELS
3
- } from "./chunk-DUN6XU3N.mjs";
4
- import {
5
- cropRegion,
6
- resizeAndNormalize
7
- } from "./chunk-22BHCDT5.mjs";
8
- import {
9
- resolveEngine
10
- } from "./chunk-2IOKI4ES.mjs";
11
-
12
- // src/addons/animal-classifier/index.ts
13
- var ANIMAL_TYPE_LABEL = { id: "animal-type", name: "Animal Type" };
14
- var ANIMAL_TYPE_LABELS = [ANIMAL_TYPE_LABEL];
15
- var ANIMAL_CLASS_MAP = { mapping: {}, preserveOriginal: true };
16
- var ANIMAL_10_CLASSES = [
17
- "cat",
18
- "cow",
19
- "dog",
20
- "dolphin",
21
- "eagle",
22
- "giant panda",
23
- "horse",
24
- "monkey",
25
- "sheep",
26
- "spider"
27
- ];
28
- function softmax(logits) {
29
- const max = logits.reduce((a, b) => Math.max(a, b), -Infinity);
30
- const exps = logits.map((v) => Math.exp(v - max));
31
- const sum = exps.reduce((a, b) => a + b, 0);
32
- return exps.map((v) => v / sum);
33
- }
34
- var AnimalClassifierAddon = class {
35
- id = "animal-classifier";
36
- slot = "classifier";
37
- inputClasses = ["animal"];
38
- outputClasses = ["animal-type:*"];
39
- slotPriority = 0;
40
- requiredSteps = [];
41
- manifest = {
42
- id: "animal-classifier",
43
- name: "Animal Classifier",
44
- version: "0.1.0",
45
- description: "ViT-based animal type classifier \u2014 10 common species",
46
- slot: "classifier",
47
- labelOutputType: "classification",
48
- inputClasses: ["animal"],
49
- outputClasses: ["animal-type:*"],
50
- supportsCustomModels: false,
51
- mayRequirePython: false,
52
- defaultConfig: {
53
- modelId: "animals-10",
54
- runtime: "node",
55
- backend: "cpu",
56
- minConfidence: 0.3
57
- }
58
- };
59
- engine = null;
60
- modelEntry;
61
- minConfidence = 0.3;
62
- resolvedConfig = null;
63
- ctx = null;
64
- getModelRequirements() {
65
- return ANIMAL_TYPE_MODELS.map((m) => ({
66
- modelId: m.id,
67
- name: m.name,
68
- minRAM_MB: 800,
69
- accuracyScore: 75,
70
- formats: Object.keys(m.formats)
71
- }));
72
- }
73
- configure(config) {
74
- this.resolvedConfig = config;
75
- }
76
- async initialize(ctx) {
77
- this.ctx = ctx;
78
- const cfg = ctx.addonConfig;
79
- const modelId = cfg["modelId"] ?? this.resolvedConfig?.modelId ?? "animals-10";
80
- this.minConfidence = cfg["minConfidence"] ?? 0.3;
81
- const entry = ANIMAL_TYPE_MODELS.find((m) => m.id === modelId);
82
- if (!entry) {
83
- throw new Error(`AnimalClassifierAddon: unknown modelId "${modelId}"`);
84
- }
85
- this.modelEntry = entry;
86
- }
87
- async classify(input) {
88
- if (!this.engine) await this.ensureEngine();
89
- const start = Date.now();
90
- const { width: inputW, height: inputH } = this.modelEntry.inputSize;
91
- const animalCrop = await cropRegion(input.frame.data, input.roi);
92
- const normalized = await resizeAndNormalize(animalCrop, inputW, inputH, "imagenet", "nchw");
93
- const rawOutput = await this.engine.run(normalized, [1, 3, inputH, inputW]);
94
- const probs = softmax(rawOutput);
95
- let maxIdx = 0;
96
- let maxScore = probs[0] ?? 0;
97
- for (let i = 1; i < probs.length; i++) {
98
- const score = probs[i] ?? 0;
99
- if (score > maxScore) {
100
- maxScore = score;
101
- maxIdx = i;
102
- }
103
- }
104
- if (maxScore < this.minConfidence) {
105
- return {
106
- classifications: [],
107
- inferenceMs: Date.now() - start,
108
- modelId: this.modelEntry.id
109
- };
110
- }
111
- const label = ANIMAL_10_CLASSES[maxIdx] ?? `animal_${maxIdx}`;
112
- return {
113
- classifications: [
114
- {
115
- class: label,
116
- score: maxScore
117
- }
118
- ],
119
- inferenceMs: Date.now() - start,
120
- modelId: this.modelEntry.id
121
- };
122
- }
123
- async ensureEngine() {
124
- const config = this.resolvedConfig;
125
- const modelId = config?.modelId ?? this.modelEntry.id;
126
- const runtime = config?.runtime === "python" ? "coreml" : config?.runtime === "node" ? "onnx" : "auto";
127
- const backend = config?.backend ?? "cpu";
128
- const format = config?.format ?? "onnx";
129
- const entry = ANIMAL_TYPE_MODELS.find((m) => m.id === modelId) ?? this.modelEntry;
130
- this.modelEntry = entry;
131
- const modelsDir = this.ctx.models?.getModelsDir() ?? this.ctx.locationPaths.models;
132
- if (this.ctx.models) {
133
- await this.ctx.models.ensure(modelId, format);
134
- }
135
- const resolved = await resolveEngine({
136
- runtime,
137
- backend,
138
- modelEntry: entry,
139
- modelsDir,
140
- models: this.ctx.models
141
- });
142
- this.engine = resolved.engine;
143
- }
144
- async shutdown() {
145
- await this.engine?.dispose();
146
- }
147
- getConfigSchema() {
148
- return {
149
- sections: [
150
- {
151
- id: "model",
152
- title: "Model",
153
- columns: 1,
154
- fields: [
155
- {
156
- key: "modelId",
157
- label: "Model",
158
- type: "model-selector",
159
- catalog: [...ANIMAL_TYPE_MODELS],
160
- allowCustom: false,
161
- allowConversion: false,
162
- acceptFormats: ["onnx", "coreml", "openvino"],
163
- requiredMetadata: ["inputSize", "labels"],
164
- outputFormatHint: "classification"
165
- }
166
- ]
167
- },
168
- {
169
- id: "runtime",
170
- title: "Runtime",
171
- columns: 2,
172
- fields: [
173
- {
174
- key: "runtime",
175
- label: "Runtime",
176
- type: "select",
177
- options: [
178
- { value: "auto", label: "Auto" },
179
- { value: "onnx", label: "ONNX Runtime" },
180
- { value: "coreml", label: "CoreML (Apple)" },
181
- { value: "openvino", label: "OpenVINO (Intel)" }
182
- ]
183
- },
184
- {
185
- key: "backend",
186
- label: "Backend",
187
- type: "select",
188
- showWhen: { field: "runtime", equals: "onnx" },
189
- options: [
190
- { value: "auto", label: "Auto" },
191
- { value: "cpu", label: "CPU" },
192
- { value: "coreml", label: "CoreML" },
193
- { value: "cuda", label: "CUDA (NVIDIA)" }
194
- ]
195
- }
196
- ]
197
- },
198
- {
199
- id: "thresholds",
200
- title: "Classification Settings",
201
- columns: 1,
202
- fields: [
203
- {
204
- key: "minConfidence",
205
- label: "Minimum Confidence",
206
- type: "slider",
207
- min: 0.05,
208
- max: 1,
209
- step: 0.05,
210
- default: 0.3
211
- }
212
- ]
213
- }
214
- ]
215
- };
216
- }
217
- getClassMap() {
218
- return ANIMAL_CLASS_MAP;
219
- }
220
- getModelCatalog() {
221
- return [...ANIMAL_TYPE_MODELS];
222
- }
223
- getAvailableModels() {
224
- return [];
225
- }
226
- getActiveLabels() {
227
- return ANIMAL_TYPE_LABELS;
228
- }
229
- async probe() {
230
- return {
231
- available: true,
232
- runtime: this.engine?.runtime ?? "onnx",
233
- device: this.engine?.device ?? "cpu",
234
- capabilities: ["fp32"]
235
- };
236
- }
237
- };
238
-
239
- export {
240
- AnimalClassifierAddon
241
- };
242
- //# sourceMappingURL=chunk-YUCD2TFH.mjs.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/addons/animal-classifier/index.ts"],"sourcesContent":["import type {\n IClassifierProvider,\n IDetectionAddon,\n AddonManifest,\n AddonContext,\n CropInput,\n ClassifierOutput,\n ConfigUISchema,\n ClassMapDefinition,\n ProbeResult,\n ModelCatalogEntry,\n DetectionModel,\n LabelDefinition,\n IInferenceEngine,\n ModelRequirement,\n ResolvedInferenceConfig,\n} from '@camstack/types'\nimport { ANIMAL_TYPE_MODELS } from '../../catalogs/animal-classification-models.js'\nimport { cropRegion, resizeAndNormalize } from '../../shared/image-utils.js'\nimport { resolveEngine } from '../../shared/engine-resolver.js'\n\nconst ANIMAL_TYPE_LABEL: LabelDefinition = { id: 'animal-type', name: 'Animal Type' }\nconst ANIMAL_TYPE_LABELS: readonly LabelDefinition[] = [ANIMAL_TYPE_LABEL]\nconst ANIMAL_CLASS_MAP: ClassMapDefinition = { mapping: {}, preserveOriginal: true }\n\nconst ANIMAL_10_CLASSES = [\n 'cat',\n 'cow',\n 'dog',\n 'dolphin',\n 'eagle',\n 'giant panda',\n 'horse',\n 'monkey',\n 'sheep',\n 'spider',\n] as const\n\nfunction softmax(logits: Float32Array): Float32Array {\n const max = logits.reduce((a, b) => Math.max(a, b), -Infinity)\n const exps = logits.map((v) => Math.exp(v - max))\n const sum = exps.reduce((a, b) => a + b, 0)\n return exps.map((v) => v / sum) as unknown as Float32Array\n}\n\nexport default class AnimalClassifierAddon implements IClassifierProvider, IDetectionAddon {\n readonly id = 'animal-classifier'\n readonly slot = 'classifier' as const\n readonly inputClasses = ['animal'] as const\n readonly outputClasses = ['animal-type:*'] as const\n readonly slotPriority = 0\n readonly requiredSteps = [] as const\n readonly manifest: AddonManifest = {\n id: 'animal-classifier',\n name: 'Animal Classifier',\n version: '0.1.0',\n\n description: 'ViT-based animal type classifier — 10 common species',\n\n slot: 'classifier',\n labelOutputType: 'classification',\n inputClasses: ['animal'],\n outputClasses: ['animal-type:*'],\n 
supportsCustomModels: false,\n mayRequirePython: false,\n defaultConfig: {\n modelId: 'animals-10',\n runtime: 'node',\n backend: 'cpu',\n minConfidence: 0.3,\n },\n }\n\n private engine: IInferenceEngine | null = null\n private modelEntry!: ModelCatalogEntry\n private minConfidence = 0.3\n private resolvedConfig: ResolvedInferenceConfig | null = null\n private ctx: AddonContext | null = null\n\n getModelRequirements(): ModelRequirement[] {\n return ANIMAL_TYPE_MODELS.map((m) => ({\n modelId: m.id,\n name: m.name,\n minRAM_MB: 800,\n accuracyScore: 75,\n formats: Object.keys(m.formats) as readonly string[],\n }))\n }\n\n configure(config: ResolvedInferenceConfig): void {\n this.resolvedConfig = config\n }\n\n async initialize(ctx: AddonContext): Promise<void> {\n this.ctx = ctx\n const cfg = ctx.addonConfig\n const modelId = (cfg['modelId'] as string | undefined) ?? this.resolvedConfig?.modelId ?? 'animals-10'\n this.minConfidence = (cfg['minConfidence'] as number | undefined) ?? 0.3\n\n const entry = ANIMAL_TYPE_MODELS.find((m) => m.id === modelId)\n if (!entry) {\n throw new Error(`AnimalClassifierAddon: unknown modelId \"${modelId}\"`)\n }\n this.modelEntry = entry\n }\n\n async classify(input: CropInput): Promise<ClassifierOutput> {\n if (!this.engine) await this.ensureEngine()\n const start = Date.now()\n const { width: inputW, height: inputH } = this.modelEntry.inputSize\n\n // Crop the animal region\n const animalCrop = await cropRegion(input.frame.data, input.roi)\n\n // Resize to 224x224, ImageNet normalization, NCHW\n const normalized = await resizeAndNormalize(animalCrop, inputW, inputH, 'imagenet', 'nchw')\n\n // Run inference — output shape: [1, 10]\n const rawOutput = await this.engine!.run(normalized, [1, 3, inputH, inputW])\n\n // Softmax to get probabilities\n const probs = softmax(rawOutput)\n\n // Find argmax\n let maxIdx = 0\n let maxScore = probs[0] ?? 0\n for (let i = 1; i < probs.length; i++) {\n const score = probs[i] ?? 
0\n if (score > maxScore) {\n maxScore = score\n maxIdx = i\n }\n }\n\n if (maxScore < this.minConfidence) {\n return {\n classifications: [],\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n const label = ANIMAL_10_CLASSES[maxIdx] ?? `animal_${maxIdx}`\n\n return {\n classifications: [\n {\n class: label,\n score: maxScore,\n },\n ],\n inferenceMs: Date.now() - start,\n modelId: this.modelEntry.id,\n }\n }\n\n private async ensureEngine(): Promise<void> {\n const config = this.resolvedConfig\n const modelId = config?.modelId ?? this.modelEntry.id\n const runtime = config?.runtime === 'python' ? 'coreml' : (config?.runtime === 'node' ? 'onnx' : 'auto')\n const backend = config?.backend ?? 'cpu'\n const format = config?.format ?? 'onnx'\n\n const entry = ANIMAL_TYPE_MODELS.find((m) => m.id === modelId) ?? this.modelEntry\n this.modelEntry = entry\n\n const modelsDir = this.ctx!.models?.getModelsDir() ?? this.ctx!.locationPaths.models\n\n if (this.ctx!.models) {\n await this.ctx!.models.ensure(modelId, format as any)\n }\n\n const resolved = await resolveEngine({\n runtime: runtime as 'auto',\n backend,\n modelEntry: entry,\n modelsDir,\n models: this.ctx!.models,\n })\n this.engine = resolved.engine\n }\n\n async shutdown(): Promise<void> {\n await this.engine?.dispose()\n }\n\n getConfigSchema(): ConfigUISchema {\n return {\n sections: [\n {\n id: 'model',\n title: 'Model',\n columns: 1,\n fields: [\n {\n key: 'modelId',\n label: 'Model',\n type: 'model-selector',\n catalog: [...ANIMAL_TYPE_MODELS],\n allowCustom: false,\n allowConversion: false,\n acceptFormats: ['onnx', 'coreml', 'openvino'],\n requiredMetadata: ['inputSize', 'labels'],\n outputFormatHint: 'classification',\n },\n ],\n },\n {\n id: 'runtime',\n title: 'Runtime',\n columns: 2,\n fields: [\n {\n key: 'runtime',\n label: 'Runtime',\n type: 'select',\n options: [\n { value: 'auto', label: 'Auto' },\n { value: 'onnx', label: 'ONNX Runtime' },\n { value: 'coreml', label: 
'CoreML (Apple)' },\n { value: 'openvino', label: 'OpenVINO (Intel)' },\n ],\n },\n {\n key: 'backend',\n label: 'Backend',\n type: 'select',\n showWhen: { field: 'runtime', equals: 'onnx' },\n options: [\n { value: 'auto', label: 'Auto' },\n { value: 'cpu', label: 'CPU' },\n { value: 'coreml', label: 'CoreML' },\n { value: 'cuda', label: 'CUDA (NVIDIA)' },\n ],\n },\n ],\n },\n {\n id: 'thresholds',\n title: 'Classification Settings',\n columns: 1,\n fields: [\n {\n key: 'minConfidence',\n label: 'Minimum Confidence',\n type: 'slider',\n min: 0.05,\n max: 1.0,\n step: 0.05,\n default: 0.3,\n },\n ],\n },\n ],\n }\n }\n\n getClassMap(): ClassMapDefinition {\n return ANIMAL_CLASS_MAP\n }\n\n getModelCatalog(): ModelCatalogEntry[] {\n return [...ANIMAL_TYPE_MODELS]\n }\n\n getAvailableModels(): DetectionModel[] {\n return []\n }\n\n getActiveLabels(): readonly LabelDefinition[] {\n return ANIMAL_TYPE_LABELS\n }\n\n async probe(): Promise<ProbeResult> {\n return {\n available: true,\n runtime: this.engine?.runtime ?? 'onnx',\n device: this.engine?.device ?? 
'cpu',\n capabilities: ['fp32'],\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;AAqBA,IAAM,oBAAqC,EAAE,IAAI,eAAe,MAAM,cAAc;AACpF,IAAM,qBAAiD,CAAC,iBAAiB;AACzE,IAAM,mBAAuC,EAAE,SAAS,CAAC,GAAG,kBAAkB,KAAK;AAEnF,IAAM,oBAAoB;AAAA,EACxB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,SAAS,QAAQ,QAAoC;AACnD,QAAM,MAAM,OAAO,OAAO,CAAC,GAAG,MAAM,KAAK,IAAI,GAAG,CAAC,GAAG,SAAS;AAC7D,QAAM,OAAO,OAAO,IAAI,CAAC,MAAM,KAAK,IAAI,IAAI,GAAG,CAAC;AAChD,QAAM,MAAM,KAAK,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AAC1C,SAAO,KAAK,IAAI,CAAC,MAAM,IAAI,GAAG;AAChC;AAEA,IAAqB,wBAArB,MAA2F;AAAA,EAChF,KAAK;AAAA,EACL,OAAO;AAAA,EACP,eAAe,CAAC,QAAQ;AAAA,EACxB,gBAAgB,CAAC,eAAe;AAAA,EAChC,eAAe;AAAA,EACf,gBAAgB,CAAC;AAAA,EACjB,WAA0B;AAAA,IACjC,IAAI;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IAET,aAAa;AAAA,IAEb,MAAM;AAAA,IACN,iBAAiB;AAAA,IACjB,cAAc,CAAC,QAAQ;AAAA,IACvB,eAAe,CAAC,eAAe;AAAA,IAC/B,sBAAsB;AAAA,IACtB,kBAAkB;AAAA,IAClB,eAAe;AAAA,MACb,SAAS;AAAA,MACT,SAAS;AAAA,MACT,SAAS;AAAA,MACT,eAAe;AAAA,IACjB;AAAA,EACF;AAAA,EAEQ,SAAkC;AAAA,EAClC;AAAA,EACA,gBAAgB;AAAA,EAChB,iBAAiD;AAAA,EACjD,MAA2B;AAAA,EAEnC,uBAA2C;AACzC,WAAO,mBAAmB,IAAI,CAAC,OAAO;AAAA,MACpC,SAAS,EAAE;AAAA,MACX,MAAM,EAAE;AAAA,MACR,WAAW;AAAA,MACX,eAAe;AAAA,MACf,SAAS,OAAO,KAAK,EAAE,OAAO;AAAA,IAChC,EAAE;AAAA,EACJ;AAAA,EAEA,UAAU,QAAuC;AAC/C,SAAK,iBAAiB;AAAA,EACxB;AAAA,EAEA,MAAM,WAAW,KAAkC;AACjD,SAAK,MAAM;AACX,UAAM,MAAM,IAAI;AAChB,UAAM,UAAW,IAAI,SAAS,KAA4B,KAAK,gBAAgB,WAAW;AAC1F,SAAK,gBAAiB,IAAI,eAAe,KAA4B;AAErE,UAAM,QAAQ,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO;AAC7D,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,MAAM,2CAA2C,OAAO,GAAG;AAAA,IACvE;AACA,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,MAAM,SAAS,OAA6C;AAC1D,QAAI,CAAC,KAAK,OAAQ,OAAM,KAAK,aAAa;AAC1C,UAAM,QAAQ,KAAK,IAAI;AACvB,UAAM,EAAE,OAAO,QAAQ,QAAQ,OAAO,IAAI,KAAK,WAAW;AAG1D,UAAM,aAAa,MAAM,WAAW,MAAM,MAAM,MAAM,MAAM,GAAG;AAG/D,UAAM,aAAa,MAAM,mBAAmB,YAAY,QAAQ,QAAQ,YAAY,MAAM;AAG1F,UAAM,YAAY,MAAM,KAAK,OAAQ,IAAI,YAAY,CAAC,GAAG,GAAG,QAAQ,MAAM,CAAC;AAG3E,UAAM,QAAQ,QAAQ,SAAS;AAG/B,QAAI,SAAS;AACb,QAAI,WAAW,MAA
M,CAAC,KAAK;AAC3B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,QAAQ,MAAM,CAAC,KAAK;AAC1B,UAAI,QAAQ,UAAU;AACpB,mBAAW;AACX,iBAAS;AAAA,MACX;AAAA,IACF;AAEA,QAAI,WAAW,KAAK,eAAe;AACjC,aAAO;AAAA,QACL,iBAAiB,CAAC;AAAA,QAClB,aAAa,KAAK,IAAI,IAAI;AAAA,QAC1B,SAAS,KAAK,WAAW;AAAA,MAC3B;AAAA,IACF;AAEA,UAAM,QAAQ,kBAAkB,MAAM,KAAK,UAAU,MAAM;AAE3D,WAAO;AAAA,MACL,iBAAiB;AAAA,QACf;AAAA,UACE,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAAA,MACF;AAAA,MACA,aAAa,KAAK,IAAI,IAAI;AAAA,MAC1B,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EAEA,MAAc,eAA8B;AAC1C,UAAM,SAAS,KAAK;AACpB,UAAM,UAAU,QAAQ,WAAW,KAAK,WAAW;AACnD,UAAM,UAAU,QAAQ,YAAY,WAAW,WAAY,QAAQ,YAAY,SAAS,SAAS;AACjG,UAAM,UAAU,QAAQ,WAAW;AACnC,UAAM,SAAS,QAAQ,UAAU;AAEjC,UAAM,QAAQ,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,OAAO,KAAK,KAAK;AACvE,SAAK,aAAa;AAElB,UAAM,YAAY,KAAK,IAAK,QAAQ,aAAa,KAAK,KAAK,IAAK,cAAc;AAE9E,QAAI,KAAK,IAAK,QAAQ;AACpB,YAAM,KAAK,IAAK,OAAO,OAAO,SAAS,MAAa;AAAA,IACtD;AAEA,UAAM,WAAW,MAAM,cAAc;AAAA,MACnC;AAAA,MACA;AAAA,MACA,YAAY;AAAA,MACZ;AAAA,MACA,QAAQ,KAAK,IAAK;AAAA,IACpB,CAAC;AACD,SAAK,SAAS,SAAS;AAAA,EACzB;AAAA,EAEA,MAAM,WAA0B;AAC9B,UAAM,KAAK,QAAQ,QAAQ;AAAA,EAC7B;AAAA,EAEA,kBAAkC;AAChC,WAAO;AAAA,MACL,UAAU;AAAA,QACR;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS,CAAC,GAAG,kBAAkB;AAAA,cAC/B,aAAa;AAAA,cACb,iBAAiB;AAAA,cACjB,eAAe,CAAC,QAAQ,UAAU,UAAU;AAAA,cAC5C,kBAAkB,CAAC,aAAa,QAAQ;AAAA,cACxC,kBAAkB;AAAA,YACpB;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,SAAS;AAAA,gBACP,EAAE,OAAO,QAAQ,OAAO,OAAO;AAAA,gBAC/B,EAAE,OAAO,QAAQ,OAAO,eAAe;AAAA,gBACvC,EAAE,OAAO,UAAU,OAAO,iBAAiB;AAAA,gBAC3C,EAAE,OAAO,YAAY,OAAO,mBAAmB;AAAA,cACjD;AAAA,YACF;AAAA,YACA;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,UAAU,EAAE,OAAO,WAAW,QAAQ,OAAO;AAAA,cAC7C,SAAS;AAAA,gBACP,EAAE,OAAO,QAAQ,OAAO,OAAO;AAAA,gBAC/B,EAAE,OAAO,OAAO,OAAO,MAAM;AAAA,gBAC7B,EAAE,OAAO,UAAU,OAAO,SAAS;AAAA,gBACnC,EAAE,OAAO,QAA
Q,OAAO,gBAAgB;AAAA,cAC1C;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,QACA;AAAA,UACE,IAAI;AAAA,UACJ,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,OAAO;AAAA,cACP,MAAM;AAAA,cACN,KAAK;AAAA,cACL,KAAK;AAAA,cACL,MAAM;AAAA,cACN,SAAS;AAAA,YACX;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,cAAkC;AAChC,WAAO;AAAA,EACT;AAAA,EAEA,kBAAuC;AACrC,WAAO,CAAC,GAAG,kBAAkB;AAAA,EAC/B;AAAA,EAEA,qBAAuC;AACrC,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,kBAA8C;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,QAA8B;AAClC,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,KAAK,QAAQ,WAAW;AAAA,MACjC,QAAQ,KAAK,QAAQ,UAAU;AAAA,MAC/B,cAAc,CAAC,MAAM;AAAA,IACvB;AAAA,EACF;AACF;","names":[]}
@@ -1,379 +0,0 @@
1
- import {
2
- cropRegion,
3
- resizeAndNormalize
4
- } from "./chunk-22BHCDT5.mjs";
5
- import {
6
- resolveEngine
7
- } from "./chunk-2IOKI4ES.mjs";
8
- import {
9
- __require
10
- } from "./chunk-BJTO5JO5.mjs";
11
-
12
- // src/catalogs/plate-recognition-models.ts
13
- import { hfModelUrl } from "@camstack/types";
14
- var HF_REPO = "camstack/camstack-models";
15
- var PLATE_TEXT_LABELS = [
16
- { id: "text", name: "Plate Text" }
17
- ];
18
- var PLATE_RECOGNITION_MODELS = [
19
- // ── PaddleOCR PP-OCRv5 ────────────────────────────────────────
20
- {
21
- id: "paddleocr-latin",
22
- name: "PaddleOCR Latin",
23
- description: "PaddleOCR PP-OCRv5 recognition model for Latin-script license plates",
24
- inputSize: { width: 320, height: 48 },
25
- labels: PLATE_TEXT_LABELS,
26
- formats: {
27
- // ONNX only — PaddleOCR has dynamic dimensions incompatible with CoreML native conversion.
28
- // On Apple Silicon, ONNX Runtime uses CoreML EP automatically for acceleration.
29
- onnx: {
30
- url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/onnx/camstack-paddleocr-latin-rec.onnx"),
31
- sizeMB: 7.5
32
- },
33
- openvino: {
34
- url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/openvino/camstack-paddleocr-latin.xml"),
35
- sizeMB: 4,
36
- runtimes: ["python"]
37
- }
38
- },
39
- extraFiles: [
40
- {
41
- url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/onnx/camstack-paddleocr-latin-dict.txt"),
42
- filename: "camstack-paddleocr-latin-dict.txt",
43
- sizeMB: 0.01
44
- }
45
- ]
46
- },
47
- {
48
- id: "paddleocr-en",
49
- name: "PaddleOCR English",
50
- description: "PaddleOCR PP-OCRv5 recognition model optimized for English license plates",
51
- inputSize: { width: 320, height: 48 },
52
- labels: PLATE_TEXT_LABELS,
53
- formats: {
54
- onnx: {
55
- url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/onnx/camstack-paddleocr-en-rec.onnx"),
56
- sizeMB: 7.5
57
- },
58
- openvino: {
59
- url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/openvino/camstack-paddleocr-en.xml"),
60
- sizeMB: 4,
61
- runtimes: ["python"]
62
- }
63
- },
64
- extraFiles: [
65
- {
66
- url: hfModelUrl(HF_REPO, "plateRecognition/paddleocr/onnx/camstack-paddleocr-en-dict.txt"),
67
- filename: "camstack-paddleocr-en-dict.txt",
68
- sizeMB: 0.01
69
- }
70
- ]
71
- },
72
- // ── CRNN-MobileNetV3 (via OnnxTR/docTR) ─────────────────────────
73
- // Simple CNN+LSTM+CTC architecture — good CoreML compatibility (no dynamic ops)
74
- {
75
- id: "crnn-mobilenet-v3-small",
76
- name: "CRNN MobileNet V3 Small",
77
- description: "CRNN MobileNetV3-Small \u2014 lightweight text recognition, CoreML compatible via OnnxTR",
78
- inputSize: { width: 128, height: 32 },
79
- labels: PLATE_TEXT_LABELS,
80
- formats: {
81
- onnx: {
82
- url: hfModelUrl(HF_REPO, "plateRecognition/crnn-mobilenet/onnx/camstack-crnn-mobilenet-v3-small.onnx"),
83
- sizeMB: 8
84
- }
85
- },
86
- extraFiles: [
87
- {
88
- url: hfModelUrl(HF_REPO, "plateRecognition/crnn-mobilenet/camstack-crnn-mobilenet-charset.txt"),
89
- filename: "camstack-crnn-mobilenet-charset.txt",
90
- sizeMB: 0.01
91
- }
92
- ]
93
- },
94
- {
95
- id: "crnn-mobilenet-v3-large",
96
- name: "CRNN MobileNet V3 Large",
97
- description: "CRNN MobileNetV3-Large \u2014 higher accuracy text recognition, CoreML compatible",
98
- inputSize: { width: 128, height: 32 },
99
- labels: PLATE_TEXT_LABELS,
100
- formats: {
101
- onnx: {
102
- url: hfModelUrl(HF_REPO, "plateRecognition/crnn-mobilenet/onnx/camstack-crnn-mobilenet-v3-large.onnx"),
103
- sizeMB: 17
104
- }
105
- },
106
- extraFiles: [
107
- {
108
- url: hfModelUrl(HF_REPO, "plateRecognition/crnn-mobilenet/camstack-crnn-mobilenet-charset.txt"),
109
- filename: "camstack-crnn-mobilenet-charset.txt",
110
- sizeMB: 0.01
111
- }
112
- ]
113
- }
114
- ];
115
-
116
- // src/shared/postprocess/paddleocr.ts
117
- function ctcDecode(output, seqLen, numChars, charset) {
118
- let totalLogScore = 0;
119
- const rawIndices = [];
120
- for (let t = 0; t < seqLen; t++) {
121
- const offset = t * numChars;
122
- let bestIdx = 0;
123
- let bestVal = output[offset];
124
- for (let c = 1; c < numChars; c++) {
125
- const val = output[offset + c];
126
- if (val > bestVal) {
127
- bestVal = val;
128
- bestIdx = c;
129
- }
130
- }
131
- rawIndices.push(bestIdx);
132
- totalLogScore += bestVal;
133
- }
134
- const collapsed = [];
135
- for (let i = 0; i < rawIndices.length; i++) {
136
- const cur = rawIndices[i];
137
- if (i === 0 || cur !== rawIndices[i - 1]) {
138
- collapsed.push(cur);
139
- }
140
- }
141
- const filtered = collapsed.filter((idx) => idx !== 0);
142
- const text = filtered.map((idx) => charset[idx] ?? "").join("");
143
- const confidence = seqLen > 0 ? totalLogScore / seqLen : 0;
144
- return { text, confidence };
145
- }
146
-
147
- // src/addons/plate-recognition/index.ts
148
- import * as fs from "fs";
149
- import * as path from "path";
150
- var PLATE_TEXT_LABEL = { id: "plate-text", name: "Plate Text" };
151
- var PLATE_TEXT_LABELS2 = [PLATE_TEXT_LABEL];
152
- var PLATE_REC_CLASS_MAP = { mapping: {}, preserveOriginal: true };
153
- function loadCharset(modelsDir, modelId) {
154
- const dictNames = [
155
- `camstack-${modelId}-dict.txt`,
156
- `camstack-paddleocr-latin-dict.txt`,
157
- `camstack-paddleocr-en-dict.txt`,
158
- `camstack-crnn-mobilenet-charset.txt`
159
- ];
160
- for (const name of dictNames) {
161
- const dictPath = path.join(modelsDir, name);
162
- if (fs.existsSync(dictPath)) {
163
- const lines = fs.readFileSync(dictPath, "utf-8").split("\n").filter((l) => l.length > 0);
164
- return ["", ...lines, " "];
165
- }
166
- }
167
- throw new Error(`PlateRecognitionAddon: dict.txt not found in ${modelsDir}`);
168
- }
169
- var REQUIRED_STEPS = [
170
- { slot: "cropper", outputClasses: ["plate"], description: "Requires a plate detector" }
171
- ];
172
// License-plate OCR addon for the camstack "classifier" slot.
//
// Receives cropped "plate" regions from an upstream detector, runs a
// PaddleOCR-style recognition model via a lazily-created inference engine,
// and CTC-decodes the raw output into a "plate-text" classification.
var PlateRecognitionAddon = class {
  id = "plate-recognition";
  slot = "classifier";
  inputClasses = ["plate"];
  outputClasses = ["plate-text:*"];
  slotPriority = 0;
  requiredSteps = REQUIRED_STEPS;
  // Static addon manifest consumed by the host (catalog/UI metadata).
  manifest = {
    id: "plate-recognition",
    name: "License Plate Recognition (OCR)",
    version: "0.1.0",
    description: "PaddleOCR-based license plate text recognition",
    slot: "classifier",
    labelOutputType: "plate",
    inputClasses: ["plate"],
    outputClasses: ["plate-text:*"],
    requiredSteps: REQUIRED_STEPS,
    supportsCustomModels: false,
    mayRequirePython: false,
    defaultConfig: {
      modelId: "paddleocr-latin",
      runtime: "node",
      backend: "cpu",
      minConfidence: 0.5
    }
  };
  // Inference engine; null until ensureEngine() runs on the first classify().
  engine = null;
  // Selected entry from PLATE_RECOGNITION_MODELS; set in initialize().
  modelEntry;
  // NOTE(review): read from config in initialize() but never applied as a
  // filter in classify() — presumably downstream consumers threshold on
  // `score`; confirm this is intentional.
  minConfidence = 0.5;
  // CTC decoding alphabet loaded from the model's dict file (blank + chars + space).
  charset = [];
  // Config passed via configure(); used as a fallback source in initialize()/ensureEngine().
  resolvedConfig = null;
  // Host-provided context (addonConfig, models manager, location paths).
  ctx = null;
  // Advertise per-model resource/accuracy hints for the host's model picker.
  // Unknown ids fall back to 100 MB RAM / accuracy 75.
  getModelRequirements() {
    const scores = {
      "paddleocr-latin": { ram: 100, accuracy: 80 },
      "paddleocr-en": { ram: 100, accuracy: 80 }
    };
    return PLATE_RECOGNITION_MODELS.map((m) => ({
      modelId: m.id,
      name: m.name,
      minRAM_MB: scores[m.id]?.ram ?? 100,
      accuracyScore: scores[m.id]?.accuracy ?? 75,
      formats: Object.keys(m.formats)
    }));
  }
  // Store the resolved configuration; applied later in initialize()/ensureEngine().
  configure(config) {
    this.resolvedConfig = config;
  }
  // Resolve the model entry from config. Engine creation is deferred to the
  // first classify() call (see ensureEngine()).
  async initialize(ctx) {
    this.ctx = ctx;
    const cfg = ctx.addonConfig;
    // Precedence: live addon config > configure() value > built-in default.
    const modelId = cfg["modelId"] ?? this.resolvedConfig?.modelId ?? "paddleocr-latin";
    this.minConfidence = cfg["minConfidence"] ?? 0.5;
    const entry = PLATE_RECOGNITION_MODELS.find((m) => m.id === modelId);
    if (!entry) {
      throw new Error(`PlateRecognitionAddon: unknown modelId "${modelId}"`);
    }
    this.modelEntry = entry;
  }
  // Run OCR on one detected plate region.
  // Crops `input.roi` out of the frame, resizes/normalizes to the model's
  // input size (NCHW, [0,1]), runs inference, and CTC-decodes to text.
  async classify(input) {
    if (!this.engine) await this.ensureEngine();
    const start = Date.now();
    const { width: inputW, height: inputH } = this.modelEntry.inputSize;
    // NOTE(review): debug logging left in shipped code.
    console.log(`[plate-recognition] ROI: x=${input.roi?.x}, y=${input.roi?.y}, w=${input.roi?.w}, h=${input.roi?.h}, frameSize=${input.frame?.data?.length}`);
    const plateCrop = await cropRegion(input.frame.data, input.roi);
    console.log(`[plate-recognition] Crop size: ${plateCrop.length} bytes`);
    // NOTE(review): debug residue — best-effort dump of the crop to /tmp;
    // failures are deliberately ignored. Consider removing for release.
    try {
      __require("fs").writeFileSync("/tmp/plate-recognition-crop.jpg", plateCrop);
    } catch {
    }
    const normalized = await resizeAndNormalize(plateCrop, inputW, inputH, "zero-one", "nchw");
    const output = await this.engine.run(normalized, [1, 3, inputH, inputW]);
    // Model output is a flat (seqLen x numChars) logit/probability grid;
    // derive seqLen from the charset size loaded in ensureEngine().
    const numChars = this.charset.length;
    const seqLen = output.length / numChars;
    const { text, confidence } = ctcDecode(output, seqLen, numChars, this.charset);
    return {
      classifications: [
        {
          class: "plate-text",
          score: confidence,
          text: text.trim() || "(unreadable)"
        }
      ],
      inferenceMs: Date.now() - start,
      modelId: this.modelEntry.id
    };
  }
  // Lazily create the inference engine: resolve model/runtime/backend from
  // config, download the model if a models manager is available, load the
  // CTC charset, and instantiate the engine via resolveEngine().
  async ensureEngine() {
    const config = this.resolvedConfig;
    const modelId = config?.modelId ?? this.modelEntry.id;
    // NOTE(review): config runtime "python" maps to "coreml" and "node" to
    // "onnx"; anything else means auto-select — confirm against resolveEngine().
    const runtime = config?.runtime === "python" ? "coreml" : config?.runtime === "node" ? "onnx" : "auto";
    const backend = config?.backend ?? "cpu";
    const format = config?.format ?? "onnx";
    const entry = PLATE_RECOGNITION_MODELS.find((m) => m.id === modelId) ?? this.modelEntry;
    this.modelEntry = entry;
    const modelsDir = this.ctx.models?.getModelsDir() ?? this.ctx.locationPaths.models;
    if (this.ctx.models) {
      // Ensure the model files exist locally (download/convert as needed).
      await this.ctx.models.ensure(modelId, format);
    }
    this.charset = loadCharset(modelsDir, modelId);
    const resolved = await resolveEngine({
      runtime,
      backend,
      modelEntry: entry,
      modelsDir,
      models: this.ctx.models
    });
    this.engine = resolved.engine;
  }
  // Release engine resources; safe to call when the engine was never created.
  async shutdown() {
    await this.engine?.dispose();
  }
  // Declarative settings UI: model selection, runtime/backend choice
  // (backend shown only for the ONNX runtime), and a confidence slider.
  getConfigSchema() {
    return {
      sections: [
        {
          id: "model",
          title: "Model",
          columns: 1,
          fields: [
            {
              key: "modelId",
              label: "Model",
              type: "model-selector",
              catalog: [...PLATE_RECOGNITION_MODELS],
              allowCustom: false,
              allowConversion: false,
              acceptFormats: ["onnx", "openvino"],
              requiredMetadata: ["inputSize", "labels", "outputFormat"],
              outputFormatHint: "ocr"
            }
          ]
        },
        {
          id: "runtime",
          title: "Runtime",
          columns: 2,
          fields: [
            {
              key: "runtime",
              label: "Runtime",
              type: "select",
              options: [
                { value: "auto", label: "Auto" },
                { value: "onnx", label: "ONNX Runtime" },
                { value: "openvino", label: "OpenVINO (Intel)" }
              ]
            },
            {
              key: "backend",
              label: "Backend",
              type: "select",
              showWhen: { field: "runtime", equals: "onnx" },
              options: [
                { value: "auto", label: "Auto" },
                { value: "cpu", label: "CPU" },
                { value: "cuda", label: "CUDA (NVIDIA)" }
              ]
            }
          ]
        },
        {
          id: "thresholds",
          title: "Recognition Settings",
          columns: 1,
          fields: [
            {
              key: "minConfidence",
              label: "Minimum Confidence",
              type: "slider",
              min: 0.1,
              max: 1,
              step: 0.05,
              default: 0.5
            }
          ]
        }
      ]
    };
  }
  getClassMap() {
    return PLATE_REC_CLASS_MAP;
  }
  // Copy of the built-in model catalog (spread prevents caller mutation).
  getModelCatalog() {
    return [...PLATE_RECOGNITION_MODELS];
  }
  // No dynamically discovered models beyond the static catalog.
  getAvailableModels() {
    return [];
  }
  getActiveLabels() {
    return PLATE_TEXT_LABELS2;
  }
  // Report runtime availability; falls back to onnx/cpu defaults when the
  // engine has not been created yet.
  async probe() {
    return {
      available: true,
      runtime: this.engine?.runtime ?? "onnx",
      device: this.engine?.device ?? "cpu",
      capabilities: ["fp32"]
    };
  }
};
373

// Bundler-generated shared-chunk exports.
export {
  ctcDecode,
  PLATE_RECOGNITION_MODELS,
  PlateRecognitionAddon
};
//# sourceMappingURL=chunk-ZTJENCFC.mjs.map