@camstack/types 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,805 @@
1
"use strict";
// esbuild CommonJS-interop helpers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Install a lazy, enumerable getter on `target` for every entry of `all`
// (each value of `all` is a zero-arg thunk returning the export).
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Copy the own properties of `from` onto `to` as live getters, skipping
// `except` and anything `to` already owns; enumerability of the source
// descriptor is preserved (absent descriptor counts as enumerable).
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      desc = __getOwnPropDesc(from, key);
      __defProp(to, key, {
        get: () => from[key],
        enumerable: !desc || desc.enumerable,
      });
    }
  }
  return to;
};

// Wrap a module namespace object for CommonJS consumers: mark it as an ES
// module and expose all of its members as live getters.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
// src/catalogs/index.ts
// Aggregated export namespace for every model catalog and label map in this
// package. `__export` installs lazy getters, so the `var` declarations that
// appear later in this file are resolved at property-access time (after the
// whole module body has executed), not at this point.
var catalogs_exports = {};
__export(catalogs_exports, {
  ANIMAL_TYPE_MODELS: () => ANIMAL_TYPE_MODELS,
  AUDIO_CLASSIFICATION_MODELS: () => AUDIO_CLASSIFICATION_MODELS,
  BIRD_NABIRDS_MODELS: () => BIRD_NABIRDS_MODELS,
  BIRD_SPECIES_MODELS: () => BIRD_SPECIES_MODELS,
  COCO_80_LABELS: () => COCO_80_LABELS,
  COCO_TO_MACRO: () => COCO_TO_MACRO,
  FACE_DETECTION_MODELS: () => FACE_DETECTION_MODELS,
  FACE_RECOGNITION_MODELS: () => FACE_RECOGNITION_MODELS,
  MACRO_LABELS: () => MACRO_LABELS,
  OBJECT_DETECTION_MODELS: () => OBJECT_DETECTION_MODELS,
  PLATE_DETECTION_MODELS: () => PLATE_DETECTION_MODELS,
  PLATE_RECOGNITION_MODELS: () => PLATE_RECOGNITION_MODELS,
  SEGMENTATION_MODELS: () => SEGMENTATION_MODELS
});
// CommonJS entry point: the namespace wrapped with an __esModule marker.
module.exports = __toCommonJS(catalogs_exports);
38
+
39
// src/catalogs/coco-classmap.ts
// The 80 standard COCO detection classes, in canonical COCO order. Display
// names are derived from the ids by title-casing each word; "tv" is the one
// id whose display name ("TV") does not follow that rule.
var COCO_80_LABELS = [
  "person", "bicycle", "car", "motorcycle", "airplane", "bus", "train",
  "truck", "boat", "traffic light", "fire hydrant", "stop sign",
  "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
  "elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag",
  "tie", "suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite",
  "baseball bat", "baseball glove", "skateboard", "surfboard",
  "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon",
  "bowl", "banana", "apple", "sandwich", "orange", "broccoli", "carrot",
  "hot dog", "pizza", "donut", "cake", "chair", "couch", "potted plant",
  "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote",
  "keyboard", "cell phone", "microwave", "oven", "toaster", "sink",
  "refrigerator", "book", "clock", "vase", "scissors", "teddy bear",
  "hair drier", "toothbrush"
].map((id) => ({
  id,
  name: id === "tv" ? "TV" : id.replace(/(^|\s)\S/g, (c) => c.toUpperCase()),
}));

// Coarse-grained "macro" classes used when per-class detail is not wanted.
var MACRO_LABELS = ["person", "vehicle", "animal"].map((id) => ({
  id,
  name: id[0].toUpperCase() + id.slice(1),
}));

// Projection from COCO class ids onto the macro classes above. COCO ids not
// listed here have no macro equivalent. `preserveOriginal` keeps the original
// fine-grained label alongside the macro label.
var COCO_TO_MACRO = {
  mapping: Object.fromEntries([
    ["person", "person"],
    ...["bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat"]
      .map((id) => [id, "vehicle"]),
    ...["bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe"]
      .map((id) => [id, "animal"]),
  ]),
  preserveOriginal: true,
};
151
+
152
// src/utils/hf-url.ts
/**
 * Build the direct-download URL for a file inside a Hugging Face repo,
 * pinned to the `main` revision.
 * @param {string} repo - owner/name slug, e.g. "camstack/camstack-models"
 * @param {string} path - file path within the repo
 * @returns {string} fully-qualified `resolve/main` download URL
 */
function hfModelUrl(repo, path) {
  return ["https://huggingface.co", repo, "resolve", "main", path].join("/");
}
156
+
157
// src/catalogs/object-detection-models.ts
var HF_REPO = "camstack/camstack-models";

// Build one detection-catalog entry. `family` is the directory segment on
// the HF repo (e.g. "yolov8"); `sizesMB` maps format -> download size in MB
// and also determines which formats the entry advertises. All detectors use
// the standard 640x640 input and the 80-class COCO label set.
var detectionModel = (family, id, name, description, sizesMB) => {
  const files = {
    onnx: `onnx/camstack-${id}.onnx`,
    coreml: `coreml/camstack-${id}.mlpackage`,
    openvino: `openvino/camstack-${id}.xml`,
    tflite: `tflite/camstack-${id}_float32.tflite`,
  };
  const formats = {};
  for (const fmt of Object.keys(sizesMB)) {
    formats[fmt] = {
      url: hfModelUrl(HF_REPO, `objectDetection/${family}/${files[fmt]}`),
      sizeMB: sizesMB[fmt],
    };
  }
  return {
    id,
    name,
    description,
    inputSize: { width: 640, height: 640 },
    labels: COCO_80_LABELS,
    formats,
  };
};

var OBJECT_DETECTION_MODELS = [
  detectionModel("yolov8", "yolov8n", "YOLOv8 Nano", "YOLOv8 Nano \u2014 fastest, smallest object detection model", { onnx: 12, coreml: 6, openvino: 7, tflite: 12 }),
  detectionModel("yolov8", "yolov8s", "YOLOv8 Small", "YOLOv8 Small \u2014 balanced speed and accuracy", { onnx: 43, coreml: 21, openvino: 22, tflite: 43 }),
  detectionModel("yolov8", "yolov8m", "YOLOv8 Medium", "YOLOv8 Medium \u2014 higher accuracy, moderate size", { onnx: 99, coreml: 49, openvino: 50, tflite: 99 }),
  detectionModel("yolov9", "yolov9t", "YOLOv9 Tiny", "YOLOv9 Tiny \u2014 ultra-lightweight next-gen detector", { onnx: 8, coreml: 4, openvino: 6, tflite: 8 }),
  detectionModel("yolov9", "yolov9s", "YOLOv9 Small", "YOLOv9 Small \u2014 improved efficiency over YOLOv8s", { onnx: 28, coreml: 14, openvino: 16, tflite: 28 }),
  detectionModel("yolov9", "yolov9c", "YOLOv9 C", "YOLOv9 C \u2014 high-accuracy compact model", { onnx: 97, coreml: 48, openvino: 49, tflite: 97 }),
  // YOLO11 — no CoreML (coremltools incompatible)
  detectionModel("yolo11", "yolo11n", "YOLO11 Nano", "YOLO11 Nano \u2014 fastest, smallest YOLO11 detection model", { onnx: 10, openvino: 5.4, tflite: 10 }),
  detectionModel("yolo11", "yolo11s", "YOLO11 Small", "YOLO11 Small \u2014 balanced speed and accuracy", { onnx: 36, openvino: 18, tflite: 36 }),
  detectionModel("yolo11", "yolo11m", "YOLO11 Medium", "YOLO11 Medium \u2014 higher accuracy, moderate size", { onnx: 77, openvino: 39, tflite: 77 }),
  detectionModel("yolo11", "yolo11l", "YOLO11 Large", "YOLO11 Large \u2014 high-accuracy large model", { onnx: 97, openvino: 49, tflite: 97 }),
  detectionModel("yolo11", "yolo11x", "YOLO11 Extra-Large", "YOLO11 Extra-Large \u2014 maximum accuracy", { onnx: 218, openvino: 109, tflite: 218 }),
];
417
+
418
// src/catalogs/face-detection-models.ts
var HF_REPO2 = "camstack/camstack-models";
var FACE_LABELS = [{ id: "face", name: "Face" }];

// Build one SCRFD catalog entry; every SCRFD variant ships the same three
// formats (onnx/coreml/openvino) under faceDetection/scrfd/ with a 640x640
// input. `sizes` maps format -> download size in MB.
var scrfdModel = (id, name, description, sizes) => ({
  id,
  name,
  description,
  inputSize: { width: 640, height: 640 },
  labels: FACE_LABELS,
  formats: {
    onnx: {
      url: hfModelUrl(HF_REPO2, `faceDetection/scrfd/onnx/camstack-${id}.onnx`),
      sizeMB: sizes.onnx,
    },
    coreml: {
      url: hfModelUrl(HF_REPO2, `faceDetection/scrfd/coreml/camstack-${id}.mlpackage`),
      sizeMB: sizes.coreml,
    },
    openvino: {
      url: hfModelUrl(HF_REPO2, `faceDetection/scrfd/openvino/camstack-${id}.xml`),
      sizeMB: sizes.openvino,
    },
  },
});

var FACE_DETECTION_MODELS = [
  scrfdModel("scrfd-500m", "SCRFD 500M", "SCRFD 500M \u2014 ultra-lightweight face detector", { onnx: 2.2, coreml: 1.2, openvino: 1.3 }),
  scrfdModel("scrfd-2.5g", "SCRFD 2.5G", "SCRFD 2.5G \u2014 balanced face detection model", { onnx: 3.1, coreml: 1.7, openvino: 1.8 }),
  scrfdModel("scrfd-10g", "SCRFD 10G", "SCRFD 10G \u2014 high-accuracy face detector", { onnx: 16, coreml: 8.2, openvino: 8.3 }),
];
488
+
489
// src/catalogs/face-recognition-models.ts
var HF_REPO3 = "camstack/camstack-models";
var FACE_EMBEDDING_LABELS = [
  { id: "embedding", name: "Face Embedding" }
];
// Face-embedding models: the output is a feature vector for identity
// matching rather than a detection. Input is a 112x112 image (presumably an
// aligned face crop — not verifiable from this file).
var FACE_RECOGNITION_MODELS = [
  {
    id: "arcface-r100",
    name: "ArcFace R100",
    description: "ArcFace ResNet-100 \u2014 high-accuracy face recognition embeddings",
    inputSize: { width: 112, height: 112 },
    // NOTE(review): this is the only catalog in the bundle that declares
    // inputLayout; confirm whether the other catalogs intentionally rely on
    // a default layout.
    inputLayout: "nhwc",
    labels: FACE_EMBEDDING_LABELS,
    formats: {
      onnx: {
        // NOTE(review): filename "camstack-arcface-arcface.onnx" is
        // inconsistent with the coreml/openvino assets below, which use
        // "camstack-arcface-r100.*". Verify against the actual file name on
        // the Hugging Face repo — possible typo, but do not change blindly.
        url: hfModelUrl(HF_REPO3, "faceRecognition/arcface/onnx/camstack-arcface-arcface.onnx"),
        sizeMB: 130
      },
      coreml: {
        url: hfModelUrl(HF_REPO3, "faceRecognition/arcface/coreml/camstack-arcface-r100.mlpackage"),
        sizeMB: 65
      },
      openvino: {
        url: hfModelUrl(HF_REPO3, "faceRecognition/arcface/openvino/camstack-arcface-r100.xml"),
        sizeMB: 65
      }
    }
  }
];
518
+
519
// src/catalogs/plate-detection-models.ts
var HF_REPO4 = "camstack/camstack-models";
var PLATE_LABELS = [{ id: "plate", name: "License Plate" }];

// Shorthand for one downloadable asset under the plate-detection directory.
var plateAsset = (file, sizeMB) => ({
  url: hfModelUrl(HF_REPO4, `plateDetection/yolov8-plate/${file}`),
  sizeMB,
});

var PLATE_DETECTION_MODELS = [
  {
    id: "yolov8n-plate",
    name: "YOLOv8 Nano \u2014 License Plate",
    description: "YOLOv8 Nano fine-tuned for license plate detection",
    inputSize: { width: 640, height: 640 },
    labels: PLATE_LABELS,
    formats: {
      onnx: plateAsset("onnx/camstack-yolov8n-plate.onnx", 12),
      coreml: plateAsset("coreml/camstack-yolov8n-plate.mlpackage", 5.9),
      openvino: plateAsset("openvino/camstack-yolov8n-plate.xml", 6.1),
      tflite: plateAsset("tflite/camstack-yolov8n-plate_float32.tflite", 12),
    },
  },
];
551
+
552
// src/catalogs/plate-recognition-models.ts
var HF_REPO5 = "camstack/camstack-models";
var PLATE_TEXT_LABELS = [{ id: "text", name: "Plate Text" }];

// Build one PaddleOCR recognition entry for a script `variant` ("latin",
// "en", ...). Note the asymmetric file naming on the repo: the onnx asset
// carries a "-rec" suffix while the openvino asset does not.
var paddleOcrModel = (variant, name, description) => ({
  id: `paddleocr-${variant}`,
  name,
  description,
  inputSize: { width: 320, height: 48 },
  labels: PLATE_TEXT_LABELS,
  formats: {
    onnx: {
      url: hfModelUrl(HF_REPO5, `plateRecognition/paddleocr/onnx/camstack-paddleocr-${variant}-rec.onnx`),
      sizeMB: 7.5,
    },
    openvino: {
      url: hfModelUrl(HF_REPO5, `plateRecognition/paddleocr/openvino/camstack-paddleocr-${variant}.xml`),
      sizeMB: 4,
    },
  },
});

var PLATE_RECOGNITION_MODELS = [
  paddleOcrModel("latin", "PaddleOCR Latin", "PaddleOCR recognition model for Latin-script license plates"),
  paddleOcrModel("en", "PaddleOCR English", "PaddleOCR recognition model optimized for English license plates"),
];
593
+
594
// src/catalogs/audio-classification-models.ts
var HF_REPO6 = "camstack/camstack-models";
var AUDIO_LABELS = [{ id: "audio", name: "Audio Event" }];

var AUDIO_CLASSIFICATION_MODELS = [
  {
    id: "yamnet",
    name: "YAMNet",
    description: "YAMNet \u2014 audio event classification from raw waveform",
    // 1 x 16000: a 16k-sample mono waveform (presumably 16 kHz — confirm
    // against the model card; this file only fixes the sample count).
    inputSize: { width: 1, height: 16e3 },
    labels: AUDIO_LABELS,
    formats: {
      onnx: {
        url: hfModelUrl(HF_REPO6, "audioClassification/yamnet/onnx/camstack-yamnet.onnx"),
        sizeMB: 15,
      },
      openvino: {
        url: hfModelUrl(HF_REPO6, "audioClassification/yamnet/openvino/camstack-yamnet.xml"),
        sizeMB: 8,
      },
    },
  },
];
618
+
619
// src/catalogs/segmentation-models.ts
var HF_REPO7 = "camstack/camstack-models";

// Build one instance-segmentation entry. `family` is the directory segment
// under segmentation/ on the HF repo; `sizes` maps format -> download size
// in MB and determines which formats are advertised. All entries use the
// 640x640 input and the 80-class COCO label set.
var segModel = (family, id, name, description, sizes) => {
  const files = {
    onnx: `onnx/camstack-${id}.onnx`,
    coreml: `coreml/camstack-${id}.mlpackage`,
    openvino: `openvino/camstack-${id}.xml`,
  };
  const formats = {};
  for (const fmt of Object.keys(sizes)) {
    formats[fmt] = {
      url: hfModelUrl(HF_REPO7, `segmentation/${family}/${files[fmt]}`),
      sizeMB: sizes[fmt],
    };
  }
  return {
    id,
    name,
    description,
    inputSize: { width: 640, height: 640 },
    labels: COCO_80_LABELS,
    formats,
  };
};

var SEGMENTATION_MODELS = [
  // YOLO11-seg — no CoreML (coremltools incompatible)
  segModel("yolo11-seg", "yolo11n-seg", "YOLO11 Nano Segmentation", "YOLO11 Nano \u2014 fastest, smallest YOLO11 instance segmentation model", { onnx: 11, openvino: 6 }),
  segModel("yolo11-seg", "yolo11s-seg", "YOLO11 Small Segmentation", "YOLO11 Small \u2014 balanced speed and accuracy for instance segmentation", { onnx: 39, openvino: 20 }),
  segModel("yolo11-seg", "yolo11m-seg", "YOLO11 Medium Segmentation", "YOLO11 Medium \u2014 higher accuracy instance segmentation", { onnx: 86, openvino: 43 }),
  // YOLOv8-seg — CoreML available
  segModel("yolov8-seg", "yolov8n-seg", "YOLOv8 Nano Segmentation", "YOLOv8 Nano \u2014 fastest, smallest YOLOv8 instance segmentation model", { onnx: 13, coreml: 7, openvino: 7 }),
  segModel("yolov8-seg", "yolov8s-seg", "YOLOv8 Small Segmentation", "YOLOv8 Small \u2014 balanced speed and accuracy for instance segmentation", { onnx: 45, coreml: 23, openvino: 23 }),
  segModel("yolov8-seg", "yolov8m-seg", "YOLOv8 Medium Segmentation", "YOLOv8 Medium \u2014 higher accuracy instance segmentation", { onnx: 104, coreml: 52, openvino: 53 }),
];
739
+
740
// src/constants.ts
// Canonical Hugging Face repo hosting all camstack model assets; reused by
// the animal-classification catalog below via the `hf` helper.
var HF_REPO8 = "camstack/camstack-models";
// Base URL for direct asset downloads. NOTE(review): not referenced
// elsewhere in this bundle chunk and not in the export map above — possibly
// kept for downstream consumers; confirm before removing.
var HF_BASE_URL = `https://huggingface.co/${HF_REPO8}/resolve/main`;
743
+
744
// src/catalogs/animal-classification-models.ts
// Catalogs of animal/bird image classifiers hosted on the shared HF repo.
var hf = (path) => hfModelUrl(HF_REPO8, path);
// Shared label descriptors so every catalog agrees on id/name spelling.
var BIRD_LABEL = { id: "species", name: "Bird Species" };
var ANIMAL_TYPE_LABEL = { id: "animal-type", name: "Animal Type" };
var BIRD_SPECIES_MODELS = [
  {
    id: "bird-species-525",
    name: "Bird Species (525)",
    description: "EfficientNet bird species classifier \u2014 525 species, MIT license",
    inputSize: { width: 224, height: 224 },
    inputNormalization: "imagenet",
    labels: [BIRD_LABEL],
    formats: {
      onnx: { url: hf("animalClassification/bird-species/onnx/camstack-bird-species-525.onnx"), sizeMB: 32 }
    }
  }
];
var BIRD_NABIRDS_MODELS = [
  {
    id: "bird-nabirds-404",
    name: "NABirds (404 species)",
    description: "ResNet50 trained on NABirds \u2014 404 North American species with ONNX, CoreML, OpenVINO",
    inputSize: { width: 224, height: 224 },
    inputNormalization: "imagenet",
    // Consistency fix: reuse BIRD_LABEL instead of an inline duplicate of
    // { id: "species", name: "Bird Species" }, matching BIRD_SPECIES_MODELS.
    labels: [BIRD_LABEL],
    formats: {
      onnx: { url: hf("animalClassification/bird-nabirds/onnx/camstack-bird-nabirds-404.onnx"), sizeMB: 93 },
      coreml: { url: hf("animalClassification/bird-nabirds/coreml/camstack-bird-nabirds-404.mlpackage"), sizeMB: 47 },
      openvino: { url: hf("animalClassification/bird-nabirds/openvino/camstack-bird-nabirds-404.xml"), sizeMB: 47 }
    }
  }
];
var ANIMAL_TYPE_MODELS = [
  {
    id: "animals-10",
    name: "Animal Classifier (10)",
    description: "ViT-based animal type classifier \u2014 cat, cow, dog, dolphin, eagle, panda, horse, monkey, sheep, spider",
    inputSize: { width: 224, height: 224 },
    inputNormalization: "imagenet",
    labels: [ANIMAL_TYPE_LABEL],
    formats: {
      onnx: { url: hf("animalClassification/animals-10/onnx/camstack-animals-10.onnx"), sizeMB: 328 }
    }
  }
];
789
// Annotate the CommonJS export names for ESM import in node:
// (The `0 &&` expression below is intentionally dead code: Node's static
// CJS export scanner reads it to discover named exports when this CommonJS
// file is imported from ESM. Do not remove or "simplify" it.)
0 && (module.exports = {
  ANIMAL_TYPE_MODELS,
  AUDIO_CLASSIFICATION_MODELS,
  BIRD_NABIRDS_MODELS,
  BIRD_SPECIES_MODELS,
  COCO_80_LABELS,
  COCO_TO_MACRO,
  FACE_DETECTION_MODELS,
  FACE_RECOGNITION_MODELS,
  MACRO_LABELS,
  OBJECT_DETECTION_MODELS,
  PLATE_DETECTION_MODELS,
  PLATE_RECOGNITION_MODELS,
  SEGMENTATION_MODELS
});
//# sourceMappingURL=index.js.map