@workglow/test 0.0.84 → 0.0.85
This diff shows the contents of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
- package/dist/browser.js +78 -78
- package/dist/browser.js.map +4 -4
- package/dist/bun.js +78 -78
- package/dist/bun.js.map +4 -4
- package/dist/node.js +78 -78
- package/dist/node.js.map +4 -4
- package/dist/test/task/FileLoaderTask.server.test.d.ts +7 -0
- package/dist/test/task/FileLoaderTask.server.test.d.ts.map +1 -0
- package/dist/test/task/FileLoaderTask.test.d.ts +7 -0
- package/dist/test/task/FileLoaderTask.test.d.ts.map +1 -0
- package/package.json +17 -17
package/dist/browser.js
CHANGED
@@ -51,9 +51,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["TextEmbeddingTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "text",
         pipeline: "text-embedder",
-
+        model_path: "https://storage.googleapis.com/mediapipe-tasks/text_embedder/universal_sentence_encoder.tflite"
       },
       metadata: {}
     },
@@ -64,9 +64,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["TextClassificationTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "text",
         pipeline: "text-classifier",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/text_classifier/bert_classifier/float32/1/bert_classifier.tflite"
       },
       metadata: {}
     },
@@ -77,9 +77,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["TextLanguageDetectionTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "text",
         pipeline: "text-language-detector",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/language_detector/language_detector/float32/1/language_detector.tflite"
       },
       metadata: {}
     },
@@ -90,9 +90,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["ImageClassificationTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-image-classifier",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/image_classifier/efficientnet_lite0/float32/1/efficientnet_lite0.tflite"
       },
       metadata: {}
     },
@@ -103,9 +103,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["ImageEmbeddingTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-image-embedder",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/image_embedder/mobilenet_v3_small/float32/1/mobilenet_v3_small.tflite"
       },
       metadata: {}
     },
@@ -116,9 +116,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["ObjectDetectionTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-object-detector",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/object_detector/efficientdet_lite0/float32/1/efficientdet_lite0.tflite"
       },
       metadata: {}
     },
@@ -129,9 +129,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["ImageSegmentationTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-image-segmenter",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/image_segmenter/deeplab_v3/float32/1/deeplab_v3.tflite"
       },
       metadata: {}
     },
@@ -142,9 +142,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["AudioClassificationTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "audio",
         pipeline: "audio-classifier",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/audio_classifier/yamnet/float32/1/yamnet.tflite"
       },
       metadata: {}
     },
@@ -155,9 +155,9 @@ async function registerMediaPipeTfJsLocalModels() {
      tasks: ["GestureRecognizerTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-gesture-recognizer",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/gesture_recognizer/gesture_recognizer/float16/1/gesture_recognizer.task"
       },
       metadata: {}
     },
@@ -168,9 +168,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["HandLandmarkerTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-hand-landmarker",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task"
       },
       metadata: {}
     },
@@ -181,9 +181,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["FaceDetectorTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-face-detector",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite"
       },
       metadata: {}
     },
@@ -194,9 +194,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["FaceLandmarkerTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-face-landmarker",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task"
       },
       metadata: {}
     },
@@ -207,9 +207,9 @@ async function registerMediaPipeTfJsLocalModels() {
       tasks: ["PoseLandmarkerTask"],
       provider: TENSORFLOW_MEDIAPIPE,
       provider_config: {
-
+        task_engine: "vision",
         pipeline: "vision-pose-landmarker",
-
+        model_path: "https://storage.googleapis.com/mediapipe-models/pose_landmarker/pose_landmarker_lite/float16/1/pose_landmarker_lite.task"
       },
       metadata: {}
     }
@@ -231,9 +231,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "feature-extraction",
-        modelPath: "Xenova/all-MiniLM-L6-v2",
+        model_path: "Xenova/all-MiniLM-L6-v2",
         device: "webgpu",
-        nativeDimensions: 384
+        native_dimensions: 384
       },
       metadata: {}
     },
@@ -245,9 +245,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "feature-extraction",
-        modelPath: "Xenova/bge-base-en-v1.5",
+        model_path: "Xenova/bge-base-en-v1.5",
         device: "webgpu",
-        nativeDimensions: 768
+        native_dimensions: 768
       },
       metadata: {}
     },
@@ -259,9 +259,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "feature-extraction",
-        modelPath: "Xenova/gte-small",
+        model_path: "Xenova/gte-small",
         device: "webgpu",
-        nativeDimensions: 384
+        native_dimensions: 384
       },
       metadata: {}
     },
@@ -273,9 +273,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "feature-extraction",
-        modelPath: "onnx-community/bert_uncased_L-2_H-128_A-2-ONNX",
+        model_path: "onnx-community/bert_uncased_L-2_H-128_A-2-ONNX",
         device: "webgpu",
-        nativeDimensions: 128
+        native_dimensions: 128
       },
       metadata: {}
     },
@@ -287,9 +287,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "feature-extraction",
-        modelPath: "Xenova/paraphrase-albert-base-v2",
+        model_path: "Xenova/paraphrase-albert-base-v2",
         device: "webgpu",
-        nativeDimensions: 768
+        native_dimensions: 768
       },
       metadata: {}
     },
@@ -301,7 +301,7 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "question-answering",
-        modelPath: "Xenova/distilbert-base-uncased-distilled-squad"
+        model_path: "Xenova/distilbert-base-uncased-distilled-squad"
       },
       metadata: {}
     },
@@ -313,8 +313,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "text-generation",
-        modelPath: "Xenova/gpt2",
-        dType: "q8"
+        model_path: "Xenova/gpt2",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -326,10 +326,10 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "text-generation",
-        modelPath: "Xenova/Phi-3-mini-4k-instruct",
+        model_path: "Xenova/Phi-3-mini-4k-instruct",
         device: "webgpu",
-        dType: "q4f16",
-        useExternalDataFormat: true
+        dtype: "q4f16",
+        use_external_data_format: true
       },
       metadata: {}
     },
@@ -341,8 +341,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "text-generation",
-        modelPath: "Xenova/distilgpt2",
-        dType: "q8"
+        model_path: "Xenova/distilgpt2",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -354,7 +354,7 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "text2text-generation",
-        modelPath: "Xenova/LaMini-Flan-T5-783M"
+        model_path: "Xenova/LaMini-Flan-T5-783M"
       },
       metadata: {}
     },
@@ -366,8 +366,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "text2text-generation",
-        modelPath: "Xenova/LaMini-Flan-T5-783M",
-        dType: "q8"
+        model_path: "Xenova/LaMini-Flan-T5-783M",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -379,8 +379,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "summarization",
-        modelPath: "Falconsai/text_summarization",
-        dType: "q8"
+        model_path: "Falconsai/text_summarization",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -392,9 +392,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "translation",
-        modelPath: "Xenova/nllb-200-distilled-600M",
-        languageStyle: "FLORES-200",
-        dType: "q8"
+        model_path: "Xenova/nllb-200-distilled-600M",
+        language_style: "FLORES-200",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -406,9 +406,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "translation",
-        modelPath: "Xenova/m2m100_418M",
-        languageStyle: "ISO-639",
-        dType: "q8"
+        model_path: "Xenova/m2m100_418M",
+        language_style: "ISO-639",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -420,9 +420,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "translation",
-        modelPath: "Xenova/m2m100_418M",
-        languageStyle: "ISO-639",
-        dType: "q8"
+        model_path: "Xenova/m2m100_418M",
+        language_style: "ISO-639",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -434,9 +434,9 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "translation",
-        modelPath: "Xenova/mbart-large-50-many-to-many-mmt",
-        languageStyle: "ISO-639_ISO-3166-1-alpha-2",
-        dType: "q8"
+        model_path: "Xenova/mbart-large-50-many-to-many-mmt",
+        language_style: "ISO-639_ISO-3166-1-alpha-2",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -448,8 +448,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "image-classification",
-        modelPath: "Xenova/vit-base-patch16-224",
-        dType: "q8"
+        model_path: "Xenova/vit-base-patch16-224",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -461,8 +461,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "zero-shot-image-classification",
-        modelPath: "Xenova/clip-vit-base-patch32",
-        dType: "q8"
+        model_path: "Xenova/clip-vit-base-patch32",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -474,8 +474,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "object-detection",
-        modelPath: "Xenova/detr-resnet-50",
-        dType: "q8"
+        model_path: "Xenova/detr-resnet-50",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -487,8 +487,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "zero-shot-object-detection",
-        modelPath: "Xenova/owlvit-base-patch32",
-        dType: "q8"
+        model_path: "Xenova/owlvit-base-patch32",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -500,8 +500,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "image-segmentation",
-        modelPath: "Xenova/segformer-b0-finetuned-ade-512-512",
-        dType: "q8"
+        model_path: "Xenova/segformer-b0-finetuned-ade-512-512",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -513,8 +513,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "image-to-text",
-        modelPath: "Xenova/vit-gpt2-image-captioning",
-        dType: "q8"
+        model_path: "Xenova/vit-gpt2-image-captioning",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -526,8 +526,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "background-removal",
-        modelPath: "Xenova/modnet",
-        dType: "q8"
+        model_path: "Xenova/modnet",
+        dtype: "q8"
       },
       metadata: {}
     },
@@ -539,8 +539,8 @@ async function registerHuggingfaceLocalModels() {
       provider: HF_TRANSFORMERS_ONNX,
       provider_config: {
         pipeline: "zero-shot-classification",
-        modelPath: "Xenova/mobilebert-uncased-mnli",
-        dType: "q8"
+        model_path: "Xenova/mobilebert-uncased-mnli",
+        dtype: "q8"
       },
       metadata: {}
     }
@@ -634,4 +634,4 @@ export {
   IDB_TASK_GRAPH_REPOSITORY
 };
 
-//# debugId=
+//# debugId=07EEC9D41F5D089564756E2164756E21
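Every hunk above is the same mechanical rename: the provider_config keys move from camelCase in 0.0.84 (modelPath, nativeDimensions, dType, languageStyle, useExternalDataFormat, visible in the old source embedded in the sourcemap diff below) to snake_case in 0.0.85 (model_path, native_dimensions, dtype, language_style, use_external_data_format), and each MediaPipe provider_config now carries a task_engine of "text", "vision", or "audio". As a hedged sketch of what a hand-built registration looks like against 0.0.85, mirroring the bundled Xenova/all-MiniLM-L6-v2 record (the wrapper function name is illustrative, not package API):

import { getGlobalModelRepository } from "@workglow/ai";
import { HF_TRANSFORMERS_ONNX, HfTransformersOnnxModelRecord } from "@workglow/ai-provider";

// Sketch only: one ONNX model record in the 0.0.85 shape. The snake_case keys
// in provider_config are the ones this diff introduces; registerMiniLmExample
// is a hypothetical helper, not an export of @workglow/test.
export async function registerMiniLmExample(): Promise<void> {
  const model: HfTransformersOnnxModelRecord = {
    model_id: "onnx:Xenova/all-MiniLM-L6-v2:q8",
    title: "All MiniLM L6 V2 384D",
    description: "Xenova/all-MiniLM-L6-v2",
    tasks: ["TextEmbeddingTask"],
    provider: HF_TRANSFORMERS_ONNX,
    provider_config: {
      pipeline: "feature-extraction",
      model_path: "Xenova/all-MiniLM-L6-v2", // 0.0.84 spelled this modelPath
      device: "webgpu",
      native_dimensions: 384, // 0.0.84 spelled this nativeDimensions
    },
    metadata: {},
  };
  await getGlobalModelRepository().addModel(model);
}

Code that builds these records by hand, rather than calling the bundled registerHuggingfaceLocalModels(), needs the same rename when upgrading.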
package/dist/browser.js.map
CHANGED
@@ -5,12 +5,12 @@
     "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { InMemoryTabularRepository } from \"@workglow/storage\";\nimport {\n  TaskGraphPrimaryKeyNames,\n  TaskGraphSchema,\n  TaskGraphTabularRepository,\n} from \"@workglow/task-graph\";\nimport { createServiceToken } from \"@workglow/util\";\n\nexport const MEMORY_TASK_GRAPH_REPOSITORY = createServiceToken<TaskGraphTabularRepository>(\n  \"taskgraph.taskGraphRepository.inMemory\"\n);\n\n/**\n * In-memory implementation of a task graph repository.\n * Provides storage and retrieval for task graphs.\n */\nexport class InMemoryTaskGraphRepository extends TaskGraphTabularRepository {\n  constructor() {\n    super({\n      tabularRepository: new InMemoryTabularRepository(TaskGraphSchema, TaskGraphPrimaryKeyNames),\n    });\n  }\n}\n",
     "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { InMemoryTabularRepository } from \"@workglow/storage\";\nimport {\n  TaskOutputPrimaryKeyNames,\n  TaskOutputSchema,\n  TaskOutputTabularRepository,\n} from \"@workglow/task-graph\";\nimport { createServiceToken } from \"@workglow/util\";\n\nexport const MEMORY_TASK_OUTPUT_REPOSITORY = createServiceToken<InMemoryTaskOutputRepository>(\n  \"taskgraph.taskOutputRepository.inMemory\"\n);\n\n/**\n * In-memory implementation of a task output repository.\n * Provides storage and retrieval for task outputs.\n */\nexport class InMemoryTaskOutputRepository extends TaskOutputTabularRepository {\n  constructor() {\n    super({\n      tabularRepository: new InMemoryTabularRepository(\n        TaskOutputSchema,\n        TaskOutputPrimaryKeyNames,\n        [\"createdAt\"]\n      ),\n    });\n  }\n}\n",
     "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput } from \"@workglow/ai\";\nimport { TENSORFLOW_MEDIAPIPE } from \"@workglow/ai-provider\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nexport * from \"./MediaPipeModelSamples\";\nexport * from \"./ONNXModelSamples\";\n\nexport async function register_HFT_InMemoryQueue(): Promise<void> {\n  const queueName = \"HF_TRANSFORMERS_ONNX\";\n  const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(queueName);\n  await storage.setupDatabase();\n\n  const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(\n    AiJob<AiJobInput<TaskInput>, TaskOutput>,\n    {\n      storage,\n      queueName,\n      limiter: new ConcurrencyLimiter(1, 10),\n    }\n  );\n\n  const client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n    storage,\n    queueName,\n  });\n\n  client.attach(server);\n\n  getTaskQueueRegistry().registerQueue({ server, client, storage });\n  await server.start();\n}\n\nexport async function register_TFMP_InMemoryQueue(): Promise<void> {\n  const queueName = TENSORFLOW_MEDIAPIPE;\n  const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(queueName);\n  await storage.setupDatabase();\n\n  const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(\n    AiJob<AiJobInput<TaskInput>, TaskOutput>,\n    {\n      storage,\n      queueName,\n      limiter: new ConcurrencyLimiter(1, 10),\n    }\n  );\n\n  const client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n    storage,\n    queueName,\n  });\n\n  client.attach(server);\n\n  getTaskQueueRegistry().registerQueue({ server, client, storage });\n  await server.start();\n}\n",
-    "import { getGlobalModelRepository } from \"@workglow/ai\";\nimport { TENSORFLOW_MEDIAPIPE, type TFMPModelRecord } from \"@workglow/ai-provider\";\n\nexport async function registerMediaPipeTfJsLocalModels(): Promise<void> {\n  const models: TFMPModelRecord[] = [\n    // Text Models\n    {\n      model_id: \"media-pipe:Universal Sentence Encoder\",\n      title: \"Universal Sentence Encoder\",\n      description: \"Universal Sentence Encoder\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n
-    "import { getGlobalModelRepository } from \"@workglow/ai\";\nimport { HF_TRANSFORMERS_ONNX, HfTransformersOnnxModelRecord } from \"@workglow/ai-provider\";\n\nexport async function registerHuggingfaceLocalModels(): Promise<void> {\n  const onnxModels: HfTransformersOnnxModelRecord[] = [\n    {\n      model_id: \"onnx:Xenova/all-MiniLM-L6-v2:q8\",\n      title: \"All MiniLM L6 V2 384D\",\n      description: \"Xenova/all-MiniLM-L6-v2\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        modelPath: \"Xenova/all-MiniLM-L6-v2\",\n        device: \"webgpu\",\n        nativeDimensions: 384,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/bge-base-en-v1.5:q8\",\n      title: \"BGE Base English V1.5 768D\",\n      description: \"Xenova/bge-base-en-v1.5\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        modelPath: \"Xenova/bge-base-en-v1.5\",\n        device: \"webgpu\",\n        nativeDimensions: 768,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/gte-small:q8\",\n      title: \"GTE Small 384D\",\n      description: \"Xenova/gte-small\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        modelPath: \"Xenova/gte-small\",\n        device: \"webgpu\",\n        nativeDimensions: 384,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:onnx-community/bert_uncased_L-2_H-128_A-2-ONNX:q8\",\n      title: \"BERT Uncased 128D\",\n      description: \"onnx-community/bert_uncased_L-2_H-128_A-2-ONNX\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        modelPath: \"onnx-community/bert_uncased_L-2_H-128_A-2-ONNX\",\n        device: \"webgpu\",\n        nativeDimensions: 128,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/paraphrase-albert-base-v2:q8\",\n      title: \"Paraphrase ALBERT Base V2 768D\",\n      description: \"Xenova/paraphrase-albert-base-v2\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        modelPath: \"Xenova/paraphrase-albert-base-v2\",\n        device: \"webgpu\",\n        nativeDimensions: 768,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/distilbert-base-uncased-distilled-squad:q8\",\n      title: \"distilbert-base-uncased-distilled-squad\",\n      description: \"Xenova/distilbert-base-uncased-distilled-squad quantized to 8bit\",\n      tasks: [\"TextQuestionAnsweringTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"question-answering\",\n        modelPath: \"Xenova/distilbert-base-uncased-distilled-squad\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/gpt2:q8\",\n      title: \"gpt2\",\n      description: \"Xenova/gpt2 quantized to 8bit\",\n      tasks: [\"TextGenerationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text-generation\",\n        modelPath: \"Xenova/gpt2\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/Phi-3-mini-4k-instruct:q4f16\",\n      title: \"Phi-3-mini-4k-instruct:q4f16\",\n      description: \"Xenova/Phi-3-mini-4k-instruct quantized to q4f16\",\n      tasks: [\"TextGenerationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text-generation\",\n        modelPath: \"Xenova/Phi-3-mini-4k-instruct\",\n        device: \"webgpu\",\n        dType: \"q4f16\",\n        useExternalDataFormat: true,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/distilgpt2:q8\",\n      title: \"distilgpt2\",\n      description: \"Xenova/distilgpt2 quantized to 8bit\",\n      tasks: [\"TextGenerationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text-generation\",\n        modelPath: \"Xenova/distilgpt2\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/LaMini-Flan-T5-783M:q8\",\n      title: \"LaMini-Flan-T5-783M\",\n      description: \"Xenova/LaMini-Flan-T5-783M quantized to 8bit\",\n      tasks: [\"TextGenerationTask\", \"TextRewriterTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text2text-generation\",\n        modelPath: \"Xenova/LaMini-Flan-T5-783M\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/LaMini-Flan-T5-783M:q8\",\n      title: \"LaMini-Flan-T5-783M\",\n      description: \"Xenova/LaMini-Flan-T5-783M quantized to 8bit\",\n      tasks: [\"TextGenerationTask\", \"TextRewriterTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text2text-generation\",\n        modelPath: \"Xenova/LaMini-Flan-T5-783M\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Falconsai/text_summarization:q8\",\n      title: \"text_summarization\",\n      description: \"Falconsai/text_summarization quantized to 8bit\",\n      tasks: [\"TextSummaryTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"summarization\",\n        modelPath: \"Falconsai/text_summarization\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/nllb-200-distilled-600M:q8\",\n      title: \"nllb-200-distilled-600M\",\n      description: \"Xenova/nllb-200-distilled-600M quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        modelPath: \"Xenova/nllb-200-distilled-600M\",\n        languageStyle: \"FLORES-200\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/m2m100_418M:q8\",\n      title: \"m2m100_418M\",\n      description: \"Xenova/m2m100_418M quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        modelPath: \"Xenova/m2m100_418M\",\n        languageStyle: \"ISO-639\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/m2m100_418M:q8\",\n      title: \"m2m100_418M\",\n      description: \"Xenova/m2m100_418M quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        modelPath: \"Xenova/m2m100_418M\",\n        languageStyle: \"ISO-639\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/mbart-large-50-many-to-many-mmt:q8\",\n      title: \"mbart-large-50-many-to-many-mmt\",\n      description: \"Xenova/mbart-large-50-many-to-many-mmt quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        modelPath: \"Xenova/mbart-large-50-many-to-many-mmt\",\n        languageStyle: \"ISO-639_ISO-3166-1-alpha-2\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    // Vision Models\n    {\n      model_id: \"onnx:Xenova/vit-base-patch16-224:q8\",\n      title: \"ViT Base Patch16 224\",\n      description: \"Vision Transformer for image classification\",\n      tasks: [\"ImageClassificationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"image-classification\",\n        modelPath: \"Xenova/vit-base-patch16-224\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/clip-vit-base-patch32:q8\",\n      title: \"CLIP ViT Base Patch32\",\n      description: \"CLIP model for zero-shot image classification and embeddings\",\n      tasks: [\"ImageClassificationTask\", \"ImageEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"zero-shot-image-classification\",\n        modelPath: \"Xenova/clip-vit-base-patch32\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/detr-resnet-50:q8\",\n      title: \"DETR ResNet-50\",\n      description: \"Object detection model\",\n      tasks: [\"ObjectDetectionTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"object-detection\",\n        modelPath: \"Xenova/detr-resnet-50\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/owlvit-base-patch32:q8\",\n      title: \"OWL-ViT Base Patch32\",\n      description: \"Zero-shot object detection model\",\n      tasks: [\"ObjectDetectionTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"zero-shot-object-detection\",\n        modelPath: \"Xenova/owlvit-base-patch32\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/segformer-b0-finetuned-ade-512-512:q8\",\n      title: \"Segformer B0 ADE\",\n      description: \"Image segmentation model trained on ADE20K dataset\",\n      tasks: [\"ImageSegmentationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"image-segmentation\",\n        modelPath: \"Xenova/segformer-b0-finetuned-ade-512-512\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/vit-gpt2-image-captioning:q8\",\n      title: \"ViT GPT2 Image Captioning\",\n      description: \"Image to text captioning model\",\n      tasks: [\"ImageToTextTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"image-to-text\",\n        modelPath: \"Xenova/vit-gpt2-image-captioning\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/modnet:q8\",\n      title: \"MODNet Background Removal\",\n      description: \"Background removal model\",\n      tasks: [\"BackgroundRemovalTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"background-removal\",\n        modelPath: \"Xenova/modnet\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/mobilebert-uncased-mnli:q8\",\n      title: \"MobileBERT MNLI\",\n      description: \"Zero-shot text classification model\",\n      tasks: [\"TextClassificationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"zero-shot-classification\",\n        modelPath: \"Xenova/mobilebert-uncased-mnli\",\n        dType: \"q8\",\n      },\n      metadata: {},\n    },\n  ];\n\n  for (const model of onnxModels) {\n    await getGlobalModelRepository().addModel(model);\n  }\n}\n",
+    "import { getGlobalModelRepository } from \"@workglow/ai\";\nimport { TENSORFLOW_MEDIAPIPE, type TFMPModelRecord } from \"@workglow/ai-provider\";\n\nexport async function registerMediaPipeTfJsLocalModels(): Promise<void> {\n  const models: TFMPModelRecord[] = [\n    // Text Models\n    {\n      model_id: \"media-pipe:Universal Sentence Encoder\",\n      title: \"Universal Sentence Encoder\",\n      description: \"Universal Sentence Encoder\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"text\",\n        pipeline: \"text-embedder\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-tasks/text_embedder/universal_sentence_encoder.tflite\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:BERT Text Classifier\",\n      title: \"BERT Text Classifier\",\n      description: \"BERT-based text classification model\",\n      tasks: [\"TextClassificationTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"text\",\n        pipeline: \"text-classifier\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/text_classifier/bert_classifier/float32/1/bert_classifier.tflite\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:Language Detector\",\n      title: \"Language Detector\",\n      description: \"Language detection model\",\n      tasks: [\"TextLanguageDetectionTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"text\",\n        pipeline: \"text-language-detector\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/language_detector/language_detector/float32/1/language_detector.tflite\",\n      },\n      metadata: {},\n    },\n    // Vision Models\n    {\n      model_id: \"media-pipe:EfficientNet Lite0 Image Classifier\",\n      title: \"EfficientNet Lite0\",\n      description: \"Lightweight image classification model\",\n      tasks: [\"ImageClassificationTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-image-classifier\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/image_classifier/efficientnet_lite0/float32/1/efficientnet_lite0.tflite\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:MobileNet V3 Image Embedder\",\n      title: \"MobileNet V3 Small\",\n      description: \"Lightweight image embedding model\",\n      tasks: [\"ImageEmbeddingTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-image-embedder\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/image_embedder/mobilenet_v3_small/float32/1/mobilenet_v3_small.tflite\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:EfficientDet Lite0 Object Detector\",\n      title: \"EfficientDet Lite0\",\n      description: \"Lightweight object detection model\",\n      tasks: [\"ObjectDetectionTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-object-detector\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/object_detector/efficientdet_lite0/float32/1/efficientdet_lite0.tflite\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:DeepLab V3 Image Segmenter\",\n      title: \"DeepLab V3\",\n      description: \"Image segmentation model\",\n      tasks: [\"ImageSegmentationTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-image-segmenter\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/image_segmenter/deeplab_v3/float32/1/deeplab_v3.tflite\",\n      },\n      metadata: {},\n    },\n    // Audio Models\n    {\n      model_id: \"media-pipe:YAMNet Audio Classifier\",\n      title: \"YAMNet\",\n      description: \"Audio event classification model\",\n      tasks: [\"AudioClassificationTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"audio\",\n        pipeline: \"audio-classifier\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/audio_classifier/yamnet/float32/1/yamnet.tflite\",\n      },\n      metadata: {},\n    },\n    // New Vision Tasks\n    {\n      model_id: \"media-pipe:Gesture Recognizer\",\n      title: \"Gesture Recognizer\",\n      description: \"Recognizes hand gestures (thumbs up, victory, etc.)\",\n      tasks: [\"GestureRecognizerTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-gesture-recognizer\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/gesture_recognizer/gesture_recognizer/float16/1/gesture_recognizer.task\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:Hand Landmarker\",\n      title: \"Hand Landmarker\",\n      description: \"Detects 21 hand landmarks\",\n      tasks: [\"HandLandmarkerTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-hand-landmarker\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:Face Detector\",\n      title: \"Face Detector\",\n      description: \"Detects faces with bounding boxes and keypoints\",\n      tasks: [\"FaceDetectorTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-face-detector\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:Face Landmarker\",\n      title: \"Face Landmarker\",\n      description: \"Detects 478 facial landmarks with blendshapes\",\n      tasks: [\"FaceLandmarkerTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-face-landmarker\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"media-pipe:Pose Landmarker\",\n      title: \"Pose Landmarker\",\n      description: \"Detects 33 body pose landmarks\",\n      tasks: [\"PoseLandmarkerTask\"],\n      provider: TENSORFLOW_MEDIAPIPE,\n      provider_config: {\n        task_engine: \"vision\",\n        pipeline: \"vision-pose-landmarker\",\n        model_path:\n          \"https://storage.googleapis.com/mediapipe-models/pose_landmarker/pose_landmarker_lite/float16/1/pose_landmarker_lite.task\",\n      },\n      metadata: {},\n    },\n  ];\n\n  for (const model of models) {\n    await getGlobalModelRepository().addModel(model);\n  }\n}\n",
+    "import { getGlobalModelRepository } from \"@workglow/ai\";\nimport { HF_TRANSFORMERS_ONNX, HfTransformersOnnxModelRecord } from \"@workglow/ai-provider\";\n\nexport async function registerHuggingfaceLocalModels(): Promise<void> {\n  const onnxModels: HfTransformersOnnxModelRecord[] = [\n    {\n      model_id: \"onnx:Xenova/all-MiniLM-L6-v2:q8\",\n      title: \"All MiniLM L6 V2 384D\",\n      description: \"Xenova/all-MiniLM-L6-v2\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        model_path: \"Xenova/all-MiniLM-L6-v2\",\n        device: \"webgpu\",\n        native_dimensions: 384,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/bge-base-en-v1.5:q8\",\n      title: \"BGE Base English V1.5 768D\",\n      description: \"Xenova/bge-base-en-v1.5\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        model_path: \"Xenova/bge-base-en-v1.5\",\n        device: \"webgpu\",\n        native_dimensions: 768,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/gte-small:q8\",\n      title: \"GTE Small 384D\",\n      description: \"Xenova/gte-small\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        model_path: \"Xenova/gte-small\",\n        device: \"webgpu\",\n        native_dimensions: 384,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:onnx-community/bert_uncased_L-2_H-128_A-2-ONNX:q8\",\n      title: \"BERT Uncased 128D\",\n      description: \"onnx-community/bert_uncased_L-2_H-128_A-2-ONNX\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        model_path: \"onnx-community/bert_uncased_L-2_H-128_A-2-ONNX\",\n        device: \"webgpu\",\n        native_dimensions: 128,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/paraphrase-albert-base-v2:q8\",\n      title: \"Paraphrase ALBERT Base V2 768D\",\n      description: \"Xenova/paraphrase-albert-base-v2\",\n      tasks: [\"TextEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"feature-extraction\",\n        model_path: \"Xenova/paraphrase-albert-base-v2\",\n        device: \"webgpu\",\n        native_dimensions: 768,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/distilbert-base-uncased-distilled-squad:q8\",\n      title: \"distilbert-base-uncased-distilled-squad\",\n      description: \"Xenova/distilbert-base-uncased-distilled-squad quantized to 8bit\",\n      tasks: [\"TextQuestionAnsweringTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"question-answering\",\n        model_path: \"Xenova/distilbert-base-uncased-distilled-squad\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/gpt2:q8\",\n      title: \"gpt2\",\n      description: \"Xenova/gpt2 quantized to 8bit\",\n      tasks: [\"TextGenerationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text-generation\",\n        model_path: \"Xenova/gpt2\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/Phi-3-mini-4k-instruct:q4f16\",\n      title: \"Phi-3-mini-4k-instruct:q4f16\",\n      description: \"Xenova/Phi-3-mini-4k-instruct quantized to q4f16\",\n      tasks: [\"TextGenerationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text-generation\",\n        model_path: \"Xenova/Phi-3-mini-4k-instruct\",\n        device: \"webgpu\",\n        dtype: \"q4f16\",\n        use_external_data_format: true,\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/distilgpt2:q8\",\n      title: \"distilgpt2\",\n      description: \"Xenova/distilgpt2 quantized to 8bit\",\n      tasks: [\"TextGenerationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text-generation\",\n        model_path: \"Xenova/distilgpt2\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/LaMini-Flan-T5-783M:q8\",\n      title: \"LaMini-Flan-T5-783M\",\n      description: \"Xenova/LaMini-Flan-T5-783M quantized to 8bit\",\n      tasks: [\"TextGenerationTask\", \"TextRewriterTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text2text-generation\",\n        model_path: \"Xenova/LaMini-Flan-T5-783M\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/LaMini-Flan-T5-783M:q8\",\n      title: \"LaMini-Flan-T5-783M\",\n      description: \"Xenova/LaMini-Flan-T5-783M quantized to 8bit\",\n      tasks: [\"TextGenerationTask\", \"TextRewriterTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"text2text-generation\",\n        model_path: \"Xenova/LaMini-Flan-T5-783M\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Falconsai/text_summarization:q8\",\n      title: \"text_summarization\",\n      description: \"Falconsai/text_summarization quantized to 8bit\",\n      tasks: [\"TextSummaryTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"summarization\",\n        model_path: \"Falconsai/text_summarization\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/nllb-200-distilled-600M:q8\",\n      title: \"nllb-200-distilled-600M\",\n      description: \"Xenova/nllb-200-distilled-600M quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        model_path: \"Xenova/nllb-200-distilled-600M\",\n        language_style: \"FLORES-200\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/m2m100_418M:q8\",\n      title: \"m2m100_418M\",\n      description: \"Xenova/m2m100_418M quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        model_path: \"Xenova/m2m100_418M\",\n        language_style: \"ISO-639\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/m2m100_418M:q8\",\n      title: \"m2m100_418M\",\n      description: \"Xenova/m2m100_418M quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        model_path: \"Xenova/m2m100_418M\",\n        language_style: \"ISO-639\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/mbart-large-50-many-to-many-mmt:q8\",\n      title: \"mbart-large-50-many-to-many-mmt\",\n      description: \"Xenova/mbart-large-50-many-to-many-mmt quantized to 8bit\",\n      tasks: [\"TextTranslationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"translation\",\n        model_path: \"Xenova/mbart-large-50-many-to-many-mmt\",\n        language_style: \"ISO-639_ISO-3166-1-alpha-2\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    // Vision Models\n    {\n      model_id: \"onnx:Xenova/vit-base-patch16-224:q8\",\n      title: \"ViT Base Patch16 224\",\n      description: \"Vision Transformer for image classification\",\n      tasks: [\"ImageClassificationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"image-classification\",\n        model_path: \"Xenova/vit-base-patch16-224\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/clip-vit-base-patch32:q8\",\n      title: \"CLIP ViT Base Patch32\",\n      description: \"CLIP model for zero-shot image classification and embeddings\",\n      tasks: [\"ImageClassificationTask\", \"ImageEmbeddingTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"zero-shot-image-classification\",\n        model_path: \"Xenova/clip-vit-base-patch32\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/detr-resnet-50:q8\",\n      title: \"DETR ResNet-50\",\n      description: \"Object detection model\",\n      tasks: [\"ObjectDetectionTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"object-detection\",\n        model_path: \"Xenova/detr-resnet-50\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/owlvit-base-patch32:q8\",\n      title: \"OWL-ViT Base Patch32\",\n      description: \"Zero-shot object detection model\",\n      tasks: [\"ObjectDetectionTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"zero-shot-object-detection\",\n        model_path: \"Xenova/owlvit-base-patch32\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/segformer-b0-finetuned-ade-512-512:q8\",\n      title: \"Segformer B0 ADE\",\n      description: \"Image segmentation model trained on ADE20K dataset\",\n      tasks: [\"ImageSegmentationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"image-segmentation\",\n        model_path: \"Xenova/segformer-b0-finetuned-ade-512-512\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/vit-gpt2-image-captioning:q8\",\n      title: \"ViT GPT2 Image Captioning\",\n      description: \"Image to text captioning model\",\n      tasks: [\"ImageToTextTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"image-to-text\",\n        model_path: \"Xenova/vit-gpt2-image-captioning\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/modnet:q8\",\n      title: \"MODNet Background Removal\",\n      description: \"Background removal model\",\n      tasks: [\"BackgroundRemovalTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"background-removal\",\n        model_path: \"Xenova/modnet\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n    {\n      model_id: \"onnx:Xenova/mobilebert-uncased-mnli:q8\",\n      title: \"MobileBERT MNLI\",\n      description: \"Zero-shot text classification model\",\n      tasks: [\"TextClassificationTask\"],\n      provider: HF_TRANSFORMERS_ONNX,\n      provider_config: {\n        pipeline: \"zero-shot-classification\",\n        model_path: \"Xenova/mobilebert-uncased-mnli\",\n        dtype: \"q8\",\n      },\n      metadata: {},\n    },\n  ];\n\n  for (const model of onnxModels) {\n    await getGlobalModelRepository().addModel(model);\n  }\n}\n",
     "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { IndexedDbTabularRepository } from \"@workglow/storage\";\nimport {\n  TaskGraphPrimaryKeyNames,\n  TaskGraphSchema,\n  TaskGraphTabularRepository,\n} from \"@workglow/task-graph\";\nimport { createServiceToken } from \"@workglow/util\";\n\nexport const IDB_TASK_GRAPH_REPOSITORY = createServiceToken<TaskGraphTabularRepository>(\n  \"taskgraph.taskGraphRepository.indexedDb\"\n);\n\n/**\n * IndexedDB implementation of a task graph repository.\n * Provides storage and retrieval for task graphs using IndexedDB.\n */\nexport class IndexedDbTaskGraphRepository extends TaskGraphTabularRepository {\n  constructor(table: string = \"task_graphs\") {\n    super({\n      tabularRepository: new IndexedDbTabularRepository(\n        table,\n        TaskGraphSchema,\n        TaskGraphPrimaryKeyNames\n      ),\n    });\n  }\n}\n",
     "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { IndexedDbTabularRepository } from \"@workglow/storage\";\nimport {\n  TaskOutputPrimaryKeyNames,\n  TaskOutputSchema,\n  TaskOutputTabularRepository,\n} from \"@workglow/task-graph\";\nimport { createServiceToken } from \"@workglow/util\";\n\nexport const IDB_TASK_OUTPUT_REPOSITORY = createServiceToken<IndexedDbTaskOutputRepository>(\n  \"taskgraph.taskOutputRepository.indexedDb\"\n);\n\n/**\n * IndexedDB implementation of a task output repository.\n * Provides storage and retrieval for task outputs using IndexedDB.\n */\nexport class IndexedDbTaskOutputRepository extends TaskOutputTabularRepository {\n  constructor(table: string = \"task_outputs\") {\n    super({\n      tabularRepository: new IndexedDbTabularRepository(\n        table,\n        TaskOutputSchema,\n        TaskOutputPrimaryKeyNames,\n        [\"createdAt\"]\n      ),\n    });\n  }\n}\n"
   ],
-  "mappings": ";AAMA;AACA;AAAA;AAAA;AAAA;AAAA;AAKA;AAEO,IAAM,+BAA+B,mBAC1C,wCACF;AAAA;AAMO,MAAM,oCAAoC,2BAA2B;AAAA,EAC1E,WAAW,GAAG;AAAA,IACZ,MAAM;AAAA,MACJ,mBAAmB,IAAI,0BAA0B,iBAAiB,wBAAwB;AAAA,IAC5F,CAAC;AAAA;AAEL;;ACtBA,sCAAS;AACT;AAAA;AAAA;AAAA;AAAA;AAKA,+BAAS;AAEF,IAAM,gCAAgC,oBAC3C,yCACF;AAAA;AAMO,MAAM,qCAAqC,4BAA4B;AAAA,EAC5E,WAAW,GAAG;AAAA,IACZ,MAAM;AAAA,MACJ,mBAAmB,IAAI,2BACrB,kBACA,2BACA,CAAC,WAAW,CACd;AAAA,IACF,CAAC;AAAA;AAEL;;AC1BA;AACA,iCAAS;AACT;AACA;AACA;;;ACVA;AACA;AAEA,eAAsB,gCAAgC,GAAkB;AAAA,EACtE,MAAM,SAA4B;AAAA,IAEhC;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,
-  "debugId": "
+  "mappings": ";AAMA;AACA;AAAA;AAAA;AAAA;AAAA;AAKA;AAEO,IAAM,+BAA+B,mBAC1C,wCACF;AAAA;AAMO,MAAM,oCAAoC,2BAA2B;AAAA,EAC1E,WAAW,GAAG;AAAA,IACZ,MAAM;AAAA,MACJ,mBAAmB,IAAI,0BAA0B,iBAAiB,wBAAwB;AAAA,IAC5F,CAAC;AAAA;AAEL;;ACtBA,sCAAS;AACT;AAAA;AAAA;AAAA;AAAA;AAKA,+BAAS;AAEF,IAAM,gCAAgC,oBAC3C,yCACF;AAAA;AAMO,MAAM,qCAAqC,4BAA4B;AAAA,EAC5E,WAAW,GAAG;AAAA,IACZ,MAAM;AAAA,MACJ,mBAAmB,IAAI,2BACrB,kBACA,2BACA,CAAC,WAAW,CACd;AAAA,IACF,CAAC;AAAA;AAEL;;AC1BA;AACA,iCAAS;AACT;AACA;AACA;;;ACVA;AACA;AAEA,eAAsB,gCAAgC,GAAkB;AAAA,EACtE,MAAM,SAA4B;AAAA,IAEhC;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,wBAAwB;AAAA,MAChC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,2BAA2B;AAAA,MACnC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IAEA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,yBAAyB;AAAA,MACjC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,uBAAuB;AAAA,MAC/B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IAEA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,yBAAyB;AAAA,MACjC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IAEA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,uBAAuB;AAAA,MAC/B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,kBAAkB;AAAA,MAC1B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,aAAa;AAAA,QACb,UAAU;AAAA,QACV,YACE;AAAA,MACJ;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,WAAW,SAAS,QAAQ;AAAA,IAC1B,MAAM,yBAAyB,EAAE,SAAS,KAAK;AAAA,EACjD;AAAA;;ACnMF,qCAAS;AACT;AAEA,eAAsB,8BAA8B,GAAkB;AAAA,EACpE,MAAM,aAA8C;AAAA,IAClD;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,mBAAmB;AAAA,MACrB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,mBAAmB;AAAA,MACrB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,mBAAmB;AAAA,MACrB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,mBAAmB;AAAA,MACrB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,mBAAmB;AAAA,MAC3B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,mBAAmB;AAAA,MACrB;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,2BAA2B;AAAA,MACnC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,MACd;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,0BAA0B;AAAA,MAC5B;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,oBAAoB;AAAA,MAC5B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,sBAAsB,kBAAkB;AAAA,MAChD,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,MACd;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,sBAAsB,kBAAkB;AAAA,MAChD,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,iBAAiB;AAAA,MACzB,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,gBAAgB;AAAA,QAChB,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,gBAAgB;AAAA,QAChB,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,gBAAgB;AAAA,QAChB,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,gBAAgB;AAAA,QAChB,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IAEA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,yBAAyB;AAAA,MACjC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,2BAA2B,oBAAoB;AAAA,MACvD,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,qBAAqB;AAAA,MAC7B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,uBAAuB;AAAA,MAC/B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,iBAAiB;AAAA,MACzB,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,uBAAuB;AAAA,MAC/B,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,IACA;AAAA,MACE,UAAU;AAAA,MACV,OAAO;AAAA,MACP,aAAa;AAAA,MACb,OAAO,CAAC,wBAAwB;AAAA,MAChC,UAAU;AAAA,MACV,iBAAiB;AAAA,QACf,UAAU;AAAA,QACV,YAAY;AAAA,QACZ,OAAO;AAAA,MACT;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAAA,EACF;AAAA,EAEA,WAAW,SAAS,YAAY;AAAA,IAC9B,MAAM,0BAAyB,EAAE,SAAS,KAAK;AAAA,EACjD;AAAA;;;AF7TF,eAAsB,0BAA0B,GAAkB;AAAA,EAChE,MAAM,YAAY;AAAA,EAClB,MAAM,UAAU,IAAI,qBAAwD,SAAS;AAAA,EACrF,MAAM,QAAQ,cAAc;AAAA,EAE5B,MAAM,SAAS,IAAI,eACjB,OACA;AAAA,IACE;AAAA,IACA;AAAA,IACA,SAAS,IAAI,mBAAmB,GAAG,EAAE;AAAA,EACvC,CACF;AAAA,EAEA,MAAM,SAAS,IAAI,eAAkD;AAAA,IACnE;AAAA,IACA;AAAA,EACF,CAAC;AAAA,EAED,OAAO,OAAO,MAAM;AAAA,EAEpB,qBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,EAChE,MAAM,OAAO,MAAM;AAAA;AAGrB,eAAsB,2BAA2B,GAAkB;AAAA,EACjE,MAAM,YAAY;AAAA,EAClB,MAAM,UAAU,IAAI,qBAAwD,SAAS;AAAA,EACrF,MAAM,QAAQ,cAAc;AAAA,EAE5B,MAAM,SAAS,IAAI,eACjB,OACA;AAAA,IACE;AAAA,IACA;AAAA,IACA,SAAS,IAAI,mBAAmB,GAAG,EAAE;AAAA,EACvC,CACF;AAAA,EAEA,MAAM,SAAS,IAAI,eAAkD;AAAA,IACnE;AAAA,IACA;AAAA,EACF,CAAC;AAAA,EAED,OAAO,OAAO,MAAM;AAAA,EAEpB,qBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,EAChE,MAAM,OAAO,MAAM;AAAA;;AGvDrB;AACA;AAAA,8BACE;AAAA,qBACA;AAAA,gCACA;AAAA;AAEF,+BAAS;AAEF,IAAM,4BAA4B,oBACvC,yCACF;AAAA;AAMO,MAAM,qCAAqC,4BAA2B;AAAA,EAC3E,WAAW,CAAC,QAAgB,eAAe;AAAA,IACzC,MAAM;AAAA,MACJ,mBAAmB,IAAI,2BACrB,OACA,kBACA,yBACF;AAAA,IACF,CAAC;AAAA;AAEL;;AC1BA,uCAAS;AACT;AAAA,+BACE;AAAA,sBACA;AAAA,iCACA;AAAA;AAEF,+BAAS;AAEF,IAAM,6BAA6B,oBACxC,0CACF;AAAA;AAMO,MAAM,sCAAsC,6BAA4B;AAAA,EAC7E,WAAW,CAAC,QAAgB,gBAAgB;AAAA,IAC1C,MAAM;AAAA,MACJ,mBAAmB,IAAI,4BACrB,OACA,mBACA,4BACA,CAAC,WAAW,CACd;AAAA,IACF,CAAC;AAAA;AAEL;",
+  "debugId": "07EEC9D41F5D089564756E2164756E21",
   "names": []
 }
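The sourcemap tells the same story from the TypeScript side: only sources 8 and 9 (the MediaPipe and ONNX model-sample modules) were rewritten, and the regenerated mappings and debugId follow from that. For the MediaPipe side, a minimal sketch of a record in the 0.0.85 shape, copied from the gesture-recognizer sample in the new source above (the wrapper function is illustrative, not package API):

import { getGlobalModelRepository } from "@workglow/ai";
import { TENSORFLOW_MEDIAPIPE, type TFMPModelRecord } from "@workglow/ai-provider";

// Sketch only: a TENSORFLOW_MEDIAPIPE record in the 0.0.85 shape. provider_config
// now carries task_engine ("text" | "vision" | "audio" in the bundled samples),
// a pipeline name, and a snake_case model_path URL.
export async function registerGestureRecognizerExample(): Promise<void> {
  const model: TFMPModelRecord = {
    model_id: "media-pipe:Gesture Recognizer",
    title: "Gesture Recognizer",
    description: "Recognizes hand gestures (thumbs up, victory, etc.)",
    tasks: ["GestureRecognizerTask"],
    provider: TENSORFLOW_MEDIAPIPE,
    provider_config: {
      task_engine: "vision",
      pipeline: "vision-gesture-recognizer",
      model_path:
        "https://storage.googleapis.com/mediapipe-models/gesture_recognizer/gesture_recognizer/float16/1/gesture_recognizer.task",
    },
    metadata: {},
  };
  await getGlobalModelRepository().addModel(model);
}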