@workglow/ai-provider 0.0.57 → 0.0.59

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. package/README.md +1 -1
  2. package/dist/hf-transformers/common/HFT_Constants.d.ts +77 -11
  3. package/dist/hf-transformers/common/HFT_Constants.d.ts.map +1 -1
  4. package/dist/hf-transformers/common/HFT_JobRunFns.d.ts +8 -10
  5. package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map +1 -1
  6. package/dist/hf-transformers/common/HFT_ModelSchema.d.ts +172 -0
  7. package/dist/hf-transformers/common/HFT_ModelSchema.d.ts.map +1 -0
  8. package/dist/hf-transformers/index.d.ts +1 -0
  9. package/dist/hf-transformers/index.d.ts.map +1 -1
  10. package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts +11 -1
  11. package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts.map +1 -1
  12. package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts +10 -1
  13. package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts.map +1 -1
  14. package/dist/index.js +273 -39
  15. package/dist/index.js.map +12 -10
  16. package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts +3 -2
  17. package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +1 -1
  18. package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts +74 -0
  19. package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +1 -0
  20. package/dist/tf-mediapipe/index.d.ts +1 -0
  21. package/dist/tf-mediapipe/index.d.ts.map +1 -1
  22. package/dist/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.d.ts +11 -1
  23. package/dist/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.d.ts.map +1 -1
  24. package/dist/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.d.ts +10 -1
  25. package/dist/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.d.ts.map +1 -1
  26. package/package.json +11 -11
package/dist/index.js CHANGED
@@ -1,17 +1,55 @@
1
1
  // src/hf-transformers/common/HFT_Constants.ts
2
2
  var HF_TRANSFORMERS_ONNX = "HF_TRANSFORMERS_ONNX";
3
- var QUANTIZATION_DATA_TYPES;
4
- ((QUANTIZATION_DATA_TYPES2) => {
5
- QUANTIZATION_DATA_TYPES2["auto"] = "auto";
6
- QUANTIZATION_DATA_TYPES2["fp32"] = "fp32";
7
- QUANTIZATION_DATA_TYPES2["fp16"] = "fp16";
8
- QUANTIZATION_DATA_TYPES2["q8"] = "q8";
9
- QUANTIZATION_DATA_TYPES2["int8"] = "int8";
10
- QUANTIZATION_DATA_TYPES2["uint8"] = "uint8";
11
- QUANTIZATION_DATA_TYPES2["q4"] = "q4";
12
- QUANTIZATION_DATA_TYPES2["bnb4"] = "bnb4";
13
- QUANTIZATION_DATA_TYPES2["q4f16"] = "q4f16";
14
- })(QUANTIZATION_DATA_TYPES ||= {});
3
+ var QuantizationDataType = {
4
+ auto: "auto",
5
+ fp32: "fp32",
6
+ fp16: "fp16",
7
+ q8: "q8",
8
+ int8: "int8",
9
+ uint8: "uint8",
10
+ q4: "q4",
11
+ bnb4: "bnb4",
12
+ q4f16: "q4f16"
13
+ };
14
+ var TextPipelineUseCase = {
15
+ "fill-mask": "fill-mask",
16
+ "token-classification": "token-classification",
17
+ "text-generation": "text-generation",
18
+ "text2text-generation": "text2text-generation",
19
+ "text-classification": "text-classification",
20
+ summarization: "summarization",
21
+ translation: "translation",
22
+ "feature-extraction": "feature-extraction",
23
+ "zero-shot-classification": "zero-shot-classification",
24
+ "question-answering": "question-answering"
25
+ };
26
+ var VisionPipelineUseCase = {
27
+ "background-removal": "background-removal",
28
+ "image-segmentation": "image-segmentation",
29
+ "depth-estimation": "depth-estimation",
30
+ "image-classification": "image-classification",
31
+ "image-to-image": "image-to-image",
32
+ "object-detection": "object-detection",
33
+ "image-feature-extraction": "image-feature-extraction"
34
+ };
35
+ var AudioPipelineUseCase = {
36
+ "audio-classification": "audio-classification",
37
+ "automatic-speech-recognition": "automatic-speech-recognition",
38
+ "text-to-speech": "text-to-speech"
39
+ };
40
+ var MultimodalPipelineUseCase = {
41
+ "document-question-answering": "document-question-answering",
42
+ "image-to-text": "image-to-text",
43
+ "zero-shot-audio-classification": "zero-shot-audio-classification",
44
+ "zero-shot-image-classification": "zero-shot-image-classification",
45
+ "zero-shot-object-detection": "zero-shot-object-detection"
46
+ };
47
+ var PipelineUseCase = {
48
+ ...TextPipelineUseCase,
49
+ ...VisionPipelineUseCase,
50
+ ...AudioPipelineUseCase,
51
+ ...MultimodalPipelineUseCase
52
+ };
15
53
  // src/hf-transformers/common/HFT_JobRunFns.ts
16
54
  import {
17
55
  pipeline,
@@ -20,8 +58,8 @@ import {
20
58
  import { PermanentJobError } from "@workglow/job-queue";
21
59
  var pipelines = new Map;
22
60
  var getPipeline = async (model, onProgress, options = {}) => {
23
- if (pipelines.has(model.name)) {
24
- return pipelines.get(model.name);
61
+ if (pipelines.has(model.model_id)) {
62
+ return pipelines.get(model.model_id);
25
63
  }
26
64
  const progressCallback = (status) => {
27
65
  const progress = status.status === "progress" ? Math.round(status.progress) : 0;
@@ -33,15 +71,15 @@ var getPipeline = async (model, onProgress, options = {}) => {
33
71
  }
34
72
  };
35
73
  const pipelineOptions = {
36
- dtype: model.quantization || "q8",
37
- ...model.use_external_data_format ? { use_external_data_format: model.use_external_data_format } : {},
38
- ...model.device ? { device: model.device } : {},
74
+ dtype: model.providerConfig.dType || "q8",
75
+ ...model.providerConfig.useExternalDataFormat ? { use_external_data_format: model.providerConfig.useExternalDataFormat } : {},
76
+ ...model.providerConfig.device ? { device: model.providerConfig.device } : {},
39
77
  ...options,
40
78
  progress_callback: progressCallback
41
79
  };
42
- const pipelineType = model.pipeline;
43
- const result = await pipeline(pipelineType, model.url, pipelineOptions);
44
- pipelines.set(model.name, result);
80
+ const pipelineType = model.providerConfig.pipeline;
81
+ const result = await pipeline(pipelineType, model.providerConfig.modelPath, pipelineOptions);
82
+ pipelines.set(model.model_id, result);
45
83
  return result;
46
84
  };
47
85
  var HFT_Download = async (input, model, onProgress, signal) => {
@@ -56,12 +94,12 @@ var HFT_TextEmbedding = async (input, model, onProgress, signal) => {
56
94
  });
57
95
  const hfVector = await generateEmbedding(input.text, {
58
96
  pooling: "mean",
59
- normalize: model.normalize,
97
+ normalize: model?.providerConfig.normalize,
60
98
  ...signal ? { abort_signal: signal } : {}
61
99
  });
62
- if (hfVector.size !== model.nativeDimensions) {
63
- console.warn(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model.nativeDimensions}`, input, hfVector);
64
- throw new PermanentJobError(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model.nativeDimensions}`);
100
+ if (hfVector.size !== model?.providerConfig.nativeDimensions) {
101
+ console.warn(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`, input, hfVector);
102
+ throw new PermanentJobError(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`);
65
103
  }
66
104
  return { vector: hfVector.data };
67
105
  };
@@ -184,10 +222,94 @@ function createTextStreamer(tokenizer, updateProgress, signal) {
184
222
  ...signal ? { abort_signal: signal } : {}
185
223
  });
186
224
  }
225
+ // src/hf-transformers/common/HFT_ModelSchema.ts
226
+ import { ModelSchema } from "@workglow/ai";
227
+ var HfTransformersOnnxModelSchema = {
228
+ type: "object",
229
+ properties: {
230
+ provider: {
231
+ const: HF_TRANSFORMERS_ONNX,
232
+ description: "Discriminator: ONNX runtime backend."
233
+ },
234
+ providerConfig: {
235
+ type: "object",
236
+ description: "ONNX runtime-specific options.",
237
+ properties: {
238
+ pipeline: {
239
+ type: "string",
240
+ enum: Object.values(PipelineUseCase),
241
+ description: "Pipeline type for the ONNX model.",
242
+ default: "text-generation"
243
+ },
244
+ modelPath: {
245
+ type: "string",
246
+ description: "Filesystem path or URI for the ONNX model."
247
+ },
248
+ dType: {
249
+ type: "string",
250
+ enum: Object.values(QuantizationDataType),
251
+ description: "Data type for the ONNX model.",
252
+ default: "float32"
253
+ },
254
+ device: {
255
+ type: "string",
256
+ enum: ["cpu", "gpu", "webgpu", "wasm", "metal"],
257
+ description: "High-level device selection.",
258
+ default: "webgpu"
259
+ },
260
+ executionProviders: {
261
+ type: "array",
262
+ items: { type: "string" },
263
+ description: "Raw ONNX Runtime execution provider identifiers."
264
+ },
265
+ intraOpNumThreads: {
266
+ type: "integer",
267
+ minimum: 1
268
+ },
269
+ interOpNumThreads: {
270
+ type: "integer",
271
+ minimum: 1
272
+ },
273
+ useExternalDataFormat: {
274
+ type: "boolean",
275
+ description: "Whether the model uses external data format."
276
+ },
277
+ nativeDimensions: {
278
+ type: "integer",
279
+ description: "The native dimensions of the model."
280
+ },
281
+ normalize: {
282
+ type: "boolean",
283
+ description: "Whether the model uses normalization."
284
+ },
285
+ languageStyle: {
286
+ type: "string",
287
+ description: "The language style of the model."
288
+ }
289
+ },
290
+ required: ["modelPath", "pipeline"],
291
+ additionalProperties: false
292
+ }
293
+ },
294
+ required: ["provider", "providerConfig"],
295
+ additionalProperties: true
296
+ };
297
+ var ExtendedModelSchema = {
298
+ type: "object",
299
+ properties: {
300
+ ...ModelSchema.properties,
301
+ ...HfTransformersOnnxModelSchema.properties
302
+ },
303
+ required: [...ModelSchema.required, ...HfTransformersOnnxModelSchema.required],
304
+ additionalProperties: false
305
+ };
187
306
  // src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts
188
- import { getAiProviderRegistry } from "@workglow/ai";
307
+ import { AiJob, getAiProviderRegistry } from "@workglow/ai";
308
+ import { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from "@workglow/job-queue";
309
+ import { InMemoryQueueStorage } from "@workglow/storage";
310
+ import { getTaskQueueRegistry } from "@workglow/task-graph";
189
311
  import { globalServiceRegistry, WORKER_MANAGER } from "@workglow/util";
190
- async function register_HFT_ClientJobFns(worker) {
312
+ async function register_HFT_ClientJobFns(worker, client) {
191
313
  const workerManager = globalServiceRegistry.get(WORKER_MANAGER);
192
314
  workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);
193
315
  const ProviderRegistry = getAiProviderRegistry();
@@ -203,11 +325,28 @@ async function register_HFT_ClientJobFns(worker) {
203
325
  for (const name of names) {
204
326
  ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);
205
327
  }
328
+ if (!client) {
329
+ const storage = new InMemoryQueueStorage(HF_TRANSFORMERS_ONNX);
330
+ const server = new JobQueueServer(AiJob, {
331
+ storage,
332
+ queueName: HF_TRANSFORMERS_ONNX,
333
+ limiter: new ConcurrencyLimiter(1, 100)
334
+ });
335
+ client = new JobQueueClient({
336
+ storage,
337
+ queueName: HF_TRANSFORMERS_ONNX
338
+ });
339
+ client.attach(server);
340
+ getTaskQueueRegistry().registerQueue({ server, client, storage });
341
+ }
206
342
  }
207
343
  // src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts
208
344
  import { env } from "@sroussey/transformers";
209
- import { getAiProviderRegistry as getAiProviderRegistry2 } from "@workglow/ai";
210
- async function register_HFT_InlineJobFns() {
345
+ import { AiJob as AiJob2, getAiProviderRegistry as getAiProviderRegistry2 } from "@workglow/ai";
346
+ import { ConcurrencyLimiter as ConcurrencyLimiter2, JobQueueClient as JobQueueClient2, JobQueueServer as JobQueueServer2 } from "@workglow/job-queue";
347
+ import { InMemoryQueueStorage as InMemoryQueueStorage2 } from "@workglow/storage";
348
+ import { getTaskQueueRegistry as getTaskQueueRegistry2 } from "@workglow/task-graph";
349
+ async function register_HFT_InlineJobFns(client) {
211
350
  env.backends.onnx.wasm.proxy = true;
212
351
  const ProviderRegistry = getAiProviderRegistry2();
213
352
  const fns = {
@@ -222,6 +361,22 @@ async function register_HFT_InlineJobFns() {
222
361
  for (const [jobName, fn] of Object.entries(fns)) {
223
362
  ProviderRegistry.registerRunFn(HF_TRANSFORMERS_ONNX, jobName, fn);
224
363
  }
364
+ if (!client) {
365
+ const storage = new InMemoryQueueStorage2(HF_TRANSFORMERS_ONNX);
366
+ await storage.setupDatabase();
367
+ const server = new JobQueueServer2(AiJob2, {
368
+ storage,
369
+ queueName: HF_TRANSFORMERS_ONNX,
370
+ limiter: new ConcurrencyLimiter2(1, 100)
371
+ });
372
+ client = new JobQueueClient2({
373
+ storage,
374
+ queueName: HF_TRANSFORMERS_ONNX
375
+ });
376
+ client.attach(server);
377
+ getTaskQueueRegistry2().registerQueue({ server, client, storage });
378
+ await server.start();
379
+ }
225
380
  }
226
381
  // src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts
227
382
  import {
@@ -253,7 +408,7 @@ var TFMP_Download = async (input, model, onProgress, signal) => {
253
408
  const textFiles = await FilesetResolver.forTextTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm");
254
409
  const embedder = await TextEmbedder.createFromOptions(textFiles, {
255
410
  baseOptions: {
256
- modelAssetPath: model.url
411
+ modelAssetPath: model.providerConfig.modelPath
257
412
  }
258
413
  });
259
414
  return {
@@ -265,7 +420,7 @@ var TFMP_TextEmbedding = async (input, model, onProgress, signal) => {
265
420
  onProgress(0.1, "Model loaded");
266
421
  const embedder = await TextEmbedder.createFromOptions(textFiles, {
267
422
  baseOptions: {
268
- modelAssetPath: model.url
423
+ modelAssetPath: model.providerConfig.modelPath
269
424
  }
270
425
  });
271
426
  if (signal.aborted) {
@@ -281,10 +436,47 @@ var TFMP_TextEmbedding = async (input, model, onProgress, signal) => {
281
436
  vector: embedding
282
437
  };
283
438
  };
439
+ // src/tf-mediapipe/common/TFMP_ModelSchema.ts
440
+ import { ModelSchema as ModelSchema2 } from "@workglow/ai";
441
+ var TFMPModelSchema = {
442
+ type: "object",
443
+ properties: {
444
+ provider: {
445
+ const: TENSORFLOW_MEDIAPIPE,
446
+ description: "Discriminator: TensorFlow MediaPipe backend."
447
+ },
448
+ providerConfig: {
449
+ type: "object",
450
+ description: "TensorFlow MediaPipe-specific options.",
451
+ properties: {
452
+ modelPath: {
453
+ type: "string",
454
+ description: "Filesystem path or URI for the ONNX model."
455
+ }
456
+ },
457
+ required: ["modelPath"],
458
+ additionalProperties: false
459
+ }
460
+ },
461
+ required: ["provider", "providerConfig"],
462
+ additionalProperties: true
463
+ };
464
+ var ExtendedModelSchema2 = {
465
+ type: "object",
466
+ properties: {
467
+ ...ModelSchema2.properties,
468
+ ...TFMPModelSchema.properties
469
+ },
470
+ required: [...ModelSchema2.required, ...TFMPModelSchema.required],
471
+ additionalProperties: false
472
+ };
284
473
  // src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts
285
- import { getAiProviderRegistry as getAiProviderRegistry3 } from "@workglow/ai";
474
+ import { AiJob as AiJob3, getAiProviderRegistry as getAiProviderRegistry3 } from "@workglow/ai";
475
+ import { ConcurrencyLimiter as ConcurrencyLimiter3, JobQueueClient as JobQueueClient3, JobQueueServer as JobQueueServer3 } from "@workglow/job-queue";
476
+ import { InMemoryQueueStorage as InMemoryQueueStorage3 } from "@workglow/storage";
477
+ import { getTaskQueueRegistry as getTaskQueueRegistry3 } from "@workglow/task-graph";
286
478
  import { globalServiceRegistry as globalServiceRegistry3, WORKER_MANAGER as WORKER_MANAGER2 } from "@workglow/util";
287
- var register_TFMP_ClientJobFns = (worker) => {
479
+ async function register_TFMP_ClientJobFns(worker, client) {
288
480
  const workerManager = globalServiceRegistry3.get(WORKER_MANAGER2);
289
481
  workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);
290
482
  const aiProviderRegistry = getAiProviderRegistry3();
@@ -292,14 +484,49 @@ var register_TFMP_ClientJobFns = (worker) => {
292
484
  for (const name of names) {
293
485
  aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);
294
486
  }
295
- };
487
+ if (!client) {
488
+ const storage = new InMemoryQueueStorage3(TENSORFLOW_MEDIAPIPE);
489
+ await storage.setupDatabase();
490
+ const server = new JobQueueServer3(AiJob3, {
491
+ storage,
492
+ queueName: TENSORFLOW_MEDIAPIPE,
493
+ limiter: new ConcurrencyLimiter3(1, 100)
494
+ });
495
+ client = new JobQueueClient3({
496
+ storage,
497
+ queueName: TENSORFLOW_MEDIAPIPE
498
+ });
499
+ client.attach(server);
500
+ getTaskQueueRegistry3().registerQueue({ server, client, storage });
501
+ await server.start();
502
+ }
503
+ }
296
504
  // src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts
297
- import { getAiProviderRegistry as getAiProviderRegistry4 } from "@workglow/ai";
298
- var register_TFMP_InlineJobFns = () => {
505
+ import { AiJob as AiJob4, getAiProviderRegistry as getAiProviderRegistry4 } from "@workglow/ai";
506
+ import { ConcurrencyLimiter as ConcurrencyLimiter4, JobQueueClient as JobQueueClient4, JobQueueServer as JobQueueServer4 } from "@workglow/job-queue";
507
+ import { InMemoryQueueStorage as InMemoryQueueStorage4 } from "@workglow/storage";
508
+ import { getTaskQueueRegistry as getTaskQueueRegistry4 } from "@workglow/task-graph";
509
+ async function register_TFMP_InlineJobFns(client) {
299
510
  const aiProviderRegistry = getAiProviderRegistry4();
300
511
  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "DownloadModelTask", TFMP_Download);
301
512
  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "TextEmbeddingTask", TFMP_TextEmbedding);
302
- };
513
+ if (!client) {
514
+ const storage = new InMemoryQueueStorage4(TENSORFLOW_MEDIAPIPE);
515
+ await storage.setupDatabase();
516
+ const server = new JobQueueServer4(AiJob4, {
517
+ storage,
518
+ queueName: TENSORFLOW_MEDIAPIPE,
519
+ limiter: new ConcurrencyLimiter4(1, 100)
520
+ });
521
+ client = new JobQueueClient4({
522
+ storage,
523
+ queueName: TENSORFLOW_MEDIAPIPE
524
+ });
525
+ client.attach(server);
526
+ getTaskQueueRegistry4().registerQueue({ server, client, storage });
527
+ await server.start();
528
+ }
529
+ }
303
530
  // src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts
304
531
  import {
305
532
  createServiceToken as createServiceToken2,
@@ -321,12 +548,18 @@ export {
321
548
  register_TFMP_ClientJobFns,
322
549
  register_HFT_InlineJobFns,
323
550
  register_HFT_ClientJobFns,
551
+ VisionPipelineUseCase,
552
+ TextPipelineUseCase,
324
553
  TFMP_WORKER_JOBRUN_REGISTER,
325
554
  TFMP_WORKER_JOBRUN,
326
555
  TFMP_TextEmbedding,
327
556
  TFMP_Download,
557
+ TFMPModelSchema,
328
558
  TENSORFLOW_MEDIAPIPE,
329
- QUANTIZATION_DATA_TYPES,
559
+ QuantizationDataType,
560
+ PipelineUseCase,
561
+ MultimodalPipelineUseCase,
562
+ HfTransformersOnnxModelSchema,
330
563
  HF_TRANSFORMERS_ONNX,
331
564
  HFT_WORKER_JOBRUN_REGISTER,
332
565
  HFT_WORKER_JOBRUN,
@@ -336,7 +569,8 @@ export {
336
569
  HFT_TextQuestionAnswer,
337
570
  HFT_TextGeneration,
338
571
  HFT_TextEmbedding,
339
- HFT_Download
572
+ HFT_Download,
573
+ AudioPipelineUseCase
340
574
  };
341
575
 
342
- //# debugId=8CC1F78E1614B52764756E2164756E21
576
+ //# debugId=DB92C181C46D973A64756E2164756E21
package/dist/index.js.map CHANGED
@@ -1,19 +1,21 @@
1
1
  {
2
2
  "version": 3,
3
- "sources": ["../src/hf-transformers/common/HFT_Constants.ts", "../src/hf-transformers/common/HFT_JobRunFns.ts", "../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts", "../src/tf-mediapipe/common/TFMP_Constants.ts", "../src/tf-mediapipe/common/TFMP_JobRunFns.ts", "../src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts"],
3
+ "sources": ["../src/hf-transformers/common/HFT_Constants.ts", "../src/hf-transformers/common/HFT_JobRunFns.ts", "../src/hf-transformers/common/HFT_ModelSchema.ts", "../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts", "../src/tf-mediapipe/common/TFMP_Constants.ts", "../src/tf-mediapipe/common/TFMP_JobRunFns.ts", "../src/tf-mediapipe/common/TFMP_ModelSchema.ts", "../src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts"],
4
4
  "sourcesContent": [
5
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const HF_TRANSFORMERS_ONNX = \"HF_TRANSFORMERS_ONNX\";\n\nexport enum QUANTIZATION_DATA_TYPES {\n auto = \"auto\", // Auto-detect based on environment\n fp32 = \"fp32\",\n fp16 = \"fp16\",\n q8 = \"q8\",\n int8 = \"int8\",\n uint8 = \"uint8\",\n q4 = \"q4\",\n bnb4 = \"bnb4\",\n q4f16 = \"q4f16\", // fp16 model with int4 block weight quantization\n}\n",
6
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n DocumentQuestionAnsweringSingle,\n type FeatureExtractionPipeline,\n pipeline,\n type PipelineType,\n // @ts-ignore temporary \"fix\"\n type PretrainedModelOptions,\n QuestionAnsweringPipeline,\n SummarizationPipeline,\n SummarizationSingle,\n type TextGenerationPipeline,\n TextGenerationSingle,\n TextStreamer,\n TranslationPipeline,\n TranslationSingle,\n} from \"@sroussey/transformers\";\nimport {\n AiProviderRunFn,\n type DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n Model,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n TextGenerationInputSchema,\n TextGenerationOutputSchema,\n TextQuestionAnswerInputSchema,\n TextQuestionAnswerOutputSchema,\n TextRewriterInputSchema,\n TextRewriterOutputSchema,\n TextSummaryInputSchema,\n TextSummaryOutputSchema,\n TextTranslationInputSchema,\n TextTranslationOutputSchema,\n TypedArray,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { CallbackStatus } from \"./HFT_CallbackStatus\";\nimport { QUANTIZATION_DATA_TYPES } from \"./HFT_Constants\";\n\nconst pipelines = new Map<string, any>();\n\n/**\n * Helper function to get a pipeline for a model\n */\nconst getPipeline = async (\n model: Model,\n onProgress: (progress: number, message?: string, details?: any) => void,\n options: PretrainedModelOptions = {}\n) => {\n if (pipelines.has(model.name)) {\n return pipelines.get(model.name);\n }\n\n // Create a callback status object for progress tracking\n const progressCallback = (status: CallbackStatus) => {\n const progress = status.status === \"progress\" ? 
Math.round(status.progress) : 0;\n if (status.status === \"progress\") {\n onProgress(progress, \"Downloading model\", {\n file: status.file,\n progress: status.progress,\n });\n }\n };\n\n const pipelineOptions: PretrainedModelOptions = {\n dtype: (model.quantization as QUANTIZATION_DATA_TYPES) || \"q8\",\n ...(model.use_external_data_format\n ? { use_external_data_format: model.use_external_data_format }\n : {}),\n ...(model.device ? { device: model.device as any } : {}),\n ...options,\n progress_callback: progressCallback,\n };\n\n const pipelineType = model.pipeline as PipelineType;\n const result = await pipeline(pipelineType, model.url, pipelineOptions);\n pipelines.set(model.name, result);\n return result;\n};\n\n/**\n * Core implementation for downloading and caching a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteInput\n> = async (input, model, onProgress, signal) => {\n // Download the model by creating a pipeline\n await getPipeline(model!, onProgress, { abort_signal: signal });\n\n return {\n model: input.model!,\n };\n};\n\n/**\n * Core implementation for text embedding using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\n\ntype TextEmbeddingInput = DeReplicateFromSchema<typeof TextEmbeddingInputSchema>;\ntype TextEmbeddingOutput = DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>;\nexport const HFT_TextEmbedding: AiProviderRunFn<TextEmbeddingInput, TextEmbeddingOutput> = async (\n input,\n model,\n onProgress,\n signal\n) => {\n const generateEmbedding: FeatureExtractionPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n // Generate the embedding\n const hfVector = await generateEmbedding(input.text, {\n pooling: \"mean\",\n normalize: model!.normalize,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n // Validate the embedding dimensions\n if (hfVector.size !== model!.nativeDimensions) {\n console.warn(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model!.nativeDimensions}`,\n input,\n hfVector\n );\n throw new PermanentJobError(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model!.nativeDimensions}`\n );\n }\n\n return { vector: hfVector.data as TypedArray };\n};\n\n/**\n * Core implementation for text generation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextGeneration: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextGenerationInputSchema>,\n DeReplicateFromSchema<typeof TextGenerationOutputSchema>\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, signal);\n\n let results = await generateText(input.prompt, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text translation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextTranslation: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextTranslationInputSchema>,\n DeReplicateFromSchema<typeof TextTranslationOutputSchema>\n> = async (input, model, onProgress, signal) => {\n const translate: TranslationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(translate.tokenizer, onProgress);\n\n const result = await translate(input.text, {\n src_lang: input.source_lang,\n tgt_lang: input.target_lang,\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n } as any);\n\n let translatedText: string | string[] = \"\";\n if (Array.isArray(result)) {\n translatedText = result.map((r) => (r as TranslationSingle)?.translation_text || \"\");\n } else {\n translatedText = (result as TranslationSingle)?.translation_text || \"\";\n }\n\n return {\n text: translatedText,\n target_lang: input.target_lang,\n };\n};\n\n/**\n * Core implementation for text rewriting using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextRewriter: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextRewriterInputSchema>,\n DeReplicateFromSchema<typeof TextRewriterOutputSchema>\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateText.tokenizer, onProgress);\n\n // This lib doesn't support this kind of rewriting with a separate prompt vs text\n 
const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + input.text;\n\n let results = await generateText(promptedText, {\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n\n if (text === promptedText) {\n throw new PermanentJobError(\"Rewriter failed to generate new text\");\n }\n\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text summarization using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextSummary: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextSummaryInputSchema>,\n DeReplicateFromSchema<typeof TextSummaryOutputSchema>\n> = async (input, model, onProgress, signal) => {\n const generateSummary: SummarizationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateSummary.tokenizer, onProgress);\n\n let result = await generateSummary(input.text, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n } as any);\n\n let summaryText = \"\";\n if (Array.isArray(result)) {\n summaryText = (result[0] as SummarizationSingle)?.summary_text || \"\";\n } else {\n summaryText = (result as SummarizationSingle)?.summary_text || \"\";\n }\n\n return {\n text: summaryText,\n };\n};\n\n/**\n * Core implementation for question answering using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextQuestionAnswer: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextQuestionAnswerInputSchema>,\n DeReplicateFromSchema<typeof TextQuestionAnswerOutputSchema>\n> = async (input, model, onProgress, signal) => {\n // Get the question answering pipeline\n const generateAnswer: QuestionAnsweringPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);\n\n const result = await generateAnswer(input.question, input.context, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n } as any);\n\n let answerText = \"\";\n if (Array.isArray(result)) {\n answerText = (result[0] as DocumentQuestionAnsweringSingle)?.answer || \"\";\n } else {\n answerText = (result as DocumentQuestionAnsweringSingle)?.answer || \"\";\n }\n\n return {\n text: answerText,\n };\n};\n\n/**\n * Create a text streamer for a given tokenizer and update progress function\n * @param tokenizer - The tokenizer to use for the streamer\n * @param updateProgress - The function to call to update the progress\n * @param signal - The signal to use for the streamer for aborting\n * @returns The text streamer\n */\nfunction createTextStreamer(\n tokenizer: any,\n updateProgress: (progress: number, message?: string, details?: any) => void,\n signal?: AbortSignal\n) {\n let count = 0;\n return new TextStreamer(tokenizer, {\n skip_prompt: true,\n decode_kwargs: { skip_special_tokens: true },\n callback_function: (text: string) => {\n count++;\n const result = 100 * (1 - Math.exp(-0.05 * count));\n const progress = Math.round(Math.min(result, 100));\n updateProgress(progress, \"Generating\", { text, progress });\n },\n ...(signal ? { abort_signal: signal } : {}),\n });\n}\n",
7
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\n\nexport async function register_HFT_ClientJobFns(worker: Worker) {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n\n workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);\n\n const ProviderRegistry = getAiProviderRegistry();\n const names = [\n \"DownloadModelTask\",\n \"TextEmbeddingTask\",\n \"TextGenerationTask\",\n \"TextTranslationTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"TextQuestionAnswerTask\",\n ];\n for (const name of names) {\n ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);\n }\n}\n",
8
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { env } from \"@sroussey/transformers\";\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\nimport {\n HFT_Download,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\nexport async function register_HFT_InlineJobFns() {\n // @ts-ignore\n env.backends.onnx.wasm.proxy = true;\n const ProviderRegistry = getAiProviderRegistry();\n const fns = {\n [\"DownloadModelTask\"]: HFT_Download,\n [\"TextEmbeddingTask\"]: HFT_TextEmbedding,\n [\"TextGenerationTask\"]: HFT_TextGeneration,\n [\"TextQuestionAnswerTask\"]: HFT_TextQuestionAnswer,\n [\"TextRewriterTask\"]: HFT_TextRewriter,\n [\"TextSummaryTask\"]: HFT_TextSummary,\n [\"TextTranslationTask\"]: HFT_TextTranslation,\n };\n for (const [jobName, fn] of Object.entries(fns)) {\n ProviderRegistry.registerRunFn<any, any>(HF_TRANSFORMERS_ONNX, jobName, fn);\n }\n}\n",
5
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const HF_TRANSFORMERS_ONNX = \"HF_TRANSFORMERS_ONNX\";\n\nexport type QuantizationDataType =\n | \"auto\" // Auto-detect based on environment\n | \"fp32\"\n | \"fp16\"\n | \"q8\"\n | \"int8\"\n | \"uint8\"\n | \"q4\"\n | \"bnb4\"\n | \"q4f16\"; // fp16 model with int4 block weight quantization\n\nexport const QuantizationDataType = {\n auto: \"auto\",\n fp32: \"fp32\",\n fp16: \"fp16\",\n q8: \"q8\",\n int8: \"int8\",\n uint8: \"uint8\",\n q4: \"q4\",\n bnb4: \"bnb4\",\n q4f16: \"q4f16\",\n} as const satisfies Record<QuantizationDataType, QuantizationDataType>;\n\nexport type TextPipelineUseCase =\n | \"fill-mask\" // https://huggingface.co/tasks/fill-mask\n | \"token-classification\" // https://huggingface.co/tasks/token-classification\n | \"text-generation\" // https://huggingface.co/tasks/text-generation#completion-generation-models\n | \"text2text-generation\" // https://huggingface.co/tasks/text-generation#text-to-text-generation-models\n | \"text-classification\" // https://huggingface.co/tasks/text-classification\n | \"summarization\" // https://huggingface.co/tasks/sentence-similarity\n | \"translation\" // https://huggingface.co/tasks/translation\n | \"feature-extraction\" // https://huggingface.co/tasks/feature-extraction\n | \"zero-shot-classification\" // https://huggingface.co/tasks/zero-shot-classification\n | \"question-answering\"; // https://huggingface.co/tasks/question-answering\n\nexport const TextPipelineUseCase = {\n \"fill-mask\": \"fill-mask\",\n \"token-classification\": \"token-classification\",\n \"text-generation\": \"text-generation\",\n \"text2text-generation\": \"text2text-generation\",\n \"text-classification\": \"text-classification\",\n summarization: \"summarization\",\n translation: \"translation\",\n \"feature-extraction\": \"feature-extraction\",\n \"zero-shot-classification\": 
\"zero-shot-classification\",\n \"question-answering\": \"question-answering\",\n} as const satisfies Record<TextPipelineUseCase, TextPipelineUseCase>;\n\nexport type VisionPipelineUseCase =\n | \"background-removal\" // https://huggingface.co/tasks/image-segmentation#background-removal\n | \"image-segmentation\" // https://huggingface.co/tasks/image-segmentation\n | \"depth-estimation\" // https://huggingface.co/tasks/depth-estimation\n | \"image-classification\" // https://huggingface.co/tasks/image-classification\n | \"image-to-image\" // https://huggingface.co/tasks/image-to-image\n | \"object-detection\" // https://huggingface.co/tasks/object-detection\n | \"image-feature-extraction\"; // https://huggingface.co/tasks/image-feature-extraction\n\nexport const VisionPipelineUseCase = {\n \"background-removal\": \"background-removal\",\n \"image-segmentation\": \"image-segmentation\",\n \"depth-estimation\": \"depth-estimation\",\n \"image-classification\": \"image-classification\",\n \"image-to-image\": \"image-to-image\",\n \"object-detection\": \"object-detection\",\n \"image-feature-extraction\": \"image-feature-extraction\",\n} as const satisfies Record<VisionPipelineUseCase, VisionPipelineUseCase>;\n\nexport type AudioPipelineUseCase =\n | \"audio-classification\" // https://huggingface.co/tasks/audio-classification\n | \"automatic-speech-recognition\" // https://huggingface.co/tasks/automatic-speech-recognition\n | \"text-to-speech\"; // https://huggingface.co/tasks/text-to-speech\n\nexport const AudioPipelineUseCase = {\n \"audio-classification\": \"audio-classification\",\n \"automatic-speech-recognition\": \"automatic-speech-recognition\",\n \"text-to-speech\": \"text-to-speech\",\n} as const satisfies Record<AudioPipelineUseCase, AudioPipelineUseCase>;\n\nexport type MultimodalPipelineUseCase =\n | \"document-question-answering\" // https://huggingface.co/tasks/document-question-answering\n | \"image-to-text\" // 
https://huggingface.co/tasks/image-to-text\n | \"zero-shot-audio-classification\" // https://huggingface.co/tasks/zero-shot-audio-classification\n | \"zero-shot-image-classification\" // https://huggingface.co/tasks/zero-shot-image-classification\n | \"zero-shot-object-detection\"; // https://huggingface.co/tasks/zero-shot-object-detection\n\nexport const MultimodalPipelineUseCase = {\n \"document-question-answering\": \"document-question-answering\",\n \"image-to-text\": \"image-to-text\",\n \"zero-shot-audio-classification\": \"zero-shot-audio-classification\",\n \"zero-shot-image-classification\": \"zero-shot-image-classification\",\n \"zero-shot-object-detection\": \"zero-shot-object-detection\",\n} as const satisfies Record<MultimodalPipelineUseCase, MultimodalPipelineUseCase>;\n\nexport type PipelineUseCase =\n | TextPipelineUseCase\n | VisionPipelineUseCase\n | AudioPipelineUseCase\n | MultimodalPipelineUseCase;\n\nexport const PipelineUseCase = {\n ...TextPipelineUseCase,\n ...VisionPipelineUseCase,\n ...AudioPipelineUseCase,\n ...MultimodalPipelineUseCase,\n} as const satisfies Record<PipelineUseCase, PipelineUseCase>;\n",
6
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n DocumentQuestionAnsweringSingle,\n type FeatureExtractionPipeline,\n pipeline,\n // @ts-ignore temporary \"fix\"\n type PretrainedModelOptions,\n QuestionAnsweringPipeline,\n SummarizationPipeline,\n SummarizationSingle,\n type TextGenerationPipeline,\n TextGenerationSingle,\n TextStreamer,\n TranslationPipeline,\n TranslationSingle,\n} from \"@sroussey/transformers\";\nimport {\n AiProviderRunFn,\n type DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n TextGenerationInputSchema,\n TextGenerationOutputSchema,\n TextQuestionAnswerInputSchema,\n TextQuestionAnswerOutputSchema,\n TextRewriterInputSchema,\n TextRewriterOutputSchema,\n TextSummaryInputSchema,\n TextSummaryOutputSchema,\n TextTranslationInputSchema,\n TextTranslationOutputSchema,\n TypedArray,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { CallbackStatus } from \"./HFT_CallbackStatus\";\nimport { HfTransformersOnnxModelRecord } from \"./HFT_ModelSchema\";\n\nconst pipelines = new Map<string, any>();\n\n/**\n * Helper function to get a pipeline for a model\n */\nconst getPipeline = async (\n model: HfTransformersOnnxModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n options: PretrainedModelOptions = {}\n) => {\n if (pipelines.has(model.model_id)) {\n return pipelines.get(model.model_id);\n }\n\n // Create a callback status object for progress tracking\n const progressCallback = (status: CallbackStatus) => {\n const progress = status.status === \"progress\" ? 
Math.round(status.progress) : 0;\n if (status.status === \"progress\") {\n onProgress(progress, \"Downloading model\", {\n file: status.file,\n progress: status.progress,\n });\n }\n };\n\n const pipelineOptions: PretrainedModelOptions = {\n dtype: model.providerConfig.dType || \"q8\",\n ...(model.providerConfig.useExternalDataFormat\n ? { use_external_data_format: model.providerConfig.useExternalDataFormat }\n : {}),\n ...(model.providerConfig.device ? { device: model.providerConfig.device as any } : {}),\n ...options,\n progress_callback: progressCallback,\n };\n\n const pipelineType = model.providerConfig.pipeline;\n const result = await pipeline(pipelineType, model.providerConfig.modelPath, pipelineOptions);\n pipelines.set(model.model_id, result);\n return result;\n};\n\n/**\n * Core implementation for downloading and caching a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteInput,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Download the model by creating a pipeline\n await getPipeline(model!, onProgress, { abort_signal: signal });\n\n return {\n model: input.model!,\n };\n};\n\n/**\n * Core implementation for text embedding using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\n\nexport const HFT_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateEmbedding: FeatureExtractionPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n // Generate the embedding\n const hfVector = await generateEmbedding(input.text, {\n pooling: \"mean\",\n normalize: model?.providerConfig.normalize,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n // Validate the embedding dimensions\n if (hfVector.size !== model?.providerConfig.nativeDimensions) {\n console.warn(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`,\n input,\n hfVector\n );\n throw new PermanentJobError(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`\n );\n }\n\n return { vector: hfVector.data as TypedArray };\n};\n\n/**\n * Core implementation for text generation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextGeneration: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextGenerationInputSchema>,\n DeReplicateFromSchema<typeof TextGenerationOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, signal);\n\n let results = await generateText(input.prompt, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text translation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextTranslation: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextTranslationInputSchema>,\n DeReplicateFromSchema<typeof TextTranslationOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const translate: TranslationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(translate.tokenizer, onProgress);\n\n const result = await translate(input.text, {\n src_lang: input.source_lang,\n tgt_lang: input.target_lang,\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n } as any);\n\n let translatedText: string | string[] = \"\";\n if (Array.isArray(result)) {\n translatedText = result.map((r) => (r as TranslationSingle)?.translation_text || \"\");\n } else {\n translatedText = (result as TranslationSingle)?.translation_text || \"\";\n }\n\n return {\n text: translatedText,\n target_lang: input.target_lang,\n };\n};\n\n/**\n * Core implementation for text rewriting using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextRewriter: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextRewriterInputSchema>,\n DeReplicateFromSchema<typeof TextRewriterOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateText.tokenizer, onProgress);\n\n // This lib doesn't 
support this kind of rewriting with a separate prompt vs text\n const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + input.text;\n\n let results = await generateText(promptedText, {\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n\n if (text === promptedText) {\n throw new PermanentJobError(\"Rewriter failed to generate new text\");\n }\n\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text summarization using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextSummary: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextSummaryInputSchema>,\n DeReplicateFromSchema<typeof TextSummaryOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateSummary: SummarizationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateSummary.tokenizer, onProgress);\n\n let result = await generateSummary(input.text, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n } as any);\n\n let summaryText = \"\";\n if (Array.isArray(result)) {\n summaryText = (result[0] as SummarizationSingle)?.summary_text || \"\";\n } else {\n summaryText = (result as SummarizationSingle)?.summary_text || \"\";\n }\n\n return {\n text: summaryText,\n };\n};\n\n/**\n * Core implementation for question answering using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextQuestionAnswer: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextQuestionAnswerInputSchema>,\n DeReplicateFromSchema<typeof TextQuestionAnswerOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Get the question answering pipeline\n const generateAnswer: QuestionAnsweringPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);\n\n const result = await generateAnswer(input.question, input.context, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n } as any);\n\n let answerText = \"\";\n if (Array.isArray(result)) {\n answerText = (result[0] as DocumentQuestionAnsweringSingle)?.answer || \"\";\n } else {\n answerText = (result as DocumentQuestionAnsweringSingle)?.answer || \"\";\n }\n\n return {\n text: answerText,\n };\n};\n\n/**\n * Create a text streamer for a given tokenizer and update progress function\n * @param tokenizer - The tokenizer to use for the streamer\n * @param updateProgress - The function to call to update the progress\n * @param signal - The signal to use for the streamer for aborting\n * @returns The text streamer\n */\nfunction createTextStreamer(\n tokenizer: any,\n updateProgress: (progress: number, message?: string, details?: any) => void,\n signal?: AbortSignal\n) {\n let count = 0;\n return new TextStreamer(tokenizer, {\n skip_prompt: true,\n decode_kwargs: { skip_special_tokens: true },\n callback_function: (text: string) => {\n count++;\n const result = 100 * (1 - Math.exp(-0.05 * count));\n const progress = Math.round(Math.min(result, 100));\n updateProgress(progress, \"Generating\", { text, progress });\n },\n ...(signal ? { abort_signal: signal } : {}),\n });\n}\n",
7
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ModelSchema } from \"@workglow/ai\";\nimport { DataPortSchemaObject, FromSchema } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX, PipelineUseCase, QuantizationDataType } from \"./HFT_Constants\";\n\nexport const HfTransformersOnnxModelSchema = {\n type: \"object\",\n properties: {\n provider: {\n const: HF_TRANSFORMERS_ONNX,\n description: \"Discriminator: ONNX runtime backend.\",\n },\n providerConfig: {\n type: \"object\",\n description: \"ONNX runtime-specific options.\",\n properties: {\n pipeline: {\n type: \"string\",\n enum: Object.values(PipelineUseCase),\n description: \"Pipeline type for the ONNX model.\",\n default: \"text-generation\",\n },\n modelPath: {\n type: \"string\",\n description: \"Filesystem path or URI for the ONNX model.\",\n },\n dType: {\n type: \"string\",\n enum: Object.values(QuantizationDataType),\n description: \"Data type for the ONNX model.\",\n default: \"float32\",\n },\n device: {\n type: \"string\",\n enum: [\"cpu\", \"gpu\", \"webgpu\", \"wasm\", \"metal\"],\n description: \"High-level device selection.\",\n default: \"webgpu\",\n },\n executionProviders: {\n type: \"array\",\n items: { type: \"string\" },\n description: \"Raw ONNX Runtime execution provider identifiers.\",\n },\n intraOpNumThreads: {\n type: \"integer\",\n minimum: 1,\n },\n interOpNumThreads: {\n type: \"integer\",\n minimum: 1,\n },\n useExternalDataFormat: {\n type: \"boolean\",\n description: \"Whether the model uses external data format.\",\n },\n nativeDimensions: {\n type: \"integer\",\n description: \"The native dimensions of the model.\",\n },\n normalize: {\n type: \"boolean\",\n description: \"Whether the model uses normalization.\",\n },\n languageStyle: {\n type: \"string\",\n description: \"The language style of the model.\",\n },\n },\n required: [\"modelPath\", \"pipeline\"],\n additionalProperties: 
false,\n },\n },\n required: [\"provider\", \"providerConfig\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\n\nconst ExtendedModelSchema = {\n type: \"object\",\n properties: {\n ...ModelSchema.properties,\n ...HfTransformersOnnxModelSchema.properties,\n },\n required: [...ModelSchema.required, ...HfTransformersOnnxModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type HfTransformersOnnxModelRecord = FromSchema<typeof ExtendedModelSchema>;\n",
8
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\n\n/**\n * Registers the HuggingFace Transformers client job functions with a web worker.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param worker - The web worker to use for job execution\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_HFT_ClientJobFns(\n worker: Worker,\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n\n workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);\n\n const ProviderRegistry = getAiProviderRegistry();\n const names = [\n \"DownloadModelTask\",\n \"TextEmbeddingTask\",\n \"TextGenerationTask\",\n \"TextTranslationTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"TextQuestionAnswerTask\",\n ];\n for (const name of names) {\n ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);\n }\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n HF_TRANSFORMERS_ONNX\n );\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, 
TaskOutput>({\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n // await server.start();\n }\n}\n",
9
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { env } from \"@sroussey/transformers\";\nimport { AiJob, AiJobInput, AiProviderRunFn, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\nimport {\n HFT_Download,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\n/**\n * Registers the HuggingFace Transformers inline job functions for same-thread execution.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param client - Optional existing JobQueueClient. 
If not provided, creates a default in-memory queue.\n */\nexport async function register_HFT_InlineJobFns(\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n // @ts-ignore\n env.backends.onnx.wasm.proxy = true;\n const ProviderRegistry = getAiProviderRegistry();\n const fns: Record<string, AiProviderRunFn<any, any, any>> = {\n [\"DownloadModelTask\"]: HFT_Download,\n [\"TextEmbeddingTask\"]: HFT_TextEmbedding,\n [\"TextGenerationTask\"]: HFT_TextGeneration,\n [\"TextQuestionAnswerTask\"]: HFT_TextQuestionAnswer,\n [\"TextRewriterTask\"]: HFT_TextRewriter,\n [\"TextSummaryTask\"]: HFT_TextSummary,\n [\"TextTranslationTask\"]: HFT_TextTranslation,\n };\n for (const [jobName, fn] of Object.entries(fns)) {\n ProviderRegistry.registerRunFn<any, any>(HF_TRANSFORMERS_ONNX, jobName, fn);\n }\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n HF_TRANSFORMERS_ONNX\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
9
10
  "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n parentPort,\n WORKER_SERVER,\n} from \"@workglow/util\";\nimport {\n HFT_Download,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\nexport const HFT_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.hft\");\n\nexport const HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n HFT_WORKER_JOBRUN,\n () => {\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n workerServer.registerFunction(\"DownloadModelTask\", HFT_Download);\n workerServer.registerFunction(\"TextEmbeddingTask\", HFT_TextEmbedding);\n workerServer.registerFunction(\"TextGenerationTask\", HFT_TextGeneration);\n workerServer.registerFunction(\"TextTranslationTask\", HFT_TextTranslation);\n workerServer.registerFunction(\"TextRewriterTask\", HFT_TextRewriter);\n workerServer.registerFunction(\"TextSummaryTask\", HFT_TextSummary);\n workerServer.registerFunction(\"TextQuestionAnswerTask\", HFT_TextQuestionAnswer);\n parentPort.postMessage({ type: \"ready\" });\n console.log(\"HFT_WORKER_JOBRUN registered\");\n return workerServer;\n },\n true\n);\n",
10
11
  "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const TENSORFLOW_MEDIAPIPE = \"TENSORFLOW_MEDIAPIPE\";\n",
11
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { FilesetResolver, TextEmbedder } from \"@mediapipe/tasks-text\";\nimport type {\n AiProviderRunFn,\n DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\n\n/**\n * Core implementation for downloading and caching a MediaPipe TFJS model.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput\n> = async (input, model, onProgress, signal) => {\n const textFiles = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n\n // Create an embedder to get dimensions\n const embedder = await TextEmbedder.createFromOptions(textFiles, {\n baseOptions: {\n modelAssetPath: model!.url,\n },\n });\n\n return {\n model: input.model,\n };\n};\n\n/**\n * Core implementation for text embedding using MediaPipe TFJS.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>\n> = async (input, model, onProgress, signal) => {\n const textFiles = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n\n onProgress(0.1, \"Model loaded\");\n\n const embedder = await TextEmbedder.createFromOptions(textFiles, {\n baseOptions: {\n modelAssetPath: model!.url,\n },\n });\n\n if (signal.aborted) {\n throw new PermanentJobError(\"Aborted job\");\n }\n\n onProgress(0.2, \"Embedding text\");\n\n const result = embedder.embed(input.text);\n\n if (!result.embeddings?.[0]?.floatEmbedding) {\n 
throw new PermanentJobError(\"Failed to generate embedding: Empty result\");\n }\n\n const embedding = Float32Array.from(result.embeddings[0].floatEmbedding);\n\n return {\n vector: embedding,\n };\n};\n",
12
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\nexport const register_TFMP_ClientJobFns = (worker: Worker) => {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);\n\n const aiProviderRegistry = getAiProviderRegistry();\n const names = [\"DownloadModelTask\", \"TextEmbeddingTask\"];\n for (const name of names) {\n aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);\n }\n};\n",
13
- "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\nimport { TFMP_Download, TFMP_TextEmbedding } from \"../common/TFMP_JobRunFns\";\n\nexport const register_TFMP_InlineJobFns = () => {\n const aiProviderRegistry = getAiProviderRegistry();\n\n aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, \"DownloadModelTask\", TFMP_Download);\n aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, \"TextEmbeddingTask\", TFMP_TextEmbedding);\n};\n",
12
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { FilesetResolver, TextEmbedder } from \"@mediapipe/tasks-text\";\nimport type {\n AiProviderRunFn,\n DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { TFMPModelRecord } from \"./TFMP_ModelSchema\";\n\n/**\n * Core implementation for downloading and caching a MediaPipe TFJS model.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n const textFiles = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n\n // Create an embedder to get dimensions\n const embedder = await TextEmbedder.createFromOptions(textFiles, {\n baseOptions: {\n modelAssetPath: model!.providerConfig.modelPath,\n },\n });\n\n return {\n model: input.model,\n };\n};\n\n/**\n * Core implementation for text embedding using MediaPipe TFJS.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n const textFiles = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n\n onProgress(0.1, \"Model loaded\");\n\n const embedder = await TextEmbedder.createFromOptions(textFiles, {\n baseOptions: {\n modelAssetPath: model!.providerConfig.modelPath,\n },\n });\n\n if (signal.aborted) {\n throw new PermanentJobError(\"Aborted job\");\n }\n\n 
onProgress(0.2, \"Embedding text\");\n\n const result = embedder.embed(input.text);\n\n if (!result.embeddings?.[0]?.floatEmbedding) {\n throw new PermanentJobError(\"Failed to generate embedding: Empty result\");\n }\n\n const embedding = Float32Array.from(result.embeddings[0].floatEmbedding);\n\n return {\n vector: embedding,\n };\n};\n",
13
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ModelSchema } from \"@workglow/ai\";\nimport { DataPortSchemaObject, FromSchema } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\nexport const TFMPModelSchema = {\n type: \"object\",\n properties: {\n provider: {\n const: TENSORFLOW_MEDIAPIPE,\n description: \"Discriminator: TensorFlow MediaPipe backend.\",\n },\n providerConfig: {\n type: \"object\",\n description: \"TensorFlow MediaPipe-specific options.\",\n properties: {\n modelPath: {\n type: \"string\",\n description: \"Filesystem path or URI for the ONNX model.\",\n },\n },\n required: [\"modelPath\"],\n additionalProperties: false,\n },\n },\n required: [\"provider\", \"providerConfig\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\n\nconst ExtendedModelSchema = {\n type: \"object\",\n properties: {\n ...ModelSchema.properties,\n ...TFMPModelSchema.properties,\n },\n required: [...ModelSchema.required, ...TFMPModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type TFMPModelRecord = FromSchema<typeof ExtendedModelSchema>;\n",
14
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\n/**\n * Registers the TensorFlow MediaPipe client job functions with a web worker.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param worker - The web worker to use for job execution\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_TFMP_ClientJobFns(\n worker: Worker,\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);\n\n const aiProviderRegistry = getAiProviderRegistry();\n const names = [\"DownloadModelTask\", \"TextEmbeddingTask\"];\n for (const name of names) {\n aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);\n }\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n TENSORFLOW_MEDIAPIPE\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n });\n\n client.attach(server);\n\n 
getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
15
+ "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\nimport { TFMP_Download, TFMP_TextEmbedding } from \"../common/TFMP_JobRunFns\";\n\n/**\n * Registers the TensorFlow MediaPipe inline job functions for same-thread execution.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_TFMP_InlineJobFns(\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const aiProviderRegistry = getAiProviderRegistry();\n\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"DownloadModelTask\",\n TFMP_Download as any\n );\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"TextEmbeddingTask\",\n TFMP_TextEmbedding as any\n );\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n TENSORFLOW_MEDIAPIPE\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
14
16
  "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n parentPort,\n WORKER_SERVER,\n} from \"@workglow/util\";\nimport { TFMP_Download, TFMP_TextEmbedding } from \"../common/TFMP_JobRunFns\";\n\n// Register the worker functions\nexport const TFMP_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.tfmp\");\n\nexport const TFMP_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n TFMP_WORKER_JOBRUN,\n () => {\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n workerServer.registerFunction(\"DownloadModelTask\", TFMP_Download);\n workerServer.registerFunction(\"TextEmbeddingTask\", TFMP_TextEmbedding);\n parentPort.postMessage({ type: \"ready\" });\n console.log(\"TFMP_WORKER_JOBRUN registered\");\n return workerServer;\n },\n true\n);\n"
15
17
  ],
16
- "mappings": ";AAMO,IAAM,uBAAuB;AAE7B,IAAK;AAAA,CAAL,CAAK,6BAAL;AAAA,EACL,mCAAO;AAAA,EACP,mCAAO;AAAA,EACP,mCAAO;AAAA,EACP,iCAAK;AAAA,EACL,mCAAO;AAAA,EACP,oCAAQ;AAAA,EACR,iCAAK;AAAA,EACL,mCAAO;AAAA,EACP,oCAAQ;AAAA,GATE;;ACFZ;AAAA;AAAA;AAAA;AAmCA;AAIA,IAAM,YAAY,IAAI;AAKtB,IAAM,cAAc,OAClB,OACA,YACA,UAAkC,CAAC,MAChC;AAAA,EACH,IAAI,UAAU,IAAI,MAAM,IAAI,GAAG;AAAA,IAC7B,OAAO,UAAU,IAAI,MAAM,IAAI;AAAA,EACjC;AAAA,EAGA,MAAM,mBAAmB,CAAC,WAA2B;AAAA,IACnD,MAAM,WAAW,OAAO,WAAW,aAAa,KAAK,MAAM,OAAO,QAAQ,IAAI;AAAA,IAC9E,IAAI,OAAO,WAAW,YAAY;AAAA,MAChC,WAAW,UAAU,qBAAqB;AAAA,QACxC,MAAM,OAAO;AAAA,QACb,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAAA;AAAA,EAGF,MAAM,kBAA0C;AAAA,IAC9C,OAAQ,MAAM,gBAA4C;AAAA,OACtD,MAAM,2BACN,EAAE,0BAA0B,MAAM,yBAAyB,IAC3D,CAAC;AAAA,OACD,MAAM,SAAS,EAAE,QAAQ,MAAM,OAAc,IAAI,CAAC;AAAA,OACnD;AAAA,IACH,mBAAmB;AAAA,EACrB;AAAA,EAEA,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,SAAS,MAAM,SAAS,cAAc,MAAM,KAAK,eAAe;AAAA,EACtE,UAAU,IAAI,MAAM,MAAM,MAAM;AAAA,EAChC,OAAO;AAAA;AAOF,IAAM,eAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,YAAY,OAAQ,YAAY,EAAE,cAAc,OAAO,CAAC;AAAA,EAE9D,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAUK,IAAM,oBAA8E,OACzF,OACA,OACA,YACA,WACG;AAAA,EACH,MAAM,oBAA+C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACzF,cAAc;AAAA,EAChB,CAAC;AAAA,EAGD,MAAM,WAAW,MAAM,kBAAkB,MAAM,MAAM;AAAA,IACnD,SAAS;AAAA,IACT,WAAW,MAAO;AAAA,OACd,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAGD,IAAI,SAAS,SAAS,MAAO,kBAAkB;AAAA,IAC7C,QAAQ,KACN,wEAAwE,SAAS,YAAY,MAAO,oBACpG,OACA,QACF;AAAA,IACA,MAAM,IAAI,kBACR,wEAAwE,SAAS,YAAY,MAAO,kBACtG;AAAA,EACF;AAAA,EAEA,OAAO,EAAE,QAAQ,SAAS,KAAmB;AAAA;AAOxC,IAAM,qBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EAED,MAAM,WAAW,mBAAmB,aAAa,WAAW,YAAY,MAAM;AAAA,EAE9E,IAAI,UAAU,MAAM,aAAa,MAAM,QAAQ;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EACA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EAEjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IA
AI;AAAA,EAChC;AAAA,EACA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,sBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAiC,MAAM,YAAY,OAAQ,YAAY;AAAA,IAC3E,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,UAAU,WAAW,UAAU;AAAA,EAEnE,MAAM,SAAS,MAAM,UAAU,MAAM,MAAM;AAAA,IACzC,UAAU,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,IAChB;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,iBAAoC;AAAA,EACxC,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,iBAAiB,OAAO,IAAI,CAAC,MAAO,GAAyB,oBAAoB,EAAE;AAAA,EACrF,EAAO;AAAA,IACL,iBAAkB,QAA8B,oBAAoB;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,IACN,aAAa,MAAM;AAAA,EACrB;AAAA;AAOK,IAAM,mBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,aAAa,WAAW,UAAU;AAAA,EAGtE,MAAM,gBAAgB,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAM,MAAM;AAAA,EAEvE,IAAI,UAAU,MAAM,aAAa,cAAc;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EACjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EAEA,IAAI,SAAS,cAAc;AAAA,IACzB,MAAM,IAAI,kBAAkB,sCAAsC;AAAA,EACpE;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,kBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,kBAAyC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACnF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,gBAAgB,WAAW,UAAU;AAAA,EAEzE,IAAI,SAAS,MAAM,gBAAgB,MAAM,MAAM;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,cAAc;AAAA,EAClB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,cAAe,OAAO,IAA4B,gBAAgB;AAAA,EACpE,EAAO;AAAA,IACL,cAAe,QAAgC,gBAAgB;AAAA;AAAA,EAGjE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAOK,IAAM,yBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,iBAA4C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACtF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,eAAe,WAAW,UAAU;AAAA,EAExE,MAAM,SAAS,MAAM,eAAe,MAAM,UAAU,MAAM,SAAS;AAAA,IACjE;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,aAAa;AAAA,EACjB,IAAI,MAAM,QAAQ,MAAM,
GAAG;AAAA,IACzB,aAAc,OAAO,IAAwC,UAAU;AAAA,EACzE,EAAO;AAAA,IACL,aAAc,QAA4C,UAAU;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAUF,SAAS,kBAAkB,CACzB,WACA,gBACA,QACA;AAAA,EACA,IAAI,QAAQ;AAAA,EACZ,OAAO,IAAI,aAAa,WAAW;AAAA,IACjC,aAAa;AAAA,IACb,eAAe,EAAE,qBAAqB,KAAK;AAAA,IAC3C,mBAAmB,CAAC,SAAiB;AAAA,MACnC;AAAA,MACA,MAAM,SAAS,OAAO,IAAI,KAAK,IAAI,QAAQ,KAAK;AAAA,MAChD,MAAM,WAAW,KAAK,MAAM,KAAK,IAAI,QAAQ,GAAG,CAAC;AAAA,MACjD,eAAe,UAAU,cAAc,EAAE,MAAM,SAAS,CAAC;AAAA;AAAA,OAEvD,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA;;ACnUH;AACA;AAGA,eAAsB,yBAAyB,CAAC,QAAgB;AAAA,EAC9D,MAAM,gBAAgB,sBAAsB,IAAI,cAAc;AAAA,EAE9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,mBAAmB,sBAAsB;AAAA,EAC/C,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,WAAW,QAAQ,OAAO;AAAA,IACxB,iBAAiB,sBAAsB,sBAAsB,IAAI;AAAA,EACnE;AAAA;;ACrBF;AACA,kCAAS;AAYT,eAAsB,yBAAyB,GAAG;AAAA,EAEhD,IAAI,SAAS,KAAK,KAAK,QAAQ;AAAA,EAC/B,MAAM,mBAAmB,uBAAsB;AAAA,EAC/C,MAAM,MAAM;AAAA,KACT,sBAAsB;AAAA,KACtB,sBAAsB;AAAA,KACtB,uBAAuB;AAAA,KACvB,2BAA2B;AAAA,KAC3B,qBAAqB;AAAA,KACrB,oBAAoB;AAAA,KACpB,wBAAwB;AAAA,EAC3B;AAAA,EACA,YAAY,SAAS,OAAO,OAAO,QAAQ,GAAG,GAAG;AAAA,IAC/C,iBAAiB,cAAwB,sBAAsB,SAAS,EAAE;AAAA,EAC5E;AAAA;;AC5BF;AAAA;AAAA,2BAEE;AAAA;AAAA;AAAA;AAcK,IAAM,oBAAoB,mBAAmB,wBAAwB;AAErE,IAAM,6BAA6B,uBAAsB,SAC9D,mBACA,MAAM;AAAA,EACJ,MAAM,eAAe,uBAAsB,IAAI,aAAa;AAAA,EAC5D,aAAa,iBAAiB,qBAAqB,YAAY;AAAA,EAC/D,aAAa,iBAAiB,qBAAqB,iBAAiB;AAAA,EACpE,aAAa,iBAAiB,sBAAsB,kBAAkB;AAAA,EACtE,aAAa,iBAAiB,uBAAuB,mBAAmB;AAAA,EACxE,aAAa,iBAAiB,oBAAoB,gBAAgB;AAAA,EAClE,aAAa,iBAAiB,mBAAmB,eAAe;AAAA,EAChE,aAAa,iBAAiB,0BAA0B,sBAAsB;AAAA,EAC9E,WAAW,YAAY,EAAE,MAAM,QAAQ,CAAC;AAAA,EACxC,QAAQ,IAAI,8BAA8B;AAAA,EAC1C,OAAO;AAAA,GAET,IACF;;AClCO,IAAM,uBAAuB;;ACApC;AASA,8BAAS;AAMF,IAAM,gBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAY,MAAM,gBAAgB,aACtC,gEACF;AAAA,EAGA,MAAM,WAAW,MAAM,aAAa,kBAAkB,WAAW;AAAA,IAC/D,aAAa;AAAA,MACX,gBAAgB,MAAO;AAAA,IACzB;AAAA,EACF,CAAC;AAAA,EAED,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAO
K,IAAM,qBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAY,MAAM,gBAAgB,aACtC,gEACF;AAAA,EAEA,WAAW,KAAK,cAAc;AAAA,EAE9B,MAAM,WAAW,MAAM,aAAa,kBAAkB,WAAW;AAAA,IAC/D,aAAa;AAAA,MACX,gBAAgB,MAAO;AAAA,IACzB;AAAA,EACF,CAAC;AAAA,EAED,IAAI,OAAO,SAAS;AAAA,IAClB,MAAM,IAAI,mBAAkB,aAAa;AAAA,EAC3C;AAAA,EAEA,WAAW,KAAK,gBAAgB;AAAA,EAEhC,MAAM,SAAS,SAAS,MAAM,MAAM,IAAI;AAAA,EAExC,IAAI,CAAC,OAAO,aAAa,IAAI,gBAAgB;AAAA,IAC3C,MAAM,IAAI,mBAAkB,4CAA4C;AAAA,EAC1E;AAAA,EAEA,MAAM,YAAY,aAAa,KAAK,OAAO,WAAW,GAAG,cAAc;AAAA,EAEvE,OAAO;AAAA,IACL,QAAQ;AAAA,EACV;AAAA;;ACvEF,kCAAS;AACT,kCAAS,0CAAuB;AAGzB,IAAM,6BAA6B,CAAC,WAAmB;AAAA,EAC5D,MAAM,gBAAgB,uBAAsB,IAAI,eAAc;AAAA,EAC9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,qBAAqB,uBAAsB;AAAA,EACjD,MAAM,QAAQ,CAAC,qBAAqB,mBAAmB;AAAA,EACvD,WAAW,QAAQ,OAAO;AAAA,IACxB,mBAAmB,sBAAsB,sBAAsB,IAAI;AAAA,EACrE;AAAA;;ACZF,kCAAS;AAIF,IAAM,6BAA6B,MAAM;AAAA,EAC9C,MAAM,qBAAqB,uBAAsB;AAAA,EAEjD,mBAAmB,cAAc,sBAAsB,qBAAqB,aAAa;AAAA,EACzF,mBAAmB,cAAc,sBAAsB,qBAAqB,kBAAkB;AAAA;;ACRhG;AAAA,wBACE;AAAA,2BACA;AAAA,gBACA;AAAA,mBACA;AAAA;AAKK,IAAM,qBAAqB,oBAAmB,yBAAyB;AAEvE,IAAM,8BAA8B,uBAAsB,SAC/D,oBACA,MAAM;AAAA,EACJ,MAAM,eAAe,uBAAsB,IAAI,cAAa;AAAA,EAC5D,aAAa,iBAAiB,qBAAqB,aAAa;AAAA,EAChE,aAAa,iBAAiB,qBAAqB,kBAAkB;AAAA,EACrE,YAAW,YAAY,EAAE,MAAM,QAAQ,CAAC;AAAA,EACxC,QAAQ,IAAI,+BAA+B;AAAA,EAC3C,OAAO;AAAA,GAET,IACF;",
17
- "debugId": "8CC1F78E1614B52764756E2164756E21",
18
+ "mappings": ";AAMO,IAAM,uBAAuB;AAa7B,IAAM,uBAAuB;AAAA,EAClC,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAO;AACT;AAcO,IAAM,sBAAsB;AAAA,EACjC,aAAa;AAAA,EACb,wBAAwB;AAAA,EACxB,mBAAmB;AAAA,EACnB,wBAAwB;AAAA,EACxB,uBAAuB;AAAA,EACvB,eAAe;AAAA,EACf,aAAa;AAAA,EACb,sBAAsB;AAAA,EACtB,4BAA4B;AAAA,EAC5B,sBAAsB;AACxB;AAWO,IAAM,wBAAwB;AAAA,EACnC,sBAAsB;AAAA,EACtB,sBAAsB;AAAA,EACtB,oBAAoB;AAAA,EACpB,wBAAwB;AAAA,EACxB,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,4BAA4B;AAC9B;AAOO,IAAM,uBAAuB;AAAA,EAClC,wBAAwB;AAAA,EACxB,gCAAgC;AAAA,EAChC,kBAAkB;AACpB;AASO,IAAM,4BAA4B;AAAA,EACvC,+BAA+B;AAAA,EAC/B,iBAAiB;AAAA,EACjB,kCAAkC;AAAA,EAClC,kCAAkC;AAAA,EAClC,8BAA8B;AAChC;AAQO,IAAM,kBAAkB;AAAA,KAC1B;AAAA,KACA;AAAA,KACA;AAAA,KACA;AACL;;AC1GA;AAAA;AAAA;AAAA;AAiCA;AAIA,IAAM,YAAY,IAAI;AAKtB,IAAM,cAAc,OAClB,OACA,YACA,UAAkC,CAAC,MAChC;AAAA,EACH,IAAI,UAAU,IAAI,MAAM,QAAQ,GAAG;AAAA,IACjC,OAAO,UAAU,IAAI,MAAM,QAAQ;AAAA,EACrC;AAAA,EAGA,MAAM,mBAAmB,CAAC,WAA2B;AAAA,IACnD,MAAM,WAAW,OAAO,WAAW,aAAa,KAAK,MAAM,OAAO,QAAQ,IAAI;AAAA,IAC9E,IAAI,OAAO,WAAW,YAAY;AAAA,MAChC,WAAW,UAAU,qBAAqB;AAAA,QACxC,MAAM,OAAO;AAAA,QACb,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAAA;AAAA,EAGF,MAAM,kBAA0C;AAAA,IAC9C,OAAO,MAAM,eAAe,SAAS;AAAA,OACjC,MAAM,eAAe,wBACrB,EAAE,0BAA0B,MAAM,eAAe,sBAAsB,IACvE,CAAC;AAAA,OACD,MAAM,eAAe,SAAS,EAAE,QAAQ,MAAM,eAAe,OAAc,IAAI,CAAC;AAAA,OACjF;AAAA,IACH,mBAAmB;AAAA,EACrB;AAAA,EAEA,MAAM,eAAe,MAAM,eAAe;AAAA,EAC1C,MAAM,SAAS,MAAM,SAAS,cAAc,MAAM,eAAe,WAAW,eAAe;AAAA,EAC3F,UAAU,IAAI,MAAM,UAAU,MAAM;AAAA,EACpC,OAAO;AAAA;AAOF,IAAM,eAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,YAAY,OAAQ,YAAY,EAAE,cAAc,OAAO,CAAC;AAAA,EAE9D,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAQK,IAAM,oBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,oBAA+C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACzF,cAAc;AAAA,EAChB,CAAC;AAAA,EAGD,MAAM,WAAW,MAAM,kBAAkB,MAAM,MAAM;AAAA,IACnD,SAAS;AAAA,IACT,WAAW,OAAO,eAAe;AAAA,OAC7B,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAGD,IAAI,SAAS,SAAS,OAAO,eAAe,kBAAkB;
AAAA,IAC5D,QAAQ,KACN,wEAAwE,SAAS,YAAY,OAAO,eAAe,oBACnH,OACA,QACF;AAAA,IACA,MAAM,IAAI,kBACR,wEAAwE,SAAS,YAAY,OAAO,eAAe,kBACrH;AAAA,EACF;AAAA,EAEA,OAAO,EAAE,QAAQ,SAAS,KAAmB;AAAA;AAOxC,IAAM,qBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EAED,MAAM,WAAW,mBAAmB,aAAa,WAAW,YAAY,MAAM;AAAA,EAE9E,IAAI,UAAU,MAAM,aAAa,MAAM,QAAQ;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EACA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EAEjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EACA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,sBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAiC,MAAM,YAAY,OAAQ,YAAY;AAAA,IAC3E,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,UAAU,WAAW,UAAU;AAAA,EAEnE,MAAM,SAAS,MAAM,UAAU,MAAM,MAAM;AAAA,IACzC,UAAU,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,IAChB;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,iBAAoC;AAAA,EACxC,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,iBAAiB,OAAO,IAAI,CAAC,MAAO,GAAyB,oBAAoB,EAAE;AAAA,EACrF,EAAO;AAAA,IACL,iBAAkB,QAA8B,oBAAoB;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,IACN,aAAa,MAAM;AAAA,EACrB;AAAA;AAOK,IAAM,mBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,aAAa,WAAW,UAAU;AAAA,EAGtE,MAAM,gBAAgB,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAM,MAAM;AAAA,EAEvE,IAAI,UAAU,MAAM,aAAa,cAAc;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EACjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EAEA,IAAI,SAAS,cAAc;AAAA,IACzB,MAAM,IAAI,kBAAkB,sCAAsC;AAAA,EACpE;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,kBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,kBAAyC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACnF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,gBAAgB,WAAW,UAA
U;AAAA,EAEzE,IAAI,SAAS,MAAM,gBAAgB,MAAM,MAAM;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,cAAc;AAAA,EAClB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,cAAe,OAAO,IAA4B,gBAAgB;AAAA,EACpE,EAAO;AAAA,IACL,cAAe,QAAgC,gBAAgB;AAAA;AAAA,EAGjE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAOK,IAAM,yBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,iBAA4C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACtF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,eAAe,WAAW,UAAU;AAAA,EAExE,MAAM,SAAS,MAAM,eAAe,MAAM,UAAU,MAAM,SAAS;AAAA,IACjE;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,aAAa;AAAA,EACjB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,aAAc,OAAO,IAAwC,UAAU;AAAA,EACzE,EAAO;AAAA,IACL,aAAc,QAA4C,UAAU;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAUF,SAAS,kBAAkB,CACzB,WACA,gBACA,QACA;AAAA,EACA,IAAI,QAAQ;AAAA,EACZ,OAAO,IAAI,aAAa,WAAW;AAAA,IACjC,aAAa;AAAA,IACb,eAAe,EAAE,qBAAqB,KAAK;AAAA,IAC3C,mBAAmB,CAAC,SAAiB;AAAA,MACnC;AAAA,MACA,MAAM,SAAS,OAAO,IAAI,KAAK,IAAI,QAAQ,KAAK;AAAA,MAChD,MAAM,WAAW,KAAK,MAAM,KAAK,IAAI,QAAQ,GAAG,CAAC;AAAA,MACjD,eAAe,UAAU,cAAc,EAAE,MAAM,SAAS,CAAC;AAAA;AAAA,OAEvD,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA;;ACpUH;AAIO,IAAM,gCAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,YAAY;AAAA,IACV,UAAU;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,gBAAgB;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,QACV,UAAU;AAAA,UACR,MAAM;AAAA,UACN,MAAM,OAAO,OAAO,eAAe;AAAA,UACnC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,WAAW;AAAA,UACT,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,OAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM,OAAO,OAAO,oBAAoB;AAAA,UACxC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,QAAQ;AAAA,UACN,MAAM;AAAA,UACN,MAAM,CAAC,OAAO,OAAO,UAAU,QAAQ,OAAO;AAAA,UAC9C,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,oBAAoB;AAAA,UAClB,MAAM;AAAA,UACN,OAAO,EAAE,MAAM,SAAS;AAAA,UACxB,aAAa;AAAA,QACf;AAAA,QACA,mBAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,mBAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,uBAAuB;AAAA,UACrB,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,kBAAkB;AAAA,UAChB,MAAM;AAAA,UA
CN,aAAa;AAAA,QACf;AAAA,QACA,WAAW;AAAA,UACT,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,eAAe;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA,UAAU,CAAC,aAAa,UAAU;AAAA,MAClC,sBAAsB;AAAA,IACxB;AAAA,EACF;AAAA,EACA,UAAU,CAAC,YAAY,gBAAgB;AAAA,EACvC,sBAAsB;AACxB;AAEA,IAAM,sBAAsB;AAAA,EAC1B,MAAM;AAAA,EACN,YAAY;AAAA,OACP,YAAY;AAAA,OACZ,8BAA8B;AAAA,EACnC;AAAA,EACA,UAAU,CAAC,GAAG,YAAY,UAAU,GAAG,8BAA8B,QAAQ;AAAA,EAC7E,sBAAsB;AACxB;;ACnFA;AACA;AACA;AACA;AACA;AAUA,eAAsB,yBAAyB,CAC7C,QACA,QACe;AAAA,EACf,MAAM,gBAAgB,sBAAsB,IAAI,cAAc;AAAA,EAE9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,mBAAmB,sBAAsB;AAAA,EAC/C,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,WAAW,QAAQ,OAAO;AAAA,IACxB,iBAAiB,sBAAsB,sBAAsB,IAAI;AAAA,EACnE;AAAA,EAEA,IAAI,CAAC,QAAQ;AAAA,IACX,MAAM,UAAU,IAAI,qBAClB,oBACF;AAAA,IAEA,MAAM,SAAS,IAAI,eAAkD,OAAO;AAAA,MAC1E;AAAA,MACA,WAAW;AAAA,MACX,SAAS,IAAI,mBAAmB,GAAG,GAAG;AAAA,IACxC,CAAC;AAAA,IAED,SAAS,IAAI,eAAkD;AAAA,MAC7D;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAAA,IAED,OAAO,OAAO,MAAM;AAAA,IAEpB,qBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,EAElE;AAAA;;ACxDF;AACA,kBAAS,iCAAoC;AAC7C,+BAAS,uCAAoB,mCAAgB;AAC7C,iCAAS;AACT,iCAAS;AAkBT,eAAsB,yBAAyB,CAC7C,QACe;AAAA,EAEf,IAAI,SAAS,KAAK,KAAK,QAAQ;AAAA,EAC/B,MAAM,mBAAmB,uBAAsB;AAAA,EAC/C,MAAM,MAAsD;AAAA,KACzD,sBAAsB;AAAA,KACtB,sBAAsB;AAAA,KACtB,uBAAuB;AAAA,KACvB,2BAA2B;AAAA,KAC3B,qBAAqB;AAAA,KACrB,oBAAoB;AAAA,KACpB,wBAAwB;AAAA,EAC3B;AAAA,EACA,YAAY,SAAS,OAAO,OAAO,QAAQ,GAAG,GAAG;AAAA,IAC/C,iBAAiB,cAAwB,sBAAsB,SAAS,EAAE;AAAA,EAC5E;AAAA,EAGA,IAAI,CAAC,QAAQ;AAAA,IACX,MAAM,UAAU,IAAI,sBAClB,oBACF;AAAA,IACA,MAAM,QAAQ,cAAc;AAAA,IAE5B,MAAM,SAAS,IAAI,gBAAkD,QAAO;AAAA,MAC1E;AAAA,MACA,WAAW;AAAA,MACX,SAAS,IAAI,oBAAmB,GAAG,GAAG;AAAA,IACxC,CAAC;AAAA,IAED,SAAS,IAAI,gBAAkD;AAAA,MAC7D;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAAA,IAED,OAAO,OAAO,MAAM;AAAA,IAEpB,sBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,IAChE,MAAM,OAAO,MAAM;AAAA,EACrB;AAAA;;AC/DF;AAAA;AAAA,2BAEE;AAAA;AAAA;AAAA;AAcK,IAAM,oBAAoB,mBAAmB,wBAAwB;AAErE,I
AAM,6BAA6B,uBAAsB,SAC9D,mBACA,MAAM;AAAA,EACJ,MAAM,eAAe,uBAAsB,IAAI,aAAa;AAAA,EAC5D,aAAa,iBAAiB,qBAAqB,YAAY;AAAA,EAC/D,aAAa,iBAAiB,qBAAqB,iBAAiB;AAAA,EACpE,aAAa,iBAAiB,sBAAsB,kBAAkB;AAAA,EACtE,aAAa,iBAAiB,uBAAuB,mBAAmB;AAAA,EACxE,aAAa,iBAAiB,oBAAoB,gBAAgB;AAAA,EAClE,aAAa,iBAAiB,mBAAmB,eAAe;AAAA,EAChE,aAAa,iBAAiB,0BAA0B,sBAAsB;AAAA,EAC9E,WAAW,YAAY,EAAE,MAAM,QAAQ,CAAC;AAAA,EACxC,QAAQ,IAAI,8BAA8B;AAAA,EAC1C,OAAO;AAAA,GAET,IACF;;AClCO,IAAM,uBAAuB;;ACApC;AASA,8BAAS;AAOF,IAAM,gBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAY,MAAM,gBAAgB,aACtC,gEACF;AAAA,EAGA,MAAM,WAAW,MAAM,aAAa,kBAAkB,WAAW;AAAA,IAC/D,aAAa;AAAA,MACX,gBAAgB,MAAO,eAAe;AAAA,IACxC;AAAA,EACF,CAAC;AAAA,EAED,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAOK,IAAM,qBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAY,MAAM,gBAAgB,aACtC,gEACF;AAAA,EAEA,WAAW,KAAK,cAAc;AAAA,EAE9B,MAAM,WAAW,MAAM,aAAa,kBAAkB,WAAW;AAAA,IAC/D,aAAa;AAAA,MACX,gBAAgB,MAAO,eAAe;AAAA,IACxC;AAAA,EACF,CAAC;AAAA,EAED,IAAI,OAAO,SAAS;AAAA,IAClB,MAAM,IAAI,mBAAkB,aAAa;AAAA,EAC3C;AAAA,EAEA,WAAW,KAAK,gBAAgB;AAAA,EAEhC,MAAM,SAAS,SAAS,MAAM,MAAM,IAAI;AAAA,EAExC,IAAI,CAAC,OAAO,aAAa,IAAI,gBAAgB;AAAA,IAC3C,MAAM,IAAI,mBAAkB,4CAA4C;AAAA,EAC1E;AAAA,EAEA,MAAM,YAAY,aAAa,KAAK,OAAO,WAAW,GAAG,cAAc;AAAA,EAEvE,OAAO;AAAA,IACL,QAAQ;AAAA,EACV;AAAA;;AC1EF,wBAAS;AAIF,IAAM,kBAAkB;AAAA,EAC7B,MAAM;AAAA,EACN,YAAY;AAAA,IACV,UAAU;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,gBAAgB;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,QACV,WAAW;AAAA,UACT,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA,UAAU,CAAC,WAAW;AAAA,MACtB,sBAAsB;AAAA,IACxB;AAAA,EACF;AAAA,EACA,UAAU,CAAC,YAAY,gBAAgB;AAAA,EACvC,sBAAsB;AACxB;AAEA,IAAM,uBAAsB;AAAA,EAC1B,MAAM;AAAA,EACN,YAAY;AAAA,OACP,aAAY;AAAA,OACZ,gBAAgB;AAAA,EACrB;AAAA,EACA,UAAU,CAAC,GAAG,aAAY,UAAU,GAAG,gBAAgB,QAAQ;AAAA,EAC/D,sBAAsB;AACxB;;ACpCA,kBAAS,iCAAmB;AAC5B,+BAAS,uCAAoB,mCAAgB;AAC7C,iCAAS;AACT,iCAAS;AACT,kCAAS,0CAAuB;AAUhC,eAAsB,0BAA0B,CAC9C,QACA,QACe;AAAA,EACf,MAAM,gBAAgB,uBAAsB,IAAI,eAAc;AAAA,EAC9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,qBAA
qB,uBAAsB;AAAA,EACjD,MAAM,QAAQ,CAAC,qBAAqB,mBAAmB;AAAA,EACvD,WAAW,QAAQ,OAAO;AAAA,IACxB,mBAAmB,sBAAsB,sBAAsB,IAAI;AAAA,EACrE;AAAA,EAGA,IAAI,CAAC,QAAQ;AAAA,IACX,MAAM,UAAU,IAAI,sBAClB,oBACF;AAAA,IACA,MAAM,QAAQ,cAAc;AAAA,IAE5B,MAAM,SAAS,IAAI,gBAAkD,QAAO;AAAA,MAC1E;AAAA,MACA,WAAW;AAAA,MACX,SAAS,IAAI,oBAAmB,GAAG,GAAG;AAAA,IACxC,CAAC;AAAA,IAED,SAAS,IAAI,gBAAkD;AAAA,MAC7D;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAAA,IAED,OAAO,OAAO,MAAM;AAAA,IAEpB,sBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,IAChE,MAAM,OAAO,MAAM;AAAA,EACrB;AAAA;;ACjDF,kBAAS,iCAAmB;AAC5B,+BAAS,uCAAoB,mCAAgB;AAC7C,iCAAS;AACT,iCAAS;AAUT,eAAsB,0BAA0B,CAC9C,QACe;AAAA,EACf,MAAM,qBAAqB,uBAAsB;AAAA,EAEjD,mBAAmB,cACjB,sBACA,qBACA,aACF;AAAA,EACA,mBAAmB,cACjB,sBACA,qBACA,kBACF;AAAA,EAGA,IAAI,CAAC,QAAQ;AAAA,IACX,MAAM,UAAU,IAAI,sBAClB,oBACF;AAAA,IACA,MAAM,QAAQ,cAAc;AAAA,IAE5B,MAAM,SAAS,IAAI,gBAAkD,QAAO;AAAA,MAC1E;AAAA,MACA,WAAW;AAAA,MACX,SAAS,IAAI,oBAAmB,GAAG,GAAG;AAAA,IACxC,CAAC;AAAA,IAED,SAAS,IAAI,gBAAkD;AAAA,MAC7D;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAAA,IAED,OAAO,OAAO,MAAM;AAAA,IAEpB,sBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,IAChE,MAAM,OAAO,MAAM;AAAA,EACrB;AAAA;;ACnDF;AAAA,wBACE;AAAA,2BACA;AAAA,gBACA;AAAA,mBACA;AAAA;AAKK,IAAM,qBAAqB,oBAAmB,yBAAyB;AAEvE,IAAM,8BAA8B,uBAAsB,SAC/D,oBACA,MAAM;AAAA,EACJ,MAAM,eAAe,uBAAsB,IAAI,cAAa;AAAA,EAC5D,aAAa,iBAAiB,qBAAqB,aAAa;AAAA,EAChE,aAAa,iBAAiB,qBAAqB,kBAAkB;AAAA,EACrE,YAAW,YAAY,EAAE,MAAM,QAAQ,CAAC;AAAA,EACxC,QAAQ,IAAI,+BAA+B;AAAA,EAC3C,OAAO;AAAA,GAET,IACF;",
19
+ "debugId": "DB92C181C46D973A64756E2164756E21",
18
20
  "names": []
19
21
  }