@workglow/test 0.0.57 → 0.0.59

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/dist/binding/IndexedDbModelRepository.d.ts +17 -0
  2. package/dist/binding/IndexedDbModelRepository.d.ts.map +1 -0
  3. package/dist/binding/PostgresModelRepository.d.ts +18 -0
  4. package/dist/binding/PostgresModelRepository.d.ts.map +1 -0
  5. package/dist/binding/SqliteModelRepository.d.ts +17 -0
  6. package/dist/binding/SqliteModelRepository.d.ts.map +1 -0
  7. package/dist/browser.d.ts +0 -2
  8. package/dist/browser.d.ts.map +1 -1
  9. package/dist/browser.js +207 -189
  10. package/dist/browser.js.map +7 -9
  11. package/dist/bun.js +203 -213
  12. package/dist/bun.js.map +7 -11
  13. package/dist/common-server.d.ts +0 -4
  14. package/dist/common-server.d.ts.map +1 -1
  15. package/dist/node.js +203 -213
  16. package/dist/node.js.map +7 -11
  17. package/dist/samples/MediaPipeModelSamples.d.ts.map +1 -1
  18. package/dist/samples/ONNXModelSamples.d.ts +0 -2
  19. package/dist/samples/ONNXModelSamples.d.ts.map +1 -1
  20. package/dist/samples/index.d.ts.map +1 -1
  21. package/dist/test/ai-model/IndexedDbModelRepository.test.d.ts.map +1 -1
  22. package/dist/test/helpers/SupabaseMockClient.d.ts.map +1 -1
  23. package/dist/test/job-queue/InMemoryPrefixedQueueStorage.test.d.ts +7 -0
  24. package/dist/test/job-queue/InMemoryPrefixedQueueStorage.test.d.ts.map +1 -0
  25. package/dist/test/job-queue/IndexedDbPrefixedQueueStorage.test.d.ts +7 -0
  26. package/dist/test/job-queue/IndexedDbPrefixedQueueStorage.test.d.ts.map +1 -0
  27. package/dist/test/job-queue/PostgresPrefixedQueueStorage.test.d.ts +7 -0
  28. package/dist/test/job-queue/PostgresPrefixedQueueStorage.test.d.ts.map +1 -0
  29. package/dist/test/job-queue/SqlitePrefixedQueueStorage.test.d.ts +7 -0
  30. package/dist/test/job-queue/SqlitePrefixedQueueStorage.test.d.ts.map +1 -0
  31. package/dist/test/job-queue/SupabasePrefixedQueueStorage.test.d.ts +7 -0
  32. package/dist/test/job-queue/SupabasePrefixedQueueStorage.test.d.ts.map +1 -0
  33. package/dist/test/job-queue/genericJobQueueTests.d.ts +8 -4
  34. package/dist/test/job-queue/genericJobQueueTests.d.ts.map +1 -1
  35. package/dist/test/job-queue/genericPrefixedQueueStorageTests.d.ts +18 -0
  36. package/dist/test/job-queue/genericPrefixedQueueStorageTests.d.ts.map +1 -0
  37. package/dist/test/job-queue/genericQueueStorageSubscriptionTests.d.ts +29 -0
  38. package/dist/test/job-queue/genericQueueStorageSubscriptionTests.d.ts.map +1 -0
  39. package/dist/test/storage-tabular/genericTabularRepositorySubscriptionTests.d.ts +17 -0
  40. package/dist/test/storage-tabular/genericTabularRepositorySubscriptionTests.d.ts.map +1 -0
  41. package/dist/test/storage-tabular/genericTabularRepositoryTests.d.ts +38 -1
  42. package/dist/test/storage-tabular/genericTabularRepositoryTests.d.ts.map +1 -1
  43. package/dist/test/task-graph-job-queue/IndexedDbTaskGraphJobQueue.test.d.ts.map +1 -1
  44. package/dist/test/task-graph-job-queue/genericTaskGraphJobQueueTests.d.ts +3 -3
  45. package/dist/test/task-graph-job-queue/genericTaskGraphJobQueueTests.d.ts.map +1 -1
  46. package/package.json +17 -17
  47. package/dist/binding/InMemoryJobQueue.d.ts +0 -11
  48. package/dist/binding/InMemoryJobQueue.d.ts.map +0 -1
  49. package/dist/binding/IndexedDbJobQueue.d.ts +0 -11
  50. package/dist/binding/IndexedDbJobQueue.d.ts.map +0 -1
  51. package/dist/binding/PostgresJobQueue.d.ts +0 -11
  52. package/dist/binding/PostgresJobQueue.d.ts.map +0 -1
  53. package/dist/binding/SqliteJobQueue.d.ts +0 -11
  54. package/dist/binding/SqliteJobQueue.d.ts.map +0 -1
package/dist/binding/IndexedDbModelRepository.d.ts ADDED
@@ -0,0 +1,17 @@
+ /**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+ import { ModelPrimaryKeyNames, ModelRepository, ModelSchema } from "@workglow/ai";
+ import { IndexedDbTabularRepository } from "@workglow/storage";
+ /**
+ * IndexedDB implementation of a model repository.
+ * Provides storage and retrieval for models and task-to-model mappings.
+ */
+ export declare class IndexedDbModelRepository extends ModelRepository {
+ modelTabularRepository: IndexedDbTabularRepository<typeof ModelSchema, typeof ModelPrimaryKeyNames>;
+ type: "IndexedDbModelRepository";
+ constructor(tableModels?: string, tableTask2Models?: string);
+ }
+ //# sourceMappingURL=IndexedDbModelRepository.d.ts.map
package/dist/binding/IndexedDbModelRepository.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"IndexedDbModelRepository.d.ts","sourceRoot":"","sources":["../../src/binding/IndexedDbModelRepository.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,oBAAoB,EAAE,eAAe,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAClF,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAE/D;;;GAGG;AACH,qBAAa,wBAAyB,SAAQ,eAAe;IAC3D,sBAAsB,EAAE,0BAA0B,CAChD,OAAO,WAAW,EAClB,OAAO,oBAAoB,CAC5B,CAAC;IAEK,IAAI,EAAG,0BAA0B,CAAU;gBAEtC,WAAW,GAAE,MAAiB,EAAE,gBAAgB,GAAE,MAAsB;CAQrF"}
package/dist/binding/PostgresModelRepository.d.ts ADDED
@@ -0,0 +1,18 @@
+ /**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+ import { ModelPrimaryKeyNames, ModelRepository, ModelSchema } from "@workglow/ai";
+ import { PostgresTabularRepository } from "@workglow/storage";
+ import { Pool } from "pg";
+ /**
+ * PostgreSQL implementation of a model repository.
+ * Provides storage and retrieval for models and task-to-model mappings using PostgreSQL.
+ */
+ export declare class PostgresModelRepository extends ModelRepository {
+ type: "PostgresModelRepository";
+ modelTabularRepository: PostgresTabularRepository<typeof ModelSchema, typeof ModelPrimaryKeyNames>;
+ constructor(db: Pool, tableModels?: string, tableTask2Models?: string);
+ }
+ //# sourceMappingURL=PostgresModelRepository.d.ts.map
package/dist/binding/PostgresModelRepository.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"PostgresModelRepository.d.ts","sourceRoot":"","sources":["../../src/binding/PostgresModelRepository.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,oBAAoB,EAAE,eAAe,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAClF,OAAO,EAAE,yBAAyB,EAAE,MAAM,mBAAmB,CAAC;AAC9D,OAAO,EAAE,IAAI,EAAE,MAAM,IAAI,CAAC;AAE1B;;;GAGG;AACH,qBAAa,uBAAwB,SAAQ,eAAe;IACnD,IAAI,EAAG,yBAAyB,CAAU;IACjD,sBAAsB,EAAE,yBAAyB,CAC/C,OAAO,WAAW,EAClB,OAAO,oBAAoB,CAC5B,CAAC;gBAGA,EAAE,EAAE,IAAI,EACR,WAAW,GAAE,MAAkB,EAC/B,gBAAgB,GAAE,MAAyB;CAU9C"}
package/dist/binding/SqliteModelRepository.d.ts ADDED
@@ -0,0 +1,17 @@
+ /**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+ import { ModelPrimaryKeyNames, ModelRepository, ModelSchema } from "@workglow/ai";
+ import { SqliteTabularRepository } from "@workglow/storage";
+ /**
+ * SQLite implementation of a model repository.
+ * Provides storage and retrieval for models and task-to-model mappings using SQLite.
+ */
+ export declare class SqliteModelRepository extends ModelRepository {
+ type: "SqliteModelRepository";
+ modelTabularRepository: SqliteTabularRepository<typeof ModelSchema, typeof ModelPrimaryKeyNames>;
+ constructor(dbOrPath: string, tableModels?: string, tableTask2Models?: string);
+ }
+ //# sourceMappingURL=SqliteModelRepository.d.ts.map
package/dist/binding/SqliteModelRepository.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"SqliteModelRepository.d.ts","sourceRoot":"","sources":["../../src/binding/SqliteModelRepository.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAE,oBAAoB,EAAE,eAAe,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAClF,OAAO,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAE5D;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,eAAe;IACjD,IAAI,EAAG,uBAAuB,CAAU;IAC/C,sBAAsB,EAAE,uBAAuB,CAAC,OAAO,WAAW,EAAE,OAAO,oBAAoB,CAAC,CAAC;gBAE/F,QAAQ,EAAE,MAAM,EAChB,WAAW,GAAE,MAAkB,EAC/B,gBAAgB,GAAE,MAAyB;CAU9C"}
package/dist/browser.d.ts CHANGED
@@ -4,8 +4,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
  export * from "./common";
- export * from "./binding/IndexedDbJobQueue";
  export * from "./binding/IndexedDbTaskGraphRepository";
  export * from "./binding/IndexedDbTaskOutputRepository";
- export * from "./binding/InMemoryJobQueue";
  //# sourceMappingURL=browser.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"browser.d.ts","sourceRoot":"","sources":["../src/browser.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,cAAc,UAAU,CAAC;AAEzB,cAAc,6BAA6B,CAAC;AAC5C,cAAc,wCAAwC,CAAC;AACvD,cAAc,yCAAyC,CAAC;AACxD,cAAc,4BAA4B,CAAC"}
+ {"version":3,"file":"browser.d.ts","sourceRoot":"","sources":["../src/browser.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,cAAc,UAAU,CAAC;AAEzB,cAAc,wCAAwC,CAAC;AACvD,cAAc,yCAAyC,CAAC"}
package/dist/browser.js CHANGED
@@ -35,194 +35,229 @@ class InMemoryTaskOutputRepository extends TaskOutputTabularRepository {
  // src/samples/index.ts
  import { AiJob } from "@workglow/ai";
  import { TENSORFLOW_MEDIAPIPE as TENSORFLOW_MEDIAPIPE2 } from "@workglow/ai-provider";
- import { ConcurrencyLimiter, JobQueue } from "@workglow/job-queue";
+ import { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from "@workglow/job-queue";
  import { InMemoryQueueStorage } from "@workglow/storage";
  import { getTaskQueueRegistry } from "@workglow/task-graph";

  // src/samples/MediaPipeModelSamples.ts
  import { getGlobalModelRepository } from "@workglow/ai";
  import { TENSORFLOW_MEDIAPIPE } from "@workglow/ai-provider";
- async function addMediaPipeModel(info, tasks) {
- const name = "mediapipe:" + info.name;
- const model = Object.assign({
- provider: TENSORFLOW_MEDIAPIPE,
- quantization: null,
- normalize: true,
- contextWindow: 4096,
- availableOnBrowser: true,
- availableOnServer: false,
- parameters: null,
- languageStyle: null,
- usingDimensions: info.nativeDimensions ?? null
- }, info, { name });
- await getGlobalModelRepository().addModel(model);
- await Promise.allSettled(tasks.map((task) => getGlobalModelRepository().connectTaskToModel(task, name)));
- }
  async function registerMediaPipeTfJsLocalModels() {
- await addMediaPipeModel({
- name: "Universal Sentence Encoder",
- pipeline: "text_embedder",
- nativeDimensions: 100,
- url: "https://storage.googleapis.com/mediapipe-tasks/text_embedder/universal_sentence_encoder.tflite"
- }, ["TextEmbeddingTask"]);
- await addMediaPipeModel({
- name: "Text Encoder",
- pipeline: "text_embedder",
- nativeDimensions: 100,
- url: "https://huggingface.co/keras-sd/text-encoder-tflite/resolve/main/text_encoder.tflite?download=true"
- }, ["TextEmbeddingTask"]);
+ await getGlobalModelRepository().addModel({
+ model_id: "media-pipe:Universal Sentence Encoder",
+ title: "Universal Sentence Encoder",
+ description: "Universal Sentence Encoder",
+ tasks: ["TextEmbeddingTask"],
+ provider: TENSORFLOW_MEDIAPIPE,
+ providerConfig: {
+ modelPath: "https://storage.googleapis.com/mediapipe-tasks/text_embedder/universal_sentence_encoder.tflite"
+ },
+ metadata: {}
+ });
+ await getGlobalModelRepository().addModel({
+ model_id: "media-pipe:Text Encoder",
+ title: "Text Encoder",
+ description: "Text Encoder",
+ tasks: ["TextEmbeddingTask"],
+ provider: TENSORFLOW_MEDIAPIPE,
+ providerConfig: {
+ modelPath: "https://huggingface.co/keras-sd/text-encoder-tflite/resolve/main/text_encoder.tflite?download=true"
+ },
+ metadata: {}
+ });
  }
  // src/samples/ONNXModelSamples.ts
  import { getGlobalModelRepository as getGlobalModelRepository2 } from "@workglow/ai";
- import { HF_TRANSFORMERS_ONNX, QUANTIZATION_DATA_TYPES } from "@workglow/ai-provider";
- async function addONNXModel(info, tasks) {
- const model = Object.assign({
- name: "onnx:" + info.url + ":" + (info.quantization ?? QUANTIZATION_DATA_TYPES.q8),
- provider: HF_TRANSFORMERS_ONNX,
- quantization: QUANTIZATION_DATA_TYPES.q8,
- normalize: true,
- contextWindow: 4096,
- availableOnBrowser: true,
- availableOnServer: true,
- parameters: null,
- languageStyle: null,
- usingDimensions: info.nativeDimensions ?? null
- }, info);
- await getGlobalModelRepository2().addModel(model);
- await Promise.allSettled(tasks.map((task) => getGlobalModelRepository2().connectTaskToModel(task, model.name)));
- }
+ import { HF_TRANSFORMERS_ONNX } from "@workglow/ai-provider";
  async function registerHuggingfaceLocalModels() {
- await addONNXModel({
- pipeline: "feature-extraction",
- nativeDimensions: 384,
- url: "Supabase/gte-small"
- }, ["TextEmbeddingTask"]);
- await addONNXModel({
- pipeline: "feature-extraction",
- nativeDimensions: 768,
- url: "Xenova/bge-base-en-v1.5"
- }, ["TextEmbeddingTask"]);
- await addONNXModel({
- pipeline: "feature-extraction",
- nativeDimensions: 384,
- url: "Xenova/all-MiniLM-L6-v2"
- }, ["TextEmbeddingTask"]);
- await addONNXModel({
- pipeline: "feature-extraction",
- nativeDimensions: 1024,
- url: "WhereIsAI/UAE-Large-V1"
- }, ["TextEmbeddingTask"]);
- await addONNXModel({
- pipeline: "feature-extraction",
- nativeDimensions: 384,
- url: "Xenova/bge-small-en-v1.5"
- }, ["TextEmbeddingTask"]);
- await addONNXModel({
- pipeline: "question-answering",
- url: "Xenova/distilbert-base-uncased-distilled-squad"
- }, ["TextQuestionAnsweringTask"]);
- await addONNXModel({
- pipeline: "zero-shot-classification",
- url: "Xenova/distilbert-base-uncased-mnli"
- }, ["TextClassificationTask"]);
- await addONNXModel({
- pipeline: "fill-mask",
- url: "answerdotai/ModernBERT-base"
- }, ["TextClassificationTask"]);
- await addONNXModel({
- pipeline: "feature-extraction",
- nativeDimensions: 768,
- url: "Xenova/multi-qa-mpnet-base-dot-v1"
- }, ["TextEmbeddingTask"]);
- await addONNXModel({
- pipeline: "text-generation",
- url: "Xenova/gpt2"
- }, ["TextGenerationTask"]);
- await addONNXModel({
- pipeline: "text-generation",
- url: "Xenova/Phi-3-mini-4k-instruct",
- device: "webgpu",
- quantization: QUANTIZATION_DATA_TYPES.q4,
- use_external_data_format: true
- }, ["TextGenerationTask"]);
- await addONNXModel({
- name: "onnx:Xenova/Phi-3-mini-4k-instruct:q4f16",
- pipeline: "text-generation",
- url: "Xenova/Phi-3-mini-4k-instruct_fp16",
- device: "webgpu",
- quantization: QUANTIZATION_DATA_TYPES.q4,
- use_external_data_format: true
- }, ["TextGenerationTask"]);
- await addONNXModel({
- name: "onnx:onnx-community/DeepSeek-R1-Distill-Qwen-1.5B:fp16",
- pipeline: "text-generation",
- url: "onnx-community/DeepSeek-R1-Distill-Qwen-1.5B-ONNX",
- device: "webgpu",
- quantization: QUANTIZATION_DATA_TYPES.fp16,
- use_external_data_format: true
- }, ["TextGenerationTask"]);
- await addONNXModel({
- pipeline: "text-generation",
- url: "Xenova/distilgpt2"
- }, ["TextGenerationTask"]);
- await addONNXModel({
- pipeline: "text2text-generation",
- url: "Xenova/flan-t5-small"
- }, ["TextGenerationTask"]);
- await addONNXModel({
- pipeline: "text2text-generation",
- url: "Xenova/LaMini-Flan-T5-783M"
- }, ["TextGenerationTask", "TextRewriterTask"]);
- await addONNXModel({
- pipeline: "summarization",
- url: "Falconsai/text_summarization",
- quantization: QUANTIZATION_DATA_TYPES.fp32
- }, ["TextSummaryTask"]);
- await addONNXModel({
- pipeline: "translation",
- url: "Xenova/nllb-200-distilled-600M",
- languageStyle: "FLORES-200"
- }, ["TextTranslationTask"]);
- await addONNXModel({
- pipeline: "translation",
- url: "Xenova/m2m100_418M",
- languageStyle: "ISO-639"
- }, ["TextTranslationTask"]);
- await addONNXModel({
- pipeline: "translation",
- url: "Xenova/mbart-large-50-many-to-many-mmt",
- languageStyle: "ISO-639_ISO-3166-1-alpha-2"
- }, ["TextTranslationTask"]);
+ const onnxModels = [
+ {
+ model_id: "onnx:Supabase/gte-small",
+ title: "gte-small",
+ description: "Supabase/gte-small",
+ tasks: ["TextEmbeddingTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "feature-extraction",
+ modelPath: "Supabase/gte-small"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/bge-base-en-v1.5",
+ title: "bge-base-en-v1.5",
+ description: "Xenova/bge-base-en-v1.5",
+ tasks: ["TextEmbeddingTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "feature-extraction",
+ modelPath: "Xenova/bge-base-en-v1.5"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/distilbert-base-uncased-distilled-squad",
+ title: "distilbert-base-uncased-distilled-squad",
+ description: "Xenova/distilbert-base-uncased-distilled-squad",
+ tasks: ["TextQuestionAnsweringTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "question-answering",
+ modelPath: "Xenova/distilbert-base-uncased-distilled-squad"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:answerdotai/ModernBERT-base",
+ title: "ModernBERT-base",
+ description: "answerdotai/ModernBERT-base",
+ tasks: ["TextClassificationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "feature-extraction",
+ modelPath: "Xenova/multi-qa-mpnet-base-dot-v1"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/gpt2",
+ title: "gpt2",
+ description: "Xenova/gpt2",
+ tasks: ["TextGenerationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "text-generation",
+ modelPath: "Xenova/gpt2"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/Phi-3-mini-4k-instruct:q4f16",
+ title: "Phi-3-mini-4k-instruct:q4f16",
+ description: "Xenova/Phi-3-mini-4k-instruct:q4f16",
+ tasks: ["TextGenerationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "text-generation",
+ modelPath: "onnx-community/DeepSeek-R1-Distill-Qwen-1.5B-ONNX"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/distilgpt2",
+ title: "distilgpt2",
+ description: "Xenova/distilgpt2",
+ tasks: ["TextGenerationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "text-generation",
+ modelPath: "Xenova/distilgpt2"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/LaMini-Flan-T5-783M",
+ title: "LaMini-Flan-T5-783M",
+ description: "Xenova/LaMini-Flan-T5-783M",
+ tasks: ["TextGenerationTask", "TextRewriterTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "text2text-generation",
+ modelPath: "Xenova/LaMini-Flan-T5-783M"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Falconsai/text_summarization",
+ title: "text_summarization",
+ description: "Falconsai/text_summarization",
+ tasks: ["TextSummaryTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "summarization",
+ modelPath: "Falconsai/text_summarization"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/nllb-200-distilled-600M",
+ title: "nllb-200-distilled-600M",
+ description: "Xenova/nllb-200-distilled-600M",
+ tasks: ["TextTranslationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "translation",
+ modelPath: "Xenova/nllb-200-distilled-600M",
+ languageStyle: "FLORES-200"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/m2m100_418M",
+ title: "m2m100_418M",
+ description: "Xenova/m2m100_418M",
+ tasks: ["TextTranslationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "translation",
+ modelPath: "Xenova/m2m100_418M",
+ languageStyle: "ISO-639"
+ },
+ metadata: {}
+ },
+ {
+ model_id: "onnx:Xenova/mbart-large-50-many-to-many-mmt",
+ title: "mbart-large-50-many-to-many-mmt",
+ description: "Xenova/mbart-large-50-many-to-many-mmt",
+ tasks: ["TextTranslationTask"],
+ provider: HF_TRANSFORMERS_ONNX,
+ providerConfig: {
+ pipeline: "translation",
+ modelPath: "Xenova/mbart-large-50-many-to-many-mmt",
+ languageStyle: "ISO-639_ISO-3166-1-alpha-2"
+ },
+ metadata: {}
+ }
+ ];
+ for (const model of onnxModels) {
+ await getGlobalModelRepository2().addModel(model);
+ }
  }

  // src/samples/index.ts
  async function register_HFT_InMemoryQueue() {
- const jobQueue = new JobQueue("HF_TRANSFORMERS_ONNX", AiJob, {
- storage: new InMemoryQueueStorage("HF_TRANSFORMERS_ONNX"),
+ const queueName = "HF_TRANSFORMERS_ONNX";
+ const storage = new InMemoryQueueStorage(queueName);
+ await storage.setupDatabase();
+ const server = new JobQueueServer(AiJob, {
+ storage,
+ queueName,
  limiter: new ConcurrencyLimiter(1, 10)
  });
- getTaskQueueRegistry().registerQueue(jobQueue);
- jobQueue.start();
+ const client = new JobQueueClient({
+ storage,
+ queueName
+ });
+ client.attach(server);
+ getTaskQueueRegistry().registerQueue({ server, client, storage });
+ await server.start();
  }
  async function register_TFMP_InMemoryQueue() {
- const jobQueue = new JobQueue(TENSORFLOW_MEDIAPIPE2, AiJob, {
- storage: new InMemoryQueueStorage(TENSORFLOW_MEDIAPIPE2),
+ const queueName = TENSORFLOW_MEDIAPIPE2;
+ const storage = new InMemoryQueueStorage(queueName);
+ await storage.setupDatabase();
+ const server = new JobQueueServer(AiJob, {
+ storage,
+ queueName,
  limiter: new ConcurrencyLimiter(1, 10)
  });
- getTaskQueueRegistry().registerQueue(jobQueue);
- jobQueue.start();
- }
- // src/binding/IndexedDbJobQueue.ts
- import { JobQueue as JobQueue2 } from "@workglow/job-queue";
- import { IndexedDbQueueStorage } from "@workglow/storage";
- import { createServiceToken as createServiceToken3 } from "@workglow/util";
- var INDEXED_DB_JOB_QUEUE = createServiceToken3("jobQueue.indexedDb");
-
- class IndexedDbJobQueue extends JobQueue2 {
- constructor(queueName, jobCls, options) {
- options.storage ??= new IndexedDbQueueStorage(queueName);
- super(queueName, jobCls, options);
- }
+ const client = new JobQueueClient({
+ storage,
+ queueName
+ });
+ client.attach(server);
+ getTaskQueueRegistry().registerQueue({ server, client, storage });
+ await server.start();
  }
  // src/binding/IndexedDbTaskGraphRepository.ts
  import { IndexedDbTabularRepository } from "@workglow/storage";
@@ -231,8 +266,8 @@ import {
  TaskGraphSchema as TaskGraphSchema2,
  TaskGraphTabularRepository as TaskGraphTabularRepository2
  } from "@workglow/task-graph";
- import { createServiceToken as createServiceToken4 } from "@workglow/util";
- var IDB_TASK_GRAPH_REPOSITORY = createServiceToken4("taskgraph.taskGraphRepository.indexedDb");
+ import { createServiceToken as createServiceToken3 } from "@workglow/util";
+ var IDB_TASK_GRAPH_REPOSITORY = createServiceToken3("taskgraph.taskGraphRepository.indexedDb");

  class IndexedDbTaskGraphRepository extends TaskGraphTabularRepository2 {
  constructor(table = "task_graphs") {
@@ -248,8 +283,8 @@ import {
  TaskOutputSchema as TaskOutputSchema2,
  TaskOutputTabularRepository as TaskOutputTabularRepository2
  } from "@workglow/task-graph";
- import { createServiceToken as createServiceToken5 } from "@workglow/util";
- var IDB_TASK_OUTPUT_REPOSITORY = createServiceToken5("taskgraph.taskOutputRepository.indexedDb");
+ import { createServiceToken as createServiceToken4 } from "@workglow/util";
+ var IDB_TASK_OUTPUT_REPOSITORY = createServiceToken4("taskgraph.taskOutputRepository.indexedDb");

  class IndexedDbTaskOutputRepository extends TaskOutputTabularRepository2 {
  constructor(table = "task_outputs") {
@@ -258,36 +293,19 @@ class IndexedDbTaskOutputRepository extends TaskOutputTabularRepository2 {
  });
  }
  }
- // src/binding/InMemoryJobQueue.ts
- import { JobQueue as JobQueue3 } from "@workglow/job-queue";
- import { InMemoryQueueStorage as InMemoryQueueStorage2 } from "@workglow/storage";
- import { createServiceToken as createServiceToken6 } from "@workglow/util";
- var IN_MEMORY_JOB_QUEUE = createServiceToken6("jobQueue.inMemory");
-
- class InMemoryJobQueue extends JobQueue3 {
- constructor(queueName, jobCls, options) {
- options.storage ??= new InMemoryQueueStorage2(queueName);
- super(queueName, jobCls, options);
- }
- }
  export {
  register_TFMP_InMemoryQueue,
  register_HFT_InMemoryQueue,
  registerMediaPipeTfJsLocalModels,
  registerHuggingfaceLocalModels,
- addONNXModel,
  MEMORY_TASK_OUTPUT_REPOSITORY,
  MEMORY_TASK_GRAPH_REPOSITORY,
  IndexedDbTaskOutputRepository,
  IndexedDbTaskGraphRepository,
- IndexedDbJobQueue,
  InMemoryTaskOutputRepository,
  InMemoryTaskGraphRepository,
- InMemoryJobQueue,
- IN_MEMORY_JOB_QUEUE,
- INDEXED_DB_JOB_QUEUE,
  IDB_TASK_OUTPUT_REPOSITORY,
  IDB_TASK_GRAPH_REPOSITORY
  };

- //# debugId=E2AF6D56E368E09464756E2164756E21
+ //# debugId=391488397AB36BE864756E2164756E21