@workglow/ai-provider 0.0.66 → 0.0.67
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/README.md +1 -1
- package/dist/hf-transformers/common/HFT_Constants.d.ts +1 -0
- package/dist/hf-transformers/common/HFT_Constants.d.ts.map +1 -1
- package/dist/hf-transformers/common/HFT_JobRunFns.d.ts +8 -1
- package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map +1 -1
- package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts.map +1 -1
- package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts.map +1 -1
- package/dist/hf-transformers/registry/HFT_Worker_RegisterJobFns.d.ts.map +1 -1
- package/dist/index.js +282 -19
- package/dist/index.js.map +13 -13
- package/dist/tf-mediapipe/common/TFMP_Constants.d.ts +21 -0
- package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map +1 -1
- package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts +21 -1
- package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +1 -1
- package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts +22 -2
- package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +1 -1
- package/dist/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.d.ts.map +1 -1
- package/dist/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.d.ts.map +1 -1
- package/dist/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.d.ts.map +1 -1
- package/package.json +19 -13
package/README.md
CHANGED
@@ -33,7 +33,7 @@ Depending on which providers you use, you may need to install additional peer de
 npm install @sroussey/transformers
 
 # For MediaPipe support
-npm install @mediapipe/tasks-text
+npm install @mediapipe/tasks-text @mediapipe/tasks-vision @mediapipe/tasks-audio @mediapipe/tasks-genai
 ```
 
 ## Quick Start
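The added peer dependencies correspond to the extra MediaPipe task engines (vision, audio, genai) that the bundled code below can now resolve. As a rough setup sketch, assuming the inline registration helpers visible in dist/index.js (register_HFT_InlineJobFns, register_TFMP_InlineJobFns) are exported from the package entry point and that their client argument is optional:

```ts
// Hypothetical in-process wiring; the function names come from dist/index.js in
// this diff, but the published export surface and signatures are not confirmed here.
import {
  register_HFT_InlineJobFns,
  register_TFMP_InlineJobFns,
} from "@workglow/ai-provider";

async function setupProviders(): Promise<void> {
  // Register the Hugging Face Transformers (ONNX) run functions in-process.
  await register_HFT_InlineJobFns();
  // Register the TensorFlow MediaPipe run functions in-process.
  await register_TFMP_InlineJobFns();
}

setupProviders().catch(console.error);
```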
package/dist/hf-transformers/common/HFT_Constants.d.ts
CHANGED
@@ -4,6 +4,7 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 export declare const HF_TRANSFORMERS_ONNX = "HF_TRANSFORMERS_ONNX";
+export declare const HTF_CACHE_NAME = "transformers-cache";
 export type QuantizationDataType = "auto" | "fp32" | "fp16" | "q8" | "int8" | "uint8" | "q4" | "bnb4" | "q4f16";
 export declare const QuantizationDataType: {
     readonly auto: "auto";
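HTF_CACHE_NAME names the Cache Storage bucket ("transformers-cache") that the new unload path in dist/index.js opens when it deletes a model's files. A minimal sketch of inspecting that cache, assuming a browser-like environment with the Cache Storage API and that the constant is exported from the package entry point:

```ts
// Sketch only: lists cached files for one model, mirroring the path-prefix matching
// used by deleteModelCache in dist/index.js. The modelPath argument is illustrative.
import { HTF_CACHE_NAME } from "@workglow/ai-provider";

async function listCachedModelFiles(modelPath: string): Promise<string[]> {
  const cache = await caches.open(HTF_CACHE_NAME); // "transformers-cache"
  const keys = await cache.keys();
  const prefix = `/${modelPath}/`;
  return keys
    .map((request) => new URL(request.url).pathname)
    .filter((pathname) => pathname.startsWith(prefix));
}
```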
package/dist/hf-transformers/common/HFT_Constants.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"HFT_Constants.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_Constants.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,eAAO,MAAM,oBAAoB,yBAAyB,CAAC;
+
{"version":3,"file":"HFT_Constants.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_Constants.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,eAAO,MAAM,oBAAoB,yBAAyB,CAAC;AAC3D,eAAO,MAAM,cAAc,uBAAuB,CAAC;AAEnD,MAAM,MAAM,oBAAoB,GAC5B,MAAM,GACN,MAAM,GACN,MAAM,GACN,IAAI,GACJ,MAAM,GACN,OAAO,GACP,IAAI,GACJ,MAAM,GACN,OAAO,CAAC;AAEZ,eAAO,MAAM,oBAAoB;;;;;;;;;;CAUsC,CAAC;AAExE,MAAM,MAAM,mBAAmB,GAC3B,WAAW,GACX,sBAAsB,GACtB,iBAAiB,GACjB,sBAAsB,GACtB,qBAAqB,GACrB,eAAe,GACf,aAAa,GACb,oBAAoB,GACpB,0BAA0B,GAC1B,oBAAoB,CAAC;AAEzB,eAAO,MAAM,mBAAmB;;;;;;;;;;;CAWqC,CAAC;AAEtE,MAAM,MAAM,qBAAqB,GAC7B,oBAAoB,GACpB,oBAAoB,GACpB,kBAAkB,GAClB,sBAAsB,GACtB,gBAAgB,GAChB,kBAAkB,GAClB,0BAA0B,CAAC;AAE/B,eAAO,MAAM,qBAAqB;;;;;;;;CAQuC,CAAC;AAE1E,MAAM,MAAM,oBAAoB,GAC5B,sBAAsB,GACtB,8BAA8B,GAC9B,gBAAgB,CAAC;AAErB,eAAO,MAAM,oBAAoB;;;;CAIsC,CAAC;AAExE,MAAM,MAAM,yBAAyB,GACjC,6BAA6B,GAC7B,eAAe,GACf,gCAAgC,GAChC,gCAAgC,GAChC,4BAA4B,CAAC;AAEjC,eAAO,MAAM,yBAAyB;;;;;;CAM2C,CAAC;AAElF,MAAM,MAAM,eAAe,GACvB,mBAAmB,GACnB,qBAAqB,GACrB,oBAAoB,GACpB,yBAAyB,CAAC;AAE9B,eAAO,MAAM,eAAe;;;;;;;;;;;;;;;;;;;;;;;;;;CAKiC,CAAC"}
package/dist/hf-transformers/common/HFT_JobRunFns.d.ts
CHANGED
@@ -3,18 +3,25 @@
  * Copyright 2025 Steven Roussey <sroussey@gmail.com>
  * SPDX-License-Identifier: Apache-2.0
  */
-import { AiProviderRunFn, type DeReplicateFromSchema, DownloadModelTaskExecuteInput, TextEmbeddingInputSchema, TextEmbeddingOutputSchema, TextGenerationInputSchema, TextGenerationOutputSchema, TextQuestionAnswerInputSchema, TextQuestionAnswerOutputSchema, TextRewriterInputSchema, TextRewriterOutputSchema, TextSummaryInputSchema, TextSummaryOutputSchema, TextTranslationInputSchema, TextTranslationOutputSchema } from "@workglow/ai";
+import { AiProviderRunFn, type DeReplicateFromSchema, DownloadModelTaskExecuteInput, TextClassifierInputSchema, TextClassifierOutputSchema, TextEmbeddingInputSchema, TextEmbeddingOutputSchema, TextGenerationInputSchema, TextGenerationOutputSchema, TextLanguageDetectionInputSchema, TextLanguageDetectionOutputSchema, TextQuestionAnswerInputSchema, TextQuestionAnswerOutputSchema, TextRewriterInputSchema, TextRewriterOutputSchema, TextSummaryInputSchema, TextSummaryOutputSchema, TextTranslationInputSchema, TextTranslationOutputSchema } from "@workglow/ai";
 import { HfTransformersOnnxModelRecord } from "./HFT_ModelSchema";
 /**
  * Core implementation for downloading and caching a Hugging Face Transformers model.
  * This is shared between inline and worker implementations.
  */
 export declare const HFT_Download: AiProviderRunFn<DownloadModelTaskExecuteInput, DownloadModelTaskExecuteInput, HfTransformersOnnxModelRecord>;
+/**
+ * Core implementation for unloading a Hugging Face Transformers model.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_Unload: AiProviderRunFn<DownloadModelTaskExecuteInput, DownloadModelTaskExecuteInput, HfTransformersOnnxModelRecord>;
 /**
  * Core implementation for text embedding using Hugging Face Transformers.
  * This is shared between inline and worker implementations.
  */
 export declare const HFT_TextEmbedding: AiProviderRunFn<DeReplicateFromSchema<typeof TextEmbeddingInputSchema>, DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>, HfTransformersOnnxModelRecord>;
+export declare const HFT_TextClassifier: AiProviderRunFn<DeReplicateFromSchema<typeof TextClassifierInputSchema>, DeReplicateFromSchema<typeof TextClassifierOutputSchema>, HfTransformersOnnxModelRecord>;
+export declare const HFT_TextLanguageDetection: AiProviderRunFn<DeReplicateFromSchema<typeof TextLanguageDetectionInputSchema>, DeReplicateFromSchema<typeof TextLanguageDetectionOutputSchema>, HfTransformersOnnxModelRecord>;
 /**
  * Core implementation for text generation using Hugging Face Transformers.
  * This is shared between inline and worker implementations.
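The new declarations reuse the existing AiProviderRunFn shape, so HFT_TextClassifier and HFT_TextLanguageDetection are called like the other run functions, with (input, model, onProgress, signal). A hedged sketch of a direct call; the model record is treated as opaque because its exact shape is not part of this diff:

```ts
// Sketch: direct call to the new classifier run function. The argument order matches
// the bundled calls in dist/index.js; modelRecord stands in for an
// HfTransformersOnnxModelRecord supplied by the application's model registry.
import { HFT_TextClassifier } from "@workglow/ai-provider";

async function classify(modelRecord: unknown, text: string) {
  const { categories } = await HFT_TextClassifier(
    { text, maxCategories: 3 }, // maxCategories caps the classifier's top_k
    modelRecord as any,
    (progress, message) => console.log(progress, message),
    new AbortController().signal
  );
  return categories; // Array<{ label: string; score: number }>
}
```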
package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"HFT_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_JobRunFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;
+
{"version":3,"file":"HFT_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_JobRunFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAmBH,OAAO,EACL,eAAe,EACf,KAAK,qBAAqB,EAC1B,6BAA6B,EAC7B,yBAAyB,EACzB,0BAA0B,EAC1B,wBAAwB,EACxB,yBAAyB,EACzB,yBAAyB,EACzB,0BAA0B,EAC1B,gCAAgC,EAChC,iCAAiC,EACjC,6BAA6B,EAC7B,8BAA8B,EAC9B,uBAAuB,EACvB,wBAAwB,EACxB,sBAAsB,EACtB,uBAAuB,EACvB,0BAA0B,EAC1B,2BAA2B,EAE5B,MAAM,cAAc,CAAC;AAItB,OAAO,EAAE,6BAA6B,EAAE,MAAM,mBAAmB,CAAC;AA2ClE;;;GAGG;AACH,eAAO,MAAM,YAAY,EAAE,eAAe,CACxC,6BAA6B,EAC7B,6BAA6B,EAC7B,6BAA6B,CAQ9B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,UAAU,EAAE,eAAe,CACtC,6BAA6B,EAC7B,6BAA6B,EAC7B,6BAA6B,CAgB9B,CAAC;AAwCF;;;GAGG;AAEH,eAAO,MAAM,iBAAiB,EAAE,eAAe,CAC7C,qBAAqB,CAAC,OAAO,wBAAwB,CAAC,EACtD,qBAAqB,CAAC,OAAO,yBAAyB,CAAC,EACvD,6BAA6B,CA0B9B,CAAC;AAEF,eAAO,MAAM,kBAAkB,EAAE,eAAe,CAC9C,qBAAqB,CAAC,OAAO,yBAAyB,CAAC,EACvD,qBAAqB,CAAC,OAAO,0BAA0B,CAAC,EACxD,6BAA6B,CAyB9B,CAAC;AAEF,eAAO,MAAM,yBAAyB,EAAE,eAAe,CACrD,qBAAqB,CAAC,OAAO,gCAAgC,CAAC,EAC9D,qBAAqB,CAAC,OAAO,iCAAiC,CAAC,EAC/D,6BAA6B,CAyB9B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,kBAAkB,EAAE,eAAe,CAC9C,qBAAqB,CAAC,OAAO,yBAAyB,CAAC,EACvD,qBAAqB,CAAC,OAAO,0BAA0B,CAAC,EACxD,6BAA6B,CAwB9B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,mBAAmB,EAAE,eAAe,CAC/C,qBAAqB,CAAC,OAAO,0BAA0B,CAAC,EACxD,qBAAqB,CAAC,OAAO,2BAA2B,CAAC,EACzD,6BAA6B,CAyB9B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,gBAAgB,EAAE,eAAe,CAC5C,qBAAqB,CAAC,OAAO,uBAAuB,CAAC,EACrD,qBAAqB,CAAC,OAAO,wBAAwB,CAAC,EACtD,6BAA6B,CA+B9B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,eAAe,EAAE,eAAe,CAC3C,qBAAqB,CAAC,OAAO,sBAAsB,CAAC,EACpD,qBAAqB,CAAC,OAAO,uBAAuB,CAAC,EACrD,6BAA6B,CAsB9B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,sBAAsB,EAAE,eAAe,CAClD,qBAAqB,CAAC,OAAO,6BAA6B,CAAC,EAC3D,qBAAqB,CAAC,OAAO,8BAA8B,CAAC,EAC5D,6BAA6B,CAuB9B,CAAC"}
package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"HFT_Client_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAS,UAAU,EAAyB,MAAM,cAAc,CAAC;AACxE,OAAO,EAAsB,cAAc,EAAkB,MAAM,qBAAqB,CAAC;AAEzF,OAAO,EAAwB,SAAS,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAInF;;;;;;GAMG;AACH,wBAAsB,yBAAyB,CAC7C,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,cAAc,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,UAAU,CAAC,GACzD,OAAO,CAAC,IAAI,CAAC,
+
{"version":3,"file":"HFT_Client_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,EAAS,UAAU,EAAyB,MAAM,cAAc,CAAC;AACxE,OAAO,EAAsB,cAAc,EAAkB,MAAM,qBAAqB,CAAC;AAEzF,OAAO,EAAwB,SAAS,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAInF;;;;;;GAMG;AACH,wBAAsB,yBAAyB,CAC7C,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,cAAc,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,UAAU,CAAC,GACzD,OAAO,CAAC,IAAI,CAAC,CA2Cf"}
package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"HFT_Inline_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,EAAS,UAAU,EAA0C,MAAM,cAAc,CAAC;AACzF,OAAO,EAAsB,cAAc,EAAkB,MAAM,qBAAqB,CAAC;AAEzF,OAAO,EAAwB,SAAS,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;
+
{"version":3,"file":"HFT_Inline_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,EAAS,UAAU,EAA0C,MAAM,cAAc,CAAC;AACzF,OAAO,EAAsB,cAAc,EAAkB,MAAM,qBAAqB,CAAC;AAEzF,OAAO,EAAwB,SAAS,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAenF;;;;;GAKG;AACH,wBAAsB,yBAAyB,CAC7C,MAAM,CAAC,EAAE,cAAc,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE,UAAU,CAAC,GACzD,OAAO,CAAC,IAAI,CAAC,CA2Cf"}
package/dist/hf-transformers/registry/HFT_Worker_RegisterJobFns.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"HFT_Worker_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;
+
{"version":3,"file":"HFT_Worker_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAoBH,eAAO,MAAM,iBAAiB,gDAA+C,CAAC;AAE9E,eAAO,MAAM,0BAA0B,MAkBtC,CAAC"}
package/dist/index.js
CHANGED
@@ -1,5 +1,6 @@
 // src/hf-transformers/common/HFT_Constants.ts
 var HF_TRANSFORMERS_ONNX = "HF_TRANSFORMERS_ONNX";
+var HTF_CACHE_NAME = "transformers-cache";
 var QuantizationDataType = {
   auto: "auto",
   fp32: "fp32",
@@ -88,6 +89,46 @@ var HFT_Download = async (input, model, onProgress, signal) => {
     model: input.model
   };
 };
+var HFT_Unload = async (input, model, onProgress, signal) => {
+  if (pipelines.has(model.model_id)) {
+    pipelines.delete(model.model_id);
+    onProgress(50, "Pipeline removed from memory");
+  }
+  const modelPath = model.providerConfig.modelPath;
+  await deleteModelCache(modelPath);
+  onProgress(100, "Model cache deleted");
+  return {
+    model: input.model
+  };
+};
+var deleteModelCache = async (modelPath) => {
+  const cache = await caches.open(HTF_CACHE_NAME);
+  const keys = await cache.keys();
+  const prefix = `/${modelPath}/`;
+  const requestsToDelete = [];
+  for (const request of keys) {
+    const url = new URL(request.url);
+    if (url.pathname.startsWith(prefix)) {
+      requestsToDelete.push(request);
+    }
+  }
+  let deletedCount = 0;
+  for (const request of requestsToDelete) {
+    try {
+      const deleted = await cache.delete(request);
+      if (deleted) {
+        deletedCount++;
+      } else {
+        const deletedByUrl = await cache.delete(request.url);
+        if (deletedByUrl) {
+          deletedCount++;
+        }
+      }
+    } catch (error) {
+      console.error(`Failed to delete cache entry: ${request.url}`, error);
+    }
+  }
+};
 var HFT_TextEmbedding = async (input, model, onProgress, signal) => {
   const generateEmbedding = await getPipeline(model, onProgress, {
     abort_signal: signal
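HFT_Unload mirrors HFT_Download: it drops the cached pipeline for the model and then removes the model's entries from the transformers-cache bucket. A sketch of invoking it directly, under the same (input, model, onProgress, signal) convention used above; the model name and record are placeholders:

```ts
// Sketch only; { model } matches the DownloadModelTaskExecuteInput shape used by
// HFT_Download, and modelRecord stands in for an HfTransformersOnnxModelRecord.
import { HFT_Unload } from "@workglow/ai-provider";

async function unloadModel(modelRecord: unknown, modelName: string) {
  return HFT_Unload(
    { model: modelName },
    modelRecord as any,
    (progress, message) => console.log(progress, message),
    new AbortController().signal
  );
}
```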
@@ -103,6 +144,52 @@ var HFT_TextEmbedding = async (input, model, onProgress, signal) => {
   }
   return { vector: hfVector.data };
 };
+var HFT_TextClassifier = async (input, model, onProgress, signal) => {
+  const textClassifier = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const result = await textClassifier(input.text, {
+    top_k: input.maxCategories || undefined,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  if (Array.isArray(result[0])) {
+    return {
+      categories: result[0].map((category) => ({
+        label: category.label,
+        score: category.score
+      }))
+    };
+  }
+  return {
+    categories: result.map((category) => ({
+      label: category.label,
+      score: category.score
+    }))
+  };
+};
+var HFT_TextLanguageDetection = async (input, model, onProgress, signal) => {
+  const textClassifier = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const result = await textClassifier(input.text, {
+    top_k: input.maxLanguages || undefined,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  if (Array.isArray(result[0])) {
+    return {
+      languages: result[0].map((category) => ({
+        language: category.label,
+        score: category.score
+      }))
+    };
+  }
+  return {
+    languages: result.map((category) => ({
+      language: category.label,
+      score: category.score
+    }))
+  };
+};
 var HFT_TextGeneration = async (input, model, onProgress, signal) => {
   const generateText = await getPipeline(model, onProgress, {
     abort_signal: signal
@@ -315,7 +402,10 @@ async function register_HFT_ClientJobFns(worker, client) {
   const ProviderRegistry = getAiProviderRegistry();
   const names = [
     "DownloadModelTask",
+    "UnloadModelTask",
     "TextEmbeddingTask",
+    "TextLanguageDetectionTask",
+    "TextClassifierTask",
     "TextGenerationTask",
     "TextTranslationTask",
     "TextRewriterTask",
@@ -351,9 +441,12 @@ async function register_HFT_InlineJobFns(client) {
   const ProviderRegistry = getAiProviderRegistry2();
   const fns = {
     ["DownloadModelTask"]: HFT_Download,
+    ["UnloadModelTask"]: HFT_Unload,
     ["TextEmbeddingTask"]: HFT_TextEmbedding,
     ["TextGenerationTask"]: HFT_TextGeneration,
     ["TextQuestionAnswerTask"]: HFT_TextQuestionAnswer,
+    ["TextLanguageDetectionTask"]: HFT_TextLanguageDetection,
+    ["TextClassifierTask"]: HFT_TextClassifier,
     ["TextRewriterTask"]: HFT_TextRewriter,
     ["TextSummaryTask"]: HFT_TextSummary,
     ["TextTranslationTask"]: HFT_TextTranslation
@@ -391,6 +484,8 @@ var HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry2.register(HFT_WORKER_JOBR
   workerServer.registerFunction("DownloadModelTask", HFT_Download);
   workerServer.registerFunction("TextEmbeddingTask", HFT_TextEmbedding);
   workerServer.registerFunction("TextGenerationTask", HFT_TextGeneration);
+  workerServer.registerFunction("TextLanguageDetectionTask", HFT_TextLanguageDetection);
+  workerServer.registerFunction("TextClassifierTask", HFT_TextClassifier);
   workerServer.registerFunction("TextTranslationTask", HFT_TextTranslation);
   workerServer.registerFunction("TextRewriterTask", HFT_TextRewriter);
   workerServer.registerFunction("TextSummaryTask", HFT_TextSummary);
@@ -401,33 +496,116 @@ var HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry2.register(HFT_WORKER_JOBR
 }, true);
 // src/tf-mediapipe/common/TFMP_Constants.ts
 var TENSORFLOW_MEDIAPIPE = "TENSORFLOW_MEDIAPIPE";
+var TextPipelineTask = {
+  "text-embedder": "text-embedder",
+  "text-classifier": "text-classifier",
+  "text-language-detector": "text-language-detector",
+  "genai-text": "genai-text",
+  "audio-classifier": "audio-classifier",
+  "audio-embedder": "audio-embedder",
+  "vision-face-detector": "vision-face-detector",
+  "vision-face-landmarker": "vision-face-landmarker",
+  "vision-face-stylizer": "vision-face-stylizer",
+  "vision-gesture-recognizer": "vision-gesture-recognizer",
+  "vision-hand-landmarker": "vision-hand-landmarker",
+  "vision-holistic-landmarker": "vision-holistic-landmarker",
+  "vision-image-classifier": "vision-image-classifier",
+  "vision-image-embedder": "vision-image-embedder",
+  "vision-image-segmenter": "vision-image-segmenter",
+  "vision-image-interactive-segmenter": "vision-image-interactive-segmenter",
+  "vision-object-detector": "vision-object-detector",
+  "vision-pose-landmarker": "vision-pose-landmarker"
+};
 // src/tf-mediapipe/common/TFMP_JobRunFns.ts
-import {
+import {
+  FilesetResolver,
+  LanguageDetector,
+  TextClassifier,
+  TextEmbedder
+} from "@mediapipe/tasks-text";
 import { PermanentJobError as PermanentJobError2 } from "@workglow/job-queue";
-var
-
-
+var wasm_tasks = new Map;
+var wasm_reference_counts = new Map;
+var model_to_wasm_mapping = new Map;
+var getWasmTask = async (model, onProgress, signal) => {
+  const taskEngine = model.providerConfig.taskEngine;
+  if (wasm_tasks.has(taskEngine)) {
+    return wasm_tasks.get(taskEngine);
+  }
+  if (signal.aborted) {
+    throw new PermanentJobError2("Aborted job");
+  }
+  onProgress(0.1, "Loading WASM task");
+  let wasmFileset;
+  switch (taskEngine) {
+    case "text":
+      wasmFileset = await FilesetResolver.forTextTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm");
+      break;
+    case "audio":
+      wasmFileset = await FilesetResolver.forAudioTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-audio@latest/wasm");
+      break;
+    case "vision":
+      wasmFileset = await FilesetResolver.forVisionTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm");
+      break;
+    case "genai":
+      wasmFileset = await FilesetResolver.forGenAiTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai@latest/wasm");
+      break;
+    default:
+      throw new PermanentJobError2("Invalid task engine");
+  }
+  wasm_tasks.set(taskEngine, wasmFileset);
+  return wasmFileset;
+};
+var modelTaskCache = new Map;
+var getModelTask = async (model, onProgress, signal, TaskType) => {
+  const modelPath = model.providerConfig.modelPath;
+  const taskEngine = model.providerConfig.taskEngine;
+  if (modelTaskCache.has(modelPath)) {
+    return modelTaskCache.get(modelPath);
+  }
+  const wasmFileset = await getWasmTask(model, onProgress, signal);
+  const task = await TaskType.createFromOptions(wasmFileset, {
     baseOptions: {
-      modelAssetPath:
+      modelAssetPath: modelPath
     }
   });
+  modelTaskCache.set(modelPath, task);
+  model_to_wasm_mapping.set(modelPath, taskEngine);
+  wasm_reference_counts.set(taskEngine, (wasm_reference_counts.get(taskEngine) || 0) + 1);
+  return task;
+};
+var getTextEmbedder = async (model, onProgress, signal) => {
+  return getModelTask(model, onProgress, signal, TextEmbedder);
+};
+var getTextClassifier = async (model, onProgress, signal) => {
+  return getModelTask(model, onProgress, signal, TextClassifier);
+};
+var getTextLanguageDetector = async (model, onProgress, signal) => {
+  return getModelTask(model, onProgress, signal, LanguageDetector);
+};
+var TFMP_Download = async (input, model, onProgress, signal) => {
+  switch (model?.providerConfig.pipeline) {
+    case "text-embedder":
+      await getTextEmbedder(model, onProgress, signal);
+      break;
+    case "text-classifier":
+      await getTextClassifier(model, onProgress, signal);
+      break;
+    case "text-language-detector":
+      await getTextLanguageDetector(model, onProgress, signal);
+      break;
+    default:
+      throw new PermanentJobError2("Invalid pipeline");
+  }
+  onProgress(0.9, "Pipeline loaded");
   return {
     model: input.model
   };
 };
 var TFMP_TextEmbedding = async (input, model, onProgress, signal) => {
-  const textFiles = await FilesetResolver.forTextTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm");
   onProgress(0.1, "Model loaded");
-  const
-
-      modelAssetPath: model.providerConfig.modelPath
-    }
-  });
-  if (signal.aborted) {
-    throw new PermanentJobError2("Aborted job");
-  }
-  onProgress(0.2, "Embedding text");
-  const result = embedder.embed(input.text);
+  const textEmbedder = await getTextEmbedder(model, onProgress, signal);
+  const result = textEmbedder.embed(input.text);
   if (!result.embeddings?.[0]?.floatEmbedding) {
     throw new PermanentJobError2("Failed to generate embedding: Empty result");
   }
@@ -436,6 +614,61 @@ var TFMP_TextEmbedding = async (input, model, onProgress, signal) => {
     vector: embedding
   };
 };
+var TFMP_TextClassifier = async (input, model, onProgress, signal) => {
+  onProgress(0.1, "Model loaded");
+  const textClassifier = await getTextClassifier(model, onProgress, signal);
+  const result = textClassifier.classify(input.text);
+  if (!result.classifications?.[0]?.categories) {
+    throw new PermanentJobError2("Failed to classify text: Empty result");
+  }
+  const categories = result.classifications[0].categories.map((category) => ({
+    label: category.categoryName,
+    score: category.score
+  }));
+  return {
+    categories
+  };
+};
+var TFMP_TextLanguageDetection = async (input, model, onProgress, signal) => {
+  onProgress(0.1, "Model loaded");
+  const textLanguageDetector = await getTextLanguageDetector(model, onProgress, signal);
+  const result = textLanguageDetector.detect(input.text);
+  if (!result.languages?.[0]?.languageCode) {
+    throw new PermanentJobError2("Failed to detect language: Empty result");
+  }
+  const languages = result.languages.map((language) => ({
+    language: language.languageCode,
+    score: language.probability
+  }));
+  return {
+    languages
+  };
+};
+var TFMP_Unload = async (input, model, onProgress, signal) => {
+  const modelPath = model.providerConfig.modelPath;
+  if (modelTaskCache.has(modelPath)) {
+    const item = modelTaskCache.get(modelPath);
+    if ("dispose" in item && typeof item.dispose === "function") {
+      item.dispose();
+    }
+    modelTaskCache.delete(modelPath);
+  }
+  const taskEngine = model_to_wasm_mapping.get(modelPath);
+  if (taskEngine) {
+    const currentCount = wasm_reference_counts.get(taskEngine) || 0;
+    const newCount = currentCount - 1;
+    if (newCount <= 0) {
+      wasm_tasks.delete(taskEngine);
+      wasm_reference_counts.delete(taskEngine);
+    } else {
+      wasm_reference_counts.set(taskEngine, newCount);
+    }
+    model_to_wasm_mapping.delete(modelPath);
+  }
+  return {
+    model: input.model
+  };
+};
 // src/tf-mediapipe/common/TFMP_ModelSchema.ts
 import { ModelSchema as ModelSchema2 } from "@workglow/ai";
 var TFMPModelSchema = {
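TFMP_Unload pairs with the reference counting in getModelTask above: each loaded model bumps the count for its task engine's WASM fileset, and the fileset is only dropped once the last model using that engine has been unloaded. A sketch of the assumed lifecycle, with placeholder model records that both use the "text" engine:

```ts
// Assumed behavior, inferred from the bundled code above; the records and names
// are placeholders, and the refcount comments describe the expected bookkeeping.
import { TFMP_Download, TFMP_Unload } from "@workglow/ai-provider";

async function demo(embedderModel: any, classifierModel: any, signal: AbortSignal) {
  const log = (progress: number, message?: string) => console.log(progress, message);

  await TFMP_Download({ model: "embedder" }, embedderModel, log, signal);     // "text" refcount -> 1
  await TFMP_Download({ model: "classifier" }, classifierModel, log, signal); // "text" refcount -> 2

  await TFMP_Unload({ model: "embedder" }, embedderModel, log, signal);       // -> 1, WASM fileset kept
  await TFMP_Unload({ model: "classifier" }, classifierModel, log, signal);   // -> 0, WASM fileset dropped
}
```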
@@ -452,9 +685,19 @@ var TFMPModelSchema = {
       modelPath: {
         type: "string",
         description: "Filesystem path or URI for the ONNX model."
+      },
+      taskEngine: {
+        type: "string",
+        enum: ["text", "audio", "vision", "genai"],
+        description: "Task engine for the MediaPipe model."
+      },
+      pipeline: {
+        type: "string",
+        enum: Object.values(TextPipelineTask),
+        description: "Pipeline task type for the MediaPipe model."
       }
     },
-    required: ["modelPath"],
+    required: ["modelPath", "taskEngine", "pipeline"],
     additionalProperties: false
   }
 },
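With taskEngine and pipeline now required alongside modelPath, a MediaPipe model record's providerConfig needs all three fields. A sketch of a conforming config; the .tflite URL is a placeholder, not a real asset path:

```ts
// Illustrative only; values must match the enums in the schema above.
const providerConfig = {
  modelPath: "https://example.com/models/text_embedder.tflite", // placeholder URL
  taskEngine: "text" as const,        // "text" | "audio" | "vision" | "genai"
  pipeline: "text-embedder" as const, // one of the TextPipelineTask values
};
```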
@@ -480,7 +723,13 @@ async function register_TFMP_ClientJobFns(worker, client) {
   const workerManager = globalServiceRegistry3.get(WORKER_MANAGER2);
   workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);
   const aiProviderRegistry = getAiProviderRegistry3();
-  const names = [
+  const names = [
+    "DownloadModelTask",
+    "UnloadModelTask",
+    "TextEmbeddingTask",
+    "TextLanguageDetectionTask",
+    "TextClassifierTask"
+  ];
   for (const name of names) {
     aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);
   }
@@ -509,7 +758,10 @@ import { getTaskQueueRegistry as getTaskQueueRegistry4 } from "@workglow/task-gr
 async function register_TFMP_InlineJobFns(client) {
   const aiProviderRegistry = getAiProviderRegistry4();
   aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "DownloadModelTask", TFMP_Download);
+  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "UnloadModelTask", TFMP_Unload);
   aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "TextEmbeddingTask", TFMP_TextEmbedding);
+  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "TextLanguageDetectionTask", TFMP_TextLanguageDetection);
+  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "TextClassifierTask", TFMP_TextClassifier);
   if (!client) {
     const storage = new InMemoryQueueStorage4(TENSORFLOW_MEDIAPIPE);
     await storage.setupDatabase();
@@ -539,6 +791,9 @@ var TFMP_WORKER_JOBRUN_REGISTER = globalServiceRegistry4.register(TFMP_WORKER_JO
   const workerServer = globalServiceRegistry4.get(WORKER_SERVER2);
   workerServer.registerFunction("DownloadModelTask", TFMP_Download);
   workerServer.registerFunction("TextEmbeddingTask", TFMP_TextEmbedding);
+  workerServer.registerFunction("TextLanguageDetectionTask", TFMP_TextLanguageDetection);
+  workerServer.registerFunction("TextClassifierTask", TFMP_TextClassifier);
+  workerServer.registerFunction("UnloadModelTask", TFMP_Unload);
   parentPort2.postMessage({ type: "ready" });
   console.log("TFMP_WORKER_JOBRUN registered");
   return workerServer;
@@ -550,9 +805,13 @@ export {
   register_HFT_ClientJobFns,
   VisionPipelineUseCase,
   TextPipelineUseCase,
+  TextPipelineTask,
   TFMP_WORKER_JOBRUN_REGISTER,
   TFMP_WORKER_JOBRUN,
+  TFMP_Unload,
+  TFMP_TextLanguageDetection,
   TFMP_TextEmbedding,
+  TFMP_TextClassifier,
   TFMP_Download,
   TFMPModelSchema,
   TENSORFLOW_MEDIAPIPE,
@@ -560,17 +819,21 @@ export {
   PipelineUseCase,
   MultimodalPipelineUseCase,
   HfTransformersOnnxModelSchema,
+  HTF_CACHE_NAME,
   HF_TRANSFORMERS_ONNX,
   HFT_WORKER_JOBRUN_REGISTER,
   HFT_WORKER_JOBRUN,
+  HFT_Unload,
   HFT_TextTranslation,
   HFT_TextSummary,
   HFT_TextRewriter,
   HFT_TextQuestionAnswer,
+  HFT_TextLanguageDetection,
   HFT_TextGeneration,
   HFT_TextEmbedding,
+  HFT_TextClassifier,
   HFT_Download,
   AudioPipelineUseCase
 };
 
-//# debugId=
+//# debugId=A83B19F263EDDB8064756E2164756E21
package/dist/index.js.map
CHANGED
@@ -2,20 +2,20 @@
"version": 3,
"sources": ["../src/hf-transformers/common/HFT_Constants.ts", "../src/hf-transformers/common/HFT_JobRunFns.ts", "../src/hf-transformers/common/HFT_ModelSchema.ts", "../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts", "../src/tf-mediapipe/common/TFMP_Constants.ts", "../src/tf-mediapipe/common/TFMP_JobRunFns.ts", "../src/tf-mediapipe/common/TFMP_ModelSchema.ts", "../src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts"],
"sourcesContent": [
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const HF_TRANSFORMERS_ONNX = \"HF_TRANSFORMERS_ONNX\";\n\nexport type QuantizationDataType =\n | \"auto\" // Auto-detect based on environment\n | \"fp32\"\n | \"fp16\"\n | \"q8\"\n | \"int8\"\n | \"uint8\"\n | \"q4\"\n | \"bnb4\"\n | \"q4f16\"; // fp16 model with int4 block weight quantization\n\nexport const QuantizationDataType = {\n auto: \"auto\",\n fp32: \"fp32\",\n fp16: \"fp16\",\n q8: \"q8\",\n int8: \"int8\",\n uint8: \"uint8\",\n q4: \"q4\",\n bnb4: \"bnb4\",\n q4f16: \"q4f16\",\n} as const satisfies Record<QuantizationDataType, QuantizationDataType>;\n\nexport type TextPipelineUseCase =\n | \"fill-mask\" // https://huggingface.co/tasks/fill-mask\n | \"token-classification\" // https://huggingface.co/tasks/token-classification\n | \"text-generation\" // https://huggingface.co/tasks/text-generation#completion-generation-models\n | \"text2text-generation\" // https://huggingface.co/tasks/text-generation#text-to-text-generation-models\n | \"text-classification\" // https://huggingface.co/tasks/text-classification\n | \"summarization\" // https://huggingface.co/tasks/sentence-similarity\n | \"translation\" // https://huggingface.co/tasks/translation\n | \"feature-extraction\" // https://huggingface.co/tasks/feature-extraction\n | \"zero-shot-classification\" // https://huggingface.co/tasks/zero-shot-classification\n | \"question-answering\"; // https://huggingface.co/tasks/question-answering\n\nexport const TextPipelineUseCase = {\n \"fill-mask\": \"fill-mask\",\n \"token-classification\": \"token-classification\",\n \"text-generation\": \"text-generation\",\n \"text2text-generation\": \"text2text-generation\",\n \"text-classification\": \"text-classification\",\n summarization: \"summarization\",\n translation: \"translation\",\n \"feature-extraction\": \"feature-extraction\",\n \"zero-shot-classification\": \"zero-shot-classification\",\n \"question-answering\": \"question-answering\",\n} as const satisfies Record<TextPipelineUseCase, TextPipelineUseCase>;\n\nexport type VisionPipelineUseCase =\n | \"background-removal\" // https://huggingface.co/tasks/image-segmentation#background-removal\n | \"image-segmentation\" // https://huggingface.co/tasks/image-segmentation\n | \"depth-estimation\" // https://huggingface.co/tasks/depth-estimation\n | \"image-classification\" // https://huggingface.co/tasks/image-classification\n | \"image-to-image\" // https://huggingface.co/tasks/image-to-image\n | \"object-detection\" // https://huggingface.co/tasks/object-detection\n | \"image-feature-extraction\"; // https://huggingface.co/tasks/image-feature-extraction\n\nexport const VisionPipelineUseCase = {\n \"background-removal\": \"background-removal\",\n \"image-segmentation\": \"image-segmentation\",\n \"depth-estimation\": \"depth-estimation\",\n \"image-classification\": \"image-classification\",\n \"image-to-image\": \"image-to-image\",\n \"object-detection\": \"object-detection\",\n \"image-feature-extraction\": \"image-feature-extraction\",\n} as const satisfies Record<VisionPipelineUseCase, VisionPipelineUseCase>;\n\nexport type AudioPipelineUseCase =\n | \"audio-classification\" // https://huggingface.co/tasks/audio-classification\n | \"automatic-speech-recognition\" // https://huggingface.co/tasks/automatic-speech-recognition\n | \"text-to-speech\"; // https://huggingface.co/tasks/text-to-speech\n\nexport const AudioPipelineUseCase = {\n 
\"audio-classification\": \"audio-classification\",\n \"automatic-speech-recognition\": \"automatic-speech-recognition\",\n \"text-to-speech\": \"text-to-speech\",\n} as const satisfies Record<AudioPipelineUseCase, AudioPipelineUseCase>;\n\nexport type MultimodalPipelineUseCase =\n | \"document-question-answering\" // https://huggingface.co/tasks/document-question-answering\n | \"image-to-text\" // https://huggingface.co/tasks/image-to-text\n | \"zero-shot-audio-classification\" // https://huggingface.co/tasks/zero-shot-audio-classification\n | \"zero-shot-image-classification\" // https://huggingface.co/tasks/zero-shot-image-classification\n | \"zero-shot-object-detection\"; // https://huggingface.co/tasks/zero-shot-object-detection\n\nexport const MultimodalPipelineUseCase = {\n \"document-question-answering\": \"document-question-answering\",\n \"image-to-text\": \"image-to-text\",\n \"zero-shot-audio-classification\": \"zero-shot-audio-classification\",\n \"zero-shot-image-classification\": \"zero-shot-image-classification\",\n \"zero-shot-object-detection\": \"zero-shot-object-detection\",\n} as const satisfies Record<MultimodalPipelineUseCase, MultimodalPipelineUseCase>;\n\nexport type PipelineUseCase =\n | TextPipelineUseCase\n | VisionPipelineUseCase\n | AudioPipelineUseCase\n | MultimodalPipelineUseCase;\n\nexport const PipelineUseCase = {\n ...TextPipelineUseCase,\n ...VisionPipelineUseCase,\n ...AudioPipelineUseCase,\n ...MultimodalPipelineUseCase,\n} as const satisfies Record<PipelineUseCase, PipelineUseCase>;\n",
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n DocumentQuestionAnsweringSingle,\n type FeatureExtractionPipeline,\n pipeline,\n // @ts-ignore temporary \"fix\"\n type PretrainedModelOptions,\n QuestionAnsweringPipeline,\n SummarizationPipeline,\n SummarizationSingle,\n type TextGenerationPipeline,\n TextGenerationSingle,\n TextStreamer,\n TranslationPipeline,\n TranslationSingle,\n} from \"@sroussey/transformers\";\nimport {\n AiProviderRunFn,\n type DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n TextGenerationInputSchema,\n TextGenerationOutputSchema,\n TextQuestionAnswerInputSchema,\n TextQuestionAnswerOutputSchema,\n TextRewriterInputSchema,\n TextRewriterOutputSchema,\n TextSummaryInputSchema,\n TextSummaryOutputSchema,\n TextTranslationInputSchema,\n TextTranslationOutputSchema,\n TypedArray,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { CallbackStatus } from \"./HFT_CallbackStatus\";\nimport { HfTransformersOnnxModelRecord } from \"./HFT_ModelSchema\";\n\nconst pipelines = new Map<string, any>();\n\n/**\n * Helper function to get a pipeline for a model\n */\nconst getPipeline = async (\n model: HfTransformersOnnxModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n options: PretrainedModelOptions = {}\n) => {\n if (pipelines.has(model.model_id)) {\n return pipelines.get(model.model_id);\n }\n\n // Create a callback status object for progress tracking\n const progressCallback = (status: CallbackStatus) => {\n const progress = status.status === \"progress\" ? Math.round(status.progress) : 0;\n if (status.status === \"progress\") {\n onProgress(progress, \"Downloading model\", {\n file: status.file,\n progress: status.progress,\n });\n }\n };\n\n const pipelineOptions: PretrainedModelOptions = {\n dtype: model.providerConfig.dType || \"q8\",\n ...(model.providerConfig.useExternalDataFormat\n ? { use_external_data_format: model.providerConfig.useExternalDataFormat }\n : {}),\n ...(model.providerConfig.device ? 
{ device: model.providerConfig.device as any } : {}),\n ...options,\n progress_callback: progressCallback,\n };\n\n const pipelineType = model.providerConfig.pipeline;\n const result = await pipeline(pipelineType, model.providerConfig.modelPath, pipelineOptions);\n pipelines.set(model.model_id, result);\n return result;\n};\n\n/**\n * Core implementation for downloading and caching a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteInput,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Download the model by creating a pipeline\n await getPipeline(model!, onProgress, { abort_signal: signal });\n\n return {\n model: input.model!,\n };\n};\n\n/**\n * Core implementation for text embedding using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\n\nexport const HFT_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateEmbedding: FeatureExtractionPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n // Generate the embedding\n const hfVector = await generateEmbedding(input.text, {\n pooling: \"mean\",\n normalize: model?.providerConfig.normalize,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n // Validate the embedding dimensions\n if (hfVector.size !== model?.providerConfig.nativeDimensions) {\n console.warn(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`,\n input,\n hfVector\n );\n throw new PermanentJobError(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`\n );\n }\n\n return { vector: hfVector.data as TypedArray };\n};\n\n/**\n * Core implementation for text generation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextGeneration: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextGenerationInputSchema>,\n DeReplicateFromSchema<typeof TextGenerationOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, signal);\n\n let results = await generateText(input.prompt, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text translation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextTranslation: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextTranslationInputSchema>,\n DeReplicateFromSchema<typeof TextTranslationOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const translate: TranslationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(translate.tokenizer, onProgress);\n\n const result = await translate(input.text, {\n src_lang: input.source_lang,\n tgt_lang: input.target_lang,\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n } as any);\n\n let translatedText: string | string[] = \"\";\n if (Array.isArray(result)) {\n translatedText = result.map((r) => (r as TranslationSingle)?.translation_text || \"\");\n } else {\n translatedText = (result as TranslationSingle)?.translation_text || \"\";\n }\n\n return {\n text: translatedText,\n target_lang: input.target_lang,\n };\n};\n\n/**\n * Core implementation for text rewriting using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextRewriter: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextRewriterInputSchema>,\n DeReplicateFromSchema<typeof TextRewriterOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateText.tokenizer, onProgress);\n\n // This lib doesn't support this kind of rewriting with a separate prompt vs text\n const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + input.text;\n\n let results = await generateText(promptedText, {\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n\n if (text === promptedText) {\n throw new PermanentJobError(\"Rewriter failed to generate new text\");\n }\n\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text summarization using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextSummary: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextSummaryInputSchema>,\n DeReplicateFromSchema<typeof TextSummaryOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateSummary: SummarizationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateSummary.tokenizer, onProgress);\n\n let result = await generateSummary(input.text, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n } as any);\n\n let summaryText = \"\";\n if (Array.isArray(result)) {\n summaryText = (result[0] as SummarizationSingle)?.summary_text || \"\";\n } else {\n summaryText = (result as SummarizationSingle)?.summary_text || \"\";\n }\n\n return {\n text: summaryText,\n };\n};\n\n/**\n * Core implementation for question answering using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextQuestionAnswer: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextQuestionAnswerInputSchema>,\n DeReplicateFromSchema<typeof TextQuestionAnswerOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Get the question answering pipeline\n const generateAnswer: QuestionAnsweringPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);\n\n const result = await generateAnswer(input.question, input.context, {\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n } as any);\n\n let answerText = \"\";\n if (Array.isArray(result)) {\n answerText = (result[0] as DocumentQuestionAnsweringSingle)?.answer || \"\";\n } else {\n answerText = (result as DocumentQuestionAnsweringSingle)?.answer || \"\";\n }\n\n return {\n text: answerText,\n };\n};\n\n/**\n * Create a text streamer for a given tokenizer and update progress function\n * @param tokenizer - The tokenizer to use for the streamer\n * @param updateProgress - The function to call to update the progress\n * @param signal - The signal to use for the streamer for aborting\n * @returns The text streamer\n */\nfunction createTextStreamer(\n tokenizer: any,\n updateProgress: (progress: number, message?: string, details?: any) => void,\n signal?: AbortSignal\n) {\n let count = 0;\n return new TextStreamer(tokenizer, {\n skip_prompt: true,\n decode_kwargs: { skip_special_tokens: true },\n callback_function: (text: string) => {\n count++;\n const result = 100 * (1 - Math.exp(-0.05 * count));\n const progress = Math.round(Math.min(result, 100));\n updateProgress(progress, \"Generating\", { text, progress });\n },\n ...(signal ? { abort_signal: signal } : {}),\n });\n}\n",
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const HF_TRANSFORMERS_ONNX = \"HF_TRANSFORMERS_ONNX\";\nexport const HTF_CACHE_NAME = \"transformers-cache\";\n\nexport type QuantizationDataType =\n | \"auto\" // Auto-detect based on environment\n | \"fp32\"\n | \"fp16\"\n | \"q8\"\n | \"int8\"\n | \"uint8\"\n | \"q4\"\n | \"bnb4\"\n | \"q4f16\"; // fp16 model with int4 block weight quantization\n\nexport const QuantizationDataType = {\n auto: \"auto\",\n fp32: \"fp32\",\n fp16: \"fp16\",\n q8: \"q8\",\n int8: \"int8\",\n uint8: \"uint8\",\n q4: \"q4\",\n bnb4: \"bnb4\",\n q4f16: \"q4f16\",\n} as const satisfies Record<QuantizationDataType, QuantizationDataType>;\n\nexport type TextPipelineUseCase =\n | \"fill-mask\" // https://huggingface.co/tasks/fill-mask\n | \"token-classification\" // https://huggingface.co/tasks/token-classification\n | \"text-generation\" // https://huggingface.co/tasks/text-generation#completion-generation-models\n | \"text2text-generation\" // https://huggingface.co/tasks/text-generation#text-to-text-generation-models\n | \"text-classification\" // https://huggingface.co/tasks/text-classification\n | \"summarization\" // https://huggingface.co/tasks/sentence-similarity\n | \"translation\" // https://huggingface.co/tasks/translation\n | \"feature-extraction\" // https://huggingface.co/tasks/feature-extraction\n | \"zero-shot-classification\" // https://huggingface.co/tasks/zero-shot-classification\n | \"question-answering\"; // https://huggingface.co/tasks/question-answering\n\nexport const TextPipelineUseCase = {\n \"fill-mask\": \"fill-mask\",\n \"token-classification\": \"token-classification\",\n \"text-generation\": \"text-generation\",\n \"text2text-generation\": \"text2text-generation\",\n \"text-classification\": \"text-classification\",\n summarization: \"summarization\",\n translation: \"translation\",\n \"feature-extraction\": \"feature-extraction\",\n \"zero-shot-classification\": \"zero-shot-classification\",\n \"question-answering\": \"question-answering\",\n} as const satisfies Record<TextPipelineUseCase, TextPipelineUseCase>;\n\nexport type VisionPipelineUseCase =\n | \"background-removal\" // https://huggingface.co/tasks/image-segmentation#background-removal\n | \"image-segmentation\" // https://huggingface.co/tasks/image-segmentation\n | \"depth-estimation\" // https://huggingface.co/tasks/depth-estimation\n | \"image-classification\" // https://huggingface.co/tasks/image-classification\n | \"image-to-image\" // https://huggingface.co/tasks/image-to-image\n | \"object-detection\" // https://huggingface.co/tasks/object-detection\n | \"image-feature-extraction\"; // https://huggingface.co/tasks/image-feature-extraction\n\nexport const VisionPipelineUseCase = {\n \"background-removal\": \"background-removal\",\n \"image-segmentation\": \"image-segmentation\",\n \"depth-estimation\": \"depth-estimation\",\n \"image-classification\": \"image-classification\",\n \"image-to-image\": \"image-to-image\",\n \"object-detection\": \"object-detection\",\n \"image-feature-extraction\": \"image-feature-extraction\",\n} as const satisfies Record<VisionPipelineUseCase, VisionPipelineUseCase>;\n\nexport type AudioPipelineUseCase =\n | \"audio-classification\" // https://huggingface.co/tasks/audio-classification\n | \"automatic-speech-recognition\" // https://huggingface.co/tasks/automatic-speech-recognition\n | \"text-to-speech\"; // https://huggingface.co/tasks/text-to-speech\n\nexport 
const AudioPipelineUseCase = {\n \"audio-classification\": \"audio-classification\",\n \"automatic-speech-recognition\": \"automatic-speech-recognition\",\n \"text-to-speech\": \"text-to-speech\",\n} as const satisfies Record<AudioPipelineUseCase, AudioPipelineUseCase>;\n\nexport type MultimodalPipelineUseCase =\n | \"document-question-answering\" // https://huggingface.co/tasks/document-question-answering\n | \"image-to-text\" // https://huggingface.co/tasks/image-to-text\n | \"zero-shot-audio-classification\" // https://huggingface.co/tasks/zero-shot-audio-classification\n | \"zero-shot-image-classification\" // https://huggingface.co/tasks/zero-shot-image-classification\n | \"zero-shot-object-detection\"; // https://huggingface.co/tasks/zero-shot-object-detection\n\nexport const MultimodalPipelineUseCase = {\n \"document-question-answering\": \"document-question-answering\",\n \"image-to-text\": \"image-to-text\",\n \"zero-shot-audio-classification\": \"zero-shot-audio-classification\",\n \"zero-shot-image-classification\": \"zero-shot-image-classification\",\n \"zero-shot-object-detection\": \"zero-shot-object-detection\",\n} as const satisfies Record<MultimodalPipelineUseCase, MultimodalPipelineUseCase>;\n\nexport type PipelineUseCase =\n | TextPipelineUseCase\n | VisionPipelineUseCase\n | AudioPipelineUseCase\n | MultimodalPipelineUseCase;\n\nexport const PipelineUseCase = {\n ...TextPipelineUseCase,\n ...VisionPipelineUseCase,\n ...AudioPipelineUseCase,\n ...MultimodalPipelineUseCase,\n} as const satisfies Record<PipelineUseCase, PipelineUseCase>;\n",
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n DocumentQuestionAnsweringSingle,\n type FeatureExtractionPipeline,\n pipeline,\n // @ts-ignore temporary \"fix\"\n type PretrainedModelOptions,\n QuestionAnsweringPipeline,\n SummarizationPipeline,\n SummarizationSingle,\n TextClassificationOutput,\n TextClassificationPipeline,\n type TextGenerationPipeline,\n TextGenerationSingle,\n TextStreamer,\n TranslationPipeline,\n TranslationSingle,\n} from \"@sroussey/transformers\";\nimport {\n AiProviderRunFn,\n type DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n TextClassifierInputSchema,\n TextClassifierOutputSchema,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n TextGenerationInputSchema,\n TextGenerationOutputSchema,\n TextLanguageDetectionInputSchema,\n TextLanguageDetectionOutputSchema,\n TextQuestionAnswerInputSchema,\n TextQuestionAnswerOutputSchema,\n TextRewriterInputSchema,\n TextRewriterOutputSchema,\n TextSummaryInputSchema,\n TextSummaryOutputSchema,\n TextTranslationInputSchema,\n TextTranslationOutputSchema,\n TypedArray,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { CallbackStatus } from \"./HFT_CallbackStatus\";\nimport { HTF_CACHE_NAME } from \"./HFT_Constants\";\nimport { HfTransformersOnnxModelRecord } from \"./HFT_ModelSchema\";\n\nconst pipelines = new Map<string, any>();\n\n/**\n * Helper function to get a pipeline for a model\n */\nconst getPipeline = async (\n model: HfTransformersOnnxModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n options: PretrainedModelOptions = {}\n) => {\n if (pipelines.has(model.model_id)) {\n return pipelines.get(model.model_id);\n }\n\n // Create a callback status object for progress tracking\n const progressCallback = (status: CallbackStatus) => {\n const progress = status.status === \"progress\" ? Math.round(status.progress) : 0;\n if (status.status === \"progress\") {\n onProgress(progress, \"Downloading model\", {\n file: status.file,\n progress: status.progress,\n });\n }\n };\n\n const pipelineOptions: PretrainedModelOptions = {\n dtype: model.providerConfig.dType || \"q8\",\n ...(model.providerConfig.useExternalDataFormat\n ? { use_external_data_format: model.providerConfig.useExternalDataFormat }\n : {}),\n ...(model.providerConfig.device ? 
{ device: model.providerConfig.device as any } : {}),\n ...options,\n progress_callback: progressCallback,\n };\n\n const pipelineType = model.providerConfig.pipeline;\n const result = await pipeline(pipelineType, model.providerConfig.modelPath, pipelineOptions);\n pipelines.set(model.model_id, result);\n return result;\n};\n\n/**\n * Core implementation for downloading and caching a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteInput,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Download the model by creating a pipeline\n await getPipeline(model!, onProgress, { abort_signal: signal });\n\n return {\n model: input.model!,\n };\n};\n\n/**\n * Core implementation for unloading a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Unload: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteInput,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Delete the pipeline from the in-memory map\n if (pipelines.has(model!.model_id)) {\n pipelines.delete(model!.model_id);\n onProgress(50, \"Pipeline removed from memory\");\n }\n\n // Delete model cache entries\n const modelPath = model!.providerConfig.modelPath;\n await deleteModelCache(modelPath);\n onProgress(100, \"Model cache deleted\");\n\n return {\n model: input.model!,\n };\n};\n\n/**\n * Deletes all cache entries for a given model path\n * @param modelPath - The model path to delete from cache\n */\nconst deleteModelCache = async (modelPath: string): Promise<void> => {\n const cache = await caches.open(HTF_CACHE_NAME);\n const keys = await cache.keys();\n const prefix = `/${modelPath}/`;\n\n // Collect all matching requests first\n const requestsToDelete: Request[] = [];\n for (const request of keys) {\n const url = new URL(request.url);\n if (url.pathname.startsWith(prefix)) {\n requestsToDelete.push(request);\n }\n }\n\n // Delete all matching requests\n let deletedCount = 0;\n for (const request of requestsToDelete) {\n try {\n const deleted = await cache.delete(request);\n if (deleted) {\n deletedCount++;\n } else {\n // If delete returns false, try with URL string as fallback\n const deletedByUrl = await cache.delete(request.url);\n if (deletedByUrl) {\n deletedCount++;\n }\n }\n } catch (error) {\n console.error(`Failed to delete cache entry: ${request.url}`, error);\n }\n }\n};\n\n/**\n * Core implementation for text embedding using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\n\nexport const HFT_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateEmbedding: FeatureExtractionPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n // Generate the embedding\n const hfVector = await generateEmbedding(input.text, {\n pooling: \"mean\",\n normalize: model?.providerConfig.normalize,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n // Validate the embedding dimensions\n if (hfVector.size !== model?.providerConfig.nativeDimensions) {\n console.warn(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`,\n input,\n hfVector\n );\n throw new PermanentJobError(\n `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model?.providerConfig.nativeDimensions}`\n );\n }\n\n return { vector: hfVector.data as TypedArray };\n};\n\nexport const HFT_TextClassifier: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextClassifierInputSchema>,\n DeReplicateFromSchema<typeof TextClassifierOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const textClassifier: TextClassificationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const result = await textClassifier(input.text, {\n top_k: input.maxCategories || undefined,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n if (Array.isArray(result[0])) {\n return {\n categories: result[0].map((category) => ({\n label: category.label,\n score: category.score,\n })),\n };\n }\n\n return {\n categories: (result as TextClassificationOutput).map((category) => ({\n label: category.label,\n score: category.score,\n })),\n };\n};\n\nexport const HFT_TextLanguageDetection: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextLanguageDetectionInputSchema>,\n DeReplicateFromSchema<typeof TextLanguageDetectionOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const textClassifier: TextClassificationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const result = await textClassifier(input.text, {\n top_k: input.maxLanguages || undefined,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n if (Array.isArray(result[0])) {\n return {\n languages: result[0].map((category) => ({\n language: category.label,\n score: category.score,\n })),\n };\n }\n\n return {\n languages: (result as TextClassificationOutput).map((category) => ({\n language: category.label,\n score: category.score,\n })),\n };\n};\n\n/**\n * Core implementation for text generation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextGeneration: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextGenerationInputSchema>,\n DeReplicateFromSchema<typeof TextGenerationOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n\n const streamer = createTextStreamer(generateText.tokenizer, onProgress, signal);\n\n let results = await generateText(input.prompt, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text translation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextTranslation: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextTranslationInputSchema>,\n DeReplicateFromSchema<typeof TextTranslationOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const translate: TranslationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(translate.tokenizer, onProgress);\n\n const result = await translate(input.text, {\n src_lang: input.source_lang,\n tgt_lang: input.target_lang,\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n } as any);\n\n let translatedText: string | string[] = \"\";\n if (Array.isArray(result)) {\n translatedText = result.map((r) => (r as TranslationSingle)?.translation_text || \"\");\n } else {\n translatedText = (result as TranslationSingle)?.translation_text || \"\";\n }\n\n return {\n text: translatedText,\n target_lang: input.target_lang,\n };\n};\n\n/**\n * Core implementation for text rewriting using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextRewriter: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextRewriterInputSchema>,\n DeReplicateFromSchema<typeof TextRewriterOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateText.tokenizer, onProgress);\n\n // This lib doesn't support this kind of rewriting with a separate prompt vs text\n const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + input.text;\n\n let results = await generateText(promptedText, {\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n });\n\n if (!Array.isArray(results)) {\n results = [results];\n }\n\n let text = (results[0] as TextGenerationSingle)?.generated_text;\n if (Array.isArray(text)) {\n text = text[text.length - 1]?.content;\n }\n\n if (text === promptedText) {\n throw new PermanentJobError(\"Rewriter failed to generate new text\");\n }\n\n return {\n text,\n };\n};\n\n/**\n * Core implementation for text summarization using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextSummary: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextSummaryInputSchema>,\n DeReplicateFromSchema<typeof TextSummaryOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n const generateSummary: SummarizationPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateSummary.tokenizer, onProgress);\n\n let result = await generateSummary(input.text, {\n streamer,\n ...(signal ? 
{ abort_signal: signal } : {}),\n } as any);\n\n let summaryText = \"\";\n if (Array.isArray(result)) {\n summaryText = (result[0] as SummarizationSingle)?.summary_text || \"\";\n } else {\n summaryText = (result as SummarizationSingle)?.summary_text || \"\";\n }\n\n return {\n text: summaryText,\n };\n};\n\n/**\n * Core implementation for question answering using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextQuestionAnswer: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextQuestionAnswerInputSchema>,\n DeReplicateFromSchema<typeof TextQuestionAnswerOutputSchema>,\n HfTransformersOnnxModelRecord\n> = async (input, model, onProgress, signal) => {\n // Get the question answering pipeline\n const generateAnswer: QuestionAnsweringPipeline = await getPipeline(model!, onProgress, {\n abort_signal: signal,\n });\n const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);\n\n const result = await generateAnswer(input.question, input.context, {\n streamer,\n ...(signal ? { abort_signal: signal } : {}),\n } as any);\n\n let answerText = \"\";\n if (Array.isArray(result)) {\n answerText = (result[0] as DocumentQuestionAnsweringSingle)?.answer || \"\";\n } else {\n answerText = (result as DocumentQuestionAnsweringSingle)?.answer || \"\";\n }\n\n return {\n text: answerText,\n };\n};\n\n/**\n * Create a text streamer for a given tokenizer and update progress function\n * @param tokenizer - The tokenizer to use for the streamer\n * @param updateProgress - The function to call to update the progress\n * @param signal - The signal to use for the streamer for aborting\n * @returns The text streamer\n */\nfunction createTextStreamer(\n tokenizer: any,\n updateProgress: (progress: number, message?: string, details?: any) => void,\n signal?: AbortSignal\n) {\n let count = 0;\n return new TextStreamer(tokenizer, {\n skip_prompt: true,\n decode_kwargs: { skip_special_tokens: true },\n callback_function: (text: string) => {\n count++;\n const result = 100 * (1 - Math.exp(-0.05 * count));\n const progress = Math.round(Math.min(result, 100));\n updateProgress(progress, \"Generating\", { text, progress });\n },\n ...(signal ? { abort_signal: signal } : {}),\n });\n}\n",
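The new unload path above clears model files from the browser Cache API under a path prefix. A condensed, standalone sketch of that cleanup (assuming a browser or worker context where `caches` is available; the cache name mirrors `HTF_CACHE_NAME` and the model path is purely illustrative):

```typescript
// Standalone sketch of the prefix-based cleanup that HFT_Unload performs via deleteModelCache.
// "transformers-cache" mirrors HTF_CACHE_NAME; the model path used below is illustrative.
async function clearTransformersModelCache(modelPath: string): Promise<number> {
  const cache = await caches.open("transformers-cache");
  const prefix = `/${modelPath}/`;
  let deleted = 0;
  for (const request of await cache.keys()) {
    // Only delete entries whose URL path falls under the model's directory prefix.
    if (new URL(request.url).pathname.startsWith(prefix) && (await cache.delete(request))) {
      deleted++;
    }
  }
  return deleted;
}

// Example (hypothetical model path):
// await clearTransformersModelCache("Xenova/all-MiniLM-L6-v2");
```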
|
|
7
7
|
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ModelSchema } from \"@workglow/ai\";\nimport { DataPortSchemaObject, FromSchema } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX, PipelineUseCase, QuantizationDataType } from \"./HFT_Constants\";\n\nexport const HfTransformersOnnxModelSchema = {\n type: \"object\",\n properties: {\n provider: {\n const: HF_TRANSFORMERS_ONNX,\n description: \"Discriminator: ONNX runtime backend.\",\n },\n providerConfig: {\n type: \"object\",\n description: \"ONNX runtime-specific options.\",\n properties: {\n pipeline: {\n type: \"string\",\n enum: Object.values(PipelineUseCase),\n description: \"Pipeline type for the ONNX model.\",\n default: \"text-generation\",\n },\n modelPath: {\n type: \"string\",\n description: \"Filesystem path or URI for the ONNX model.\",\n },\n dType: {\n type: \"string\",\n enum: Object.values(QuantizationDataType),\n description: \"Data type for the ONNX model.\",\n default: \"float32\",\n },\n device: {\n type: \"string\",\n enum: [\"cpu\", \"gpu\", \"webgpu\", \"wasm\", \"metal\"],\n description: \"High-level device selection.\",\n default: \"webgpu\",\n },\n executionProviders: {\n type: \"array\",\n items: { type: \"string\" },\n description: \"Raw ONNX Runtime execution provider identifiers.\",\n },\n intraOpNumThreads: {\n type: \"integer\",\n minimum: 1,\n },\n interOpNumThreads: {\n type: \"integer\",\n minimum: 1,\n },\n useExternalDataFormat: {\n type: \"boolean\",\n description: \"Whether the model uses external data format.\",\n },\n nativeDimensions: {\n type: \"integer\",\n description: \"The native dimensions of the model.\",\n },\n normalize: {\n type: \"boolean\",\n description: \"Whether the model uses normalization.\",\n },\n languageStyle: {\n type: \"string\",\n description: \"The language style of the model.\",\n },\n },\n required: [\"modelPath\", \"pipeline\"],\n additionalProperties: false,\n },\n },\n required: [\"provider\", \"providerConfig\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\n\nconst ExtendedModelSchema = {\n type: \"object\",\n properties: {\n ...ModelSchema.properties,\n ...HfTransformersOnnxModelSchema.properties,\n },\n required: [...ModelSchema.required, ...HfTransformersOnnxModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type HfTransformersOnnxModelRecord = FromSchema<typeof ExtendedModelSchema>;\n",
|
|
8
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\n\n/**\n * Registers the HuggingFace Transformers client job functions with a web worker.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param worker - The web worker to use for job execution\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_HFT_ClientJobFns(\n worker: Worker,\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n\n workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);\n\n const ProviderRegistry = getAiProviderRegistry();\n const names = [\n \"DownloadModelTask\",\n \"TextEmbeddingTask\",\n \"TextGenerationTask\",\n \"TextTranslationTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"TextQuestionAnswerTask\",\n ];\n for (const name of names) {\n ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);\n }\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n HF_TRANSFORMERS_ONNX\n );\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n // await server.start();\n }\n}\n",
|
|
9
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { env } from \"@sroussey/transformers\";\nimport { AiJob, AiJobInput, AiProviderRunFn, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\nimport {\n HFT_Download,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\n/**\n * Registers the HuggingFace Transformers inline job functions for same-thread execution.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_HFT_InlineJobFns(\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n // @ts-ignore\n env.backends.onnx.wasm.proxy = true;\n const ProviderRegistry = getAiProviderRegistry();\n const fns: Record<string, AiProviderRunFn<any, any, any>> = {\n [\"DownloadModelTask\"]: HFT_Download,\n [\"TextEmbeddingTask\"]: HFT_TextEmbedding,\n [\"TextGenerationTask\"]: HFT_TextGeneration,\n [\"TextQuestionAnswerTask\"]: HFT_TextQuestionAnswer,\n [\"TextRewriterTask\"]: HFT_TextRewriter,\n [\"TextSummaryTask\"]: HFT_TextSummary,\n [\"TextTranslationTask\"]: HFT_TextTranslation,\n };\n for (const [jobName, fn] of Object.entries(fns)) {\n ProviderRegistry.registerRunFn<any, any>(HF_TRANSFORMERS_ONNX, jobName, fn);\n }\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n HF_TRANSFORMERS_ONNX\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
|
|
10
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n parentPort,\n WORKER_SERVER,\n} from \"@workglow/util\";\nimport {\n HFT_Download,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\nexport const HFT_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.hft\");\n\nexport const HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n HFT_WORKER_JOBRUN,\n () => {\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n workerServer.registerFunction(\"DownloadModelTask\", HFT_Download);\n workerServer.registerFunction(\"TextEmbeddingTask\", HFT_TextEmbedding);\n workerServer.registerFunction(\"TextGenerationTask\", HFT_TextGeneration);\n workerServer.registerFunction(\"TextTranslationTask\", HFT_TextTranslation);\n workerServer.registerFunction(\"TextRewriterTask\", HFT_TextRewriter);\n workerServer.registerFunction(\"TextSummaryTask\", HFT_TextSummary);\n workerServer.registerFunction(\"TextQuestionAnswerTask\", HFT_TextQuestionAnswer);\n parentPort.postMessage({ type: \"ready\" });\n console.log(\"HFT_WORKER_JOBRUN registered\");\n return workerServer;\n },\n true\n);\n",
|
|
11
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const TENSORFLOW_MEDIAPIPE = \"TENSORFLOW_MEDIAPIPE\";\n",
|
|
12
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { FilesetResolver, TextEmbedder } from \"@mediapipe/tasks-text\";\nimport type {\n AiProviderRunFn,\n DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { TFMPModelRecord } from \"./TFMP_ModelSchema\";\n\n/**\n * Core implementation for downloading and caching a MediaPipe TFJS model.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n const textFiles = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n\n // Create an embedder to get dimensions\n const embedder = await TextEmbedder.createFromOptions(textFiles, {\n baseOptions: {\n modelAssetPath: model!.providerConfig.modelPath,\n },\n });\n\n return {\n model: input.model,\n };\n};\n\n/**\n * Core implementation for text embedding using MediaPipe TFJS.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n const textFiles = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n\n onProgress(0.1, \"Model loaded\");\n\n const embedder = await TextEmbedder.createFromOptions(textFiles, {\n baseOptions: {\n modelAssetPath: model!.providerConfig.modelPath,\n },\n });\n\n if (signal.aborted) {\n throw new PermanentJobError(\"Aborted job\");\n }\n\n onProgress(0.2, \"Embedding text\");\n\n const result = embedder.embed(input.text);\n\n if (!result.embeddings?.[0]?.floatEmbedding) {\n throw new PermanentJobError(\"Failed to generate embedding: Empty result\");\n }\n\n const embedding = Float32Array.from(result.embeddings[0].floatEmbedding);\n\n return {\n vector: embedding,\n };\n};\n",
|
|
13
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ModelSchema } from \"@workglow/ai\";\nimport { DataPortSchemaObject, FromSchema } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\nexport const TFMPModelSchema = {\n type: \"object\",\n properties: {\n provider: {\n const: TENSORFLOW_MEDIAPIPE,\n description: \"Discriminator: TensorFlow MediaPipe backend.\",\n },\n providerConfig: {\n type: \"object\",\n description: \"TensorFlow MediaPipe-specific options.\",\n properties: {\n modelPath: {\n type: \"string\",\n description: \"Filesystem path or URI for the ONNX model.\",\n },\n },\n required: [\"modelPath\"],\n additionalProperties: false,\n },\n },\n required: [\"provider\", \"providerConfig\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\n\nconst ExtendedModelSchema = {\n type: \"object\",\n properties: {\n ...ModelSchema.properties,\n ...TFMPModelSchema.properties,\n },\n required: [...ModelSchema.required, ...TFMPModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type TFMPModelRecord = FromSchema<typeof ExtendedModelSchema>;\n",
|
|
14
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\n/**\n * Registers the TensorFlow MediaPipe client job functions with a web worker.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param worker - The web worker to use for job execution\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_TFMP_ClientJobFns(\n worker: Worker,\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);\n\n const aiProviderRegistry = getAiProviderRegistry();\n const names = [\"DownloadModelTask\"
|
|
15
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\nimport {
|
|
16
|
-
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n parentPort,\n WORKER_SERVER,\n} from \"@workglow/util\";\nimport {
|
|
8
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\n\n/**\n * Registers the HuggingFace Transformers client job functions with a web worker.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param worker - The web worker to use for job execution\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_HFT_ClientJobFns(\n worker: Worker,\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n\n workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);\n\n const ProviderRegistry = getAiProviderRegistry();\n const names = [\n \"DownloadModelTask\",\n \"UnloadModelTask\",\n \"TextEmbeddingTask\",\n \"TextLanguageDetectionTask\",\n \"TextClassifierTask\",\n \"TextGenerationTask\",\n \"TextTranslationTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"TextQuestionAnswerTask\",\n ];\n for (const name of names) {\n ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);\n }\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n HF_TRANSFORMERS_ONNX\n );\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n // await server.start();\n }\n}\n",
|
|
9
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { env } from \"@sroussey/transformers\";\nimport { AiJob, AiJobInput, AiProviderRunFn, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\nimport {\n HFT_Download,\n HFT_TextClassifier,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextLanguageDetection,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n HFT_Unload,\n} from \"../common/HFT_JobRunFns\";\n\n/**\n * Registers the HuggingFace Transformers inline job functions for same-thread execution.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_HFT_InlineJobFns(\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n // @ts-ignore\n env.backends.onnx.wasm.proxy = true;\n const ProviderRegistry = getAiProviderRegistry();\n const fns: Record<string, AiProviderRunFn<any, any, any>> = {\n [\"DownloadModelTask\"]: HFT_Download,\n [\"UnloadModelTask\"]: HFT_Unload,\n [\"TextEmbeddingTask\"]: HFT_TextEmbedding,\n [\"TextGenerationTask\"]: HFT_TextGeneration,\n [\"TextQuestionAnswerTask\"]: HFT_TextQuestionAnswer,\n [\"TextLanguageDetectionTask\"]: HFT_TextLanguageDetection,\n [\"TextClassifierTask\"]: HFT_TextClassifier,\n [\"TextRewriterTask\"]: HFT_TextRewriter,\n [\"TextSummaryTask\"]: HFT_TextSummary,\n [\"TextTranslationTask\"]: HFT_TextTranslation,\n };\n for (const [jobName, fn] of Object.entries(fns)) {\n ProviderRegistry.registerRunFn<any, any>(HF_TRANSFORMERS_ONNX, jobName, fn);\n }\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n HF_TRANSFORMERS_ONNX\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: HF_TRANSFORMERS_ONNX,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
|
|
10
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n parentPort,\n WORKER_SERVER,\n} from \"@workglow/util\";\nimport {\n HFT_Download,\n HFT_TextClassifier,\n HFT_TextEmbedding,\n HFT_TextGeneration,\n HFT_TextLanguageDetection,\n HFT_TextQuestionAnswer,\n HFT_TextRewriter,\n HFT_TextSummary,\n HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\nexport const HFT_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.hft\");\n\nexport const HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n HFT_WORKER_JOBRUN,\n () => {\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n workerServer.registerFunction(\"DownloadModelTask\", HFT_Download);\n workerServer.registerFunction(\"TextEmbeddingTask\", HFT_TextEmbedding);\n workerServer.registerFunction(\"TextGenerationTask\", HFT_TextGeneration);\n workerServer.registerFunction(\"TextLanguageDetectionTask\", HFT_TextLanguageDetection);\n workerServer.registerFunction(\"TextClassifierTask\", HFT_TextClassifier);\n workerServer.registerFunction(\"TextTranslationTask\", HFT_TextTranslation);\n workerServer.registerFunction(\"TextRewriterTask\", HFT_TextRewriter);\n workerServer.registerFunction(\"TextSummaryTask\", HFT_TextSummary);\n workerServer.registerFunction(\"TextQuestionAnswerTask\", HFT_TextQuestionAnswer);\n parentPort.postMessage({ type: \"ready\" });\n console.log(\"HFT_WORKER_JOBRUN registered\");\n return workerServer;\n },\n true\n);\n",
|
|
11
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const TENSORFLOW_MEDIAPIPE = \"TENSORFLOW_MEDIAPIPE\";\n\nexport type TextPipelineTask =\n | \"text-embedder\"\n | \"text-classifier\"\n | \"text-language-detector\"\n | \"genai-text\"\n | \"audio-classifier\"\n | \"audio-embedder\"\n | \"vision-face-detector\"\n | \"vision-face-landmarker\"\n | \"vision-face-stylizer\"\n | \"vision-gesture-recognizer\"\n | \"vision-hand-landmarker\"\n | \"vision-holistic-landmarker\"\n | \"vision-image-classifier\"\n | \"vision-image-embedder\"\n | \"vision-image-segmenter\"\n | \"vision-image-interactive-segmenter\"\n | \"vision-object-detector\"\n | \"vision-pose-landmarker\";\n\nexport const TextPipelineTask = {\n \"text-embedder\": \"text-embedder\",\n \"text-classifier\": \"text-classifier\",\n \"text-language-detector\": \"text-language-detector\",\n \"genai-text\": \"genai-text\",\n \"audio-classifier\": \"audio-classifier\",\n \"audio-embedder\": \"audio-embedder\",\n \"vision-face-detector\": \"vision-face-detector\",\n \"vision-face-landmarker\": \"vision-face-landmarker\",\n \"vision-face-stylizer\": \"vision-face-stylizer\",\n \"vision-gesture-recognizer\": \"vision-gesture-recognizer\",\n \"vision-hand-landmarker\": \"vision-hand-landmarker\",\n \"vision-holistic-landmarker\": \"vision-holistic-landmarker\",\n \"vision-image-classifier\": \"vision-image-classifier\",\n \"vision-image-embedder\": \"vision-image-embedder\",\n \"vision-image-segmenter\": \"vision-image-segmenter\",\n \"vision-image-interactive-segmenter\": \"vision-image-interactive-segmenter\",\n \"vision-object-detector\": \"vision-object-detector\",\n \"vision-pose-landmarker\": \"vision-pose-landmarker\",\n} as const satisfies Record<TextPipelineTask, TextPipelineTask>;\n",
|
|
12
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n FilesetResolver,\n LanguageDetector,\n TextClassifier,\n TextEmbedder,\n} from \"@mediapipe/tasks-text\";\nimport type {\n AiProviderRunFn,\n DeReplicateFromSchema,\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TextClassifierInputSchema,\n TextClassifierOutputSchema,\n TextEmbeddingInputSchema,\n TextEmbeddingOutputSchema,\n TextLanguageDetectionInputSchema,\n TextLanguageDetectionOutputSchema,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { TFMPModelRecord } from \"./TFMP_ModelSchema\";\n\ninterface TFMPWasmFileset {\n /** The path to the Wasm loader script. */\n wasmLoaderPath: string;\n /** The path to the Wasm binary. */\n wasmBinaryPath: string;\n /** The optional path to the asset loader script. */\n assetLoaderPath?: string;\n /** The optional path to the assets binary. */\n assetBinaryPath?: string;\n}\n\n/**\n * Cache for WASM filesets by task engine (text, audio, vision, genai).\n * Multiple models may share the same WASM fileset.\n */\nconst wasm_tasks = new Map<string, TFMPWasmFileset>();\n\n/**\n * Reference counts tracking how many models are using each WASM fileset.\n * When count reaches 0, the WASM fileset can be safely unloaded.\n */\nconst wasm_reference_counts = new Map<string, number>();\n\n/**\n * Maps model paths to their corresponding task engine.\n * Used to determine which WASM fileset to decrement when a model is unloaded.\n */\nconst model_to_wasm_mapping = new Map<string, string>();\n\n/**\n * Helper function to get a WASM task for a model\n */\nconst getWasmTask = async (\n model: TFMPModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n signal: AbortSignal\n): Promise<TFMPWasmFileset> => {\n const taskEngine = model.providerConfig.taskEngine;\n\n if (wasm_tasks.has(taskEngine)) {\n return wasm_tasks.get(taskEngine)!;\n }\n\n if (signal.aborted) {\n throw new PermanentJobError(\"Aborted job\");\n }\n\n onProgress(0.1, \"Loading WASM task\");\n\n let wasmFileset: TFMPWasmFileset;\n\n switch (taskEngine) {\n case \"text\":\n wasmFileset = await FilesetResolver.forTextTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n );\n break;\n case \"audio\":\n wasmFileset = await FilesetResolver.forAudioTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-audio@latest/wasm\"\n );\n break;\n case \"vision\":\n wasmFileset = await FilesetResolver.forVisionTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm\"\n );\n break;\n case \"genai\":\n wasmFileset = await FilesetResolver.forGenAiTasks(\n \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-genai@latest/wasm\"\n );\n break;\n default:\n throw new PermanentJobError(\"Invalid task engine\");\n }\n\n wasm_tasks.set(taskEngine, wasmFileset);\n return wasmFileset;\n};\n\nconst modelTaskCache = new Map<string, TextEmbedder | TextClassifier | LanguageDetector>();\n\ntype InferTaskInstance<T> = T extends typeof TextEmbedder\n ? TextEmbedder\n : T extends typeof TextClassifier\n ? TextClassifier\n : T extends typeof LanguageDetector\n ? 
LanguageDetector\n : never;\n\nconst getModelTask = async <\n T extends typeof TextEmbedder | typeof TextClassifier | typeof LanguageDetector,\n>(\n model: TFMPModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n signal: AbortSignal,\n TaskType: T\n): Promise<InferTaskInstance<T>> => {\n const modelPath = model.providerConfig.modelPath;\n const taskEngine = model.providerConfig.taskEngine;\n\n if (modelTaskCache.has(modelPath)) {\n return modelTaskCache.get(modelPath)! as any;\n }\n\n // Load WASM if needed\n const wasmFileset = await getWasmTask(model, onProgress, signal);\n\n // Create new model instance\n const task = await TaskType.createFromOptions(wasmFileset, {\n baseOptions: {\n modelAssetPath: modelPath,\n },\n });\n\n // Cache the model\n modelTaskCache.set(modelPath, task);\n\n // Track WASM usage for this model and increment reference count\n model_to_wasm_mapping.set(modelPath, taskEngine);\n wasm_reference_counts.set(taskEngine, (wasm_reference_counts.get(taskEngine) || 0) + 1);\n\n return task as any;\n};\n\nconst getTextEmbedder = async (\n model: TFMPModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n signal: AbortSignal\n): Promise<TextEmbedder> => {\n return getModelTask(model, onProgress, signal, TextEmbedder);\n};\n\nconst getTextClassifier = async (\n model: TFMPModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n signal: AbortSignal\n): Promise<TextClassifier> => {\n return getModelTask(model, onProgress, signal, TextClassifier);\n};\n\nconst getTextLanguageDetector = async (\n model: TFMPModelRecord,\n onProgress: (progress: number, message?: string, details?: any) => void,\n signal: AbortSignal\n): Promise<LanguageDetector> => {\n return getModelTask(model, onProgress, signal, LanguageDetector);\n};\n\n/**\n * Core implementation for downloading and caching a MediaPipe TFJS model.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_Download: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n switch (model?.providerConfig.pipeline) {\n case \"text-embedder\":\n await getTextEmbedder(model, onProgress, signal);\n break;\n case \"text-classifier\":\n await getTextClassifier(model, onProgress, signal);\n break;\n case \"text-language-detector\":\n await getTextLanguageDetector(model, onProgress, signal);\n break;\n default:\n throw new PermanentJobError(\"Invalid pipeline\");\n }\n onProgress(0.9, \"Pipeline loaded\");\n\n return {\n model: input.model,\n };\n};\n\n/**\n * Core implementation for text embedding using MediaPipe TFJS.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_TextEmbedding: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n onProgress(0.1, \"Model loaded\");\n\n const textEmbedder = await getTextEmbedder(model!, onProgress, signal);\n const result = textEmbedder.embed(input.text);\n\n if (!result.embeddings?.[0]?.floatEmbedding) {\n throw new PermanentJobError(\"Failed to generate embedding: Empty result\");\n }\n\n const embedding = Float32Array.from(result.embeddings[0].floatEmbedding);\n\n return {\n vector: embedding,\n };\n};\n\n/**\n * Core implementation for text classification using MediaPipe TFJS.\n * This 
is shared between inline and worker implementations.\n */\nexport const TFMP_TextClassifier: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextClassifierInputSchema>,\n DeReplicateFromSchema<typeof TextClassifierOutputSchema>,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n onProgress(0.1, \"Model loaded\");\n\n const textClassifier = await getTextClassifier(model!, onProgress, signal);\n const result = textClassifier.classify(input.text);\n\n if (!result.classifications?.[0]?.categories) {\n throw new PermanentJobError(\"Failed to classify text: Empty result\");\n }\n\n const categories = result.classifications[0].categories.map((category) => ({\n label: category.categoryName,\n score: category.score,\n }));\n\n return {\n categories,\n };\n};\n\n/**\n * Core implementation for language detection using MediaPipe TFJS.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_TextLanguageDetection: AiProviderRunFn<\n DeReplicateFromSchema<typeof TextLanguageDetectionInputSchema>,\n DeReplicateFromSchema<typeof TextLanguageDetectionOutputSchema>,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n onProgress(0.1, \"Model loaded\");\n\n const textLanguageDetector = await getTextLanguageDetector(model!, onProgress, signal);\n const result = textLanguageDetector.detect(input.text);\n\n if (!result.languages?.[0]?.languageCode) {\n throw new PermanentJobError(\"Failed to detect language: Empty result\");\n }\n\n const languages = result.languages.map((language) => ({\n language: language.languageCode,\n score: language.probability,\n }));\n\n return {\n languages,\n };\n};\n\n/**\n * Core implementation for unloading a MediaPipe TFJS model.\n * This is shared between inline and worker implementations.\n *\n * When a model is unloaded, this function:\n * 1. Disposes of the model instance\n * 2. Decrements the reference count for the associated WASM fileset\n * 3. If no other models are using the WASM fileset (count reaches 0), unloads the WASM\n */\nexport const TFMP_Unload: AiProviderRunFn<\n DownloadModelTaskExecuteInput,\n DownloadModelTaskExecuteOutput,\n TFMPModelRecord\n> = async (input, model, onProgress, signal) => {\n const modelPath = model!.providerConfig.modelPath;\n\n // Dispose of the model task if it exists\n if (modelTaskCache.has(modelPath)) {\n const item = modelTaskCache.get(modelPath)!;\n if (\"dispose\" in item && typeof item.dispose === \"function\") {\n item.dispose();\n }\n modelTaskCache.delete(modelPath);\n }\n\n // Decrease reference count for WASM fileset\n const taskEngine = model_to_wasm_mapping.get(modelPath);\n if (taskEngine) {\n const currentCount = wasm_reference_counts.get(taskEngine) || 0;\n const newCount = currentCount - 1;\n\n if (newCount <= 0) {\n // No more models using this WASM fileset, unload it\n wasm_tasks.delete(taskEngine);\n wasm_reference_counts.delete(taskEngine);\n } else {\n wasm_reference_counts.set(taskEngine, newCount);\n }\n\n model_to_wasm_mapping.delete(modelPath);\n }\n\n return {\n model: input.model,\n };\n};\n",
|
|
13
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { ModelSchema } from \"@workglow/ai\";\nimport { DataPortSchemaObject, FromSchema } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE, TextPipelineTask } from \"../common/TFMP_Constants\";\n\nexport const TFMPModelSchema = {\n type: \"object\",\n properties: {\n provider: {\n const: TENSORFLOW_MEDIAPIPE,\n description: \"Discriminator: TensorFlow MediaPipe backend.\",\n },\n providerConfig: {\n type: \"object\",\n description: \"TensorFlow MediaPipe-specific options.\",\n properties: {\n modelPath: {\n type: \"string\",\n description: \"Filesystem path or URI for the ONNX model.\",\n },\n taskEngine: {\n type: \"string\",\n enum: [\"text\", \"audio\", \"vision\", \"genai\"],\n description: \"Task engine for the MediaPipe model.\",\n },\n pipeline: {\n type: \"string\",\n enum: Object.values(TextPipelineTask),\n description: \"Pipeline task type for the MediaPipe model.\",\n },\n },\n required: [\"modelPath\", \"taskEngine\", \"pipeline\"],\n additionalProperties: false,\n },\n },\n required: [\"provider\", \"providerConfig\"],\n additionalProperties: true,\n} as const satisfies DataPortSchemaObject;\n\nconst ExtendedModelSchema = {\n type: \"object\",\n properties: {\n ...ModelSchema.properties,\n ...TFMPModelSchema.properties,\n },\n required: [...ModelSchema.required, ...TFMPModelSchema.required],\n additionalProperties: false,\n} as const satisfies DataPortSchemaObject;\n\nexport type TFMPModelRecord = FromSchema<typeof ExtendedModelSchema>;\n",
|
|
14
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\n/**\n * Registers the TensorFlow MediaPipe client job functions with a web worker.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param worker - The web worker to use for job execution\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_TFMP_ClientJobFns(\n worker: Worker,\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);\n\n const aiProviderRegistry = getAiProviderRegistry();\n const names = [\n \"DownloadModelTask\",\n \"UnloadModelTask\",\n \"TextEmbeddingTask\",\n \"TextLanguageDetectionTask\",\n \"TextClassifierTask\",\n ];\n for (const name of names) {\n aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);\n }\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n TENSORFLOW_MEDIAPIPE\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
|
|
15
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { AiJob, AiJobInput, getAiProviderRegistry } from \"@workglow/ai\";\nimport { ConcurrencyLimiter, JobQueueClient, JobQueueServer } from \"@workglow/job-queue\";\nimport { InMemoryQueueStorage } from \"@workglow/storage\";\nimport { getTaskQueueRegistry, TaskInput, TaskOutput } from \"@workglow/task-graph\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\nimport {\n TFMP_Download,\n TFMP_TextClassifier,\n TFMP_TextEmbedding,\n TFMP_TextLanguageDetection,\n TFMP_Unload,\n} from \"../common/TFMP_JobRunFns\";\n\n/**\n * Registers the TensorFlow MediaPipe inline job functions for same-thread execution.\n * If no client is provided, creates a default in-memory queue and registers it.\n *\n * @param client - Optional existing JobQueueClient. If not provided, creates a default in-memory queue.\n */\nexport async function register_TFMP_InlineJobFns(\n client?: JobQueueClient<AiJobInput<TaskInput>, TaskOutput>\n): Promise<void> {\n const aiProviderRegistry = getAiProviderRegistry();\n\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"DownloadModelTask\",\n TFMP_Download as any\n );\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"UnloadModelTask\",\n TFMP_Unload as any\n );\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"TextEmbeddingTask\",\n TFMP_TextEmbedding as any\n );\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"TextLanguageDetectionTask\",\n TFMP_TextLanguageDetection as any\n );\n aiProviderRegistry.registerRunFn<any, any>(\n TENSORFLOW_MEDIAPIPE,\n \"TextClassifierTask\",\n TFMP_TextClassifier as any\n );\n\n // If no client provided, create a default in-memory queue\n if (!client) {\n const storage = new InMemoryQueueStorage<AiJobInput<TaskInput>, TaskOutput>(\n TENSORFLOW_MEDIAPIPE\n );\n await storage.setupDatabase();\n\n const server = new JobQueueServer<AiJobInput<TaskInput>, TaskOutput>(AiJob, {\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n limiter: new ConcurrencyLimiter(1, 100),\n });\n\n client = new JobQueueClient<AiJobInput<TaskInput>, TaskOutput>({\n storage,\n queueName: TENSORFLOW_MEDIAPIPE,\n });\n\n client.attach(server);\n\n getTaskQueueRegistry().registerQueue({ server, client, storage });\n await server.start();\n }\n}\n",
|
|
16
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n createServiceToken,\n globalServiceRegistry,\n parentPort,\n WORKER_SERVER,\n} from \"@workglow/util\";\nimport {\n TFMP_Download,\n TFMP_TextClassifier,\n TFMP_TextEmbedding,\n TFMP_TextLanguageDetection,\n TFMP_Unload,\n} from \"../common/TFMP_JobRunFns\";\n\n// Register the worker functions\nexport const TFMP_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.tfmp\");\n\nexport const TFMP_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n TFMP_WORKER_JOBRUN,\n () => {\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n workerServer.registerFunction(\"DownloadModelTask\", TFMP_Download);\n workerServer.registerFunction(\"TextEmbeddingTask\", TFMP_TextEmbedding);\n workerServer.registerFunction(\"TextLanguageDetectionTask\", TFMP_TextLanguageDetection);\n workerServer.registerFunction(\"TextClassifierTask\", TFMP_TextClassifier);\n workerServer.registerFunction(\"UnloadModelTask\", TFMP_Unload);\n parentPort.postMessage({ type: \"ready\" });\n console.log(\"TFMP_WORKER_JOBRUN registered\");\n return workerServer;\n },\n true\n);\n"
|
|
17
17
|
],
|
|
18
|
-
"mappings": ";AAMO,IAAM,uBAAuB;AAa7B,IAAM,uBAAuB;AAAA,EAClC,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA,EACN,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAO;AAAA,EACP,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,OAAO;AACT;AAcO,IAAM,sBAAsB;AAAA,EACjC,aAAa;AAAA,EACb,wBAAwB;AAAA,EACxB,mBAAmB;AAAA,EACnB,wBAAwB;AAAA,EACxB,uBAAuB;AAAA,EACvB,eAAe;AAAA,EACf,aAAa;AAAA,EACb,sBAAsB;AAAA,EACtB,4BAA4B;AAAA,EAC5B,sBAAsB;AACxB;AAWO,IAAM,wBAAwB;AAAA,EACnC,sBAAsB;AAAA,EACtB,sBAAsB;AAAA,EACtB,oBAAoB;AAAA,EACpB,wBAAwB;AAAA,EACxB,kBAAkB;AAAA,EAClB,oBAAoB;AAAA,EACpB,4BAA4B;AAC9B;AAOO,IAAM,uBAAuB;AAAA,EAClC,wBAAwB;AAAA,EACxB,gCAAgC;AAAA,EAChC,kBAAkB;AACpB;AASO,IAAM,4BAA4B;AAAA,EACvC,+BAA+B;AAAA,EAC/B,iBAAiB;AAAA,EACjB,kCAAkC;AAAA,EAClC,kCAAkC;AAAA,EAClC,8BAA8B;AAChC;AAQO,IAAM,kBAAkB;AAAA,KAC1B;AAAA,KACA;AAAA,KACA;AAAA,KACA;AACL;;AC1GA;AAAA;AAAA;AAAA;AAiCA;AAIA,IAAM,YAAY,IAAI;AAKtB,IAAM,cAAc,OAClB,OACA,YACA,UAAkC,CAAC,MAChC;AAAA,EACH,IAAI,UAAU,IAAI,MAAM,QAAQ,GAAG;AAAA,IACjC,OAAO,UAAU,IAAI,MAAM,QAAQ;AAAA,EACrC;AAAA,EAGA,MAAM,mBAAmB,CAAC,WAA2B;AAAA,IACnD,MAAM,WAAW,OAAO,WAAW,aAAa,KAAK,MAAM,OAAO,QAAQ,IAAI;AAAA,IAC9E,IAAI,OAAO,WAAW,YAAY;AAAA,MAChC,WAAW,UAAU,qBAAqB;AAAA,QACxC,MAAM,OAAO;AAAA,QACb,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAAA;AAAA,EAGF,MAAM,kBAA0C;AAAA,IAC9C,OAAO,MAAM,eAAe,SAAS;AAAA,OACjC,MAAM,eAAe,wBACrB,EAAE,0BAA0B,MAAM,eAAe,sBAAsB,IACvE,CAAC;AAAA,OACD,MAAM,eAAe,SAAS,EAAE,QAAQ,MAAM,eAAe,OAAc,IAAI,CAAC;AAAA,OACjF;AAAA,IACH,mBAAmB;AAAA,EACrB;AAAA,EAEA,MAAM,eAAe,MAAM,eAAe;AAAA,EAC1C,MAAM,SAAS,MAAM,SAAS,cAAc,MAAM,eAAe,WAAW,eAAe;AAAA,EAC3F,UAAU,IAAI,MAAM,UAAU,MAAM;AAAA,EACpC,OAAO;AAAA;AAOF,IAAM,eAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,YAAY,OAAQ,YAAY,EAAE,cAAc,OAAO,CAAC;AAAA,EAE9D,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAQK,IAAM,oBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,oBAA+C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACzF,cAAc;AAAA,EAChB,CAAC;AAAA,EAGD,MAAM,WAAW,MAAM,kBAAkB,MAAM,MAAM;AAAA,IACnD,SAAS;AAAA,IACT,WAAW,OAAO,eAAe;AAAA,OAC7B,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAGD,IAAI,SAAS,SAAS,OAAO,eAAe,kBAAkB;AAAA,IAC5D,QAAQ,KACN,wEAAwE,SAAS,YAAY,OAAO,eAAe,oBACnH,OACA,QACF;AAAA,IACA,MAAM,IAAI,kBACR,wEAAwE,SAAS,YAAY,OAAO,eAAe,kBACrH;AAAA,EACF;AAAA,EAEA,OAAO,EAAE,QAAQ,SAAS,KAAmB;AAAA;AAOxC,IAAM,qBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EAED,MAAM,WAAW,mBAAmB,aAAa,WAAW,YAAY,MAAM;AAAA,EAE9E,IAAI,UAAU,MAAM,aAAa,MAAM,QAAQ;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EACA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EAEjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EACA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,sBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAiC,MAAM,YAAY,OAAQ,YAAY;AAAA,IAC3E,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,UAAU,WAAW,UAAU;AAAA,EAEnE,MAAM,SAAS,MAAM,UAAU,MAAM,MAAM;AAAA,IACzC,UAAU,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,IAChB;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,iBAAoC;AAAA,EACxC,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,iBAAiB,OAAO,IAAI,CAAC,MAAO,GAAyB,oBAAoB,EAAE;AAAA,EACrF,EAAO;AAAA,IACL,iBAAkB,QAA8B,oBAAoB;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,IACN,aAAa,MAAM;AAAA,EACrB;AAAA;AAOK,IAAM,mBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,aAAa,WAAW,UAAU;AAAA,EAGtE,MAAM,gBAAgB,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAM,MAAM;AAAA,EAEvE,IAAI,UAAU,MAAM,aAAa,cAAc;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CA
AC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EACjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EAEA,IAAI,SAAS,cAAc;AAAA,IACzB,MAAM,IAAI,kBAAkB,sCAAsC;AAAA,EACpE;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,kBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,kBAAyC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACnF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,gBAAgB,WAAW,UAAU;AAAA,EAEzE,IAAI,SAAS,MAAM,gBAAgB,MAAM,MAAM;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,cAAc;AAAA,EAClB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,cAAe,OAAO,IAA4B,gBAAgB;AAAA,EACpE,EAAO;AAAA,IACL,cAAe,QAAgC,gBAAgB;AAAA;AAAA,EAGjE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAOK,IAAM,yBAIT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,iBAA4C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACtF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,eAAe,WAAW,UAAU;AAAA,EAExE,MAAM,SAAS,MAAM,eAAe,MAAM,UAAU,MAAM,SAAS;AAAA,IACjE;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,aAAa;AAAA,EACjB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,aAAc,OAAO,IAAwC,UAAU;AAAA,EACzE,EAAO;AAAA,IACL,aAAc,QAA4C,UAAU;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAUF,SAAS,kBAAkB,CACzB,WACA,gBACA,QACA;AAAA,EACA,IAAI,QAAQ;AAAA,EACZ,OAAO,IAAI,aAAa,WAAW;AAAA,IACjC,aAAa;AAAA,IACb,eAAe,EAAE,qBAAqB,KAAK;AAAA,IAC3C,mBAAmB,CAAC,SAAiB;AAAA,MACnC;AAAA,MACA,MAAM,SAAS,OAAO,IAAI,KAAK,IAAI,QAAQ,KAAK;AAAA,MAChD,MAAM,WAAW,KAAK,MAAM,KAAK,IAAI,QAAQ,GAAG,CAAC;AAAA,MACjD,eAAe,UAAU,cAAc,EAAE,MAAM,SAAS,CAAC;AAAA;AAAA,OAEvD,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA;;ACpUH;AAIO,IAAM,gCAAgC;AAAA,EAC3C,MAAM;AAAA,EACN,YAAY;AAAA,IACV,UAAU;AAAA,MACR,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,gBAAgB;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,QACV,UAAU;AAAA,UACR,MAAM;AAAA,UACN,MAAM,OAAO,OAAO,eAAe;AAAA,UACnC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,WAAW;AAAA,UACT,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,OAAO;AAAA,UACL,MAAM;AAAA,UACN,MAAM,OAAO,OAAO,oBAAoB;AAAA,UACxC,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,QAAQ;AAAA,UACN,MAAM;AAAA,UACN,MAAM,CAAC,OAAO,OAAO,UAAU,QAAQ,OAAO;AAAA,UAC9C,aAAa;AAAA,UACb,SAAS;AAAA,QACX;AAAA,QACA,oBAAoB;AAAA,UAClB,MAAM;AAAA,UACN,OAAO,EAAE,MAAM,SAAS;AAAA,UACxB,aAAa;AAAA,QACf;AAAA,QACA,mBAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,mBAAmB;AAAA,UACjB,MAAM;AAAA,UACN,SAAS;AAAA,QACX;AAAA,QACA,uBAAuB;AAAA,UACrB,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,kBAAkB;AAAA,UAChB,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,WAAW;AAAA,UACT,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,QACA,eAAe;AAAA,UACb,MAAM;AAAA,UACN,aAAa;AAAA,QACf;AAAA,MACF;AAAA,MACA,UAAU,CAAC,aAAa,UAAU;AAAA,MAClC,sBAAsB;AAAA,IACxB;AAAA,EACF;AAAA,EACA,UAAU,CAAC,YAAY,gBAAgB;AAAA,EACvC,sBAAsB;AACxB;AAEA,IAAM,sBAAsB;AAAA,EAC1B,MAAM;AAAA,EACN,YAAY;AAAA,OACP,YAAY;AAAA,OACZ,8BAA8B;AAAA,EACnC;AAAA,EACA,UAAU,CAAC,GAAG,YAAY,UAAU,GAAG,8BAA8B,QAAQ;AAAA,EAC7E,sBAAsB;AACxB;;ACnFA;AACA;AACA;AACA;AACA;AAUA,eAAsB,yBAAyB,CAC7C,QACA,QACe;AAAA,EACf,MAAM,gBAAgB,sBAAsB,IAAI,cAAc;AAAA,EAE9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,mBAAmB,sBAAsB;AAAA,EAC/C,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,WAAW,QAAQ,OAAO;AAAA,IACxB,iBAAiB,sBAAsB,sBAAsB,IAAI;AAAA,EACnE;AAAA,EAEA,IAAI,CAAC,QAAQ;AAAA,IACX,MAAM,UAAU,IAAI,qBAClB,oBACF;AAAA,IAEA,MAAM,SAAS,IAAI,eAAkD,OAAO;AAAA,MAC1E;AAAA,MACA,WAAW;AAAA,MACX,SAAS,IAAI,mBAAmB,GAAG,GAAG;AAAA,IACxC,CAAC;AAAA,IAED,SAAS,IAAI,eAAkD;AAAA,MAC7D;AAAA,MACA,WAAW;AAAA,IACb,CAAC;AAAA,IAED,OAAO,OAAO,MAAM;AAAA,IAEpB,qBAAqB,EAAE,cAAc,EAAE,QAAQ,QAAQ,QAAQ,CAAC;AAAA,EAElE;AAAA;;ACxDF
… (end of the removed "mappings" value; base64-VLQ source-map data, not reproduced here)
-  "debugId": "…"
+  "mappings": "…"   (regenerated base64-VLQ mappings for the rebuilt dist/index.js; machine-generated, omitted)
+  "debugId": "A83B19F263EDDB8064756E2164756E21",
   "names": []
 }
package/dist/tf-mediapipe/common/TFMP_Constants.d.ts
CHANGED
@@ -4,4 +4,25 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 export declare const TENSORFLOW_MEDIAPIPE = "TENSORFLOW_MEDIAPIPE";
+export type TextPipelineTask = "text-embedder" | "text-classifier" | "text-language-detector" | "genai-text" | "audio-classifier" | "audio-embedder" | "vision-face-detector" | "vision-face-landmarker" | "vision-face-stylizer" | "vision-gesture-recognizer" | "vision-hand-landmarker" | "vision-holistic-landmarker" | "vision-image-classifier" | "vision-image-embedder" | "vision-image-segmenter" | "vision-image-interactive-segmenter" | "vision-object-detector" | "vision-pose-landmarker";
+export declare const TextPipelineTask: {
+    readonly "text-embedder": "text-embedder";
+    readonly "text-classifier": "text-classifier";
+    readonly "text-language-detector": "text-language-detector";
+    readonly "genai-text": "genai-text";
+    readonly "audio-classifier": "audio-classifier";
+    readonly "audio-embedder": "audio-embedder";
+    readonly "vision-face-detector": "vision-face-detector";
+    readonly "vision-face-landmarker": "vision-face-landmarker";
+    readonly "vision-face-stylizer": "vision-face-stylizer";
+    readonly "vision-gesture-recognizer": "vision-gesture-recognizer";
+    readonly "vision-hand-landmarker": "vision-hand-landmarker";
+    readonly "vision-holistic-landmarker": "vision-holistic-landmarker";
+    readonly "vision-image-classifier": "vision-image-classifier";
+    readonly "vision-image-embedder": "vision-image-embedder";
+    readonly "vision-image-segmenter": "vision-image-segmenter";
+    readonly "vision-image-interactive-segmenter": "vision-image-interactive-segmenter";
+    readonly "vision-object-detector": "vision-object-detector";
+    readonly "vision-pose-landmarker": "vision-pose-landmarker";
+};
 //# sourceMappingURL=TFMP_Constants.d.ts.map
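The TextPipelineTask constant added above maps each pipeline name to itself, so provider code can branch on a model's declared pipeline when deciding which MediaPipe task class to build. The sketch below shows that kind of dispatch for the three text pipelines using only the public @mediapipe/tasks-text API; the wasmPath and modelAssetPath arguments are placeholders, and this is an illustration, not the package's internal code.

    import {
      FilesetResolver,
      LanguageDetector,
      TextClassifier,
      TextEmbedder,
    } from "@mediapipe/tasks-text";

    // Subset of the new TextPipelineTask values handled by the text task engine.
    type TextPipeline = "text-embedder" | "text-classifier" | "text-language-detector";

    async function createTextTask(pipeline: TextPipeline, wasmPath: string, modelAssetPath: string) {
      // A single WASM fileset can back several task instances.
      const fileset = await FilesetResolver.forTextTasks(wasmPath);
      const options = { baseOptions: { modelAssetPath } };
      switch (pipeline) {
        case "text-embedder":
          return TextEmbedder.createFromOptions(fileset, options);
        case "text-classifier":
          return TextClassifier.createFromOptions(fileset, options);
        case "text-language-detector":
          return LanguageDetector.createFromOptions(fileset, options);
      }
    }

The returned tasks expose embed(), classify(), and detect() respectively, which is presumably what the new TFMP text run functions declared later in this diff build on.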
package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"TFMP_Constants.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_Constants.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"TFMP_Constants.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_Constants.ts"],"names":[],"mappings":"…"}   (mappings regenerated to cover the new TextPipelineTask declarations; base64-VLQ payload omitted)
package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts
CHANGED
@@ -3,7 +3,7 @@
  * Copyright 2025 Steven Roussey <sroussey@gmail.com>
  * SPDX-License-Identifier: Apache-2.0
  */
-import type { AiProviderRunFn, DeReplicateFromSchema, DownloadModelTaskExecuteInput, DownloadModelTaskExecuteOutput, TextEmbeddingInputSchema, TextEmbeddingOutputSchema } from "@workglow/ai";
+import type { AiProviderRunFn, DeReplicateFromSchema, DownloadModelTaskExecuteInput, DownloadModelTaskExecuteOutput, TextClassifierInputSchema, TextClassifierOutputSchema, TextEmbeddingInputSchema, TextEmbeddingOutputSchema, TextLanguageDetectionInputSchema, TextLanguageDetectionOutputSchema } from "@workglow/ai";
 import { TFMPModelRecord } from "./TFMP_ModelSchema";
 /**
  * Core implementation for downloading and caching a MediaPipe TFJS model.
@@ -15,4 +15,24 @@ export declare const TFMP_Download: AiProviderRunFn<DownloadModelTaskExecuteInpu
  * This is shared between inline and worker implementations.
  */
 export declare const TFMP_TextEmbedding: AiProviderRunFn<DeReplicateFromSchema<typeof TextEmbeddingInputSchema>, DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>, TFMPModelRecord>;
+/**
+ * Core implementation for text classification using MediaPipe TFJS.
+ * This is shared between inline and worker implementations.
+ */
+export declare const TFMP_TextClassifier: AiProviderRunFn<DeReplicateFromSchema<typeof TextClassifierInputSchema>, DeReplicateFromSchema<typeof TextClassifierOutputSchema>, TFMPModelRecord>;
+/**
+ * Core implementation for language detection using MediaPipe TFJS.
+ * This is shared between inline and worker implementations.
+ */
+export declare const TFMP_TextLanguageDetection: AiProviderRunFn<DeReplicateFromSchema<typeof TextLanguageDetectionInputSchema>, DeReplicateFromSchema<typeof TextLanguageDetectionOutputSchema>, TFMPModelRecord>;
+/**
+ * Core implementation for unloading a MediaPipe TFJS model.
+ * This is shared between inline and worker implementations.
+ *
+ * When a model is unloaded, this function:
+ * 1. Disposes of the model instance
+ * 2. Decrements the reference count for the associated WASM fileset
+ * 3. If no other models are using the WASM fileset (count reaches 0), unloads the WASM
+ */
+export declare const TFMP_Unload: AiProviderRunFn<DownloadModelTaskExecuteInput, DownloadModelTaskExecuteOutput, TFMPModelRecord>;
 //# sourceMappingURL=TFMP_JobRunFns.d.ts.map
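The TFMP_Unload comment above spells out a reference-counting scheme for WASM filesets shared across models. The package's actual bookkeeping lives in the bundled dist/index.js; the sketch below only restates that scheme in isolation, and every name in it (filesetRegistry, acquireFileset, releaseFileset) is invented for illustration.

    // Hypothetical sketch of the refcounting described in the TFMP_Unload doc comment.
    type FilesetEntry = { fileset: unknown; refCount: number };
    const filesetRegistry = new Map<string, FilesetEntry>(); // keyed by WASM fileset path

    async function acquireFileset(wasmPath: string, load: () => Promise<unknown>) {
      let entry = filesetRegistry.get(wasmPath);
      if (!entry) {
        entry = { fileset: await load(), refCount: 0 };
        filesetRegistry.set(wasmPath, entry);
      }
      entry.refCount += 1; // one more model is using this fileset
      return entry.fileset;
    }

    function releaseFileset(wasmPath: string) {
      const entry = filesetRegistry.get(wasmPath);
      if (!entry) return;
      entry.refCount -= 1; // a model was disposed; drop its reference
      if (entry.refCount <= 0) {
        filesetRegistry.delete(wasmPath); // last user gone: release the WASM fileset
      }
    }

Under this scheme, disposing one model only tears down the shared WASM fileset once the last model that references it has been released.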
package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"TFMP_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_JobRunFns.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"TFMP_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_JobRunFns.ts"],"names":[],"mappings":"…"}   (mappings regenerated for the new TFMP_TextClassifier, TFMP_TextLanguageDetection, and TFMP_Unload declarations; base64-VLQ payload omitted)
package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts
CHANGED
@@ -19,8 +19,18 @@ export declare const TFMPModelSchema: {
                 readonly type: "string";
                 readonly description: "Filesystem path or URI for the ONNX model.";
             };
+            readonly taskEngine: {
+                readonly type: "string";
+                readonly enum: readonly ["text", "audio", "vision", "genai"];
+                readonly description: "Task engine for the MediaPipe model.";
+            };
+            readonly pipeline: {
+                readonly type: "string";
+                readonly enum: ("text-embedder" | "text-classifier" | "text-language-detector" | "genai-text" | "audio-classifier" | "audio-embedder" | "vision-face-detector" | "vision-face-landmarker" | "vision-face-stylizer" | "vision-gesture-recognizer" | "vision-hand-landmarker" | "vision-holistic-landmarker" | "vision-image-classifier" | "vision-image-embedder" | "vision-image-segmenter" | "vision-image-interactive-segmenter" | "vision-object-detector" | "vision-pose-landmarker")[];
+                readonly description: "Pipeline task type for the MediaPipe model.";
+            };
         };
-        readonly required: readonly ["modelPath"];
+        readonly required: readonly ["modelPath", "taskEngine", "pipeline"];
         readonly additionalProperties: false;
     };
 };
@@ -42,8 +52,18 @@ declare const ExtendedModelSchema: {
                 readonly type: "string";
                 readonly description: "Filesystem path or URI for the ONNX model.";
             };
+            readonly taskEngine: {
+                readonly type: "string";
+                readonly enum: readonly ["text", "audio", "vision", "genai"];
+                readonly description: "Task engine for the MediaPipe model.";
+            };
+            readonly pipeline: {
+                readonly type: "string";
+                readonly enum: ("text-embedder" | "text-classifier" | "text-language-detector" | "genai-text" | "audio-classifier" | "audio-embedder" | "vision-face-detector" | "vision-face-landmarker" | "vision-face-stylizer" | "vision-gesture-recognizer" | "vision-hand-landmarker" | "vision-holistic-landmarker" | "vision-image-classifier" | "vision-image-embedder" | "vision-image-segmenter" | "vision-image-interactive-segmenter" | "vision-object-detector" | "vision-pose-landmarker")[];
+                readonly description: "Pipeline task type for the MediaPipe model.";
+            };
         };
-        readonly required: readonly ["modelPath"];
+        readonly required: readonly ["modelPath", "taskEngine", "pipeline"];
         readonly additionalProperties: false;
     };
     readonly model_id: {
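With taskEngine and pipeline now required next to modelPath, a MediaPipe model record has to say both which task bundle it needs and which concrete pipeline it runs. A record satisfying just the three schema fields shown above might look like the sketch below; the file path is a placeholder, and any surrounding fields (model_id and other provider metadata) are omitted.

    // Minimal object satisfying the updated TFMPModelSchema requirements.
    const textClassifierModel = {
      modelPath: "models/bert_text_classifier.tflite", // placeholder path to the model file
      taskEngine: "text",                              // "text" | "audio" | "vision" | "genai"
      pipeline: "text-classifier",                     // one of the TextPipelineTask values
    } as const;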
package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"TFMP_ModelSchema.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_ModelSchema.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"TFMP_ModelSchema.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_ModelSchema.ts"],"names":[],"mappings":"…"}   (mappings regenerated for the expanded schema; base64-VLQ payload omitted)
package/dist/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"TFMP_Client_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"TFMP_Client_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts"],"names":[],"mappings":"…"}   (mappings regenerated for the updated client registration function; base64-VLQ payload omitted)
package/dist/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"TFMP_Inline_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"TFMP_Inline_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts"],"names":[],"mappings":"…"}   (mappings regenerated for the updated inline registration function; base64-VLQ payload omitted)
package/dist/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"TFMP_Worker_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts"],"names":[],"mappings":"…"}
+{"version":3,"file":"TFMP_Worker_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts"],"names":[],"mappings":"…"}   (mappings regenerated for the updated worker registration exports; base64-VLQ payload omitted)
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@workglow/ai-provider",
   "type": "module",
-  "version": "0.0.66",
+  "version": "0.0.67",
   "description": "AI provider integrations for Workglow, supporting OpenAI, Hugging Face Transformers, MediaPipe, and GGML models.",
   "scripts": {
     "watch": "concurrently -c 'auto' 'bun:watch-*'",
@@ -30,13 +30,16 @@
     "access": "public"
   },
   "peerDependencies": {
-    "@workglow/ai": "0.0.…",
-    "@workglow/job-queue": "0.0.…",
-    "@workglow/storage": "0.0.…",
-    "@workglow/task-graph": "0.0.…",
-    "@workglow/util": "0.0.…",
+    "@workglow/ai": "0.0.67",
+    "@workglow/job-queue": "0.0.67",
+    "@workglow/storage": "0.0.67",
+    "@workglow/task-graph": "0.0.67",
+    "@workglow/util": "0.0.67",
     "@sroussey/transformers": "3.8.1",
-    "@mediapipe/tasks-text": "^0.10.…"
+    "@mediapipe/tasks-text": "^0.10.22-rc.20250304",
+    "@mediapipe/tasks-vision": "^0.10.22-rc.20250304",
+    "@mediapipe/tasks-audio": "^0.10.22-rc.20250304",
+    "@mediapipe/tasks-genai": "^0.10.22-rc.20250304"
   },
   "peerDependenciesMeta": {
     "@workglow/ai": {
@@ -56,12 +59,15 @@
     }
   },
   "devDependencies": {
-    "@workglow/ai": "0.0.…",
-    "@workglow/job-queue": "0.0.…",
-    "@workglow/storage": "0.0.…",
-    "@workglow/task-graph": "0.0.…",
-    "@workglow/util": "0.0.…",
+    "@workglow/ai": "0.0.67",
+    "@workglow/job-queue": "0.0.67",
+    "@workglow/storage": "0.0.67",
+    "@workglow/task-graph": "0.0.67",
+    "@workglow/util": "0.0.67",
     "@sroussey/transformers": "3.8.1",
-    "@mediapipe/tasks-text": "^0.10.…"
+    "@mediapipe/tasks-text": "^0.10.22-rc.20250304",
+    "@mediapipe/tasks-vision": "^0.10.22-rc.20250304",
+    "@mediapipe/tasks-audio": "^0.10.22-rc.20250304",
+    "@mediapipe/tasks-genai": "^0.10.22-rc.20250304"
   }
 }