@workglow/ai-provider 0.0.52
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +488 -0
- package/dist/ggml/model/GgmlLocalModel.d.ts +7 -0
- package/dist/ggml/model/GgmlLocalModel.d.ts.map +1 -0
- package/dist/ggml/server.d.ts +2 -0
- package/dist/ggml/server.d.ts.map +1 -0
- package/dist/hf-transformers/common/HFT_CallbackStatus.d.ts +36 -0
- package/dist/hf-transformers/common/HFT_CallbackStatus.d.ts.map +1 -0
- package/dist/hf-transformers/common/HFT_Constants.d.ts +18 -0
- package/dist/hf-transformers/common/HFT_Constants.d.ts.map +1 -0
- package/dist/hf-transformers/common/HFT_JobRunFns.d.ts +45 -0
- package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map +1 -0
- package/dist/hf-transformers/index.d.ts +11 -0
- package/dist/hf-transformers/index.d.ts.map +1 -0
- package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts +7 -0
- package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts.map +1 -0
- package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts +7 -0
- package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts.map +1 -0
- package/dist/hf-transformers/registry/HFT_Worker_RegisterJobFns.d.ts +8 -0
- package/dist/hf-transformers/registry/HFT_Worker_RegisterJobFns.d.ts.map +1 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +342 -0
- package/dist/index.js.map +19 -0
- package/dist/openai/provider/OpenAI_TaskRun.d.ts +2 -0
- package/dist/openai/provider/OpenAI_TaskRun.d.ts.map +1 -0
- package/dist/tf-mediapipe/common/TFMP_Constants.d.ts +7 -0
- package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map +1 -0
- package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts +17 -0
- package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +1 -0
- package/dist/tf-mediapipe/index.d.ts +11 -0
- package/dist/tf-mediapipe/index.d.ts.map +1 -0
- package/dist/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.d.ts +7 -0
- package/dist/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.d.ts.map +1 -0
- package/dist/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.d.ts +7 -0
- package/dist/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.d.ts.map +1 -0
- package/dist/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.d.ts +8 -0
- package/dist/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.d.ts.map +1 -0
- package/dist/types.d.ts +7 -0
- package/dist/types.d.ts.map +1 -0
- package/package.json +67 -0

package/dist/ggml/server.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../../src/ggml/server.ts"],"names":[],"mappings":""}

package/dist/hf-transformers/common/HFT_CallbackStatus.d.ts
ADDED
@@ -0,0 +1,36 @@
+/**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+interface StatusFileBookends {
+    status: "initiate" | "download" | "done";
+    name: string;
+    file: string;
+}
+interface StatusFileProgress {
+    status: "progress";
+    name: string;
+    file: string;
+    loaded: number;
+    progress: number;
+    total: number;
+}
+interface StatusRunReady {
+    status: "ready";
+    model: string;
+    task: string;
+}
+interface StatusRunUpdate {
+    status: "update";
+    output: string;
+}
+interface StatusRunComplete {
+    status: "complete";
+    output: string[];
+}
+type StatusFile = StatusFileBookends | StatusFileProgress;
+type StatusRun = StatusRunReady | StatusRunUpdate | StatusRunComplete;
+export type CallbackStatus = StatusFile | StatusRun;
+export {};
+//# sourceMappingURL=HFT_CallbackStatus.d.ts.map
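
Note: `CallbackStatus` is a discriminated union keyed on `status`, so TypeScript narrows it in a `switch`. A minimal sketch of a handler, assuming the type is imported from this module directly (the package's public re-export path may differ, since the package index does not re-export this file):

```ts
import type { CallbackStatus } from "./HFT_CallbackStatus"; // assumed import path

// Hypothetical handler: the `status` discriminant narrows each branch.
function onStatus(status: CallbackStatus): void {
  switch (status.status) {
    case "progress": // StatusFileProgress: per-file download progress
      console.log(`${status.file}: ${status.loaded}/${status.total} (${Math.round(status.progress)}%)`);
      break;
    case "initiate":
    case "download":
    case "done": // StatusFileBookends: file lifecycle events
      console.log(`${status.status}: ${status.file}`);
      break;
    case "ready": // StatusRunReady
      console.log(`model ${status.model} ready for ${status.task}`);
      break;
    case "update": // StatusRunUpdate: incremental output
      console.log(status.output);
      break;
    case "complete": // StatusRunComplete: final outputs
      console.log(status.output.join("\n"));
      break;
  }
}
```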

package/dist/hf-transformers/common/HFT_CallbackStatus.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"HFT_CallbackStatus.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_CallbackStatus.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,UAAU,kBAAkB;IAC1B,MAAM,EAAE,UAAU,GAAG,UAAU,GAAG,MAAM,CAAC;IACzC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;CACd;AAED,UAAU,kBAAkB;IAC1B,MAAM,EAAE,UAAU,CAAC;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,MAAM,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,UAAU,cAAc;IACtB,MAAM,EAAE,OAAO,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACd;AACD,UAAU,eAAe;IACvB,MAAM,EAAE,QAAQ,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;CAChB;AACD,UAAU,iBAAiB;IACzB,MAAM,EAAE,UAAU,CAAC;IACnB,MAAM,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,KAAK,UAAU,GAAG,kBAAkB,GAAG,kBAAkB,CAAC;AAC1D,KAAK,SAAS,GAAG,cAAc,GAAG,eAAe,GAAG,iBAAiB,CAAC;AACtE,MAAM,MAAM,cAAc,GAAG,UAAU,GAAG,SAAS,CAAC"}

package/dist/hf-transformers/common/HFT_Constants.d.ts
ADDED
@@ -0,0 +1,18 @@
+/**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+export declare const HF_TRANSFORMERS_ONNX = "HF_TRANSFORMERS_ONNX";
+export declare enum QUANTIZATION_DATA_TYPES {
+    auto = "auto",// Auto-detect based on environment
+    fp32 = "fp32",
+    fp16 = "fp16",
+    q8 = "q8",
+    int8 = "int8",
+    uint8 = "uint8",
+    q4 = "q4",
+    bnb4 = "bnb4",
+    q4f16 = "q4f16"
+}
+//# sourceMappingURL=HFT_Constants.d.ts.map
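
These enum members mirror the dtype strings accepted by transformers.js. A short sketch of how a model record might pick one; per `getPipeline` in `dist/index.js`, the model's `quantization` value feeds the pipeline's `dtype` option and falls back to `"q8"` when unset (the model-record context here is illustrative):

```ts
import { QUANTIZATION_DATA_TYPES } from "@workglow/ai-provider";

// Illustrative only: a quantization choice that would become the pipeline
// `dtype`; "q4f16" is an fp16 model with int4 block weight quantization.
const quantization = QUANTIZATION_DATA_TYPES.q4f16;
```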

package/dist/hf-transformers/common/HFT_Constants.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"HFT_Constants.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_Constants.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,eAAO,MAAM,oBAAoB,yBAAyB,CAAC;AAE3D,oBAAY,uBAAuB;IACjC,IAAI,SAAS,CAAE,mCAAmC;IAClD,IAAI,SAAS;IACb,IAAI,SAAS;IACb,EAAE,OAAO;IACT,IAAI,SAAS;IACb,KAAK,UAAU;IACf,EAAE,OAAO;IACT,IAAI,SAAS;IACb,KAAK,UAAU;CAChB"}

package/dist/hf-transformers/common/HFT_JobRunFns.d.ts
ADDED
@@ -0,0 +1,45 @@
+/**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+import { AiProviderRunFn, type DeReplicateFromSchema, DownloadModelTaskExecuteInput, TextEmbeddingInputSchema, TextEmbeddingOutputSchema, TextGenerationInputSchema, TextGenerationOutputSchema, TextQuestionAnswerInputSchema, TextQuestionAnswerOutputSchema, TextRewriterInputSchema, TextRewriterOutputSchema, TextSummaryInputSchema, TextSummaryOutputSchema, TextTranslationInputSchema, TextTranslationOutputSchema } from "@workglow/ai";
+/**
+ * Core implementation for downloading and caching a Hugging Face Transformers model.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_Download: AiProviderRunFn<DownloadModelTaskExecuteInput, DownloadModelTaskExecuteInput>;
+/**
+ * Core implementation for text embedding using Hugging Face Transformers.
+ * This is shared between inline and worker implementations.
+ */
+type TextEmbeddingInput = DeReplicateFromSchema<typeof TextEmbeddingInputSchema>;
+type TextEmbeddingOutput = DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>;
+export declare const HFT_TextEmbedding: AiProviderRunFn<TextEmbeddingInput, TextEmbeddingOutput>;
+/**
+ * Core implementation for text generation using Hugging Face Transformers.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_TextGeneration: AiProviderRunFn<DeReplicateFromSchema<typeof TextGenerationInputSchema>, DeReplicateFromSchema<typeof TextGenerationOutputSchema>>;
+/**
+ * Core implementation for text translation using Hugging Face Transformers.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_TextTranslation: AiProviderRunFn<DeReplicateFromSchema<typeof TextTranslationInputSchema>, DeReplicateFromSchema<typeof TextTranslationOutputSchema>>;
+/**
+ * Core implementation for text rewriting using Hugging Face Transformers.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_TextRewriter: AiProviderRunFn<DeReplicateFromSchema<typeof TextRewriterInputSchema>, DeReplicateFromSchema<typeof TextRewriterOutputSchema>>;
+/**
+ * Core implementation for text summarization using Hugging Face Transformers.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_TextSummary: AiProviderRunFn<DeReplicateFromSchema<typeof TextSummaryInputSchema>, DeReplicateFromSchema<typeof TextSummaryOutputSchema>>;
+/**
+ * Core implementation for question answering using Hugging Face Transformers.
+ * This is shared between inline and worker implementations.
+ */
+export declare const HFT_TextQuestionAnswer: AiProviderRunFn<DeReplicateFromSchema<typeof TextQuestionAnswerInputSchema>, DeReplicateFromSchema<typeof TextQuestionAnswerOutputSchema>>;
+export {};
+//# sourceMappingURL=HFT_JobRunFns.d.ts.map
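
All of these exports share the `AiProviderRunFn` shape; judging from the bundled `dist/index.js` below, a run function is invoked as `(input, model, onProgress, signal)`. A hedged sketch of a direct call — the model record's fields and values are illustrative placeholders, not a real `Model` from `@workglow/ai`:

```ts
import { HFT_TextEmbedding } from "@workglow/ai-provider";

// Hypothetical model record; the real Model type comes from @workglow/ai.
const model = {
  name: "my-embedding-model",
  url: "Xenova/all-MiniLM-L6-v2", // repo id handed to pipeline()
  pipeline: "feature-extraction",
  normalize: true,
  nativeDimensions: 384,
} as any;

const controller = new AbortController();
const { vector } = await HFT_TextEmbedding(
  { text: "hello world" },                               // input per TextEmbeddingInputSchema
  model,
  (progress, message) => console.log(progress, message), // onProgress callback
  controller.signal                                      // optional AbortSignal
);
```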

package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"HFT_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/common/HFT_JobRunFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAkBH,OAAO,EACL,eAAe,EACf,KAAK,qBAAqB,EAC1B,6BAA6B,EAE7B,wBAAwB,EACxB,yBAAyB,EACzB,yBAAyB,EACzB,0BAA0B,EAC1B,6BAA6B,EAC7B,8BAA8B,EAC9B,uBAAuB,EACvB,wBAAwB,EACxB,sBAAsB,EACtB,uBAAuB,EACvB,0BAA0B,EAC1B,2BAA2B,EAE5B,MAAM,cAAc,CAAC;AA8CtB;;;GAGG;AACH,eAAO,MAAM,YAAY,EAAE,eAAe,CACxC,6BAA6B,EAC7B,6BAA6B,CAQ9B,CAAC;AAEF;;;GAGG;AAEH,KAAK,kBAAkB,GAAG,qBAAqB,CAAC,OAAO,wBAAwB,CAAC,CAAC;AACjF,KAAK,mBAAmB,GAAG,qBAAqB,CAAC,OAAO,yBAAyB,CAAC,CAAC;AACnF,eAAO,MAAM,iBAAiB,EAAE,eAAe,CAAC,kBAAkB,EAAE,mBAAmB,CA8BtF,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,kBAAkB,EAAE,eAAe,CAC9C,qBAAqB,CAAC,OAAO,yBAAyB,CAAC,EACvD,qBAAqB,CAAC,OAAO,0BAA0B,CAAC,CAwBzD,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,mBAAmB,EAAE,eAAe,CAC/C,qBAAqB,CAAC,OAAO,0BAA0B,CAAC,EACxD,qBAAqB,CAAC,OAAO,2BAA2B,CAAC,CAyB1D,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,gBAAgB,EAAE,eAAe,CAC5C,qBAAqB,CAAC,OAAO,uBAAuB,CAAC,EACrD,qBAAqB,CAAC,OAAO,wBAAwB,CAAC,CA+BvD,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,eAAe,EAAE,eAAe,CAC3C,qBAAqB,CAAC,OAAO,sBAAsB,CAAC,EACpD,qBAAqB,CAAC,OAAO,uBAAuB,CAAC,CAsBtD,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,sBAAsB,EAAE,eAAe,CAClD,qBAAqB,CAAC,OAAO,6BAA6B,CAAC,EAC3D,qBAAqB,CAAC,OAAO,8BAA8B,CAAC,CAuB7D,CAAC"}

package/dist/hf-transformers/index.d.ts
ADDED
@@ -0,0 +1,11 @@
+/**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+export * from "./common/HFT_Constants";
+export * from "./common/HFT_JobRunFns";
+export * from "./registry/HFT_Client_RegisterJobFns";
+export * from "./registry/HFT_Inline_RegisterJobFns";
+export * from "./registry/HFT_Worker_RegisterJobFns";
+//# sourceMappingURL=index.d.ts.map

package/dist/hf-transformers/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/hf-transformers/index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,cAAc,wBAAwB,CAAC;AACvC,cAAc,wBAAwB,CAAC;AACvC,cAAc,sCAAsC,CAAC;AACrD,cAAc,sCAAsC,CAAC;AACrD,cAAc,sCAAsC,CAAC"}

package/dist/hf-transformers/registry/HFT_Client_RegisterJobFns.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"HFT_Client_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAMH,wBAAsB,yBAAyB,CAAC,MAAM,EAAE,MAAM,iBAkB7D"}

package/dist/hf-transformers/registry/HFT_Inline_RegisterJobFns.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"HFT_Inline_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAeH,wBAAsB,yBAAyB,kBAgB9C"}

package/dist/hf-transformers/registry/HFT_Worker_RegisterJobFns.d.ts
ADDED
@@ -0,0 +1,8 @@
+/**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+export declare const HFT_WORKER_JOBRUN: import("@workglow/util").ServiceToken<unknown>;
+export declare const HFT_WORKER_JOBRUN_REGISTER: void;
+//# sourceMappingURL=HFT_Worker_RegisterJobFns.d.ts.map
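
`HFT_WORKER_JOBRUN_REGISTER` is declared `void` because it holds the result of `globalServiceRegistry.register(...)`; the trailing `true` flag presumably makes the registration eager (an assumption — the flag's semantics live in `@workglow/util`), so the factory binds the run functions to the `WORKER_SERVER` and posts `{ type: "ready" }` to the parent thread. A sketch of the two sides, where the file name and worker URL are assumptions:

```ts
// hft-worker.ts (worker thread): importing the package evaluates
// HFT_WORKER_JOBRUN_REGISTER, which wires the run functions into the
// worker server and signals readiness to the parent thread.
import "@workglow/ai-provider";
```

```ts
// main thread: hand the spawned worker to the client registrar so the
// provider registry routes HF_TRANSFORMERS_ONNX tasks to it.
import { register_HFT_ClientJobFns } from "@workglow/ai-provider";

const worker = new Worker(new URL("./hft-worker.js", import.meta.url), { type: "module" });
await register_HFT_ClientJobFns(worker);
```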

package/dist/hf-transformers/registry/HFT_Worker_RegisterJobFns.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"HFT_Worker_RegisterJobFns.d.ts","sourceRoot":"","sources":["../../../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAkBH,eAAO,MAAM,iBAAiB,gDAA+C,CAAC;AAE9E,eAAO,MAAM,0BAA0B,MAgBtC,CAAC"}

package/dist/index.d.ts
ADDED
@@ -0,0 +1,3 @@
+export * from "./hf-transformers";
+export * from "./tf-mediapipe";
+//# sourceMappingURL=index.d.ts.map

package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,mBAAmB,CAAC;AAClC,cAAc,gBAAgB,CAAC"}

package/dist/index.js
ADDED
@@ -0,0 +1,342 @@
+// src/hf-transformers/common/HFT_Constants.ts
+var HF_TRANSFORMERS_ONNX = "HF_TRANSFORMERS_ONNX";
+var QUANTIZATION_DATA_TYPES;
+((QUANTIZATION_DATA_TYPES2) => {
+  QUANTIZATION_DATA_TYPES2["auto"] = "auto";
+  QUANTIZATION_DATA_TYPES2["fp32"] = "fp32";
+  QUANTIZATION_DATA_TYPES2["fp16"] = "fp16";
+  QUANTIZATION_DATA_TYPES2["q8"] = "q8";
+  QUANTIZATION_DATA_TYPES2["int8"] = "int8";
+  QUANTIZATION_DATA_TYPES2["uint8"] = "uint8";
+  QUANTIZATION_DATA_TYPES2["q4"] = "q4";
+  QUANTIZATION_DATA_TYPES2["bnb4"] = "bnb4";
+  QUANTIZATION_DATA_TYPES2["q4f16"] = "q4f16";
+})(QUANTIZATION_DATA_TYPES ||= {});
+// src/hf-transformers/common/HFT_JobRunFns.ts
+import {
+  pipeline,
+  TextStreamer
+} from "@sroussey/transformers";
+import { PermanentJobError } from "@workglow/job-queue";
+var pipelines = new Map;
+var getPipeline = async (model, onProgress, options = {}) => {
+  if (pipelines.has(model.name)) {
+    return pipelines.get(model.name);
+  }
+  const progressCallback = (status) => {
+    const progress = status.status === "progress" ? Math.round(status.progress) : 0;
+    if (status.status === "progress") {
+      onProgress(progress, "Downloading model", {
+        file: status.file,
+        progress: status.progress
+      });
+    }
+  };
+  const pipelineOptions = {
+    dtype: model.quantization || "q8",
+    ...model.use_external_data_format ? { use_external_data_format: model.use_external_data_format } : {},
+    ...model.device ? { device: model.device } : {},
+    ...options,
+    progress_callback: progressCallback
+  };
+  const pipelineType = model.pipeline;
+  const result = await pipeline(pipelineType, model.url, pipelineOptions);
+  pipelines.set(model.name, result);
+  return result;
+};
+var HFT_Download = async (input, model, onProgress, signal) => {
+  await getPipeline(model, onProgress, { abort_signal: signal });
+  return {
+    model: input.model
+  };
+};
+var HFT_TextEmbedding = async (input, model, onProgress, signal) => {
+  const generateEmbedding = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const hfVector = await generateEmbedding(input.text, {
+    pooling: "mean",
+    normalize: model.normalize,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  if (hfVector.size !== model.nativeDimensions) {
+    console.warn(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model.nativeDimensions}`, input, hfVector);
+    throw new PermanentJobError(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model.nativeDimensions}`);
+  }
+  return { vector: hfVector.data };
+};
+var HFT_TextGeneration = async (input, model, onProgress, signal) => {
+  const generateText = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const streamer = createTextStreamer(generateText.tokenizer, onProgress, signal);
+  let results = await generateText(input.prompt, {
+    streamer,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  if (!Array.isArray(results)) {
+    results = [results];
+  }
+  let text = results[0]?.generated_text;
+  if (Array.isArray(text)) {
+    text = text[text.length - 1]?.content;
+  }
+  return {
+    text
+  };
+};
+var HFT_TextTranslation = async (input, model, onProgress, signal) => {
+  const translate = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const streamer = createTextStreamer(translate.tokenizer, onProgress);
+  const result = await translate(input.text, {
+    src_lang: input.source_lang,
+    tgt_lang: input.target_lang,
+    streamer,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  let translatedText = "";
+  if (Array.isArray(result)) {
+    translatedText = result.map((r) => r?.translation_text || "");
+  } else {
+    translatedText = result?.translation_text || "";
+  }
+  return {
+    text: translatedText,
+    target_lang: input.target_lang
+  };
+};
+var HFT_TextRewriter = async (input, model, onProgress, signal) => {
+  const generateText = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const streamer = createTextStreamer(generateText.tokenizer, onProgress);
+  const promptedText = (input.prompt ? input.prompt + `
+` : "") + input.text;
+  let results = await generateText(promptedText, {
+    streamer,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  if (!Array.isArray(results)) {
+    results = [results];
+  }
+  let text = results[0]?.generated_text;
+  if (Array.isArray(text)) {
+    text = text[text.length - 1]?.content;
+  }
+  if (text === promptedText) {
+    throw new PermanentJobError("Rewriter failed to generate new text");
+  }
+  return {
+    text
+  };
+};
+var HFT_TextSummary = async (input, model, onProgress, signal) => {
+  const generateSummary = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const streamer = createTextStreamer(generateSummary.tokenizer, onProgress);
+  let result = await generateSummary(input.text, {
+    streamer,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  let summaryText = "";
+  if (Array.isArray(result)) {
+    summaryText = result[0]?.summary_text || "";
+  } else {
+    summaryText = result?.summary_text || "";
+  }
+  return {
+    text: summaryText
+  };
+};
+var HFT_TextQuestionAnswer = async (input, model, onProgress, signal) => {
+  const generateAnswer = await getPipeline(model, onProgress, {
+    abort_signal: signal
+  });
+  const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);
+  const result = await generateAnswer(input.question, input.context, {
+    streamer,
+    ...signal ? { abort_signal: signal } : {}
+  });
+  let answerText = "";
+  if (Array.isArray(result)) {
+    answerText = result[0]?.answer || "";
+  } else {
+    answerText = result?.answer || "";
+  }
+  return {
+    text: answerText
+  };
+};
+function createTextStreamer(tokenizer, updateProgress, signal) {
+  let count = 0;
+  return new TextStreamer(tokenizer, {
+    skip_prompt: true,
+    decode_kwargs: { skip_special_tokens: true },
+    callback_function: (text) => {
+      count++;
+      const result = 100 * (1 - Math.exp(-0.05 * count));
+      const progress = Math.round(Math.min(result, 100));
+      updateProgress(progress, "Generating", { text, progress });
+    },
+    ...signal ? { abort_signal: signal } : {}
+  });
+}
+// src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts
+import { getAiProviderRegistry } from "@workglow/ai";
+import { globalServiceRegistry, WORKER_MANAGER } from "@workglow/util";
+async function register_HFT_ClientJobFns(worker) {
+  const workerManager = globalServiceRegistry.get(WORKER_MANAGER);
+  workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);
+  const ProviderRegistry = getAiProviderRegistry();
+  const names = [
+    "DownloadModelTask",
+    "TextEmbeddingTask",
+    "TextGenerationTask",
+    "TextTranslationTask",
+    "TextRewriterTask",
+    "TextSummaryTask",
+    "TextQuestionAnswerTask"
+  ];
+  for (const name of names) {
+    ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);
+  }
+}
+// src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts
+import { env } from "@sroussey/transformers";
+import { getAiProviderRegistry as getAiProviderRegistry2 } from "@workglow/ai";
+async function register_HFT_InlineJobFns() {
+  env.backends.onnx.wasm.proxy = true;
+  const ProviderRegistry = getAiProviderRegistry2();
+  const fns = {
+    ["DownloadModelTask"]: HFT_Download,
+    ["TextEmbeddingTask"]: HFT_TextEmbedding,
+    ["TextGenerationTask"]: HFT_TextGeneration,
+    ["TextQuestionAnswerTask"]: HFT_TextQuestionAnswer,
+    ["TextRewriterTask"]: HFT_TextRewriter,
+    ["TextSummaryTask"]: HFT_TextSummary,
+    ["TextTranslationTask"]: HFT_TextTranslation
+  };
+  for (const [jobName, fn] of Object.entries(fns)) {
+    ProviderRegistry.registerRunFn(HF_TRANSFORMERS_ONNX, jobName, fn);
+  }
+}
+// src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts
+import {
+  createServiceToken,
+  globalServiceRegistry as globalServiceRegistry2,
+  parentPort,
+  WORKER_SERVER
+} from "@workglow/util";
+var HFT_WORKER_JOBRUN = createServiceToken("worker.ai-provider.hft");
+var HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry2.register(HFT_WORKER_JOBRUN, () => {
+  const workerServer = globalServiceRegistry2.get(WORKER_SERVER);
+  workerServer.registerFunction("DownloadModelTask", HFT_Download);
+  workerServer.registerFunction("TextEmbeddingTask", HFT_TextEmbedding);
+  workerServer.registerFunction("TextGenerationTask", HFT_TextGeneration);
+  workerServer.registerFunction("TextTranslationTask", HFT_TextTranslation);
+  workerServer.registerFunction("TextRewriterTask", HFT_TextRewriter);
+  workerServer.registerFunction("TextSummaryTask", HFT_TextSummary);
+  workerServer.registerFunction("TextQuestionAnswerTask", HFT_TextQuestionAnswer);
+  parentPort.postMessage({ type: "ready" });
+  console.log("HFT_WORKER_JOBRUN registered");
+  return workerServer;
+}, true);
+// src/tf-mediapipe/common/TFMP_Constants.ts
+var TENSORFLOW_MEDIAPIPE = "TENSORFLOW_MEDIAPIPE";
+// src/tf-mediapipe/common/TFMP_JobRunFns.ts
+import { FilesetResolver, TextEmbedder } from "@mediapipe/tasks-text";
+import { PermanentJobError as PermanentJobError2 } from "@workglow/job-queue";
+var TFMP_Download = async (input, model, onProgress, signal) => {
+  const textFiles = await FilesetResolver.forTextTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm");
+  const embedder = await TextEmbedder.createFromOptions(textFiles, {
+    baseOptions: {
+      modelAssetPath: model.url
+    }
+  });
+  return {
+    model: input.model
+  };
+};
+var TFMP_TextEmbedding = async (input, model, onProgress, signal) => {
+  const textFiles = await FilesetResolver.forTextTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm");
+  onProgress(0.1, "Model loaded");
+  const embedder = await TextEmbedder.createFromOptions(textFiles, {
+    baseOptions: {
+      modelAssetPath: model.url
+    }
+  });
+  if (signal.aborted) {
+    throw new PermanentJobError2("Aborted job");
+  }
+  onProgress(0.2, "Embedding text");
+  const result = embedder.embed(input.text);
+  if (!result.embeddings?.[0]?.floatEmbedding) {
+    throw new PermanentJobError2("Failed to generate embedding: Empty result");
+  }
+  const embedding = Float32Array.from(result.embeddings[0].floatEmbedding);
+  return {
+    vector: embedding
+  };
+};
+// src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts
+import { getAiProviderRegistry as getAiProviderRegistry3 } from "@workglow/ai";
+import { globalServiceRegistry as globalServiceRegistry3, WORKER_MANAGER as WORKER_MANAGER2 } from "@workglow/util";
+var register_TFMP_ClientJobFns = (worker) => {
+  const workerManager = globalServiceRegistry3.get(WORKER_MANAGER2);
+  workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);
+  const aiProviderRegistry = getAiProviderRegistry3();
+  const names = ["DownloadModelTask", "TextEmbeddingTask"];
+  for (const name of names) {
+    aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);
+  }
+};
+// src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts
+import { getAiProviderRegistry as getAiProviderRegistry4 } from "@workglow/ai";
+var register_TFMP_InlineJobFns = () => {
+  const aiProviderRegistry = getAiProviderRegistry4();
+  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "DownloadModelTask", TFMP_Download);
+  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, "TextEmbeddingTask", TFMP_TextEmbedding);
+};
+// src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts
+import {
+  createServiceToken as createServiceToken2,
+  globalServiceRegistry as globalServiceRegistry4,
+  parentPort as parentPort2,
+  WORKER_SERVER as WORKER_SERVER2
+} from "@workglow/util";
+var TFMP_WORKER_JOBRUN = createServiceToken2("worker.ai-provider.tfmp");
+var TFMP_WORKER_JOBRUN_REGISTER = globalServiceRegistry4.register(TFMP_WORKER_JOBRUN, () => {
+  const workerServer = globalServiceRegistry4.get(WORKER_SERVER2);
+  workerServer.registerFunction("DownloadModelTask", TFMP_Download);
+  workerServer.registerFunction("TextEmbeddingTask", TFMP_TextEmbedding);
+  parentPort2.postMessage({ type: "ready" });
+  console.log("TFMP_WORKER_JOBRUN registered");
+  return workerServer;
+}, true);
+export {
+  register_TFMP_InlineJobFns,
+  register_TFMP_ClientJobFns,
+  register_HFT_InlineJobFns,
+  register_HFT_ClientJobFns,
+  TFMP_WORKER_JOBRUN_REGISTER,
+  TFMP_WORKER_JOBRUN,
+  TFMP_TextEmbedding,
+  TFMP_Download,
+  TENSORFLOW_MEDIAPIPE,
+  QUANTIZATION_DATA_TYPES,
+  HF_TRANSFORMERS_ONNX,
+  HFT_WORKER_JOBRUN_REGISTER,
+  HFT_WORKER_JOBRUN,
+  HFT_TextTranslation,
+  HFT_TextSummary,
+  HFT_TextRewriter,
+  HFT_TextQuestionAnswer,
+  HFT_TextGeneration,
+  HFT_TextEmbedding,
+  HFT_Download
+};
+
+//# debugId=8CC1F78E1614B52764756E2164756E21
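
One detail worth calling out in `createTextStreamer` above: the total token count is unknown while a stream is in flight, so progress follows a saturating curve, progress = 100 * (1 - e^(-0.05 * count)), which rises quickly and approaches 100 without ever reaching it. A standalone sketch of the same formula:

```ts
// Same curve as createTextStreamer in dist/index.js: asymptotic progress
// for a token stream whose total length is unknown in advance.
const progressAt = (count: number): number =>
  Math.round(Math.min(100 * (1 - Math.exp(-0.05 * count)), 100));

console.log(progressAt(10));  // ~39
console.log(progressAt(50));  // ~92
console.log(progressAt(100)); // ~99
```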

package/dist/index.js.map
ADDED
@@ -0,0 +1,19 @@
+{
+  "version": 3,
+  "sources": ["../src/hf-transformers/common/HFT_Constants.ts", "../src/hf-transformers/common/HFT_JobRunFns.ts", "../src/hf-transformers/registry/HFT_Client_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Inline_RegisterJobFns.ts", "../src/hf-transformers/registry/HFT_Worker_RegisterJobFns.ts", "../src/tf-mediapipe/common/TFMP_Constants.ts", "../src/tf-mediapipe/common/TFMP_JobRunFns.ts", "../src/tf-mediapipe/registry/TFMP_Client_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Inline_RegisterJobFns.ts", "../src/tf-mediapipe/registry/TFMP_Worker_RegisterJobFns.ts"],
+  "sourcesContent": [
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const HF_TRANSFORMERS_ONNX = \"HF_TRANSFORMERS_ONNX\";\n\nexport enum QUANTIZATION_DATA_TYPES {\n  auto = \"auto\", // Auto-detect based on environment\n  fp32 = \"fp32\",\n  fp16 = \"fp16\",\n  q8 = \"q8\",\n  int8 = \"int8\",\n  uint8 = \"uint8\",\n  q4 = \"q4\",\n  bnb4 = \"bnb4\",\n  q4f16 = \"q4f16\", // fp16 model with int4 block weight quantization\n}\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n  DocumentQuestionAnsweringSingle,\n  type FeatureExtractionPipeline,\n  pipeline,\n  type PipelineType,\n  // @ts-ignore temporary \"fix\"\n  type PretrainedModelOptions,\n  QuestionAnsweringPipeline,\n  SummarizationPipeline,\n  SummarizationSingle,\n  type TextGenerationPipeline,\n  TextGenerationSingle,\n  TextStreamer,\n  TranslationPipeline,\n  TranslationSingle,\n} from \"@sroussey/transformers\";\nimport {\n  AiProviderRunFn,\n  type DeReplicateFromSchema,\n  DownloadModelTaskExecuteInput,\n  Model,\n  TextEmbeddingInputSchema,\n  TextEmbeddingOutputSchema,\n  TextGenerationInputSchema,\n  TextGenerationOutputSchema,\n  TextQuestionAnswerInputSchema,\n  TextQuestionAnswerOutputSchema,\n  TextRewriterInputSchema,\n  TextRewriterOutputSchema,\n  TextSummaryInputSchema,\n  TextSummaryOutputSchema,\n  TextTranslationInputSchema,\n  TextTranslationOutputSchema,\n  TypedArray,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\nimport { CallbackStatus } from \"./HFT_CallbackStatus\";\nimport { QUANTIZATION_DATA_TYPES } from \"./HFT_Constants\";\n\nconst pipelines = new Map<string, any>();\n\n/**\n * Helper function to get a pipeline for a model\n */\nconst getPipeline = async (\n  model: Model,\n  onProgress: (progress: number, message?: string, details?: any) => void,\n  options: PretrainedModelOptions = {}\n) => {\n  if (pipelines.has(model.name)) {\n    return pipelines.get(model.name);\n  }\n\n  // Create a callback status object for progress tracking\n  const progressCallback = (status: CallbackStatus) => {\n    const progress = status.status === \"progress\" ? Math.round(status.progress) : 0;\n    if (status.status === \"progress\") {\n      onProgress(progress, \"Downloading model\", {\n        file: status.file,\n        progress: status.progress,\n      });\n    }\n  };\n\n  const pipelineOptions: PretrainedModelOptions = {\n    dtype: (model.quantization as QUANTIZATION_DATA_TYPES) || \"q8\",\n    ...(model.use_external_data_format\n      ? { use_external_data_format: model.use_external_data_format }\n      : {}),\n    ...(model.device ? { device: model.device as any } : {}),\n    ...options,\n    progress_callback: progressCallback,\n  };\n\n  const pipelineType = model.pipeline as PipelineType;\n  const result = await pipeline(pipelineType, model.url, pipelineOptions);\n  pipelines.set(model.name, result);\n  return result;\n};\n\n/**\n * Core implementation for downloading and caching a Hugging Face Transformers model.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_Download: AiProviderRunFn<\n  DownloadModelTaskExecuteInput,\n  DownloadModelTaskExecuteInput\n> = async (input, model, onProgress, signal) => {\n  // Download the model by creating a pipeline\n  await getPipeline(model!, onProgress, { abort_signal: signal });\n\n  return {\n    model: input.model!,\n  };\n};\n\n/**\n * Core implementation for text embedding using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\n\ntype TextEmbeddingInput = DeReplicateFromSchema<typeof TextEmbeddingInputSchema>;\ntype TextEmbeddingOutput = DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>;\nexport const HFT_TextEmbedding: AiProviderRunFn<TextEmbeddingInput, TextEmbeddingOutput> = async (\n  input,\n  model,\n  onProgress,\n  signal\n) => {\n  const generateEmbedding: FeatureExtractionPipeline = await getPipeline(model!, onProgress, {\n    abort_signal: signal,\n  });\n\n  // Generate the embedding\n  const hfVector = await generateEmbedding(input.text, {\n    pooling: \"mean\",\n    normalize: model!.normalize,\n    ...(signal ? { abort_signal: signal } : {}),\n  });\n\n  // Validate the embedding dimensions\n  if (hfVector.size !== model!.nativeDimensions) {\n    console.warn(\n      `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model!.nativeDimensions}`,\n      input,\n      hfVector\n    );\n    throw new PermanentJobError(\n      `HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${model!.nativeDimensions}`\n    );\n  }\n\n  return { vector: hfVector.data as TypedArray };\n};\n\n/**\n * Core implementation for text generation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextGeneration: AiProviderRunFn<\n  DeReplicateFromSchema<typeof TextGenerationInputSchema>,\n  DeReplicateFromSchema<typeof TextGenerationOutputSchema>\n> = async (input, model, onProgress, signal) => {\n  const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n    abort_signal: signal,\n  });\n\n  const streamer = createTextStreamer(generateText.tokenizer, onProgress, signal);\n\n  let results = await generateText(input.prompt, {\n    streamer,\n    ...(signal ? { abort_signal: signal } : {}),\n  });\n\n  if (!Array.isArray(results)) {\n    results = [results];\n  }\n  let text = (results[0] as TextGenerationSingle)?.generated_text;\n\n  if (Array.isArray(text)) {\n    text = text[text.length - 1]?.content;\n  }\n  return {\n    text,\n  };\n};\n\n/**\n * Core implementation for text translation using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextTranslation: AiProviderRunFn<\n  DeReplicateFromSchema<typeof TextTranslationInputSchema>,\n  DeReplicateFromSchema<typeof TextTranslationOutputSchema>\n> = async (input, model, onProgress, signal) => {\n  const translate: TranslationPipeline = await getPipeline(model!, onProgress, {\n    abort_signal: signal,\n  });\n  const streamer = createTextStreamer(translate.tokenizer, onProgress);\n\n  const result = await translate(input.text, {\n    src_lang: input.source_lang,\n    tgt_lang: input.target_lang,\n    streamer,\n    ...(signal ? { abort_signal: signal } : {}),\n  } as any);\n\n  let translatedText: string | string[] = \"\";\n  if (Array.isArray(result)) {\n    translatedText = result.map((r) => (r as TranslationSingle)?.translation_text || \"\");\n  } else {\n    translatedText = (result as TranslationSingle)?.translation_text || \"\";\n  }\n\n  return {\n    text: translatedText,\n    target_lang: input.target_lang,\n  };\n};\n\n/**\n * Core implementation for text rewriting using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextRewriter: AiProviderRunFn<\n  DeReplicateFromSchema<typeof TextRewriterInputSchema>,\n  DeReplicateFromSchema<typeof TextRewriterOutputSchema>\n> = async (input, model, onProgress, signal) => {\n  const generateText: TextGenerationPipeline = await getPipeline(model!, onProgress, {\n    abort_signal: signal,\n  });\n  const streamer = createTextStreamer(generateText.tokenizer, onProgress);\n\n  // This lib doesn't support this kind of rewriting with a separate prompt vs text\n  const promptedText = (input.prompt ? input.prompt + \"\\n\" : \"\") + input.text;\n\n  let results = await generateText(promptedText, {\n    streamer,\n    ...(signal ? { abort_signal: signal } : {}),\n  });\n\n  if (!Array.isArray(results)) {\n    results = [results];\n  }\n\n  let text = (results[0] as TextGenerationSingle)?.generated_text;\n  if (Array.isArray(text)) {\n    text = text[text.length - 1]?.content;\n  }\n\n  if (text === promptedText) {\n    throw new PermanentJobError(\"Rewriter failed to generate new text\");\n  }\n\n  return {\n    text,\n  };\n};\n\n/**\n * Core implementation for text summarization using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextSummary: AiProviderRunFn<\n  DeReplicateFromSchema<typeof TextSummaryInputSchema>,\n  DeReplicateFromSchema<typeof TextSummaryOutputSchema>\n> = async (input, model, onProgress, signal) => {\n  const generateSummary: SummarizationPipeline = await getPipeline(model!, onProgress, {\n    abort_signal: signal,\n  });\n  const streamer = createTextStreamer(generateSummary.tokenizer, onProgress);\n\n  let result = await generateSummary(input.text, {\n    streamer,\n    ...(signal ? { abort_signal: signal } : {}),\n  } as any);\n\n  let summaryText = \"\";\n  if (Array.isArray(result)) {\n    summaryText = (result[0] as SummarizationSingle)?.summary_text || \"\";\n  } else {\n    summaryText = (result as SummarizationSingle)?.summary_text || \"\";\n  }\n\n  return {\n    text: summaryText,\n  };\n};\n\n/**\n * Core implementation for question answering using Hugging Face Transformers.\n * This is shared between inline and worker implementations.\n */\nexport const HFT_TextQuestionAnswer: AiProviderRunFn<\n  DeReplicateFromSchema<typeof TextQuestionAnswerInputSchema>,\n  DeReplicateFromSchema<typeof TextQuestionAnswerOutputSchema>\n> = async (input, model, onProgress, signal) => {\n  // Get the question answering pipeline\n  const generateAnswer: QuestionAnsweringPipeline = await getPipeline(model!, onProgress, {\n    abort_signal: signal,\n  });\n  const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);\n\n  const result = await generateAnswer(input.question, input.context, {\n    streamer,\n    ...(signal ? { abort_signal: signal } : {}),\n  } as any);\n\n  let answerText = \"\";\n  if (Array.isArray(result)) {\n    answerText = (result[0] as DocumentQuestionAnsweringSingle)?.answer || \"\";\n  } else {\n    answerText = (result as DocumentQuestionAnsweringSingle)?.answer || \"\";\n  }\n\n  return {\n    text: answerText,\n  };\n};\n\n/**\n * Create a text streamer for a given tokenizer and update progress function\n * @param tokenizer - The tokenizer to use for the streamer\n * @param updateProgress - The function to call to update the progress\n * @param signal - The signal to use for the streamer for aborting\n * @returns The text streamer\n */\nfunction createTextStreamer(\n  tokenizer: any,\n  updateProgress: (progress: number, message?: string, details?: any) => void,\n  signal?: AbortSignal\n) {\n  let count = 0;\n  return new TextStreamer(tokenizer, {\n    skip_prompt: true,\n    decode_kwargs: { skip_special_tokens: true },\n    callback_function: (text: string) => {\n      count++;\n      const result = 100 * (1 - Math.exp(-0.05 * count));\n      const progress = Math.round(Math.min(result, 100));\n      updateProgress(progress, \"Generating\", { text, progress });\n    },\n    ...(signal ? { abort_signal: signal } : {}),\n  });\n}\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\n\nexport async function register_HFT_ClientJobFns(worker: Worker) {\n  const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n\n  workerManager.registerWorker(HF_TRANSFORMERS_ONNX, worker);\n\n  const ProviderRegistry = getAiProviderRegistry();\n  const names = [\n    \"DownloadModelTask\",\n    \"TextEmbeddingTask\",\n    \"TextGenerationTask\",\n    \"TextTranslationTask\",\n    \"TextRewriterTask\",\n    \"TextSummaryTask\",\n    \"TextQuestionAnswerTask\",\n  ];\n  for (const name of names) {\n    ProviderRegistry.registerAsWorkerRunFn(HF_TRANSFORMERS_ONNX, name);\n  }\n}\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { env } from \"@sroussey/transformers\";\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { HF_TRANSFORMERS_ONNX } from \"../common/HFT_Constants\";\nimport {\n  HFT_Download,\n  HFT_TextEmbedding,\n  HFT_TextGeneration,\n  HFT_TextQuestionAnswer,\n  HFT_TextRewriter,\n  HFT_TextSummary,\n  HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\nexport async function register_HFT_InlineJobFns() {\n  // @ts-ignore\n  env.backends.onnx.wasm.proxy = true;\n  const ProviderRegistry = getAiProviderRegistry();\n  const fns = {\n    [\"DownloadModelTask\"]: HFT_Download,\n    [\"TextEmbeddingTask\"]: HFT_TextEmbedding,\n    [\"TextGenerationTask\"]: HFT_TextGeneration,\n    [\"TextQuestionAnswerTask\"]: HFT_TextQuestionAnswer,\n    [\"TextRewriterTask\"]: HFT_TextRewriter,\n    [\"TextSummaryTask\"]: HFT_TextSummary,\n    [\"TextTranslationTask\"]: HFT_TextTranslation,\n  };\n  for (const [jobName, fn] of Object.entries(fns)) {\n    ProviderRegistry.registerRunFn<any, any>(HF_TRANSFORMERS_ONNX, jobName, fn);\n  }\n}\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n  createServiceToken,\n  globalServiceRegistry,\n  parentPort,\n  WORKER_SERVER,\n} from \"@workglow/util\";\nimport {\n  HFT_Download,\n  HFT_TextEmbedding,\n  HFT_TextGeneration,\n  HFT_TextQuestionAnswer,\n  HFT_TextRewriter,\n  HFT_TextSummary,\n  HFT_TextTranslation,\n} from \"../common/HFT_JobRunFns\";\n\nexport const HFT_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.hft\");\n\nexport const HFT_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n  HFT_WORKER_JOBRUN,\n  () => {\n    const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n    workerServer.registerFunction(\"DownloadModelTask\", HFT_Download);\n    workerServer.registerFunction(\"TextEmbeddingTask\", HFT_TextEmbedding);\n    workerServer.registerFunction(\"TextGenerationTask\", HFT_TextGeneration);\n    workerServer.registerFunction(\"TextTranslationTask\", HFT_TextTranslation);\n    workerServer.registerFunction(\"TextRewriterTask\", HFT_TextRewriter);\n    workerServer.registerFunction(\"TextSummaryTask\", HFT_TextSummary);\n    workerServer.registerFunction(\"TextQuestionAnswerTask\", HFT_TextQuestionAnswer);\n    parentPort.postMessage({ type: \"ready\" });\n    console.log(\"HFT_WORKER_JOBRUN registered\");\n    return workerServer;\n  },\n  true\n);\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const TENSORFLOW_MEDIAPIPE = \"TENSORFLOW_MEDIAPIPE\";\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { FilesetResolver, TextEmbedder } from \"@mediapipe/tasks-text\";\nimport type {\n  AiProviderRunFn,\n  DeReplicateFromSchema,\n  DownloadModelTaskExecuteInput,\n  DownloadModelTaskExecuteOutput,\n  TextEmbeddingInputSchema,\n  TextEmbeddingOutputSchema,\n} from \"@workglow/ai\";\nimport { PermanentJobError } from \"@workglow/job-queue\";\n\n/**\n * Core implementation for downloading and caching a MediaPipe TFJS model.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_Download: AiProviderRunFn<\n  DownloadModelTaskExecuteInput,\n  DownloadModelTaskExecuteOutput\n> = async (input, model, onProgress, signal) => {\n  const textFiles = await FilesetResolver.forTextTasks(\n    \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n  );\n\n  // Create an embedder to get dimensions\n  const embedder = await TextEmbedder.createFromOptions(textFiles, {\n    baseOptions: {\n      modelAssetPath: model!.url,\n    },\n  });\n\n  return {\n    model: input.model,\n  };\n};\n\n/**\n * Core implementation for text embedding using MediaPipe TFJS.\n * This is shared between inline and worker implementations.\n */\nexport const TFMP_TextEmbedding: AiProviderRunFn<\n  DeReplicateFromSchema<typeof TextEmbeddingInputSchema>,\n  DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>\n> = async (input, model, onProgress, signal) => {\n  const textFiles = await FilesetResolver.forTextTasks(\n    \"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-text@latest/wasm\"\n  );\n\n  onProgress(0.1, \"Model loaded\");\n\n  const embedder = await TextEmbedder.createFromOptions(textFiles, {\n    baseOptions: {\n      modelAssetPath: model!.url,\n    },\n  });\n\n  if (signal.aborted) {\n    throw new PermanentJobError(\"Aborted job\");\n  }\n\n  onProgress(0.2, \"Embedding text\");\n\n  const result = embedder.embed(input.text);\n\n  if (!result.embeddings?.[0]?.floatEmbedding) {\n    throw new PermanentJobError(\"Failed to generate embedding: Empty result\");\n  }\n\n  const embedding = Float32Array.from(result.embeddings[0].floatEmbedding);\n\n  return {\n    vector: embedding,\n  };\n};\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { globalServiceRegistry, WORKER_MANAGER } from \"@workglow/util\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\n\nexport const register_TFMP_ClientJobFns = (worker: Worker) => {\n  const workerManager = globalServiceRegistry.get(WORKER_MANAGER);\n  workerManager.registerWorker(TENSORFLOW_MEDIAPIPE, worker);\n\n  const aiProviderRegistry = getAiProviderRegistry();\n  const names = [\"DownloadModelTask\", \"TextEmbeddingTask\"];\n  for (const name of names) {\n    aiProviderRegistry.registerAsWorkerRunFn(TENSORFLOW_MEDIAPIPE, name);\n  }\n};\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getAiProviderRegistry } from \"@workglow/ai\";\nimport { TENSORFLOW_MEDIAPIPE } from \"../common/TFMP_Constants\";\nimport { TFMP_Download, TFMP_TextEmbedding } from \"../common/TFMP_JobRunFns\";\n\nexport const register_TFMP_InlineJobFns = () => {\n  const aiProviderRegistry = getAiProviderRegistry();\n\n  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, \"DownloadModelTask\", TFMP_Download);\n  aiProviderRegistry.registerRunFn(TENSORFLOW_MEDIAPIPE, \"TextEmbeddingTask\", TFMP_TextEmbedding);\n};\n",
+    "/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n  createServiceToken,\n  globalServiceRegistry,\n  parentPort,\n  WORKER_SERVER,\n} from \"@workglow/util\";\nimport { TFMP_Download, TFMP_TextEmbedding } from \"../common/TFMP_JobRunFns\";\n\n// Register the worker functions\nexport const TFMP_WORKER_JOBRUN = createServiceToken(\"worker.ai-provider.tfmp\");\n\nexport const TFMP_WORKER_JOBRUN_REGISTER = globalServiceRegistry.register(\n  TFMP_WORKER_JOBRUN,\n  () => {\n    const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n    workerServer.registerFunction(\"DownloadModelTask\", TFMP_Download);\n    workerServer.registerFunction(\"TextEmbeddingTask\", TFMP_TextEmbedding);\n    parentPort.postMessage({ type: \"ready\" });\n    console.log(\"TFMP_WORKER_JOBRUN registered\");\n    return workerServer;\n  },\n  true\n);\n"
+  ],
+  "mappings": ";AAMO,IAAM,uBAAuB;AAE7B,IAAK;AAAA,CAAL,CAAK,6BAAL;AAAA,EACL,mCAAO;AAAA,EACP,mCAAO;AAAA,EACP,mCAAO;AAAA,EACP,iCAAK;AAAA,EACL,mCAAO;AAAA,EACP,oCAAQ;AAAA,EACR,iCAAK;AAAA,EACL,mCAAO;AAAA,EACP,oCAAQ;AAAA,GATE;;ACFZ;AAAA;AAAA;AAAA;AAmCA;AAIA,IAAM,YAAY,IAAI;AAKtB,IAAM,cAAc,OAClB,OACA,YACA,UAAkC,CAAC,MAChC;AAAA,EACH,IAAI,UAAU,IAAI,MAAM,IAAI,GAAG;AAAA,IAC7B,OAAO,UAAU,IAAI,MAAM,IAAI;AAAA,EACjC;AAAA,EAGA,MAAM,mBAAmB,CAAC,WAA2B;AAAA,IACnD,MAAM,WAAW,OAAO,WAAW,aAAa,KAAK,MAAM,OAAO,QAAQ,IAAI;AAAA,IAC9E,IAAI,OAAO,WAAW,YAAY;AAAA,MAChC,WAAW,UAAU,qBAAqB;AAAA,QACxC,MAAM,OAAO;AAAA,QACb,UAAU,OAAO;AAAA,MACnB,CAAC;AAAA,IACH;AAAA;AAAA,EAGF,MAAM,kBAA0C;AAAA,IAC9C,OAAQ,MAAM,gBAA4C;AAAA,OACtD,MAAM,2BACN,EAAE,0BAA0B,MAAM,yBAAyB,IAC3D,CAAC;AAAA,OACD,MAAM,SAAS,EAAE,QAAQ,MAAM,OAAc,IAAI,CAAC;AAAA,OACnD;AAAA,IACH,mBAAmB;AAAA,EACrB;AAAA,EAEA,MAAM,eAAe,MAAM;AAAA,EAC3B,MAAM,SAAS,MAAM,SAAS,cAAc,MAAM,KAAK,eAAe;AAAA,EACtE,UAAU,IAAI,MAAM,MAAM,MAAM;AAAA,EAChC,OAAO;AAAA;AAOF,IAAM,eAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,YAAY,OAAQ,YAAY,EAAE,cAAc,OAAO,CAAC;AAAA,EAE9D,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAUK,IAAM,oBAA8E,OACzF,OACA,OACA,YACA,WACG;AAAA,EACH,MAAM,oBAA+C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACzF,cAAc;AAAA,EAChB,CAAC;AAAA,EAGD,MAAM,WAAW,MAAM,kBAAkB,MAAM,MAAM;AAAA,IACnD,SAAS;AAAA,IACT,WAAW,MAAO;AAAA,OACd,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAGD,IAAI,SAAS,SAAS,MAAO,kBAAkB;AAAA,IAC7C,QAAQ,KACN,wEAAwE,SAAS,YAAY,MAAO,oBACpG,OACA,QACF;AAAA,IACA,MAAM,IAAI,kBACR,wEAAwE,SAAS,YAAY,MAAO,kBACtG;AAAA,EACF;AAAA,EAEA,OAAO,EAAE,QAAQ,SAAS,KAAmB;AAAA;AAOxC,IAAM,qBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EAED,MAAM,WAAW,mBAAmB,aAAa,WAAW,YAAY,MAAM;AAAA,EAE9E,IAAI,UAAU,MAAM,aAAa,MAAM,QAAQ;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EACA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EAEjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EACA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,sBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAiC,MAAM,YAAY,OAAQ,YAAY;AAAA,IAC3E,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,UAAU,WAAW,UAAU;AAAA,EAEnE,MAAM,SAAS,MAAM,UAAU,MAAM,MAAM;AAAA,IACzC,UAAU,MAAM;AAAA,IAChB,UAAU,MAAM;AAAA,IAChB;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,iBAAoC;AAAA,EACxC,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,iBAAiB,OAAO,IAAI,CAAC,MAAO,GAAyB,oBAAoB,EAAE;AAAA,EACrF,EAAO;AAAA,IACL,iBAAkB,QAA8B,oBAAoB;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,IACN,aAAa,MAAM;AAAA,EACrB;AAAA;AAOK,IAAM,mBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,eAAuC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACjF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,aAAa,WAAW,UAAU;AAAA,EAGtE,MAAM,gBAAgB,MAAM,SAAS,MAAM,SAAS;AAAA,IAAO,MAAM,MAAM;AAAA,EAEvE,IAAI,UAAU,MAAM,aAAa,cAAc;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA,EAED,IAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAAA,IAC3B,UAAU,CAAC,OAAO;AAAA,EACpB;AAAA,EAEA,IAAI,OAAQ,QAAQ,IAA6B;AAAA,EACjD,IAAI,MAAM,QAAQ,IAAI,GAAG;AAAA,IACvB,OAAO,KAAK,KAAK,SAAS,IAAI;AAAA,EAChC;AAAA,EAEA,IAAI,SAAS,cAAc;AAAA,IACzB,MAAM,IAAI,kBAAkB,sCAAsC;AAAA,EACpE;AAAA,EAEA,OAAO;AAAA,IACL;AAAA,EACF;AAAA;AAOK,IAAM,kBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,kBAAyC,MAAM,YAAY,OAAQ,YAAY;AAAA,IACnF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,gBAAgB,WAAW,UAAU;AAAA,EAEzE,IAAI,SAAS,MAAM,gBAAgB,MAAM,MAAM;AAAA,IAC7C;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,cAAc;AAAA,EAClB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,cAAe,OAAO,IAA4B,gBAAgB;AAAA,EACpE,EAAO;AAAA,IACL,cAAe,QAAgC,gBAAgB;AAAA;AAAA,EAGjE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAOK,IAAM,yBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAE9C,MAAM,iBAA4C,MAAM,YAAY,OAAQ,YAAY;AAAA,IACtF,cAAc;AAAA,EAChB,CAAC;AAAA,EACD,MAAM,WAAW,mBAAmB,eAAe,WAAW,UAAU;AAAA,EAExE,MAAM,SAAS,MAAM,eAAe,MAAM,UAAU,MAAM,SAAS;AAAA,IACjE;AAAA,OACI,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAQ;AAAA,EAER,IAAI,aAAa;AAAA,EACjB,IAAI,MAAM,QAAQ,MAAM,GAAG;AAAA,IACzB,aAAc,OAAO,IAAwC,UAAU;AAAA,EACzE,EAAO;AAAA,IACL,aAAc,QAA4C,UAAU;AAAA;AAAA,EAGtE,OAAO;AAAA,IACL,MAAM;AAAA,EACR;AAAA;AAUF,SAAS,kBAAkB,CACzB,WACA,gBACA,QACA;AAAA,EACA,IAAI,QAAQ;AAAA,EACZ,OAAO,IAAI,aAAa,WAAW;AAAA,IACjC,aAAa;AAAA,IACb,eAAe,EAAE,qBAAqB,KAAK;AAAA,IAC3C,mBAAmB,CAAC,SAAiB;AAAA,MACnC;AAAA,MACA,MAAM,SAAS,OAAO,IAAI,KAAK,IAAI,QAAQ,KAAK;AAAA,MAChD,MAAM,WAAW,KAAK,MAAM,KAAK,IAAI,QAAQ,GAAG,CAAC;AAAA,MACjD,eAAe,UAAU,cAAc,EAAE,MAAM,SAAS,CAAC;AAAA;AAAA,OAEvD,SAAS,EAAE,cAAc,OAAO,IAAI,CAAC;AAAA,EAC3C,CAAC;AAAA;;ACnUH;AACA;AAGA,eAAsB,yBAAyB,CAAC,QAAgB;AAAA,EAC9D,MAAM,gBAAgB,sBAAsB,IAAI,cAAc;AAAA,EAE9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,mBAAmB,sBAAsB;AAAA,EAC/C,MAAM,QAAQ;AAAA,IACZ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,WAAW,QAAQ,OAAO;AAAA,IACxB,iBAAiB,sBAAsB,sBAAsB,IAAI;AAAA,EACnE;AAAA;;ACrBF;AACA,kCAAS;AAYT,eAAsB,yBAAyB,GAAG;AAAA,EAEhD,IAAI,SAAS,KAAK,KAAK,QAAQ;AAAA,EAC/B,MAAM,mBAAmB,uBAAsB;AAAA,EAC/C,MAAM,MAAM;AAAA,KACT,sBAAsB;AAAA,KACtB,sBAAsB;AAAA,KACtB,uBAAuB;AAAA,KACvB,2BAA2B;AAAA,KAC3B,qBAAqB;AAAA,KACrB,oBAAoB;AAAA,KACpB,wBAAwB;AAAA,EAC3B;AAAA,EACA,YAAY,SAAS,OAAO,OAAO,QAAQ,GAAG,GAAG;AAAA,IAC/C,iBAAiB,cAAwB,sBAAsB,SAAS,EAAE;AAAA,EAC5E;AAAA;;AC5BF;AAAA;AAAA,2BAEE;AAAA;AAAA;AAAA;AAcK,IAAM,oBAAoB,mBAAmB,wBAAwB;AAErE,IAAM,6BAA6B,uBAAsB,SAC9D,mBACA,MAAM;AAAA,EACJ,MAAM,eAAe,uBAAsB,IAAI,aAAa;AAAA,EAC5D,aAAa,iBAAiB,qBAAqB,YAAY;AAAA,EAC/D,aAAa,iBAAiB,qBAAqB,iBAAiB;AAAA,EACpE,aAAa,iBAAiB,sBAAsB,kBAAkB;AAAA,EACtE,aAAa,iBAAiB,uBAAuB,mBAAmB;AAAA,EACxE,aAAa,iBAAiB,oBAAoB,gBAAgB;AAAA,EAClE,aAAa,iBAAiB,mBAAmB,eAAe;AAAA,EAChE,aAAa,iBAAiB,0BAA0B,sBAAsB;AAAA,EAC9E,WAAW,YAAY,EAAE,MAAM,QAAQ,CAAC;AAAA,EACxC,QAAQ,IAAI,8BAA8B;AAAA,EAC1C,OAAO;AAAA,GAET,IACF;;AClCO,IAAM,uBAAuB;;ACApC;AASA,8BAAS;AAMF,IAAM,gBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAY,MAAM,gBAAgB,aACtC,gEACF;AAAA,EAGA,MAAM,WAAW,MAAM,aAAa,kBAAkB,WAAW;AAAA,IAC/D,aAAa;AAAA,MACX,gBAAgB,MAAO;AAAA,IACzB;AAAA,EACF,CAAC;AAAA,EAED,OAAO;AAAA,IACL,OAAO,MAAM;AAAA,EACf;AAAA;AAOK,IAAM,qBAGT,OAAO,OAAO,OAAO,YAAY,WAAW;AAAA,EAC9C,MAAM,YAAY,MAAM,gBAAgB,aACtC,gEACF;AAAA,EAEA,WAAW,KAAK,cAAc;AAAA,EAE9B,MAAM,WAAW,MAAM,aAAa,kBAAkB,WAAW;AAAA,IAC/D,aAAa;AAAA,MACX,gBAAgB,MAAO;AAAA,IACzB;AAAA,EACF,CAAC;AAAA,EAED,IAAI,OAAO,SAAS;AAAA,IAClB,MAAM,IAAI,mBAAkB,aAAa;AAAA,EAC3C;AAAA,EAEA,WAAW,KAAK,gBAAgB;AAAA,EAEhC,MAAM,SAAS,SAAS,MAAM,MAAM,IAAI;AAAA,EAExC,IAAI,CAAC,OAAO,aAAa,IAAI,gBAAgB;AAAA,IAC3C,MAAM,IAAI,mBAAkB,4CAA4C;AAAA,EAC1E;AAAA,EAEA,MAAM,YAAY,aAAa,KAAK,OAAO,WAAW,GAAG,cAAc;AAAA,EAEvE,OAAO;AAAA,IACL,QAAQ;AAAA,EACV;AAAA;;ACvEF,kCAAS;AACT,kCAAS,0CAAuB;AAGzB,IAAM,6BAA6B,CAAC,WAAmB;AAAA,EAC5D,MAAM,gBAAgB,uBAAsB,IAAI,eAAc;AAAA,EAC9D,cAAc,eAAe,sBAAsB,MAAM;AAAA,EAEzD,MAAM,qBAAqB,uBAAsB;AAAA,EACjD,MAAM,QAAQ,CAAC,qBAAqB,mBAAmB;AAAA,EACvD,WAAW,QAAQ,OAAO;AAAA,IACxB,mBAAmB,sBAAsB,sBAAsB,IAAI;AAAA,EACrE;AAAA;;ACZF,kCAAS;AAIF,IAAM,6BAA6B,MAAM;AAAA,EAC9C,MAAM,qBAAqB,uBAAsB;AAAA,EAEjD,mBAAmB,cAAc,sBAAsB,qBAAqB,aAAa;AAAA,EACzF,mBAAmB,cAAc,sBAAsB,qBAAqB,kBAAkB;AAAA;;ACRhG;AAAA,wBACE;AAAA,2BACA;AAAA,gBACA;AAAA,mBACA;AAAA;AAKK,IAAM,qBAAqB,oBAAmB,yBAAyB;AAEvE,IAAM,8BAA8B,uBAAsB,SAC/D,oBACA,MAAM;AAAA,EACJ,MAAM,eAAe,uBAAsB,IAAI,cAAa;AAAA,EAC5D,aAAa,iBAAiB,qBAAqB,aAAa;AAAA,EAChE,aAAa,iBAAiB,qBAAqB,kBAAkB;AAAA,EACrE,YAAW,YAAY,EAAE,MAAM,QAAQ,CAAC;AAAA,EACxC,QAAQ,IAAI,+BAA+B;AAAA,EAC3C,OAAO;AAAA,GAET,IACF;",
+  "debugId": "8CC1F78E1614B52764756E2164756E21",
+  "names": []
+}

package/dist/openai/provider/OpenAI_TaskRun.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"OpenAI_TaskRun.d.ts","sourceRoot":"","sources":["../../../src/openai/provider/OpenAI_TaskRun.ts"],"names":[],"mappings":""}

package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"TFMP_Constants.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_Constants.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,eAAO,MAAM,oBAAoB,yBAAyB,CAAC"}

package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts
ADDED
@@ -0,0 +1,17 @@
+/**
+ * @license
+ * Copyright 2025 Steven Roussey <sroussey@gmail.com>
+ * SPDX-License-Identifier: Apache-2.0
+ */
+import type { AiProviderRunFn, DeReplicateFromSchema, DownloadModelTaskExecuteInput, DownloadModelTaskExecuteOutput, TextEmbeddingInputSchema, TextEmbeddingOutputSchema } from "@workglow/ai";
+/**
+ * Core implementation for downloading and caching a MediaPipe TFJS model.
+ * This is shared between inline and worker implementations.
+ */
+export declare const TFMP_Download: AiProviderRunFn<DownloadModelTaskExecuteInput, DownloadModelTaskExecuteOutput>;
+/**
+ * Core implementation for text embedding using MediaPipe TFJS.
+ * This is shared between inline and worker implementations.
+ */
+export declare const TFMP_TextEmbedding: AiProviderRunFn<DeReplicateFromSchema<typeof TextEmbeddingInputSchema>, DeReplicateFromSchema<typeof TextEmbeddingOutputSchema>>;
+//# sourceMappingURL=TFMP_JobRunFns.d.ts.map
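
The MediaPipe path exposes only download and embedding. A hedged sketch of inline use — the model record and asset URL below are illustrative placeholders (the real `Model` type comes from `@workglow/ai`):

```ts
import { register_TFMP_InlineJobFns, TFMP_TextEmbedding } from "@workglow/ai-provider";

// Bind the two MediaPipe run functions to TENSORFLOW_MEDIAPIPE in the
// AI provider registry for same-thread execution.
register_TFMP_InlineJobFns();

// Or call the run function directly; modelAssetPath is taken from model.url.
const { vector } = await TFMP_TextEmbedding(
  { text: "hello world" },
  { name: "use-embedder", url: "https://example.com/universal_sentence_encoder.tflite" } as any,
  (progress, message) => console.log(progress, message),
  new AbortController().signal
);
```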

package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"TFMP_JobRunFns.d.ts","sourceRoot":"","sources":["../../../src/tf-mediapipe/common/TFMP_JobRunFns.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAGH,OAAO,KAAK,EACR,eAAe,EACf,qBAAqB,EACrB,6BAA6B,EAC7B,8BAA8B,EAC9B,wBAAwB,EACxB,yBAAyB,EAC5B,MAAM,cAAc,CAAC;AAGtB;;;GAGG;AACH,eAAO,MAAM,aAAa,EAAE,eAAe,CACzC,6BAA6B,EAC7B,8BAA8B,CAgB/B,CAAC;AAEF;;;GAGG;AACH,eAAO,MAAM,kBAAkB,EAAE,eAAe,CAC9C,qBAAqB,CAAC,OAAO,wBAAwB,CAAC,EACtD,qBAAqB,CAAC,OAAO,yBAAyB,CAAC,CA+BxD,CAAC"}