@workglow/ai-provider 0.0.121 → 0.0.123
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +30 -67
- package/dist/common/HfModelSearch.d.ts +32 -0
- package/dist/common/HfModelSearch.d.ts.map +1 -0
- package/dist/common/PipelineTaskMapping.d.ts +12 -0
- package/dist/common/PipelineTaskMapping.d.ts.map +1 -0
- package/dist/{anthropic → provider-anthropic}/AnthropicProvider.d.ts +2 -14
- package/dist/provider-anthropic/AnthropicProvider.d.ts.map +1 -0
- package/dist/provider-anthropic/AnthropicQueuedProvider.d.ts +16 -0
- package/dist/provider-anthropic/AnthropicQueuedProvider.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_Client.d.ts +13 -0
- package/dist/provider-anthropic/common/Anthropic_Client.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_Constants.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_CountTokens.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_CountTokens.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_JobRunFns.d.ts +12 -0
- package/dist/provider-anthropic/common/Anthropic_JobRunFns.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_ModelInfo.d.ts +9 -0
- package/dist/provider-anthropic/common/Anthropic_ModelInfo.d.ts.map +1 -0
- package/dist/{anthropic → provider-anthropic}/common/Anthropic_ModelSchema.d.ts +31 -31
- package/dist/provider-anthropic/common/Anthropic_ModelSchema.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_ModelSearch.d.ts +8 -0
- package/dist/provider-anthropic/common/Anthropic_ModelSearch.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_StructuredGeneration.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_TextGeneration.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_TextGeneration.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_TextRewriter.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_TextRewriter.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_TextSummary.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_TextSummary.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_ToolCalling.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_ToolCalling.d.ts.map +1 -0
- package/dist/{anthropic → provider-anthropic}/index.d.ts +1 -3
- package/dist/provider-anthropic/index.d.ts.map +1 -0
- package/dist/{index-60ev6k93.js → provider-anthropic/index.js} +43 -11
- package/dist/provider-anthropic/index.js.map +13 -0
- package/dist/provider-anthropic/registerAnthropic.d.ts +10 -0
- package/dist/provider-anthropic/registerAnthropic.d.ts.map +1 -0
- package/dist/provider-anthropic/registerAnthropicInline.d.ts +8 -0
- package/dist/provider-anthropic/registerAnthropicInline.d.ts.map +1 -0
- package/dist/provider-anthropic/registerAnthropicWorker.d.ts +7 -0
- package/dist/provider-anthropic/registerAnthropicWorker.d.ts.map +1 -0
- package/dist/provider-anthropic/runtime.d.ts +16 -0
- package/dist/provider-anthropic/runtime.d.ts.map +1 -0
- package/dist/{anthropic/index.js → provider-anthropic/runtime.js} +291 -177
- package/dist/provider-anthropic/runtime.js.map +24 -0
- package/dist/{web-browser → provider-chrome}/WebBrowserProvider.d.ts +2 -15
- package/dist/provider-chrome/WebBrowserProvider.d.ts.map +1 -0
- package/dist/provider-chrome/WebBrowserQueuedProvider.d.ts +16 -0
- package/dist/provider-chrome/WebBrowserQueuedProvider.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_ChromeHelpers.d.ts +31 -0
- package/dist/provider-chrome/common/WebBrowser_ChromeHelpers.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_Constants.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_JobRunFns.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_JobRunFns.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_ModelInfo.d.ts +9 -0
- package/dist/provider-chrome/common/WebBrowser_ModelInfo.d.ts.map +1 -0
- package/dist/{web-browser → provider-chrome}/common/WebBrowser_ModelSchema.d.ts +31 -31
- package/dist/provider-chrome/common/WebBrowser_ModelSchema.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_ModelSearch.d.ts +8 -0
- package/dist/provider-chrome/common/WebBrowser_ModelSearch.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextGeneration.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextGeneration.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextLanguageDetection.d.ts +9 -0
- package/dist/provider-chrome/common/WebBrowser_TextLanguageDetection.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextRewriter.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextRewriter.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextSummary.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextSummary.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextTranslation.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextTranslation.d.ts.map +1 -0
- package/dist/{web-browser → provider-chrome}/index.d.ts +1 -3
- package/dist/provider-chrome/index.d.ts.map +1 -0
- package/dist/provider-chrome/index.js +132 -0
- package/dist/provider-chrome/index.js.map +13 -0
- package/dist/provider-chrome/registerWebBrowser.d.ts +10 -0
- package/dist/provider-chrome/registerWebBrowser.d.ts.map +1 -0
- package/dist/provider-chrome/registerWebBrowserInline.d.ts +8 -0
- package/dist/provider-chrome/registerWebBrowserInline.d.ts.map +1 -0
- package/dist/provider-chrome/registerWebBrowserWorker.d.ts +7 -0
- package/dist/provider-chrome/registerWebBrowserWorker.d.ts.map +1 -0
- package/dist/provider-chrome/runtime.d.ts +14 -0
- package/dist/provider-chrome/runtime.d.ts.map +1 -0
- package/dist/{web-browser/index.js → provider-chrome/runtime.js} +260 -235
- package/dist/provider-chrome/runtime.js.map +23 -0
- package/dist/{google-gemini → provider-gemini}/GoogleGeminiProvider.d.ts +2 -15
- package/dist/provider-gemini/GoogleGeminiProvider.d.ts.map +1 -0
- package/dist/provider-gemini/GoogleGeminiQueuedProvider.d.ts +16 -0
- package/dist/provider-gemini/GoogleGeminiQueuedProvider.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_Client.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_Client.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_Constants.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_CountTokens.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_CountTokens.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_JobRunFns.d.ts +13 -0
- package/dist/provider-gemini/common/Gemini_JobRunFns.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_ModelInfo.d.ts +9 -0
- package/dist/provider-gemini/common/Gemini_ModelInfo.d.ts.map +1 -0
- package/dist/{google-gemini → provider-gemini}/common/Gemini_ModelSchema.d.ts +31 -31
- package/dist/provider-gemini/common/Gemini_ModelSchema.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_ModelSearch.d.ts +8 -0
- package/dist/provider-gemini/common/Gemini_ModelSearch.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_Schema.d.ts +11 -0
- package/dist/provider-gemini/common/Gemini_Schema.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_StructuredGeneration.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextEmbedding.d.ts +9 -0
- package/dist/provider-gemini/common/Gemini_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextGeneration.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_TextGeneration.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextRewriter.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_TextRewriter.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextSummary.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_TextSummary.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_ToolCalling.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_ToolCalling.d.ts.map +1 -0
- package/dist/{google-gemini → provider-gemini}/index.d.ts +1 -3
- package/dist/provider-gemini/index.d.ts.map +1 -0
- package/dist/{index-8651nz8y.js → provider-gemini/index.js} +43 -11
- package/dist/provider-gemini/index.js.map +13 -0
- package/dist/provider-gemini/registerGemini.d.ts +10 -0
- package/dist/provider-gemini/registerGemini.d.ts.map +1 -0
- package/dist/provider-gemini/registerGeminiInline.d.ts +8 -0
- package/dist/provider-gemini/registerGeminiInline.d.ts.map +1 -0
- package/dist/{anthropic/Anthropic_Worker.d.ts → provider-gemini/registerGeminiWorker.d.ts} +2 -2
- package/dist/provider-gemini/registerGeminiWorker.d.ts.map +1 -0
- package/dist/provider-gemini/runtime.d.ts +16 -0
- package/dist/provider-gemini/runtime.d.ts.map +1 -0
- package/dist/{google-gemini/index.js → provider-gemini/runtime.js} +281 -173
- package/dist/provider-gemini/runtime.js.map +26 -0
- package/dist/provider-hf-inference/HfInferenceProvider.d.ts +2 -19
- package/dist/provider-hf-inference/HfInferenceProvider.d.ts.map +1 -1
- package/dist/provider-hf-inference/HfInferenceQueuedProvider.d.ts +16 -0
- package/dist/provider-hf-inference/HfInferenceQueuedProvider.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_Client.d.ts +12 -0
- package/dist/provider-hf-inference/common/HFI_Client.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_JobRunFns.d.ts +2 -11
- package/dist/provider-hf-inference/common/HFI_JobRunFns.d.ts.map +1 -1
- package/dist/provider-hf-inference/common/HFI_ModelInfo.d.ts +9 -0
- package/dist/provider-hf-inference/common/HFI_ModelInfo.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_ModelSchema.d.ts +31 -31
- package/dist/provider-hf-inference/common/HFI_ModelSchema.d.ts.map +1 -1
- package/dist/provider-hf-inference/common/HFI_ModelSearch.d.ts +8 -0
- package/dist/provider-hf-inference/common/HFI_ModelSearch.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextEmbedding.d.ts +9 -0
- package/dist/provider-hf-inference/common/HFI_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextGeneration.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_TextGeneration.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextRewriter.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_TextRewriter.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextSummary.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_TextSummary.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_ToolCalling.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_ToolCalling.d.ts.map +1 -0
- package/dist/provider-hf-inference/index.d.ts +1 -3
- package/dist/provider-hf-inference/index.d.ts.map +1 -1
- package/dist/provider-hf-inference/index.js +98 -411
- package/dist/provider-hf-inference/index.js.map +7 -5
- package/dist/provider-hf-inference/registerHfInference.d.ts +10 -0
- package/dist/provider-hf-inference/registerHfInference.d.ts.map +1 -0
- package/dist/provider-hf-inference/registerHfInferenceInline.d.ts +8 -0
- package/dist/provider-hf-inference/registerHfInferenceInline.d.ts.map +1 -0
- package/dist/provider-hf-inference/registerHfInferenceWorker.d.ts +7 -0
- package/dist/provider-hf-inference/registerHfInferenceWorker.d.ts.map +1 -0
- package/dist/provider-hf-inference/runtime.d.ts +16 -0
- package/dist/provider-hf-inference/runtime.d.ts.map +1 -0
- package/dist/provider-hf-inference/runtime.js +592 -0
- package/dist/provider-hf-inference/runtime.js.map +25 -0
- package/dist/{hf-transformers → provider-hf-transformers}/HuggingFaceTransformersProvider.d.ts +2 -21
- package/dist/provider-hf-transformers/HuggingFaceTransformersProvider.d.ts.map +1 -0
- package/dist/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.d.ts +16 -0
- package/dist/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_BackgroundRemoval.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_BackgroundRemoval.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_Constants.d.ts +25 -23
- package/dist/provider-hf-transformers/common/HFT_Constants.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_CountTokens.d.ts +10 -0
- package/dist/provider-hf-transformers/common/HFT_CountTokens.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Download.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_Download.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageClassification.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_ImageClassification.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageEmbedding.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_ImageEmbedding.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageHelpers.d.ts +11 -0
- package/dist/provider-hf-transformers/common/HFT_ImageHelpers.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageSegmentation.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_ImageSegmentation.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageToText.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_ImageToText.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_InlineLifecycle.d.ts +7 -0
- package/dist/provider-hf-transformers/common/HFT_InlineLifecycle.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_JobRunFns.d.ts +673 -789
- package/dist/provider-hf-transformers/common/HFT_JobRunFns.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ModelInfo.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_ModelInfo.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_ModelSchema.d.ts +37 -37
- package/dist/provider-hf-transformers/common/HFT_ModelSchema.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ModelSearch.d.ts +8 -0
- package/dist/provider-hf-transformers/common/HFT_ModelSearch.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ObjectDetection.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_ObjectDetection.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_OnnxDtypes.d.ts +23 -0
- package/dist/provider-hf-transformers/common/HFT_OnnxDtypes.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Pipeline.d.ts +32 -0
- package/dist/provider-hf-transformers/common/HFT_Pipeline.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Streaming.d.ts +25 -0
- package/dist/provider-hf-transformers/common/HFT_Streaming.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_StructuredGeneration.d.ts +10 -0
- package/dist/provider-hf-transformers/common/HFT_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextClassification.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextClassification.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextEmbedding.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextFillMask.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextFillMask.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextGeneration.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextGeneration.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextLanguageDetection.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextLanguageDetection.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextOutput.d.ts +8 -0
- package/dist/provider-hf-transformers/common/HFT_TextOutput.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextQuestionAnswer.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextQuestionAnswer.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextRewriter.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextRewriter.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextSummary.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextSummary.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextTranslation.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextTranslation.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ToolCalling.d.ts +10 -0
- package/dist/provider-hf-transformers/common/HFT_ToolCalling.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ToolMarkup.d.ts +40 -0
- package/dist/provider-hf-transformers/common/HFT_ToolMarkup.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Unload.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_Unload.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/index.d.ts +4 -2
- package/dist/provider-hf-transformers/index.d.ts.map +1 -0
- package/dist/provider-hf-transformers/index.js +513 -0
- package/dist/provider-hf-transformers/index.js.map +16 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformers.d.ts +14 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformers.d.ts.map +1 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersInline.d.ts +15 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersInline.d.ts.map +1 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersWorker.d.ts +7 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersWorker.d.ts.map +1 -0
- package/dist/provider-hf-transformers/runtime.d.ts +21 -0
- package/dist/provider-hf-transformers/runtime.d.ts.map +1 -0
- package/dist/{index-j4g81r4k.js → provider-hf-transformers/runtime.js} +1564 -928
- package/dist/provider-hf-transformers/runtime.js.map +49 -0
- package/dist/provider-llamacpp/LlamaCppProvider.d.ts +2 -15
- package/dist/provider-llamacpp/LlamaCppProvider.d.ts.map +1 -1
- package/dist/provider-llamacpp/LlamaCppQueuedProvider.d.ts +16 -0
- package/dist/provider-llamacpp/LlamaCppQueuedProvider.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_CountTokens.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_CountTokens.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Download.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Download.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_JobRunFns.d.ts +2 -18
- package/dist/provider-llamacpp/common/LlamaCpp_JobRunFns.d.ts.map +1 -1
- package/dist/provider-llamacpp/common/LlamaCpp_ModelInfo.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ModelInfo.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSchema.d.ts +31 -31
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSchema.d.ts.map +1 -1
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSearch.d.ts +8 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSearch.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Runtime.d.ts +28 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Runtime.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_StructuredGeneration.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextEmbedding.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextGeneration.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextGeneration.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextRewriter.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextRewriter.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextSummary.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextSummary.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ToolCalling.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ToolCalling.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Unload.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Unload.d.ts.map +1 -0
- package/dist/provider-llamacpp/index.d.ts +1 -3
- package/dist/provider-llamacpp/index.d.ts.map +1 -1
- package/dist/provider-llamacpp/index.js +121 -725
- package/dist/provider-llamacpp/index.js.map +7 -5
- package/dist/provider-llamacpp/registerLlamaCpp.d.ts +10 -0
- package/dist/provider-llamacpp/registerLlamaCpp.d.ts.map +1 -0
- package/dist/provider-llamacpp/registerLlamaCppInline.d.ts +8 -0
- package/dist/provider-llamacpp/registerLlamaCppInline.d.ts.map +1 -0
- package/dist/provider-llamacpp/registerLlamaCppWorker.d.ts +7 -0
- package/dist/provider-llamacpp/registerLlamaCppWorker.d.ts.map +1 -0
- package/dist/provider-llamacpp/runtime.d.ts +16 -0
- package/dist/provider-llamacpp/runtime.d.ts.map +1 -0
- package/dist/provider-llamacpp/runtime.js +929 -0
- package/dist/provider-llamacpp/runtime.js.map +29 -0
- package/dist/provider-ollama/OllamaProvider.d.ts +2 -15
- package/dist/provider-ollama/OllamaProvider.d.ts.map +1 -1
- package/dist/provider-ollama/OllamaQueuedProvider.d.ts +16 -0
- package/dist/provider-ollama/OllamaQueuedProvider.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_Client.browser.d.ts +13 -0
- package/dist/provider-ollama/common/Ollama_Client.browser.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_Client.d.ts +13 -0
- package/dist/provider-ollama/common/Ollama_Client.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_JobRunFns.browser.d.ts +362 -11
- package/dist/provider-ollama/common/Ollama_JobRunFns.browser.d.ts.map +1 -1
- package/dist/provider-ollama/common/Ollama_JobRunFns.d.ts +361 -11
- package/dist/provider-ollama/common/Ollama_JobRunFns.d.ts.map +1 -1
- package/dist/provider-ollama/common/Ollama_ModelInfo.d.ts +11 -0
- package/dist/provider-ollama/common/Ollama_ModelInfo.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_ModelSchema.d.ts +30 -30
- package/dist/provider-ollama/common/Ollama_ModelSchema.d.ts.map +1 -1
- package/dist/provider-ollama/common/Ollama_ModelSearch.d.ts +11 -0
- package/dist/provider-ollama/common/Ollama_ModelSearch.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_ModelUtil.d.ts +8 -0
- package/dist/provider-ollama/common/Ollama_ModelUtil.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextEmbedding.d.ts +11 -0
- package/dist/provider-ollama/common/Ollama_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextGeneration.d.ts +12 -0
- package/dist/provider-ollama/common/Ollama_TextGeneration.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextRewriter.d.ts +12 -0
- package/dist/provider-ollama/common/Ollama_TextRewriter.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextSummary.d.ts +12 -0
- package/dist/provider-ollama/common/Ollama_TextSummary.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_ToolCalling.d.ts +16 -0
- package/dist/provider-ollama/common/Ollama_ToolCalling.d.ts.map +1 -0
- package/dist/provider-ollama/index.browser.d.ts +1 -3
- package/dist/provider-ollama/index.browser.d.ts.map +1 -1
- package/dist/provider-ollama/index.browser.js +18 -396
- package/dist/provider-ollama/index.browser.js.map +6 -7
- package/dist/provider-ollama/index.d.ts +1 -3
- package/dist/provider-ollama/index.d.ts.map +1 -1
- package/dist/provider-ollama/index.js +93 -382
- package/dist/provider-ollama/index.js.map +7 -5
- package/dist/provider-ollama/registerOllama.d.ts +10 -0
- package/dist/provider-ollama/registerOllama.d.ts.map +1 -0
- package/dist/provider-ollama/registerOllamaInline.browser.d.ts +8 -0
- package/dist/provider-ollama/registerOllamaInline.browser.d.ts.map +1 -0
- package/dist/provider-ollama/registerOllamaInline.d.ts +8 -0
- package/dist/provider-ollama/registerOllamaInline.d.ts.map +1 -0
- package/dist/provider-ollama/registerOllamaWorker.browser.d.ts +7 -0
- package/dist/provider-ollama/registerOllamaWorker.browser.d.ts.map +1 -0
- package/dist/{google-gemini/Gemini_Worker.d.ts → provider-ollama/registerOllamaWorker.d.ts} +2 -2
- package/dist/provider-ollama/registerOllamaWorker.d.ts.map +1 -0
- package/dist/provider-ollama/runtime.browser.d.ts +16 -0
- package/dist/provider-ollama/runtime.browser.d.ts.map +1 -0
- package/dist/provider-ollama/runtime.browser.js +528 -0
- package/dist/provider-ollama/runtime.browser.js.map +24 -0
- package/dist/provider-ollama/runtime.d.ts +16 -0
- package/dist/provider-ollama/runtime.d.ts.map +1 -0
- package/dist/provider-ollama/runtime.js +538 -0
- package/dist/provider-ollama/runtime.js.map +24 -0
- package/dist/provider-openai/OpenAiProvider.d.ts +2 -19
- package/dist/provider-openai/OpenAiProvider.d.ts.map +1 -1
- package/dist/provider-openai/OpenAiQueuedProvider.d.ts +16 -0
- package/dist/provider-openai/OpenAiQueuedProvider.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_Client.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_Client.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_CountTokens.browser.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_CountTokens.browser.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_CountTokens.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_CountTokens.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_JobRunFns.browser.d.ts +12 -0
- package/dist/provider-openai/common/OpenAI_JobRunFns.browser.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_JobRunFns.d.ts +2 -15
- package/dist/provider-openai/common/OpenAI_JobRunFns.d.ts.map +1 -1
- package/dist/provider-openai/common/OpenAI_ModelInfo.d.ts +9 -0
- package/dist/provider-openai/common/OpenAI_ModelInfo.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_ModelSchema.d.ts +31 -31
- package/dist/provider-openai/common/OpenAI_ModelSchema.d.ts.map +1 -1
- package/dist/provider-openai/common/OpenAI_ModelSearch.d.ts +8 -0
- package/dist/provider-openai/common/OpenAI_ModelSearch.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_StructuredGeneration.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextEmbedding.d.ts +9 -0
- package/dist/provider-openai/common/OpenAI_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextGeneration.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_TextGeneration.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextRewriter.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_TextRewriter.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextSummary.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_TextSummary.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_ToolCalling.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_ToolCalling.d.ts.map +1 -0
- package/dist/provider-openai/index.browser.d.ts +9 -0
- package/dist/provider-openai/index.browser.d.ts.map +1 -0
- package/dist/{index-q2t627d5.js → provider-openai/index.browser.js} +26 -9
- package/dist/provider-openai/index.browser.js.map +13 -0
- package/dist/provider-openai/index.d.ts +1 -3
- package/dist/provider-openai/index.d.ts.map +1 -1
- package/dist/provider-openai/index.js +108 -519
- package/dist/provider-openai/index.js.map +7 -5
- package/dist/provider-openai/registerOpenAi.d.ts +10 -0
- package/dist/provider-openai/registerOpenAi.d.ts.map +1 -0
- package/dist/provider-openai/registerOpenAiInline.browser.d.ts +8 -0
- package/dist/provider-openai/registerOpenAiInline.browser.d.ts.map +1 -0
- package/dist/provider-openai/registerOpenAiInline.d.ts +8 -0
- package/dist/provider-openai/registerOpenAiInline.d.ts.map +1 -0
- package/dist/provider-openai/registerOpenAiWorker.browser.d.ts +7 -0
- package/dist/provider-openai/registerOpenAiWorker.browser.d.ts.map +1 -0
- package/dist/{ggml/model/GgmlLocalModel.d.ts → provider-openai/registerOpenAiWorker.d.ts} +2 -2
- package/dist/provider-openai/registerOpenAiWorker.d.ts.map +1 -0
- package/dist/provider-openai/runtime.browser.d.ts +15 -0
- package/dist/provider-openai/runtime.browser.d.ts.map +1 -0
- package/dist/provider-openai/runtime.browser.js +647 -0
- package/dist/provider-openai/runtime.browser.js.map +25 -0
- package/dist/provider-openai/runtime.d.ts +16 -0
- package/dist/provider-openai/runtime.d.ts.map +1 -0
- package/dist/provider-openai/runtime.js +662 -0
- package/dist/provider-openai/runtime.js.map +25 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeProvider.d.ts +24 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeProvider.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeQueuedProvider.d.ts +16 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeQueuedProvider.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Client.d.ts +8 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Client.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_Constants.d.ts +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Constants.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Download.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Download.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceDetector.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceDetector.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceLandmarker.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceLandmarker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_GestureRecognizer.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_GestureRecognizer.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_HandLandmarker.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_HandLandmarker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageClassification.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageClassification.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageEmbedding.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageEmbedding.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageSegmentation.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageSegmentation.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_JobRunFns.d.ts +341 -409
- package/dist/provider-tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelInfo.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelInfo.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_ModelSchema.d.ts +40 -37
- package/dist/provider-tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelSearch.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelSearch.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ObjectDetection.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ObjectDetection.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_PoseLandmarker.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_PoseLandmarker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Runtime.d.ts +43 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Runtime.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextClassification.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextClassification.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextEmbedding.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextLanguageDetection.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextLanguageDetection.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Unload.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Unload.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/index.d.ts +1 -3
- package/dist/provider-tf-mediapipe/index.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/index.js +129 -0
- package/dist/provider-tf-mediapipe/index.js.map +13 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipe.d.ts +10 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipe.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeInline.d.ts +8 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeInline.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeWorker.d.ts +7 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeWorker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/runtime.d.ts +16 -0
- package/dist/provider-tf-mediapipe/runtime.d.ts.map +1 -0
- package/dist/{tf-mediapipe/index.js → provider-tf-mediapipe/runtime.js} +417 -380
- package/dist/provider-tf-mediapipe/runtime.js.map +33 -0
- package/package.json +74 -41
- package/dist/HFT_JobRunFns-8hcpea4c.js +0 -80
- package/dist/HFT_JobRunFns-8hcpea4c.js.map +0 -9
- package/dist/anthropic/AnthropicProvider.d.ts.map +0 -1
- package/dist/anthropic/Anthropic_Worker.d.ts.map +0 -1
- package/dist/anthropic/common/Anthropic_Constants.d.ts.map +0 -1
- package/dist/anthropic/common/Anthropic_JobRunFns.d.ts +0 -24
- package/dist/anthropic/common/Anthropic_JobRunFns.d.ts.map +0 -1
- package/dist/anthropic/common/Anthropic_ModelSchema.d.ts.map +0 -1
- package/dist/anthropic/index.d.ts.map +0 -1
- package/dist/anthropic/index.js.map +0 -11
- package/dist/ggml/model/GgmlLocalModel.d.ts.map +0 -1
- package/dist/google-gemini/Gemini_Worker.d.ts.map +0 -1
- package/dist/google-gemini/GoogleGeminiProvider.d.ts.map +0 -1
- package/dist/google-gemini/common/Gemini_Constants.d.ts.map +0 -1
- package/dist/google-gemini/common/Gemini_JobRunFns.d.ts +0 -25
- package/dist/google-gemini/common/Gemini_JobRunFns.d.ts.map +0 -1
- package/dist/google-gemini/common/Gemini_ModelSchema.d.ts.map +0 -1
- package/dist/google-gemini/index.d.ts.map +0 -1
- package/dist/google-gemini/index.js.map +0 -11
- package/dist/hf-transformers/HFT_Worker.d.ts +0 -7
- package/dist/hf-transformers/HFT_Worker.d.ts.map +0 -1
- package/dist/hf-transformers/HuggingFaceTransformersProvider.d.ts.map +0 -1
- package/dist/hf-transformers/common/HFT_Constants.d.ts.map +0 -1
- package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map +0 -1
- package/dist/hf-transformers/common/HFT_ModelSchema.d.ts.map +0 -1
- package/dist/hf-transformers/index.d.ts.map +0 -1
- package/dist/hf-transformers/index.js +0 -116
- package/dist/hf-transformers/index.js.map +0 -10
- package/dist/index-60ev6k93.js.map +0 -12
- package/dist/index-6j5pq722.js +0 -11
- package/dist/index-6j5pq722.js.map +0 -9
- package/dist/index-8651nz8y.js.map +0 -12
- package/dist/index-j4g81r4k.js.map +0 -10
- package/dist/index-pkd79j8b.js +0 -58
- package/dist/index-pkd79j8b.js.map +0 -10
- package/dist/index-q2t627d5.js.map +0 -12
- package/dist/index-tp5s7355.js +0 -77
- package/dist/index-tp5s7355.js.map +0 -12
- package/dist/index-v72vr07f.js +0 -81
- package/dist/index-v72vr07f.js.map +0 -12
- package/dist/index-wr57rwyx.js +0 -104
- package/dist/index-wr57rwyx.js.map +0 -12
- package/dist/index-zqq3kw0n.js +0 -171
- package/dist/index-zqq3kw0n.js.map +0 -11
- package/dist/index.browser-6j5pq722.js +0 -11
- package/dist/index.browser-6j5pq722.js.map +0 -9
- package/dist/index.d.ts +0 -33
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js +0 -316
- package/dist/index.js.map +0 -15
- package/dist/provider-hf-inference/HFI_Worker.d.ts +0 -7
- package/dist/provider-hf-inference/HFI_Worker.d.ts.map +0 -1
- package/dist/provider-llamacpp/LlamaCpp_Worker.d.ts +0 -7
- package/dist/provider-llamacpp/LlamaCpp_Worker.d.ts.map +0 -1
- package/dist/provider-ollama/Ollama_Worker.browser.d.ts +0 -7
- package/dist/provider-ollama/Ollama_Worker.browser.d.ts.map +0 -1
- package/dist/provider-ollama/Ollama_Worker.d.ts +0 -7
- package/dist/provider-ollama/Ollama_Worker.d.ts.map +0 -1
- package/dist/provider-openai/OpenAI_Worker.d.ts +0 -7
- package/dist/provider-openai/OpenAI_Worker.d.ts.map +0 -1
- package/dist/tf-mediapipe/TFMP_Worker.d.ts +0 -7
- package/dist/tf-mediapipe/TFMP_Worker.d.ts.map +0 -1
- package/dist/tf-mediapipe/TensorFlowMediaPipeProvider.d.ts +0 -41
- package/dist/tf-mediapipe/TensorFlowMediaPipeProvider.d.ts.map +0 -1
- package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map +0 -1
- package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +0 -1
- package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +0 -1
- package/dist/tf-mediapipe/index.d.ts.map +0 -1
- package/dist/tf-mediapipe/index.js.map +0 -14
- package/dist/types.d.ts +0 -7
- package/dist/types.d.ts.map +0 -1
- package/dist/web-browser/WebBrowserProvider.d.ts.map +0 -1
- package/dist/web-browser/WebBrowser_Worker.d.ts +0 -7
- package/dist/web-browser/WebBrowser_Worker.d.ts.map +0 -1
- package/dist/web-browser/common/WebBrowser_Constants.d.ts.map +0 -1
- package/dist/web-browser/common/WebBrowser_JobRunFns.d.ts +0 -20
- package/dist/web-browser/common/WebBrowser_JobRunFns.d.ts.map +0 -1
- package/dist/web-browser/common/WebBrowser_ModelSchema.d.ts.map +0 -1
- package/dist/web-browser/index.d.ts.map +0 -1
- package/dist/web-browser/index.js.map +0 -14
- /package/dist/{anthropic → provider-anthropic}/common/Anthropic_Constants.d.ts +0 -0
- /package/dist/{web-browser → provider-chrome}/common/WebBrowser_Constants.d.ts +0 -0
- /package/dist/{google-gemini → provider-gemini}/common/Gemini_Constants.d.ts +0 -0
|
@@ -1,26 +1,58 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
1
|
+
var __defProp = Object.defineProperty;
|
|
2
|
+
var __returnValue = (v) => v;
|
|
3
|
+
function __exportSetter(name, newValue) {
|
|
4
|
+
this[name] = __returnValue.bind(null, newValue);
|
|
5
|
+
}
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, {
|
|
9
|
+
get: all[name],
|
|
10
|
+
enumerable: true,
|
|
11
|
+
configurable: true,
|
|
12
|
+
set: __exportSetter.bind(all, name)
|
|
13
|
+
});
|
|
14
|
+
};
|
|
15
|
+
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
|
|
16
|
+
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
17
|
+
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
18
|
+
}) : x)(function(x) {
|
|
19
|
+
if (typeof require !== "undefined")
|
|
20
|
+
return require.apply(this, arguments);
|
|
21
|
+
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
22
|
+
});
|
|
7
23
|
|
|
8
|
-
// src/hf-transformers/common/
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
24
|
+
// src/provider-hf-transformers/common/HFT_Pipeline.ts
|
|
25
|
+
var exports_HFT_Pipeline = {};
|
|
26
|
+
__export(exports_HFT_Pipeline, {
|
|
27
|
+
setHftCacheDir: () => setHftCacheDir,
|
|
28
|
+
removeCachedPipeline: () => removeCachedPipeline,
|
|
29
|
+
loadTransformersSDK: () => loadTransformersSDK,
|
|
30
|
+
hasCachedPipeline: () => hasCachedPipeline,
|
|
31
|
+
getPipelineCacheKey: () => getPipelineCacheKey,
|
|
32
|
+
getPipeline: () => getPipeline,
|
|
33
|
+
clearPipelineCache: () => clearPipelineCache
|
|
34
|
+
});
|
|
35
|
+
import { getLogger } from "@workglow/util/worker";
|
|
36
|
+
function setHftCacheDir(dir) {
|
|
37
|
+
_cacheDir = dir;
|
|
38
|
+
if (_transformersSdk) {
|
|
39
|
+
_transformersSdk.env.cacheDir = dir;
|
|
40
|
+
}
|
|
41
|
+
}
|
|
12
42
|
async function loadTransformersSDK() {
|
|
13
43
|
if (!_transformersSdk) {
|
|
14
44
|
try {
|
|
15
45
|
_transformersSdk = await import("@huggingface/transformers");
|
|
16
46
|
_transformersSdk.env.fetch = abortableFetch;
|
|
47
|
+
if (_cacheDir) {
|
|
48
|
+
_transformersSdk.env.cacheDir = _cacheDir;
|
|
49
|
+
}
|
|
17
50
|
} catch {
|
|
18
51
|
throw new Error("@huggingface/transformers is required for HuggingFace Transformers tasks. Install it with: bun add @huggingface/transformers");
|
|
19
52
|
}
|
|
20
53
|
}
|
|
21
54
|
return _transformersSdk;
|
|
22
55
|
}
|
|
23
|
-
var modelAbortControllers = new Map;
|
|
24
56
|
function abortableFetch(url, options) {
|
|
25
57
|
let signal;
|
|
26
58
|
try {
|
|
@@ -34,17 +66,24 @@ function abortableFetch(url, options) {
|
|
|
34
66
|
} catch {}
|
|
35
67
|
return fetch(url, { ...options, ...signal ? { signal } : {} });
|
|
36
68
|
}
|
|
37
|
-
var pipelines = new Map;
|
|
38
|
-
var pipelineLoadPromises = new Map;
|
|
39
69
|
function clearPipelineCache() {
|
|
40
70
|
pipelines.clear();
|
|
41
71
|
}
|
|
72
|
+
function hasCachedPipeline(cacheKey) {
|
|
73
|
+
return pipelines.has(cacheKey);
|
|
74
|
+
}
|
|
75
|
+
function removeCachedPipeline(cacheKey) {
|
|
76
|
+
return pipelines.delete(cacheKey);
|
|
77
|
+
}
|
|
78
|
+
function isBrowserEnv() {
|
|
79
|
+
return typeof globalThis !== "undefined" && typeof globalThis.window !== "undefined";
|
|
80
|
+
}
|
|
42
81
|
function getPipelineCacheKey(model) {
|
|
43
82
|
const dtype = model.provider_config.dtype || "q8";
|
|
44
83
|
const device = model.provider_config.device || "";
|
|
45
84
|
return `${model.provider_config.model_path}:${model.provider_config.pipeline}:${dtype}:${device}`;
|
|
46
85
|
}
|
|
47
|
-
|
|
86
|
+
async function getPipeline(model, onProgress, options = {}, signal, progressScaleMax = 10) {
|
|
48
87
|
const cacheKey = getPipelineCacheKey(model);
|
|
49
88
|
if (pipelines.has(cacheKey)) {
|
|
50
89
|
getLogger().debug("HFT pipeline cache hit", { cacheKey });
|
|
@@ -62,13 +101,23 @@ var getPipeline = async (model, onProgress, options = {}, signal, progressScaleM
|
|
|
62
101
|
});
|
|
63
102
|
pipelineLoadPromises.set(cacheKey, loadPromise);
|
|
64
103
|
return loadPromise;
|
|
65
|
-
}
|
|
66
|
-
var doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKey, signal) => {
|
|
104
|
+
}
|
|
105
|
+
var _transformersSdk, _cacheDir, modelAbortControllers, pipelines, pipelineLoadPromises, doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKey, signal) => {
|
|
67
106
|
let lastProgressTime = 0;
|
|
68
107
|
let pendingProgress = null;
|
|
69
108
|
let throttleTimer = null;
|
|
70
109
|
const THROTTLE_MS = 160;
|
|
71
|
-
const
|
|
110
|
+
const buildProgressDetails = (file, fileProgress, filesMap) => {
|
|
111
|
+
const details = {
|
|
112
|
+
file,
|
|
113
|
+
progress: fileProgress
|
|
114
|
+
};
|
|
115
|
+
if (filesMap && Object.keys(filesMap).length > 0) {
|
|
116
|
+
details.files = filesMap;
|
|
117
|
+
}
|
|
118
|
+
return details;
|
|
119
|
+
};
|
|
120
|
+
const sendProgress = (progress, file, fileProgress, filesMap) => {
|
|
72
121
|
const now = Date.now();
|
|
73
122
|
const timeSinceLastEvent = now - lastProgressTime;
|
|
74
123
|
const isFirst = lastProgressTime === 0;
|
|
@@ -79,21 +128,19 @@ var doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKe
|
|
|
79
128
|
throttleTimer = null;
|
|
80
129
|
}
|
|
81
130
|
pendingProgress = null;
|
|
82
|
-
onProgress(Math.round(progress), "Downloading model",
|
|
131
|
+
onProgress(Math.round(progress), "Downloading model", buildProgressDetails(file, fileProgress, filesMap));
|
|
83
132
|
lastProgressTime = now;
|
|
84
133
|
return;
|
|
85
134
|
}
|
|
86
135
|
if (timeSinceLastEvent < THROTTLE_MS) {
|
|
87
|
-
pendingProgress = { progress, file, fileProgress };
|
|
136
|
+
pendingProgress = { progress, file, fileProgress, filesMap };
|
|
88
137
|
if (!throttleTimer) {
|
|
89
138
|
const timeRemaining = Math.max(1, THROTTLE_MS - timeSinceLastEvent);
|
|
90
139
|
throttleTimer = setTimeout(() => {
|
|
91
140
|
throttleTimer = null;
|
|
92
141
|
if (pendingProgress) {
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
progress: pendingProgress.fileProgress
|
|
96
|
-
});
|
|
142
|
+
const p = pendingProgress;
|
|
143
|
+
onProgress(Math.round(p.progress), "Downloading model", buildProgressDetails(p.file, p.fileProgress, p.filesMap));
|
|
97
144
|
lastProgressTime = Date.now();
|
|
98
145
|
pendingProgress = null;
|
|
99
146
|
}
|
|
@@ -101,7 +148,7 @@ var doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKe
|
|
|
101
148
|
}
|
|
102
149
|
return;
|
|
103
150
|
}
|
|
104
|
-
onProgress(Math.round(progress), "Downloading model",
|
|
151
|
+
onProgress(Math.round(progress), "Downloading model", buildProgressDetails(file, fileProgress, filesMap));
|
|
105
152
|
lastProgressTime = now;
|
|
106
153
|
pendingProgress = null;
|
|
107
154
|
};
|
|
@@ -141,13 +188,19 @@ var doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKe
|
|
|
141
188
|
}
|
|
142
189
|
}
|
|
143
190
|
}
|
|
144
|
-
sendProgress(scaledProgress, activeFile, activeFileProgress);
|
|
191
|
+
sendProgress(scaledProgress, activeFile, activeFileProgress, files);
|
|
145
192
|
}
|
|
146
193
|
};
|
|
194
|
+
let device = model.provider_config.device;
|
|
195
|
+
if (!isBrowserEnv()) {
|
|
196
|
+
if (device === "wasm" || device === "webgpu") {
|
|
197
|
+
device = undefined;
|
|
198
|
+
}
|
|
199
|
+
}
|
|
147
200
|
const pipelineOptions = {
|
|
148
201
|
dtype: model.provider_config.dtype || "q8",
|
|
149
202
|
...model.provider_config.use_external_data_format ? { useExternalDataFormat: model.provider_config.use_external_data_format } : {},
|
|
150
|
-
...
|
|
203
|
+
...device ? { device } : {},
|
|
151
204
|
...options,
|
|
152
205
|
progress_callback: progressCallback
|
|
153
206
|
};
|
|
@@ -168,10 +221,7 @@ var doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKe
|
|
|
168
221
|
}
|
|
169
222
|
const finalPending = pendingProgress;
|
|
170
223
|
if (finalPending) {
|
|
171
|
-
onProgress(Math.round(finalPending.progress), "Downloading model",
|
|
172
|
-
file: finalPending.file,
|
|
173
|
-
progress: finalPending.fileProgress
|
|
174
|
-
});
|
|
224
|
+
onProgress(Math.round(finalPending.progress), "Downloading model", buildProgressDetails(finalPending.file, finalPending.fileProgress, finalPending.filesMap));
|
|
175
225
|
pendingProgress = null;
|
|
176
226
|
}
|
|
177
227
|
if (abortSignal?.aborted) {
|
|
@@ -191,234 +241,1066 @@ var doGetPipeline = async (model, onProgress, options, progressScaleMax, cacheKe
|
|
|
191
241
|
modelAbortControllers.delete(modelPath);
|
|
192
242
|
}
|
|
193
243
|
};
|
|
194
|
-
var
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
244
|
+
var init_HFT_Pipeline = __esm(() => {
|
|
245
|
+
modelAbortControllers = new Map;
|
|
246
|
+
pipelines = new Map;
|
|
247
|
+
pipelineLoadPromises = new Map;
|
|
248
|
+
});
|
|
249
|
+
|
|
250
|
+
// src/provider-hf-transformers/common/HFT_Constants.ts
|
|
251
|
+
var HF_TRANSFORMERS_ONNX = "HF_TRANSFORMERS_ONNX";
|
|
252
|
+
var HTF_CACHE_NAME = "transformers-cache";
|
|
253
|
+
var QuantizationDataType = {
|
|
254
|
+
auto: "auto",
|
|
255
|
+
fp32: "fp32",
|
|
256
|
+
fp16: "fp16",
|
|
257
|
+
q8: "q8",
|
|
258
|
+
int8: "int8",
|
|
259
|
+
uint8: "uint8",
|
|
260
|
+
q4: "q4",
|
|
261
|
+
bnb4: "bnb4",
|
|
262
|
+
q4f16: "q4f16"
|
|
203
263
|
};
|
|
204
|
-
var
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
};
|
|
264
|
+
var TextPipelineUseCase = {
|
|
265
|
+
"fill-mask": "fill-mask",
|
|
266
|
+
"token-classification": "token-classification",
|
|
267
|
+
"text-generation": "text-generation",
|
|
268
|
+
"text2text-generation": "text2text-generation",
|
|
269
|
+
"text-classification": "text-classification",
|
|
270
|
+
summarization: "summarization",
|
|
271
|
+
translation: "translation",
|
|
272
|
+
"feature-extraction": "feature-extraction",
|
|
273
|
+
"zero-shot-classification": "zero-shot-classification",
|
|
274
|
+
"question-answering": "question-answering"
|
|
216
275
|
};
|
|
217
|
-
var
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
276
|
+
var VisionPipelineUseCase = {
|
|
277
|
+
"background-removal": "background-removal",
|
|
278
|
+
"image-segmentation": "image-segmentation",
|
|
279
|
+
"depth-estimation": "depth-estimation",
|
|
280
|
+
"image-classification": "image-classification",
|
|
281
|
+
"image-to-image": "image-to-image",
|
|
282
|
+
"image-to-text": "image-to-text",
|
|
283
|
+
"object-detection": "object-detection",
|
|
284
|
+
"image-feature-extraction": "image-feature-extraction"
|
|
285
|
+
};
|
|
286
|
+
var AudioPipelineUseCase = {
|
|
287
|
+
"audio-classification": "audio-classification",
|
|
288
|
+
"automatic-speech-recognition": "automatic-speech-recognition",
|
|
289
|
+
"text-to-speech": "text-to-speech"
|
|
290
|
+
};
|
|
291
|
+
var MultimodalPipelineUseCase = {
|
|
292
|
+
"document-question-answering": "document-question-answering",
|
|
293
|
+
"image-to-text": "image-to-text",
|
|
294
|
+
"zero-shot-audio-classification": "zero-shot-audio-classification",
|
|
295
|
+
"zero-shot-image-classification": "zero-shot-image-classification",
|
|
296
|
+
"zero-shot-object-detection": "zero-shot-object-detection"
|
|
297
|
+
};
|
|
298
|
+
var PipelineUseCase = {
|
|
299
|
+
...TextPipelineUseCase,
|
|
300
|
+
...VisionPipelineUseCase,
|
|
301
|
+
...AudioPipelineUseCase,
|
|
302
|
+
...MultimodalPipelineUseCase
|
|
303
|
+
};
|
|
304
|
+
// src/provider-hf-transformers/common/HFT_ModelSchema.ts
|
|
305
|
+
import { ModelConfigSchema, ModelRecordSchema } from "@workglow/ai/worker";
|
|
306
|
+
var HfTransformersOnnxModelSchema = {
|
|
307
|
+
type: "object",
|
|
308
|
+
properties: {
|
|
309
|
+
provider: {
|
|
310
|
+
const: HF_TRANSFORMERS_ONNX,
|
|
311
|
+
description: "Discriminator: ONNX runtime backend."
|
|
312
|
+
},
|
|
313
|
+
provider_config: {
|
|
314
|
+
type: "object",
|
|
315
|
+
description: "ONNX runtime-specific options.",
|
|
316
|
+
properties: {
|
|
317
|
+
pipeline: {
|
|
318
|
+
type: "string",
|
|
319
|
+
enum: Object.values(PipelineUseCase),
|
|
320
|
+
description: "Pipeline type for the ONNX model.",
|
|
321
|
+
default: "text-generation"
|
|
322
|
+
},
|
|
323
|
+
model_path: {
|
|
324
|
+
type: "string",
|
|
325
|
+
description: "Filesystem path or URI for the ONNX model."
|
|
326
|
+
},
|
|
327
|
+
dtype: {
|
|
328
|
+
type: "string",
|
|
329
|
+
enum: Object.values(QuantizationDataType),
|
|
330
|
+
description: "Data type for the ONNX model.",
|
|
331
|
+
default: "auto"
|
|
332
|
+
},
|
|
333
|
+
device: {
|
|
334
|
+
type: "string",
|
|
335
|
+
enum: ["cpu", "gpu", "webgpu", "wasm", "metal"],
|
|
336
|
+
description: "High-level device selection.",
|
|
337
|
+
default: "webgpu"
|
|
338
|
+
},
|
|
339
|
+
execution_providers: {
|
|
340
|
+
type: "array",
|
|
341
|
+
items: { type: "string" },
|
|
342
|
+
description: "Raw ONNX Runtime execution provider identifiers.",
|
|
343
|
+
"x-ui-hidden": true
|
|
344
|
+
},
|
|
345
|
+
intra_op_num_threads: {
|
|
346
|
+
type: "integer",
|
|
347
|
+
minimum: 1
|
|
348
|
+
},
|
|
349
|
+
inter_op_num_threads: {
|
|
350
|
+
type: "integer",
|
|
351
|
+
minimum: 1
|
|
352
|
+
},
|
|
353
|
+
use_external_data_format: {
|
|
354
|
+
type: "boolean",
|
|
355
|
+
description: "Whether the model uses external data format."
|
|
356
|
+
},
|
|
357
|
+
native_dimensions: {
|
|
358
|
+
type: "integer",
|
|
359
|
+
description: "The native dimensions of the model."
|
|
360
|
+
},
|
|
361
|
+
pooling: {
|
|
362
|
+
type: "string",
|
|
363
|
+
enum: ["mean", "last_token", "cls"],
|
|
364
|
+
description: "The pooling strategy to use for the model.",
|
|
365
|
+
default: "mean"
|
|
366
|
+
},
|
|
367
|
+
normalize: {
|
|
368
|
+
type: "boolean",
|
|
369
|
+
description: "Whether the model uses normalization.",
|
|
370
|
+
default: true
|
|
371
|
+
},
|
|
372
|
+
language_style: {
|
|
373
|
+
type: "string",
|
|
374
|
+
description: "The language style of the model."
|
|
375
|
+
},
|
|
376
|
+
mrl: {
|
|
377
|
+
type: "boolean",
|
|
378
|
+
description: "Whether the model uses matryoshka.",
|
|
379
|
+
default: false
|
|
238
380
|
}
|
|
381
|
+
},
|
|
382
|
+
required: ["model_path", "pipeline"],
|
|
383
|
+
additionalProperties: false,
|
|
384
|
+
if: {
|
|
385
|
+
properties: {
|
|
386
|
+
pipeline: {
|
|
387
|
+
const: "feature-extraction"
|
|
388
|
+
}
|
|
389
|
+
}
|
|
390
|
+
},
|
|
391
|
+
then: {
|
|
392
|
+
required: ["native_dimensions"]
|
|
239
393
|
}
|
|
240
|
-
} catch (error) {
|
|
241
|
-
console.error(`Failed to delete cache entry: ${request.url}`, error);
|
|
242
394
|
}
|
|
243
|
-
}
|
|
395
|
+
},
|
|
396
|
+
required: ["provider", "provider_config"],
|
|
397
|
+
additionalProperties: true
|
|
244
398
|
};
|
|
245
|
-
var
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
inputLength: Array.isArray(input.text) ? input.text.length : input.text?.length
|
|
254
|
-
});
|
|
255
|
-
const hfVector = await generateEmbedding(input.text, {
|
|
256
|
-
pooling: model?.provider_config.pooling || "mean",
|
|
257
|
-
normalize: model?.provider_config.normalize
|
|
258
|
-
});
|
|
259
|
-
const isArrayInput = Array.isArray(input.text);
|
|
260
|
-
const embeddingDim = model?.provider_config.native_dimensions;
|
|
261
|
-
if (isArrayInput && hfVector.dims.length > 1) {
|
|
262
|
-
const [numTexts, vectorDim] = hfVector.dims;
|
|
263
|
-
if (numTexts !== input.text.length) {
|
|
264
|
-
throw new Error(`HuggingFace Embedding tensor batch size does not match input array length: ${numTexts} != ${input.text.length}`);
|
|
265
|
-
}
|
|
266
|
-
if (vectorDim !== embeddingDim) {
|
|
267
|
-
throw new Error(`HuggingFace Embedding vector dimension does not match model dimensions: ${vectorDim} != ${embeddingDim}`);
|
|
268
|
-
}
|
|
269
|
-
const vectors = Array.from({ length: numTexts }, (_, i) => hfVector[i].data.slice());
|
|
270
|
-
logger.timeEnd(timerLabel, { batchSize: numTexts, dimensions: vectorDim });
|
|
271
|
-
return { vector: vectors };
|
|
272
|
-
}
|
|
273
|
-
if (hfVector.size !== embeddingDim) {
|
|
274
|
-
logger.timeEnd(timerLabel, { status: "error", reason: "dimension mismatch" });
|
|
275
|
-
console.warn(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${embeddingDim}`, input, hfVector);
|
|
276
|
-
throw new Error(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${embeddingDim}`);
|
|
277
|
-
}
|
|
278
|
-
logger.timeEnd(timerLabel, { dimensions: hfVector.size });
|
|
279
|
-
return { vector: hfVector.data };
|
|
399
|
+
var HfTransformersOnnxModelRecordSchema = {
|
|
400
|
+
type: "object",
|
|
401
|
+
properties: {
|
|
402
|
+
...ModelRecordSchema.properties,
|
|
403
|
+
...HfTransformersOnnxModelSchema.properties
|
|
404
|
+
},
|
|
405
|
+
required: [...ModelRecordSchema.required, ...HfTransformersOnnxModelSchema.required],
|
|
406
|
+
additionalProperties: false
|
|
280
407
|
};
|
|
281
|
-
var
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
408
|
+
var HfTransformersOnnxModelConfigSchema = {
|
|
409
|
+
type: "object",
|
|
410
|
+
properties: {
|
|
411
|
+
...ModelConfigSchema.properties,
|
|
412
|
+
...HfTransformersOnnxModelSchema.properties
|
|
413
|
+
},
|
|
414
|
+
required: [...ModelConfigSchema.required, ...HfTransformersOnnxModelSchema.required],
|
|
415
|
+
additionalProperties: false
|
|
416
|
+
};
|
|
417
|
+
// src/provider-hf-transformers/common/HFT_OnnxDtypes.ts
|
|
418
|
+
var ONNX_QUANTIZATION_SUFFIX_MAPPING = {
|
|
419
|
+
fp32: "",
|
|
420
|
+
fp16: "_fp16",
|
|
421
|
+
int8: "_int8",
|
|
422
|
+
uint8: "_uint8",
|
|
423
|
+
q8: "_quantized",
|
|
424
|
+
q4: "_q4",
|
|
425
|
+
q4f16: "_q4f16",
|
|
426
|
+
bnb4: "_bnb4"
|
|
427
|
+
};
|
|
428
|
+
var SUFFIXES_LONGEST_FIRST = Object.entries(ONNX_QUANTIZATION_SUFFIX_MAPPING).filter(([, suffix]) => suffix !== "").sort((a, b) => b[1].length - a[1].length);
|
|
429
|
+
function parseOnnxQuantizations(params) {
|
|
430
|
+
const subfolder = params.subfolder ?? "onnx";
|
|
431
|
+
const prefix = subfolder + "/";
|
|
432
|
+
const stems = [];
|
|
433
|
+
for (const fp of params.filePaths) {
|
|
434
|
+
if (!fp.startsWith(prefix))
|
|
435
|
+
continue;
|
|
436
|
+
if (!fp.endsWith(".onnx"))
|
|
437
|
+
continue;
|
|
438
|
+
if (fp.endsWith(".onnx_data"))
|
|
439
|
+
continue;
|
|
440
|
+
stems.push(fp.slice(prefix.length, -".onnx".length));
|
|
441
|
+
}
|
|
442
|
+
if (stems.length === 0)
|
|
443
|
+
return [];
|
|
444
|
+
const parsed = [];
|
|
445
|
+
for (const stem of stems) {
|
|
446
|
+
let matched = false;
|
|
447
|
+
for (const [dtype, suffix] of SUFFIXES_LONGEST_FIRST) {
|
|
448
|
+
if (stem.endsWith(suffix)) {
|
|
449
|
+
parsed.push({ baseName: stem.slice(0, -suffix.length), dtype });
|
|
450
|
+
matched = true;
|
|
451
|
+
break;
|
|
452
|
+
}
|
|
286
453
|
}
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
if (isArrayInput) {
|
|
290
|
-
const results = Array.isArray(result2) && Array.isArray(result2[0]?.labels) ? result2 : [result2];
|
|
291
|
-
return {
|
|
292
|
-
categories: results.map((r) => r.labels.map((label, idx) => ({
|
|
293
|
-
label,
|
|
294
|
-
score: r.scores[idx]
|
|
295
|
-
})))
|
|
296
|
-
};
|
|
454
|
+
if (!matched) {
|
|
455
|
+
parsed.push({ baseName: stem, dtype: "fp32" });
|
|
297
456
|
}
|
|
298
|
-
return {
|
|
299
|
-
categories: result2.labels.map((label, idx) => ({
|
|
300
|
-
label,
|
|
301
|
-
score: result2.scores[idx]
|
|
302
|
-
}))
|
|
303
|
-
};
|
|
304
|
-
}
|
|
305
|
-
const TextClassification = await getPipeline(model, onProgress, {}, signal);
|
|
306
|
-
const result = await TextClassification(input.text, {
|
|
307
|
-
top_k: input.maxCategories || undefined
|
|
308
|
-
});
|
|
309
|
-
if (isArrayInput) {
|
|
310
|
-
return {
|
|
311
|
-
categories: result.map((perInput) => {
|
|
312
|
-
const items = Array.isArray(perInput) ? perInput : [perInput];
|
|
313
|
-
return items.map((category) => ({
|
|
314
|
-
label: category.label,
|
|
315
|
-
score: category.score
|
|
316
|
-
}));
|
|
317
|
-
})
|
|
318
|
-
};
|
|
319
|
-
}
|
|
320
|
-
if (Array.isArray(result[0])) {
|
|
321
|
-
return {
|
|
322
|
-
categories: result[0].map((category) => ({
|
|
323
|
-
label: category.label,
|
|
324
|
-
score: category.score
|
|
325
|
-
}))
|
|
326
|
-
};
|
|
327
|
-
}
|
|
328
|
-
return {
|
|
329
|
-
categories: result.map((category) => ({
|
|
330
|
-
label: category.label,
|
|
331
|
-
score: category.score
|
|
332
|
-
}))
|
|
333
|
-
};
|
|
334
|
-
};
|
|
335
|
-
var HFT_TextLanguageDetection = async (input, model, onProgress, signal) => {
|
|
336
|
-
const isArrayInput = Array.isArray(input.text);
|
|
337
|
-
const TextClassification = await getPipeline(model, onProgress, {}, signal);
|
|
338
|
-
const result = await TextClassification(input.text, {
|
|
339
|
-
top_k: input.maxLanguages || undefined
|
|
340
|
-
});
|
|
341
|
-
if (isArrayInput) {
|
|
342
|
-
return {
|
|
343
|
-
languages: result.map((perInput) => {
|
|
344
|
-
const items = Array.isArray(perInput) ? perInput : [perInput];
|
|
345
|
-
return items.map((category) => ({
|
|
346
|
-
language: category.label,
|
|
347
|
-
score: category.score
|
|
348
|
-
}));
|
|
349
|
-
})
|
|
350
|
-
};
|
|
351
457
|
}
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
458
|
+
const allBaseNames = new Set(parsed.map((p) => p.baseName));
|
|
459
|
+
const byDtype = new Map;
|
|
460
|
+
for (const { baseName, dtype } of parsed) {
|
|
461
|
+
let set = byDtype.get(dtype);
|
|
462
|
+
if (!set) {
|
|
463
|
+
set = new Set;
|
|
464
|
+
byDtype.set(dtype, set);
|
|
465
|
+
}
|
|
466
|
+
set.add(baseName);
|
|
359
467
|
}
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
}))
|
|
365
|
-
};
|
|
366
|
-
};
|
|
367
|
-
var HFT_TextNamedEntityRecognition = async (input, model, onProgress, signal) => {
|
|
368
|
-
const isArrayInput = Array.isArray(input.text);
|
|
369
|
-
const textNamedEntityRecognition = await getPipeline(model, onProgress, {}, signal);
|
|
370
|
-
const results = await textNamedEntityRecognition(input.text, {
|
|
371
|
-
ignore_labels: input.blockList
|
|
468
|
+
const allDtypes = Object.keys(ONNX_QUANTIZATION_SUFFIX_MAPPING);
|
|
469
|
+
return allDtypes.filter((dtype) => {
|
|
470
|
+
const set = byDtype.get(dtype);
|
|
471
|
+
return set !== undefined && set.size === allBaseNames.size;
|
|
372
472
|
});
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
return {
|
|
392
|
-
entities: entities.map((entity) => ({
|
|
393
|
-
entity: entity.entity,
|
|
394
|
-
score: entity.score,
|
|
395
|
-
word: entity.word
|
|
396
|
-
}))
|
|
397
|
-
};
|
|
398
|
-
};
|
|
399
|
-
var HFT_TextFillMask = async (input, model, onProgress, signal) => {
|
|
400
|
-
const isArrayInput = Array.isArray(input.text);
|
|
401
|
-
const unmasker = await getPipeline(model, onProgress, {}, signal);
|
|
402
|
-
const results = await unmasker(input.text);
|
|
403
|
-
if (isArrayInput) {
|
|
404
|
-
return {
|
|
405
|
-
predictions: results.map((perInput) => {
|
|
406
|
-
const items = Array.isArray(perInput) ? perInput : [perInput];
|
|
407
|
-
return items.map((prediction) => ({
|
|
408
|
-
entity: prediction.token_str,
|
|
409
|
-
score: prediction.score,
|
|
410
|
-
sequence: prediction.sequence
|
|
411
|
-
}));
|
|
412
|
-
})
|
|
413
|
-
};
|
|
473
|
+
}
|
|
474
|
+
// src/provider-hf-transformers/common/HFT_ToolMarkup.ts
|
|
475
|
+
// Extracts tool/function calls from a model's raw text response.
// Two strategies, tried in order:
//   1) explicit <tool_call>…</tool_call> tags containing JSON;
//   2) bare balanced JSON objects embedded in the text that look like calls
//      ({name, arguments|parameters} or OpenAI-style {function: {name, arguments}}).
// Returns { text, toolCalls } where `text` is the response with matched call
// markup removed and `toolCalls` is a list of { id, name, input }.
function parseToolCallsFromText(responseText) {
  const toolCalls = [];
  let callIndex = 0;
  let cleanedText = responseText;
  // Strategy 1: <tool_call> tags. Non-greedy so each tag matches separately.
  const toolCallTagRegex = /<tool_call>([\s\S]*?)<\/tool_call>/g;
  let tagMatch;
  while ((tagMatch = toolCallTagRegex.exec(responseText)) !== null) {
    try {
      const parsed = JSON.parse(tagMatch[1].trim());
      const id = `call_${callIndex++}`;
      toolCalls.push({
        id,
        // Accept both {name, arguments} and {function: {name, arguments}} shapes.
        name: parsed.name ?? parsed.function?.name ?? "",
        input: parsed.arguments ?? parsed.function?.arguments ?? parsed.parameters ?? {}
      });
    } catch {}
    // Unparseable tag contents are skipped silently (best-effort extraction).
  }
  if (toolCalls.length > 0) {
    // Strip every tag block (even unparseable ones) from the surfaced text.
    cleanedText = responseText.replace(/<tool_call>[\s\S]*?<\/tool_call>/g, "").trim();
    return { text: cleanedText, toolCalls };
  }
  // Strategy 2: scan for top-level balanced {...} blocks, tracking string
  // literals and backslash escapes so braces inside strings are ignored.
  const jsonCandidates = [];
  (function collectBalancedJsonBlocks(source) {
    const length = source.length;
    let i = 0;
    while (i < length) {
      if (source[i] !== "{") {
        i++;
        continue;
      }
      let depth = 1;
      let j = i + 1;
      let inString = false;
      let escape = false;
      while (j < length && depth > 0) {
        const ch = source[j];
        if (inString) {
          if (escape) {
            escape = false;
          } else if (ch === "\\") {
            escape = true;
          } else if (ch === '"') {
            inString = false;
          }
        } else {
          if (ch === '"') {
            inString = true;
          } else if (ch === "{") {
            depth++;
          } else if (ch === "}") {
            depth--;
          }
        }
        j++;
      }
      if (depth === 0) {
        jsonCandidates.push({ text: source.slice(i, j), start: i, end: j });
        i = j;
      } else {
        // Unbalanced through end of input: nothing can close, stop scanning.
        break;
      }
    }
  })(responseText);
  const matchedRanges = [];
  for (const candidate of jsonCandidates) {
    try {
      const parsed = JSON.parse(candidate.text);
      if (parsed.name && (parsed.arguments !== undefined || parsed.parameters !== undefined)) {
        const id = `call_${callIndex++}`;
        toolCalls.push({
          id,
          name: parsed.name,
          input: parsed.arguments ?? parsed.parameters ?? {}
        });
        matchedRanges.push({ start: candidate.start, end: candidate.end });
      } else if (parsed.function?.name) {
        // OpenAI-style shape; `arguments` may itself be a JSON-encoded string.
        let functionArgs = parsed.function.arguments ?? {};
        if (typeof functionArgs === "string") {
          try {
            functionArgs = JSON.parse(functionArgs);
          } catch (innerError) {
            console.warn("Failed to parse tool call function.arguments as JSON", innerError);
            functionArgs = {};
          }
        }
        const id = `call_${callIndex++}`;
        toolCalls.push({
          id,
          name: parsed.function.name,
          input: functionArgs ?? {}
        });
        matchedRanges.push({ start: candidate.start, end: candidate.end });
      }
    } catch {}
  }
  if (toolCalls.length > 0) {
    // Splice out only the ranges that produced calls, keeping all other text.
    // matchedRanges is in ascending order because jsonCandidates was built by
    // a single left-to-right scan.
    let result = "";
    let lastIndex = 0;
    for (const range of matchedRanges) {
      result += responseText.slice(lastIndex, range.start);
      lastIndex = range.end;
    }
    result += responseText.slice(lastIndex);
    cleanedText = result.trim();
  }
  return { text: cleanedText, toolCalls };
}
|
|
582
|
+
// Streaming filter that suppresses <tool_call>…</tool_call> spans from an
// incremental token stream, forwarding only the surrounding plain text via
// `emit`. Tags split across token boundaries are handled by buffering any
// trailing partial "<tool_call" prefix until it can be confirmed or denied.
// Returns { feed, flush }: feed(token) processes one chunk; flush() emits any
// safe buffered text and resets state (text buffered inside an unterminated
// tag is intentionally discarded).
function createToolCallMarkupFilter(emit) {
  const OPEN_TAG = "<tool_call>";
  const CLOSE_TAG = "</tool_call>";
  // "text": emitting normally; "tag": inside a tool_call span, swallowing.
  let state = "text";
  // Buffered characters not yet emitted (possible partial open tag, or
  // swallowed tag body while in "tag" state).
  let pending = "";
  function feed(token) {
    if (state === "tag") {
      pending += token;
      const closeIdx = pending.indexOf(CLOSE_TAG);
      if (closeIdx !== -1) {
        // Tag closed: drop its contents and re-feed whatever followed it.
        const afterClose = pending.slice(closeIdx + CLOSE_TAG.length);
        pending = "";
        state = "text";
        if (afterClose.length > 0) {
          feed(afterClose);
        }
      }
      return;
    }
    const combined = pending + token;
    const openIdx = combined.indexOf(OPEN_TAG);
    if (openIdx !== -1) {
      // Full open tag present: emit preceding text, switch to swallowing,
      // and recursively process the remainder after the tag.
      const before = combined.slice(0, openIdx);
      if (before.length > 0) {
        emit(before);
      }
      pending = "";
      state = "tag";
      const afterOpen = combined.slice(openIdx + OPEN_TAG.length);
      if (afterOpen.length > 0) {
        feed(afterOpen);
      }
      return;
    }
    // No full tag; check whether the chunk ends with a partial open-tag
    // prefix ("<", "<t", … up to OPEN_TAG minus its last char) that must be
    // held back until the next token disambiguates it.
    let prefixLen = 0;
    for (let len = Math.min(combined.length, OPEN_TAG.length - 1);len >= 1; len--) {
      if (combined.endsWith(OPEN_TAG.slice(0, len))) {
        prefixLen = len;
        break;
      }
    }
    if (prefixLen > 0) {
      const safe = combined.slice(0, combined.length - prefixLen);
      if (safe.length > 0) {
        emit(safe);
      }
      pending = combined.slice(combined.length - prefixLen);
    } else {
      if (combined.length > 0) {
        emit(combined);
      }
      pending = "";
    }
  }
  function flush() {
    // Only emit leftovers when not inside an unterminated tag.
    if (pending.length > 0 && state === "text") {
      emit(pending);
      pending = "";
    }
    pending = "";
    state = "text";
  }
  return { feed, flush };
}
|
|
646
|
+
// src/provider-hf-transformers/common/HFT_InlineLifecycle.ts
|
|
647
|
+
// Clears the HF Transformers inline pipeline cache. Lazily loads the
// HFT_Pipeline module via the bundler's deferred-init pattern so the
// pipeline machinery is only pulled in when actually needed.
async function clearHftInlinePipelineCache() {
  const { clearPipelineCache: clearPipelineCache2 } = await Promise.resolve().then(() => (init_HFT_Pipeline(), exports_HFT_Pipeline));
  clearPipelineCache2();
}
|
|
651
|
+
|
|
652
|
+
// src/common/PipelineTaskMapping.ts
|
|
653
|
+
// Maps each internal task type to the Hugging Face pipeline tags that can
// serve it. Several pipelines appear under multiple tasks (e.g.
// "text-classification" backs both classification and language detection).
// NOTE(review): "sentence-similarity" listed under TextSummaryTask looks
// unusual — confirm it is intended rather than a leftover from another task.
var TASK_TO_PIPELINES = {
  TextEmbeddingTask: ["feature-extraction"],
  TextGenerationTask: ["text-generation"],
  TextSummaryTask: ["sentence-similarity", "summarization"],
  TextTranslationTask: ["translation"],
  TextClassificationTask: ["text-classification", "zero-shot-classification"],
  TextQuestionAnswerTask: ["question-answering"],
  TextFillMaskTask: ["fill-mask"],
  TextLanguageDetectionTask: ["text-classification"],
  TextNamedEntityRecognitionTask: ["token-classification"],
  TokenClassificationTask: ["token-classification"],
  ImageClassificationTask: ["image-classification", "zero-shot-image-classification"],
  ImageEmbeddingTask: ["image-feature-extraction"],
  ImageSegmentationTask: ["image-segmentation"],
  ImageToImageTask: ["image-to-image"],
  ImageToTextTask: ["image-to-text"],
  ObjectDetectionTask: ["object-detection", "zero-shot-object-detection"],
  DepthEstimationTask: ["depth-estimation"],
  AudioClassificationTask: ["audio-classification"],
  SpeechRecognitionTask: ["automatic-speech-recognition"]
};
|
|
674
|
+
// Reverse lookup into TASK_TO_PIPELINES: returns every task type that the
// given Hugging Face pipeline tag can serve.
function pipelineToTaskTypes(pipeline) {
  const tasks = [];
  for (const [task, supportedPipelines] of Object.entries(TASK_TO_PIPELINES)) {
    if (supportedPipelines.includes(pipeline)) {
      tasks.push(task);
    }
  }
  return tasks;
}
|
|
677
|
+
|
|
678
|
+
// src/common/HfModelSearch.ts
|
|
679
|
+
// Base URL for the public Hugging Face REST API.
var HF_API_BASE = "https://huggingface.co/api";
|
|
680
|
+
// Renders a download count as a compact human-readable string:
// >= 1,000,000 -> "N.NM", >= 1,000 -> "N.Nk", otherwise the plain number.
function formatDownloads(n) {
  const MILLION = 1e6;
  const THOUSAND = 1000;
  if (n >= MILLION) {
    return (n / MILLION).toFixed(1) + "M";
  }
  if (n >= THOUSAND) {
    return (n / THOUSAND).toFixed(1) + "k";
  }
  return `${n}`;
}
|
|
687
|
+
// Builds the provider_config for a Hugging Face search result entry.
// ONNX and llama.cpp providers address models by path; all others by name.
function mapHfProviderConfig(entry, provider) {
  switch (provider) {
    case "HF_TRANSFORMERS_ONNX": {
      const base = { model_path: entry.id };
      // Only include `pipeline` when the entry actually carries a tag.
      return entry.pipeline_tag ? { ...base, pipeline: entry.pipeline_tag } : base;
    }
    case "LOCAL_LLAMACPP":
      return { model_path: entry.id };
    default:
      return { model_name: entry.id };
  }
}
|
|
700
|
+
// Converts one Hugging Face API model entry into the search-result shape
// used by the UI (label/description) plus an installable model record.
function mapHfModelResult(entry, provider) {
  const badgeParts = [entry.pipeline_tag, entry.library_name].filter(Boolean);
  const badges = badgeParts.join(" | ");
  const downloadsLabel = `${formatDownloads(entry.downloads)} downloads`;
  const record = {
    model_id: entry.id,
    provider,
    // Use the repo name (after the org prefix) as the display title.
    title: entry.id.split("/").pop() ?? entry.id,
    description: [entry.pipeline_tag, downloadsLabel].filter(Boolean).join(" — "),
    tasks: entry.pipeline_tag ? pipelineToTaskTypes(entry.pipeline_tag) : [],
    provider_config: mapHfProviderConfig(entry, provider),
    metadata: {}
  };
  return {
    id: entry.id,
    label: badges ? `${entry.id} ${badges}` : entry.id,
    description: downloadsLabel,
    record,
    raw: entry
  };
}
|
|
718
|
+
// Queries the Hugging Face model search API, sorted by downloads descending,
// always expanding `pipeline_tag` plus any caller-requested fields.
// Throws on a non-2xx response; otherwise resolves with the parsed JSON body.
async function searchHfModels(query, extraParams, expandFields, signal) {
  const params = new URLSearchParams({
    search: query,
    limit: "500",
    sort: "downloads",
    direction: "-1",
    ...extraParams
  });
  // `expand[]` may repeat; pipeline_tag is always requested.
  const allExpand = ["pipeline_tag", ...expandFields || []];
  for (const field of allExpand) {
    params.append("expand[]", field);
  }
  const res = await fetch(`${HF_API_BASE}/models?${params}`, { signal });
  if (!res.ok) {
    throw new Error(`HuggingFace API returned ${res.status}`);
  }
  return res.json();
}
|
|
737
|
+
|
|
738
|
+
// src/provider-hf-transformers/common/HFT_ModelSearch.ts
|
|
739
|
+
// Searches Hugging Face for ONNX-compatible models, mapping each hit to a
// provider model record. The `siblings` file list is expanded only to detect
// available ONNX quantizations, then stripped from the raw payload.
var HFT_ModelSearch = async (input, _model, _onProgress, signal) => {
  const entries = await searchHfModels(input.query, { filter: "onnx" }, ["siblings"], signal);
  const results = entries.map((entry) => {
    const item = mapHfModelResult(entry, HF_TRANSFORMERS_ONNX);
    if (entry.siblings && entry.siblings.length > 0) {
      const filePaths = entry.siblings.map((s) => s.rfilename);
      const quantizations = parseOnnxQuantizations({ filePaths });
      if (quantizations.length > 0) {
        // Attach detected quantizations to the record's provider config.
        const record = item.record;
        const providerConfig = record.provider_config ?? {};
        providerConfig.quantizations = quantizations;
        record.provider_config = providerConfig;
      }
    }
    // Drop the (potentially large) file listing from the raw entry.
    const raw = item.raw;
    delete raw.siblings;
    return item;
  });
  return { results };
};
|
|
759
|
+
|
|
760
|
+
// src/provider-hf-transformers/common/HFT_ImageHelpers.ts
|
|
761
|
+
// Serializes a pipeline image result to base64 via its optional toBase64()
// method; falls back to an empty string when absent or when it returns a
// falsy value.
function imageToBase64(image) {
  const encoded = image.toBase64?.();
  return encoded ? encoded : "";
}
|
|
764
|
+
|
|
765
|
+
// src/provider-hf-transformers/common/HFT_BackgroundRemoval.ts
|
|
766
|
+
init_HFT_Pipeline();
|
|
767
|
+
// Removes the background from an image using the configured pipeline and
// returns the first result serialized as base64.
var HFT_BackgroundRemoval = async (input, model, onProgress, signal) => {
  const remover = await getPipeline(model, onProgress, {}, signal);
  const result = await remover(input.image);
  // The pipeline may return one image or an array; use the first.
  const resultImage = Array.isArray(result) ? result[0] : result;
  return {
    image: imageToBase64(resultImage)
  };
};
|
|
775
|
+
|
|
776
|
+
// src/provider-hf-transformers/common/HFT_CountTokens.ts
|
|
777
|
+
init_HFT_Pipeline();
|
|
778
|
+
// Counts tokens in `input.text` using the model's own tokenizer (loaded via
// AutoTokenizer, reporting download progress through onProgress). Accepts a
// single string or an array of strings; the return shape mirrors the input
// ({ count: number } or { count: number[] }).
var HFT_CountTokens = async (input, model, onProgress, _signal) => {
  const isArrayInput = Array.isArray(input.text);
  const { AutoTokenizer } = await loadTransformersSDK();
  const tokenizer = await AutoTokenizer.from_pretrained(model.provider_config.model_path, {
    progress_callback: (progress) => onProgress(progress?.progress ?? 0)
  });
  if (isArrayInput) {
    const texts = input.text;
    const counts = texts.map((t) => tokenizer.encode(t).length);
    return { count: counts };
  }
  const tokenIds = tokenizer.encode(input.text);
  return { count: tokenIds.length };
};
|
|
792
|
+
// Reactive wrapper around HFT_CountTokens: same computation with a no-op
// progress callback and a fresh (never-aborted) signal.
var HFT_CountTokens_Reactive = async (input, _output, model) => {
  return HFT_CountTokens(input, model, () => {}, new AbortController().signal);
};
|
|
795
|
+
|
|
796
|
+
// src/provider-hf-transformers/common/HFT_Download.ts
|
|
797
|
+
init_HFT_Pipeline();
|
|
798
|
+
import { getLogger as getLogger2 } from "@workglow/util/worker";
|
|
799
|
+
// Pre-downloads a model by forcing pipeline construction (the trailing 100
// argument is forwarded to getPipeline; presumably a progress ceiling —
// TODO confirm against getPipeline's signature). Timing is logged around
// the download. Echoes back the requested model identifier.
var HFT_Download = async (input, model, onProgress, signal) => {
  const logger = getLogger2();
  const timerLabel = `hft:Download:${model?.provider_config.model_path}`;
  logger.time(timerLabel, { model: model?.provider_config.model_path });
  await getPipeline(model, onProgress, {}, signal, 100);
  logger.timeEnd(timerLabel, { model: model?.provider_config.model_path });
  return {
    model: input.model
  };
};
|
|
809
|
+
|
|
810
|
+
// src/provider-hf-transformers/common/HFT_ImageClassification.ts
|
|
811
|
+
init_HFT_Pipeline();
|
|
812
|
+
// Classifies an image. When the model is configured with the
// "zero-shot-image-classification" pipeline, `input.categories` supplies the
// candidate labels (required); otherwise the model's built-in label set is
// used with an optional `input.maxCategories` (top_k) limit. Returns
// { categories: [{ label, score }] }.
var HFT_ImageClassification = async (input, model, onProgress, signal) => {
  if (model?.provider_config?.pipeline === "zero-shot-image-classification") {
    if (!input.categories || !Array.isArray(input.categories) || input.categories.length === 0) {
      console.warn("Zero-shot image classification requires categories", input);
      throw new Error("Zero-shot image classification requires categories");
    }
    const zeroShotClassifier = await getPipeline(model, onProgress, {}, signal);
    const result2 = await zeroShotClassifier(input.image, input.categories, {});
    // Pipeline may return a single object or an array; normalize to array.
    const results2 = Array.isArray(result2) ? result2 : [result2];
    return {
      categories: results2.map((r) => ({
        label: r.label,
        score: r.score
      }))
    };
  }
  const classifier = await getPipeline(model, onProgress, {}, signal);
  const result = await classifier(input.image, {
    top_k: input.maxCategories
  });
  const results = Array.isArray(result) ? result : [result];
  return {
    categories: results.map((r) => ({
      label: r.label,
      score: r.score
    }))
  };
};
|
|
840
|
+
|
|
841
|
+
// src/provider-hf-transformers/common/HFT_ImageEmbedding.ts
|
|
842
|
+
init_HFT_Pipeline();
|
|
843
|
+
import { getLogger as getLogger3 } from "@workglow/util/worker";
|
|
844
|
+
// Computes an image feature embedding, logging timing around pipeline load
// and inference. Returns the embedding tensor's raw data as `vector`.
var HFT_ImageEmbedding = async (input, model, onProgress, signal) => {
  const logger = getLogger3();
  const timerLabel = `hft:ImageEmbedding:${model?.provider_config.model_path}`;
  logger.time(timerLabel, { model: model?.provider_config.model_path });
  const embedder = await getPipeline(model, onProgress, {}, signal);
  logger.debug("HFT ImageEmbedding: pipeline ready, generating embedding", {
    model: model?.provider_config.model_path
  });
  const result = await embedder(input.image);
  logger.timeEnd(timerLabel, { dimensions: result?.data?.length });
  return {
    vector: result.data
  };
};
|
|
858
|
+
|
|
859
|
+
// src/provider-hf-transformers/common/HFT_ImageSegmentation.ts
|
|
860
|
+
init_HFT_Pipeline();
|
|
861
|
+
// Runs image segmentation; threshold/maskThreshold are forwarded to the
// pipeline. Returns { masks: [{ label, score, mask }] }.
// NOTE(review): each `mask` is an empty object placeholder — the pipeline's
// mask payload is not converted or serialized here; confirm this is intended.
var HFT_ImageSegmentation = async (input, model, onProgress, signal) => {
  const segmenter = await getPipeline(model, onProgress, {}, signal);
  const result = await segmenter(input.image, {
    threshold: input.threshold,
    mask_threshold: input.maskThreshold
  });
  const masks = Array.isArray(result) ? result : [result];
  const processedMasks = await Promise.all(masks.map(async (mask) => ({
    label: mask.label || "",
    score: mask.score || 0,
    mask: {}
  })));
  return {
    masks: processedMasks
  };
};
|
|
877
|
+
|
|
878
|
+
// src/provider-hf-transformers/common/HFT_ImageToText.ts
|
|
879
|
+
init_HFT_Pipeline();
|
|
880
|
+
// Generates a caption for an image (image-to-text pipeline), bounded by
// `input.maxTokens` new tokens. Returns an empty string when the pipeline
// yields no generated text.
var HFT_ImageToText = async (input, model, onProgress, signal) => {
  const captioner = await getPipeline(model, onProgress, {}, signal);
  const result = await captioner(input.image, {
    max_new_tokens: input.maxTokens
  });
  // Pipeline output may be a single result or an array of results.
  const text = Array.isArray(result) ? result[0]?.generated_text : result?.generated_text;
  return {
    text: text || ""
  };
};
|
|
890
|
+
|
|
891
|
+
// src/provider-hf-transformers/common/HFT_ModelInfo.ts
|
|
892
|
+
import { getLogger as getLogger4 } from "@workglow/util/worker";
|
|
893
|
+
init_HFT_Pipeline();
|
|
894
|
+
// Reports local availability info for a model: whether its pipeline is
// currently loaded in memory, whether its files are cached on disk, detected
// ONNX quantizations, and (optionally, per input.detail) per-file sizes.
var HFT_ModelInfo = async (input, model) => {
  const logger = getLogger4();
  const { ModelRegistry } = await loadTransformersSDK();
  const timerLabel = `hft:ModelInfo:${model?.provider_config.model_path}`;
  logger.time(timerLabel, { model: model?.provider_config.model_path });
  // detail: undefined | "files" | "files_with_metadata"
  const detail = input.detail;
  // Loaded means an in-memory pipeline instance exists for this exact config.
  const is_loaded = hasCachedPipeline(getPipelineCacheKey(model));
  const { pipeline: pipelineType, model_path, dtype } = model.provider_config;
  const cacheStatus = await ModelRegistry.is_pipeline_cached_files(pipelineType, model_path, {
    ...dtype ? { dtype } : {}
  });
  logger.debug("is_pipeline_cached", {
    input: [
      pipelineType,
      model_path,
      {
        ...dtype ? { dtype } : {}
      }
    ],
    result: cacheStatus
  });
  // An in-memory pipeline implies the files were available at load time.
  const is_cached = is_loaded || cacheStatus.allCached;
  let file_sizes = null;
  if (detail === "files" && cacheStatus.files.length > 0) {
    // File list only: sizes are reported as 0 (not fetched).
    const sizes = {};
    for (const { file } of cacheStatus.files) {
      sizes[file] = 0;
    }
    file_sizes = sizes;
  } else if (detail === "files_with_metadata" && cacheStatus.files.length > 0) {
    // Fetch per-file metadata in parallel; only files with a known size
    // are included.
    const sizes = {};
    await Promise.all(cacheStatus.files.map(async ({ file }) => {
      const metadata = await ModelRegistry.get_file_metadata(model_path, file);
      if (metadata.exists && metadata.size !== undefined) {
        sizes[file] = metadata.size;
      }
    }));
    if (Object.keys(sizes).length > 0) {
      file_sizes = sizes;
    }
  }
  // Derive available ONNX quantizations from the cached file names.
  let quantizations;
  if (cacheStatus.files.length > 0) {
    const filePaths = cacheStatus.files.map((f) => f.file);
    const quantizations_parsed = parseOnnxQuantizations({ filePaths });
    if (quantizations_parsed.length > 0) {
      quantizations = quantizations_parsed;
    }
  }
  logger.timeEnd(timerLabel, { model: model?.provider_config.model_path });
  return {
    model: input.model,
    is_local: true,
    is_remote: false,
    supports_browser: true,
    supports_node: true,
    is_cached,
    is_loaded,
    file_sizes,
    ...quantizations ? { quantizations } : {}
  };
};
|
|
956
|
+
|
|
957
|
+
// src/provider-hf-transformers/common/HFT_ObjectDetection.ts
|
|
958
|
+
init_HFT_Pipeline();
|
|
959
|
+
// Detects objects in an image. When the model uses the
// "zero-shot-object-detection" pipeline, `input.labels` supplies the
// candidate labels (required); otherwise the model's built-in classes are
// used. Returns { detections: [{ label, score, box }] }.
var HFT_ObjectDetection = async (input, model, onProgress, signal) => {
  if (model?.provider_config?.pipeline === "zero-shot-object-detection") {
    if (!input.labels || !Array.isArray(input.labels) || input.labels.length === 0) {
      throw new Error("Zero-shot object detection requires labels");
    }
    const zeroShotDetector = await getPipeline(model, onProgress, {}, signal);
    const result2 = await zeroShotDetector(input.image, Array.from(input.labels), {
      threshold: input.threshold
    });
    // Pipeline may return one detection or an array; normalize to an array.
    const detections2 = Array.isArray(result2) ? result2 : [result2];
    return {
      detections: detections2.map((d) => ({
        label: d.label,
        score: d.score,
        box: d.box
      }))
    };
  }
  const detector = await getPipeline(model, onProgress, {}, signal);
  const result = await detector(input.image, {
    threshold: input.threshold
  });
  const detections = Array.isArray(result) ? result : [result];
  return {
    detections: detections.map((d) => ({
      label: d.label,
      score: d.score,
      box: d.box
    }))
  };
};
|
|
990
|
+
|
|
991
|
+
// src/provider-hf-transformers/common/HFT_StructuredGeneration.ts
|
|
992
|
+
init_HFT_Pipeline();
|
|
993
|
+
import { parsePartialJson } from "@workglow/util/worker";
|
|
994
|
+
|
|
995
|
+
// src/provider-hf-transformers/common/HFT_Streaming.ts
|
|
996
|
+
function createStreamEventQueue() {
|
|
997
|
+
const buffer = [];
|
|
998
|
+
let resolve = null;
|
|
999
|
+
let finished = false;
|
|
1000
|
+
let err = null;
|
|
1001
|
+
const push = (event) => {
|
|
1002
|
+
if (resolve) {
|
|
1003
|
+
const r = resolve;
|
|
1004
|
+
resolve = null;
|
|
1005
|
+
r({ value: event, done: false });
|
|
1006
|
+
} else {
|
|
1007
|
+
buffer.push(event);
|
|
1008
|
+
}
|
|
1009
|
+
};
|
|
1010
|
+
const done = () => {
|
|
1011
|
+
finished = true;
|
|
1012
|
+
if (resolve) {
|
|
1013
|
+
const r = resolve;
|
|
1014
|
+
resolve = null;
|
|
1015
|
+
r({ value: undefined, done: true });
|
|
1016
|
+
}
|
|
1017
|
+
};
|
|
1018
|
+
const error = (e) => {
|
|
1019
|
+
err = e;
|
|
1020
|
+
if (resolve) {
|
|
1021
|
+
const r = resolve;
|
|
1022
|
+
resolve = null;
|
|
1023
|
+
r({ value: undefined, done: true });
|
|
1024
|
+
}
|
|
1025
|
+
};
|
|
1026
|
+
const iterable = {
|
|
1027
|
+
[Symbol.asyncIterator]() {
|
|
1028
|
+
return {
|
|
1029
|
+
next() {
|
|
1030
|
+
if (err)
|
|
1031
|
+
return Promise.reject(err);
|
|
1032
|
+
if (buffer.length > 0) {
|
|
1033
|
+
return Promise.resolve({ value: buffer.shift(), done: false });
|
|
1034
|
+
}
|
|
1035
|
+
if (finished) {
|
|
1036
|
+
return Promise.resolve({ value: undefined, done: true });
|
|
1037
|
+
}
|
|
1038
|
+
return new Promise((r) => {
|
|
1039
|
+
resolve = r;
|
|
1040
|
+
});
|
|
1041
|
+
}
|
|
1042
|
+
};
|
|
1043
|
+
}
|
|
1044
|
+
};
|
|
1045
|
+
return { push, done, error, iterable };
|
|
1046
|
+
}
|
|
1047
|
+
function createStreamingTextStreamer(tokenizer, queue, textStreamer) {
|
|
1048
|
+
return new textStreamer(tokenizer, {
|
|
1049
|
+
skip_prompt: true,
|
|
1050
|
+
decode_kwargs: { skip_special_tokens: true },
|
|
1051
|
+
callback_function: (text) => {
|
|
1052
|
+
queue.push({ type: "text-delta", port: "text", textDelta: text });
|
|
1053
|
+
}
|
|
1054
|
+
});
|
|
1055
|
+
}
|
|
1056
|
+
function createTextStreamer(tokenizer, updateProgress, textStreamer) {
|
|
1057
|
+
let count = 0;
|
|
1058
|
+
return new textStreamer(tokenizer, {
|
|
1059
|
+
skip_prompt: true,
|
|
1060
|
+
decode_kwargs: { skip_special_tokens: true },
|
|
1061
|
+
callback_function: (text) => {
|
|
1062
|
+
count++;
|
|
1063
|
+
const result = 100 * (1 - Math.exp(-0.05 * count));
|
|
1064
|
+
const progress = Math.round(Math.min(result, 100));
|
|
1065
|
+
updateProgress(progress, "Generating", { text, progress });
|
|
1066
|
+
}
|
|
1067
|
+
});
|
|
1068
|
+
}
|
|
1069
|
+
|
|
1070
|
+
// src/provider-hf-transformers/common/HFT_TextOutput.ts
|
|
1071
|
+
// Normalizes a pipeline's generated_text into a plain string.
// Accepts: null/undefined (-> ""), a raw string, or a chat transcript array,
// in which case the final message's content is used — either directly (string
// content) or by taking its first { type: "text" } part.
function extractGeneratedText(generatedText) {
  if (generatedText == null) {
    return "";
  }
  if (typeof generatedText === "string") {
    return generatedText;
  }
  const finalMessage = generatedText[generatedText.length - 1];
  if (!finalMessage) {
    return "";
  }
  const { content } = finalMessage;
  if (typeof content === "string") {
    return content;
  }
  for (const piece of content) {
    if (piece.type === "text" && "text" in piece) {
      return piece.text;
    }
  }
  return "";
}
|
|
1089
|
+
|
|
1090
|
+
// src/provider-hf-transformers/common/HFT_StructuredGeneration.ts
|
|
1091
|
+
// Wraps the user prompt with instructions forcing a JSON-only response that
// conforms to input.outputSchema (pretty-printed into the prompt).
function buildStructuredGenerationPrompt(input) {
  const schemaStr = JSON.stringify(input.outputSchema, null, 2);
  return [
    input.prompt,
    "",
    "You MUST respond with ONLY a valid JSON object conforming to this JSON schema:",
    schemaStr,
    "",
    "Output ONLY the JSON object, no other text."
  ].join("\n");
}
|
|
1100
|
+
// Best-effort JSON extraction from model output. Tries, in order:
// whole-text parse, parse of the first {...} span, then partial-JSON repair
// on that span. Returns {} when nothing parseable is found.
function extractJsonFromText(text) {
  try {
    return JSON.parse(text);
  } catch {}
  const embedded = text.match(/\{[\s\S]*\}/);
  if (!embedded) {
    return {};
  }
  try {
    return JSON.parse(embedded[0]);
  } catch {
    return parsePartialJson(embedded[0]) ?? {};
  }
}
|
|
1115
|
+
// Generates a JSON object conforming to input.outputSchema by prompting the
// text-generation pipeline with schema instructions and parsing JSON out of
// the response. The streamer here only drives progress reporting; the full
// result is awaited below.
var HFT_StructuredGeneration = async (input, model, onProgress, signal) => {
  const generateText = await getPipeline(model, onProgress, {}, signal);
  const { TextStreamer } = await loadTransformersSDK();
  const prompt = buildStructuredGenerationPrompt(input);
  const messages = [{ role: "user", content: prompt }];
  const formattedPrompt = generateText.tokenizer.apply_chat_template(messages, {
    tokenize: false,
    add_generation_prompt: true
  });
  const streamer = createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);
  let results = await generateText(formattedPrompt, {
    max_new_tokens: input.maxTokens ?? 1024,
    temperature: input.temperature ?? undefined,
    return_full_text: false,
    streamer
  });
  // Normalize to an array; only the first result is used.
  if (!Array.isArray(results)) {
    results = [results];
  }
  const responseText = extractGeneratedText(results[0]?.generated_text).trim();
  const object = extractJsonFromText(responseText);
  return { object };
};
|
|
1138
|
+
// Streaming variant of structured generation: yields "object-delta" events
// carrying the partially parsed JSON as tokens arrive, then a final
// { type: "finish" } event with the fully parsed object.
var HFT_StructuredGeneration_Stream = async function* (input, model, signal) {
  const noopProgress = () => {};
  const generateText = await getPipeline(model, noopProgress, {}, signal);
  const { TextStreamer } = await loadTransformersSDK();
  const prompt = buildStructuredGenerationPrompt(input);
  const messages = [{ role: "user", content: prompt }];
  const formattedPrompt = generateText.tokenizer.apply_chat_template(messages, {
    tokenize: false,
    add_generation_prompt: true
  });
  const queue = createStreamEventQueue();
  const streamer = createStreamingTextStreamer(generateText.tokenizer, queue, TextStreamer);
  let fullText = "";
  // Intercept text-delta events: accumulate the raw text and, whenever a
  // partial JSON object can be recovered from it, emit an object-delta in
  // place of the text-delta. Other events pass through unchanged.
  const originalPush = queue.push;
  queue.push = (event) => {
    if (event.type === "text-delta" && "textDelta" in event) {
      fullText += event.textDelta;
      const match = fullText.match(/\{[\s\S]*/);
      if (match) {
        const partial = parsePartialJson(match[0]);
        if (partial !== undefined) {
          originalPush({
            type: "object-delta",
            port: "object",
            objectDelta: partial
          });
          return;
        }
      }
    }
    originalPush(event);
  };
  // Completion/failure of the pipeline closes (or errors) the event queue,
  // which terminates the yield* loop below.
  const pipelinePromise = generateText(formattedPrompt, {
    max_new_tokens: input.maxTokens ?? 1024,
    temperature: input.temperature ?? undefined,
    return_full_text: false,
    streamer
  }).then(() => queue.done(), (err) => queue.error(err));
  yield* queue.iterable;
  await pipelinePromise;
  const object = extractJsonFromText(fullText);
  yield { type: "finish", data: { object } };
};
|
|
1181
|
+
|
|
1182
|
+
// src/provider-hf-transformers/common/HFT_TextClassification.ts
|
|
1183
|
+
init_HFT_Pipeline();
|
|
1184
|
+
// Classifies text. Supports single-string or string-array input, and two
// pipeline modes: "zero-shot-classification" (requires
// input.candidateLabels) and standard text-classification (optional
// input.maxCategories / top_k). Output shape mirrors the input: one
// category list per input for arrays, a single list otherwise.
var HFT_TextClassification = async (input, model, onProgress, signal) => {
  const isArrayInput = Array.isArray(input.text);
  if (model?.provider_config?.pipeline === "zero-shot-classification") {
    if (!input.candidateLabels || !Array.isArray(input.candidateLabels) || input.candidateLabels.length === 0) {
      throw new Error("Zero-shot text classification requires candidate labels");
    }
    const zeroShotClassifier = await getPipeline(model, onProgress, {}, signal);
    const result2 = await zeroShotClassifier(input.text, input.candidateLabels, {});
    if (isArrayInput) {
      // Batch results come back as an array of { labels, scores } objects;
      // a single-element batch may be returned unwrapped.
      const results = Array.isArray(result2) && Array.isArray(result2[0]?.labels) ? result2 : [result2];
      return {
        categories: results.map((r) => r.labels.map((label, idx) => ({
          label,
          score: r.scores[idx]
        })))
      };
    }
    // Zero-shot returns parallel `labels`/`scores` arrays; zip them.
    return {
      categories: result2.labels.map((label, idx) => ({
        label,
        score: result2.scores[idx]
      }))
    };
  }
  const TextClassification = await getPipeline(model, onProgress, {}, signal);
  const result = await TextClassification(input.text, {
    top_k: input.maxCategories || undefined
  });
  if (isArrayInput) {
    return {
      categories: result.map((perInput) => {
        // With top_k each per-input result is an array; otherwise an object.
        const items = Array.isArray(perInput) ? perInput : [perInput];
        return items.map((category) => ({
          label: category.label,
          score: category.score
        }));
      })
    };
  }
  // Single input with top_k produces a nested array; unwrap it.
  if (Array.isArray(result[0])) {
    return {
      categories: result[0].map((category) => ({
        label: category.label,
        score: category.score
      }))
    };
  }
  return {
    categories: result.map((category) => ({
      label: category.label,
      score: category.score
    }))
  };
};
|
|
1238
|
+
|
|
1239
|
+
// src/provider-hf-transformers/common/HFT_TextEmbedding.ts
|
|
1240
|
+
init_HFT_Pipeline();
|
|
1241
|
+
import { getLogger as getLogger5 } from "@workglow/util/worker";
|
|
1242
|
+
// Computes text embeddings (single string or batch), validating the output
// tensor against the model's declared native_dimensions and logging timing.
// Returns { vector } — a single vector, or one vector per input for batches.
var HFT_TextEmbedding = async (input, model, onProgress, signal) => {
  const logger = getLogger5();
  // UUID keeps timer labels unique across concurrent invocations.
  const uuid = crypto.randomUUID();
  const timerLabel = `hft:TextEmbedding:${model?.provider_config.model_path}:${uuid}`;
  logger.time(timerLabel, { model: model?.provider_config.model_path });
  const generateEmbedding = await getPipeline(model, onProgress, {}, signal);
  logger.debug("HFT TextEmbedding: pipeline ready, generating embedding", {
    model: model?.provider_config.model_path,
    inputLength: Array.isArray(input.text) ? input.text.length : input.text?.length
  });
  const hfVector = await generateEmbedding(input.text, {
    pooling: model?.provider_config.pooling || "mean",
    normalize: model?.provider_config.normalize
  });
  const isArrayInput = Array.isArray(input.text);
  const embeddingDim = model?.provider_config.native_dimensions;
  if (isArrayInput && hfVector.dims.length > 1) {
    // Batch path: expect a (batch, dim) tensor matching input count and the
    // model's declared dimensionality.
    const [numTexts, vectorDim] = hfVector.dims;
    if (numTexts !== input.text.length) {
      throw new Error(`HuggingFace Embedding tensor batch size does not match input array length: ${numTexts} != ${input.text.length}`);
    }
    if (vectorDim !== embeddingDim) {
      throw new Error(`HuggingFace Embedding vector dimension does not match model dimensions: ${vectorDim} != ${embeddingDim}`);
    }
    // Copy each row out of the tensor into an independent buffer.
    const vectors = Array.from({ length: numTexts }, (_, i) => hfVector[i].data.slice());
    logger.timeEnd(timerLabel, { batchSize: numTexts, dimensions: vectorDim });
    return { vector: vectors };
  }
  if (hfVector.size !== embeddingDim) {
    logger.timeEnd(timerLabel, { status: "error", reason: "dimension mismatch" });
    console.warn(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${embeddingDim}`, input, hfVector);
    throw new Error(`HuggingFace Embedding vector length does not match model dimensions v${hfVector.size} != m${embeddingDim}`);
  }
  logger.timeEnd(timerLabel, { dimensions: hfVector.size });
  return { vector: hfVector.data };
};
|
|
1278
|
+
|
|
1279
|
+
// src/provider-hf-transformers/common/HFT_TextFillMask.ts
|
|
1280
|
+
init_HFT_Pipeline();
|
|
1281
|
+
var HFT_TextFillMask = async (input, model, onProgress, signal) => {
|
|
1282
|
+
const isArrayInput = Array.isArray(input.text);
|
|
1283
|
+
const unmasker = await getPipeline(model, onProgress, {}, signal);
|
|
1284
|
+
const results = await unmasker(input.text);
|
|
1285
|
+
if (isArrayInput) {
|
|
1286
|
+
return {
|
|
1287
|
+
predictions: results.map((perInput) => {
|
|
1288
|
+
const items = Array.isArray(perInput) ? perInput : [perInput];
|
|
1289
|
+
return items.map((prediction) => ({
|
|
1290
|
+
entity: prediction.token_str,
|
|
1291
|
+
score: prediction.score,
|
|
1292
|
+
sequence: prediction.sequence
|
|
1293
|
+
}));
|
|
1294
|
+
})
|
|
1295
|
+
};
|
|
1296
|
+
}
|
|
1297
|
+
let predictions = [];
|
|
1298
|
+
if (!Array.isArray(results)) {
|
|
1299
|
+
predictions = [results];
|
|
1300
|
+
} else {
|
|
1301
|
+
predictions = results;
|
|
1302
|
+
}
|
|
1303
|
+
return {
|
|
422
1304
|
predictions: predictions.map((prediction) => ({
|
|
423
1305
|
entity: prediction.token_str,
|
|
424
1306
|
score: prediction.score,
|
|
@@ -426,17 +1308,22 @@ var HFT_TextFillMask = async (input, model, onProgress, signal) => {
|
|
|
426
1308
|
}))
|
|
427
1309
|
};
|
|
428
1310
|
};
|
|
1311
|
+
|
|
1312
|
+
// src/provider-hf-transformers/common/HFT_TextGeneration.ts
|
|
1313
|
+
init_HFT_Pipeline();
|
|
1314
|
+
import { getLogger as getLogger6 } from "@workglow/util/worker";
|
|
429
1315
|
var HFT_TextGeneration = async (input, model, onProgress, signal) => {
|
|
430
|
-
const logger =
|
|
1316
|
+
const logger = getLogger6();
|
|
431
1317
|
const timerLabel = `hft:TextGeneration:${model?.provider_config.model_path}`;
|
|
432
1318
|
logger.time(timerLabel, { model: model?.provider_config.model_path });
|
|
433
1319
|
const isArrayInput = Array.isArray(input.prompt);
|
|
434
1320
|
const generateText = await getPipeline(model, onProgress, {}, signal);
|
|
1321
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
435
1322
|
logger.debug("HFT TextGeneration: pipeline ready, generating text", {
|
|
436
1323
|
model: model?.provider_config.model_path,
|
|
437
1324
|
promptLength: isArrayInput ? input.prompt.length : input.prompt?.length
|
|
438
1325
|
});
|
|
439
|
-
const streamer = isArrayInput ? undefined : createTextStreamer(generateText.tokenizer, onProgress);
|
|
1326
|
+
const streamer = isArrayInput ? undefined : createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);
|
|
440
1327
|
let results = await generateText(input.prompt, {
|
|
441
1328
|
...streamer ? { streamer } : {}
|
|
442
1329
|
});
|
|
@@ -451,440 +1338,142 @@ var HFT_TextGeneration = async (input, model, onProgress, signal) => {
|
|
|
451
1338
|
}
|
|
452
1339
|
if (!Array.isArray(results)) {
|
|
453
1340
|
results = [results];
|
|
454
|
-
}
|
|
455
|
-
const text = extractGeneratedText(results[0]?.generated_text);
|
|
456
|
-
logger.timeEnd(timerLabel, { outputLength: text?.length });
|
|
457
|
-
return {
|
|
458
|
-
text
|
|
459
|
-
};
|
|
460
|
-
};
|
|
461
|
-
var HFT_TextTranslation = async (input, model, onProgress, signal) => {
|
|
462
|
-
const isArrayInput = Array.isArray(input.text);
|
|
463
|
-
const translate = await getPipeline(model, onProgress, {}, signal);
|
|
464
|
-
const streamer = isArrayInput ? undefined : createTextStreamer(translate.tokenizer, onProgress);
|
|
465
|
-
const result = await translate(input.text, {
|
|
466
|
-
src_lang: input.source_lang,
|
|
467
|
-
tgt_lang: input.target_lang,
|
|
468
|
-
...streamer ? { streamer } : {}
|
|
469
|
-
});
|
|
470
|
-
if (isArrayInput) {
|
|
471
|
-
const batchResults = Array.isArray(result) ? result : [result];
|
|
472
|
-
return {
|
|
473
|
-
text: batchResults.map((r) => r?.translation_text || ""),
|
|
474
|
-
target_lang: input.target_lang
|
|
475
|
-
};
|
|
476
|
-
}
|
|
477
|
-
const translatedText = Array.isArray(result) ? result[0]?.translation_text || "" : result?.translation_text || "";
|
|
478
|
-
return {
|
|
479
|
-
text: translatedText,
|
|
480
|
-
target_lang: input.target_lang
|
|
481
|
-
};
|
|
482
|
-
};
|
|
483
|
-
var HFT_TextRewriter = async (input, model, onProgress, signal) => {
|
|
484
|
-
const isArrayInput = Array.isArray(input.text);
|
|
485
|
-
const generateText = await getPipeline(model, onProgress, {}, signal);
|
|
486
|
-
const streamer = isArrayInput ? undefined : createTextStreamer(generateText.tokenizer, onProgress);
|
|
487
|
-
if (isArrayInput) {
|
|
488
|
-
const texts = input.text;
|
|
489
|
-
const promptedTexts = texts.map((t) => (input.prompt ? input.prompt + `
|
|
490
|
-
` : "") + t);
|
|
491
|
-
let results2 = await generateText(promptedTexts, {});
|
|
492
|
-
const batchResults = Array.isArray(results2) ? results2 : [results2];
|
|
493
|
-
const outputTexts = batchResults.map((r, i) => {
|
|
494
|
-
const seqs = Array.isArray(r) ? r : [r];
|
|
495
|
-
const text2 = extractGeneratedText(seqs[0]?.generated_text);
|
|
496
|
-
if (text2 === promptedTexts[i]) {
|
|
497
|
-
throw new Error("Rewriter failed to generate new text");
|
|
498
|
-
}
|
|
499
|
-
return text2;
|
|
500
|
-
});
|
|
501
|
-
return { text: outputTexts };
|
|
502
|
-
}
|
|
503
|
-
const promptedText = (input.prompt ? input.prompt + `
|
|
504
|
-
` : "") + input.text;
|
|
505
|
-
let results = await generateText(promptedText, {
|
|
506
|
-
...streamer ? { streamer } : {}
|
|
507
|
-
});
|
|
508
|
-
if (!Array.isArray(results)) {
|
|
509
|
-
results = [results];
|
|
510
|
-
}
|
|
511
|
-
const text = extractGeneratedText(results[0]?.generated_text);
|
|
512
|
-
if (text === promptedText) {
|
|
513
|
-
throw new Error("Rewriter failed to generate new text");
|
|
514
|
-
}
|
|
515
|
-
return {
|
|
516
|
-
text
|
|
517
|
-
};
|
|
518
|
-
};
|
|
519
|
-
var HFT_TextSummary = async (input, model, onProgress, signal) => {
|
|
520
|
-
const isArrayInput = Array.isArray(input.text);
|
|
521
|
-
const generateSummary = await getPipeline(model, onProgress, {}, signal);
|
|
522
|
-
const streamer = isArrayInput ? undefined : createTextStreamer(generateSummary.tokenizer, onProgress);
|
|
523
|
-
const result = await generateSummary(input.text, {
|
|
524
|
-
...streamer ? { streamer } : {}
|
|
525
|
-
});
|
|
526
|
-
if (isArrayInput) {
|
|
527
|
-
const batchResults = Array.isArray(result) ? result : [result];
|
|
528
|
-
return {
|
|
529
|
-
text: batchResults.map((r) => r?.summary_text || "")
|
|
530
|
-
};
|
|
531
|
-
}
|
|
532
|
-
let summaryText = "";
|
|
533
|
-
if (Array.isArray(result)) {
|
|
534
|
-
summaryText = result[0]?.summary_text || "";
|
|
535
|
-
} else {
|
|
536
|
-
summaryText = result?.summary_text || "";
|
|
537
|
-
}
|
|
538
|
-
return {
|
|
539
|
-
text: summaryText
|
|
540
|
-
};
|
|
541
|
-
};
|
|
542
|
-
var HFT_TextQuestionAnswer = async (input, model, onProgress, signal) => {
|
|
543
|
-
const isArrayInput = Array.isArray(input.question);
|
|
544
|
-
const generateAnswer = await getPipeline(model, onProgress, {}, signal);
|
|
545
|
-
if (isArrayInput) {
|
|
546
|
-
const questions = input.question;
|
|
547
|
-
const contexts = input.context;
|
|
548
|
-
if (questions.length !== contexts.length) {
|
|
549
|
-
throw new Error(`question[] and context[] must have the same length: ${questions.length} != ${contexts.length}`);
|
|
550
|
-
}
|
|
551
|
-
const answers = [];
|
|
552
|
-
for (let i = 0;i < questions.length; i++) {
|
|
553
|
-
const result2 = await generateAnswer(questions[i], contexts[i], {});
|
|
554
|
-
let answerText2 = "";
|
|
555
|
-
if (Array.isArray(result2)) {
|
|
556
|
-
answerText2 = result2[0]?.answer || "";
|
|
557
|
-
} else {
|
|
558
|
-
answerText2 = result2?.answer || "";
|
|
559
|
-
}
|
|
560
|
-
answers.push(answerText2);
|
|
561
|
-
}
|
|
562
|
-
return { text: answers };
|
|
563
|
-
}
|
|
564
|
-
const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress);
|
|
565
|
-
const result = await generateAnswer(input.question, input.context, {
|
|
566
|
-
streamer
|
|
567
|
-
});
|
|
568
|
-
let answerText = "";
|
|
569
|
-
if (Array.isArray(result)) {
|
|
570
|
-
answerText = result[0]?.answer || "";
|
|
571
|
-
} else {
|
|
572
|
-
answerText = result?.answer || "";
|
|
573
|
-
}
|
|
574
|
-
return {
|
|
575
|
-
text: answerText
|
|
576
|
-
};
|
|
577
|
-
};
|
|
578
|
-
var HFT_ImageSegmentation = async (input, model, onProgress, signal) => {
|
|
579
|
-
const segmenter = await getPipeline(model, onProgress, {}, signal);
|
|
580
|
-
const result = await segmenter(input.image, {
|
|
581
|
-
threshold: input.threshold,
|
|
582
|
-
mask_threshold: input.maskThreshold
|
|
583
|
-
});
|
|
584
|
-
const masks = Array.isArray(result) ? result : [result];
|
|
585
|
-
const processedMasks = await Promise.all(masks.map(async (mask) => ({
|
|
586
|
-
label: mask.label || "",
|
|
587
|
-
score: mask.score || 0,
|
|
588
|
-
mask: {}
|
|
589
|
-
})));
|
|
590
|
-
return {
|
|
591
|
-
masks: processedMasks
|
|
592
|
-
};
|
|
593
|
-
};
|
|
594
|
-
var HFT_ImageToText = async (input, model, onProgress, signal) => {
|
|
595
|
-
const captioner = await getPipeline(model, onProgress, {}, signal);
|
|
596
|
-
const result = await captioner(input.image, {
|
|
597
|
-
max_new_tokens: input.maxTokens
|
|
598
|
-
});
|
|
599
|
-
const text = Array.isArray(result) ? result[0]?.generated_text : result?.generated_text;
|
|
1341
|
+
}
|
|
1342
|
+
const text = extractGeneratedText(results[0]?.generated_text);
|
|
1343
|
+
logger.timeEnd(timerLabel, { outputLength: text?.length });
|
|
600
1344
|
return {
|
|
601
|
-
text
|
|
1345
|
+
text
|
|
602
1346
|
};
|
|
603
1347
|
};
|
|
604
|
-
var
|
|
605
|
-
const
|
|
606
|
-
const
|
|
607
|
-
const
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
1348
|
+
var HFT_TextGeneration_Stream = async function* (input, model, signal) {
|
|
1349
|
+
const noopProgress = () => {};
|
|
1350
|
+
const generateText = await getPipeline(model, noopProgress, {}, signal);
|
|
1351
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1352
|
+
const queue = createStreamEventQueue();
|
|
1353
|
+
const streamer = createStreamingTextStreamer(generateText.tokenizer, queue, TextStreamer);
|
|
1354
|
+
const pipelinePromise = generateText(input.prompt, {
|
|
1355
|
+
streamer
|
|
1356
|
+
}).then(() => queue.done(), (err) => queue.error(err));
|
|
1357
|
+
yield* queue.iterable;
|
|
1358
|
+
await pipelinePromise;
|
|
1359
|
+
yield { type: "finish", data: {} };
|
|
611
1360
|
};
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
const
|
|
617
|
-
|
|
618
|
-
|
|
1361
|
+
|
|
1362
|
+
// src/provider-hf-transformers/common/HFT_TextLanguageDetection.ts
|
|
1363
|
+
init_HFT_Pipeline();
|
|
1364
|
+
var HFT_TextLanguageDetection = async (input, model, onProgress, signal) => {
|
|
1365
|
+
const isArrayInput = Array.isArray(input.text);
|
|
1366
|
+
const TextClassification = await getPipeline(model, onProgress, {}, signal);
|
|
1367
|
+
const result = await TextClassification(input.text, {
|
|
1368
|
+
top_k: input.maxLanguages || undefined
|
|
619
1369
|
});
|
|
620
|
-
|
|
621
|
-
logger.timeEnd(timerLabel, { dimensions: result?.data?.length });
|
|
622
|
-
return {
|
|
623
|
-
vector: result.data
|
|
624
|
-
};
|
|
625
|
-
};
|
|
626
|
-
var HFT_ImageClassification = async (input, model, onProgress, signal) => {
|
|
627
|
-
if (model?.provider_config?.pipeline === "zero-shot-image-classification") {
|
|
628
|
-
if (!input.categories || !Array.isArray(input.categories) || input.categories.length === 0) {
|
|
629
|
-
console.warn("Zero-shot image classification requires categories", input);
|
|
630
|
-
throw new Error("Zero-shot image classification requires categories");
|
|
631
|
-
}
|
|
632
|
-
const zeroShotClassifier = await getPipeline(model, onProgress, {}, signal);
|
|
633
|
-
const result2 = await zeroShotClassifier(input.image, input.categories, {});
|
|
634
|
-
const results2 = Array.isArray(result2) ? result2 : [result2];
|
|
1370
|
+
if (isArrayInput) {
|
|
635
1371
|
return {
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
1372
|
+
languages: result.map((perInput) => {
|
|
1373
|
+
const items = Array.isArray(perInput) ? perInput : [perInput];
|
|
1374
|
+
return items.map((category) => ({
|
|
1375
|
+
language: category.label,
|
|
1376
|
+
score: category.score
|
|
1377
|
+
}));
|
|
1378
|
+
})
|
|
640
1379
|
};
|
|
641
1380
|
}
|
|
642
|
-
|
|
643
|
-
const result = await classifier(input.image, {
|
|
644
|
-
top_k: input.maxCategories
|
|
645
|
-
});
|
|
646
|
-
const results = Array.isArray(result) ? result : [result];
|
|
647
|
-
return {
|
|
648
|
-
categories: results.map((r) => ({
|
|
649
|
-
label: r.label,
|
|
650
|
-
score: r.score
|
|
651
|
-
}))
|
|
652
|
-
};
|
|
653
|
-
};
|
|
654
|
-
var HFT_ObjectDetection = async (input, model, onProgress, signal) => {
|
|
655
|
-
if (model?.provider_config?.pipeline === "zero-shot-object-detection") {
|
|
656
|
-
if (!input.labels || !Array.isArray(input.labels) || input.labels.length === 0) {
|
|
657
|
-
throw new Error("Zero-shot object detection requires labels");
|
|
658
|
-
}
|
|
659
|
-
const zeroShotDetector = await getPipeline(model, onProgress, {}, signal);
|
|
660
|
-
const result2 = await zeroShotDetector(input.image, Array.from(input.labels), {
|
|
661
|
-
threshold: input.threshold
|
|
662
|
-
});
|
|
663
|
-
const detections2 = Array.isArray(result2) ? result2 : [result2];
|
|
1381
|
+
if (Array.isArray(result[0])) {
|
|
664
1382
|
return {
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
score:
|
|
668
|
-
box: d.box
|
|
1383
|
+
languages: result[0].map((category) => ({
|
|
1384
|
+
language: category.label,
|
|
1385
|
+
score: category.score
|
|
669
1386
|
}))
|
|
670
1387
|
};
|
|
671
1388
|
}
|
|
672
|
-
const detector = await getPipeline(model, onProgress, {}, signal);
|
|
673
|
-
const result = await detector(input.image, {
|
|
674
|
-
threshold: input.threshold
|
|
675
|
-
});
|
|
676
|
-
const detections = Array.isArray(result) ? result : [result];
|
|
677
1389
|
return {
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
score:
|
|
681
|
-
box: d.box
|
|
1390
|
+
languages: result.map((category) => ({
|
|
1391
|
+
language: category.label,
|
|
1392
|
+
score: category.score
|
|
682
1393
|
}))
|
|
683
1394
|
};
|
|
684
1395
|
};
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
const
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
decode_kwargs: { skip_special_tokens: true },
|
|
694
|
-
callback_function: (text) => {
|
|
695
|
-
count++;
|
|
696
|
-
const result = 100 * (1 - Math.exp(-0.05 * count));
|
|
697
|
-
const progress = Math.round(Math.min(result, 100));
|
|
698
|
-
updateProgress(progress, "Generating", { text, progress });
|
|
699
|
-
}
|
|
1396
|
+
|
|
1397
|
+
// src/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.ts
|
|
1398
|
+
init_HFT_Pipeline();
|
|
1399
|
+
var HFT_TextNamedEntityRecognition = async (input, model, onProgress, signal) => {
|
|
1400
|
+
const isArrayInput = Array.isArray(input.text);
|
|
1401
|
+
const textNamedEntityRecognition = await getPipeline(model, onProgress, {}, signal);
|
|
1402
|
+
const results = await textNamedEntityRecognition(input.text, {
|
|
1403
|
+
ignore_labels: input.blockList
|
|
700
1404
|
});
|
|
701
|
-
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
return content;
|
|
713
|
-
for (const part of content) {
|
|
714
|
-
if (part.type === "text" && "text" in part) {
|
|
715
|
-
return part.text;
|
|
716
|
-
}
|
|
1405
|
+
if (isArrayInput) {
|
|
1406
|
+
return {
|
|
1407
|
+
entities: results.map((perInput) => {
|
|
1408
|
+
const items = Array.isArray(perInput) ? perInput : [perInput];
|
|
1409
|
+
return items.map((entity) => ({
|
|
1410
|
+
entity: entity.entity,
|
|
1411
|
+
score: entity.score,
|
|
1412
|
+
word: entity.word
|
|
1413
|
+
}));
|
|
1414
|
+
})
|
|
1415
|
+
};
|
|
717
1416
|
}
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
err = e;
|
|
744
|
-
if (resolve) {
|
|
745
|
-
const r = resolve;
|
|
746
|
-
resolve = null;
|
|
747
|
-
r({ value: undefined, done: true });
|
|
748
|
-
}
|
|
749
|
-
};
|
|
750
|
-
const iterable = {
|
|
751
|
-
[Symbol.asyncIterator]() {
|
|
752
|
-
return {
|
|
753
|
-
next() {
|
|
754
|
-
if (err)
|
|
755
|
-
return Promise.reject(err);
|
|
756
|
-
if (buffer.length > 0) {
|
|
757
|
-
return Promise.resolve({ value: buffer.shift(), done: false });
|
|
758
|
-
}
|
|
759
|
-
if (finished) {
|
|
760
|
-
return Promise.resolve({ value: undefined, done: true });
|
|
761
|
-
}
|
|
762
|
-
return new Promise((r) => {
|
|
763
|
-
resolve = r;
|
|
764
|
-
});
|
|
765
|
-
}
|
|
766
|
-
};
|
|
767
|
-
}
|
|
768
|
-
};
|
|
769
|
-
return { push, done, error, iterable };
|
|
770
|
-
}
|
|
771
|
-
function createStreamingTextStreamer(tokenizer, queue) {
|
|
772
|
-
const { TextStreamer } = _transformersSdk;
|
|
773
|
-
return new TextStreamer(tokenizer, {
|
|
774
|
-
skip_prompt: true,
|
|
775
|
-
decode_kwargs: { skip_special_tokens: true },
|
|
776
|
-
callback_function: (text) => {
|
|
777
|
-
queue.push({ type: "text-delta", port: "text", textDelta: text });
|
|
778
|
-
}
|
|
779
|
-
});
|
|
780
|
-
}
|
|
781
|
-
function createToolCallMarkupFilter(emit) {
|
|
782
|
-
const OPEN_TAG = "<tool_call>";
|
|
783
|
-
const CLOSE_TAG = "</tool_call>";
|
|
784
|
-
let state = "text";
|
|
785
|
-
let pending = "";
|
|
786
|
-
function feed(token) {
|
|
787
|
-
if (state === "tag") {
|
|
788
|
-
pending += token;
|
|
789
|
-
const closeIdx = pending.indexOf(CLOSE_TAG);
|
|
790
|
-
if (closeIdx !== -1) {
|
|
791
|
-
const afterClose = pending.slice(closeIdx + CLOSE_TAG.length);
|
|
792
|
-
pending = "";
|
|
793
|
-
state = "text";
|
|
794
|
-
if (afterClose.length > 0) {
|
|
795
|
-
feed(afterClose);
|
|
796
|
-
}
|
|
797
|
-
}
|
|
798
|
-
return;
|
|
799
|
-
}
|
|
800
|
-
const combined = pending + token;
|
|
801
|
-
const openIdx = combined.indexOf(OPEN_TAG);
|
|
802
|
-
if (openIdx !== -1) {
|
|
803
|
-
const before = combined.slice(0, openIdx);
|
|
804
|
-
if (before.length > 0) {
|
|
805
|
-
emit(before);
|
|
806
|
-
}
|
|
807
|
-
pending = "";
|
|
808
|
-
state = "tag";
|
|
809
|
-
const afterOpen = combined.slice(openIdx + OPEN_TAG.length);
|
|
810
|
-
if (afterOpen.length > 0) {
|
|
811
|
-
feed(afterOpen);
|
|
812
|
-
}
|
|
813
|
-
return;
|
|
814
|
-
}
|
|
815
|
-
let prefixLen = 0;
|
|
816
|
-
for (let len = Math.min(combined.length, OPEN_TAG.length - 1);len >= 1; len--) {
|
|
817
|
-
if (combined.endsWith(OPEN_TAG.slice(0, len))) {
|
|
818
|
-
prefixLen = len;
|
|
819
|
-
break;
|
|
820
|
-
}
|
|
1417
|
+
let entities = [];
|
|
1418
|
+
if (!Array.isArray(results)) {
|
|
1419
|
+
entities = [results];
|
|
1420
|
+
} else {
|
|
1421
|
+
entities = results;
|
|
1422
|
+
}
|
|
1423
|
+
return {
|
|
1424
|
+
entities: entities.map((entity) => ({
|
|
1425
|
+
entity: entity.entity,
|
|
1426
|
+
score: entity.score,
|
|
1427
|
+
word: entity.word
|
|
1428
|
+
}))
|
|
1429
|
+
};
|
|
1430
|
+
};
|
|
1431
|
+
|
|
1432
|
+
// src/provider-hf-transformers/common/HFT_TextQuestionAnswer.ts
|
|
1433
|
+
init_HFT_Pipeline();
|
|
1434
|
+
var HFT_TextQuestionAnswer = async (input, model, onProgress, signal) => {
|
|
1435
|
+
const isArrayInput = Array.isArray(input.question);
|
|
1436
|
+
const generateAnswer = await getPipeline(model, onProgress, {}, signal);
|
|
1437
|
+
if (isArrayInput) {
|
|
1438
|
+
const questions = input.question;
|
|
1439
|
+
const contexts = input.context;
|
|
1440
|
+
if (questions.length !== contexts.length) {
|
|
1441
|
+
throw new Error(`question[] and context[] must have the same length: ${questions.length} != ${contexts.length}`);
|
|
821
1442
|
}
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
emit(combined);
|
|
1443
|
+
const answers = [];
|
|
1444
|
+
for (let i = 0;i < questions.length; i++) {
|
|
1445
|
+
const result2 = await generateAnswer(questions[i], contexts[i], {});
|
|
1446
|
+
let answerText2 = "";
|
|
1447
|
+
if (Array.isArray(result2)) {
|
|
1448
|
+
answerText2 = result2[0]?.answer || "";
|
|
1449
|
+
} else {
|
|
1450
|
+
answerText2 = result2?.answer || "";
|
|
831
1451
|
}
|
|
832
|
-
|
|
833
|
-
}
|
|
834
|
-
}
|
|
835
|
-
function flush() {
|
|
836
|
-
if (pending.length > 0 && state === "text") {
|
|
837
|
-
emit(pending);
|
|
838
|
-
pending = "";
|
|
1452
|
+
answers.push(answerText2);
|
|
839
1453
|
}
|
|
840
|
-
|
|
841
|
-
state = "text";
|
|
1454
|
+
return { text: answers };
|
|
842
1455
|
}
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
const noopProgress = () => {};
|
|
847
|
-
const generateText = await getPipeline(model, noopProgress, {}, signal);
|
|
848
|
-
const queue = createStreamEventQueue();
|
|
849
|
-
const streamer = createStreamingTextStreamer(generateText.tokenizer, queue);
|
|
850
|
-
const pipelinePromise = generateText(input.prompt, {
|
|
851
|
-
streamer
|
|
852
|
-
}).then(() => queue.done(), (err) => queue.error(err));
|
|
853
|
-
yield* queue.iterable;
|
|
854
|
-
await pipelinePromise;
|
|
855
|
-
yield { type: "finish", data: {} };
|
|
856
|
-
};
|
|
857
|
-
var HFT_TextRewriter_Stream = async function* (input, model, signal) {
|
|
858
|
-
const noopProgress = () => {};
|
|
859
|
-
const generateText = await getPipeline(model, noopProgress, {}, signal);
|
|
860
|
-
const queue = createStreamEventQueue();
|
|
861
|
-
const streamer = createStreamingTextStreamer(generateText.tokenizer, queue);
|
|
862
|
-
const promptedText = (input.prompt ? input.prompt + `
|
|
863
|
-
` : "") + input.text;
|
|
864
|
-
const pipelinePromise = generateText(promptedText, {
|
|
865
|
-
streamer
|
|
866
|
-
}).then(() => queue.done(), (err) => queue.error(err));
|
|
867
|
-
yield* queue.iterable;
|
|
868
|
-
await pipelinePromise;
|
|
869
|
-
yield { type: "finish", data: {} };
|
|
870
|
-
};
|
|
871
|
-
var HFT_TextSummary_Stream = async function* (input, model, signal) {
|
|
872
|
-
const noopProgress = () => {};
|
|
873
|
-
const generateSummary = await getPipeline(model, noopProgress, {}, signal);
|
|
874
|
-
const queue = createStreamEventQueue();
|
|
875
|
-
const streamer = createStreamingTextStreamer(generateSummary.tokenizer, queue);
|
|
876
|
-
const pipelinePromise = generateSummary(input.text, {
|
|
1456
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1457
|
+
const streamer = createTextStreamer(generateAnswer.tokenizer, onProgress, TextStreamer);
|
|
1458
|
+
const result = await generateAnswer(input.question, input.context, {
|
|
877
1459
|
streamer
|
|
878
|
-
})
|
|
879
|
-
|
|
880
|
-
|
|
881
|
-
|
|
1460
|
+
});
|
|
1461
|
+
let answerText = "";
|
|
1462
|
+
if (Array.isArray(result)) {
|
|
1463
|
+
answerText = result[0]?.answer || "";
|
|
1464
|
+
} else {
|
|
1465
|
+
answerText = result?.answer || "";
|
|
1466
|
+
}
|
|
1467
|
+
return {
|
|
1468
|
+
text: answerText
|
|
1469
|
+
};
|
|
882
1470
|
};
|
|
883
1471
|
var HFT_TextQuestionAnswer_Stream = async function* (input, model, signal) {
|
|
884
1472
|
const noopProgress = () => {};
|
|
885
1473
|
const generateAnswer = await getPipeline(model, noopProgress, {}, signal);
|
|
1474
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
886
1475
|
const queue = createStreamEventQueue();
|
|
887
|
-
const streamer = createStreamingTextStreamer(generateAnswer.tokenizer, queue);
|
|
1476
|
+
const streamer = createStreamingTextStreamer(generateAnswer.tokenizer, queue, TextStreamer);
|
|
888
1477
|
let pipelineResult;
|
|
889
1478
|
const pipelinePromise = generateAnswer(input.question, input.context, {
|
|
890
1479
|
streamer
|
|
@@ -904,153 +1493,159 @@ var HFT_TextQuestionAnswer_Stream = async function* (input, model, signal) {
|
|
|
904
1493
|
}
|
|
905
1494
|
yield { type: "finish", data: { text: answerText } };
|
|
906
1495
|
};
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
const streamer = createStreamingTextStreamer(translate.tokenizer, queue);
|
|
912
|
-
const pipelinePromise = translate(input.text, {
|
|
913
|
-
src_lang: input.source_lang,
|
|
914
|
-
tgt_lang: input.target_lang,
|
|
915
|
-
streamer
|
|
916
|
-
}).then(() => queue.done(), (err) => queue.error(err));
|
|
917
|
-
yield* queue.iterable;
|
|
918
|
-
await pipelinePromise;
|
|
919
|
-
yield { type: "finish", data: { target_lang: input.target_lang } };
|
|
920
|
-
};
|
|
921
|
-
var HFT_CountTokens = async (input, model, onProgress, signal) => {
|
|
1496
|
+
|
|
1497
|
+
// src/provider-hf-transformers/common/HFT_TextRewriter.ts
|
|
1498
|
+
init_HFT_Pipeline();
|
|
1499
|
+
var HFT_TextRewriter = async (input, model, onProgress, signal) => {
|
|
922
1500
|
const isArrayInput = Array.isArray(input.text);
|
|
923
|
-
const
|
|
924
|
-
const
|
|
925
|
-
|
|
926
|
-
});
|
|
1501
|
+
const generateText = await getPipeline(model, onProgress, {}, signal);
|
|
1502
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1503
|
+
const streamer = isArrayInput ? undefined : createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);
|
|
927
1504
|
if (isArrayInput) {
|
|
928
1505
|
const texts = input.text;
|
|
929
|
-
const
|
|
930
|
-
|
|
1506
|
+
const promptedTexts = texts.map((t) => (input.prompt ? input.prompt + `
|
|
1507
|
+
` : "") + t);
|
|
1508
|
+
let results2 = await generateText(promptedTexts, {});
|
|
1509
|
+
const batchResults = Array.isArray(results2) ? results2 : [results2];
|
|
1510
|
+
const outputTexts = batchResults.map((r, i) => {
|
|
1511
|
+
const seqs = Array.isArray(r) ? r : [r];
|
|
1512
|
+
const text2 = extractGeneratedText(seqs[0]?.generated_text);
|
|
1513
|
+
if (text2 === promptedTexts[i]) {
|
|
1514
|
+
throw new Error("Rewriter failed to generate new text");
|
|
1515
|
+
}
|
|
1516
|
+
return text2;
|
|
1517
|
+
});
|
|
1518
|
+
return { text: outputTexts };
|
|
931
1519
|
}
|
|
932
|
-
const
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
|
|
939
|
-
return tools.map((t) => ({
|
|
940
|
-
type: "function",
|
|
941
|
-
function: {
|
|
942
|
-
name: t.name,
|
|
943
|
-
description: buildToolDescription(t),
|
|
944
|
-
parameters: t.inputSchema
|
|
945
|
-
}
|
|
946
|
-
}));
|
|
947
|
-
}
|
|
948
|
-
function parseToolCallsFromText(responseText) {
|
|
949
|
-
const toolCalls = [];
|
|
950
|
-
let callIndex = 0;
|
|
951
|
-
let cleanedText = responseText;
|
|
952
|
-
const toolCallTagRegex = /<tool_call>([\s\S]*?)<\/tool_call>/g;
|
|
953
|
-
let tagMatch;
|
|
954
|
-
while ((tagMatch = toolCallTagRegex.exec(responseText)) !== null) {
|
|
955
|
-
try {
|
|
956
|
-
const parsed = JSON.parse(tagMatch[1].trim());
|
|
957
|
-
const id = `call_${callIndex++}`;
|
|
958
|
-
toolCalls.push({
|
|
959
|
-
id,
|
|
960
|
-
name: parsed.name ?? parsed.function?.name ?? "",
|
|
961
|
-
input: parsed.arguments ?? parsed.function?.arguments ?? parsed.parameters ?? {}
|
|
962
|
-
});
|
|
963
|
-
} catch {}
|
|
1520
|
+
const promptedText = (input.prompt ? input.prompt + `
|
|
1521
|
+
` : "") + input.text;
|
|
1522
|
+
let results = await generateText(promptedText, {
|
|
1523
|
+
...streamer ? { streamer } : {}
|
|
1524
|
+
});
|
|
1525
|
+
if (!Array.isArray(results)) {
|
|
1526
|
+
results = [results];
|
|
964
1527
|
}
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
1528
|
+
const text = extractGeneratedText(results[0]?.generated_text);
|
|
1529
|
+
if (text === promptedText) {
|
|
1530
|
+
throw new Error("Rewriter failed to generate new text");
|
|
968
1531
|
}
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
|
|
972
|
-
|
|
973
|
-
|
|
974
|
-
|
|
975
|
-
|
|
976
|
-
|
|
977
|
-
|
|
978
|
-
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
982
|
-
|
|
983
|
-
|
|
984
|
-
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
|
|
1004
|
-
jsonCandidates.push({ text: source.slice(i, j), start: i, end: j });
|
|
1005
|
-
i = j;
|
|
1006
|
-
} else {
|
|
1007
|
-
break;
|
|
1008
|
-
}
|
|
1009
|
-
}
|
|
1010
|
-
})(responseText);
|
|
1011
|
-
const matchedRanges = [];
|
|
1012
|
-
for (const candidate of jsonCandidates) {
|
|
1013
|
-
try {
|
|
1014
|
-
const parsed = JSON.parse(candidate.text);
|
|
1015
|
-
if (parsed.name && (parsed.arguments !== undefined || parsed.parameters !== undefined)) {
|
|
1016
|
-
const id = `call_${callIndex++}`;
|
|
1017
|
-
toolCalls.push({
|
|
1018
|
-
id,
|
|
1019
|
-
name: parsed.name,
|
|
1020
|
-
input: parsed.arguments ?? parsed.parameters ?? {}
|
|
1021
|
-
});
|
|
1022
|
-
matchedRanges.push({ start: candidate.start, end: candidate.end });
|
|
1023
|
-
} else if (parsed.function?.name) {
|
|
1024
|
-
let functionArgs = parsed.function.arguments ?? {};
|
|
1025
|
-
if (typeof functionArgs === "string") {
|
|
1026
|
-
try {
|
|
1027
|
-
functionArgs = JSON.parse(functionArgs);
|
|
1028
|
-
} catch (innerError) {
|
|
1029
|
-
console.warn("Failed to parse tool call function.arguments as JSON", innerError);
|
|
1030
|
-
functionArgs = {};
|
|
1031
|
-
}
|
|
1032
|
-
}
|
|
1033
|
-
const id = `call_${callIndex++}`;
|
|
1034
|
-
toolCalls.push({
|
|
1035
|
-
id,
|
|
1036
|
-
name: parsed.function.name,
|
|
1037
|
-
input: functionArgs ?? {}
|
|
1038
|
-
});
|
|
1039
|
-
matchedRanges.push({ start: candidate.start, end: candidate.end });
|
|
1040
|
-
}
|
|
1041
|
-
} catch {}
|
|
1532
|
+
return {
|
|
1533
|
+
text
|
|
1534
|
+
};
|
|
1535
|
+
};
|
|
1536
|
+
var HFT_TextRewriter_Stream = async function* (input, model, signal) {
|
|
1537
|
+
const noopProgress = () => {};
|
|
1538
|
+
const generateText = await getPipeline(model, noopProgress, {}, signal);
|
|
1539
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1540
|
+
const queue = createStreamEventQueue();
|
|
1541
|
+
const streamer = createStreamingTextStreamer(generateText.tokenizer, queue, TextStreamer);
|
|
1542
|
+
const promptedText = (input.prompt ? input.prompt + `
|
|
1543
|
+
` : "") + input.text;
|
|
1544
|
+
const pipelinePromise = generateText(promptedText, {
|
|
1545
|
+
streamer
|
|
1546
|
+
}).then(() => queue.done(), (err) => queue.error(err));
|
|
1547
|
+
yield* queue.iterable;
|
|
1548
|
+
await pipelinePromise;
|
|
1549
|
+
yield { type: "finish", data: {} };
|
|
1550
|
+
};
|
|
1551
|
+
|
|
1552
|
+
// src/provider-hf-transformers/common/HFT_TextSummary.ts
|
|
1553
|
+
init_HFT_Pipeline();
|
|
1554
|
+
var HFT_TextSummary = async (input, model, onProgress, signal) => {
|
|
1555
|
+
const isArrayInput = Array.isArray(input.text);
|
|
1556
|
+
const generateSummary = await getPipeline(model, onProgress, {}, signal);
|
|
1557
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1558
|
+
const streamer = isArrayInput ? undefined : createTextStreamer(generateSummary.tokenizer, onProgress, TextStreamer);
|
|
1559
|
+
const result = await generateSummary(input.text, {
|
|
1560
|
+
...streamer ? { streamer } : {}
|
|
1561
|
+
});
|
|
1562
|
+
if (isArrayInput) {
|
|
1563
|
+
const batchResults = Array.isArray(result) ? result : [result];
|
|
1564
|
+
return {
|
|
1565
|
+
text: batchResults.map((r) => r?.summary_text || "")
|
|
1566
|
+
};
|
|
1042
1567
|
}
|
|
1043
|
-
|
|
1044
|
-
|
|
1045
|
-
|
|
1046
|
-
|
|
1047
|
-
|
|
1048
|
-
lastIndex = range.end;
|
|
1049
|
-
}
|
|
1050
|
-
result += responseText.slice(lastIndex);
|
|
1051
|
-
cleanedText = result.trim();
|
|
1568
|
+
let summaryText = "";
|
|
1569
|
+
if (Array.isArray(result)) {
|
|
1570
|
+
summaryText = result[0]?.summary_text || "";
|
|
1571
|
+
} else {
|
|
1572
|
+
summaryText = result?.summary_text || "";
|
|
1052
1573
|
}
|
|
1053
|
-
return {
|
|
1574
|
+
return {
|
|
1575
|
+
text: summaryText
|
|
1576
|
+
};
|
|
1577
|
+
};
|
|
1578
|
+
var HFT_TextSummary_Stream = async function* (input, model, signal) {
|
|
1579
|
+
const noopProgress = () => {};
|
|
1580
|
+
const generateSummary = await getPipeline(model, noopProgress, {}, signal);
|
|
1581
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1582
|
+
const queue = createStreamEventQueue();
|
|
1583
|
+
const streamer = createStreamingTextStreamer(generateSummary.tokenizer, queue, TextStreamer);
|
|
1584
|
+
const pipelinePromise = generateSummary(input.text, {
|
|
1585
|
+
streamer
|
|
1586
|
+
}).then(() => queue.done(), (err) => queue.error(err));
|
|
1587
|
+
yield* queue.iterable;
|
|
1588
|
+
await pipelinePromise;
|
|
1589
|
+
yield { type: "finish", data: {} };
|
|
1590
|
+
};
|
|
1591
|
+
|
|
1592
|
+
// src/provider-hf-transformers/common/HFT_TextTranslation.ts
|
|
1593
|
+
init_HFT_Pipeline();
|
|
1594
|
+
var HFT_TextTranslation = async (input, model, onProgress, signal) => {
|
|
1595
|
+
const isArrayInput = Array.isArray(input.text);
|
|
1596
|
+
const translate = await getPipeline(model, onProgress, {}, signal);
|
|
1597
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1598
|
+
const streamer = isArrayInput ? undefined : createTextStreamer(translate.tokenizer, onProgress, TextStreamer);
|
|
1599
|
+
const result = await translate(input.text, {
|
|
1600
|
+
src_lang: input.source_lang,
|
|
1601
|
+
tgt_lang: input.target_lang,
|
|
1602
|
+
...streamer ? { streamer } : {}
|
|
1603
|
+
});
|
|
1604
|
+
if (isArrayInput) {
|
|
1605
|
+
const batchResults = Array.isArray(result) ? result : [result];
|
|
1606
|
+
return {
|
|
1607
|
+
text: batchResults.map((r) => r?.translation_text || ""),
|
|
1608
|
+
target_lang: input.target_lang
|
|
1609
|
+
};
|
|
1610
|
+
}
|
|
1611
|
+
const translatedText = Array.isArray(result) ? result[0]?.translation_text || "" : result?.translation_text || "";
|
|
1612
|
+
return {
|
|
1613
|
+
text: translatedText,
|
|
1614
|
+
target_lang: input.target_lang
|
|
1615
|
+
};
|
|
1616
|
+
};
|
|
1617
|
+
var HFT_TextTranslation_Stream = async function* (input, model, signal) {
|
|
1618
|
+
const noopProgress = () => {};
|
|
1619
|
+
const translate = await getPipeline(model, noopProgress, {}, signal);
|
|
1620
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1621
|
+
const queue = createStreamEventQueue();
|
|
1622
|
+
const streamer = createStreamingTextStreamer(translate.tokenizer, queue, TextStreamer);
|
|
1623
|
+
const pipelinePromise = translate(input.text, {
|
|
1624
|
+
src_lang: input.source_lang,
|
|
1625
|
+
tgt_lang: input.target_lang,
|
|
1626
|
+
streamer
|
|
1627
|
+
}).then(() => queue.done(), (err) => queue.error(err));
|
|
1628
|
+
yield* queue.iterable;
|
|
1629
|
+
await pipelinePromise;
|
|
1630
|
+
yield { type: "finish", data: { target_lang: input.target_lang } };
|
|
1631
|
+
};
|
|
1632
|
+
|
|
1633
|
+
// src/provider-hf-transformers/common/HFT_ToolCalling.ts
|
|
1634
|
+
init_HFT_Pipeline();
|
|
1635
|
+
import {
|
|
1636
|
+
buildToolDescription,
|
|
1637
|
+
filterValidToolCalls,
|
|
1638
|
+
toTextFlatMessages
|
|
1639
|
+
} from "@workglow/ai/worker";
|
|
1640
|
+
function mapHFTTools(tools) {
|
|
1641
|
+
return tools.map((t) => ({
|
|
1642
|
+
type: "function",
|
|
1643
|
+
function: {
|
|
1644
|
+
name: t.name,
|
|
1645
|
+
description: buildToolDescription(t),
|
|
1646
|
+
parameters: t.inputSchema
|
|
1647
|
+
}
|
|
1648
|
+
}));
|
|
1054
1649
|
}
|
|
1055
1650
|
function resolveHFTToolsAndMessages(input, messages) {
|
|
1056
1651
|
if (input.toolChoice === "none") {
|
|
@@ -1077,6 +1672,7 @@ ${requiredInstruction}` };
|
|
|
1077
1672
|
var HFT_ToolCalling = async (input, model, onProgress, signal) => {
|
|
1078
1673
|
const isArrayInput = Array.isArray(input.prompt);
|
|
1079
1674
|
const generateText = await getPipeline(model, onProgress, {}, signal);
|
|
1675
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1080
1676
|
if (isArrayInput) {
|
|
1081
1677
|
const prompts = input.prompt;
|
|
1082
1678
|
const texts = [];
|
|
@@ -1090,7 +1686,7 @@ var HFT_ToolCalling = async (input, model, onProgress, signal) => {
|
|
|
1090
1686
|
tokenize: false,
|
|
1091
1687
|
add_generation_prompt: true
|
|
1092
1688
|
});
|
|
1093
|
-
const streamer2 = createTextStreamer(generateText.tokenizer, onProgress);
|
|
1689
|
+
const streamer2 = createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);
|
|
1094
1690
|
let results2 = await generateText(prompt2, {
|
|
1095
1691
|
max_new_tokens: input.maxTokens ?? 1024,
|
|
1096
1692
|
temperature: input.temperature ?? undefined,
|
|
@@ -1114,7 +1710,7 @@ var HFT_ToolCalling = async (input, model, onProgress, signal) => {
|
|
|
1114
1710
|
tokenize: false,
|
|
1115
1711
|
add_generation_prompt: true
|
|
1116
1712
|
});
|
|
1117
|
-
const streamer = createTextStreamer(generateText.tokenizer, onProgress);
|
|
1713
|
+
const streamer = createTextStreamer(generateText.tokenizer, onProgress, TextStreamer);
|
|
1118
1714
|
let results = await generateText(prompt, {
|
|
1119
1715
|
max_new_tokens: input.maxTokens ?? 1024,
|
|
1120
1716
|
temperature: input.temperature ?? undefined,
|
|
@@ -1134,6 +1730,7 @@ var HFT_ToolCalling = async (input, model, onProgress, signal) => {
|
|
|
1134
1730
|
var HFT_ToolCalling_Stream = async function* (input, model, signal) {
|
|
1135
1731
|
const noopProgress = () => {};
|
|
1136
1732
|
const generateText = await getPipeline(model, noopProgress, {}, signal);
|
|
1733
|
+
const { TextStreamer } = await loadTransformersSDK();
|
|
1137
1734
|
const messages = toTextFlatMessages(input);
|
|
1138
1735
|
const tools = resolveHFTToolsAndMessages(input, messages);
|
|
1139
1736
|
const prompt = generateText.tokenizer.apply_chat_template(messages, {
|
|
@@ -1143,7 +1740,7 @@ var HFT_ToolCalling_Stream = async function* (input, model, signal) {
|
|
|
1143
1740
|
});
|
|
1144
1741
|
const innerQueue = createStreamEventQueue();
|
|
1145
1742
|
const outerQueue = createStreamEventQueue();
|
|
1146
|
-
const streamer = createStreamingTextStreamer(generateText.tokenizer, innerQueue);
|
|
1743
|
+
const streamer = createStreamingTextStreamer(generateText.tokenizer, innerQueue, TextStreamer);
|
|
1147
1744
|
let fullText = "";
|
|
1148
1745
|
const filter = createToolCallMarkupFilter((text) => {
|
|
1149
1746
|
outerQueue.push({ type: "text-delta", port: "text", textDelta: text });
|
|
@@ -1188,147 +1785,61 @@ var HFT_ToolCalling_Stream = async function* (input, model, signal) {
|
|
|
1188
1785
|
data: { text: cleanedText, toolCalls: validToolCalls }
|
|
1189
1786
|
};
|
|
1190
1787
|
};
|
|
1191
|
-
var HFT_ModelInfo = async (input, model) => {
|
|
1192
|
-
const logger = getLogger();
|
|
1193
|
-
const { ModelRegistry } = await loadTransformersSDK();
|
|
1194
|
-
const timerLabel = `hft:ModelInfo:${model?.provider_config.model_path}`;
|
|
1195
|
-
logger.time(timerLabel, { model: model?.provider_config.model_path });
|
|
1196
|
-
const detail = input.detail;
|
|
1197
|
-
const is_loaded = pipelines.has(getPipelineCacheKey(model));
|
|
1198
|
-
const { pipeline: pipelineType, model_path, dtype, device } = model.provider_config;
|
|
1199
|
-
const cacheStatus = await ModelRegistry.is_pipeline_cached_files(pipelineType, model_path, {
|
|
1200
|
-
...dtype ? { dtype } : {}
|
|
1201
|
-
});
|
|
1202
|
-
logger.debug("is_pipeline_cached", {
|
|
1203
|
-
input: [
|
|
1204
|
-
pipelineType,
|
|
1205
|
-
model_path,
|
|
1206
|
-
{
|
|
1207
|
-
...dtype ? { dtype } : {}
|
|
1208
|
-
}
|
|
1209
|
-
],
|
|
1210
|
-
result: cacheStatus
|
|
1211
|
-
});
|
|
1212
|
-
const is_cached = is_loaded || cacheStatus.allCached;
|
|
1213
|
-
let file_sizes = null;
|
|
1214
|
-
if (detail === "files" && cacheStatus.files.length > 0) {
|
|
1215
|
-
const sizes = {};
|
|
1216
|
-
for (const { file } of cacheStatus.files) {
|
|
1217
|
-
sizes[file] = 0;
|
|
1218
|
-
}
|
|
1219
|
-
file_sizes = sizes;
|
|
1220
|
-
} else if (detail === "files_with_metadata" && cacheStatus.files.length > 0) {
|
|
1221
|
-
const sizes = {};
|
|
1222
|
-
await Promise.all(cacheStatus.files.map(async ({ file }) => {
|
|
1223
|
-
const metadata = await ModelRegistry.get_file_metadata(model_path, file);
|
|
1224
|
-
if (metadata.exists && metadata.size !== undefined) {
|
|
1225
|
-
sizes[file] = metadata.size;
|
|
1226
|
-
}
|
|
1227
|
-
}));
|
|
1228
|
-
if (Object.keys(sizes).length > 0) {
|
|
1229
|
-
file_sizes = sizes;
|
|
1230
|
-
}
|
|
1231
|
-
}
|
|
1232
|
-
logger.timeEnd(timerLabel, { model: model?.provider_config.model_path });
|
|
1233
|
-
return {
|
|
1234
|
-
model: input.model,
|
|
1235
|
-
is_local: true,
|
|
1236
|
-
is_remote: false,
|
|
1237
|
-
supports_browser: true,
|
|
1238
|
-
supports_node: true,
|
|
1239
|
-
is_cached,
|
|
1240
|
-
is_loaded,
|
|
1241
|
-
file_sizes
|
|
1242
|
-
};
|
|
1243
|
-
};
|
|
1244
|
-
function buildStructuredGenerationPrompt(input) {
|
|
1245
|
-
const schemaStr = JSON.stringify(input.outputSchema, null, 2);
|
|
1246
|
-
return `${input.prompt}
|
|
1247
1788
|
|
|
1248
|
-
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
|
|
1789
|
+
// src/provider-hf-transformers/common/HFT_Unload.ts
|
|
1790
|
+
init_HFT_Pipeline();
|
|
1791
|
+
function hasBrowserCacheStorage() {
|
|
1792
|
+
return typeof globalThis !== "undefined" && "caches" in globalThis && typeof globalThis.caches?.open === "function";
|
|
1252
1793
|
}
|
|
1253
|
-
function
|
|
1254
|
-
|
|
1255
|
-
|
|
1256
|
-
|
|
1257
|
-
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1794
|
+
async function deleteModelCacheFromBrowser(model_path) {
|
|
1795
|
+
const cachesApi = globalThis.caches;
|
|
1796
|
+
const cache = await cachesApi.open(HTF_CACHE_NAME);
|
|
1797
|
+
const keys = await cache.keys();
|
|
1798
|
+
const prefix = `/${model_path}/`;
|
|
1799
|
+
const requestsToDelete = [];
|
|
1800
|
+
for (const request of keys) {
|
|
1801
|
+
const url = new URL(request.url);
|
|
1802
|
+
if (url.pathname.startsWith(prefix)) {
|
|
1803
|
+
requestsToDelete.push(request);
|
|
1804
|
+
}
|
|
1805
|
+
}
|
|
1806
|
+
for (const request of requestsToDelete) {
|
|
1807
|
+
try {
|
|
1808
|
+
const deleted = await cache.delete(request);
|
|
1809
|
+
if (!deleted) {
|
|
1810
|
+
const deletedByUrl = await cache.delete(request.url);
|
|
1811
|
+
if (!deletedByUrl) {}
|
|
1263
1812
|
}
|
|
1813
|
+
} catch (error) {
|
|
1814
|
+
console.error(`Failed to delete cache entry: ${request.url}`, error);
|
|
1264
1815
|
}
|
|
1265
|
-
return {};
|
|
1266
1816
|
}
|
|
1267
1817
|
}
|
|
1268
|
-
|
|
1269
|
-
const
|
|
1270
|
-
const
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
tokenize: false,
|
|
1274
|
-
add_generation_prompt: true
|
|
1275
|
-
});
|
|
1276
|
-
const streamer = createTextStreamer(generateText.tokenizer, onProgress);
|
|
1277
|
-
let results = await generateText(formattedPrompt, {
|
|
1278
|
-
max_new_tokens: input.maxTokens ?? 1024,
|
|
1279
|
-
temperature: input.temperature ?? undefined,
|
|
1280
|
-
return_full_text: false,
|
|
1281
|
-
streamer
|
|
1818
|
+
async function deleteModelCacheFromFilesystem(model) {
|
|
1819
|
+
const { ModelRegistry } = await loadTransformersSDK();
|
|
1820
|
+
const { pipeline: pipelineType, model_path, dtype } = model.provider_config;
|
|
1821
|
+
await ModelRegistry.clear_pipeline_cache(pipelineType, model_path, {
|
|
1822
|
+
...dtype ? { dtype } : {}
|
|
1282
1823
|
});
|
|
1283
|
-
|
|
1284
|
-
|
|
1824
|
+
}
|
|
1825
|
+
var HFT_Unload = async (input, model, onProgress, _signal) => {
|
|
1826
|
+
const cacheKey = getPipelineCacheKey(model);
|
|
1827
|
+
if (removeCachedPipeline(cacheKey)) {
|
|
1828
|
+
onProgress(50, "Pipeline removed from memory");
|
|
1285
1829
|
}
|
|
1286
|
-
const
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
}
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
const formattedPrompt = generateText.tokenizer.apply_chat_template(messages, {
|
|
1296
|
-
tokenize: false,
|
|
1297
|
-
add_generation_prompt: true
|
|
1298
|
-
});
|
|
1299
|
-
const queue = createStreamEventQueue();
|
|
1300
|
-
const streamer = createStreamingTextStreamer(generateText.tokenizer, queue);
|
|
1301
|
-
let fullText = "";
|
|
1302
|
-
const originalPush = queue.push;
|
|
1303
|
-
queue.push = (event) => {
|
|
1304
|
-
if (event.type === "text-delta" && "textDelta" in event) {
|
|
1305
|
-
fullText += event.textDelta;
|
|
1306
|
-
const match = fullText.match(/\{[\s\S]*/);
|
|
1307
|
-
if (match) {
|
|
1308
|
-
const partial = parsePartialJson(match[0]);
|
|
1309
|
-
if (partial !== undefined) {
|
|
1310
|
-
originalPush({
|
|
1311
|
-
type: "object-delta",
|
|
1312
|
-
port: "object",
|
|
1313
|
-
objectDelta: partial
|
|
1314
|
-
});
|
|
1315
|
-
return;
|
|
1316
|
-
}
|
|
1317
|
-
}
|
|
1318
|
-
}
|
|
1319
|
-
originalPush(event);
|
|
1830
|
+
const model_path = model.provider_config.model_path;
|
|
1831
|
+
if (hasBrowserCacheStorage()) {
|
|
1832
|
+
await deleteModelCacheFromBrowser(model_path);
|
|
1833
|
+
} else {
|
|
1834
|
+
await deleteModelCacheFromFilesystem(model);
|
|
1835
|
+
}
|
|
1836
|
+
onProgress(100, "Model cache deleted");
|
|
1837
|
+
return {
|
|
1838
|
+
model: input.model
|
|
1320
1839
|
};
|
|
1321
|
-
const pipelinePromise = generateText(formattedPrompt, {
|
|
1322
|
-
max_new_tokens: input.maxTokens ?? 1024,
|
|
1323
|
-
temperature: input.temperature ?? undefined,
|
|
1324
|
-
return_full_text: false,
|
|
1325
|
-
streamer
|
|
1326
|
-
}).then(() => queue.done(), (err) => queue.error(err));
|
|
1327
|
-
yield* queue.iterable;
|
|
1328
|
-
await pipelinePromise;
|
|
1329
|
-
const object = extractJsonFromText(fullText);
|
|
1330
|
-
yield { type: "finish", data: { object } };
|
|
1331
1840
|
};
|
|
1841
|
+
|
|
1842
|
+
// src/provider-hf-transformers/common/HFT_JobRunFns.ts
|
|
1332
1843
|
var HFT_TASKS = {
|
|
1333
1844
|
DownloadModelTask: HFT_Download,
|
|
1334
1845
|
UnloadModelTask: HFT_Unload,
|
|
@@ -1351,7 +1862,8 @@ var HFT_TASKS = {
|
|
|
1351
1862
|
ImageClassificationTask: HFT_ImageClassification,
|
|
1352
1863
|
ObjectDetectionTask: HFT_ObjectDetection,
|
|
1353
1864
|
ToolCallingTask: HFT_ToolCalling,
|
|
1354
|
-
StructuredGenerationTask: HFT_StructuredGeneration
|
|
1865
|
+
StructuredGenerationTask: HFT_StructuredGeneration,
|
|
1866
|
+
ModelSearchTask: HFT_ModelSearch
|
|
1355
1867
|
};
|
|
1356
1868
|
var HFT_STREAM_TASKS = {
|
|
1357
1869
|
TextGenerationTask: HFT_TextGeneration_Stream,
|
|
@@ -1366,6 +1878,130 @@ var HFT_REACTIVE_TASKS = {
|
|
|
1366
1878
|
CountTokensTask: HFT_CountTokens_Reactive
|
|
1367
1879
|
};
|
|
1368
1880
|
|
|
1369
|
-
|
|
1881
|
+
// src/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.ts
|
|
1882
|
+
import {
|
|
1883
|
+
QueuedAiProvider
|
|
1884
|
+
} from "@workglow/ai";
|
|
1885
|
+
class HuggingFaceTransformersQueuedProvider extends QueuedAiProvider {
|
|
1886
|
+
name = HF_TRANSFORMERS_ONNX;
|
|
1887
|
+
isLocal = true;
|
|
1888
|
+
supportsBrowser = true;
|
|
1889
|
+
taskTypes = [
|
|
1890
|
+
"DownloadModelTask",
|
|
1891
|
+
"UnloadModelTask",
|
|
1892
|
+
"ModelInfoTask",
|
|
1893
|
+
"CountTokensTask",
|
|
1894
|
+
"TextEmbeddingTask",
|
|
1895
|
+
"TextGenerationTask",
|
|
1896
|
+
"TextQuestionAnswerTask",
|
|
1897
|
+
"TextLanguageDetectionTask",
|
|
1898
|
+
"TextClassificationTask",
|
|
1899
|
+
"TextFillMaskTask",
|
|
1900
|
+
"TextNamedEntityRecognitionTask",
|
|
1901
|
+
"TextRewriterTask",
|
|
1902
|
+
"TextSummaryTask",
|
|
1903
|
+
"TextTranslationTask",
|
|
1904
|
+
"ImageSegmentationTask",
|
|
1905
|
+
"ImageToTextTask",
|
|
1906
|
+
"BackgroundRemovalTask",
|
|
1907
|
+
"ImageEmbeddingTask",
|
|
1908
|
+
"ImageClassificationTask",
|
|
1909
|
+
"ObjectDetectionTask",
|
|
1910
|
+
"ToolCallingTask",
|
|
1911
|
+
"ModelSearchTask"
|
|
1912
|
+
];
|
|
1913
|
+
constructor(tasks, streamTasks, reactiveTasks) {
|
|
1914
|
+
super(tasks, streamTasks, reactiveTasks);
|
|
1915
|
+
}
|
|
1916
|
+
}
|
|
1917
|
+
|
|
1918
|
+
// src/provider-hf-transformers/registerHuggingFaceTransformersInline.ts
|
|
1919
|
+
init_HFT_Pipeline();
|
|
1920
|
+
async function registerHuggingFaceTransformersInline(options) {
|
|
1921
|
+
const { env } = await loadTransformersSDK();
|
|
1922
|
+
env.backends.onnx.wasm.proxy = true;
|
|
1923
|
+
const provider = new HuggingFaceTransformersQueuedProvider(HFT_TASKS, HFT_STREAM_TASKS, HFT_REACTIVE_TASKS);
|
|
1924
|
+
const baseDispose = provider.dispose.bind(provider);
|
|
1925
|
+
provider.dispose = async () => {
|
|
1926
|
+
await clearHftInlinePipelineCache();
|
|
1927
|
+
await baseDispose();
|
|
1928
|
+
};
|
|
1929
|
+
await provider.register(options ?? {});
|
|
1930
|
+
}
|
|
1931
|
+
// src/provider-hf-transformers/registerHuggingFaceTransformersWorker.ts
|
|
1932
|
+
import { getLogger as getLogger7, globalServiceRegistry, WORKER_SERVER } from "@workglow/util/worker";
|
|
1933
|
+
|
|
1934
|
+
// src/provider-hf-transformers/HuggingFaceTransformersProvider.ts
|
|
1935
|
+
import {
|
|
1936
|
+
AiProvider
|
|
1937
|
+
} from "@workglow/ai/worker";
|
|
1938
|
+
class HuggingFaceTransformersProvider extends AiProvider {
|
|
1939
|
+
name = HF_TRANSFORMERS_ONNX;
|
|
1940
|
+
isLocal = true;
|
|
1941
|
+
supportsBrowser = true;
|
|
1942
|
+
taskTypes = [
|
|
1943
|
+
"DownloadModelTask",
|
|
1944
|
+
"UnloadModelTask",
|
|
1945
|
+
"ModelInfoTask",
|
|
1946
|
+
"CountTokensTask",
|
|
1947
|
+
"TextEmbeddingTask",
|
|
1948
|
+
"TextGenerationTask",
|
|
1949
|
+
"TextQuestionAnswerTask",
|
|
1950
|
+
"TextLanguageDetectionTask",
|
|
1951
|
+
"TextClassificationTask",
|
|
1952
|
+
"TextFillMaskTask",
|
|
1953
|
+
"TextNamedEntityRecognitionTask",
|
|
1954
|
+
"TextRewriterTask",
|
|
1955
|
+
"TextSummaryTask",
|
|
1956
|
+
"TextTranslationTask",
|
|
1957
|
+
"ImageSegmentationTask",
|
|
1958
|
+
"ImageToTextTask",
|
|
1959
|
+
"BackgroundRemovalTask",
|
|
1960
|
+
"ImageEmbeddingTask",
|
|
1961
|
+
"ImageClassificationTask",
|
|
1962
|
+
"ObjectDetectionTask",
|
|
1963
|
+
"ToolCallingTask",
|
|
1964
|
+
"ModelSearchTask"
|
|
1965
|
+
];
|
|
1966
|
+
constructor(tasks, streamTasks, reactiveTasks) {
|
|
1967
|
+
super(tasks, streamTasks, reactiveTasks);
|
|
1968
|
+
}
|
|
1969
|
+
}
|
|
1970
|
+
|
|
1971
|
+
// src/provider-hf-transformers/registerHuggingFaceTransformersWorker.ts
|
|
1972
|
+
init_HFT_Pipeline();
|
|
1973
|
+
async function registerHuggingFaceTransformersWorker() {
|
|
1974
|
+
const { env } = await loadTransformersSDK();
|
|
1975
|
+
env.backends.onnx.wasm.proxy = true;
|
|
1976
|
+
const workerServer = globalServiceRegistry.get(WORKER_SERVER);
|
|
1977
|
+
new HuggingFaceTransformersProvider(HFT_TASKS, HFT_STREAM_TASKS, HFT_REACTIVE_TASKS).registerOnWorkerServer(workerServer);
|
|
1978
|
+
workerServer.sendReady();
|
|
1979
|
+
getLogger7().info("HuggingFaceTransformers worker job run functions registered");
|
|
1980
|
+
}
|
|
1981
|
+
|
|
1982
|
+
// src/provider-hf-transformers/runtime.ts
|
|
1983
|
+
init_HFT_Pipeline();
|
|
1984
|
+
export {
|
|
1985
|
+
setHftCacheDir,
|
|
1986
|
+
removeCachedPipeline,
|
|
1987
|
+
registerHuggingFaceTransformersWorker,
|
|
1988
|
+
registerHuggingFaceTransformersInline,
|
|
1989
|
+
parseToolCallsFromText,
|
|
1990
|
+
parseOnnxQuantizations,
|
|
1991
|
+
loadTransformersSDK,
|
|
1992
|
+
hasCachedPipeline,
|
|
1993
|
+
getPipelineCacheKey,
|
|
1994
|
+
getPipeline,
|
|
1995
|
+
createToolCallMarkupFilter,
|
|
1996
|
+
clearPipelineCache,
|
|
1997
|
+
QuantizationDataType,
|
|
1998
|
+
PipelineUseCase,
|
|
1999
|
+
ONNX_QUANTIZATION_SUFFIX_MAPPING,
|
|
2000
|
+
HfTransformersOnnxModelSchema,
|
|
2001
|
+
HfTransformersOnnxModelRecordSchema,
|
|
2002
|
+
HfTransformersOnnxModelConfigSchema,
|
|
2003
|
+
HTF_CACHE_NAME,
|
|
2004
|
+
HF_TRANSFORMERS_ONNX
|
|
2005
|
+
};
|
|
1370
2006
|
|
|
1371
|
-
//# debugId=
|
|
2007
|
+
//# debugId=8397F27EBE0B0C0B64756E2164756E21
|