@workglow/ai-provider 0.0.121 → 0.0.122
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only, and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +30 -67
- package/dist/common/HfModelSearch.d.ts +32 -0
- package/dist/common/HfModelSearch.d.ts.map +1 -0
- package/dist/common/PipelineTaskMapping.d.ts +12 -0
- package/dist/common/PipelineTaskMapping.d.ts.map +1 -0
- package/dist/{anthropic → provider-anthropic}/AnthropicProvider.d.ts +2 -14
- package/dist/provider-anthropic/AnthropicProvider.d.ts.map +1 -0
- package/dist/provider-anthropic/AnthropicQueuedProvider.d.ts +16 -0
- package/dist/provider-anthropic/AnthropicQueuedProvider.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_Client.d.ts +13 -0
- package/dist/provider-anthropic/common/Anthropic_Client.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_Constants.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_CountTokens.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_CountTokens.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_JobRunFns.d.ts +12 -0
- package/dist/provider-anthropic/common/Anthropic_JobRunFns.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_ModelInfo.d.ts +9 -0
- package/dist/provider-anthropic/common/Anthropic_ModelInfo.d.ts.map +1 -0
- package/dist/{anthropic → provider-anthropic}/common/Anthropic_ModelSchema.d.ts +31 -31
- package/dist/provider-anthropic/common/Anthropic_ModelSchema.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_ModelSearch.d.ts +8 -0
- package/dist/provider-anthropic/common/Anthropic_ModelSearch.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_StructuredGeneration.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_TextGeneration.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_TextGeneration.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_TextRewriter.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_TextRewriter.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_TextSummary.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_TextSummary.d.ts.map +1 -0
- package/dist/provider-anthropic/common/Anthropic_ToolCalling.d.ts +10 -0
- package/dist/provider-anthropic/common/Anthropic_ToolCalling.d.ts.map +1 -0
- package/dist/{anthropic → provider-anthropic}/index.d.ts +1 -3
- package/dist/provider-anthropic/index.d.ts.map +1 -0
- package/dist/{index-60ev6k93.js → provider-anthropic/index.js} +43 -11
- package/dist/provider-anthropic/index.js.map +13 -0
- package/dist/provider-anthropic/registerAnthropic.d.ts +10 -0
- package/dist/provider-anthropic/registerAnthropic.d.ts.map +1 -0
- package/dist/provider-anthropic/registerAnthropicInline.d.ts +8 -0
- package/dist/provider-anthropic/registerAnthropicInline.d.ts.map +1 -0
- package/dist/provider-anthropic/registerAnthropicWorker.d.ts +7 -0
- package/dist/provider-anthropic/registerAnthropicWorker.d.ts.map +1 -0
- package/dist/provider-anthropic/runtime.d.ts +16 -0
- package/dist/provider-anthropic/runtime.d.ts.map +1 -0
- package/dist/{anthropic/index.js → provider-anthropic/runtime.js} +291 -177
- package/dist/provider-anthropic/runtime.js.map +24 -0
- package/dist/{web-browser → provider-chrome}/WebBrowserProvider.d.ts +2 -15
- package/dist/provider-chrome/WebBrowserProvider.d.ts.map +1 -0
- package/dist/provider-chrome/WebBrowserQueuedProvider.d.ts +16 -0
- package/dist/provider-chrome/WebBrowserQueuedProvider.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_ChromeHelpers.d.ts +31 -0
- package/dist/provider-chrome/common/WebBrowser_ChromeHelpers.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_Constants.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_JobRunFns.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_JobRunFns.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_ModelInfo.d.ts +9 -0
- package/dist/provider-chrome/common/WebBrowser_ModelInfo.d.ts.map +1 -0
- package/dist/{web-browser → provider-chrome}/common/WebBrowser_ModelSchema.d.ts +31 -31
- package/dist/provider-chrome/common/WebBrowser_ModelSchema.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_ModelSearch.d.ts +8 -0
- package/dist/provider-chrome/common/WebBrowser_ModelSearch.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextGeneration.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextGeneration.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextLanguageDetection.d.ts +9 -0
- package/dist/provider-chrome/common/WebBrowser_TextLanguageDetection.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextRewriter.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextRewriter.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextSummary.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextSummary.d.ts.map +1 -0
- package/dist/provider-chrome/common/WebBrowser_TextTranslation.d.ts +10 -0
- package/dist/provider-chrome/common/WebBrowser_TextTranslation.d.ts.map +1 -0
- package/dist/{web-browser → provider-chrome}/index.d.ts +1 -3
- package/dist/provider-chrome/index.d.ts.map +1 -0
- package/dist/provider-chrome/index.js +132 -0
- package/dist/provider-chrome/index.js.map +13 -0
- package/dist/provider-chrome/registerWebBrowser.d.ts +10 -0
- package/dist/provider-chrome/registerWebBrowser.d.ts.map +1 -0
- package/dist/provider-chrome/registerWebBrowserInline.d.ts +8 -0
- package/dist/provider-chrome/registerWebBrowserInline.d.ts.map +1 -0
- package/dist/provider-chrome/registerWebBrowserWorker.d.ts +7 -0
- package/dist/provider-chrome/registerWebBrowserWorker.d.ts.map +1 -0
- package/dist/provider-chrome/runtime.d.ts +14 -0
- package/dist/provider-chrome/runtime.d.ts.map +1 -0
- package/dist/{web-browser/index.js → provider-chrome/runtime.js} +260 -235
- package/dist/provider-chrome/runtime.js.map +23 -0
- package/dist/{google-gemini → provider-gemini}/GoogleGeminiProvider.d.ts +2 -15
- package/dist/provider-gemini/GoogleGeminiProvider.d.ts.map +1 -0
- package/dist/provider-gemini/GoogleGeminiQueuedProvider.d.ts +16 -0
- package/dist/provider-gemini/GoogleGeminiQueuedProvider.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_Client.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_Client.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_Constants.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_CountTokens.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_CountTokens.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_JobRunFns.d.ts +13 -0
- package/dist/provider-gemini/common/Gemini_JobRunFns.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_ModelInfo.d.ts +9 -0
- package/dist/provider-gemini/common/Gemini_ModelInfo.d.ts.map +1 -0
- package/dist/{google-gemini → provider-gemini}/common/Gemini_ModelSchema.d.ts +31 -31
- package/dist/provider-gemini/common/Gemini_ModelSchema.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_ModelSearch.d.ts +8 -0
- package/dist/provider-gemini/common/Gemini_ModelSearch.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_Schema.d.ts +11 -0
- package/dist/provider-gemini/common/Gemini_Schema.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_StructuredGeneration.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextEmbedding.d.ts +9 -0
- package/dist/provider-gemini/common/Gemini_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextGeneration.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_TextGeneration.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextRewriter.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_TextRewriter.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_TextSummary.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_TextSummary.d.ts.map +1 -0
- package/dist/provider-gemini/common/Gemini_ToolCalling.d.ts +10 -0
- package/dist/provider-gemini/common/Gemini_ToolCalling.d.ts.map +1 -0
- package/dist/{google-gemini → provider-gemini}/index.d.ts +1 -3
- package/dist/provider-gemini/index.d.ts.map +1 -0
- package/dist/{index-8651nz8y.js → provider-gemini/index.js} +43 -11
- package/dist/provider-gemini/index.js.map +13 -0
- package/dist/provider-gemini/registerGemini.d.ts +10 -0
- package/dist/provider-gemini/registerGemini.d.ts.map +1 -0
- package/dist/provider-gemini/registerGeminiInline.d.ts +8 -0
- package/dist/provider-gemini/registerGeminiInline.d.ts.map +1 -0
- package/dist/{anthropic/Anthropic_Worker.d.ts → provider-gemini/registerGeminiWorker.d.ts} +2 -2
- package/dist/provider-gemini/registerGeminiWorker.d.ts.map +1 -0
- package/dist/provider-gemini/runtime.d.ts +16 -0
- package/dist/provider-gemini/runtime.d.ts.map +1 -0
- package/dist/{google-gemini/index.js → provider-gemini/runtime.js} +281 -173
- package/dist/provider-gemini/runtime.js.map +26 -0
- package/dist/provider-hf-inference/HfInferenceProvider.d.ts +2 -19
- package/dist/provider-hf-inference/HfInferenceProvider.d.ts.map +1 -1
- package/dist/provider-hf-inference/HfInferenceQueuedProvider.d.ts +16 -0
- package/dist/provider-hf-inference/HfInferenceQueuedProvider.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_Client.d.ts +12 -0
- package/dist/provider-hf-inference/common/HFI_Client.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_JobRunFns.d.ts +2 -11
- package/dist/provider-hf-inference/common/HFI_JobRunFns.d.ts.map +1 -1
- package/dist/provider-hf-inference/common/HFI_ModelInfo.d.ts +9 -0
- package/dist/provider-hf-inference/common/HFI_ModelInfo.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_ModelSchema.d.ts +31 -31
- package/dist/provider-hf-inference/common/HFI_ModelSchema.d.ts.map +1 -1
- package/dist/provider-hf-inference/common/HFI_ModelSearch.d.ts +8 -0
- package/dist/provider-hf-inference/common/HFI_ModelSearch.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextEmbedding.d.ts +9 -0
- package/dist/provider-hf-inference/common/HFI_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextGeneration.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_TextGeneration.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextRewriter.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_TextRewriter.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_TextSummary.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_TextSummary.d.ts.map +1 -0
- package/dist/provider-hf-inference/common/HFI_ToolCalling.d.ts +10 -0
- package/dist/provider-hf-inference/common/HFI_ToolCalling.d.ts.map +1 -0
- package/dist/provider-hf-inference/index.d.ts +1 -3
- package/dist/provider-hf-inference/index.d.ts.map +1 -1
- package/dist/provider-hf-inference/index.js +98 -411
- package/dist/provider-hf-inference/index.js.map +7 -5
- package/dist/provider-hf-inference/registerHfInference.d.ts +10 -0
- package/dist/provider-hf-inference/registerHfInference.d.ts.map +1 -0
- package/dist/provider-hf-inference/registerHfInferenceInline.d.ts +8 -0
- package/dist/provider-hf-inference/registerHfInferenceInline.d.ts.map +1 -0
- package/dist/provider-hf-inference/registerHfInferenceWorker.d.ts +7 -0
- package/dist/provider-hf-inference/registerHfInferenceWorker.d.ts.map +1 -0
- package/dist/provider-hf-inference/runtime.d.ts +16 -0
- package/dist/provider-hf-inference/runtime.d.ts.map +1 -0
- package/dist/provider-hf-inference/runtime.js +592 -0
- package/dist/provider-hf-inference/runtime.js.map +25 -0
- package/dist/{hf-transformers → provider-hf-transformers}/HuggingFaceTransformersProvider.d.ts +2 -21
- package/dist/provider-hf-transformers/HuggingFaceTransformersProvider.d.ts.map +1 -0
- package/dist/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.d.ts +16 -0
- package/dist/provider-hf-transformers/HuggingFaceTransformersQueuedProvider.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_BackgroundRemoval.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_BackgroundRemoval.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_Constants.d.ts +25 -23
- package/dist/provider-hf-transformers/common/HFT_Constants.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_CountTokens.d.ts +10 -0
- package/dist/provider-hf-transformers/common/HFT_CountTokens.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Download.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_Download.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageClassification.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_ImageClassification.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageEmbedding.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_ImageEmbedding.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageHelpers.d.ts +11 -0
- package/dist/provider-hf-transformers/common/HFT_ImageHelpers.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageSegmentation.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_ImageSegmentation.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ImageToText.d.ts +12 -0
- package/dist/provider-hf-transformers/common/HFT_ImageToText.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_InlineLifecycle.d.ts +7 -0
- package/dist/provider-hf-transformers/common/HFT_InlineLifecycle.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_JobRunFns.d.ts +673 -789
- package/dist/provider-hf-transformers/common/HFT_JobRunFns.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ModelInfo.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_ModelInfo.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/common/HFT_ModelSchema.d.ts +37 -37
- package/dist/provider-hf-transformers/common/HFT_ModelSchema.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ModelSearch.d.ts +8 -0
- package/dist/provider-hf-transformers/common/HFT_ModelSearch.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ObjectDetection.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_ObjectDetection.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_OnnxDtypes.d.ts +23 -0
- package/dist/provider-hf-transformers/common/HFT_OnnxDtypes.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Pipeline.d.ts +32 -0
- package/dist/provider-hf-transformers/common/HFT_Pipeline.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Streaming.d.ts +24 -0
- package/dist/provider-hf-transformers/common/HFT_Streaming.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_StructuredGeneration.d.ts +10 -0
- package/dist/provider-hf-transformers/common/HFT_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextClassification.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextClassification.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextEmbedding.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextFillMask.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextFillMask.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextGeneration.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextGeneration.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextLanguageDetection.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextLanguageDetection.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.d.ts +9 -0
- package/dist/provider-hf-transformers/common/HFT_TextNamedEntityRecognition.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextOutput.d.ts +8 -0
- package/dist/provider-hf-transformers/common/HFT_TextOutput.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextQuestionAnswer.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextQuestionAnswer.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextRewriter.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextRewriter.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextSummary.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextSummary.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_TextTranslation.d.ts +14 -0
- package/dist/provider-hf-transformers/common/HFT_TextTranslation.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ToolCalling.d.ts +10 -0
- package/dist/provider-hf-transformers/common/HFT_ToolCalling.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_ToolMarkup.d.ts +40 -0
- package/dist/provider-hf-transformers/common/HFT_ToolMarkup.d.ts.map +1 -0
- package/dist/provider-hf-transformers/common/HFT_Unload.d.ts +13 -0
- package/dist/provider-hf-transformers/common/HFT_Unload.d.ts.map +1 -0
- package/dist/{hf-transformers → provider-hf-transformers}/index.d.ts +4 -2
- package/dist/provider-hf-transformers/index.d.ts.map +1 -0
- package/dist/provider-hf-transformers/index.js +513 -0
- package/dist/provider-hf-transformers/index.js.map +16 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformers.d.ts +14 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformers.d.ts.map +1 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersInline.d.ts +15 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersInline.d.ts.map +1 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersWorker.d.ts +7 -0
- package/dist/provider-hf-transformers/registerHuggingFaceTransformersWorker.d.ts.map +1 -0
- package/dist/provider-hf-transformers/runtime.d.ts +21 -0
- package/dist/provider-hf-transformers/runtime.d.ts.map +1 -0
- package/dist/{index-j4g81r4k.js → provider-hf-transformers/runtime.js} +1561 -927
- package/dist/provider-hf-transformers/runtime.js.map +49 -0
- package/dist/provider-llamacpp/LlamaCppProvider.d.ts +2 -15
- package/dist/provider-llamacpp/LlamaCppProvider.d.ts.map +1 -1
- package/dist/provider-llamacpp/LlamaCppQueuedProvider.d.ts +16 -0
- package/dist/provider-llamacpp/LlamaCppQueuedProvider.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_CountTokens.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_CountTokens.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Download.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Download.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_JobRunFns.d.ts +2 -18
- package/dist/provider-llamacpp/common/LlamaCpp_JobRunFns.d.ts.map +1 -1
- package/dist/provider-llamacpp/common/LlamaCpp_ModelInfo.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ModelInfo.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSchema.d.ts +31 -31
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSchema.d.ts.map +1 -1
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSearch.d.ts +8 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ModelSearch.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Runtime.d.ts +31 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Runtime.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_StructuredGeneration.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextEmbedding.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextGeneration.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextGeneration.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextRewriter.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextRewriter.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextSummary.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_TextSummary.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ToolCalling.d.ts +10 -0
- package/dist/provider-llamacpp/common/LlamaCpp_ToolCalling.d.ts.map +1 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Unload.d.ts +9 -0
- package/dist/provider-llamacpp/common/LlamaCpp_Unload.d.ts.map +1 -0
- package/dist/provider-llamacpp/index.d.ts +1 -3
- package/dist/provider-llamacpp/index.d.ts.map +1 -1
- package/dist/provider-llamacpp/index.js +121 -725
- package/dist/provider-llamacpp/index.js.map +7 -5
- package/dist/provider-llamacpp/registerLlamaCpp.d.ts +10 -0
- package/dist/provider-llamacpp/registerLlamaCpp.d.ts.map +1 -0
- package/dist/provider-llamacpp/registerLlamaCppInline.d.ts +8 -0
- package/dist/provider-llamacpp/registerLlamaCppInline.d.ts.map +1 -0
- package/dist/provider-llamacpp/registerLlamaCppWorker.d.ts +7 -0
- package/dist/provider-llamacpp/registerLlamaCppWorker.d.ts.map +1 -0
- package/dist/provider-llamacpp/runtime.d.ts +16 -0
- package/dist/provider-llamacpp/runtime.d.ts.map +1 -0
- package/dist/provider-llamacpp/runtime.js +929 -0
- package/dist/provider-llamacpp/runtime.js.map +29 -0
- package/dist/provider-ollama/OllamaProvider.d.ts +2 -15
- package/dist/provider-ollama/OllamaProvider.d.ts.map +1 -1
- package/dist/provider-ollama/OllamaQueuedProvider.d.ts +16 -0
- package/dist/provider-ollama/OllamaQueuedProvider.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_Client.browser.d.ts +13 -0
- package/dist/provider-ollama/common/Ollama_Client.browser.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_Client.d.ts +13 -0
- package/dist/provider-ollama/common/Ollama_Client.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_JobRunFns.browser.d.ts +362 -11
- package/dist/provider-ollama/common/Ollama_JobRunFns.browser.d.ts.map +1 -1
- package/dist/provider-ollama/common/Ollama_JobRunFns.d.ts +361 -11
- package/dist/provider-ollama/common/Ollama_JobRunFns.d.ts.map +1 -1
- package/dist/provider-ollama/common/Ollama_ModelInfo.d.ts +11 -0
- package/dist/provider-ollama/common/Ollama_ModelInfo.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_ModelSchema.d.ts +30 -30
- package/dist/provider-ollama/common/Ollama_ModelSchema.d.ts.map +1 -1
- package/dist/provider-ollama/common/Ollama_ModelSearch.d.ts +11 -0
- package/dist/provider-ollama/common/Ollama_ModelSearch.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_ModelUtil.d.ts +8 -0
- package/dist/provider-ollama/common/Ollama_ModelUtil.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextEmbedding.d.ts +11 -0
- package/dist/provider-ollama/common/Ollama_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextGeneration.d.ts +12 -0
- package/dist/provider-ollama/common/Ollama_TextGeneration.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextRewriter.d.ts +12 -0
- package/dist/provider-ollama/common/Ollama_TextRewriter.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_TextSummary.d.ts +12 -0
- package/dist/provider-ollama/common/Ollama_TextSummary.d.ts.map +1 -0
- package/dist/provider-ollama/common/Ollama_ToolCalling.d.ts +16 -0
- package/dist/provider-ollama/common/Ollama_ToolCalling.d.ts.map +1 -0
- package/dist/provider-ollama/index.browser.d.ts +1 -3
- package/dist/provider-ollama/index.browser.d.ts.map +1 -1
- package/dist/provider-ollama/index.browser.js +18 -396
- package/dist/provider-ollama/index.browser.js.map +6 -7
- package/dist/provider-ollama/index.d.ts +1 -3
- package/dist/provider-ollama/index.d.ts.map +1 -1
- package/dist/provider-ollama/index.js +93 -382
- package/dist/provider-ollama/index.js.map +7 -5
- package/dist/provider-ollama/registerOllama.d.ts +10 -0
- package/dist/provider-ollama/registerOllama.d.ts.map +1 -0
- package/dist/provider-ollama/registerOllamaInline.browser.d.ts +8 -0
- package/dist/provider-ollama/registerOllamaInline.browser.d.ts.map +1 -0
- package/dist/provider-ollama/registerOllamaInline.d.ts +8 -0
- package/dist/provider-ollama/registerOllamaInline.d.ts.map +1 -0
- package/dist/provider-ollama/registerOllamaWorker.browser.d.ts +7 -0
- package/dist/provider-ollama/registerOllamaWorker.browser.d.ts.map +1 -0
- package/dist/{google-gemini/Gemini_Worker.d.ts → provider-ollama/registerOllamaWorker.d.ts} +2 -2
- package/dist/provider-ollama/registerOllamaWorker.d.ts.map +1 -0
- package/dist/provider-ollama/runtime.browser.d.ts +16 -0
- package/dist/provider-ollama/runtime.browser.d.ts.map +1 -0
- package/dist/provider-ollama/runtime.browser.js +528 -0
- package/dist/provider-ollama/runtime.browser.js.map +24 -0
- package/dist/provider-ollama/runtime.d.ts +16 -0
- package/dist/provider-ollama/runtime.d.ts.map +1 -0
- package/dist/provider-ollama/runtime.js +538 -0
- package/dist/provider-ollama/runtime.js.map +24 -0
- package/dist/provider-openai/OpenAiProvider.d.ts +2 -19
- package/dist/provider-openai/OpenAiProvider.d.ts.map +1 -1
- package/dist/provider-openai/OpenAiQueuedProvider.d.ts +16 -0
- package/dist/provider-openai/OpenAiQueuedProvider.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_Client.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_Client.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_CountTokens.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_CountTokens.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_JobRunFns.d.ts +2 -15
- package/dist/provider-openai/common/OpenAI_JobRunFns.d.ts.map +1 -1
- package/dist/provider-openai/common/OpenAI_ModelInfo.d.ts +9 -0
- package/dist/provider-openai/common/OpenAI_ModelInfo.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_ModelSchema.d.ts +31 -31
- package/dist/provider-openai/common/OpenAI_ModelSchema.d.ts.map +1 -1
- package/dist/provider-openai/common/OpenAI_ModelSearch.d.ts +8 -0
- package/dist/provider-openai/common/OpenAI_ModelSearch.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_StructuredGeneration.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_StructuredGeneration.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextEmbedding.d.ts +9 -0
- package/dist/provider-openai/common/OpenAI_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextGeneration.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_TextGeneration.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextRewriter.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_TextRewriter.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_TextSummary.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_TextSummary.d.ts.map +1 -0
- package/dist/provider-openai/common/OpenAI_ToolCalling.d.ts +10 -0
- package/dist/provider-openai/common/OpenAI_ToolCalling.d.ts.map +1 -0
- package/dist/provider-openai/index.d.ts +1 -3
- package/dist/provider-openai/index.d.ts.map +1 -1
- package/dist/provider-openai/index.js +108 -519
- package/dist/provider-openai/index.js.map +7 -5
- package/dist/provider-openai/registerOpenAi.d.ts +10 -0
- package/dist/provider-openai/registerOpenAi.d.ts.map +1 -0
- package/dist/provider-openai/registerOpenAiInline.d.ts +8 -0
- package/dist/provider-openai/registerOpenAiInline.d.ts.map +1 -0
- package/dist/{ggml/model/GgmlLocalModel.d.ts → provider-openai/registerOpenAiWorker.d.ts} +2 -2
- package/dist/provider-openai/registerOpenAiWorker.d.ts.map +1 -0
- package/dist/provider-openai/runtime.d.ts +16 -0
- package/dist/provider-openai/runtime.d.ts.map +1 -0
- package/dist/provider-openai/runtime.js +662 -0
- package/dist/provider-openai/runtime.js.map +25 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeProvider.d.ts +24 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeProvider.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeQueuedProvider.d.ts +16 -0
- package/dist/provider-tf-mediapipe/TensorFlowMediaPipeQueuedProvider.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Client.d.ts +8 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Client.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_Constants.d.ts +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Constants.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Download.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Download.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceDetector.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceDetector.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceLandmarker.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_FaceLandmarker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_GestureRecognizer.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_GestureRecognizer.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_HandLandmarker.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_HandLandmarker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageClassification.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageClassification.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageEmbedding.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageEmbedding.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageSegmentation.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ImageSegmentation.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_JobRunFns.d.ts +340 -408
- package/dist/provider-tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelInfo.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelInfo.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/common/TFMP_ModelSchema.d.ts +40 -37
- package/dist/provider-tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelSearch.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ModelSearch.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ObjectDetection.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_ObjectDetection.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_PoseLandmarker.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_PoseLandmarker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Runtime.d.ts +43 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Runtime.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextClassification.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextClassification.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextEmbedding.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextEmbedding.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextLanguageDetection.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_TextLanguageDetection.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Unload.d.ts +9 -0
- package/dist/provider-tf-mediapipe/common/TFMP_Unload.d.ts.map +1 -0
- package/dist/{tf-mediapipe → provider-tf-mediapipe}/index.d.ts +1 -3
- package/dist/provider-tf-mediapipe/index.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/index.js +129 -0
- package/dist/provider-tf-mediapipe/index.js.map +13 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipe.d.ts +10 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipe.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeInline.d.ts +8 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeInline.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeWorker.d.ts +7 -0
- package/dist/provider-tf-mediapipe/registerTensorFlowMediaPipeWorker.d.ts.map +1 -0
- package/dist/provider-tf-mediapipe/runtime.d.ts +16 -0
- package/dist/provider-tf-mediapipe/runtime.d.ts.map +1 -0
- package/dist/{tf-mediapipe/index.js → provider-tf-mediapipe/runtime.js} +417 -380
- package/dist/provider-tf-mediapipe/runtime.js.map +33 -0
- package/package.json +66 -40
- package/dist/HFT_JobRunFns-8hcpea4c.js +0 -80
- package/dist/HFT_JobRunFns-8hcpea4c.js.map +0 -9
- package/dist/anthropic/AnthropicProvider.d.ts.map +0 -1
- package/dist/anthropic/Anthropic_Worker.d.ts.map +0 -1
- package/dist/anthropic/common/Anthropic_Constants.d.ts.map +0 -1
- package/dist/anthropic/common/Anthropic_JobRunFns.d.ts +0 -24
- package/dist/anthropic/common/Anthropic_JobRunFns.d.ts.map +0 -1
- package/dist/anthropic/common/Anthropic_ModelSchema.d.ts.map +0 -1
- package/dist/anthropic/index.d.ts.map +0 -1
- package/dist/anthropic/index.js.map +0 -11
- package/dist/ggml/model/GgmlLocalModel.d.ts.map +0 -1
- package/dist/google-gemini/Gemini_Worker.d.ts.map +0 -1
- package/dist/google-gemini/GoogleGeminiProvider.d.ts.map +0 -1
- package/dist/google-gemini/common/Gemini_Constants.d.ts.map +0 -1
- package/dist/google-gemini/common/Gemini_JobRunFns.d.ts +0 -25
- package/dist/google-gemini/common/Gemini_JobRunFns.d.ts.map +0 -1
- package/dist/google-gemini/common/Gemini_ModelSchema.d.ts.map +0 -1
- package/dist/google-gemini/index.d.ts.map +0 -1
- package/dist/google-gemini/index.js.map +0 -11
- package/dist/hf-transformers/HFT_Worker.d.ts +0 -7
- package/dist/hf-transformers/HFT_Worker.d.ts.map +0 -1
- package/dist/hf-transformers/HuggingFaceTransformersProvider.d.ts.map +0 -1
- package/dist/hf-transformers/common/HFT_Constants.d.ts.map +0 -1
- package/dist/hf-transformers/common/HFT_JobRunFns.d.ts.map +0 -1
- package/dist/hf-transformers/common/HFT_ModelSchema.d.ts.map +0 -1
- package/dist/hf-transformers/index.d.ts.map +0 -1
- package/dist/hf-transformers/index.js +0 -116
- package/dist/hf-transformers/index.js.map +0 -10
- package/dist/index-60ev6k93.js.map +0 -12
- package/dist/index-6j5pq722.js +0 -11
- package/dist/index-6j5pq722.js.map +0 -9
- package/dist/index-8651nz8y.js.map +0 -12
- package/dist/index-j4g81r4k.js.map +0 -10
- package/dist/index-pkd79j8b.js +0 -58
- package/dist/index-pkd79j8b.js.map +0 -10
- package/dist/index-q2t627d5.js +0 -88
- package/dist/index-q2t627d5.js.map +0 -12
- package/dist/index-tp5s7355.js +0 -77
- package/dist/index-tp5s7355.js.map +0 -12
- package/dist/index-v72vr07f.js +0 -81
- package/dist/index-v72vr07f.js.map +0 -12
- package/dist/index-wr57rwyx.js +0 -104
- package/dist/index-wr57rwyx.js.map +0 -12
- package/dist/index-zqq3kw0n.js +0 -171
- package/dist/index-zqq3kw0n.js.map +0 -11
- package/dist/index.browser-6j5pq722.js +0 -11
- package/dist/index.browser-6j5pq722.js.map +0 -9
- package/dist/index.d.ts +0 -33
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js +0 -316
- package/dist/index.js.map +0 -15
- package/dist/provider-hf-inference/HFI_Worker.d.ts +0 -7
- package/dist/provider-hf-inference/HFI_Worker.d.ts.map +0 -1
- package/dist/provider-llamacpp/LlamaCpp_Worker.d.ts +0 -7
- package/dist/provider-llamacpp/LlamaCpp_Worker.d.ts.map +0 -1
- package/dist/provider-ollama/Ollama_Worker.browser.d.ts +0 -7
- package/dist/provider-ollama/Ollama_Worker.browser.d.ts.map +0 -1
- package/dist/provider-ollama/Ollama_Worker.d.ts +0 -7
- package/dist/provider-ollama/Ollama_Worker.d.ts.map +0 -1
- package/dist/provider-openai/OpenAI_Worker.d.ts +0 -7
- package/dist/provider-openai/OpenAI_Worker.d.ts.map +0 -1
- package/dist/tf-mediapipe/TFMP_Worker.d.ts +0 -7
- package/dist/tf-mediapipe/TFMP_Worker.d.ts.map +0 -1
- package/dist/tf-mediapipe/TensorFlowMediaPipeProvider.d.ts +0 -41
- package/dist/tf-mediapipe/TensorFlowMediaPipeProvider.d.ts.map +0 -1
- package/dist/tf-mediapipe/common/TFMP_Constants.d.ts.map +0 -1
- package/dist/tf-mediapipe/common/TFMP_JobRunFns.d.ts.map +0 -1
- package/dist/tf-mediapipe/common/TFMP_ModelSchema.d.ts.map +0 -1
- package/dist/tf-mediapipe/index.d.ts.map +0 -1
- package/dist/tf-mediapipe/index.js.map +0 -14
- package/dist/types.d.ts +0 -7
- package/dist/types.d.ts.map +0 -1
- package/dist/web-browser/WebBrowserProvider.d.ts.map +0 -1
- package/dist/web-browser/WebBrowser_Worker.d.ts +0 -7
- package/dist/web-browser/WebBrowser_Worker.d.ts.map +0 -1
- package/dist/web-browser/common/WebBrowser_Constants.d.ts.map +0 -1
- package/dist/web-browser/common/WebBrowser_JobRunFns.d.ts +0 -20
- package/dist/web-browser/common/WebBrowser_JobRunFns.d.ts.map +0 -1
- package/dist/web-browser/common/WebBrowser_ModelSchema.d.ts.map +0 -1
- package/dist/web-browser/index.d.ts.map +0 -1
- package/dist/web-browser/index.js.map +0 -14
- /package/dist/{anthropic → provider-anthropic}/common/Anthropic_Constants.d.ts +0 -0
- /package/dist/{web-browser → provider-chrome}/common/WebBrowser_Constants.d.ts +0 -0
- /package/dist/{google-gemini → provider-gemini}/common/Gemini_Constants.d.ts +0 -0
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @license
|
|
3
|
+
* Copyright 2025 Steven Roussey <sroussey@gmail.com>
|
|
4
|
+
* SPDX-License-Identifier: Apache-2.0
|
|
5
|
+
*/
|
|
6
|
+
/**
|
|
7
|
+
* Worker server and main-thread inline Ollama registration (node build; pulls in `Ollama_JobRunFns`),
|
|
8
|
+
* plus SDK client helpers (`Ollama_Client`).
|
|
9
|
+
* Import from `@workglow/ai-provider/ollama/runtime` — not from the main `ollama` barrel.
|
|
10
|
+
*
|
|
11
|
+
* Use `export *` (not `export { … } from "…"`) so the Bun bundler keeps the module graph.
|
|
12
|
+
*/
|
|
13
|
+
export * from "./common/Ollama_Client";
|
|
14
|
+
export * from "./registerOllamaInline";
|
|
15
|
+
export * from "./registerOllamaWorker";
|
|
16
|
+
//# sourceMappingURL=runtime.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"runtime.d.ts","sourceRoot":"","sources":["../../src/provider-ollama/runtime.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;;;;;GAMG;AACH,cAAc,wBAAwB,CAAC;AACvC,cAAc,wBAAwB,CAAC;AACvC,cAAc,wBAAwB,CAAC"}
|
|
@@ -0,0 +1,538 @@
|
|
|
1
|
+
var __defProp = Object.defineProperty;
|
|
2
|
+
var __returnValue = (v) => v;
|
|
3
|
+
function __exportSetter(name, newValue) {
|
|
4
|
+
this[name] = __returnValue.bind(null, newValue);
|
|
5
|
+
}
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, {
|
|
9
|
+
get: all[name],
|
|
10
|
+
enumerable: true,
|
|
11
|
+
configurable: true,
|
|
12
|
+
set: __exportSetter.bind(all, name)
|
|
13
|
+
});
|
|
14
|
+
};
|
|
15
|
+
var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
|
|
16
|
+
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
17
|
+
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
18
|
+
}) : x)(function(x) {
|
|
19
|
+
if (typeof require !== "undefined")
|
|
20
|
+
return require.apply(this, arguments);
|
|
21
|
+
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
22
|
+
});
|
|
23
|
+
|
|
24
|
+
// src/provider-ollama/common/Ollama_Constants.ts
|
|
25
|
+
var OLLAMA = "OLLAMA";
|
|
26
|
+
var OLLAMA_DEFAULT_BASE_URL = "http://localhost:11434";
|
|
27
|
+
|
|
28
|
+
// src/provider-ollama/common/Ollama_ModelUtil.ts
|
|
29
|
+
function getOllamaModelName(model) {
|
|
30
|
+
const name = model?.provider_config?.model_name;
|
|
31
|
+
if (!name) {
|
|
32
|
+
throw new Error("Missing model name in provider_config.model_name.");
|
|
33
|
+
}
|
|
34
|
+
return name;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
// src/provider-ollama/common/Ollama_Client.ts
|
|
38
|
+
var _OllamaClass;
|
|
39
|
+
async function loadOllamaSDK() {
|
|
40
|
+
if (!_OllamaClass) {
|
|
41
|
+
try {
|
|
42
|
+
const sdk = await import("ollama");
|
|
43
|
+
_OllamaClass = sdk.Ollama;
|
|
44
|
+
} catch {
|
|
45
|
+
throw new Error("ollama is required for Ollama tasks. Install it with: bun add ollama");
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
return _OllamaClass;
|
|
49
|
+
}
|
|
50
|
+
async function getClient(model) {
|
|
51
|
+
const Ollama = await loadOllamaSDK();
|
|
52
|
+
const host = model?.provider_config?.base_url || OLLAMA_DEFAULT_BASE_URL;
|
|
53
|
+
return new Ollama({ host });
|
|
54
|
+
}
|
|
55
|
+
var getModelName = getOllamaModelName;
|
|
56
|
+
// src/provider-ollama/common/Ollama_JobRunFns.ts
|
|
57
|
+
import { toTextFlatMessages } from "@workglow/ai/worker";
|
|
58
|
+
|
|
59
|
+
// src/provider-ollama/common/Ollama_ModelInfo.ts
|
|
60
|
+
function createOllamaModelInfo(getClient2) {
|
|
61
|
+
return async (input, model) => {
|
|
62
|
+
const client = await getClient2(model);
|
|
63
|
+
const modelName = getOllamaModelName(model);
|
|
64
|
+
let is_cached = false;
|
|
65
|
+
let is_loaded = false;
|
|
66
|
+
let file_sizes = null;
|
|
67
|
+
try {
|
|
68
|
+
const showResponse = await client.show({ model: modelName });
|
|
69
|
+
is_cached = true;
|
|
70
|
+
const size = showResponse.size;
|
|
71
|
+
if (size != null) {
|
|
72
|
+
file_sizes = { model: size };
|
|
73
|
+
}
|
|
74
|
+
} catch {}
|
|
75
|
+
try {
|
|
76
|
+
const psResponse = await client.ps();
|
|
77
|
+
is_loaded = psResponse.models.some((m) => m.name === modelName);
|
|
78
|
+
} catch {}
|
|
79
|
+
return {
|
|
80
|
+
model: input.model,
|
|
81
|
+
is_local: true,
|
|
82
|
+
is_remote: false,
|
|
83
|
+
supports_browser: true,
|
|
84
|
+
supports_node: true,
|
|
85
|
+
is_cached,
|
|
86
|
+
is_loaded,
|
|
87
|
+
file_sizes
|
|
88
|
+
};
|
|
89
|
+
};
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
// src/provider-ollama/common/Ollama_ModelSearch.ts
|
|
93
|
+
function createOllamaModelSearch(getClient2) {
|
|
94
|
+
return async () => {
|
|
95
|
+
try {
|
|
96
|
+
const client = await getClient2(undefined);
|
|
97
|
+
const response = await client.list();
|
|
98
|
+
const results = response.models.map((m) => ({
|
|
99
|
+
id: m.name,
|
|
100
|
+
label: `${m.name} ${m.details.parameter_size} ${m.details.quantization_level}`,
|
|
101
|
+
description: `${m.details.parameter_size} ${m.details.quantization_level}`,
|
|
102
|
+
record: {
|
|
103
|
+
model_id: m.name,
|
|
104
|
+
provider: OLLAMA,
|
|
105
|
+
title: m.name,
|
|
106
|
+
description: `${m.details.parameter_size} ${m.details.quantization_level}`,
|
|
107
|
+
tasks: [],
|
|
108
|
+
provider_config: { model_name: m.name },
|
|
109
|
+
metadata: {}
|
|
110
|
+
},
|
|
111
|
+
raw: m
|
|
112
|
+
}));
|
|
113
|
+
return { results };
|
|
114
|
+
} catch {
|
|
115
|
+
return { results: [] };
|
|
116
|
+
}
|
|
117
|
+
};
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// src/provider-ollama/common/Ollama_TextEmbedding.ts
|
|
121
|
+
function createOllamaTextEmbedding(getClient2) {
|
|
122
|
+
return async (input, model, update_progress, _signal) => {
|
|
123
|
+
update_progress(0, "Starting Ollama text embedding");
|
|
124
|
+
const client = await getClient2(model);
|
|
125
|
+
const modelName = getOllamaModelName(model);
|
|
126
|
+
const texts = Array.isArray(input.text) ? input.text : [input.text];
|
|
127
|
+
const response = await client.embed({
|
|
128
|
+
model: modelName,
|
|
129
|
+
input: texts
|
|
130
|
+
});
|
|
131
|
+
update_progress(100, "Completed Ollama text embedding");
|
|
132
|
+
if (Array.isArray(input.text)) {
|
|
133
|
+
return {
|
|
134
|
+
vector: response.embeddings.map((e) => new Float32Array(e))
|
|
135
|
+
};
|
|
136
|
+
}
|
|
137
|
+
return { vector: new Float32Array(response.embeddings[0]) };
|
|
138
|
+
};
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
// src/provider-ollama/common/Ollama_TextGeneration.ts
|
|
142
|
+
import { getLogger } from "@workglow/util/worker";
|
|
143
|
+
function createOllamaTextGeneration(getClient2) {
|
|
144
|
+
const run = async (input, model, update_progress, _signal) => {
|
|
145
|
+
if (Array.isArray(input.prompt)) {
|
|
146
|
+
getLogger().warn("Ollama_TextGeneration: array input received; processing sequentially (no native batch support)");
|
|
147
|
+
const prompts = input.prompt;
|
|
148
|
+
const results = [];
|
|
149
|
+
for (const item of prompts) {
|
|
150
|
+
const r = await run({ ...input, prompt: item }, model, update_progress, _signal);
|
|
151
|
+
results.push(r.text);
|
|
152
|
+
}
|
|
153
|
+
return { text: results };
|
|
154
|
+
}
|
|
155
|
+
update_progress(0, "Starting Ollama text generation");
|
|
156
|
+
const client = await getClient2(model);
|
|
157
|
+
const modelName = getOllamaModelName(model);
|
|
158
|
+
const response = await client.chat({
|
|
159
|
+
model: modelName,
|
|
160
|
+
messages: [{ role: "user", content: input.prompt }],
|
|
161
|
+
options: {
|
|
162
|
+
temperature: input.temperature,
|
|
163
|
+
top_p: input.topP,
|
|
164
|
+
num_predict: input.maxTokens,
|
|
165
|
+
frequency_penalty: input.frequencyPenalty,
|
|
166
|
+
presence_penalty: input.presencePenalty
|
|
167
|
+
}
|
|
168
|
+
});
|
|
169
|
+
update_progress(100, "Completed Ollama text generation");
|
|
170
|
+
return { text: response.message.content };
|
|
171
|
+
};
|
|
172
|
+
return run;
|
|
173
|
+
}
|
|
174
|
+
function createOllamaTextGenerationStream(getClient2) {
|
|
175
|
+
return async function* (input, model, signal) {
|
|
176
|
+
const client = await getClient2(model);
|
|
177
|
+
const modelName = getOllamaModelName(model);
|
|
178
|
+
const stream = await client.chat({
|
|
179
|
+
model: modelName,
|
|
180
|
+
messages: [{ role: "user", content: input.prompt }],
|
|
181
|
+
options: {
|
|
182
|
+
temperature: input.temperature,
|
|
183
|
+
top_p: input.topP,
|
|
184
|
+
num_predict: input.maxTokens,
|
|
185
|
+
frequency_penalty: input.frequencyPenalty,
|
|
186
|
+
presence_penalty: input.presencePenalty
|
|
187
|
+
},
|
|
188
|
+
stream: true
|
|
189
|
+
});
|
|
190
|
+
const onAbort = () => stream.abort();
|
|
191
|
+
signal.addEventListener("abort", onAbort, { once: true });
|
|
192
|
+
try {
|
|
193
|
+
for await (const chunk of stream) {
|
|
194
|
+
const delta = chunk.message.content;
|
|
195
|
+
if (delta) {
|
|
196
|
+
yield { type: "text-delta", port: "text", textDelta: delta };
|
|
197
|
+
}
|
|
198
|
+
}
|
|
199
|
+
yield { type: "finish", data: {} };
|
|
200
|
+
} finally {
|
|
201
|
+
signal.removeEventListener("abort", onAbort);
|
|
202
|
+
}
|
|
203
|
+
};
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// src/provider-ollama/common/Ollama_TextRewriter.ts
|
|
207
|
+
import { getLogger as getLogger2 } from "@workglow/util/worker";
|
|
208
|
+
function createOllamaTextRewriter(getClient2) {
|
|
209
|
+
const run = async (input, model, update_progress, _signal) => {
|
|
210
|
+
if (Array.isArray(input.text)) {
|
|
211
|
+
getLogger2().warn("Ollama_TextRewriter: array input received; processing sequentially (no native batch support)");
|
|
212
|
+
const texts = input.text;
|
|
213
|
+
const results = [];
|
|
214
|
+
for (const item of texts) {
|
|
215
|
+
const r = await run({ ...input, text: item }, model, update_progress, _signal);
|
|
216
|
+
results.push(r.text);
|
|
217
|
+
}
|
|
218
|
+
return { text: results };
|
|
219
|
+
}
|
|
220
|
+
update_progress(0, "Starting Ollama text rewriting");
|
|
221
|
+
const client = await getClient2(model);
|
|
222
|
+
const modelName = getOllamaModelName(model);
|
|
223
|
+
const response = await client.chat({
|
|
224
|
+
model: modelName,
|
|
225
|
+
messages: [
|
|
226
|
+
{ role: "system", content: input.prompt },
|
|
227
|
+
{ role: "user", content: input.text }
|
|
228
|
+
]
|
|
229
|
+
});
|
|
230
|
+
update_progress(100, "Completed Ollama text rewriting");
|
|
231
|
+
return { text: response.message.content };
|
|
232
|
+
};
|
|
233
|
+
return run;
|
|
234
|
+
}
|
|
235
|
+
function createOllamaTextRewriterStream(getClient2) {
|
|
236
|
+
return async function* (input, model, signal) {
|
|
237
|
+
const client = await getClient2(model);
|
|
238
|
+
const modelName = getOllamaModelName(model);
|
|
239
|
+
const stream = await client.chat({
|
|
240
|
+
model: modelName,
|
|
241
|
+
messages: [
|
|
242
|
+
{ role: "system", content: input.prompt },
|
|
243
|
+
{ role: "user", content: input.text }
|
|
244
|
+
],
|
|
245
|
+
stream: true
|
|
246
|
+
});
|
|
247
|
+
const onAbort = () => stream.abort();
|
|
248
|
+
signal.addEventListener("abort", onAbort, { once: true });
|
|
249
|
+
try {
|
|
250
|
+
for await (const chunk of stream) {
|
|
251
|
+
const delta = chunk.message.content;
|
|
252
|
+
if (delta) {
|
|
253
|
+
yield { type: "text-delta", port: "text", textDelta: delta };
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
yield { type: "finish", data: {} };
|
|
257
|
+
} finally {
|
|
258
|
+
signal.removeEventListener("abort", onAbort);
|
|
259
|
+
}
|
|
260
|
+
};
|
|
261
|
+
}
|
|
262
|
+
|
|
263
|
+
// src/provider-ollama/common/Ollama_TextSummary.ts
|
|
264
|
+
import { getLogger as getLogger3 } from "@workglow/util/worker";
|
|
265
|
+
function createOllamaTextSummary(getClient2) {
|
|
266
|
+
const run = async (input, model, update_progress, _signal) => {
|
|
267
|
+
if (Array.isArray(input.text)) {
|
|
268
|
+
getLogger3().warn("Ollama_TextSummary: array input received; processing sequentially (no native batch support)");
|
|
269
|
+
const texts = input.text;
|
|
270
|
+
const results = [];
|
|
271
|
+
for (const item of texts) {
|
|
272
|
+
const r = await run({ ...input, text: item }, model, update_progress, _signal);
|
|
273
|
+
results.push(r.text);
|
|
274
|
+
}
|
|
275
|
+
return { text: results };
|
|
276
|
+
}
|
|
277
|
+
update_progress(0, "Starting Ollama text summarization");
|
|
278
|
+
const client = await getClient2(model);
|
|
279
|
+
const modelName = getOllamaModelName(model);
|
|
280
|
+
const response = await client.chat({
|
|
281
|
+
model: modelName,
|
|
282
|
+
messages: [
|
|
283
|
+
{ role: "system", content: "Summarize the following text concisely." },
|
|
284
|
+
{ role: "user", content: input.text }
|
|
285
|
+
]
|
|
286
|
+
});
|
|
287
|
+
update_progress(100, "Completed Ollama text summarization");
|
|
288
|
+
return { text: response.message.content };
|
|
289
|
+
};
|
|
290
|
+
return run;
|
|
291
|
+
}
|
|
292
|
+
function createOllamaTextSummaryStream(getClient2) {
|
|
293
|
+
return async function* (input, model, signal) {
|
|
294
|
+
const client = await getClient2(model);
|
|
295
|
+
const modelName = getOllamaModelName(model);
|
|
296
|
+
const stream = await client.chat({
|
|
297
|
+
model: modelName,
|
|
298
|
+
messages: [
|
|
299
|
+
{ role: "system", content: "Summarize the following text concisely." },
|
|
300
|
+
{ role: "user", content: input.text }
|
|
301
|
+
],
|
|
302
|
+
stream: true
|
|
303
|
+
});
|
|
304
|
+
const onAbort = () => stream.abort();
|
|
305
|
+
signal.addEventListener("abort", onAbort, { once: true });
|
|
306
|
+
try {
|
|
307
|
+
for await (const chunk of stream) {
|
|
308
|
+
const delta = chunk.message.content;
|
|
309
|
+
if (delta) {
|
|
310
|
+
yield { type: "text-delta", port: "text", textDelta: delta };
|
|
311
|
+
}
|
|
312
|
+
}
|
|
313
|
+
yield { type: "finish", data: {} };
|
|
314
|
+
} finally {
|
|
315
|
+
signal.removeEventListener("abort", onAbort);
|
|
316
|
+
}
|
|
317
|
+
};
|
|
318
|
+
}
|
|
319
|
+
|
|
320
|
+
// src/provider-ollama/common/Ollama_ToolCalling.ts
|
|
321
|
+
import { buildToolDescription, filterValidToolCalls } from "@workglow/ai/worker";
|
|
322
|
+
import { getLogger as getLogger4, parsePartialJson } from "@workglow/util/worker";
|
|
323
|
+
function mapOllamaTools(tools) {
|
|
324
|
+
return tools.map((t) => ({
|
|
325
|
+
type: "function",
|
|
326
|
+
function: {
|
|
327
|
+
name: t.name,
|
|
328
|
+
description: buildToolDescription(t),
|
|
329
|
+
parameters: t.inputSchema
|
|
330
|
+
}
|
|
331
|
+
}));
|
|
332
|
+
}
|
|
333
|
+
function createOllamaToolCalling(getClient2, buildMessages) {
|
|
334
|
+
const run = async (input, model, update_progress, _signal) => {
|
|
335
|
+
if (Array.isArray(input.prompt)) {
|
|
336
|
+
getLogger4().warn("Ollama_ToolCalling: array input received; processing sequentially (no native batch support)");
|
|
337
|
+
const prompts = input.prompt;
|
|
338
|
+
const texts = [];
|
|
339
|
+
const toolCallsList = [];
|
|
340
|
+
for (const item of prompts) {
|
|
341
|
+
const r = await run({ ...input, prompt: item }, model, update_progress, _signal);
|
|
342
|
+
texts.push(r.text);
|
|
343
|
+
toolCallsList.push(r.toolCalls);
|
|
344
|
+
}
|
|
345
|
+
return { text: texts, toolCalls: toolCallsList };
|
|
346
|
+
}
|
|
347
|
+
update_progress(0, "Starting Ollama tool calling");
|
|
348
|
+
const client = await getClient2(model);
|
|
349
|
+
const modelName = getOllamaModelName(model);
|
|
350
|
+
const messages = buildMessages(input);
|
|
351
|
+
const tools = input.toolChoice === "none" ? undefined : mapOllamaTools(input.tools);
|
|
352
|
+
const response = await client.chat({
|
|
353
|
+
model: modelName,
|
|
354
|
+
messages,
|
|
355
|
+
tools,
|
|
356
|
+
options: {
|
|
357
|
+
temperature: input.temperature,
|
|
358
|
+
num_predict: input.maxTokens
|
|
359
|
+
}
|
|
360
|
+
});
|
|
361
|
+
const text = response.message.content ?? "";
|
|
362
|
+
const toolCalls = [];
|
|
363
|
+
(response.message.tool_calls ?? []).forEach((tc, index) => {
|
|
364
|
+
let parsedInput = {};
|
|
365
|
+
const fnArgs = tc.function.arguments;
|
|
366
|
+
if (typeof fnArgs === "string") {
|
|
367
|
+
try {
|
|
368
|
+
parsedInput = JSON.parse(fnArgs);
|
|
369
|
+
} catch {
|
|
370
|
+
const partial = parsePartialJson(fnArgs);
|
|
371
|
+
parsedInput = partial ?? {};
|
|
372
|
+
}
|
|
373
|
+
} else if (fnArgs != null) {
|
|
374
|
+
parsedInput = fnArgs;
|
|
375
|
+
}
|
|
376
|
+
const id = `call_${index}`;
|
|
377
|
+
toolCalls.push({ id, name: tc.function.name, input: parsedInput });
|
|
378
|
+
});
|
|
379
|
+
update_progress(100, "Completed Ollama tool calling");
|
|
380
|
+
return { text, toolCalls: filterValidToolCalls(toolCalls, input.tools) };
|
|
381
|
+
};
|
|
382
|
+
return run;
|
|
383
|
+
}
|
|
384
|
+
function createOllamaToolCallingStream(getClient2, buildMessages) {
|
|
385
|
+
return async function* (input, model, signal) {
|
|
386
|
+
const client = await getClient2(model);
|
|
387
|
+
const modelName = getOllamaModelName(model);
|
|
388
|
+
const messages = buildMessages(input);
|
|
389
|
+
const tools = input.toolChoice === "none" ? undefined : mapOllamaTools(input.tools);
|
|
390
|
+
const stream = await client.chat({
|
|
391
|
+
model: modelName,
|
|
392
|
+
messages,
|
|
393
|
+
tools,
|
|
394
|
+
options: {
|
|
395
|
+
temperature: input.temperature,
|
|
396
|
+
num_predict: input.maxTokens
|
|
397
|
+
},
|
|
398
|
+
stream: true
|
|
399
|
+
});
|
|
400
|
+
const onAbort = () => stream.abort();
|
|
401
|
+
signal.addEventListener("abort", onAbort, { once: true });
|
|
402
|
+
let accumulatedText = "";
|
|
403
|
+
const toolCalls = [];
|
|
404
|
+
let callIndex = 0;
|
|
405
|
+
try {
|
|
406
|
+
for await (const chunk of stream) {
|
|
407
|
+
const delta = chunk.message.content;
|
|
408
|
+
if (delta) {
|
|
409
|
+
accumulatedText += delta;
|
|
410
|
+
yield { type: "text-delta", port: "text", textDelta: delta };
|
|
411
|
+
}
|
|
412
|
+
const chunkToolCalls = chunk.message.tool_calls;
|
|
413
|
+
if (Array.isArray(chunkToolCalls) && chunkToolCalls.length > 0) {
|
|
414
|
+
for (const tc of chunkToolCalls) {
|
|
415
|
+
let parsedInput = {};
|
|
416
|
+
const fnArgs = tc.function.arguments;
|
|
417
|
+
if (typeof fnArgs === "string") {
|
|
418
|
+
try {
|
|
419
|
+
parsedInput = JSON.parse(fnArgs);
|
|
420
|
+
} catch {
|
|
421
|
+
const partial = parsePartialJson(fnArgs);
|
|
422
|
+
parsedInput = partial ?? {};
|
|
423
|
+
}
|
|
424
|
+
} else if (fnArgs != null) {
|
|
425
|
+
parsedInput = fnArgs;
|
|
426
|
+
}
|
|
427
|
+
const id = `call_${callIndex++}`;
|
|
428
|
+
toolCalls.push({ id, name: tc.function.name, input: parsedInput });
|
|
429
|
+
}
|
|
430
|
+
yield { type: "object-delta", port: "toolCalls", objectDelta: [...toolCalls] };
|
|
431
|
+
}
|
|
432
|
+
}
|
|
433
|
+
const validToolCalls = filterValidToolCalls(toolCalls, input.tools);
|
|
434
|
+
yield {
|
|
435
|
+
type: "finish",
|
|
436
|
+
data: { text: accumulatedText, toolCalls: validToolCalls }
|
|
437
|
+
};
|
|
438
|
+
} finally {
|
|
439
|
+
signal.removeEventListener("abort", onAbort);
|
|
440
|
+
}
|
|
441
|
+
};
|
|
442
|
+
}
|
|
443
|
+
|
|
444
|
+
// src/provider-ollama/common/Ollama_JobRunFns.ts
|
|
445
|
+
var Ollama_TextGeneration = createOllamaTextGeneration(getClient);
|
|
446
|
+
var Ollama_TextEmbedding = createOllamaTextEmbedding(getClient);
|
|
447
|
+
var Ollama_TextRewriter = createOllamaTextRewriter(getClient);
|
|
448
|
+
var Ollama_TextSummary = createOllamaTextSummary(getClient);
|
|
449
|
+
var Ollama_TextGeneration_Stream = createOllamaTextGenerationStream(getClient);
|
|
450
|
+
var Ollama_TextRewriter_Stream = createOllamaTextRewriterStream(getClient);
|
|
451
|
+
var Ollama_TextSummary_Stream = createOllamaTextSummaryStream(getClient);
|
|
452
|
+
var Ollama_ToolCalling = createOllamaToolCalling(getClient, toTextFlatMessages);
|
|
453
|
+
var Ollama_ToolCalling_Stream = createOllamaToolCallingStream(getClient, toTextFlatMessages);
|
|
454
|
+
var Ollama_ModelInfo = createOllamaModelInfo(getClient);
|
|
455
|
+
var Ollama_ModelSearch = createOllamaModelSearch(getClient);
|
|
456
|
+
var OLLAMA_TASKS = {
|
|
457
|
+
ModelInfoTask: Ollama_ModelInfo,
|
|
458
|
+
TextGenerationTask: Ollama_TextGeneration,
|
|
459
|
+
TextEmbeddingTask: Ollama_TextEmbedding,
|
|
460
|
+
TextRewriterTask: Ollama_TextRewriter,
|
|
461
|
+
TextSummaryTask: Ollama_TextSummary,
|
|
462
|
+
ToolCallingTask: Ollama_ToolCalling,
|
|
463
|
+
ModelSearchTask: Ollama_ModelSearch
|
|
464
|
+
};
|
|
465
|
+
var OLLAMA_STREAM_TASKS = {
|
|
466
|
+
TextGenerationTask: Ollama_TextGeneration_Stream,
|
|
467
|
+
TextRewriterTask: Ollama_TextRewriter_Stream,
|
|
468
|
+
TextSummaryTask: Ollama_TextSummary_Stream,
|
|
469
|
+
ToolCallingTask: Ollama_ToolCalling_Stream
|
|
470
|
+
};
|
|
471
|
+
|
|
472
|
+
// src/provider-ollama/OllamaQueuedProvider.ts
|
|
473
|
+
import {
|
|
474
|
+
QueuedAiProvider
|
|
475
|
+
} from "@workglow/ai";
|
|
476
|
+
class OllamaQueuedProvider extends QueuedAiProvider {
|
|
477
|
+
name = OLLAMA;
|
|
478
|
+
isLocal = true;
|
|
479
|
+
supportsBrowser = true;
|
|
480
|
+
taskTypes = [
|
|
481
|
+
"ModelInfoTask",
|
|
482
|
+
"TextGenerationTask",
|
|
483
|
+
"TextEmbeddingTask",
|
|
484
|
+
"TextRewriterTask",
|
|
485
|
+
"TextSummaryTask",
|
|
486
|
+
"ToolCallingTask",
|
|
487
|
+
"ModelSearchTask"
|
|
488
|
+
];
|
|
489
|
+
constructor(tasks, streamTasks, reactiveTasks) {
|
|
490
|
+
super(tasks, streamTasks, reactiveTasks);
|
|
491
|
+
}
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
// src/provider-ollama/registerOllamaInline.ts
|
|
495
|
+
async function registerOllamaInline(options) {
|
|
496
|
+
await new OllamaQueuedProvider(OLLAMA_TASKS, OLLAMA_STREAM_TASKS).register(options ?? {});
|
|
497
|
+
}
|
|
498
|
+
// src/provider-ollama/registerOllamaWorker.ts
|
|
499
|
+
import { getLogger as getLogger5, globalServiceRegistry, WORKER_SERVER } from "@workglow/util/worker";
|
|
500
|
+
|
|
501
|
+
// src/provider-ollama/OllamaProvider.ts
|
|
502
|
+
import {
|
|
503
|
+
AiProvider
|
|
504
|
+
} from "@workglow/ai/worker";
|
|
505
|
+
class OllamaProvider extends AiProvider {
|
|
506
|
+
name = OLLAMA;
|
|
507
|
+
isLocal = true;
|
|
508
|
+
supportsBrowser = true;
|
|
509
|
+
taskTypes = [
|
|
510
|
+
"ModelInfoTask",
|
|
511
|
+
"TextGenerationTask",
|
|
512
|
+
"TextEmbeddingTask",
|
|
513
|
+
"TextRewriterTask",
|
|
514
|
+
"TextSummaryTask",
|
|
515
|
+
"ToolCallingTask",
|
|
516
|
+
"ModelSearchTask"
|
|
517
|
+
];
|
|
518
|
+
constructor(tasks, streamTasks, reactiveTasks) {
|
|
519
|
+
super(tasks, streamTasks, reactiveTasks);
|
|
520
|
+
}
|
|
521
|
+
}
|
|
522
|
+
|
|
523
|
+
// src/provider-ollama/registerOllamaWorker.ts
|
|
524
|
+
async function registerOllamaWorker() {
|
|
525
|
+
const workerServer = globalServiceRegistry.get(WORKER_SERVER);
|
|
526
|
+
new OllamaProvider(OLLAMA_TASKS, OLLAMA_STREAM_TASKS).registerOnWorkerServer(workerServer);
|
|
527
|
+
workerServer.sendReady();
|
|
528
|
+
getLogger5().info("Ollama worker job run functions registered");
|
|
529
|
+
}
|
|
530
|
+
export {
|
|
531
|
+
registerOllamaWorker,
|
|
532
|
+
registerOllamaInline,
|
|
533
|
+
loadOllamaSDK,
|
|
534
|
+
getModelName,
|
|
535
|
+
getClient
|
|
536
|
+
};
|
|
537
|
+
|
|
538
|
+
//# debugId=03F4CFAA5A39F7EC64756E2164756E21
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 3,
|
|
3
|
+
"sources": ["../src/provider-ollama/common/Ollama_Constants.ts", "../src/provider-ollama/common/Ollama_ModelUtil.ts", "../src/provider-ollama/common/Ollama_Client.ts", "../src/provider-ollama/common/Ollama_JobRunFns.ts", "../src/provider-ollama/common/Ollama_ModelInfo.ts", "../src/provider-ollama/common/Ollama_ModelSearch.ts", "../src/provider-ollama/common/Ollama_TextEmbedding.ts", "../src/provider-ollama/common/Ollama_TextGeneration.ts", "../src/provider-ollama/common/Ollama_TextRewriter.ts", "../src/provider-ollama/common/Ollama_TextSummary.ts", "../src/provider-ollama/common/Ollama_ToolCalling.ts", "../src/provider-ollama/OllamaQueuedProvider.ts", "../src/provider-ollama/registerOllamaInline.ts", "../src/provider-ollama/registerOllamaWorker.ts", "../src/provider-ollama/OllamaProvider.ts"],
|
|
4
|
+
"sourcesContent": [
|
|
5
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nexport const OLLAMA = \"OLLAMA\";\nexport const OLLAMA_DEFAULT_BASE_URL = \"http://localhost:11434\";\n",
|
|
6
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\n\nexport function getOllamaModelName(model: OllamaModelConfig | undefined): string {\n const name = model?.provider_config?.model_name;\n if (!name) {\n throw new Error(\"Missing model name in provider_config.model_name.\");\n }\n return name;\n}\n",
|
|
7
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { OLLAMA_DEFAULT_BASE_URL } from \"./Ollama_Constants\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nlet _OllamaClass: (new (config: { host: string }) => any) | undefined;\n\nexport async function loadOllamaSDK(): Promise<(new (config: { host: string }) => any) & {}> {\n if (!_OllamaClass) {\n try {\n const sdk = await import(\"ollama\");\n _OllamaClass = sdk.Ollama;\n } catch {\n throw new Error(\"ollama is required for Ollama tasks. Install it with: bun add ollama\");\n }\n }\n return _OllamaClass;\n}\n\nexport async function getClient(model: OllamaModelConfig | undefined) {\n const Ollama = await loadOllamaSDK();\n const host = model?.provider_config?.base_url || OLLAMA_DEFAULT_BASE_URL;\n return new Ollama({ host });\n}\n\nexport const getModelName = getOllamaModelName;\n",
|
|
8
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { toTextFlatMessages, type AiProviderRunFn, type AiProviderStreamFn } from \"@workglow/ai/worker\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getClient } from \"./Ollama_Client\";\nimport { createOllamaModelInfo } from \"./Ollama_ModelInfo\";\nimport { createOllamaModelSearch } from \"./Ollama_ModelSearch\";\nimport { createOllamaTextEmbedding } from \"./Ollama_TextEmbedding\";\nimport {\n createOllamaTextGeneration,\n createOllamaTextGenerationStream,\n} from \"./Ollama_TextGeneration\";\nimport { createOllamaTextRewriter, createOllamaTextRewriterStream } from \"./Ollama_TextRewriter\";\nimport { createOllamaTextSummary, createOllamaTextSummaryStream } from \"./Ollama_TextSummary\";\nimport { createOllamaToolCalling, createOllamaToolCallingStream } from \"./Ollama_ToolCalling\";\n\nexport const Ollama_TextGeneration = createOllamaTextGeneration(getClient);\nexport const Ollama_TextEmbedding = createOllamaTextEmbedding(getClient);\nexport const Ollama_TextRewriter = createOllamaTextRewriter(getClient);\nexport const Ollama_TextSummary = createOllamaTextSummary(getClient);\n\nexport const Ollama_TextGeneration_Stream = createOllamaTextGenerationStream(getClient);\nexport const Ollama_TextRewriter_Stream = createOllamaTextRewriterStream(getClient);\nexport const Ollama_TextSummary_Stream = createOllamaTextSummaryStream(getClient);\n\nexport const Ollama_ToolCalling = createOllamaToolCalling(getClient, toTextFlatMessages);\nexport const Ollama_ToolCalling_Stream = createOllamaToolCallingStream(\n getClient,\n toTextFlatMessages\n);\n\nexport const Ollama_ModelInfo = createOllamaModelInfo(getClient);\nexport const Ollama_ModelSearch = createOllamaModelSearch(getClient);\n\nexport const OLLAMA_TASKS: Record<string, AiProviderRunFn<any, any, OllamaModelConfig>> = {\n ModelInfoTask: Ollama_ModelInfo,\n 
TextGenerationTask: Ollama_TextGeneration,\n TextEmbeddingTask: Ollama_TextEmbedding,\n TextRewriterTask: Ollama_TextRewriter,\n TextSummaryTask: Ollama_TextSummary,\n ToolCallingTask: Ollama_ToolCalling,\n ModelSearchTask: Ollama_ModelSearch,\n};\n\nexport const OLLAMA_STREAM_TASKS: Record<\n string,\n AiProviderStreamFn<any, any, OllamaModelConfig>\n> = {\n TextGenerationTask: Ollama_TextGeneration_Stream,\n TextRewriterTask: Ollama_TextRewriter_Stream,\n TextSummaryTask: Ollama_TextSummary_Stream,\n ToolCallingTask: Ollama_ToolCalling_Stream,\n};\n",
|
|
9
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRunFn, ModelInfoTaskInput, ModelInfoTaskOutput } from \"@workglow/ai\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport function createOllamaModelInfo(\n getClient: GetClient\n): AiProviderRunFn<ModelInfoTaskInput, ModelInfoTaskOutput, OllamaModelConfig> {\n return async (input, model) => {\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n let is_cached = false;\n let is_loaded = false;\n let file_sizes: Record<string, number> | null = null;\n\n try {\n const showResponse = await client.show({ model: modelName });\n is_cached = true;\n const size = (showResponse as any).size as number | undefined;\n if (size != null) {\n file_sizes = { model: size };\n }\n } catch {\n // Model not available on server\n }\n\n try {\n const psResponse = await client.ps();\n is_loaded = psResponse.models.some((m: any) => m.name === modelName);\n } catch {\n // ps() not available or failed\n }\n\n return {\n model: input.model,\n is_local: true,\n is_remote: false,\n supports_browser: true,\n supports_node: true,\n is_cached,\n is_loaded,\n file_sizes,\n };\n };\n}\n",
|
|
10
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRunFn, ModelSearchTaskInput, ModelSearchTaskOutput } from \"@workglow/ai\";\nimport { OLLAMA } from \"./Ollama_Constants\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport function createOllamaModelSearch(\n getClient: GetClient\n): AiProviderRunFn<ModelSearchTaskInput, ModelSearchTaskOutput> {\n return async () => {\n try {\n const client = await getClient(undefined);\n const response = await client.list();\n const results = response.models.map((m: any) => ({\n id: m.name,\n label: `${m.name} ${m.details.parameter_size} ${m.details.quantization_level}`,\n description: `${m.details.parameter_size} ${m.details.quantization_level}`,\n record: {\n model_id: m.name,\n provider: OLLAMA,\n title: m.name,\n description: `${m.details.parameter_size} ${m.details.quantization_level}`,\n tasks: [],\n provider_config: { model_name: m.name },\n metadata: {},\n },\n raw: m,\n }));\n return { results };\n } catch {\n return { results: [] };\n }\n };\n}\n",
|
|
11
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n AiProviderRunFn,\n TextEmbeddingTaskInput,\n TextEmbeddingTaskOutput,\n} from \"@workglow/ai\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport function createOllamaTextEmbedding(\n getClient: GetClient\n): AiProviderRunFn<TextEmbeddingTaskInput, TextEmbeddingTaskOutput, OllamaModelConfig> {\n return async (input, model, update_progress, _signal) => {\n update_progress(0, \"Starting Ollama text embedding\");\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const texts = Array.isArray(input.text) ? input.text : [input.text];\n\n const response = await client.embed({\n model: modelName,\n input: texts,\n });\n\n update_progress(100, \"Completed Ollama text embedding\");\n\n if (Array.isArray(input.text)) {\n return {\n vector: response.embeddings.map((e: number[]) => new Float32Array(e)),\n };\n }\n return { vector: new Float32Array(response.embeddings[0]) };\n };\n}\n",
|
|
12
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextGenerationTaskInput,\n TextGenerationTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport function createOllamaTextGeneration(\n getClient: GetClient\n): AiProviderRunFn<TextGenerationTaskInput, TextGenerationTaskOutput, OllamaModelConfig> {\n const run: AiProviderRunFn<\n TextGenerationTaskInput,\n TextGenerationTaskOutput,\n OllamaModelConfig\n > = async (input, model, update_progress, _signal) => {\n if (Array.isArray(input.prompt)) {\n getLogger().warn(\n \"Ollama_TextGeneration: array input received; processing sequentially (no native batch support)\"\n );\n const prompts = input.prompt as string[];\n const results: string[] = [];\n for (const item of prompts) {\n const r = await run({ ...input, prompt: item }, model, update_progress, _signal);\n results.push(r.text as string);\n }\n return { text: results };\n }\n\n update_progress(0, \"Starting Ollama text generation\");\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const response = await client.chat({\n model: modelName,\n messages: [{ role: \"user\", content: input.prompt as string }],\n options: {\n temperature: input.temperature,\n top_p: input.topP,\n num_predict: input.maxTokens,\n frequency_penalty: input.frequencyPenalty,\n presence_penalty: input.presencePenalty,\n },\n });\n\n update_progress(100, \"Completed Ollama text generation\");\n return { text: response.message.content };\n };\n return run;\n}\n\nexport function createOllamaTextGenerationStream(\n getClient: 
GetClient\n): AiProviderStreamFn<TextGenerationTaskInput, TextGenerationTaskOutput, OllamaModelConfig> {\n return async function* (\n input,\n model,\n signal\n ): AsyncIterable<StreamEvent<TextGenerationTaskOutput>> {\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const stream = await client.chat({\n model: modelName,\n messages: [{ role: \"user\", content: input.prompt as string }],\n options: {\n temperature: input.temperature,\n top_p: input.topP,\n num_predict: input.maxTokens,\n frequency_penalty: input.frequencyPenalty,\n presence_penalty: input.presencePenalty,\n },\n stream: true,\n });\n\n const onAbort = () => stream.abort();\n signal.addEventListener(\"abort\", onAbort, { once: true });\n try {\n for await (const chunk of stream) {\n const delta = chunk.message.content;\n if (delta) {\n yield { type: \"text-delta\", port: \"text\", textDelta: delta };\n }\n }\n yield { type: \"finish\", data: {} as TextGenerationTaskOutput };\n } finally {\n signal.removeEventListener(\"abort\", onAbort);\n }\n };\n}\n",
|
|
13
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextRewriterTaskInput,\n TextRewriterTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport function createOllamaTextRewriter(\n getClient: GetClient\n): AiProviderRunFn<TextRewriterTaskInput, TextRewriterTaskOutput, OllamaModelConfig> {\n const run: AiProviderRunFn<\n TextRewriterTaskInput,\n TextRewriterTaskOutput,\n OllamaModelConfig\n > = async (input, model, update_progress, _signal) => {\n if (Array.isArray(input.text)) {\n getLogger().warn(\n \"Ollama_TextRewriter: array input received; processing sequentially (no native batch support)\"\n );\n const texts = input.text as string[];\n const results: string[] = [];\n for (const item of texts) {\n const r = await run({ ...input, text: item }, model, update_progress, _signal);\n results.push(r.text as string);\n }\n return { text: results };\n }\n\n update_progress(0, \"Starting Ollama text rewriting\");\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const response = await client.chat({\n model: modelName,\n messages: [\n { role: \"system\", content: input.prompt as string },\n { role: \"user\", content: input.text as string },\n ],\n });\n\n update_progress(100, \"Completed Ollama text rewriting\");\n return { text: response.message.content };\n };\n return run;\n}\n\nexport function createOllamaTextRewriterStream(\n getClient: GetClient\n): AiProviderStreamFn<TextRewriterTaskInput, TextRewriterTaskOutput, OllamaModelConfig> {\n return async function* (\n input,\n model,\n signal\n ): 
AsyncIterable<StreamEvent<TextRewriterTaskOutput>> {\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const stream = await client.chat({\n model: modelName,\n messages: [\n { role: \"system\", content: input.prompt as string },\n { role: \"user\", content: input.text as string },\n ],\n stream: true,\n });\n\n const onAbort = () => stream.abort();\n signal.addEventListener(\"abort\", onAbort, { once: true });\n try {\n for await (const chunk of stream) {\n const delta = chunk.message.content;\n if (delta) {\n yield { type: \"text-delta\", port: \"text\", textDelta: delta };\n }\n }\n yield { type: \"finish\", data: {} as TextRewriterTaskOutput };\n } finally {\n signal.removeEventListener(\"abort\", onAbort);\n }\n };\n}\n",
|
|
14
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n TextSummaryTaskInput,\n TextSummaryTaskOutput,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport { getLogger } from \"@workglow/util/worker\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport function createOllamaTextSummary(\n getClient: GetClient\n): AiProviderRunFn<TextSummaryTaskInput, TextSummaryTaskOutput, OllamaModelConfig> {\n const run: AiProviderRunFn<\n TextSummaryTaskInput,\n TextSummaryTaskOutput,\n OllamaModelConfig\n > = async (input, model, update_progress, _signal) => {\n if (Array.isArray(input.text)) {\n getLogger().warn(\n \"Ollama_TextSummary: array input received; processing sequentially (no native batch support)\"\n );\n const texts = input.text as string[];\n const results: string[] = [];\n for (const item of texts) {\n const r = await run({ ...input, text: item }, model, update_progress, _signal);\n results.push(r.text as string);\n }\n return { text: results };\n }\n\n update_progress(0, \"Starting Ollama text summarization\");\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const response = await client.chat({\n model: modelName,\n messages: [\n { role: \"system\", content: \"Summarize the following text concisely.\" },\n { role: \"user\", content: input.text as string },\n ],\n });\n\n update_progress(100, \"Completed Ollama text summarization\");\n return { text: response.message.content };\n };\n return run;\n}\n\nexport function createOllamaTextSummaryStream(\n getClient: GetClient\n): AiProviderStreamFn<TextSummaryTaskInput, TextSummaryTaskOutput, OllamaModelConfig> {\n return async function* (input, model, 
signal): AsyncIterable<StreamEvent<TextSummaryTaskOutput>> {\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const stream = await client.chat({\n model: modelName,\n messages: [\n { role: \"system\", content: \"Summarize the following text concisely.\" },\n { role: \"user\", content: input.text as string },\n ],\n stream: true,\n });\n\n const onAbort = () => stream.abort();\n signal.addEventListener(\"abort\", onAbort, { once: true });\n try {\n for await (const chunk of stream) {\n const delta = chunk.message.content;\n if (delta) {\n yield { type: \"text-delta\", port: \"text\", textDelta: delta };\n }\n }\n yield { type: \"finish\", data: {} as TextSummaryTaskOutput };\n } finally {\n signal.removeEventListener(\"abort\", onAbort);\n }\n };\n}\n",
|
|
15
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { buildToolDescription, filterValidToolCalls } from \"@workglow/ai/worker\";\nimport type {\n AiProviderRunFn,\n AiProviderStreamFn,\n ToolCallingTaskInput,\n ToolCallingTaskOutput,\n ToolCalls,\n ToolDefinition,\n} from \"@workglow/ai\";\nimport type { StreamEvent } from \"@workglow/task-graph\";\nimport { getLogger, parsePartialJson } from \"@workglow/util/worker\";\nimport type { OllamaModelConfig } from \"./Ollama_ModelSchema\";\nimport { getOllamaModelName } from \"./Ollama_ModelUtil\";\n\ntype GetClient = (model: OllamaModelConfig | undefined) => Promise<any>;\n\nexport type OllamaToolCallingMessagesFn = (\n input: ToolCallingTaskInput\n) => Array<{ role: string; content: string }>;\n\nfunction mapOllamaTools(tools: ReadonlyArray<ToolDefinition>) {\n return tools.map((t) => ({\n type: \"function\" as const,\n function: {\n name: t.name,\n description: buildToolDescription(t),\n parameters: t.inputSchema as any,\n },\n }));\n}\n\nexport function createOllamaToolCalling(\n getClient: GetClient,\n buildMessages: OllamaToolCallingMessagesFn\n): AiProviderRunFn<ToolCallingTaskInput, ToolCallingTaskOutput, OllamaModelConfig> {\n const run: AiProviderRunFn<\n ToolCallingTaskInput,\n ToolCallingTaskOutput,\n OllamaModelConfig\n > = async (input, model, update_progress, _signal) => {\n if (Array.isArray(input.prompt)) {\n getLogger().warn(\n \"Ollama_ToolCalling: array input received; processing sequentially (no native batch support)\"\n );\n const prompts = input.prompt as string[];\n const texts: string[] = [];\n const toolCallsList: ToolCalls[] = [];\n for (const item of prompts) {\n const r = await run({ ...input, prompt: item }, model, update_progress, _signal);\n texts.push(r.text as string);\n toolCallsList.push(r.toolCalls as ToolCalls);\n }\n return { text: texts, toolCalls: toolCallsList } as unknown as ToolCallingTaskOutput;\n 
}\n\n update_progress(0, \"Starting Ollama tool calling\");\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const messages = buildMessages(input);\n\n const tools = input.toolChoice === \"none\" ? undefined : mapOllamaTools(input.tools);\n\n const response = await client.chat({\n model: modelName,\n messages,\n tools,\n options: {\n temperature: input.temperature,\n num_predict: input.maxTokens,\n },\n });\n\n const text = response.message.content ?? \"\";\n const toolCalls: ToolCalls = [];\n (response.message.tool_calls ?? []).forEach((tc: any, index: number) => {\n let parsedInput: Record<string, unknown> = {};\n const fnArgs = tc.function.arguments;\n if (typeof fnArgs === \"string\") {\n try {\n parsedInput = JSON.parse(fnArgs);\n } catch {\n const partial = parsePartialJson(fnArgs);\n parsedInput = (partial as Record<string, unknown>) ?? {};\n }\n } else if (fnArgs != null) {\n parsedInput = fnArgs as Record<string, unknown>;\n }\n const id = `call_${index}`;\n toolCalls.push({ id, name: tc.function.name as string, input: parsedInput });\n });\n\n update_progress(100, \"Completed Ollama tool calling\");\n return { text, toolCalls: filterValidToolCalls(toolCalls, input.tools) };\n };\n return run;\n}\n\nexport function createOllamaToolCallingStream(\n getClient: GetClient,\n buildMessages: OllamaToolCallingMessagesFn\n): AiProviderStreamFn<ToolCallingTaskInput, ToolCallingTaskOutput, OllamaModelConfig> {\n return async function* (input, model, signal): AsyncIterable<StreamEvent<ToolCallingTaskOutput>> {\n const client = await getClient(model);\n const modelName = getOllamaModelName(model);\n\n const messages = buildMessages(input);\n\n const tools = input.toolChoice === \"none\" ? 
undefined : mapOllamaTools(input.tools);\n\n const stream = await client.chat({\n model: modelName,\n messages,\n tools,\n options: {\n temperature: input.temperature,\n num_predict: input.maxTokens,\n },\n stream: true,\n });\n\n const onAbort = () => stream.abort();\n signal.addEventListener(\"abort\", onAbort, { once: true });\n\n let accumulatedText = \"\";\n const toolCalls: ToolCalls = [];\n let callIndex = 0;\n\n try {\n for await (const chunk of stream) {\n const delta = chunk.message.content;\n if (delta) {\n accumulatedText += delta;\n yield { type: \"text-delta\", port: \"text\", textDelta: delta };\n }\n\n const chunkToolCalls = (chunk.message as any).tool_calls;\n if (Array.isArray(chunkToolCalls) && chunkToolCalls.length > 0) {\n for (const tc of chunkToolCalls) {\n let parsedInput: Record<string, unknown> = {};\n const fnArgs = tc.function.arguments;\n if (typeof fnArgs === \"string\") {\n try {\n parsedInput = JSON.parse(fnArgs);\n } catch {\n const partial = parsePartialJson(fnArgs);\n parsedInput = (partial as Record<string, unknown>) ?? {};\n }\n } else if (fnArgs != null) {\n parsedInput = fnArgs as Record<string, unknown>;\n }\n const id = `call_${callIndex++}`;\n toolCalls.push({ id, name: tc.function.name as string, input: parsedInput });\n }\n yield { type: \"object-delta\", port: \"toolCalls\", objectDelta: [...toolCalls] };\n }\n }\n\n const validToolCalls = filterValidToolCalls(toolCalls, input.tools);\n yield {\n type: \"finish\",\n data: { text: accumulatedText, toolCalls: validToolCalls } as ToolCallingTaskOutput,\n };\n } finally {\n signal.removeEventListener(\"abort\", onAbort);\n }\n };\n}\n",
|
|
16
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n QueuedAiProvider,\n type AiProviderReactiveRunFn,\n type AiProviderRunFn,\n type AiProviderStreamFn,\n} from \"@workglow/ai\";\nimport { OLLAMA } from \"./common/Ollama_Constants\";\nimport type { OllamaModelConfig } from \"./common/Ollama_ModelSchema\";\n\n/** Main-thread registration (inline or worker-backed); creates the default job queue. */\nexport class OllamaQueuedProvider extends QueuedAiProvider<OllamaModelConfig> {\n readonly name = OLLAMA;\n readonly isLocal = true;\n readonly supportsBrowser = true;\n\n readonly taskTypes = [\n \"ModelInfoTask\",\n \"TextGenerationTask\",\n \"TextEmbeddingTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"ToolCallingTask\",\n \"ModelSearchTask\",\n ] as const;\n\n constructor(\n tasks?: Record<string, AiProviderRunFn<any, any, OllamaModelConfig>>,\n streamTasks?: Record<string, AiProviderStreamFn<any, any, OllamaModelConfig>>,\n reactiveTasks?: Record<string, AiProviderReactiveRunFn<any, any, OllamaModelConfig>>\n ) {\n super(tasks, streamTasks, reactiveTasks);\n }\n}\n",
|
|
17
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport type { AiProviderRegisterOptions } from \"@workglow/ai\";\nimport { OLLAMA_STREAM_TASKS, OLLAMA_TASKS } from \"./common/Ollama_JobRunFns\";\nimport { OllamaQueuedProvider } from \"./OllamaQueuedProvider\";\n\nexport async function registerOllamaInline(options?: AiProviderRegisterOptions): Promise<void> {\n await new OllamaQueuedProvider(OLLAMA_TASKS, OLLAMA_STREAM_TASKS).register(options ?? {});\n}\n",
|
|
18
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport { getLogger, globalServiceRegistry, WORKER_SERVER } from \"@workglow/util/worker\";\nimport { OLLAMA_STREAM_TASKS, OLLAMA_TASKS } from \"./common/Ollama_JobRunFns\";\nimport { OllamaProvider } from \"./OllamaProvider\";\n\nexport async function registerOllamaWorker(): Promise<void> {\n const workerServer = globalServiceRegistry.get(WORKER_SERVER);\n new OllamaProvider(OLLAMA_TASKS, OLLAMA_STREAM_TASKS).registerOnWorkerServer(workerServer);\n workerServer.sendReady();\n getLogger().info(\"Ollama worker job run functions registered\");\n}\n",
|
|
19
|
+
"/**\n * @license\n * Copyright 2025 Steven Roussey <sroussey@gmail.com>\n * SPDX-License-Identifier: Apache-2.0\n */\n\nimport {\n AiProvider,\n type AiProviderReactiveRunFn,\n type AiProviderRunFn,\n type AiProviderStreamFn,\n} from \"@workglow/ai/worker\";\nimport { OLLAMA } from \"./common/Ollama_Constants\";\nimport type { OllamaModelConfig } from \"./common/Ollama_ModelSchema\";\n\n/**\n * AI provider for Ollama local LLM server.\n *\n * Supports text generation, text embedding, text rewriting, and text summarization\n * via the Ollama API using the `ollama` SDK.\n *\n * Ollama runs locally and does not require an API key -- only a `base_url`\n * (defaults to `http://localhost:11434`).\n *\n * Task run functions are injected via the constructor so that the `ollama` SDK\n * is only imported where actually needed (inline mode, worker server), not on\n * the main thread in worker mode.\n */\nexport class OllamaProvider extends AiProvider<OllamaModelConfig> {\n readonly name = OLLAMA;\n readonly isLocal = true;\n readonly supportsBrowser = true;\n\n readonly taskTypes = [\n \"ModelInfoTask\",\n \"TextGenerationTask\",\n \"TextEmbeddingTask\",\n \"TextRewriterTask\",\n \"TextSummaryTask\",\n \"ToolCallingTask\",\n \"ModelSearchTask\",\n ] as const;\n\n constructor(\n tasks?: Record<string, AiProviderRunFn<any, any, OllamaModelConfig>>,\n streamTasks?: Record<string, AiProviderStreamFn<any, any, OllamaModelConfig>>,\n reactiveTasks?: Record<string, AiProviderReactiveRunFn<any, any, OllamaModelConfig>>\n ) {\n super(tasks, streamTasks, reactiveTasks);\n }\n}\n"
|
|
20
|
+
],
|
|
21
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAMO,IAAM,SAAS;AACf,IAAM,0BAA0B;;;ACChC,SAAS,kBAAkB,CAAC,OAA8C;AAAA,EAC/E,MAAM,OAAO,OAAO,iBAAiB;AAAA,EACrC,IAAI,CAAC,MAAM;AAAA,IACT,MAAM,IAAI,MAAM,mDAAmD;AAAA,EACrE;AAAA,EACA,OAAO;AAAA;;;ACFT,IAAI;AAEJ,eAAsB,aAAa,GAA0D;AAAA,EAC3F,IAAI,CAAC,cAAc;AAAA,IACjB,IAAI;AAAA,MACF,MAAM,MAAM,MAAa;AAAA,MACzB,eAAe,IAAI;AAAA,MACnB,MAAM;AAAA,MACN,MAAM,IAAI,MAAM,sEAAsE;AAAA;AAAA,EAE1F;AAAA,EACA,OAAO;AAAA;AAGT,eAAsB,SAAS,CAAC,OAAsC;AAAA,EACpE,MAAM,SAAS,MAAM,cAAc;AAAA,EACnC,MAAM,OAAO,OAAO,iBAAiB,YAAY;AAAA,EACjD,OAAO,IAAI,OAAO,EAAE,KAAK,CAAC;AAAA;AAGrB,IAAM,eAAe;;ACzB5B;;;ACMO,SAAS,qBAAqB,CACnC,YAC6E;AAAA,EAC7E,OAAO,OAAO,OAAO,UAAU;AAAA,IAC7B,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,IAAI,YAAY;AAAA,IAChB,IAAI,YAAY;AAAA,IAChB,IAAI,aAA4C;AAAA,IAEhD,IAAI;AAAA,MACF,MAAM,eAAe,MAAM,OAAO,KAAK,EAAE,OAAO,UAAU,CAAC;AAAA,MAC3D,YAAY;AAAA,MACZ,MAAM,OAAQ,aAAqB;AAAA,MACnC,IAAI,QAAQ,MAAM;AAAA,QAChB,aAAa,EAAE,OAAO,KAAK;AAAA,MAC7B;AAAA,MACA,MAAM;AAAA,IAIR,IAAI;AAAA,MACF,MAAM,aAAa,MAAM,OAAO,GAAG;AAAA,MACnC,YAAY,WAAW,OAAO,KAAK,CAAC,MAAW,EAAE,SAAS,SAAS;AAAA,MACnE,MAAM;AAAA,IAIR,OAAO;AAAA,MACL,OAAO,MAAM;AAAA,MACb,UAAU;AAAA,MACV,WAAW;AAAA,MACX,kBAAkB;AAAA,MAClB,eAAe;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA;AAAA;;;ACtCG,SAAS,uBAAuB,CACrC,YAC8D;AAAA,EAC9D,OAAO,YAAY;AAAA,IACjB,IAAI;AAAA,MACF,MAAM,SAAS,MAAM,WAAU,SAAS;AAAA,MACxC,MAAM,WAAW,MAAM,OAAO,KAAK;AAAA,MACnC,MAAM,UAAU,SAAS,OAAO,IAAI,CAAC,OAAY;AAAA,QAC/C,IAAI,EAAE;AAAA,QACN,OAAO,GAAG,EAAE,SAAS,EAAE,QAAQ,mBAAmB,EAAE,QAAQ;AAAA,QAC5D,aAAa,GAAG,EAAE,QAAQ,mBAAmB,EAAE,QAAQ;AAAA,QACvD,QAAQ;AAAA,UACN,UAAU,EAAE;AAAA,UACZ,UAAU;AAAA,UACV,OAAO,EAAE;AAAA,UACT,aAAa,GAAG,EAAE,QAAQ,mBAAmB,EAAE,QAAQ;AAAA,UACvD,OAAO,CAAC;AAAA,UACR,iBAAiB,EAAE,YAAY,EAAE,KAAK;AAAA,UACtC,UAAU,CAAC;AAAA,QACb;AAAA,QACA,KAAK;AAAA,MACP,EAAE;AAAA,MACF,OAAO,EAAE,QAAQ;AAAA,MACjB,MAAM;AAAA,MACN,OAAO,EAAE,SAAS,CAAC,EAAE;AAAA;AAAA;AAAA;;;ACpBpB,SAAS,yBAAyB,CACvC,YACqF;AAAA,EACrF,OAAO,OAAO,OAAO,OAAO,iBAAiB,YAAY;AAAA,IACvD,gBAAgB,GAAG,gCAAgC
;AAAA,IACnD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,QAAQ,MAAM,QAAQ,MAAM,IAAI,IAAI,MAAM,OAAO,CAAC,MAAM,IAAI;AAAA,IAElE,MAAM,WAAW,MAAM,OAAO,MAAM;AAAA,MAClC,OAAO;AAAA,MACP,OAAO;AAAA,IACT,CAAC;AAAA,IAED,gBAAgB,KAAK,iCAAiC;AAAA,IAEtD,IAAI,MAAM,QAAQ,MAAM,IAAI,GAAG;AAAA,MAC7B,OAAO;AAAA,QACL,QAAQ,SAAS,WAAW,IAAI,CAAC,MAAgB,IAAI,aAAa,CAAC,CAAC;AAAA,MACtE;AAAA,IACF;AAAA,IACA,OAAO,EAAE,QAAQ,IAAI,aAAa,SAAS,WAAW,EAAE,EAAE;AAAA;AAAA;;;ACzB9D;AAMO,SAAS,0BAA0B,CACxC,YACuF;AAAA,EACvF,MAAM,MAIF,OAAO,OAAO,OAAO,iBAAiB,YAAY;AAAA,IACpD,IAAI,MAAM,QAAQ,MAAM,MAAM,GAAG;AAAA,MAC/B,UAAU,EAAE,KACV,gGACF;AAAA,MACA,MAAM,UAAU,MAAM;AAAA,MACtB,MAAM,UAAoB,CAAC;AAAA,MAC3B,WAAW,QAAQ,SAAS;AAAA,QAC1B,MAAM,IAAI,MAAM,IAAI,KAAK,OAAO,QAAQ,KAAK,GAAG,OAAO,iBAAiB,OAAO;AAAA,QAC/E,QAAQ,KAAK,EAAE,IAAc;AAAA,MAC/B;AAAA,MACA,OAAO,EAAE,MAAM,QAAQ;AAAA,IACzB;AAAA,IAEA,gBAAgB,GAAG,iCAAiC;AAAA,IACpD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,WAAW,MAAM,OAAO,KAAK;AAAA,MACjC,OAAO;AAAA,MACP,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,MAAM,OAAiB,CAAC;AAAA,MAC5D,SAAS;AAAA,QACP,aAAa,MAAM;AAAA,QACnB,OAAO,MAAM;AAAA,QACb,aAAa,MAAM;AAAA,QACnB,mBAAmB,MAAM;AAAA,QACzB,kBAAkB,MAAM;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,IAED,gBAAgB,KAAK,kCAAkC;AAAA,IACvD,OAAO,EAAE,MAAM,SAAS,QAAQ,QAAQ;AAAA;AAAA,EAE1C,OAAO;AAAA;AAGF,SAAS,gCAAgC,CAC9C,YAC0F;AAAA,EAC1F,OAAO,gBAAgB,CACrB,OACA,OACA,QACsD;AAAA,IACtD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,SAAS,MAAM,OAAO,KAAK;AAAA,MAC/B,OAAO;AAAA,MACP,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,MAAM,OAAiB,CAAC;AAAA,MAC5D,SAAS;AAAA,QACP,aAAa,MAAM;AAAA,QACnB,OAAO,MAAM;AAAA,QACb,aAAa,MAAM;AAAA,QACnB,mBAAmB,MAAM;AAAA,QACzB,kBAAkB,MAAM;AAAA,MAC1B;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAAA,IAED,MAAM,UAAU,MAAM,OAAO,MAAM;AAAA,IACnC,OAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,IACxD,IAAI;AAAA,MACF,iBAAiB,SAAS,QAAQ;AAAA,QAChC,MAAM,QAAQ,MAAM,QAAQ;AAAA,QAC5B,IAAI,OAAO;AAAA,UACT,MAAM,EAAE,MAAM,cAAc,MAAM,QAAQ,WAAW,MAAM;AAAA,QAC7D;AAAA,MACF;AAAA,MACA,MAAM,EAAE,MAAM,UAAU,M
AAM,CAAC,EAA8B;AAAA,cAC7D;AAAA,MACA,OAAO,oBAAoB,SAAS,OAAO;AAAA;AAAA;AAAA;;;ACpFjD,sBAAS;AAMF,SAAS,wBAAwB,CACtC,YACmF;AAAA,EACnF,MAAM,MAIF,OAAO,OAAO,OAAO,iBAAiB,YAAY;AAAA,IACpD,IAAI,MAAM,QAAQ,MAAM,IAAI,GAAG;AAAA,MAC7B,WAAU,EAAE,KACV,8FACF;AAAA,MACA,MAAM,QAAQ,MAAM;AAAA,MACpB,MAAM,UAAoB,CAAC;AAAA,MAC3B,WAAW,QAAQ,OAAO;AAAA,QACxB,MAAM,IAAI,MAAM,IAAI,KAAK,OAAO,MAAM,KAAK,GAAG,OAAO,iBAAiB,OAAO;AAAA,QAC7E,QAAQ,KAAK,EAAE,IAAc;AAAA,MAC/B;AAAA,MACA,OAAO,EAAE,MAAM,QAAQ;AAAA,IACzB;AAAA,IAEA,gBAAgB,GAAG,gCAAgC;AAAA,IACnD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,WAAW,MAAM,OAAO,KAAK;AAAA,MACjC,OAAO;AAAA,MACP,UAAU;AAAA,QACR,EAAE,MAAM,UAAU,SAAS,MAAM,OAAiB;AAAA,QAClD,EAAE,MAAM,QAAQ,SAAS,MAAM,KAAe;AAAA,MAChD;AAAA,IACF,CAAC;AAAA,IAED,gBAAgB,KAAK,iCAAiC;AAAA,IACtD,OAAO,EAAE,MAAM,SAAS,QAAQ,QAAQ;AAAA;AAAA,EAE1C,OAAO;AAAA;AAGF,SAAS,8BAA8B,CAC5C,YACsF;AAAA,EACtF,OAAO,gBAAgB,CACrB,OACA,OACA,QACoD;AAAA,IACpD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,SAAS,MAAM,OAAO,KAAK;AAAA,MAC/B,OAAO;AAAA,MACP,UAAU;AAAA,QACR,EAAE,MAAM,UAAU,SAAS,MAAM,OAAiB;AAAA,QAClD,EAAE,MAAM,QAAQ,SAAS,MAAM,KAAe;AAAA,MAChD;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAAA,IAED,MAAM,UAAU,MAAM,OAAO,MAAM;AAAA,IACnC,OAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,IACxD,IAAI;AAAA,MACF,iBAAiB,SAAS,QAAQ;AAAA,QAChC,MAAM,QAAQ,MAAM,QAAQ;AAAA,QAC5B,IAAI,OAAO;AAAA,UACT,MAAM,EAAE,MAAM,cAAc,MAAM,QAAQ,WAAW,MAAM;AAAA,QAC7D;AAAA,MACF;AAAA,MACA,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,EAA4B;AAAA,cAC3D;AAAA,MACA,OAAO,oBAAoB,SAAS,OAAO;AAAA;AAAA;AAAA;;;AC5EjD,sBAAS;AAMF,SAAS,uBAAuB,CACrC,YACiF;AAAA,EACjF,MAAM,MAIF,OAAO,OAAO,OAAO,iBAAiB,YAAY;AAAA,IACpD,IAAI,MAAM,QAAQ,MAAM,IAAI,GAAG;AAAA,MAC7B,WAAU,EAAE,KACV,6FACF;AAAA,MACA,MAAM,QAAQ,MAAM;AAAA,MACpB,MAAM,UAAoB,CAAC;AAAA,MAC3B,WAAW,QAAQ,OAAO;AAAA,QACxB,MAAM,IAAI,MAAM,IAAI,KAAK,OAAO,MAAM,KAAK,GAAG,OAAO,iBAAiB,OAAO;AAAA,QAC7E,QAAQ,KAAK,EAAE,IAAc;AAAA,MAC/B;AAAA,MACA,OAAO,EAAE,MAAM,QAAQ;AAAA,IACzB;AAAA,IAEA,gBAAgB,GAAG,oCAAoC;AAAA,IACvD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA
,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,WAAW,MAAM,OAAO,KAAK;AAAA,MACjC,OAAO;AAAA,MACP,UAAU;AAAA,QACR,EAAE,MAAM,UAAU,SAAS,0CAA0C;AAAA,QACrE,EAAE,MAAM,QAAQ,SAAS,MAAM,KAAe;AAAA,MAChD;AAAA,IACF,CAAC;AAAA,IAED,gBAAgB,KAAK,qCAAqC;AAAA,IAC1D,OAAO,EAAE,MAAM,SAAS,QAAQ,QAAQ;AAAA;AAAA,EAE1C,OAAO;AAAA;AAGF,SAAS,6BAA6B,CAC3C,YACoF;AAAA,EACpF,OAAO,gBAAgB,CAAC,OAAO,OAAO,QAA2D;AAAA,IAC/F,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,SAAS,MAAM,OAAO,KAAK;AAAA,MAC/B,OAAO;AAAA,MACP,UAAU;AAAA,QACR,EAAE,MAAM,UAAU,SAAS,0CAA0C;AAAA,QACrE,EAAE,MAAM,QAAQ,SAAS,MAAM,KAAe;AAAA,MAChD;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAAA,IAED,MAAM,UAAU,MAAM,OAAO,MAAM;AAAA,IACnC,OAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,IACxD,IAAI;AAAA,MACF,iBAAiB,SAAS,QAAQ;AAAA,QAChC,MAAM,QAAQ,MAAM,QAAQ;AAAA,QAC5B,IAAI,OAAO;AAAA,UACT,MAAM,EAAE,MAAM,cAAc,MAAM,QAAQ,WAAW,MAAM;AAAA,QAC7D;AAAA,MACF;AAAA,MACA,MAAM,EAAE,MAAM,UAAU,MAAM,CAAC,EAA2B;AAAA,cAC1D;AAAA,MACA,OAAO,oBAAoB,SAAS,OAAO;AAAA;AAAA;AAAA;;;AC/EjD;AAUA,sBAAS;AAUT,SAAS,cAAc,CAAC,OAAsC;AAAA,EAC5D,OAAO,MAAM,IAAI,CAAC,OAAO;AAAA,IACvB,MAAM;AAAA,IACN,UAAU;AAAA,MACR,MAAM,EAAE;AAAA,MACR,aAAa,qBAAqB,CAAC;AAAA,MACnC,YAAY,EAAE;AAAA,IAChB;AAAA,EACF,EAAE;AAAA;AAGG,SAAS,uBAAuB,CACrC,YACA,eACiF;AAAA,EACjF,MAAM,MAIF,OAAO,OAAO,OAAO,iBAAiB,YAAY;AAAA,IACpD,IAAI,MAAM,QAAQ,MAAM,MAAM,GAAG;AAAA,MAC/B,WAAU,EAAE,KACV,6FACF;AAAA,MACA,MAAM,UAAU,MAAM;AAAA,MACtB,MAAM,QAAkB,CAAC;AAAA,MACzB,MAAM,gBAA6B,CAAC;AAAA,MACpC,WAAW,QAAQ,SAAS;AAAA,QAC1B,MAAM,IAAI,MAAM,IAAI,KAAK,OAAO,QAAQ,KAAK,GAAG,OAAO,iBAAiB,OAAO;AAAA,QAC/E,MAAM,KAAK,EAAE,IAAc;AAAA,QAC3B,cAAc,KAAK,EAAE,SAAsB;AAAA,MAC7C;AAAA,MACA,OAAO,EAAE,MAAM,OAAO,WAAW,cAAc;AAAA,IACjD;AAAA,IAEA,gBAAgB,GAAG,8BAA8B;AAAA,IACjD,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,WAAW,cAAc,KAAK;AAAA,IAEpC,MAAM,QAAQ,MAAM,eAAe,SAAS,YAAY,eAAe,MAAM,KAAK;AAAA,IAElF,MAAM,WAAW,MAAM,OAAO,KAAK;AAAA,MACjC,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,SAAS;AAAA,QACP,aAAa,MAAM;AAAA,QACnB,aAAa,MAAM;AAAA,MACrB;AAAA,IACF,CAAC;AAAA,IAE
D,MAAM,OAAO,SAAS,QAAQ,WAAW;AAAA,IACzC,MAAM,YAAuB,CAAC;AAAA,KAC7B,SAAS,QAAQ,cAAc,CAAC,GAAG,QAAQ,CAAC,IAAS,UAAkB;AAAA,MACtE,IAAI,cAAuC,CAAC;AAAA,MAC5C,MAAM,SAAS,GAAG,SAAS;AAAA,MAC3B,IAAI,OAAO,WAAW,UAAU;AAAA,QAC9B,IAAI;AAAA,UACF,cAAc,KAAK,MAAM,MAAM;AAAA,UAC/B,MAAM;AAAA,UACN,MAAM,UAAU,iBAAiB,MAAM;AAAA,UACvC,cAAe,WAAuC,CAAC;AAAA;AAAA,MAE3D,EAAO,SAAI,UAAU,MAAM;AAAA,QACzB,cAAc;AAAA,MAChB;AAAA,MACA,MAAM,KAAK,QAAQ;AAAA,MACnB,UAAU,KAAK,EAAE,IAAI,MAAM,GAAG,SAAS,MAAgB,OAAO,YAAY,CAAC;AAAA,KAC5E;AAAA,IAED,gBAAgB,KAAK,+BAA+B;AAAA,IACpD,OAAO,EAAE,MAAM,WAAW,qBAAqB,WAAW,MAAM,KAAK,EAAE;AAAA;AAAA,EAEzE,OAAO;AAAA;AAGF,SAAS,6BAA6B,CAC3C,YACA,eACoF;AAAA,EACpF,OAAO,gBAAgB,CAAC,OAAO,OAAO,QAA2D;AAAA,IAC/F,MAAM,SAAS,MAAM,WAAU,KAAK;AAAA,IACpC,MAAM,YAAY,mBAAmB,KAAK;AAAA,IAE1C,MAAM,WAAW,cAAc,KAAK;AAAA,IAEpC,MAAM,QAAQ,MAAM,eAAe,SAAS,YAAY,eAAe,MAAM,KAAK;AAAA,IAElF,MAAM,SAAS,MAAM,OAAO,KAAK;AAAA,MAC/B,OAAO;AAAA,MACP;AAAA,MACA;AAAA,MACA,SAAS;AAAA,QACP,aAAa,MAAM;AAAA,QACnB,aAAa,MAAM;AAAA,MACrB;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AAAA,IAED,MAAM,UAAU,MAAM,OAAO,MAAM;AAAA,IACnC,OAAO,iBAAiB,SAAS,SAAS,EAAE,MAAM,KAAK,CAAC;AAAA,IAExD,IAAI,kBAAkB;AAAA,IACtB,MAAM,YAAuB,CAAC;AAAA,IAC9B,IAAI,YAAY;AAAA,IAEhB,IAAI;AAAA,MACF,iBAAiB,SAAS,QAAQ;AAAA,QAChC,MAAM,QAAQ,MAAM,QAAQ;AAAA,QAC5B,IAAI,OAAO;AAAA,UACT,mBAAmB;AAAA,UACnB,MAAM,EAAE,MAAM,cAAc,MAAM,QAAQ,WAAW,MAAM;AAAA,QAC7D;AAAA,QAEA,MAAM,iBAAkB,MAAM,QAAgB;AAAA,QAC9C,IAAI,MAAM,QAAQ,cAAc,KAAK,eAAe,SAAS,GAAG;AAAA,UAC9D,WAAW,MAAM,gBAAgB;AAAA,YAC/B,IAAI,cAAuC,CAAC;AAAA,YAC5C,MAAM,SAAS,GAAG,SAAS;AAAA,YAC3B,IAAI,OAAO,WAAW,UAAU;AAAA,cAC9B,IAAI;AAAA,gBACF,cAAc,KAAK,MAAM,MAAM;AAAA,gBAC/B,MAAM;AAAA,gBACN,MAAM,UAAU,iBAAiB,MAAM;AAAA,gBACvC,cAAe,WAAuC,CAAC;AAAA;AAAA,YAE3D,EAAO,SAAI,UAAU,MAAM;AAAA,cACzB,cAAc;AAAA,YAChB;AAAA,YACA,MAAM,KAAK,QAAQ;AAAA,YACnB,UAAU,KAAK,EAAE,IAAI,MAAM,GAAG,SAAS,MAAgB,OAAO,YAAY,CAAC;AAAA,UAC7E;AAAA,UACA,MAAM,EAAE,MAAM,gBAAgB,MAAM,aAAa,aAAa,CAAC,GAAG,SAAS,EAAE;AAAA,QAC/E;AAAA,MACF;AAAA,MAEA,MAAM,iBAAiB,qBAAqB,WAAW,MAAM,KAAK;AAAA,MAClE,MAAM;AAAA,QACJ,MAAM;AAAA,QACN
,MAAM,EAAE,MAAM,iBAAiB,WAAW,eAAe;AAAA,MAC3D;AAAA,cACA;AAAA,MACA,OAAO,oBAAoB,SAAS,OAAO;AAAA;AAAA;AAAA;;;APtJ1C,IAAM,wBAAwB,2BAA2B,SAAS;AAClE,IAAM,uBAAuB,0BAA0B,SAAS;AAChE,IAAM,sBAAsB,yBAAyB,SAAS;AAC9D,IAAM,qBAAqB,wBAAwB,SAAS;AAE5D,IAAM,+BAA+B,iCAAiC,SAAS;AAC/E,IAAM,6BAA6B,+BAA+B,SAAS;AAC3E,IAAM,4BAA4B,8BAA8B,SAAS;AAEzE,IAAM,qBAAqB,wBAAwB,WAAW,kBAAkB;AAChF,IAAM,4BAA4B,8BACvC,WACA,kBACF;AAEO,IAAM,mBAAmB,sBAAsB,SAAS;AACxD,IAAM,qBAAqB,wBAAwB,SAAS;AAE5D,IAAM,eAA6E;AAAA,EACxF,eAAe;AAAA,EACf,oBAAoB;AAAA,EACpB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,iBAAiB;AAAA,EACjB,iBAAiB;AACnB;AAEO,IAAM,sBAGT;AAAA,EACF,oBAAoB;AAAA,EACpB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,iBAAiB;AACnB;;;AQlDA;AAAA;AAAA;AAUO,MAAM,6BAA6B,iBAAoC;AAAA,EACnE,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAElB,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EAEA,WAAW,CACT,OACA,aACA,eACA;AAAA,IACA,MAAM,OAAO,aAAa,aAAa;AAAA;AAE3C;;;AC5BA,eAAsB,oBAAoB,CAAC,SAAoD;AAAA,EAC7F,MAAM,IAAI,qBAAqB,cAAc,mBAAmB,EAAE,SAAS,WAAW,CAAC,CAAC;AAAA;;ACL1F,sBAAS;;;ACAT;AAAA;AAAA;AAsBO,MAAM,uBAAuB,WAA8B;AAAA,EACvD,OAAO;AAAA,EACP,UAAU;AAAA,EACV,kBAAkB;AAAA,EAElB,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EAEA,WAAW,CACT,OACA,aACA,eACA;AAAA,IACA,MAAM,OAAO,aAAa,aAAa;AAAA;AAE3C;;;ADxCA,eAAsB,oBAAoB,GAAkB;AAAA,EAC1D,MAAM,eAAe,sBAAsB,IAAI,aAAa;AAAA,EAC5D,IAAI,eAAe,cAAc,mBAAmB,EAAE,uBAAuB,YAAY;AAAA,EACzF,aAAa,UAAU;AAAA,EACvB,WAAU,EAAE,KAAK,4CAA4C;AAAA;",
|
|
22
|
+
"debugId": "03F4CFAA5A39F7EC64756E2164756E21",
|
|
23
|
+
"names": []
|
|
24
|
+
}
|