@librechat/agents 3.0.33 → 3.0.34
This diff shows the published contents of two package versions as they appear in their public registry, and is provided for informational purposes only.
- package/dist/cjs/common/enum.cjs +0 -1
- package/dist/cjs/common/enum.cjs.map +1 -1
- package/dist/cjs/llm/providers.cjs +0 -3
- package/dist/cjs/llm/providers.cjs.map +1 -1
- package/dist/cjs/utils/llm.cjs +0 -1
- package/dist/cjs/utils/llm.cjs.map +1 -1
- package/dist/esm/common/enum.mjs +0 -1
- package/dist/esm/common/enum.mjs.map +1 -1
- package/dist/esm/llm/providers.mjs +0 -3
- package/dist/esm/llm/providers.mjs.map +1 -1
- package/dist/esm/utils/llm.mjs +0 -1
- package/dist/esm/utils/llm.mjs.map +1 -1
- package/dist/types/common/enum.d.ts +0 -1
- package/dist/types/types/llm.d.ts +1 -6
- package/package.json +1 -2
- package/src/common/enum.ts +0 -1
- package/src/llm/providers.ts +0 -3
- package/src/types/llm.ts +0 -6
- package/src/utils/llm.ts +0 -1
- package/src/utils/llmConfig.ts +5 -3
- package/dist/cjs/llm/ollama/index.cjs +0 -70
- package/dist/cjs/llm/ollama/index.cjs.map +0 -1
- package/dist/cjs/llm/ollama/utils.cjs +0 -158
- package/dist/cjs/llm/ollama/utils.cjs.map +0 -1
- package/dist/esm/llm/ollama/index.mjs +0 -68
- package/dist/esm/llm/ollama/index.mjs.map +0 -1
- package/dist/esm/llm/ollama/utils.mjs +0 -155
- package/dist/esm/llm/ollama/utils.mjs.map +0 -1
- package/dist/types/llm/ollama/index.d.ts +0 -8
- package/dist/types/llm/ollama/utils.d.ts +0 -7
- package/src/llm/ollama/index.ts +0 -92
- package/src/llm/ollama/utils.ts +0 -193
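Taken together, this release removes the package's dedicated Ollama integration: the `Providers.OLLAMA` enum member, the custom `ChatOllama` subclass and its message-conversion utilities, the Ollama entries in the provider and type maps, and the `@langchain/ollama` dependency. The bundled `ollama` config preset now drives Ollama through the standard OpenAI client, pointed at the server's OpenAI-compatible endpoint (`http://localhost:11434/v1`).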
package/dist/cjs/common/enum.cjs
CHANGED

```diff
@@ -63,7 +63,6 @@ exports.Providers = void 0;
     Providers["ANTHROPIC"] = "anthropic";
     Providers["MISTRALAI"] = "mistralai";
     Providers["MISTRAL"] = "mistral";
-    Providers["OLLAMA"] = "ollama";
     Providers["GOOGLE"] = "google";
     Providers["AZURE"] = "azureOpenAI";
     Providers["DEEPSEEK"] = "deepseek";
```

package/dist/cjs/common/enum.cjs.map
CHANGED — source map regenerated (a single minified line; contents omitted)
package/dist/cjs/llm/providers.cjs
CHANGED

```diff
@@ -7,14 +7,12 @@ var index$1 = require('./bedrock/index.cjs');
 var index$3 = require('./anthropic/index.cjs');
 var index$2 = require('./openrouter/index.cjs');
 var index$5 = require('./vertexai/index.cjs');
-var index$6 = require('./ollama/index.cjs');
 var _enum = require('../common/enum.cjs');
 
 // src/llm/providers.ts
 const llmProviders = {
     [_enum.Providers.XAI]: index$4.ChatXAI,
     [_enum.Providers.OPENAI]: index$4.ChatOpenAI,
-    [_enum.Providers.OLLAMA]: index$6.ChatOllama,
     [_enum.Providers.AZURE]: index$4.AzureChatOpenAI,
     [_enum.Providers.VERTEXAI]: index$5.ChatVertexAI,
     [_enum.Providers.DEEPSEEK]: index$4.ChatDeepSeek,
@@ -29,7 +27,6 @@ const llmProviders = {
 const manualToolStreamProviders = new Set([
     _enum.Providers.ANTHROPIC,
     _enum.Providers.BEDROCK,
-    _enum.Providers.OLLAMA,
 ]);
 const getChatModelClass = (provider) => {
     const ChatModelClass = llmProviders[provider];
```

package/dist/cjs/llm/providers.cjs.map
CHANGED — source map regenerated (a single minified line; contents omitted)
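The same two hunks repeat below in the ESM build: `dist/cjs` and `dist/esm` are parallel compilations of the same `src` files, so each Ollama removal shows up once per output format, and again in the type declarations and in `src` itself.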
package/dist/cjs/utils/llm.cjs
CHANGED

package/dist/cjs/utils/llm.cjs.map
CHANGED — source map regenerated (a single minified line; contents omitted)
package/dist/esm/common/enum.mjs
CHANGED

```diff
@@ -61,7 +61,6 @@ var Providers;
     Providers["ANTHROPIC"] = "anthropic";
     Providers["MISTRALAI"] = "mistralai";
     Providers["MISTRAL"] = "mistral";
-    Providers["OLLAMA"] = "ollama";
     Providers["GOOGLE"] = "google";
     Providers["AZURE"] = "azureOpenAI";
     Providers["DEEPSEEK"] = "deepseek";
```

package/dist/esm/common/enum.mjs.map
CHANGED — source map regenerated (a single minified line; contents omitted)
package/dist/esm/llm/providers.mjs
CHANGED

```diff
@@ -5,14 +5,12 @@ import { CustomChatBedrockConverse } from './bedrock/index.mjs';
 import { CustomAnthropic } from './anthropic/index.mjs';
 import { ChatOpenRouter } from './openrouter/index.mjs';
 import { ChatVertexAI } from './vertexai/index.mjs';
-import { ChatOllama } from './ollama/index.mjs';
 import { Providers } from '../common/enum.mjs';
 
 // src/llm/providers.ts
 const llmProviders = {
     [Providers.XAI]: ChatXAI,
     [Providers.OPENAI]: ChatOpenAI,
-    [Providers.OLLAMA]: ChatOllama,
     [Providers.AZURE]: AzureChatOpenAI,
     [Providers.VERTEXAI]: ChatVertexAI,
     [Providers.DEEPSEEK]: ChatDeepSeek,
@@ -27,7 +25,6 @@ const llmProviders = {
 const manualToolStreamProviders = new Set([
     Providers.ANTHROPIC,
     Providers.BEDROCK,
-    Providers.OLLAMA,
 ]);
 const getChatModelClass = (provider) => {
     const ChatModelClass = llmProviders[provider];
```

package/dist/esm/llm/providers.mjs.map
CHANGED — source map regenerated (a single minified line; contents omitted)
package/dist/esm/utils/llm.mjs
CHANGED

package/dist/esm/utils/llm.mjs.map
CHANGED — source map regenerated (a single minified line; contents omitted)
package/dist/types/types/llm.d.ts
CHANGED

```diff
@@ -1,4 +1,3 @@
-import { ChatOllama } from '@langchain/ollama';
 import { ChatMistralAI } from '@langchain/mistralai';
 import type { BindToolsInput, BaseChatModelParams } from '@langchain/core/language_models/chat_models';
 import type { OpenAIChatInput, ChatOpenAIFields, AzureOpenAIInput, ClientOptions as OAIClientOptions } from '@langchain/openai';
@@ -13,7 +12,6 @@ import type { RequestOptions } from '@google/generative-ai';
 import type { StructuredTool } from '@langchain/core/tools';
 import type { AnthropicInput } from '@langchain/anthropic';
 import type { Runnable } from '@langchain/core/runnables';
-import type { ChatOllamaInput } from '@langchain/ollama';
 import type { OpenAI as OpenAIClient } from 'openai';
 import type { ChatXAIInput } from '@langchain/xai';
 import { AzureChatOpenAI, ChatDeepSeek, ChatOpenAI, ChatXAI } from '@/llm/openai';
@@ -39,7 +37,6 @@ export type AnthropicReasoning = {
     thinkingBudget?: number;
 };
 export type OpenAIClientOptions = ChatOpenAIFields;
-export type OllamaClientOptions = ChatOllamaInput;
 export type AnthropicClientOptions = AnthropicInput;
 export type MistralAIClientOptions = ChatMistralAIInput;
 export type VertexAIClientOptions = ChatVertexAIInput & {
@@ -57,7 +54,7 @@ export type GoogleClientOptions = GoogleGenerativeAIChatInput & {
 };
 export type DeepSeekClientOptions = ChatDeepSeekCallOptions;
 export type XAIClientOptions = ChatXAIInput;
-export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockConverseClientOptions | GoogleClientOptions | DeepSeekClientOptions | XAIClientOptions;
+export type ClientOptions = OpenAIClientOptions | AzureClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockConverseClientOptions | GoogleClientOptions | DeepSeekClientOptions | XAIClientOptions;
 export type SharedLLMConfig = {
     provider: Providers;
     _lc_stream_delay?: number;
@@ -72,7 +69,6 @@ export type LLMConfig = SharedLLMConfig & ClientOptions & {
 export type ProviderOptionsMap = {
     [Providers.AZURE]: AzureClientOptions;
     [Providers.OPENAI]: OpenAIClientOptions;
-    [Providers.OLLAMA]: OllamaClientOptions;
     [Providers.GOOGLE]: GoogleClientOptions;
     [Providers.VERTEXAI]: VertexAIClientOptions;
     [Providers.DEEPSEEK]: DeepSeekClientOptions;
@@ -86,7 +82,6 @@ export type ProviderOptionsMap = {
 export type ChatModelMap = {
     [Providers.XAI]: ChatXAI;
     [Providers.OPENAI]: ChatOpenAI;
-    [Providers.OLLAMA]: ChatOllama;
     [Providers.AZURE]: AzureChatOpenAI;
     [Providers.DEEPSEEK]: ChatDeepSeek;
     [Providers.VERTEXAI]: ChatVertexAI;
```
package/package.json
CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@librechat/agents",
-  "version": "3.0.33",
+  "version": "3.0.34",
   "main": "./dist/cjs/main.cjs",
   "module": "./dist/esm/main.mjs",
   "types": "./dist/types/index.d.ts",
@@ -106,7 +106,6 @@
     "@langchain/google-vertexai": "^0.2.18",
     "@langchain/langgraph": "^0.4.9",
     "@langchain/mistralai": "^0.2.1",
-    "@langchain/ollama": "^0.2.3",
     "@langchain/openai": "0.5.18",
     "@langchain/textsplitters": "^0.1.0",
     "@langchain/xai": "^0.0.3",
```
package/src/common/enum.ts
CHANGED
package/src/llm/providers.ts
CHANGED
```diff
@@ -16,13 +16,11 @@ import { CustomChatBedrockConverse } from '@/llm/bedrock';
 import { CustomAnthropic } from '@/llm/anthropic';
 import { ChatOpenRouter } from '@/llm/openrouter';
 import { ChatVertexAI } from '@/llm/vertexai';
-import { ChatOllama } from '@/llm/ollama';
 import { Providers } from '@/common';
 
 export const llmProviders: Partial<ChatModelConstructorMap> = {
   [Providers.XAI]: ChatXAI,
   [Providers.OPENAI]: ChatOpenAI,
-  [Providers.OLLAMA]: ChatOllama,
   [Providers.AZURE]: AzureChatOpenAI,
   [Providers.VERTEXAI]: ChatVertexAI,
   [Providers.DEEPSEEK]: ChatDeepSeek,
@@ -38,7 +36,6 @@ export const llmProviders: Partial<ChatModelConstructorMap> = {
 export const manualToolStreamProviders = new Set<Providers | string>([
   Providers.ANTHROPIC,
   Providers.BEDROCK,
-  Providers.OLLAMA,
 ]);
 
 export const getChatModelClass = <P extends Providers>(
```
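With `Providers.OLLAMA` gone from both `llmProviders` and `manualToolStreamProviders` (which now holds only Anthropic and Bedrock), an Ollama lookup can no longer resolve. A minimal sketch of the post-change behavior, assuming `getChatModelClass` and `Providers` are re-exported from the package root:

```typescript
import { getChatModelClass, Providers } from '@librechat/agents';

// Registered providers still resolve to their chat-model constructors:
const ChatOpenAIClass = getChatModelClass(Providers.OPENAI);

// Providers.OLLAMA no longer exists, so the only way to request it is a
// raw string cast — and the lookup now fails at runtime:
try {
  getChatModelClass('ollama' as Providers);
} catch (e) {
  console.error(e); // Error: Unsupported LLM provider: ollama
}
```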
package/src/types/llm.ts
CHANGED
```diff
@@ -1,5 +1,4 @@
 // src/types/llm.ts
-import { ChatOllama } from '@langchain/ollama';
 import { ChatMistralAI } from '@langchain/mistralai';
 import type {
   BindToolsInput,
@@ -22,7 +21,6 @@ import type { RequestOptions } from '@google/generative-ai';
 import type { StructuredTool } from '@langchain/core/tools';
 import type { AnthropicInput } from '@langchain/anthropic';
 import type { Runnable } from '@langchain/core/runnables';
-import type { ChatOllamaInput } from '@langchain/ollama';
 import type { OpenAI as OpenAIClient } from 'openai';
 import type { ChatXAIInput } from '@langchain/xai';
 import {
@@ -57,7 +55,6 @@ export type AnthropicReasoning = {
   thinkingBudget?: number;
 };
 export type OpenAIClientOptions = ChatOpenAIFields;
-export type OllamaClientOptions = ChatOllamaInput;
 export type AnthropicClientOptions = AnthropicInput;
 export type MistralAIClientOptions = ChatMistralAIInput;
 export type VertexAIClientOptions = ChatVertexAIInput & {
@@ -80,7 +77,6 @@ export type XAIClientOptions = ChatXAIInput;
 export type ClientOptions =
   | OpenAIClientOptions
   | AzureClientOptions
-  | OllamaClientOptions
   | AnthropicClientOptions
   | MistralAIClientOptions
   | VertexAIClientOptions
@@ -103,7 +99,6 @@ export type LLMConfig = SharedLLMConfig &
 export type ProviderOptionsMap = {
   [Providers.AZURE]: AzureClientOptions;
   [Providers.OPENAI]: OpenAIClientOptions;
-  [Providers.OLLAMA]: OllamaClientOptions;
   [Providers.GOOGLE]: GoogleClientOptions;
   [Providers.VERTEXAI]: VertexAIClientOptions;
   [Providers.DEEPSEEK]: DeepSeekClientOptions;
@@ -118,7 +113,6 @@ export type ProviderOptionsMap = {
 export type ChatModelMap = {
   [Providers.XAI]: ChatXAI;
   [Providers.OPENAI]: ChatOpenAI;
-  [Providers.OLLAMA]: ChatOllama;
   [Providers.AZURE]: AzureChatOpenAI;
   [Providers.DEEPSEEK]: ChatDeepSeek;
   [Providers.VERTEXAI]: ChatVertexAI;
```
package/src/utils/llm.ts
CHANGED
package/src/utils/llmConfig.ts
CHANGED
```diff
@@ -78,12 +78,14 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
     azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION,
     model: process.env.AZURE_MODEL_NAME ?? 'gpt-4o',
   },
-  ollama: {
-    provider: Providers.OLLAMA,
+  ollama: {
+    provider: Providers.OPENAI,
     model: 'gpt-oss:20b',
     streaming: true,
     streamUsage: true,
-    …
+    configuration: {
+      baseURL: 'http://localhost:11434/v1',
+    },
   },
   lmstudio: {
     provider: Providers.OPENAI,
```
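This config change is the migration in miniature: Ollama is now treated as just another OpenAI-compatible server. A standalone sketch of the same setup, assuming a local Ollama instance with the `gpt-oss:20b` model pulled (the model name and URL mirror the preset above):

```typescript
import { ChatOpenAI } from '@langchain/openai';

// Ollama serves an OpenAI-compatible API under /v1, so the stock OpenAI
// chat model works once its client is pointed at the local server.
const llm = new ChatOpenAI({
  model: 'gpt-oss:20b',
  apiKey: 'ollama', // must be non-empty; Ollama ignores the value
  streaming: true,
  configuration: {
    baseURL: 'http://localhost:11434/v1',
  },
});

const res = await llm.invoke('Reply with one word: hello');
console.log(res.content);
```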
package/dist/cjs/llm/ollama/index.cjs
DELETED

```diff
@@ -1,70 +0,0 @@
-'use strict';
-
-var messages = require('@langchain/core/messages');
-var outputs = require('@langchain/core/outputs');
-var ollama = require('@langchain/ollama');
-var utils = require('./utils.cjs');
-
-class ChatOllama extends ollama.ChatOllama {
-    static lc_name() {
-        return 'LibreChatOllama';
-    }
-    async *_streamResponseChunks(messages$1, options, runManager) {
-        if (this.checkOrPullModel) {
-            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-            // @ts-ignore
-            if (!(await this.checkModelExistsOnMachine(this.model))) {
-                await this.pull(this.model, {
-                    logProgress: true,
-                });
-            }
-        }
-        const params = this.invocationParams(options);
-        // TODO: remove cast after SDK adds support for tool calls
-        const ollamaMessages = utils.convertToOllamaMessages(messages$1);
-        const usageMetadata = {
-            input_tokens: 0,
-            output_tokens: 0,
-            total_tokens: 0,
-        };
-        const stream = await this.client.chat({
-            ...params,
-            messages: ollamaMessages,
-            stream: true,
-        });
-        let lastMetadata;
-        for await (const chunk of stream) {
-            if (options.signal?.aborted === true) {
-                this.client.abort();
-            }
-            const { message: responseMessage, ...rest } = chunk;
-            usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;
-            usageMetadata.output_tokens += rest.eval_count ?? 0;
-            usageMetadata.total_tokens =
-                usageMetadata.input_tokens + usageMetadata.output_tokens;
-            lastMetadata = rest;
-            if (!responseMessage) {
-                continue;
-            }
-            const message = utils.convertOllamaMessagesToLangChain(responseMessage);
-            const generationChunk = new outputs.ChatGenerationChunk({
-                text: responseMessage.content || '',
-                message,
-            });
-            yield generationChunk;
-            await runManager?.handleLLMNewToken(responseMessage.content || '', undefined, undefined, undefined, undefined, { chunk: generationChunk });
-        }
-        // Yield the `response_metadata` as the final chunk.
-        yield new outputs.ChatGenerationChunk({
-            text: '',
-            message: new messages.AIMessageChunk({
-                content: '',
-                response_metadata: lastMetadata,
-                usage_metadata: usageMetadata,
-            }),
-        });
-    }
-}
-
-exports.ChatOllama = ChatOllama;
-//# sourceMappingURL=index.cjs.map
```
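For reference, the deleted override did two things beyond plain streaming: it could pull a missing model when `checkOrPullModel` was set, and it accumulated Ollama's `prompt_eval_count`/`eval_count` fields into LangChain `usage_metadata`, emitted as a final empty chunk alongside `response_metadata`. On the OpenAI-compatible path, the preset's `streamUsage: true` presumably takes over that token accounting.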
package/dist/cjs/llm/ollama/index.cjs.map
DELETED — source map removed (a single minified line; contents omitted)
@@ -1,158 +0,0 @@
|
|
|
1
|
-
'use strict';
|
|
2
|
-
|
|
3
|
-
var messages = require('@langchain/core/messages');
|
|
4
|
-
var uuid = require('uuid');
|
|
5
|
-
|
|
6
|
-
function convertOllamaMessagesToLangChain(messages$1, extra) {
|
|
7
|
-
const additional_kwargs = {};
|
|
8
|
-
if ('thinking' in messages$1) {
|
|
9
|
-
additional_kwargs.reasoning_content = messages$1.thinking;
|
|
10
|
-
}
|
|
11
|
-
return new messages.AIMessageChunk({
|
|
12
|
-
content: messages$1.content || '',
|
|
13
|
-
tool_call_chunks: messages$1.tool_calls?.map((tc) => ({
|
|
14
|
-
name: tc.function.name,
|
|
15
|
-
args: JSON.stringify(tc.function.arguments),
|
|
16
|
-
type: 'tool_call_chunk',
|
|
17
|
-
index: 0,
|
|
18
|
-
id: uuid.v4(),
|
|
19
|
-
})),
|
|
20
|
-
response_metadata: extra?.responseMetadata,
|
|
21
|
-
usage_metadata: extra?.usageMetadata,
|
|
22
|
-
additional_kwargs,
|
|
23
|
-
});
|
|
24
|
-
}
|
|
25
|
-
function extractBase64FromDataUrl(dataUrl) {
|
|
26
|
-
const match = dataUrl.match(/^data:.*?;base64,(.*)$/);
|
|
27
|
-
return match ? match[1] : '';
|
|
28
|
-
}
|
|
29
|
-
function convertAMessagesToOllama(messages) {
|
|
30
|
-
if (typeof messages.content === 'string') {
|
|
31
|
-
return [
|
|
32
|
-
{
|
|
33
|
-
role: 'assistant',
|
|
34
|
-
content: messages.content,
|
|
35
|
-
},
|
|
36
|
-
];
|
|
37
|
-
}
|
|
38
|
-
const textFields = messages.content.filter((c) => c.type === 'text' && typeof c.text === 'string');
|
|
39
|
-
const textMessages = textFields.map((c) => ({
|
|
40
|
-
role: 'assistant',
|
|
41
|
-
content: c.text,
|
|
42
|
-
}));
|
|
43
|
-
let toolCallMsgs;
|
|
44
|
-
if (messages.content.find((c) => c.type === 'tool_use') &&
|
|
45
|
-
messages.tool_calls?.length) {
|
|
46
|
-
// `tool_use` content types are accepted if the message has tool calls
|
|
47
|
-
const toolCalls = messages.tool_calls.map((tc) => ({
|
|
48
|
-
id: tc.id,
|
|
49
|
-
type: 'function',
|
|
50
|
-
function: {
|
|
51
|
-
name: tc.name,
|
|
52
|
-
arguments: tc.args,
|
|
53
|
-
},
|
|
54
|
-
}));
|
|
55
|
-
if (toolCalls) {
|
|
56
|
-
toolCallMsgs = {
|
|
57
|
-
role: 'assistant',
|
|
58
|
-
tool_calls: toolCalls,
|
|
59
|
-
content: '',
|
|
60
|
-
};
|
|
61
|
-
}
|
|
62
|
-
}
|
|
63
|
-
else if (messages.content.find((c) => c.type === 'tool_use') &&
|
|
64
|
-
!messages.tool_calls?.length) {
|
|
65
|
-
throw new Error('\'tool_use\' content type is not supported without tool calls.');
|
|
66
|
-
}
|
|
67
|
-
return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
|
|
68
|
-
}
|
|
69
|
-
function convertHumanGenericMessagesToOllama(message) {
|
|
70
|
-
if (typeof message.content === 'string') {
|
|
71
|
-
return [
|
|
72
|
-
{
|
|
73
|
-
role: 'user',
|
|
74
|
-
content: message.content,
|
|
75
|
-
},
|
|
76
|
-
];
|
|
77
|
-
}
|
|
78
|
-
return message.content.map((c) => {
|
|
79
|
-
if (c.type === 'text') {
|
|
80
|
-
return {
|
|
81
|
-
role: 'user',
|
|
82
|
-
content: c.text,
|
|
83
|
-
};
|
|
84
|
-
}
|
|
85
|
-
else if (c.type === 'image_url') {
|
|
86
|
-
if (typeof c.image_url === 'string') {
|
|
87
|
-
return {
|
|
88
|
-
role: 'user',
|
|
89
|
-
content: '',
|
|
90
|
-
images: [extractBase64FromDataUrl(c.image_url)],
|
|
91
|
-
};
|
|
92
|
-
}
|
|
93
|
-
else if (c.image_url.url && typeof c.image_url.url === 'string') {
|
|
94
|
-
return {
|
|
95
|
-
role: 'user',
|
|
96
|
-
content: '',
|
|
97
|
-
images: [extractBase64FromDataUrl(c.image_url.url)],
|
|
98
|
-
};
|
|
99
|
-
}
|
|
100
|
-
}
|
|
101
|
-
throw new Error(`Unsupported content type: ${c.type}`);
|
|
102
|
-
});
|
|
103
|
-
}
|
|
104
|
-
function convertSystemMessageToOllama(message) {
|
|
105
|
-
if (typeof message.content === 'string') {
|
|
106
|
-
return [
|
|
107
|
-
{
|
|
108
|
-
role: 'system',
|
|
109
|
-
content: message.content,
|
|
110
|
-
},
|
|
111
|
-
];
|
|
112
|
-
}
|
|
113
|
-
else if (message.content.every((c) => c.type === 'text' && typeof c.text === 'string')) {
|
|
114
|
-
return message.content.map((c) => ({
|
|
115
|
-
role: 'system',
|
|
116
|
-
content: c.text,
|
|
117
|
-
}));
|
|
118
|
-
}
|
|
119
|
-
else {
|
|
120
|
-
throw new Error(`Unsupported content type(s): ${message.content
|
|
121
|
-
.map((c) => c.type)
|
|
122
|
-
.join(', ')}`);
|
|
123
|
-
}
|
|
124
|
-
}
|
|
125
|
-
function convertToolMessageToOllama(message) {
|
|
126
|
-
if (typeof message.content !== 'string') {
|
|
127
|
-
throw new Error('Non string tool message content is not supported');
|
|
128
|
-
}
|
|
129
|
-
return [
|
|
130
|
-
{
|
|
131
|
-
role: 'tool',
|
|
132
|
-
content: message.content,
|
|
133
|
-
},
|
|
134
|
-
];
|
|
135
|
-
}
|
|
136
|
-
function convertToOllamaMessages(messages) {
|
|
137
|
-
return messages.flatMap((msg) => {
|
|
138
|
-
if (['human', 'generic'].includes(msg._getType())) {
|
|
139
|
-
return convertHumanGenericMessagesToOllama(msg);
|
|
140
|
-
}
|
|
141
|
-
else if (msg._getType() === 'ai') {
|
|
142
|
-
return convertAMessagesToOllama(msg);
|
|
143
|
-
}
|
|
144
|
-
else if (msg._getType() === 'system') {
|
|
145
|
-
return convertSystemMessageToOllama(msg);
|
|
146
|
-
}
|
|
147
|
-
else if (msg._getType() === 'tool') {
|
|
148
|
-
return convertToolMessageToOllama(msg);
|
|
149
|
-
}
|
|
150
|
-
else {
|
|
151
|
-
throw new Error(`Unsupported message type: ${msg._getType()}`);
|
|
152
|
-
}
|
|
153
|
-
});
|
|
154
|
-
}
|
|
155
|
-
|
|
156
|
-
exports.convertOllamaMessagesToLangChain = convertOllamaMessagesToLangChain;
|
|
157
|
-
exports.convertToOllamaMessages = convertToOllamaMessages;
|
|
158
|
-
//# sourceMappingURL=utils.cjs.map
|
|
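The removed CJS build above compiles the Ollama-to-LangChain conversion helpers. As a hedged illustration of what the deleted `convertOllamaMessagesToLangChain` export did (the chunk shape follows the `ollama` SDK's Message type; all values here are made up):

import { convertOllamaMessagesToLangChain } from './utils'; // removed module, shown for illustration

// A streamed Ollama assistant message; `thinking` only appears on reasoning models.
const ollamaChunk = {
  role: 'assistant',
  content: 'Hello!',
  thinking: 'Greet the user back.',
};

const aiChunk = convertOllamaMessagesToLangChain(ollamaChunk, {
  usageMetadata: { input_tokens: 4, output_tokens: 3, total_tokens: 7 },
});
// aiChunk.additional_kwargs.reasoning_content === 'Greet the user back.'
// aiChunk.usage_metadata.total_tokens === 7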
package/dist/cjs/llm/ollama/utils.cjs.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"utils.cjs","sources":["../../../../src/llm/ollama/utils.ts"],"sourcesContent":[…],"names":["messages","AIMessageChunk","uuidv4"],"mappings":"…"}
package/dist/esm/llm/ollama/index.mjs
DELETED
@@ -1,68 +0,0 @@
-import { AIMessageChunk } from '@langchain/core/messages';
-import { ChatGenerationChunk } from '@langchain/core/outputs';
-import { ChatOllama as ChatOllama$1 } from '@langchain/ollama';
-import { convertToOllamaMessages, convertOllamaMessagesToLangChain } from './utils.mjs';
-
-class ChatOllama extends ChatOllama$1 {
-    static lc_name() {
-        return 'LibreChatOllama';
-    }
-    async *_streamResponseChunks(messages, options, runManager) {
-        if (this.checkOrPullModel) {
-            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-            // @ts-ignore
-            if (!(await this.checkModelExistsOnMachine(this.model))) {
-                await this.pull(this.model, {
-                    logProgress: true,
-                });
-            }
-        }
-        const params = this.invocationParams(options);
-        // TODO: remove cast after SDK adds support for tool calls
-        const ollamaMessages = convertToOllamaMessages(messages);
-        const usageMetadata = {
-            input_tokens: 0,
-            output_tokens: 0,
-            total_tokens: 0,
-        };
-        const stream = await this.client.chat({
-            ...params,
-            messages: ollamaMessages,
-            stream: true,
-        });
-        let lastMetadata;
-        for await (const chunk of stream) {
-            if (options.signal?.aborted === true) {
-                this.client.abort();
-            }
-            const { message: responseMessage, ...rest } = chunk;
-            usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;
-            usageMetadata.output_tokens += rest.eval_count ?? 0;
-            usageMetadata.total_tokens =
-                usageMetadata.input_tokens + usageMetadata.output_tokens;
-            lastMetadata = rest;
-            if (!responseMessage) {
-                continue;
-            }
-            const message = convertOllamaMessagesToLangChain(responseMessage);
-            const generationChunk = new ChatGenerationChunk({
-                text: responseMessage.content || '',
-                message,
-            });
-            yield generationChunk;
-            await runManager?.handleLLMNewToken(responseMessage.content || '', undefined, undefined, undefined, undefined, { chunk: generationChunk });
-        }
-        // Yield the `response_metadata` as the final chunk.
-        yield new ChatGenerationChunk({
-            text: '',
-            message: new AIMessageChunk({
-                content: '',
-                response_metadata: lastMetadata,
-                usage_metadata: usageMetadata,
-            }),
-        });
-    }
-}
-
-export { ChatOllama };
-//# sourceMappingURL=index.mjs.map
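One detail of the streaming override above that is easy to miss: token counts accumulate across chunks, and the complete `usage_metadata` is only attached to the final, empty chunk. A minimal sketch of reading it back from collected chunks (helper name hypothetical):

import type { AIMessageChunk } from '@langchain/core/messages';

// Returns the total token count reported by the trailing metadata chunk, if any.
function totalTokens(chunks: AIMessageChunk[]): number {
  const last = chunks[chunks.length - 1];
  return last?.usage_metadata?.total_tokens ?? 0;
}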
package/dist/esm/llm/ollama/index.mjs.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"index.mjs","sources":["../../../../src/llm/ollama/index.ts"],"sourcesContent":[…],"names":["BaseChatOllama"],"mappings":"…"}
package/dist/esm/llm/ollama/utils.mjs
DELETED
@@ -1,155 +0,0 @@
-import { AIMessageChunk } from '@langchain/core/messages';
-import { v4 } from 'uuid';
-
-function convertOllamaMessagesToLangChain(messages, extra) {
-    const additional_kwargs = {};
-    if ('thinking' in messages) {
-        additional_kwargs.reasoning_content = messages.thinking;
-    }
-    return new AIMessageChunk({
-        content: messages.content || '',
-        tool_call_chunks: messages.tool_calls?.map((tc) => ({
-            name: tc.function.name,
-            args: JSON.stringify(tc.function.arguments),
-            type: 'tool_call_chunk',
-            index: 0,
-            id: v4(),
-        })),
-        response_metadata: extra?.responseMetadata,
-        usage_metadata: extra?.usageMetadata,
-        additional_kwargs,
-    });
-}
-function extractBase64FromDataUrl(dataUrl) {
-    const match = dataUrl.match(/^data:.*?;base64,(.*)$/);
-    return match ? match[1] : '';
-}
-function convertAMessagesToOllama(messages) {
-    if (typeof messages.content === 'string') {
-        return [
-            {
-                role: 'assistant',
-                content: messages.content,
-            },
-        ];
-    }
-    const textFields = messages.content.filter((c) => c.type === 'text' && typeof c.text === 'string');
-    const textMessages = textFields.map((c) => ({
-        role: 'assistant',
-        content: c.text,
-    }));
-    let toolCallMsgs;
-    if (messages.content.find((c) => c.type === 'tool_use') &&
-        messages.tool_calls?.length) {
-        // `tool_use` content types are accepted if the message has tool calls
-        const toolCalls = messages.tool_calls.map((tc) => ({
-            id: tc.id,
-            type: 'function',
-            function: {
-                name: tc.name,
-                arguments: tc.args,
-            },
-        }));
-        if (toolCalls) {
-            toolCallMsgs = {
-                role: 'assistant',
-                tool_calls: toolCalls,
-                content: '',
-            };
-        }
-    }
-    else if (messages.content.find((c) => c.type === 'tool_use') &&
-        !messages.tool_calls?.length) {
-        throw new Error('\'tool_use\' content type is not supported without tool calls.');
-    }
-    return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
-}
-function convertHumanGenericMessagesToOllama(message) {
-    if (typeof message.content === 'string') {
-        return [
-            {
-                role: 'user',
-                content: message.content,
-            },
-        ];
-    }
-    return message.content.map((c) => {
-        if (c.type === 'text') {
-            return {
-                role: 'user',
-                content: c.text,
-            };
-        }
-        else if (c.type === 'image_url') {
-            if (typeof c.image_url === 'string') {
-                return {
-                    role: 'user',
-                    content: '',
-                    images: [extractBase64FromDataUrl(c.image_url)],
-                };
-            }
-            else if (c.image_url.url && typeof c.image_url.url === 'string') {
-                return {
-                    role: 'user',
-                    content: '',
-                    images: [extractBase64FromDataUrl(c.image_url.url)],
-                };
-            }
-        }
-        throw new Error(`Unsupported content type: ${c.type}`);
-    });
-}
-function convertSystemMessageToOllama(message) {
-    if (typeof message.content === 'string') {
-        return [
-            {
-                role: 'system',
-                content: message.content,
-            },
-        ];
-    }
-    else if (message.content.every((c) => c.type === 'text' && typeof c.text === 'string')) {
-        return message.content.map((c) => ({
-            role: 'system',
-            content: c.text,
-        }));
-    }
-    else {
-        throw new Error(`Unsupported content type(s): ${message.content
-            .map((c) => c.type)
-            .join(', ')}`);
-    }
-}
-function convertToolMessageToOllama(message) {
-    if (typeof message.content !== 'string') {
-        throw new Error('Non string tool message content is not supported');
-    }
-    return [
-        {
-            role: 'tool',
-            content: message.content,
-        },
-    ];
-}
-function convertToOllamaMessages(messages) {
-    return messages.flatMap((msg) => {
-        if (['human', 'generic'].includes(msg._getType())) {
-            return convertHumanGenericMessagesToOllama(msg);
-        }
-        else if (msg._getType() === 'ai') {
-            return convertAMessagesToOllama(msg);
-        }
-        else if (msg._getType() === 'system') {
-            return convertSystemMessageToOllama(msg);
-        }
-        else if (msg._getType() === 'tool') {
-            return convertToolMessageToOllama(msg);
-        }
-        else {
-            throw new Error(`Unsupported message type: ${msg._getType()}`);
-        }
-    });
-}
-
-export { convertOllamaMessagesToLangChain, convertToOllamaMessages };
-//# sourceMappingURL=utils.mjs.map
package/dist/esm/llm/ollama/utils.mjs.map
DELETED
@@ -1 +0,0 @@
-{"version":3,"file":"utils.mjs","sources":["../../../../src/llm/ollama/utils.ts"],"sourcesContent":[…],"names":["uuidv4"],"mappings":"…"}
package/dist/types/llm/ollama/index.d.ts
DELETED
@@ -1,8 +0,0 @@
-import { ChatGenerationChunk } from '@langchain/core/outputs';
-import { ChatOllama as BaseChatOllama } from '@langchain/ollama';
-import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
-import type { BaseMessage } from '@langchain/core/messages';
-export declare class ChatOllama extends BaseChatOllama {
-    static lc_name(): 'LibreChatOllama';
-    _streamResponseChunks(messages: BaseMessage[], options: this['ParsedCallOptions'], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
-}
package/dist/types/llm/ollama/utils.d.ts
DELETED
@@ -1,7 +0,0 @@
-import { AIMessageChunk, BaseMessage, UsageMetadata } from '@langchain/core/messages';
-import type { Message as OllamaMessage } from 'ollama';
-export declare function convertOllamaMessagesToLangChain(messages: OllamaMessage, extra?: {
-    responseMetadata?: Record<string, any>;
-    usageMetadata?: UsageMetadata;
-}): AIMessageChunk;
-export declare function convertToOllamaMessages(messages: BaseMessage[]): OllamaMessage[];
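The deleted declarations describe a small, two-function surface. A sketch of how they type-check in practice (import path hypothetical, since the module no longer ships):

import { SystemMessage, HumanMessage } from '@langchain/core/messages';
import { convertToOllamaMessages } from './utils'; // removed module

const msgs = convertToOllamaMessages([
  new SystemMessage('You are terse.'),
  new HumanMessage('Ping?'),
]);
// msgs: OllamaMessage[] ->
// [ { role: 'system', content: 'You are terse.' },
//   { role: 'user', content: 'Ping?' } ]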
package/src/llm/ollama/index.ts
DELETED
@@ -1,92 +0,0 @@
-import { AIMessageChunk } from '@langchain/core/messages';
-import { ChatGenerationChunk } from '@langchain/core/outputs';
-import { ChatOllama as BaseChatOllama } from '@langchain/ollama';
-import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager';
-import type {
-  ChatResponse as OllamaChatResponse,
-  Message as OllamaMessage,
-} from 'ollama';
-import type { UsageMetadata, BaseMessage } from '@langchain/core/messages';
-import {
-  convertOllamaMessagesToLangChain,
-  convertToOllamaMessages,
-} from './utils';
-
-export class ChatOllama extends BaseChatOllama {
-  static lc_name(): 'LibreChatOllama' {
-    return 'LibreChatOllama';
-  }
-  async *_streamResponseChunks(
-    messages: BaseMessage[],
-    options: this['ParsedCallOptions'],
-    runManager?: CallbackManagerForLLMRun
-  ): AsyncGenerator<ChatGenerationChunk> {
-    if (this.checkOrPullModel) {
-      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
-      // @ts-ignore
-      if (!((await this.checkModelExistsOnMachine(this.model)) as boolean)) {
-        await this.pull(this.model, {
-          logProgress: true,
-        });
-      }
-    }
-
-    const params = this.invocationParams(options);
-    // TODO: remove cast after SDK adds support for tool calls
-    const ollamaMessages = convertToOllamaMessages(messages) as OllamaMessage[];
-
-    const usageMetadata: UsageMetadata = {
-      input_tokens: 0,
-      output_tokens: 0,
-      total_tokens: 0,
-    };
-
-    const stream = await this.client.chat({
-      ...params,
-      messages: ollamaMessages,
-      stream: true,
-    });
-
-    let lastMetadata: Omit<OllamaChatResponse, 'message'> | undefined;
-
-    for await (const chunk of stream) {
-      if (options.signal?.aborted === true) {
-        this.client.abort();
-      }
-      const { message: responseMessage, ...rest } =
-        chunk as Partial<OllamaChatResponse>;
-      usageMetadata.input_tokens += rest.prompt_eval_count ?? 0;
-      usageMetadata.output_tokens += rest.eval_count ?? 0;
-      usageMetadata.total_tokens =
-        usageMetadata.input_tokens + usageMetadata.output_tokens;
-      lastMetadata = rest as Omit<OllamaChatResponse, 'message'>;
-      if (!responseMessage) {
-        continue;
-      }
-      const message = convertOllamaMessagesToLangChain(responseMessage);
-      const generationChunk = new ChatGenerationChunk({
-        text: responseMessage.content || '',
-        message,
-      });
-      yield generationChunk;
-      await runManager?.handleLLMNewToken(
-        responseMessage.content || '',
-        undefined,
-        undefined,
-        undefined,
-        undefined,
-        { chunk: generationChunk }
-      );
-    }
-
-    // Yield the `response_metadata` as the final chunk.
-    yield new ChatGenerationChunk({
-      text: '',
-      message: new AIMessageChunk({
-        content: '',
-        response_metadata: lastMetadata,
-        usage_metadata: usageMetadata,
-      }),
-    });
-  }
-}
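For context, a hedged sketch of how the removed subclass was meant to be driven. The constructor fields come from @langchain/ollama's ChatOllama (including `checkOrPullModel`, which the override consults to pull a missing model before streaming); the model name is illustrative:

import { ChatOllama } from './llm/ollama'; // removed module, shown for illustration

const model = new ChatOllama({ model: 'llama3', checkOrPullModel: true });

const stream = await model.stream([['human', 'Why is the sky blue?']]);
for await (const chunk of stream) {
  if (typeof chunk.content === 'string') process.stdout.write(chunk.content);
}
// The last chunk has empty content but carries response_metadata and usage_metadata.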
package/src/llm/ollama/utils.ts
DELETED
@@ -1,193 +0,0 @@
-import {
-  AIMessage,
-  AIMessageChunk,
-  BaseMessage,
-  HumanMessage,
-  MessageContentText,
-  SystemMessage,
-  ToolMessage,
-  UsageMetadata,
-} from '@langchain/core/messages';
-import type {
-  Message as OllamaMessage,
-  ToolCall as OllamaToolCall,
-} from 'ollama';
-import { v4 as uuidv4 } from 'uuid';
-
-export function convertOllamaMessagesToLangChain(
-  messages: OllamaMessage,
-  extra?: {
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    responseMetadata?: Record<string, any>;
-    usageMetadata?: UsageMetadata;
-  }
-): AIMessageChunk {
-  const additional_kwargs: BaseMessage['additional_kwargs'] = {};
-  if ('thinking' in messages) {
-    additional_kwargs.reasoning_content = messages.thinking as string;
-  }
-  return new AIMessageChunk({
-    content: messages.content || '',
-    tool_call_chunks: messages.tool_calls?.map((tc) => ({
-      name: tc.function.name,
-      args: JSON.stringify(tc.function.arguments),
-      type: 'tool_call_chunk',
-      index: 0,
-      id: uuidv4(),
-    })),
-    response_metadata: extra?.responseMetadata,
-    usage_metadata: extra?.usageMetadata,
-    additional_kwargs,
-  });
-}
-
-function extractBase64FromDataUrl(dataUrl: string): string {
-  const match = dataUrl.match(/^data:.*?;base64,(.*)$/);
-  return match ? match[1] : '';
-}
-
-function convertAMessagesToOllama(messages: AIMessage): OllamaMessage[] {
-  if (typeof messages.content === 'string') {
-    return [
-      {
-        role: 'assistant',
-        content: messages.content,
-      },
-    ];
-  }
-
-  const textFields = messages.content.filter(
-    (c) => c.type === 'text' && typeof c.text === 'string'
-  );
-  const textMessages = (textFields as MessageContentText[]).map((c) => ({
-    role: 'assistant',
-    content: c.text,
-  }));
-  let toolCallMsgs: OllamaMessage | undefined;
-
-  if (
-    messages.content.find((c) => c.type === 'tool_use') &&
-    messages.tool_calls?.length
-  ) {
-    // `tool_use` content types are accepted if the message has tool calls
-    const toolCalls: OllamaToolCall[] | undefined = messages.tool_calls.map(
-      (tc) => ({
-        id: tc.id,
-        type: 'function',
-        function: {
-          name: tc.name,
-          arguments: tc.args,
-        },
-      })
-    );
-
-    if (toolCalls) {
-      toolCallMsgs = {
-        role: 'assistant',
-        tool_calls: toolCalls,
-        content: '',
-      };
-    }
-  } else if (
-    messages.content.find((c) => c.type === 'tool_use') &&
-    !messages.tool_calls?.length
-  ) {
-    throw new Error(
-      '\'tool_use\' content type is not supported without tool calls.'
-    );
-  }
-
-  return [...textMessages, ...(toolCallMsgs ? [toolCallMsgs] : [])];
-}
-
-function convertHumanGenericMessagesToOllama(
-  message: HumanMessage
-): OllamaMessage[] {
-  if (typeof message.content === 'string') {
-    return [
-      {
-        role: 'user',
-        content: message.content,
-      },
-    ];
-  }
-  return message.content.map((c) => {
-    if (c.type === 'text') {
-      return {
-        role: 'user',
-        content: c.text,
-      };
-    } else if (c.type === 'image_url') {
-      if (typeof c.image_url === 'string') {
-        return {
-          role: 'user',
-          content: '',
-          images: [extractBase64FromDataUrl(c.image_url)],
-        };
-      } else if (c.image_url.url && typeof c.image_url.url === 'string') {
-        return {
-          role: 'user',
-          content: '',
-          images: [extractBase64FromDataUrl(c.image_url.url)],
-        };
-      }
-    }
-    throw new Error(`Unsupported content type: ${c.type}`);
-  });
-}
-
-function convertSystemMessageToOllama(message: SystemMessage): OllamaMessage[] {
-  if (typeof message.content === 'string') {
-    return [
-      {
-        role: 'system',
-        content: message.content,
-      },
-    ];
-  } else if (
-    message.content.every(
-      (c) => c.type === 'text' && typeof c.text === 'string'
-    )
-  ) {
-    return (message.content as MessageContentText[]).map((c) => ({
-      role: 'system',
-      content: c.text,
-    }));
-  } else {
-    throw new Error(
-      `Unsupported content type(s): ${message.content
-        .map((c) => c.type)
-        .join(', ')}`
-    );
-  }
-}
-
-function convertToolMessageToOllama(message: ToolMessage): OllamaMessage[] {
-  if (typeof message.content !== 'string') {
-    throw new Error('Non string tool message content is not supported');
-  }
-  return [
-    {
-      role: 'tool',
-      content: message.content,
-    },
-  ];
-}
-
-export function convertToOllamaMessages(
-  messages: BaseMessage[]
-): OllamaMessage[] {
-  return messages.flatMap((msg) => {
-    if (['human', 'generic'].includes(msg._getType())) {
-      return convertHumanGenericMessagesToOllama(msg);
-    } else if (msg._getType() === 'ai') {
-      return convertAMessagesToOllama(msg);
-    } else if (msg._getType() === 'system') {
-      return convertSystemMessageToOllama(msg);
-    } else if (msg._getType() === 'tool') {
-      return convertToolMessageToOllama(msg as ToolMessage);
-    } else {
-      throw new Error(`Unsupported message type: ${msg._getType()}`);
-    }
-  });
-}
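A worked example of the trickiest branch above, `convertAMessagesToOllama`: an AIMessage that mixes `text` and `tool_use` content fans out into one text message plus one tool-call message (values illustrative; shapes follow the removed converters):

import { AIMessage } from '@langchain/core/messages';

const ai = new AIMessage({
  content: [{ type: 'text', text: 'Checking the weather.' }, { type: 'tool_use' }],
  tool_calls: [{ id: 'call_1', name: 'get_weather', args: { city: 'Paris' } }],
});

// convertToOllamaMessages([ai]) yields:
// [ { role: 'assistant', content: 'Checking the weather.' },
//   { role: 'assistant', content: '',
//     tool_calls: [{ id: 'call_1', type: 'function',
//                    function: { name: 'get_weather', arguments: { city: 'Paris' } } }] } ]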