@librechat/agents 2.1.8 → 2.2.0

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -66,6 +66,7 @@ exports.Providers = void 0;
  Providers["AZURE"] = "azureOpenAI";
  Providers["DEEPSEEK"] = "deepseek";
  Providers["OPENROUTER"] = "openrouter";
+ Providers["XAI"] = "xai";
  })(exports.Providers || (exports.Providers = {}));
  exports.GraphNodeKeys = void 0;
  (function (GraphNodeKeys) {
@@ -1 +1 @@
- {"version":3,"file":"enum.cjs","sources":["../../../src/common/enum.ts"],"sourcesContent":["/**\n * Enum representing the various event types emitted during the execution of runnables.\n * These events provide real-time information about the progress and state of different components.\n *\n * @enum {string}\n */\nexport enum GraphEvents {\n /* Custom Events */\n\n /** [Custom] Delta event for run steps (message creation and tool calls) */\n ON_RUN_STEP = 'on_run_step',\n /** [Custom] Delta event for run steps (tool calls) */\n ON_RUN_STEP_DELTA = 'on_run_step_delta',\n /** [Custom] Completed event for run steps (tool calls) */\n ON_RUN_STEP_COMPLETED = 'on_run_step_completed',\n /** [Custom] Delta events for messages */\n ON_MESSAGE_DELTA = 'on_message_delta',\n /** [Custom] Reasoning Delta events for messages */\n ON_REASONING_DELTA = 'on_reasoning_delta',\n\n /* Official Events */\n\n /** Custom event, emitted by system */\n ON_CUSTOM_EVENT = 'on_custom_event',\n /** Emitted when a chat model starts processing. */\n CHAT_MODEL_START = 'on_chat_model_start',\n\n /** Emitted when a chat model streams a chunk of its response. */\n CHAT_MODEL_STREAM = 'on_chat_model_stream',\n\n /** Emitted when a chat model completes its processing. */\n CHAT_MODEL_END = 'on_chat_model_end',\n\n /** Emitted when a language model starts processing. */\n LLM_START = 'on_llm_start',\n\n /** Emitted when a language model streams a chunk of its response. */\n LLM_STREAM = 'on_llm_stream',\n\n /** Emitted when a language model completes its processing. */\n LLM_END = 'on_llm_end',\n\n /** Emitted when a chain starts processing. */\n CHAIN_START = 'on_chain_start',\n\n /** Emitted when a chain streams a chunk of its output. */\n CHAIN_STREAM = 'on_chain_stream',\n\n /** Emitted when a chain completes its processing. */\n CHAIN_END = 'on_chain_end',\n\n /** Emitted when a tool starts its operation. */\n TOOL_START = 'on_tool_start',\n\n /** Emitted when a tool completes its operation. */\n TOOL_END = 'on_tool_end',\n\n /** Emitted when a retriever starts its operation. */\n RETRIEVER_START = 'on_retriever_start',\n\n /** Emitted when a retriever completes its operation. */\n RETRIEVER_END = 'on_retriever_end',\n\n /** Emitted when a prompt starts processing. */\n PROMPT_START = 'on_prompt_start',\n\n /** Emitted when a prompt completes its processing. 
*/\n PROMPT_END = 'on_prompt_end'\n}\n\nexport enum Providers {\n OPENAI = 'openAI',\n BEDROCK_LEGACY = 'bedrock_legacy',\n VERTEXAI = 'vertexai',\n BEDROCK = 'bedrock',\n ANTHROPIC = 'anthropic',\n MISTRALAI = 'mistralai',\n OLLAMA = 'ollama',\n GOOGLE = 'google',\n AZURE = 'azureOpenAI',\n DEEPSEEK = 'deepseek',\n OPENROUTER = 'openrouter',\n}\n\nexport enum GraphNodeKeys {\n TOOLS = 'tools',\n AGENT = 'agent',\n PRE_TOOLS = 'pre_tools',\n POST_TOOLS = 'post_tools',\n}\n\nexport enum GraphNodeActions {\n TOOL_NODE = 'tool_node',\n CALL_MODEL = 'call_model',\n ROUTE_MESSAGE = 'route_message',\n}\n\nexport enum CommonEvents {\n LANGGRAPH = 'LangGraph',\n}\n\nexport enum StepTypes {\n TOOL_CALLS = 'tool_calls',\n MESSAGE_CREATION = 'message_creation'\n}\n\nexport enum ContentTypes {\n TEXT = 'text',\n ERROR = 'error',\n THINK = 'think',\n TOOL_CALL = 'tool_call',\n IMAGE_URL = 'image_url',\n IMAGE_FILE = 'image_file',\n /** Anthropic */\n THINKING = 'thinking',\n /** Bedrock */\n REASONING_CONTENT = 'reasoning_content',\n}\n\nexport enum ToolCallTypes {\n FUNCTION = 'function',\n RETRIEVAL = 'retrieval',\n FILE_SEARCH = 'file_search',\n CODE_INTERPRETER = 'code_interpreter',\n /* Agents Tool Call */\n TOOL_CALL = 'tool_call',\n}\n\nexport enum Callback {\n TOOL_ERROR = 'handleToolError',\n TOOL_START = 'handleToolStart',\n TOOL_END = 'handleToolEnd',\n /*\n LLM_START = 'handleLLMStart',\n LLM_NEW_TOKEN = 'handleLLMNewToken',\n LLM_ERROR = 'handleLLMError',\n LLM_END = 'handleLLMEnd',\n CHAT_MODEL_START = 'handleChatModelStart',\n CHAIN_START = 'handleChainStart',\n CHAIN_ERROR = 'handleChainError',\n CHAIN_END = 'handleChainEnd',\n TEXT = 'handleText',\n AGENT_ACTION = 'handleAgentAction',\n AGENT_END = 'handleAgentEnd',\n RETRIEVER_START = 'handleRetrieverStart',\n RETRIEVER_END = 'handleRetrieverEnd',\n RETRIEVER_ERROR = 'handleRetrieverError',\n CUSTOM_EVENT = 'handleCustomEvent'\n */\n}\n\nexport enum Constants {\n OFFICIAL_CODE_BASEURL = 'https://api.librechat.ai/v1',\n EXECUTE_CODE = 'execute_code',\n CONTENT_AND_ARTIFACT = 'content_and_artifact',\n}\n\nexport enum EnvVar {\n CODE_API_KEY = 'LIBRECHAT_CODE_API_KEY',\n CODE_BASEURL = 
'LIBRECHAT_CODE_BASEURL'\n}\n"],"names":["GraphEvents","Providers","GraphNodeKeys","GraphNodeActions","CommonEvents","StepTypes","ContentTypes","ToolCallTypes","Callback","Constants","EnvVar"],"mappings":";;AAAA;;;;;AAKG;AACSA;AAAZ,CAAA,UAAY,WAAW,EAAA;;;AAIrB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;;AAE3B,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;;AAEvC,IAAA,WAAA,CAAA,uBAAA,CAAA,GAAA,uBAA+C;;AAE/C,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,WAAA,CAAA,oBAAA,CAAA,GAAA,oBAAyC;;;AAKzC,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,iBAAmC;;AAEnC,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,qBAAwC;;AAGxC,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,sBAA0C;;AAG1C,IAAA,WAAA,CAAA,gBAAA,CAAA,GAAA,mBAAoC;;AAGpC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,SAAA,CAAA,GAAA,YAAsB;;AAGtB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,gBAA8B;;AAG9B,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,UAAA,CAAA,GAAA,aAAwB;;AAGxB,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,oBAAsC;;AAGtC,IAAA,WAAA,CAAA,eAAA,CAAA,GAAA,kBAAkC;;AAGlC,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;AAC9B,CAAC,EA9DWA,mBAAW,KAAXA,mBAAW,GA8DtB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,gBAAA,CAAA,GAAA,gBAAiC;AACjC,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,SAAA,CAAA,GAAA,SAAmB;AACnB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,OAAA,CAAA,GAAA,aAAqB;AACrB,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AAC3B,CAAC,EAZWA,iBAAS,KAATA,iBAAS,GAYpB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AAC3B,CAAC,EALWA,qBAAa,KAAbA,qBAAa,GAKxB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,gBAAgB,EAAA;AAC1B,IAAA,gBAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,gBAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,gBAAA,CAAA,eAAA,CAAA,GAAA,eAA+B;AACjC,CAAC,EAJWA,wBAAgB,KAAhBA,wBAAgB,GAI3B,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAFWA,oBAAY,KAAZA,oBAAY,GAEvB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,SAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;AACvC,CAAC,EAHWA,iBAAS,KAATA,iBAAS,GAGpB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,MAAA,CAAA,GAAA,MAAa;AACb,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;;AAEzB,IAAA,YAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;;AAErB,IAAA,YAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;AACzC,CAAC,EAXWA,oBAAY,KAAZA,oBAAY,GAWvB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;AAC3B,IAAA,aAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAPWA,qBAAa,KAAbA,qBAAa,GAOxB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,QAAQ,EAAA;AAClB,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,UAAA,CAAA,GAAA,eAA0B;AAC1B;;;;;;;;;;;;;;;;AAgBE;AACJ,CAAC,EArBWA,gBAAQ,KAARA,gBAAQ,GAqBnB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,SAAS,EAA
A;AACnB,IAAA,SAAA,CAAA,uBAAA,CAAA,GAAA,6BAAqD;AACrD,IAAA,SAAA,CAAA,cAAA,CAAA,GAAA,cAA6B;AAC7B,IAAA,SAAA,CAAA,sBAAA,CAAA,GAAA,sBAA6C;AAC/C,CAAC,EAJWA,iBAAS,KAATA,iBAAS,GAIpB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,MAAM,EAAA;AAChB,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACvC,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACzC,CAAC,EAHWA,cAAM,KAANA,cAAM,GAGjB,EAAA,CAAA,CAAA;;"}
+ {"version":3,"file":"enum.cjs","sources":["../../../src/common/enum.ts"],"sourcesContent":["/**\n * Enum representing the various event types emitted during the execution of runnables.\n * These events provide real-time information about the progress and state of different components.\n *\n * @enum {string}\n */\nexport enum GraphEvents {\n /* Custom Events */\n\n /** [Custom] Delta event for run steps (message creation and tool calls) */\n ON_RUN_STEP = 'on_run_step',\n /** [Custom] Delta event for run steps (tool calls) */\n ON_RUN_STEP_DELTA = 'on_run_step_delta',\n /** [Custom] Completed event for run steps (tool calls) */\n ON_RUN_STEP_COMPLETED = 'on_run_step_completed',\n /** [Custom] Delta events for messages */\n ON_MESSAGE_DELTA = 'on_message_delta',\n /** [Custom] Reasoning Delta events for messages */\n ON_REASONING_DELTA = 'on_reasoning_delta',\n\n /* Official Events */\n\n /** Custom event, emitted by system */\n ON_CUSTOM_EVENT = 'on_custom_event',\n /** Emitted when a chat model starts processing. */\n CHAT_MODEL_START = 'on_chat_model_start',\n\n /** Emitted when a chat model streams a chunk of its response. */\n CHAT_MODEL_STREAM = 'on_chat_model_stream',\n\n /** Emitted when a chat model completes its processing. */\n CHAT_MODEL_END = 'on_chat_model_end',\n\n /** Emitted when a language model starts processing. */\n LLM_START = 'on_llm_start',\n\n /** Emitted when a language model streams a chunk of its response. */\n LLM_STREAM = 'on_llm_stream',\n\n /** Emitted when a language model completes its processing. */\n LLM_END = 'on_llm_end',\n\n /** Emitted when a chain starts processing. */\n CHAIN_START = 'on_chain_start',\n\n /** Emitted when a chain streams a chunk of its output. */\n CHAIN_STREAM = 'on_chain_stream',\n\n /** Emitted when a chain completes its processing. */\n CHAIN_END = 'on_chain_end',\n\n /** Emitted when a tool starts its operation. */\n TOOL_START = 'on_tool_start',\n\n /** Emitted when a tool completes its operation. */\n TOOL_END = 'on_tool_end',\n\n /** Emitted when a retriever starts its operation. */\n RETRIEVER_START = 'on_retriever_start',\n\n /** Emitted when a retriever completes its operation. */\n RETRIEVER_END = 'on_retriever_end',\n\n /** Emitted when a prompt starts processing. */\n PROMPT_START = 'on_prompt_start',\n\n /** Emitted when a prompt completes its processing. 
*/\n PROMPT_END = 'on_prompt_end'\n}\n\nexport enum Providers {\n OPENAI = 'openAI',\n BEDROCK_LEGACY = 'bedrock_legacy',\n VERTEXAI = 'vertexai',\n BEDROCK = 'bedrock',\n ANTHROPIC = 'anthropic',\n MISTRALAI = 'mistralai',\n OLLAMA = 'ollama',\n GOOGLE = 'google',\n AZURE = 'azureOpenAI',\n DEEPSEEK = 'deepseek',\n OPENROUTER = 'openrouter',\n XAI = 'xai',\n}\n\nexport enum GraphNodeKeys {\n TOOLS = 'tools',\n AGENT = 'agent',\n PRE_TOOLS = 'pre_tools',\n POST_TOOLS = 'post_tools',\n}\n\nexport enum GraphNodeActions {\n TOOL_NODE = 'tool_node',\n CALL_MODEL = 'call_model',\n ROUTE_MESSAGE = 'route_message',\n}\n\nexport enum CommonEvents {\n LANGGRAPH = 'LangGraph',\n}\n\nexport enum StepTypes {\n TOOL_CALLS = 'tool_calls',\n MESSAGE_CREATION = 'message_creation'\n}\n\nexport enum ContentTypes {\n TEXT = 'text',\n ERROR = 'error',\n THINK = 'think',\n TOOL_CALL = 'tool_call',\n IMAGE_URL = 'image_url',\n IMAGE_FILE = 'image_file',\n /** Anthropic */\n THINKING = 'thinking',\n /** Bedrock */\n REASONING_CONTENT = 'reasoning_content',\n}\n\nexport enum ToolCallTypes {\n FUNCTION = 'function',\n RETRIEVAL = 'retrieval',\n FILE_SEARCH = 'file_search',\n CODE_INTERPRETER = 'code_interpreter',\n /* Agents Tool Call */\n TOOL_CALL = 'tool_call',\n}\n\nexport enum Callback {\n TOOL_ERROR = 'handleToolError',\n TOOL_START = 'handleToolStart',\n TOOL_END = 'handleToolEnd',\n /*\n LLM_START = 'handleLLMStart',\n LLM_NEW_TOKEN = 'handleLLMNewToken',\n LLM_ERROR = 'handleLLMError',\n LLM_END = 'handleLLMEnd',\n CHAT_MODEL_START = 'handleChatModelStart',\n CHAIN_START = 'handleChainStart',\n CHAIN_ERROR = 'handleChainError',\n CHAIN_END = 'handleChainEnd',\n TEXT = 'handleText',\n AGENT_ACTION = 'handleAgentAction',\n AGENT_END = 'handleAgentEnd',\n RETRIEVER_START = 'handleRetrieverStart',\n RETRIEVER_END = 'handleRetrieverEnd',\n RETRIEVER_ERROR = 'handleRetrieverError',\n CUSTOM_EVENT = 'handleCustomEvent'\n */\n}\n\nexport enum Constants {\n OFFICIAL_CODE_BASEURL = 'https://api.librechat.ai/v1',\n EXECUTE_CODE = 'execute_code',\n CONTENT_AND_ARTIFACT = 'content_and_artifact',\n}\n\nexport enum EnvVar {\n CODE_API_KEY = 'LIBRECHAT_CODE_API_KEY',\n CODE_BASEURL = 
'LIBRECHAT_CODE_BASEURL'\n}\n"],"names":["GraphEvents","Providers","GraphNodeKeys","GraphNodeActions","CommonEvents","StepTypes","ContentTypes","ToolCallTypes","Callback","Constants","EnvVar"],"mappings":";;AAAA;;;;;AAKG;AACSA;AAAZ,CAAA,UAAY,WAAW,EAAA;;;AAIrB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;;AAE3B,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;;AAEvC,IAAA,WAAA,CAAA,uBAAA,CAAA,GAAA,uBAA+C;;AAE/C,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,WAAA,CAAA,oBAAA,CAAA,GAAA,oBAAyC;;;AAKzC,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,iBAAmC;;AAEnC,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,qBAAwC;;AAGxC,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,sBAA0C;;AAG1C,IAAA,WAAA,CAAA,gBAAA,CAAA,GAAA,mBAAoC;;AAGpC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,SAAA,CAAA,GAAA,YAAsB;;AAGtB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,gBAA8B;;AAG9B,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,UAAA,CAAA,GAAA,aAAwB;;AAGxB,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,oBAAsC;;AAGtC,IAAA,WAAA,CAAA,eAAA,CAAA,GAAA,kBAAkC;;AAGlC,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;AAC9B,CAAC,EA9DWA,mBAAW,KAAXA,mBAAW,GA8DtB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,gBAAA,CAAA,GAAA,gBAAiC;AACjC,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,SAAA,CAAA,GAAA,SAAmB;AACnB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,OAAA,CAAA,GAAA,aAAqB;AACrB,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,SAAA,CAAA,KAAA,CAAA,GAAA,KAAW;AACb,CAAC,EAbWA,iBAAS,KAATA,iBAAS,GAapB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AAC3B,CAAC,EALWA,qBAAa,KAAbA,qBAAa,GAKxB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,gBAAgB,EAAA;AAC1B,IAAA,gBAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,gBAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,gBAAA,CAAA,eAAA,CAAA,GAAA,eAA+B;AACjC,CAAC,EAJWA,wBAAgB,KAAhBA,wBAAgB,GAI3B,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAFWA,oBAAY,KAAZA,oBAAY,GAEvB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,SAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;AACvC,CAAC,EAHWA,iBAAS,KAATA,iBAAS,GAGpB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,MAAA,CAAA,GAAA,MAAa;AACb,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;;AAEzB,IAAA,YAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;;AAErB,IAAA,YAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;AACzC,CAAC,EAXWA,oBAAY,KAAZA,oBAAY,GAWvB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;AAC3B,IAAA,aAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAPWA,qBAAa,KAAbA,qBAAa,GAOxB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,QAAQ,EAAA;AAClB,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,UAAA,CAAA,GAAA,eAA0B;AAC1B;;;;;;;;;;;;;;;;AAgBE;AACJ,CAAC,EArBWA,gBAAQ,KAARA,gBAAQ,GAqBnB,EAAA
,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,uBAAA,CAAA,GAAA,6BAAqD;AACrD,IAAA,SAAA,CAAA,cAAA,CAAA,GAAA,cAA6B;AAC7B,IAAA,SAAA,CAAA,sBAAA,CAAA,GAAA,sBAA6C;AAC/C,CAAC,EAJWA,iBAAS,KAATA,iBAAS,GAIpB,EAAA,CAAA,CAAA;AAEWC;AAAZ,CAAA,UAAY,MAAM,EAAA;AAChB,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACvC,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACzC,CAAC,EAHWA,cAAM,KAANA,cAAM,GAGjB,EAAA,CAAA,CAAA;;"}
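The source map above (file: "enum.cjs") shows the only underlying source change behind this hunk: a new XAI member on the Providers enum in src/common/enum.ts. Excerpted verbatim from the embedded sourcesContent, the enum now reads:

export enum Providers {
  OPENAI = 'openAI',
  BEDROCK_LEGACY = 'bedrock_legacy',
  VERTEXAI = 'vertexai',
  BEDROCK = 'bedrock',
  ANTHROPIC = 'anthropic',
  MISTRALAI = 'mistralai',
  OLLAMA = 'ollama',
  GOOGLE = 'google',
  AZURE = 'azureOpenAI',
  DEEPSEEK = 'deepseek',
  OPENROUTER = 'openrouter',
  XAI = 'xai', // new in 2.2.0
}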
@@ -1,5 +1,6 @@
  'use strict';

+ var xai = require('@langchain/xai');
  var ollama = require('@langchain/ollama');
  var deepseek = require('@langchain/deepseek');
  var mistralai = require('@langchain/mistralai');
@@ -14,6 +15,7 @@ var _enum = require('../common/enum.cjs');

  // src/llm/providers.ts
  const llmProviders = {
+ [_enum.Providers.XAI]: xai.ChatXAI,
  [_enum.Providers.OPENAI]: openai.ChatOpenAI,
  [_enum.Providers.OLLAMA]: ollama.ChatOllama,
  [_enum.Providers.AZURE]: openai.AzureChatOpenAI,
@@ -1 +1 @@
- {"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatDeepSeek } from '@langchain/deepseek';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\nimport { ChatOpenRouter } from '@/llm/openrouter/llm';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: ChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":["Providers","ChatOpenAI","ChatOllama","AzureChatOpenAI","ChatVertexAI","ChatDeepSeek","ChatMistralAI","CustomAnthropic","ChatOpenRouter","BedrockChat","ChatBedrockConverse","ChatGoogleGenerativeAI"],"mappings":";;;;;;;;;;;;;;AAAA;AAea,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,MAAM,GAAGC,iBAAU;AAC9B,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,iBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,KAAK,GAAGG,sBAAe;AAClC,IAAA,CAACH,eAAS,CAAC,QAAQ,GAAGI,2BAAY;AAClC,IAAA,CAACJ,eAAS,CAAC,QAAQ,GAAGK,qBAAY;AAClC,IAAA,CAACL,eAAS,CAAC,SAAS,GAAGM,uBAAa;AACpC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,qBAAe;AACtC,IAAA,CAACP,eAAS,CAAC,UAAU,GAAGQ,kBAAc;AACtC,IAAA,CAACR,eAAS,CAAC,cAAc,GAAGS,eAAW;AACvC,IAAA,CAACT,eAAS,CAAC,OAAO,GAAGU,uBAAmB;;AAExC,IAAA,CAACV,eAAS,CAAC,MAAM,GAAGW,kCAAsB;;MAG/B,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAACX,eAAS,CAAC,SAAS,EAAEA,eAAS,CAAC,OAAO,EAAEA,eAAS,CAAC,MAAM,CAAC;AAElH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;;;"}
+ {"version":3,"file":"providers.cjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatXAI } from '@langchain/xai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatDeepSeek } from '@langchain/deepseek';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\nimport { ChatOpenRouter } from '@/llm/openrouter/llm';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.XAI]: ChatXAI,\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: ChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":["Providers","ChatXAI","ChatOpenAI","ChatOllama","AzureChatOpenAI","ChatVertexAI","ChatDeepSeek","ChatMistralAI","CustomAnthropic","ChatOpenRouter","BedrockChat","ChatBedrockConverse","ChatGoogleGenerativeAI"],"mappings":";;;;;;;;;;;;;;;AAAA;AAgBa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAACA,eAAS,CAAC,GAAG,GAAGC,WAAO;AACxB,IAAA,CAACD,eAAS,CAAC,MAAM,GAAGE,iBAAU;AAC9B,IAAA,CAACF,eAAS,CAAC,MAAM,GAAGG,iBAAU;AAC9B,IAAA,CAACH,eAAS,CAAC,KAAK,GAAGI,sBAAe;AAClC,IAAA,CAACJ,eAAS,CAAC,QAAQ,GAAGK,2BAAY;AAClC,IAAA,CAACL,eAAS,CAAC,QAAQ,GAAGM,qBAAY;AAClC,IAAA,CAACN,eAAS,CAAC,SAAS,GAAGO,uBAAa;AACpC,IAAA,CAACP,eAAS,CAAC,SAAS,GAAGQ,qBAAe;AACtC,IAAA,CAACR,eAAS,CAAC,UAAU,GAAGS,kBAAc;AACtC,IAAA,CAACT,eAAS,CAAC,cAAc,GAAGU,eAAW;AACvC,IAAA,CAACV,eAAS,CAAC,OAAO,GAAGW,uBAAmB;;AAExC,IAAA,CAACX,eAAS,CAAC,MAAM,GAAGY,kCAAsB;;MAG/B,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAACZ,eAAS,CAAC,SAAS,EAAEA,eAAS,CAAC,OAAO,EAAEA,eAAS,CAAC,MAAM,CAAC;AAElH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;;;"}
package/dist/cjs/run.cjs CHANGED
@@ -131,7 +131,7 @@ class Run {
  const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;
  const model = this.Graph?.getNewModel({
  clientOptions,
- omitOriginalOptions: ['streaming', 'thinking', 'maxTokens', 'maxOutputTokens'],
+ omitOriginalOptions: ['streaming', 'stream', 'thinking', 'maxTokens', 'maxOutputTokens', 'additionalModelRequestFields'],
  });
  if (!model) {
  return { language: '', title: '' };
@@ -1 +1 @@
- {"version":3,"file":"run.cjs","sources":["../../src/run.ts"],"sourcesContent":["// src/run.ts\nimport { PromptTemplate } from '@langchain/core/prompts';\nimport { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type * as t from '@/types';\nimport { GraphEvents, Providers, Callback } from '@/common';\nimport { manualToolStreamProviders } from '@/llm/providers';\nimport { createTitleRunnable } from '@/utils/title';\nimport { StandardGraph } from '@/graphs/Graph';\nimport { HandlerRegistry } from '@/events';\nimport { isOpenAILike } from '@/utils/llm';\n\nexport class Run<T extends t.BaseGraphState> {\n graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;\n // private collab!: CollabGraph;\n // private taskManager!: TaskManager;\n private handlerRegistry: HandlerRegistry;\n id: string;\n Graph: StandardGraph | undefined;\n provider: Providers | undefined;\n returnContent: boolean = false;\n\n private constructor(config: Partial<t.RunConfig>) {\n const runId = config.runId ?? '';\n if (!runId) {\n throw new Error('Run ID not provided');\n }\n\n this.id = runId;\n\n const handlerRegistry = new HandlerRegistry();\n\n if (config.customHandlers) {\n for (const [eventType, handler] of Object.entries(config.customHandlers)) {\n handlerRegistry.register(eventType, handler);\n }\n }\n\n this.handlerRegistry = handlerRegistry;\n\n if (!config.graphConfig) {\n throw new Error('Graph config not provided');\n }\n\n if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {\n this.provider = config.graphConfig.llmConfig.provider;\n this.graphRunnable = this.createStandardGraph(config.graphConfig) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;\n if (this.Graph) {\n this.Graph.handlerRegistry = handlerRegistry;\n }\n }\n\n this.returnContent = config.returnContent ?? false;\n }\n\n private createStandardGraph(config: t.StandardGraphConfig): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {\n const { llmConfig, tools = [], ...graphInput } = config;\n const { provider, ...clientOptions } = llmConfig;\n\n const standardGraph = new StandardGraph({\n tools,\n provider,\n clientOptions,\n ...graphInput,\n runId: this.id,\n });\n this.Graph = standardGraph;\n return standardGraph.createWorkflow();\n }\n\n static async create<T extends t.BaseGraphState>(config: t.RunConfig): Promise<Run<T>> {\n return new Run<T>(config);\n }\n\n getRunMessages(): BaseMessage[] | undefined {\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n return this.Graph.getRunMessages();\n }\n\n async processStream(\n inputs: t.IState,\n config: Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string },\n streamOptions?: t.EventStreamOptions,\n ): Promise<MessageContentComplex[] | undefined> {\n if (!this.graphRunnable) {\n throw new Error('Run not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n\n this.Graph.resetValues(streamOptions?.keepContent);\n const provider = this.Graph.provider;\n const hasTools = this.Graph.tools ? 
this.Graph.tools.length > 0 : false;\n if (streamOptions?.callbacks) {\n /* TODO: conflicts with callback manager */\n const callbacks = config.callbacks as t.ProvidedCallbacks ?? [];\n config.callbacks = callbacks.concat(this.getCallbacks(streamOptions.callbacks));\n }\n\n if (!this.id) {\n throw new Error('Run ID not provided');\n }\n\n config.run_id = this.id;\n config.configurable = Object.assign(config.configurable ?? {}, { run_id: this.id, provider: this.provider });\n\n const stream = this.graphRunnable.streamEvents(inputs, config);\n\n for await (const event of stream) {\n const { data, name, metadata, ...info } = event;\n\n let eventName: t.EventName = info.event;\n if (hasTools && manualToolStreamProviders.has(provider) && eventName === GraphEvents.CHAT_MODEL_STREAM) {\n /* Skipping CHAT_MODEL_STREAM event due to double-call edge case */\n continue;\n }\n\n if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {\n eventName = name;\n }\n\n const handler = this.handlerRegistry.getHandler(eventName);\n if (handler) {\n handler.handle(eventName, data, metadata, this.Graph);\n }\n }\n\n if (this.returnContent) {\n return this.Graph.getContentParts();\n }\n }\n\n private createSystemCallback<K extends keyof ClientCallbacks>(\n clientCallbacks: ClientCallbacks,\n key: K\n ): SystemCallbacks[K] {\n return ((...args: unknown[]) => {\n const clientCallback = clientCallbacks[key];\n if (clientCallback && this.Graph) {\n (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);\n }\n }) as SystemCallbacks[K];\n }\n\n getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {\n return {\n [Callback.TOOL_ERROR]: this.createSystemCallback(clientCallbacks, Callback.TOOL_ERROR),\n [Callback.TOOL_START]: this.createSystemCallback(clientCallbacks, Callback.TOOL_START),\n [Callback.TOOL_END]: this.createSystemCallback(clientCallbacks, Callback.TOOL_END),\n };\n }\n\n async generateTitle({\n inputText,\n contentParts,\n titlePrompt,\n clientOptions,\n chainOptions,\n skipLanguage,\n } : {\n inputText: string;\n contentParts: (t.MessageContentComplex | undefined)[];\n titlePrompt?: string;\n skipLanguage?: boolean;\n clientOptions?: t.ClientOptions;\n chainOptions?: Partial<RunnableConfig> | undefined;\n }): Promise<{ language: string; title: string }> {\n const convoTemplate = PromptTemplate.fromTemplate('User: {input}\\nAI: {output}');\n const response = contentParts.map((part) => {\n if (part?.type === 'text') return part.text;\n return '';\n }).join('\\n');\n const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;\n const model = this.Graph?.getNewModel({\n clientOptions,\n omitOriginalOptions: ['streaming', 'thinking', 'maxTokens', 'maxOutputTokens'],\n });\n if (!model) {\n return { language: '', title: '' };\n }\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)?.temperature as number;\n model.topP = (clientOptions as t.OpenAIClientOptions | undefined)?.topP as number;\n model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.frequencyPenalty as number;\n model.presencePenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.presencePenalty as number;\n model.n = (clientOptions as t.OpenAIClientOptions | undefined)?.n as number;\n }\n const chain = await createTitleRunnable(model, titlePrompt);\n return await chain.invoke({ convo, inputText, skipLanguage }, chainOptions) as { 
language: string; title: string };\n }\n}\n"],"names":["HandlerRegistry","StandardGraph","manualToolStreamProviders","GraphEvents","Callback","PromptTemplate","isOpenAILike","ChatOpenAI","AzureChatOpenAI","createTitleRunnable"],"mappings":";;;;;;;;;;;AAAA;MAca,GAAG,CAAA;AACd,IAAA,aAAa;;;AAGL,IAAA,eAAe;AACvB,IAAA,EAAE;AACF,IAAA,KAAK;AACL,IAAA,QAAQ;IACR,aAAa,GAAY,KAAK;AAE9B,IAAA,WAAA,CAAoB,MAA4B,EAAA;AAC9C,QAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAChC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,IAAI,CAAC,EAAE,GAAG,KAAK;AAEf,QAAA,MAAM,eAAe,GAAG,IAAIA,sBAAe,EAAE;AAE7C,QAAA,IAAI,MAAM,CAAC,cAAc,EAAE;AACzB,YAAA,KAAK,MAAM,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,EAAE;AACxE,gBAAA,eAAe,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC;;;AAIhD,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe;AAEtC,QAAA,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAG9C,QAAA,IAAI,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;YACtE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,QAAQ;YACrD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,WAAW,CAAyD;AACzH,YAAA,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,IAAI,CAAC,KAAK,CAAC,eAAe,GAAG,eAAe;;;QAIhD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK;;AAG5C,IAAA,mBAAmB,CAAC,MAA6B,EAAA;AACvD,QAAA,MAAM,EAAE,SAAS,EAAE,KAAK,GAAG,EAAE,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM;QACvD,MAAM,EAAE,QAAQ,EAAE,GAAG,aAAa,EAAE,GAAG,SAAS;AAEhD,QAAA,MAAM,aAAa,GAAG,IAAIC,mBAAa,CAAC;YACtC,KAAK;YACL,QAAQ;YACR,aAAa;AACb,YAAA,GAAG,UAAU;YACb,KAAK,EAAE,IAAI,CAAC,EAAE;AACf,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,KAAK,GAAG,aAAa;AAC1B,QAAA,OAAO,aAAa,CAAC,cAAc,EAAE;;AAGvC,IAAA,aAAa,MAAM,CAA6B,MAAmB,EAAA;AACjE,QAAA,OAAO,IAAI,GAAG,CAAI,MAAM,CAAC;;IAG3B,cAAc,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;AAEjG,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;;AAGpC,IAAA,MAAM,aAAa,CACjB,MAAgB,EAChB,MAA2E,EAC3E,aAAoC,EAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,4EAA4E,CAAC;;AAE/F,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;QAGjG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG,KAAK;AACvE,QAAA,IAAI,aAAa,EAAE,SAAS,EAAE;;AAE5B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,SAAgC,IAAI,EAAE;AAC/D,YAAA,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;;AAGjF,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;QACvB,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC;AAE5G,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,MAAM,EAAE,MAAM,CAAC;AAE9D,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;AAChC,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK;AAE/C,YAAA,IAAI,SAAS,GAAgB,IAAI,CAAC,KAAK;AACvC,YAAA,IAAI,QAAQ,IAAIC,mCAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,SAAS,KAAKC,iBAAW,CAAC,iBAAiB,EAAE;;gBAEtG;;YAGF,IAAI,SAAS,IAAI,SAAS,KAAKA,iBAAW,CAAC,eAAe,EAAE;gBAC1D,SAAS,GAAG,IAAI;;YAGlB,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,SAAS,CAAC;YAC1D,IAAI,OAAO,EAAE;AACX,gBAAA,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;;;AAIzD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;;;IAI/B,oBAAoB,CAC1B,eAAgC,EAChC,GAAM,EAAA;AAEN,QAAA,QAAQ,CAAC,GAAG,IAAe,KAAI;AAC7B,YAAA,MAAM,cAAc,GAAG,eAAe,CAAC,GAAG,CAAC
;AAC3C,YAAA,IAAI,cAAc,IAAI,IAAI,CAAC,KAAK,EAAE;gBAC/B,cAA+C,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,IAAI,CAAC;;AAEzE,SAAC;;AAGH,IAAA,YAAY,CAAC,eAAgC,EAAA;QAC3C,OAAO;AACL,YAAA,CAACC,cAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAEA,cAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAACA,cAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAEA,cAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAACA,cAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAEA,cAAQ,CAAC,QAAQ,CAAC;SACnF;;AAGH,IAAA,MAAM,aAAa,CAAC,EAClB,SAAS,EACT,YAAY,EACZ,WAAW,EACX,aAAa,EACb,YAAY,EACZ,YAAY,GAQb,EAAA;QACC,MAAM,aAAa,GAAGC,sBAAc,CAAC,YAAY,CAAC,6BAA6B,CAAC;QAChF,MAAM,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;AACzC,YAAA,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM;gBAAE,OAAO,IAAI,CAAC,IAAI;AAC3C,YAAA,OAAO,EAAE;AACX,SAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;QACb,MAAM,KAAK,GAAG,CAAC,MAAM,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,EAAE,KAAK;AACxF,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC;YACpC,aAAa;YACb,mBAAmB,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,WAAW,EAAE,iBAAiB,CAAC;AAC/E,SAAA,CAAC;QACF,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;;AAEpC,QAAA,IAAIC,gBAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAYC,iBAAU,IAAI,KAAK,YAAYC,sBAAe,CAAC,EAAE;AACpG,YAAA,KAAK,CAAC,WAAW,GAAI,aAAmD,EAAE,WAAqB;AAC/F,YAAA,KAAK,CAAC,IAAI,GAAI,aAAmD,EAAE,IAAc;AACjF,YAAA,KAAK,CAAC,gBAAgB,GAAI,aAAmD,EAAE,gBAA0B;AACzG,YAAA,KAAK,CAAC,eAAe,GAAI,aAAmD,EAAE,eAAyB;AACvG,YAAA,KAAK,CAAC,CAAC,GAAI,aAAmD,EAAE,CAAW;;QAE7E,MAAM,KAAK,GAAG,MAAMC,yBAAmB,CAAC,KAAK,EAAE,WAAW,CAAC;AAC3D,QAAA,OAAO,MAAM,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,YAAY,CAAwC;;AAErH;;;;"}
+ {"version":3,"file":"run.cjs","sources":["../../src/run.ts"],"sourcesContent":["// src/run.ts\nimport { PromptTemplate } from '@langchain/core/prompts';\nimport { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type * as t from '@/types';\nimport { GraphEvents, Providers, Callback } from '@/common';\nimport { manualToolStreamProviders } from '@/llm/providers';\nimport { createTitleRunnable } from '@/utils/title';\nimport { StandardGraph } from '@/graphs/Graph';\nimport { HandlerRegistry } from '@/events';\nimport { isOpenAILike } from '@/utils/llm';\n\nexport class Run<T extends t.BaseGraphState> {\n graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;\n // private collab!: CollabGraph;\n // private taskManager!: TaskManager;\n private handlerRegistry: HandlerRegistry;\n id: string;\n Graph: StandardGraph | undefined;\n provider: Providers | undefined;\n returnContent: boolean = false;\n\n private constructor(config: Partial<t.RunConfig>) {\n const runId = config.runId ?? '';\n if (!runId) {\n throw new Error('Run ID not provided');\n }\n\n this.id = runId;\n\n const handlerRegistry = new HandlerRegistry();\n\n if (config.customHandlers) {\n for (const [eventType, handler] of Object.entries(config.customHandlers)) {\n handlerRegistry.register(eventType, handler);\n }\n }\n\n this.handlerRegistry = handlerRegistry;\n\n if (!config.graphConfig) {\n throw new Error('Graph config not provided');\n }\n\n if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {\n this.provider = config.graphConfig.llmConfig.provider;\n this.graphRunnable = this.createStandardGraph(config.graphConfig) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;\n if (this.Graph) {\n this.Graph.handlerRegistry = handlerRegistry;\n }\n }\n\n this.returnContent = config.returnContent ?? false;\n }\n\n private createStandardGraph(config: t.StandardGraphConfig): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {\n const { llmConfig, tools = [], ...graphInput } = config;\n const { provider, ...clientOptions } = llmConfig;\n\n const standardGraph = new StandardGraph({\n tools,\n provider,\n clientOptions,\n ...graphInput,\n runId: this.id,\n });\n this.Graph = standardGraph;\n return standardGraph.createWorkflow();\n }\n\n static async create<T extends t.BaseGraphState>(config: t.RunConfig): Promise<Run<T>> {\n return new Run<T>(config);\n }\n\n getRunMessages(): BaseMessage[] | undefined {\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n return this.Graph.getRunMessages();\n }\n\n async processStream(\n inputs: t.IState,\n config: Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string },\n streamOptions?: t.EventStreamOptions,\n ): Promise<MessageContentComplex[] | undefined> {\n if (!this.graphRunnable) {\n throw new Error('Run not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n\n this.Graph.resetValues(streamOptions?.keepContent);\n const provider = this.Graph.provider;\n const hasTools = this.Graph.tools ? 
this.Graph.tools.length > 0 : false;\n if (streamOptions?.callbacks) {\n /* TODO: conflicts with callback manager */\n const callbacks = config.callbacks as t.ProvidedCallbacks ?? [];\n config.callbacks = callbacks.concat(this.getCallbacks(streamOptions.callbacks));\n }\n\n if (!this.id) {\n throw new Error('Run ID not provided');\n }\n\n config.run_id = this.id;\n config.configurable = Object.assign(config.configurable ?? {}, { run_id: this.id, provider: this.provider });\n\n const stream = this.graphRunnable.streamEvents(inputs, config);\n\n for await (const event of stream) {\n const { data, name, metadata, ...info } = event;\n\n let eventName: t.EventName = info.event;\n if (hasTools && manualToolStreamProviders.has(provider) && eventName === GraphEvents.CHAT_MODEL_STREAM) {\n /* Skipping CHAT_MODEL_STREAM event due to double-call edge case */\n continue;\n }\n\n if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {\n eventName = name;\n }\n\n const handler = this.handlerRegistry.getHandler(eventName);\n if (handler) {\n handler.handle(eventName, data, metadata, this.Graph);\n }\n }\n\n if (this.returnContent) {\n return this.Graph.getContentParts();\n }\n }\n\n private createSystemCallback<K extends keyof ClientCallbacks>(\n clientCallbacks: ClientCallbacks,\n key: K\n ): SystemCallbacks[K] {\n return ((...args: unknown[]) => {\n const clientCallback = clientCallbacks[key];\n if (clientCallback && this.Graph) {\n (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);\n }\n }) as SystemCallbacks[K];\n }\n\n getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {\n return {\n [Callback.TOOL_ERROR]: this.createSystemCallback(clientCallbacks, Callback.TOOL_ERROR),\n [Callback.TOOL_START]: this.createSystemCallback(clientCallbacks, Callback.TOOL_START),\n [Callback.TOOL_END]: this.createSystemCallback(clientCallbacks, Callback.TOOL_END),\n };\n }\n\n async generateTitle({\n inputText,\n contentParts,\n titlePrompt,\n clientOptions,\n chainOptions,\n skipLanguage,\n } : {\n inputText: string;\n contentParts: (t.MessageContentComplex | undefined)[];\n titlePrompt?: string;\n skipLanguage?: boolean;\n clientOptions?: t.ClientOptions;\n chainOptions?: Partial<RunnableConfig> | undefined;\n }): Promise<{ language: string; title: string }> {\n const convoTemplate = PromptTemplate.fromTemplate('User: {input}\\nAI: {output}');\n const response = contentParts.map((part) => {\n if (part?.type === 'text') return part.text;\n return '';\n }).join('\\n');\n const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;\n const model = this.Graph?.getNewModel({\n clientOptions,\n omitOriginalOptions: ['streaming', 'stream', 'thinking', 'maxTokens', 'maxOutputTokens', 'additionalModelRequestFields'],\n });\n if (!model) {\n return { language: '', title: '' };\n }\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)?.temperature as number;\n model.topP = (clientOptions as t.OpenAIClientOptions | undefined)?.topP as number;\n model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.frequencyPenalty as number;\n model.presencePenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.presencePenalty as number;\n model.n = (clientOptions as t.OpenAIClientOptions | undefined)?.n as number;\n }\n const chain = await createTitleRunnable(model, titlePrompt);\n return await chain.invoke({ convo, 
inputText, skipLanguage }, chainOptions) as { language: string; title: string };\n }\n}\n"],"names":["HandlerRegistry","StandardGraph","manualToolStreamProviders","GraphEvents","Callback","PromptTemplate","isOpenAILike","ChatOpenAI","AzureChatOpenAI","createTitleRunnable"],"mappings":";;;;;;;;;;;AAAA;MAca,GAAG,CAAA;AACd,IAAA,aAAa;;;AAGL,IAAA,eAAe;AACvB,IAAA,EAAE;AACF,IAAA,KAAK;AACL,IAAA,QAAQ;IACR,aAAa,GAAY,KAAK;AAE9B,IAAA,WAAA,CAAoB,MAA4B,EAAA;AAC9C,QAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAChC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,IAAI,CAAC,EAAE,GAAG,KAAK;AAEf,QAAA,MAAM,eAAe,GAAG,IAAIA,sBAAe,EAAE;AAE7C,QAAA,IAAI,MAAM,CAAC,cAAc,EAAE;AACzB,YAAA,KAAK,MAAM,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,EAAE;AACxE,gBAAA,eAAe,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC;;;AAIhD,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe;AAEtC,QAAA,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAG9C,QAAA,IAAI,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;YACtE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,QAAQ;YACrD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,WAAW,CAAyD;AACzH,YAAA,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,IAAI,CAAC,KAAK,CAAC,eAAe,GAAG,eAAe;;;QAIhD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK;;AAG5C,IAAA,mBAAmB,CAAC,MAA6B,EAAA;AACvD,QAAA,MAAM,EAAE,SAAS,EAAE,KAAK,GAAG,EAAE,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM;QACvD,MAAM,EAAE,QAAQ,EAAE,GAAG,aAAa,EAAE,GAAG,SAAS;AAEhD,QAAA,MAAM,aAAa,GAAG,IAAIC,mBAAa,CAAC;YACtC,KAAK;YACL,QAAQ;YACR,aAAa;AACb,YAAA,GAAG,UAAU;YACb,KAAK,EAAE,IAAI,CAAC,EAAE;AACf,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,KAAK,GAAG,aAAa;AAC1B,QAAA,OAAO,aAAa,CAAC,cAAc,EAAE;;AAGvC,IAAA,aAAa,MAAM,CAA6B,MAAmB,EAAA;AACjE,QAAA,OAAO,IAAI,GAAG,CAAI,MAAM,CAAC;;IAG3B,cAAc,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;AAEjG,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;;AAGpC,IAAA,MAAM,aAAa,CACjB,MAAgB,EAChB,MAA2E,EAC3E,aAAoC,EAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,4EAA4E,CAAC;;AAE/F,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;QAGjG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG,KAAK;AACvE,QAAA,IAAI,aAAa,EAAE,SAAS,EAAE;;AAE5B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,SAAgC,IAAI,EAAE;AAC/D,YAAA,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;;AAGjF,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;QACvB,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC;AAE5G,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,MAAM,EAAE,MAAM,CAAC;AAE9D,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;AAChC,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK;AAE/C,YAAA,IAAI,SAAS,GAAgB,IAAI,CAAC,KAAK;AACvC,YAAA,IAAI,QAAQ,IAAIC,mCAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,SAAS,KAAKC,iBAAW,CAAC,iBAAiB,EAAE;;gBAEtG;;YAGF,IAAI,SAAS,IAAI,SAAS,KAAKA,iBAAW,CAAC,eAAe,EAAE;gBAC1D,SAAS,GAAG,IAAI;;YAGlB,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,SAAS,CAAC;YAC1D,IAAI,OAAO,EAAE;AACX,gBAAA,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;;;AAIzD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;;;IAI/B,oBAAoB,CAC1B,eAAgC,EAChC,GAAM,EAAA;AAEN,QAAA,QAAQ,CAAC,GAAG,IAAe,KAAI
;AAC7B,YAAA,MAAM,cAAc,GAAG,eAAe,CAAC,GAAG,CAAC;AAC3C,YAAA,IAAI,cAAc,IAAI,IAAI,CAAC,KAAK,EAAE;gBAC/B,cAA+C,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,IAAI,CAAC;;AAEzE,SAAC;;AAGH,IAAA,YAAY,CAAC,eAAgC,EAAA;QAC3C,OAAO;AACL,YAAA,CAACC,cAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAEA,cAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAACA,cAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAEA,cAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAACA,cAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAEA,cAAQ,CAAC,QAAQ,CAAC;SACnF;;AAGH,IAAA,MAAM,aAAa,CAAC,EAClB,SAAS,EACT,YAAY,EACZ,WAAW,EACX,aAAa,EACb,YAAY,EACZ,YAAY,GAQb,EAAA;QACC,MAAM,aAAa,GAAGC,sBAAc,CAAC,YAAY,CAAC,6BAA6B,CAAC;QAChF,MAAM,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;AACzC,YAAA,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM;gBAAE,OAAO,IAAI,CAAC,IAAI;AAC3C,YAAA,OAAO,EAAE;AACX,SAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;QACb,MAAM,KAAK,GAAG,CAAC,MAAM,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,EAAE,KAAK;AACxF,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC;YACpC,aAAa;AACb,YAAA,mBAAmB,EAAE,CAAC,WAAW,EAAE,QAAQ,EAAE,UAAU,EAAE,WAAW,EAAE,iBAAiB,EAAE,8BAA8B,CAAC;AACzH,SAAA,CAAC;QACF,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;;AAEpC,QAAA,IAAIC,gBAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAYC,iBAAU,IAAI,KAAK,YAAYC,sBAAe,CAAC,EAAE;AACpG,YAAA,KAAK,CAAC,WAAW,GAAI,aAAmD,EAAE,WAAqB;AAC/F,YAAA,KAAK,CAAC,IAAI,GAAI,aAAmD,EAAE,IAAc;AACjF,YAAA,KAAK,CAAC,gBAAgB,GAAI,aAAmD,EAAE,gBAA0B;AACzG,YAAA,KAAK,CAAC,eAAe,GAAI,aAAmD,EAAE,eAAyB;AACvG,YAAA,KAAK,CAAC,CAAC,GAAI,aAAmD,EAAE,CAAW;;QAE7E,MAAM,KAAK,GAAG,MAAMC,yBAAmB,CAAC,KAAK,EAAE,WAAW,CAAC;AAC3D,QAAA,OAAO,MAAM,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,YAAY,CAAwC;;AAErH;;;;"}
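Beyond the new provider, 2.2.0 also widens the omitOriginalOptions list that Run.generateTitle passes to Graph.getNewModel: 'stream' and 'additionalModelRequestFields' are now stripped from the original client options, alongside the existing streaming/thinking/token-limit keys, when the lightweight title-generation model is built. A minimal sketch of the effect, assuming getNewModel simply drops the listed keys from clientOptions; the omitKeys helper and the sample options object are illustrative, not the package's implementation:

// Keys omitted when constructing the title-generation model (from the diff above).
const omitOriginalOptions = [
  'streaming', 'stream', 'thinking', 'maxTokens', 'maxOutputTokens',
  'additionalModelRequestFields',
] as const;

// Hypothetical helper: return a copy of obj without the listed keys.
function omitKeys<T extends Record<string, unknown>>(
  obj: T,
  keys: readonly string[],
): Partial<T> {
  return Object.fromEntries(
    Object.entries(obj).filter(([key]) => !keys.includes(key)),
  ) as Partial<T>;
}

// Illustrative client options for a main run that streams and enables extra request fields:
const clientOptions = {
  model: 'example-model-id',
  streaming: true,
  stream: true,
  maxTokens: 8192,
  additionalModelRequestFields: { reasoning: true },
};

// The title model keeps only the neutral options:
const titleOptions = omitKeys(clientOptions, omitOriginalOptions);
// => { model: 'example-model-id' }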
@@ -64,6 +64,7 @@ var Providers;
  Providers["AZURE"] = "azureOpenAI";
  Providers["DEEPSEEK"] = "deepseek";
  Providers["OPENROUTER"] = "openrouter";
+ Providers["XAI"] = "xai";
  })(Providers || (Providers = {}));
  var GraphNodeKeys;
  (function (GraphNodeKeys) {
@@ -1 +1 @@
- {"version":3,"file":"enum.mjs","sources":["../../../src/common/enum.ts"],"sourcesContent":["/**\n * Enum representing the various event types emitted during the execution of runnables.\n * These events provide real-time information about the progress and state of different components.\n *\n * @enum {string}\n */\nexport enum GraphEvents {\n /* Custom Events */\n\n /** [Custom] Delta event for run steps (message creation and tool calls) */\n ON_RUN_STEP = 'on_run_step',\n /** [Custom] Delta event for run steps (tool calls) */\n ON_RUN_STEP_DELTA = 'on_run_step_delta',\n /** [Custom] Completed event for run steps (tool calls) */\n ON_RUN_STEP_COMPLETED = 'on_run_step_completed',\n /** [Custom] Delta events for messages */\n ON_MESSAGE_DELTA = 'on_message_delta',\n /** [Custom] Reasoning Delta events for messages */\n ON_REASONING_DELTA = 'on_reasoning_delta',\n\n /* Official Events */\n\n /** Custom event, emitted by system */\n ON_CUSTOM_EVENT = 'on_custom_event',\n /** Emitted when a chat model starts processing. */\n CHAT_MODEL_START = 'on_chat_model_start',\n\n /** Emitted when a chat model streams a chunk of its response. */\n CHAT_MODEL_STREAM = 'on_chat_model_stream',\n\n /** Emitted when a chat model completes its processing. */\n CHAT_MODEL_END = 'on_chat_model_end',\n\n /** Emitted when a language model starts processing. */\n LLM_START = 'on_llm_start',\n\n /** Emitted when a language model streams a chunk of its response. */\n LLM_STREAM = 'on_llm_stream',\n\n /** Emitted when a language model completes its processing. */\n LLM_END = 'on_llm_end',\n\n /** Emitted when a chain starts processing. */\n CHAIN_START = 'on_chain_start',\n\n /** Emitted when a chain streams a chunk of its output. */\n CHAIN_STREAM = 'on_chain_stream',\n\n /** Emitted when a chain completes its processing. */\n CHAIN_END = 'on_chain_end',\n\n /** Emitted when a tool starts its operation. */\n TOOL_START = 'on_tool_start',\n\n /** Emitted when a tool completes its operation. */\n TOOL_END = 'on_tool_end',\n\n /** Emitted when a retriever starts its operation. */\n RETRIEVER_START = 'on_retriever_start',\n\n /** Emitted when a retriever completes its operation. */\n RETRIEVER_END = 'on_retriever_end',\n\n /** Emitted when a prompt starts processing. */\n PROMPT_START = 'on_prompt_start',\n\n /** Emitted when a prompt completes its processing. 
*/\n PROMPT_END = 'on_prompt_end'\n}\n\nexport enum Providers {\n OPENAI = 'openAI',\n BEDROCK_LEGACY = 'bedrock_legacy',\n VERTEXAI = 'vertexai',\n BEDROCK = 'bedrock',\n ANTHROPIC = 'anthropic',\n MISTRALAI = 'mistralai',\n OLLAMA = 'ollama',\n GOOGLE = 'google',\n AZURE = 'azureOpenAI',\n DEEPSEEK = 'deepseek',\n OPENROUTER = 'openrouter',\n}\n\nexport enum GraphNodeKeys {\n TOOLS = 'tools',\n AGENT = 'agent',\n PRE_TOOLS = 'pre_tools',\n POST_TOOLS = 'post_tools',\n}\n\nexport enum GraphNodeActions {\n TOOL_NODE = 'tool_node',\n CALL_MODEL = 'call_model',\n ROUTE_MESSAGE = 'route_message',\n}\n\nexport enum CommonEvents {\n LANGGRAPH = 'LangGraph',\n}\n\nexport enum StepTypes {\n TOOL_CALLS = 'tool_calls',\n MESSAGE_CREATION = 'message_creation'\n}\n\nexport enum ContentTypes {\n TEXT = 'text',\n ERROR = 'error',\n THINK = 'think',\n TOOL_CALL = 'tool_call',\n IMAGE_URL = 'image_url',\n IMAGE_FILE = 'image_file',\n /** Anthropic */\n THINKING = 'thinking',\n /** Bedrock */\n REASONING_CONTENT = 'reasoning_content',\n}\n\nexport enum ToolCallTypes {\n FUNCTION = 'function',\n RETRIEVAL = 'retrieval',\n FILE_SEARCH = 'file_search',\n CODE_INTERPRETER = 'code_interpreter',\n /* Agents Tool Call */\n TOOL_CALL = 'tool_call',\n}\n\nexport enum Callback {\n TOOL_ERROR = 'handleToolError',\n TOOL_START = 'handleToolStart',\n TOOL_END = 'handleToolEnd',\n /*\n LLM_START = 'handleLLMStart',\n LLM_NEW_TOKEN = 'handleLLMNewToken',\n LLM_ERROR = 'handleLLMError',\n LLM_END = 'handleLLMEnd',\n CHAT_MODEL_START = 'handleChatModelStart',\n CHAIN_START = 'handleChainStart',\n CHAIN_ERROR = 'handleChainError',\n CHAIN_END = 'handleChainEnd',\n TEXT = 'handleText',\n AGENT_ACTION = 'handleAgentAction',\n AGENT_END = 'handleAgentEnd',\n RETRIEVER_START = 'handleRetrieverStart',\n RETRIEVER_END = 'handleRetrieverEnd',\n RETRIEVER_ERROR = 'handleRetrieverError',\n CUSTOM_EVENT = 'handleCustomEvent'\n */\n}\n\nexport enum Constants {\n OFFICIAL_CODE_BASEURL = 'https://api.librechat.ai/v1',\n EXECUTE_CODE = 'execute_code',\n CONTENT_AND_ARTIFACT = 'content_and_artifact',\n}\n\nexport enum EnvVar {\n CODE_API_KEY = 'LIBRECHAT_CODE_API_KEY',\n CODE_BASEURL = 
'LIBRECHAT_CODE_BASEURL'\n}\n"],"names":[],"mappings":"AAAA;;;;;AAKG;IACS;AAAZ,CAAA,UAAY,WAAW,EAAA;;;AAIrB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;;AAE3B,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;;AAEvC,IAAA,WAAA,CAAA,uBAAA,CAAA,GAAA,uBAA+C;;AAE/C,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,WAAA,CAAA,oBAAA,CAAA,GAAA,oBAAyC;;;AAKzC,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,iBAAmC;;AAEnC,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,qBAAwC;;AAGxC,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,sBAA0C;;AAG1C,IAAA,WAAA,CAAA,gBAAA,CAAA,GAAA,mBAAoC;;AAGpC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,SAAA,CAAA,GAAA,YAAsB;;AAGtB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,gBAA8B;;AAG9B,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,UAAA,CAAA,GAAA,aAAwB;;AAGxB,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,oBAAsC;;AAGtC,IAAA,WAAA,CAAA,eAAA,CAAA,GAAA,kBAAkC;;AAGlC,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;AAC9B,CAAC,EA9DW,WAAW,KAAX,WAAW,GA8DtB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,gBAAA,CAAA,GAAA,gBAAiC;AACjC,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,SAAA,CAAA,GAAA,SAAmB;AACnB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,OAAA,CAAA,GAAA,aAAqB;AACrB,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AAC3B,CAAC,EAZW,SAAS,KAAT,SAAS,GAYpB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AAC3B,CAAC,EALW,aAAa,KAAb,aAAa,GAKxB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,gBAAgB,EAAA;AAC1B,IAAA,gBAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,gBAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,gBAAA,CAAA,eAAA,CAAA,GAAA,eAA+B;AACjC,CAAC,EAJW,gBAAgB,KAAhB,gBAAgB,GAI3B,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAFW,YAAY,KAAZ,YAAY,GAEvB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,SAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;AACvC,CAAC,EAHW,SAAS,KAAT,SAAS,GAGpB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,MAAA,CAAA,GAAA,MAAa;AACb,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;;AAEzB,IAAA,YAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;;AAErB,IAAA,YAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;AACzC,CAAC,EAXW,YAAY,KAAZ,YAAY,GAWvB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;AAC3B,IAAA,aAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAPW,aAAa,KAAb,aAAa,GAOxB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,QAAQ,EAAA;AAClB,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,UAAA,CAAA,GAAA,eAA0B;AAC1B;;;;;;;;;;;;;;;;AAgBE;AACJ,CAAC,EArBW,QAAQ,KAAR,QAAQ,GAqBnB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,uBAAA,CAAA,GAAA,6BAAqD;AACrD,IAAA,SAAA,CAAA,cAAA,CAAA,GAAA,cAA6B;AAC7B,IAAA,SAAA,CAAA,sBAAA,CAAA,GAAA,sBAA6C;AAC/C,CAAC,EAJW,SAAS,KAAT,SAAS,GAIpB,EAAA,CAAA,CAAA;IAEW;AAAZ,CA
AA,UAAY,MAAM,EAAA;AAChB,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACvC,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACzC,CAAC,EAHW,MAAM,KAAN,MAAM,GAGjB,EAAA,CAAA,CAAA;;;;"}
1
+ {"version":3,"file":"enum.mjs","sources":["../../../src/common/enum.ts"],"sourcesContent":["/**\n * Enum representing the various event types emitted during the execution of runnables.\n * These events provide real-time information about the progress and state of different components.\n *\n * @enum {string}\n */\nexport enum GraphEvents {\n /* Custom Events */\n\n /** [Custom] Delta event for run steps (message creation and tool calls) */\n ON_RUN_STEP = 'on_run_step',\n /** [Custom] Delta event for run steps (tool calls) */\n ON_RUN_STEP_DELTA = 'on_run_step_delta',\n /** [Custom] Completed event for run steps (tool calls) */\n ON_RUN_STEP_COMPLETED = 'on_run_step_completed',\n /** [Custom] Delta events for messages */\n ON_MESSAGE_DELTA = 'on_message_delta',\n /** [Custom] Reasoning Delta events for messages */\n ON_REASONING_DELTA = 'on_reasoning_delta',\n\n /* Official Events */\n\n /** Custom event, emitted by system */\n ON_CUSTOM_EVENT = 'on_custom_event',\n /** Emitted when a chat model starts processing. */\n CHAT_MODEL_START = 'on_chat_model_start',\n\n /** Emitted when a chat model streams a chunk of its response. */\n CHAT_MODEL_STREAM = 'on_chat_model_stream',\n\n /** Emitted when a chat model completes its processing. */\n CHAT_MODEL_END = 'on_chat_model_end',\n\n /** Emitted when a language model starts processing. */\n LLM_START = 'on_llm_start',\n\n /** Emitted when a language model streams a chunk of its response. */\n LLM_STREAM = 'on_llm_stream',\n\n /** Emitted when a language model completes its processing. */\n LLM_END = 'on_llm_end',\n\n /** Emitted when a chain starts processing. */\n CHAIN_START = 'on_chain_start',\n\n /** Emitted when a chain streams a chunk of its output. */\n CHAIN_STREAM = 'on_chain_stream',\n\n /** Emitted when a chain completes its processing. */\n CHAIN_END = 'on_chain_end',\n\n /** Emitted when a tool starts its operation. */\n TOOL_START = 'on_tool_start',\n\n /** Emitted when a tool completes its operation. */\n TOOL_END = 'on_tool_end',\n\n /** Emitted when a retriever starts its operation. */\n RETRIEVER_START = 'on_retriever_start',\n\n /** Emitted when a retriever completes its operation. */\n RETRIEVER_END = 'on_retriever_end',\n\n /** Emitted when a prompt starts processing. */\n PROMPT_START = 'on_prompt_start',\n\n /** Emitted when a prompt completes its processing. 
*/\n PROMPT_END = 'on_prompt_end'\n}\n\nexport enum Providers {\n OPENAI = 'openAI',\n BEDROCK_LEGACY = 'bedrock_legacy',\n VERTEXAI = 'vertexai',\n BEDROCK = 'bedrock',\n ANTHROPIC = 'anthropic',\n MISTRALAI = 'mistralai',\n OLLAMA = 'ollama',\n GOOGLE = 'google',\n AZURE = 'azureOpenAI',\n DEEPSEEK = 'deepseek',\n OPENROUTER = 'openrouter',\n XAI = 'xai',\n}\n\nexport enum GraphNodeKeys {\n TOOLS = 'tools',\n AGENT = 'agent',\n PRE_TOOLS = 'pre_tools',\n POST_TOOLS = 'post_tools',\n}\n\nexport enum GraphNodeActions {\n TOOL_NODE = 'tool_node',\n CALL_MODEL = 'call_model',\n ROUTE_MESSAGE = 'route_message',\n}\n\nexport enum CommonEvents {\n LANGGRAPH = 'LangGraph',\n}\n\nexport enum StepTypes {\n TOOL_CALLS = 'tool_calls',\n MESSAGE_CREATION = 'message_creation'\n}\n\nexport enum ContentTypes {\n TEXT = 'text',\n ERROR = 'error',\n THINK = 'think',\n TOOL_CALL = 'tool_call',\n IMAGE_URL = 'image_url',\n IMAGE_FILE = 'image_file',\n /** Anthropic */\n THINKING = 'thinking',\n /** Bedrock */\n REASONING_CONTENT = 'reasoning_content',\n}\n\nexport enum ToolCallTypes {\n FUNCTION = 'function',\n RETRIEVAL = 'retrieval',\n FILE_SEARCH = 'file_search',\n CODE_INTERPRETER = 'code_interpreter',\n /* Agents Tool Call */\n TOOL_CALL = 'tool_call',\n}\n\nexport enum Callback {\n TOOL_ERROR = 'handleToolError',\n TOOL_START = 'handleToolStart',\n TOOL_END = 'handleToolEnd',\n /*\n LLM_START = 'handleLLMStart',\n LLM_NEW_TOKEN = 'handleLLMNewToken',\n LLM_ERROR = 'handleLLMError',\n LLM_END = 'handleLLMEnd',\n CHAT_MODEL_START = 'handleChatModelStart',\n CHAIN_START = 'handleChainStart',\n CHAIN_ERROR = 'handleChainError',\n CHAIN_END = 'handleChainEnd',\n TEXT = 'handleText',\n AGENT_ACTION = 'handleAgentAction',\n AGENT_END = 'handleAgentEnd',\n RETRIEVER_START = 'handleRetrieverStart',\n RETRIEVER_END = 'handleRetrieverEnd',\n RETRIEVER_ERROR = 'handleRetrieverError',\n CUSTOM_EVENT = 'handleCustomEvent'\n */\n}\n\nexport enum Constants {\n OFFICIAL_CODE_BASEURL = 'https://api.librechat.ai/v1',\n EXECUTE_CODE = 'execute_code',\n CONTENT_AND_ARTIFACT = 'content_and_artifact',\n}\n\nexport enum EnvVar {\n CODE_API_KEY = 'LIBRECHAT_CODE_API_KEY',\n CODE_BASEURL = 
'LIBRECHAT_CODE_BASEURL'\n}\n"],"names":[],"mappings":"AAAA;;;;;AAKG;IACS;AAAZ,CAAA,UAAY,WAAW,EAAA;;;AAIrB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;;AAE3B,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;;AAEvC,IAAA,WAAA,CAAA,uBAAA,CAAA,GAAA,uBAA+C;;AAE/C,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,WAAA,CAAA,oBAAA,CAAA,GAAA,oBAAyC;;;AAKzC,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,iBAAmC;;AAEnC,IAAA,WAAA,CAAA,kBAAA,CAAA,GAAA,qBAAwC;;AAGxC,IAAA,WAAA,CAAA,mBAAA,CAAA,GAAA,sBAA0C;;AAG1C,IAAA,WAAA,CAAA,gBAAA,CAAA,GAAA,mBAAoC;;AAGpC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,SAAA,CAAA,GAAA,YAAsB;;AAGtB,IAAA,WAAA,CAAA,aAAA,CAAA,GAAA,gBAA8B;;AAG9B,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,WAAA,CAAA,GAAA,cAA0B;;AAG1B,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;;AAG5B,IAAA,WAAA,CAAA,UAAA,CAAA,GAAA,aAAwB;;AAGxB,IAAA,WAAA,CAAA,iBAAA,CAAA,GAAA,oBAAsC;;AAGtC,IAAA,WAAA,CAAA,eAAA,CAAA,GAAA,kBAAkC;;AAGlC,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,iBAAgC;;AAGhC,IAAA,WAAA,CAAA,YAAA,CAAA,GAAA,eAA4B;AAC9B,CAAC,EA9DW,WAAW,KAAX,WAAW,GA8DtB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,gBAAA,CAAA,GAAA,gBAAiC;AACjC,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,SAAA,CAAA,GAAA,SAAmB;AACnB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,QAAA,CAAA,GAAA,QAAiB;AACjB,IAAA,SAAA,CAAA,OAAA,CAAA,GAAA,aAAqB;AACrB,IAAA,SAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,SAAA,CAAA,KAAA,CAAA,GAAA,KAAW;AACb,CAAC,EAbW,SAAS,KAAT,SAAS,GAapB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AAC3B,CAAC,EALW,aAAa,KAAb,aAAa,GAKxB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,gBAAgB,EAAA;AAC1B,IAAA,gBAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,gBAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,gBAAA,CAAA,eAAA,CAAA,GAAA,eAA+B;AACjC,CAAC,EAJW,gBAAgB,KAAhB,gBAAgB,GAI3B,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAFW,YAAY,KAAZ,YAAY,GAEvB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;AACzB,IAAA,SAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;AACvC,CAAC,EAHW,SAAS,KAAT,SAAS,GAGpB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,YAAY,EAAA;AACtB,IAAA,YAAA,CAAA,MAAA,CAAA,GAAA,MAAa;AACb,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,OAAA,CAAA,GAAA,OAAe;AACf,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,YAAA,CAAA,YAAA,CAAA,GAAA,YAAyB;;AAEzB,IAAA,YAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;;AAErB,IAAA,YAAA,CAAA,mBAAA,CAAA,GAAA,mBAAuC;AACzC,CAAC,EAXW,YAAY,KAAZ,YAAY,GAWvB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,aAAa,EAAA;AACvB,IAAA,aAAA,CAAA,UAAA,CAAA,GAAA,UAAqB;AACrB,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACvB,IAAA,aAAA,CAAA,aAAA,CAAA,GAAA,aAA2B;AAC3B,IAAA,aAAA,CAAA,kBAAA,CAAA,GAAA,kBAAqC;;AAErC,IAAA,aAAA,CAAA,WAAA,CAAA,GAAA,WAAuB;AACzB,CAAC,EAPW,aAAa,KAAb,aAAa,GAOxB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,QAAQ,EAAA;AAClB,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,YAAA,CAAA,GAAA,iBAA8B;AAC9B,IAAA,QAAA,CAAA,UAAA,CAAA,GAAA,eAA0B;AAC1B;;;;;;;;;;;;;;;;AAgBE;AACJ,CAAC,EArBW,QAAQ,KAAR,QAAQ,GAqBnB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,SAAS,EAAA;AACnB,IAAA,SAAA,CAAA,uBAAA,CAAA,GAAA,6BAAqD;AACrD,IAAA,SAAA,CAAA,cAAA,CAAA,GAAA,cAA6B;AAC7B,IAAA,SAAA,CAAA,sBAAA,CAAA,GAAA,sBAA6C;AAC/C,CAAC,EAJW,SAAS,KAA
T,SAAS,GAIpB,EAAA,CAAA,CAAA;IAEW;AAAZ,CAAA,UAAY,MAAM,EAAA;AAChB,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACvC,IAAA,MAAA,CAAA,cAAA,CAAA,GAAA,wBAAuC;AACzC,CAAC,EAHW,MAAM,KAAN,MAAM,GAGjB,EAAA,CAAA,CAAA;;;;"}
@@ -1,3 +1,4 @@
1
+ import { ChatXAI } from '@langchain/xai';
1
2
  import { ChatOllama } from '@langchain/ollama';
2
3
  import { ChatDeepSeek } from '@langchain/deepseek';
3
4
  import { ChatMistralAI } from '@langchain/mistralai';
@@ -12,6 +13,7 @@ import { Providers } from '../common/enum.mjs';
12
13
 
13
14
  // src/llm/providers.ts
14
15
  const llmProviders = {
16
+ [Providers.XAI]: ChatXAI,
15
17
  [Providers.OPENAI]: ChatOpenAI,
16
18
  [Providers.OLLAMA]: ChatOllama,
17
19
  [Providers.AZURE]: AzureChatOpenAI,
@@ -1 +1 @@
1
- {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatDeepSeek } from '@langchain/deepseek';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\nimport { ChatOpenRouter } from '@/llm/openrouter/llm';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: ChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;;;;AAAA;AAea,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,KAAK,GAAG,eAAe;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,eAAe;AACtC,IAAA,CAAC,SAAS,CAAC,UAAU,GAAG,cAAc;AACtC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;;AAExC,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,sBAAsB;;MAG/B,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC;AAElH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;"}
1
+ {"version":3,"file":"providers.mjs","sources":["../../../src/llm/providers.ts"],"sourcesContent":["// src/llm/providers.ts\nimport { ChatXAI } from '@langchain/xai';\nimport { ChatOllama } from '@langchain/ollama';\nimport { ChatDeepSeek } from '@langchain/deepseek';\nimport { ChatMistralAI } from '@langchain/mistralai';\nimport { ChatBedrockConverse } from '@langchain/aws';\n// import { ChatAnthropic } from '@langchain/anthropic';\nimport { ChatVertexAI } from '@langchain/google-vertexai';\nimport { ChatOpenAI, AzureChatOpenAI } from '@langchain/openai';\nimport { ChatGoogleGenerativeAI } from '@langchain/google-genai';\nimport { BedrockChat } from '@langchain/community/chat_models/bedrock/web';\nimport type { ChatModelConstructorMap, ProviderOptionsMap, ChatModelMap } from '@/types';\nimport { CustomAnthropic } from '@/llm/anthropic/llm';\nimport { ChatOpenRouter } from '@/llm/openrouter/llm';\nimport { Providers } from '@/common';\n\nexport const llmProviders: Partial<ChatModelConstructorMap> = {\n [Providers.XAI]: ChatXAI,\n [Providers.OPENAI]: ChatOpenAI,\n [Providers.OLLAMA]: ChatOllama,\n [Providers.AZURE]: AzureChatOpenAI,\n [Providers.VERTEXAI]: ChatVertexAI,\n [Providers.DEEPSEEK]: ChatDeepSeek,\n [Providers.MISTRALAI]: ChatMistralAI,\n [Providers.ANTHROPIC]: CustomAnthropic,\n [Providers.OPENROUTER]: ChatOpenRouter,\n [Providers.BEDROCK_LEGACY]: BedrockChat,\n [Providers.BEDROCK]: ChatBedrockConverse,\n // [Providers.ANTHROPIC]: ChatAnthropic,\n [Providers.GOOGLE]: ChatGoogleGenerativeAI,\n};\n\nexport const manualToolStreamProviders = new Set<Providers | string>([Providers.ANTHROPIC, Providers.BEDROCK, Providers.OLLAMA]);\n\nexport const getChatModelClass = <P extends Providers>(\n provider: P\n): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {\n const ChatModelClass = llmProviders[provider];\n if (!ChatModelClass) {\n throw new Error(`Unsupported LLM provider: ${provider}`);\n }\n\n return ChatModelClass;\n};"],"names":[],"mappings":";;;;;;;;;;;;;AAAA;AAgBa,MAAA,YAAY,GAAqC;AAC5D,IAAA,CAAC,SAAS,CAAC,GAAG,GAAG,OAAO;AACxB,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU;AAC9B,IAAA,CAAC,SAAS,CAAC,KAAK,GAAG,eAAe;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,QAAQ,GAAG,YAAY;AAClC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,aAAa;AACpC,IAAA,CAAC,SAAS,CAAC,SAAS,GAAG,eAAe;AACtC,IAAA,CAAC,SAAS,CAAC,UAAU,GAAG,cAAc;AACtC,IAAA,CAAC,SAAS,CAAC,cAAc,GAAG,WAAW;AACvC,IAAA,CAAC,SAAS,CAAC,OAAO,GAAG,mBAAmB;;AAExC,IAAA,CAAC,SAAS,CAAC,MAAM,GAAG,sBAAsB;;MAG/B,yBAAyB,GAAG,IAAI,GAAG,CAAqB,CAAC,SAAS,CAAC,SAAS,EAAE,SAAS,CAAC,OAAO,EAAE,SAAS,CAAC,MAAM,CAAC;AAElH,MAAA,iBAAiB,GAAG,CAC/B,QAAW,KAC+C;AAC1D,IAAA,MAAM,cAAc,GAAG,YAAY,CAAC,QAAQ,CAAC;IAC7C,IAAI,CAAC,cAAc,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,QAAQ,CAAA,CAAE,CAAC;;AAG1D,IAAA,OAAO,cAAc;AACvB;;;;"}
package/dist/esm/run.mjs CHANGED
@@ -129,7 +129,7 @@ class Run {
129
129
  const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;
130
130
  const model = this.Graph?.getNewModel({
131
131
  clientOptions,
132
- omitOriginalOptions: ['streaming', 'thinking', 'maxTokens', 'maxOutputTokens'],
132
+ omitOriginalOptions: ['streaming', 'stream', 'thinking', 'maxTokens', 'maxOutputTokens', 'additionalModelRequestFields'],
133
133
  });
134
134
  if (!model) {
135
135
  return { language: '', title: '' };
@@ -1 +1 @@
1
- {"version":3,"file":"run.mjs","sources":["../../src/run.ts"],"sourcesContent":["// src/run.ts\nimport { PromptTemplate } from '@langchain/core/prompts';\nimport { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type * as t from '@/types';\nimport { GraphEvents, Providers, Callback } from '@/common';\nimport { manualToolStreamProviders } from '@/llm/providers';\nimport { createTitleRunnable } from '@/utils/title';\nimport { StandardGraph } from '@/graphs/Graph';\nimport { HandlerRegistry } from '@/events';\nimport { isOpenAILike } from '@/utils/llm';\n\nexport class Run<T extends t.BaseGraphState> {\n graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;\n // private collab!: CollabGraph;\n // private taskManager!: TaskManager;\n private handlerRegistry: HandlerRegistry;\n id: string;\n Graph: StandardGraph | undefined;\n provider: Providers | undefined;\n returnContent: boolean = false;\n\n private constructor(config: Partial<t.RunConfig>) {\n const runId = config.runId ?? '';\n if (!runId) {\n throw new Error('Run ID not provided');\n }\n\n this.id = runId;\n\n const handlerRegistry = new HandlerRegistry();\n\n if (config.customHandlers) {\n for (const [eventType, handler] of Object.entries(config.customHandlers)) {\n handlerRegistry.register(eventType, handler);\n }\n }\n\n this.handlerRegistry = handlerRegistry;\n\n if (!config.graphConfig) {\n throw new Error('Graph config not provided');\n }\n\n if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {\n this.provider = config.graphConfig.llmConfig.provider;\n this.graphRunnable = this.createStandardGraph(config.graphConfig) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;\n if (this.Graph) {\n this.Graph.handlerRegistry = handlerRegistry;\n }\n }\n\n this.returnContent = config.returnContent ?? false;\n }\n\n private createStandardGraph(config: t.StandardGraphConfig): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {\n const { llmConfig, tools = [], ...graphInput } = config;\n const { provider, ...clientOptions } = llmConfig;\n\n const standardGraph = new StandardGraph({\n tools,\n provider,\n clientOptions,\n ...graphInput,\n runId: this.id,\n });\n this.Graph = standardGraph;\n return standardGraph.createWorkflow();\n }\n\n static async create<T extends t.BaseGraphState>(config: t.RunConfig): Promise<Run<T>> {\n return new Run<T>(config);\n }\n\n getRunMessages(): BaseMessage[] | undefined {\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n return this.Graph.getRunMessages();\n }\n\n async processStream(\n inputs: t.IState,\n config: Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string },\n streamOptions?: t.EventStreamOptions,\n ): Promise<MessageContentComplex[] | undefined> {\n if (!this.graphRunnable) {\n throw new Error('Run not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n\n this.Graph.resetValues(streamOptions?.keepContent);\n const provider = this.Graph.provider;\n const hasTools = this.Graph.tools ? 
this.Graph.tools.length > 0 : false;\n if (streamOptions?.callbacks) {\n /* TODO: conflicts with callback manager */\n const callbacks = config.callbacks as t.ProvidedCallbacks ?? [];\n config.callbacks = callbacks.concat(this.getCallbacks(streamOptions.callbacks));\n }\n\n if (!this.id) {\n throw new Error('Run ID not provided');\n }\n\n config.run_id = this.id;\n config.configurable = Object.assign(config.configurable ?? {}, { run_id: this.id, provider: this.provider });\n\n const stream = this.graphRunnable.streamEvents(inputs, config);\n\n for await (const event of stream) {\n const { data, name, metadata, ...info } = event;\n\n let eventName: t.EventName = info.event;\n if (hasTools && manualToolStreamProviders.has(provider) && eventName === GraphEvents.CHAT_MODEL_STREAM) {\n /* Skipping CHAT_MODEL_STREAM event due to double-call edge case */\n continue;\n }\n\n if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {\n eventName = name;\n }\n\n const handler = this.handlerRegistry.getHandler(eventName);\n if (handler) {\n handler.handle(eventName, data, metadata, this.Graph);\n }\n }\n\n if (this.returnContent) {\n return this.Graph.getContentParts();\n }\n }\n\n private createSystemCallback<K extends keyof ClientCallbacks>(\n clientCallbacks: ClientCallbacks,\n key: K\n ): SystemCallbacks[K] {\n return ((...args: unknown[]) => {\n const clientCallback = clientCallbacks[key];\n if (clientCallback && this.Graph) {\n (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);\n }\n }) as SystemCallbacks[K];\n }\n\n getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {\n return {\n [Callback.TOOL_ERROR]: this.createSystemCallback(clientCallbacks, Callback.TOOL_ERROR),\n [Callback.TOOL_START]: this.createSystemCallback(clientCallbacks, Callback.TOOL_START),\n [Callback.TOOL_END]: this.createSystemCallback(clientCallbacks, Callback.TOOL_END),\n };\n }\n\n async generateTitle({\n inputText,\n contentParts,\n titlePrompt,\n clientOptions,\n chainOptions,\n skipLanguage,\n } : {\n inputText: string;\n contentParts: (t.MessageContentComplex | undefined)[];\n titlePrompt?: string;\n skipLanguage?: boolean;\n clientOptions?: t.ClientOptions;\n chainOptions?: Partial<RunnableConfig> | undefined;\n }): Promise<{ language: string; title: string }> {\n const convoTemplate = PromptTemplate.fromTemplate('User: {input}\\nAI: {output}');\n const response = contentParts.map((part) => {\n if (part?.type === 'text') return part.text;\n return '';\n }).join('\\n');\n const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;\n const model = this.Graph?.getNewModel({\n clientOptions,\n omitOriginalOptions: ['streaming', 'thinking', 'maxTokens', 'maxOutputTokens'],\n });\n if (!model) {\n return { language: '', title: '' };\n }\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)?.temperature as number;\n model.topP = (clientOptions as t.OpenAIClientOptions | undefined)?.topP as number;\n model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.frequencyPenalty as number;\n model.presencePenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.presencePenalty as number;\n model.n = (clientOptions as t.OpenAIClientOptions | undefined)?.n as number;\n }\n const chain = await createTitleRunnable(model, titlePrompt);\n return await chain.invoke({ convo, inputText, skipLanguage }, chainOptions) as { 
language: string; title: string };\n }\n}\n"],"names":[],"mappings":";;;;;;;;;AAAA;MAca,GAAG,CAAA;AACd,IAAA,aAAa;;;AAGL,IAAA,eAAe;AACvB,IAAA,EAAE;AACF,IAAA,KAAK;AACL,IAAA,QAAQ;IACR,aAAa,GAAY,KAAK;AAE9B,IAAA,WAAA,CAAoB,MAA4B,EAAA;AAC9C,QAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAChC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,IAAI,CAAC,EAAE,GAAG,KAAK;AAEf,QAAA,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE;AAE7C,QAAA,IAAI,MAAM,CAAC,cAAc,EAAE;AACzB,YAAA,KAAK,MAAM,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,EAAE;AACxE,gBAAA,eAAe,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC;;;AAIhD,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe;AAEtC,QAAA,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAG9C,QAAA,IAAI,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;YACtE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,QAAQ;YACrD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,WAAW,CAAyD;AACzH,YAAA,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,IAAI,CAAC,KAAK,CAAC,eAAe,GAAG,eAAe;;;QAIhD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK;;AAG5C,IAAA,mBAAmB,CAAC,MAA6B,EAAA;AACvD,QAAA,MAAM,EAAE,SAAS,EAAE,KAAK,GAAG,EAAE,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM;QACvD,MAAM,EAAE,QAAQ,EAAE,GAAG,aAAa,EAAE,GAAG,SAAS;AAEhD,QAAA,MAAM,aAAa,GAAG,IAAI,aAAa,CAAC;YACtC,KAAK;YACL,QAAQ;YACR,aAAa;AACb,YAAA,GAAG,UAAU;YACb,KAAK,EAAE,IAAI,CAAC,EAAE;AACf,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,KAAK,GAAG,aAAa;AAC1B,QAAA,OAAO,aAAa,CAAC,cAAc,EAAE;;AAGvC,IAAA,aAAa,MAAM,CAA6B,MAAmB,EAAA;AACjE,QAAA,OAAO,IAAI,GAAG,CAAI,MAAM,CAAC;;IAG3B,cAAc,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;AAEjG,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;;AAGpC,IAAA,MAAM,aAAa,CACjB,MAAgB,EAChB,MAA2E,EAC3E,aAAoC,EAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,4EAA4E,CAAC;;AAE/F,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;QAGjG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG,KAAK;AACvE,QAAA,IAAI,aAAa,EAAE,SAAS,EAAE;;AAE5B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,SAAgC,IAAI,EAAE;AAC/D,YAAA,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;;AAGjF,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;QACvB,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC;AAE5G,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,MAAM,EAAE,MAAM,CAAC;AAE9D,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;AAChC,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK;AAE/C,YAAA,IAAI,SAAS,GAAgB,IAAI,CAAC,KAAK;AACvC,YAAA,IAAI,QAAQ,IAAI,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,SAAS,KAAK,WAAW,CAAC,iBAAiB,EAAE;;gBAEtG;;YAGF,IAAI,SAAS,IAAI,SAAS,KAAK,WAAW,CAAC,eAAe,EAAE;gBAC1D,SAAS,GAAG,IAAI;;YAGlB,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,SAAS,CAAC;YAC1D,IAAI,OAAO,EAAE;AACX,gBAAA,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;;;AAIzD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;;;IAI/B,oBAAoB,CAC1B,eAAgC,EAChC,GAAM,EAAA;AAEN,QAAA,QAAQ,CAAC,GAAG,IAAe,KAAI;AAC7B,YAAA,MAAM,cAAc,GAAG,eAAe,CAAC,GAAG,CAAC;AAC3C,YAAA,IAAI,cAAc,IAAI,IAAI,CAAC,KAAK,EAAE;gBAC/B,cAA+C,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,IAAI,CAAC;;AAEzE,SAAC;;AAGH,IAAA,YAAY,CAAC,eAAgC,EAAA;QAC3C,OAAO;AACL,YAAA,CAAC,QAAQ,CAAC,UA
AU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAE,QAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAE,QAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAAC,QAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAE,QAAQ,CAAC,QAAQ,CAAC;SACnF;;AAGH,IAAA,MAAM,aAAa,CAAC,EAClB,SAAS,EACT,YAAY,EACZ,WAAW,EACX,aAAa,EACb,YAAY,EACZ,YAAY,GAQb,EAAA;QACC,MAAM,aAAa,GAAG,cAAc,CAAC,YAAY,CAAC,6BAA6B,CAAC;QAChF,MAAM,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;AACzC,YAAA,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM;gBAAE,OAAO,IAAI,CAAC,IAAI;AAC3C,YAAA,OAAO,EAAE;AACX,SAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;QACb,MAAM,KAAK,GAAG,CAAC,MAAM,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,EAAE,KAAK;AACxF,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC;YACpC,aAAa;YACb,mBAAmB,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,WAAW,EAAE,iBAAiB,CAAC;AAC/E,SAAA,CAAC;QACF,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;;AAEpC,QAAA,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,eAAe,CAAC,EAAE;AACpG,YAAA,KAAK,CAAC,WAAW,GAAI,aAAmD,EAAE,WAAqB;AAC/F,YAAA,KAAK,CAAC,IAAI,GAAI,aAAmD,EAAE,IAAc;AACjF,YAAA,KAAK,CAAC,gBAAgB,GAAI,aAAmD,EAAE,gBAA0B;AACzG,YAAA,KAAK,CAAC,eAAe,GAAI,aAAmD,EAAE,eAAyB;AACvG,YAAA,KAAK,CAAC,CAAC,GAAI,aAAmD,EAAE,CAAW;;QAE7E,MAAM,KAAK,GAAG,MAAM,mBAAmB,CAAC,KAAK,EAAE,WAAW,CAAC;AAC3D,QAAA,OAAO,MAAM,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,YAAY,CAAwC;;AAErH;;;;"}
1
+ {"version":3,"file":"run.mjs","sources":["../../src/run.ts"],"sourcesContent":["// src/run.ts\nimport { PromptTemplate } from '@langchain/core/prompts';\nimport { AzureChatOpenAI, ChatOpenAI } from '@langchain/openai';\nimport type { BaseMessage, MessageContentComplex } from '@langchain/core/messages';\nimport type { ClientCallbacks, SystemCallbacks } from '@/graphs/Graph';\nimport type { RunnableConfig } from '@langchain/core/runnables';\nimport type * as t from '@/types';\nimport { GraphEvents, Providers, Callback } from '@/common';\nimport { manualToolStreamProviders } from '@/llm/providers';\nimport { createTitleRunnable } from '@/utils/title';\nimport { StandardGraph } from '@/graphs/Graph';\nimport { HandlerRegistry } from '@/events';\nimport { isOpenAILike } from '@/utils/llm';\n\nexport class Run<T extends t.BaseGraphState> {\n graphRunnable?: t.CompiledWorkflow<T, Partial<T>, string>;\n // private collab!: CollabGraph;\n // private taskManager!: TaskManager;\n private handlerRegistry: HandlerRegistry;\n id: string;\n Graph: StandardGraph | undefined;\n provider: Providers | undefined;\n returnContent: boolean = false;\n\n private constructor(config: Partial<t.RunConfig>) {\n const runId = config.runId ?? '';\n if (!runId) {\n throw new Error('Run ID not provided');\n }\n\n this.id = runId;\n\n const handlerRegistry = new HandlerRegistry();\n\n if (config.customHandlers) {\n for (const [eventType, handler] of Object.entries(config.customHandlers)) {\n handlerRegistry.register(eventType, handler);\n }\n }\n\n this.handlerRegistry = handlerRegistry;\n\n if (!config.graphConfig) {\n throw new Error('Graph config not provided');\n }\n\n if (config.graphConfig.type === 'standard' || !config.graphConfig.type) {\n this.provider = config.graphConfig.llmConfig.provider;\n this.graphRunnable = this.createStandardGraph(config.graphConfig) as unknown as t.CompiledWorkflow<T, Partial<T>, string>;\n if (this.Graph) {\n this.Graph.handlerRegistry = handlerRegistry;\n }\n }\n\n this.returnContent = config.returnContent ?? false;\n }\n\n private createStandardGraph(config: t.StandardGraphConfig): t.CompiledWorkflow<t.IState, Partial<t.IState>, string> {\n const { llmConfig, tools = [], ...graphInput } = config;\n const { provider, ...clientOptions } = llmConfig;\n\n const standardGraph = new StandardGraph({\n tools,\n provider,\n clientOptions,\n ...graphInput,\n runId: this.id,\n });\n this.Graph = standardGraph;\n return standardGraph.createWorkflow();\n }\n\n static async create<T extends t.BaseGraphState>(config: t.RunConfig): Promise<Run<T>> {\n return new Run<T>(config);\n }\n\n getRunMessages(): BaseMessage[] | undefined {\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n return this.Graph.getRunMessages();\n }\n\n async processStream(\n inputs: t.IState,\n config: Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string },\n streamOptions?: t.EventStreamOptions,\n ): Promise<MessageContentComplex[] | undefined> {\n if (!this.graphRunnable) {\n throw new Error('Run not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n if (!this.Graph) {\n throw new Error('Graph not initialized. Make sure to use Run.create() to instantiate the Run.');\n }\n\n this.Graph.resetValues(streamOptions?.keepContent);\n const provider = this.Graph.provider;\n const hasTools = this.Graph.tools ? 
this.Graph.tools.length > 0 : false;\n if (streamOptions?.callbacks) {\n /* TODO: conflicts with callback manager */\n const callbacks = config.callbacks as t.ProvidedCallbacks ?? [];\n config.callbacks = callbacks.concat(this.getCallbacks(streamOptions.callbacks));\n }\n\n if (!this.id) {\n throw new Error('Run ID not provided');\n }\n\n config.run_id = this.id;\n config.configurable = Object.assign(config.configurable ?? {}, { run_id: this.id, provider: this.provider });\n\n const stream = this.graphRunnable.streamEvents(inputs, config);\n\n for await (const event of stream) {\n const { data, name, metadata, ...info } = event;\n\n let eventName: t.EventName = info.event;\n if (hasTools && manualToolStreamProviders.has(provider) && eventName === GraphEvents.CHAT_MODEL_STREAM) {\n /* Skipping CHAT_MODEL_STREAM event due to double-call edge case */\n continue;\n }\n\n if (eventName && eventName === GraphEvents.ON_CUSTOM_EVENT) {\n eventName = name;\n }\n\n const handler = this.handlerRegistry.getHandler(eventName);\n if (handler) {\n handler.handle(eventName, data, metadata, this.Graph);\n }\n }\n\n if (this.returnContent) {\n return this.Graph.getContentParts();\n }\n }\n\n private createSystemCallback<K extends keyof ClientCallbacks>(\n clientCallbacks: ClientCallbacks,\n key: K\n ): SystemCallbacks[K] {\n return ((...args: unknown[]) => {\n const clientCallback = clientCallbacks[key];\n if (clientCallback && this.Graph) {\n (clientCallback as (...args: unknown[]) => void)(this.Graph, ...args);\n }\n }) as SystemCallbacks[K];\n }\n\n getCallbacks(clientCallbacks: ClientCallbacks): SystemCallbacks {\n return {\n [Callback.TOOL_ERROR]: this.createSystemCallback(clientCallbacks, Callback.TOOL_ERROR),\n [Callback.TOOL_START]: this.createSystemCallback(clientCallbacks, Callback.TOOL_START),\n [Callback.TOOL_END]: this.createSystemCallback(clientCallbacks, Callback.TOOL_END),\n };\n }\n\n async generateTitle({\n inputText,\n contentParts,\n titlePrompt,\n clientOptions,\n chainOptions,\n skipLanguage,\n } : {\n inputText: string;\n contentParts: (t.MessageContentComplex | undefined)[];\n titlePrompt?: string;\n skipLanguage?: boolean;\n clientOptions?: t.ClientOptions;\n chainOptions?: Partial<RunnableConfig> | undefined;\n }): Promise<{ language: string; title: string }> {\n const convoTemplate = PromptTemplate.fromTemplate('User: {input}\\nAI: {output}');\n const response = contentParts.map((part) => {\n if (part?.type === 'text') return part.text;\n return '';\n }).join('\\n');\n const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;\n const model = this.Graph?.getNewModel({\n clientOptions,\n omitOriginalOptions: ['streaming', 'stream', 'thinking', 'maxTokens', 'maxOutputTokens', 'additionalModelRequestFields'],\n });\n if (!model) {\n return { language: '', title: '' };\n }\n if (isOpenAILike(this.provider) && (model instanceof ChatOpenAI || model instanceof AzureChatOpenAI)) {\n model.temperature = (clientOptions as t.OpenAIClientOptions | undefined)?.temperature as number;\n model.topP = (clientOptions as t.OpenAIClientOptions | undefined)?.topP as number;\n model.frequencyPenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.frequencyPenalty as number;\n model.presencePenalty = (clientOptions as t.OpenAIClientOptions | undefined)?.presencePenalty as number;\n model.n = (clientOptions as t.OpenAIClientOptions | undefined)?.n as number;\n }\n const chain = await createTitleRunnable(model, titlePrompt);\n return await chain.invoke({ convo, 
inputText, skipLanguage }, chainOptions) as { language: string; title: string };\n }\n}\n"],"names":[],"mappings":";;;;;;;;;AAAA;MAca,GAAG,CAAA;AACd,IAAA,aAAa;;;AAGL,IAAA,eAAe;AACvB,IAAA,EAAE;AACF,IAAA,KAAK;AACL,IAAA,QAAQ;IACR,aAAa,GAAY,KAAK;AAE9B,IAAA,WAAA,CAAoB,MAA4B,EAAA;AAC9C,QAAA,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,IAAI,EAAE;QAChC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,IAAI,CAAC,EAAE,GAAG,KAAK;AAEf,QAAA,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE;AAE7C,QAAA,IAAI,MAAM,CAAC,cAAc,EAAE;AACzB,YAAA,KAAK,MAAM,CAAC,SAAS,EAAE,OAAO,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,EAAE;AACxE,gBAAA,eAAe,CAAC,QAAQ,CAAC,SAAS,EAAE,OAAO,CAAC;;;AAIhD,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe;AAEtC,QAAA,IAAI,CAAC,MAAM,CAAC,WAAW,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,2BAA2B,CAAC;;AAG9C,QAAA,IAAI,MAAM,CAAC,WAAW,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE;YACtE,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,QAAQ;YACrD,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,mBAAmB,CAAC,MAAM,CAAC,WAAW,CAAyD;AACzH,YAAA,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,IAAI,CAAC,KAAK,CAAC,eAAe,GAAG,eAAe;;;QAIhD,IAAI,CAAC,aAAa,GAAG,MAAM,CAAC,aAAa,IAAI,KAAK;;AAG5C,IAAA,mBAAmB,CAAC,MAA6B,EAAA;AACvD,QAAA,MAAM,EAAE,SAAS,EAAE,KAAK,GAAG,EAAE,EAAE,GAAG,UAAU,EAAE,GAAG,MAAM;QACvD,MAAM,EAAE,QAAQ,EAAE,GAAG,aAAa,EAAE,GAAG,SAAS;AAEhD,QAAA,MAAM,aAAa,GAAG,IAAI,aAAa,CAAC;YACtC,KAAK;YACL,QAAQ;YACR,aAAa;AACb,YAAA,GAAG,UAAU;YACb,KAAK,EAAE,IAAI,CAAC,EAAE;AACf,SAAA,CAAC;AACF,QAAA,IAAI,CAAC,KAAK,GAAG,aAAa;AAC1B,QAAA,OAAO,aAAa,CAAC,cAAc,EAAE;;AAGvC,IAAA,aAAa,MAAM,CAA6B,MAAmB,EAAA;AACjE,QAAA,OAAO,IAAI,GAAG,CAAI,MAAM,CAAC;;IAG3B,cAAc,GAAA;AACZ,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;AAEjG,QAAA,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE;;AAGpC,IAAA,MAAM,aAAa,CACjB,MAAgB,EAChB,MAA2E,EAC3E,aAAoC,EAAA;AAEpC,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,4EAA4E,CAAC;;AAE/F,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,YAAA,MAAM,IAAI,KAAK,CAAC,8EAA8E,CAAC;;QAGjG,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC;AAClD,QAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,QAAQ;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,GAAG,KAAK;AACvE,QAAA,IAAI,aAAa,EAAE,SAAS,EAAE;;AAE5B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,SAAgC,IAAI,EAAE;AAC/D,YAAA,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;;AAGjF,QAAA,IAAI,CAAC,IAAI,CAAC,EAAE,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,qBAAqB,CAAC;;AAGxC,QAAA,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,EAAE;QACvB,MAAM,CAAC,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,EAAE,CAAC;AAE5G,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,MAAM,EAAE,MAAM,CAAC;AAE9D,QAAA,WAAW,MAAM,KAAK,IAAI,MAAM,EAAE;AAChC,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,GAAG,IAAI,EAAE,GAAG,KAAK;AAE/C,YAAA,IAAI,SAAS,GAAgB,IAAI,CAAC,KAAK;AACvC,YAAA,IAAI,QAAQ,IAAI,yBAAyB,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,SAAS,KAAK,WAAW,CAAC,iBAAiB,EAAE;;gBAEtG;;YAGF,IAAI,SAAS,IAAI,SAAS,KAAK,WAAW,CAAC,eAAe,EAAE;gBAC1D,SAAS,GAAG,IAAI;;YAGlB,MAAM,OAAO,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,SAAS,CAAC;YAC1D,IAAI,OAAO,EAAE;AACX,gBAAA,OAAO,CAAC,MAAM,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC;;;AAIzD,QAAA,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,EAAE;;;IAI/B,oBAAoB,CAC1B,eAAgC,EAChC,GAAM,EAAA;AAEN,QAAA,QAAQ,CAAC,GAAG,IAAe,KAAI;AAC7B,YAAA,MAAM,cAAc,GAAG,eAAe,CAAC,GAAG,CAAC;AAC3C,YAAA,IAAI,cAAc,IAAI,IAAI,CAAC,KAAK,EAAE;gBAC/B,cAA+C,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,IAAI,CAAC;;AAEzE,SAAC;;AAGH,IAAA,YAAY,CAAC,eAA
gC,EAAA;QAC3C,OAAO;AACL,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAE,QAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAAC,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAE,QAAQ,CAAC,UAAU,CAAC;AACtF,YAAA,CAAC,QAAQ,CAAC,QAAQ,GAAG,IAAI,CAAC,oBAAoB,CAAC,eAAe,EAAE,QAAQ,CAAC,QAAQ,CAAC;SACnF;;AAGH,IAAA,MAAM,aAAa,CAAC,EAClB,SAAS,EACT,YAAY,EACZ,WAAW,EACX,aAAa,EACb,YAAY,EACZ,YAAY,GAQb,EAAA;QACC,MAAM,aAAa,GAAG,cAAc,CAAC,YAAY,CAAC,6BAA6B,CAAC;QAChF,MAAM,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;AACzC,YAAA,IAAI,IAAI,EAAE,IAAI,KAAK,MAAM;gBAAE,OAAO,IAAI,CAAC,IAAI;AAC3C,YAAA,OAAO,EAAE;AACX,SAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC;QACb,MAAM,KAAK,GAAG,CAAC,MAAM,aAAa,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,EAAE,KAAK;AACxF,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC;YACpC,aAAa;AACb,YAAA,mBAAmB,EAAE,CAAC,WAAW,EAAE,QAAQ,EAAE,UAAU,EAAE,WAAW,EAAE,iBAAiB,EAAE,8BAA8B,CAAC;AACzH,SAAA,CAAC;QACF,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,QAAQ,EAAE,EAAE,EAAE,KAAK,EAAE,EAAE,EAAE;;AAEpC,QAAA,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,KAAK,KAAK,YAAY,UAAU,IAAI,KAAK,YAAY,eAAe,CAAC,EAAE;AACpG,YAAA,KAAK,CAAC,WAAW,GAAI,aAAmD,EAAE,WAAqB;AAC/F,YAAA,KAAK,CAAC,IAAI,GAAI,aAAmD,EAAE,IAAc;AACjF,YAAA,KAAK,CAAC,gBAAgB,GAAI,aAAmD,EAAE,gBAA0B;AACzG,YAAA,KAAK,CAAC,eAAe,GAAI,aAAmD,EAAE,eAAyB;AACvG,YAAA,KAAK,CAAC,CAAC,GAAI,aAAmD,EAAE,CAAW;;QAE7E,MAAM,KAAK,GAAG,MAAM,mBAAmB,CAAC,KAAK,EAAE,WAAW,CAAC;AAC3D,QAAA,OAAO,MAAM,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,YAAY,CAAwC;;AAErH;;;;"}
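The only behavioral change in run.mjs is in generateTitle: the clientOptions used to build the throwaway title model are now also stripped of 'stream' and 'additionalModelRequestFields', in addition to the previously omitted streaming, thinking, and token-limit keys, presumably so streaming and reasoning settings from the main run do not leak into title generation. A self-contained sketch of the effect; the omit helper below is illustrative and is not the package's Graph.getNewModel implementation:

// Keys now dropped from clientOptions before the title model is constructed.
const omitOriginalOptions = [
  'streaming',
  'stream',
  'thinking',
  'maxTokens',
  'maxOutputTokens',
  'additionalModelRequestFields',
] as const;

// Illustrative helper mirroring what the omitOriginalOptions handling implies.
function omit<T extends Record<string, unknown>>(options: T, keys: readonly string[]): Partial<T> {
  return Object.fromEntries(
    Object.entries(options).filter(([key]) => !keys.includes(key)),
  ) as Partial<T>;
}

// Example client options for a reasoning-enabled Bedrock run (values are placeholders).
const clientOptions = {
  model: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
  streaming: true,
  additionalModelRequestFields: { thinking: { type: 'enabled', budget_tokens: 1024 } },
};

// => { model: '...' }; streaming and additionalModelRequestFields are removed for titling.
const titleOptions = omit(clientOptions, omitOriginalOptions);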
@@ -59,7 +59,8 @@ export declare enum Providers {
59
59
  GOOGLE = "google",
60
60
  AZURE = "azureOpenAI",
61
61
  DEEPSEEK = "deepseek",
62
- OPENROUTER = "openrouter"
62
+ OPENROUTER = "openrouter",
63
+ XAI = "xai"
63
64
  }
64
65
  export declare enum GraphNodeKeys {
65
66
  TOOLS = "tools",
@@ -1,3 +1,4 @@
1
+ import { ChatXAI } from '@langchain/xai';
1
2
  import { ChatOllama } from '@langchain/ollama';
2
3
  import { ChatDeepSeek } from '@langchain/deepseek';
3
4
  import { ChatAnthropic } from '@langchain/anthropic';
@@ -20,6 +21,7 @@ import type { AnthropicInput } from '@langchain/anthropic';
20
21
  import type { Runnable } from '@langchain/core/runnables';
21
22
  import type { ChatOllamaInput } from '@langchain/ollama';
22
23
  import type { OpenAI as OpenAIClient } from 'openai';
24
+ import type { ChatXAIInput } from '@langchain/xai';
23
25
  import type { ChatOpenRouterCallOptions } from '@/llm/openrouter/llm';
24
26
  import { ChatOpenRouter } from '@/llm/openrouter/llm';
25
27
  import { Providers } from '@/common';
@@ -42,7 +44,8 @@ export type BedrockClientOptions = BedrockChatFields;
42
44
  export type BedrockConverseClientOptions = ChatBedrockConverseInput;
43
45
  export type GoogleClientOptions = GoogleGenerativeAIChatInput;
44
46
  export type DeepSeekClientOptions = ChatDeepSeekCallOptions;
45
- export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions | DeepSeekClientOptions;
47
+ export type XAIClientOptions = ChatXAIInput;
48
+ export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions | DeepSeekClientOptions | XAIClientOptions;
46
49
  export type LLMConfig = {
47
50
  provider: Providers;
48
51
  } & ClientOptions;
@@ -58,6 +61,7 @@ export type ProviderOptionsMap = {
58
61
  [Providers.OPENROUTER]: ChatOpenRouterCallOptions;
59
62
  [Providers.BEDROCK_LEGACY]: BedrockClientOptions;
60
63
  [Providers.BEDROCK]: BedrockConverseClientOptions;
64
+ [Providers.XAI]: XAIClientOptions;
61
65
  };
62
66
  export type ChatModelMap = {
63
67
  [Providers.OPENAI]: ChatOpenAI;
@@ -71,6 +75,7 @@ export type ChatModelMap = {
71
75
  [Providers.BEDROCK_LEGACY]: BedrockChat;
72
76
  [Providers.BEDROCK]: ChatBedrockConverse;
73
77
  [Providers.GOOGLE]: ChatGoogleGenerativeAI;
78
+ [Providers.XAI]: ChatXAI;
74
79
  };
75
80
  export type ChatModelConstructorMap = {
76
81
  [P in Providers]: new (config: ProviderOptionsMap[P]) => ChatModelMap[P];
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@librechat/agents",
3
- "version": "2.1.8",
3
+ "version": "2.2.0",
4
4
  "main": "./dist/cjs/main.cjs",
5
5
  "module": "./dist/esm/main.mjs",
6
6
  "types": "./dist/types/index.d.ts",
@@ -83,6 +83,7 @@
83
83
  "@langchain/mistralai": "^0.0.26",
84
84
  "@langchain/ollama": "^0.1.5",
85
85
  "@langchain/openai": "^0.4.2",
86
+ "@langchain/xai": "^0.0.2",
86
87
  "@smithy/eventstream-codec": "^2.2.0",
87
88
  "@smithy/protocol-http": "^3.0.6",
88
89
  "@smithy/signature-v4": "^2.0.10",
@@ -80,6 +80,7 @@ export enum Providers {
80
80
  AZURE = 'azureOpenAI',
81
81
  DEEPSEEK = 'deepseek',
82
82
  OPENROUTER = 'openrouter',
83
+ XAI = 'xai',
83
84
  }
84
85
 
85
86
  export enum GraphNodeKeys {
@@ -1,4 +1,5 @@
1
1
  // src/llm/providers.ts
2
+ import { ChatXAI } from '@langchain/xai';
2
3
  import { ChatOllama } from '@langchain/ollama';
3
4
  import { ChatDeepSeek } from '@langchain/deepseek';
4
5
  import { ChatMistralAI } from '@langchain/mistralai';
@@ -14,6 +15,7 @@ import { ChatOpenRouter } from '@/llm/openrouter/llm';
14
15
  import { Providers } from '@/common';
15
16
 
16
17
  export const llmProviders: Partial<ChatModelConstructorMap> = {
18
+ [Providers.XAI]: ChatXAI,
17
19
  [Providers.OPENAI]: ChatOpenAI,
18
20
  [Providers.OLLAMA]: ChatOllama,
19
21
  [Providers.AZURE]: AzureChatOpenAI,
package/src/run.ts CHANGED
@@ -178,7 +178,7 @@ export class Run<T extends t.BaseGraphState> {
178
178
  const convo = (await convoTemplate.invoke({ input: inputText, output: response })).value;
179
179
  const model = this.Graph?.getNewModel({
180
180
  clientOptions,
181
- omitOriginalOptions: ['streaming', 'thinking', 'maxTokens', 'maxOutputTokens'],
181
+ omitOriginalOptions: ['streaming', 'stream', 'thinking', 'maxTokens', 'maxOutputTokens', 'additionalModelRequestFields'],
182
182
  });
183
183
  if (!model) {
184
184
  return { language: '', title: '' };
package/src/types/llm.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  // src/types/llm.ts
2
+ import { ChatXAI } from '@langchain/xai';
2
3
  import { ChatOllama } from '@langchain/ollama';
3
4
  import { ChatDeepSeek } from '@langchain/deepseek';
4
5
  import { ChatAnthropic } from '@langchain/anthropic';
@@ -21,6 +22,7 @@ import type { AnthropicInput } from '@langchain/anthropic';
21
22
  import type { Runnable } from '@langchain/core/runnables';
22
23
  import type { ChatOllamaInput } from '@langchain/ollama';
23
24
  import type { OpenAI as OpenAIClient } from 'openai';
25
+ import type { ChatXAIInput } from '@langchain/xai';
24
26
  import type { ChatOpenRouterCallOptions } from '@/llm/openrouter/llm';
25
27
  import { ChatOpenRouter } from '@/llm/openrouter/llm';
26
28
  import { Providers } from '@/common';
@@ -46,8 +48,20 @@ export type BedrockClientOptions = BedrockChatFields;
46
48
  export type BedrockConverseClientOptions = ChatBedrockConverseInput;
47
49
  export type GoogleClientOptions = GoogleGenerativeAIChatInput;
48
50
  export type DeepSeekClientOptions = ChatDeepSeekCallOptions;
51
+ export type XAIClientOptions = ChatXAIInput;
49
52
 
50
- export type ClientOptions = OpenAIClientOptions | AzureClientOptions | OllamaClientOptions | AnthropicClientOptions | MistralAIClientOptions | VertexAIClientOptions | BedrockClientOptions | BedrockConverseClientOptions | GoogleClientOptions | DeepSeekClientOptions;
53
+ export type ClientOptions =
54
+ | OpenAIClientOptions
55
+ | AzureClientOptions
56
+ | OllamaClientOptions
57
+ | AnthropicClientOptions
58
+ | MistralAIClientOptions
59
+ | VertexAIClientOptions
60
+ | BedrockClientOptions
61
+ | BedrockConverseClientOptions
62
+ | GoogleClientOptions
63
+ | DeepSeekClientOptions
64
+ | XAIClientOptions;
51
65
 
52
66
  export type LLMConfig = {
53
67
  provider: Providers;
@@ -65,6 +79,7 @@ export type ProviderOptionsMap = {
65
79
  [Providers.OPENROUTER]: ChatOpenRouterCallOptions;
66
80
  [Providers.BEDROCK_LEGACY]: BedrockClientOptions;
67
81
  [Providers.BEDROCK]: BedrockConverseClientOptions;
82
+ [Providers.XAI]: XAIClientOptions;
68
83
  };
69
84
 
70
85
  export type ChatModelMap = {
@@ -79,6 +94,7 @@ export type ChatModelMap = {
79
94
  [Providers.BEDROCK_LEGACY]: BedrockChat;
80
95
  [Providers.BEDROCK]: ChatBedrockConverse;
81
96
  [Providers.GOOGLE]: ChatGoogleGenerativeAI;
97
+ [Providers.XAI]: ChatXAI;
82
98
  };
83
99
 
84
100
  export type ChatModelConstructorMap = {
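Because ClientOptions now includes XAIClientOptions (an alias for ChatXAIInput), an LLMConfig for the new provider type-checks with xAI-specific fields. A small illustrative sketch, assuming the package re-exports Providers and the LLMConfig type from its entry point; the field values are placeholders:

import { Providers } from '@librechat/agents';
import type { LLMConfig } from '@librechat/agents';

// Accepted now that XAIClientOptions is part of the ClientOptions union.
const xaiConfig: LLMConfig = {
  provider: Providers.XAI,
  model: 'grok-2-latest', // ChatXAIInput field
  temperature: 0.2,       // ChatXAIInput field
};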
@@ -12,6 +12,12 @@ export const llmConfigs: Record<string, t.LLMConfig | undefined> = {
12
12
  streamUsage: true,
13
13
  // disableStreaming: true,
14
14
  },
15
+ [Providers.XAI]: {
16
+ provider: Providers.XAI,
17
+ model: 'grok-2-latest',
18
+ streaming: true,
19
+ streamUsage: true,
20
+ },
15
21
  alibaba: {
16
22
  provider: Providers.OPENAI,
17
23
  streaming: true,
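Taken together, these changes make the xai provider selectable end to end: the Providers.XAI enum value, the ChatXAI constructor mapping, the XAIClientOptions type, the @langchain/xai dependency, and the example llmConfigs entry above. A rough usage sketch under the assumption that Run and Providers are exported from the package entry point; the run ID, input message shape, and graph options are simplified placeholders, and event handlers are omitted:

import { HumanMessage } from '@langchain/core/messages';
// Assumes Run and Providers are exported from the package entry point.
import { Run, Providers } from '@librechat/agents';

const run = await Run.create({
  runId: 'xai-demo-run',
  returnContent: true, // processStream then resolves with the aggregated content parts
  graphConfig: {
    type: 'standard',
    llmConfig: {
      // Mirrors the new llmConfigs example entry above.
      provider: Providers.XAI,
      model: 'grok-2-latest',
      streaming: true,
      streamUsage: true,
    },
  },
});

// The input shape ({ messages: [...] }) and the 'v2' event-stream version are assumptions
// based on the Run.processStream signature shown earlier in this diff.
const contentParts = await run.processStream(
  { messages: [new HumanMessage('Hello from the new xai provider')] },
  { version: 'v2' },
);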