illuma-agents 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/cjs/common/enum.cjs +163 -0
- package/dist/cjs/common/enum.cjs.map +1 -0
- package/dist/cjs/events.cjs +143 -0
- package/dist/cjs/events.cjs.map +1 -0
- package/dist/cjs/graphs/Graph.cjs +581 -0
- package/dist/cjs/graphs/Graph.cjs.map +1 -0
- package/dist/cjs/instrumentation.cjs +21 -0
- package/dist/cjs/instrumentation.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/index.cjs +292 -0
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/types.cjs +50 -0
- package/dist/cjs/llm/anthropic/types.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +553 -0
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +218 -0
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/tools.cjs +29 -0
- package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -0
- package/dist/cjs/llm/fake.cjs +97 -0
- package/dist/cjs/llm/fake.cjs.map +1 -0
- package/dist/cjs/llm/google/index.cjs +147 -0
- package/dist/cjs/llm/google/index.cjs.map +1 -0
- package/dist/cjs/llm/google/utils/common.cjs +490 -0
- package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
- package/dist/cjs/llm/ollama/index.cjs +70 -0
- package/dist/cjs/llm/ollama/index.cjs.map +1 -0
- package/dist/cjs/llm/ollama/utils.cjs +158 -0
- package/dist/cjs/llm/ollama/utils.cjs.map +1 -0
- package/dist/cjs/llm/openai/index.cjs +613 -0
- package/dist/cjs/llm/openai/index.cjs.map +1 -0
- package/dist/cjs/llm/openai/utils/index.cjs +677 -0
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -0
- package/dist/cjs/llm/openrouter/index.cjs +29 -0
- package/dist/cjs/llm/openrouter/index.cjs.map +1 -0
- package/dist/cjs/llm/providers.cjs +47 -0
- package/dist/cjs/llm/providers.cjs.map +1 -0
- package/dist/cjs/llm/text.cjs +69 -0
- package/dist/cjs/llm/text.cjs.map +1 -0
- package/dist/cjs/llm/vertexai/index.cjs +330 -0
- package/dist/cjs/llm/vertexai/index.cjs.map +1 -0
- package/dist/cjs/main.cjs +127 -0
- package/dist/cjs/main.cjs.map +1 -0
- package/dist/cjs/messages/core.cjs +359 -0
- package/dist/cjs/messages/core.cjs.map +1 -0
- package/dist/cjs/messages/format.cjs +455 -0
- package/dist/cjs/messages/format.cjs.map +1 -0
- package/dist/cjs/messages/ids.cjs +23 -0
- package/dist/cjs/messages/ids.cjs.map +1 -0
- package/dist/cjs/messages/prune.cjs +398 -0
- package/dist/cjs/messages/prune.cjs.map +1 -0
- package/dist/cjs/run.cjs +264 -0
- package/dist/cjs/run.cjs.map +1 -0
- package/dist/cjs/splitStream.cjs +210 -0
- package/dist/cjs/splitStream.cjs.map +1 -0
- package/dist/cjs/stream.cjs +504 -0
- package/dist/cjs/stream.cjs.map +1 -0
- package/dist/cjs/tools/CodeExecutor.cjs +192 -0
- package/dist/cjs/tools/CodeExecutor.cjs.map +1 -0
- package/dist/cjs/tools/ToolNode.cjs +125 -0
- package/dist/cjs/tools/ToolNode.cjs.map +1 -0
- package/dist/cjs/tools/handlers.cjs +250 -0
- package/dist/cjs/tools/handlers.cjs.map +1 -0
- package/dist/cjs/tools/search/anthropic.cjs +40 -0
- package/dist/cjs/tools/search/anthropic.cjs.map +1 -0
- package/dist/cjs/tools/search/content.cjs +140 -0
- package/dist/cjs/tools/search/content.cjs.map +1 -0
- package/dist/cjs/tools/search/firecrawl.cjs +179 -0
- package/dist/cjs/tools/search/firecrawl.cjs.map +1 -0
- package/dist/cjs/tools/search/format.cjs +203 -0
- package/dist/cjs/tools/search/format.cjs.map +1 -0
- package/dist/cjs/tools/search/highlights.cjs +245 -0
- package/dist/cjs/tools/search/highlights.cjs.map +1 -0
- package/dist/cjs/tools/search/rerankers.cjs +174 -0
- package/dist/cjs/tools/search/rerankers.cjs.map +1 -0
- package/dist/cjs/tools/search/schema.cjs +70 -0
- package/dist/cjs/tools/search/schema.cjs.map +1 -0
- package/dist/cjs/tools/search/search.cjs +561 -0
- package/dist/cjs/tools/search/search.cjs.map +1 -0
- package/dist/cjs/tools/search/serper-scraper.cjs +132 -0
- package/dist/cjs/tools/search/serper-scraper.cjs.map +1 -0
- package/dist/cjs/tools/search/tool.cjs +331 -0
- package/dist/cjs/tools/search/tool.cjs.map +1 -0
- package/dist/cjs/tools/search/utils.cjs +66 -0
- package/dist/cjs/tools/search/utils.cjs.map +1 -0
- package/dist/cjs/utils/graph.cjs +16 -0
- package/dist/cjs/utils/graph.cjs.map +1 -0
- package/dist/cjs/utils/llm.cjs +28 -0
- package/dist/cjs/utils/llm.cjs.map +1 -0
- package/dist/cjs/utils/misc.cjs +56 -0
- package/dist/cjs/utils/misc.cjs.map +1 -0
- package/dist/cjs/utils/run.cjs +69 -0
- package/dist/cjs/utils/run.cjs.map +1 -0
- package/dist/cjs/utils/title.cjs +111 -0
- package/dist/cjs/utils/title.cjs.map +1 -0
- package/dist/cjs/utils/tokens.cjs +65 -0
- package/dist/cjs/utils/tokens.cjs.map +1 -0
- package/dist/esm/common/enum.mjs +163 -0
- package/dist/esm/common/enum.mjs.map +1 -0
- package/dist/esm/events.mjs +135 -0
- package/dist/esm/events.mjs.map +1 -0
- package/dist/esm/graphs/Graph.mjs +578 -0
- package/dist/esm/graphs/Graph.mjs.map +1 -0
- package/dist/esm/instrumentation.mjs +19 -0
- package/dist/esm/instrumentation.mjs.map +1 -0
- package/dist/esm/llm/anthropic/index.mjs +290 -0
- package/dist/esm/llm/anthropic/index.mjs.map +1 -0
- package/dist/esm/llm/anthropic/types.mjs +48 -0
- package/dist/esm/llm/anthropic/types.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +550 -0
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs +216 -0
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/tools.mjs +27 -0
- package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -0
- package/dist/esm/llm/fake.mjs +94 -0
- package/dist/esm/llm/fake.mjs.map +1 -0
- package/dist/esm/llm/google/index.mjs +145 -0
- package/dist/esm/llm/google/index.mjs.map +1 -0
- package/dist/esm/llm/google/utils/common.mjs +484 -0
- package/dist/esm/llm/google/utils/common.mjs.map +1 -0
- package/dist/esm/llm/ollama/index.mjs +68 -0
- package/dist/esm/llm/ollama/index.mjs.map +1 -0
- package/dist/esm/llm/ollama/utils.mjs +155 -0
- package/dist/esm/llm/ollama/utils.mjs.map +1 -0
- package/dist/esm/llm/openai/index.mjs +604 -0
- package/dist/esm/llm/openai/index.mjs.map +1 -0
- package/dist/esm/llm/openai/utils/index.mjs +671 -0
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -0
- package/dist/esm/llm/openrouter/index.mjs +27 -0
- package/dist/esm/llm/openrouter/index.mjs.map +1 -0
- package/dist/esm/llm/providers.mjs +43 -0
- package/dist/esm/llm/providers.mjs.map +1 -0
- package/dist/esm/llm/text.mjs +67 -0
- package/dist/esm/llm/text.mjs.map +1 -0
- package/dist/esm/llm/vertexai/index.mjs +328 -0
- package/dist/esm/llm/vertexai/index.mjs.map +1 -0
- package/dist/esm/main.mjs +20 -0
- package/dist/esm/main.mjs.map +1 -0
- package/dist/esm/messages/core.mjs +351 -0
- package/dist/esm/messages/core.mjs.map +1 -0
- package/dist/esm/messages/format.mjs +447 -0
- package/dist/esm/messages/format.mjs.map +1 -0
- package/dist/esm/messages/ids.mjs +21 -0
- package/dist/esm/messages/ids.mjs.map +1 -0
- package/dist/esm/messages/prune.mjs +393 -0
- package/dist/esm/messages/prune.mjs.map +1 -0
- package/dist/esm/run.mjs +261 -0
- package/dist/esm/run.mjs.map +1 -0
- package/dist/esm/splitStream.mjs +207 -0
- package/dist/esm/splitStream.mjs.map +1 -0
- package/dist/esm/stream.mjs +500 -0
- package/dist/esm/stream.mjs.map +1 -0
- package/dist/esm/tools/CodeExecutor.mjs +188 -0
- package/dist/esm/tools/CodeExecutor.mjs.map +1 -0
- package/dist/esm/tools/ToolNode.mjs +122 -0
- package/dist/esm/tools/ToolNode.mjs.map +1 -0
- package/dist/esm/tools/handlers.mjs +245 -0
- package/dist/esm/tools/handlers.mjs.map +1 -0
- package/dist/esm/tools/search/anthropic.mjs +37 -0
- package/dist/esm/tools/search/anthropic.mjs.map +1 -0
- package/dist/esm/tools/search/content.mjs +119 -0
- package/dist/esm/tools/search/content.mjs.map +1 -0
- package/dist/esm/tools/search/firecrawl.mjs +176 -0
- package/dist/esm/tools/search/firecrawl.mjs.map +1 -0
- package/dist/esm/tools/search/format.mjs +201 -0
- package/dist/esm/tools/search/format.mjs.map +1 -0
- package/dist/esm/tools/search/highlights.mjs +243 -0
- package/dist/esm/tools/search/highlights.mjs.map +1 -0
- package/dist/esm/tools/search/rerankers.mjs +168 -0
- package/dist/esm/tools/search/rerankers.mjs.map +1 -0
- package/dist/esm/tools/search/schema.mjs +61 -0
- package/dist/esm/tools/search/schema.mjs.map +1 -0
- package/dist/esm/tools/search/search.mjs +558 -0
- package/dist/esm/tools/search/search.mjs.map +1 -0
- package/dist/esm/tools/search/serper-scraper.mjs +129 -0
- package/dist/esm/tools/search/serper-scraper.mjs.map +1 -0
- package/dist/esm/tools/search/tool.mjs +329 -0
- package/dist/esm/tools/search/tool.mjs.map +1 -0
- package/dist/esm/tools/search/utils.mjs +61 -0
- package/dist/esm/tools/search/utils.mjs.map +1 -0
- package/dist/esm/utils/graph.mjs +13 -0
- package/dist/esm/utils/graph.mjs.map +1 -0
- package/dist/esm/utils/llm.mjs +25 -0
- package/dist/esm/utils/llm.mjs.map +1 -0
- package/dist/esm/utils/misc.mjs +53 -0
- package/dist/esm/utils/misc.mjs.map +1 -0
- package/dist/esm/utils/run.mjs +66 -0
- package/dist/esm/utils/run.mjs.map +1 -0
- package/dist/esm/utils/title.mjs +108 -0
- package/dist/esm/utils/title.mjs.map +1 -0
- package/dist/esm/utils/tokens.mjs +62 -0
- package/dist/esm/utils/tokens.mjs.map +1 -0
- package/dist/types/common/enum.d.ts +128 -0
- package/dist/types/common/index.d.ts +1 -0
- package/dist/types/events.d.ts +29 -0
- package/dist/types/graphs/Graph.d.ts +122 -0
- package/dist/types/graphs/index.d.ts +1 -0
- package/dist/types/index.d.ts +13 -0
- package/dist/types/instrumentation.d.ts +1 -0
- package/dist/types/llm/anthropic/index.d.ts +39 -0
- package/dist/types/llm/anthropic/types.d.ts +37 -0
- package/dist/types/llm/anthropic/utils/message_inputs.d.ts +14 -0
- package/dist/types/llm/anthropic/utils/message_outputs.d.ts +14 -0
- package/dist/types/llm/anthropic/utils/output_parsers.d.ts +22 -0
- package/dist/types/llm/anthropic/utils/tools.d.ts +3 -0
- package/dist/types/llm/fake.d.ts +31 -0
- package/dist/types/llm/google/index.d.ts +14 -0
- package/dist/types/llm/google/types.d.ts +32 -0
- package/dist/types/llm/google/utils/common.d.ts +19 -0
- package/dist/types/llm/google/utils/tools.d.ts +10 -0
- package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
- package/dist/types/llm/ollama/index.d.ts +8 -0
- package/dist/types/llm/ollama/utils.d.ts +7 -0
- package/dist/types/llm/openai/index.d.ts +103 -0
- package/dist/types/llm/openai/types.d.ts +10 -0
- package/dist/types/llm/openai/utils/index.d.ts +20 -0
- package/dist/types/llm/openrouter/index.d.ts +12 -0
- package/dist/types/llm/providers.d.ts +5 -0
- package/dist/types/llm/text.d.ts +21 -0
- package/dist/types/llm/vertexai/index.d.ts +293 -0
- package/dist/types/messages/core.d.ts +14 -0
- package/dist/types/messages/format.d.ts +113 -0
- package/dist/types/messages/ids.d.ts +3 -0
- package/dist/types/messages/index.d.ts +4 -0
- package/dist/types/messages/prune.d.ts +51 -0
- package/dist/types/mockStream.d.ts +32 -0
- package/dist/types/prompts/collab.d.ts +1 -0
- package/dist/types/prompts/index.d.ts +2 -0
- package/dist/types/prompts/taskmanager.d.ts +41 -0
- package/dist/types/run.d.ts +30 -0
- package/dist/types/scripts/abort.d.ts +1 -0
- package/dist/types/scripts/ant_web_search.d.ts +1 -0
- package/dist/types/scripts/args.d.ts +7 -0
- package/dist/types/scripts/caching.d.ts +1 -0
- package/dist/types/scripts/cli.d.ts +1 -0
- package/dist/types/scripts/cli2.d.ts +1 -0
- package/dist/types/scripts/cli3.d.ts +1 -0
- package/dist/types/scripts/cli4.d.ts +1 -0
- package/dist/types/scripts/cli5.d.ts +1 -0
- package/dist/types/scripts/code_exec.d.ts +1 -0
- package/dist/types/scripts/code_exec_files.d.ts +1 -0
- package/dist/types/scripts/code_exec_simple.d.ts +1 -0
- package/dist/types/scripts/content.d.ts +1 -0
- package/dist/types/scripts/empty_input.d.ts +1 -0
- package/dist/types/scripts/image.d.ts +1 -0
- package/dist/types/scripts/memory.d.ts +1 -0
- package/dist/types/scripts/search.d.ts +1 -0
- package/dist/types/scripts/simple.d.ts +1 -0
- package/dist/types/scripts/stream.d.ts +1 -0
- package/dist/types/scripts/thinking.d.ts +1 -0
- package/dist/types/scripts/tools.d.ts +1 -0
- package/dist/types/specs/spec.utils.d.ts +1 -0
- package/dist/types/splitStream.d.ts +37 -0
- package/dist/types/stream.d.ts +14 -0
- package/dist/types/tools/CodeExecutor.d.ts +23 -0
- package/dist/types/tools/ToolNode.d.ts +22 -0
- package/dist/types/tools/example.d.ts +78 -0
- package/dist/types/tools/handlers.d.ts +19 -0
- package/dist/types/tools/search/anthropic.d.ts +16 -0
- package/dist/types/tools/search/content.d.ts +4 -0
- package/dist/types/tools/search/firecrawl.d.ts +54 -0
- package/dist/types/tools/search/format.d.ts +5 -0
- package/dist/types/tools/search/highlights.d.ts +13 -0
- package/dist/types/tools/search/index.d.ts +2 -0
- package/dist/types/tools/search/rerankers.d.ts +38 -0
- package/dist/types/tools/search/schema.d.ts +16 -0
- package/dist/types/tools/search/search.d.ts +8 -0
- package/dist/types/tools/search/serper-scraper.d.ts +59 -0
- package/dist/types/tools/search/test.d.ts +1 -0
- package/dist/types/tools/search/tool.d.ts +54 -0
- package/dist/types/tools/search/types.d.ts +591 -0
- package/dist/types/tools/search/utils.d.ts +10 -0
- package/dist/types/types/graph.d.ts +138 -0
- package/dist/types/types/index.d.ts +5 -0
- package/dist/types/types/llm.d.ts +102 -0
- package/dist/types/types/run.d.ts +74 -0
- package/dist/types/types/stream.d.ts +293 -0
- package/dist/types/types/tools.d.ts +61 -0
- package/dist/types/utils/graph.d.ts +2 -0
- package/dist/types/utils/index.d.ts +5 -0
- package/dist/types/utils/llm.d.ts +3 -0
- package/dist/types/utils/llmConfig.d.ts +3 -0
- package/dist/types/utils/logging.d.ts +1 -0
- package/dist/types/utils/misc.d.ts +7 -0
- package/dist/types/utils/run.d.ts +27 -0
- package/dist/types/utils/title.d.ts +4 -0
- package/dist/types/utils/tokens.d.ts +3 -0
- package/package.json +145 -0
- package/src/common/enum.ts +176 -0
- package/src/common/index.ts +2 -0
- package/src/events.ts +191 -0
- package/src/graphs/Graph.ts +846 -0
- package/src/graphs/index.ts +1 -0
- package/src/index.ts +24 -0
- package/src/instrumentation.ts +22 -0
- package/src/llm/anthropic/Jacob_Lee_Resume_2023.pdf +0 -0
- package/src/llm/anthropic/index.ts +413 -0
- package/src/llm/anthropic/llm.spec.ts +1442 -0
- package/src/llm/anthropic/types.ts +140 -0
- package/src/llm/anthropic/utils/message_inputs.ts +660 -0
- package/src/llm/anthropic/utils/message_outputs.ts +289 -0
- package/src/llm/anthropic/utils/output_parsers.ts +133 -0
- package/src/llm/anthropic/utils/tools.ts +29 -0
- package/src/llm/fake.ts +133 -0
- package/src/llm/google/index.ts +222 -0
- package/src/llm/google/types.ts +43 -0
- package/src/llm/google/utils/common.ts +660 -0
- package/src/llm/google/utils/tools.ts +160 -0
- package/src/llm/google/utils/zod_to_genai_parameters.ts +88 -0
- package/src/llm/ollama/index.ts +92 -0
- package/src/llm/ollama/utils.ts +193 -0
- package/src/llm/openai/index.ts +853 -0
- package/src/llm/openai/types.ts +24 -0
- package/src/llm/openai/utils/index.ts +918 -0
- package/src/llm/openai/utils/isReasoningModel.test.ts +90 -0
- package/src/llm/openrouter/index.ts +60 -0
- package/src/llm/providers.ts +57 -0
- package/src/llm/text.ts +94 -0
- package/src/llm/vertexai/index.ts +360 -0
- package/src/messages/core.ts +463 -0
- package/src/messages/format.ts +625 -0
- package/src/messages/formatAgentMessages.test.ts +917 -0
- package/src/messages/formatAgentMessages.tools.test.ts +400 -0
- package/src/messages/formatMessage.test.ts +693 -0
- package/src/messages/ids.ts +26 -0
- package/src/messages/index.ts +4 -0
- package/src/messages/prune.ts +567 -0
- package/src/messages/shiftIndexTokenCountMap.test.ts +81 -0
- package/src/mockStream.ts +99 -0
- package/src/prompts/collab.ts +6 -0
- package/src/prompts/index.ts +2 -0
- package/src/prompts/taskmanager.ts +61 -0
- package/src/proto/CollabGraph.ts +269 -0
- package/src/proto/TaskManager.ts +243 -0
- package/src/proto/collab.ts +200 -0
- package/src/proto/collab_design.ts +184 -0
- package/src/proto/collab_design_v2.ts +224 -0
- package/src/proto/collab_design_v3.ts +255 -0
- package/src/proto/collab_design_v4.ts +220 -0
- package/src/proto/collab_design_v5.ts +251 -0
- package/src/proto/collab_graph.ts +181 -0
- package/src/proto/collab_original.ts +123 -0
- package/src/proto/example.ts +93 -0
- package/src/proto/example_new.ts +68 -0
- package/src/proto/example_old.ts +201 -0
- package/src/proto/example_test.ts +152 -0
- package/src/proto/example_test_anthropic.ts +100 -0
- package/src/proto/log_stream.ts +202 -0
- package/src/proto/main_collab_community_event.ts +133 -0
- package/src/proto/main_collab_design_v2.ts +96 -0
- package/src/proto/main_collab_design_v4.ts +100 -0
- package/src/proto/main_collab_design_v5.ts +135 -0
- package/src/proto/main_collab_global_analysis.ts +122 -0
- package/src/proto/main_collab_hackathon_event.ts +153 -0
- package/src/proto/main_collab_space_mission.ts +153 -0
- package/src/proto/main_philosophy.ts +210 -0
- package/src/proto/original_script.ts +126 -0
- package/src/proto/standard.ts +100 -0
- package/src/proto/stream.ts +56 -0
- package/src/proto/tasks.ts +118 -0
- package/src/proto/tools/global_analysis_tools.ts +86 -0
- package/src/proto/tools/space_mission_tools.ts +60 -0
- package/src/proto/vertexai.ts +54 -0
- package/src/run.ts +381 -0
- package/src/scripts/abort.ts +138 -0
- package/src/scripts/ant_web_search.ts +158 -0
- package/src/scripts/args.ts +48 -0
- package/src/scripts/caching.ts +124 -0
- package/src/scripts/cli.ts +167 -0
- package/src/scripts/cli2.ts +125 -0
- package/src/scripts/cli3.ts +178 -0
- package/src/scripts/cli4.ts +184 -0
- package/src/scripts/cli5.ts +184 -0
- package/src/scripts/code_exec.ts +214 -0
- package/src/scripts/code_exec_files.ts +193 -0
- package/src/scripts/code_exec_simple.ts +129 -0
- package/src/scripts/content.ts +120 -0
- package/src/scripts/empty_input.ts +137 -0
- package/src/scripts/image.ts +178 -0
- package/src/scripts/memory.ts +97 -0
- package/src/scripts/search.ts +150 -0
- package/src/scripts/simple.ts +225 -0
- package/src/scripts/stream.ts +122 -0
- package/src/scripts/thinking.ts +150 -0
- package/src/scripts/tools.ts +155 -0
- package/src/specs/anthropic.simple.test.ts +317 -0
- package/src/specs/azure.simple.test.ts +316 -0
- package/src/specs/openai.simple.test.ts +316 -0
- package/src/specs/prune.test.ts +763 -0
- package/src/specs/reasoning.test.ts +165 -0
- package/src/specs/spec.utils.ts +3 -0
- package/src/specs/thinking-prune.test.ts +703 -0
- package/src/specs/token-distribution-edge-case.test.ts +316 -0
- package/src/specs/tool-error.test.ts +193 -0
- package/src/splitStream.test.ts +691 -0
- package/src/splitStream.ts +234 -0
- package/src/stream.test.ts +94 -0
- package/src/stream.ts +651 -0
- package/src/tools/CodeExecutor.ts +220 -0
- package/src/tools/ToolNode.ts +170 -0
- package/src/tools/example.ts +129 -0
- package/src/tools/handlers.ts +336 -0
- package/src/tools/search/anthropic.ts +51 -0
- package/src/tools/search/content.test.ts +173 -0
- package/src/tools/search/content.ts +147 -0
- package/src/tools/search/firecrawl.ts +210 -0
- package/src/tools/search/format.ts +250 -0
- package/src/tools/search/highlights.ts +320 -0
- package/src/tools/search/index.ts +2 -0
- package/src/tools/search/jina-reranker.test.ts +126 -0
- package/src/tools/search/output.md +2775 -0
- package/src/tools/search/rerankers.ts +242 -0
- package/src/tools/search/schema.ts +63 -0
- package/src/tools/search/search.ts +759 -0
- package/src/tools/search/serper-scraper.ts +155 -0
- package/src/tools/search/test.html +884 -0
- package/src/tools/search/test.md +643 -0
- package/src/tools/search/test.ts +159 -0
- package/src/tools/search/tool.ts +471 -0
- package/src/tools/search/types.ts +687 -0
- package/src/tools/search/utils.ts +79 -0
- package/src/types/graph.ts +185 -0
- package/src/types/index.ts +6 -0
- package/src/types/llm.ts +140 -0
- package/src/types/run.ts +89 -0
- package/src/types/stream.ts +400 -0
- package/src/types/tools.ts +80 -0
- package/src/utils/graph.ts +11 -0
- package/src/utils/index.ts +5 -0
- package/src/utils/llm.ts +27 -0
- package/src/utils/llmConfig.ts +183 -0
- package/src/utils/logging.ts +48 -0
- package/src/utils/misc.ts +57 -0
- package/src/utils/run.ts +101 -0
- package/src/utils/title.ts +165 -0
- package/src/utils/tokens.ts +70 -0
package/src/llm/openai/utils/isReasoningModel.test.ts
ADDED
@@ -0,0 +1,90 @@
+import { isReasoningModel } from './index';
+
+describe('isReasoningModel', () => {
+  describe('should return true for reasoning models', () => {
+    test('basic o-series models', () => {
+      expect(isReasoningModel('o1')).toBe(true);
+      expect(isReasoningModel('o2')).toBe(true);
+      expect(isReasoningModel('o9')).toBe(true);
+      expect(isReasoningModel('o1-preview')).toBe(true);
+      expect(isReasoningModel('o1-mini')).toBe(true);
+    });
+
+    test('gpt-5+ models', () => {
+      expect(isReasoningModel('gpt-5')).toBe(true);
+      expect(isReasoningModel('gpt-6')).toBe(true);
+      expect(isReasoningModel('gpt-7')).toBe(true);
+      expect(isReasoningModel('gpt-8')).toBe(true);
+      expect(isReasoningModel('gpt-9')).toBe(true);
+    });
+
+    test('with provider prefixes', () => {
+      expect(isReasoningModel('azure/o1')).toBe(true);
+      expect(isReasoningModel('azure/gpt-5')).toBe(true);
+      expect(isReasoningModel('openai/o1')).toBe(true);
+      expect(isReasoningModel('openai/gpt-5')).toBe(true);
+    });
+
+    test('with custom prefixes', () => {
+      expect(isReasoningModel('custom-provider/o1')).toBe(true);
+      expect(isReasoningModel('my-deployment/gpt-5')).toBe(true);
+      expect(isReasoningModel('company/azure/gpt-5')).toBe(true);
+    });
+
+    test('case insensitive', () => {
+      expect(isReasoningModel('O1')).toBe(true);
+      expect(isReasoningModel('GPT-5')).toBe(true);
+      expect(isReasoningModel('gPt-6')).toBe(true);
+      expect(isReasoningModel('Azure/O1')).toBe(true);
+    });
+  });
+
+  describe('should return false for non-reasoning models', () => {
+    test('older GPT models', () => {
+      expect(isReasoningModel('gpt-3.5-turbo')).toBe(false);
+      expect(isReasoningModel('gpt-4')).toBe(false);
+      expect(isReasoningModel('gpt-4-turbo')).toBe(false);
+      expect(isReasoningModel('gpt-4o')).toBe(false);
+      expect(isReasoningModel('gpt-4o-mini')).toBe(false);
+    });
+
+    test('other model families', () => {
+      expect(isReasoningModel('claude-3')).toBe(false);
+      expect(isReasoningModel('claude-3-opus')).toBe(false);
+      expect(isReasoningModel('llama-2')).toBe(false);
+      expect(isReasoningModel('gemini-pro')).toBe(false);
+    });
+
+    test('partial matches that should not match', () => {
+      expect(isReasoningModel('proto1')).toBe(false);
+      expect(isReasoningModel('version-o1')).toBe(true);
+      expect(isReasoningModel('gpt-40')).toBe(false);
+      expect(isReasoningModel('gpt-3.5')).toBe(false);
+    });
+
+    test('empty, null, and undefined', () => {
+      expect(isReasoningModel('')).toBe(false);
+      expect(isReasoningModel()).toBe(false);
+      expect(isReasoningModel(undefined)).toBe(false);
+    });
+  });
+
+  describe('edge cases', () => {
+    test('with special characters', () => {
+      expect(isReasoningModel('deployment_o1_model')).toBe(false);
+      expect(isReasoningModel('gpt-5-deployment')).toBe(true);
+      expect(isReasoningModel('o1@latest')).toBe(true);
+      expect(isReasoningModel('gpt-5.0')).toBe(true);
+    });
+
+    test('word boundary behavior', () => {
+      // These should match because o1 and gpt-5 are whole words
+      expect(isReasoningModel('use-o1-model')).toBe(true);
+      expect(isReasoningModel('model-gpt-5-latest')).toBe(true);
+
+      // These should not match because o1/gpt-5 are not whole words
+      expect(isReasoningModel('proto1model')).toBe(false);
+      expect(isReasoningModel('supergpt-50')).toBe(false);
+    });
+  });
+});
package/src/llm/openrouter/index.ts
ADDED
@@ -0,0 +1,60 @@
+import { ChatOpenAI } from '@/llm/openai';
+import type {
+  FunctionMessageChunk,
+  SystemMessageChunk,
+  HumanMessageChunk,
+  ToolMessageChunk,
+  ChatMessageChunk,
+  AIMessageChunk,
+} from '@langchain/core/messages';
+import type {
+  ChatOpenAICallOptions,
+  OpenAIChatInput,
+  OpenAIClient,
+} from '@langchain/openai';
+
+export interface ChatOpenRouterCallOptions extends ChatOpenAICallOptions {
+  include_reasoning?: boolean;
+  modelKwargs?: OpenAIChatInput['modelKwargs'];
+}
+export class ChatOpenRouter extends ChatOpenAI {
+  constructor(_fields: Partial<ChatOpenRouterCallOptions>) {
+    const { include_reasoning, modelKwargs = {}, ...fields } = _fields;
+    super({
+      ...fields,
+      modelKwargs: {
+        ...modelKwargs,
+        include_reasoning,
+      },
+    });
+  }
+  static lc_name(): 'IllumaOpenRouter' {
+    return 'IllumaOpenRouter';
+  }
+  protected override _convertOpenAIDeltaToBaseMessageChunk(
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    delta: Record<string, any>,
+    rawResponse: OpenAIClient.ChatCompletionChunk,
+    defaultRole?:
+      | 'function'
+      | 'user'
+      | 'system'
+      | 'developer'
+      | 'assistant'
+      | 'tool'
+  ):
+    | AIMessageChunk
+    | HumanMessageChunk
+    | SystemMessageChunk
+    | FunctionMessageChunk
+    | ToolMessageChunk
+    | ChatMessageChunk {
+    const messageChunk = super._convertOpenAIDeltaToBaseMessageChunk(
+      delta,
+      rawResponse,
+      defaultRole
+    );
+    messageChunk.additional_kwargs.reasoning = delta.reasoning;
+    return messageChunk;
+  }
+}
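As a rough usage sketch (not part of the published diff), assuming the custom `ChatOpenAI` base accepts the standard `@langchain/openai` constructor fields (`model`, `apiKey`, `configuration`), and with an illustrative model id and base URL: the wrapper forwards `include_reasoning` through `modelKwargs` and surfaces reasoning deltas on each streamed chunk.

```typescript
import { ChatOpenRouter } from '@/llm/openrouter';

// Illustrative configuration: OpenRouter's OpenAI-compatible endpoint and an example model id.
const llm = new ChatOpenRouter({
  model: 'openai/gpt-4o-mini',
  apiKey: process.env.OPENROUTER_API_KEY,
  configuration: { baseURL: 'https://openrouter.ai/api/v1' },
  include_reasoning: true, // folded into modelKwargs by the constructor above
});

for await (const chunk of await llm.stream('Why is the sky blue?')) {
  // _convertOpenAIDeltaToBaseMessageChunk copies delta.reasoning onto each chunk
  console.log(chunk.additional_kwargs.reasoning, chunk.content);
}
```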
package/src/llm/providers.ts
ADDED
@@ -0,0 +1,57 @@
+// src/llm/providers.ts
+import { ChatMistralAI } from '@langchain/mistralai';
+import { ChatBedrockConverse } from '@langchain/aws';
+// import { ChatAnthropic } from '@langchain/anthropic';
+// import { ChatVertexAI } from '@langchain/google-vertexai';
+import { BedrockChat } from '@langchain/community/chat_models/bedrock/web';
+import type {
+  ChatModelConstructorMap,
+  ProviderOptionsMap,
+  ChatModelMap,
+} from '@/types';
+import {
+  AzureChatOpenAI,
+  ChatDeepSeek,
+  ChatOpenAI,
+  ChatXAI,
+} from '@/llm/openai';
+import { CustomChatGoogleGenerativeAI } from '@/llm/google';
+import { CustomAnthropic } from '@/llm/anthropic';
+import { ChatOpenRouter } from '@/llm/openrouter';
+import { ChatVertexAI } from '@/llm/vertexai';
+import { ChatOllama } from '@/llm/ollama';
+import { Providers } from '@/common';
+
+export const llmProviders: Partial<ChatModelConstructorMap> = {
+  [Providers.XAI]: ChatXAI,
+  [Providers.OPENAI]: ChatOpenAI,
+  [Providers.OLLAMA]: ChatOllama,
+  [Providers.AZURE]: AzureChatOpenAI,
+  [Providers.VERTEXAI]: ChatVertexAI,
+  [Providers.DEEPSEEK]: ChatDeepSeek,
+  [Providers.MISTRALAI]: ChatMistralAI,
+  [Providers.MISTRAL]: ChatMistralAI,
+  [Providers.ANTHROPIC]: CustomAnthropic,
+  [Providers.OPENROUTER]: ChatOpenRouter,
+  [Providers.BEDROCK_LEGACY]: BedrockChat,
+  [Providers.BEDROCK]: ChatBedrockConverse,
+  // [Providers.ANTHROPIC]: ChatAnthropic,
+  [Providers.GOOGLE]: CustomChatGoogleGenerativeAI,
+};
+
+export const manualToolStreamProviders = new Set<Providers | string>([
+  Providers.ANTHROPIC,
+  Providers.BEDROCK,
+  Providers.OLLAMA,
+]);
+
+export const getChatModelClass = <P extends Providers>(
+  provider: P
+): new (config: ProviderOptionsMap[P]) => ChatModelMap[P] => {
+  const ChatModelClass = llmProviders[provider];
+  if (!ChatModelClass) {
+    throw new Error(`Unsupported LLM provider: ${provider}`);
+  }
+
+  return ChatModelClass;
+};
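A short sketch of how this registry is meant to be consumed (not part of the published diff; the provider key and constructor options below are illustrative, and `ProviderOptionsMap` is assumed to narrow the accepted options per provider):

```typescript
import { getChatModelClass, manualToolStreamProviders } from '@/llm/providers';
import { Providers } from '@/common';

// Resolve the constructor for a provider; getChatModelClass throws for providers
// that are not registered in llmProviders.
const ChatModelClass = getChatModelClass(Providers.OPENAI);

// The config type is narrowed per provider through ProviderOptionsMap.
const chatModel = new ChatModelClass({ model: 'gpt-4o-mini', temperature: 0 });

// Providers in this set require manual handling of streamed tool calls.
const manual = manualToolStreamProviders.has(Providers.OPENAI); // false for OpenAI
```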
package/src/llm/text.ts
ADDED
@@ -0,0 +1,94 @@
+export interface TextStreamOptions {
+  minChunkSize?: number;
+  maxChunkSize?: number;
+  delay?: number;
+  firstWordChunk?: boolean;
+}
+
+export type ProgressCallback = (chunk: string) => void;
+export type PostChunkCallback = (chunk: string) => void;
+
+export class TextStream {
+  private text: string;
+  private currentIndex: number;
+  private minChunkSize: number;
+  private maxChunkSize: number;
+  private delay: number;
+  private firstWordChunk: boolean;
+
+  constructor(text: string, options: TextStreamOptions = {}) {
+    this.text = text;
+    this.currentIndex = 0;
+    this.minChunkSize = options.minChunkSize ?? 4;
+    this.maxChunkSize = options.maxChunkSize ?? 8;
+    this.delay = options.delay ?? 20;
+    this.firstWordChunk = options.firstWordChunk ?? true;
+  }
+
+  private randomInt(min: number, max: number): number {
+    return Math.floor(Math.random() * (max - min)) + min;
+  }
+
+  private static readonly BOUNDARIES = new Set([
+    ' ',
+    '.',
+    ',',
+    '!',
+    '?',
+    ';',
+    ':',
+  ]);
+
+  private findFirstWordBoundary(text: string, minSize: number): number {
+    if (minSize >= text.length) return text.length;
+
+    // Ensure we meet the minimum size first
+    let pos = minSize;
+
+    // Look forward until we find a boundary
+    while (pos < text.length) {
+      if (TextStream.BOUNDARIES.has(text[pos])) {
+        return pos + 1; // Include the boundary character
+      }
+      pos++;
+    }
+
+    return text.length; // If no boundary found, return entire remaining text
+  }
+
+  async *generateText(
+    signal?: AbortSignal,
+    progressCallback?: ProgressCallback
+  ): AsyncGenerator<string, void, unknown> {
+    const { delay, minChunkSize, maxChunkSize } = this;
+
+    while (this.currentIndex < this.text.length) {
+      if (signal?.aborted === true) {
+        break;
+      }
+      await new Promise((resolve) => setTimeout(resolve, delay));
+
+      const remainingText = this.text.slice(this.currentIndex);
+      let chunkSize: number;
+
+      if (this.firstWordChunk) {
+        chunkSize = this.findFirstWordBoundary(remainingText, minChunkSize);
+      } else {
+        const remainingChars = remainingText.length;
+        chunkSize = Math.min(
+          this.randomInt(minChunkSize, maxChunkSize + 1),
+          remainingChars
+        );
+      }
+
+      const chunk = this.text.slice(
+        this.currentIndex,
+        this.currentIndex + chunkSize
+      );
+      progressCallback?.(chunk);
+
+      yield chunk;
+      this.currentIndex += chunkSize;
+    }
+  }
+}
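A brief usage sketch for the `TextStream` class above (not part of the published diff; option values are arbitrary): with `firstWordChunk` enabled, each chunk is cut at the next boundary character after `minChunkSize`, the configured `delay` is awaited between chunks, and an `AbortSignal` ends the generator early.

```typescript
import { TextStream } from '@/llm/text';

const stream = new TextStream('Hello world, this is a chunked reply.', {
  minChunkSize: 4,
  maxChunkSize: 8,
  delay: 10,
  firstWordChunk: true,
});

const controller = new AbortController();
for await (const chunk of stream.generateText(controller.signal, (piece) => {
  // The progress callback sees each chunk just before it is yielded.
  process.stdout.write(piece);
})) {
  // Consume chunks here; calling controller.abort() stops the loop early.
}
```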
package/src/llm/vertexai/index.ts
ADDED
@@ -0,0 +1,360 @@
+import { ChatGoogle } from '@langchain/google-gauth';
+import { ChatConnection } from '@langchain/google-common';
+import type {
+  GeminiRequest,
+  GoogleAIModelRequestParams,
+  GoogleAbstractedClient,
+} from '@langchain/google-common';
+import type { BaseMessage } from '@langchain/core/messages';
+import type { VertexAIClientOptions } from '@/types';
+
+class CustomChatConnection extends ChatConnection<VertexAIClientOptions> {
+  async formatData(
+    input: BaseMessage[],
+    parameters: GoogleAIModelRequestParams
+  ): Promise<unknown> {
+    const formattedData = (await super.formatData(
+      input,
+      parameters
+    )) as GeminiRequest;
+    if (
+      formattedData.generationConfig?.thinkingConfig?.thinkingBudget === -1 &&
+      formattedData.generationConfig.thinkingConfig.includeThoughts === false
+    ) {
+      formattedData.generationConfig.thinkingConfig.includeThoughts = true;
+    }
+    return formattedData;
+  }
+}
+
+/**
+ * Integration with Google Vertex AI chat models.
+ *
+ * Setup:
+ * Install `@langchain/google-vertexai` and set your stringified
+ * Vertex AI credentials as an environment variable named `GOOGLE_APPLICATION_CREDENTIALS`.
+ *
+ * ```bash
+ * npm install @langchain/google-vertexai
+ * export GOOGLE_APPLICATION_CREDENTIALS="path/to/credentials"
+ * ```
+ *
+ * ## [Constructor args](https://api.js.langchain.com/classes/_langchain_google_vertexai.index.ChatVertexAI.html#constructor.new_ChatVertexAI)
+ *
+ * ## [Runtime args](https://api.js.langchain.com/interfaces/langchain_google_common_types.GoogleAIBaseLanguageModelCallOptions.html)
+ *
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc.
+ * They can also be passed via `.withConfig`, or the second arg in `.bindTools`, like shown in the examples below:
+ *
+ * ```typescript
+ * // When calling `.withConfig`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.withConfig({
+ *   stop: ["\n"],
+ *   tools: [...],
+ * });
+ *
+ * // When calling `.bindTools`, call options should be passed via the second argument
+ * const llmWithTools = llm.bindTools(
+ *   [...],
+ *   {
+ *     tool_choice: "auto",
+ *   }
+ * );
+ * ```
+ *
+ * ## Examples
+ *
+ * <details open>
+ * <summary><strong>Instantiate</strong></summary>
+ *
+ * ```typescript
+ * import { ChatVertexAI } from '@langchain/google-vertexai';
+ *
+ * const llm = new ChatVertexAI({
+ *   model: "gemini-1.5-pro",
+ *   temperature: 0,
+ *   // other params...
+ * });
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Invoking</strong></summary>
+ *
+ * ```typescript
+ * const input = `Translate "I love programming" into French.`;
+ *
+ * // Models also accept a list of chat messages or a formatted prompt
+ * const result = await llm.invoke(input);
+ * console.log(result);
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "\"J'adore programmer\" \n\nHere's why this is the best translation:\n\n* **J'adore** means \"I love\" and conveys a strong passion.\n* **Programmer** is the French verb for \"to program.\"\n\nThis translation is natural and idiomatic in French. \n",
+ *   "additional_kwargs": {},
+ *   "response_metadata": {},
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": [],
+ *   "usage_metadata": {
+ *     "input_tokens": 9,
+ *     "output_tokens": 63,
+ *     "total_tokens": 72
+ *   }
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Streaming Chunks</strong></summary>
+ *
+ * ```typescript
+ * for await (const chunk of await llm.stream(input)) {
+ *   console.log(chunk);
+ * }
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "\"",
+ *   "additional_kwargs": {},
+ *   "response_metadata": {},
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "J'adore programmer\" \n",
+ *   "additional_kwargs": {},
+ *   "response_metadata": {},
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "",
+ *   "additional_kwargs": {},
+ *   "response_metadata": {},
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": []
+ * }
+ * AIMessageChunk {
+ *   "content": "",
+ *   "additional_kwargs": {},
+ *   "response_metadata": {
+ *     "finishReason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": [],
+ *   "usage_metadata": {
+ *     "input_tokens": 9,
+ *     "output_tokens": 8,
+ *     "total_tokens": 17
+ *   }
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Aggregate Streamed Chunks</strong></summary>
+ *
+ * ```typescript
+ * import { AIMessageChunk } from '@langchain/core/messages';
+ * import { concat } from '@langchain/core/utils/stream';
+ *
+ * const stream = await llm.stream(input);
+ * let full: AIMessageChunk | undefined;
+ * for await (const chunk of stream) {
+ *   full = !full ? chunk : concat(full, chunk);
+ * }
+ * console.log(full);
+ * ```
+ *
+ * ```txt
+ * AIMessageChunk {
+ *   "content": "\"J'adore programmer\" \n",
+ *   "additional_kwargs": {},
+ *   "response_metadata": {
+ *     "finishReason": "stop"
+ *   },
+ *   "tool_calls": [],
+ *   "tool_call_chunks": [],
+ *   "invalid_tool_calls": [],
+ *   "usage_metadata": {
+ *     "input_tokens": 9,
+ *     "output_tokens": 8,
+ *     "total_tokens": 17
+ *   }
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Bind tools</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const GetWeather = {
+ *   name: "GetWeather",
+ *   description: "Get the current weather in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const GetPopulation = {
+ *   name: "GetPopulation",
+ *   description: "Get the current population in a given location",
+ *   schema: z.object({
+ *     location: z.string().describe("The city and state, e.g. San Francisco, CA")
+ *   }),
+ * }
+ *
+ * const llmWithTools = llm.bindTools([GetWeather, GetPopulation]);
+ * const aiMsg = await llmWithTools.invoke(
+ *   "Which city is hotter today and which is bigger: LA or NY?"
+ * );
+ * console.log(aiMsg.tool_calls);
+ * ```
+ *
+ * ```txt
+ * [
+ *   {
+ *     name: 'GetPopulation',
+ *     args: { location: 'New York City, NY' },
+ *     id: '33c1c1f47e2f492799c77d2800a43912',
+ *     type: 'tool_call'
+ *   }
+ * ]
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Structured Output</strong></summary>
+ *
+ * ```typescript
+ * import { z } from 'zod';
+ *
+ * const Joke = z.object({
+ *   setup: z.string().describe("The setup of the joke"),
+ *   punchline: z.string().describe("The punchline to the joke"),
+ *   rating: z.number().optional().describe("How funny the joke is, from 1 to 10")
+ * }).describe('Joke to tell user.');
+ *
+ * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
+ * console.log(jokeResult);
+ * ```
+ *
+ * ```txt
+ * {
+ *   setup: 'What do you call a cat that loves to bowl?',
+ *   punchline: 'An alley cat!'
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Usage Metadata</strong></summary>
+ *
+ * ```typescript
+ * const aiMsgForMetadata = await llm.invoke(input);
+ * console.log(aiMsgForMetadata.usage_metadata);
+ * ```
+ *
+ * ```txt
+ * { input_tokens: 9, output_tokens: 8, total_tokens: 17 }
+ * ```
+ * </details>
+ *
+ * <br />
+ *
+ * <details>
+ * <summary><strong>Stream Usage Metadata</strong></summary>
+ *
+ * ```typescript
+ * const streamForMetadata = await llm.stream(
+ *   input,
+ *   {
+ *     streamUsage: true
+ *   }
+ * );
+ * let fullForMetadata: AIMessageChunk | undefined;
+ * for await (const chunk of streamForMetadata) {
+ *   fullForMetadata = !fullForMetadata ? chunk : concat(fullForMetadata, chunk);
+ * }
+ * console.log(fullForMetadata?.usage_metadata);
+ * ```
+ *
+ * ```txt
+ * { input_tokens: 9, output_tokens: 8, total_tokens: 17 }
+ * ```
+ * </details>
+ *
+ * <br />
+ */
+export class ChatVertexAI extends ChatGoogle {
+  lc_namespace = ['langchain', 'chat_models', 'vertexai'];
+  dynamicThinkingBudget = false;
+
+  static lc_name(): 'IllumaVertexAI' {
+    return 'IllumaVertexAI';
+  }
+
+  constructor(fields?: VertexAIClientOptions) {
+    let dynamicThinkingBudget = false;
+    if (fields?.thinkingBudget === -1) {
+      dynamicThinkingBudget = true;
+      fields.thinkingBudget = 1;
+    }
+    super({
+      ...fields,
+      platformType: 'gcp',
+    });
+    this.dynamicThinkingBudget = dynamicThinkingBudget;
+  }
+  invocationParams(
+    options?: this['ParsedCallOptions'] | undefined
+  ): GoogleAIModelRequestParams {
+    const params = super.invocationParams(options);
+    if (this.dynamicThinkingBudget) {
+      params.maxReasoningTokens = -1;
+    }
+    return params;
+  }
+
+  buildConnection(
+    fields: VertexAIClientOptions,
+    client: GoogleAbstractedClient
+  ): void {
+    this.connection = new CustomChatConnection(
+      { ...fields, ...this },
+      this.caller,
+      client,
+      false
+    );
+
+    this.streamedConnection = new CustomChatConnection(
+      { ...fields, ...this },
+      this.caller,
+      client,
+      true
+    );
+  }
+}