illuma-agents 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/dist/cjs/common/enum.cjs +163 -0
- package/dist/cjs/common/enum.cjs.map +1 -0
- package/dist/cjs/events.cjs +143 -0
- package/dist/cjs/events.cjs.map +1 -0
- package/dist/cjs/graphs/Graph.cjs +581 -0
- package/dist/cjs/graphs/Graph.cjs.map +1 -0
- package/dist/cjs/instrumentation.cjs +21 -0
- package/dist/cjs/instrumentation.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/index.cjs +292 -0
- package/dist/cjs/llm/anthropic/index.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/types.cjs +50 -0
- package/dist/cjs/llm/anthropic/types.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs +553 -0
- package/dist/cjs/llm/anthropic/utils/message_inputs.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs +218 -0
- package/dist/cjs/llm/anthropic/utils/message_outputs.cjs.map +1 -0
- package/dist/cjs/llm/anthropic/utils/tools.cjs +29 -0
- package/dist/cjs/llm/anthropic/utils/tools.cjs.map +1 -0
- package/dist/cjs/llm/fake.cjs +97 -0
- package/dist/cjs/llm/fake.cjs.map +1 -0
- package/dist/cjs/llm/google/index.cjs +147 -0
- package/dist/cjs/llm/google/index.cjs.map +1 -0
- package/dist/cjs/llm/google/utils/common.cjs +490 -0
- package/dist/cjs/llm/google/utils/common.cjs.map +1 -0
- package/dist/cjs/llm/ollama/index.cjs +70 -0
- package/dist/cjs/llm/ollama/index.cjs.map +1 -0
- package/dist/cjs/llm/ollama/utils.cjs +158 -0
- package/dist/cjs/llm/ollama/utils.cjs.map +1 -0
- package/dist/cjs/llm/openai/index.cjs +613 -0
- package/dist/cjs/llm/openai/index.cjs.map +1 -0
- package/dist/cjs/llm/openai/utils/index.cjs +677 -0
- package/dist/cjs/llm/openai/utils/index.cjs.map +1 -0
- package/dist/cjs/llm/openrouter/index.cjs +29 -0
- package/dist/cjs/llm/openrouter/index.cjs.map +1 -0
- package/dist/cjs/llm/providers.cjs +47 -0
- package/dist/cjs/llm/providers.cjs.map +1 -0
- package/dist/cjs/llm/text.cjs +69 -0
- package/dist/cjs/llm/text.cjs.map +1 -0
- package/dist/cjs/llm/vertexai/index.cjs +330 -0
- package/dist/cjs/llm/vertexai/index.cjs.map +1 -0
- package/dist/cjs/main.cjs +127 -0
- package/dist/cjs/main.cjs.map +1 -0
- package/dist/cjs/messages/core.cjs +359 -0
- package/dist/cjs/messages/core.cjs.map +1 -0
- package/dist/cjs/messages/format.cjs +455 -0
- package/dist/cjs/messages/format.cjs.map +1 -0
- package/dist/cjs/messages/ids.cjs +23 -0
- package/dist/cjs/messages/ids.cjs.map +1 -0
- package/dist/cjs/messages/prune.cjs +398 -0
- package/dist/cjs/messages/prune.cjs.map +1 -0
- package/dist/cjs/run.cjs +264 -0
- package/dist/cjs/run.cjs.map +1 -0
- package/dist/cjs/splitStream.cjs +210 -0
- package/dist/cjs/splitStream.cjs.map +1 -0
- package/dist/cjs/stream.cjs +504 -0
- package/dist/cjs/stream.cjs.map +1 -0
- package/dist/cjs/tools/CodeExecutor.cjs +192 -0
- package/dist/cjs/tools/CodeExecutor.cjs.map +1 -0
- package/dist/cjs/tools/ToolNode.cjs +125 -0
- package/dist/cjs/tools/ToolNode.cjs.map +1 -0
- package/dist/cjs/tools/handlers.cjs +250 -0
- package/dist/cjs/tools/handlers.cjs.map +1 -0
- package/dist/cjs/tools/search/anthropic.cjs +40 -0
- package/dist/cjs/tools/search/anthropic.cjs.map +1 -0
- package/dist/cjs/tools/search/content.cjs +140 -0
- package/dist/cjs/tools/search/content.cjs.map +1 -0
- package/dist/cjs/tools/search/firecrawl.cjs +179 -0
- package/dist/cjs/tools/search/firecrawl.cjs.map +1 -0
- package/dist/cjs/tools/search/format.cjs +203 -0
- package/dist/cjs/tools/search/format.cjs.map +1 -0
- package/dist/cjs/tools/search/highlights.cjs +245 -0
- package/dist/cjs/tools/search/highlights.cjs.map +1 -0
- package/dist/cjs/tools/search/rerankers.cjs +174 -0
- package/dist/cjs/tools/search/rerankers.cjs.map +1 -0
- package/dist/cjs/tools/search/schema.cjs +70 -0
- package/dist/cjs/tools/search/schema.cjs.map +1 -0
- package/dist/cjs/tools/search/search.cjs +561 -0
- package/dist/cjs/tools/search/search.cjs.map +1 -0
- package/dist/cjs/tools/search/serper-scraper.cjs +132 -0
- package/dist/cjs/tools/search/serper-scraper.cjs.map +1 -0
- package/dist/cjs/tools/search/tool.cjs +331 -0
- package/dist/cjs/tools/search/tool.cjs.map +1 -0
- package/dist/cjs/tools/search/utils.cjs +66 -0
- package/dist/cjs/tools/search/utils.cjs.map +1 -0
- package/dist/cjs/utils/graph.cjs +16 -0
- package/dist/cjs/utils/graph.cjs.map +1 -0
- package/dist/cjs/utils/llm.cjs +28 -0
- package/dist/cjs/utils/llm.cjs.map +1 -0
- package/dist/cjs/utils/misc.cjs +56 -0
- package/dist/cjs/utils/misc.cjs.map +1 -0
- package/dist/cjs/utils/run.cjs +69 -0
- package/dist/cjs/utils/run.cjs.map +1 -0
- package/dist/cjs/utils/title.cjs +111 -0
- package/dist/cjs/utils/title.cjs.map +1 -0
- package/dist/cjs/utils/tokens.cjs +65 -0
- package/dist/cjs/utils/tokens.cjs.map +1 -0
- package/dist/esm/common/enum.mjs +163 -0
- package/dist/esm/common/enum.mjs.map +1 -0
- package/dist/esm/events.mjs +135 -0
- package/dist/esm/events.mjs.map +1 -0
- package/dist/esm/graphs/Graph.mjs +578 -0
- package/dist/esm/graphs/Graph.mjs.map +1 -0
- package/dist/esm/instrumentation.mjs +19 -0
- package/dist/esm/instrumentation.mjs.map +1 -0
- package/dist/esm/llm/anthropic/index.mjs +290 -0
- package/dist/esm/llm/anthropic/index.mjs.map +1 -0
- package/dist/esm/llm/anthropic/types.mjs +48 -0
- package/dist/esm/llm/anthropic/types.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs +550 -0
- package/dist/esm/llm/anthropic/utils/message_inputs.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs +216 -0
- package/dist/esm/llm/anthropic/utils/message_outputs.mjs.map +1 -0
- package/dist/esm/llm/anthropic/utils/tools.mjs +27 -0
- package/dist/esm/llm/anthropic/utils/tools.mjs.map +1 -0
- package/dist/esm/llm/fake.mjs +94 -0
- package/dist/esm/llm/fake.mjs.map +1 -0
- package/dist/esm/llm/google/index.mjs +145 -0
- package/dist/esm/llm/google/index.mjs.map +1 -0
- package/dist/esm/llm/google/utils/common.mjs +484 -0
- package/dist/esm/llm/google/utils/common.mjs.map +1 -0
- package/dist/esm/llm/ollama/index.mjs +68 -0
- package/dist/esm/llm/ollama/index.mjs.map +1 -0
- package/dist/esm/llm/ollama/utils.mjs +155 -0
- package/dist/esm/llm/ollama/utils.mjs.map +1 -0
- package/dist/esm/llm/openai/index.mjs +604 -0
- package/dist/esm/llm/openai/index.mjs.map +1 -0
- package/dist/esm/llm/openai/utils/index.mjs +671 -0
- package/dist/esm/llm/openai/utils/index.mjs.map +1 -0
- package/dist/esm/llm/openrouter/index.mjs +27 -0
- package/dist/esm/llm/openrouter/index.mjs.map +1 -0
- package/dist/esm/llm/providers.mjs +43 -0
- package/dist/esm/llm/providers.mjs.map +1 -0
- package/dist/esm/llm/text.mjs +67 -0
- package/dist/esm/llm/text.mjs.map +1 -0
- package/dist/esm/llm/vertexai/index.mjs +328 -0
- package/dist/esm/llm/vertexai/index.mjs.map +1 -0
- package/dist/esm/main.mjs +20 -0
- package/dist/esm/main.mjs.map +1 -0
- package/dist/esm/messages/core.mjs +351 -0
- package/dist/esm/messages/core.mjs.map +1 -0
- package/dist/esm/messages/format.mjs +447 -0
- package/dist/esm/messages/format.mjs.map +1 -0
- package/dist/esm/messages/ids.mjs +21 -0
- package/dist/esm/messages/ids.mjs.map +1 -0
- package/dist/esm/messages/prune.mjs +393 -0
- package/dist/esm/messages/prune.mjs.map +1 -0
- package/dist/esm/run.mjs +261 -0
- package/dist/esm/run.mjs.map +1 -0
- package/dist/esm/splitStream.mjs +207 -0
- package/dist/esm/splitStream.mjs.map +1 -0
- package/dist/esm/stream.mjs +500 -0
- package/dist/esm/stream.mjs.map +1 -0
- package/dist/esm/tools/CodeExecutor.mjs +188 -0
- package/dist/esm/tools/CodeExecutor.mjs.map +1 -0
- package/dist/esm/tools/ToolNode.mjs +122 -0
- package/dist/esm/tools/ToolNode.mjs.map +1 -0
- package/dist/esm/tools/handlers.mjs +245 -0
- package/dist/esm/tools/handlers.mjs.map +1 -0
- package/dist/esm/tools/search/anthropic.mjs +37 -0
- package/dist/esm/tools/search/anthropic.mjs.map +1 -0
- package/dist/esm/tools/search/content.mjs +119 -0
- package/dist/esm/tools/search/content.mjs.map +1 -0
- package/dist/esm/tools/search/firecrawl.mjs +176 -0
- package/dist/esm/tools/search/firecrawl.mjs.map +1 -0
- package/dist/esm/tools/search/format.mjs +201 -0
- package/dist/esm/tools/search/format.mjs.map +1 -0
- package/dist/esm/tools/search/highlights.mjs +243 -0
- package/dist/esm/tools/search/highlights.mjs.map +1 -0
- package/dist/esm/tools/search/rerankers.mjs +168 -0
- package/dist/esm/tools/search/rerankers.mjs.map +1 -0
- package/dist/esm/tools/search/schema.mjs +61 -0
- package/dist/esm/tools/search/schema.mjs.map +1 -0
- package/dist/esm/tools/search/search.mjs +558 -0
- package/dist/esm/tools/search/search.mjs.map +1 -0
- package/dist/esm/tools/search/serper-scraper.mjs +129 -0
- package/dist/esm/tools/search/serper-scraper.mjs.map +1 -0
- package/dist/esm/tools/search/tool.mjs +329 -0
- package/dist/esm/tools/search/tool.mjs.map +1 -0
- package/dist/esm/tools/search/utils.mjs +61 -0
- package/dist/esm/tools/search/utils.mjs.map +1 -0
- package/dist/esm/utils/graph.mjs +13 -0
- package/dist/esm/utils/graph.mjs.map +1 -0
- package/dist/esm/utils/llm.mjs +25 -0
- package/dist/esm/utils/llm.mjs.map +1 -0
- package/dist/esm/utils/misc.mjs +53 -0
- package/dist/esm/utils/misc.mjs.map +1 -0
- package/dist/esm/utils/run.mjs +66 -0
- package/dist/esm/utils/run.mjs.map +1 -0
- package/dist/esm/utils/title.mjs +108 -0
- package/dist/esm/utils/title.mjs.map +1 -0
- package/dist/esm/utils/tokens.mjs +62 -0
- package/dist/esm/utils/tokens.mjs.map +1 -0
- package/dist/types/common/enum.d.ts +128 -0
- package/dist/types/common/index.d.ts +1 -0
- package/dist/types/events.d.ts +29 -0
- package/dist/types/graphs/Graph.d.ts +122 -0
- package/dist/types/graphs/index.d.ts +1 -0
- package/dist/types/index.d.ts +13 -0
- package/dist/types/instrumentation.d.ts +1 -0
- package/dist/types/llm/anthropic/index.d.ts +39 -0
- package/dist/types/llm/anthropic/types.d.ts +37 -0
- package/dist/types/llm/anthropic/utils/message_inputs.d.ts +14 -0
- package/dist/types/llm/anthropic/utils/message_outputs.d.ts +14 -0
- package/dist/types/llm/anthropic/utils/output_parsers.d.ts +22 -0
- package/dist/types/llm/anthropic/utils/tools.d.ts +3 -0
- package/dist/types/llm/fake.d.ts +31 -0
- package/dist/types/llm/google/index.d.ts +14 -0
- package/dist/types/llm/google/types.d.ts +32 -0
- package/dist/types/llm/google/utils/common.d.ts +19 -0
- package/dist/types/llm/google/utils/tools.d.ts +10 -0
- package/dist/types/llm/google/utils/zod_to_genai_parameters.d.ts +14 -0
- package/dist/types/llm/ollama/index.d.ts +8 -0
- package/dist/types/llm/ollama/utils.d.ts +7 -0
- package/dist/types/llm/openai/index.d.ts +103 -0
- package/dist/types/llm/openai/types.d.ts +10 -0
- package/dist/types/llm/openai/utils/index.d.ts +20 -0
- package/dist/types/llm/openrouter/index.d.ts +12 -0
- package/dist/types/llm/providers.d.ts +5 -0
- package/dist/types/llm/text.d.ts +21 -0
- package/dist/types/llm/vertexai/index.d.ts +293 -0
- package/dist/types/messages/core.d.ts +14 -0
- package/dist/types/messages/format.d.ts +113 -0
- package/dist/types/messages/ids.d.ts +3 -0
- package/dist/types/messages/index.d.ts +4 -0
- package/dist/types/messages/prune.d.ts +51 -0
- package/dist/types/mockStream.d.ts +32 -0
- package/dist/types/prompts/collab.d.ts +1 -0
- package/dist/types/prompts/index.d.ts +2 -0
- package/dist/types/prompts/taskmanager.d.ts +41 -0
- package/dist/types/run.d.ts +30 -0
- package/dist/types/scripts/abort.d.ts +1 -0
- package/dist/types/scripts/ant_web_search.d.ts +1 -0
- package/dist/types/scripts/args.d.ts +7 -0
- package/dist/types/scripts/caching.d.ts +1 -0
- package/dist/types/scripts/cli.d.ts +1 -0
- package/dist/types/scripts/cli2.d.ts +1 -0
- package/dist/types/scripts/cli3.d.ts +1 -0
- package/dist/types/scripts/cli4.d.ts +1 -0
- package/dist/types/scripts/cli5.d.ts +1 -0
- package/dist/types/scripts/code_exec.d.ts +1 -0
- package/dist/types/scripts/code_exec_files.d.ts +1 -0
- package/dist/types/scripts/code_exec_simple.d.ts +1 -0
- package/dist/types/scripts/content.d.ts +1 -0
- package/dist/types/scripts/empty_input.d.ts +1 -0
- package/dist/types/scripts/image.d.ts +1 -0
- package/dist/types/scripts/memory.d.ts +1 -0
- package/dist/types/scripts/search.d.ts +1 -0
- package/dist/types/scripts/simple.d.ts +1 -0
- package/dist/types/scripts/stream.d.ts +1 -0
- package/dist/types/scripts/thinking.d.ts +1 -0
- package/dist/types/scripts/tools.d.ts +1 -0
- package/dist/types/specs/spec.utils.d.ts +1 -0
- package/dist/types/splitStream.d.ts +37 -0
- package/dist/types/stream.d.ts +14 -0
- package/dist/types/tools/CodeExecutor.d.ts +23 -0
- package/dist/types/tools/ToolNode.d.ts +22 -0
- package/dist/types/tools/example.d.ts +78 -0
- package/dist/types/tools/handlers.d.ts +19 -0
- package/dist/types/tools/search/anthropic.d.ts +16 -0
- package/dist/types/tools/search/content.d.ts +4 -0
- package/dist/types/tools/search/firecrawl.d.ts +54 -0
- package/dist/types/tools/search/format.d.ts +5 -0
- package/dist/types/tools/search/highlights.d.ts +13 -0
- package/dist/types/tools/search/index.d.ts +2 -0
- package/dist/types/tools/search/rerankers.d.ts +38 -0
- package/dist/types/tools/search/schema.d.ts +16 -0
- package/dist/types/tools/search/search.d.ts +8 -0
- package/dist/types/tools/search/serper-scraper.d.ts +59 -0
- package/dist/types/tools/search/test.d.ts +1 -0
- package/dist/types/tools/search/tool.d.ts +54 -0
- package/dist/types/tools/search/types.d.ts +591 -0
- package/dist/types/tools/search/utils.d.ts +10 -0
- package/dist/types/types/graph.d.ts +138 -0
- package/dist/types/types/index.d.ts +5 -0
- package/dist/types/types/llm.d.ts +102 -0
- package/dist/types/types/run.d.ts +74 -0
- package/dist/types/types/stream.d.ts +293 -0
- package/dist/types/types/tools.d.ts +61 -0
- package/dist/types/utils/graph.d.ts +2 -0
- package/dist/types/utils/index.d.ts +5 -0
- package/dist/types/utils/llm.d.ts +3 -0
- package/dist/types/utils/llmConfig.d.ts +3 -0
- package/dist/types/utils/logging.d.ts +1 -0
- package/dist/types/utils/misc.d.ts +7 -0
- package/dist/types/utils/run.d.ts +27 -0
- package/dist/types/utils/title.d.ts +4 -0
- package/dist/types/utils/tokens.d.ts +3 -0
- package/package.json +145 -0
- package/src/common/enum.ts +176 -0
- package/src/common/index.ts +2 -0
- package/src/events.ts +191 -0
- package/src/graphs/Graph.ts +846 -0
- package/src/graphs/index.ts +1 -0
- package/src/index.ts +24 -0
- package/src/instrumentation.ts +22 -0
- package/src/llm/anthropic/Jacob_Lee_Resume_2023.pdf +0 -0
- package/src/llm/anthropic/index.ts +413 -0
- package/src/llm/anthropic/llm.spec.ts +1442 -0
- package/src/llm/anthropic/types.ts +140 -0
- package/src/llm/anthropic/utils/message_inputs.ts +660 -0
- package/src/llm/anthropic/utils/message_outputs.ts +289 -0
- package/src/llm/anthropic/utils/output_parsers.ts +133 -0
- package/src/llm/anthropic/utils/tools.ts +29 -0
- package/src/llm/fake.ts +133 -0
- package/src/llm/google/index.ts +222 -0
- package/src/llm/google/types.ts +43 -0
- package/src/llm/google/utils/common.ts +660 -0
- package/src/llm/google/utils/tools.ts +160 -0
- package/src/llm/google/utils/zod_to_genai_parameters.ts +88 -0
- package/src/llm/ollama/index.ts +92 -0
- package/src/llm/ollama/utils.ts +193 -0
- package/src/llm/openai/index.ts +853 -0
- package/src/llm/openai/types.ts +24 -0
- package/src/llm/openai/utils/index.ts +918 -0
- package/src/llm/openai/utils/isReasoningModel.test.ts +90 -0
- package/src/llm/openrouter/index.ts +60 -0
- package/src/llm/providers.ts +57 -0
- package/src/llm/text.ts +94 -0
- package/src/llm/vertexai/index.ts +360 -0
- package/src/messages/core.ts +463 -0
- package/src/messages/format.ts +625 -0
- package/src/messages/formatAgentMessages.test.ts +917 -0
- package/src/messages/formatAgentMessages.tools.test.ts +400 -0
- package/src/messages/formatMessage.test.ts +693 -0
- package/src/messages/ids.ts +26 -0
- package/src/messages/index.ts +4 -0
- package/src/messages/prune.ts +567 -0
- package/src/messages/shiftIndexTokenCountMap.test.ts +81 -0
- package/src/mockStream.ts +99 -0
- package/src/prompts/collab.ts +6 -0
- package/src/prompts/index.ts +2 -0
- package/src/prompts/taskmanager.ts +61 -0
- package/src/proto/CollabGraph.ts +269 -0
- package/src/proto/TaskManager.ts +243 -0
- package/src/proto/collab.ts +200 -0
- package/src/proto/collab_design.ts +184 -0
- package/src/proto/collab_design_v2.ts +224 -0
- package/src/proto/collab_design_v3.ts +255 -0
- package/src/proto/collab_design_v4.ts +220 -0
- package/src/proto/collab_design_v5.ts +251 -0
- package/src/proto/collab_graph.ts +181 -0
- package/src/proto/collab_original.ts +123 -0
- package/src/proto/example.ts +93 -0
- package/src/proto/example_new.ts +68 -0
- package/src/proto/example_old.ts +201 -0
- package/src/proto/example_test.ts +152 -0
- package/src/proto/example_test_anthropic.ts +100 -0
- package/src/proto/log_stream.ts +202 -0
- package/src/proto/main_collab_community_event.ts +133 -0
- package/src/proto/main_collab_design_v2.ts +96 -0
- package/src/proto/main_collab_design_v4.ts +100 -0
- package/src/proto/main_collab_design_v5.ts +135 -0
- package/src/proto/main_collab_global_analysis.ts +122 -0
- package/src/proto/main_collab_hackathon_event.ts +153 -0
- package/src/proto/main_collab_space_mission.ts +153 -0
- package/src/proto/main_philosophy.ts +210 -0
- package/src/proto/original_script.ts +126 -0
- package/src/proto/standard.ts +100 -0
- package/src/proto/stream.ts +56 -0
- package/src/proto/tasks.ts +118 -0
- package/src/proto/tools/global_analysis_tools.ts +86 -0
- package/src/proto/tools/space_mission_tools.ts +60 -0
- package/src/proto/vertexai.ts +54 -0
- package/src/run.ts +381 -0
- package/src/scripts/abort.ts +138 -0
- package/src/scripts/ant_web_search.ts +158 -0
- package/src/scripts/args.ts +48 -0
- package/src/scripts/caching.ts +124 -0
- package/src/scripts/cli.ts +167 -0
- package/src/scripts/cli2.ts +125 -0
- package/src/scripts/cli3.ts +178 -0
- package/src/scripts/cli4.ts +184 -0
- package/src/scripts/cli5.ts +184 -0
- package/src/scripts/code_exec.ts +214 -0
- package/src/scripts/code_exec_files.ts +193 -0
- package/src/scripts/code_exec_simple.ts +129 -0
- package/src/scripts/content.ts +120 -0
- package/src/scripts/empty_input.ts +137 -0
- package/src/scripts/image.ts +178 -0
- package/src/scripts/memory.ts +97 -0
- package/src/scripts/search.ts +150 -0
- package/src/scripts/simple.ts +225 -0
- package/src/scripts/stream.ts +122 -0
- package/src/scripts/thinking.ts +150 -0
- package/src/scripts/tools.ts +155 -0
- package/src/specs/anthropic.simple.test.ts +317 -0
- package/src/specs/azure.simple.test.ts +316 -0
- package/src/specs/openai.simple.test.ts +316 -0
- package/src/specs/prune.test.ts +763 -0
- package/src/specs/reasoning.test.ts +165 -0
- package/src/specs/spec.utils.ts +3 -0
- package/src/specs/thinking-prune.test.ts +703 -0
- package/src/specs/token-distribution-edge-case.test.ts +316 -0
- package/src/specs/tool-error.test.ts +193 -0
- package/src/splitStream.test.ts +691 -0
- package/src/splitStream.ts +234 -0
- package/src/stream.test.ts +94 -0
- package/src/stream.ts +651 -0
- package/src/tools/CodeExecutor.ts +220 -0
- package/src/tools/ToolNode.ts +170 -0
- package/src/tools/example.ts +129 -0
- package/src/tools/handlers.ts +336 -0
- package/src/tools/search/anthropic.ts +51 -0
- package/src/tools/search/content.test.ts +173 -0
- package/src/tools/search/content.ts +147 -0
- package/src/tools/search/firecrawl.ts +210 -0
- package/src/tools/search/format.ts +250 -0
- package/src/tools/search/highlights.ts +320 -0
- package/src/tools/search/index.ts +2 -0
- package/src/tools/search/jina-reranker.test.ts +126 -0
- package/src/tools/search/output.md +2775 -0
- package/src/tools/search/rerankers.ts +242 -0
- package/src/tools/search/schema.ts +63 -0
- package/src/tools/search/search.ts +759 -0
- package/src/tools/search/serper-scraper.ts +155 -0
- package/src/tools/search/test.html +884 -0
- package/src/tools/search/test.md +643 -0
- package/src/tools/search/test.ts +159 -0
- package/src/tools/search/tool.ts +471 -0
- package/src/tools/search/types.ts +687 -0
- package/src/tools/search/utils.ts +79 -0
- package/src/types/graph.ts +185 -0
- package/src/types/index.ts +6 -0
- package/src/types/llm.ts +140 -0
- package/src/types/run.ts +89 -0
- package/src/types/stream.ts +400 -0
- package/src/types/tools.ts +80 -0
- package/src/utils/graph.ts +11 -0
- package/src/utils/index.ts +5 -0
- package/src/utils/llm.ts +27 -0
- package/src/utils/llmConfig.ts +183 -0
- package/src/utils/logging.ts +48 -0
- package/src/utils/misc.ts +57 -0
- package/src/utils/run.ts +101 -0
- package/src/utils/title.ts +165 -0
- package/src/utils/tokens.ts +70 -0
package/src/tools/search/search.ts
@@ -0,0 +1,759 @@
+import axios from 'axios';
+import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
+import type * as t from './types';
+import { getAttribution, createDefaultLogger } from './utils';
+import { BaseReranker } from './rerankers';
+
+const chunker = {
+  cleanText: (text: string): string => {
+    if (!text) return '';
+
+    /** Normalized all line endings to '\n' */
+    const normalizedText = text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+
+    /** Handle multiple backslashes followed by newlines
+     * This replaces patterns like '\\\\\\n' with a single newline */
+    const fixedBackslashes = normalizedText.replace(/\\+\n/g, '\n');
+
+    /** Cleaned up consecutive newlines, tabs, and spaces around newlines */
+    const cleanedNewlines = fixedBackslashes.replace(/[\t ]*\n[\t \n]*/g, '\n');
+
+    /** Cleaned up excessive spaces and tabs */
+    const cleanedSpaces = cleanedNewlines.replace(/[ \t]+/g, ' ');
+
+    return cleanedSpaces.trim();
+  },
+  splitText: async (
+    text: string,
+    options?: {
+      chunkSize?: number;
+      chunkOverlap?: number;
+      separators?: string[];
+    }
+  ): Promise<string[]> => {
+    const chunkSize = options?.chunkSize ?? 150;
+    const chunkOverlap = options?.chunkOverlap ?? 50;
+    const separators = options?.separators || ['\n\n', '\n'];
+
+    const splitter = new RecursiveCharacterTextSplitter({
+      separators,
+      chunkSize,
+      chunkOverlap,
+    });
+
+    return await splitter.splitText(text);
+  },
+
+  splitTexts: async (
+    texts: string[],
+    options?: {
+      chunkSize?: number;
+      chunkOverlap?: number;
+      separators?: string[];
+    },
+    logger?: t.Logger
+  ): Promise<string[][]> => {
+    // Split multiple texts
+    const logger_ = logger || createDefaultLogger();
+    const promises = texts.map((text) =>
+      chunker.splitText(text, options).catch((error) => {
+        logger_.error('Error splitting text:', error);
+        return [text];
+      })
+    );
+    return Promise.all(promises);
+  },
+};
+
+function createSourceUpdateCallback(sourceMap: Map<string, t.ValidSource>) {
+  return (link: string, update?: Partial<t.ValidSource>): void => {
+    const source = sourceMap.get(link);
+    if (source) {
+      sourceMap.set(link, {
+        ...source,
+        ...update,
+      });
+    }
+  };
+}
+
+const getHighlights = async ({
+  query,
+  content,
+  reranker,
+  topResults = 5,
+  logger,
+}: {
+  content: string;
+  query: string;
+  reranker?: BaseReranker;
+  topResults?: number;
+  logger?: t.Logger;
+}): Promise<t.Highlight[] | undefined> => {
+  const logger_ = logger || createDefaultLogger();
+
+  if (!content) {
+    logger_.warn('No content provided for highlights');
+    return;
+  }
+  if (!reranker) {
+    logger_.warn('No reranker provided for highlights');
+    return;
+  }
+
+  try {
+    const documents = await chunker.splitText(content);
+    if (Array.isArray(documents)) {
+      return await reranker.rerank(query, documents, topResults);
+    } else {
+      logger_.error(
+        'Expected documents to be an array, got:',
+        typeof documents
+      );
+      return;
+    }
+  } catch (error) {
+    logger_.error('Error in content processing:', error);
+    return;
+  }
+};
+
+const createSerperAPI = (
+  apiKey?: string
+): {
+  getSources: (params: t.GetSourcesParams) => Promise<t.SearchResult>;
+} => {
+  const config = {
+    apiKey: apiKey ?? process.env.SERPER_API_KEY,
+    apiUrl: 'https://google.serper.dev/search',
+    timeout: 10000,
+  };
+
+  if (config.apiKey == null || config.apiKey === '') {
+    throw new Error('SERPER_API_KEY is required for SerperAPI');
+  }
+
+  const getSources = async ({
+    query,
+    date,
+    country,
+    safeSearch,
+    numResults = 8,
+    type,
+  }: t.GetSourcesParams): Promise<t.SearchResult> => {
+    if (!query.trim()) {
+      return { success: false, error: 'Query cannot be empty' };
+    }
+
+    try {
+      const safe = ['off', 'moderate', 'active'] as const;
+      const payload: t.SerperSearchPayload = {
+        q: query,
+        safe: safe[safeSearch ?? 1],
+        num: Math.min(Math.max(1, numResults), 10),
+      };
+
+      // Set the search type if provided
+      if (type) {
+        payload.type = type;
+      }
+
+      if (date != null) {
+        payload.tbs = `qdr:${date}`;
+      }
+
+      if (country != null && country !== '') {
+        payload['gl'] = country.toLowerCase();
+      }
+
+      // Determine the API endpoint based on the search type
+      let apiEndpoint = config.apiUrl;
+      if (type === 'images') {
+        apiEndpoint = 'https://google.serper.dev/images';
+      } else if (type === 'videos') {
+        apiEndpoint = 'https://google.serper.dev/videos';
+      } else if (type === 'news') {
+        apiEndpoint = 'https://google.serper.dev/news';
+      }
+
+      const response = await axios.post<t.SerperResultData>(
+        apiEndpoint,
+        payload,
+        {
+          headers: {
+            'X-API-KEY': config.apiKey,
+            'Content-Type': 'application/json',
+          },
+          timeout: config.timeout,
+        }
+      );
+
+      const data = response.data;
+      const results: t.SearchResultData = {
+        organic: data.organic,
+        images: data.images ?? [],
+        answerBox: data.answerBox,
+        topStories: data.topStories ?? [],
+        peopleAlsoAsk: data.peopleAlsoAsk,
+        knowledgeGraph: data.knowledgeGraph,
+        relatedSearches: data.relatedSearches,
+        videos: data.videos ?? [],
+        news: data.news ?? [],
+      };
+
+      return { success: true, data: results };
+    } catch (error) {
+      const errorMessage =
+        error instanceof Error ? error.message : String(error);
+      return { success: false, error: `API request failed: ${errorMessage}` };
+    }
+  };
+
+  return { getSources };
+};
+
+const createSearXNGAPI = (
+  instanceUrl?: string,
+  apiKey?: string
+): {
+  getSources: (params: t.GetSourcesParams) => Promise<t.SearchResult>;
+} => {
+  const config = {
+    instanceUrl: instanceUrl ?? process.env.SEARXNG_INSTANCE_URL,
+    apiKey: apiKey ?? process.env.SEARXNG_API_KEY,
+    defaultLocation: 'all',
+    timeout: 10000,
+  };
+
+  if (config.instanceUrl == null || config.instanceUrl === '') {
+    throw new Error('SEARXNG_INSTANCE_URL is required for SearXNG API');
+  }
+
+  const getSources = async ({
+    query,
+    numResults = 8,
+    safeSearch,
+    type,
+  }: t.GetSourcesParams): Promise<t.SearchResult> => {
+    if (!query.trim()) {
+      return { success: false, error: 'Query cannot be empty' };
+    }
+
+    try {
+      // Ensure the instance URL ends with /search
+      if (config.instanceUrl == null || config.instanceUrl === '') {
+        return { success: false, error: 'Instance URL is not defined' };
+      }
+
+      let searchUrl = config.instanceUrl;
+      if (!searchUrl.endsWith('/search')) {
+        searchUrl = searchUrl.replace(/\/$/, '') + '/search';
+      }
+
+      // Determine the search category based on the type
+      let category = 'general';
+      if (type === 'images') {
+        category = 'images';
+      } else if (type === 'videos') {
+        category = 'videos';
+      } else if (type === 'news') {
+        category = 'news';
+      }
+
+      // Prepare parameters for SearXNG
+      const params: t.SearxNGSearchPayload = {
+        q: query,
+        format: 'json',
+        pageno: 1,
+        categories: category,
+        language: 'all',
+        safesearch: safeSearch,
+        engines: 'google,bing,duckduckgo',
+      };
+
+      const headers: Record<string, string> = {
+        'Content-Type': 'application/json',
+      };
+
+      if (config.apiKey != null && config.apiKey !== '') {
+        headers['X-API-Key'] = config.apiKey;
+      }
+
+      const response = await axios.get(searchUrl, {
+        headers,
+        params,
+        timeout: config.timeout,
+      });
+
+      const data = response.data;
+
+      // Helper function to identify news results since SearXNG doesn't provide that classification by default
+      const isNewsResult = (result: t.SearXNGResult): boolean => {
+        const url = result.url?.toLowerCase() ?? '';
+        const title = result.title?.toLowerCase() ?? '';
+
+        // News-related keywords in title/content
+        const newsKeywords = [
+          'breaking news',
+          'latest news',
+          'top stories',
+          'news today',
+          'developing story',
+          'trending news',
+          'news',
+        ];
+
+        // Check if title/content contains news keywords
+        const hasNewsKeywords = newsKeywords.some(
+          (keyword) => title.toLowerCase().includes(keyword) // just title probably fine, content parsing is overkill for what we need: || content.includes(keyword)
+        );
+
+        // Check if URL contains news-related paths
+        const hasNewsPath =
+          url.includes('/news/') ||
+          url.includes('/world/') ||
+          url.includes('/politics/') ||
+          url.includes('/breaking/');
+
+        return hasNewsKeywords || hasNewsPath;
+      };
+
+      // Transform SearXNG results to match SerperAPI format
+      const organicResults = (data.results ?? [])
+        .slice(0, numResults)
+        .map((result: t.SearXNGResult, index: number) => {
+          let attribution = '';
+          try {
+            attribution = new URL(result.url ?? '').hostname;
+          } catch {
+            attribution = '';
+          }
+
+          return {
+            position: index + 1,
+            title: result.title ?? '',
+            link: result.url ?? '',
+            snippet: result.content ?? '',
+            date: result.publishedDate ?? '',
+            attribution,
+          };
+        });
+
+      const imageResults = (data.results ?? [])
+        .filter((result: t.SearXNGResult) => result.img_src)
+        .slice(0, 6)
+        .map((result: t.SearXNGResult, index: number) => ({
+          title: result.title ?? '',
+          imageUrl: result.img_src ?? '',
+          position: index + 1,
+          source: new URL(result.url ?? '').hostname,
+          domain: new URL(result.url ?? '').hostname,
+          link: result.url ?? '',
+        }));
+
+      // Extract news results from organic results
+      const newsResults = (data.results ?? [])
+        .filter(isNewsResult)
+        .map((result: t.SearXNGResult, index: number) => {
+          let attribution = '';
+          try {
+            attribution = new URL(result.url ?? '').hostname;
+          } catch {
+            attribution = '';
+          }
+
+          return {
+            title: result.title ?? '',
+            link: result.url ?? '',
+            snippet: result.content ?? '',
+            date: result.publishedDate ?? '',
+            source: attribution,
+            imageUrl: result.img_src ?? '',
+            position: index + 1,
+          };
+        });
+
+      const topStories = newsResults.slice(0, 5);
+
+      const relatedSearches = Array.isArray(data.suggestions)
+        ? data.suggestions.map((suggestion: string) => ({ query: suggestion }))
+        : [];
+
+      const results: t.SearchResultData = {
+        organic: organicResults,
+        images: imageResults,
+        topStories: topStories, // Use first 5 extracted news as top stories
+        relatedSearches,
+        videos: [],
+        news: newsResults,
+        // Add empty arrays for other Serper fields to maintain parity
+        places: [],
+        shopping: [],
+        peopleAlsoAsk: [],
+        knowledgeGraph: undefined,
+        answerBox: undefined,
+      };
+
+      return { success: true, data: results };
+    } catch (error) {
+      const errorMessage =
+        error instanceof Error ? error.message : String(error);
+      return {
+        success: false,
+        error: `SearXNG API request failed: ${errorMessage}`,
+      };
+    }
+  };
+
+  return { getSources };
+};
+
+export const createSearchAPI = (
+  config: t.SearchConfig
+): {
+  getSources: (params: t.GetSourcesParams) => Promise<t.SearchResult>;
+} => {
+  const {
+    searchProvider = 'serper',
+    serperApiKey,
+    searxngInstanceUrl,
+    searxngApiKey,
+  } = config;
+
+  if (searchProvider.toLowerCase() === 'serper') {
+    return createSerperAPI(serperApiKey);
+  } else if (searchProvider.toLowerCase() === 'searxng') {
+    return createSearXNGAPI(searxngInstanceUrl, searxngApiKey);
+  } else {
+    throw new Error(
+      `Invalid search provider: ${searchProvider}. Must be 'serper' or 'searxng'`
+    );
+  }
+};
+
+export const createSourceProcessor = (
+  config: t.ProcessSourcesConfig = {},
+  scraperInstance?: t.BaseScraper
+): {
+  processSources: (
+    fields: t.ProcessSourcesFields
+  ) => Promise<t.SearchResultData>;
+  topResults: number;
+} => {
+  if (!scraperInstance) {
+    throw new Error('Scraper instance is required');
+  }
+  const {
+    topResults = 5,
+    // strategies = ['no_extraction'],
+    // filterContent = true,
+    reranker,
+    logger,
+  } = config;
+
+  const logger_ = logger || createDefaultLogger();
+  const scraper = scraperInstance;
+
+  const webScraper = {
+    scrapeMany: async ({
+      query,
+      links,
+      onGetHighlights,
+    }: {
+      query: string;
+      links: string[];
+      onGetHighlights: t.SearchToolConfig['onGetHighlights'];
+    }): Promise<Array<t.ScrapeResult>> => {
+      logger_.debug(`Scraping ${links.length} links`);
+      const promises: Array<Promise<t.ScrapeResult>> = [];
+      try {
+        for (let i = 0; i < links.length; i++) {
+          const currentLink = links[i];
+          const promise: Promise<t.ScrapeResult> = scraper
+            .scrapeUrl(currentLink, {})
+            .then(([url, response]) => {
+              const attribution = getAttribution(
+                url,
+                response.data?.metadata,
+                logger_
+              );
+              if (response.success && response.data) {
+                const [content, references] = scraper.extractContent(response);
+                return {
+                  url,
+                  references,
+                  attribution,
+                  content: chunker.cleanText(content),
+                } as t.ScrapeResult;
+              } else {
+                logger_.error(
+                  `Error scraping ${url}: ${response.error ?? 'Unknown error'}`
+                );
+              }
+
+              return {
+                url,
+                attribution,
+                error: true,
+                content: '',
+              } as t.ScrapeResult;
+            })
+            .then(async (result) => {
+              try {
+                if (result.error != null) {
+                  logger_.error(
+                    `Error scraping ${result.url}: ${result.content}`
+                  );
+                  return {
+                    ...result,
+                  };
+                }
+                const highlights = await getHighlights({
+                  query,
+                  reranker,
+                  content: result.content,
+                  logger: logger_,
+                });
+                if (onGetHighlights) {
+                  onGetHighlights(result.url);
+                }
+                return {
+                  ...result,
+                  highlights,
+                };
+              } catch (error) {
+                logger_.error('Error processing scraped content:', error);
+                return {
+                  ...result,
+                };
+              }
+            })
+            .catch((error) => {
+              logger_.error(`Error scraping ${currentLink}:`, error);
+              return {
+                url: currentLink,
+                error: true,
+                content: '',
+              };
+            });
+          promises.push(promise);
+        }
+        return await Promise.all(promises);
+      } catch (error) {
+        logger_.error('Error in scrapeMany:', error);
+        return [];
+      }
+    },
+  };
+
+  const fetchContents = async ({
+    links,
+    query,
+    target,
+    onGetHighlights,
+    onContentScraped,
+  }: {
+    links: string[];
+    query: string;
+    target: number;
+    onGetHighlights: t.SearchToolConfig['onGetHighlights'];
+    onContentScraped?: (link: string, update?: Partial<t.ValidSource>) => void;
+  }): Promise<void> => {
+    const initialLinks = links.slice(0, target);
+    // const remainingLinks = links.slice(target).reverse();
+    const results = await webScraper.scrapeMany({
+      query,
+      links: initialLinks,
+      onGetHighlights,
+    });
+    for (const result of results) {
+      if (result.error === true) {
+        continue;
+      }
+      const { url, content, attribution, references, highlights } = result;
+      onContentScraped?.(url, {
+        content,
+        attribution,
+        references,
+        highlights,
+      });
+    }
+  };
+
+  const processSources = async ({
+    result,
+    numElements,
+    query,
+    news,
+    proMode = true,
+    onGetHighlights,
+  }: t.ProcessSourcesFields): Promise<t.SearchResultData> => {
+    try {
+      if (!result.data) {
+        return {
+          organic: [],
+          topStories: [],
+          images: [],
+          relatedSearches: [],
+        };
+      } else if (!result.data.organic) {
+        return result.data;
+      }
+
+      if (!proMode) {
+        const wikiSources = result.data.organic.filter((source) =>
+          source.link.includes('wikipedia.org')
+        );
+
+        if (!wikiSources.length) {
+          return result.data;
+        }
+
+        const wikiSourceMap = new Map<string, t.ValidSource>();
+        wikiSourceMap.set(wikiSources[0].link, wikiSources[0]);
+        const onContentScraped = createSourceUpdateCallback(wikiSourceMap);
+        await fetchContents({
+          query,
+          target: 1,
+          onGetHighlights,
+          onContentScraped,
+          links: [wikiSources[0].link],
+        });
+
+        for (let i = 0; i < result.data.organic.length; i++) {
+          const source = result.data.organic[i];
+          const updatedSource = wikiSourceMap.get(source.link);
+          if (updatedSource) {
+            result.data.organic[i] = {
+              ...source,
+              ...updatedSource,
+            };
+          }
+        }
+
+        return result.data;
+      }
+
+      const sourceMap = new Map<string, t.ValidSource>();
+      const organicLinksSet = new Set<string>();
+
+      // Collect organic links
+      const organicLinks = collectLinks(
+        result.data.organic,
+        sourceMap,
+        organicLinksSet
+      );
+
+      // Collect top story links, excluding any that are already in organic links
+      const topStories = result.data.topStories ?? [];
+      const topStoryLinks = collectLinks(
+        topStories,
+        sourceMap,
+        organicLinksSet
+      );
+
+      if (organicLinks.length === 0 && (topStoryLinks.length === 0 || !news)) {
+        return result.data;
+      }
+
+      const onContentScraped = createSourceUpdateCallback(sourceMap);
+      const promises: Promise<void>[] = [];
+
+      // Process organic links
+      if (organicLinks.length > 0) {
+        promises.push(
+          fetchContents({
+            query,
+            onGetHighlights,
+            onContentScraped,
+            links: organicLinks,
+            target: numElements,
+          })
+        );
+      }
+
+      // Process top story links
+      if (news && topStoryLinks.length > 0) {
+        promises.push(
+          fetchContents({
+            query,
+            onGetHighlights,
+            onContentScraped,
+            links: topStoryLinks,
+            target: numElements,
+          })
+        );
+      }
+
+      await Promise.all(promises);
+
+      if (result.data.organic.length > 0) {
+        updateSourcesWithContent(result.data.organic, sourceMap);
+      }
+
+      if (news && topStories.length > 0) {
+        updateSourcesWithContent(topStories, sourceMap);
+      }
+
+      return result.data;
+    } catch (error) {
+      logger_.error('Error in processSources:', error);
+      return {
+        organic: [],
+        topStories: [],
+        images: [],
+        relatedSearches: [],
+        ...result.data,
+        error: error instanceof Error ? error.message : String(error),
+      };
+    }
+  };
+
+  return {
+    processSources,
+    topResults,
+  };
+};
+
+/** Helper function to collect links and update sourceMap */
+function collectLinks(
+  sources: Array<t.OrganicResult | t.TopStoryResult>,
+  sourceMap: Map<string, t.ValidSource>,
+  existingLinksSet?: Set<string>
+): string[] {
+  const links: string[] = [];
+
+  for (const source of sources) {
+    if (source.link) {
+      // For topStories, only add if not already in organic links
+      if (existingLinksSet && existingLinksSet.has(source.link)) {
+        continue;
+      }
+
+      links.push(source.link);
+      if (existingLinksSet) {
+        existingLinksSet.add(source.link);
+      }
+      sourceMap.set(source.link, source as t.ValidSource);
+    }
+  }
+
+  return links;
+}
+
+/** Helper function to update sources with scraped content */
+function updateSourcesWithContent<T extends t.ValidSource>(
+  sources: T[],
+  sourceMap: Map<string, t.ValidSource>
+): void {
+  for (let i = 0; i < sources.length; i++) {
+    const source = sources[i];
+    const updatedSource = sourceMap.get(source.link);
+    if (updatedSource) {
+      sources[i] = {
+        ...source,
+        ...updatedSource,
+      } as T;
+    }
+  }
+}