ag2 0.9.6__py3-none-any.whl → 0.9.8.post1__py3-none-any.whl
This diff compares the contents of two package versions as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
Potentially problematic release: this version of ag2 might be problematic.
- {ag2-0.9.6.dist-info → ag2-0.9.8.post1.dist-info}/METADATA +102 -75
- ag2-0.9.8.post1.dist-info/RECORD +387 -0
- autogen/__init__.py +1 -2
- autogen/_website/generate_api_references.py +4 -5
- autogen/_website/generate_mkdocs.py +9 -15
- autogen/_website/notebook_processor.py +13 -14
- autogen/_website/process_notebooks.py +10 -10
- autogen/_website/utils.py +5 -4
- autogen/agentchat/agent.py +13 -13
- autogen/agentchat/assistant_agent.py +7 -6
- autogen/agentchat/contrib/agent_eval/agent_eval.py +3 -3
- autogen/agentchat/contrib/agent_eval/critic_agent.py +3 -3
- autogen/agentchat/contrib/agent_eval/quantifier_agent.py +3 -3
- autogen/agentchat/contrib/agent_eval/subcritic_agent.py +3 -3
- autogen/agentchat/contrib/agent_optimizer.py +3 -3
- autogen/agentchat/contrib/capabilities/generate_images.py +11 -11
- autogen/agentchat/contrib/capabilities/teachability.py +15 -15
- autogen/agentchat/contrib/capabilities/transforms.py +17 -18
- autogen/agentchat/contrib/capabilities/transforms_util.py +5 -5
- autogen/agentchat/contrib/capabilities/vision_capability.py +4 -3
- autogen/agentchat/contrib/captainagent/agent_builder.py +30 -30
- autogen/agentchat/contrib/captainagent/captainagent.py +22 -21
- autogen/agentchat/contrib/captainagent/tool_retriever.py +2 -3
- autogen/agentchat/contrib/gpt_assistant_agent.py +9 -9
- autogen/agentchat/contrib/graph_rag/document.py +3 -3
- autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +3 -3
- autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py +6 -6
- autogen/agentchat/contrib/graph_rag/graph_query_engine.py +3 -3
- autogen/agentchat/contrib/graph_rag/neo4j_graph_query_engine.py +5 -11
- autogen/agentchat/contrib/graph_rag/neo4j_graph_rag_capability.py +6 -6
- autogen/agentchat/contrib/graph_rag/neo4j_native_graph_query_engine.py +7 -7
- autogen/agentchat/contrib/graph_rag/neo4j_native_graph_rag_capability.py +6 -6
- autogen/agentchat/contrib/img_utils.py +1 -1
- autogen/agentchat/contrib/llamaindex_conversable_agent.py +11 -11
- autogen/agentchat/contrib/llava_agent.py +18 -4
- autogen/agentchat/contrib/math_user_proxy_agent.py +11 -11
- autogen/agentchat/contrib/multimodal_conversable_agent.py +8 -8
- autogen/agentchat/contrib/qdrant_retrieve_user_proxy_agent.py +6 -5
- autogen/agentchat/contrib/rag/chromadb_query_engine.py +22 -26
- autogen/agentchat/contrib/rag/llamaindex_query_engine.py +14 -17
- autogen/agentchat/contrib/rag/mongodb_query_engine.py +27 -37
- autogen/agentchat/contrib/rag/query_engine.py +7 -5
- autogen/agentchat/contrib/retrieve_assistant_agent.py +5 -5
- autogen/agentchat/contrib/retrieve_user_proxy_agent.py +8 -7
- autogen/agentchat/contrib/society_of_mind_agent.py +15 -14
- autogen/agentchat/contrib/swarm_agent.py +76 -98
- autogen/agentchat/contrib/text_analyzer_agent.py +7 -7
- autogen/agentchat/contrib/vectordb/base.py +10 -18
- autogen/agentchat/contrib/vectordb/chromadb.py +2 -1
- autogen/agentchat/contrib/vectordb/couchbase.py +18 -20
- autogen/agentchat/contrib/vectordb/mongodb.py +6 -5
- autogen/agentchat/contrib/vectordb/pgvectordb.py +40 -41
- autogen/agentchat/contrib/vectordb/qdrant.py +5 -5
- autogen/agentchat/contrib/web_surfer.py +20 -19
- autogen/agentchat/conversable_agent.py +311 -295
- autogen/agentchat/group/context_str.py +1 -3
- autogen/agentchat/group/context_variables.py +15 -25
- autogen/agentchat/group/group_tool_executor.py +10 -10
- autogen/agentchat/group/group_utils.py +15 -15
- autogen/agentchat/group/guardrails.py +7 -7
- autogen/agentchat/group/handoffs.py +19 -36
- autogen/agentchat/group/multi_agent_chat.py +7 -7
- autogen/agentchat/group/on_condition.py +4 -7
- autogen/agentchat/group/on_context_condition.py +4 -7
- autogen/agentchat/group/patterns/auto.py +8 -7
- autogen/agentchat/group/patterns/manual.py +7 -6
- autogen/agentchat/group/patterns/pattern.py +13 -12
- autogen/agentchat/group/patterns/random.py +3 -3
- autogen/agentchat/group/patterns/round_robin.py +3 -3
- autogen/agentchat/group/reply_result.py +2 -4
- autogen/agentchat/group/speaker_selection_result.py +5 -5
- autogen/agentchat/group/targets/group_chat_target.py +7 -6
- autogen/agentchat/group/targets/group_manager_target.py +4 -4
- autogen/agentchat/group/targets/transition_target.py +2 -1
- autogen/agentchat/groupchat.py +58 -61
- autogen/agentchat/realtime/experimental/audio_adapters/twilio_audio_adapter.py +4 -4
- autogen/agentchat/realtime/experimental/audio_adapters/websocket_audio_adapter.py +4 -4
- autogen/agentchat/realtime/experimental/clients/gemini/client.py +7 -7
- autogen/agentchat/realtime/experimental/clients/oai/base_client.py +8 -8
- autogen/agentchat/realtime/experimental/clients/oai/rtc_client.py +6 -6
- autogen/agentchat/realtime/experimental/clients/realtime_client.py +10 -9
- autogen/agentchat/realtime/experimental/realtime_agent.py +10 -9
- autogen/agentchat/realtime/experimental/realtime_observer.py +3 -3
- autogen/agentchat/realtime/experimental/realtime_swarm.py +44 -44
- autogen/agentchat/user_proxy_agent.py +10 -9
- autogen/agentchat/utils.py +3 -3
- autogen/agents/contrib/time/time_reply_agent.py +6 -5
- autogen/agents/contrib/time/time_tool_agent.py +2 -1
- autogen/agents/experimental/deep_research/deep_research.py +3 -3
- autogen/agents/experimental/discord/discord.py +2 -2
- autogen/agents/experimental/document_agent/chroma_query_engine.py +29 -44
- autogen/agents/experimental/document_agent/docling_doc_ingest_agent.py +9 -14
- autogen/agents/experimental/document_agent/document_agent.py +15 -16
- autogen/agents/experimental/document_agent/document_conditions.py +3 -3
- autogen/agents/experimental/document_agent/document_utils.py +5 -9
- autogen/agents/experimental/document_agent/inmemory_query_engine.py +14 -20
- autogen/agents/experimental/document_agent/parser_utils.py +4 -4
- autogen/agents/experimental/document_agent/url_utils.py +14 -23
- autogen/agents/experimental/reasoning/reasoning_agent.py +33 -33
- autogen/agents/experimental/slack/slack.py +2 -2
- autogen/agents/experimental/telegram/telegram.py +2 -3
- autogen/agents/experimental/websurfer/websurfer.py +4 -4
- autogen/agents/experimental/wikipedia/wikipedia.py +5 -7
- autogen/browser_utils.py +8 -8
- autogen/cache/abstract_cache_base.py +5 -5
- autogen/cache/cache.py +12 -12
- autogen/cache/cache_factory.py +4 -4
- autogen/cache/cosmos_db_cache.py +9 -9
- autogen/cache/disk_cache.py +6 -6
- autogen/cache/in_memory_cache.py +4 -4
- autogen/cache/redis_cache.py +4 -4
- autogen/code_utils.py +18 -18
- autogen/coding/base.py +6 -6
- autogen/coding/docker_commandline_code_executor.py +9 -9
- autogen/coding/func_with_reqs.py +7 -6
- autogen/coding/jupyter/base.py +3 -3
- autogen/coding/jupyter/docker_jupyter_server.py +3 -4
- autogen/coding/jupyter/import_utils.py +3 -3
- autogen/coding/jupyter/jupyter_client.py +5 -5
- autogen/coding/jupyter/jupyter_code_executor.py +3 -4
- autogen/coding/jupyter/local_jupyter_server.py +2 -6
- autogen/coding/local_commandline_code_executor.py +8 -7
- autogen/coding/markdown_code_extractor.py +1 -2
- autogen/coding/utils.py +1 -2
- autogen/doc_utils.py +3 -2
- autogen/environments/docker_python_environment.py +19 -29
- autogen/environments/python_environment.py +8 -17
- autogen/environments/system_python_environment.py +3 -4
- autogen/environments/venv_python_environment.py +8 -12
- autogen/environments/working_directory.py +1 -2
- autogen/events/agent_events.py +106 -109
- autogen/events/base_event.py +6 -5
- autogen/events/client_events.py +15 -14
- autogen/events/helpers.py +1 -1
- autogen/events/print_event.py +4 -5
- autogen/fast_depends/_compat.py +10 -15
- autogen/fast_depends/core/build.py +17 -36
- autogen/fast_depends/core/model.py +64 -113
- autogen/fast_depends/dependencies/model.py +2 -1
- autogen/fast_depends/dependencies/provider.py +3 -2
- autogen/fast_depends/library/model.py +4 -4
- autogen/fast_depends/schema.py +7 -7
- autogen/fast_depends/use.py +17 -25
- autogen/fast_depends/utils.py +10 -30
- autogen/formatting_utils.py +6 -6
- autogen/graph_utils.py +1 -4
- autogen/import_utils.py +13 -13
- autogen/interop/crewai/crewai.py +2 -2
- autogen/interop/interoperable.py +2 -2
- autogen/interop/langchain/langchain_chat_model_factory.py +3 -2
- autogen/interop/langchain/langchain_tool.py +2 -6
- autogen/interop/litellm/litellm_config_factory.py +6 -7
- autogen/interop/pydantic_ai/pydantic_ai.py +4 -7
- autogen/interop/registry.py +2 -1
- autogen/io/base.py +5 -5
- autogen/io/run_response.py +33 -32
- autogen/io/websockets.py +6 -5
- autogen/json_utils.py +1 -2
- autogen/llm_config/__init__.py +11 -0
- autogen/llm_config/client.py +58 -0
- autogen/llm_config/config.py +384 -0
- autogen/llm_config/entry.py +154 -0
- autogen/logger/base_logger.py +4 -3
- autogen/logger/file_logger.py +2 -1
- autogen/logger/logger_factory.py +2 -2
- autogen/logger/logger_utils.py +2 -2
- autogen/logger/sqlite_logger.py +3 -2
- autogen/math_utils.py +4 -5
- autogen/mcp/__main__.py +6 -6
- autogen/mcp/helpers.py +4 -4
- autogen/mcp/mcp_client.py +170 -29
- autogen/mcp/mcp_proxy/fastapi_code_generator_helpers.py +3 -4
- autogen/mcp/mcp_proxy/mcp_proxy.py +23 -26
- autogen/mcp/mcp_proxy/operation_grouping.py +4 -5
- autogen/mcp/mcp_proxy/operation_renaming.py +6 -10
- autogen/mcp/mcp_proxy/security.py +2 -3
- autogen/messages/agent_messages.py +96 -98
- autogen/messages/base_message.py +6 -5
- autogen/messages/client_messages.py +15 -14
- autogen/messages/print_message.py +4 -5
- autogen/oai/__init__.py +1 -2
- autogen/oai/anthropic.py +42 -41
- autogen/oai/bedrock.py +68 -57
- autogen/oai/cerebras.py +26 -25
- autogen/oai/client.py +118 -138
- autogen/oai/client_utils.py +3 -3
- autogen/oai/cohere.py +34 -11
- autogen/oai/gemini.py +40 -17
- autogen/oai/gemini_types.py +11 -12
- autogen/oai/groq.py +22 -10
- autogen/oai/mistral.py +17 -11
- autogen/oai/oai_models/__init__.py +14 -2
- autogen/oai/oai_models/_models.py +2 -2
- autogen/oai/oai_models/chat_completion.py +13 -14
- autogen/oai/oai_models/chat_completion_message.py +11 -9
- autogen/oai/oai_models/chat_completion_message_tool_call.py +26 -3
- autogen/oai/oai_models/chat_completion_token_logprob.py +3 -4
- autogen/oai/oai_models/completion_usage.py +8 -9
- autogen/oai/ollama.py +22 -10
- autogen/oai/openai_responses.py +40 -17
- autogen/oai/openai_utils.py +159 -85
- autogen/oai/together.py +29 -14
- autogen/retrieve_utils.py +6 -7
- autogen/runtime_logging.py +5 -4
- autogen/token_count_utils.py +7 -4
- autogen/tools/contrib/time/time.py +0 -1
- autogen/tools/dependency_injection.py +5 -6
- autogen/tools/experimental/browser_use/browser_use.py +10 -10
- autogen/tools/experimental/code_execution/python_code_execution.py +5 -7
- autogen/tools/experimental/crawl4ai/crawl4ai.py +12 -15
- autogen/tools/experimental/deep_research/deep_research.py +9 -8
- autogen/tools/experimental/duckduckgo/duckduckgo_search.py +5 -11
- autogen/tools/experimental/firecrawl/firecrawl_tool.py +98 -115
- autogen/tools/experimental/google/authentication/credentials_local_provider.py +1 -1
- autogen/tools/experimental/google/drive/drive_functions.py +4 -4
- autogen/tools/experimental/google/drive/toolkit.py +5 -5
- autogen/tools/experimental/google_search/google_search.py +5 -5
- autogen/tools/experimental/google_search/youtube_search.py +5 -5
- autogen/tools/experimental/messageplatform/discord/discord.py +8 -12
- autogen/tools/experimental/messageplatform/slack/slack.py +14 -20
- autogen/tools/experimental/messageplatform/telegram/telegram.py +8 -12
- autogen/tools/experimental/perplexity/perplexity_search.py +18 -29
- autogen/tools/experimental/reliable/reliable.py +68 -74
- autogen/tools/experimental/searxng/searxng_search.py +20 -19
- autogen/tools/experimental/tavily/tavily_search.py +12 -19
- autogen/tools/experimental/web_search_preview/web_search_preview.py +13 -7
- autogen/tools/experimental/wikipedia/wikipedia.py +7 -10
- autogen/tools/function_utils.py +7 -7
- autogen/tools/tool.py +6 -5
- autogen/types.py +2 -2
- autogen/version.py +1 -1
- ag2-0.9.6.dist-info/RECORD +0 -421
- autogen/llm_config.py +0 -385
- {ag2-0.9.6.dist-info → ag2-0.9.8.post1.dist-info}/WHEEL +0 -0
- {ag2-0.9.6.dist-info → ag2-0.9.8.post1.dist-info}/licenses/LICENSE +0 -0
- {ag2-0.9.6.dist-info → ag2-0.9.8.post1.dist-info}/licenses/NOTICE.md +0 -0
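The hunks that follow mostly apply two mechanical cleanups across the reviewed files: `Optional[X]` and `Union[X, Y]` annotations are rewritten as PEP 604 unions (`X | None`, `X | Y`), and docstrings that opened with a bare `"""` line have their summary moved onto the first line. A generic before/after sketch of both patterns (names are illustrative, not taken from ag2):

```python
from typing import Optional, Union


def fetch_before(url: str, timeout: Optional[int] = None, config: Optional[Union[dict, str]] = None) -> dict:
    """
    Fetch a resource.
    """
    return {"url": url, "timeout": timeout, "config": config}


# After the cleanup: PEP 604 unions, summary on the docstring's first line.
def fetch_after(url: str, timeout: int | None = None, config: dict | str | None = None) -> dict:
    """Fetch a resource."""
    return {"url": url, "timeout": timeout, "config": config}
```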
autogen/tools/experimental/firecrawl/firecrawl_tool.py

@@ -4,7 +4,7 @@
 
 import logging
 import os
-from typing import Annotated, Any, Optional, Union
+from typing import Annotated, Any
 
 from ....doc_utils import export_module
 from ....import_utils import optional_import_block, require_optional_import

@@ -27,16 +27,15 @@ with optional_import_block():
 def _execute_firecrawl_scrape(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    formats: Optional[list[str]] = None,
-    include_tags: Optional[list[str]] = None,
-    exclude_tags: Optional[list[str]] = None,
-    headers: Optional[dict[str, str]] = None,
-    wait_for: Optional[int] = None,
-    timeout: Optional[int] = None,
+    firecrawl_api_url: str | None = None,
+    formats: list[str] | None = None,
+    include_tags: list[str] | None = None,
+    exclude_tags: list[str] | None = None,
+    headers: dict[str, str] | None = None,
+    wait_for: int | None = None,
+    timeout: int | None = None,
 ) -> dict[str, Any]:
-    """
-    Execute a scrape operation using the Firecrawl API.
+    """Execute a scrape operation using the Firecrawl API.
 
     Args:
         url (str): The URL to scrape.

@@ -78,17 +77,16 @@ def _execute_firecrawl_scrape(
 def _execute_firecrawl_crawl(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    formats: Optional[list[str]] = None,
-    include_paths: Optional[list[str]] = None,
-    exclude_paths: Optional[list[str]] = None,
-    max_depth: Optional[int] = None,
+    formats: list[str] | None = None,
+    include_paths: list[str] | None = None,
+    exclude_paths: list[str] | None = None,
+    max_depth: int | None = None,
     allow_backward_crawling: bool = False,
     allow_external_content_links: bool = False,
 ) -> dict[str, Any]:
-    """
-    Execute a crawl operation using the Firecrawl API.
+    """Execute a crawl operation using the Firecrawl API.
 
     Args:
         url (str): The starting URL to crawl.

@@ -137,14 +135,13 @@ def _execute_firecrawl_crawl(
 def _execute_firecrawl_map(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    search: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
+    search: str | None = None,
     ignore_sitemap: bool = False,
     include_subdomains: bool = False,
     limit: int = 5000,
 ) -> dict[str, Any]:
-    """
-    Execute a map operation using the Firecrawl API to get URLs from a website.
+    """Execute a map operation using the Firecrawl API to get URLs from a website.
 
     Args:
         url (str): The website URL to map.

@@ -179,17 +176,16 @@ def _execute_firecrawl_map(
 def _execute_firecrawl_search(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    tbs: Optional[str] = None,
-    filter: Optional[str] = None,
+    tbs: str | None = None,
+    filter: str | None = None,
     lang: str = "en",
     country: str = "us",
-    location: Optional[str] = None,
-    timeout: Optional[int] = None,
+    location: str | None = None,
+    timeout: int | None = None,
 ) -> dict[str, Any]:
-    """
-    Execute a search operation using the Firecrawl API.
+    """Execute a search operation using the Firecrawl API.
 
     Args:
         query (str): The search query string.

@@ -230,15 +226,14 @@ def _execute_firecrawl_search(
 def _execute_firecrawl_deep_research(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     max_depth: int = 7,
     time_limit: int = 270,
     max_urls: int = 20,
-    analysis_prompt: Optional[str] = None,
-    system_prompt: Optional[str] = None,
+    analysis_prompt: str | None = None,
+    system_prompt: str | None = None,
 ) -> dict[str, Any]:
-    """
-    Execute a deep research operation using the Firecrawl API.
+    """Execute a deep research operation using the Firecrawl API.
 
     Args:
         query (str): The research query or topic to investigate.

@@ -269,16 +264,15 @@ def _execute_firecrawl_deep_research(
 def _firecrawl_scrape(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    formats: Optional[list[str]] = None,
-    include_tags: Optional[list[str]] = None,
-    exclude_tags: Optional[list[str]] = None,
-    headers: Optional[dict[str, str]] = None,
-    wait_for: Optional[int] = None,
-    timeout: Optional[int] = None,
+    firecrawl_api_url: str | None = None,
+    formats: list[str] | None = None,
+    include_tags: list[str] | None = None,
+    exclude_tags: list[str] | None = None,
+    headers: dict[str, str] | None = None,
+    wait_for: int | None = None,
+    timeout: int | None = None,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl scrape and format the results.
+    """Perform a Firecrawl scrape and format the results.
 
     Args:
         url (str): The URL to scrape.

@@ -324,17 +318,16 @@ def _firecrawl_scrape(
 def _firecrawl_crawl(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    formats: Optional[list[str]] = None,
-    include_paths: Optional[list[str]] = None,
-    exclude_paths: Optional[list[str]] = None,
-    max_depth: Optional[int] = None,
+    formats: list[str] | None = None,
+    include_paths: list[str] | None = None,
+    exclude_paths: list[str] | None = None,
+    max_depth: int | None = None,
     allow_backward_crawling: bool = False,
     allow_external_content_links: bool = False,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl crawl and format the results.
+    """Perform a Firecrawl crawl and format the results.
 
     Args:
         url (str): The starting URL to crawl.

@@ -387,14 +380,13 @@ def _firecrawl_crawl(
 def _firecrawl_map(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    search: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
+    search: str | None = None,
     ignore_sitemap: bool = False,
     include_subdomains: bool = False,
     limit: int = 5000,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl map operation and format the results.
+    """Perform a Firecrawl map operation and format the results.
 
     Args:
         url (str): The website URL to map.

@@ -440,17 +432,16 @@ def _firecrawl_map(
 def _firecrawl_search(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    tbs: Optional[str] = None,
-    filter: Optional[str] = None,
+    tbs: str | None = None,
+    filter: str | None = None,
     lang: str = "en",
     country: str = "us",
-    location: Optional[str] = None,
-    timeout: Optional[int] = None,
+    location: str | None = None,
+    timeout: int | None = None,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl search and format the results.
+    """Perform a Firecrawl search and format the results.
 
     Args:
         query (str): The search query string.

@@ -504,15 +495,14 @@ def _firecrawl_search(
 def _firecrawl_deep_research(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     max_depth: int = 7,
     time_limit: int = 270,
     max_urls: int = 20,
-    analysis_prompt: Optional[str] = None,
-    system_prompt: Optional[str] = None,
+    analysis_prompt: str | None = None,
+    system_prompt: str | None = None,
 ) -> dict[str, Any]:
-    """
-    Perform a Firecrawl deep research operation and format the results.
+    """Perform a Firecrawl deep research operation and format the results.
 
     Args:
         query (str): The research query or topic to investigate.

@@ -568,8 +558,7 @@ def _firecrawl_deep_research(
 
 @export_module("autogen.tools.experimental")
 class FirecrawlTool(Tool):
-    """
-    FirecrawlTool is a tool that uses the Firecrawl API to scrape, crawl, map, search, and research websites.
+    """FirecrawlTool is a tool that uses the Firecrawl API to scrape, crawl, map, search, and research websites.
 
     This tool allows agents to leverage Firecrawl for web content extraction, discovery, and research.
     It requires a Firecrawl API key, which can be provided during initialization or set as

@@ -590,12 +579,11 @@ class FirecrawlTool(Tool):
     def __init__(
         self,
         *,
-        llm_config: Optional[Union[LLMConfig, dict[str, Any]]] = None,
-        firecrawl_api_key: Optional[str] = None,
-        firecrawl_api_url: Optional[str] = None,
+        llm_config: LLMConfig | dict[str, Any] | None = None,
+        firecrawl_api_key: str | None = None,
+        firecrawl_api_url: str | None = None,
     ):
-        """
-        Initializes the FirecrawlTool.
+        """Initializes the FirecrawlTool.
 
         Args:
             llm_config (Optional[Union[LLMConfig, dict[str, Any]]]): LLM configuration. (Currently unused but kept for potential future integration).

@@ -618,17 +606,16 @@ class FirecrawlTool(Tool):
 
        def firecrawl_scrape(
            url: Annotated[str, "The URL to scrape."],
-           firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-           firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
-           formats: Annotated[Optional[list[str]], "Output formats (e.g., ['markdown', 'html'])"] = None,
-           include_tags: Annotated[Optional[list[str]], "HTML tags to include"] = None,
-           exclude_tags: Annotated[Optional[list[str]], "HTML tags to exclude"] = None,
-           headers: Annotated[Optional[dict[str, str]], "HTTP headers to use"] = None,
-           wait_for: Annotated[Optional[int], "Time to wait for page load in milliseconds"] = None,
-           timeout: Annotated[Optional[int], "Request timeout in milliseconds"] = None,
+           firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+           firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
+           formats: Annotated[list[str] | None, "Output formats (e.g., ['markdown', 'html'])"] = None,
+           include_tags: Annotated[list[str] | None, "HTML tags to include"] = None,
+           exclude_tags: Annotated[list[str] | None, "HTML tags to exclude"] = None,
+           headers: Annotated[dict[str, str] | None, "HTTP headers to use"] = None,
+           wait_for: Annotated[int | None, "Time to wait for page load in milliseconds"] = None,
+           timeout: Annotated[int | None, "Request timeout in milliseconds"] = None,
        ) -> list[dict[str, Any]]:
-           """
-           Scrapes a single URL and returns the content.
+           """Scrapes a single URL and returns the content.
 
            Args:
                url: The URL to scrape.

@@ -663,18 +650,17 @@ class FirecrawlTool(Tool):
 
        def firecrawl_crawl(
            url: Annotated[str, "The starting URL to crawl."],
-           firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-           firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
+           firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+           firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
            limit: Annotated[int, "Maximum number of pages to crawl"] = 5,
-           formats: Annotated[Optional[list[str]], "Output formats (e.g., ['markdown', 'html'])"] = None,
-           include_paths: Annotated[Optional[list[str]], "URL patterns to include"] = None,
-           exclude_paths: Annotated[Optional[list[str]], "URL patterns to exclude"] = None,
-           max_depth: Annotated[Optional[int], "Maximum crawl depth"] = None,
-           allow_backward_crawling: Annotated[Optional[bool], "Allow crawling backward links"] = False,
-           allow_external_content_links: Annotated[Optional[bool], "Allow external links"] = False,
+           formats: Annotated[list[str] | None, "Output formats (e.g., ['markdown', 'html'])"] = None,
+           include_paths: Annotated[list[str] | None, "URL patterns to include"] = None,
+           exclude_paths: Annotated[list[str] | None, "URL patterns to exclude"] = None,
+           max_depth: Annotated[int | None, "Maximum crawl depth"] = None,
+           allow_backward_crawling: Annotated[bool | None, "Allow crawling backward links"] = False,
+           allow_external_content_links: Annotated[bool | None, "Allow external links"] = False,
        ) -> list[dict[str, Any]]:
-           """
-           Crawls a website starting from a URL and returns the content from multiple pages.
+           """Crawls a website starting from a URL and returns the content from multiple pages.
 
            Args:
                url: The starting URL to crawl.

@@ -711,15 +697,14 @@ class FirecrawlTool(Tool):
 
        def firecrawl_map(
            url: Annotated[str, "The website URL to map."],
-           firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-           firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
-           search: Annotated[Optional[str], "Search term to filter URLs"] = None,
-           ignore_sitemap: Annotated[Optional[bool], "Whether to ignore the sitemap"] = False,
-           include_subdomains: Annotated[Optional[bool], "Whether to include subdomains"] = False,
+           firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+           firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
+           search: Annotated[str | None, "Search term to filter URLs"] = None,
+           ignore_sitemap: Annotated[bool | None, "Whether to ignore the sitemap"] = False,
+           include_subdomains: Annotated[bool | None, "Whether to include subdomains"] = False,
            limit: Annotated[int, "Maximum number of URLs to return"] = 5000,
        ) -> list[dict[str, Any]]:
-           """
-           Maps a website to discover URLs.
+           """Maps a website to discover URLs.
 
            Args:
                url: The website URL to map.

@@ -750,18 +735,17 @@ class FirecrawlTool(Tool):
 
        def firecrawl_search(
            query: Annotated[str, "The search query string."],
-           firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-           firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
+           firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+           firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
            limit: Annotated[int, "Maximum number of results to return"] = 5,
-           tbs: Annotated[Optional[str], "Time filter (e.g., 'qdr:d' for past day)"] = None,
-           filter: Annotated[Optional[str], "Custom result filter"] = None,
-           lang: Annotated[Optional[str], "Language code"] = "en",
-           country: Annotated[Optional[str], "Country code"] = "us",
-           location: Annotated[Optional[str], "Geo-targeting location"] = None,
-           timeout: Annotated[Optional[int], "Request timeout in milliseconds"] = None,
+           tbs: Annotated[str | None, "Time filter (e.g., 'qdr:d' for past day)"] = None,
+           filter: Annotated[str | None, "Custom result filter"] = None,
+           lang: Annotated[str | None, "Language code"] = "en",
+           country: Annotated[str | None, "Country code"] = "us",
+           location: Annotated[str | None, "Geo-targeting location"] = None,
+           timeout: Annotated[int | None, "Request timeout in milliseconds"] = None,
        ) -> list[dict[str, Any]]:
-           """
-           Executes a search operation using the Firecrawl API.
+           """Executes a search operation using the Firecrawl API.
 
            Args:
                query: The search query string.

@@ -798,16 +782,15 @@ class FirecrawlTool(Tool):
 
        def firecrawl_deep_research(
            query: Annotated[str, "The research query or topic to investigate."],
-           firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-           firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
+           firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+           firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
            max_depth: Annotated[int, "Maximum depth of research exploration"] = 7,
            time_limit: Annotated[int, "Time limit in seconds for research"] = 270,
            max_urls: Annotated[int, "Maximum number of URLs to process"] = 20,
-           analysis_prompt: Annotated[Optional[str], "Custom prompt for analysis"] = None,
-           system_prompt: Annotated[Optional[str], "Custom system prompt"] = None,
+           analysis_prompt: Annotated[str | None, "Custom prompt for analysis"] = None,
+           system_prompt: Annotated[str | None, "Custom system prompt"] = None,
        ) -> dict[str, Any]:
-           """
-           Executes a deep research operation using the Firecrawl API.
+           """Executes a deep research operation using the Firecrawl API.
 
            Args:
                query: The research query or topic to investigate.
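For orientation, a hedged usage sketch of the class whose hunks appear above; the keyword-only constructor and its `str | None` defaults come straight from the diff (and the export path from `@export_module("autogen.tools.experimental")`), while the key value is a placeholder:

```python
from autogen.tools.experimental import FirecrawlTool

# Placeholder key; per the class docstring it can also be supplied via the environment.
tool = FirecrawlTool(
    firecrawl_api_key="fc-YOUR-KEY",
    firecrawl_api_url=None,  # None presumably falls back to the hosted Firecrawl endpoint
)
```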
autogen/tools/experimental/google/authentication/credentials_local_provider.py

@@ -25,7 +25,7 @@ class GoogleCredentialsLocalProvider(GoogleCredentialsProvider):
        self,
        client_secret_file: str,
        scopes: list[str],  # e.g. ['https://www.googleapis.com/auth/drive/readonly']
-       token_file: Optional[str] = None,
+       token_file: str | None = None,
        port: int = 8080,
    ) -> None:
        """A Google credentials provider that gets the credentials locally.
autogen/tools/experimental/google/drive/drive_functions.py

@@ -4,7 +4,7 @@
 
 import io
 from pathlib import Path
-from typing import Any, Optional
+from typing import Any
 
 from .....import_utils import optional_import_block, require_optional_import
 from ..model import GoogleFileInfo

@@ -25,7 +25,7 @@ __all__ = [
     ],
     "google-api",
 )
-def list_files_and_folders(service: Any, page_size: int, folder_id: Optional[str]) -> list[GoogleFileInfo]:
+def list_files_and_folders(service: Any, page_size: int, folder_id: str | None) -> list[GoogleFileInfo]:
     kwargs = {
         "pageSize": page_size,
         "fields": "nextPageToken, files(id, name, mimeType)",

@@ -40,7 +40,7 @@ def list_files_and_folders(service: Any, page_size: int, folder_id: Optional[str
     return result
 
 
-def _get_file_extension(mime_type: str) -> Optional[str]:
+def _get_file_extension(mime_type: str) -> str | None:
     """Returns the correct file extension for a given MIME type."""
     mime_extensions = {
         "application/vnd.google-apps.document": "docx",  # Google Docs → Word

@@ -70,7 +70,7 @@ def download_file(
     file_name: str,
     mime_type: str,
     download_folder: Path,
-    subfolder_path: Optional[str] = None,
+    subfolder_path: str | None = None,
 ) -> str:
     """Download or export file based on its MIME type, optionally saving to a subfolder."""
     file_extension = _get_file_extension(mime_type)
autogen/tools/experimental/google/drive/toolkit.py

@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: Apache-2.0
 
 from pathlib import Path
-from typing import Annotated, Literal, Optional, Union
+from typing import Annotated, Literal
 
 from .....doc_utils import export_module
 from .....import_utils import optional_import_block

@@ -29,8 +29,8 @@ class GoogleDriveToolkit(Toolkit, GoogleToolkitProtocol):
        self,
        *,
        credentials: "Credentials",
-       download_folder: Union[Path, str],
-       exclude: Optional[list[Literal["list_drive_files_and_folders", "download_file_from_drive"]]] = None,
+       download_folder: Path | str,
+       exclude: list[Literal["list_drive_files_and_folders", "download_file_from_drive"]] | None = None,
        api_version: str = "v3",
    ) -> None:
        """Initialize the Google Drive tool map.

@@ -51,7 +51,7 @@ class GoogleDriveToolkit(Toolkit, GoogleToolkitProtocol):
        def list_drive_files_and_folders(
            page_size: Annotated[int, "The number of files to list per page."] = 10,
            folder_id: Annotated[
-               Optional[str],
+               str | None,
                "The ID of the folder to list files from. If not provided, lists all files in the root folder.",
            ] = None,
        ) -> list[GoogleFileInfo]:

@@ -61,7 +61,7 @@ class GoogleDriveToolkit(Toolkit, GoogleToolkitProtocol):
        def download_file_from_drive(
            file_info: Annotated[GoogleFileInfo, "The file info to download."],
            subfolder_path: Annotated[
-               Optional[str],
+               str | None,
                "The subfolder path to save the file in. If not provided, saves in the main download folder.",
            ] = None,
        ) -> str:
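A similar hedged sketch for the Google Drive toolkit hunks above; the import path is assumed from the file list, and obtaining `credentials` is elided:

```python
from pathlib import Path
from typing import Any

from autogen.tools.experimental.google.drive import GoogleDriveToolkit  # import path assumed


def build_drive_toolkit(credentials: Any) -> GoogleDriveToolkit:
    # `download_folder` is now annotated `Path | str`, and `exclude` is
    # `list[Literal[...]] | None`, matching the hunks above.
    return GoogleDriveToolkit(
        credentials=credentials,                # google.oauth2 Credentials, obtained elsewhere
        download_folder=Path("./drive_downloads"),
        exclude=["download_file_from_drive"],   # keep only the listing tool
    )
```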
autogen/tools/experimental/google_search/google_search.py

@@ -4,7 +4,7 @@
 
 
 import logging
-from typing import Annotated, Any, Optional
+from typing import Annotated, Any
 
 from ....doc_utils import export_module
 from ....import_utils import optional_import_block, require_optional_import

@@ -49,8 +49,8 @@ class GoogleSearchTool(Tool):
    def __init__(
        self,
        *,
-       search_api_key: Optional[str] = None,
-       search_engine_id: Optional[str] = None,
+       search_api_key: str | None = None,
+       search_engine_id: str | None = None,
        use_internal_llm_tool_if_available: bool = True,
    ):
        """GoogleSearchTool is a tool that uses the Google Search API to perform a search.

@@ -74,8 +74,8 @@ class GoogleSearchTool(Tool):
 
        def google_search(
            query: Annotated[str, "The search query."],
-           search_api_key: Annotated[Optional[str], Depends(on(search_api_key))],
-           search_engine_id: Annotated[Optional[str], Depends(on(search_engine_id))],
+           search_api_key: Annotated[str | None, Depends(on(search_api_key))],
+           search_engine_id: Annotated[str | None, Depends(on(search_engine_id))],
            num_results: Annotated[int, "The number of results to return."] = 10,
        ) -> list[dict[str, Any]]:
            if search_api_key is None or search_engine_id is None:
autogen/tools/experimental/google_search/youtube_search.py

@@ -4,7 +4,7 @@
 
 
 import logging
-from typing import Annotated, Any, Optional
+from typing import Annotated, Any
 
 from ....doc_utils import export_module
 from ....import_utils import optional_import_block, require_optional_import

@@ -48,7 +48,7 @@ def _execute_search_query(query: str, youtube_api_key: str, max_results: int) ->
     ["googleapiclient"],
     "google-search",
 )
-def _get_video_details(video_ids:
+def _get_video_details(video_ids: list[str], youtube_api_key: str) -> Any:
     """Get detailed information about specific YouTube videos.
 
     Args:

@@ -79,7 +79,7 @@ def _youtube_search(
     youtube_api_key: str,
     max_results: int,
     include_video_details: bool = True,
-) ->
+) -> list[dict[str, Any]]:
     """Search YouTube videos based on a query.
 
     Args:

@@ -140,7 +140,7 @@ class YoutubeSearchTool(Tool):
    def __init__(
        self,
        *,
-       youtube_api_key: Optional[str] = None,
+       youtube_api_key: str | None = None,
    ):
        """Initialize a YouTube search tool.
 

@@ -157,7 +157,7 @@ class YoutubeSearchTool(Tool):
            youtube_api_key: Annotated[str, Depends(on(youtube_api_key))],
            max_results: Annotated[int, "The maximum number of results to return."] = 5,
            include_video_details: Annotated[bool, "Whether to include detailed video information."] = True,
-       ) ->
+       ) -> list[dict[str, Any]]:
            """Search for YouTube videos based on a query.
 
            Args:
autogen/tools/experimental/messageplatform/discord/discord.py

@@ -4,7 +4,7 @@
 
 import asyncio
 from datetime import datetime, timezone
-from typing import Annotated, Any, Optional
+from typing import Annotated, Any
 
 from .....doc_utils import export_module
 from .....import_utils import optional_import_block, require_optional_import

@@ -26,8 +26,7 @@ class DiscordSendTool(Tool):
    """Sends a message to a Discord channel."""
 
    def __init__(self, *, bot_token: str, channel_name: str, guild_name: str) -> None:
-       """
-       Initialize the DiscordSendTool.
+       """Initialize the DiscordSendTool.
 
        Args:
            bot_token: The bot token to use for sending messages.

@@ -42,8 +41,7 @@ class DiscordSendTool(Tool):
            guild_name: Annotated[str, Depends(on(guild_name))],
            channel_name: Annotated[str, Depends(on(channel_name))],
        ) -> Any:
-           """
-           Sends a message to a Discord channel.
+           """Sends a message to a Discord channel.
 
            Args:
                message: The message to send to the channel.

@@ -123,8 +121,7 @@ class DiscordRetrieveTool(Tool):
    """Retrieves messages from a Discord channel."""
 
    def __init__(self, *, bot_token: str, channel_name: str, guild_name: str) -> None:
-       """
-       Initialize the DiscordRetrieveTool.
+       """Initialize the DiscordRetrieveTool.
 
        Args:
            bot_token: The bot token to use for retrieving messages.

@@ -137,15 +134,14 @@ class DiscordRetrieveTool(Tool):
            guild_name: Annotated[str, Depends(on(guild_name))],
            channel_name: Annotated[str, Depends(on(channel_name))],
            messages_since: Annotated[
-               Optional[str],
+               str | None,
                "Date to retrieve messages from (ISO format) OR Discord snowflake ID. If None, retrieves latest messages.",
            ] = None,
            maximum_messages: Annotated[
-               Optional[int], "Maximum number of messages to retrieve. If None, retrieves all messages since date."
+               int | None, "Maximum number of messages to retrieve. If None, retrieves all messages since date."
            ] = None,
        ) -> Any:
-           """
-           Retrieves messages from a Discord channel.
+           """Retrieves messages from a Discord channel.
 
            Args:
                bot_token: The bot token to use for Discord. (uses dependency injection)

@@ -162,7 +158,7 @@ class DiscordRetrieveTool(Tool):
            client = Client(intents=intents)
            result_future: asyncio.Future[list[dict[str, Any]]] = asyncio.Future()
 
-           messages_since_date: Optional[str] = None
+           messages_since_date: str | None = None
            if messages_since is not None:
                if DiscordRetrieveTool._is_snowflake(messages_since):
                    messages_since_date = DiscordRetrieveTool._snowflake_to_iso(messages_since)
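The `Annotated[..., Depends(on(...))]` parameters that recur in these hunks are ag2's dependency-injection idiom: a value bound at construction time is supplied on every call and hidden from the LLM-facing signature. A minimal sketch, assuming `Depends` and `on` are importable from `autogen.tools.dependency_injection` (the module appears in the file list above):

```python
from typing import Annotated, Any

from autogen.tools.dependency_injection import Depends, on  # import path assumed


def make_search_fn(api_key: str | None):
    def search(
        query: Annotated[str, "The search query."],
        api_key: Annotated[str | None, Depends(on(api_key))],  # injected; not exposed to the model
    ) -> list[dict[str, Any]]:
        # Placeholder body: a real tool would call the backing API here.
        return [{"query": query, "authenticated": api_key is not None}]

    return search
```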