ag2 0.9.7__py3-none-any.whl → 0.9.9__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (236)
  1. {ag2-0.9.7.dist-info → ag2-0.9.9.dist-info}/METADATA +102 -75
  2. ag2-0.9.9.dist-info/RECORD +387 -0
  3. autogen/__init__.py +1 -2
  4. autogen/_website/generate_api_references.py +4 -5
  5. autogen/_website/generate_mkdocs.py +9 -15
  6. autogen/_website/notebook_processor.py +13 -14
  7. autogen/_website/process_notebooks.py +10 -10
  8. autogen/_website/utils.py +5 -4
  9. autogen/agentchat/agent.py +13 -13
  10. autogen/agentchat/assistant_agent.py +7 -6
  11. autogen/agentchat/contrib/agent_eval/agent_eval.py +3 -3
  12. autogen/agentchat/contrib/agent_eval/critic_agent.py +3 -3
  13. autogen/agentchat/contrib/agent_eval/quantifier_agent.py +3 -3
  14. autogen/agentchat/contrib/agent_eval/subcritic_agent.py +3 -3
  15. autogen/agentchat/contrib/agent_optimizer.py +3 -3
  16. autogen/agentchat/contrib/capabilities/generate_images.py +11 -11
  17. autogen/agentchat/contrib/capabilities/teachability.py +15 -15
  18. autogen/agentchat/contrib/capabilities/transforms.py +17 -18
  19. autogen/agentchat/contrib/capabilities/transforms_util.py +5 -5
  20. autogen/agentchat/contrib/capabilities/vision_capability.py +4 -3
  21. autogen/agentchat/contrib/captainagent/agent_builder.py +30 -30
  22. autogen/agentchat/contrib/captainagent/captainagent.py +22 -21
  23. autogen/agentchat/contrib/captainagent/tool_retriever.py +2 -3
  24. autogen/agentchat/contrib/gpt_assistant_agent.py +9 -9
  25. autogen/agentchat/contrib/graph_rag/document.py +3 -3
  26. autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +3 -3
  27. autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py +6 -6
  28. autogen/agentchat/contrib/graph_rag/graph_query_engine.py +3 -3
  29. autogen/agentchat/contrib/graph_rag/neo4j_graph_query_engine.py +5 -11
  30. autogen/agentchat/contrib/graph_rag/neo4j_graph_rag_capability.py +6 -6
  31. autogen/agentchat/contrib/graph_rag/neo4j_native_graph_query_engine.py +7 -7
  32. autogen/agentchat/contrib/graph_rag/neo4j_native_graph_rag_capability.py +6 -6
  33. autogen/agentchat/contrib/img_utils.py +1 -1
  34. autogen/agentchat/contrib/llamaindex_conversable_agent.py +11 -11
  35. autogen/agentchat/contrib/llava_agent.py +18 -4
  36. autogen/agentchat/contrib/math_user_proxy_agent.py +11 -11
  37. autogen/agentchat/contrib/multimodal_conversable_agent.py +8 -8
  38. autogen/agentchat/contrib/qdrant_retrieve_user_proxy_agent.py +6 -5
  39. autogen/agentchat/contrib/rag/chromadb_query_engine.py +22 -26
  40. autogen/agentchat/contrib/rag/llamaindex_query_engine.py +14 -17
  41. autogen/agentchat/contrib/rag/mongodb_query_engine.py +27 -37
  42. autogen/agentchat/contrib/rag/query_engine.py +7 -5
  43. autogen/agentchat/contrib/retrieve_assistant_agent.py +5 -5
  44. autogen/agentchat/contrib/retrieve_user_proxy_agent.py +8 -7
  45. autogen/agentchat/contrib/society_of_mind_agent.py +15 -14
  46. autogen/agentchat/contrib/swarm_agent.py +76 -98
  47. autogen/agentchat/contrib/text_analyzer_agent.py +7 -7
  48. autogen/agentchat/contrib/vectordb/base.py +10 -18
  49. autogen/agentchat/contrib/vectordb/chromadb.py +2 -1
  50. autogen/agentchat/contrib/vectordb/couchbase.py +18 -20
  51. autogen/agentchat/contrib/vectordb/mongodb.py +6 -5
  52. autogen/agentchat/contrib/vectordb/pgvectordb.py +40 -41
  53. autogen/agentchat/contrib/vectordb/qdrant.py +5 -5
  54. autogen/agentchat/contrib/web_surfer.py +20 -19
  55. autogen/agentchat/conversable_agent.py +292 -290
  56. autogen/agentchat/group/context_str.py +1 -3
  57. autogen/agentchat/group/context_variables.py +15 -25
  58. autogen/agentchat/group/group_tool_executor.py +10 -10
  59. autogen/agentchat/group/group_utils.py +15 -15
  60. autogen/agentchat/group/guardrails.py +7 -7
  61. autogen/agentchat/group/handoffs.py +19 -36
  62. autogen/agentchat/group/multi_agent_chat.py +7 -7
  63. autogen/agentchat/group/on_condition.py +4 -7
  64. autogen/agentchat/group/on_context_condition.py +4 -7
  65. autogen/agentchat/group/patterns/auto.py +8 -7
  66. autogen/agentchat/group/patterns/manual.py +7 -6
  67. autogen/agentchat/group/patterns/pattern.py +13 -12
  68. autogen/agentchat/group/patterns/random.py +3 -3
  69. autogen/agentchat/group/patterns/round_robin.py +3 -3
  70. autogen/agentchat/group/reply_result.py +2 -4
  71. autogen/agentchat/group/speaker_selection_result.py +5 -5
  72. autogen/agentchat/group/targets/group_chat_target.py +7 -6
  73. autogen/agentchat/group/targets/group_manager_target.py +4 -4
  74. autogen/agentchat/group/targets/transition_target.py +2 -1
  75. autogen/agentchat/groupchat.py +60 -63
  76. autogen/agentchat/realtime/experimental/audio_adapters/twilio_audio_adapter.py +4 -4
  77. autogen/agentchat/realtime/experimental/audio_adapters/websocket_audio_adapter.py +4 -4
  78. autogen/agentchat/realtime/experimental/clients/gemini/client.py +7 -7
  79. autogen/agentchat/realtime/experimental/clients/oai/base_client.py +8 -8
  80. autogen/agentchat/realtime/experimental/clients/oai/rtc_client.py +6 -6
  81. autogen/agentchat/realtime/experimental/clients/realtime_client.py +10 -9
  82. autogen/agentchat/realtime/experimental/realtime_agent.py +10 -9
  83. autogen/agentchat/realtime/experimental/realtime_observer.py +3 -3
  84. autogen/agentchat/realtime/experimental/realtime_swarm.py +44 -44
  85. autogen/agentchat/user_proxy_agent.py +10 -9
  86. autogen/agentchat/utils.py +3 -3
  87. autogen/agents/contrib/time/time_reply_agent.py +6 -5
  88. autogen/agents/contrib/time/time_tool_agent.py +2 -1
  89. autogen/agents/experimental/deep_research/deep_research.py +3 -3
  90. autogen/agents/experimental/discord/discord.py +2 -2
  91. autogen/agents/experimental/document_agent/chroma_query_engine.py +29 -44
  92. autogen/agents/experimental/document_agent/docling_doc_ingest_agent.py +9 -14
  93. autogen/agents/experimental/document_agent/document_agent.py +15 -16
  94. autogen/agents/experimental/document_agent/document_conditions.py +3 -3
  95. autogen/agents/experimental/document_agent/document_utils.py +5 -9
  96. autogen/agents/experimental/document_agent/inmemory_query_engine.py +14 -20
  97. autogen/agents/experimental/document_agent/parser_utils.py +4 -4
  98. autogen/agents/experimental/document_agent/url_utils.py +14 -23
  99. autogen/agents/experimental/reasoning/reasoning_agent.py +33 -33
  100. autogen/agents/experimental/slack/slack.py +2 -2
  101. autogen/agents/experimental/telegram/telegram.py +2 -3
  102. autogen/agents/experimental/websurfer/websurfer.py +4 -4
  103. autogen/agents/experimental/wikipedia/wikipedia.py +5 -7
  104. autogen/browser_utils.py +8 -8
  105. autogen/cache/abstract_cache_base.py +5 -5
  106. autogen/cache/cache.py +12 -12
  107. autogen/cache/cache_factory.py +4 -4
  108. autogen/cache/cosmos_db_cache.py +9 -9
  109. autogen/cache/disk_cache.py +6 -6
  110. autogen/cache/in_memory_cache.py +4 -4
  111. autogen/cache/redis_cache.py +4 -4
  112. autogen/code_utils.py +18 -18
  113. autogen/coding/base.py +6 -6
  114. autogen/coding/docker_commandline_code_executor.py +9 -9
  115. autogen/coding/func_with_reqs.py +7 -6
  116. autogen/coding/jupyter/base.py +3 -3
  117. autogen/coding/jupyter/docker_jupyter_server.py +3 -4
  118. autogen/coding/jupyter/import_utils.py +3 -3
  119. autogen/coding/jupyter/jupyter_client.py +5 -5
  120. autogen/coding/jupyter/jupyter_code_executor.py +3 -4
  121. autogen/coding/jupyter/local_jupyter_server.py +2 -6
  122. autogen/coding/local_commandline_code_executor.py +8 -7
  123. autogen/coding/markdown_code_extractor.py +1 -2
  124. autogen/coding/utils.py +1 -2
  125. autogen/doc_utils.py +3 -2
  126. autogen/environments/docker_python_environment.py +19 -29
  127. autogen/environments/python_environment.py +8 -17
  128. autogen/environments/system_python_environment.py +3 -4
  129. autogen/environments/venv_python_environment.py +8 -12
  130. autogen/environments/working_directory.py +1 -2
  131. autogen/events/agent_events.py +106 -109
  132. autogen/events/base_event.py +6 -5
  133. autogen/events/client_events.py +15 -14
  134. autogen/events/helpers.py +1 -1
  135. autogen/events/print_event.py +4 -5
  136. autogen/fast_depends/_compat.py +10 -15
  137. autogen/fast_depends/core/build.py +17 -36
  138. autogen/fast_depends/core/model.py +64 -113
  139. autogen/fast_depends/dependencies/model.py +2 -1
  140. autogen/fast_depends/dependencies/provider.py +3 -2
  141. autogen/fast_depends/library/model.py +4 -4
  142. autogen/fast_depends/schema.py +7 -7
  143. autogen/fast_depends/use.py +17 -25
  144. autogen/fast_depends/utils.py +10 -30
  145. autogen/formatting_utils.py +6 -6
  146. autogen/graph_utils.py +1 -4
  147. autogen/import_utils.py +38 -27
  148. autogen/interop/crewai/crewai.py +2 -2
  149. autogen/interop/interoperable.py +2 -2
  150. autogen/interop/langchain/langchain_chat_model_factory.py +3 -2
  151. autogen/interop/langchain/langchain_tool.py +2 -6
  152. autogen/interop/litellm/litellm_config_factory.py +6 -7
  153. autogen/interop/pydantic_ai/pydantic_ai.py +4 -7
  154. autogen/interop/registry.py +2 -1
  155. autogen/io/base.py +5 -5
  156. autogen/io/run_response.py +33 -32
  157. autogen/io/websockets.py +6 -5
  158. autogen/json_utils.py +1 -2
  159. autogen/llm_config/__init__.py +11 -0
  160. autogen/llm_config/client.py +58 -0
  161. autogen/llm_config/config.py +384 -0
  162. autogen/llm_config/entry.py +154 -0
  163. autogen/logger/base_logger.py +4 -3
  164. autogen/logger/file_logger.py +2 -1
  165. autogen/logger/logger_factory.py +2 -2
  166. autogen/logger/logger_utils.py +2 -2
  167. autogen/logger/sqlite_logger.py +2 -1
  168. autogen/math_utils.py +4 -5
  169. autogen/mcp/__main__.py +6 -6
  170. autogen/mcp/helpers.py +4 -4
  171. autogen/mcp/mcp_client.py +170 -29
  172. autogen/mcp/mcp_proxy/fastapi_code_generator_helpers.py +3 -4
  173. autogen/mcp/mcp_proxy/mcp_proxy.py +23 -26
  174. autogen/mcp/mcp_proxy/operation_grouping.py +4 -5
  175. autogen/mcp/mcp_proxy/operation_renaming.py +6 -10
  176. autogen/mcp/mcp_proxy/security.py +2 -3
  177. autogen/messages/agent_messages.py +96 -98
  178. autogen/messages/base_message.py +6 -5
  179. autogen/messages/client_messages.py +15 -14
  180. autogen/messages/print_message.py +4 -5
  181. autogen/oai/__init__.py +1 -2
  182. autogen/oai/anthropic.py +42 -41
  183. autogen/oai/bedrock.py +68 -57
  184. autogen/oai/cerebras.py +26 -25
  185. autogen/oai/client.py +113 -139
  186. autogen/oai/client_utils.py +3 -3
  187. autogen/oai/cohere.py +34 -11
  188. autogen/oai/gemini.py +39 -17
  189. autogen/oai/gemini_types.py +11 -12
  190. autogen/oai/groq.py +22 -10
  191. autogen/oai/mistral.py +17 -11
  192. autogen/oai/oai_models/__init__.py +14 -2
  193. autogen/oai/oai_models/_models.py +2 -2
  194. autogen/oai/oai_models/chat_completion.py +13 -14
  195. autogen/oai/oai_models/chat_completion_message.py +11 -9
  196. autogen/oai/oai_models/chat_completion_message_tool_call.py +26 -3
  197. autogen/oai/oai_models/chat_completion_token_logprob.py +3 -4
  198. autogen/oai/oai_models/completion_usage.py +8 -9
  199. autogen/oai/ollama.py +19 -9
  200. autogen/oai/openai_responses.py +40 -17
  201. autogen/oai/openai_utils.py +48 -38
  202. autogen/oai/together.py +29 -14
  203. autogen/retrieve_utils.py +6 -7
  204. autogen/runtime_logging.py +5 -4
  205. autogen/token_count_utils.py +7 -4
  206. autogen/tools/contrib/time/time.py +0 -1
  207. autogen/tools/dependency_injection.py +5 -6
  208. autogen/tools/experimental/browser_use/browser_use.py +10 -10
  209. autogen/tools/experimental/code_execution/python_code_execution.py +5 -7
  210. autogen/tools/experimental/crawl4ai/crawl4ai.py +12 -15
  211. autogen/tools/experimental/deep_research/deep_research.py +9 -8
  212. autogen/tools/experimental/duckduckgo/duckduckgo_search.py +5 -11
  213. autogen/tools/experimental/firecrawl/firecrawl_tool.py +98 -115
  214. autogen/tools/experimental/google/authentication/credentials_local_provider.py +1 -1
  215. autogen/tools/experimental/google/drive/drive_functions.py +4 -4
  216. autogen/tools/experimental/google/drive/toolkit.py +5 -5
  217. autogen/tools/experimental/google_search/google_search.py +5 -5
  218. autogen/tools/experimental/google_search/youtube_search.py +5 -5
  219. autogen/tools/experimental/messageplatform/discord/discord.py +8 -12
  220. autogen/tools/experimental/messageplatform/slack/slack.py +14 -20
  221. autogen/tools/experimental/messageplatform/telegram/telegram.py +8 -12
  222. autogen/tools/experimental/perplexity/perplexity_search.py +18 -29
  223. autogen/tools/experimental/reliable/reliable.py +68 -74
  224. autogen/tools/experimental/searxng/searxng_search.py +20 -19
  225. autogen/tools/experimental/tavily/tavily_search.py +12 -19
  226. autogen/tools/experimental/web_search_preview/web_search_preview.py +13 -7
  227. autogen/tools/experimental/wikipedia/wikipedia.py +7 -10
  228. autogen/tools/function_utils.py +7 -7
  229. autogen/tools/tool.py +8 -6
  230. autogen/types.py +2 -2
  231. autogen/version.py +1 -1
  232. ag2-0.9.7.dist-info/RECORD +0 -421
  233. autogen/llm_config.py +0 -385
  234. {ag2-0.9.7.dist-info → ag2-0.9.9.dist-info}/WHEEL +0 -0
  235. {ag2-0.9.7.dist-info → ag2-0.9.9.dist-info}/licenses/LICENSE +0 -0
  236. {ag2-0.9.7.dist-info → ag2-0.9.9.dist-info}/licenses/NOTICE.md +0 -0

autogen/tools/experimental/browser_use/browser_use.py

@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from typing import Annotated, Any, Optional, Union
+from typing import Annotated, Any, Optional
 
 from pydantic import BaseModel, field_validator
 
@@ -32,11 +32,11 @@ class ExtractedContent(BaseModel):
     """
 
     content: str
-    url: Optional[str]
+    url: str | None
 
     @field_validator("url")
     @classmethod
-    def check_url(cls, v: str) -> Optional[str]:
+    def check_url(cls, v: str) -> str | None:
         """Check if the URL is about:blank and return None if it is.
 
         Args:
@@ -57,7 +57,7 @@ class BrowserUseResult(BaseModel):
     """
 
     extracted_content: list[ExtractedContent]
-    final_result: Optional[str]
+    final_result: str | None
 
 
 @require_optional_import(
@@ -78,10 +78,10 @@ class BrowserUseTool(Tool):
     def __init__(  # type: ignore[no-any-unimported]
         self,
         *,
-        llm_config: Optional[Union[LLMConfig, dict[str, Any]]] = None,
+        llm_config: LLMConfig | dict[str, Any] | None = None,
         browser: Optional["Browser"] = None,
-        agent_kwargs: Optional[dict[str, Any]] = None,
-        browser_config: Optional[dict[str, Any]] = None,
+        agent_kwargs: dict[str, Any] | None = None,
+        browser_config: dict[str, Any] | None = None,
     ):
         """Use the browser to perform a task.
 
@@ -104,8 +104,8 @@ class BrowserUseTool(Tool):
 
         async def browser_use(  # type: ignore[no-any-unimported]
             task: Annotated[str, "The task to perform."],
-            llm_config: Annotated[Union[LLMConfig, dict[str, Any]], Depends(on(llm_config))],
-            browser: Annotated[Optional[Browser], Depends(on(browser))],
+            llm_config: Annotated[LLMConfig | dict[str, Any], Depends(on(llm_config))],
+            browser: Annotated[Browser | None, Depends(on(browser))],
             agent_kwargs: Annotated[dict[str, Any], Depends(on(agent_kwargs))],
             browser_config: Annotated[dict[str, Any], Depends(on(browser_config))],
         ) -> BrowserUseResult:
@@ -145,7 +145,7 @@ class BrowserUseTool(Tool):
         )
 
     @staticmethod
-    def _get_controller(llm_config: Union[LLMConfig, dict[str, Any]]) -> Any:
+    def _get_controller(llm_config: LLMConfig | dict[str, Any]) -> Any:
         response_format = (
             llm_config["config_list"][0].get("response_format", None)
             if "config_list" in llm_config
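
The browser_use.py hunks above show the change that repeats throughout this release: Optional[X] and Union[X, Y] annotations rewritten as PEP 604 unions (X | None, X | Y). The two spellings denote the same types, and on Python 3.10+ they even compare equal at runtime. A minimal standalone sketch, with illustrative names rather than code from the package:

    from typing import Optional, Union

    def old_style(url: Optional[str], config: Union[dict, str]) -> None: ...
    def new_style(url: str | None, config: dict | str) -> None: ...

    # PEP 604 unions compare equal to their typing counterparts
    # (the bare X | Y syntax needs Python 3.10+ when evaluated at runtime):
    assert (str | None) == Optional[str]
    assert (dict | str) == Union[dict, str]

Note that the new import line keeps Optional: the forward reference Optional["Browser"] is left as-is in this file, so only Union becomes unused.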

autogen/tools/experimental/code_execution/python_code_execution.py

@@ -4,7 +4,7 @@
 
 import os
 import tempfile
-from typing import Annotated, Any, Optional
+from typing import Annotated, Any
 
 from pydantic import BaseModel, Field
 
@@ -24,11 +24,10 @@ class PythonCodeExecutionTool(Tool):
         self,
         *,
         timeout: int = 30,
-        working_directory: Optional[WorkingDirectory] = None,
-        python_environment: Optional[PythonEnvironment] = None,
+        working_directory: WorkingDirectory | None = None,
+        python_environment: PythonEnvironment | None = None,
     ) -> None:
-        """
-        Initialize the PythonCodeExecutionTool.
+        """Initialize the PythonCodeExecutionTool.
 
         **CAUTION**: If provided a local environment, this tool will execute code in your local environment, which can be dangerous if the code is untrusted.
 
@@ -56,8 +55,7 @@ class PythonCodeExecutionTool(Tool):
         async def execute_python_code(
             code_execution_request: Annotated[CodeExecutionRequest, "Python code and the libraries required"],
         ) -> dict[str, Any]:
-            """
-            Executes Python code in the attached environment and returns the result.
+            """Executes Python code in the attached environment and returns the result.
 
             Args:
                 code_execution_request (CodeExecutionRequest): The Python code and libraries to execute
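
The docstring edits in this file show the other mechanical change applied across the release: summaries move onto the line that opens the docstring, matching pydocstyle's D200 (one-line docstrings fit on a single line) and D212 (multi-line summaries start on the first line). As a sketch:

    def before() -> None:
        """
        Initialize the PythonCodeExecutionTool.
        """

    def after() -> None:
        """Initialize the PythonCodeExecutionTool."""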

autogen/tools/experimental/crawl4ai/crawl4ai.py

@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from typing import Annotated, Any, Optional, Union
+from typing import Annotated, Any, Optional
 
 from pydantic import BaseModel
 
@@ -23,18 +23,15 @@ __all__ = ["Crawl4AITool"]
 @require_optional_import(["crawl4ai"], "crawl4ai")
 @export_module("autogen.tools.experimental")
 class Crawl4AITool(Tool):
-    """
-    Crawl a website and extract information using the crawl4ai library.
-    """
+    """Crawl a website and extract information using the crawl4ai library."""
 
     def __init__(
         self,
-        llm_config: Optional[Union[LLMConfig, dict[str, Any]]] = None,
-        extraction_model: Optional[type[BaseModel]] = None,
-        llm_strategy_kwargs: Optional[dict[str, Any]] = None,
+        llm_config: LLMConfig | dict[str, Any] | None = None,
+        extraction_model: type[BaseModel] | None = None,
+        llm_strategy_kwargs: dict[str, Any] | None = None,
     ) -> None:
-        """
-        Initialize the Crawl4AITool.
+        """Initialize the Crawl4AITool.
 
         Args:
             llm_config: The config dictionary for the LLM model. If None, the tool will run without LLM.
@@ -70,8 +67,8 @@ class Crawl4AITool(Tool):
             url: Annotated[str, "The url to crawl and extract information from."],
             instruction: Annotated[str, "The instruction to provide on how and what to extract."],
             llm_config: Annotated[Any, Depends(on(llm_config))],
-            llm_strategy_kwargs: Annotated[Optional[dict[str, Any]], Depends(on(llm_strategy_kwargs))],
-            extraction_model: Annotated[Optional[type[BaseModel]], Depends(on(extraction_model))],
+            llm_strategy_kwargs: Annotated[dict[str, Any] | None, Depends(on(llm_strategy_kwargs))],
+            extraction_model: Annotated[type[BaseModel] | None, Depends(on(extraction_model))],
         ) -> Any:
             browser_cfg = BrowserConfig(headless=True)
             crawl_config = Crawl4AITool._get_crawl_config(
@@ -90,7 +87,7 @@
             )
 
     @staticmethod
-    def _validate_llm_strategy_kwargs(llm_strategy_kwargs: Optional[dict[str, Any]], llm_config_provided: bool) -> None:
+    def _validate_llm_strategy_kwargs(llm_strategy_kwargs: dict[str, Any] | None, llm_config_provided: bool) -> None:
         if not llm_strategy_kwargs:
             return
 
@@ -120,10 +117,10 @@
 
     @staticmethod
     def _get_crawl_config(  # type: ignore[no-any-unimported]
-        llm_config: Union[LLMConfig, dict[str, Any]],
+        llm_config: LLMConfig | dict[str, Any],
         instruction: str,
-        llm_strategy_kwargs: Optional[dict[str, Any]] = None,
-        extraction_model: Optional[type[BaseModel]] = None,
+        llm_strategy_kwargs: dict[str, Any] | None = None,
+        extraction_model: type[BaseModel] | None = None,
     ) -> "CrawlerRunConfig":
         lite_llm_config = LiteLLmConfigFactory.create_lite_llm_config(llm_config)
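
crawl4ai.py, like browser_use.py above, binds constructor arguments into the tool function through Annotated[..., Depends(on(value))]. The following is a self-contained illustration of that mechanism, not ag2's implementation (ag2 routes this through its vendored fast_depends; see autogen/fast_depends/ and autogen/tools/dependency_injection.py in the file list). The point of the pattern is that injected parameters can be kept out of the signature exposed to the LLM; this sketch omits that part:

    import functools
    from typing import Annotated, Any, get_args, get_origin, get_type_hints

    class Depends:
        def __init__(self, supplier):
            self.supplier = supplier

    def on(value):
        # Freeze a constructor argument so it can be injected at call time.
        return lambda: value

    def inject(func):
        # Collect every parameter whose Annotated metadata carries a Depends.
        hints = get_type_hints(func, include_extras=True)
        bound = {
            name: meta.supplier()
            for name, hint in hints.items()
            if get_origin(hint) is Annotated
            for meta in get_args(hint)[1:]
            if isinstance(meta, Depends)
        }

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Caller-supplied kwargs override the injected values.
            return func(*args, **{**bound, **kwargs})

        return wrapper

    llm_config = {"model": "gpt-4o-mini"}

    @inject
    def crawl(
        url: str,
        llm_config: Annotated[dict[str, Any], Depends(on(llm_config))],
    ) -> str:
        return f"crawling {url} with {llm_config['model']}"

    print(crawl("https://example.com"))  # the caller never passes llm_config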

autogen/tools/experimental/deep_research/deep_research.py

@@ -3,7 +3,8 @@
 # SPDX-License-Identifier: Apache-2.0
 
 import copy
-from typing import Annotated, Any, Callable, Union
+from collections.abc import Callable
+from typing import Annotated, Any
 
 from pydantic import BaseModel, Field
 
@@ -77,7 +78,7 @@ class DeepResearchTool(Tool):
 
     def __init__(
         self,
-        llm_config: Union[LLMConfig, dict[str, Any]],
+        llm_config: LLMConfig | dict[str, Any],
         max_web_steps: int = 30,
     ):
         """Initialize the DeepResearchTool.
@@ -117,7 +118,7 @@
 
         def delegate_research_task(
            task: Annotated[str, "The task to perform a research on."],
-            llm_config: Annotated[Union[LLMConfig, dict[str, Any]], Depends(on(llm_config))],
+            llm_config: Annotated[LLMConfig | dict[str, Any], Depends(on(llm_config))],
            max_web_steps: Annotated[int, Depends(on(max_web_steps))],
        ) -> str:
            """Delegate a research task to the agent.
@@ -165,11 +166,11 @@
 
     @staticmethod
     def _get_split_question_and_answer_subquestions(
-        llm_config: Union[LLMConfig, dict[str, Any]], max_web_steps: int
+        llm_config: LLMConfig | dict[str, Any], max_web_steps: int
     ) -> Callable[..., Any]:
         def split_question_and_answer_subquestions(
             question: Annotated[str, "The question to split and answer."],
-            llm_config: Annotated[Union[LLMConfig, dict[str, Any]], Depends(on(llm_config))],
+            llm_config: Annotated[LLMConfig | dict[str, Any], Depends(on(llm_config))],
             max_web_steps: Annotated[int, Depends(on(max_web_steps))],
         ) -> str:
             decomposition_agent = ConversableAgent(
@@ -233,7 +234,7 @@
 
     @staticmethod
     def _get_generate_subquestions(
-        llm_config: Union[LLMConfig, dict[str, Any]],
+        llm_config: LLMConfig | dict[str, Any],
         max_web_steps: int,
     ) -> Callable[..., str]:
         """Get the generate_subquestions method.
@@ -248,7 +249,7 @@
 
         def generate_subquestions(
             task: Task,
-            llm_config: Annotated[Union[LLMConfig, dict[str, Any]], Depends(on(llm_config))],
+            llm_config: Annotated[LLMConfig | dict[str, Any], Depends(on(llm_config))],
             max_web_steps: Annotated[int, Depends(on(max_web_steps))],
         ) -> str:
             if not task.subquestions:
@@ -270,7 +271,7 @@
     @staticmethod
     def _answer_question(
         question: str,
-        llm_config: Union[LLMConfig, dict[str, Any]],
+        llm_config: LLMConfig | dict[str, Any],
         max_web_steps: int,
     ) -> str:
         from ....agents.experimental.websurfer import WebSurferAgent
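
Besides the union rewrite, deep_research.py moves Callable from typing to collections.abc. Since Python 3.9 (PEP 585) the abc class is subscriptable directly, and typing.Callable is documented as a deprecated alias for it; both spell the same type. A small sketch:

    from collections.abc import Callable

    # Subscripts exactly as typing.Callable did:
    Factory = Callable[..., str]

    def make_greeter(prefix: str) -> Factory:
        def greeter(name: str) -> str:
            return f"{prefix}, {name}!"
        return greeter

    print(make_greeter("Hello")("ag2"))  # Hello, ag2!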

autogen/tools/experimental/duckduckgo/duckduckgo_search.py

@@ -21,8 +21,7 @@ def _execute_duckduckgo_query(
     query: str,
     num_results: int = 5,
 ) -> list[dict[str, Any]]:
-    """
-    Execute a search query using the DuckDuckGo Search API.
+    """Execute a search query using the DuckDuckGo Search API.
 
     Args:
         query (str): The search query string.
@@ -45,8 +44,7 @@ def _duckduckgo_search(
     query: str,
     num_results: int = 5,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a DuckDuckGo search and format the results.
+    """Perform a DuckDuckGo search and format the results.
 
     This function takes search parameters, executes the query using `_execute_duckduckgo_query`,
     and formats the results into a list of dictionaries containing title, link, and snippet.
@@ -71,24 +69,20 @@ def _duckduckgo_search(
 
 @export_module("autogen.tools.experimental")
 class DuckDuckGoSearchTool(Tool):
-    """
-    DuckDuckGoSearchTool is a tool that uses DuckDuckGo to perform a search.
+    """DuckDuckGoSearchTool is a tool that uses DuckDuckGo to perform a search.
 
     This tool allows agents to leverage the DuckDuckGo search engine for information retrieval.
     DuckDuckGo does not require an API key, making it easy to use.
     """
 
     def __init__(self) -> None:
-        """
-        Initializes the DuckDuckGoSearchTool.
-        """
+        """Initializes the DuckDuckGoSearchTool."""
 
         def duckduckgo_search(
             query: Annotated[str, "The search query."],
             num_results: Annotated[int, "The number of results to return."] = 5,
         ) -> list[dict[str, Any]]:
-            """
-            Performs a search using the DuckDuckGo Search API and returns formatted results.
+            """Performs a search using the DuckDuckGo Search API and returns formatted results.
 
             Args:
                 query: The search query string.

autogen/tools/experimental/firecrawl/firecrawl_tool.py

@@ -4,7 +4,7 @@
 
 import logging
 import os
-from typing import Annotated, Any, Optional, Union
+from typing import Annotated, Any
 
 from ....doc_utils import export_module
 from ....import_utils import optional_import_block, require_optional_import
@@ -27,16 +27,15 @@ with optional_import_block():
 def _execute_firecrawl_scrape(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    formats: Optional[list[str]] = None,
-    include_tags: Optional[list[str]] = None,
-    exclude_tags: Optional[list[str]] = None,
-    headers: Optional[dict[str, str]] = None,
-    wait_for: Optional[int] = None,
-    timeout: Optional[int] = None,
+    firecrawl_api_url: str | None = None,
+    formats: list[str] | None = None,
+    include_tags: list[str] | None = None,
+    exclude_tags: list[str] | None = None,
+    headers: dict[str, str] | None = None,
+    wait_for: int | None = None,
+    timeout: int | None = None,
 ) -> dict[str, Any]:
-    """
-    Execute a scrape operation using the Firecrawl API.
+    """Execute a scrape operation using the Firecrawl API.
 
     Args:
         url (str): The URL to scrape.
@@ -78,17 +77,16 @@ def _execute_firecrawl_scrape(
 def _execute_firecrawl_crawl(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    formats: Optional[list[str]] = None,
-    include_paths: Optional[list[str]] = None,
-    exclude_paths: Optional[list[str]] = None,
-    max_depth: Optional[int] = None,
+    formats: list[str] | None = None,
+    include_paths: list[str] | None = None,
+    exclude_paths: list[str] | None = None,
+    max_depth: int | None = None,
     allow_backward_crawling: bool = False,
     allow_external_content_links: bool = False,
 ) -> dict[str, Any]:
-    """
-    Execute a crawl operation using the Firecrawl API.
+    """Execute a crawl operation using the Firecrawl API.
 
     Args:
         url (str): The starting URL to crawl.
@@ -137,14 +135,13 @@ def _execute_firecrawl_crawl(
 def _execute_firecrawl_map(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    search: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
+    search: str | None = None,
     ignore_sitemap: bool = False,
     include_subdomains: bool = False,
     limit: int = 5000,
 ) -> dict[str, Any]:
-    """
-    Execute a map operation using the Firecrawl API to get URLs from a website.
+    """Execute a map operation using the Firecrawl API to get URLs from a website.
 
     Args:
         url (str): The website URL to map.
@@ -179,17 +176,16 @@ def _execute_firecrawl_map(
 def _execute_firecrawl_search(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    tbs: Optional[str] = None,
-    filter: Optional[str] = None,
+    tbs: str | None = None,
+    filter: str | None = None,
     lang: str = "en",
     country: str = "us",
-    location: Optional[str] = None,
-    timeout: Optional[int] = None,
+    location: str | None = None,
+    timeout: int | None = None,
 ) -> dict[str, Any]:
-    """
-    Execute a search operation using the Firecrawl API.
+    """Execute a search operation using the Firecrawl API.
 
     Args:
         query (str): The search query string.
@@ -230,15 +226,14 @@ def _execute_firecrawl_search(
 def _execute_firecrawl_deep_research(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     max_depth: int = 7,
     time_limit: int = 270,
     max_urls: int = 20,
-    analysis_prompt: Optional[str] = None,
-    system_prompt: Optional[str] = None,
+    analysis_prompt: str | None = None,
+    system_prompt: str | None = None,
 ) -> dict[str, Any]:
-    """
-    Execute a deep research operation using the Firecrawl API.
+    """Execute a deep research operation using the Firecrawl API.
 
     Args:
         query (str): The research query or topic to investigate.
@@ -269,16 +264,15 @@ def _execute_firecrawl_deep_research(
 def _firecrawl_scrape(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    formats: Optional[list[str]] = None,
-    include_tags: Optional[list[str]] = None,
-    exclude_tags: Optional[list[str]] = None,
-    headers: Optional[dict[str, str]] = None,
-    wait_for: Optional[int] = None,
-    timeout: Optional[int] = None,
+    firecrawl_api_url: str | None = None,
+    formats: list[str] | None = None,
+    include_tags: list[str] | None = None,
+    exclude_tags: list[str] | None = None,
+    headers: dict[str, str] | None = None,
+    wait_for: int | None = None,
+    timeout: int | None = None,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl scrape and format the results.
+    """Perform a Firecrawl scrape and format the results.
 
     Args:
         url (str): The URL to scrape.
@@ -324,17 +318,16 @@ def _firecrawl_scrape(
 def _firecrawl_crawl(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    formats: Optional[list[str]] = None,
-    include_paths: Optional[list[str]] = None,
-    exclude_paths: Optional[list[str]] = None,
-    max_depth: Optional[int] = None,
+    formats: list[str] | None = None,
+    include_paths: list[str] | None = None,
+    exclude_paths: list[str] | None = None,
+    max_depth: int | None = None,
     allow_backward_crawling: bool = False,
     allow_external_content_links: bool = False,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl crawl and format the results.
+    """Perform a Firecrawl crawl and format the results.
 
     Args:
         url (str): The starting URL to crawl.
@@ -387,14 +380,13 @@ def _firecrawl_crawl(
 def _firecrawl_map(
     url: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
-    search: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
+    search: str | None = None,
     ignore_sitemap: bool = False,
     include_subdomains: bool = False,
     limit: int = 5000,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl map operation and format the results.
+    """Perform a Firecrawl map operation and format the results.
 
     Args:
         url (str): The website URL to map.
@@ -440,17 +432,16 @@ def _firecrawl_map(
 def _firecrawl_search(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     limit: int = 5,
-    tbs: Optional[str] = None,
-    filter: Optional[str] = None,
+    tbs: str | None = None,
+    filter: str | None = None,
     lang: str = "en",
     country: str = "us",
-    location: Optional[str] = None,
-    timeout: Optional[int] = None,
+    location: str | None = None,
+    timeout: int | None = None,
 ) -> list[dict[str, Any]]:
-    """
-    Perform a Firecrawl search and format the results.
+    """Perform a Firecrawl search and format the results.
 
     Args:
         query (str): The search query string.
@@ -504,15 +495,14 @@ def _firecrawl_search(
 def _firecrawl_deep_research(
     query: str,
     firecrawl_api_key: str,
-    firecrawl_api_url: Optional[str] = None,
+    firecrawl_api_url: str | None = None,
     max_depth: int = 7,
     time_limit: int = 270,
     max_urls: int = 20,
-    analysis_prompt: Optional[str] = None,
-    system_prompt: Optional[str] = None,
+    analysis_prompt: str | None = None,
+    system_prompt: str | None = None,
 ) -> dict[str, Any]:
-    """
-    Perform a Firecrawl deep research operation and format the results.
+    """Perform a Firecrawl deep research operation and format the results.
 
     Args:
         query (str): The research query or topic to investigate.
@@ -568,8 +558,7 @@ def _firecrawl_deep_research(
 
 @export_module("autogen.tools.experimental")
 class FirecrawlTool(Tool):
-    """
-    FirecrawlTool is a tool that uses the Firecrawl API to scrape, crawl, map, search, and research websites.
+    """FirecrawlTool is a tool that uses the Firecrawl API to scrape, crawl, map, search, and research websites.
 
     This tool allows agents to leverage Firecrawl for web content extraction, discovery, and research.
     It requires a Firecrawl API key, which can be provided during initialization or set as
@@ -590,12 +579,11 @@ class FirecrawlTool(Tool):
     def __init__(
         self,
         *,
-        llm_config: Optional[Union[LLMConfig, dict[str, Any]]] = None,
-        firecrawl_api_key: Optional[str] = None,
-        firecrawl_api_url: Optional[str] = None,
+        llm_config: LLMConfig | dict[str, Any] | None = None,
+        firecrawl_api_key: str | None = None,
+        firecrawl_api_url: str | None = None,
     ):
-        """
-        Initializes the FirecrawlTool.
+        """Initializes the FirecrawlTool.
 
         Args:
             llm_config (Optional[Union[LLMConfig, dict[str, Any]]]): LLM configuration. (Currently unused but kept for potential future integration).
@@ -618,17 +606,16 @@ class FirecrawlTool(Tool):
 
         def firecrawl_scrape(
             url: Annotated[str, "The URL to scrape."],
-            firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-            firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
-            formats: Annotated[Optional[list[str]], "Output formats (e.g., ['markdown', 'html'])"] = None,
-            include_tags: Annotated[Optional[list[str]], "HTML tags to include"] = None,
-            exclude_tags: Annotated[Optional[list[str]], "HTML tags to exclude"] = None,
-            headers: Annotated[Optional[dict[str, str]], "HTTP headers to use"] = None,
-            wait_for: Annotated[Optional[int], "Time to wait for page load in milliseconds"] = None,
-            timeout: Annotated[Optional[int], "Request timeout in milliseconds"] = None,
+            firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+            firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
+            formats: Annotated[list[str] | None, "Output formats (e.g., ['markdown', 'html'])"] = None,
+            include_tags: Annotated[list[str] | None, "HTML tags to include"] = None,
+            exclude_tags: Annotated[list[str] | None, "HTML tags to exclude"] = None,
+            headers: Annotated[dict[str, str] | None, "HTTP headers to use"] = None,
+            wait_for: Annotated[int | None, "Time to wait for page load in milliseconds"] = None,
+            timeout: Annotated[int | None, "Request timeout in milliseconds"] = None,
         ) -> list[dict[str, Any]]:
-            """
-            Scrapes a single URL and returns the content.
+            """Scrapes a single URL and returns the content.
 
             Args:
                 url: The URL to scrape.
@@ -663,18 +650,17 @@ class FirecrawlTool(Tool):
 
         def firecrawl_crawl(
             url: Annotated[str, "The starting URL to crawl."],
-            firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-            firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
+            firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+            firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
             limit: Annotated[int, "Maximum number of pages to crawl"] = 5,
-            formats: Annotated[Optional[list[str]], "Output formats (e.g., ['markdown', 'html'])"] = None,
-            include_paths: Annotated[Optional[list[str]], "URL patterns to include"] = None,
-            exclude_paths: Annotated[Optional[list[str]], "URL patterns to exclude"] = None,
-            max_depth: Annotated[Optional[int], "Maximum crawl depth"] = None,
-            allow_backward_crawling: Annotated[Optional[bool], "Allow crawling backward links"] = False,
-            allow_external_content_links: Annotated[Optional[bool], "Allow external links"] = False,
+            formats: Annotated[list[str] | None, "Output formats (e.g., ['markdown', 'html'])"] = None,
+            include_paths: Annotated[list[str] | None, "URL patterns to include"] = None,
+            exclude_paths: Annotated[list[str] | None, "URL patterns to exclude"] = None,
+            max_depth: Annotated[int | None, "Maximum crawl depth"] = None,
+            allow_backward_crawling: Annotated[bool | None, "Allow crawling backward links"] = False,
+            allow_external_content_links: Annotated[bool | None, "Allow external links"] = False,
         ) -> list[dict[str, Any]]:
-            """
-            Crawls a website starting from a URL and returns the content from multiple pages.
+            """Crawls a website starting from a URL and returns the content from multiple pages.
 
             Args:
                 url: The starting URL to crawl.
@@ -711,15 +697,14 @@ class FirecrawlTool(Tool):
 
         def firecrawl_map(
             url: Annotated[str, "The website URL to map."],
-            firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-            firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
-            search: Annotated[Optional[str], "Search term to filter URLs"] = None,
-            ignore_sitemap: Annotated[Optional[bool], "Whether to ignore the sitemap"] = False,
-            include_subdomains: Annotated[Optional[bool], "Whether to include subdomains"] = False,
+            firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+            firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
+            search: Annotated[str | None, "Search term to filter URLs"] = None,
+            ignore_sitemap: Annotated[bool | None, "Whether to ignore the sitemap"] = False,
+            include_subdomains: Annotated[bool | None, "Whether to include subdomains"] = False,
             limit: Annotated[int, "Maximum number of URLs to return"] = 5000,
         ) -> list[dict[str, Any]]:
-            """
-            Maps a website to discover URLs.
+            """Maps a website to discover URLs.
 
             Args:
                 url: The website URL to map.
@@ -750,18 +735,17 @@ class FirecrawlTool(Tool):
 
        def firecrawl_search(
             query: Annotated[str, "The search query string."],
-            firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-            firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
+            firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+            firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
             limit: Annotated[int, "Maximum number of results to return"] = 5,
-            tbs: Annotated[Optional[str], "Time filter (e.g., 'qdr:d' for past day)"] = None,
-            filter: Annotated[Optional[str], "Custom result filter"] = None,
-            lang: Annotated[Optional[str], "Language code"] = "en",
-            country: Annotated[Optional[str], "Country code"] = "us",
-            location: Annotated[Optional[str], "Geo-targeting location"] = None,
-            timeout: Annotated[Optional[int], "Request timeout in milliseconds"] = None,
+            tbs: Annotated[str | None, "Time filter (e.g., 'qdr:d' for past day)"] = None,
+            filter: Annotated[str | None, "Custom result filter"] = None,
+            lang: Annotated[str | None, "Language code"] = "en",
+            country: Annotated[str | None, "Country code"] = "us",
+            location: Annotated[str | None, "Geo-targeting location"] = None,
+            timeout: Annotated[int | None, "Request timeout in milliseconds"] = None,
         ) -> list[dict[str, Any]]:
-            """
-            Executes a search operation using the Firecrawl API.
+            """Executes a search operation using the Firecrawl API.
 
             Args:
                 query: The search query string.
@@ -798,16 +782,15 @@ class FirecrawlTool(Tool):
 
         def firecrawl_deep_research(
             query: Annotated[str, "The research query or topic to investigate."],
-            firecrawl_api_key: Annotated[Optional[str], Depends(on(self.firecrawl_api_key))],
-            firecrawl_api_url: Annotated[Optional[str], Depends(on(self.firecrawl_api_url))],
+            firecrawl_api_key: Annotated[str | None, Depends(on(self.firecrawl_api_key))],
+            firecrawl_api_url: Annotated[str | None, Depends(on(self.firecrawl_api_url))],
             max_depth: Annotated[int, "Maximum depth of research exploration"] = 7,
             time_limit: Annotated[int, "Time limit in seconds for research"] = 270,
             max_urls: Annotated[int, "Maximum number of URLs to process"] = 20,
-            analysis_prompt: Annotated[Optional[str], "Custom prompt for analysis"] = None,
-            system_prompt: Annotated[Optional[str], "Custom system prompt"] = None,
+            analysis_prompt: Annotated[str | None, "Custom prompt for analysis"] = None,
+            system_prompt: Annotated[str | None, "Custom system prompt"] = None,
         ) -> dict[str, Any]:
-            """
-            Executes a deep research operation using the Firecrawl API.
+            """Executes a deep research operation using the Firecrawl API.
 
             Args:
                 query: The research query or topic to investigate.
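
One detail worth calling out in the Firecrawl signatures (and in the other tools above): Annotated[type, "description"] carries the human-readable parameter description as annotation metadata, which a tool layer can read back when building the function schema shown to the LLM. A minimal sketch of recovering that metadata, with an illustrative function name:

    from typing import Annotated, get_args, get_origin, get_type_hints

    def firecrawl_map_like(
        url: Annotated[str, "The website URL to map."],
        limit: Annotated[int, "Maximum number of URLs to return"] = 5000,
    ) -> list[str]:
        return [url][:limit]

    # Pull the descriptions back out of the annotations:
    hints = get_type_hints(firecrawl_map_like, include_extras=True)
    for name, hint in hints.items():
        if get_origin(hint) is Annotated:
            base, *meta = get_args(hint)
            print(f"{name}: {base.__name__} ({meta[0]})")
    # url: str (The website URL to map.)
    # limit: int (Maximum number of URLs to return)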