camel-ai 0.2.21__py3-none-any.whl → 0.2.23a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic; consult the registry's advisory page for more details.

Files changed (106)
  1. camel/__init__.py +1 -1
  2. camel/agents/_types.py +41 -0
  3. camel/agents/_utils.py +188 -0
  4. camel/agents/chat_agent.py +556 -965
  5. camel/agents/knowledge_graph_agent.py +7 -1
  6. camel/agents/multi_hop_generator_agent.py +1 -1
  7. camel/configs/base_config.py +10 -13
  8. camel/configs/deepseek_config.py +4 -30
  9. camel/configs/gemini_config.py +5 -31
  10. camel/configs/openai_config.py +14 -32
  11. camel/configs/qwen_config.py +36 -36
  12. camel/datagen/self_improving_cot.py +79 -1
  13. camel/datagen/self_instruct/filter/instruction_filter.py +19 -3
  14. camel/datagen/self_instruct/self_instruct.py +7 -2
  15. camel/datasets/__init__.py +28 -0
  16. camel/datasets/base.py +969 -0
  17. camel/embeddings/openai_embedding.py +10 -1
  18. camel/environments/__init__.py +16 -0
  19. camel/environments/base.py +503 -0
  20. camel/extractors/__init__.py +16 -0
  21. camel/extractors/base.py +263 -0
  22. camel/interpreters/docker/Dockerfile +12 -0
  23. camel/interpreters/docker_interpreter.py +19 -1
  24. camel/interpreters/subprocess_interpreter.py +42 -17
  25. camel/loaders/__init__.py +2 -0
  26. camel/loaders/mineru_extractor.py +250 -0
  27. camel/memories/agent_memories.py +16 -1
  28. camel/memories/blocks/chat_history_block.py +10 -2
  29. camel/memories/blocks/vectordb_block.py +1 -0
  30. camel/memories/context_creators/score_based.py +20 -3
  31. camel/memories/records.py +10 -0
  32. camel/messages/base.py +8 -8
  33. camel/models/_utils.py +57 -0
  34. camel/models/aiml_model.py +48 -17
  35. camel/models/anthropic_model.py +41 -3
  36. camel/models/azure_openai_model.py +39 -3
  37. camel/models/base_model.py +132 -4
  38. camel/models/cohere_model.py +88 -11
  39. camel/models/deepseek_model.py +107 -63
  40. camel/models/gemini_model.py +133 -15
  41. camel/models/groq_model.py +72 -10
  42. camel/models/internlm_model.py +14 -3
  43. camel/models/litellm_model.py +9 -2
  44. camel/models/mistral_model.py +42 -5
  45. camel/models/model_manager.py +48 -3
  46. camel/models/moonshot_model.py +33 -4
  47. camel/models/nemotron_model.py +32 -3
  48. camel/models/nvidia_model.py +43 -3
  49. camel/models/ollama_model.py +139 -17
  50. camel/models/openai_audio_models.py +7 -1
  51. camel/models/openai_compatible_model.py +37 -3
  52. camel/models/openai_model.py +158 -46
  53. camel/models/qwen_model.py +61 -4
  54. camel/models/reka_model.py +53 -3
  55. camel/models/samba_model.py +209 -4
  56. camel/models/sglang_model.py +153 -14
  57. camel/models/siliconflow_model.py +16 -3
  58. camel/models/stub_model.py +46 -4
  59. camel/models/togetherai_model.py +38 -3
  60. camel/models/vllm_model.py +37 -3
  61. camel/models/yi_model.py +36 -3
  62. camel/models/zhipuai_model.py +38 -3
  63. camel/retrievers/__init__.py +3 -0
  64. camel/retrievers/hybrid_retrival.py +237 -0
  65. camel/toolkits/__init__.py +9 -2
  66. camel/toolkits/arxiv_toolkit.py +2 -1
  67. camel/toolkits/ask_news_toolkit.py +4 -2
  68. camel/toolkits/base.py +22 -3
  69. camel/toolkits/code_execution.py +2 -0
  70. camel/toolkits/dappier_toolkit.py +2 -1
  71. camel/toolkits/data_commons_toolkit.py +38 -12
  72. camel/toolkits/function_tool.py +13 -0
  73. camel/toolkits/github_toolkit.py +5 -1
  74. camel/toolkits/google_maps_toolkit.py +2 -1
  75. camel/toolkits/google_scholar_toolkit.py +2 -0
  76. camel/toolkits/human_toolkit.py +0 -3
  77. camel/toolkits/linkedin_toolkit.py +3 -2
  78. camel/toolkits/meshy_toolkit.py +3 -2
  79. camel/toolkits/mineru_toolkit.py +178 -0
  80. camel/toolkits/networkx_toolkit.py +240 -0
  81. camel/toolkits/notion_toolkit.py +2 -0
  82. camel/toolkits/openbb_toolkit.py +3 -2
  83. camel/toolkits/reddit_toolkit.py +11 -3
  84. camel/toolkits/retrieval_toolkit.py +6 -1
  85. camel/toolkits/semantic_scholar_toolkit.py +2 -1
  86. camel/toolkits/stripe_toolkit.py +8 -2
  87. camel/toolkits/sympy_toolkit.py +44 -1
  88. camel/toolkits/video_toolkit.py +2 -0
  89. camel/toolkits/whatsapp_toolkit.py +3 -2
  90. camel/toolkits/zapier_toolkit.py +191 -0
  91. camel/types/__init__.py +2 -2
  92. camel/types/agents/__init__.py +16 -0
  93. camel/types/agents/tool_calling_record.py +52 -0
  94. camel/types/enums.py +3 -0
  95. camel/types/openai_types.py +16 -14
  96. camel/utils/__init__.py +2 -1
  97. camel/utils/async_func.py +2 -2
  98. camel/utils/commons.py +114 -1
  99. camel/verifiers/__init__.py +23 -0
  100. camel/verifiers/base.py +340 -0
  101. camel/verifiers/models.py +82 -0
  102. camel/verifiers/python_verifier.py +202 -0
  103. {camel_ai-0.2.21.dist-info → camel_ai-0.2.23a0.dist-info}/METADATA +273 -256
  104. {camel_ai-0.2.21.dist-info → camel_ai-0.2.23a0.dist-info}/RECORD +106 -85
  105. {camel_ai-0.2.21.dist-info → camel_ai-0.2.23a0.dist-info}/WHEEL +1 -1
  106. {camel_ai-0.2.21.dist-info → camel_ai-0.2.23a0.dist-info}/LICENSE +0 -0
@@ -14,6 +14,7 @@
14
14
  import logging
15
15
  from typing import Any, Dict, List, Optional, Union
16
16
 
17
+ from camel.toolkits import FunctionTool
17
18
  from camel.toolkits.base import BaseToolkit
18
19
 
19
20
  logger = logging.getLogger(__name__)
@@ -35,8 +36,18 @@ class DataCommonsToolkit(BaseToolkit):
35
36
  Refer to https://datacommons.org/browser/ for more details.
36
37
  """
37
38
 
38
- @staticmethod
39
    def __init__(self, timeout: Optional[float] = None):
        r"""Initialize the DataCommonsToolkit.

        Args:
            timeout (Optional[float], optional): Maximum time in seconds to
                wait for API calls to complete. If None, will wait indefinitely.
                (default: :obj:`None`)
        """
        # Delegate to BaseToolkit, which owns the shared timeout handling
        # applied uniformly across all toolkits (see camel.toolkits.base).
        super().__init__(timeout=timeout)
48
+
39
49
  def query_data_commons(
50
+ self,
40
51
  query_string: str,
41
52
  ) -> Optional[List[Dict[str, Any]]]:
42
53
  r"""Query the Data Commons knowledge graph using SPARQL.
@@ -76,9 +87,8 @@ class DataCommonsToolkit(BaseToolkit):
76
87
  )
77
88
  return None
78
89
 
79
- @staticmethod
80
90
  def get_triples(
81
- dcids: Union[str, List[str]], limit: int = 500
91
+ self, dcids: Union[str, List[str]], limit: int = 500
82
92
  ) -> Optional[Dict[str, List[tuple]]]:
83
93
  r"""Retrieve triples associated with nodes.
84
94
 
@@ -117,8 +127,8 @@ class DataCommonsToolkit(BaseToolkit):
117
127
  logger.error(f"An error occurred: {e!s}")
118
128
  return None
119
129
 
120
- @staticmethod
121
130
  def get_stat_time_series(
131
+ self,
122
132
  place: str,
123
133
  stat_var: str,
124
134
  measurement_method: Optional[str] = None,
@@ -166,9 +176,8 @@ class DataCommonsToolkit(BaseToolkit):
166
176
  )
167
177
  return None
168
178
 
169
- @staticmethod
170
179
  def get_property_labels(
171
- dcids: Union[str, List[str]], out: bool = True
180
+ self, dcids: Union[str, List[str]], out: bool = True
172
181
  ) -> Optional[Dict[str, List[str]]]:
173
182
  r"""Retrieves and analyzes property labels for given DCIDs.
174
183
 
@@ -195,8 +204,8 @@ class DataCommonsToolkit(BaseToolkit):
195
204
  )
196
205
  return None
197
206
 
198
- @staticmethod
199
207
  def get_property_values(
208
+ self,
200
209
  dcids: Union[str, List[str]],
201
210
  prop: str,
202
211
  out: Optional[bool] = True,
@@ -239,9 +248,8 @@ class DataCommonsToolkit(BaseToolkit):
239
248
  )
240
249
  return None
241
250
 
242
- @staticmethod
243
251
  def get_places_in(
244
- dcids: list, place_type: str
252
+ self, dcids: list, place_type: str
245
253
  ) -> Optional[Dict[str, Any]]:
246
254
  r"""Retrieves places within a given place type.
247
255
 
@@ -269,8 +277,8 @@ class DataCommonsToolkit(BaseToolkit):
269
277
  )
270
278
  return None
271
279
 
272
- @staticmethod
273
280
  def get_stat_value(
281
+ self,
274
282
  place: str,
275
283
  stat_var: str,
276
284
  date: Optional[str] = None,
@@ -326,8 +334,7 @@ class DataCommonsToolkit(BaseToolkit):
326
334
  )
327
335
  return None
328
336
 
329
- @staticmethod
330
- def get_stat_all(places: str, stat_vars: str) -> Optional[dict]:
337
+ def get_stat_all(self, places: str, stat_vars: str) -> Optional[dict]:
331
338
  r"""Retrieves the value of a statistical variable for a given place
332
339
  and date.
333
340
 
@@ -358,3 +365,22 @@ class DataCommonsToolkit(BaseToolkit):
358
365
  f"statistical variable: {e!s}"
359
366
  )
360
367
  return None
368
+
369
+ def get_tools(self) -> List[FunctionTool]:
370
+ r"""Returns a list of FunctionTool objects representing the functions
371
+ in the toolkit.
372
+
373
+ Returns:
374
+ List[FunctionTool]: A list of FunctionTool objects representing
375
+ the functions in the toolkit.
376
+ """
377
+ return [
378
+ FunctionTool(self.query_data_commons),
379
+ FunctionTool(self.get_triples),
380
+ FunctionTool(self.get_stat_time_series),
381
+ FunctionTool(self.get_property_labels),
382
+ FunctionTool(self.get_property_values),
383
+ FunctionTool(self.get_places_in),
384
+ FunctionTool(self.get_stat_value),
385
+ FunctionTool(self.get_stat_all),
386
+ ]
@@ -398,6 +398,19 @@ class FunctionTool:
398
398
  f"Error: {e}"
399
399
  )
400
400
 
401
    async def async_call(self, *args: Any, **kwargs: Any) -> Any:
        r"""Invoke the wrapped function, awaiting it when it is a coroutine.

        Args:
            *args (Any): Positional arguments forwarded to the wrapped
                function.
            **kwargs (Any): Keyword arguments forwarded to the wrapped
                function.

        Returns:
            Any: The wrapped function's result, or the synthesized output
                when ``synthesize_output`` is enabled.
        """
        if self.synthesize_output:
            # NOTE(review): the raw ``args`` tuple and ``kwargs`` dict are
            # passed as two positional values — confirm this matches
            # ``synthesize_execution_output``'s signature.
            result = self.synthesize_execution_output(args, kwargs)
            return result
        if self.is_async:
            return await self.func(*args, **kwargs)
        else:
            return self.func(*args, **kwargs)
409
+
410
    @property
    def is_async(self) -> bool:
        r"""bool: Whether the wrapped function is a coroutine function."""
        return inspect.iscoroutinefunction(self.func)
413
+
401
414
  @staticmethod
402
415
  def validate_openai_tool_schema(
403
416
  openai_tool_schema: Dict[str, Any],
@@ -39,7 +39,10 @@ class GithubToolkit(BaseToolkit):
39
39
 
40
40
  @dependencies_required('github')
41
41
  def __init__(
42
- self, repo_name: str, access_token: Optional[str] = None
42
+ self,
43
+ repo_name: str,
44
+ access_token: Optional[str] = None,
45
+ timeout: Optional[float] = None,
43
46
  ) -> None:
44
47
  r"""Initializes a new instance of the GitHubToolkit class.
45
48
 
@@ -49,6 +52,7 @@ class GithubToolkit(BaseToolkit):
49
52
  with GitHub. If not provided, it will be obtained using the
50
53
  `get_github_access_token` method.
51
54
  """
55
+ super().__init__(timeout=timeout)
52
56
  from github import Auth, Github
53
57
 
54
58
  if access_token is None:
@@ -101,7 +101,8 @@ class GoogleMapsToolkit(BaseToolkit):
101
101
  """
102
102
 
103
103
  @dependencies_required('googlemaps')
104
- def __init__(self) -> None:
104
+ def __init__(self, timeout: Optional[float] = None) -> None:
105
+ super().__init__(timeout=timeout)
105
106
  import googlemaps
106
107
 
107
108
  api_key = os.environ.get('GOOGLE_API_KEY')
@@ -39,6 +39,7 @@ class GoogleScholarToolkit(BaseToolkit):
39
39
  use_free_proxies: bool = False,
40
40
  proxy_http: Optional[str] = None,
41
41
  proxy_https: Optional[str] = None,
42
+ timeout: Optional[float] = None,
42
43
  ) -> None:
43
44
  r"""Initializes the GoogleScholarToolkit with the author's identifier.
44
45
 
@@ -54,6 +55,7 @@ class GoogleScholarToolkit(BaseToolkit):
54
55
  proxy_https ( Optional[str]): Proxy https address pass to pg.
55
56
  SingleProxy. (default: :obj:`None`)
56
57
  """
58
+ super().__init__(timeout=timeout)
57
59
  from scholarly import ProxyGenerator, scholarly
58
60
 
59
61
  # Set Free Proxies is needed
@@ -24,9 +24,6 @@ logger = logging.getLogger(__name__)
24
24
  class HumanToolkit(BaseToolkit):
25
25
  r"""A class representing a toolkit for human interaction."""
26
26
 
27
- def __init__(self):
28
- pass
29
-
30
27
  def ask_human_via_console(self, question: str) -> str:
31
28
  r"""Ask a question to the human via the console.
32
29
 
@@ -15,7 +15,7 @@
15
15
  import json
16
16
  import os
17
17
  from http import HTTPStatus
18
- from typing import List
18
+ from typing import List, Optional
19
19
 
20
20
  import requests
21
21
 
@@ -33,7 +33,8 @@ class LinkedInToolkit(BaseToolkit):
33
33
  retrieving the authenticated user's profile information.
34
34
  """
35
35
 
36
- def __init__(self):
36
+ def __init__(self, timeout: Optional[float] = None):
37
+ super().__init__(timeout=timeout)
37
38
  self._access_token = self._get_access_token()
38
39
 
39
40
  def create_post(self, text: str) -> dict:
@@ -13,7 +13,7 @@
13
13
  # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14
14
 
15
15
  import os
16
- from typing import Any, Dict
16
+ from typing import Any, Dict, Optional
17
17
 
18
18
  import requests
19
19
 
@@ -38,10 +38,11 @@ class MeshyToolkit(BaseToolkit):
38
38
  (None, 'MESHY_API_KEY'),
39
39
  ]
40
40
  )
41
- def __init__(self):
41
+ def __init__(self, timeout: Optional[float] = None):
42
42
  r"""Initializes the MeshyToolkit with the API key from the
43
43
  environment.
44
44
  """
45
+ super().__init__(timeout=timeout)
45
46
  self.api_key = os.getenv('MESHY_API_KEY')
46
47
 
47
48
  def generate_3d_preview(
@@ -0,0 +1,178 @@
1
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14
+
15
+ from typing import Dict, List, Optional
16
+
17
+ from camel.loaders.mineru_extractor import MinerU
18
+ from camel.toolkits.base import BaseToolkit
19
+ from camel.toolkits.function_tool import FunctionTool
20
+ from camel.utils import api_keys_required
21
+
22
+
23
class MinerUToolkit(BaseToolkit):
    r"""Toolkit for extracting and processing document content
    using MinerU API.

    Provides comprehensive document processing capabilities including content
    extraction from URLs and files, with support for OCR, formula recognition,
    and table detection through the MinerU API service.

    Note:
        - Maximum file size: 200MB per file
        - Maximum pages: 600 pages per file
        - Daily quota: 2000 pages for high-priority parsing
        - Network restrictions may affect certain URLs (e.g., GitHub, AWS)
    """

    @api_keys_required(
        [
            (None, "MINERU_API_KEY"),
        ]
    )
    def __init__(
        self,
        api_key: Optional[str] = None,
        api_url: Optional[str] = "https://mineru.net/api/v4",
        is_ocr: bool = False,
        enable_formula: bool = False,
        enable_table: bool = True,
        layout_model: str = "doclayout_yolo",
        language: str = "en",
        wait: bool = True,
        timeout: float = 300,
    ) -> None:
        r"""Initialize the MinerU document processing toolkit.

        Args:
            api_key (Optional[str]): Authentication key for MinerU API access.
                If not provided, uses MINERU_API_KEY environment variable.
                (default: :obj:`None`)
            api_url (Optional[str]): Base endpoint URL for MinerU API service.
                (default: :obj:`"https://mineru.net/api/v4"`)
            is_ocr (bool): Enable Optical Character Recognition for image-based
                text extraction. (default: :obj:`False`)
            enable_formula (bool): Enable mathematical formula detection and
                recognition. (default: :obj:`False`)
            enable_table (bool): Enable table structure detection and
                extraction. (default: :obj:`True`)
            layout_model (str): Document layout analysis model selection.
                Available options: 'doclayout_yolo', 'layoutlmv3'.
                (default: :obj:`"doclayout_yolo"`)
            language (str): Primary language of the document for processing.
                (default: :obj:`"en"`)
            wait (bool): Block execution until processing completion.
                (default: :obj:`True`)
            timeout (float): Maximum duration in seconds to wait for task
                completion. (default: :obj:`300`)
        """
        # Register with BaseToolkit so the shared timeout handling applies,
        # mirroring every other toolkit in this package.
        super().__init__(timeout=timeout)
        self.client = MinerU(
            api_key=api_key,
            api_url=api_url,
            is_ocr=is_ocr,
            enable_formula=enable_formula,
            enable_table=enable_table,
            layout_model=layout_model,
            language=language,
        )
        self.wait = wait
        # Kept as an explicit attribute because the extraction methods below
        # read ``self.timeout`` directly; BaseToolkit may also store it —
        # confirm against camel.toolkits.base.
        self.timeout = timeout

    def extract_from_urls(
        self,
        # NOTE(review): PEP 604 ``str | List[str]`` is evaluated at class
        # creation and requires Python >= 3.10 — confirm the package's
        # minimum supported version, or switch to ``Union``.
        urls: str | List[str],
    ) -> Dict:
        r"""Process and extract content from one or multiple URLs.

        Args:
            urls (str | List[str]): Target URL or list of URLs for content
                extraction. Supports both single URL string and multiple URLs
                in a list.

        Returns:
            Dict: Response containing either completed task results when wait
                is True, or task/batch identifiers for status tracking when
                wait is False.
        """
        if isinstance(urls, str):
            # Single URL case
            response = self.client.extract_url(url=urls)

            if self.wait:
                return self.client.wait_for_completion(
                    response['task_id'],
                    timeout=self.timeout,  # type: ignore[arg-type]
                )
            return response
        else:
            # Multiple URLs case
            files: List[Dict[str, str | bool]] = [
                {"url": str(url)} for url in urls
            ]
            batch_id = self.client.batch_extract_urls(files=files)

            if self.wait:
                # Batches get a larger floor (600s) since they cover many
                # files; an explicitly larger configured timeout still wins.
                return self.client.wait_for_completion(
                    batch_id,
                    is_batch=True,
                    timeout=self.timeout if self.timeout > 300 else 600,  # type: ignore[arg-type,operator]
                )
            return {"batch_id": batch_id}

    def get_task_status(self, task_id: str) -> Dict:
        r"""Retrieve current status of an individual extraction task.

        Args:
            task_id (str): Unique identifier for the extraction task to check.

        Returns:
            Dict: Status information and results (if task is completed) for
                the specified task.

        Note:
            This is a low-level status checking method. For most use cases,
            prefer using extract_from_url with wait=True for automatic
            completion handling.
        """
        return self.client.get_task_status(task_id)

    def get_batch_status(self, batch_id: str) -> Dict:
        r"""Retrieve current status of a batch extraction task.

        Args:
            batch_id (str): Unique identifier for the batch extraction task
                to check.

        Returns:
            Dict: Comprehensive status information and results for all files
                in the batch task.

        Note:
            This is a low-level status checking method. For most use cases,
            prefer using batch_extract_from_urls with wait=True for automatic
            completion handling.
        """
        return self.client.get_batch_status(batch_id)

    def get_tools(self) -> List[FunctionTool]:
        r"""Retrieve available toolkit functions as FunctionTool objects.

        Returns:
            List[FunctionTool]: Collection of FunctionTool objects representing
                the available document processing functions in this toolkit.
        """
        return [
            FunctionTool(self.extract_from_urls),
            FunctionTool(self.get_task_status),
            FunctionTool(self.get_batch_status),
        ]
@@ -0,0 +1,240 @@
1
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14
+
15
+ import json
16
+ from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union
17
+
18
+ from camel.logger import get_logger
19
+ from camel.toolkits import FunctionTool
20
+ from camel.toolkits.base import BaseToolkit
21
+
22
+ logger = get_logger(__name__)
23
+
24
+
25
class NetworkXToolkit(BaseToolkit):
    r"""A toolkit for creating, manipulating, and analyzing graphs using
    the NetworkX library.
    """

    _nx = None  # Class variable caching the lazily imported networkx module

    @classmethod
    def _get_nx(cls):
        r"""Lazily import networkx module when needed."""
        if cls._nx is None:
            import networkx

            cls._nx = networkx
        return cls._nx

    def __init__(
        self,
        graph_type: Literal[
            'graph', 'digraph', 'multigraph', 'multidigraph'
        ] = 'graph',
        timeout: Optional[float] = None,
    ):
        r"""Initializes the NetworkX graph client.

        Args:
            graph_type (Literal['graph', 'digraph', 'multigraph',
                'multidigraph']):
                Type of graph to create. Options are:
                - 'graph': Undirected graph
                - 'digraph': Directed graph
                - 'multigraph': Undirected graph with parallel edges
                - 'multidigraph': Directed graph with parallel edges
                (default: :obj:`'graph'`)
            timeout (Optional[float], optional): Maximum time in seconds to
                wait for toolkit operations to complete. If None, will wait
                indefinitely. (default: :obj:`None`)
        """
        # Register with BaseToolkit so the shared timeout handling applies,
        # consistent with the other toolkits in this package.
        super().__init__(timeout=timeout)
        nx = self._get_nx()
        graph_types = {
            'graph': nx.Graph,
            'digraph': nx.DiGraph,
            'multigraph': nx.MultiGraph,
            'multidigraph': nx.MultiDiGraph,
        }
        graph_class = graph_types.get(graph_type.lower())
        if graph_class is None:
            raise ValueError(
                f"Invalid graph type: {graph_type}. Must be one "
                f"of: {list(graph_types.keys())}"
            )

        self.graph = graph_class()
        logger.info(f"Initialized NetworkX {graph_type} instance.")

    def add_node(self, node_id: str, **attributes: Any) -> None:
        r"""Adds a node to the graph.

        Args:
            node_id (str): The ID of the node.
            attributes (dict): Additional node attributes.
        """
        logger.info(f"Adding node: {node_id}, attributes: {attributes}")
        self.graph.add_node(node_id, **attributes)

    def add_edge(self, source: str, target: str, **attributes: Any) -> None:
        r"""Adds an edge to the graph.

        Args:
            source (str): Source node ID.
            target (str): Target node ID.
            attributes (dict): Additional edge attributes.
        """
        logger.info(
            f"Adding edge: {source} -> {target}, attributes: {attributes}"
        )
        self.graph.add_edge(source, target, **attributes)

    def get_nodes(self) -> List[str]:
        r"""Returns all nodes in the graph.

        Returns:
            List[str]: A list of node IDs.
        """
        logger.info("Fetching all nodes.")
        return list(self.graph.nodes)

    def get_edges(self) -> List[Tuple[str, str]]:
        r"""Returns all edges in the graph.

        Returns:
            List[Tuple[str, str]]: A list of edges as (source, target).
        """
        logger.info("Fetching all edges.")
        return list(self.graph.edges)

    def get_shortest_path(
        self,
        source: str,
        target: str,
        weight: Optional[Union[str, Callable]] = None,
        method: Literal['dijkstra', 'bellman-ford'] = 'dijkstra',
    ) -> List[str]:
        r"""Finds the shortest path between two nodes.

        Args:
            source (str): The source node ID.
            target (str): The target node ID.
            weight (None, str or function, optional): Edge weights/distances.
                If None, every edge has weight/distance/cost 1.
                If string, use this edge attribute as the edge weight.
                If function, the weight of an edge is the value returned by
                the function. The function must accept three positional
                arguments: the two endpoints and the edge attribute
                dictionary. (default: :obj:`None`)
            method (Literal['dijkstra', 'bellman-ford'], optional): Algorithm
                to compute the path. Ignored if weight is None. (default:
                :obj:`'dijkstra'`)

        Returns:
            List[str]: A list of nodes in the shortest path, or a
                single-element list with an error message when no path exists
                or a node is missing.
        """
        logger.info(
            f"Finding shortest path from '{source}' to '{target}' "
            f"using {method} algorithm"
        )
        # Bind nx before the try block: if the lazy import itself failed, the
        # `except nx.*` clauses below would otherwise raise NameError.
        nx = self._get_nx()
        try:
            path = nx.shortest_path(
                self.graph,
                source=source,
                target=target,
                weight=weight,
                method=method,
            )
            logger.debug(f"Found path: {' -> '.join(path)}")
            return path
        except nx.NetworkXNoPath:
            error_msg = f"No path exists between '{source}' and '{target}'"
            logger.error(error_msg)
            return [error_msg]
        except nx.NodeNotFound as e:
            error_msg = f"Node not found in graph: {e!s}"
            logger.error(error_msg)
            return [error_msg]

    def compute_centrality(self) -> Dict[str, float]:
        r"""Computes centrality measures for the graph.

        Returns:
            Dict[str, float]: Degree-centrality values for each node.
        """
        logger.info("Computing centrality measures.")
        nx = self._get_nx()
        return nx.degree_centrality(self.graph)

    def serialize_graph(self) -> str:
        r"""Serializes the graph to a JSON string.

        Returns:
            str: The serialized graph in node-link JSON format.
        """
        logger.info("Serializing the graph.")
        nx = self._get_nx()
        return json.dumps(nx.node_link_data(self.graph))

    def deserialize_graph(self, data: str) -> None:
        r"""Loads a graph from a serialized JSON string.

        Args:
            data (str): The JSON string representing the graph.
        """
        logger.info("Deserializing graph from JSON data.")
        nx = self._get_nx()
        self.graph = nx.node_link_graph(json.loads(data))

    def export_to_file(self, file_path: str) -> None:
        r"""Exports the graph to a file in JSON format.

        Args:
            file_path (str): The file path to save the graph.
        """
        logger.info(f"Exporting graph to file: {file_path}")
        nx = self._get_nx()
        with open(file_path, "w") as file:
            json.dump(nx.node_link_data(self.graph), file)

    def import_from_file(self, file_path: str) -> None:
        r"""Imports a graph from a JSON file.

        Args:
            file_path (str): The file path to load the graph from.
        """
        logger.info(f"Importing graph from file: {file_path}")
        nx = self._get_nx()
        with open(file_path, "r") as file:
            self.graph = nx.node_link_graph(json.load(file))

    def clear_graph(self) -> None:
        r"""Clears the current graph."""
        logger.info("Clearing the graph.")
        self.graph.clear()

    def get_tools(self) -> List[FunctionTool]:
        r"""Returns a list of FunctionTool objects representing the
        functions in the toolkit.

        Returns:
            List[FunctionTool]: A list of FunctionTool objects for the
                toolkit methods.
        """
        return [
            FunctionTool(self.add_edge),
            FunctionTool(self.add_node),
            FunctionTool(self.clear_graph),
            FunctionTool(self.compute_centrality),
            FunctionTool(self.deserialize_graph),
            FunctionTool(self.export_to_file),
            FunctionTool(self.get_edges),
            FunctionTool(self.get_nodes),
            FunctionTool(self.import_from_file),
            FunctionTool(self.serialize_graph),
            FunctionTool(self.get_shortest_path),
        ]
@@ -79,6 +79,7 @@ class NotionToolkit(BaseToolkit):
79
79
  def __init__(
80
80
  self,
81
81
  notion_token: Optional[str] = None,
82
+ timeout: Optional[float] = None,
82
83
  ) -> None:
83
84
  r"""Initializes the NotionToolkit.
84
85
 
@@ -86,6 +87,7 @@ class NotionToolkit(BaseToolkit):
86
87
  notion_token (Optional[str], optional): The optional notion_token
87
88
  used to interact with notion APIs.(default: :obj:`None`)
88
89
  """
90
+ super().__init__(timeout=timeout)
89
91
  from notion_client import Client
90
92
 
91
93
  self.notion_token = notion_token or os.environ.get("NOTION_TOKEN")