camel-ai 0.2.74a4__py3-none-any.whl → 0.2.75__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic; consult the registry's advisory page for more details.

Files changed (81)
  1. camel/__init__.py +1 -1
  2. camel/agents/chat_agent.py +149 -95
  3. camel/configs/__init__.py +3 -0
  4. camel/configs/nebius_config.py +103 -0
  5. camel/interpreters/e2b_interpreter.py +34 -1
  6. camel/models/__init__.py +2 -0
  7. camel/models/aiml_model.py +1 -16
  8. camel/models/anthropic_model.py +6 -19
  9. camel/models/aws_bedrock_model.py +1 -16
  10. camel/models/azure_openai_model.py +1 -16
  11. camel/models/base_model.py +0 -12
  12. camel/models/cohere_model.py +1 -16
  13. camel/models/crynux_model.py +1 -16
  14. camel/models/deepseek_model.py +1 -16
  15. camel/models/gemini_model.py +1 -16
  16. camel/models/groq_model.py +1 -17
  17. camel/models/internlm_model.py +1 -16
  18. camel/models/litellm_model.py +1 -16
  19. camel/models/lmstudio_model.py +1 -17
  20. camel/models/mistral_model.py +1 -16
  21. camel/models/model_factory.py +2 -0
  22. camel/models/modelscope_model.py +1 -16
  23. camel/models/moonshot_model.py +6 -22
  24. camel/models/nebius_model.py +83 -0
  25. camel/models/nemotron_model.py +0 -5
  26. camel/models/netmind_model.py +1 -16
  27. camel/models/novita_model.py +1 -16
  28. camel/models/nvidia_model.py +1 -16
  29. camel/models/ollama_model.py +4 -19
  30. camel/models/openai_compatible_model.py +0 -3
  31. camel/models/openai_model.py +1 -22
  32. camel/models/openrouter_model.py +1 -17
  33. camel/models/ppio_model.py +1 -16
  34. camel/models/qianfan_model.py +1 -16
  35. camel/models/qwen_model.py +1 -16
  36. camel/models/reka_model.py +1 -16
  37. camel/models/samba_model.py +0 -32
  38. camel/models/sglang_model.py +1 -16
  39. camel/models/siliconflow_model.py +1 -16
  40. camel/models/stub_model.py +0 -4
  41. camel/models/togetherai_model.py +1 -16
  42. camel/models/vllm_model.py +1 -16
  43. camel/models/volcano_model.py +0 -17
  44. camel/models/watsonx_model.py +1 -16
  45. camel/models/yi_model.py +1 -16
  46. camel/models/zhipuai_model.py +1 -16
  47. camel/societies/workforce/prompts.py +1 -8
  48. camel/societies/workforce/task_channel.py +120 -27
  49. camel/societies/workforce/workforce.py +35 -3
  50. camel/toolkits/__init__.py +0 -2
  51. camel/toolkits/github_toolkit.py +104 -17
  52. camel/toolkits/hybrid_browser_toolkit/config_loader.py +3 -0
  53. camel/toolkits/hybrid_browser_toolkit/hybrid_browser_toolkit_ts.py +260 -5
  54. camel/toolkits/hybrid_browser_toolkit/ts/src/browser-session.ts +288 -37
  55. camel/toolkits/hybrid_browser_toolkit/ts/src/config-loader.ts +3 -1
  56. camel/toolkits/hybrid_browser_toolkit/ts/src/hybrid-browser-toolkit.ts +209 -41
  57. camel/toolkits/hybrid_browser_toolkit/ts/src/types.ts +22 -3
  58. camel/toolkits/hybrid_browser_toolkit/ts/websocket-server.js +28 -1
  59. camel/toolkits/hybrid_browser_toolkit/ws_wrapper.py +101 -0
  60. camel/toolkits/hybrid_browser_toolkit_py/actions.py +158 -0
  61. camel/toolkits/hybrid_browser_toolkit_py/browser_session.py +55 -8
  62. camel/toolkits/hybrid_browser_toolkit_py/config_loader.py +43 -0
  63. camel/toolkits/hybrid_browser_toolkit_py/hybrid_browser_toolkit.py +312 -3
  64. camel/toolkits/hybrid_browser_toolkit_py/snapshot.py +10 -4
  65. camel/toolkits/hybrid_browser_toolkit_py/unified_analyzer.js +45 -4
  66. camel/toolkits/math_toolkit.py +64 -10
  67. camel/toolkits/mcp_toolkit.py +39 -14
  68. camel/toolkits/note_taking_toolkit.py +3 -4
  69. camel/toolkits/openai_image_toolkit.py +55 -24
  70. camel/toolkits/search_toolkit.py +153 -29
  71. camel/types/__init__.py +2 -2
  72. camel/types/enums.py +54 -10
  73. camel/types/openai_types.py +2 -2
  74. camel/types/unified_model_type.py +5 -0
  75. camel/utils/mcp.py +2 -2
  76. camel/utils/token_counting.py +18 -3
  77. {camel_ai-0.2.74a4.dist-info → camel_ai-0.2.75.dist-info}/METADATA +9 -15
  78. {camel_ai-0.2.74a4.dist-info → camel_ai-0.2.75.dist-info}/RECORD +80 -79
  79. camel/toolkits/openai_agent_toolkit.py +0 -135
  80. {camel_ai-0.2.74a4.dist-info → camel_ai-0.2.75.dist-info}/WHEEL +0 -0
  81. {camel_ai-0.2.74a4.dist-info → camel_ai-0.2.75.dist-info}/licenses/LICENSE +0 -0
@@ -12,8 +12,9 @@
12
12
  # limitations under the License.
13
13
  # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14
14
  import asyncio
15
+ from collections import defaultdict, deque
15
16
  from enum import Enum
16
- from typing import Dict, List, Optional
17
+ from typing import Dict, List, Optional, Set
17
18
 
18
19
  from camel.tasks import Task
19
20
 
@@ -79,27 +80,92 @@ class Packet:
79
80
 
80
81
 
81
82
  class TaskChannel:
82
- r"""An internal class used by Workforce to manage tasks."""
83
+ r"""An internal class used by Workforce to manage tasks.
84
+
85
+ This implementation uses a hybrid data structure approach:
86
+ - Hash map (_task_dict) for O(1) task lookup by ID
87
+ - Status-based index (_task_by_status) for efficient filtering by status
88
+ - Assignee/publisher queues for ordered task processing
89
+ """
83
90
 
84
91
  def __init__(self) -> None:
85
92
  self._condition = asyncio.Condition()
86
93
  self._task_dict: Dict[str, Packet] = {}
87
94
 
95
+ self._task_by_status: Dict[PacketStatus, Set[str]] = defaultdict(set)
96
+
97
+ # task by assignee store which are sent to
98
+ self._task_by_assignee: Dict[str, deque[str]] = defaultdict(deque)
99
+
100
+ self._task_by_publisher: Dict[str, deque[str]] = defaultdict(deque)
101
+
102
+ def _update_task_status(
103
+ self, task_id: str, new_status: PacketStatus
104
+ ) -> None:
105
+ r"""Helper method to properly update task status in all indexes."""
106
+ if task_id not in self._task_dict:
107
+ return
108
+
109
+ packet = self._task_dict[task_id]
110
+ old_status = packet.status
111
+
112
+ if old_status in self._task_by_status:
113
+ self._task_by_status[old_status].discard(task_id)
114
+
115
+ packet.status = new_status
116
+
117
+ self._task_by_status[new_status].add(task_id)
118
+
119
+ def _cleanup_task_from_indexes(self, task_id: str) -> None:
120
+ r"""Helper method to remove a task from all indexes.
121
+
122
+ Args:
123
+ task_id (str): The ID of the task to remove from indexes.
124
+ """
125
+ if task_id not in self._task_dict:
126
+ return
127
+
128
+ packet = self._task_dict[task_id]
129
+
130
+ if packet.status in self._task_by_status:
131
+ self._task_by_status[packet.status].discard(task_id)
132
+
133
+ if packet.assignee_id and packet.assignee_id in self._task_by_assignee:
134
+ assignee_queue = self._task_by_assignee[packet.assignee_id]
135
+ self._task_by_assignee[packet.assignee_id] = deque(
136
+ task for task in assignee_queue if task != task_id
137
+ )
138
+
139
+ if packet.publisher_id in self._task_by_publisher:
140
+ publisher_queue = self._task_by_publisher[packet.publisher_id]
141
+ self._task_by_publisher[packet.publisher_id] = deque(
142
+ task for task in publisher_queue if task != task_id
143
+ )
144
+
88
145
  async def get_returned_task_by_publisher(self, publisher_id: str) -> Task:
89
146
  r"""Get a task from the channel that has been returned by the
90
147
  publisher.
91
148
  """
92
149
  async with self._condition:
93
150
  while True:
94
- for task_id, packet in list(self._task_dict.items()):
95
- if packet.publisher_id != publisher_id:
96
- continue
97
- if packet.status != PacketStatus.RETURNED:
98
- continue
99
- # Remove the task to prevent returning it again
100
- del self._task_dict[task_id]
101
- self._condition.notify_all()
102
- return packet.task
151
+ task_ids = self._task_by_publisher[publisher_id]
152
+
153
+ if task_ids:
154
+ task_id = task_ids.popleft()
155
+
156
+ if task_id in self._task_dict:
157
+ packet = self._task_dict[task_id]
158
+
159
+ if (
160
+ packet.status == PacketStatus.RETURNED
161
+ and packet.publisher_id == publisher_id
162
+ ):
163
+ # Clean up all indexes before removing
164
+ self._cleanup_task_from_indexes(task_id)
165
+ del self._task_dict[task_id]
166
+ self._condition.notify_all()
167
+ return packet.task
168
+
103
169
  await self._condition.wait()
104
170
 
105
171
  async def get_assigned_task_by_assignee(self, assignee_id: str) -> Task:
@@ -109,15 +175,26 @@ class TaskChannel:
109
175
  """
110
176
  async with self._condition:
111
177
  while True:
112
- for packet in self._task_dict.values():
113
- if (
114
- packet.status == PacketStatus.SENT
115
- and packet.assignee_id == assignee_id
116
- ):
117
- # Atomically claim the task by changing its status
118
- packet.status = PacketStatus.PROCESSING
119
- self._condition.notify_all()
120
- return packet.task
178
+ task_ids = self._task_by_assignee.get(assignee_id, deque())
179
+
180
+ # Process all available tasks until we find a valid one
181
+ while task_ids:
182
+ task_id = task_ids.popleft()
183
+
184
+ if task_id in self._task_dict:
185
+ packet = self._task_dict[task_id]
186
+
187
+ if (
188
+ packet.status == PacketStatus.SENT
189
+ and packet.assignee_id == assignee_id
190
+ ):
191
+ # Use helper method to properly update status
192
+ self._update_task_status(
193
+ task_id, PacketStatus.PROCESSING
194
+ )
195
+ self._condition.notify_all()
196
+ return packet.task
197
+
121
198
  await self._condition.wait()
122
199
 
123
200
  async def post_task(
@@ -128,6 +205,8 @@ class TaskChannel:
128
205
  async with self._condition:
129
206
  packet = Packet(task, publisher_id, assignee_id)
130
207
  self._task_dict[packet.task.id] = packet
208
+ self._task_by_status[PacketStatus.SENT].add(packet.task.id)
209
+ self._task_by_assignee[assignee_id].append(packet.task.id)
131
210
  self._condition.notify_all()
132
211
 
133
212
  async def post_dependency(
@@ -140,6 +219,7 @@ class TaskChannel:
140
219
  dependency, publisher_id, status=PacketStatus.ARCHIVED
141
220
  )
142
221
  self._task_dict[packet.task.id] = packet
222
+ self._task_by_status[PacketStatus.ARCHIVED].add(packet.task.id)
143
223
  self._condition.notify_all()
144
224
 
145
225
  async def return_task(self, task_id: str) -> None:
@@ -148,7 +228,12 @@ class TaskChannel:
148
228
  async with self._condition:
149
229
  if task_id in self._task_dict:
150
230
  packet = self._task_dict[task_id]
151
- packet.status = PacketStatus.RETURNED
231
+ # Only add to publisher queue if not already returned
232
+ if packet.status != PacketStatus.RETURNED:
233
+ self._update_task_status(task_id, PacketStatus.RETURNED)
234
+ self._task_by_publisher[packet.publisher_id].append(
235
+ packet.task.id
236
+ )
152
237
  self._condition.notify_all()
153
238
 
154
239
  async def archive_task(self, task_id: str) -> None:
@@ -156,7 +241,17 @@ class TaskChannel:
156
241
  async with self._condition:
157
242
  if task_id in self._task_dict:
158
243
  packet = self._task_dict[task_id]
159
- packet.status = PacketStatus.ARCHIVED
244
+ # Remove from assignee queue before archiving
245
+ if (
246
+ packet.assignee_id
247
+ and packet.assignee_id in self._task_by_assignee
248
+ ):
249
+ assignee_queue = self._task_by_assignee[packet.assignee_id]
250
+ self._task_by_assignee[packet.assignee_id] = deque(
251
+ task for task in assignee_queue if task != task_id
252
+ )
253
+ # Update status (keeps in status index for dependencies)
254
+ self._update_task_status(task_id, PacketStatus.ARCHIVED)
160
255
  self._condition.notify_all()
161
256
 
162
257
  async def remove_task(self, task_id: str) -> None:
@@ -164,17 +259,15 @@ class TaskChannel:
164
259
  async with self._condition:
165
260
  # Check if task ID exists before removing
166
261
  if task_id in self._task_dict:
262
+ # Clean up all indexes before removing
263
+ self._cleanup_task_from_indexes(task_id)
167
264
  del self._task_dict[task_id]
168
265
  self._condition.notify_all()
169
266
 
170
267
  async def get_dependency_ids(self) -> List[str]:
171
268
  r"""Get the IDs of all dependencies in the channel."""
172
269
  async with self._condition:
173
- dependency_ids = []
174
- for task_id, packet in self._task_dict.items():
175
- if packet.status == PacketStatus.ARCHIVED:
176
- dependency_ids.append(task_id)
177
- return dependency_ids
270
+ return list(self._task_by_status[PacketStatus.ARCHIVED])
178
271
 
179
272
  async def get_task_by_id(self, task_id: str) -> Task:
180
273
  r"""Get a task from the channel by its ID."""
@@ -84,7 +84,7 @@ logger = get_logger(__name__)
84
84
  # Constants for configuration values
85
85
  MAX_TASK_RETRIES = 3
86
86
  MAX_PENDING_TASKS_LIMIT = 20
87
- TASK_TIMEOUT_SECONDS = 480.0
87
+ TASK_TIMEOUT_SECONDS = 600.0
88
88
  DEFAULT_WORKER_POOL_SIZE = 10
89
89
 
90
90
 
@@ -1497,6 +1497,9 @@ class Workforce(BaseNode):
1497
1497
  start_coroutine, self._loop
1498
1498
  )
1499
1499
  self._child_listening_tasks.append(child_task)
1500
+ else:
1501
+ # Close the coroutine to prevent RuntimeWarning
1502
+ start_coroutine.close()
1500
1503
  else:
1501
1504
  # Close the coroutine to prevent RuntimeWarning
1502
1505
  start_coroutine.close()
@@ -2310,6 +2313,9 @@ class Workforce(BaseNode):
2310
2313
  r"""Get the task that's published by this node and just get returned
2311
2314
  from the assignee. Includes timeout handling to prevent indefinite
2312
2315
  waiting.
2316
+
2317
+ Raises:
2318
+ asyncio.TimeoutError: If waiting for task exceeds timeout
2313
2319
  """
2314
2320
  try:
2315
2321
  # Add timeout to prevent indefinite waiting
@@ -2317,6 +2323,17 @@ class Workforce(BaseNode):
2317
2323
  self._channel.get_returned_task_by_publisher(self.node_id),
2318
2324
  timeout=self.task_timeout_seconds,
2319
2325
  )
2326
+ except asyncio.TimeoutError:
2327
+ # Re-raise timeout errors to be handled by caller
2328
+ # This prevents hanging when tasks are stuck
2329
+ logger.warning(
2330
+ f"Timeout waiting for task return in workforce "
2331
+ f"{self.node_id}. "
2332
+ f"Timeout: {self.task_timeout_seconds}s, "
2333
+ f"Pending tasks: {len(self._pending_tasks)}, "
2334
+ f"In-flight tasks: {self._in_flight_tasks}"
2335
+ )
2336
+ raise
2320
2337
  except Exception as e:
2321
2338
  error_msg = (
2322
2339
  f"Error getting returned task {e} in "
@@ -2835,9 +2852,24 @@ class Workforce(BaseNode):
2835
2852
  self._last_snapshot_time = time.time()
2836
2853
 
2837
2854
  # Get returned task
2838
- returned_task = await self._get_returned_task()
2855
+ try:
2856
+ returned_task = await self._get_returned_task()
2857
+ except asyncio.TimeoutError:
2858
+ # Handle timeout - check if we have tasks stuck in flight
2859
+ if self._in_flight_tasks > 0:
2860
+ logger.warning(
2861
+ f"Timeout waiting for {self._in_flight_tasks} "
2862
+ f"in-flight tasks. Breaking to prevent hanging."
2863
+ )
2864
+ # Break the loop to prevent indefinite hanging
2865
+ # The finally block will handle cleanup
2866
+ break
2867
+ else:
2868
+ # No tasks in flight, safe to continue
2869
+ await self._post_ready_tasks()
2870
+ continue
2839
2871
 
2840
- # If no task was returned, continue
2872
+ # If no task was returned (other errors), continue
2841
2873
  if returned_task is None:
2842
2874
  logger.debug(
2843
2875
  f"No task returned in workforce {self.node_id}. "
@@ -68,7 +68,6 @@ from .pubmed_toolkit import PubMedToolkit
68
68
  from .data_commons_toolkit import DataCommonsToolkit
69
69
  from .thinking_toolkit import ThinkingToolkit
70
70
  from .pyautogui_toolkit import PyAutoGUIToolkit
71
- from .openai_agent_toolkit import OpenAIAgentToolkit
72
71
  from .searxng_toolkit import SearxNGToolkit
73
72
  from .jina_reranker_toolkit import JinaRerankerToolkit
74
73
  from .pulse_mcp_search_toolkit import PulseMCPSearchToolkit
@@ -143,7 +142,6 @@ __all__ = [
143
142
  'DataCommonsToolkit',
144
143
  'ThinkingToolkit',
145
144
  'PyAutoGUIToolkit',
146
- 'OpenAIAgentToolkit',
147
145
  'SearxNGToolkit',
148
146
  'JinaRerankerToolkit',
149
147
  'OrigeneToolkit',
@@ -14,6 +14,7 @@
14
14
 
15
15
  import logging
16
16
  import os
17
+ import warnings
17
18
  from typing import Dict, List, Literal, Optional, Union
18
19
 
19
20
  from camel.toolkits import FunctionTool
@@ -80,7 +81,7 @@ class GithubToolkit(BaseToolkit):
80
81
  )
81
82
  return GITHUB_ACCESS_TOKEN
82
83
 
83
- def create_pull_request(
84
+ def github_create_pull_request(
84
85
  self,
85
86
  repo_name: str,
86
87
  file_path: str,
@@ -150,7 +151,7 @@ class GithubToolkit(BaseToolkit):
150
151
  else:
151
152
  raise ValueError("PRs with multiple files aren't supported yet.")
152
153
 
153
- def get_issue_list(
154
+ def github_get_issue_list(
154
155
  self, repo_name: str, state: Literal["open", "closed", "all"] = "all"
155
156
  ) -> List[Dict[str, object]]:
156
157
  r"""Retrieves all issues from the GitHub repository.
@@ -177,7 +178,9 @@ class GithubToolkit(BaseToolkit):
177
178
 
178
179
  return issues_info
179
180
 
180
- def get_issue_content(self, repo_name: str, issue_number: int) -> str:
181
+ def github_get_issue_content(
182
+ self, repo_name: str, issue_number: int
183
+ ) -> str:
181
184
  r"""Retrieves the content of a specific issue by its number.
182
185
 
183
186
  Args:
@@ -194,7 +197,7 @@ class GithubToolkit(BaseToolkit):
194
197
  except Exception as e:
195
198
  return f"can't get Issue number {issue_number}: {e!s}"
196
199
 
197
- def get_pull_request_list(
200
+ def github_get_pull_request_list(
198
201
  self, repo_name: str, state: Literal["open", "closed", "all"] = "all"
199
202
  ) -> List[Dict[str, object]]:
200
203
  r"""Retrieves all pull requests from the GitHub repository.
@@ -221,7 +224,7 @@ class GithubToolkit(BaseToolkit):
221
224
 
222
225
  return pull_requests_info
223
226
 
224
- def get_pull_request_code(
227
+ def github_get_pull_request_code(
225
228
  self, repo_name: str, pr_number: int
226
229
  ) -> List[Dict[str, str]]:
227
230
  r"""Retrieves the code changes of a specific pull request.
@@ -253,7 +256,7 @@ class GithubToolkit(BaseToolkit):
253
256
 
254
257
  return files_changed
255
258
 
256
- def get_pull_request_comments(
259
+ def github_get_pull_request_comments(
257
260
  self, repo_name: str, pr_number: int
258
261
  ) -> List[Dict[str, str]]:
259
262
  r"""Retrieves the comments from a specific pull request.
@@ -278,7 +281,9 @@ class GithubToolkit(BaseToolkit):
278
281
 
279
282
  return comments
280
283
 
281
- def get_all_file_paths(self, repo_name: str, path: str = "") -> List[str]:
284
+ def github_get_all_file_paths(
285
+ self, repo_name: str, path: str = ""
286
+ ) -> List[str]:
282
287
  r"""Recursively retrieves all file paths in the GitHub repository.
283
288
 
284
289
  Args:
@@ -308,13 +313,15 @@ class GithubToolkit(BaseToolkit):
308
313
  for content in contents:
309
314
  if content.type == "dir":
310
315
  # If it's a directory, recursively retrieve its file paths
311
- files.extend(self.get_all_file_paths(content.path))
316
+ files.extend(self.github_get_all_file_paths(content.path))
312
317
  else:
313
318
  # If it's a file, add its path to the list
314
319
  files.append(content.path)
315
320
  return files
316
321
 
317
- def retrieve_file_content(self, repo_name: str, file_path: str) -> str:
322
+ def github_retrieve_file_content(
323
+ self, repo_name: str, file_path: str
324
+ ) -> str:
318
325
  r"""Retrieves the content of a file from the GitHub repository.
319
326
 
320
327
  Args:
@@ -343,12 +350,92 @@ class GithubToolkit(BaseToolkit):
343
350
  the functions in the toolkit.
344
351
  """
345
352
  return [
346
- FunctionTool(self.create_pull_request),
347
- FunctionTool(self.get_issue_list),
348
- FunctionTool(self.get_issue_content),
349
- FunctionTool(self.get_pull_request_list),
350
- FunctionTool(self.get_pull_request_code),
351
- FunctionTool(self.get_pull_request_comments),
352
- FunctionTool(self.get_all_file_paths),
353
- FunctionTool(self.retrieve_file_content),
353
+ FunctionTool(self.github_create_pull_request),
354
+ FunctionTool(self.github_get_issue_list),
355
+ FunctionTool(self.github_get_issue_content),
356
+ FunctionTool(self.github_get_pull_request_list),
357
+ FunctionTool(self.github_get_pull_request_code),
358
+ FunctionTool(self.github_get_pull_request_comments),
359
+ FunctionTool(self.github_get_all_file_paths),
360
+ FunctionTool(self.github_retrieve_file_content),
354
361
  ]
362
+
363
+ # Deprecated method aliases for backward compatibility
364
+ def create_pull_request(self, *args, **kwargs):
365
+ r"""Deprecated: Use github_create_pull_request instead."""
366
+ warnings.warn(
367
+ "create_pull_request is deprecated. Use "
368
+ "github_create_pull_request instead.",
369
+ DeprecationWarning,
370
+ stacklevel=2,
371
+ )
372
+ return self.github_create_pull_request(*args, **kwargs)
373
+
374
+ def get_issue_list(self, *args, **kwargs):
375
+ r"""Deprecated: Use github_get_issue_list instead."""
376
+ warnings.warn(
377
+ "get_issue_list is deprecated. Use github_get_issue_list instead.",
378
+ DeprecationWarning,
379
+ stacklevel=2,
380
+ )
381
+ return self.github_get_issue_list(*args, **kwargs)
382
+
383
+ def get_issue_content(self, *args, **kwargs):
384
+ r"""Deprecated: Use github_get_issue_content instead."""
385
+ warnings.warn(
386
+ "get_issue_content is deprecated. Use "
387
+ "github_get_issue_content instead.",
388
+ DeprecationWarning,
389
+ stacklevel=2,
390
+ )
391
+ return self.github_get_issue_content(*args, **kwargs)
392
+
393
+ def get_pull_request_list(self, *args, **kwargs):
394
+ r"""Deprecated: Use github_get_pull_request_list instead."""
395
+ warnings.warn(
396
+ "get_pull_request_list is deprecated. "
397
+ "Use github_get_pull_request_list instead.",
398
+ DeprecationWarning,
399
+ stacklevel=2,
400
+ )
401
+ return self.github_get_pull_request_list(*args, **kwargs)
402
+
403
+ def get_pull_request_code(self, *args, **kwargs):
404
+ r"""Deprecated: Use github_get_pull_request_code instead."""
405
+ warnings.warn(
406
+ "get_pull_request_code is deprecated. Use "
407
+ "github_get_pull_request_code instead.",
408
+ DeprecationWarning,
409
+ stacklevel=2,
410
+ )
411
+ return self.github_get_pull_request_code(*args, **kwargs)
412
+
413
+ def get_pull_request_comments(self, *args, **kwargs):
414
+ r"""Deprecated: Use github_get_pull_request_comments instead."""
415
+ warnings.warn(
416
+ "get_pull_request_comments is deprecated. "
417
+ "Use github_get_pull_request_comments instead.",
418
+ DeprecationWarning,
419
+ stacklevel=2,
420
+ )
421
+ return self.github_get_pull_request_comments(*args, **kwargs)
422
+
423
+ def get_all_file_paths(self, *args, **kwargs):
424
+ r"""Deprecated: Use github_get_all_file_paths instead."""
425
+ warnings.warn(
426
+ "get_all_file_paths is deprecated. Use "
427
+ "github_get_all_file_paths instead.",
428
+ DeprecationWarning,
429
+ stacklevel=2,
430
+ )
431
+ return self.github_get_all_file_paths(*args, **kwargs)
432
+
433
+ def retrieve_file_content(self, *args, **kwargs):
434
+ r"""Deprecated: Use github_retrieve_file_content instead."""
435
+ warnings.warn(
436
+ "retrieve_file_content is deprecated. "
437
+ "Use github_retrieve_file_content instead.",
438
+ DeprecationWarning,
439
+ stacklevel=2,
440
+ )
441
+ return self.github_retrieve_file_content(*args, **kwargs)
@@ -23,6 +23,7 @@ class BrowserConfig:
23
23
  headless: bool = True
24
24
  user_data_dir: Optional[str] = None
25
25
  stealth: bool = False
26
+ console_log_limit: int = 1000
26
27
 
27
28
  # Default settings
28
29
  default_start_url: str = "https://google.com/"
@@ -105,6 +106,8 @@ class ConfigLoader:
105
106
  browser_kwargs["connect_over_cdp"] = value
106
107
  elif key == "cdpUrl":
107
108
  browser_kwargs["cdp_url"] = value
109
+ elif key == "consoleLogLimit":
110
+ browser_kwargs["console_log_limit"] = value
108
111
  elif key == "cacheDir":
109
112
  toolkit_kwargs["cache_dir"] = value
110
113
  elif key == "browserLogToFile":