camel-ai 0.2.75a3__py3-none-any.whl → 0.2.75a6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of camel-ai might be problematic.
- camel/__init__.py +1 -1
- camel/agents/chat_agent.py +147 -93
- camel/configs/__init__.py +3 -0
- camel/configs/nebius_config.py +103 -0
- camel/models/__init__.py +2 -0
- camel/models/model_factory.py +2 -0
- camel/models/nebius_model.py +83 -0
- camel/models/ollama_model.py +3 -3
- camel/societies/workforce/task_channel.py +120 -27
- camel/societies/workforce/workforce.py +35 -3
- camel/toolkits/github_toolkit.py +104 -17
- camel/toolkits/hybrid_browser_toolkit/hybrid_browser_toolkit_ts.py +35 -5
- camel/toolkits/hybrid_browser_toolkit/ts/src/browser-session.ts +124 -29
- camel/toolkits/hybrid_browser_toolkit/ts/src/config-loader.ts +1 -1
- camel/toolkits/hybrid_browser_toolkit/ts/src/hybrid-browser-toolkit.ts +103 -40
- camel/toolkits/hybrid_browser_toolkit/ts/src/types.ts +3 -2
- camel/toolkits/hybrid_browser_toolkit/ts/websocket-server.js +8 -1
- camel/toolkits/hybrid_browser_toolkit/ws_wrapper.py +60 -0
- camel/toolkits/math_toolkit.py +64 -10
- camel/toolkits/openai_image_toolkit.py +55 -24
- camel/toolkits/search_toolkit.py +13 -2
- camel/types/enums.py +34 -9
- camel/types/unified_model_type.py +5 -0
- {camel_ai-0.2.75a3.dist-info → camel_ai-0.2.75a6.dist-info}/METADATA +4 -11
- {camel_ai-0.2.75a3.dist-info → camel_ai-0.2.75a6.dist-info}/RECORD +27 -25
- {camel_ai-0.2.75a3.dist-info → camel_ai-0.2.75a6.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.75a3.dist-info → camel_ai-0.2.75a6.dist-info}/licenses/LICENSE +0 -0
camel/societies/workforce/task_channel.py
CHANGED

```diff
@@ -12,8 +12,9 @@
 # limitations under the License.
 # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
 import asyncio
+from collections import defaultdict, deque
 from enum import Enum
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Set
 
 from camel.tasks import Task
 
@@ -79,27 +80,92 @@ class Packet:
 
 
 class TaskChannel:
-    r"""An internal class used by Workforce to manage tasks.
+    r"""An internal class used by Workforce to manage tasks.
+
+    This implementation uses a hybrid data structure approach:
+    - Hash map (_task_dict) for O(1) task lookup by ID
+    - Status-based index (_task_by_status) for efficient filtering by status
+    - Assignee/publisher queues for ordered task processing
+    """
 
     def __init__(self) -> None:
         self._condition = asyncio.Condition()
         self._task_dict: Dict[str, Packet] = {}
 
+        self._task_by_status: Dict[PacketStatus, Set[str]] = defaultdict(set)
+
+        # task by assignee store which are sent to
+        self._task_by_assignee: Dict[str, deque[str]] = defaultdict(deque)
+
+        self._task_by_publisher: Dict[str, deque[str]] = defaultdict(deque)
+
+    def _update_task_status(
+        self, task_id: str, new_status: PacketStatus
+    ) -> None:
+        r"""Helper method to properly update task status in all indexes."""
+        if task_id not in self._task_dict:
+            return
+
+        packet = self._task_dict[task_id]
+        old_status = packet.status
+
+        if old_status in self._task_by_status:
+            self._task_by_status[old_status].discard(task_id)
+
+        packet.status = new_status
+
+        self._task_by_status[new_status].add(task_id)
+
+    def _cleanup_task_from_indexes(self, task_id: str) -> None:
+        r"""Helper method to remove a task from all indexes.
+
+        Args:
+            task_id (str): The ID of the task to remove from indexes.
+        """
+        if task_id not in self._task_dict:
+            return
+
+        packet = self._task_dict[task_id]
+
+        if packet.status in self._task_by_status:
+            self._task_by_status[packet.status].discard(task_id)
+
+        if packet.assignee_id and packet.assignee_id in self._task_by_assignee:
+            assignee_queue = self._task_by_assignee[packet.assignee_id]
+            self._task_by_assignee[packet.assignee_id] = deque(
+                task for task in assignee_queue if task != task_id
+            )
+
+        if packet.publisher_id in self._task_by_publisher:
+            publisher_queue = self._task_by_publisher[packet.publisher_id]
+            self._task_by_publisher[packet.publisher_id] = deque(
+                task for task in publisher_queue if task != task_id
+            )
+
     async def get_returned_task_by_publisher(self, publisher_id: str) -> Task:
         r"""Get a task from the channel that has been returned by the
         publisher.
         """
         async with self._condition:
             while True:
-
-
-
-
-
-
-
-
-
+                task_ids = self._task_by_publisher[publisher_id]
+
+                if task_ids:
+                    task_id = task_ids.popleft()
+
+                    if task_id in self._task_dict:
+                        packet = self._task_dict[task_id]
+
+                        if (
+                            packet.status == PacketStatus.RETURNED
+                            and packet.publisher_id == publisher_id
+                        ):
+                            # Clean up all indexes before removing
+                            self._cleanup_task_from_indexes(task_id)
+                            del self._task_dict[task_id]
+                            self._condition.notify_all()
+                            return packet.task
+
                 await self._condition.wait()
 
     async def get_assigned_task_by_assignee(self, assignee_id: str) -> Task:
@@ -109,15 +175,26 @@ class TaskChannel:
         """
         async with self._condition:
             while True:
-
-
-
-
-                )
-
-
-                self.
-
+                task_ids = self._task_by_assignee.get(assignee_id, deque())
+
+                # Process all available tasks until we find a valid one
+                while task_ids:
+                    task_id = task_ids.popleft()
+
+                    if task_id in self._task_dict:
+                        packet = self._task_dict[task_id]
+
+                        if (
+                            packet.status == PacketStatus.SENT
+                            and packet.assignee_id == assignee_id
+                        ):
+                            # Use helper method to properly update status
+                            self._update_task_status(
+                                task_id, PacketStatus.PROCESSING
+                            )
+                            self._condition.notify_all()
+                            return packet.task
+
                 await self._condition.wait()
 
     async def post_task(
@@ -128,6 +205,8 @@ class TaskChannel:
         async with self._condition:
             packet = Packet(task, publisher_id, assignee_id)
             self._task_dict[packet.task.id] = packet
+            self._task_by_status[PacketStatus.SENT].add(packet.task.id)
+            self._task_by_assignee[assignee_id].append(packet.task.id)
             self._condition.notify_all()
 
     async def post_dependency(
@@ -140,6 +219,7 @@ class TaskChannel:
                 dependency, publisher_id, status=PacketStatus.ARCHIVED
             )
             self._task_dict[packet.task.id] = packet
+            self._task_by_status[PacketStatus.ARCHIVED].add(packet.task.id)
             self._condition.notify_all()
 
     async def return_task(self, task_id: str) -> None:
@@ -148,7 +228,12 @@ class TaskChannel:
         async with self._condition:
             if task_id in self._task_dict:
                 packet = self._task_dict[task_id]
-
+                # Only add to publisher queue if not already returned
+                if packet.status != PacketStatus.RETURNED:
+                    self._update_task_status(task_id, PacketStatus.RETURNED)
+                    self._task_by_publisher[packet.publisher_id].append(
+                        packet.task.id
+                    )
             self._condition.notify_all()
 
     async def archive_task(self, task_id: str) -> None:
@@ -156,7 +241,17 @@ class TaskChannel:
         async with self._condition:
             if task_id in self._task_dict:
                 packet = self._task_dict[task_id]
-
+                # Remove from assignee queue before archiving
+                if (
+                    packet.assignee_id
+                    and packet.assignee_id in self._task_by_assignee
+                ):
+                    assignee_queue = self._task_by_assignee[packet.assignee_id]
+                    self._task_by_assignee[packet.assignee_id] = deque(
+                        task for task in assignee_queue if task != task_id
+                    )
+                # Update status (keeps in status index for dependencies)
+                self._update_task_status(task_id, PacketStatus.ARCHIVED)
             self._condition.notify_all()
 
     async def remove_task(self, task_id: str) -> None:
@@ -164,17 +259,15 @@ class TaskChannel:
         async with self._condition:
            # Check if task ID exists before removing
            if task_id in self._task_dict:
+                # Clean up all indexes before removing
+                self._cleanup_task_from_indexes(task_id)
                del self._task_dict[task_id]
                self._condition.notify_all()
 
     async def get_dependency_ids(self) -> List[str]:
         r"""Get the IDs of all dependencies in the channel."""
         async with self._condition:
-
-            for task_id, packet in self._task_dict.items():
-                if packet.status == PacketStatus.ARCHIVED:
-                    dependency_ids.append(task_id)
-            return dependency_ids
+            return list(self._task_by_status[PacketStatus.ARCHIVED])
 
     async def get_task_by_id(self, task_id: str) -> Task:
         r"""Get a task from the channel by its ID."""
```
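The new TaskChannel docstring describes the hybrid index layout: a packet dict, a per-status set, and per-assignee/publisher deques. The sketch below is a simplified, hypothetical illustration of that layout (a toy MiniChannel, not camel-ai's actual Packet/PacketStatus types), showing why posting and claiming avoid scanning every task:

```python
from collections import defaultdict, deque
from enum import Enum
from typing import Deque, Dict, Optional, Set


class Status(Enum):
    SENT = "sent"
    PROCESSING = "processing"


class MiniChannel:
    """Toy channel: a packet dict, a set per status, a deque per assignee."""

    def __init__(self) -> None:
        self._packets: Dict[str, list] = {}  # task_id -> [status, assignee]
        self._by_status: Dict[Status, Set[str]] = defaultdict(set)
        self._by_assignee: Dict[str, Deque[str]] = defaultdict(deque)

    def post(self, task_id: str, assignee: str) -> None:
        # O(1): record the packet and index it by status and assignee.
        self._packets[task_id] = [Status.SENT, assignee]
        self._by_status[Status.SENT].add(task_id)
        self._by_assignee[assignee].append(task_id)

    def claim(self, assignee: str) -> Optional[str]:
        # Pop from the assignee's own queue instead of scanning every packet.
        queue = self._by_assignee[assignee]
        while queue:
            task_id = queue.popleft()
            packet = self._packets.get(task_id)
            if packet is not None and packet[0] is Status.SENT:
                self._by_status[Status.SENT].discard(task_id)
                packet[0] = Status.PROCESSING
                self._by_status[Status.PROCESSING].add(task_id)
                return task_id
        return None


channel = MiniChannel()
channel.post("t1", "worker-a")
assert channel.claim("worker-a") == "t1"
assert channel.claim("worker-a") is None
```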
camel/societies/workforce/workforce.py
CHANGED

```diff
@@ -84,7 +84,7 @@ logger = get_logger(__name__)
 # Constants for configuration values
 MAX_TASK_RETRIES = 3
 MAX_PENDING_TASKS_LIMIT = 20
-TASK_TIMEOUT_SECONDS =
+TASK_TIMEOUT_SECONDS = 600.0
 DEFAULT_WORKER_POOL_SIZE = 10
 
 
@@ -1497,6 +1497,9 @@ class Workforce(BaseNode):
                     start_coroutine, self._loop
                 )
                 self._child_listening_tasks.append(child_task)
+            else:
+                # Close the coroutine to prevent RuntimeWarning
+                start_coroutine.close()
         else:
             # Close the coroutine to prevent RuntimeWarning
             start_coroutine.close()
@@ -2310,6 +2313,9 @@ class Workforce(BaseNode):
         r"""Get the task that's published by this node and just get returned
         from the assignee. Includes timeout handling to prevent indefinite
         waiting.
+
+        Raises:
+            asyncio.TimeoutError: If waiting for task exceeds timeout
         """
         try:
             # Add timeout to prevent indefinite waiting
@@ -2317,6 +2323,17 @@ class Workforce(BaseNode):
                 self._channel.get_returned_task_by_publisher(self.node_id),
                 timeout=self.task_timeout_seconds,
             )
+        except asyncio.TimeoutError:
+            # Re-raise timeout errors to be handled by caller
+            # This prevents hanging when tasks are stuck
+            logger.warning(
+                f"Timeout waiting for task return in workforce "
+                f"{self.node_id}. "
+                f"Timeout: {self.task_timeout_seconds}s, "
+                f"Pending tasks: {len(self._pending_tasks)}, "
+                f"In-flight tasks: {self._in_flight_tasks}"
+            )
+            raise
         except Exception as e:
             error_msg = (
                 f"Error getting returned task {e} in "
@@ -2835,9 +2852,24 @@ class Workforce(BaseNode):
             self._last_snapshot_time = time.time()
 
             # Get returned task
-
+            try:
+                returned_task = await self._get_returned_task()
+            except asyncio.TimeoutError:
+                # Handle timeout - check if we have tasks stuck in flight
+                if self._in_flight_tasks > 0:
+                    logger.warning(
+                        f"Timeout waiting for {self._in_flight_tasks} "
+                        f"in-flight tasks. Breaking to prevent hanging."
+                    )
+                    # Break the loop to prevent indefinite hanging
+                    # The finally block will handle cleanup
+                    break
+                else:
+                    # No tasks in flight, safe to continue
+                    await self._post_ready_tasks()
+                    continue
 
-            # If no task was returned, continue
+            # If no task was returned (other errors), continue
             if returned_task is None:
                 logger.debug(
                     f"No task returned in workforce {self.node_id}. "
```
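The workforce.py hunks set TASK_TIMEOUT_SECONDS to 600.0, wrap the channel read in asyncio.wait_for, and let asyncio.TimeoutError propagate so the listening loop can break or continue instead of hanging. A minimal sketch of that wait-with-timeout pattern follows; slow_consumer and listen_once are illustrative stand-ins, not camel-ai APIs:

```python
import asyncio


async def slow_consumer() -> str:
    # Stand-in for waiting on the task channel for a returned task.
    await asyncio.sleep(0.2)
    return "returned-task"


async def listen_once(timeout: float) -> None:
    # Mirror the pattern in the diff: bound the wait, log on timeout, and let
    # the caller decide whether to break or keep posting ready tasks.
    try:
        task = await asyncio.wait_for(slow_consumer(), timeout=timeout)
        print(f"got {task}")
    except asyncio.TimeoutError:
        print(f"timed out after {timeout}s; caller breaks or re-posts ready tasks")


asyncio.run(listen_once(timeout=0.05))  # exercises the timeout branch
asyncio.run(listen_once(timeout=1.0))   # exercises the success branch
```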
camel/toolkits/github_toolkit.py
CHANGED
```diff
@@ -14,6 +14,7 @@
 
 import logging
 import os
+import warnings
 from typing import Dict, List, Literal, Optional, Union
 
 from camel.toolkits import FunctionTool
@@ -80,7 +81,7 @@ class GithubToolkit(BaseToolkit):
         )
         return GITHUB_ACCESS_TOKEN
 
-    def create_pull_request(
+    def github_create_pull_request(
         self,
         repo_name: str,
         file_path: str,
@@ -150,7 +151,7 @@ class GithubToolkit(BaseToolkit):
         else:
             raise ValueError("PRs with multiple files aren't supported yet.")
 
-    def get_issue_list(
+    def github_get_issue_list(
         self, repo_name: str, state: Literal["open", "closed", "all"] = "all"
     ) -> List[Dict[str, object]]:
         r"""Retrieves all issues from the GitHub repository.
@@ -177,7 +178,9 @@ class GithubToolkit(BaseToolkit):
 
         return issues_info
 
-    def get_issue_content(self, repo_name: str, issue_number: int) -> str:
+    def github_get_issue_content(
+        self, repo_name: str, issue_number: int
+    ) -> str:
         r"""Retrieves the content of a specific issue by its number.
 
         Args:
@@ -194,7 +197,7 @@ class GithubToolkit(BaseToolkit):
         except Exception as e:
             return f"can't get Issue number {issue_number}: {e!s}"
 
-    def get_pull_request_list(
+    def github_get_pull_request_list(
         self, repo_name: str, state: Literal["open", "closed", "all"] = "all"
     ) -> List[Dict[str, object]]:
         r"""Retrieves all pull requests from the GitHub repository.
@@ -221,7 +224,7 @@ class GithubToolkit(BaseToolkit):
 
         return pull_requests_info
 
-    def get_pull_request_code(
+    def github_get_pull_request_code(
         self, repo_name: str, pr_number: int
     ) -> List[Dict[str, str]]:
         r"""Retrieves the code changes of a specific pull request.
@@ -253,7 +256,7 @@ class GithubToolkit(BaseToolkit):
 
         return files_changed
 
-    def get_pull_request_comments(
+    def github_get_pull_request_comments(
         self, repo_name: str, pr_number: int
     ) -> List[Dict[str, str]]:
         r"""Retrieves the comments from a specific pull request.
@@ -278,7 +281,9 @@ class GithubToolkit(BaseToolkit):
 
         return comments
 
-    def get_all_file_paths(self, repo_name: str, path: str = "") -> List[str]:
+    def github_get_all_file_paths(
+        self, repo_name: str, path: str = ""
+    ) -> List[str]:
         r"""Recursively retrieves all file paths in the GitHub repository.
 
         Args:
@@ -308,13 +313,15 @@ class GithubToolkit(BaseToolkit):
         for content in contents:
             if content.type == "dir":
                 # If it's a directory, recursively retrieve its file paths
-                files.extend(self.get_all_file_paths(content.path))
+                files.extend(self.github_get_all_file_paths(content.path))
             else:
                 # If it's a file, add its path to the list
                 files.append(content.path)
         return files
 
-    def retrieve_file_content(self, repo_name: str, file_path: str) -> str:
+    def github_retrieve_file_content(
+        self, repo_name: str, file_path: str
+    ) -> str:
         r"""Retrieves the content of a file from the GitHub repository.
 
         Args:
@@ -343,12 +350,92 @@ class GithubToolkit(BaseToolkit):
             the functions in the toolkit.
         """
         return [
-            FunctionTool(self.create_pull_request),
-            FunctionTool(self.get_issue_list),
-            FunctionTool(self.get_issue_content),
-            FunctionTool(self.get_pull_request_list),
-            FunctionTool(self.get_pull_request_code),
-            FunctionTool(self.get_pull_request_comments),
-            FunctionTool(self.get_all_file_paths),
-            FunctionTool(self.retrieve_file_content),
+            FunctionTool(self.github_create_pull_request),
+            FunctionTool(self.github_get_issue_list),
+            FunctionTool(self.github_get_issue_content),
+            FunctionTool(self.github_get_pull_request_list),
+            FunctionTool(self.github_get_pull_request_code),
+            FunctionTool(self.github_get_pull_request_comments),
+            FunctionTool(self.github_get_all_file_paths),
+            FunctionTool(self.github_retrieve_file_content),
         ]
+
+    # Deprecated method aliases for backward compatibility
+    def create_pull_request(self, *args, **kwargs):
+        r"""Deprecated: Use github_create_pull_request instead."""
+        warnings.warn(
+            "create_pull_request is deprecated. Use "
+            "github_create_pull_request instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_create_pull_request(*args, **kwargs)
+
+    def get_issue_list(self, *args, **kwargs):
+        r"""Deprecated: Use github_get_issue_list instead."""
+        warnings.warn(
+            "get_issue_list is deprecated. Use github_get_issue_list instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_get_issue_list(*args, **kwargs)
+
+    def get_issue_content(self, *args, **kwargs):
+        r"""Deprecated: Use github_get_issue_content instead."""
+        warnings.warn(
+            "get_issue_content is deprecated. Use "
+            "github_get_issue_content instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_get_issue_content(*args, **kwargs)
+
+    def get_pull_request_list(self, *args, **kwargs):
+        r"""Deprecated: Use github_get_pull_request_list instead."""
+        warnings.warn(
+            "get_pull_request_list is deprecated. "
+            "Use github_get_pull_request_list instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_get_pull_request_list(*args, **kwargs)
+
+    def get_pull_request_code(self, *args, **kwargs):
+        r"""Deprecated: Use github_get_pull_request_code instead."""
+        warnings.warn(
+            "get_pull_request_code is deprecated. Use "
+            "github_get_pull_request_code instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_get_pull_request_code(*args, **kwargs)
+
+    def get_pull_request_comments(self, *args, **kwargs):
+        r"""Deprecated: Use github_get_pull_request_comments instead."""
+        warnings.warn(
+            "get_pull_request_comments is deprecated. "
+            "Use github_get_pull_request_comments instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_get_pull_request_comments(*args, **kwargs)
+
+    def get_all_file_paths(self, *args, **kwargs):
+        r"""Deprecated: Use github_get_all_file_paths instead."""
+        warnings.warn(
+            "get_all_file_paths is deprecated. Use "
+            "github_get_all_file_paths instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_get_all_file_paths(*args, **kwargs)
+
+    def retrieve_file_content(self, *args, **kwargs):
+        r"""Deprecated: Use github_retrieve_file_content instead."""
+        warnings.warn(
+            "retrieve_file_content is deprecated. "
+            "Use github_retrieve_file_content instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return self.github_retrieve_file_content(*args, **kwargs)
```
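The github_toolkit.py diff renames each public method with a github_ prefix and keeps the old names as thin aliases that emit a DeprecationWarning before delegating. The snippet below demonstrates that alias pattern in isolation with a hypothetical _RenamedMethodDemo class, since exercising the real GithubToolkit requires a GitHub access token:

```python
import warnings


class _RenamedMethodDemo:
    """Stand-in that mirrors the alias pattern used by GithubToolkit."""

    def github_get_issue_list(self, state: str = "all") -> list:
        # The real toolkit would query the GitHub API here.
        return []

    def get_issue_list(self, *args, **kwargs):
        warnings.warn(
            "get_issue_list is deprecated. Use github_get_issue_list instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.github_get_issue_list(*args, **kwargs)


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _RenamedMethodDemo().get_issue_list(state="open")

# The old name still works but now surfaces a DeprecationWarning.
assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```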
@@ -674,12 +674,29 @@ class HybridBrowserToolkit(BaseToolkit, RegisteredAgentToolkit):
|
|
|
674
674
|
"total_tabs": 0,
|
|
675
675
|
}
|
|
676
676
|
|
|
677
|
-
async def browser_type(
|
|
678
|
-
|
|
677
|
+
async def browser_type(
|
|
678
|
+
self,
|
|
679
|
+
*,
|
|
680
|
+
ref: Optional[str] = None,
|
|
681
|
+
text: Optional[str] = None,
|
|
682
|
+
inputs: Optional[List[Dict[str, str]]] = None,
|
|
683
|
+
) -> Dict[str, Any]:
|
|
684
|
+
r"""Types text into one or more input elements on the page.
|
|
685
|
+
|
|
686
|
+
This method supports two modes:
|
|
687
|
+
1. Single input mode (backward compatible): Provide 'ref' and 'text'
|
|
688
|
+
2. Multiple inputs mode: Provide 'inputs' as a list of dictionaries
|
|
689
|
+
with 'ref' and 'text' keys
|
|
679
690
|
|
|
680
691
|
Args:
|
|
681
|
-
ref (str): The `ref` ID of the input element, from a
|
|
682
|
-
|
|
692
|
+
ref (Optional[str]): The `ref` ID of the input element, from a
|
|
693
|
+
snapshot. Required when using single input mode.
|
|
694
|
+
text (Optional[str]): The text to type into the element. Required
|
|
695
|
+
when using single input mode.
|
|
696
|
+
inputs (Optional[List[Dict[str, str]]]): List of dictionaries,
|
|
697
|
+
each containing 'ref' and 'text' keys for typing into multiple
|
|
698
|
+
elements. Example: [{'ref': '1', 'text': 'username'},
|
|
699
|
+
{'ref': '2', 'text': 'password'}]
|
|
683
700
|
|
|
684
701
|
Returns:
|
|
685
702
|
Dict[str, Any]: A dictionary with the result of the action:
|
|
@@ -689,10 +706,23 @@ class HybridBrowserToolkit(BaseToolkit, RegisteredAgentToolkit):
|
|
|
689
706
|
- "tabs" (List[Dict]): Information about all open tabs.
|
|
690
707
|
- "current_tab" (int): Index of the active tab.
|
|
691
708
|
- "total_tabs" (int): Total number of open tabs.
|
|
709
|
+
- "details" (Dict[str, Any]): When using multiple inputs,
|
|
710
|
+
contains success/error status for each ref.
|
|
692
711
|
"""
|
|
693
712
|
try:
|
|
694
713
|
ws_wrapper = await self._get_ws_wrapper()
|
|
695
|
-
|
|
714
|
+
|
|
715
|
+
# Handle single input mode (backward compatibility)
|
|
716
|
+
if ref is not None and text is not None:
|
|
717
|
+
result = await ws_wrapper.type(ref, text)
|
|
718
|
+
# Handle multiple inputs mode
|
|
719
|
+
elif inputs is not None:
|
|
720
|
+
result = await ws_wrapper.type_multiple(inputs)
|
|
721
|
+
else:
|
|
722
|
+
raise ValueError(
|
|
723
|
+
"Either provide 'ref' and 'text' for single input, "
|
|
724
|
+
"or 'inputs' for multiple inputs"
|
|
725
|
+
)
|
|
696
726
|
|
|
697
727
|
# Add tab information
|
|
698
728
|
tab_info = await ws_wrapper.get_tab_info()
|
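The hybrid_browser_toolkit_ts.py diff extends browser_type so it accepts either a single ref/text pair or an inputs list. A usage sketch is below; the ref values and the fill_login_form helper are hypothetical, and actually running it requires a configured HybridBrowserToolkit instance with an active browser session:

```python
from typing import Any, Dict


async def fill_login_form(toolkit: Any) -> Dict[str, Any]:
    # `toolkit` is assumed to be a configured HybridBrowserToolkit instance.

    # Single-input mode (backward compatible): one ref, one text.
    await toolkit.browser_type(ref="1", text="username")

    # Multiple-inputs mode: several {'ref': ..., 'text': ...} entries typed in
    # one call; per-ref success/error status is reported under "details".
    return await toolkit.browser_type(
        inputs=[
            {"ref": "1", "text": "username"},
            {"ref": "2", "text": "password"},
        ]
    )
```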