camel-ai 0.2.78__py3-none-any.whl → 0.2.79a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- camel/__init__.py +1 -1
- camel/agents/_utils.py +38 -0
- camel/agents/chat_agent.py +1112 -287
- camel/datasets/base_generator.py +39 -10
- camel/environments/single_step.py +28 -3
- camel/memories/__init__.py +1 -2
- camel/memories/agent_memories.py +34 -0
- camel/memories/base.py +26 -0
- camel/memories/blocks/chat_history_block.py +117 -17
- camel/memories/context_creators/score_based.py +25 -384
- camel/messages/base.py +26 -0
- camel/models/aws_bedrock_model.py +1 -17
- camel/models/azure_openai_model.py +113 -67
- camel/models/model_factory.py +17 -1
- camel/models/moonshot_model.py +102 -5
- camel/models/openai_compatible_model.py +62 -32
- camel/models/openai_model.py +61 -35
- camel/models/samba_model.py +34 -15
- camel/models/sglang_model.py +41 -11
- camel/societies/workforce/__init__.py +2 -0
- camel/societies/workforce/events.py +122 -0
- camel/societies/workforce/role_playing_worker.py +15 -11
- camel/societies/workforce/single_agent_worker.py +143 -291
- camel/societies/workforce/utils.py +2 -1
- camel/societies/workforce/workflow_memory_manager.py +772 -0
- camel/societies/workforce/workforce.py +513 -188
- camel/societies/workforce/workforce_callback.py +74 -0
- camel/societies/workforce/workforce_logger.py +144 -140
- camel/societies/workforce/workforce_metrics.py +33 -0
- camel/storages/vectordb_storages/oceanbase.py +5 -4
- camel/toolkits/file_toolkit.py +166 -0
- camel/toolkits/message_integration.py +15 -13
- camel/toolkits/terminal_toolkit/terminal_toolkit.py +112 -79
- camel/types/enums.py +1 -0
- camel/utils/context_utils.py +201 -2
- {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/METADATA +14 -13
- {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/RECORD +39 -35
- {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/licenses/LICENSE +0 -0
camel/societies/workforce/workforce_callback.py
ADDED

@@ -0,0 +1,74 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+from .events import (
+    AllTasksCompletedEvent,
+    TaskAssignedEvent,
+    TaskCompletedEvent,
+    TaskCreatedEvent,
+    TaskDecomposedEvent,
+    TaskFailedEvent,
+    TaskStartedEvent,
+    WorkerCreatedEvent,
+    WorkerDeletedEvent,
+)
+
+
+class WorkforceCallback(ABC):
+    r"""Interface for recording workforce lifecycle events.
+
+    Implementations should persist or stream events as appropriate.
+    """
+
+    @abstractmethod
+    def log_task_created(
+        self,
+        event: TaskCreatedEvent,
+    ) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_decomposed(self, event: TaskDecomposedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_assigned(self, event: TaskAssignedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_started(self, event: TaskStartedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_completed(self, event: TaskCompletedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_failed(self, event: TaskFailedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_worker_created(self, event: WorkerCreatedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_worker_deleted(self, event: WorkerDeletedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
+        pass
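For orientation, here is a minimal sketch of a concrete callback written against this new interface. It is illustrative only: the `InMemoryCallback` name and its list-based storage are not part of the package.

```python
# Illustrative sketch (not shipped in camel-ai): an in-memory callback
# that collects every workforce event it receives.
from camel.societies.workforce.events import (
    AllTasksCompletedEvent,
    TaskAssignedEvent,
    TaskCompletedEvent,
    TaskCreatedEvent,
    TaskDecomposedEvent,
    TaskFailedEvent,
    TaskStartedEvent,
    WorkerCreatedEvent,
    WorkerDeletedEvent,
)
from camel.societies.workforce.workforce_callback import WorkforceCallback


class InMemoryCallback(WorkforceCallback):
    """Collects events in a list; a real implementation might write JSONL."""

    def __init__(self) -> None:
        self.events: list = []

    def log_task_created(self, event: TaskCreatedEvent) -> None:
        self.events.append(event)

    def log_task_decomposed(self, event: TaskDecomposedEvent) -> None:
        self.events.append(event)

    def log_task_assigned(self, event: TaskAssignedEvent) -> None:
        self.events.append(event)

    def log_task_started(self, event: TaskStartedEvent) -> None:
        self.events.append(event)

    def log_task_completed(self, event: TaskCompletedEvent) -> None:
        self.events.append(event)

    def log_task_failed(self, event: TaskFailedEvent) -> None:
        self.events.append(event)

    def log_worker_created(self, event: WorkerCreatedEvent) -> None:
        self.events.append(event)

    def log_worker_deleted(self, event: WorkerDeletedEvent) -> None:
        self.events.append(event)

    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
        self.events.append(event)
```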
camel/societies/workforce/workforce_logger.py
CHANGED

@@ -16,12 +16,26 @@ from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional
 
 from camel.logger import get_logger
+from camel.societies.workforce.events import (
+    AllTasksCompletedEvent,
+    QueueStatusEvent,
+    TaskAssignedEvent,
+    TaskCompletedEvent,
+    TaskCreatedEvent,
+    TaskDecomposedEvent,
+    TaskFailedEvent,
+    TaskStartedEvent,
+    WorkerCreatedEvent,
+    WorkerDeletedEvent,
+)
+from camel.societies.workforce.workforce_callback import WorkforceCallback
+from camel.societies.workforce.workforce_metrics import WorkforceMetrics
 from camel.types.agents import ToolCallingRecord
 
 logger = get_logger(__name__)
 
 
-class WorkforceLogger:
+class WorkforceLogger(WorkforceCallback, WorkforceMetrics):
     r"""Logs events and metrics for a Workforce instance."""
 
     def __init__(self, workforce_id: str):

@@ -55,195 +69,201 @@ class WorkforceLogger:
 
     def log_task_created(
         self,
-        task_id: str,
-        description: str,
-        parent_task_id: Optional[str] = None,
-        task_type: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskCreatedEvent,
     ) -> None:
         r"""Logs the creation of a new task."""
         self._log_event(
-            'task_created',
-            task_id=task_id,
-            description=description,
-            parent_task_id=parent_task_id,
-            task_type=task_type,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            description=event.description,
+            parent_task_id=event.parent_task_id,
+            task_type=event.task_type,
+            metadata=event.metadata or {},
         )
-        self._task_hierarchy[task_id] = {
-            'parent': parent_task_id,
+        self._task_hierarchy[event.task_id] = {
+            'parent': event.parent_task_id,
             'children': [],
             'status': 'created',
-            'description': description,
+            'description': event.description,
             'assigned_to': None,
-            **(metadata or {}),
+            **(event.metadata or {}),
         }
-        if parent_task_id and parent_task_id in self._task_hierarchy:
-            self._task_hierarchy[parent_task_id]['children'].append(task_id)
+        if (
+            event.parent_task_id
+            and event.parent_task_id in self._task_hierarchy
+        ):
+            self._task_hierarchy[event.parent_task_id]['children'].append(
+                event.task_id
+            )
 
     def log_task_decomposed(
         self,
-        parent_task_id: str,
-        subtask_ids: List[str],
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskDecomposedEvent,
     ) -> None:
         r"""Logs the decomposition of a task into subtasks."""
         self._log_event(
-            'task_decomposed',
-            parent_task_id=parent_task_id,
-            subtask_ids=subtask_ids,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            parent_task_id=event.parent_task_id,
+            subtask_ids=event.subtask_ids,
+            metadata=event.metadata or {},
         )
-        if parent_task_id in self._task_hierarchy:
-            self._task_hierarchy[parent_task_id]['status'] = "decomposed"
+        if event.parent_task_id in self._task_hierarchy:
+            self._task_hierarchy[event.parent_task_id]['status'] = "decomposed"
 
     def log_task_assigned(
         self,
-        task_id: str,
-        worker_id: str,
-        queue_time_seconds: Optional[float] = None,
-        dependencies: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskAssignedEvent,
     ) -> None:
         r"""Logs the assignment of a task to a worker."""
         self._log_event(
-            'task_assigned',
-            task_id=task_id,
-            worker_id=worker_id,
-            queue_time_seconds=queue_time_seconds,
-            dependencies=dependencies or [],
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            queue_time_seconds=event.queue_time_seconds,
+            dependencies=event.dependencies or [],
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'assigned'
-            self._task_hierarchy[task_id]['assigned_to'] = worker_id
-            self._task_hierarchy[task_id]['dependencies'] = dependencies or []
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = task_id
-            self._worker_information[worker_id]['status'] = 'busy'
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'assigned'
+            self._task_hierarchy[event.task_id]['assigned_to'] = (
+                event.worker_id
+            )
+            self._task_hierarchy[event.task_id]['dependencies'] = (
+                event.dependencies or []
+            )
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = (
+                event.task_id
+            )
+            self._worker_information[event.worker_id]['status'] = 'busy'
 
     def log_task_started(
         self,
-        task_id: str,
-        worker_id: str,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskStartedEvent,
    ) -> None:
         r"""Logs when a worker starts processing a task."""
         self._log_event(
-            'task_started',
-            task_id=task_id,
-            worker_id=worker_id,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'processing'
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'processing'
 
-    def log_task_completed(
-        self,
-        task_id: str,
-        worker_id: str,
-        result_summary: Optional[str] = None,
-        processing_time_seconds: Optional[float] = None,
-        token_usage: Optional[Dict[str, int]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-    ) -> None:
+    def log_task_completed(self, event: TaskCompletedEvent) -> None:
         r"""Logs the successful completion of a task."""
         self._log_event(
-            'task_completed',
-            task_id=task_id,
-            worker_id=worker_id,
-            result_summary=result_summary,
-            processing_time_seconds=processing_time_seconds,
-            token_usage=token_usage or {},
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            result_summary=event.result_summary,
+            processing_time_seconds=event.processing_time_seconds,
+            token_usage=event.token_usage or {},
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'completed'
-            self._task_hierarchy[task_id]['assigned_to'] = None
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'completed'
+            self._task_hierarchy[event.task_id]['assigned_to'] = None
             # Store processing time in task hierarchy for display in tree
-            if processing_time_seconds is not None:
-                self._task_hierarchy[task_id][
-                    'completion_time_seconds'
-                ] = processing_time_seconds
+            if event.processing_time_seconds is not None:
+                self._task_hierarchy[event.task_id][
+                    'completion_time_seconds'
+                ] = event.processing_time_seconds
             # Store token usage in task hierarchy for display in tree
-            if token_usage is not None:
-                self._task_hierarchy[task_id]['token_usage'] = token_usage
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = None
-            self._worker_information[worker_id]['status'] = 'idle'
-            self._worker_information[worker_id]['tasks_completed'] = (
-                self._worker_information[worker_id].get('tasks_completed', 0)
+            if event.token_usage is not None:
+                self._task_hierarchy[event.task_id]['token_usage'] = (
+                    event.token_usage
+                )
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = None
+            self._worker_information[event.worker_id]['status'] = 'idle'
+            self._worker_information[event.worker_id]['tasks_completed'] = (
+                self._worker_information[event.worker_id].get(
+                    'tasks_completed', 0
+                )
                 + 1
             )
 
     def log_task_failed(
         self,
-        task_id: str,
-        error_message: str,
-        worker_id: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskFailedEvent,
     ) -> None:
         r"""Logs the failure of a task."""
         self._log_event(
-            'task_failed',
-            task_id=task_id,
-            worker_id=worker_id,
-            error_message=error_message,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            error_message=event.error_message,
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'failed'
-            self._task_hierarchy[task_id]['error'] = error_message
-            self._task_hierarchy[task_id]['assigned_to'] = None
-        if worker_id and worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = None
-            self._worker_information[worker_id]['status'] = 'idle'
-            self._worker_information[worker_id]['tasks_failed'] = (
-                self._worker_information[worker_id].get('tasks_failed', 0) + 1
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'failed'
+            self._task_hierarchy[event.task_id]['error'] = event.error_message
+            self._task_hierarchy[event.task_id]['assigned_to'] = None
+        if event.worker_id and event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = None
+            self._worker_information[event.worker_id]['status'] = 'idle'
+            self._worker_information[event.worker_id]['tasks_failed'] = (
+                self._worker_information[event.worker_id].get(
+                    'tasks_failed', 0
+                )
+                + 1
             )
 
     def log_worker_created(
         self,
-        worker_id: str,
-        worker_type: str,
-        role: str,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: WorkerCreatedEvent,
     ) -> None:
         r"""Logs the creation of a new worker."""
         self._log_event(
-            'worker_created',
-            worker_id=worker_id,
-            worker_type=worker_type,
-            role=role,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            worker_id=event.worker_id,
+            worker_type=event.worker_type,
+            role=event.role,
+            metadata=event.metadata or {},
         )
-        self._worker_information[worker_id] = {
-            'type': worker_type,
-            'role': role,
+        self._worker_information[event.worker_id] = {
+            'type': event.worker_type,
+            'role': event.role,
             'status': 'idle',
             'current_task_id': None,
             'tasks_completed': 0,
             'tasks_failed': 0,
-            **(metadata or {}),
+            **(event.metadata or {}),
         }
 
     def log_worker_deleted(
         self,
-        worker_id: str,
-        reason: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: WorkerDeletedEvent,
     ) -> None:
         r"""Logs the deletion of a worker."""
         self._log_event(
-            'worker_deleted',
-            worker_id=worker_id,
-            reason=reason,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            worker_id=event.worker_id,
+            reason=event.reason,
+            metadata=event.metadata or {},
        )
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['status'] = 'deleted'
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['status'] = 'deleted'
             # Or del self._worker_information[worker_id]
 
+    def log_queue_status(
+        self,
+        event: QueueStatusEvent,
+    ) -> None:
+        r"""Logs the status of a task queue."""
+        self._log_event(
+            event_type=event.event_type,
+            queue_name=event.queue_name,
+            length=event.length,
+            pending_task_ids=event.pending_task_ids or [],
+            metadata=event.metadata or {},
+        )
+
+    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
+        pass
+
     def reset_task_data(self) -> None:
         r"""Resets logs and data related to tasks, preserving worker
         information.

@@ -263,22 +283,6 @@ class WorkforceLogger:
             f"{self.workforce_id}"
         )
 
-    def log_queue_status(
-        self,
-        queue_name: str,
-        length: int,
-        pending_task_ids: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-    ) -> None:
-        r"""Logs the status of a task queue."""
-        self._log_event(
-            'queue_status',
-            queue_name=queue_name,
-            length=length,
-            pending_task_ids=pending_task_ids or [],
-            metadata=metadata or {},
-        )
-
     def dump_to_json(self, file_path: str) -> None:
         r"""Dumps all log entries to a JSON file.
 
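The refactor replaces the logger's loose keyword arguments with typed event objects. A hedged usage sketch follows; it assumes the event classes in the new `camel/societies/workforce/events.py` (added in this release but not shown in this diff) accept the fields the logger reads as keyword arguments.

```python
# Usage sketch only: field names mirror the attributes the logger reads
# above (event.task_id, event.description, ...); the actual constructors
# live in the new events.py and may differ.
from camel.societies.workforce.events import TaskCreatedEvent, TaskStartedEvent
from camel.societies.workforce.workforce_logger import WorkforceLogger

wf_logger = WorkforceLogger(workforce_id="wf-demo")

wf_logger.log_task_created(
    TaskCreatedEvent(
        task_id="task-1",
        description="Summarize the quarterly report",
        parent_task_id=None,
        task_type=None,
        metadata={},
    )
)
wf_logger.log_task_started(
    TaskStartedEvent(task_id="task-1", worker_id="worker-1", metadata={})
)

# WorkforceLogger now also implements the WorkforceMetrics interface,
# so the same object can be queried for aggregate numbers.
print(wf_logger.get_kpis())
```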
camel/societies/workforce/workforce_metrics.py
ADDED

@@ -0,0 +1,33 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from abc import ABC, abstractmethod
+from typing import Any, Dict
+
+
+class WorkforceMetrics(ABC):
+    @abstractmethod
+    def reset_task_data(self) -> None:
+        pass
+
+    @abstractmethod
+    def dump_to_json(self, file_path: str) -> None:
+        pass
+
+    @abstractmethod
+    def get_ascii_tree_representation(self) -> str:
+        pass
+
+    @abstractmethod
+    def get_kpis(self) -> Dict[str, Any]:
+        pass
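Splitting the read side into its own ABC means reporting code can depend on `WorkforceMetrics` alone. A small hypothetical consumer (the `report` helper below is not part of the package):

```python
# Hypothetical helper: works with any WorkforceMetrics implementation,
# e.g. a WorkforceLogger, without knowing how events were recorded.
from typing import Any, Dict

from camel.societies.workforce.workforce_metrics import WorkforceMetrics


def report(metrics: WorkforceMetrics, json_path: str) -> Dict[str, Any]:
    """Print the task tree, persist the raw log, and return the KPIs."""
    print(metrics.get_ascii_tree_representation())
    metrics.dump_to_json(json_path)
    return metrics.get_kpis()
```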
camel/storages/vectordb_storages/oceanbase.py
CHANGED

@@ -121,10 +121,11 @@ class OceanBaseStorage(BaseVectorStorage):
             )
 
             # Get the first index parameter
-            first_index_param = next(iter(index_params))
-            self._client.create_vidx_with_vec_index_param(
-                table_name=self.table_name, vidx_param=first_index_param
-            )
+            first_index_param = next(iter(index_params), None)
+            if first_index_param is not None:
+                self._client.create_vidx_with_vec_index_param(
+                    table_name=self.table_name, vidx_param=first_index_param
+                )
 
             logger.info(f"Created table {self.table_name} with vector index")
         else:
camel/toolkits/file_toolkit.py
CHANGED

@@ -1201,6 +1201,171 @@ class FileToolkit(BaseToolkit):
         except Exception as e:
             return f"Error editing file: {e}"
 
+    def search_files(
+        self,
+        pattern: str,
+        file_types: Optional[List[str]] = None,
+        file_pattern: Optional[str] = None,
+        path: Optional[str] = None,
+    ) -> str:
+        r"""Search for a text pattern in files with specified extensions or
+        file patterns.
+
+        This method searches for a text pattern (case-insensitive substring
+        match) in files matching either the specified file types or a file
+        pattern. It returns structured results showing which files contain
+        the pattern, along with line numbers and matching content.
+
+        Args:
+            pattern (str): The text pattern to search for (case-insensitive
+                string match).
+            file_types (Optional[List[str]]): List of file extensions to
+                search (e.g., ["md", "txt", "py"]). Do not include the dot.
+                If not provided and file_pattern is also not provided,
+                defaults to ["md"] (markdown files). Ignored if file_pattern
+                is provided. (default: :obj:`None`)
+            file_pattern (Optional[str]): Glob pattern for matching files
+                (e.g., "*_workflow.md", "test_*.py"). If provided, this
+                overrides file_types. (default: :obj:`None`)
+            path (Optional[str]): Directory to search in. If not provided,
+                uses the working_directory. Can be relative or absolute.
+                (default: :obj:`None`)
+
+        Returns:
+            str: JSON-formatted string containing search results with the
+                structure:
+                {
+                    "pattern": "search_pattern",
+                    "searched_path": "/absolute/path",
+                    "file_types": ["md", "txt"],
+                    "file_pattern": "*_workflow.md",
+                    "matches": [
+                        {
+                            "file": "relative/path/to/file.md",
+                            "line": 42,
+                            "content": "matching line content"
+                        },
+                        ...
+                    ],
+                    "total_matches": 10,
+                    "files_searched": 5
+                }
+                If an error occurs, returns a JSON string with an "error" key.
+        """
+        import json
+
+        try:
+            # resolve search path
+            if path:
+                path_obj = Path(path)
+                if not path_obj.is_absolute():
+                    search_path = (self.working_directory / path_obj).resolve()
+                else:
+                    search_path = path_obj.resolve()
+            else:
+                search_path = self.working_directory
+
+            # validate that search path exists
+            if not search_path.exists():
+                return json.dumps(
+                    {"error": f"Search path does not exist: {search_path}"}
+                )
+
+            if not search_path.is_dir():
+                return json.dumps(
+                    {"error": f"Search path is not a directory: {search_path}"}
+                )
+
+            # collect all matching files
+            matching_files: List[Path] = []
+
+            if file_pattern:
+                # use file_pattern if provided (overrides file_types)
+                pattern_glob = f"**/{file_pattern}"
+                matching_files.extend(search_path.rglob(pattern_glob))
+            else:
+                # use file_types if file_pattern not provided
+                if file_types is None:
+                    file_types = ["md"]
+
+                # normalize and deduplicate file types
+                normalized_types = set()
+                for file_type in file_types:
+                    file_type = file_type.lstrip('.')
+                    if file_type:  # skip empty strings
+                        normalized_types.add(file_type)
+
+                for file_type in normalized_types:
+                    # use rglob for recursive search
+                    pattern_glob = f"**/*.{file_type}"
+                    matching_files.extend(search_path.rglob(pattern_glob))
+
+            # search through files (case-insensitive)
+            matches = []
+            files_searched = 0
+            pattern_lower = pattern.lower()
+
+            for file_path in matching_files:
+                files_searched += 1
+                try:
+                    # read file content
+                    content = file_path.read_text(
+                        encoding=self.default_encoding
+                    )
+                    lines = content.splitlines()
+
+                    # search each line for pattern (case-insensitive)
+                    for line_num, line in enumerate(lines, start=1):
+                        if pattern_lower in line.lower():
+                            # get relative path for cleaner output
+                            try:
+                                relative_path = file_path.relative_to(
+                                    search_path
+                                )
+                            except ValueError:
+                                relative_path = file_path
+
+                            matches.append(
+                                {
+                                    "file": str(relative_path),
+                                    "line": line_num,
+                                    "content": line.strip(),
+                                }
+                            )
+
+                except (UnicodeDecodeError, PermissionError) as e:
+                    # skip files that can't be read
+                    logger.debug(f"Skipping file {file_path}: {e}")
+                    continue
+
+            # build result
+            result = {
+                "pattern": pattern,
+                "searched_path": str(search_path),
+                "matches": matches,
+                "total_matches": len(matches),
+                "files_searched": files_searched,
+            }
+
+            # include file_pattern or file_types in result
+            if file_pattern:
+                result["file_pattern"] = file_pattern
+            else:
+                result["file_types"] = (
+                    sorted(normalized_types) if normalized_types else ["md"]
+                )
+
+            logger.info(
+                f"Search completed: found {len(matches)} matches "
+                f"in {files_searched} files"
+            )
+            return json.dumps(result, indent=2)
+
+        except Exception as e:
+            error_msg = f"Error during file search: {e}"
+            logger.error(error_msg)
+            return json.dumps({"error": error_msg})
+
     def get_tools(self) -> List[FunctionTool]:
         r"""Return a list of FunctionTool objects representing the functions
         in the toolkit.

@@ -1213,6 +1378,7 @@ class FileToolkit(BaseToolkit):
             FunctionTool(self.write_to_file),
             FunctionTool(self.read_file),
             FunctionTool(self.edit_file),
+            FunctionTool(self.search_files),
         ]
 
 
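A usage sketch for the new tool. The `working_directory` constructor argument is assumed from the attributes the method reads; check `FileToolkit.__init__` for the actual signature.

```python
import json

from camel.toolkits.file_toolkit import FileToolkit

# working_directory is assumed from self.working_directory above.
toolkit = FileToolkit(working_directory="./docs")

# Case-insensitive substring search; defaults to markdown files.
raw = toolkit.search_files(pattern="workforce")
print(json.loads(raw)["total_matches"])

# Restrict matches with a glob instead of extensions.
raw = toolkit.search_files(pattern="TODO", file_pattern="*_workflow.md")
for match in json.loads(raw)["matches"]:
    print(match["file"], match["line"], match["content"])
```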