camel-ai 0.2.77__py3-none-any.whl → 0.2.79a0__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of camel-ai might be problematic; see the registry page for details.
- camel/__init__.py +1 -1
- camel/agents/chat_agent.py +321 -325
- camel/datasets/base_generator.py +39 -10
- camel/environments/single_step.py +28 -3
- camel/memories/__init__.py +1 -2
- camel/memories/blocks/chat_history_block.py +2 -17
- camel/models/aws_bedrock_model.py +1 -17
- camel/models/moonshot_model.py +102 -5
- camel/societies/workforce/events.py +122 -0
- camel/societies/workforce/single_agent_worker.py +164 -34
- camel/societies/workforce/workforce.py +417 -156
- camel/societies/workforce/workforce_callback.py +74 -0
- camel/societies/workforce/workforce_logger.py +144 -140
- camel/societies/workforce/workforce_metrics.py +33 -0
- camel/toolkits/excel_toolkit.py +1 -1
- camel/toolkits/file_toolkit.py +3 -2
- camel/utils/context_utils.py +53 -0
- {camel_ai-0.2.77.dist-info → camel_ai-0.2.79a0.dist-info}/METADATA +23 -13
- {camel_ai-0.2.77.dist-info → camel_ai-0.2.79a0.dist-info}/RECORD +21 -18
- {camel_ai-0.2.77.dist-info → camel_ai-0.2.79a0.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.77.dist-info → camel_ai-0.2.79a0.dist-info}/licenses/LICENSE +0 -0

camel/societies/workforce/workforce_callback.py
ADDED
@@ -0,0 +1,74 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+
+from .events import (
+    AllTasksCompletedEvent,
+    TaskAssignedEvent,
+    TaskCompletedEvent,
+    TaskCreatedEvent,
+    TaskDecomposedEvent,
+    TaskFailedEvent,
+    TaskStartedEvent,
+    WorkerCreatedEvent,
+    WorkerDeletedEvent,
+)
+
+
+class WorkforceCallback(ABC):
+    r"""Interface for recording workforce lifecycle events.
+
+    Implementations should persist or stream events as appropriate.
+    """
+
+    @abstractmethod
+    def log_task_created(
+        self,
+        event: TaskCreatedEvent,
+    ) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_decomposed(self, event: TaskDecomposedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_assigned(self, event: TaskAssignedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_started(self, event: TaskStartedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_completed(self, event: TaskCompletedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_task_failed(self, event: TaskFailedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_worker_created(self, event: WorkerCreatedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_worker_deleted(self, event: WorkerDeletedEvent) -> None:
+        pass
+
+    @abstractmethod
+    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
+        pass
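
The new WorkforceCallback base class turns workforce lifecycle reporting into a pluggable interface: any object implementing these nine log_* hooks can observe task and worker events. A minimal sketch of a custom implementation, assuming only what this diff shows (the abstract method names, the event classes imported from camel.societies.workforce.events, and the event fields that WorkforceLogger reads further below; events.py itself is not included in this diff):

# Sketch: a custom callback that prints events instead of storing them.
# Event classes and field names are taken from this diff; events.py itself
# is not shown here.
from camel.societies.workforce.events import (
    AllTasksCompletedEvent,
    TaskAssignedEvent,
    TaskCompletedEvent,
    TaskCreatedEvent,
    TaskDecomposedEvent,
    TaskFailedEvent,
    TaskStartedEvent,
    WorkerCreatedEvent,
    WorkerDeletedEvent,
)
from camel.societies.workforce.workforce_callback import WorkforceCallback


class PrintingCallback(WorkforceCallback):
    def log_task_created(self, event: TaskCreatedEvent) -> None:
        print(f"created {event.task_id}: {event.description}")

    def log_task_decomposed(self, event: TaskDecomposedEvent) -> None:
        print(f"decomposed {event.parent_task_id} -> {event.subtask_ids}")

    def log_task_assigned(self, event: TaskAssignedEvent) -> None:
        print(f"assigned {event.task_id} to {event.worker_id}")

    def log_task_started(self, event: TaskStartedEvent) -> None:
        print(f"started {event.task_id} on {event.worker_id}")

    def log_task_completed(self, event: TaskCompletedEvent) -> None:
        print(f"completed {event.task_id}")

    def log_task_failed(self, event: TaskFailedEvent) -> None:
        print(f"failed {event.task_id}: {event.error_message}")

    def log_worker_created(self, event: WorkerCreatedEvent) -> None:
        print(f"worker created {event.worker_id} ({event.role})")

    def log_worker_deleted(self, event: WorkerDeletedEvent) -> None:
        print(f"worker deleted {event.worker_id}: {event.reason}")

    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
        print("all tasks completed")
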

camel/societies/workforce/workforce_logger.py
CHANGED
@@ -16,12 +16,26 @@ from datetime import datetime, timezone
 from typing import Any, Dict, List, Optional
 
 from camel.logger import get_logger
+from camel.societies.workforce.events import (
+    AllTasksCompletedEvent,
+    QueueStatusEvent,
+    TaskAssignedEvent,
+    TaskCompletedEvent,
+    TaskCreatedEvent,
+    TaskDecomposedEvent,
+    TaskFailedEvent,
+    TaskStartedEvent,
+    WorkerCreatedEvent,
+    WorkerDeletedEvent,
+)
+from camel.societies.workforce.workforce_callback import WorkforceCallback
+from camel.societies.workforce.workforce_metrics import WorkforceMetrics
 from camel.types.agents import ToolCallingRecord
 
 logger = get_logger(__name__)
 
 
-class WorkforceLogger:
+class WorkforceLogger(WorkforceCallback, WorkforceMetrics):
     r"""Logs events and metrics for a Workforce instance."""
 
     def __init__(self, workforce_id: str):
@@ -55,195 +69,201 @@ class WorkforceLogger:
 
     def log_task_created(
         self,
-        task_id: str,
-        description: str,
-        parent_task_id: Optional[str] = None,
-        task_type: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskCreatedEvent,
     ) -> None:
         r"""Logs the creation of a new task."""
         self._log_event(
-            'task_created',
-            task_id=task_id,
-            description=description,
-            parent_task_id=parent_task_id,
-            task_type=task_type,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            description=event.description,
+            parent_task_id=event.parent_task_id,
+            task_type=event.task_type,
+            metadata=event.metadata or {},
         )
-        self._task_hierarchy[task_id] = {
-            'parent': parent_task_id,
+        self._task_hierarchy[event.task_id] = {
+            'parent': event.parent_task_id,
             'children': [],
             'status': 'created',
-            'description': description,
+            'description': event.description,
             'assigned_to': None,
-            **(metadata or {}),
+            **(event.metadata or {}),
         }
-        if parent_task_id and parent_task_id in self._task_hierarchy:
-            self._task_hierarchy[parent_task_id]['children'].append(task_id)
+        if (
+            event.parent_task_id
+            and event.parent_task_id in self._task_hierarchy
+        ):
+            self._task_hierarchy[event.parent_task_id]['children'].append(
+                event.task_id
+            )
 
     def log_task_decomposed(
         self,
-        parent_task_id: str,
-        subtask_ids: List[str],
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskDecomposedEvent,
     ) -> None:
         r"""Logs the decomposition of a task into subtasks."""
         self._log_event(
-            'task_decomposed',
-            parent_task_id=parent_task_id,
-            subtask_ids=subtask_ids,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            parent_task_id=event.parent_task_id,
+            subtask_ids=event.subtask_ids,
+            metadata=event.metadata or {},
         )
-        if parent_task_id in self._task_hierarchy:
-            self._task_hierarchy[parent_task_id]['status'] = "decomposed"
+        if event.parent_task_id in self._task_hierarchy:
+            self._task_hierarchy[event.parent_task_id]['status'] = "decomposed"
 
     def log_task_assigned(
         self,
-        task_id: str,
-        worker_id: str,
-        queue_time_seconds: Optional[float] = None,
-        dependencies: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskAssignedEvent,
    ) -> None:
         r"""Logs the assignment of a task to a worker."""
         self._log_event(
-            'task_assigned',
-            task_id=task_id,
-            worker_id=worker_id,
-            queue_time_seconds=queue_time_seconds,
-            dependencies=dependencies or [],
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            queue_time_seconds=event.queue_time_seconds,
+            dependencies=event.dependencies or [],
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'assigned'
-            self._task_hierarchy[task_id]['assigned_to'] = worker_id
-
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = task_id
-            self._worker_information[worker_id]['status'] = 'busy'
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'assigned'
+            self._task_hierarchy[event.task_id]['assigned_to'] = (
+                event.worker_id
+            )
+            self._task_hierarchy[event.task_id]['dependencies'] = (
+                event.dependencies or []
+            )
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = (
+                event.task_id
+            )
+            self._worker_information[event.worker_id]['status'] = 'busy'
 
     def log_task_started(
         self,
-        task_id: str,
-        worker_id: str,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskStartedEvent,
     ) -> None:
         r"""Logs when a worker starts processing a task."""
         self._log_event(
-            'task_started',
-            task_id=task_id,
-            worker_id=worker_id,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'processing'
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'processing'
 
-    def log_task_completed(
-        self,
-        task_id: str,
-        worker_id: str,
-        result_summary: Optional[str] = None,
-        processing_time_seconds: Optional[float] = None,
-        token_usage: Optional[Dict[str, int]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-    ) -> None:
+    def log_task_completed(self, event: TaskCompletedEvent) -> None:
         r"""Logs the successful completion of a task."""
         self._log_event(
-            'task_completed',
-            task_id=task_id,
-            worker_id=worker_id,
-            result_summary=result_summary,
-            processing_time_seconds=processing_time_seconds,
-            token_usage=token_usage or {},
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            result_summary=event.result_summary,
+            processing_time_seconds=event.processing_time_seconds,
+            token_usage=event.token_usage or {},
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'completed'
-            self._task_hierarchy[task_id]['assigned_to'] = None
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'completed'
+            self._task_hierarchy[event.task_id]['assigned_to'] = None
             # Store processing time in task hierarchy for display in tree
-            if processing_time_seconds is not None:
-                self._task_hierarchy[task_id][
-                    'completion_time_seconds'
-                ] = processing_time_seconds
+            if event.processing_time_seconds is not None:
+                self._task_hierarchy[event.task_id][
+                    'completion_time_seconds'
+                ] = event.processing_time_seconds
             # Store token usage in task hierarchy for display in tree
-            if token_usage is not None:
-                self._task_hierarchy[task_id]['token_usage'] = token_usage
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = None
-            self._worker_information[worker_id]['status'] = 'idle'
-            self._worker_information[worker_id]['tasks_completed'] = (
-                self._worker_information[worker_id].get('tasks_completed', 0)
+            if event.token_usage is not None:
+                self._task_hierarchy[event.task_id]['token_usage'] = (
+                    event.token_usage
+                )
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = None
+            self._worker_information[event.worker_id]['status'] = 'idle'
+            self._worker_information[event.worker_id]['tasks_completed'] = (
+                self._worker_information[event.worker_id].get(
+                    'tasks_completed', 0
+                )
                 + 1
             )
 
     def log_task_failed(
         self,
-        task_id: str,
-        error_message: str,
-        worker_id: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: TaskFailedEvent,
     ) -> None:
         r"""Logs the failure of a task."""
         self._log_event(
-            'task_failed',
-            task_id=task_id,
-            worker_id=worker_id,
-            error_message=error_message,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            task_id=event.task_id,
+            worker_id=event.worker_id,
+            error_message=event.error_message,
+            metadata=event.metadata or {},
         )
-        if task_id in self._task_hierarchy:
-            self._task_hierarchy[task_id]['status'] = 'failed'
-            self._task_hierarchy[task_id]['error'] = error_message
-            self._task_hierarchy[task_id]['assigned_to'] = None
-        if worker_id and worker_id in self._worker_information:
-            self._worker_information[worker_id]['current_task_id'] = None
-            self._worker_information[worker_id]['status'] = 'idle'
-            self._worker_information[worker_id]['tasks_failed'] = (
-                self._worker_information[worker_id].get('tasks_failed', 0) + 1
+        if event.task_id in self._task_hierarchy:
+            self._task_hierarchy[event.task_id]['status'] = 'failed'
+            self._task_hierarchy[event.task_id]['error'] = event.error_message
+            self._task_hierarchy[event.task_id]['assigned_to'] = None
+        if event.worker_id and event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['current_task_id'] = None
+            self._worker_information[event.worker_id]['status'] = 'idle'
+            self._worker_information[event.worker_id]['tasks_failed'] = (
+                self._worker_information[event.worker_id].get(
+                    'tasks_failed', 0
+                )
+                + 1
             )
 
     def log_worker_created(
         self,
-        worker_id: str,
-        worker_type: str,
-        role: str,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: WorkerCreatedEvent,
     ) -> None:
         r"""Logs the creation of a new worker."""
         self._log_event(
-            'worker_created',
-            worker_id=worker_id,
-            worker_type=worker_type,
-            role=role,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            worker_id=event.worker_id,
+            worker_type=event.worker_type,
+            role=event.role,
+            metadata=event.metadata or {},
         )
-        self._worker_information[worker_id] = {
-            'type': worker_type,
-            'role': role,
+        self._worker_information[event.worker_id] = {
+            'type': event.worker_type,
+            'role': event.role,
             'status': 'idle',
             'current_task_id': None,
             'tasks_completed': 0,
             'tasks_failed': 0,
-            **(metadata or {}),
+            **(event.metadata or {}),
         }
 
     def log_worker_deleted(
         self,
-        worker_id: str,
-        reason: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        event: WorkerDeletedEvent,
     ) -> None:
         r"""Logs the deletion of a worker."""
         self._log_event(
-            'worker_deleted',
-            worker_id=worker_id,
-            reason=reason,
-            metadata=metadata or {},
+            event_type=event.event_type,
+            worker_id=event.worker_id,
+            reason=event.reason,
+            metadata=event.metadata or {},
         )
-        if worker_id in self._worker_information:
-            self._worker_information[worker_id]['status'] = 'deleted'
+        if event.worker_id in self._worker_information:
+            self._worker_information[event.worker_id]['status'] = 'deleted'
         # Or del self._worker_information[worker_id]
 
+    def log_queue_status(
+        self,
+        event: QueueStatusEvent,
+    ) -> None:
+        r"""Logs the status of a task queue."""
+        self._log_event(
+            event_type=event.event_type,
+            queue_name=event.queue_name,
+            length=event.length,
+            pending_task_ids=event.pending_task_ids or [],
+            metadata=event.metadata or {},
+        )
+
+    def log_all_tasks_completed(self, event: AllTasksCompletedEvent) -> None:
+        pass
+
     def reset_task_data(self) -> None:
         r"""Resets logs and data related to tasks, preserving worker
         information.
@@ -263,22 +283,6 @@ class WorkforceLogger:
             f"{self.workforce_id}"
         )
 
-    def log_queue_status(
-        self,
-        queue_name: str,
-        length: int,
-        pending_task_ids: Optional[List[str]] = None,
-        metadata: Optional[Dict[str, Any]] = None,
-    ) -> None:
-        r"""Logs the status of a task queue."""
-        self._log_event(
-            'queue_status',
-            queue_name=queue_name,
-            length=length,
-            pending_task_ids=pending_task_ids or [],
-            metadata=metadata or {},
-        )
-
     def dump_to_json(self, file_path: str) -> None:
         r"""Dumps all log entries to a JSON file.
 
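
After this refactor, WorkforceLogger consumes a single event object per hook instead of loose keyword arguments, and it declares itself as implementing both WorkforceCallback and WorkforceMetrics. A hedged sketch of the new call pattern follows; the TaskCreatedEvent constructor arguments are an assumption based on the fields the logger reads above, since events.py is not part of this diff:

# Sketch of the event-object call pattern. The keyword arguments passed to
# TaskCreatedEvent are an assumption based on the fields read above; the
# actual constructor lives in events.py, which this diff does not show.
from camel.societies.workforce.events import TaskCreatedEvent
from camel.societies.workforce.workforce_callback import WorkforceCallback
from camel.societies.workforce.workforce_logger import WorkforceLogger

logger = WorkforceLogger(workforce_id="wf_demo")
assert isinstance(logger, WorkforceCallback)  # new in this release

event = TaskCreatedEvent(
    task_id="task_1",
    description="Summarize the quarterly report",
    parent_task_id=None,
    task_type=None,
    metadata={},
)
# In 0.2.77 this took task_id=..., description=... directly.
logger.log_task_created(event)
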

camel/societies/workforce/workforce_metrics.py
ADDED
@@ -0,0 +1,33 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from abc import ABC, abstractmethod
+from typing import Any, Dict
+
+
+class WorkforceMetrics(ABC):
+    @abstractmethod
+    def reset_task_data(self) -> None:
+        pass
+
+    @abstractmethod
+    def dump_to_json(self, file_path: str) -> None:
+        pass
+
+    @abstractmethod
+    def get_ascii_tree_representation(self) -> str:
+        pass
+
+    @abstractmethod
+    def get_kpis(self) -> Dict[str, Any]:
+        pass

camel/toolkits/excel_toolkit.py
CHANGED
@@ -872,7 +872,7 @@ class ExcelToolkit(BaseToolkit):
         import csv
 
         with open(
-            resolved_csv_path, 'w', newline='', encoding='utf-8'
+            resolved_csv_path, 'w', newline='', encoding='utf-8-sig'
         ) as csvfile:
             writer = csv.writer(csvfile)
             writer.writerows(data)
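
The only change here is the CSV encoding: utf-8-sig prepends a UTF-8 byte order mark, which Excel uses to detect UTF-8 and display non-ASCII text correctly, whereas plain utf-8 output is often mis-decoded by Excel. A standalone sketch of the difference (the file name is illustrative); the same default is applied to FileToolkit's CSV writer below:

# Standalone illustration: utf-8-sig writes a 3-byte BOM that spreadsheet
# software uses to recognize UTF-8. The file name is illustrative only.
import csv

rows = [["name", "city"], ["Søren", "Köln"]]

with open("demo.csv", "w", newline="", encoding="utf-8-sig") as f:
    csv.writer(f).writerows(rows)

with open("demo.csv", "rb") as f:
    print(f.read(3))  # b'\xef\xbb\xbf' -- the BOM; absent with encoding="utf-8"
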

camel/toolkits/file_toolkit.py
CHANGED
@@ -906,7 +906,7 @@ class FileToolkit(BaseToolkit):
         self,
         file_path: Path,
         content: Union[str, List[List]],
-        encoding: str = "utf-8",
+        encoding: str = "utf-8-sig",
     ) -> None:
         r"""Write CSV content to a file.
 
@@ -914,7 +914,8 @@ class FileToolkit(BaseToolkit):
             file_path (Path): The target file path.
             content (Union[str, List[List]]): The CSV content as a string or
                 list of lists.
-            encoding (str): Character encoding to use.
+            encoding (str): Character encoding to use.
+                (default: :obj:`utf-8-sig`)
         """
         import csv
 

camel/utils/context_utils.py
CHANGED
@@ -13,6 +13,7 @@
 # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
 
 import os
+import re
 from datetime import datetime
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional
@@ -131,6 +132,10 @@ class ContextUtility:
     - Shared session management for workforce workflows
     """
 
+    # maximum filename length for workflow files (chosen for filesystem
+    # compatibility and readability)
+    MAX_WORKFLOW_FILENAME_LENGTH: ClassVar[int] = 50
+
     # Class variables for shared session management
     _shared_sessions: ClassVar[Dict[str, 'ContextUtility']] = {}
     _default_workforce_session: ClassVar[Optional['ContextUtility']] = None
@@ -191,6 +196,54 @@ class ContextUtility:
         timestamp = datetime.now().strftime('%Y%m%d_%H%M%S_%f')
         return f"session_{timestamp}"
 
+    @staticmethod
+    def sanitize_workflow_filename(
+        name: str,
+        max_length: Optional[int] = None,
+    ) -> str:
+        r"""Sanitize a name string for use as a workflow filename.
+
+        Converts the input string to a safe filename by:
+        - converting to lowercase
+        - replacing spaces with underscores
+        - removing special characters (keeping only alphanumeric and
+          underscores)
+        - truncating to maximum length if specified
+
+        Args:
+            name (str): The name string to sanitize (e.g., role_name or
+                task_title).
+            max_length (Optional[int]): Maximum length for the sanitized
+                filename. If None, uses MAX_WORKFLOW_FILENAME_LENGTH.
+                (default: :obj:`None`)
+
+        Returns:
+            str: Sanitized filename string suitable for filesystem use.
+                Returns "agent" if sanitization results in empty string.
+
+        Example:
+            >>> ContextUtility.sanitize_workflow_filename("Data Analyst!")
+            'data_analyst'
+            >>> ContextUtility.sanitize_workflow_filename("Test@123", 5)
+            'test1'
+        """
+        if max_length is None:
+            max_length = ContextUtility.MAX_WORKFLOW_FILENAME_LENGTH
+
+        # sanitize: lowercase, spaces to underscores, remove special chars
+        clean_name = name.lower().replace(" ", "_")
+        clean_name = re.sub(r'[^a-z0-9_]', '', clean_name)
+
+        # truncate if too long
+        if len(clean_name) > max_length:
+            clean_name = clean_name[:max_length]
+
+        # ensure it's not empty after sanitization
+        if not clean_name:
+            clean_name = "agent"
+
+        return clean_name
+
     # ========= GENERIC FILE MANAGEMENT METHODS =========
 
     def _ensure_directory_exists(self) -> None:
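
sanitize_workflow_filename is a plain static method, so it can be exercised directly. A short usage sketch; the "_workflow.md" naming convention at the end is an assumption for illustration, not something this diff shows:

# Usage sketch for the new helper; the "_workflow.md" naming scheme below is
# hypothetical and only for illustration.
from camel.utils.context_utils import ContextUtility

stem = ContextUtility.sanitize_workflow_filename("Senior Data Analyst (EU/US)")
print(stem)  # senior_data_analyst_euus

print(ContextUtility.sanitize_workflow_filename("!!!"))  # "agent" (empty after cleanup)

workflow_file = f"{stem}_workflow.md"
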