camel-ai 0.2.78__py3-none-any.whl → 0.2.79a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic; see the registry's advisory page for more details.

Files changed (39)
  1. camel/__init__.py +1 -1
  2. camel/agents/_utils.py +38 -0
  3. camel/agents/chat_agent.py +1112 -287
  4. camel/datasets/base_generator.py +39 -10
  5. camel/environments/single_step.py +28 -3
  6. camel/memories/__init__.py +1 -2
  7. camel/memories/agent_memories.py +34 -0
  8. camel/memories/base.py +26 -0
  9. camel/memories/blocks/chat_history_block.py +117 -17
  10. camel/memories/context_creators/score_based.py +25 -384
  11. camel/messages/base.py +26 -0
  12. camel/models/aws_bedrock_model.py +1 -17
  13. camel/models/azure_openai_model.py +113 -67
  14. camel/models/model_factory.py +17 -1
  15. camel/models/moonshot_model.py +102 -5
  16. camel/models/openai_compatible_model.py +62 -32
  17. camel/models/openai_model.py +61 -35
  18. camel/models/samba_model.py +34 -15
  19. camel/models/sglang_model.py +41 -11
  20. camel/societies/workforce/__init__.py +2 -0
  21. camel/societies/workforce/events.py +122 -0
  22. camel/societies/workforce/role_playing_worker.py +15 -11
  23. camel/societies/workforce/single_agent_worker.py +143 -291
  24. camel/societies/workforce/utils.py +2 -1
  25. camel/societies/workforce/workflow_memory_manager.py +772 -0
  26. camel/societies/workforce/workforce.py +513 -188
  27. camel/societies/workforce/workforce_callback.py +74 -0
  28. camel/societies/workforce/workforce_logger.py +144 -140
  29. camel/societies/workforce/workforce_metrics.py +33 -0
  30. camel/storages/vectordb_storages/oceanbase.py +5 -4
  31. camel/toolkits/file_toolkit.py +166 -0
  32. camel/toolkits/message_integration.py +15 -13
  33. camel/toolkits/terminal_toolkit/terminal_toolkit.py +112 -79
  34. camel/types/enums.py +1 -0
  35. camel/utils/context_utils.py +201 -2
  36. {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/METADATA +14 -13
  37. {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/RECORD +39 -35
  38. {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/WHEEL +0 -0
  39. {camel_ai-0.2.78.dist-info → camel_ai-0.2.79a1.dist-info}/licenses/LICENSE +0 -0
@@ -62,6 +62,7 @@ class BaseGenerator(abc.ABC, IterableDataset):
62
62
  self._buffer = buffer
63
63
  self._data: List[DataPoint] = []
64
64
  self._batch_to_save: List[DataPoint] = []
65
+ self._iter_position: int = 0
65
66
 
66
67
  if data_path:
67
68
  file_path = Path(data_path)
@@ -103,9 +104,9 @@ class BaseGenerator(abc.ABC, IterableDataset):
103
104
  r"""Async iterator that yields datapoints dynamically.
104
105
 
105
106
  If a `data_path` was provided during initialization, those datapoints
106
- are yielded first. When self._data is empty, 20 new datapoints
107
- are generated. Every 100 yields, the batch is appended to the
108
- JSONL file or discarded if `cache` is None.
107
+ are yielded first. When self._iter_position reaches the end of _data,
108
+ new datapoints are generated. Every 100 yields, the batch is appended
109
+ to the JSONL file or discarded if `cache` is None.
109
110
 
110
111
  Yields:
111
112
  DataPoint: A single datapoint.
@@ -113,9 +114,10 @@ class BaseGenerator(abc.ABC, IterableDataset):
113
114
 
114
115
  async def generator():
115
116
  while True:
116
- if not self._data:
117
+ if self._iter_position >= len(self._data):
117
118
  await self.generate_new(self._buffer)
118
- datapoint = self._data.pop(0)
119
+ datapoint = self._data[self._iter_position]
120
+ self._iter_position += 1
119
121
  yield datapoint
120
122
  self._batch_to_save.append(datapoint)
121
123
  if len(self._batch_to_save) == 100:
@@ -132,9 +134,9 @@ class BaseGenerator(abc.ABC, IterableDataset):
132
134
  r"""Synchronous iterator for PyTorch IterableDataset compatibility.
133
135
 
134
136
  If a `data_path` was provided during initialization, those datapoints
135
- are yielded first. When self._data is empty, 20 new datapoints
136
- are generated. Every 100 yields, the batch is appended to the
137
- JSONL file or discarded if `cache` is None.
137
+ are yielded first. When self._iter_position reaches the end of _data,
138
+ new datapoints are generated. Every 100 yields, the batch is appended
139
+ to the JSONL file or discarded if `cache` is None.
138
140
 
139
141
  Yields:
140
142
  DataPoint: A single datapoint.
@@ -150,9 +152,10 @@ class BaseGenerator(abc.ABC, IterableDataset):
150
152
  raise
151
153
 
152
154
  while True:
153
- if not self._data:
155
+ if self._iter_position >= len(self._data):
154
156
  asyncio.run(self.generate_new(self._buffer))
155
- datapoint = self._data.pop(0)
157
+ datapoint = self._data[self._iter_position]
158
+ self._iter_position += 1
156
159
  yield datapoint
157
160
  self._batch_to_save.append(datapoint)
158
161
  if len(self._batch_to_save) == 100:
@@ -248,6 +251,7 @@ class BaseGenerator(abc.ABC, IterableDataset):
248
251
 
249
252
  self.save_to_jsonl(file_path)
250
253
  self._data = []
254
+ self._iter_position = 0
251
255
  logger.info(f"Data flushed to {file_path} and cleared from the memory")
252
256
 
253
257
  def _init_from_jsonl(self, file_path: Path) -> List[Dict[str, Any]]:
@@ -290,3 +294,28 @@ class BaseGenerator(abc.ABC, IterableDataset):
290
294
  f"Successfully loaded {len(raw_data)} items from {file_path}"
291
295
  )
292
296
  return raw_data
297
+
298
+ def __getitem__(self, index: int) -> DataPoint:
299
+ r"""Get a datapoint by index without removing the datapoint from _data.
300
+
301
+ Args:
302
+ index (int): Index of the datapoint to retrieve.
303
+
304
+ Returns:
305
+ DataPoint: The datapoint at the specified index.
306
+
307
+ Raises:
308
+ IndexError: If the index is out of range.
309
+ """
310
+ if index < 0 or index >= len(self._data):
311
+ raise IndexError(f"Index {index} is out of range")
312
+
313
+ return self._data[index]
314
+
315
+ def __len__(self) -> int:
316
+ r"""Get the number of datapoints in the dataset.
317
+
318
+ Returns:
319
+ int: The number of datapoints.
320
+ """
321
+ return len(self._data)
@@ -218,9 +218,34 @@ class SingleStepEnv:
218
218
  return observations[0] if batch_size == 1 else observations
219
219
 
220
220
  elif isinstance(self.dataset, BaseGenerator):
221
- self._states = [
222
- await self.dataset.async_sample() for _ in range(batch_size)
223
- ]
221
+ # Generate more data if needed
222
+ if batch_size > len(self.dataset):
223
+ new_datapoints_needed = batch_size - len(self.dataset)
224
+ await self.dataset.generate_new(n=new_datapoints_needed)
225
+
226
+ # Verify that enough data was generated
227
+ if len(self.dataset) < batch_size:
228
+ raise RuntimeError(
229
+ f"Failed to generate enough datapoints. "
230
+ f"Requested {batch_size}, but only "
231
+ f"{len(self.dataset)} available after generation."
232
+ )
233
+
234
+ # Choose sampling strategy based on whether seed is provided
235
+ if seed is not None:
236
+ # Deterministic random sampling when seed is provided
237
+ random_indices = rng.sample(
238
+ range(len(self.dataset)), batch_size
239
+ )
240
+ self._states = [self.dataset[ind] for ind in random_indices]
241
+ else:
242
+ # Sequential sampling when no seed (backward compatible)
243
+ # Use async_sample to maintain sequential behavior
244
+ self._states = [
245
+ await self.dataset.async_sample()
246
+ for _ in range(batch_size)
247
+ ]
248
+
224
249
  self.current_batch_size = batch_size
225
250
  self._states_done = [False] * batch_size
226
251
 
@@ -18,7 +18,7 @@ from .agent_memories import (
18
18
  VectorDBMemory,
19
19
  )
20
20
  from .base import AgentMemory, BaseContextCreator, MemoryBlock
21
- from .blocks.chat_history_block import ChatHistoryBlock, EmptyMemoryWarning
21
+ from .blocks.chat_history_block import ChatHistoryBlock
22
22
  from .blocks.vectordb_block import VectorDBBlock
23
23
  from .context_creators.score_based import ScoreBasedContextCreator
24
24
  from .records import ContextRecord, MemoryRecord
@@ -35,5 +35,4 @@ __all__ = [
35
35
  'ChatHistoryBlock',
36
36
  'VectorDBBlock',
37
37
  'LongtermAgentMemory',
38
- 'EmptyMemoryWarning',
39
38
  ]
@@ -129,6 +129,16 @@ class ChatHistoryMemory(AgentMemory):
129
129
  # Save the modified records back to storage
130
130
  self._chat_history_block.storage.save(record_dicts)
131
131
 
132
+ def pop_records(self, count: int) -> List[MemoryRecord]:
133
+ r"""Removes the most recent records from chat history memory."""
134
+ return self._chat_history_block.pop_records(count)
135
+
136
+ def remove_records_by_indices(
137
+ self, indices: List[int]
138
+ ) -> List[MemoryRecord]:
139
+ r"""Removes records at specified indices from chat history memory."""
140
+ return self._chat_history_block.remove_records_by_indices(indices)
141
+
132
142
 
133
143
  class VectorDBMemory(AgentMemory):
134
144
  r"""An agent memory wrapper of :obj:`VectorDBBlock`. This memory queries
@@ -193,6 +203,20 @@ class VectorDBMemory(AgentMemory):
193
203
  r"""Removes all records from the vector database memory."""
194
204
  self._vectordb_block.clear()
195
205
 
206
+ def pop_records(self, count: int) -> List[MemoryRecord]:
207
+ r"""Rolling back is unsupported for vector database memory."""
208
+ raise NotImplementedError(
209
+ "VectorDBMemory does not support removing historical records."
210
+ )
211
+
212
+ def remove_records_by_indices(
213
+ self, indices: List[int]
214
+ ) -> List[MemoryRecord]:
215
+ r"""Removing by indices is unsupported for vector database memory."""
216
+ raise NotImplementedError(
217
+ "VectorDBMemory does not support removing records by indices."
218
+ )
219
+
196
220
 
197
221
  class LongtermAgentMemory(AgentMemory):
198
222
  r"""An implementation of the :obj:`AgentMemory` abstract base class for
@@ -277,3 +301,13 @@ class LongtermAgentMemory(AgentMemory):
277
301
  r"""Removes all records from the memory."""
278
302
  self.chat_history_block.clear()
279
303
  self.vector_db_block.clear()
304
+
305
+ def pop_records(self, count: int) -> List[MemoryRecord]:
306
+ r"""Removes recent chat history records while leaving vector memory."""
307
+ return self.chat_history_block.pop_records(count)
308
+
309
+ def remove_records_by_indices(
310
+ self, indices: List[int]
311
+ ) -> List[MemoryRecord]:
312
+ r"""Removes records at specified indices from chat history."""
313
+ return self.chat_history_block.remove_records_by_indices(indices)
camel/memories/base.py CHANGED
@@ -45,6 +45,32 @@ class MemoryBlock(ABC):
45
45
  """
46
46
  self.write_records([record])
47
47
 
48
+ def pop_records(self, count: int) -> List[MemoryRecord]:
49
+ r"""Removes records from the memory and returns the removed records.
50
+
51
+ Args:
52
+ count (int): Number of records to remove.
53
+
54
+ Returns:
55
+ List[MemoryRecord]: The records that were removed from the memory
56
+ in their original order.
57
+ """
58
+ raise NotImplementedError
59
+
60
+ def remove_records_by_indices(
61
+ self, indices: List[int]
62
+ ) -> List[MemoryRecord]:
63
+ r"""Removes records at specified indices from the memory.
64
+
65
+ Args:
66
+ indices (List[int]): List of indices to remove. Indices should be
67
+ valid positions in the current record list.
68
+
69
+ Returns:
70
+ List[MemoryRecord]: The removed records in their original order.
71
+ """
72
+ raise NotImplementedError
73
+
48
74
  @abstractmethod
49
75
  def clear(self) -> None:
50
76
  r"""Clears all messages from the memory."""
@@ -11,7 +11,6 @@
11
11
  # See the License for the specific language governing permissions and
12
12
  # limitations under the License.
13
13
  # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14
- import warnings
15
14
  from typing import List, Optional
16
15
 
17
16
  from camel.memories.base import MemoryBlock
@@ -21,17 +20,6 @@ from camel.storages.key_value_storages.in_memory import InMemoryKeyValueStorage
21
20
  from camel.types import OpenAIBackendRole
22
21
 
23
22
 
24
- class EmptyMemoryWarning(UserWarning):
25
- """Warning raised when attempting to access an empty memory.
26
-
27
- This warning is raised when operations are performed on memory
28
- that contains no records. It can be safely caught and suppressed
29
- in contexts where empty memory is expected.
30
- """
31
-
32
- pass
33
-
34
-
35
23
  class ChatHistoryBlock(MemoryBlock):
36
24
  r"""An implementation of the :obj:`MemoryBlock` abstract base class for
37
25
  maintaining a record of chat histories.
@@ -81,11 +69,8 @@ class ChatHistoryBlock(MemoryBlock):
81
69
  """
82
70
  record_dicts = self.storage.load()
83
71
  if len(record_dicts) == 0:
84
- warnings.warn(
85
- "The `ChatHistoryMemory` is empty.",
86
- EmptyMemoryWarning,
87
- stacklevel=1,
88
- )
72
+ # Empty memory is a valid state (e.g., during initialization).
73
+ # Users can check if memory is empty by checking the returned list.
89
74
  return list()
90
75
 
91
76
  if window_size is not None and window_size >= 0:
@@ -182,3 +167,118 @@ class ChatHistoryBlock(MemoryBlock):
182
167
  def clear(self) -> None:
183
168
  r"""Clears all chat messages from the memory."""
184
169
  self.storage.clear()
170
+
171
+ def pop_records(self, count: int) -> List[MemoryRecord]:
172
+ r"""Removes the most recent records from the memory.
173
+
174
+ Args:
175
+ count (int): Number of records to remove from the end of the
176
+ conversation history. A value of 0 results in no changes.
177
+
178
+ Returns:
179
+ List[MemoryRecord]: The removed records in chronological order.
180
+ """
181
+ if not isinstance(count, int):
182
+ raise TypeError("`count` must be an integer.")
183
+ if count < 0:
184
+ raise ValueError("`count` must be non-negative.")
185
+ if count == 0:
186
+ return []
187
+
188
+ record_dicts = self.storage.load()
189
+ if not record_dicts:
190
+ return []
191
+
192
+ # Preserve initial system/developer instruction if present.
193
+ protected_prefix = (
194
+ 1
195
+ if (
196
+ record_dicts
197
+ and record_dicts[0]['role_at_backend']
198
+ in {
199
+ OpenAIBackendRole.SYSTEM.value,
200
+ OpenAIBackendRole.DEVELOPER.value,
201
+ }
202
+ )
203
+ else 0
204
+ )
205
+
206
+ removable_count = max(len(record_dicts) - protected_prefix, 0)
207
+ if removable_count == 0:
208
+ return []
209
+
210
+ pop_count = min(count, removable_count)
211
+ split_index = len(record_dicts) - pop_count
212
+
213
+ popped_dicts = record_dicts[split_index:]
214
+ remaining_dicts = record_dicts[:split_index]
215
+
216
+ self.storage.clear()
217
+ if remaining_dicts:
218
+ self.storage.save(remaining_dicts)
219
+
220
+ return [MemoryRecord.from_dict(record) for record in popped_dicts]
221
+
222
+ def remove_records_by_indices(
223
+ self, indices: List[int]
224
+ ) -> List[MemoryRecord]:
225
+ r"""Removes records at specified indices from the memory.
226
+
227
+ Args:
228
+ indices (List[int]): List of indices to remove. Indices are
229
+ positions in the current record list (0-based).
230
+ System/developer messages at index 0 are protected and will
231
+ not be removed.
232
+
233
+ Returns:
234
+ List[MemoryRecord]: The removed records in their original order.
235
+ """
236
+ if not indices:
237
+ return []
238
+
239
+ record_dicts = self.storage.load()
240
+ if not record_dicts:
241
+ return []
242
+
243
+ # Preserve initial system/developer instruction if present.
244
+ protected_prefix = (
245
+ 1
246
+ if (
247
+ record_dicts
248
+ and record_dicts[0]['role_at_backend']
249
+ in {
250
+ OpenAIBackendRole.SYSTEM.value,
251
+ OpenAIBackendRole.DEVELOPER.value,
252
+ }
253
+ )
254
+ else 0
255
+ )
256
+
257
+ # Filter out protected indices and invalid ones
258
+ valid_indices = sorted(
259
+ {
260
+ idx
261
+ for idx in indices
262
+ if idx >= protected_prefix and idx < len(record_dicts)
263
+ }
264
+ )
265
+
266
+ if not valid_indices:
267
+ return []
268
+
269
+ # Extract records to remove (in original order)
270
+ removed_records = [record_dicts[idx] for idx in valid_indices]
271
+
272
+ # Build remaining records by excluding removed indices
273
+ remaining_dicts = [
274
+ record
275
+ for idx, record in enumerate(record_dicts)
276
+ if idx not in valid_indices
277
+ ]
278
+
279
+ # Save back to storage
280
+ self.storage.clear()
281
+ if remaining_dicts:
282
+ self.storage.save(remaining_dicts)
283
+
284
+ return [MemoryRecord.from_dict(record) for record in removed_records]