swarms 7.8.3__py3-none-any.whl → 7.8.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. swarms/agents/ape_agent.py +5 -22
  2. swarms/agents/consistency_agent.py +1 -1
  3. swarms/agents/i_agent.py +1 -1
  4. swarms/agents/reasoning_agents.py +99 -3
  5. swarms/agents/reasoning_duo.py +1 -1
  6. swarms/cli/main.py +1 -1
  7. swarms/communication/__init__.py +1 -0
  8. swarms/communication/duckdb_wrap.py +32 -2
  9. swarms/communication/pulsar_struct.py +45 -19
  10. swarms/communication/redis_wrap.py +56 -11
  11. swarms/communication/supabase_wrap.py +1659 -0
  12. swarms/prompts/agent_conversation_aggregator.py +38 -0
  13. swarms/prompts/prompt.py +0 -3
  14. swarms/schemas/agent_completion_response.py +71 -0
  15. swarms/schemas/agent_rag_schema.py +7 -0
  16. swarms/schemas/conversation_schema.py +9 -0
  17. swarms/schemas/llm_agent_schema.py +99 -81
  18. swarms/schemas/swarms_api_schemas.py +164 -0
  19. swarms/structs/__init__.py +15 -9
  20. swarms/structs/agent.py +219 -199
  21. swarms/structs/agent_rag_handler.py +685 -0
  22. swarms/structs/base_swarm.py +2 -1
  23. swarms/structs/conversation.py +832 -264
  24. swarms/structs/csv_to_agent.py +153 -100
  25. swarms/structs/deep_research_swarm.py +197 -193
  26. swarms/structs/dynamic_conversational_swarm.py +18 -7
  27. swarms/structs/hiearchical_swarm.py +1 -1
  28. swarms/structs/hybrid_hiearchical_peer_swarm.py +2 -18
  29. swarms/structs/image_batch_processor.py +261 -0
  30. swarms/structs/interactive_groupchat.py +356 -0
  31. swarms/structs/ma_blocks.py +159 -0
  32. swarms/structs/majority_voting.py +1 -1
  33. swarms/structs/mixture_of_agents.py +1 -1
  34. swarms/structs/multi_agent_exec.py +25 -26
  35. swarms/structs/multi_agent_router.py +3 -2
  36. swarms/structs/rearrange.py +3 -3
  37. swarms/structs/sequential_workflow.py +3 -3
  38. swarms/structs/swarm_matcher.py +499 -408
  39. swarms/structs/swarm_router.py +15 -97
  40. swarms/structs/swarming_architectures.py +1 -1
  41. swarms/tools/mcp_client_call.py +3 -0
  42. swarms/utils/__init__.py +10 -2
  43. swarms/utils/check_all_model_max_tokens.py +43 -0
  44. swarms/utils/generate_keys.py +0 -27
  45. swarms/utils/history_output_formatter.py +5 -20
  46. swarms/utils/litellm_wrapper.py +208 -60
  47. swarms/utils/output_types.py +24 -0
  48. swarms/utils/vllm_wrapper.py +14 -13
  49. swarms/utils/xml_utils.py +37 -2
  50. {swarms-7.8.3.dist-info → swarms-7.8.7.dist-info}/METADATA +31 -55
  51. {swarms-7.8.3.dist-info → swarms-7.8.7.dist-info}/RECORD +55 -48
  52. swarms/structs/multi_agent_collab.py +0 -242
  53. swarms/structs/output_types.py +0 -6
  54. swarms/utils/markdown_message.py +0 -21
  55. swarms/utils/visualizer.py +0 -510
  56. swarms/utils/wrapper_clusterop.py +0 -127
  57. /swarms/{tools → schemas}/tool_schema_base_model.py +0 -0
  58. {swarms-7.8.3.dist-info → swarms-7.8.7.dist-info}/LICENSE +0 -0
  59. {swarms-7.8.3.dist-info → swarms-7.8.7.dist-info}/WHEEL +0 -0
  60. {swarms-7.8.3.dist-info → swarms-7.8.7.dist-info}/entry_points.txt +0 -0
@@ -1,18 +1,18 @@
 import concurrent.futures
 import datetime
-import hashlib
 import json
 import os
 import threading
 import uuid
 from typing import (
     TYPE_CHECKING,
-    Any,
+    Callable,
     Dict,
     List,
     Optional,
     Union,
     Literal,
+    Any,
 )
 
 import yaml
@@ -33,8 +33,115 @@ def generate_conversation_id():
     return str(uuid.uuid4())
 
 
+def get_conversation_dir():
+    """Get the directory for storing conversation logs."""
+    # Get the current working directory
+    conversation_dir = os.path.join(os.getcwd(), "conversations")
+    try:
+        os.makedirs(conversation_dir, mode=0o755, exist_ok=True)
+    except Exception as e:
+        logger.error(
+            f"Failed to create conversations directory: {str(e)}"
+        )
+        # Fallback to the same directory as the script
+        conversation_dir = os.path.join(
+            os.path.dirname(os.path.abspath(__file__)),
+            "conversations",
+        )
+        os.makedirs(conversation_dir, mode=0o755, exist_ok=True)
+    return conversation_dir
+
+
 # Define available providers
-providers = Literal["mem0", "in-memory"]
+providers = Literal[
+    "mem0",
+    "in-memory",
+    "supabase",
+    "redis",
+    "sqlite",
+    "duckdb",
+    "pulsar",
+]
+
+
+def _create_backend_conversation(backend: str, **kwargs):
+    """
+    Create a backend conversation instance based on the specified backend type.
+
+    This function uses lazy loading to import backend dependencies only when needed.
+    Each backend class handles its own dependency management and error messages.
+
+    Args:
+        backend (str): The backend type to create
+        **kwargs: Arguments to pass to the backend constructor
+
+    Returns:
+        Backend conversation instance
+
+    Raises:
+        ImportError: If required packages for the backend are not installed (raised by lazy loading)
+        ValueError: If backend is not supported
+    """
+    try:
+        if backend == "supabase":
+            from swarms.communication.supabase_wrap import (
+                SupabaseConversation,
+            )
+
+            return SupabaseConversation(**kwargs)
+        elif backend == "redis":
+            from swarms.communication.redis_wrap import (
+                RedisConversation,
+            )
+
+            return RedisConversation(**kwargs)
+        elif backend == "sqlite":
+            from swarms.communication.sqlite_wrap import (
+                SQLiteConversation,
+            )
+
+            return SQLiteConversation(**kwargs)
+        elif backend == "duckdb":
+            from swarms.communication.duckdb_wrap import (
+                DuckDBConversation,
+            )
+
+            return DuckDBConversation(**kwargs)
+        elif backend == "pulsar":
+            from swarms.communication.pulsar_struct import (
+                PulsarConversation,
+            )
+
+            return PulsarConversation(**kwargs)
+        else:
+            raise ValueError(
+                f"Unsupported backend: {backend}. "
+                f"Available backends: supabase, redis, sqlite, duckdb, pulsar"
+            )
+    except ImportError as e:
+        # Provide helpful error messages for missing dependencies
+        backend_deps = {
+            "supabase": "pip install supabase",
+            "redis": "pip install redis",
+            "sqlite": "Built-in to Python - check your installation",
+            "duckdb": "pip install duckdb",
+            "pulsar": "pip install pulsar-client",
+        }
+
+        install_cmd = backend_deps.get(
+            backend, f"Check documentation for {backend}"
+        )
+        logger.error(
+            f"Failed to initialize {backend} backend. "
+            f"Missing dependencies. Install with: {install_cmd}"
+        )
+        raise ImportError(
+            f"Backend '{backend}' dependencies not available. "
+            f"Install with: {install_cmd}. Original error: {e}"
+        )
+    except Exception as e:
+        logger.error(f"Failed to create {backend} backend: {e}")
+        raise
 
 
 class Conversation(BaseStructure):
@@ -43,6 +150,19 @@ class Conversation(BaseStructure):
     and retrieval of messages, as well as saving and loading the conversation
     history in various formats.
 
+    The Conversation class now supports multiple backends for persistent storage:
+    - "in-memory": Default memory-based storage (no persistence)
+    - "mem0": Memory-based storage with mem0 integration (requires: pip install mem0ai)
+    - "supabase": PostgreSQL-based storage using Supabase (requires: pip install supabase)
+    - "redis": Redis-based storage (requires: pip install redis)
+    - "sqlite": SQLite-based storage (built-in to Python)
+    - "duckdb": DuckDB-based storage (requires: pip install duckdb)
+    - "pulsar": Apache Pulsar messaging backend (requires: pip install pulsar-client)
+
+    All backends use lazy loading - database dependencies are only imported when the
+    specific backend is instantiated. Each backend class provides its own detailed
+    error messages if required packages are not installed.
+
     Attributes:
         system_prompt (Optional[str]): The system prompt for the conversation.
         time_enabled (bool): Flag to enable time tracking for messages.
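The docstring above lists the backends added in this release. A minimal usage sketch, assuming a local Redis instance and the constructor parameters introduced later in this diff (the connection values here are placeholders, not defaults mandated by the package):

from swarms.structs.conversation import Conversation

# Sketch: persist messages in Redis instead of the default in-memory list.
# If the redis client library is missing or the connection fails, __init__
# logs a warning and falls back to the in-memory backend.
chat = Conversation(
    backend="redis",
    redis_host="localhost",  # assumed local Redis instance
    redis_port=6379,
    token_count=False,
)
chat.add("user", "Hello there.")
print(chat.get_final_message())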
@@ -58,10 +178,6 @@ class Conversation(BaseStructure):
         save_as_json_bool (bool): Flag to save conversation history as JSON.
         token_count (bool): Flag to enable token counting for messages.
         conversation_history (list): List to store the history of messages.
-        cache_enabled (bool): Flag to enable prompt caching.
-        cache_stats (dict): Statistics about cache usage.
-        cache_lock (threading.Lock): Lock for thread-safe cache operations.
-        conversations_dir (str): Directory to store cached conversations.
     """
 
     def __init__(
@@ -70,54 +186,228 @@ class Conversation(BaseStructure):
         name: str = None,
         system_prompt: Optional[str] = None,
         time_enabled: bool = False,
-        autosave: bool = False,
+        autosave: bool = False,  # Changed default to False
+        save_enabled: bool = False,  # New parameter to control if saving is enabled
         save_filepath: str = None,
-        tokenizer: Any = None,
+        load_filepath: str = None,  # New parameter to specify which file to load from
+        tokenizer: Callable = None,
         context_length: int = 8192,
         rules: str = None,
         custom_rules_prompt: str = None,
         user: str = "User:",
-        auto_save: bool = True,
-        save_as_yaml: bool = True,
+        save_as_yaml: bool = False,
         save_as_json_bool: bool = False,
         token_count: bool = True,
-        cache_enabled: bool = True,
-        conversations_dir: Optional[str] = None,
+        message_id_on: bool = False,
         provider: providers = "in-memory",
+        backend: Optional[str] = None,
+        # Backend-specific parameters
+        supabase_url: Optional[str] = None,
+        supabase_key: Optional[str] = None,
+        redis_host: str = "localhost",
+        redis_port: int = 6379,
+        redis_db: int = 0,
+        redis_password: Optional[str] = None,
+        db_path: Optional[str] = None,
+        table_name: str = "conversations",
+        # Additional backend parameters
+        use_embedded_redis: bool = True,
+        persist_redis: bool = True,
+        auto_persist: bool = True,
+        redis_data_dir: Optional[str] = None,
+        conversations_dir: Optional[str] = None,
         *args,
         **kwargs,
     ):
         super().__init__()
 
+        # Support both 'provider' and 'backend' parameters for backwards compatibility
+        # 'backend' takes precedence if both are provided
+        self.backend = backend or provider
+        self.backend_instance = None
+
+        # Validate backend
+        valid_backends = [
+            "in-memory",
+            "mem0",
+            "supabase",
+            "redis",
+            "sqlite",
+            "duckdb",
+            "pulsar",
+        ]
+        if self.backend not in valid_backends:
+            raise ValueError(
+                f"Invalid backend: '{self.backend}'. "
+                f"Valid backends are: {', '.join(valid_backends)}"
+            )
+
         # Initialize all attributes first
         self.id = id
         self.name = name or id
         self.system_prompt = system_prompt
         self.time_enabled = time_enabled
         self.autosave = autosave
-        self.save_filepath = save_filepath
+        self.save_enabled = save_enabled
+        self.conversations_dir = conversations_dir
+        self.message_id_on = message_id_on
+
+        # Handle save filepath
+        if save_enabled and save_filepath:
+            self.save_filepath = save_filepath
+        elif save_enabled and conversations_dir:
+            self.save_filepath = os.path.join(
+                conversations_dir, f"{self.id}.json"
+            )
+        else:
+            self.save_filepath = None
+
+        self.load_filepath = load_filepath
         self.conversation_history = []
         self.tokenizer = tokenizer
         self.context_length = context_length
         self.rules = rules
         self.custom_rules_prompt = custom_rules_prompt
         self.user = user
-        self.auto_save = auto_save
         self.save_as_yaml = save_as_yaml
         self.save_as_json_bool = save_as_json_bool
         self.token_count = token_count
-        self.cache_enabled = cache_enabled
-        self.provider = provider
-        self.cache_stats = {
-            "hits": 0,
-            "misses": 0,
-            "cached_tokens": 0,
-            "total_tokens": 0,
-        }
-        self.cache_lock = threading.Lock()
+        self.provider = provider  # Keep for backwards compatibility
        self.conversations_dir = conversations_dir
 
-        self.setup()
+        # Initialize backend if using persistent storage
+        if self.backend in [
+            "supabase",
+            "redis",
+            "sqlite",
+            "duckdb",
+            "pulsar",
+        ]:
+            try:
+                self._initialize_backend(
+                    supabase_url=supabase_url,
+                    supabase_key=supabase_key,
+                    redis_host=redis_host,
+                    redis_port=redis_port,
+                    redis_db=redis_db,
+                    redis_password=redis_password,
+                    db_path=db_path,
+                    table_name=table_name,
+                    use_embedded_redis=use_embedded_redis,
+                    persist_redis=persist_redis,
+                    auto_persist=auto_persist,
+                    redis_data_dir=redis_data_dir,
+                    **kwargs,
+                )
+            except Exception as e:
+                logger.warning(
+                    f"Failed to initialize {self.backend} backend: {e}. "
+                    f"Falling back to in-memory storage."
+                )
+                self.backend = "in-memory"
+                self.backend_instance = None
+                self.setup()
+        else:
+            # For in-memory and mem0 backends, use the original setup
+            self.setup()
+
+    def _initialize_backend(self, **kwargs):
+        """
+        Initialize the persistent storage backend.
+
+        Args:
+            **kwargs: Backend-specific configuration parameters
+        """
+        # Prepare common backend arguments
+        backend_kwargs = {
+            "system_prompt": self.system_prompt,
+            "time_enabled": self.time_enabled,
+            "autosave": self.autosave,
+            "save_filepath": self.save_filepath,
+            "tokenizer": self.tokenizer,
+            "context_length": self.context_length,
+            "rules": self.rules,
+            "custom_rules_prompt": self.custom_rules_prompt,
+            "user": self.user,
+            "save_as_yaml": self.save_as_yaml,
+            "save_as_json_bool": self.save_as_json_bool,
+            "token_count": self.token_count,
+        }
+
+        # Add backend-specific parameters
+        if self.backend == "supabase":
+            supabase_url = kwargs.get("supabase_url") or os.getenv(
+                "SUPABASE_URL"
+            )
+            supabase_key = kwargs.get("supabase_key") or os.getenv(
+                "SUPABASE_ANON_KEY"
+            )
+
+            if not supabase_url or not supabase_key:
+                raise ValueError(
+                    "Supabase backend requires 'supabase_url' and 'supabase_key' parameters "
+                    "or SUPABASE_URL and SUPABASE_ANON_KEY environment variables"
+                )
+            backend_kwargs.update(
+                {
+                    "supabase_url": supabase_url,
+                    "supabase_key": supabase_key,
+                    "table_name": kwargs.get(
+                        "table_name", "conversations"
+                    ),
+                }
+            )
+
+        elif self.backend == "redis":
+            backend_kwargs.update(
+                {
+                    "redis_host": kwargs.get(
+                        "redis_host", "localhost"
+                    ),
+                    "redis_port": kwargs.get("redis_port", 6379),
+                    "redis_db": kwargs.get("redis_db", 0),
+                    "redis_password": kwargs.get("redis_password"),
+                    "use_embedded_redis": kwargs.get(
+                        "use_embedded_redis", True
+                    ),
+                    "persist_redis": kwargs.get(
+                        "persist_redis", True
+                    ),
+                    "auto_persist": kwargs.get("auto_persist", True),
+                    "redis_data_dir": kwargs.get("redis_data_dir"),
+                    "conversation_id": self.id,
+                    "name": self.name,
+                }
+            )
+
+        elif self.backend in ["sqlite", "duckdb"]:
+            db_path = kwargs.get("db_path")
+            if db_path:
+                backend_kwargs["db_path"] = db_path
+
+        elif self.backend == "pulsar":
+            # Add pulsar-specific parameters
+            backend_kwargs.update(
+                {
+                    "pulsar_url": kwargs.get(
+                        "pulsar_url", "pulsar://localhost:6650"
+                    ),
+                    "topic": kwargs.get(
+                        "topic", f"conversation-{self.id}"
+                    ),
+                }
+            )
+
+        # Create the backend instance
+        logger.info(f"Initializing {self.backend} backend...")
+        self.backend_instance = _create_backend_conversation(
+            self.backend, **backend_kwargs
+        )
+
+        # Log successful initialization
+        logger.info(
+            f"Successfully initialized {self.backend} backend for conversation '{self.name}'"
+        )
 
     def setup(self):
         # Set up conversations directory
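The reworked __init__ above drops the cache fields in favor of explicit save controls (save_enabled, load_filepath, message_id_on) plus optional backend parameters. A rough sketch of the new save flow, assuming a local ./conversations directory (an illustrative path, not one mandated by the package):

from swarms.structs.conversation import Conversation

# Sketch: with save_enabled=True and a conversations_dir, __init__ derives
# save_filepath as <conversations_dir>/<id>.json, and add() autosaves there
# because autosave, save_enabled, and save_filepath are all set.
convo = Conversation(
    name="support-session",
    autosave=True,
    save_enabled=True,
    conversations_dir="./conversations",
    time_enabled=True,
    message_id_on=True,
    token_count=False,
)
convo.add("user", "What changed in 7.8.7?")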
@@ -146,22 +436,32 @@ class Conversation(BaseStructure):
                     "history", []
                 )
         else:
-            # If system prompt is not None, add it to the conversation history
-            if self.system_prompt is not None:
-                self.add("System", self.system_prompt)
-
-            if self.rules is not None:
-                self.add(self.user or "User", self.rules)
-
-            if self.custom_rules_prompt is not None:
-                self.add(
-                    self.user or "User", self.custom_rules_prompt
+            self._initialize_new_conversation()
+
+    def _initialize_new_conversation(self):
+        """Initialize a new conversation with system prompt and rules."""
+        if self.system_prompt is not None:
+            self.add("System", self.system_prompt)
+
+        if self.rules is not None:
+            self.add(self.user or "User", self.rules)
+
+        if self.custom_rules_prompt is not None:
+            self.add(self.user or "User", self.custom_rules_prompt)
+
+        if self.tokenizer is not None:
+            self.truncate_memory_with_tokenizer()
+
+    def _autosave(self):
+        """Automatically save the conversation if autosave is enabled."""
+        if self.autosave and self.save_filepath:
+            try:
+                self.save_as_json(self.save_filepath)
+            except Exception as e:
+                logger.error(
+                    f"Failed to autosave conversation: {str(e)}"
                 )
 
-        # If tokenizer then truncate
-        if self.tokenizer is not None:
-            self.truncate_memory_with_tokenizer()
-
     def mem0_provider(self):
         try:
             from mem0 import AsyncMemory
@@ -180,104 +480,10 @@ class Conversation(BaseStructure):
             )
             return None
 
-    def _generate_cache_key(
-        self, content: Union[str, dict, list]
-    ) -> str:
-        """Generate a cache key for the given content.
-
-        Args:
-            content (Union[str, dict, list]): The content to generate a cache key for.
-
-        Returns:
-            str: The cache key.
-        """
-        if isinstance(content, (dict, list)):
-            content = json.dumps(content, sort_keys=True)
-        return hashlib.md5(content.encode()).hexdigest()
-
-    def _get_cached_tokens(
-        self, content: Union[str, dict, list]
-    ) -> Optional[int]:
-        """Get the number of cached tokens for the given content.
-
-        Args:
-            content (Union[str, dict, list]): The content to check.
-
-        Returns:
-            Optional[int]: The number of cached tokens, or None if not cached.
-        """
-        if not self.cache_enabled:
-            return None
-
-        with self.cache_lock:
-            cache_key = self._generate_cache_key(content)
-            if cache_key in self.cache_stats:
-                self.cache_stats["hits"] += 1
-                return self.cache_stats[cache_key]
-            self.cache_stats["misses"] += 1
-            return None
-
-    def _update_cache_stats(
-        self, content: Union[str, dict, list], token_count: int
-    ):
-        """Update cache statistics for the given content.
-
-        Args:
-            content (Union[str, dict, list]): The content to update stats for.
-            token_count (int): The number of tokens in the content.
-        """
-        if not self.cache_enabled:
-            return
-
-        with self.cache_lock:
-            cache_key = self._generate_cache_key(content)
-            self.cache_stats[cache_key] = token_count
-            self.cache_stats["cached_tokens"] += token_count
-            self.cache_stats["total_tokens"] += token_count
-
-    def _save_to_cache(self):
-        """Save the current conversation state to the cache directory."""
-        if not self.conversations_dir:
-            return
-
-        conversation_file = os.path.join(
-            self.conversations_dir, f"{self.name}.json"
-        )
-
-        # Prepare metadata
-        metadata = {
-            "id": self.id,
-            "name": self.name,
-            "system_prompt": self.system_prompt,
-            "time_enabled": self.time_enabled,
-            "autosave": self.autosave,
-            "save_filepath": self.save_filepath,
-            "context_length": self.context_length,
-            "rules": self.rules,
-            "custom_rules_prompt": self.custom_rules_prompt,
-            "user": self.user,
-            "auto_save": self.auto_save,
-            "save_as_yaml": self.save_as_yaml,
-            "save_as_json_bool": self.save_as_json_bool,
-            "token_count": self.token_count,
-            "cache_enabled": self.cache_enabled,
-        }
-
-        # Prepare data to save
-        save_data = {
-            "metadata": metadata,
-            "history": self.conversation_history,
-            "cache_stats": self.cache_stats,
-        }
-
-        # Save to file
-        with open(conversation_file, "w") as f:
-            json.dump(save_data, f, indent=4)
-
     def add_in_memory(
         self,
         role: str,
-        content: Union[str, dict, list],
+        content: Union[str, dict, list, Any],
         *args,
         **kwargs,
     ):
@@ -287,39 +493,32 @@ class Conversation(BaseStructure):
             role (str): The role of the speaker (e.g., 'User', 'System').
             content (Union[str, dict, list]): The content of the message to be added.
         """
-        # Base message with role
+        # Base message with role and timestamp
         message = {
             "role": role,
+            "content": content,
         }
 
-        # Handle different content types
-        if isinstance(content, dict) or isinstance(content, list):
-            message["content"] = content
-        elif self.time_enabled:
-            message["content"] = (
-                f"Time: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')} \n {content}"
-            )
-        else:
-            message["content"] = content
+        if self.time_enabled:
+            message["timestamp"] = datetime.datetime.now().isoformat()
 
-        # Check cache for token count
-        cached_tokens = self._get_cached_tokens(content)
-        if cached_tokens is not None:
-            message["token_count"] = cached_tokens
-            message["cached"] = True
-        else:
-            message["cached"] = False
+        if self.message_id_on:
+            message["message_id"] = str(uuid.uuid4())
 
-        # Add message to appropriate backend
+        # Add message to conversation history
         self.conversation_history.append(message)
 
-        if self.token_count is True and not message.get(
-            "cached", False
-        ):
+        if self.token_count is True:
             self._count_tokens(content, message)
 
-        # Save to cache after adding message
-        self._save_to_cache()
+        # Autosave after adding message, but only if saving is enabled
+        if self.autosave and self.save_enabled and self.save_filepath:
+            try:
+                self.save_as_json(self.save_filepath)
+            except Exception as e:
+                logger.error(
+                    f"Failed to autosave conversation: {str(e)}"
+                )
 
     def add_mem0(
         self,
@@ -330,12 +529,19 @@ class Conversation(BaseStructure):
         """Add a message to the conversation history using the Mem0 provider."""
         if self.provider == "mem0":
             memory = self.mem0_provider()
-            memory.add(
-                messages=content,
-                agent_id=role,
-                run_id=self.id,
-                metadata=metadata,
-            )
+            if memory is not None:
+                memory.add(
+                    messages=content,
+                    agent_id=role,
+                    run_id=self.id,
+                    metadata=metadata,
+                )
+            else:
+                # Fallback to in-memory if mem0 is not available
+                logger.warning(
+                    "Mem0 provider not available, falling back to in-memory storage"
+                )
+                self.add_in_memory(role, content)
 
     def add(
         self,
@@ -344,10 +550,21 @@ class Conversation(BaseStructure):
         metadata: Optional[dict] = None,
     ):
         """Add a message to the conversation history."""
-        if self.provider == "in-memory":
-            self.add_in_memory(role, content)
+        # If using a persistent backend, delegate to it
+        if self.backend_instance:
+            try:
+                return self.backend_instance.add(
+                    role=role, content=content, metadata=metadata
+                )
+            except Exception as e:
+                logger.error(
+                    f"Backend add failed: {e}. Falling back to in-memory."
+                )
+                return self.add_in_memory(role, content)
+        elif self.provider == "in-memory":
+            return self.add_in_memory(role, content)
         elif self.provider == "mem0":
-            self.add_mem0(
+            return self.add_mem0(
                 role=role, content=content, metadata=metadata
             )
         else:
@@ -367,8 +584,6 @@ class Conversation(BaseStructure):
                 tokens = count_tokens(any_to_str(content))
                 # Update the message that's already in the conversation history
                 message["token_count"] = int(tokens)
-                # Update cache stats
-                self._update_cache_stats(content, int(tokens))
 
                 # If autosave is enabled, save after token count is updated
                 if self.autosave:
@@ -407,52 +622,77 @@ class Conversation(BaseStructure):
             concurrent.futures.wait(futures)
 
     def delete(self, index: str):
-        """Delete a message from the conversation history.
-
-        Args:
-            index (str): Index of the message to delete.
-        """
-        self.conversation_history.pop(index)
-        self._save_to_cache()
+        """Delete a message from the conversation history."""
+        if self.backend_instance:
+            try:
+                return self.backend_instance.delete(index)
+            except Exception as e:
+                logger.error(f"Backend delete failed: {e}")
+                raise
+        self.conversation_history.pop(int(index))
 
     def update(self, index: str, role, content):
         """Update a message in the conversation history.
 
         Args:
-            index (str): Index of the message to update.
-            role (str): Role of the speaker.
-            content (Union[str, dict]): New content of the message.
+            index (int): The index of the message to update.
+            role (str): The role of the speaker.
+            content: The new content of the message.
         """
-        self.conversation_history[index] = {
-            "role": role,
-            "content": content,
-        }
-        self._save_to_cache()
+        if self.backend_instance:
+            try:
+                return self.backend_instance.update(
+                    index, role, content
+                )
+            except Exception as e:
+                logger.error(f"Backend update failed: {e}")
+                raise
+        if 0 <= int(index) < len(self.conversation_history):
+            self.conversation_history[int(index)]["role"] = role
+            self.conversation_history[int(index)]["content"] = content
+        else:
+            logger.warning(f"Invalid index: {index}")
 
     def query(self, index: str):
-        """Query a message in the conversation history.
+        """Query a message from the conversation history.
 
         Args:
-            index (str): Index of the message to query.
+            index (int): The index of the message to query.
 
         Returns:
-            dict: The message with its role and content.
+            dict: The message at the specified index.
         """
-        return self.conversation_history[index]
+        if self.backend_instance:
+            try:
+                return self.backend_instance.query(index)
+            except Exception as e:
+                logger.error(f"Backend query failed: {e}")
+                raise
+        if 0 <= int(index) < len(self.conversation_history):
+            return self.conversation_history[int(index)]
+        return None
 
     def search(self, keyword: str):
-        """Search for a message in the conversation history.
+        """Search for messages containing a keyword.
 
         Args:
-            keyword (str): Keyword to search for.
+            keyword (str): The keyword to search for.
 
         Returns:
-            list: List of messages containing the keyword.
+            list: A list of messages containing the keyword.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.search(keyword)
+            except Exception as e:
+                logger.error(f"Backend search failed: {e}")
+                # Fallback to in-memory search
+                pass
+
         return [
-            msg
-            for msg in self.conversation_history
-            if keyword in msg["content"]
+            message
+            for message in self.conversation_history
+            if keyword in str(message["content"])
         ]
 
     def display_conversation(self, detailed: bool = False):
@@ -461,10 +701,38 @@ class Conversation(BaseStructure):
         Args:
             detailed (bool, optional): Flag to display detailed information. Defaults to False.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.display_conversation(
+                    detailed
+                )
+            except Exception as e:
+                logger.error(f"Backend display failed: {e}")
+                # Fallback to in-memory display
+                pass
+
+        # In-memory display implementation with proper formatting
         for message in self.conversation_history:
-            formatter.print_panel(
-                f"{message['role']}: {message['content']}\n\n"
-            )
+            content = message.get("content", "")
+            role = message.get("role", "Unknown")
+
+            # Format the message content
+            if isinstance(content, (dict, list)):
+                content = json.dumps(content, indent=2)
+
+            # Create the display string
+            display_str = f"{role}: {content}"
+
+            # Add details if requested
+            if detailed:
+                display_str += f"\nTimestamp: {message.get('timestamp', 'Unknown')}"
+                display_str += f"\nMessage ID: {message.get('message_id', 'Unknown')}"
+                if "token_count" in message:
+                    display_str += (
+                        f"\nTokens: {message['token_count']}"
+                    )
+
+            formatter.print_panel(display_str)
 
     def export_conversation(self, filename: str, *args, **kwargs):
         """Export the conversation history to a file.
@@ -472,9 +740,28 @@ class Conversation(BaseStructure):
         Args:
             filename (str): Filename to export to.
         """
-        with open(filename, "w") as f:
-            for message in self.conversation_history:
-                f.write(f"{message['role']}: {message['content']}\n")
+
+        if self.backend_instance:
+            try:
+                return self.backend_instance.export_conversation(
+                    filename, *args, **kwargs
+                )
+            except Exception as e:
+                logger.error(f"Backend export failed: {e}")
+                # Fallback to in-memory export
+                pass
+
+        # In-memory export implementation
+        # If the filename ends with .json, use save_as_json
+        if filename.endswith(".json"):
+            self.save_as_json(filename)
+        else:
+            # Simple text export for non-JSON files
+            with open(filename, "w", encoding="utf-8") as f:
+                for message in self.conversation_history:
+                    f.write(
+                        f"{message['role']}: {message['content']}\n"
+                    )
 
     def import_conversation(self, filename: str):
         """Import a conversation history from a file.
@@ -482,10 +769,16 @@ class Conversation(BaseStructure):
         Args:
             filename (str): Filename to import from.
         """
-        with open(filename) as f:
-            for line in f:
-                role, content = line.split(": ", 1)
-                self.add(role, content.strip())
+        if self.backend_instance:
+            try:
+                return self.backend_instance.import_conversation(
+                    filename
+                )
+            except Exception as e:
+                logger.error(f"Backend import failed: {e}")
+                # Fallback to in-memory import
+                pass
+        self.load_from_json(filename)
 
     def count_messages_by_role(self):
         """Count the number of messages by role.
@@ -493,14 +786,33 @@ class Conversation(BaseStructure):
         Returns:
             dict: A dictionary with counts of messages by role.
         """
+        # Check backend instance first
+        if self.backend_instance:
+            try:
+                return self.backend_instance.count_messages_by_role()
+            except Exception as e:
+                logger.error(
+                    f"Backend count_messages_by_role failed: {e}"
+                )
+                # Fallback to local implementation below
+                pass
+        # Initialize counts with expected roles
         counts = {
             "system": 0,
             "user": 0,
             "assistant": 0,
             "function": 0,
         }
+
+        # Count messages by role
         for message in self.conversation_history:
-            counts[message["role"]] += 1
+            role = message["role"]
+            if role in counts:
+                counts[role] += 1
+            else:
+                # Handle unexpected roles dynamically
+                counts[role] = counts.get(role, 0) + 1
+
         return counts
 
     def return_history_as_string(self):
@@ -509,6 +821,18 @@ class Conversation(BaseStructure):
         Returns:
             str: The conversation history formatted as a string.
         """
+        if self.backend_instance:
+            try:
+                return (
+                    self.backend_instance.return_history_as_string()
+                )
+            except Exception as e:
+                logger.error(
+                    f"Backend return_history_as_string failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
+
         formatted_messages = []
         for message in self.conversation_history:
             formatted_messages.append(
@@ -523,6 +847,13 @@ class Conversation(BaseStructure):
         Returns:
             str: The conversation history.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.get_str()
+            except Exception as e:
+                logger.error(f"Backend get_str failed: {e}")
+                # Fallback to in-memory implementation
+                pass
         return self.return_history_as_string()
 
     def save_as_json(self, filename: str = None):
@@ -531,9 +862,55 @@ class Conversation(BaseStructure):
         Args:
             filename (str): Filename to save the conversation history.
         """
-        if filename is not None:
-            with open(filename, "w") as f:
-                json.dump(self.conversation_history, f)
+        # Check backend instance first
+        if self.backend_instance:
+            try:
+                return self.backend_instance.save_as_json(filename)
+            except Exception as e:
+                logger.error(f"Backend save_as_json failed: {e}")
+                # Fallback to local save implementation below
+
+        # Don't save if saving is disabled
+        if not self.save_enabled:
+            return
+
+        save_path = filename or self.save_filepath
+        if save_path is not None:
+            try:
+                # Prepare metadata
+                metadata = {
+                    "id": self.id,
+                    "name": self.name,
+                    "created_at": datetime.datetime.now().isoformat(),
+                    "system_prompt": self.system_prompt,
+                    "rules": self.rules,
+                    "custom_rules_prompt": self.custom_rules_prompt,
+                }
+
+                # Prepare save data
+                save_data = {
+                    "metadata": metadata,
+                    "history": self.conversation_history,
+                }
+
+                # Create directory if it doesn't exist
+                os.makedirs(
+                    os.path.dirname(save_path),
+                    mode=0o755,
+                    exist_ok=True,
+                )
+
+                # Write directly to file
+                with open(save_path, "w") as f:
+                    json.dump(save_data, f, indent=2)
+
+                # Only log explicit saves, not autosaves
+                if not self.autosave:
+                    logger.info(
+                        f"Successfully saved conversation to {save_path}"
+                    )
+            except Exception as e:
+                logger.error(f"Failed to save conversation: {str(e)}")
 
     def load_from_json(self, filename: str):
         """Load the conversation history from a JSON file.
@@ -541,9 +918,32 @@ class Conversation(BaseStructure):
         Args:
             filename (str): Filename to load from.
         """
-        if filename is not None:
-            with open(filename) as f:
-                self.conversation_history = json.load(f)
+        if filename is not None and os.path.exists(filename):
+            try:
+                with open(filename) as f:
+                    data = json.load(f)
+
+                # Load metadata
+                metadata = data.get("metadata", {})
+                self.id = metadata.get("id", self.id)
+                self.name = metadata.get("name", self.name)
+                self.system_prompt = metadata.get(
+                    "system_prompt", self.system_prompt
+                )
+                self.rules = metadata.get("rules", self.rules)
+                self.custom_rules_prompt = metadata.get(
+                    "custom_rules_prompt", self.custom_rules_prompt
+                )
+
+                # Load conversation history
+                self.conversation_history = data.get("history", [])
+
+                logger.info(
+                    f"Successfully loaded conversation from {filename}"
+                )
+            except Exception as e:
+                logger.error(f"Failed to load conversation: {str(e)}")
+                raise
 
     def search_keyword_in_conversation(self, keyword: str):
         """Search for a keyword in the conversation history.
@@ -599,8 +999,14 @@ class Conversation(BaseStructure):
 
     def clear(self):
         """Clear the conversation history."""
+        if self.backend_instance:
+            try:
+                return self.backend_instance.clear()
+            except Exception as e:
+                logger.error(f"Backend clear failed: {e}")
+                # Fallback to in-memory clear
+                pass
         self.conversation_history = []
-        self._save_to_cache()
 
     def to_json(self):
         """Convert the conversation history to a JSON string.
@@ -608,6 +1014,13 @@ class Conversation(BaseStructure):
         Returns:
             str: The conversation history as a JSON string.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.to_json()
+            except Exception as e:
+                logger.error(f"Backend to_json failed: {e}")
+                # Fallback to in-memory implementation
+                pass
         return json.dumps(self.conversation_history)
 
     def to_dict(self):
@@ -616,6 +1029,13 @@ class Conversation(BaseStructure):
         Returns:
             list: The conversation history as a list of dictionaries.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.to_dict()
+            except Exception as e:
+                logger.error(f"Backend to_dict failed: {e}")
+                # Fallback to in-memory implementation
+                pass
         return self.conversation_history
 
     def to_yaml(self):
@@ -624,6 +1044,13 @@ class Conversation(BaseStructure):
         Returns:
             str: The conversation history as a YAML string.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.to_yaml()
+            except Exception as e:
+                logger.error(f"Backend to_yaml failed: {e}")
+                # Fallback to in-memory implementation
+                pass
         return yaml.dump(self.conversation_history)
 
     def get_visible_messages(self, agent: "Agent", turn: int):
@@ -659,11 +1086,24 @@ class Conversation(BaseStructure):
         Returns:
             str: The last message formatted as 'role: content'.
         """
-        if self.provider == "mem0":
+        if self.backend_instance:
+            try:
+                return (
+                    self.backend_instance.get_last_message_as_string()
+                )
+            except Exception as e:
+                logger.error(
+                    f"Backend get_last_message_as_string failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
+        elif self.provider == "mem0":
             memory = self.mem0_provider()
             return memory.get_all(run_id=self.id)
         elif self.provider == "in-memory":
-            return f"{self.conversation_history[-1]['role']}: {self.conversation_history[-1]['content']}"
+            if self.conversation_history:
+                return f"{self.conversation_history[-1]['role']}: {self.conversation_history[-1]['content']}"
+            return ""
         else:
             raise ValueError(f"Invalid provider: {self.provider}")
 
@@ -673,6 +1113,15 @@ class Conversation(BaseStructure):
         Returns:
             list: List of messages formatted as 'role: content'.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.return_messages_as_list()
+            except Exception as e:
+                logger.error(
+                    f"Backend return_messages_as_list failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
         return [
             f"{message['role']}: {message['content']}"
             for message in self.conversation_history
@@ -684,6 +1133,17 @@ class Conversation(BaseStructure):
         Returns:
             list: List of dictionaries containing role and content of each message.
         """
+        if self.backend_instance:
+            try:
+                return (
+                    self.backend_instance.return_messages_as_dictionary()
+                )
+            except Exception as e:
+                logger.error(
+                    f"Backend return_messages_as_dictionary failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
         return [
             {
                 "role": message["role"],
@@ -718,7 +1178,16 @@ class Conversation(BaseStructure):
         Returns:
             str: The final message formatted as 'role: content'.
         """
-        return f"{self.conversation_history[-1]['role']}: {self.conversation_history[-1]['content']}"
+        if self.backend_instance:
+            try:
+                return self.backend_instance.get_final_message()
+            except Exception as e:
+                logger.error(f"Backend get_final_message failed: {e}")
+                # Fallback to in-memory implementation
+                pass
+        if self.conversation_history:
+            return f"{self.conversation_history[-1]['role']}: {self.conversation_history[-1]['content']}"
+        return ""
 
     def get_final_message_content(self):
         """Return the content of the final message from the conversation history.
@@ -726,9 +1195,21 @@ class Conversation(BaseStructure):
         Returns:
             str: The content of the final message.
         """
-        output = self.conversation_history[-1]["content"]
-        # print(output)
-        return output
+        if self.backend_instance:
+            try:
+                return (
+                    self.backend_instance.get_final_message_content()
+                )
+            except Exception as e:
+                logger.error(
+                    f"Backend get_final_message_content failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
+        if self.conversation_history:
+            output = self.conversation_history[-1]["content"]
+            return output
+        return ""
 
     def return_all_except_first(self):
         """Return all messages except the first one.
@@ -736,6 +1217,15 @@ class Conversation(BaseStructure):
         Returns:
             list: List of messages except the first one.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.return_all_except_first()
+            except Exception as e:
+                logger.error(
+                    f"Backend return_all_except_first failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
         return self.conversation_history[2:]
 
     def return_all_except_first_string(self):
@@ -744,6 +1234,17 @@ class Conversation(BaseStructure):
         Returns:
             str: All messages except the first one as a string.
         """
+        if self.backend_instance:
+            try:
+                return (
+                    self.backend_instance.return_all_except_first_string()
+                )
+            except Exception as e:
+                logger.error(
+                    f"Backend return_all_except_first_string failed: {e}"
+                )
+                # Fallback to in-memory implementation
+                pass
         return "\n".join(
             [
                 f"{msg['content']}"
@@ -757,80 +1258,147 @@ class Conversation(BaseStructure):
         Args:
             messages (List[dict]): List of messages to add.
         """
+        if self.backend_instance:
+            try:
+                return self.backend_instance.batch_add(messages)
+            except Exception as e:
+                logger.error(f"Backend batch_add failed: {e}")
+                # Fallback to in-memory implementation
+                pass
         self.conversation_history.extend(messages)
 
-    def get_cache_stats(self) -> Dict[str, int]:
-        """Get statistics about cache usage.
-
-        Returns:
-            Dict[str, int]: Statistics about cache usage.
-        """
-        with self.cache_lock:
-            return {
-                "hits": self.cache_stats["hits"],
-                "misses": self.cache_stats["misses"],
-                "cached_tokens": self.cache_stats["cached_tokens"],
-                "total_tokens": self.cache_stats["total_tokens"],
-                "hit_rate": (
-                    self.cache_stats["hits"]
-                    / (
-                        self.cache_stats["hits"]
-                        + self.cache_stats["misses"]
-                    )
-                    if (
-                        self.cache_stats["hits"]
-                        + self.cache_stats["misses"]
-                    )
-                    > 0
-                    else 0
-                ),
-            }
+    def clear_memory(self):
+        """Clear the memory of the conversation."""
+        self.conversation_history = []
 
     @classmethod
     def load_conversation(
-        cls, name: str, conversations_dir: Optional[str] = None
+        cls,
+        name: str,
+        conversations_dir: Optional[str] = None,
+        load_filepath: Optional[str] = None,
     ) -> "Conversation":
-        """Load a conversation from the cache by name.
+        """Load a conversation from saved file by name or specific file.
 
         Args:
             name (str): Name of the conversation to load
-            conversations_dir (Optional[str]): Directory containing cached conversations
+            conversations_dir (Optional[str]): Directory containing conversations
+            load_filepath (Optional[str]): Specific file to load from
 
         Returns:
             Conversation: The loaded conversation object
         """
-        return cls(name=name, conversations_dir=conversations_dir)
+        if load_filepath:
+            return cls(
+                name=name,
+                load_filepath=load_filepath,
+                save_enabled=False,  # Don't enable saving when loading specific file
+            )
+
+        conv_dir = conversations_dir or get_conversation_dir()
+        # Try loading by name first
+        filepath = os.path.join(conv_dir, f"{name}.json")
+
+        # If not found by name, try loading by ID
+        if not os.path.exists(filepath):
+            filepath = os.path.join(conv_dir, f"{name}")
+            if not os.path.exists(filepath):
+                logger.warning(
+                    f"No conversation found with name or ID: {name}"
+                )
+                return cls(
+                    name=name,
+                    conversations_dir=conv_dir,
+                    save_enabled=True,
+                )
+
+        return cls(
+            name=name,
+            conversations_dir=conv_dir,
+            load_filepath=filepath,
+            save_enabled=True,
+        )
+
+    def return_dict_final(self):
+        """Return the final message as a dictionary."""
+        return (
+            self.conversation_history[-1]["content"],
+            self.conversation_history[-1]["content"],
+        )
 
     @classmethod
-    def list_cached_conversations(
+    def list_conversations(
         cls, conversations_dir: Optional[str] = None
-    ) -> List[str]:
-        """List all cached conversations.
+    ) -> List[Dict[str, str]]:
+        """List all saved conversations.
 
         Args:
-            conversations_dir (Optional[str]): Directory containing cached conversations
+            conversations_dir (Optional[str]): Directory containing conversations
 
         Returns:
-            List[str]: List of conversation names (without .json extension)
+            List[Dict[str, str]]: List of conversation metadata
         """
-        if conversations_dir is None:
-            conversations_dir = os.path.join(
-                os.path.expanduser("~"), ".swarms", "conversations"
-            )
-
-        if not os.path.exists(conversations_dir):
+        conv_dir = conversations_dir or get_conversation_dir()
+        if not os.path.exists(conv_dir):
             return []
 
         conversations = []
-        for file in os.listdir(conversations_dir):
-            if file.endswith(".json"):
-                conversations.append(
-                    file[:-5]
-                )  # Remove .json extension
-        return conversations
+        seen_ids = (
+            set()
+        )  # Track seen conversation IDs to avoid duplicates
+
+        for filename in os.listdir(conv_dir):
+            if filename.endswith(".json"):
+                try:
+                    filepath = os.path.join(conv_dir, filename)
+                    with open(filepath) as f:
+                        data = json.load(f)
+                        metadata = data.get("metadata", {})
+                        conv_id = metadata.get("id")
+                        name = metadata.get("name")
+                        created_at = metadata.get("created_at")
+
+                        # Skip if we've already seen this ID or if required fields are missing
+                        if (
+                            not all([conv_id, name, created_at])
+                            or conv_id in seen_ids
+                        ):
+                            continue
+
+                        seen_ids.add(conv_id)
+                        conversations.append(
+                            {
+                                "id": conv_id,
+                                "name": name,
+                                "created_at": created_at,
+                                "filepath": filepath,
+                            }
+                        )
+                except json.JSONDecodeError:
+                    logger.warning(
+                        f"Skipping corrupted conversation file: {filename}"
+                    )
+                    continue
+                except Exception as e:
+                    logger.error(
+                        f"Failed to read conversation {filename}: {str(e)}"
+                    )
+                    continue
+
+        # Sort by creation date, newest first
+        return sorted(
+            conversations, key=lambda x: x["created_at"], reverse=True
+        )
 
     def clear_memory(self):
         """Clear the memory of the conversation."""
+        if self.backend_instance:
+            try:
+                return self.backend_instance.clear()
+            except Exception as e:
+                logger.error(f"Backend clear_memory failed: {e}")
+                # Fallback to in-memory implementation
+                pass
         self.conversation_history = []
 
 
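Taken together, the new get_conversation_dir, list_conversations, and load_conversation helpers replace the old cache-directory listing. A rough round-trip sketch, assuming at least one conversation has already been saved to the default conversations directory:

from swarms.structs.conversation import Conversation

# List saved conversations (metadata dicts sorted newest first), then
# reload the most recent one from its file path.
saved = Conversation.list_conversations()
if saved:
    restored = Conversation.load_conversation(
        name=saved[0]["name"],
        load_filepath=saved[0]["filepath"],
    )
    print(restored.return_history_as_string())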