noesium-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. noesium/core/__init__.py +4 -0
  2. noesium/core/agent/__init__.py +14 -0
  3. noesium/core/agent/base.py +227 -0
  4. noesium/core/consts.py +6 -0
  5. noesium/core/goalith/conflict/conflict.py +104 -0
  6. noesium/core/goalith/conflict/detector.py +53 -0
  7. noesium/core/goalith/decomposer/__init__.py +6 -0
  8. noesium/core/goalith/decomposer/base.py +46 -0
  9. noesium/core/goalith/decomposer/callable_decomposer.py +65 -0
  10. noesium/core/goalith/decomposer/llm_decomposer.py +326 -0
  11. noesium/core/goalith/decomposer/prompts.py +140 -0
  12. noesium/core/goalith/decomposer/simple_decomposer.py +61 -0
  13. noesium/core/goalith/errors.py +22 -0
  14. noesium/core/goalith/goalgraph/graph.py +526 -0
  15. noesium/core/goalith/goalgraph/node.py +179 -0
  16. noesium/core/goalith/replanner/base.py +31 -0
  17. noesium/core/goalith/replanner/replanner.py +36 -0
  18. noesium/core/goalith/service.py +26 -0
  19. noesium/core/llm/__init__.py +154 -0
  20. noesium/core/llm/base.py +152 -0
  21. noesium/core/llm/litellm.py +528 -0
  22. noesium/core/llm/llamacpp.py +487 -0
  23. noesium/core/llm/message.py +184 -0
  24. noesium/core/llm/ollama.py +459 -0
  25. noesium/core/llm/openai.py +520 -0
  26. noesium/core/llm/openrouter.py +89 -0
  27. noesium/core/llm/prompt.py +551 -0
  28. noesium/core/memory/__init__.py +11 -0
  29. noesium/core/memory/base.py +464 -0
  30. noesium/core/memory/memu/__init__.py +24 -0
  31. noesium/core/memory/memu/config/__init__.py +26 -0
  32. noesium/core/memory/memu/config/activity/config.py +46 -0
  33. noesium/core/memory/memu/config/event/config.py +46 -0
  34. noesium/core/memory/memu/config/markdown_config.py +241 -0
  35. noesium/core/memory/memu/config/profile/config.py +48 -0
  36. noesium/core/memory/memu/llm_adapter.py +129 -0
  37. noesium/core/memory/memu/memory/__init__.py +31 -0
  38. noesium/core/memory/memu/memory/actions/__init__.py +40 -0
  39. noesium/core/memory/memu/memory/actions/add_activity_memory.py +299 -0
  40. noesium/core/memory/memu/memory/actions/base_action.py +342 -0
  41. noesium/core/memory/memu/memory/actions/cluster_memories.py +262 -0
  42. noesium/core/memory/memu/memory/actions/generate_suggestions.py +198 -0
  43. noesium/core/memory/memu/memory/actions/get_available_categories.py +66 -0
  44. noesium/core/memory/memu/memory/actions/link_related_memories.py +515 -0
  45. noesium/core/memory/memu/memory/actions/run_theory_of_mind.py +254 -0
  46. noesium/core/memory/memu/memory/actions/update_memory_with_suggestions.py +514 -0
  47. noesium/core/memory/memu/memory/embeddings.py +130 -0
  48. noesium/core/memory/memu/memory/file_manager.py +306 -0
  49. noesium/core/memory/memu/memory/memory_agent.py +578 -0
  50. noesium/core/memory/memu/memory/recall_agent.py +376 -0
  51. noesium/core/memory/memu/memory_store.py +628 -0
  52. noesium/core/memory/models.py +149 -0
  53. noesium/core/msgbus/__init__.py +12 -0
  54. noesium/core/msgbus/base.py +395 -0
  55. noesium/core/orchestrix/__init__.py +0 -0
  56. noesium/core/py.typed +0 -0
  57. noesium/core/routing/__init__.py +20 -0
  58. noesium/core/routing/base.py +66 -0
  59. noesium/core/routing/router.py +241 -0
  60. noesium/core/routing/strategies/__init__.py +9 -0
  61. noesium/core/routing/strategies/dynamic_complexity.py +361 -0
  62. noesium/core/routing/strategies/self_assessment.py +147 -0
  63. noesium/core/routing/types.py +38 -0
  64. noesium/core/toolify/__init__.py +39 -0
  65. noesium/core/toolify/base.py +360 -0
  66. noesium/core/toolify/config.py +138 -0
  67. noesium/core/toolify/mcp_integration.py +275 -0
  68. noesium/core/toolify/registry.py +214 -0
  69. noesium/core/toolify/toolkits/__init__.py +1 -0
  70. noesium/core/tracing/__init__.py +37 -0
  71. noesium/core/tracing/langgraph_hooks.py +308 -0
  72. noesium/core/tracing/opik_tracing.py +144 -0
  73. noesium/core/tracing/token_tracker.py +166 -0
  74. noesium/core/utils/__init__.py +10 -0
  75. noesium/core/utils/logging.py +172 -0
  76. noesium/core/utils/statistics.py +12 -0
  77. noesium/core/utils/typing.py +17 -0
  78. noesium/core/vector_store/__init__.py +79 -0
  79. noesium/core/vector_store/base.py +94 -0
  80. noesium/core/vector_store/pgvector.py +304 -0
  81. noesium/core/vector_store/weaviate.py +383 -0
  82. noesium-0.1.0.dist-info/METADATA +525 -0
  83. noesium-0.1.0.dist-info/RECORD +86 -0
  84. noesium-0.1.0.dist-info/WHEEL +5 -0
  85. noesium-0.1.0.dist-info/licenses/LICENSE +21 -0
  86. noesium-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,628 @@
+"""
+MemU-based memory store implementation.
+
+This module provides a concrete implementation of the BaseMemoryStore interface
+using the MemU memory agent system. It bridges the action-based MemU architecture
+with the standard memory store API.
+"""
+
+import json
+import logging
+import uuid
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+from noesium.core.llm import BaseLLMClient
+from noesium.core.memory.base import BaseMemoryStore
+from noesium.core.memory.models import MemoryFilter, MemoryItem, MemoryStats, SearchResult
+
+from .memory import MemoryAgent
+
+logger = logging.getLogger(__name__)
+
+
+class MemuMemoryStore(BaseMemoryStore):
+    """
+    MemU-based implementation of the memory store interface.
+
+    This implementation uses the MemU memory agent system for storage and retrieval,
+    providing file-based memory management with function calling capabilities.
+    """
+
+    def __init__(
+        self,
+        memory_dir: str,
+        agent_id: str = "default_agent",
+        user_id: str = "default_user",
+        llm_client: Optional[BaseLLMClient] = None,
+        enable_embeddings: bool = True,
+        **kwargs,
+    ):
+        """
+        Initialize the MemU memory store.
+
+        Args:
+            memory_dir: Directory to store memory files
+            agent_id: Agent identifier for memory organization
+            user_id: User identifier for memory organization
+            llm_client: LLM client for memory operations
+            enable_embeddings: Whether to enable embedding-based similarity search
+            **kwargs: Additional parameters
+        """
+        self.memory_dir = Path(memory_dir)
+        self.agent_id = agent_id
+        self.user_id = user_id
+        self.enable_embeddings = enable_embeddings
+
+        # Initialize LLM client if not provided
+        if llm_client is None:
+            try:
+                from .llm_adapter import _get_llm_client_memu_compatible
+
+                llm_client = _get_llm_client_memu_compatible()
+            except Exception as e:
+                logger.warning(f"Failed to initialize default LLM client: {e}")
+                # Continue without LLM client for basic file operations
+
+        self.llm_client = llm_client
+
+        # Initialize MemU memory agent
+        self.memory_agent = MemoryAgent(
+            llm_client=llm_client,
+            agent_id=agent_id,
+            user_id=user_id,
+            memory_dir=str(memory_dir),
+            enable_embeddings=enable_embeddings,
+        )
+
+        logger.info(f"MemuMemoryStore initialized: agent={agent_id}, user={user_id}, dir={memory_dir}")
+
+    # ==========================================
+    # Core CRUD Operations
+    # ==========================================
+
+    async def add(self, memory_item: MemoryItem, **kwargs) -> str:
+        """Add a new memory item to the store."""
+        try:
+            # Convert memory item to conversation format for MemU
+            conversation_content = self._memory_item_to_content(memory_item)
+
+            # Use MemU's add_activity_memory action
+            result = self.memory_agent.call_function(
+                "add_activity_memory",
+                {
+                    "character_name": memory_item.user_id or "User",
+                    "content": conversation_content,
+                    "session_date": memory_item.created_at.strftime("%Y-%m-%d") if memory_item.created_at else None,
+                },
+            )
+
+            if result.get("success"):
+                # Store the memory item ID in metadata for retrieval
+                memory_items = result.get("memory_items", [])
+                if memory_items:
+                    # Use the provided ID or generate a new one
+                    memory_id = memory_item.id or str(uuid.uuid4())
+
+                    # Store mapping in a tracking file
+                    self._store_memory_mapping(memory_id, memory_item, memory_items)
+
+                    return memory_id
+                else:
+                    raise Exception("No memory items were created")
+            else:
+                raise Exception(f"Failed to add memory: {result.get('error', 'Unknown error')}")
+
+        except Exception as e:
+            logger.error(f"Error adding memory item: {e}")
+            raise
+
+    async def get(self, memory_id: str, **kwargs) -> Optional[MemoryItem]:
+        """Retrieve a memory item by its unique identifier."""
+        try:
+            # Get memory mapping
+            mapping = self._get_memory_mapping(memory_id)
+            if not mapping:
+                return None
+
+            # Reconstruct memory item from stored data
+            return self._reconstruct_memory_item(memory_id, mapping)
+
+        except Exception as e:
+            logger.error(f"Error retrieving memory item {memory_id}: {e}")
+            return None
+
+    async def update(self, memory_id: str, updates: Dict[str, Any], **kwargs) -> bool:
+        """Update an existing memory item."""
+        try:
+            # Get existing memory item
+            existing_item = await self.get(memory_id)
+            if not existing_item:
+                return False
+
+            # Apply updates
+            for key, value in updates.items():
+                if hasattr(existing_item, key):
+                    setattr(existing_item, key, value)
+
+            existing_item.updated_at = datetime.utcnow()
+            existing_item.version += 1
+
+            # Store updated mapping
+            mapping = self._get_memory_mapping(memory_id)
+            if mapping:
+                mapping.update(
+                    {
+                        "content": existing_item.content,
+                        "metadata": existing_item.metadata,
+                        "tags": existing_item.tags,
+                        "importance": existing_item.importance,
+                        "updated_at": existing_item.updated_at.isoformat(),
+                        "version": existing_item.version,
+                    }
+                )
+                self._store_memory_mapping(memory_id, existing_item, mapping.get("memory_items", []))
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Error updating memory item {memory_id}: {e}")
+            return False
+
+    async def delete(self, memory_id: str, **kwargs) -> bool:
+        """Delete a memory item from the store."""
+        try:
+            # Remove memory mapping
+            return self._remove_memory_mapping(memory_id)
+
+        except Exception as e:
+            logger.error(f"Error deleting memory item {memory_id}: {e}")
+            return False
+
+    # ==========================================
+    # Batch Operations
+    # ==========================================
+
+    async def add_many(self, memory_items: List[MemoryItem], **kwargs) -> List[str]:
+        """Add multiple memory items in a batch operation."""
+        results = []
+        for item in memory_items:
+            try:
+                memory_id = await self.add(item)
+                results.append(memory_id)
+            except Exception as e:
+                logger.error(f"Error adding memory item in batch: {e}")
+                # Continue with other items
+                results.append(str(uuid.uuid4()))  # Generate placeholder ID
+        return results
+
+    async def delete_many(self, memory_ids: List[str], **kwargs) -> int:
+        """Delete multiple memory items in a batch operation."""
+        deleted_count = 0
+        for memory_id in memory_ids:
+            try:
+                if await self.delete(memory_id):
+                    deleted_count += 1
+            except Exception as e:
+                logger.error(f"Error deleting memory item {memory_id} in batch: {e}")
+                # Continue with other items
+        return deleted_count
+
+    # ==========================================
+    # Query and Filtering Operations
+    # ==========================================
+
+    async def get_all(
+        self,
+        filters: Optional[MemoryFilter] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_by: Optional[str] = None,
+        sort_order: str = "desc",
+        **kwargs,
+    ) -> List[MemoryItem]:
+        """Retrieve multiple memory items with optional filtering and pagination."""
+        try:
+            # Get all memory mappings
+            all_mappings = self._get_all_memory_mappings()
+
+            # Reconstruct memory items
+            items = []
+            for memory_id, mapping in all_mappings.items():
+                item = self._reconstruct_memory_item(memory_id, mapping)
+                if item:
+                    items.append(item)
+
+            # Apply filters
+            if filters:
+                items = self._apply_filters(items, filters)
+
+            # Apply sorting
+            if sort_by:
+                reverse = sort_order.lower() == "desc"
+                items.sort(key=lambda x: getattr(x, sort_by, None) or 0, reverse=reverse)
+
+            # Apply pagination
+            if offset:
+                items = items[offset:]
+            if limit:
+                items = items[:limit]
+
+            return items
+
+        except Exception as e:
+            logger.error(f"Error retrieving memory items: {e}")
+            return []
+
+    async def count(self, filters: Optional[MemoryFilter] = None, **kwargs) -> int:
+        """Count memory items matching the given filters."""
+        try:
+            items = await self.get_all(filters=filters)
+            return len(items)
+        except Exception as e:
+            logger.error(f"Error counting memory items: {e}")
+            return 0
+
+    # ==========================================
+    # Search Operations
+    # ==========================================
+
+    async def search(
+        self,
+        query: str,
+        limit: int = 10,
+        threshold: float = 0.7,
+        memory_types: Optional[List[str]] = None,
+        filters: Optional[MemoryFilter] = None,
+        **kwargs,
+    ) -> List[SearchResult]:
+        """Perform semantic search across memory items."""
+        try:
+            # Get all items first
+            all_items = await self.get_all(filters=filters)
+
+            # Filter by memory types if specified
+            if memory_types:
+                all_items = [item for item in all_items if item.memory_type in memory_types]
+
+            # If embeddings are enabled, use similarity search
+            if self.enable_embeddings and self.llm_client:
+                # This is a simplified implementation
+                # In practice, you'd use the embedding system from MemU
+                results = []
+                for item in all_items:
+                    # Simple text similarity scoring (can be improved with actual embeddings)
+                    score = self._calculate_similarity(query, item.content)
+                    if score >= threshold:
+                        results.append(
+                            SearchResult(
+                                memory_item=item,
+                                relevance_score=score,
+                                search_metadata={"search_type": "text_similarity"},
+                            )
+                        )
+
+                # Sort by relevance score
+                results.sort(key=lambda x: x.relevance_score, reverse=True)
+                return results[:limit]
+            else:
+                # Fallback to simple text search
+                results = []
+                for item in all_items:
+                    if query.lower() in item.content.lower():
+                        results.append(
+                            SearchResult(
+                                memory_item=item,
+                                relevance_score=0.8,  # Fixed score for text match
+                                search_metadata={"search_type": "text_match"},
+                            )
+                        )
+
+                return results[:limit]
+
+        except Exception as e:
+            logger.error(f"Error searching memory items: {e}")
+            return []
+
+    async def similarity_search(
+        self,
+        reference_items: List[MemoryItem],
+        limit: int = 10,
+        threshold: float = 0.7,
+        filters: Optional[MemoryFilter] = None,
+        **kwargs,
+    ) -> List[SearchResult]:
+        """Find memory items similar to a list of reference items."""
+        try:
+            if not reference_items:
+                return []
+
+            # Use the first reference item as the primary query
+            primary_item = reference_items[0]
+
+            # Perform search using the content of the reference item
+            return await self.search(
+                query=primary_item.content, limit=limit, threshold=threshold, filters=filters, **kwargs
+            )
+
+        except Exception as e:
+            logger.error(f"Error in similarity search: {e}")
+            return []
+
+    # ==========================================
+    # Memory Management Operations
+    # ==========================================
+
+    async def get_stats(self, filters: Optional[MemoryFilter] = None, **kwargs) -> MemoryStats:
+        """Get statistics about the memory store."""
+        try:
+            items = await self.get_all(filters=filters)
+
+            if not items:
+                return MemoryStats(total_items=0, items_by_type={}, items_by_user={}, average_importance=0.0)
+
+            # Calculate statistics
+            items_by_type = {}
+            items_by_user = {}
+            total_importance = 0.0
+            oldest_date = None
+            newest_date = None
+
+            for item in items:
+                # Count by type
+                items_by_type[item.memory_type] = items_by_type.get(item.memory_type, 0) + 1
+
+                # Count by user
+                user = item.user_id or "unknown"
+                items_by_user[user] = items_by_user.get(user, 0) + 1
+
+                # Accumulate importance
+                total_importance += item.importance
+
+                # Track dates
+                if oldest_date is None or item.created_at < oldest_date:
+                    oldest_date = item.created_at
+                if newest_date is None or item.created_at > newest_date:
+                    newest_date = item.created_at
+
+            # Calculate storage size (approximate)
+            storage_size = sum(len(item.content.encode("utf-8")) for item in items)
+
+            return MemoryStats(
+                total_items=len(items),
+                items_by_type=items_by_type,
+                items_by_user=items_by_user,
+                oldest_item_date=oldest_date,
+                newest_item_date=newest_date,
+                average_importance=total_importance / len(items),
+                storage_size_bytes=storage_size,
+            )
+
+        except Exception as e:
+            logger.error(f"Error getting memory stats: {e}")
+            return MemoryStats(total_items=0, items_by_type={}, items_by_user={}, average_importance=0.0)
+
+    async def cleanup_old_memories(
+        self,
+        older_than: datetime,
+        memory_types: Optional[List[str]] = None,
+        preserve_important: bool = True,
+        dry_run: bool = True,
+        **kwargs,
+    ) -> int:
+        """Clean up old memory items based on age and criteria."""
+        try:
+            # Get all items
+            items = await self.get_all()
+
+            # Find items to delete
+            items_to_delete = []
+            for item in items:
+                # Check age
+                if item.created_at >= older_than:
+                    continue
+
+                # Check type filter
+                if memory_types and item.memory_type not in memory_types:
+                    continue
+
+                # Check importance preservation
+                if preserve_important and item.importance > 0.8:
+                    continue
+
+                items_to_delete.append(item)
+
+            if dry_run:
+                return len(items_to_delete)
+            else:
+                # Actually delete items
+                deleted_count = 0
+                for item in items_to_delete:
+                    if await self.delete(item.id):
+                        deleted_count += 1
+                return deleted_count
+
+        except Exception as e:
+            logger.error(f"Error cleaning up old memories: {e}")
+            return 0
+
+    # ==========================================
+    # Helper Methods
+    # ==========================================
+
+    def _memory_item_to_content(self, memory_item: MemoryItem) -> str:
+        """Convert a MemoryItem to content format suitable for MemU."""
+        # Create a simple conversation format
+        memory_item.user_id or "User"
+        content = memory_item.content
+
+        # Format as conversation if not already formatted
+        if not content.startswith(("USER:", "ASSISTANT:", "SYSTEM:")):
+            content = f"USER: {content}"
+
+        return content
+
+    def _store_memory_mapping(self, memory_id: str, memory_item: MemoryItem, memory_items: List[Any]):
+        """Store mapping between memory ID and MemU storage."""
+        mappings_file = self.memory_dir / f"{self.agent_id}_{self.user_id}_mappings.json"
+
+        # Load existing mappings
+        mappings = {}
+        if mappings_file.exists():
+            try:
+                with open(mappings_file, "r") as f:
+                    mappings = json.load(f)
+            except Exception as e:
+                logger.warning(f"Error loading mappings: {e}")
+
+        # Store new mapping
+        mappings[memory_id] = {
+            "content": memory_item.content,
+            "memory_type": memory_item.memory_type,
+            "user_id": memory_item.user_id,
+            "agent_id": memory_item.agent_id,
+            "session_id": memory_item.session_id,
+            "importance": memory_item.importance,
+            "context": memory_item.context,
+            "metadata": memory_item.metadata,
+            "tags": memory_item.tags,
+            "created_at": memory_item.created_at.isoformat(),
+            "updated_at": memory_item.updated_at.isoformat() if memory_item.updated_at else None,
+            "version": memory_item.version,
+            "memory_items": memory_items,  # MemU-specific data
+        }
+
+        # Save mappings
+        try:
+            mappings_file.parent.mkdir(parents=True, exist_ok=True)
+            with open(mappings_file, "w") as f:
+                json.dump(mappings, f, indent=2)
+        except Exception as e:
+            logger.error(f"Error saving mappings: {e}")
+
+    def _get_memory_mapping(self, memory_id: str) -> Optional[Dict[str, Any]]:
+        """Get mapping data for a memory ID."""
+        mappings_file = self.memory_dir / f"{self.agent_id}_{self.user_id}_mappings.json"
+
+        if not mappings_file.exists():
+            return None
+
+        try:
+            with open(mappings_file, "r") as f:
+                mappings = json.load(f)
+            return mappings.get(memory_id)
+        except Exception as e:
+            logger.error(f"Error loading mapping for {memory_id}: {e}")
+            return None
+
+    def _get_all_memory_mappings(self) -> Dict[str, Dict[str, Any]]:
+        """Get all memory mappings."""
+        mappings_file = self.memory_dir / f"{self.agent_id}_{self.user_id}_mappings.json"
+
+        if not mappings_file.exists():
+            return {}
+
+        try:
+            with open(mappings_file, "r") as f:
+                return json.load(f)
+        except Exception as e:
+            logger.error(f"Error loading all mappings: {e}")
+            return {}
+
+    def _remove_memory_mapping(self, memory_id: str) -> bool:
+        """Remove a memory mapping."""
+        mappings_file = self.memory_dir / f"{self.agent_id}_{self.user_id}_mappings.json"
+
+        if not mappings_file.exists():
+            return False
+
+        try:
+            with open(mappings_file, "r") as f:
+                mappings = json.load(f)
+
+            if memory_id in mappings:
+                del mappings[memory_id]
+
+                with open(mappings_file, "w") as f:
+                    json.dump(mappings, f, indent=2)
+                return True
+            else:
+                return False
+
+        except Exception as e:
+            logger.error(f"Error removing mapping for {memory_id}: {e}")
+            return False
+
+    def _reconstruct_memory_item(self, memory_id: str, mapping: Dict[str, Any]) -> Optional[MemoryItem]:
+        """Reconstruct a MemoryItem from mapping data."""
+        try:
+            return MemoryItem(
+                id=memory_id,
+                content=mapping.get("content", ""),
+                memory_type=mapping.get("memory_type", "message"),
+                user_id=mapping.get("user_id"),
+                agent_id=mapping.get("agent_id"),
+                session_id=mapping.get("session_id"),
+                importance=mapping.get("importance", 0.5),
+                context=mapping.get("context", {}),
+                metadata=mapping.get("metadata", {}),
+                tags=mapping.get("tags", []),
+                created_at=datetime.fromisoformat(mapping["created_at"]),
+                updated_at=datetime.fromisoformat(mapping["updated_at"]) if mapping.get("updated_at") else None,
+                version=mapping.get("version", 1),
+            )
+        except Exception as e:
+            logger.error(f"Error reconstructing memory item {memory_id}: {e}")
+            return None
+
+    def _apply_filters(self, items: List[MemoryItem], filters: MemoryFilter) -> List[MemoryItem]:
+        """Apply filters to a list of memory items."""
+        filtered_items = items
+
+        if filters.user_id:
+            filtered_items = [item for item in filtered_items if item.user_id == filters.user_id]
+
+        if filters.agent_id:
+            filtered_items = [item for item in filtered_items if item.agent_id == filters.agent_id]
+
+        if filters.session_id:
+            filtered_items = [item for item in filtered_items if item.session_id == filters.session_id]
+
+        if filters.memory_type:
+            filtered_items = [item for item in filtered_items if item.memory_type == filters.memory_type]
+
+        if filters.tags:
+            filtered_items = [item for item in filtered_items if all(tag in item.tags for tag in filters.tags)]
+
+        if filters.date_from:
+            filtered_items = [item for item in filtered_items if item.created_at >= filters.date_from]
+
+        if filters.date_to:
+            filtered_items = [item for item in filtered_items if item.created_at <= filters.date_to]
+
+        if filters.min_importance is not None:
+            filtered_items = [item for item in filtered_items if item.importance >= filters.min_importance]
+
+        # Apply metadata filters
+        for key, value in filters.metadata_filters.items():
+            filtered_items = [item for item in filtered_items if item.metadata.get(key) == value]
+
+        return filtered_items
+
+    def _calculate_similarity(self, query: str, content: str) -> float:
+        """Calculate simple text similarity between query and content."""
+        # Simple implementation - can be improved with actual similarity algorithms
+        query_words = set(query.lower().split())
+        content_words = set(content.lower().split())
+
+        if not query_words:
+            return 0.0
+
+        intersection = query_words.intersection(content_words)
+        union = query_words.union(content_words)
+
+        if not union:
+            return 0.0
+
+        return len(intersection) / len(union)
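
For context, the sketch below shows how the MemuMemoryStore added in this file might be exercised. It is illustrative only, not part of the package: the import path is assumed from the file layout above, the MemoryItem keyword fields mirror those read back in `_reconstruct_memory_item`, and a working LLM backend is assumed to be resolvable by the bundled `llm_adapter` (or passed explicitly via `llm_client`).

# Illustrative usage sketch (assumptions noted above); not shipped in the wheel.
import asyncio
from datetime import datetime

from noesium.core.memory.memu.memory_store import MemuMemoryStore
from noesium.core.memory.models import MemoryItem


async def main() -> None:
    # File-based store; the mapping JSON is written under ./memory_data.
    store = MemuMemoryStore(
        memory_dir="./memory_data",
        agent_id="demo_agent",
        user_id="demo_user",
        enable_embeddings=False,  # force the plain text-match search path
    )

    item = MemoryItem(
        content="USER: Prefers weekly summaries as bullet points",
        memory_type="message",
        user_id="demo_user",
        importance=0.9,
        tags=["preferences"],
        created_at=datetime.utcnow(),
    )

    memory_id = await store.add(item)  # routed through MemU's add_activity_memory action
    hits = await store.search("bullet points", limit=5)
    stats = await store.get_stats()
    print(memory_id, len(hits), stats.total_items)


if __name__ == "__main__":
    asyncio.run(main())

On scoring: when embeddings are enabled and an LLM client is present, `search` ranks items by the word-level Jaccard ratio computed in `_calculate_similarity`. For example, "machine learning models" versus "training machine learning systems" shares 2 of 5 distinct words and scores 0.4, below the default 0.7 threshold. Without embeddings, the store falls back to substring matching with a fixed relevance score of 0.8.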