roampal 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- roampal/__init__.py +29 -0
- roampal/__main__.py +6 -0
- roampal/backend/__init__.py +1 -0
- roampal/backend/modules/__init__.py +1 -0
- roampal/backend/modules/memory/__init__.py +43 -0
- roampal/backend/modules/memory/chromadb_adapter.py +623 -0
- roampal/backend/modules/memory/config.py +102 -0
- roampal/backend/modules/memory/content_graph.py +543 -0
- roampal/backend/modules/memory/context_service.py +455 -0
- roampal/backend/modules/memory/embedding_service.py +96 -0
- roampal/backend/modules/memory/knowledge_graph_service.py +1052 -0
- roampal/backend/modules/memory/memory_bank_service.py +433 -0
- roampal/backend/modules/memory/memory_types.py +296 -0
- roampal/backend/modules/memory/outcome_service.py +400 -0
- roampal/backend/modules/memory/promotion_service.py +473 -0
- roampal/backend/modules/memory/routing_service.py +444 -0
- roampal/backend/modules/memory/scoring_service.py +324 -0
- roampal/backend/modules/memory/search_service.py +646 -0
- roampal/backend/modules/memory/tests/__init__.py +1 -0
- roampal/backend/modules/memory/tests/conftest.py +12 -0
- roampal/backend/modules/memory/tests/unit/__init__.py +1 -0
- roampal/backend/modules/memory/tests/unit/conftest.py +7 -0
- roampal/backend/modules/memory/tests/unit/test_knowledge_graph_service.py +517 -0
- roampal/backend/modules/memory/tests/unit/test_memory_bank_service.py +504 -0
- roampal/backend/modules/memory/tests/unit/test_outcome_service.py +485 -0
- roampal/backend/modules/memory/tests/unit/test_scoring_service.py +255 -0
- roampal/backend/modules/memory/tests/unit/test_search_service.py +413 -0
- roampal/backend/modules/memory/tests/unit/test_unified_memory_system.py +418 -0
- roampal/backend/modules/memory/unified_memory_system.py +1277 -0
- roampal/cli.py +638 -0
- roampal/hooks/__init__.py +16 -0
- roampal/hooks/session_manager.py +587 -0
- roampal/hooks/stop_hook.py +176 -0
- roampal/hooks/user_prompt_submit_hook.py +103 -0
- roampal/mcp/__init__.py +7 -0
- roampal/mcp/server.py +611 -0
- roampal/server/__init__.py +7 -0
- roampal/server/main.py +744 -0
- roampal-0.1.4.dist-info/METADATA +179 -0
- roampal-0.1.4.dist-info/RECORD +44 -0
- roampal-0.1.4.dist-info/WHEEL +5 -0
- roampal-0.1.4.dist-info/entry_points.txt +2 -0
- roampal-0.1.4.dist-info/licenses/LICENSE +190 -0
- roampal-0.1.4.dist-info/top_level.txt +1 -0
roampal/backend/modules/memory/promotion_service.py
@@ -0,0 +1,473 @@
"""
Promotion Service - Automatic memory promotion/demotion between collections.

Extracted from UnifiedMemorySystem as part of refactoring.

Responsibilities:
- Promote valuable memories (working -> history -> patterns)
- Demote failing patterns
- Delete persistently failing memories
- Batch promotion of valuable working memory
"""

import asyncio
import json
import logging
import uuid
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional

from .config import MemoryConfig

logger = logging.getLogger(__name__)


class PromotionService:
    """
    Handles automatic promotion and demotion of memories between collections.

    Promotion Flow:
    - working -> history: score >= 0.7, uses >= 2
    - history -> patterns: score >= 0.9 (HIGH_VALUE_THRESHOLD), uses >= 3

    Demotion Flow:
    - patterns -> history: score < 0.4 (DEMOTION_SCORE_THRESHOLD)

    Deletion:
    - Any collection: score < 0.2 (DELETION_SCORE_THRESHOLD)
    - New items (< 7 days): score < 0.1 (NEW_ITEM_DELETION_THRESHOLD)
    """
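
----
Note: the thresholds quoted in the docstring map onto the MemoryConfig fields read
later in this file. A minimal sketch of that shape, using the docstring's numbers;
the real defaults live in roampal/backend/modules/memory/config.py and may differ.

from dataclasses import dataclass

@dataclass
class MemoryConfig:
    promotion_score_threshold: float = 0.7    # working -> history
    high_value_threshold: float = 0.9         # history -> patterns
    demotion_score_threshold: float = 0.4     # patterns -> history
    deletion_score_threshold: float = 0.2     # any collection
    new_item_deletion_threshold: float = 0.1  # items younger than 7 days
----
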
    def __init__(
        self,
        collections: Dict[str, Any],  # ChromaDBAdapter instances
        embed_fn: Callable[[str], Any],  # Async function to embed text
        add_relationship_fn: Optional[Callable] = None,  # For evolution tracking
        config: Optional[MemoryConfig] = None,
    ):
        """
        Initialize PromotionService.

        Args:
            collections: Dict mapping collection name to ChromaDBAdapter
            embed_fn: Async function to generate embeddings
            add_relationship_fn: Optional callback to track evolution relationships
            config: Optional MemoryConfig for thresholds
        """
        self.collections = collections
        self.embed_fn = embed_fn
        self.add_relationship_fn = add_relationship_fn
        self.config = config or MemoryConfig()

        # Promotion lock to prevent concurrent promotion operations
        self._promotion_lock = asyncio.Lock()
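
----
Note: a construction sketch with in-memory stand-ins. The stub mirrors exactly the
adapter surface this service calls (async upsert_vectors; sync get_fragment,
delete_vectors, list_all_ids); the real adapter is chromadb_adapter.py and the real
embedder is embedding_service.py, both elsewhere in this wheel.

class StubAdapter:
    def __init__(self):
        self.docs = {}  # id -> {"content": str, "metadata": dict}

    def get_fragment(self, doc_id):
        return self.docs.get(doc_id)

    async def upsert_vectors(self, ids, vectors, metadatas):
        for i, m in zip(ids, metadatas):
            self.docs[i] = {"content": m.get("text", ""), "metadata": m}

    def delete_vectors(self, ids):
        for i in ids:
            self.docs.pop(i, None)

    def list_all_ids(self):
        return list(self.docs)

async def fake_embed(text: str):
    return [0.0] * 384  # dummy fixed-size vector

collections = {name: StubAdapter() for name in ("working", "history", "patterns")}
service = PromotionService(collections, embed_fn=fake_embed)
----
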
    # =========================================================================
    # Main Promotion/Demotion Handler
    # =========================================================================

    async def handle_promotion(
        self,
        doc_id: str,
        collection: str,
        score: float,
        uses: int,
        metadata: Dict[str, Any],
        collection_size: int = 0
    ) -> Optional[str]:
        """
        Handle automatic promotion/demotion using outcome-based thresholds.

        Args:
            doc_id: Document ID to evaluate
            collection: Current collection name
            score: Current score (0-1)
            uses: Number of times used
            metadata: Document metadata
            collection_size: Current collection size (unused, kept for API compatibility)

        Returns:
            New doc_id if promoted/demoted, None otherwise
        """
        # Get full document for evaluation
        if collection not in self.collections:
            logger.warning(f"Collection {collection} not found")
            return None

        doc = self.collections[collection].get_fragment(doc_id)
        if not doc:
            logger.warning(f"Cannot evaluate {doc_id}: document not found")
            return None

        # Promotion: working -> history
        if collection == "working":
            if score >= self.config.promotion_score_threshold and uses >= 2:
                return await self._promote_working_to_history(doc_id, doc, metadata, score, uses)

        # Promotion: history -> patterns
        elif collection == "history":
            if score >= self.config.high_value_threshold and uses >= 3:
                return await self._promote_history_to_patterns(doc_id, doc, metadata, score, uses)

        # Demotion: patterns -> history
        elif collection == "patterns":
            if score < self.config.demotion_score_threshold:
                return await self._demote_patterns_to_history(doc_id, metadata, score)

        # Deletion: score too low
        if score < self.config.deletion_score_threshold:
            await self._handle_deletion(doc_id, collection, metadata, score)

        return None
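
----
Note: a sketch of driving handle_promotion against the stubs above. The metadata
keys (text/score/uses/timestamp) are the ones this file reads; computing score and
uses is the job of scoring_service.py and outcome_service.py, not shown here.

import asyncio
from datetime import datetime

async def demo():
    meta = {
        "text": "Use uv instead of pip in this repo",
        "score": 0.75,
        "uses": 2,
        "timestamp": datetime.now().isoformat(),
    }
    collections["working"].docs["working_abc123"] = {"content": meta["text"], "metadata": meta}
    # 0.75 >= 0.7 and 2 >= 2, so this should move working -> history
    new_id = await service.handle_promotion(
        doc_id="working_abc123", collection="working",
        score=meta["score"], uses=meta["uses"], metadata=meta,
    )
    print(new_id)  # "history_abc123"

asyncio.run(demo())
----
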
    async def _promote_working_to_history(
        self,
        doc_id: str,
        doc: Dict,
        metadata: Dict[str, Any],
        score: float,
        uses: int
    ) -> Optional[str]:
        """Promote from working to history collection."""
        new_id = doc_id.replace("working_", "history_")

        # Build promotion record
        promotion_record = {
            "from": "working",
            "to": "history",
            "timestamp": datetime.now().isoformat(),
            "score": score,
            "uses": uses
        }
        promotion_history = json.loads(metadata.get("promotion_history", "[]"))
        promotion_history.append(promotion_record)
        metadata["promotion_history"] = json.dumps(promotion_history)
        metadata["promoted_from"] = "working"

        # Get text for embedding
        text_for_embedding = metadata.get("text") or metadata.get("content") or doc.get("content", "")
        if not text_for_embedding:
            logger.error(f"Cannot promote {doc_id}: no text found")
            return None

        try:
            await self.collections["history"].upsert_vectors(
                ids=[new_id],
                vectors=[await self.embed_fn(text_for_embedding)],
                metadatas=[metadata]
            )
            logger.info(f"Created history memory: {new_id}")
        except Exception as e:
            logger.error(f"Failed to create history memory {new_id}: {e}")
            return None

        # Only delete from working AFTER successful promotion
        self.collections["working"].delete_vectors([doc_id])

        # Track evolution relationship
        if self.add_relationship_fn:
            await self.add_relationship_fn(new_id, "evolution", {"parent": doc_id})

        logger.info(f"Promoted {doc_id} from working -> history (score: {score:.2f}, uses: {uses})")
        return new_id
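
----
Note: promotion_history is kept as a JSON string inside metadata, presumably
because Chroma metadata values must be flat scalars rather than nested lists.
After the demo promotion above, the stored field would look like:

meta["promotion_history"]
# '[{"from": "working", "to": "history", "timestamp": "...", "score": 0.75, "uses": 2}]'
json.loads(meta["promotion_history"])[-1]["to"]  # "history"
----
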
    async def _promote_history_to_patterns(
        self,
        doc_id: str,
        doc: Dict,
        metadata: Dict[str, Any],
        score: float,
        uses: int
    ) -> Optional[str]:
        """Promote from history to patterns collection."""
        new_id = doc_id.replace("history_", "patterns_")

        # Build promotion record
        promotion_record = {
            "from": "history",
            "to": "patterns",
            "timestamp": datetime.now().isoformat(),
            "score": score,
            "uses": uses
        }
        promotion_history = json.loads(metadata.get("promotion_history", "[]"))
        promotion_history.append(promotion_record)
        metadata["promotion_history"] = json.dumps(promotion_history)
        metadata["promoted_from"] = "history"

        # Get text for embedding
        text_for_embedding = metadata.get("text") or metadata.get("content") or doc.get("content", "")
        if not text_for_embedding:
            logger.error(f"Cannot promote {doc_id}: no text found")
            return None

        try:
            await self.collections["patterns"].upsert_vectors(
                ids=[new_id],
                vectors=[await self.embed_fn(text_for_embedding)],
                metadatas=[metadata]
            )
        except Exception as e:
            logger.error(f"Failed to create patterns memory {new_id}: {e}")
            return None

        self.collections["history"].delete_vectors([doc_id])

        if self.add_relationship_fn:
            await self.add_relationship_fn(new_id, "evolution", {"parent": doc_id})

        logger.info(f"Promoted {doc_id} from history -> patterns (score: {score:.2f}, uses: {uses})")
        return new_id
    async def _demote_patterns_to_history(
        self,
        doc_id: str,
        metadata: Dict[str, Any],
        score: float
    ) -> Optional[str]:
        """Demote from patterns back to history collection."""
        new_id = doc_id.replace("patterns_", "history_")

        text_for_embedding = metadata.get("text") or metadata.get("content", "")
        if not text_for_embedding:
            logger.error(f"Cannot demote {doc_id}: no text found")
            return None

        try:
            await self.collections["history"].upsert_vectors(
                ids=[new_id],
                vectors=[await self.embed_fn(text_for_embedding)],
                metadatas=[{**metadata, "demoted_from": "patterns"}]
            )
        except Exception as e:
            logger.error(f"Failed to demote {doc_id}: {e}")
            return None

        self.collections["patterns"].delete_vectors([doc_id])
        logger.info(f"Demoted {doc_id} to history (score: {score:.2f})")
        return new_id
    async def _handle_deletion(
        self,
        doc_id: str,
        collection: str,
        metadata: Dict[str, Any],
        score: float
    ):
        """Handle deletion of low-scoring memories."""
        # Calculate age
        age_days = 0
        if metadata.get("timestamp"):
            try:
                age_days = (datetime.now() - datetime.fromisoformat(metadata["timestamp"])).days
            except Exception:
                pass

        # Newer items get more lenient threshold
        deletion_threshold = (
            self.config.deletion_score_threshold
            if age_days > 7
            else self.config.new_item_deletion_threshold
        )

        if score < deletion_threshold:
            self.collections[collection].delete_vectors([doc_id])
            logger.info(f"Deleted {doc_id} from {collection} (score {score:.2f} < threshold {deletion_threshold})")
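
----
Note: a lower threshold is harder to fall below, so newer items survive lower
scores. Worked example under the docstring's numbers:

# age 3 days,  score 0.15 -> threshold 0.1 -> 0.15 >= 0.1 -> kept
# age 10 days, score 0.15 -> threshold 0.2 -> 0.15 < 0.2  -> deleted

Items with a missing or unparseable timestamp keep age_days = 0 and are treated as
new. Also, handle_promotion only calls this helper once score is already below
deletion_score_threshold, so for items older than 7 days the inner check always
passes; the comparison here only changes the outcome for the new-item threshold.
----
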
    # =========================================================================
    # Batch Promotion
    # =========================================================================

    async def promote_valuable_working_memory(
        self,
        conversation_id: Optional[str] = None
    ) -> int:
        """
        Promote valuable working memory to history collection.

        This is typically run as a background task (hourly).

        Args:
            conversation_id: Optional filter (not enforced - working memory is global)

        Returns:
            Number of memories promoted
        """
        async with self._promotion_lock:
            return await self._do_batch_promotion()

    async def _do_batch_promotion(self) -> int:
        """Internal batch promotion logic."""
        try:
            working_adapter = self.collections.get("working")
            if not working_adapter:
                return 0

            promoted_count = 0
            checked_count = 0

            # Get all working memory items
            all_ids = working_adapter.list_all_ids()

            for doc_id in all_ids:
                doc = working_adapter.get_fragment(doc_id)
                if not doc:
                    continue

                metadata = doc.get("metadata", {})
                checked_count += 1

                # Get promotion criteria
                text = metadata.get("text", "")
                score = metadata.get("score", 0.5)
                uses = metadata.get("uses", 0)
                timestamp_str = metadata.get("timestamp", "")

                # Calculate age
                age_hours = self._calculate_age_hours(timestamp_str)

                # Promote if: high score AND used multiple times
                if score >= self.config.promotion_score_threshold and uses >= 2:
                    new_id = doc_id.replace("working_", "history_")

                    await self.collections["history"].upsert_vectors(
                        ids=[new_id],
                        vectors=[await self.embed_fn(text)],
                        metadatas=[{
                            **metadata,
                            "promoted_from": "working",
                            "promotion_time": datetime.now().isoformat(),
                            "promotion_reason": "batch_promotion"
                        }]
                    )

                    working_adapter.delete_vectors([doc_id])
                    promoted_count += 1
                    logger.info(f"Promoted {doc_id} to history (score: {score:.2f}, uses: {uses}, age: {age_hours:.1f}h)")

                # Cleanup: Remove items older than 24 hours that weren't promoted
                elif age_hours > 24:
                    working_adapter.delete_vectors([doc_id])
                    logger.info(f"Cleaned up old working memory {doc_id} (age: {age_hours:.1f}h, score: {score:.2f})")

            if promoted_count > 0:
                logger.info(f"Batch promotion: checked {checked_count}, promoted {promoted_count} memories")

            return promoted_count

        except Exception as e:
            logger.error(f"Error in batch promotion: {e}")
            return 0

    def _calculate_age_hours(self, timestamp_str: str) -> float:
        """Calculate age in hours from timestamp string."""
        try:
            if timestamp_str:
                doc_time = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
                return (datetime.now() - doc_time).total_seconds() / 3600
        except Exception:
            pass
        return 0.0
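
----
Note: a subtlety in _calculate_age_hours as written: for timestamps ending in "Z",
fromisoformat returns a timezone-aware datetime, and subtracting an aware datetime
from the naive datetime.now() raises TypeError; the bare except turns that into an
age of 0.0, so "Z"-suffixed items never look old enough for the 24h cleanup paths.
Timestamps this file writes itself (datetime.now().isoformat()) are naive and
unaffected. A timezone-safe variant, as a sketch:

from datetime import datetime, timezone

def age_hours(timestamp_str: str) -> float:
    try:
        if timestamp_str:
            doc_time = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
            if doc_time.tzinfo is None:
                doc_time = doc_time.astimezone()  # treat naive stamps as local time
            return (datetime.now(timezone.utc) - doc_time).total_seconds() / 3600
    except ValueError:
        pass
    return 0.0
----
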
    # =========================================================================
    # Item Movement (generic)
    # =========================================================================

    async def promote_item(
        self,
        doc_id: str,
        from_collection: str,
        to_collection: str,
        metadata: Dict[str, Any]
    ) -> Optional[str]:
        """
        Move an item from one collection to another (generic version).

        Args:
            doc_id: Source document ID
            from_collection: Source collection name
            to_collection: Target collection name
            metadata: Document metadata

        Returns:
            New document ID if successful, None otherwise
        """
        try:
            # Get the document from source collection
            doc = self.collections[from_collection].get_fragment(doc_id)
            if not doc:
                logger.warning(f"Cannot promote {doc_id}: not found in {from_collection}")
                return None

            # Add promotion metadata
            metadata["promoted_from"] = from_collection
            metadata["promoted_at"] = datetime.now().isoformat()
            metadata["original_id"] = doc_id

            # Generate new ID
            new_id = f"{to_collection}_{uuid.uuid4().hex[:8]}"

            # Get text for embedding
            text = doc.get("content", "") or metadata.get("text", "")
            if not text:
                logger.error(f"Cannot promote {doc_id}: no text found")
                return None

            # Store in target collection
            await self.collections[to_collection].upsert_vectors(
                ids=[new_id],
                vectors=[await self.embed_fn(text)],
                metadatas=[metadata]
            )

            logger.info(f"Promoted {doc_id} from {from_collection} to {to_collection} as {new_id}")
            return new_id

        except Exception as e:
            logger.error(f"Failed to promote item: {e}")
            return None
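
----
Note: unlike the threshold-driven paths above, promote_item mints a fresh
UUID-suffixed ID and does not delete the source document, so the caller owns
removal when a move rather than a copy is intended. A usage sketch (the target
collection name is illustrative; any key in self.collections works):

async def move_to_patterns():
    new_id = await service.promote_item(
        doc_id="working_abc123",
        from_collection="working",
        to_collection="patterns",
        metadata={"text": "Use uv instead of pip in this repo"},
    )
    if new_id:  # e.g. "patterns_1a2b3c4d"
        service.collections["working"].delete_vectors(["working_abc123"])
----
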
    # =========================================================================
    # Cleanup
    # =========================================================================

    async def cleanup_old_working_memory(self, max_age_hours: float = 24.0) -> int:
        """
        Clean up working memory items older than specified age.

        Args:
            max_age_hours: Maximum age in hours

        Returns:
            Number of items cleaned up
        """
        try:
            working_adapter = self.collections.get("working")
            if not working_adapter:
                return 0

            cleaned_count = 0
            all_ids = working_adapter.list_all_ids()

            for doc_id in all_ids:
                doc = working_adapter.get_fragment(doc_id)
                if not doc:
                    continue

                metadata = doc.get("metadata", {})
                timestamp_str = metadata.get("timestamp", "")
                age_hours = self._calculate_age_hours(timestamp_str)

                if age_hours > max_age_hours:
                    working_adapter.delete_vectors([doc_id])
                    cleaned_count += 1
                    logger.debug(f"Cleaned up old working memory {doc_id} (age: {age_hours:.1f}h)")

            if cleaned_count > 0:
                logger.info(f"Working memory cleanup: removed {cleaned_count} old items")

            return cleaned_count

        except Exception as e:
            logger.error(f"Error in working memory cleanup: {e}")
            return 0