roampal 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- roampal/__init__.py +29 -0
- roampal/__main__.py +6 -0
- roampal/backend/__init__.py +1 -0
- roampal/backend/modules/__init__.py +1 -0
- roampal/backend/modules/memory/__init__.py +43 -0
- roampal/backend/modules/memory/chromadb_adapter.py +623 -0
- roampal/backend/modules/memory/config.py +102 -0
- roampal/backend/modules/memory/content_graph.py +543 -0
- roampal/backend/modules/memory/context_service.py +455 -0
- roampal/backend/modules/memory/embedding_service.py +96 -0
- roampal/backend/modules/memory/knowledge_graph_service.py +1052 -0
- roampal/backend/modules/memory/memory_bank_service.py +433 -0
- roampal/backend/modules/memory/memory_types.py +296 -0
- roampal/backend/modules/memory/outcome_service.py +400 -0
- roampal/backend/modules/memory/promotion_service.py +473 -0
- roampal/backend/modules/memory/routing_service.py +444 -0
- roampal/backend/modules/memory/scoring_service.py +324 -0
- roampal/backend/modules/memory/search_service.py +646 -0
- roampal/backend/modules/memory/tests/__init__.py +1 -0
- roampal/backend/modules/memory/tests/conftest.py +12 -0
- roampal/backend/modules/memory/tests/unit/__init__.py +1 -0
- roampal/backend/modules/memory/tests/unit/conftest.py +7 -0
- roampal/backend/modules/memory/tests/unit/test_knowledge_graph_service.py +517 -0
- roampal/backend/modules/memory/tests/unit/test_memory_bank_service.py +504 -0
- roampal/backend/modules/memory/tests/unit/test_outcome_service.py +485 -0
- roampal/backend/modules/memory/tests/unit/test_scoring_service.py +255 -0
- roampal/backend/modules/memory/tests/unit/test_search_service.py +413 -0
- roampal/backend/modules/memory/tests/unit/test_unified_memory_system.py +418 -0
- roampal/backend/modules/memory/unified_memory_system.py +1277 -0
- roampal/cli.py +638 -0
- roampal/hooks/__init__.py +16 -0
- roampal/hooks/session_manager.py +587 -0
- roampal/hooks/stop_hook.py +176 -0
- roampal/hooks/user_prompt_submit_hook.py +103 -0
- roampal/mcp/__init__.py +7 -0
- roampal/mcp/server.py +611 -0
- roampal/server/__init__.py +7 -0
- roampal/server/main.py +744 -0
- roampal-0.1.4.dist-info/METADATA +179 -0
- roampal-0.1.4.dist-info/RECORD +44 -0
- roampal-0.1.4.dist-info/WHEEL +5 -0
- roampal-0.1.4.dist-info/entry_points.txt +2 -0
- roampal-0.1.4.dist-info/licenses/LICENSE +190 -0
- roampal-0.1.4.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,433 @@
|
|
|
1
|
+
"""
|
|
2
|
+
MemoryBankService - Extracted from UnifiedMemorySystem
|
|
3
|
+
|
|
4
|
+
Handles memory_bank operations for user identity, preferences, and facts.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import json
|
|
8
|
+
import logging
|
|
9
|
+
import os
|
|
10
|
+
import uuid
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
from typing import Dict, Any, Optional, List, Callable, Awaitable
|
|
13
|
+
|
|
14
|
+
from .config import MemoryConfig
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class MemoryBankService:
    """
    Service layer for the memory_bank collection.

    The memory bank holds durable facts about the user:
    - Identity (name, role, company)
    - Preferences (coding style, tools)
    - Goals and projects
    - Learned facts about the user

    In contrast to working/history/patterns collections, items here:
    - are never scored by outcomes,
    - carry importance/confidence fields instead,
    - may be archived, but never promoted or demoted.
    """

    # Hard cap on stored items; enforced in store() unless benchmark mode.
    MAX_ITEMS = 500

    def __init__(
        self,
        collection: Any,
        embed_fn: Callable[[str], Awaitable[List[float]]],
        search_fn: Optional[Callable] = None,
        config: Optional[MemoryConfig] = None
    ):
        """
        Wire up the service with its storage and embedding dependencies.

        Args:
            collection: Adapter for the memory_bank vector collection
            embed_fn: Async callable mapping text to an embedding vector
            search_fn: Optional semantic-search callable used by search()
            config: Memory configuration (defaults to a fresh MemoryConfig)
        """
        self.config = config or MemoryConfig()
        self.search_fn = search_fn
        self.embed_fn = embed_fn
        self.collection = collection
|
|
58
|
+
|
|
59
|
+
async def store(
    self,
    text: str,
    tags: List[str],
    importance: float = 0.7,
    confidence: float = 0.7
) -> str:
    """
    Store user memory in memory_bank collection.

    Args:
        text: Memory content
        tags: List of tags (identity, preference, project, context, goal)
        importance: 0.0-1.0 (how critical is this memory); out-of-range
            values are clamped into the documented range
        confidence: 0.0-1.0 (how sure are we about this); out-of-range
            values are clamped into the documented range

    Returns:
        Document ID

    Raises:
        ValueError: If memory bank is at capacity
    """
    # Capacity check (skip in benchmark mode - uncapped in 0.2.8)
    benchmark_mode = os.environ.get("ROAMPAL_BENCHMARK_MODE", "").lower() == "true"
    if not benchmark_mode:
        current_count = self._get_count()
        if current_count >= self.MAX_ITEMS:
            error_msg = (
                f"Memory bank at capacity ({current_count}/{self.MAX_ITEMS}). "
                "Please archive or delete old memories."
            )
            logger.error(error_msg)
            raise ValueError(error_msg)

    # Enforce the documented [0.0, 1.0] range instead of silently
    # persisting out-of-range scores.
    importance = min(1.0, max(0.0, importance))
    confidence = min(1.0, max(0.0, confidence))

    doc_id = f"memory_bank_{uuid.uuid4().hex[:8]}"

    # Generate embedding
    embedding = await self.embed_fn(text)

    # Single timestamp so created_at and last_mentioned agree exactly.
    now = datetime.now().isoformat()

    # Build metadata
    metadata = {
        "text": text,
        "content": text,
        "tags": json.dumps(tags),
        "importance": importance,
        "confidence": confidence,
        "score": 1.0,  # Never decays
        "status": "active",
        "created_at": now,
        "mentioned_count": 1,
        "last_mentioned": now
    }

    # Store in collection
    await self.collection.upsert_vectors(
        ids=[doc_id],
        vectors=[embedding],
        metadatas=[metadata]
    )

    logger.info(f"Stored memory_bank item: {text[:50]}... (tags: {tags})")
    return doc_id
|
|
121
|
+
|
|
122
|
+
async def update(
    self,
    doc_id: str,
    new_text: str,
    reason: str = "llm_update"
) -> str:
    """
    Replace a memory's content, archiving the previous version first.

    Args:
        doc_id: Memory to update
        new_text: New content
        reason: Why updating (for audit trail)

    Returns:
        Document ID
    """
    # Fetch the existing document; fall back to a fresh store() if missing.
    existing = self.collection.get_fragment(doc_id)
    if not existing:
        logger.warning(f"Memory {doc_id} not found, creating new")
        return await self.store(new_text, tags=["updated"])

    prior_meta = existing.get("metadata", {})

    # Keep the previous version under an archive id for the audit trail.
    archive_id = f"{doc_id}_archived_{int(datetime.now().timestamp())}"
    prior_text = prior_meta.get("content", prior_meta.get("text", ""))
    prior_vector = await self.embed_fn(prior_text)

    archive_meta = dict(prior_meta)
    archive_meta.update(
        status="archived",
        original_id=doc_id,
        archive_reason=reason,
        archived_at=datetime.now().isoformat(),
    )
    await self.collection.upsert_vectors(
        ids=[archive_id],
        vectors=[prior_vector],
        metadatas=[archive_meta],
    )

    # Overwrite the live document in place with the new content.
    fresh_vector = await self.embed_fn(new_text)
    fresh_meta = dict(prior_meta)
    fresh_meta.update(
        text=new_text,
        content=new_text,
        updated_at=datetime.now().isoformat(),
        update_reason=reason,
    )
    await self.collection.upsert_vectors(
        ids=[doc_id],
        vectors=[fresh_vector],
        metadatas=[fresh_meta],
    )

    logger.info(f"Updated memory_bank item {doc_id}: {reason}")
    return doc_id
|
|
181
|
+
|
|
182
|
+
async def archive(
    self,
    doc_id: str,
    reason: str = "llm_decision"
) -> bool:
    """
    Soft-delete a memory by flipping its status to "archived".

    Args:
        doc_id: Memory to archive
        reason: Why archiving

    Returns:
        Success status (False when the memory does not exist)
    """
    record = self.collection.get_fragment(doc_id)
    if not record:
        return False

    meta = record.get("metadata", {})
    meta.update(
        status="archived",
        archive_reason=reason,
        archived_at=datetime.now().isoformat(),
    )

    self.collection.update_fragment_metadata(doc_id, meta)
    logger.info(f"Archived memory_bank item {doc_id}: {reason}")
    return True
|
|
209
|
+
|
|
210
|
+
async def search(
    self,
    query: Optional[str] = None,
    tags: Optional[List[str]] = None,
    include_archived: bool = False,
    limit: int = 20
) -> List[Dict[str, Any]]:
    """
    Search memory_bank collection with filtering.

    Args:
        query: Semantic search query (None = get all)
        tags: Filter by tags (any overlap matches)
        include_archived: Include archived memories
        limit: Max results

    Returns:
        List of memories
    """
    # Use provided search function or basic vector search
    if query and self.search_fn:
        results = await self.search_fn(
            query=query,
            collections=["memory_bank"],
            limit=limit * 2  # Get extra for filtering
        )
    else:
        # Fallback: get all and filter
        results = await self._get_all_items(limit * 2)

    # Filter by status and tags
    filtered = []
    for r in results:
        metadata = r.get("metadata", {})
        status = metadata.get("status", "active")

        # Skip archived unless requested
        if status == "archived" and not include_archived:
            continue

        # Filter by tags if specified. Malformed or non-string tag
        # metadata is treated as "no tags" rather than aborting the
        # whole search with a JSON error.
        if tags:
            raw_tags = metadata.get("tags", "[]")
            try:
                doc_tags = json.loads(raw_tags) if isinstance(raw_tags, str) else list(raw_tags)
            except (json.JSONDecodeError, TypeError, ValueError):
                doc_tags = []
            if not any(tag in doc_tags for tag in tags):
                continue

        filtered.append(r)

    return filtered[:limit]
|
|
259
|
+
|
|
260
|
+
async def restore(self, doc_id: str) -> bool:
    """
    User manually restores an archived memory to active status.

    Args:
        doc_id: Memory to restore

    Returns:
        Success status (False when the memory does not exist)
    """
    record = self.collection.get_fragment(doc_id)
    if not record:
        return False

    meta = record.get("metadata", {})
    meta.update(
        status="active",
        restored_at=datetime.now().isoformat(),
        restored_by="user",
    )

    self.collection.update_fragment_metadata(doc_id, meta)
    logger.info(f"User restored memory: {doc_id}")
    return True
|
|
282
|
+
|
|
283
|
+
async def delete(self, doc_id: str) -> bool:
    """
    Permanently remove a memory (hard delete, user-initiated).

    Args:
        doc_id: Memory to delete

    Returns:
        Success status (False when the backend delete fails)
    """
    # Best-effort: any backend failure is logged and reported as False
    # rather than propagated to the caller.
    try:
        self.collection.delete_vectors([doc_id])
        logger.info(f"User permanently deleted memory: {doc_id}")
        return True
    except Exception as e:
        logger.error(f"Failed to delete memory {doc_id}: {e}")
        return False
|
|
300
|
+
|
|
301
|
+
def get(self, doc_id: str) -> Optional[Dict[str, Any]]:
    """
    Fetch a single memory document by its ID.

    Thin pass-through to the collection adapter.

    Args:
        doc_id: Memory ID

    Returns:
        Memory document, or None when not found
    """
    return self.collection.get_fragment(doc_id)
|
|
312
|
+
|
|
313
|
+
def list_all(
    self,
    include_archived: bool = False,
    tags: Optional[List[str]] = None
) -> List[Dict[str, Any]]:
    """
    List all memories with optional filtering.

    Args:
        include_archived: Include archived memories
        tags: Filter by tags (any overlap matches)

    Returns:
        List of memories as {"id", "content", "metadata"} dicts
    """
    results = []
    for doc_id in self.collection.list_all_ids():
        doc = self.collection.get_fragment(doc_id)
        if not doc:
            continue

        metadata = doc.get("metadata", {})

        # Skip archived unless requested
        if metadata.get("status", "active") == "archived" and not include_archived:
            continue

        # Filter by tags if specified; malformed tag metadata is treated
        # as "no tags" rather than raising out of the listing.
        if tags:
            raw_tags = metadata.get("tags", "[]")
            try:
                doc_tags = json.loads(raw_tags) if isinstance(raw_tags, str) else list(raw_tags)
            except (json.JSONDecodeError, TypeError, ValueError):
                doc_tags = []
            if not any(tag in doc_tags for tag in tags):
                continue

        results.append({
            "id": doc_id,
            # store() persists the text under metadata["content"]; fall
            # back to it when the adapter exposes no top-level "content".
            "content": doc.get("content") or metadata.get("content", ""),
            "metadata": metadata
        })

    return results
|
|
354
|
+
|
|
355
|
+
def get_stats(self) -> Dict[str, Any]:
    """
    Get memory bank statistics.

    Returns:
        Dict with total/active/archived counts, capacity usage,
        per-tag counts, and average importance/confidence.
    """
    all_items = self.list_all(include_archived=True)

    active = [i for i in all_items if i["metadata"].get("status") == "active"]
    archived = [i for i in all_items if i["metadata"].get("status") == "archived"]

    # Count by tags. A single item with malformed tag metadata is
    # skipped instead of aborting the whole stats computation.
    tag_counts: Dict[str, int] = {}
    for item in active:
        raw_tags = item["metadata"].get("tags", "[]")
        try:
            tags = json.loads(raw_tags) if isinstance(raw_tags, str) else list(raw_tags)
        except (json.JSONDecodeError, TypeError, ValueError):
            continue
        for tag in tags:
            tag_counts[tag] = tag_counts.get(tag, 0) + 1

    # Average importance/confidence (0.7 is the store() default)
    importances = [i["metadata"].get("importance", 0.7) for i in active]
    confidences = [i["metadata"].get("confidence", 0.7) for i in active]

    return {
        "total": len(all_items),
        "active": len(active),
        "archived": len(archived),
        "capacity": self.MAX_ITEMS,
        "usage_percent": len(active) / self.MAX_ITEMS * 100,
        "tag_counts": tag_counts,
        "avg_importance": sum(importances) / len(importances) if importances else 0,
        "avg_confidence": sum(confidences) / len(confidences) if confidences else 0
    }
|
|
388
|
+
|
|
389
|
+
def increment_mention(self, doc_id: str) -> bool:
    """
    Bump a memory's mention counter and refresh its last-mentioned time.

    Args:
        doc_id: Memory ID

    Returns:
        Success status (False when the memory does not exist)
    """
    record = self.collection.get_fragment(doc_id)
    if not record:
        return False

    meta = record.get("metadata", {})
    meta["mentioned_count"] = meta.get("mentioned_count", 0) + 1
    meta["last_mentioned"] = datetime.now().isoformat()

    self.collection.update_fragment_metadata(doc_id, meta)
    return True
|
|
409
|
+
|
|
410
|
+
def _get_count(self) -> int:
    """
    Best-effort current item count.

    Returns 0 when the underlying backend count fails, logging a
    warning instead of raising (keeps store() usable even when the
    adapter is degraded).
    """
    try:
        return self.collection.collection.count()
    except Exception as e:
        logger.warning(f"Could not get memory_bank count: {e}")
        return 0
|
|
417
|
+
|
|
418
|
+
async def _get_all_items(self, limit: int) -> List[Dict[str, Any]]:
    """
    Fetch up to ``limit`` items directly (fallback when no search query).

    Args:
        limit: Maximum number of items to return

    Returns:
        List of {"id", "content", "metadata", "distance"} dicts;
        distance is 0 because no similarity search was performed.
    """
    results = []
    for doc_id in self.collection.list_all_ids()[:limit]:
        doc = self.collection.get_fragment(doc_id)
        if not doc:
            continue
        metadata = doc.get("metadata", {})
        results.append({
            "id": doc_id,
            # store() persists the text under metadata["content"]; fall
            # back to it when the adapter exposes no top-level "content".
            "content": doc.get("content") or metadata.get("content", ""),
            "metadata": metadata,
            "distance": 0  # No distance for direct fetch
        })

    return results