claude-memory-agent 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +107 -0
- package/README.md +200 -0
- package/agent_card.py +512 -0
- package/bin/cli.js +181 -0
- package/bin/postinstall.js +216 -0
- package/config.py +104 -0
- package/dashboard.html +2689 -0
- package/hooks/README.md +196 -0
- package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
- package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
- package/hooks/auto-detect-response.py +348 -0
- package/hooks/auto_capture.py +255 -0
- package/hooks/detect-correction.py +173 -0
- package/hooks/grounding-hook.py +348 -0
- package/hooks/log-tool-use.py +234 -0
- package/hooks/log-user-request.py +208 -0
- package/hooks/pre-tool-decision.py +218 -0
- package/hooks/problem-detector.py +343 -0
- package/hooks/session_end.py +192 -0
- package/hooks/session_start.py +227 -0
- package/install.py +887 -0
- package/main.py +2859 -0
- package/manager.py +997 -0
- package/package.json +55 -0
- package/requirements.txt +8 -0
- package/run_server.py +136 -0
- package/services/__init__.py +50 -0
- package/services/__pycache__/__init__.cpython-312.pyc +0 -0
- package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
- package/services/__pycache__/auth.cpython-312.pyc +0 -0
- package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
- package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
- package/services/__pycache__/confidence.cpython-312.pyc +0 -0
- package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
- package/services/__pycache__/database.cpython-312.pyc +0 -0
- package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
- package/services/__pycache__/insights.cpython-312.pyc +0 -0
- package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
- package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
- package/services/__pycache__/timeline.cpython-312.pyc +0 -0
- package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
- package/services/__pycache__/websocket.cpython-312.pyc +0 -0
- package/services/agent_registry.py +753 -0
- package/services/auth.py +331 -0
- package/services/auto_inject.py +250 -0
- package/services/claude_md_sync.py +275 -0
- package/services/cleanup.py +667 -0
- package/services/compaction_flush.py +447 -0
- package/services/confidence.py +301 -0
- package/services/daily_log.py +333 -0
- package/services/database.py +2485 -0
- package/services/embeddings.py +358 -0
- package/services/insights.py +632 -0
- package/services/llm_analyzer.py +595 -0
- package/services/memory_md_sync.py +409 -0
- package/services/retry_queue.py +453 -0
- package/services/timeline.py +579 -0
- package/services/vector_index.py +398 -0
- package/services/websocket.py +257 -0
- package/skills/__init__.py +6 -0
- package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
- package/skills/__pycache__/admin.cpython-312.pyc +0 -0
- package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
- package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
- package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
- package/skills/__pycache__/insights.cpython-312.pyc +0 -0
- package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
- package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
- package/skills/__pycache__/search.cpython-312.pyc +0 -0
- package/skills/__pycache__/state.cpython-312.pyc +0 -0
- package/skills/__pycache__/store.cpython-312.pyc +0 -0
- package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
- package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
- package/skills/__pycache__/verification.cpython-312.pyc +0 -0
- package/skills/admin.py +469 -0
- package/skills/checkpoint.py +198 -0
- package/skills/claude_md.py +363 -0
- package/skills/cleanup.py +241 -0
- package/skills/grounding.py +801 -0
- package/skills/insights.py +231 -0
- package/skills/natural_language.py +277 -0
- package/skills/retrieve.py +67 -0
- package/skills/search.py +213 -0
- package/skills/state.py +182 -0
- package/skills/store.py +179 -0
- package/skills/summarize.py +588 -0
- package/skills/timeline.py +387 -0
- package/skills/verification.py +391 -0
- package/start_daemon.py +155 -0
- package/test_automation.py +221 -0
- package/test_complete.py +338 -0
- package/test_full.py +322 -0
- package/update_system.py +817 -0
- package/verify_db.py +134 -0
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
"""Skills for managing CLAUDE.md instructions file."""
|
|
2
|
+
import os
|
|
3
|
+
import re
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Dict, Any, Optional, List
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def get_claude_md_path() -> Path:
    """Return the path to the user's global CLAUDE.md file.

    Resolves to ``~/.claude/CLAUDE.md``. When the file is not present yet,
    the parent directory is created so a subsequent write will succeed.
    """
    target = Path.home() / ".claude" / "CLAUDE.md"
    if not target.exists():
        # File absent: make sure ~/.claude exists for later writes.
        target.parent.mkdir(parents=True, exist_ok=True)
    return target
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def read_claude_md() -> str:
    """Return the contents of CLAUDE.md, or an empty string when it is missing."""
    target = get_claude_md_path()
    return target.read_text(encoding='utf-8') if target.exists() else ""
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def write_claude_md(content: str) -> bool:
    """Write *content* to CLAUDE.md.

    Args:
        content: Full text to write (UTF-8).

    Returns:
        True on success, False when the file could not be written.
    """
    path = get_claude_md_path()
    try:
        path.write_text(content, encoding='utf-8')
    except (OSError, UnicodeError):
        # Report I/O / encoding failures via the boolean return, matching the
        # callers that branch on this result. Narrowed from a blanket
        # `except Exception as e` (binding was unused) so genuine programming
        # errors (e.g. content is not a str) are no longer silently swallowed.
        return False
    return True
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
async def claude_md_read(
    section: Optional[str] = None
) -> Dict[str, Any]:
    """
    Read CLAUDE.md content.

    Args:
        section: Optional section header to read (e.g., "Memory System")

    Returns:
        Dict with content and metadata
    """
    text = read_claude_md()

    if not text:
        # Nothing on disk (or empty file): report absence, still success.
        return {
            "success": True,
            "exists": False,
            "content": None,
            "message": "CLAUDE.md does not exist or is empty"
        }

    if section:
        # Pull out just the requested section: from its '## <name>' header
        # up to (not including) the next '##' header or end of document.
        section_re = rf'^##\s+{re.escape(section)}.*?(?=^##|\Z)'
        found = re.search(section_re, text, re.MULTILINE | re.DOTALL | re.IGNORECASE)
        if not found:
            return {
                "success": True,
                "exists": True,
                "section": section,
                "content": None,
                "message": f"Section '{section}' not found"
            }
        return {
            "success": True,
            "exists": True,
            "section": section,
            "content": found.group(0).strip(),
            "path": str(get_claude_md_path())
        }

    # No section filter: hand back the whole document.
    return {
        "success": True,
        "exists": True,
        "content": text,
        "path": str(get_claude_md_path())
    }
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
async def claude_md_add_section(
    section_name: str,
    content: str,
    position: str = "end"
) -> Dict[str, Any]:
    """
    Add a new section to CLAUDE.md.

    Writes a level-2 markdown header (``## <section_name>``) followed by
    *content*. Fails with success=False if a header with the same name
    already exists.

    Args:
        section_name: Name for the section header (without ##)
        content: Content for the section
        position: Where to add - "end", "start", or "after:<section_name>"

    Returns:
        Dict with result
    """
    current = read_claude_md()

    # Duplicate guard: case-insensitive match of an existing '## <name>'
    # header alone on a line. Modifying is a separate skill.
    if re.search(rf'^##\s+{re.escape(section_name)}\s*$', current, re.MULTILINE | re.IGNORECASE):
        return {
            "success": False,
            "error": f"Section '{section_name}' already exists. Use claude_md_update_section to modify it."
        }

    # Build new section
    new_section = f"\n## {section_name}\n{content}\n"

    if position == "start":
        # "start" means after the document's title line (if one exists),
        # not at byte offset 0 — keeps a leading '# Title' heading first.
        if current.startswith("#"):
            lines = current.split('\n', 1)
            new_content = lines[0] + "\n" + new_section + (lines[1] if len(lines) > 1 else "")
        else:
            new_content = new_section + current
    elif position.startswith("after:"):
        # Strip the "after:" prefix (6 chars) to get the anchor section name.
        after_section = position[6:]
        pattern = rf'(^##\s+{re.escape(after_section)}.*?)(?=^##|\Z)'
        match = re.search(pattern, current, re.MULTILINE | re.DOTALL | re.IGNORECASE)
        if match:
            # Insert immediately after the anchor section's body.
            insert_pos = match.end()
            new_content = current[:insert_pos] + new_section + current[insert_pos:]
        else:
            # Anchor section not found — fall back to appending at the end.
            new_content = current + new_section
    else:  # end
        new_content = current.rstrip() + "\n" + new_section

    if write_claude_md(new_content):
        return {
            "success": True,
            "section": section_name,
            "message": f"Added section '{section_name}' to CLAUDE.md",
            "path": str(get_claude_md_path())
        }
    else:
        return {
            "success": False,
            "error": "Failed to write CLAUDE.md"
        }
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
async def claude_md_update_section(
    section_name: str,
    content: str,
    mode: str = "replace"
) -> Dict[str, Any]:
    """
    Update an existing section in CLAUDE.md.

    Args:
        section_name: Name of the section to update
        content: New content
        mode: "replace" (replace entire section), "append" (add to end), "prepend" (add to start)

    Returns:
        Dict with result
    """
    doc_text = read_claude_md()

    # Capture the header line (group 1) and the section body (group 2),
    # running up to the next '##' header or end of document.
    section_re = rf'(^##\s+{re.escape(section_name)}\s*\n)(.*?)(?=^##|\Z)'
    found = re.search(section_re, doc_text, re.MULTILINE | re.DOTALL | re.IGNORECASE)

    if found is None:
        return {
            "success": False,
            "error": f"Section '{section_name}' not found. Use claude_md_add_section to create it."
        }

    head = found.group(1)
    body = found.group(2)

    if mode == "replace":
        updated_body = content + "\n"
    elif mode == "append":
        updated_body = body.rstrip() + "\n" + content + "\n"
    elif mode == "prepend":
        updated_body = content + "\n" + body
    else:
        return {"success": False, "error": f"Unknown mode: {mode}"}

    # Splice the rewritten section back into the document in place.
    rewritten = doc_text[:found.start()] + head + updated_body + doc_text[found.end():]

    if not write_claude_md(rewritten):
        return {
            "success": False,
            "error": "Failed to write CLAUDE.md"
        }
    return {
        "success": True,
        "section": section_name,
        "mode": mode,
        "message": f"Updated section '{section_name}' in CLAUDE.md",
        "path": str(get_claude_md_path())
    }
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
async def claude_md_add_instruction(
    section_name: str,
    instruction: str,
    bullet_style: str = "-"
) -> Dict[str, Any]:
    """
    Add a single instruction/rule to a section.

    Creates the section (via claude_md_add_section) if it does not exist.
    Skips the write when the instruction text already appears in the
    section.

    Args:
        section_name: Section to add instruction to
        instruction: The instruction text
        bullet_style: Bullet character ("-", "*", or numbered like "1.")

    Returns:
        Dict with result
    """
    current = read_claude_md()

    # Group 1 = header line, group 2 = section body (up to next '##' or EOF).
    pattern = rf'(^##\s+{re.escape(section_name)}\s*\n)(.*?)(?=^##|\Z)'
    match = re.search(pattern, current, re.MULTILINE | re.DOTALL | re.IGNORECASE)

    if not match:
        # Section doesn't exist, create it with this instruction as its body.
        return await claude_md_add_section(
            section_name,
            f"{bullet_style} {instruction}"
        )

    existing_content = match.group(2).rstrip()

    # Check if instruction already exists.
    # NOTE(review): this is a case-insensitive *substring* test, so a short
    # instruction contained inside a longer existing line is treated as a
    # duplicate — presumably intentional de-duplication; confirm.
    if instruction.lower() in existing_content.lower():
        return {
            "success": True,
            "already_exists": True,
            "message": f"Instruction already exists in section '{section_name}'"
        }

    # Add the instruction as a new bullet at the end of the section body.
    new_instruction = f"\n{bullet_style} {instruction}"
    new_section_content = existing_content + new_instruction + "\n"

    # Splice header + extended body back into the document in place.
    new_content = current[:match.start()] + match.group(1) + new_section_content + current[match.end():]

    if write_claude_md(new_content):
        return {
            "success": True,
            "section": section_name,
            "instruction": instruction,
            "message": f"Added instruction to '{section_name}'",
            "path": str(get_claude_md_path())
        }
    else:
        return {
            "success": False,
            "error": "Failed to write CLAUDE.md"
        }
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
async def claude_md_list_sections() -> Dict[str, Any]:
    """
    List all sections in CLAUDE.md.

    Returns:
        Dict with list of section names
    """
    doc = read_claude_md()

    if not doc:
        return {
            "success": True,
            "sections": [],
            "message": "CLAUDE.md is empty or doesn't exist"
        }

    # Every '## <name>' header, captured without surrounding whitespace.
    headers = re.findall(r'^##\s+(.+?)\s*$', doc, re.MULTILINE)

    return {
        "success": True,
        "sections": headers,
        "count": len(headers),
        "path": str(get_claude_md_path())
    }
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
async def claude_md_suggest_from_session(
    db,
    session_id: str,
    min_importance: int = 7
) -> Dict[str, Any]:
    """
    Suggest CLAUDE.md additions based on session learnings.

    Analyzes anchors (verified facts) and high-confidence decisions
    to suggest instructions that should be persisted.

    Args:
        db: Database service
        session_id: Session to analyze
        min_importance: Minimum importance level to consider.
            NOTE(review): currently unused — kept for interface
            compatibility; wire it up once timeline events expose an
            importance field.

    Returns:
        Dict with suggestions (capped at 10) and the total count found
    """
    suggestions: List[Dict[str, Any]] = []

    # Anchors are verified facts -> candidates for a "Project Facts" section.
    events = await db.get_timeline_events(
        session_id=session_id,
        limit=100,
        anchors_only=True
    )
    for event in events:
        if event.get("is_anchor"):
            suggestions.append({
                "type": "anchor",
                "content": event["summary"],
                "suggested_section": "Project Facts",
                "reason": "Verified fact from session"
            })

    # Decisions with confidence >= 0.8 are worth persisting.
    decisions = await db.get_timeline_events(
        session_id=session_id,
        limit=50,
        event_type="decision"
    )
    for decision in decisions:
        confidence = decision.get("confidence", 0)
        if confidence >= 0.8:
            suggestions.append({
                "type": "decision",
                "content": decision["summary"],
                "confidence": confidence,
                "suggested_section": "Development Decisions",
                "reason": "High-confidence decision"
            })

    # A previous revision also queried db.search_similar(embedding=None, ...)
    # for solved error patterns, but the result was never used and
    # embedding=None is not a usable query vector. Dropped until an
    # embeddings service is plumbed through to this skill.

    return {
        "success": True,
        "suggestions": suggestions[:10],  # Limit to top 10
        "count": len(suggestions),
        "message": f"Found {len(suggestions)} potential CLAUDE.md additions"
    }
|
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""Skills for memory cleanup and maintenance.
|
|
2
|
+
|
|
3
|
+
Provides:
|
|
4
|
+
- Manual cleanup triggers
|
|
5
|
+
- Dry-run preview
|
|
6
|
+
- Archive management
|
|
7
|
+
- Configuration management
|
|
8
|
+
"""
|
|
9
|
+
from typing import Dict, Any, Optional, List
|
|
10
|
+
from services.cleanup import get_cleanup_service
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
async def memory_cleanup(
    db,
    embeddings,
    project_path: Optional[str] = None,
    dry_run: bool = True
) -> Dict[str, Any]:
    """Run memory cleanup with optional preview mode.

    Cleans up low-relevance memories (below threshold), expired memories
    (older than the retention period), and duplicates (merged by
    similarity).

    Args:
        db: Database service
        embeddings: Embeddings service
        project_path: Filter to specific project (None = all)
        dry_run: If True, only preview what would be cleaned

    Returns:
        Cleanup results with counts and details
    """
    service = get_cleanup_service(db, embeddings)
    result = await service.run_cleanup(
        project_path=project_path,
        dry_run=dry_run
    )

    archived = result['total_archived']
    deleted = result['total_deleted']
    merged = result['total_merged']

    # Attach a human-readable summary of what happened (or would happen).
    if dry_run:
        result["message"] = (
            f"DRY RUN: Would archive {archived} memories, "
            f"delete {deleted}, merge {merged} duplicates. "
            f"Run with dry_run=False to execute."
        )
    else:
        result["message"] = (
            f"Cleanup complete: Archived {archived}, "
            f"deleted {deleted}, merged {merged} duplicates."
        )

    return result
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
async def get_archived_memories(
    db,
    embeddings,
    project_path: Optional[str] = None,
    reason: Optional[str] = None,
    limit: int = 50
) -> Dict[str, Any]:
    """Get archived memories that can be restored.

    Args:
        db: Database service
        embeddings: Embeddings service (not used but kept for consistency)
        project_path: Filter by project
        reason: Filter by archive reason (low_relevance, expired, duplicate)
        limit: Maximum results

    Returns:
        List of archived memories
    """
    service = get_cleanup_service(db, embeddings)
    rows = await service.get_archived_memories(
        project_path=project_path,
        reason=reason,
        limit=limit
    )

    return {
        "success": True,
        "archives": rows,
        "count": len(rows),
        "filters": {
            "project_path": project_path,
            "reason": reason
        }
    }
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
async def restore_memory(
    db,
    embeddings,
    archive_id: int
) -> Dict[str, Any]:
    """Restore an archived memory back to active storage.

    Args:
        db: Database service
        embeddings: Embeddings service
        archive_id: ID of the archived memory to restore

    Returns:
        Restoration result with new memory ID
    """
    # Thin delegation: the cleanup service owns the restore logic.
    return await get_cleanup_service(db, embeddings).restore_memory(archive_id)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
async def get_cleanup_config(
    db,
    embeddings,
    project_path: Optional[str] = None
) -> Dict[str, Any]:
    """Get cleanup configuration for a project.

    Args:
        db: Database service
        embeddings: Embeddings service
        project_path: Project to get config for (None = defaults)

    Returns:
        Cleanup configuration settings
    """
    service = get_cleanup_service(db, embeddings)
    settings = await service.get_config(project_path)
    return {
        "success": True,
        "project_path": project_path,
        "config": settings
    }
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
async def set_cleanup_config(
    db,
    embeddings,
    project_path: Optional[str] = None,
    retention_days: Optional[int] = None,
    min_relevance_score: Optional[float] = None,
    keep_high_importance: Optional[bool] = None,
    importance_threshold: Optional[int] = None,
    dedup_enabled: Optional[bool] = None,
    dedup_threshold: Optional[float] = None,
    archive_before_delete: Optional[bool] = None,
    auto_cleanup_enabled: Optional[bool] = None
) -> Dict[str, Any]:
    """Update cleanup configuration for a project.

    Only the arguments that are not None overwrite the stored values;
    everything else keeps its current setting.

    Args:
        db: Database service
        embeddings: Embeddings service
        project_path: Project to configure
        retention_days: Days to keep memories before cleanup
        min_relevance_score: Minimum relevance score to keep
        keep_high_importance: Whether to protect high-importance memories
        importance_threshold: What counts as "high importance"
        dedup_enabled: Whether to deduplicate
        dedup_threshold: Similarity threshold for duplicates
        archive_before_delete: Whether to archive before deleting
        auto_cleanup_enabled: Whether to run automatic cleanup

    Returns:
        Updated configuration
    """
    cleanup = get_cleanup_service(db, embeddings)

    # Start from the stored config and overlay only the provided values.
    # (Replaces nine copy-pasted `if x is not None:` assignments with one
    # filtered merge — same behavior, less repetition.)
    current = await cleanup.get_config(project_path)
    overrides = {
        "retention_days": retention_days,
        "min_relevance_score": min_relevance_score,
        "keep_high_importance": keep_high_importance,
        "importance_threshold": importance_threshold,
        "dedup_enabled": dedup_enabled,
        "dedup_threshold": dedup_threshold,
        "archive_before_delete": archive_before_delete,
        "auto_cleanup_enabled": auto_cleanup_enabled,
    }
    current.update({key: value for key, value in overrides.items() if value is not None})

    await cleanup.save_config(project_path, current)

    return {
        "success": True,
        "project_path": project_path,
        "config": current,
        "message": "Configuration updated"
    }
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
async def get_cleanup_stats(
    db,
    embeddings
) -> Dict[str, Any]:
    """Get overall cleanup statistics.

    Args:
        db: Database service
        embeddings: Embeddings service

    Returns:
        Cleanup statistics including recent activity
    """
    service = get_cleanup_service(db, embeddings)
    return {
        "success": True,
        "stats": await service.get_cleanup_stats()
    }
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
async def purge_expired_archives(
    db,
    embeddings
) -> Dict[str, Any]:
    """Permanently delete archived memories past their expiration.

    This action is irreversible. Only call when you're sure you want to
    permanently remove old archives.

    Args:
        db: Database service
        embeddings: Embeddings service

    Returns:
        Purge results
    """
    # Thin delegation: the cleanup service performs the actual purge.
    return await get_cleanup_service(db, embeddings).purge_expired_archives()
|