claude-memory-agent 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +107 -0
- package/README.md +200 -0
- package/agent_card.py +512 -0
- package/bin/cli.js +181 -0
- package/bin/postinstall.js +216 -0
- package/config.py +104 -0
- package/dashboard.html +2689 -0
- package/hooks/README.md +196 -0
- package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
- package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
- package/hooks/auto-detect-response.py +348 -0
- package/hooks/auto_capture.py +255 -0
- package/hooks/detect-correction.py +173 -0
- package/hooks/grounding-hook.py +348 -0
- package/hooks/log-tool-use.py +234 -0
- package/hooks/log-user-request.py +208 -0
- package/hooks/pre-tool-decision.py +218 -0
- package/hooks/problem-detector.py +343 -0
- package/hooks/session_end.py +192 -0
- package/hooks/session_start.py +227 -0
- package/install.py +887 -0
- package/main.py +2859 -0
- package/manager.py +997 -0
- package/package.json +55 -0
- package/requirements.txt +8 -0
- package/run_server.py +136 -0
- package/services/__init__.py +50 -0
- package/services/__pycache__/__init__.cpython-312.pyc +0 -0
- package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
- package/services/__pycache__/auth.cpython-312.pyc +0 -0
- package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
- package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
- package/services/__pycache__/confidence.cpython-312.pyc +0 -0
- package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
- package/services/__pycache__/database.cpython-312.pyc +0 -0
- package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
- package/services/__pycache__/insights.cpython-312.pyc +0 -0
- package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
- package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
- package/services/__pycache__/timeline.cpython-312.pyc +0 -0
- package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
- package/services/__pycache__/websocket.cpython-312.pyc +0 -0
- package/services/agent_registry.py +753 -0
- package/services/auth.py +331 -0
- package/services/auto_inject.py +250 -0
- package/services/claude_md_sync.py +275 -0
- package/services/cleanup.py +667 -0
- package/services/compaction_flush.py +447 -0
- package/services/confidence.py +301 -0
- package/services/daily_log.py +333 -0
- package/services/database.py +2485 -0
- package/services/embeddings.py +358 -0
- package/services/insights.py +632 -0
- package/services/llm_analyzer.py +595 -0
- package/services/memory_md_sync.py +409 -0
- package/services/retry_queue.py +453 -0
- package/services/timeline.py +579 -0
- package/services/vector_index.py +398 -0
- package/services/websocket.py +257 -0
- package/skills/__init__.py +6 -0
- package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
- package/skills/__pycache__/admin.cpython-312.pyc +0 -0
- package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
- package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
- package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
- package/skills/__pycache__/insights.cpython-312.pyc +0 -0
- package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
- package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
- package/skills/__pycache__/search.cpython-312.pyc +0 -0
- package/skills/__pycache__/state.cpython-312.pyc +0 -0
- package/skills/__pycache__/store.cpython-312.pyc +0 -0
- package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
- package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
- package/skills/__pycache__/verification.cpython-312.pyc +0 -0
- package/skills/admin.py +469 -0
- package/skills/checkpoint.py +198 -0
- package/skills/claude_md.py +363 -0
- package/skills/cleanup.py +241 -0
- package/skills/grounding.py +801 -0
- package/skills/insights.py +231 -0
- package/skills/natural_language.py +277 -0
- package/skills/retrieve.py +67 -0
- package/skills/search.py +213 -0
- package/skills/state.py +182 -0
- package/skills/store.py +179 -0
- package/skills/summarize.py +588 -0
- package/skills/timeline.py +387 -0
- package/skills/verification.py +391 -0
- package/start_daemon.py +155 -0
- package/test_automation.py +221 -0
- package/test_complete.py +338 -0
- package/test_full.py +322 -0
- package/update_system.py +817 -0
- package/verify_db.py +134 -0
|
"""Pre-Compaction Flush Service - Export memories before context loss.

Since Claude Code doesn't expose a pre-compaction hook, this service uses
heuristic-based flush detection:
- Flush if events_since_checkpoint > 50
- Flush if session active > 30 minutes without flush

Creates flush_YYYYMMDD_HHMMSS.md files in <project>/.claude/memory/
"""
# NOTE(review): `os` and `timedelta` appear unused in this module — confirm
# against the rest of the file before removing.
import os
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Optional, Dict, Any, List

# Module-level logger; handlers and level are configured by the host app.
logger = logging.getLogger(__name__)

# Flush thresholds — defaults only; check_flush_needed accepts overrides.
EVENT_THRESHOLD = 50  # Flush after this many events
TIME_THRESHOLD_MINUTES = 30  # Flush after this many minutes
+
def get_flush_path(project_path: str, timestamp: Optional[datetime] = None) -> Path:
    """Build the path of a flush markdown file under <project>/.claude/memory.

    Args:
        project_path: Root path of the project
        timestamp: Timestamp embedded in the filename (defaults to now)

    Returns:
        Path to the flush markdown file. The memory directory is created as
        a side effect if it does not already exist.
    """
    ts = datetime.now() if timestamp is None else timestamp

    # Use forward slashes and drop any trailing separator for consistency
    # across platforms.
    clean_root = project_path.replace("\\", "/").rstrip("/")

    # Ensure <project>/.claude/memory exists before handing back a path
    # pointing into it.
    memory_dir = Path(clean_root) / ".claude" / "memory"
    memory_dir.mkdir(parents=True, exist_ok=True)

    # Filename carries the full timestamp: flush_YYYYMMDD_HHMMSS.md
    return memory_dir / ts.strftime("flush_%Y%m%d_%H%M%S.md")
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
async def check_flush_needed(
    db,
    session_id: str,
    event_threshold: int = EVENT_THRESHOLD,
    time_threshold_minutes: int = TIME_THRESHOLD_MINUTES
) -> Dict[str, Any]:
    """Check if a pre-compaction flush is needed.

    Uses heuristics since Claude Code doesn't expose compaction hooks:
    1. Event count since last checkpoint
    2. Time since last flush or session start

    Args:
        db: Database service instance (exposes a sqlite3-style ``.conn``)
        session_id: Current session ID
        event_threshold: Number of events to trigger flush
        time_threshold_minutes: Minutes since last flush to trigger

    Returns:
        Dict with flush_needed flag and reason(s)
    """
    cursor = db.conn.cursor()

    # Get session state
    cursor.execute("""
        SELECT last_checkpoint_id, last_flush_at, events_since_checkpoint, created_at
        FROM session_state
        WHERE session_id = ?
    """, (session_id,))
    row = cursor.fetchone()

    if not row:
        return {
            "flush_needed": False,
            "reason": "no_session_state"
        }

    state = dict(row)
    # BUG FIX: a NULL events_since_checkpoint column yields None (the dict
    # key exists, so .get's default does not apply) and the `>=` comparison
    # below would raise TypeError; coerce to 0.
    events_count = state.get("events_since_checkpoint") or 0
    last_flush = state.get("last_flush_at")
    session_start = state.get("created_at")

    reasons = []

    # Check event threshold
    if events_count >= event_threshold:
        reasons.append(f"events_threshold ({events_count} >= {event_threshold})")

    # Check time threshold: prefer the last flush time, else session start.
    reference_time = last_flush if last_flush else session_start
    if reference_time:
        try:
            ref_dt = datetime.fromisoformat(reference_time.replace("Z", "+00:00"))
            if ref_dt.tzinfo:
                # BUG FIX: previously tzinfo was stripped without converting,
                # which compared a UTC wall-clock against local time and
                # skewed the elapsed minutes by the UTC offset. Convert to
                # local time first, then drop tzinfo so subtracting from
                # naive datetime.now() is meaningful.
                ref_dt = ref_dt.astimezone().replace(tzinfo=None)
            minutes_elapsed = (datetime.now() - ref_dt).total_seconds() / 60

            if minutes_elapsed >= time_threshold_minutes:
                reasons.append(f"time_threshold ({minutes_elapsed:.1f}min >= {time_threshold_minutes}min)")
        except Exception as e:
            # Best-effort: an unparseable timestamp disables the time check
            # but must not block the event-count heuristic.
            logger.warning(f"Failed to parse reference time: {e}")

    return {
        "flush_needed": len(reasons) > 0,
        "reasons": reasons,
        "events_since_checkpoint": events_count,
        "session_id": session_id
    }
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
async def format_flush_markdown(
    db,
    session_id: str,
    project_path: str
) -> str:
    """Format the flush content as markdown.

    Gathers all important session data (high-importance decisions, anchors,
    recent timeline events, session state, solved errors) for a
    human-readable export.

    Args:
        db: Database service instance (exposes a sqlite3-style ``.conn``)
        session_id: Session to flush
        project_path: Project path for context

    Returns:
        Formatted markdown content
    """
    import json

    from services.database import normalize_path
    # NOTE(review): normalized_path is computed but never used below; the
    # call is kept in case normalize_path has side effects — confirm and
    # drop if it is pure.
    normalized_path = normalize_path(project_path)

    cursor = db.conn.cursor()
    now = datetime.now()

    lines = [
        f"# Memory Flush - {now.strftime('%Y-%m-%d %H:%M:%S')}",
        f"Session: `{session_id}`",
        f"Project: `{project_path}`",
        "",
        "---",
        ""
    ]

    # Get high-importance decisions from this session
    cursor.execute("""
        SELECT content, importance, created_at, outcome
        FROM memories
        WHERE session_id = ?
        AND type = 'decision'
        AND importance >= 7
        ORDER BY importance DESC, created_at DESC
        LIMIT 10
    """, (session_id,))
    decisions = cursor.fetchall()

    if decisions:
        lines.append("## Important Decisions")
        lines.append("")
        for d in decisions:
            d = dict(d)
            # BUG FIX: NULL columns arrive as None with the key present, so
            # .get's default does not apply; `or ""` prevents TypeError on
            # the slice below.
            content = (d.get("content") or "")[:300]
            importance = d.get("importance", 5)
            outcome = d.get("outcome")
            lines.append(f"### Decision (importance: {importance})")
            lines.append(content)
            if outcome:
                lines.append(f"\n**Outcome**: {outcome}")
            lines.append("")

    # Get anchors (verified facts) from this session
    cursor.execute("""
        SELECT summary, details, created_at
        FROM timeline_events
        WHERE session_id = ?
        AND is_anchor = 1
        ORDER BY created_at DESC
        LIMIT 15
    """, (session_id,))
    anchors = cursor.fetchall()

    if anchors:
        lines.append("## Anchors (Verified Facts)")
        lines.append("")
        for a in anchors:
            a = dict(a)
            summary = a.get("summary") or ""
            details = a.get("details")
            lines.append(f"- {summary}")
            if details:
                lines.append(f"  - Details: {details[:150]}")
        lines.append("")

    # Get recent events
    cursor.execute("""
        SELECT event_type, summary, created_at, status
        FROM timeline_events
        WHERE session_id = ?
        ORDER BY created_at DESC
        LIMIT 30
    """, (session_id,))
    events = cursor.fetchall()

    if events:
        lines.append("## Recent Actions")
        lines.append("")
        for e in events:
            e = dict(e)
            # `or` coercion so NULL columns render as the intended defaults
            # instead of the string "None" (event_type) or a TypeError on
            # slicing (summary / created_at).
            event_type = e.get("event_type") or "unknown"
            summary = (e.get("summary") or "")[:100]
            timestamp = (e.get("created_at") or "")[:19]
            status = e.get("status") or ""
            status_str = f" [{status}]" if status else ""
            lines.append(f"- [{timestamp}] **{event_type}**{status_str}: {summary}")
        lines.append("")

    # Get session state
    cursor.execute("""
        SELECT current_goal, pending_questions, decisions_summary, entity_registry
        FROM session_state
        WHERE session_id = ?
    """, (session_id,))
    state_row = cursor.fetchone()

    if state_row:
        state = dict(state_row)
        if state.get("current_goal"):
            lines.append("## Current Goal")
            lines.append(state["current_goal"])
            lines.append("")

        if state.get("pending_questions"):
            try:
                questions = json.loads(state["pending_questions"])
                if questions:
                    lines.append("## Pending Questions")
                    for q in questions:
                        lines.append(f"- {q}")
                    lines.append("")
            except Exception:
                # Malformed JSON is non-fatal for a best-effort export.
                pass

        if state.get("entity_registry"):
            try:
                registry = json.loads(state["entity_registry"])
                if registry:
                    lines.append("## Entity Registry")
                    # Cap at 20 entries to keep the export readable.
                    for key, value in list(registry.items())[:20]:
                        lines.append(f"- `{key}`: {value}")
                    lines.append("")
            except Exception:
                pass

    # Get errors solved in this session
    cursor.execute("""
        SELECT content, outcome
        FROM memories
        WHERE session_id = ?
        AND type = 'error'
        AND success = 1
        ORDER BY created_at DESC
        LIMIT 5
    """, (session_id,))
    errors = cursor.fetchall()

    if errors:
        lines.append("## Errors Solved")
        lines.append("")
        for e in errors:
            e = dict(e)
            content = (e.get("content") or "")[:200]
            # BUG FIX: a NULL outcome previously raised
            # "TypeError: 'NoneType' object is not subscriptable" here.
            outcome = (e.get("outcome") or "")[:100]
            lines.append(f"- **Error**: {content}")
            if outcome:
                lines.append(f"  - **Solution**: {outcome}")
        lines.append("")

    lines.append("---")
    lines.append(f"*Generated at {now.isoformat()}*")

    return "\n".join(lines)
|
|
290
|
+
|
|
291
|
+
|
|
292
|
+
async def execute_flush(
    db,
    project_path: str,
    session_id: str
) -> Dict[str, Any]:
    """Execute a pre-compaction flush.

    Exports all important session data to a markdown file and resets the
    session's checkpoint event counter.

    Args:
        db: Database service instance
        project_path: Root path of the project
        session_id: Session to flush

    Returns:
        Dict with flush results
    """
    now = datetime.now()
    target = get_flush_path(project_path, now)

    try:
        # Render and persist the markdown export.
        markdown = await format_flush_markdown(db, session_id, project_path)
        target.write_text(markdown, encoding="utf-8")

        # Record the flush time and reset the event counter so the
        # heuristics in check_flush_needed start fresh.
        cur = db.conn.cursor()
        cur.execute("""
            UPDATE session_state
            SET last_flush_at = ?,
                events_since_checkpoint = 0
            WHERE session_id = ?
        """, (now.isoformat(), session_id))
        db.conn.commit()

        return {
            "success": True,
            "file_path": str(target),
            "flushed_at": now.isoformat(),
            "session_id": session_id,
            "content_length": len(markdown)
        }

    except Exception as e:
        # Best-effort service: surface the failure in the result rather
        # than raising.
        logger.error(f"Failed to execute flush: {e}")
        return {
            "success": False,
            "error": str(e)
        }
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
async def list_flushes(
    project_path: str,
    limit: int = 20
) -> Dict[str, Any]:
    """List available flush files for a project, newest first.

    Args:
        project_path: Root path of the project
        limit: Maximum number of flushes to list

    Returns:
        Dict with list of flush files
    """
    memory_dir = Path(project_path) / ".claude" / "memory"

    if not memory_dir.exists():
        return {
            "success": True,
            "flushes": [],
            "total_count": 0
        }

    entries: List[Dict[str, Any]] = []
    # Reverse-sorted filenames put the newest flush first, because the
    # flush_YYYYMMDD_HHMMSS naming sorts lexicographically by time.
    for flush_file in sorted(memory_dir.glob("flush_*.md"), reverse=True):
        if len(entries) >= limit:
            break

        try:
            info = flush_file.stat()
            # The stem minus its "flush_" prefix is the timestamp portion.
            stamp = flush_file.stem.replace("flush_", "")

            entries.append({
                "filename": flush_file.name,
                "path": str(flush_file),
                "timestamp": stamp,
                "size_bytes": info.st_size,
                "modified": datetime.fromtimestamp(info.st_mtime).isoformat()
            })
        except Exception as e:
            # Skip unreadable files; listing the rest is still useful.
            logger.warning(f"Failed to process flush file {flush_file}: {e}")

    return {
        "success": True,
        "flushes": entries,
        "total_count": len(entries)
    }
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
async def read_flush(
    project_path: str,
    filename: Optional[str] = None
) -> Dict[str, Any]:
    """Read a flush file, defaulting to the most recent one.

    Args:
        project_path: Root path of the project
        filename: Specific flush filename (defaults to most recent)

    Returns:
        Dict with flush content
    """
    memory_dir = Path(project_path) / ".claude" / "memory"

    if not memory_dir.exists():
        return {
            "success": False,
            "error": "No memory directory found"
        }

    if filename:
        target = memory_dir / filename
    else:
        # No explicit file requested: newest-first lexicographic sort works
        # because the filename embeds flush_YYYYMMDD_HHMMSS.
        candidates = sorted(memory_dir.glob("flush_*.md"), reverse=True)
        if not candidates:
            return {
                "success": False,
                "error": "No flush files found"
            }
        target = candidates[0]

    if not target.exists():
        return {
            "success": False,
            "error": f"Flush file not found: {target}"
        }

    try:
        text = target.read_text(encoding="utf-8")
    except Exception as e:
        logger.error(f"Failed to read flush file: {e}")
        return {
            "success": False,
            "error": str(e)
        }

    return {
        "success": True,
        "content": text,
        "file_path": str(target),
        "filename": target.name
    }