claude-memory-agent 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +107 -0
- package/README.md +200 -0
- package/agent_card.py +512 -0
- package/bin/cli.js +181 -0
- package/bin/postinstall.js +216 -0
- package/config.py +104 -0
- package/dashboard.html +2689 -0
- package/hooks/README.md +196 -0
- package/hooks/__pycache__/auto-detect-response.cpython-312.pyc +0 -0
- package/hooks/__pycache__/auto_capture.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_end.cpython-312.pyc +0 -0
- package/hooks/__pycache__/session_start.cpython-312.pyc +0 -0
- package/hooks/auto-detect-response.py +348 -0
- package/hooks/auto_capture.py +255 -0
- package/hooks/detect-correction.py +173 -0
- package/hooks/grounding-hook.py +348 -0
- package/hooks/log-tool-use.py +234 -0
- package/hooks/log-user-request.py +208 -0
- package/hooks/pre-tool-decision.py +218 -0
- package/hooks/problem-detector.py +343 -0
- package/hooks/session_end.py +192 -0
- package/hooks/session_start.py +227 -0
- package/install.py +887 -0
- package/main.py +2859 -0
- package/manager.py +997 -0
- package/package.json +55 -0
- package/requirements.txt +8 -0
- package/run_server.py +136 -0
- package/services/__init__.py +50 -0
- package/services/__pycache__/__init__.cpython-312.pyc +0 -0
- package/services/__pycache__/agent_registry.cpython-312.pyc +0 -0
- package/services/__pycache__/auth.cpython-312.pyc +0 -0
- package/services/__pycache__/auto_inject.cpython-312.pyc +0 -0
- package/services/__pycache__/claude_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/services/__pycache__/compaction_flush.cpython-312.pyc +0 -0
- package/services/__pycache__/confidence.cpython-312.pyc +0 -0
- package/services/__pycache__/daily_log.cpython-312.pyc +0 -0
- package/services/__pycache__/database.cpython-312.pyc +0 -0
- package/services/__pycache__/embeddings.cpython-312.pyc +0 -0
- package/services/__pycache__/insights.cpython-312.pyc +0 -0
- package/services/__pycache__/llm_analyzer.cpython-312.pyc +0 -0
- package/services/__pycache__/memory_md_sync.cpython-312.pyc +0 -0
- package/services/__pycache__/retry_queue.cpython-312.pyc +0 -0
- package/services/__pycache__/timeline.cpython-312.pyc +0 -0
- package/services/__pycache__/vector_index.cpython-312.pyc +0 -0
- package/services/__pycache__/websocket.cpython-312.pyc +0 -0
- package/services/agent_registry.py +753 -0
- package/services/auth.py +331 -0
- package/services/auto_inject.py +250 -0
- package/services/claude_md_sync.py +275 -0
- package/services/cleanup.py +667 -0
- package/services/compaction_flush.py +447 -0
- package/services/confidence.py +301 -0
- package/services/daily_log.py +333 -0
- package/services/database.py +2485 -0
- package/services/embeddings.py +358 -0
- package/services/insights.py +632 -0
- package/services/llm_analyzer.py +595 -0
- package/services/memory_md_sync.py +409 -0
- package/services/retry_queue.py +453 -0
- package/services/timeline.py +579 -0
- package/services/vector_index.py +398 -0
- package/services/websocket.py +257 -0
- package/skills/__init__.py +6 -0
- package/skills/__pycache__/__init__.cpython-312.pyc +0 -0
- package/skills/__pycache__/admin.cpython-312.pyc +0 -0
- package/skills/__pycache__/checkpoint.cpython-312.pyc +0 -0
- package/skills/__pycache__/claude_md.cpython-312.pyc +0 -0
- package/skills/__pycache__/cleanup.cpython-312.pyc +0 -0
- package/skills/__pycache__/grounding.cpython-312.pyc +0 -0
- package/skills/__pycache__/insights.cpython-312.pyc +0 -0
- package/skills/__pycache__/natural_language.cpython-312.pyc +0 -0
- package/skills/__pycache__/retrieve.cpython-312.pyc +0 -0
- package/skills/__pycache__/search.cpython-312.pyc +0 -0
- package/skills/__pycache__/state.cpython-312.pyc +0 -0
- package/skills/__pycache__/store.cpython-312.pyc +0 -0
- package/skills/__pycache__/summarize.cpython-312.pyc +0 -0
- package/skills/__pycache__/timeline.cpython-312.pyc +0 -0
- package/skills/__pycache__/verification.cpython-312.pyc +0 -0
- package/skills/admin.py +469 -0
- package/skills/checkpoint.py +198 -0
- package/skills/claude_md.py +363 -0
- package/skills/cleanup.py +241 -0
- package/skills/grounding.py +801 -0
- package/skills/insights.py +231 -0
- package/skills/natural_language.py +277 -0
- package/skills/retrieve.py +67 -0
- package/skills/search.py +213 -0
- package/skills/state.py +182 -0
- package/skills/store.py +179 -0
- package/skills/summarize.py +588 -0
- package/skills/timeline.py +387 -0
- package/skills/verification.py +391 -0
- package/start_daemon.py +155 -0
- package/test_automation.py +221 -0
- package/test_complete.py +338 -0
- package/test_full.py +322 -0
- package/update_system.py +817 -0
- package/verify_db.py +134 -0
|
@@ -0,0 +1,409 @@
|
|
|
1
|
+
"""MEMORY.md Sync Service - Moltbot-inspired core facts file.
|
|
2
|
+
|
|
3
|
+
Maintains a single MEMORY.md file per project containing:
|
|
4
|
+
- Anchors (verified facts)
|
|
5
|
+
- Key decisions (importance >= 7)
|
|
6
|
+
- Proven patterns (success_count >= 3)
|
|
7
|
+
- User preferences
|
|
8
|
+
|
|
9
|
+
Storage: <project>/.claude/MEMORY.md
|
|
10
|
+
"""
|
|
11
|
+
import os
|
|
12
|
+
import hashlib
|
|
13
|
+
import logging
|
|
14
|
+
from datetime import datetime
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Optional, List, Dict, Any
|
|
17
|
+
|
|
18
|
+
logger = logging.getLogger(__name__)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def get_memory_md_path(project_path: str) -> Path:
    """Resolve (and ensure) the MEMORY.md location for a project.

    Args:
        project_path: Root path of the project

    Returns:
        Path to ``<project>/.claude/MEMORY.md``. The ``.claude`` directory
        is created on demand as a side effect of this call.
    """
    # Normalize separators and strip a trailing slash before building paths.
    normalized = project_path.replace("\\", "/").rstrip("/")

    # MEMORY.md lives directly inside the project's .claude folder.
    memory_dir = Path(normalized) / ".claude"
    memory_dir.mkdir(parents=True, exist_ok=True)

    return memory_dir / "MEMORY.md"
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def _format_memory_md(
    anchors: List[Dict[str, Any]],
    decisions: List[Dict[str, Any]],
    patterns: List[Dict[str, Any]],
    preferences: List[Dict[str, Any]],
    last_updated: datetime
) -> str:
    """Render the MEMORY.md document from pre-queried rows.

    Args:
        anchors: List of verified facts/anchors
        decisions: List of important decisions
        patterns: List of proven solution patterns
        preferences: List of user preferences
        last_updated: Timestamp for the file

    Returns:
        Formatted markdown content. Sections are emitted only for
        non-empty inputs.
    """
    out: List[str] = []
    emit = out.append

    emit("# MEMORY.md - Core Facts")
    emit(f"Last updated: {last_updated.strftime('%Y-%m-%d %H:%M:%S')}")
    emit("")
    emit("<!-- This file is auto-generated from the memory database. -->")
    emit("<!-- High-importance memories and proven patterns are synced here. -->")
    emit("")

    if anchors:
        emit("## Anchors (Verified Facts)")
        emit("")
        for row in anchors:
            # Rows may carry the text under either "fact" or "content".
            fact = row.get("fact", row.get("content", ""))
            created = row.get("created_at")
            # created_at is an ISO timestamp; the first 10 chars are the date.
            emit(f"- [{created[:10]}] {fact}" if created else f"- {fact}")
        emit("")

    if decisions:
        emit("## Key Decisions")
        emit("")
        for row in decisions:
            # Cap decision text at 200 chars to keep the file compact.
            text = row.get("content", "")[:200]
            created = row.get("created_at")
            rank = row.get("importance", 5)
            prefix = f"- [{created[:10]}] " if created else "- "
            emit(f"{prefix}(imp:{rank}) {text}")
        emit("")

    if patterns:
        emit("## Patterns (Proven Solutions)")
        emit("")
        for row in patterns:
            emit(f"### {row.get('name', 'Unnamed')} (used: {row.get('success_count', 0)}x)")
            # Cap solution text at 150 chars.
            emit(row.get("solution", "")[:150])
            emit("")

    if preferences:
        emit("## Preferences")
        emit("")
        for row in preferences:
            emit(f"- {row.get('content', '')}")
        emit("")

    return "\n".join(out)
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
async def sync_to_memory_md(
    db,
    project_path: str,
    min_importance: int = 7,
    min_pattern_success: int = 3
) -> Dict[str, Any]:
    """Sync high-importance memories to MEMORY.md.

    Queries the database for important content and writes to MEMORY.md.

    Args:
        db: Database service instance (must expose a sqlite-style ``conn``)
        project_path: Root path of the project
        min_importance: Minimum importance level for decisions (default 7)
        min_pattern_success: Minimum success count for patterns (default 3)

    Returns:
        Dict with sync results (file path, content hash, per-section counts),
        or ``{"success": False, "error": ...}`` if the file write failed.
    """
    from services.database import normalize_path

    normalized_path = normalize_path(project_path)
    cursor = db.conn.cursor()

    def query(label: str, sql: str, params: tuple) -> List[Dict[str, Any]]:
        # Best-effort query: a missing table/column downgrades to a warning
        # and an empty section instead of aborting the whole sync.
        try:
            cursor.execute(sql, params)
            return [dict(row) for row in cursor.fetchall()]
        except Exception as e:
            logger.warning(f"Failed to query {label}: {e}")
            return []

    # Anchors come from timeline_events flagged is_anchor=1.
    anchors = query("anchors", """
        SELECT summary as fact, created_at
        FROM timeline_events
        WHERE is_anchor = 1
        AND (project_path = ? OR project_path IS NULL)
        ORDER BY created_at DESC
        LIMIT 20
    """, (normalized_path,))

    # High-importance decisions.
    decisions = query("decisions", """
        SELECT content, importance, created_at
        FROM memories
        WHERE type = 'decision'
        AND importance >= ?
        AND (project_path = ? OR project_path IS NULL)
        ORDER BY importance DESC, created_at DESC
        LIMIT 15
    """, (min_importance, normalized_path))

    # Proven patterns (NOTE(review): no project filter here — presumably the
    # patterns table is global; confirm against its schema).
    patterns = query("patterns", """
        SELECT name, solution, success_count
        FROM patterns
        WHERE success_count >= ?
        ORDER BY success_count DESC
        LIMIT 10
    """, (min_pattern_success,))

    # User preferences (memories with type='preference').
    preferences = query("preferences", """
        SELECT content, importance
        FROM memories
        WHERE type = 'preference'
        AND (project_path = ? OR project_path IS NULL)
        ORDER BY importance DESC, created_at DESC
        LIMIT 10
    """, (normalized_path,))

    # Format and write MEMORY.md.
    now = datetime.now()
    content = _format_memory_md(anchors, decisions, patterns, preferences, now)
    memory_path = get_memory_md_path(project_path)

    try:
        # Short content hash lets callers detect whether anything changed.
        content_hash = hashlib.md5(content.encode()).hexdigest()[:16]

        memory_path.write_text(content, encoding="utf-8")

        return {
            "success": True,
            "file_path": str(memory_path),
            "synced_at": now.isoformat(),
            "content_hash": content_hash,
            "counts": {
                "anchors": len(anchors),
                "decisions": len(decisions),
                "patterns": len(patterns),
                "preferences": len(preferences)
            }
        }

    except Exception as e:
        logger.error(f"Failed to write MEMORY.md: {e}")
        return {
            "success": False,
            "error": str(e)
        }
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
async def read_memory_md(project_path: str) -> Dict[str, Any]:
    """Read the MEMORY.md file for a project.

    Args:
        project_path: Root path of the project

    Returns:
        Dict with file content and size/mtime metadata. A missing file is
        reported as ``exists: False`` with empty content, not as an error.
    """
    memory_path = get_memory_md_path(project_path)

    if not memory_path.exists():
        return {
            "success": True,
            "exists": False,
            "content": "",
            "file_path": str(memory_path)
        }

    try:
        text = memory_path.read_text(encoding="utf-8")
        info = memory_path.stat()
    except Exception as e:
        logger.error(f"Failed to read MEMORY.md: {e}")
        return {
            "success": False,
            "error": str(e)
        }

    return {
        "success": True,
        "exists": True,
        "content": text,
        "file_path": str(memory_path),
        "size_bytes": info.st_size,
        "modified": datetime.fromtimestamp(info.st_mtime).isoformat()
    }
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
async def add_fact(
    project_path: str,
    fact: str,
    section: str = "anchors"
) -> Dict[str, Any]:
    """Add a fact directly to MEMORY.md without going through the database.

    This is for quick additions that should persist immediately.

    Args:
        project_path: Root path of the project
        fact: The fact/decision/preference to add
        section: Section to add to (anchors, decisions, patterns,
            preferences); unknown values fall back to "anchors"

    Returns:
        Dict with success status, or ``{"success": False, "error": ...}``
        on I/O failure.
    """
    memory_path = get_memory_md_path(project_path)

    try:
        # Read existing content or start a fresh skeleton.
        if memory_path.exists():
            content = memory_path.read_text(encoding="utf-8")
        else:
            content = f"# MEMORY.md - Core Facts\nLast updated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n"

        # New bullet, stamped with today's date.
        date_str = datetime.now().strftime("%Y-%m-%d")
        new_line = f"- [{date_str}] {fact}\n"

        section_headers = {
            "anchors": "## Anchors (Verified Facts)",
            "decisions": "## Key Decisions",
            "patterns": "## Patterns (Proven Solutions)",
            "preferences": "## Preferences"
        }

        target_header = section_headers.get(section, section_headers["anchors"])

        if target_header in content:
            # BUG FIX: split with maxsplit=1 so we always get exactly two
            # parts. Previously a header string appearing more than once in
            # the file made len(parts) != 2, and the fact was silently
            # dropped while the function still reported success.
            before, after_header = content.split(target_header, 1)

            # Skip past the newline(s)/blank line that follow the header so
            # the new bullet lands at the top of the section's list.
            insert_pos = 0
            for i, char in enumerate(after_header):
                if char == '\n':
                    insert_pos = i + 1
                    # Stop once the next char starts real (non-bullet) text.
                    if i + 1 < len(after_header) and after_header[i + 1] not in ('\n', '-'):
                        break
                elif char != '\n' and char != ' ':
                    break

            after_header = after_header[:insert_pos] + new_line + after_header[insert_pos:]
            content = before + target_header + after_header
        else:
            # Section doesn't exist yet: append it at the end of the file.
            content += f"\n{target_header}\n\n{new_line}"

        # Refresh the "Last updated" stamp (first matching line only).
        lines = content.split("\n")
        for i, line in enumerate(lines):
            if line.startswith("Last updated:"):
                lines[i] = f"Last updated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
                break
        content = "\n".join(lines)

        memory_path.write_text(content, encoding="utf-8")

        return {
            "success": True,
            "file_path": str(memory_path),
            "section": section,
            "fact": fact
        }

    except Exception as e:
        logger.error(f"Failed to add fact to MEMORY.md: {e}")
        return {
            "success": False,
            "error": str(e)
        }
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
async def get_memory_md_summary(project_path: str) -> Dict[str, Any]:
    """Get a summary of MEMORY.md for context injection.

    Returns a condensed version suitable for grounding context.

    Args:
        project_path: Root path of the project

    Returns:
        Dict with summary content
    """
    file_result = await read_memory_md(project_path)

    if not (file_result.get("success") and file_result.get("exists")):
        return {
            "success": True,
            "summary": "",
            "exists": False
        }

    condensed: List[str] = []
    inside_section = False

    for raw_line in file_result.get("content", "").split("\n"):
        if raw_line.startswith("## "):
            # Section header: keep the title, drop any parenthetical suffix.
            inside_section = True
            title = raw_line[3:].split("(")[0].strip()
            condensed.append(f"**{title}**:")
        elif raw_line.startswith("- ") and inside_section:
            # Bullet fact: keep at most the first 100 characters.
            condensed.append(f"  {raw_line[2:].strip()[:100]}")
        elif raw_line.startswith("### ") and inside_section:
            # Pattern heading: keep just the pattern name.
            condensed.append(f"  Pattern: {raw_line[4:].split('(')[0].strip()}")

    return {
        "success": True,
        "summary": "\n".join(condensed),
        "exists": True,
        "line_count": len(condensed)
    }
|