ragtime-cli 0.2.1__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ragtime-cli
- Version: 0.2.1
+ Version: 0.2.2
  Summary: Local-first memory and RAG system for Claude Code - semantic search over code, docs, and team knowledge
  Author-email: Bret Martineau <bretwardjames@gmail.com>
  License-Expression: MIT
@@ -1,12 +1,13 @@
- ragtime_cli-0.2.1.dist-info/licenses/LICENSE,sha256=9A0wJs2PRDciGRH4F8JUJ-aMKYQyq_gVu2ixrXs-l5A,1070
+ ragtime_cli-0.2.2.dist-info/licenses/LICENSE,sha256=9A0wJs2PRDciGRH4F8JUJ-aMKYQyq_gVu2ixrXs-l5A,1070
  src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- src/cli.py,sha256=uZldKb8m53E3lKdhEVHjBYEnTvFSIv90AQ2iyuXIhDA,38083
+ src/cli.py,sha256=bjLpugyxTB3h6Ir-D5eHilYmKGhx1sa4r8K_ZXiqaO8,44717
  src/config.py,sha256=_zSMnGSO8uFFF8Was_Jtm2m1JDPGhT3Lh8Zz2rcQs98,3232
  src/db.py,sha256=BKrlhilXYHNaj-ZcffinSXVhdUqowmwpFPBx7aLxamU,4642
  src/mcp_server.py,sha256=Tx0i73GXO0YmcVrdO7UjRMS0auN8fBG2LOpHuf_LUC0,20374
  src/memory.py,sha256=POT2lYeBcEx4_MPbsIdet6ScwcjmuETz8Dxmz-Z_7IY,11939
  src/commands/audit.md,sha256=Xkucm-gfBIMalK9wf7NBbyejpsqBTUAGGlb7GxMtMPY,5137
  src/commands/handoff.md,sha256=8VxTddtW08jGTW36GW_rS77JdeSn8vHeMfklrWwVUD4,5055
+ src/commands/import-docs.md,sha256=ByIdcfbdiF77HoFv5U6zZ_YvZf00-hAs9EMconXssvY,6927
  src/commands/pr-graduate.md,sha256=TdqcIwtemrvLbbbUw-mY7hvixjOSh8H_L-63_QsAtpI,6455
  src/commands/recall.md,sha256=unQPWsmocKRoQR7jRtjrj8aVcMHverjGR6u5mYL8TLw,6008
  src/commands/remember.md,sha256=nNewsUhIqF4wtD1jhVDZvmLZjdcmPN6NmUM43SdWepc,5368
@@ -14,8 +15,8 @@ src/commands/save.md,sha256=7gTpW46AU9Y4l8XVZ8f4h1sEdBfVqIRA7hlidUxMAC4,251
  src/commands/start.md,sha256=qoqhkMgET74DBx8YPIT1-wqCiVBUDxlmevigsCinHSY,6506
  src/indexers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  src/indexers/docs.py,sha256=7FENHaKSvC1T557bRzvmrjyaG_vK94GuQG9XMZdr89w,3349
- ragtime_cli-0.2.1.dist-info/METADATA,sha256=Cg_bRuQ87B6OuOLhQ4TBQ_uUMQS7dGBL_Q-ZtkjI3JY,5311
- ragtime_cli-0.2.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- ragtime_cli-0.2.1.dist-info/entry_points.txt,sha256=cWLbeyMxZNbew-THS3bHXTpCRXt1EaUy5QUOXGXLjl4,75
- ragtime_cli-0.2.1.dist-info/top_level.txt,sha256=74rtVfumQlgAPzR5_2CgYN24MB0XARCg0t-gzk6gTrM,4
- ragtime_cli-0.2.1.dist-info/RECORD,,
+ ragtime_cli-0.2.2.dist-info/METADATA,sha256=TqC9Yyq8QFFSxwXnJaRAMxnW3PttYEyoGkRhowwouLo,5311
+ ragtime_cli-0.2.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ ragtime_cli-0.2.2.dist-info/entry_points.txt,sha256=cWLbeyMxZNbew-THS3bHXTpCRXt1EaUy5QUOXGXLjl4,75
+ ragtime_cli-0.2.2.dist-info/top_level.txt,sha256=74rtVfumQlgAPzR5_2CgYN24MB0XARCg0t-gzk6gTrM,4
+ ragtime_cli-0.2.2.dist-info/RECORD,,
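Each RECORD line above pairs a path with a sha256 digest (urlsafe base64 with the `=` padding stripped, per the wheel spec) and a byte size; the jump in `src/cli.py` from 38083 to 44717 bytes corresponds to the new `audit` command shown below. As a rough illustration only (not part of the package), a digest for one of these entries could be recomputed from a locally unpacked wheel like this:

```python
import base64
import hashlib
from pathlib import Path


def record_digest(path: Path) -> str:
    """sha256 in wheel-RECORD style: urlsafe base64 with '=' padding stripped."""
    digest = hashlib.sha256(path.read_bytes()).digest()
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# Hypothetical local path into an unpacked 0.2.2 wheel; compare against the entry:
# src/cli.py,sha256=bjLpugyxTB3h6Ir-D5eHilYmKGhx1sa4r8K_ZXiqaO8,44717
print(record_digest(Path("src/cli.py")))
```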
src/cli.py CHANGED
@@ -166,7 +166,7 @@ def get_remote_branches_with_ragtime(path: Path) -> list[str]:
 
 
  @click.group()
- @click.version_option(version="0.2.1")
+ @click.version_option(version="0.2.2")
  def main():
      """Ragtime - semantic search over code and documentation."""
      pass
@@ -1105,6 +1105,193 @@ def daemon_status(path: Path):
  pid_file.unlink()
 
 
+ @main.command()
+ @click.argument("docs_path", type=click.Path(exists=True, path_type=Path), default="docs")
+ @click.option("--path", type=click.Path(exists=True, path_type=Path), default=".")
+ @click.option("--fix", is_flag=True, help="Interactively add frontmatter to files")
+ @click.option("--json", "as_json", is_flag=True, help="Output as JSON")
+ def audit(docs_path: Path, path: Path, fix: bool, as_json: bool):
+     """Audit docs for ragtime-compatible frontmatter.
+
+     Scans markdown files and suggests metadata for better indexing.
+
+     Examples:
+         ragtime audit docs/ # Audit docs folder
+         ragtime audit docs/ --fix # Interactively add frontmatter
+         ragtime audit . --json # Output suggestions as JSON
+     """
+     import re
+     import json as json_module
+
+     path = Path(path).resolve()
+     docs_path = Path(docs_path).resolve()
+
+     if not docs_path.exists():
+         click.echo(f"✗ Path not found: {docs_path}", err=True)
+         return
+
+     # Find all markdown files
+     md_files = list(docs_path.rglob("*.md"))
+
+     if not md_files:
+         click.echo(f"No markdown files found in {docs_path}")
+         return
+
+     results = []
+
+     for md_file in md_files:
+         content = md_file.read_text()
+         relative = md_file.relative_to(path) if md_file.is_relative_to(path) else md_file
+
+         # Check for existing frontmatter
+         has_frontmatter = content.startswith("---")
+         existing_meta = {}
+
+         if has_frontmatter:
+             try:
+                 import yaml
+                 parts = content.split("---", 2)
+                 if len(parts) >= 3:
+                     existing_meta = yaml.safe_load(parts[1]) or {}
+             except:
+                 pass
+
+         # Analyze file for suggestions
+         suggestions = analyze_doc_for_metadata(md_file, content, existing_meta)
+
+         status = "ok" if not suggestions["missing"] else "needs_update"
+         if not has_frontmatter:
+             status = "no_frontmatter"
+
+         results.append({
+             "file": str(relative),
+             "status": status,
+             "has_frontmatter": has_frontmatter,
+             "existing": existing_meta,
+             "suggestions": suggestions,
+         })
+
+     if as_json:
+         click.echo(json_module.dumps(results, indent=2))
+         return
+
+     # Summary
+     no_fm = [r for r in results if r["status"] == "no_frontmatter"]
+     needs_update = [r for r in results if r["status"] == "needs_update"]
+     ok = [r for r in results if r["status"] == "ok"]
+
+     click.echo(f"\nAudited {len(md_files)} files in {docs_path.name}/\n")
+
+     if ok:
+         click.echo(f"✓ {len(ok)} files have complete frontmatter")
+
+     if needs_update:
+         click.echo(f"• {len(needs_update)} files could use more metadata")
+
+     if no_fm:
+         click.echo(f"✗ {len(no_fm)} files have no frontmatter\n")
+
+         for r in no_fm[:10]:  # Show first 10
+             click.echo(f"{'─' * 60}")
+             click.echo(f" {r['file']}")
+             s = r["suggestions"]
+             click.echo(f" Suggested frontmatter:")
+             click.echo(f" namespace: {s.get('namespace', 'app')}")
+             click.echo(f" type: {s.get('type', 'document')}")
+             if s.get("component"):
+                 click.echo(f" component: {s['component']}")
+
+         if len(no_fm) > 10:
+             click.echo(f"\n ... and {len(no_fm) - 10} more files")
+
+     if fix and no_fm:
+         click.echo(f"\n{'─' * 60}")
+         if click.confirm(f"\nAdd frontmatter to {len(no_fm)} files?"):
+             added = 0
+             for r in no_fm:
+                 file_path = path / r["file"]
+                 content = file_path.read_text()
+                 s = r["suggestions"]
+
+                 # Build frontmatter
+                 fm_lines = ["---"]
+                 fm_lines.append(f"namespace: {s.get('namespace', 'app')}")
+                 fm_lines.append(f"type: {s.get('type', 'document')}")
+                 if s.get("component"):
+                     fm_lines.append(f"component: {s['component']}")
+                 fm_lines.append("---")
+                 fm_lines.append("")
+
+                 new_content = "\n".join(fm_lines) + content
+                 file_path.write_text(new_content)
+                 added += 1
+                 click.echo(f" ✓ {r['file']}")
+
+             click.echo(f"\n✓ Added frontmatter to {added} files")
+             click.echo(f" Run 'ragtime reindex' to update the search index")
+
+
+ def analyze_doc_for_metadata(file_path: Path, content: str, existing: dict) -> dict:
+     """Analyze a document and suggest metadata."""
+     import re
+
+     suggestions = {}
+     missing = []
+
+     # Infer from path
+     parts = file_path.parts
+     path_lower = str(file_path).lower()
+
+     # Namespace inference
+     if "namespace" not in existing:
+         missing.append("namespace")
+         if ".ragtime" in path_lower or "memory" in path_lower:
+             suggestions["namespace"] = "app"
+         elif "team" in path_lower or "convention" in path_lower:
+             suggestions["namespace"] = "team"
+         else:
+             suggestions["namespace"] = "app"
+
+     # Type inference
+     if "type" not in existing:
+         missing.append("type")
+
+         # Check content for clues
+         content_lower = content.lower()
+
+         if "api" in path_lower or "endpoint" in content_lower:
+             suggestions["type"] = "architecture"
+         elif "decision" in path_lower or "adr" in path_lower or "we decided" in content_lower:
+             suggestions["type"] = "decision"
+         elif "guide" in path_lower or "how to" in content_lower:
+             suggestions["type"] = "pattern"
+         elif "setup" in path_lower or "install" in path_lower:
+             suggestions["type"] = "convention"
+         elif "readme" in path_lower:
+             suggestions["type"] = "document"
+         elif "changelog" in path_lower or "release" in path_lower:
+             suggestions["type"] = "document"
+         else:
+             suggestions["type"] = "document"
+
+     # Component inference from path
+     if "component" not in existing:
+         # Look for component-like folder names
+         component_candidates = []
+         skip = {"docs", "src", "lib", "app", "pages", "components", "memory", ".ragtime"}
+
+         for part in parts[:-1]:  # Exclude filename
+             if part.lower() not in skip and not part.startswith("."):
+                 component_candidates.append(part.lower())
+
+         if component_candidates:
+             suggestions["component"] = component_candidates[-1]  # Most specific
+             missing.append("component")
+
+     suggestions["missing"] = missing
+     return suggestions
+
+
  @main.command()
  @click.option("--check", is_flag=True, help="Only check for updates, don't install")
  def update(check: bool):
@@ -1113,7 +1300,7 @@ def update(check: bool):
      from urllib.request import urlopen
      from urllib.error import URLError
 
-     current = "0.2.1"
+     current = "0.2.2"
 
      click.echo(f"Current version: {current}")
      click.echo("Checking PyPI for updates...")
src/commands/import-docs.md ADDED
@@ -0,0 +1,259 @@
+ ---
+ description: Migrate existing docs into ragtime memory structure with AI-assisted classification
+ allowed-arguments: path to docs folder (e.g., /import-docs docs/)
+ allowed-tools: Bash, Read, Write, Edit, AskUserQuestion
+ ---
+
+ # Import Docs
+
+ Analyze an existing docs folder and migrate content into the ragtime memory structure.
+
+ **Usage:**
+ - `/import-docs docs/` - Analyze docs folder and create memories
+ - `/import-docs` - Interactive mode, asks for path
+
+ ## Overview
+
+ This command helps teams that have existing documentation migrate into ragtime's structured memory system. It:
+
+ 1. Scans all markdown files using `ragtime audit --json`
+ 2. Analyzes each document's content to classify properly
+ 3. Determines what should become memories vs. stay as indexed docs
+ 4. Creates memories in `.ragtime/app/` or `.ragtime/team/` as appropriate
+
+ ## Step 1: Get the Docs Path
+
+ **If `$ARGUMENTS` provided:**
+ - Use it as the docs path
+
+ **If no arguments:**
+ - Ask: "What docs folder should I analyze? (e.g., docs/, documentation/)"
+
+ ## Step 2: Run Audit
+
+ Run the ragtime audit command to get a structured view of all docs:
+
+ ```bash
+ ragtime audit {docs_path} --json
+ ```
+
+ Parse the JSON output to get the list of files and initial suggestions.
+
+ ## Step 3: Analyze Documents
+
+ For each document (or batch), read the content and classify:
+
+ ### Classification Questions
+
+ For each doc, determine:
+
+ 1. **Is this memory-worthy or just reference?**
+    - Memory-worthy: Contains decisions, patterns, architecture insights, conventions
+    - Reference: API docs, changelogs, auto-generated content, raw specs
+
+ 2. **What type of knowledge is it?**
+    - `architecture` - How systems/components work
+    - `decision` - Why we chose X over Y (look for "we decided", "because", trade-off discussion)
+    - `convention` - Team rules, coding standards, process docs
+    - `pattern` - Reusable solutions, "how to do X"
+    - `integration` - How external services connect
+    - `feature` - Feature documentation
+
+ 3. **What namespace?**
+    - `app` - Technical knowledge about this codebase
+    - `team` - Team conventions, process, standards
+
+ 4. **What component?** (infer from path and content)
+
+ 5. **Should this doc be split?**
+    - Large docs with multiple distinct sections may become multiple memories
+    - Each memory should be focused on ONE concept
+
+ ### Classification Hints
+
+ Look for signals in the content:
+
+ | Signal | Likely Type |
+ |--------|-------------|
+ | "We decided to..." | decision |
+ | "Always do X when Y" | convention |
+ | "The {system} works by..." | architecture |
+ | "To implement X, follow these steps..." | pattern |
+ | "Integrates with {service} via..." | integration |
+ | ADR format, numbered decisions | decision |
+ | API endpoints, request/response | architecture |
+ | Setup instructions, onboarding | convention |
+
+ ## Step 4: Choose Migration Strategy
+
+ Ask the user:
+
+ ```
+ How should I handle the original docs?
+
+ 1. **Memories only** - Create memories in .ragtime/, leave original docs unchanged
+ 2. **Frontmatter only** - Add frontmatter to original docs for better indexing, no memories
+ 3. **Both** - Add frontmatter to originals AND create memories for key insights (recommended)
+ ```
+
+ Based on their choice, adjust what the migration plan includes.
+
+ ## Step 5: Generate Migration Plan
+
+ Create a migration plan based on the user's strategy choice:
+
+ ```
+ ## Migration Plan
+
+ ### Will Add Frontmatter (if strategy includes frontmatter)
+
+ | File | Namespace | Type | Component |
+ |------|-----------|------|-----------|
+ | docs/auth/JWT_DESIGN.md | app | architecture | auth |
+ | docs/CODING_STANDARDS.md | team | convention | - |
+ | ... | | | |
+
+ ### Will Create Memories (if strategy includes memories)
+
+ | File | Type | Namespace | Component | Notes |
+ |------|------|-----------|-----------|-------|
+ | docs/auth/JWT_DESIGN.md | architecture | app | auth | Split into 2 memories |
+ | docs/CODING_STANDARDS.md | convention | team | - | Full doc |
+ | ... | | | | |
+
+ ### Will Index Only (no memory, just frontmatter or skip)
+
+ These stay as searchable docs but don't become memories:
+ - docs/CHANGELOG.md (reference)
+ - docs/api/endpoints.md (reference, auto-generated)
+ - ...
+
+ ### Will Skip (Z files)
+
+ - docs/archive/old-stuff.md (outdated)
+ - ...
+ ```
+
+ ## Step 6: Get Approval
+
+ Present the plan and ask:
+
+ ```
+ I've analyzed {total} docs:
+ - {X} will become memories in .ragtime/
+ - {Y} will be indexed as reference docs
+ - {Z} will be skipped
+
+ Review the plan above. Should I:
+ 1. Proceed with all
+ 2. Let me adjust some classifications
+ 3. Show me a specific file's analysis
+ 4. Cancel
+ ```
+
+ ## Step 7: Execute Migration
+
+ ### If adding frontmatter to originals:
+
+ For each doc that needs frontmatter, prepend:
+
+ ```yaml
+ ---
+ namespace: {app|team}
+ type: {type}
+ component: {if applicable}
+ ---
+ ```
+
+ This makes the original docs index better with `ragtime index`.
+
+ ### If creating memories:
+
+ 1. **Extract the key content** - Don't copy the whole doc verbatim unless it's focused. Extract the essential knowledge.
+
+ 2. **Write the memory file** to `.ragtime/app/{component}/{id}-{slug}.md` or `.ragtime/team/{id}-{slug}.md`:
+
+    ```yaml
+    ---
+    id: {8-char-uuid}
+    namespace: {app|team}
+    type: {type}
+    component: {if applicable}
+    confidence: medium
+    confidence_reason: import-docs
+    source: import-docs
+    status: active
+    added: {today}
+    migrated_from: {original-file-path}
+    ---
+
+    {extracted content}
+    ```
+
+ 3. **For split documents**, create multiple focused memories with related IDs.
+
+ ## Step 8: Reindex
+
+ After all memories are created:
+
+ ```bash
+ ragtime reindex
+ ```
+
+ ## Step 9: Summary
+
+ Summarize based on what was done:
+
+ ```
+ ## Migration Complete
+
+ ### Frontmatter Added (if applicable)
+ Updated {X} docs with frontmatter for better indexing.
+
+ ### Memories Created (if applicable)
+ Created {Y} memories:
+ - {N} architecture in app/
+ - {N} conventions in team/
+ - {N} decisions in app/
+ - ...
+
+ Next steps:
+ - Run 'ragtime index' to update the search index
+ - Review memories with: ragtime memories --namespace app
+ - Search with: ragtime search "your query"
+ - Edit any misclassified content in .ragtime/ or docs/
+ ```
+
+ ## Tips
+
+ - **Don't over-migrate**: Not every doc needs to be a memory. Reference docs are fine as indexed content.
+ - **Preserve originals**: This creates memories FROM docs, doesn't delete originals.
+ - **Review component inference**: The path-based component detection may need adjustment.
+ - **Batch by folder**: For large doc sets, consider migrating one folder at a time.
+
+ ## Example Session
+
+ ```
+ User: /import-docs docs/
+
+ Claude: I'll analyze your docs folder...
+
+ Running: ragtime audit docs/ --json
+
+ Found 45 markdown files. Let me analyze each one...
+
+ [Analyzes docs/auth/JWT_DESIGN.md]
+ This is an architecture doc about JWT implementation. Key insights:
+ - 15-minute access token expiry
+ - Refresh token rotation strategy
+ - Why we chose asymmetric keys
+
+ I'll create 1 memory in app/auth/ for this.
+
+ [Continues for other files...]
+
+ ## Migration Plan
+ ...
+
+ Proceed? (yes/adjust/cancel)
+ ```
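For orientation, the manual CLI flow that the shipped docs keep referencing (`ragtime audit`, `ragtime reindex`, `ragtime search`) can be strung together as below. This is a hedged sketch only: the commands and flags are the ones named in the files above, while the folder path and query string are placeholders.

```python
import subprocess

# Sketch of the end-to-end workflow described in src/commands/import-docs.md.
steps = [
    ["ragtime", "audit", "docs/"],           # report files missing frontmatter
    ["ragtime", "audit", "docs/", "--fix"],  # interactively prepend suggested frontmatter
    ["ragtime", "reindex"],                  # rebuild the search index after edits
    ["ragtime", "search", "jwt refresh"],    # spot-check retrieval (placeholder query)
]
for cmd in steps:
    subprocess.run(cmd, check=True)
```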