ragtime-cli 0.2.3__py3-none-any.whl → 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ragtime-cli might be problematic. Click here for more details.
- ragtime_cli-0.2.5.dist-info/METADATA +402 -0
- {ragtime_cli-0.2.3.dist-info → ragtime_cli-0.2.5.dist-info}/RECORD +13 -11
- src/cli.py +625 -4
- src/commands/generate-docs.md +325 -0
- src/config.py +1 -1
- src/indexers/__init__.py +6 -0
- src/indexers/code.py +473 -0
- src/mcp_server.py +1 -1
- src/memory.py +6 -1
- ragtime_cli-0.2.3.dist-info/METADATA +0 -220
- {ragtime_cli-0.2.3.dist-info → ragtime_cli-0.2.5.dist-info}/WHEEL +0 -0
- {ragtime_cli-0.2.3.dist-info → ragtime_cli-0.2.5.dist-info}/entry_points.txt +0 -0
- {ragtime_cli-0.2.3.dist-info → ragtime_cli-0.2.5.dist-info}/licenses/LICENSE +0 -0
- {ragtime_cli-0.2.3.dist-info → ragtime_cli-0.2.5.dist-info}/top_level.txt +0 -0
src/cli.py
CHANGED
|
@@ -12,6 +12,7 @@ import sys
|
|
|
12
12
|
from .db import RagtimeDB
|
|
13
13
|
from .config import RagtimeConfig, init_config
|
|
14
14
|
from .indexers.docs import index_directory as index_docs
|
|
15
|
+
from .indexers.code import index_directory as index_code
|
|
15
16
|
from .memory import Memory, MemoryStore
|
|
16
17
|
|
|
17
18
|
|
|
@@ -166,7 +167,7 @@ def get_remote_branches_with_ragtime(path: Path) -> list[str]:
|
|
|
166
167
|
|
|
167
168
|
|
|
168
169
|
@click.group()
|
|
169
|
-
@click.version_option(version="0.2.3")
|
|
170
|
+
@click.version_option(version="0.2.5")
|
|
170
171
|
def main():
|
|
171
172
|
"""Ragtime - semantic search over code and documentation."""
|
|
172
173
|
pass
|
|
@@ -297,10 +298,41 @@ def index(path: Path, index_type: str, clear: bool):
|
|
|
297
298
|
click.echo(" No documents found")
|
|
298
299
|
|
|
299
300
|
if index_type in ("all", "code"):
|
|
301
|
+
# Build exclusion list for code
|
|
300
302
|
code_exclude = list(config.code.exclude)
|
|
301
303
|
for docs_path in config.docs.paths:
|
|
302
304
|
code_exclude.append(f"**/{docs_path}/**")
|
|
303
|
-
|
|
305
|
+
|
|
306
|
+
total_entries = []
|
|
307
|
+
for code_path_str in config.code.paths:
|
|
308
|
+
code_root = path / code_path_str
|
|
309
|
+
if not code_root.exists():
|
|
310
|
+
click.echo(f" Code path {code_root} not found, skipping...")
|
|
311
|
+
continue
|
|
312
|
+
click.echo(f"Indexing code in {code_root}...")
|
|
313
|
+
entries = index_code(
|
|
314
|
+
code_root,
|
|
315
|
+
languages=config.code.languages,
|
|
316
|
+
exclude=code_exclude,
|
|
317
|
+
)
|
|
318
|
+
total_entries.extend(entries)
|
|
319
|
+
|
|
320
|
+
if total_entries:
|
|
321
|
+
# Create unique IDs: file:line:symbol
|
|
322
|
+
ids = [f"{e.file_path}:{e.line_number}:{e.symbol_name}" for e in total_entries]
|
|
323
|
+
documents = [e.content for e in total_entries]
|
|
324
|
+
metadatas = [e.to_metadata() for e in total_entries]
|
|
325
|
+
db.upsert(ids=ids, documents=documents, metadatas=metadatas)
|
|
326
|
+
click.echo(f" Indexed {len(total_entries)} code symbols")
|
|
327
|
+
|
|
328
|
+
# Show breakdown by type
|
|
329
|
+
by_type = {}
|
|
330
|
+
for e in total_entries:
|
|
331
|
+
by_type[e.symbol_type] = by_type.get(e.symbol_type, 0) + 1
|
|
332
|
+
breakdown = ", ".join(f"{count} {typ}s" for typ, count in sorted(by_type.items()))
|
|
333
|
+
click.echo(f" ({breakdown})")
|
|
334
|
+
else:
|
|
335
|
+
click.echo(" No code symbols found")
|
|
304
336
|
|
|
305
337
|
stats = db.stats()
|
|
306
338
|
click.echo(f"\nIndex stats: {stats['total']} total ({stats['docs']} docs, {stats['code']} code)")
|
|
@@ -1041,7 +1073,15 @@ def daemon_start(path: Path, interval: str):
|
|
|
1041
1073
|
|
|
1042
1074
|
Runs git fetch && ragtime sync on an interval to keep
|
|
1043
1075
|
remote branches synced automatically.
|
|
1076
|
+
|
|
1077
|
+
Note: This command requires Unix (Linux/macOS). On Windows, use Task Scheduler instead.
|
|
1044
1078
|
"""
|
|
1079
|
+
# Check for Windows - os.fork() is Unix-only
|
|
1080
|
+
if sys.platform == "win32":
|
|
1081
|
+
click.echo("✗ Daemon mode is not supported on Windows.", err=True)
|
|
1082
|
+
click.echo(" Use Windows Task Scheduler to run 'ragtime sync' periodically instead.")
|
|
1083
|
+
return
|
|
1084
|
+
|
|
1045
1085
|
path = Path(path).resolve()
|
|
1046
1086
|
pid_file = get_pid_file(path)
|
|
1047
1087
|
log_file = get_log_file(path)
|
|
@@ -1088,6 +1128,7 @@ def daemon_start(path: Path, interval: str):
|
|
|
1088
1128
|
pid_file.write_text(str(os.getpid()))
|
|
1089
1129
|
|
|
1090
1130
|
# Redirect output to log file
|
|
1131
|
+
# Note: log_fd is intentionally kept open for the lifetime of the daemon
|
|
1091
1132
|
log_fd = open(log_file, "a")
|
|
1092
1133
|
os.dup2(log_fd.fileno(), sys.stdout.fileno())
|
|
1093
1134
|
os.dup2(log_fd.fileno(), sys.stderr.fileno())
|
|
@@ -1175,6 +1216,586 @@ def daemon_status(path: Path):
|
|
|
1175
1216
|
pid_file.unlink()
|
|
1176
1217
|
|
|
1177
1218
|
|
|
1219
|
+
# ============================================================================
|
|
1220
|
+
# Debug Commands
|
|
1221
|
+
# ============================================================================
|
|
1222
|
+
|
|
1223
|
+
|
|
1224
|
+
@main.group()
|
|
1225
|
+
def debug():
|
|
1226
|
+
"""Debug and verify the vector index."""
|
|
1227
|
+
pass
|
|
1228
|
+
|
|
1229
|
+
|
|
1230
|
+
@debug.command("search")
|
|
1231
|
+
@click.argument("query")
|
|
1232
|
+
@click.option("--path", type=click.Path(exists=True, path_type=Path), default=".")
|
|
1233
|
+
@click.option("--limit", "-l", default=5, help="Max results")
|
|
1234
|
+
@click.option("--show-vectors", is_flag=True, help="Show vector statistics")
|
|
1235
|
+
def debug_search(query: str, path: Path, limit: int, show_vectors: bool):
|
|
1236
|
+
"""Debug a search query - show scores and ranking details."""
|
|
1237
|
+
path = Path(path).resolve()
|
|
1238
|
+
db = get_db(path)
|
|
1239
|
+
|
|
1240
|
+
results = db.search(query=query, limit=limit)
|
|
1241
|
+
|
|
1242
|
+
if not results:
|
|
1243
|
+
click.echo("No results found.")
|
|
1244
|
+
return
|
|
1245
|
+
|
|
1246
|
+
click.echo(f"\nQuery: \"{query}\"")
|
|
1247
|
+
click.echo(f"{'─' * 60}")
|
|
1248
|
+
|
|
1249
|
+
for i, result in enumerate(results, 1):
|
|
1250
|
+
meta = result["metadata"]
|
|
1251
|
+
distance = result["distance"]
|
|
1252
|
+
similarity = 1 - distance if distance else None
|
|
1253
|
+
|
|
1254
|
+
click.echo(f"\n[{i}] {meta.get('file', 'unknown')}")
|
|
1255
|
+
click.echo(f" Distance: {distance:.4f}")
|
|
1256
|
+
click.echo(f" Similarity: {similarity:.4f} ({similarity * 100:.1f}%)")
|
|
1257
|
+
click.echo(f" Namespace: {meta.get('namespace', '-')}")
|
|
1258
|
+
click.echo(f" Type: {meta.get('type', '-')}")
|
|
1259
|
+
|
|
1260
|
+
# Show content preview
|
|
1261
|
+
preview = result["content"][:100].replace("\n", " ")
|
|
1262
|
+
click.echo(f" Preview: {preview}...")
|
|
1263
|
+
|
|
1264
|
+
if show_vectors:
|
|
1265
|
+
click.echo(f"\n{'─' * 60}")
|
|
1266
|
+
click.echo("Vector Statistics:")
|
|
1267
|
+
click.echo(f" Total indexed: {db.collection.count()}")
|
|
1268
|
+
click.echo(f" Embedding model: all-MiniLM-L6-v2 (ChromaDB default)")
|
|
1269
|
+
click.echo(f" Vector dimensions: 384")
|
|
1270
|
+
click.echo(f" Distance metric: cosine")
|
|
1271
|
+
|
|
1272
|
+
|
|
1273
|
+
@debug.command("similar")
|
|
1274
|
+
@click.argument("file_path", type=click.Path(exists=True))
|
|
1275
|
+
@click.option("--path", type=click.Path(exists=True, path_type=Path), default=".")
|
|
1276
|
+
@click.option("--limit", "-l", default=5, help="Max similar docs")
|
|
1277
|
+
def debug_similar(file_path: str, path: Path, limit: int):
|
|
1278
|
+
"""Find documents similar to a given file."""
|
|
1279
|
+
path = Path(path).resolve()
|
|
1280
|
+
db = get_db(path)
|
|
1281
|
+
|
|
1282
|
+
# Read the file content
|
|
1283
|
+
try:
|
|
1284
|
+
content = Path(file_path).read_text()
|
|
1285
|
+
except Exception as e:
|
|
1286
|
+
click.echo(f"✗ Could not read file: {e}", err=True)
|
|
1287
|
+
return
|
|
1288
|
+
|
|
1289
|
+
# Use the content as the query
|
|
1290
|
+
results = db.search(query=content, limit=limit + 1) # +1 to exclude self
|
|
1291
|
+
|
|
1292
|
+
click.echo(f"\nDocuments similar to: {file_path}")
|
|
1293
|
+
click.echo(f"{'─' * 60}")
|
|
1294
|
+
|
|
1295
|
+
shown = 0
|
|
1296
|
+
for result in results:
|
|
1297
|
+
# Skip the file itself
|
|
1298
|
+
if result["metadata"].get("file", "").endswith(file_path):
|
|
1299
|
+
continue
|
|
1300
|
+
|
|
1301
|
+
shown += 1
|
|
1302
|
+
if shown > limit:
|
|
1303
|
+
break
|
|
1304
|
+
|
|
1305
|
+
meta = result["metadata"]
|
|
1306
|
+
distance = result["distance"]
|
|
1307
|
+
similarity = 1 - distance if distance else None
|
|
1308
|
+
|
|
1309
|
+
click.echo(f"\n[{shown}] {meta.get('file', 'unknown')}")
|
|
1310
|
+
click.echo(f" Similarity: {similarity:.4f} ({similarity * 100:.1f}%)")
|
|
1311
|
+
|
|
1312
|
+
preview = result["content"][:100].replace("\n", " ")
|
|
1313
|
+
click.echo(f" Preview: {preview}...")
|
|
1314
|
+
|
|
1315
|
+
|
|
1316
|
+
@debug.command("stats")
|
|
1317
|
+
@click.option("--path", type=click.Path(exists=True, path_type=Path), default=".")
|
|
1318
|
+
@click.option("--by-namespace", is_flag=True, help="Show counts by namespace")
|
|
1319
|
+
@click.option("--by-type", is_flag=True, help="Show counts by type")
|
|
1320
|
+
def debug_stats(path: Path, by_namespace: bool, by_type: bool):
|
|
1321
|
+
"""Show detailed index statistics."""
|
|
1322
|
+
path = Path(path).resolve()
|
|
1323
|
+
db = get_db(path)
|
|
1324
|
+
|
|
1325
|
+
total = db.collection.count()
|
|
1326
|
+
click.echo(f"\nIndex Statistics")
|
|
1327
|
+
click.echo(f"{'─' * 40}")
|
|
1328
|
+
click.echo(f"Total documents: {total}")
|
|
1329
|
+
|
|
1330
|
+
if total == 0:
|
|
1331
|
+
click.echo("\nIndex is empty. Run 'ragtime index' first.")
|
|
1332
|
+
return
|
|
1333
|
+
|
|
1334
|
+
# Get all documents for analysis
|
|
1335
|
+
all_docs = db.collection.get()
|
|
1336
|
+
|
|
1337
|
+
if by_namespace or (not by_namespace and not by_type):
|
|
1338
|
+
namespaces = {}
|
|
1339
|
+
for meta in all_docs["metadatas"]:
|
|
1340
|
+
ns = meta.get("namespace", "unknown")
|
|
1341
|
+
namespaces[ns] = namespaces.get(ns, 0) + 1
|
|
1342
|
+
|
|
1343
|
+
click.echo(f"\nBy Namespace:")
|
|
1344
|
+
for ns, count in sorted(namespaces.items(), key=lambda x: -x[1]):
|
|
1345
|
+
pct = count / total * 100
|
|
1346
|
+
click.echo(f" {ns}: {count} ({pct:.1f}%)")
|
|
1347
|
+
|
|
1348
|
+
if by_type or (not by_namespace and not by_type):
|
|
1349
|
+
types = {}
|
|
1350
|
+
for meta in all_docs["metadatas"]:
|
|
1351
|
+
t = meta.get("type", "unknown")
|
|
1352
|
+
types[t] = types.get(t, 0) + 1
|
|
1353
|
+
|
|
1354
|
+
click.echo(f"\nBy Type:")
|
|
1355
|
+
for t, count in sorted(types.items(), key=lambda x: -x[1]):
|
|
1356
|
+
pct = count / total * 100
|
|
1357
|
+
click.echo(f" {t}: {count} ({pct:.1f}%)")
|
|
1358
|
+
|
|
1359
|
+
|
|
1360
|
+
@debug.command("verify")
|
|
1361
|
+
@click.option("--path", type=click.Path(exists=True, path_type=Path), default=".")
|
|
1362
|
+
def debug_verify(path: Path):
|
|
1363
|
+
"""Verify index integrity with test queries."""
|
|
1364
|
+
path = Path(path).resolve()
|
|
1365
|
+
db = get_db(path)
|
|
1366
|
+
|
|
1367
|
+
total = db.collection.count()
|
|
1368
|
+
if total == 0:
|
|
1369
|
+
click.echo("✗ Index is empty. Run 'ragtime index' first.")
|
|
1370
|
+
return
|
|
1371
|
+
|
|
1372
|
+
click.echo(f"\nVerifying index ({total} documents)...")
|
|
1373
|
+
click.echo(f"{'─' * 40}")
|
|
1374
|
+
|
|
1375
|
+
issues = []
|
|
1376
|
+
|
|
1377
|
+
# Test 1: Basic search works
|
|
1378
|
+
click.echo("\n1. Testing basic search...")
|
|
1379
|
+
try:
|
|
1380
|
+
results = db.search("test", limit=1)
|
|
1381
|
+
if results:
|
|
1382
|
+
click.echo(" ✓ Search returns results")
|
|
1383
|
+
else:
|
|
1384
|
+
click.echo(" ⚠ Search returned no results (might be ok if no relevant docs)")
|
|
1385
|
+
except Exception as e:
|
|
1386
|
+
click.echo(f" ✗ Search failed: {e}")
|
|
1387
|
+
issues.append("Basic search failed")
|
|
1388
|
+
|
|
1389
|
+
# Test 2: Check for documents with missing metadata
|
|
1390
|
+
click.echo("\n2. Checking metadata integrity...")
|
|
1391
|
+
all_docs = db.collection.get()
|
|
1392
|
+
missing_namespace = 0
|
|
1393
|
+
missing_type = 0
|
|
1394
|
+
|
|
1395
|
+
for meta in all_docs["metadatas"]:
|
|
1396
|
+
if not meta.get("namespace"):
|
|
1397
|
+
missing_namespace += 1
|
|
1398
|
+
if not meta.get("type"):
|
|
1399
|
+
missing_type += 1
|
|
1400
|
+
|
|
1401
|
+
if missing_namespace:
|
|
1402
|
+
click.echo(f" ⚠ {missing_namespace} docs missing namespace")
|
|
1403
|
+
else:
|
|
1404
|
+
click.echo(" ✓ All docs have namespace")
|
|
1405
|
+
|
|
1406
|
+
if missing_type:
|
|
1407
|
+
click.echo(f" ⚠ {missing_type} docs missing type")
|
|
1408
|
+
else:
|
|
1409
|
+
click.echo(" ✓ All docs have type")
|
|
1410
|
+
|
|
1411
|
+
# Test 3: Check for duplicate IDs
|
|
1412
|
+
click.echo("\n3. Checking for duplicates...")
|
|
1413
|
+
ids = all_docs["ids"]
|
|
1414
|
+
unique_ids = set(ids)
|
|
1415
|
+
if len(ids) != len(unique_ids):
|
|
1416
|
+
dup_count = len(ids) - len(unique_ids)
|
|
1417
|
+
click.echo(f" ✗ {dup_count} duplicate IDs found")
|
|
1418
|
+
issues.append("Duplicate IDs")
|
|
1419
|
+
else:
|
|
1420
|
+
click.echo(" ✓ No duplicate IDs")
|
|
1421
|
+
|
|
1422
|
+
# Test 4: Similarity sanity check
|
|
1423
|
+
click.echo("\n4. Testing similarity consistency...")
|
|
1424
|
+
if total >= 2:
|
|
1425
|
+
# Pick first doc and find similar
|
|
1426
|
+
first_content = all_docs["documents"][0]
|
|
1427
|
+
results = db.search(first_content[:500], limit=2)
|
|
1428
|
+
if results and len(results) >= 1:
|
|
1429
|
+
top_similarity = 1 - results[0]["distance"]
|
|
1430
|
+
if top_similarity > 0.9:
|
|
1431
|
+
click.echo(f" ✓ Self-similarity check passed ({top_similarity:.2f})")
|
|
1432
|
+
else:
|
|
1433
|
+
click.echo(f" ⚠ Self-similarity lower than expected ({top_similarity:.2f})")
|
|
1434
|
+
else:
|
|
1435
|
+
click.echo(" ⚠ Could not perform similarity check")
|
|
1436
|
+
else:
|
|
1437
|
+
click.echo(" - Skipped (need at least 2 docs)")
|
|
1438
|
+
|
|
1439
|
+
# Summary
|
|
1440
|
+
click.echo(f"\n{'─' * 40}")
|
|
1441
|
+
if issues:
|
|
1442
|
+
click.echo(f"⚠ Found {len(issues)} issues:")
|
|
1443
|
+
for issue in issues:
|
|
1444
|
+
click.echo(f" - {issue}")
|
|
1445
|
+
else:
|
|
1446
|
+
click.echo("✓ Index verification passed")
|
|
1447
|
+
|
|
1448
|
+
|
|
1449
|
+
# ============================================================================
|
|
1450
|
+
# Documentation Generation
|
|
1451
|
+
# ============================================================================
|
|
1452
|
+
|
|
1453
|
+
|
|
1454
|
+
@main.command()
|
|
1455
|
+
@click.argument("code_path", type=click.Path(exists=True, path_type=Path))
|
|
1456
|
+
@click.option("--output", "-o", type=click.Path(path_type=Path), default="docs/api",
|
|
1457
|
+
help="Output directory for docs")
|
|
1458
|
+
@click.option("--stubs", is_flag=True, help="Generate stub docs with TODOs (no AI)")
|
|
1459
|
+
@click.option("--language", "-l", multiple=True,
|
|
1460
|
+
help="Languages to document (python, typescript, javascript)")
|
|
1461
|
+
@click.option("--include-private", is_flag=True, help="Include private methods (_name)")
|
|
1462
|
+
def generate(code_path: Path, output: Path, stubs: bool, language: tuple, include_private: bool):
|
|
1463
|
+
"""Generate documentation from code.
|
|
1464
|
+
|
|
1465
|
+
Creates markdown documentation from code structure.
|
|
1466
|
+
|
|
1467
|
+
Examples:
|
|
1468
|
+
ragtime generate src/ --stubs # Create stub docs
|
|
1469
|
+
ragtime generate src/ -o docs/api # Specify output
|
|
1470
|
+
ragtime generate src/ -l python # Python only
|
|
1471
|
+
"""
|
|
1472
|
+
import ast
|
|
1473
|
+
import re as re_module
|
|
1474
|
+
|
|
1475
|
+
code_path = Path(code_path).resolve()
|
|
1476
|
+
output = Path(output)
|
|
1477
|
+
|
|
1478
|
+
if not stubs:
|
|
1479
|
+
click.echo("Use --stubs for stub generation, or /generate-docs for AI-powered docs")
|
|
1480
|
+
click.echo("\nExample: ragtime generate src/ --stubs")
|
|
1481
|
+
return
|
|
1482
|
+
|
|
1483
|
+
# Determine languages
|
|
1484
|
+
if language:
|
|
1485
|
+
languages = list(language)
|
|
1486
|
+
else:
|
|
1487
|
+
languages = ["python", "typescript", "javascript"]
|
|
1488
|
+
|
|
1489
|
+
# Map extensions to languages
|
|
1490
|
+
ext_map = {
|
|
1491
|
+
"python": [".py"],
|
|
1492
|
+
"typescript": [".ts", ".tsx"],
|
|
1493
|
+
"javascript": [".js", ".jsx"],
|
|
1494
|
+
}
|
|
1495
|
+
|
|
1496
|
+
extensions = []
|
|
1497
|
+
for lang in languages:
|
|
1498
|
+
extensions.extend(ext_map.get(lang, []))
|
|
1499
|
+
|
|
1500
|
+
# Find code files
|
|
1501
|
+
code_files = []
|
|
1502
|
+
for ext in extensions:
|
|
1503
|
+
code_files.extend(code_path.rglob(f"*{ext}"))
|
|
1504
|
+
|
|
1505
|
+
# Filter out common exclusions
|
|
1506
|
+
exclude_patterns = ["__pycache__", "node_modules", ".venv", "venv", "dist", "build"]
|
|
1507
|
+
code_files = [
|
|
1508
|
+
f for f in code_files
|
|
1509
|
+
if not any(ex in str(f) for ex in exclude_patterns)
|
|
1510
|
+
]
|
|
1511
|
+
|
|
1512
|
+
if not code_files:
|
|
1513
|
+
click.echo(f"No code files found in {code_path}")
|
|
1514
|
+
return
|
|
1515
|
+
|
|
1516
|
+
click.echo(f"Found {len(code_files)} code files")
|
|
1517
|
+
click.echo(f"Output: {output}/")
|
|
1518
|
+
click.echo(f"{'─' * 50}")
|
|
1519
|
+
|
|
1520
|
+
output.mkdir(parents=True, exist_ok=True)
|
|
1521
|
+
generated = 0
|
|
1522
|
+
|
|
1523
|
+
for code_file in code_files:
|
|
1524
|
+
try:
|
|
1525
|
+
content = code_file.read_text()
|
|
1526
|
+
except Exception:
|
|
1527
|
+
continue
|
|
1528
|
+
|
|
1529
|
+
relative = code_file.relative_to(code_path)
|
|
1530
|
+
doc_path = output / relative.with_suffix(".md")
|
|
1531
|
+
|
|
1532
|
+
# Parse based on extension
|
|
1533
|
+
if code_file.suffix == ".py":
|
|
1534
|
+
doc_content = generate_python_stub(code_file, content, include_private)
|
|
1535
|
+
elif code_file.suffix in [".ts", ".tsx", ".js", ".jsx"]:
|
|
1536
|
+
doc_content = generate_typescript_stub(code_file, content, include_private)
|
|
1537
|
+
else:
|
|
1538
|
+
continue
|
|
1539
|
+
|
|
1540
|
+
if doc_content:
|
|
1541
|
+
doc_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1542
|
+
doc_path.write_text(doc_content)
|
|
1543
|
+
try:
|
|
1544
|
+
doc_display = doc_path.relative_to(Path.cwd())
|
|
1545
|
+
except ValueError:
|
|
1546
|
+
doc_display = doc_path
|
|
1547
|
+
click.echo(f" ✓ {relative} → {doc_display}")
|
|
1548
|
+
generated += 1
|
|
1549
|
+
|
|
1550
|
+
click.echo(f"\n{'─' * 50}")
|
|
1551
|
+
click.echo(f"✓ Generated {generated} documentation stubs")
|
|
1552
|
+
click.echo(f"\nNext steps:")
|
|
1553
|
+
click.echo(f" 1. Fill in the TODO placeholders")
|
|
1554
|
+
click.echo(f" 2. Or use /generate-docs for AI-generated content")
|
|
1555
|
+
click.echo(f" 3. Run 'ragtime index' to make searchable")
|
|
1556
|
+
|
|
1557
|
+
|
|
1558
|
+
def generate_python_stub(file_path: Path, content: str, include_private: bool) -> str:
|
|
1559
|
+
"""Generate markdown stub from Python code."""
|
|
1560
|
+
import ast
|
|
1561
|
+
|
|
1562
|
+
try:
|
|
1563
|
+
tree = ast.parse(content)
|
|
1564
|
+
except SyntaxError:
|
|
1565
|
+
return ""
|
|
1566
|
+
|
|
1567
|
+
lines = []
|
|
1568
|
+
lines.append(f"# {file_path.stem}")
|
|
1569
|
+
lines.append(f"\n> **File:** `{file_path}`")
|
|
1570
|
+
lines.append("\n## Overview\n")
|
|
1571
|
+
lines.append("TODO: Describe what this module does.\n")
|
|
1572
|
+
|
|
1573
|
+
# Get module docstring
|
|
1574
|
+
if ast.get_docstring(tree):
|
|
1575
|
+
lines.append(f"> {ast.get_docstring(tree)}\n")
|
|
1576
|
+
|
|
1577
|
+
classes = []
|
|
1578
|
+
functions = []
|
|
1579
|
+
|
|
1580
|
+
for node in ast.iter_child_nodes(tree):
|
|
1581
|
+
if isinstance(node, ast.ClassDef):
|
|
1582
|
+
if not include_private and node.name.startswith("_"):
|
|
1583
|
+
continue
|
|
1584
|
+
classes.append(node)
|
|
1585
|
+
elif isinstance(node, ast.FunctionDef) or isinstance(node, ast.AsyncFunctionDef):
|
|
1586
|
+
if not include_private and node.name.startswith("_"):
|
|
1587
|
+
continue
|
|
1588
|
+
functions.append(node)
|
|
1589
|
+
|
|
1590
|
+
# Document classes
|
|
1591
|
+
if classes:
|
|
1592
|
+
lines.append("---\n")
|
|
1593
|
+
lines.append("## Classes\n")
|
|
1594
|
+
|
|
1595
|
+
for cls in classes:
|
|
1596
|
+
lines.append(f"### `{cls.name}`\n")
|
|
1597
|
+
if ast.get_docstring(cls):
|
|
1598
|
+
lines.append(f"{ast.get_docstring(cls)}\n")
|
|
1599
|
+
else:
|
|
1600
|
+
lines.append("TODO: Describe this class.\n")
|
|
1601
|
+
|
|
1602
|
+
# Find __init__ and methods
|
|
1603
|
+
methods = []
|
|
1604
|
+
init_node = None
|
|
1605
|
+
for item in cls.body:
|
|
1606
|
+
if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
|
|
1607
|
+
if item.name == "__init__":
|
|
1608
|
+
init_node = item
|
|
1609
|
+
elif not item.name.startswith("_") or include_private:
|
|
1610
|
+
methods.append(item)
|
|
1611
|
+
|
|
1612
|
+
# Constructor
|
|
1613
|
+
if init_node:
|
|
1614
|
+
lines.append("#### Constructor\n")
|
|
1615
|
+
sig = get_function_signature(init_node)
|
|
1616
|
+
lines.append(f"```python\n{sig}\n```\n")
|
|
1617
|
+
params = get_function_params(init_node)
|
|
1618
|
+
if params:
|
|
1619
|
+
lines.append("| Parameter | Type | Default | Description |")
|
|
1620
|
+
lines.append("|-----------|------|---------|-------------|")
|
|
1621
|
+
for p in params:
|
|
1622
|
+
lines.append(f"| `{p['name']}` | `{p['type']}` | {p['default']} | TODO |")
|
|
1623
|
+
lines.append("")
|
|
1624
|
+
|
|
1625
|
+
# Methods
|
|
1626
|
+
if methods:
|
|
1627
|
+
lines.append("#### Methods\n")
|
|
1628
|
+
for method in methods:
|
|
1629
|
+
async_prefix = "async " if isinstance(method, ast.AsyncFunctionDef) else ""
|
|
1630
|
+
ret = get_return_annotation(method)
|
|
1631
|
+
lines.append(f"##### `{async_prefix}{method.name}(...) -> {ret}`\n")
|
|
1632
|
+
if ast.get_docstring(method):
|
|
1633
|
+
lines.append(f"{ast.get_docstring(method)}\n")
|
|
1634
|
+
else:
|
|
1635
|
+
lines.append("TODO: Describe this method.\n")
|
|
1636
|
+
|
|
1637
|
+
# Document functions
|
|
1638
|
+
if functions:
|
|
1639
|
+
lines.append("---\n")
|
|
1640
|
+
lines.append("## Functions\n")
|
|
1641
|
+
|
|
1642
|
+
for func in functions:
|
|
1643
|
+
async_prefix = "async " if isinstance(func, ast.AsyncFunctionDef) else ""
|
|
1644
|
+
ret = get_return_annotation(func)
|
|
1645
|
+
lines.append(f"### `{async_prefix}{func.name}(...) -> {ret}`\n")
|
|
1646
|
+
if ast.get_docstring(func):
|
|
1647
|
+
lines.append(f"{ast.get_docstring(func)}\n")
|
|
1648
|
+
else:
|
|
1649
|
+
lines.append("TODO: Describe this function.\n")
|
|
1650
|
+
|
|
1651
|
+
params = get_function_params(func)
|
|
1652
|
+
if params:
|
|
1653
|
+
lines.append("**Parameters:**\n")
|
|
1654
|
+
for p in params:
|
|
1655
|
+
lines.append(f"- `{p['name']}` (`{p['type']}`): TODO")
|
|
1656
|
+
lines.append("")
|
|
1657
|
+
|
|
1658
|
+
lines.append(f"**Returns:** `{ret}` - TODO\n")
|
|
1659
|
+
|
|
1660
|
+
return "\n".join(lines)
|
|
1661
|
+
|
|
1662
|
+
|
|
1663
|
+
def get_function_signature(node) -> str:
|
|
1664
|
+
"""Get function signature string."""
|
|
1665
|
+
import ast
|
|
1666
|
+
|
|
1667
|
+
args = []
|
|
1668
|
+
for arg in node.args.args:
|
|
1669
|
+
if arg.arg == "self":
|
|
1670
|
+
continue
|
|
1671
|
+
type_hint = ""
|
|
1672
|
+
if arg.annotation:
|
|
1673
|
+
type_hint = f": {ast.unparse(arg.annotation)}"
|
|
1674
|
+
args.append(f"{arg.arg}{type_hint}")
|
|
1675
|
+
|
|
1676
|
+
return f"def {node.name}({', '.join(args)})"
|
|
1677
|
+
|
|
1678
|
+
|
|
1679
|
+
def get_function_params(node) -> list:
|
|
1680
|
+
"""Get function parameters with types and defaults."""
|
|
1681
|
+
import ast
|
|
1682
|
+
|
|
1683
|
+
params = []
|
|
1684
|
+
defaults = node.args.defaults
|
|
1685
|
+
num_defaults = len(defaults)
|
|
1686
|
+
num_args = len(node.args.args)
|
|
1687
|
+
|
|
1688
|
+
for i, arg in enumerate(node.args.args):
|
|
1689
|
+
if arg.arg in ("self", "cls"):
|
|
1690
|
+
continue
|
|
1691
|
+
|
|
1692
|
+
type_hint = "Any"
|
|
1693
|
+
if arg.annotation:
|
|
1694
|
+
try:
|
|
1695
|
+
type_hint = ast.unparse(arg.annotation)
|
|
1696
|
+
except Exception:
|
|
1697
|
+
type_hint = "Any"
|
|
1698
|
+
|
|
1699
|
+
default = "-"
|
|
1700
|
+
default_idx = i - (num_args - num_defaults)
|
|
1701
|
+
if default_idx >= 0 and default_idx < len(defaults):
|
|
1702
|
+
try:
|
|
1703
|
+
default = f"`{ast.unparse(defaults[default_idx])}`"
|
|
1704
|
+
except Exception:
|
|
1705
|
+
default = "..."
|
|
1706
|
+
|
|
1707
|
+
params.append({
|
|
1708
|
+
"name": arg.arg,
|
|
1709
|
+
"type": type_hint,
|
|
1710
|
+
"default": default,
|
|
1711
|
+
})
|
|
1712
|
+
|
|
1713
|
+
return params
|
|
1714
|
+
|
|
1715
|
+
|
|
1716
|
+
def get_return_annotation(node) -> str:
|
|
1717
|
+
"""Get return type annotation."""
|
|
1718
|
+
import ast
|
|
1719
|
+
|
|
1720
|
+
if node.returns:
|
|
1721
|
+
try:
|
|
1722
|
+
return ast.unparse(node.returns)
|
|
1723
|
+
except Exception:
|
|
1724
|
+
return "Any"
|
|
1725
|
+
return "None"
|
|
1726
|
+
|
|
1727
|
+
|
|
1728
|
+
def generate_typescript_stub(file_path: Path, content: str, include_private: bool) -> str:
|
|
1729
|
+
"""Generate markdown stub from TypeScript/JavaScript code."""
|
|
1730
|
+
import re as re_module
|
|
1731
|
+
|
|
1732
|
+
lines = []
|
|
1733
|
+
lines.append(f"# {file_path.stem}")
|
|
1734
|
+
lines.append(f"\n> **File:** `{file_path}`")
|
|
1735
|
+
lines.append("\n## Overview\n")
|
|
1736
|
+
lines.append("TODO: Describe what this module does.\n")
|
|
1737
|
+
|
|
1738
|
+
# Find exports using regex
|
|
1739
|
+
class_pattern = r'export\s+(?:default\s+)?class\s+(\w+)(?:\s+extends\s+(\w+))?'
|
|
1740
|
+
func_pattern = r'export\s+(?:default\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)(?:\s*:\s*([^\{]+))?'
|
|
1741
|
+
const_pattern = r'export\s+const\s+(\w+)\s*(?::\s*([^=]+))?\s*='
|
|
1742
|
+
interface_pattern = r'export\s+(?:default\s+)?interface\s+(\w+)'
|
|
1743
|
+
type_pattern = r'export\s+type\s+(\w+)'
|
|
1744
|
+
|
|
1745
|
+
classes = re_module.findall(class_pattern, content)
|
|
1746
|
+
functions = re_module.findall(func_pattern, content)
|
|
1747
|
+
consts = re_module.findall(const_pattern, content)
|
|
1748
|
+
interfaces = re_module.findall(interface_pattern, content)
|
|
1749
|
+
types = re_module.findall(type_pattern, content)
|
|
1750
|
+
|
|
1751
|
+
# Interfaces and Types
|
|
1752
|
+
if interfaces or types:
|
|
1753
|
+
lines.append("---\n")
|
|
1754
|
+
lines.append("## Types\n")
|
|
1755
|
+
for iface in interfaces:
|
|
1756
|
+
lines.append(f"### `interface {iface}`\n")
|
|
1757
|
+
lines.append("TODO: Describe this interface.\n")
|
|
1758
|
+
for t in types:
|
|
1759
|
+
lines.append(f"### `type {t}`\n")
|
|
1760
|
+
lines.append("TODO: Describe this type.\n")
|
|
1761
|
+
|
|
1762
|
+
# Classes
|
|
1763
|
+
if classes:
|
|
1764
|
+
lines.append("---\n")
|
|
1765
|
+
lines.append("## Classes\n")
|
|
1766
|
+
for cls_name, extends in classes:
|
|
1767
|
+
lines.append(f"### `{cls_name}`")
|
|
1768
|
+
if extends:
|
|
1769
|
+
lines.append(f" extends `{extends}`")
|
|
1770
|
+
lines.append("\n")
|
|
1771
|
+
lines.append("TODO: Describe this class.\n")
|
|
1772
|
+
|
|
1773
|
+
# Functions
|
|
1774
|
+
if functions:
|
|
1775
|
+
lines.append("---\n")
|
|
1776
|
+
lines.append("## Functions\n")
|
|
1777
|
+
for func_name, params, return_type in functions:
|
|
1778
|
+
ret = return_type.strip() if return_type else "void"
|
|
1779
|
+
lines.append(f"### `{func_name}({params}) => {ret}`\n")
|
|
1780
|
+
lines.append("TODO: Describe this function.\n")
|
|
1781
|
+
|
|
1782
|
+
# Constants
|
|
1783
|
+
if consts:
|
|
1784
|
+
lines.append("---\n")
|
|
1785
|
+
lines.append("## Constants\n")
|
|
1786
|
+
lines.append("| Name | Type | Description |")
|
|
1787
|
+
lines.append("|------|------|-------------|")
|
|
1788
|
+
for const_name, const_type in consts:
|
|
1789
|
+
t = const_type.strip() if const_type else "unknown"
|
|
1790
|
+
lines.append(f"| `{const_name}` | `{t}` | TODO |")
|
|
1791
|
+
lines.append("")
|
|
1792
|
+
|
|
1793
|
+
if len(lines) <= 5: # Only header
|
|
1794
|
+
return ""
|
|
1795
|
+
|
|
1796
|
+
return "\n".join(lines)
|
|
1797
|
+
|
|
1798
|
+
|
|
1178
1799
|
@main.command()
|
|
1179
1800
|
@click.argument("docs_path", type=click.Path(exists=True, path_type=Path), default="docs")
|
|
1180
1801
|
@click.option("--path", type=click.Path(exists=True, path_type=Path), default=".")
|
|
@@ -1223,7 +1844,7 @@ def audit(docs_path: Path, path: Path, fix: bool, as_json: bool):
|
|
|
1223
1844
|
parts = content.split("---", 2)
|
|
1224
1845
|
if len(parts) >= 3:
|
|
1225
1846
|
existing_meta = yaml.safe_load(parts[1]) or {}
|
|
1226
|
-
except:
|
|
1847
|
+
except Exception:
|
|
1227
1848
|
pass
|
|
1228
1849
|
|
|
1229
1850
|
# Analyze file for suggestions
|
|
@@ -1370,7 +1991,7 @@ def update(check: bool):
|
|
|
1370
1991
|
from urllib.request import urlopen
|
|
1371
1992
|
from urllib.error import URLError
|
|
1372
1993
|
|
|
1373
|
-
current = "0.2.3"
|
|
1994
|
+
current = "0.2.5"
|
|
1374
1995
|
|
|
1375
1996
|
click.echo(f"Current version: {current}")
|
|
1376
1997
|
click.echo("Checking PyPI for updates...")
|