repo-docs-framework 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +119 -0
- package/bin/repo-docs.mjs +31 -0
- package/config/default.json +99 -0
- package/package.json +33 -0
- package/src/index.mjs +4 -0
- package/src/lib/args.mjs +51 -0
- package/src/lib/config.mjs +24 -0
- package/src/lib/languages.mjs +50 -0
- package/src/lib/run.mjs +214 -0
- package/src/lib/scan.mjs +40 -0
- package/src/lib/stack.mjs +74 -0
- package/src/lib/templates.mjs +56 -0
- package/src/lib/tree.mjs +69 -0
- package/src/lib/tree_sitter.mjs +81 -0
package/README.md
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
# Repo Docs Framework (Tree-sitter → Repo Map → Sys Design)
|
|
2
|
+
|
|
3
|
+
This is a **repo-agnostic framework** that generates Copilot-friendly documentation for **any** codebase:
|
|
4
|
+
|
|
5
|
+
```
|
|
6
|
+
Codebase
|
|
7
|
+
↓
|
|
8
|
+
Tree-sitter AST (optional, for symbols)
|
|
9
|
+
↓
|
|
10
|
+
Symbol extraction
|
|
11
|
+
↓
|
|
12
|
+
Repo Map (repo-map.md)
|
|
13
|
+
↓
|
|
14
|
+
System Design (sys-design.md = repo-map + architecture + api-flow + detected stack/languages)
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
It’s intentionally dependency-light:
|
|
18
|
+
- Uses **Node.js** only (no npm install required to run the generators)
|
|
19
|
+
- Uses **Tree-sitter CLI globally** (not installed in the target repo)
|
|
20
|
+
|
|
21
|
+
## What it generates
|
|
22
|
+
|
|
23
|
+
Inside a chosen output folder (default: `docs/`):
|
|
24
|
+
- `repo-map.md` (auto-generated)
|
|
25
|
+
- `sys-design.md` (auto-generated)
|
|
26
|
+
- `architecture.md` (optional template, if missing)
|
|
27
|
+
- `api-flow.md` (optional template, if missing)
|
|
28
|
+
|
|
29
|
+
## Quick start (any repo)
|
|
30
|
+
|
|
31
|
+
From the repo root you want to document:
|
|
32
|
+
|
|
33
|
+
```bash
|
|
34
|
+
node /path/to/repo-docs-framework/bin/repo-docs.mjs --root . --out docs
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
Re-run any time things change.
|
|
38
|
+
|
|
39
|
+
## Tree-sitter global install (recommended)
|
|
40
|
+
|
|
41
|
+
The framework can generate a repo map without Tree-sitter, but **symbol extraction requires it**.
|
|
42
|
+
|
|
43
|
+
### Install Tree-sitter CLI globally
|
|
44
|
+
|
|
45
|
+
Pick one:
|
|
46
|
+
|
|
47
|
+
```bash
|
|
48
|
+
# npm global install
|
|
49
|
+
npm i -g tree-sitter-cli
|
|
50
|
+
|
|
51
|
+
# or cargo install
|
|
52
|
+
cargo install tree-sitter-cli
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
Verify:
|
|
56
|
+
|
|
57
|
+
```bash
|
|
58
|
+
tree-sitter --version
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
### Install grammars (multi-language)
|
|
62
|
+
|
|
63
|
+
Tree-sitter discovers grammars via a global config.
|
|
64
|
+
|
|
65
|
+
1) Create config:
|
|
66
|
+
|
|
67
|
+
```bash
|
|
68
|
+
tree-sitter init-config
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
2) Add grammar directory in `~/.config/tree-sitter/config.json` (Linux):
|
|
72
|
+
|
|
73
|
+
```json
|
|
74
|
+
{
|
|
75
|
+
"parser-directories": [
|
|
76
|
+
"$HOME/.tree-sitter-parsers"
|
|
77
|
+
]
|
|
78
|
+
}
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
3) Clone grammars into that directory (names must start with `tree-sitter-`):
|
|
82
|
+
|
|
83
|
+
```bash
|
|
84
|
+
mkdir -p "$HOME/.tree-sitter-parsers"
|
|
85
|
+
cd "$HOME/.tree-sitter-parsers"
|
|
86
|
+
|
|
87
|
+
# Common starters
|
|
88
|
+
git clone https://github.com/tree-sitter/tree-sitter-typescript
|
|
89
|
+
git clone https://github.com/tree-sitter/tree-sitter-javascript
|
|
90
|
+
git clone https://github.com/tree-sitter/tree-sitter-python
|
|
91
|
+
git clone https://github.com/tree-sitter/tree-sitter-go
|
|
92
|
+
git clone https://github.com/tree-sitter/tree-sitter-java
|
|
93
|
+
git clone https://github.com/tree-sitter/tree-sitter-rust
|
|
94
|
+
git clone https://github.com/tree-sitter/tree-sitter-json
|
|
95
|
+
git clone https://github.com/tree-sitter/tree-sitter-yaml
|
|
96
|
+
git clone https://github.com/tree-sitter/tree-sitter-bash
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
Confirm:
|
|
100
|
+
|
|
101
|
+
```bash
|
|
102
|
+
tree-sitter dump-languages
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
## How it decides “stack” and “languages”
|
|
106
|
+
|
|
107
|
+
- **Languages**: file extension counts across the target `--root`, excluding configured ignore dirs.
|
|
108
|
+
- **Stack**: heuristic detection from:
|
|
109
|
+
- package manifests (`package.json`, `pyproject.toml`, `go.mod`, etc.)
|
|
110
|
+
- well-known config files (`docker-compose.yml`, `k8s/`, `terraform`, etc.)
|
|
111
|
+
- common folder patterns (`src/`, `apps/`, `services/`, etc.)
|
|
112
|
+
|
|
113
|
+
You can customize ignores and behavior via `--config` (see `config/default.json`).
|
|
114
|
+
|
|
115
|
+
## Suggested automation (optional)
|
|
116
|
+
|
|
117
|
+
- Pre-commit hook to regenerate docs
|
|
118
|
+
- CI job to ensure generated docs are up to date (fail on diff)
|
|
119
|
+
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { loadConfig, parseArgs, runRepoDocs } from "../src/index.mjs";
|
|
5
|
+
|
|
6
|
+
const __filename = fileURLToPath(import.meta.url);
|
|
7
|
+
const __dirname = path.dirname(__filename);
|
|
8
|
+
|
|
9
|
+
// Entry point: parse CLI flags, load the merged configuration, then
// generate the docs into the resolved output directory.
async function main() {
  const args = parseArgs(process.argv.slice(2));
  const cfg = await loadConfig({
    // Framework root is one level above bin/ (where this script lives).
    frameworkRoot: path.resolve(__dirname, ".."),
    configPath: args.config,
    overrides: {
      outDir: args.out,
      // allow renaming output filenames if user wants later
    }
  });

  await runRepoDocs({
    rootDir: path.resolve(process.cwd(), args.root ?? "."),
    // NOTE(review): parseArgs defaults args.out to "docs", so the
    // `?? cfg.outDir` fallback looks unreachable and a config-file
    // `outDir` is always shadowed — confirm whether that is intended.
    outDir: path.resolve(process.cwd(), args.out ?? cfg.outDir),
    cfg
  });
}

// Report any failure on stderr and signal it via the exit code
// (process.exitCode rather than process.exit() so pending stdio flushes).
main().catch((err) => {
  process.stderr.write(String(err?.stack ?? err) + "\n");
  process.exitCode = 1;
});
|
|
31
|
+
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
{
|
|
2
|
+
"outDir": "docs",
|
|
3
|
+
"repoMapFile": "repo-map.md",
|
|
4
|
+
"sysDesignFile": "sys-design.md",
|
|
5
|
+
"architectureFile": "architecture.md",
|
|
6
|
+
"apiFlowFile": "api-flow.md",
|
|
7
|
+
|
|
8
|
+
"maxTreeDepth": 6,
|
|
9
|
+
"maxFilesPerDir": 120,
|
|
10
|
+
|
|
11
|
+
"ignoreDirs": [
|
|
12
|
+
".git",
|
|
13
|
+
".hg",
|
|
14
|
+
".svn",
|
|
15
|
+
".idea",
|
|
16
|
+
".vscode",
|
|
17
|
+
"node_modules",
|
|
18
|
+
"dist",
|
|
19
|
+
"build",
|
|
20
|
+
"out",
|
|
21
|
+
"coverage",
|
|
22
|
+
".nyc_output",
|
|
23
|
+
".next",
|
|
24
|
+
".turbo",
|
|
25
|
+
".cache",
|
|
26
|
+
".venv",
|
|
27
|
+
"venv",
|
|
28
|
+
"__pycache__",
|
|
29
|
+
"target",
|
|
30
|
+
"vendor",
|
|
31
|
+
".terraform",
|
|
32
|
+
".gradle",
|
|
33
|
+
".m2"
|
|
34
|
+
],
|
|
35
|
+
|
|
36
|
+
"ignoreFiles": [
|
|
37
|
+
"pnpm-lock.yaml",
|
|
38
|
+
"package-lock.json",
|
|
39
|
+
"yarn.lock",
|
|
40
|
+
"poetry.lock",
|
|
41
|
+
"uv.lock",
|
|
42
|
+
"composer.lock",
|
|
43
|
+
"Cargo.lock",
|
|
44
|
+
"go.sum",
|
|
45
|
+
"*.tsbuildinfo",
|
|
46
|
+
"*.log"
|
|
47
|
+
],
|
|
48
|
+
|
|
49
|
+
"treeIncludeExtensions": [
|
|
50
|
+
".ts",
|
|
51
|
+
".tsx",
|
|
52
|
+
".js",
|
|
53
|
+
".jsx",
|
|
54
|
+
".mjs",
|
|
55
|
+
".cjs",
|
|
56
|
+
".py",
|
|
57
|
+
".go",
|
|
58
|
+
".rs",
|
|
59
|
+
".java",
|
|
60
|
+
".kt",
|
|
61
|
+
".cs",
|
|
62
|
+
".php",
|
|
63
|
+
".rb",
|
|
64
|
+
".swift",
|
|
65
|
+
".scala",
|
|
66
|
+
".tf",
|
|
67
|
+
".yaml",
|
|
68
|
+
".yml",
|
|
69
|
+
".json",
|
|
70
|
+
".toml",
|
|
71
|
+
".ini",
|
|
72
|
+
".md",
|
|
73
|
+
".sh",
|
|
74
|
+
".dockerfile"
|
|
75
|
+
],
|
|
76
|
+
|
|
77
|
+
"symbolExtensions": [
|
|
78
|
+
".ts",
|
|
79
|
+
".tsx",
|
|
80
|
+
".js",
|
|
81
|
+
".jsx",
|
|
82
|
+
".mjs",
|
|
83
|
+
".cjs",
|
|
84
|
+
".py",
|
|
85
|
+
".go",
|
|
86
|
+
".rs",
|
|
87
|
+
".java",
|
|
88
|
+
".kt",
|
|
89
|
+
".cs",
|
|
90
|
+
".php",
|
|
91
|
+
".rb"
|
|
92
|
+
],
|
|
93
|
+
|
|
94
|
+
"copilot": {
|
|
95
|
+
"recommendedContextFiles": ["architecture.md", "repo-map.md", "api-flow.md", "sys-design.md"],
|
|
96
|
+
"recommendedHint": "Use repo-map.md + sys-design.md as the repo map. Prefer source directories over generated outputs; avoid editing build artifacts. When adding a feature, update the entrypoints, the core module, and any API/schema definitions."
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
package/package.json
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "repo-docs-framework",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Repo-agnostic Tree-sitter powered repo-map + system-design generator (Copilot-friendly).",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "UNLICENSED",
|
|
7
|
+
"bin": {
|
|
8
|
+
"repo-docs": "./bin/repo-docs.mjs"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"bin/",
|
|
12
|
+
"config/",
|
|
13
|
+
"src/",
|
|
14
|
+
"README.md"
|
|
15
|
+
],
|
|
16
|
+
"engines": {
|
|
17
|
+
"node": ">=18"
|
|
18
|
+
},
|
|
19
|
+
"scripts": {
|
|
20
|
+
"test:smoke": "node ./test/smoke.mjs",
|
|
21
|
+
"prepack": "npm run test:smoke"
|
|
22
|
+
},
|
|
23
|
+
"keywords": [
|
|
24
|
+
"tree-sitter",
|
|
25
|
+
"repo-map",
|
|
26
|
+
"architecture",
|
|
27
|
+
"system-design",
|
|
28
|
+
"copilot",
|
|
29
|
+
"documentation",
|
|
30
|
+
"cli"
|
|
31
|
+
]
|
|
32
|
+
}
|
|
33
|
+
|
package/src/index.mjs
ADDED
package/src/lib/args.mjs
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
// Help text printed for --help and appended to argument errors.
function usage() {
  return `
Repo Docs Framework

Generate:
- repo-map.md
- sys-design.md
- (optional templates) architecture.md, api-flow.md

Usage:
repo-docs.mjs --root <path> --out <path> [--config <path>]

Examples:
node tools/repo-docs-framework/bin/repo-docs.mjs --root . --out docs
node /abs/path/repo-docs.mjs --root /path/to/repo --out /path/to/repo/docs

Flags:
--root Target repository root to scan (default ".")
--out Output docs folder (default "docs")
--config Optional path to config JSON
--help Show help
`.trim();
}

/**
 * Parse CLI argv into an options object.
 *
 * @param {string[]} argv - typically process.argv.slice(2)
 * @returns {{root: string, out: string, config: (string|undefined)}}
 * @throws {Error} on an unknown flag, or a flag missing its value
 *   (previously a trailing `--root`/`--out`/`--config` silently produced
 *   `undefined`).
 */
export function parseArgs(argv) {
  const out = { root: ".", out: "docs", config: undefined };

  // Consume the value that must follow `flag`; fail loudly if absent.
  const takeValue = (flag, i) => {
    const v = argv[i];
    if (v === undefined) throw new Error(`Missing value for ${flag}\n\n${usage()}`);
    return v;
  };

  for (let i = 0; i < argv.length; i++) {
    const a = argv[i];
    if (a === "--help" || a === "-h") {
      process.stdout.write(usage() + "\n");
      process.exit(0);
    }
    if (a === "--root") {
      out.root = takeValue(a, ++i);
      continue;
    }
    if (a === "--out") {
      out.out = takeValue(a, ++i);
      continue;
    }
    if (a === "--config") {
      out.config = takeValue(a, ++i);
      continue;
    }
    if (a.startsWith("-")) {
      throw new Error(`Unknown flag: ${a}\n\n${usage()}`);
    }
    // Bare positional arguments are ignored (original behavior).
  }
  return out;
}
|
|
51
|
+
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { promises as fs } from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
// Read a file and parse it as JSON; rejects on read or parse failure.
async function readJson(filePath) {
  const raw = await fs.readFile(filePath, "utf8");
  return JSON.parse(raw);
}
|
|
7
|
+
|
|
8
|
+
// Recursively merge plain objects, with `b` taking precedence. Arrays and
// scalars are replaced wholesale; a nullish `b` falls back to `a`.
function deepMerge(a, b) {
  const isPlainObject = (x) => x && typeof x === "object" && !Array.isArray(x);
  if (isPlainObject(a) && isPlainObject(b)) {
    const merged = { ...a };
    Object.entries(b).forEach(([key, value]) => {
      merged[key] = deepMerge(a[key], value);
    });
    return merged;
  }
  return b ?? a;
}
|
|
17
|
+
|
|
18
|
+
/**
 * Build the effective configuration:
 * framework defaults <- optional user config file <- in-process overrides.
 *
 * @param {{frameworkRoot: string, configPath?: string, overrides?: object}} opts
 * @returns {Promise<object>} merged configuration object
 */
export async function loadConfig({ frameworkRoot, configPath, overrides }) {
  const base = await readJson(path.join(frameworkRoot, "config/default.json"));
  let user = {};
  if (configPath) {
    user = await readJson(path.resolve(process.cwd(), configPath));
  }
  return deepMerge(deepMerge(base, user), overrides ?? {});
}
|
|
24
|
+
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
|
|
3
|
+
// File extension → human-readable language label.
const extToLang = new Map([
  [".ts", "TypeScript"],
  [".tsx", "TypeScript (React)"],
  [".js", "JavaScript"],
  [".jsx", "JavaScript (React)"],
  [".mjs", "JavaScript (ESM)"],
  [".cjs", "JavaScript (CJS)"],
  [".py", "Python"],
  [".go", "Go"],
  [".rs", "Rust"],
  [".java", "Java"],
  [".kt", "Kotlin"],
  [".cs", "C#"],
  [".php", "PHP"],
  [".rb", "Ruby"],
  [".swift", "Swift"],
  [".scala", "Scala"],
  [".sql", "SQL"],
  [".tf", "Terraform"],
  [".yaml", "YAML"],
  [".yml", "YAML"],
  [".toml", "TOML"],
  [".ini", "INI"],
  [".json", "JSON"],
  [".md", "Markdown"],
  [".sh", "Shell"]
]);

// Lowercased extension (including the dot) of a relative path.
function ext(rel) {
  const suffix = path.extname(rel);
  return suffix.toLowerCase();
}

/**
 * Tally files by language and by extension.
 *
 * Files with no extension still count toward the "Other/Unknown" language
 * bucket but are omitted from the extension table.
 *
 * @param {string[]} filesRel - repo-relative file paths
 * @returns {{langs: Array<[string, number]>, exts: Array<[string, number]>}}
 *   entry lists sorted by descending file count
 */
export function summarizeLanguages(filesRel) {
  const bump = (map, key) => map.set(key, (map.get(key) ?? 0) + 1);
  const langCounts = new Map();
  const extCounts = new Map();

  filesRel.forEach((rel) => {
    const e = ext(rel);
    if (e) bump(extCounts, e);
    bump(langCounts, extToLang.get(e) ?? "Other/Unknown");
  });

  const byCountDesc = (x, y) => y[1] - x[1];
  return {
    langs: [...langCounts.entries()].sort(byCountDesc),
    exts: [...extCounts.entries()].sort(byCountDesc)
  };
}
|
|
50
|
+
|
package/src/lib/run.mjs
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
import { promises as fs } from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
import { walkFiles } from "./scan.mjs";
|
|
5
|
+
import { treeLines } from "./tree.mjs";
|
|
6
|
+
import { summarizeLanguages } from "./languages.mjs";
|
|
7
|
+
import { detectStack } from "./stack.mjs";
|
|
8
|
+
import { extractSymbols } from "./tree_sitter.mjs";
|
|
9
|
+
import { architectureTemplate, apiFlowTemplate } from "./templates.mjs";
|
|
10
|
+
|
|
11
|
+
// Current timestamp in ISO-8601 (UTC), used for generated front matter.
function nowIso() {
  const stamp = new Date();
  return stamp.toISOString();
}
|
|
14
|
+
|
|
15
|
+
// True when `p` exists (any file type); never throws.
async function exists(p) {
  return fs.stat(p).then(
    () => true,
    () => false
  );
}
|
|
23
|
+
|
|
24
|
+
// Read an entire file as UTF-8 text.
async function readUtf8(p) {
  return fs.readFile(p, "utf8");
}
|
|
27
|
+
|
|
28
|
+
// Write UTF-8 text to `p`, creating parent directories as needed.
async function writeUtf8(p, content) {
  const parent = path.dirname(p);
  await fs.mkdir(parent, { recursive: true });
  await fs.writeFile(p, content, "utf8");
}
|
|
32
|
+
|
|
33
|
+
// Escape `|` so a value is safe inside a Markdown table cell.
function mdEsc(s) {
  return String(s).split("|").join("\\|");
}
|
|
36
|
+
|
|
37
|
+
// Render grouped symbols as a nested Markdown bullet list, capping the
// number of entries shown per file and noting how many were hidden.
function formatSymbols(grouped, { maxPerFile = 60 } = {}) {
  // Local pipe-escape so entries stay table/inline-code safe.
  const esc = (s) => String(s).replaceAll("|", "\\|");
  const lines = [];
  grouped.forEach(([file, syms]) => {
    lines.push(`- **\`${esc(file)}\`**`);
    syms.slice(0, maxPerFile).forEach((s) => {
      const loc = s.line ? `:${s.line}` : "";
      lines.push(` - \`${esc(s.kind)}\` \`${esc(s.name)}\`${loc}`);
    });
    const hidden = syms.length - maxPerFile;
    if (hidden > 0) lines.push(` - … (${hidden} more)`);
  });
  return lines;
}
|
|
50
|
+
|
|
51
|
+
// Serialize a flat object as a YAML-style front-matter block. Array
// values become one-item-per-line lists; everything else is `key: value`.
function frontMatter(obj) {
  const lines = ["---"];
  Object.entries(obj).forEach(([key, value]) => {
    if (Array.isArray(value)) {
      lines.push(`${key}:`);
      value.forEach((item) => lines.push(` - ${item}`));
    } else {
      lines.push(`${key}: ${value}`);
    }
  });
  lines.push("---");
  return lines.join("\n");
}
|
|
64
|
+
|
|
65
|
+
// Last path segment of the scanned root, with a fallback for roots whose
// basename is empty (e.g. "/").
function rootNameFromPath(rootDir) {
  return path.basename(rootDir) || "repo";
}
|
|
69
|
+
|
|
70
|
+
// Parse <rootDir>/package.json when present and valid; otherwise null.
async function readPackageJsonMaybe(rootDir) {
  const manifest = path.join(rootDir, "package.json");
  if (!(await exists(manifest))) return null;
  try {
    return JSON.parse(await readUtf8(manifest));
  } catch {
    // A malformed manifest is treated the same as a missing one.
    return null;
  }
}
|
|
79
|
+
|
|
80
|
+
/**
 * Generate the documentation set for one repository.
 *
 * Writes (under `outDir`):
 * - the architecture/api-flow templates, only when they do not exist yet
 *   (they are human-owned afterwards);
 * - `repo-map.md` (front matter + filtered tree + symbol index);
 * - `sys-design.md` (front matter + detected stack/language tables +
 *   the three docs above merged into one file).
 *
 * @param {{rootDir: string, outDir: string, cfg: object}} opts - resolved
 *   scan root, output folder, and merged configuration.
 */
export async function runRepoDocs({ rootDir, outDir, cfg }) {
  const filesRel = await walkFiles(rootDir, cfg);
  const pkg = await readPackageJsonMaybe(rootDir);

  const rootName = rootNameFromPath(rootDir);
  const stack = detectStack({ filesRel, packageJson: pkg });
  const { langs, exts } = summarizeLanguages(filesRel);

  // Ensure base docs exist (templates) in output folder.
  const architecturePath = path.join(outDir, cfg.architectureFile);
  const apiFlowPath = path.join(outDir, cfg.apiFlowFile);
  const repoMapPath = path.join(outDir, cfg.repoMapFile);
  const sysDesignPath = path.join(outDir, cfg.sysDesignFile);

  // Seed templates only if absent so human edits are never overwritten.
  if (!(await exists(architecturePath))) {
    await writeUtf8(architecturePath, architectureTemplate({ rootName, stackList: stack }));
  }
  if (!(await exists(apiFlowPath))) {
    await writeUtf8(apiFlowPath, apiFlowTemplate({ rootName }));
  }

  // Repo map generation.
  const tree = treeLines(filesRel, cfg);
  // Synchronous: extractSymbols shells out to the tree-sitter CLI.
  const symbols = extractSymbols({ rootDir, filesRel, cfg });

  const repoMapMd = [];
  repoMapMd.push(
    frontMatter({
      generatedAt: nowIso(),
      scope: rootName,
      generator: "repo-docs-framework",
      rootDir: path.resolve(rootDir)
    })
  );
  repoMapMd.push("");
  repoMapMd.push("## Repo map");
  repoMapMd.push("");
  repoMapMd.push("Regenerate:");
  repoMapMd.push("");
  repoMapMd.push("```bash");
  // NOTE(review): this command assumes the framework is checked out at
  // tools/repo-docs-framework relative to the caller's cwd — confirm, or
  // derive the script path from import.meta.url instead.
  repoMapMd.push(`node tools/repo-docs-framework/bin/repo-docs.mjs --root "${path.resolve(rootDir)}" --out "${path.resolve(outDir)}"`);
  repoMapMd.push("```");
  repoMapMd.push("");
  repoMapMd.push("### Directory tree (filtered)");
  repoMapMd.push("");
  repoMapMd.push("```text");
  repoMapMd.push(`${rootName}/`);
  for (const l of tree) repoMapMd.push(` ${l}`);
  repoMapMd.push("```");
  repoMapMd.push("");
  repoMapMd.push("### Symbol index (Tree-sitter tags)");
  repoMapMd.push("");
  // Three outcomes: CLI missing, CLI present but no symbols, or a
  // (possibly partial) index.
  if (!symbols.available) {
    repoMapMd.push("**Tree-sitter CLI not found.** Install it globally to enable symbol extraction.");
  } else if (symbols.grouped.length === 0) {
    repoMapMd.push("**No symbols extracted.** Install/configure the required grammars for your languages.");
  } else {
    if (symbols.failures) repoMapMd.push(`_Note: tags failed for ${symbols.failures} file(s); index is partial._\n`);
    repoMapMd.push(...formatSymbols(symbols.grouped, { maxPerFile: 60 }));
  }
  repoMapMd.push("");
  repoMapMd.push("### Copilot Chat suggested context");
  repoMapMd.push("");
  const ctx = (cfg.copilot?.recommendedContextFiles ?? []).map((f) => `- \`${f}\``);
  repoMapMd.push(...ctx);
  repoMapMd.push("");
  if (cfg.copilot?.recommendedHint) {
    repoMapMd.push("Suggested hint to paste into Copilot Chat:");
    repoMapMd.push("");
    repoMapMd.push("```text");
    repoMapMd.push(cfg.copilot.recommendedHint);
    repoMapMd.push("```");
    repoMapMd.push("");
  }
  await writeUtf8(repoMapPath, repoMapMd.join("\n"));

  // Sys design = merge + detected tables.
  // repo-map.md is re-read from disk (just written above) so the merged
  // doc embeds exactly what was persisted.
  const [architectureMd, apiFlowMd, repoMapFinal] = await Promise.all([
    readUtf8(architecturePath),
    readUtf8(apiFlowPath),
    readUtf8(repoMapPath)
  ]);

  const sys = [];
  sys.push(
    frontMatter({
      generatedAt: nowIso(),
      scope: rootName,
      generator: "repo-docs-framework",
      // Record inputs as POSIX-style paths relative to the output folder.
      inputs: [path.relative(outDir, architecturePath), path.relative(outDir, apiFlowPath), path.relative(outDir, repoMapPath)].map((p) =>
        p.split(path.sep).join("/")
      )
    })
  );
  sys.push("");
  sys.push(`# System design (${rootName})`);
  sys.push("");
  sys.push("## Stack (detected)");
  sys.push("");
  for (const s of stack) sys.push(`- ${s}`);
  if (!stack.length) sys.push("- (no stack signals detected)");
  sys.push("");
  sys.push("## Languages (detected)");
  sys.push("");
  sys.push("| Language | Files |");
  sys.push("|---|---:|");
  for (const [lang, count] of langs) sys.push(`| ${lang} | ${count} |`);
  sys.push("");
  sys.push("### Extensions (top)");
  sys.push("");
  sys.push("| Extension | Files |");
  sys.push("|---|---:|");
  // Cap the extension table at the 20 most common extensions.
  for (const [e, count] of exts.slice(0, 20)) sys.push(`| \`${e}\` | ${count} |`);
  sys.push("");
  sys.push("## Merged docs");
  sys.push("");
  sys.push("### Architecture");
  sys.push("");
  sys.push(architectureMd.trim());
  sys.push("");
  sys.push("### API flow");
  sys.push("");
  sys.push(apiFlowMd.trim());
  sys.push("");
  sys.push("### Repo map");
  sys.push("");
  sys.push(repoMapFinal.trim());
  sys.push("");

  await writeUtf8(sysDesignPath, sys.join("\n"));

  process.stdout.write(`Wrote ${path.relative(process.cwd(), repoMapPath)}\n`);
  process.stdout.write(`Wrote ${path.relative(process.cwd(), sysDesignPath)}\n`);
}
|
|
214
|
+
|
package/src/lib/scan.mjs
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import { promises as fs } from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
/**
 * Minimal glob matcher for ignoreFiles entries.
 *
 * Supports exact names and single-`*` patterns: "*.log", "tmp*", and
 * "cache*.json" all work; `*` matches any run of characters (including
 * none). Previously every `*` was stripped and only an endsWith check was
 * performed, so "tmp*" wrongly required the name to END with "tmp".
 *
 * @param {string} name - file basename to test
 * @param {string[]} patterns - configured ignore patterns
 * @returns {boolean}
 */
function matchesGlobLike(name, patterns) {
  for (const p of patterns) {
    const star = p.indexOf("*");
    if (star === -1) {
      if (name === p) return true;
      continue;
    }
    // Split on the first `*`: the name must start with the prefix, end
    // with the suffix, and be long enough that the two do not overlap.
    const prefix = p.slice(0, star);
    const suffix = p.slice(star + 1).replaceAll("*", "");
    if (
      name.length >= prefix.length + suffix.length &&
      name.startsWith(prefix) &&
      name.endsWith(suffix)
    ) {
      return true;
    }
  }
  return false;
}
|
|
16
|
+
|
|
17
|
+
/**
 * Recursively list files under `rootDir` as POSIX-style relative paths,
 * skipping configured ignore directories (by basename) and ignore-file
 * patterns. Non-file, non-directory entries (e.g. symlinks) are skipped.
 *
 * @param {string} rootDir - scan root
 * @param {{ignoreDirs?: string[], ignoreFiles?: string[]}} cfg
 * @returns {Promise<string[]>}
 */
export async function walkFiles(rootDir, cfg) {
  const skipDirs = new Set(cfg.ignoreDirs ?? []);
  const skipFilePatterns = cfg.ignoreFiles ?? [];
  const collected = [];

  async function visit(dirAbs) {
    const entries = await fs.readdir(dirAbs, { withFileTypes: true });
    for (const entry of entries) {
      const abs = path.join(dirAbs, entry.name);
      if (entry.isDirectory()) {
        if (!skipDirs.has(entry.name)) await visit(abs);
        continue;
      }
      if (!entry.isFile()) continue;
      if (matchesGlobLike(entry.name, skipFilePatterns)) continue;
      collected.push(path.relative(rootDir, abs).split(path.sep).join("/"));
    }
  }

  await visit(rootDir);
  return collected;
}
|
|
40
|
+
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
// Heuristic stack detection for arbitrary repos.
// Strategy: look for well-known manifest/config files and dependency names.

// True when the exact relative path is present in the file list.
function hasFile(filesRel, p) {
  return filesRel.includes(p);
}

// True when any relative path starts with the given prefix (e.g. "k8s/").
function hasPrefix(filesRel, pref) {
  return filesRel.some((f) => f.startsWith(pref));
}

// All dependency names (runtime + dev) from a parsed package.json.
function getJsonDeps(pkgJson) {
  const deps = { ...(pkgJson?.dependencies ?? {}), ...(pkgJson?.devDependencies ?? {}) };
  return new Set(Object.keys(deps));
}

// Push `label` onto `arr` when `cond` holds.
function addIf(arr, cond, label) {
  if (cond) arr.push(label);
}

/**
 * Detect technology-stack labels from file paths and package.json deps.
 *
 * @param {{filesRel: string[], packageJson: (object|null)}} input
 * @returns {string[]} de-duplicated labels, detection order preserved
 */
export function detectStack({ filesRel, packageJson }) {
  const out = [];
  const deps = packageJson ? getJsonDeps(packageJson) : new Set();

  // Containers & orchestration.
  addIf(out, hasFile(filesRel, "docker-compose.yml") || hasFile(filesRel, "compose.yaml"), "Docker Compose");
  addIf(out, hasPrefix(filesRel, "k8s/") || hasPrefix(filesRel, "helm/"), "Kubernetes");

  // IaC.
  addIf(out, hasPrefix(filesRel, "terraform/") || filesRel.some((f) => f.endsWith(".tf")), "Terraform");
  addIf(out, hasFile(filesRel, "cdk.json") || deps.has("aws-cdk-lib"), "AWS CDK");
  addIf(out, deps.has("serverless") || hasFile(filesRel, "serverless.yml"), "Serverless Framework");

  // JS/TS ecosystem.
  addIf(out, hasFile(filesRel, "package.json"), "Node.js");
  addIf(out, hasFile(filesRel, "pnpm-lock.yaml"), "pnpm");
  addIf(out, hasFile(filesRel, "yarn.lock"), "Yarn");
  addIf(out, hasFile(filesRel, "package-lock.json"), "npm");
  addIf(out, hasFile(filesRel, "tsconfig.json") || deps.has("typescript"), "TypeScript");
  // Fixed: the old condition duplicated the `hasPrefix(filesRel, "apps/")`
  // check and required a monorepo "apps/" layout; a next.config file
  // anywhere in the tree now counts as a Next.js signal.
  addIf(out, deps.has("next") || filesRel.some((f) => f.includes("next.config")), "Next.js");
  addIf(out, deps.has("react") || filesRel.some((f) => f.endsWith(".tsx") || f.endsWith(".jsx")), "React");
  addIf(out, deps.has("vite"), "Vite");
  addIf(out, deps.has("express"), "Express");
  addIf(out, deps.has("fastify"), "Fastify");
  // Fixed: the package is "@nestjs/core"; the bare "nestjs/core" spelling
  // is not a valid npm dependency name and could never match.
  addIf(out, deps.has("@nestjs/core"), "NestJS");

  // Python.
  addIf(out, hasFile(filesRel, "pyproject.toml") || hasFile(filesRel, "requirements.txt"), "Python");
  addIf(out, filesRel.some((f) => f.includes("poetry.lock")), "Poetry");

  // Go.
  addIf(out, hasFile(filesRel, "go.mod"), "Go");

  // Rust.
  addIf(out, hasFile(filesRel, "Cargo.toml"), "Rust");

  // Java/Kotlin.
  addIf(out, hasFile(filesRel, "pom.xml"), "Maven (Java)");
  addIf(out, hasFile(filesRel, "build.gradle") || hasFile(filesRel, "build.gradle.kts"), "Gradle (Java/Kotlin)");

  // .NET.
  addIf(out, filesRel.some((f) => f.endsWith(".csproj") || f.endsWith(".sln")), ".NET");

  // Common clouds/services.
  addIf(out, deps.has("@aws-sdk/client-s3") || deps.has("@aws-sdk/client-dynamodb") || deps.has("@aws-sdk/client-lambda"), "AWS SDK");
  addIf(out, deps.has("@google-cloud/storage") || deps.has("@google-cloud/firestore"), "Google Cloud SDK");
  addIf(out, deps.has("@azure/storage-blob"), "Azure SDK");

  // API specs.
  addIf(out, filesRel.some((f) => f.toLowerCase().includes("openapi") || f.endsWith("openapi.yaml") || f.endsWith("openapi.yml")), "OpenAPI");

  return [...new Set(out)];
}
|
|
74
|
+
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
/**
 * Seed content for architecture.md. Written only when the file does not
 * already exist — it is meant to be edited by humans afterwards.
 *
 * @param {{rootName: string, stackList: string[]}} params - repo display
 *   name and detected stack labels.
 * @returns {string} Markdown document text
 */
export function architectureTemplate({ rootName, stackList }) {
  // Detected stack labels as bullets, or a placeholder when none detected.
  const stack = stackList.length ? stackList.map((s) => `- ${s}`).join("\n") : "- (detected at generation time)";
  return `
# Architecture (${rootName})

This file is a **human-owned** summary of the system architecture.

## Stack (detected hints)

${stack}

## Key entry points

- **Entrypoints**: list runtime entrypoints (server startup, CLI, workers)
- **Core modules**: list the modules that hold the domain/business logic
- **Data stores**: list databases/queues/caches used

## Conventions

- Note which directories are **source of truth** vs generated output.
- Note how to run locally and how to deploy.
`.trimStart();
}
|
|
24
|
+
|
|
25
|
+
/**
 * Seed content for api-flow.md. Written only when the file does not
 * already exist — it is meant to be edited by humans afterwards.
 *
 * @param {{rootName: string}} params - repo display name for the heading.
 * @returns {string} Markdown document text
 */
export function apiFlowTemplate({ rootName }) {
  return `
# API flow (${rootName})

Describe request flow(s) relevant to this repo.

## Local flow

\`\`\`text
Client
↓
Local server / emulator
↓
Handlers / controllers
↓
DB / queues / external services
\`\`\`

## Deployed flow

\`\`\`text
Client
↓
Edge / gateway
↓
App services
↓
Data stores + integrations
\`\`\`
`.trimStart();
}
|
|
56
|
+
|
package/src/lib/tree.mjs
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
|
|
3
|
+
// Lowercased extension (with dot) of a relative path.
function extOf(relPath) {
  return path.extname(relPath).toLowerCase();
}

// Whether a file should appear in the rendered tree: Dockerfiles always,
// otherwise only files with a configured extension.
function includeInTree(relPath, cfg) {
  const base = path.basename(relPath).toLowerCase();
  if (base === "dockerfile") return true;
  const ext = extOf(relPath);
  return (cfg.treeIncludeExtensions ?? []).includes(ext);
}

/**
 * Render the filtered file list as box-drawing tree lines (the root line
 * itself is emitted by the caller).
 *
 * @param {string[]} filesRel - POSIX-style relative paths
 * @param {{treeIncludeExtensions?: string[], maxTreeDepth?: number, maxFilesPerDir?: number}} cfg
 * @returns {string[]} one rendered line per visible entry
 */
export function treeLines(filesRel, cfg) {
  const files = filesRel.filter((f) => includeInTree(f, cfg)).sort();
  const root = { dirs: new Map(), files: [] };

  // Build a nested directory trie from the flat path list.
  for (const f of files) {
    const parts = f.split("/").filter(Boolean);
    let cur = root;
    for (let i = 0; i < parts.length; i++) {
      const part = parts[i];
      const isLeaf = i === parts.length - 1;
      if (isLeaf) cur.files.push(part);
      else {
        if (!cur.dirs.has(part)) cur.dirs.set(part, { dirs: new Map(), files: [] });
        cur = cur.dirs.get(part);
      }
    }
  }

  const maxDepth = cfg.maxTreeDepth ?? 6;
  const maxFilesPerDir = cfg.maxFilesPerDir ?? 120;
  const lines = [];

  function rec(node, prefix, depth) {
    if (depth > maxDepth) return;
    const dirNames = [...node.dirs.keys()].sort();
    const fileNames = [...node.files].sort();

    const total = dirNames.length + fileNames.length;
    const truncated = total > maxFilesPerDir;
    let shown = 0;

    for (const d of dirNames) {
      shown++;
      // Fixed: an entry is "last" only when nothing — including the
      // truncation marker — follows it. Previously the final shown entry
      // and the "… more" line could both receive the └── glyph.
      const isLast = !truncated && shown === total;
      lines.push(`${prefix}${isLast ? "└── " : "├── "}${d}/`);
      rec(node.dirs.get(d), `${prefix}${isLast ? "    " : "│   "}`, depth + 1);
      if (shown >= maxFilesPerDir) break;
    }

    if (shown < maxFilesPerDir) {
      for (const f of fileNames) {
        shown++;
        const isLast = !truncated && shown === total;
        lines.push(`${prefix}${isLast ? "└── " : "├── "}${f}`);
        if (shown >= maxFilesPerDir) break;
      }
    }

    if (truncated) {
      lines.push(`${prefix}└── … (${total - maxFilesPerDir} more items)`);
    }
  }

  rec(root, "", 1);
  return lines;
}
|
|
69
|
+
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { spawnSync } from "node:child_process";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
|
|
4
|
+
// Lowercased extension (with dot) of a relative path.
function ext(rel) {
  const suffix = path.extname(rel);
  return suffix.toLowerCase();
}
|
|
7
|
+
|
|
8
|
+
// Probe for a working global `tree-sitter` binary; false when the spawn
// fails or the command exits non-zero.
export function hasTreeSitter() {
  const probe = spawnSync("tree-sitter", ["--version"], { encoding: "utf8" });
  return probe.status === 0;
}
|
|
12
|
+
|
|
13
|
+
// Whether this file's extension is configured for symbol extraction.
function shouldSymbolScan(rel, cfg) {
  const allowed = cfg.symbolExtensions ?? [];
  return allowed.includes(path.extname(rel).toLowerCase());
}
|
|
16
|
+
|
|
17
|
+
// Run `tree-sitter tags` for one file, from the repo root so relative
// paths in the output stay repo-relative.
function runTags(rootDir, fileRel) {
  const result = spawnSync("tree-sitter", ["tags", fileRel], {
    cwd: rootDir,
    encoding: "utf8",
    maxBuffer: 20 * 1024 * 1024 // tags output can be large for big files
  });
  const stdout = result.stdout ?? "";
  if (result.status === 0) return { ok: true, stdout };
  return { ok: false, stdout, stderr: result.stderr ?? "" };
}
|
|
26
|
+
|
|
27
|
+
/**
 * Parse ctags-style tab-separated output into symbol records.
 *
 * Rows with fewer than four fields are skipped. The line number comes
 * from a `line:N` extension field when present; otherwise the first run
 * of digits in the ex-command field is used as a fallback.
 *
 * @param {string} stdout - raw `tree-sitter tags` output
 * @returns {Array<{name: string, file: string, kind: string, line: (number|null)}>}
 */
function parseCtags(stdout) {
  const symbols = [];
  stdout
    .split(/\r?\n/)
    .filter(Boolean)
    .forEach((row) => {
      const fields = row.split("\t");
      if (fields.length < 4) return;
      const [name, file, exCmd, kind, ...extra] = fields;
      let lineNo = null;
      const tagged = extra.find((field) => field.startsWith("line:"));
      if (tagged) {
        const n = Number(tagged.slice("line:".length));
        if (Number.isFinite(n)) lineNo = n;
      } else {
        const digits = exCmd.match(/(\d+)/);
        if (digits) {
          const n = Number(digits[1]);
          if (Number.isFinite(n)) lineNo = n;
        }
      }
      symbols.push({ name, file, kind, line: lineNo });
    });
  return symbols;
}
|
|
50
|
+
|
|
51
|
+
// Group symbols by source file. Within a file, symbols are ordered by
// kind then name; the file entries themselves are ordered by path.
function groupByFile(symbols) {
  const byFile = new Map();
  symbols.forEach((s) => {
    const bucket = byFile.get(s.file);
    if (bucket) bucket.push(s);
    else byFile.set(s.file, [s]);
  });
  const sortKey = (s) => `${s.kind}:${s.name}`;
  for (const list of byFile.values()) {
    list.sort((a, b) => sortKey(a).localeCompare(sortKey(b)));
  }
  return [...byFile.entries()].sort((a, b) => a[0].localeCompare(b[0]));
}
|
|
63
|
+
|
|
64
|
+
/**
 * Collect symbols for every eligible file via the tree-sitter CLI.
 *
 * @param {{rootDir: string, filesRel: string[], cfg: object}} input
 * @returns {{available: boolean, failures: number, grouped: Array}}
 *   `available` is false when the CLI is missing; `failures` counts files
 *   whose `tags` invocation failed (the resulting index is then partial).
 */
export function extractSymbols({ rootDir, filesRel, cfg }) {
  if (!hasTreeSitter()) return { available: false, failures: 0, grouped: [] };

  const collected = [];
  let failures = 0;

  filesRel
    .filter((f) => shouldSymbolScan(f, cfg))
    .forEach((f) => {
      const result = runTags(rootDir, f);
      if (result.ok) collected.push(...parseCtags(result.stdout));
      else failures++;
    });

  return { available: true, failures, grouped: groupByFile(collected) };
}
|
|
81
|
+
|