docs-to-agent 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +19 -0
- package/README.md +149 -0
- package/dist/_chunks/content.mjs +181 -0
- package/dist/bin/cli.d.mts +1 -0
- package/dist/bin/cli.mjs +53 -0
- package/dist/index.d.mts +51 -0
- package/dist/index.mjs +2 -0
- package/package.json +57 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
4
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
5
|
+
in the Software without restriction, including without limitation the rights
|
|
6
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
7
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
8
|
+
furnished to do so, subject to the following conditions:
|
|
9
|
+
|
|
10
|
+
The above copyright notice and this permission notice shall be included in all
|
|
11
|
+
copies or substantial portions of the Software.
|
|
12
|
+
|
|
13
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
14
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
15
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
16
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
17
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
18
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
19
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
# docs-to-agent
|
|
2
|
+
|
|
3
|
+
<!-- automd:badges license color=yellow -->
|
|
4
|
+
|
|
5
|
+
[](https://npmjs.com/package/docs-to-agent)
|
|
6
|
+
[](https://npm.chart.dev/docs-to-agent)
|
|
7
|
+
[](https://github.com/angelorc/docs-to-agent/blob/main/LICENSE)
|
|
8
|
+
|
|
9
|
+
<!-- /automd -->
|
|
10
|
+
|
|
11
|
+
Download docs from any GitHub repo and generate a compact index for AI coding agents.
|
|
12
|
+
|
|
13
|
+
Inspired by the Next.js team's `agents-md` approach ([PR #88961](https://github.com/vercel/next.js/pull/88961)) which scored 100% on agent evals vs 79% for skill-based approaches.
|
|
14
|
+
|
|
15
|
+
- **PR**: [vercel/next.js#88961](https://github.com/vercel/next.js/pull/88961) - the original implementation
|
|
16
|
+
- **Article**: [`AGENTS.md` outperforms skills in our agent evals](https://vercel.com/blog/agents-md-outperforms-skills-in-our-agent-evals) - explains the eval methodology and why retrieval beats pre-training
|
|
17
|
+
|
|
18
|
+
**This tool extracts that approach into a standalone CLI that works with any GitHub repo's docs** - not just Next.js.
|
|
19
|
+
|
|
20
|
+
## Usage
|
|
21
|
+
|
|
22
|
+
<!-- automd:pm-x args="https://github.com/nuxt/nuxt/tree/main/docs" -->
|
|
23
|
+
|
|
24
|
+
```sh
|
|
25
|
+
# npm
|
|
26
|
+
npx docs-to-agent https://github.com/nuxt/nuxt/tree/main/docs
|
|
27
|
+
|
|
28
|
+
# pnpm
|
|
29
|
+
pnpm dlx docs-to-agent https://github.com/nuxt/nuxt/tree/main/docs
|
|
30
|
+
|
|
31
|
+
# bun
|
|
32
|
+
bunx docs-to-agent https://github.com/nuxt/nuxt/tree/main/docs
|
|
33
|
+
|
|
34
|
+
# deno
|
|
35
|
+
deno run -A npm:docs-to-agent https://github.com/nuxt/nuxt/tree/main/docs
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
<!-- /automd -->
|
|
39
|
+
|
|
40
|
+
This will:
|
|
41
|
+
|
|
42
|
+
1. Sparse-checkout only the docs folder
|
|
43
|
+
2. Store it under `.docs-to-agent/nuxt-nuxt/`
|
|
44
|
+
3. Generate a compact index in `AGENTS.md`
|
|
45
|
+
4. Add `.docs-to-agent/` to `.gitignore`
|
|
46
|
+
|
|
47
|
+
### Multiple repos
|
|
48
|
+
|
|
49
|
+
Run it multiple times — each repo gets its own namespaced folder and keyed block in `AGENTS.md`:
|
|
50
|
+
|
|
51
|
+
```bash
|
|
52
|
+
npx docs-to-agent https://github.com/nuxt/nuxt/tree/main/docs
|
|
53
|
+
npx docs-to-agent https://github.com/drizzle-team/drizzle-orm/tree/main/docs
|
|
54
|
+
npx docs-to-agent https://github.com/shadcn-ui/ui/tree/main/apps/v4/content/docs
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
```
|
|
58
|
+
.docs-to-agent/
|
|
59
|
+
├── nuxt-nuxt/
|
|
60
|
+
├── drizzle-team-drizzle-orm/
|
|
61
|
+
└── shadcn-ui-ui/
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
### Options
|
|
65
|
+
|
|
66
|
+
```
|
|
67
|
+
-o, --output <file> Target file (default: AGENTS.md)
|
|
68
|
+
--name <name> Project name override (default: repo name)
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
## Install
|
|
72
|
+
|
|
73
|
+
<!-- automd:pm-install -->
|
|
74
|
+
|
|
75
|
+
```sh
|
|
76
|
+
# ✨ Auto-detect
|
|
77
|
+
npx nypm install docs-to-agent
|
|
78
|
+
|
|
79
|
+
# npm
|
|
80
|
+
npm install docs-to-agent
|
|
81
|
+
|
|
82
|
+
# yarn
|
|
83
|
+
yarn add docs-to-agent
|
|
84
|
+
|
|
85
|
+
# pnpm
|
|
86
|
+
pnpm add docs-to-agent
|
|
87
|
+
|
|
88
|
+
# bun
|
|
89
|
+
bun install docs-to-agent
|
|
90
|
+
|
|
91
|
+
# deno
|
|
92
|
+
deno install npm:docs-to-agent
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
<!-- /automd -->
|
|
96
|
+
|
|
97
|
+
## Programmatic Usage
|
|
98
|
+
|
|
99
|
+
<!-- automd:jsimport src="./src/index.ts" -->
|
|
100
|
+
|
|
101
|
+
**ESM** (Node.js, Bun, Deno)
|
|
102
|
+
|
|
103
|
+
```js
|
|
104
|
+
import {
|
|
105
|
+
DOCS_BASE_DIR,
|
|
106
|
+
cloneDocsFolder,
|
|
107
|
+
collectDocFiles,
|
|
108
|
+
ensureGitignoreEntry,
|
|
109
|
+
parseGitHubUrl,
|
|
110
|
+
pullDocs,
|
|
111
|
+
repoKey,
|
|
112
|
+
buildDocTree,
|
|
113
|
+
generateIndex,
|
|
114
|
+
injectIntoFile,
|
|
115
|
+
} from "docs-to-agent";
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
<!-- /automd -->
|
|
119
|
+
|
|
120
|
+
## Development
|
|
121
|
+
|
|
122
|
+
```bash
|
|
123
|
+
pnpm install
|
|
124
|
+
pnpm run build # obuild
|
|
125
|
+
pnpm run lint # oxlint + oxfmt --check
|
|
126
|
+
pnpm run test # vitest
|
|
127
|
+
pnpm run typecheck # tsc --noEmit
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
## License
|
|
131
|
+
|
|
132
|
+
<!-- automd:contributors license=MIT author="angelorc" -->
|
|
133
|
+
|
|
134
|
+
Published under the [MIT](https://github.com/angelorc/docs-to-agent/blob/main/LICENSE) license.
|
|
135
|
+
Made by [@angelorc](https://github.com/angelorc) and [community](https://github.com/angelorc/docs-to-agent/graphs/contributors) 💛
|
|
136
|
+
<br><br>
|
|
137
|
+
<a href="https://github.com/angelorc/docs-to-agent/graphs/contributors">
|
|
138
|
+
<img src="https://contrib.rocks/image?repo=angelorc/docs-to-agent" />
|
|
139
|
+
</a>
|
|
140
|
+
|
|
141
|
+
<!-- /automd -->
|
|
142
|
+
|
|
143
|
+
<!-- automd:with-automd -->
|
|
144
|
+
|
|
145
|
+
---
|
|
146
|
+
|
|
147
|
+
_🤖 auto updated with [automd](https://automd.unjs.io)_
|
|
148
|
+
|
|
149
|
+
<!-- /automd -->
|
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
import { execFileSync, execSync } from "node:child_process";
import { appendFileSync, copyFileSync, existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
import { join, relative } from "node:path";
|
|
4
|
+
// Root directory (relative to the user's cwd) where downloaded docs are stored.
const DOCS_BASE_DIR = ".docs-to-agent";
|
|
5
|
+
/**
 * Builds the namespaced folder key for a repository, e.g. "nuxt-nuxt".
 * @param {string} owner - GitHub owner/organization name.
 * @param {string} repo - Repository name.
 * @returns {string} The key `owner-repo`.
 */
function repoKey(owner, repo) {
  return [owner, repo].join("-");
}
|
|
8
|
+
/**
 * Parses a GitHub "tree" URL into its components.
 * Surrounding whitespace (common with copy/paste) is tolerated.
 * @param {string} url - e.g. https://github.com/owner/repo/tree/branch/docs
 * @returns {{owner: string, repo: string, branch: string, docsPath: string}}
 * @throws {Error} If the URL is a bare repo URL (missing the docs path) or
 *   is not a recognizable GitHub URL at all.
 */
function parseGitHubUrl(url) {
  // Trim both ends; the original pattern only tolerated trailing whitespace.
  const input = url.trim();
  const treeMatch = input.match(/^https?:\/\/github\.com\/([^/]+)\/([^/]+)\/tree\/([^/]+)\/(.+?)\/?$/);
  if (treeMatch) {
    return {
      owner: treeMatch[1],
      repo: treeMatch[2],
      branch: treeMatch[3],
      docsPath: treeMatch[4],
    };
  }
  // A plain repo URL is recognizable but unusable: tell the user exactly
  // what to append instead of giving a generic error.
  const repoMatch = input.match(/^https?:\/\/github\.com\/([^/]+)\/([^/]+?)(\.git)?\/?$/);
  if (repoMatch) {
    throw new Error(`URL must include a docs path. Example: https://github.com/${repoMatch[1]}/${repoMatch[2]}/tree/main/docs`);
  }
  throw new Error("Invalid GitHub URL. Expected format: https://github.com/owner/repo/tree/branch/docs-path");
}
|
|
20
|
+
/**
 * Recursively copies a directory tree from `src` to `dest`.
 * `dest` (and any missing parents) are created; files are copied as-is.
 * @param {string} src - Existing source directory.
 * @param {string} dest - Destination directory (created if absent).
 */
function copyDirRecursive(src, dest) {
  mkdirSync(dest, { recursive: true });
  for (const entry of readdirSync(src, { withFileTypes: true })) {
    const from = join(src, entry.name);
    const to = join(dest, entry.name);
    if (entry.isDirectory()) {
      copyDirRecursive(from, to);
    } else {
      copyFileSync(from, to);
    }
  }
}
|
|
30
|
+
/**
 * Sparse-clones only `docsPath` of `repoUrl` at `branch` into `destDir`.
 * Clones into a temporary sibling directory (`destDir`-tmp) which is always
 * removed, even on failure; an existing `destDir` is replaced.
 * @param {string} repoUrl - Full git URL, e.g. https://github.com/o/r.git
 * @param {string} branch - Branch to clone.
 * @param {string} docsPath - Path of the docs folder inside the repo.
 * @param {string} destDir - Local destination directory.
 * @throws {Error} If `docsPath` does not exist in the cloned repo, or if
 *   git fails (bad URL, unknown branch, network error).
 */
function cloneDocsFolder(repoUrl, branch, docsPath, destDir) {
  const tmpDir = destDir + "-tmp";
  if (existsSync(tmpDir)) rmSync(tmpDir, { recursive: true, force: true });
  mkdirSync(tmpDir, { recursive: true });
  try {
    // Pass arguments as an argv array via execFileSync instead of
    // interpolating them into a shell string: branch/path/URL values with
    // spaces or shell metacharacters can no longer break (or inject into)
    // the command line.
    execFileSync(
      "git",
      ["clone", "--depth", "1", "--filter=blob:none", "--sparse", "--branch", branch, repoUrl, "."],
      { cwd: tmpDir, stdio: "pipe" },
    );
    execFileSync("git", ["sparse-checkout", "set", docsPath], { cwd: tmpDir, stdio: "pipe" });
    const srcDir = join(tmpDir, docsPath);
    if (!existsSync(srcDir)) throw new Error(`Docs path "${docsPath}" not found in repo. Check the URL path.`);
    if (existsSync(destDir)) rmSync(destDir, { recursive: true, force: true });
    copyDirRecursive(srcDir, destDir);
  } finally {
    // Guaranteed cleanup of the temporary clone.
    rmSync(tmpDir, { recursive: true, force: true });
  }
}
|
|
60
|
+
/**
 * Downloads a repo's docs folder into `.docs-to-agent/<owner>-<repo>` under
 * `options.cwd` and reports where it landed and how many doc files it holds.
 * @param {{owner: string, repo: string, branch: string, docsPath: string, cwd: string}} options
 * @returns {{localDocsDir: string, fileCount: number}} `localDocsDir` is
 *   relative to `cwd` (always forward-slash separated).
 */
function pullDocs(options) {
  const { owner, repo, branch, docsPath, cwd } = options;
  const key = repoKey(owner, repo);
  const gitUrl = `https://github.com/${owner}/${repo}.git`;
  const absoluteDocsDir = join(cwd, DOCS_BASE_DIR, key);
  cloneDocsFolder(gitUrl, branch, docsPath, absoluteDocsDir);
  return {
    localDocsDir: `${DOCS_BASE_DIR}/${key}`,
    fileCount: collectDocFiles(absoluteDocsDir).length,
  };
}
|
|
71
|
+
/**
 * Recursively collects all .md/.mdx files under `dir`, excluding any file
 * named index.md / index.mdx (case-insensitive).
 * @param {string} dir - Root docs directory; a missing directory yields [].
 * @returns {{relativePath: string}[]} Entries with paths relative to `dir`,
 *   sorted by relative path (localeCompare).
 */
function collectDocFiles(dir) {
  const found = [];
  const markdownRe = /\.(md|mdx)$/i;
  const indexRe = /^index\.(md|mdx)$/i;
  const walk = (current) => {
    if (!existsSync(current)) return;
    for (const entry of readdirSync(current, { withFileTypes: true })) {
      const fullPath = join(current, entry.name);
      if (entry.isDirectory()) {
        walk(fullPath);
      } else if (markdownRe.test(entry.name) && !indexRe.test(entry.name)) {
        found.push({ relativePath: relative(dir, fullPath) });
      }
    }
  };
  walk(dir);
  found.sort((a, b) => a.relativePath.localeCompare(b.relativePath));
  return found;
}
|
|
88
|
+
/**
 * Ensures `.gitignore` in `cwd` contains an entry ignoring `dirName`
 * (written as "dirName/"). Creates the file if missing, appends if the
 * entry is absent, and no-ops when either "dirName" or "dirName/" is
 * already listed on its own line.
 * @param {string} cwd - Directory that holds (or will hold) .gitignore.
 * @param {string} dirName - Directory name to ignore, without trailing slash.
 * @returns {{path: string, updated: boolean, alreadyPresent: boolean}}
 */
function ensureGitignoreEntry(cwd, dirName) {
  const gitignorePath = join(cwd, ".gitignore");
  const entry = `${dirName}/`;
  if (!existsSync(gitignorePath)) {
    writeFileSync(gitignorePath, `${entry}\n`);
    return { path: gitignorePath, updated: true, alreadyPresent: false };
  }
  const content = readFileSync(gitignorePath, "utf-8");
  const hasEntry = content.split("\n").some((line) => {
    const trimmed = line.trim();
    return trimmed === entry || trimmed === dirName;
  });
  if (hasEntry) {
    return { path: gitignorePath, updated: false, alreadyPresent: true };
  }
  // Keep the file newline-terminated before appending our entry.
  appendFileSync(gitignorePath, `${content.endsWith("\n") ? "" : "\n"}${entry}\n`);
  return { path: gitignorePath, updated: true, alreadyPresent: false };
}
|
|
112
|
+
/**
 * Groups doc files into sections keyed by their top-level directory.
 * Files sitting directly at the docs root (no directory component) are
 * intentionally skipped. Deeper files are grouped into one flat subsection
 * per nested directory path (joined with "/").
 * @param {{relativePath: string}[]} files - Paths relative to the docs root.
 * @returns {{name: string, files: string[], subsections: {name: string, files: string[], subsections: never[]}[]}[]}
 *   Sections sorted by name; files and subsections sorted as well.
 */
function buildDocTree(files) {
  // Split on both separators: collectDocFiles uses path.relative, which
  // yields "\" on Windows, while the original split("/") only handled POSIX.
  const splitPath = (p) => p.split(/[\\/]/);
  const sectionMap = new Map();
  for (const file of files) {
    const parts = splitPath(file.relativePath);
    if (parts.length < 2) continue; // root-level files are not indexed
    const topDir = parts[0];
    if (!sectionMap.has(topDir)) {
      sectionMap.set(topDir, { files: [], children: new Map() });
    }
    const section = sectionMap.get(topDir);
    if (parts.length === 2) {
      section.files.push(parts[1]);
    } else {
      const subDir = parts.slice(1, -1).join("/");
      if (!section.children.has(subDir)) section.children.set(subDir, []);
      section.children.get(subDir).push(file);
    }
  }
  const sections = [];
  for (const [name, data] of sectionMap) {
    const subsections = [];
    for (const [subName, subFiles] of data.children) {
      subsections.push({
        name: subName,
        files: subFiles.map((f) => {
          const parts = splitPath(f.relativePath);
          return parts[parts.length - 1];
        }),
        subsections: [],
      });
    }
    subsections.sort((a, b) => a.name.localeCompare(b.name));
    sections.push({
      name,
      files: [...data.files].sort(),
      subsections,
    });
  }
  sections.sort((a, b) => a.name.localeCompare(b.name));
  return sections;
}
|
|
151
|
+
/**
 * Renders the compact, pipe-delimited docs index injected into AGENTS.md.
 * Empty sections/subsections (no files) are omitted.
 * @param {{name: string, docsDir: string, sections: {name: string, files: string[], subsections: {name: string, files: string[]}[]}[]}} data
 * @returns {string} Single-line index string.
 */
function generateIndex(data) {
  const segments = [
    `[${data.name} Docs Index]|root: ./${data.docsDir}|IMPORTANT: Prefer retrieval-led reasoning over pre-training-led reasoning for any ${data.name} tasks.`,
  ];
  for (const section of data.sections) {
    if (section.files.length > 0) {
      segments.push(`${section.name}:{${section.files.join(",")}}`);
    }
    for (const sub of section.subsections) {
      if (sub.files.length > 0) {
        segments.push(`${section.name}/${sub.name}:{${sub.files.join(",")}}`);
      }
    }
  }
  return segments.join("|");
}
|
|
164
|
+
/**
 * HTML-comment marker opening this repo's block inside the target file.
 * @param {string} key - Repo key, e.g. "nuxt-nuxt".
 * @returns {string}
 */
function markerStart(key) {
  return "<!-- DOCS-TO-AGENT:" + key + "-START -->";
}
/**
 * Matching closing marker for markerStart.
 * @param {string} key - Repo key.
 * @returns {string}
 */
function markerEnd(key) {
  return "<!-- DOCS-TO-AGENT:" + key + "-END -->";
}
|
|
170
|
+
/**
 * Inserts or replaces this repo's marker-delimited index block in `content`.
 * When both markers exist the old block is replaced in place; otherwise the
 * block is appended (with a blank separator line when the existing content
 * lacks a trailing newline).
 * @param {string} content - Current target-file content ("" for a new file).
 * @param {string} indexContent - Index text placed between the markers.
 * @param {string} key - Repo key used in the marker comments.
 * @returns {string} Updated file content.
 */
function injectIntoFile(content, indexContent, key) {
  const start = `<!-- DOCS-TO-AGENT:${key}-START -->`;
  const end = `<!-- DOCS-TO-AGENT:${key}-END -->`;
  const block = `${start}\n${indexContent}\n${end}`;
  const startIdx = content.indexOf(start);
  const endIdx = content.indexOf(end);
  if (startIdx !== -1 && endIdx !== -1) {
    return content.slice(0, startIdx) + block + content.slice(endIdx + end.length);
  }
  if (content.length === 0) {
    return `${block}\n`;
  }
  const separator = content.endsWith("\n") ? "\n" : "\n\n";
  return `${content}${separator}${block}\n`;
}
|
|
181
|
+
export { cloneDocsFolder as a, parseGitHubUrl as c, DOCS_BASE_DIR as i, pullDocs as l, generateIndex as n, collectDocFiles as o, injectIntoFile as r, ensureGitignoreEntry as s, buildDocTree as t, repoKey as u };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { };
|
package/dist/bin/cli.mjs
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { c as parseGitHubUrl, i as DOCS_BASE_DIR, l as pullDocs, n as generateIndex, o as collectDocFiles, r as injectIntoFile, s as ensureGitignoreEntry, t as buildDocTree, u as repoKey } from "../_chunks/content.mjs";
|
|
3
|
+
import "../index.mjs";
|
|
4
|
+
import { existsSync, readFileSync, writeFileSync } from "node:fs";
|
|
5
|
+
import { join, resolve } from "node:path";
|
|
6
|
+
import { Command } from "commander";
|
|
7
|
+
import pc from "picocolors";
|
|
8
|
+
const program = new Command();
|
|
9
|
+
program.name("docs-to-agent").description("Download docs from a GitHub repo and generate a compact index for AI coding agents").argument("<github-url>", "GitHub URL with docs path (e.g. https://github.com/nuxt/nuxt/tree/main/docs)").option("-o, --output <file>", "Target file", "AGENTS.md").option("--name <name>", "Project name override (defaults to repo name)").action(async (url, opts) => {
|
|
10
|
+
try {
|
|
11
|
+
console.log(pc.cyan("Parsing GitHub URL..."));
|
|
12
|
+
const parsed = parseGitHubUrl(url);
|
|
13
|
+
const projectName = opts.name || parsed.repo;
|
|
14
|
+
const key = repoKey(parsed.owner, parsed.repo);
|
|
15
|
+
console.log(pc.dim(` repo: ${parsed.owner}/${parsed.repo}, branch: ${parsed.branch}, path: ${parsed.docsPath}`));
|
|
16
|
+
const cwd = process.cwd();
|
|
17
|
+
console.log(pc.cyan("Downloading documentation..."));
|
|
18
|
+
const result = pullDocs({
|
|
19
|
+
owner: parsed.owner,
|
|
20
|
+
repo: parsed.repo,
|
|
21
|
+
branch: parsed.branch,
|
|
22
|
+
docsPath: parsed.docsPath,
|
|
23
|
+
cwd
|
|
24
|
+
});
|
|
25
|
+
console.log(pc.green(` Downloaded ${result.fileCount} doc files → ${result.localDocsDir}/`));
|
|
26
|
+
const files = collectDocFiles(join(cwd, result.localDocsDir));
|
|
27
|
+
if (files.length === 0) {
|
|
28
|
+
console.log(pc.yellow("No .md/.mdx files found in docs folder."));
|
|
29
|
+
process.exit(1);
|
|
30
|
+
}
|
|
31
|
+
const sections = buildDocTree(files);
|
|
32
|
+
const indexContent = generateIndex({
|
|
33
|
+
name: projectName,
|
|
34
|
+
docsDir: result.localDocsDir,
|
|
35
|
+
sections
|
|
36
|
+
});
|
|
37
|
+
const outputPath = resolve(cwd, opts.output);
|
|
38
|
+
let existingContent = "";
|
|
39
|
+
if (existsSync(outputPath)) existingContent = readFileSync(outputPath, "utf-8");
|
|
40
|
+
writeFileSync(outputPath, injectIntoFile(existingContent, indexContent, key));
|
|
41
|
+
console.log(pc.green(` Updated ${opts.output}`));
|
|
42
|
+
if (ensureGitignoreEntry(cwd, DOCS_BASE_DIR).updated) console.log(pc.dim(` Added ${DOCS_BASE_DIR}/ to .gitignore`));
|
|
43
|
+
console.log();
|
|
44
|
+
console.log(pc.green(pc.bold("Done!")));
|
|
45
|
+
console.log(pc.dim(` ${files.length} docs indexed → ${opts.output} [${key}]`));
|
|
46
|
+
} catch (err) {
|
|
47
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
48
|
+
console.error(pc.red(`Error: ${msg}`));
|
|
49
|
+
process.exit(1);
|
|
50
|
+
}
|
|
51
|
+
});
|
|
52
|
+
program.parse();
|
|
53
|
+
export {};
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
//#region src/types.d.ts
/** Parsed components of a GitHub "tree" URL (owner/repo/branch/docs path). */
interface GitHubUrl {
  owner: string;
  repo: string;
  branch: string;
  docsPath: string;
}
/** A single markdown doc file; path is relative to the docs root. */
interface DocFile {
  relativePath: string;
}
/** One section of the doc tree (a top-level docs directory). */
interface DocSection {
  name: string;
  files: string[];
  subsections: DocSection[];
}
/** Options accepted by pullDocs. */
interface PullDocsOptions {
  owner: string;
  repo: string;
  branch: string;
  docsPath: string;
  cwd: string;
}
/** Result of pullDocs: where the docs landed and how many files were found. */
interface PullDocsResult {
  localDocsDir: string;
  fileCount: number;
}
/** Input consumed by generateIndex. */
interface IndexData {
  name: string;
  docsDir: string;
  sections: DocSection[];
}
//#endregion
//#region src/utils.d.ts
/** Root directory (relative to cwd) where downloaded docs are stored. */
declare const DOCS_BASE_DIR = ".docs-to-agent";
/** Builds the namespaced folder key for a repository, e.g. "nuxt-nuxt". */
declare function repoKey(owner: string, repo: string): string;
/** Parses a GitHub tree URL; throws when the docs path is missing or the URL is invalid. */
declare function parseGitHubUrl(url: string): GitHubUrl;
/** Sparse-clones only `docsPath` of the repo at `branch` into `destDir`. */
declare function cloneDocsFolder(repoUrl: string, branch: string, docsPath: string, destDir: string): void;
/** Downloads a repo's docs folder under `.docs-to-agent/` and reports the result. */
declare function pullDocs(options: PullDocsOptions): PullDocsResult;
/** Recursively collects .md/.mdx files under `dir` (index.md/.mdx excluded), sorted. */
declare function collectDocFiles(dir: string): DocFile[];
/** Ensures .gitignore in `cwd` ignores `dirName`; creates or appends as needed. */
declare function ensureGitignoreEntry(cwd: string, dirName: string): {
  path: string;
  updated: boolean;
  alreadyPresent: boolean;
};
//#endregion
//#region src/content.d.ts
/** Groups doc files into sections by top-level directory (root-level files skipped). */
declare function buildDocTree(files: DocFile[]): DocSection[];
/** Renders the compact pipe-delimited docs index string. */
declare function generateIndex(data: IndexData): string;
/** Inserts or replaces the marker-delimited index block in `content`. */
declare function injectIntoFile(content: string, indexContent: string, key: string): string;
//#endregion
export { DOCS_BASE_DIR, type DocFile, type DocSection, type GitHubUrl, type IndexData, type PullDocsOptions, type PullDocsResult, buildDocTree, cloneDocsFolder, collectDocFiles, ensureGitignoreEntry, generateIndex, injectIntoFile, parseGitHubUrl, pullDocs, repoKey };
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
// Public entry point: re-export the library API from the shared chunk,
// mapping the chunk's mangled names back to their public names.
export { i as DOCS_BASE_DIR, t as buildDocTree, a as cloneDocsFolder, o as collectDocFiles, s as ensureGitignoreEntry, n as generateIndex, r as injectIntoFile, c as parseGitHubUrl, l as pullDocs, u as repoKey } from "./_chunks/content.mjs";
|
package/package.json
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "docs-to-agent",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Download docs from any GitHub repo and generate a compact index for AI coding agents",
|
|
5
|
+
"homepage": "https://github.com/angelorc/docs-to-agent",
|
|
6
|
+
"bugs": "https://github.com/angelorc/docs-to-agent/issues",
|
|
7
|
+
"license": "MIT",
|
|
8
|
+
"repository": {
|
|
9
|
+
"type": "git",
|
|
10
|
+
"url": "https://github.com/angelorc/docs-to-agent.git"
|
|
11
|
+
},
|
|
12
|
+
"bin": {
|
|
13
|
+
"docs-to-agent": "./dist/bin/cli.mjs"
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"type": "module",
|
|
19
|
+
"main": "./dist/index.mjs",
|
|
20
|
+
"types": "./dist/index.d.mts",
|
|
21
|
+
"exports": {
|
|
22
|
+
".": {
|
|
23
|
+
"types": "./dist/index.d.mts",
|
|
24
|
+
"import": "./dist/index.mjs"
|
|
25
|
+
}
|
|
26
|
+
},
|
|
27
|
+
"dependencies": {
|
|
28
|
+
"commander": "^14.0.3",
|
|
29
|
+
"picocolors": "^1.1.1"
|
|
30
|
+
},
|
|
31
|
+
"devDependencies": {
|
|
32
|
+
"@changesets/cli": "^2.29.4",
|
|
33
|
+
"@types/node": "^25.2.3",
|
|
34
|
+
"automd": "^0.4.3",
|
|
35
|
+
"obuild": "^0.4.27",
|
|
36
|
+
"oxfmt": "^0.31.0",
|
|
37
|
+
"oxlint": "^1.46.0",
|
|
38
|
+
"typescript": "^5.7.3",
|
|
39
|
+
"vitest": "^4.0.18"
|
|
40
|
+
},
|
|
41
|
+
"engines": {
|
|
42
|
+
"node": ">=20"
|
|
43
|
+
},
|
|
44
|
+
"scripts": {
|
|
45
|
+
"build": "obuild",
|
|
46
|
+
"dev": "obuild --stub",
|
|
47
|
+
"lint": "oxlint . && oxfmt --check .",
|
|
48
|
+
"lint:fix": "automd && oxlint . --fix && oxfmt .",
|
|
49
|
+
"fmt": "oxfmt .",
|
|
50
|
+
"fmt:check": "oxfmt --check .",
|
|
51
|
+
"test": "vitest run",
|
|
52
|
+
"typecheck": "tsc --noEmit",
|
|
53
|
+
"changeset": "changeset",
|
|
54
|
+
"version": "changeset version",
|
|
55
|
+
"release": "changeset version && git add . && git commit -m 'chore(release): version packages' && changeset publish && git push --follow-tags"
|
|
56
|
+
}
|
|
57
|
+
}
|