docs-ready 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,226 @@
1
+ #!/usr/bin/env node
2
+ import {
3
+ log,
4
+ spinner
5
+ } from "./chunk-7YN54Y4Y.js";
6
+
7
+ // src/cli/index.ts
8
+ import { Command } from "commander";
9
+ import { readFile } from "fs/promises";
10
+ import { fileURLToPath } from "url";
11
+ import path3 from "path";
12
+
13
+ // src/cli/commands/init.ts
14
+ import fs2 from "fs/promises";
15
+ import path2 from "path";
16
+ import { createInterface } from "readline/promises";
17
+ import { stdin, stdout } from "process";
18
+
19
+ // src/frameworks/detector.ts
20
+ import fs from "fs/promises";
21
+ import path from "path";
22
// Framework detectors, checked in priority order. Each entry lists the
// config files whose presence identifies the framework, plus the
// conventional docs directory for that framework.
const DETECTORS = [
  {
    name: "docusaurus",
    files: ["docusaurus.config.ts", "docusaurus.config.js", "docusaurus.config.mts", "docusaurus.config.mjs"],
    docsDir: "./docs"
  },
  {
    name: "vitepress",
    files: [".vitepress/config.ts", ".vitepress/config.js", ".vitepress/config.mts", ".vitepress/config.mjs"],
    docsDir: "./docs"
  },
  {
    name: "mkdocs",
    files: ["mkdocs.yml", "mkdocs.yaml"],
    docsDir: "./docs"
  },
  {
    name: "starlight",
    files: ["astro.config.mjs", "astro.config.ts", "astro.config.js"],
    docsDir: "./src/content/docs"
  }
];

/**
 * Identify the documentation framework used in `projectDir` by probing for
 * well-known config files. Falls back to a "generic" result when nothing
 * matches.
 *
 * @param {string} projectDir - Absolute or relative project root to inspect.
 * @returns {Promise<{ name: string, configFile: string | null, docsDir: string }>}
 */
async function detectFramework(projectDir) {
  for (const { name, files, docsDir } of DETECTORS) {
    for (const candidate of files) {
      const configFile = path.join(projectDir, candidate);
      try {
        await fs.access(configFile);
        // Astro config files only indicate Starlight when they actually
        // reference the starlight integration.
        if (name === "starlight") {
          const source = await fs.readFile(configFile, "utf-8");
          if (!source.includes("starlight")) {
            continue;
          }
        }
        return { name, configFile, docsDir };
      } catch {
        // File absent or unreadable — try the next candidate.
      }
    }
  }
  return {
    name: "generic",
    configFile: null,
    docsDir: "./docs"
  };
}
81
+
82
+ // src/cli/commands/init.ts
83
/**
 * `docs-ready init` — interactively create a .docs-ready.yaml in the project.
 *
 * @param {{ cwd?: string, force?: boolean }} [options]
 *   cwd   - Project directory to initialize (defaults to process.cwd()).
 *   force - Overwrite an existing .docs-ready.yaml instead of aborting.
 *           (The pre-existing warning message already told users to pass
 *           --force; this makes the option actually take effect.)
 */
async function initCommand(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const configPath = path2.join(cwd, ".docs-ready.yaml");

  // Refuse to clobber an existing config unless the caller opted in.
  if (!options.force) {
    try {
      await fs2.access(configPath);
      log.warn(".docs-ready.yaml already exists. Use --force to overwrite.");
      return;
    } catch {
      // Config doesn't exist — proceed with initialization.
    }
  }

  const spin = spinner("Detecting project structure...");
  spin.start();
  const framework = await detectFramework(cwd);
  spin.stop();

  log.info(`Detected framework: ${framework.name}`);
  log.info(`Docs directory: ${framework.docsDir}`);

  const platform = await detectPlatform(cwd);
  if (platform !== "none") {
    log.info(`Detected platform: ${platform}`);
  }

  // docusaurus-plugin-llms already emits llms.txt files; avoid generating
  // duplicates when it is installed.
  const hasLlmsPlugin = await detectLlmsPlugin(cwd);
  if (hasLlmsPlugin) {
    log.warn("Detected docusaurus-plugin-llms \u2014 disabling llms.txt/llms-full.txt generation.");
  }

  // Prompt for project metadata. Close the readline handle in `finally`
  // so an interrupted prompt cannot leak it and keep the process alive.
  const rl = createInterface({ input: stdin, output: stdout });
  let title;
  let description;
  let url;
  try {
    title = await rl.question("Project title: ");
    description = await rl.question("Description: ");
    url = await rl.question("Docs URL (e.g. https://docs.example.com): ");
  } finally {
    rl.close();
  }

  const templateData = {
    title: title || "My Project",
    description: description || "Project documentation",
    url: url || "https://docs.example.com",
    docsDir: framework.docsDir,
    llmsTxt: String(!hasLlmsPlugin),
    llmsFullTxt: String(!hasLlmsPlugin),
    platform
  };

  const template = await loadTemplate();
  // Substitute {{key}} placeholders; unknown keys render as empty strings.
  const rendered = template.replace(/\{\{(\w+)\}\}/g, (_, key) => templateData[key] ?? "");

  await fs2.writeFile(configPath, rendered, "utf-8");
  log.success("Created .docs-ready.yaml");
  log.dim("Next: run `docs-ready generate` to create AI-facing files.");
}
126
/**
 * Load the .docs-ready.yaml template, preferring the packaged template file
 * and falling back to an inline copy when the package layout differs
 * (e.g. running from source instead of an installed package).
 *
 * @returns {Promise<string>} Template text with {{placeholder}} markers.
 */
async function loadTemplate() {
  // Resolve this module's directory via fileURLToPath: unlike
  // `new URL(...).pathname`, it handles Windows drive-letter paths and
  // percent-encoded characters correctly.
  const moduleDir = path2.dirname(fileURLToPath(import.meta.url));
  const candidates = [
    path2.join(moduleDir, "../../templates/config.yaml.tmpl"),
    path2.join(moduleDir, "../templates/config.yaml.tmpl")
  ];

  for (const candidate of candidates) {
    try {
      return await fs2.readFile(candidate, "utf-8");
    } catch {
      // Candidate missing — try the next location.
    }
  }

  // Inline fallback so `init` works even without the templates directory.
  return `# docs-ready configuration
title: "{{title}}"
description: "{{description}}"
url: "{{url}}"
docs:
  dir: "{{docsDir}}"
  include:
    - "**/*.md"
    - "**/*.mdx"
  exclude:
    - "**/node_modules/**"
    - "**/_*"
generate:
  llms_txt: {{llmsTxt}}
  llms_full_txt: {{llmsFullTxt}}
  ai_context: true
  output_dir: "./build"
deploy:
  platform: "{{platform}}"
`;
}
158
/**
 * Infer the deployment platform from well-known marker files in `cwd`.
 *
 * @param {string} cwd - Directory to inspect.
 * @returns {Promise<"vercel" | "netlify" | "cloudflare" | "none">}
 */
async function detectPlatform(cwd) {
  // Checked in order; the first marker found wins.
  const markers = [
    ["vercel.json", "vercel"],
    ["netlify.toml", "netlify"],
    ["_headers", "cloudflare"],
    ["wrangler.toml", "cloudflare"]
  ];
  for (const [file, platform] of markers) {
    const found = await fs2.access(path2.join(cwd, file)).then(
      () => true,
      () => false
    );
    if (found) {
      return platform;
    }
  }
  return "none";
}
174
/**
 * Check whether the project at `cwd` depends on docusaurus-plugin-llms
 * (in either dependencies or devDependencies of its package.json).
 *
 * @param {string} cwd - Project directory containing package.json.
 * @returns {Promise<boolean>} false when package.json is missing or invalid.
 */
async function detectLlmsPlugin(cwd) {
  try {
    const manifest = JSON.parse(
      await fs2.readFile(path2.join(cwd, "package.json"), "utf-8")
    );
    // Spreading undefined is a no-op, so absent sections are harmless.
    const deps = { ...manifest.dependencies, ...manifest.devDependencies };
    return "docusaurus-plugin-llms" in deps;
  } catch {
    // Unreadable or unparsable manifest — treat as "plugin not present".
    return false;
  }
}
188
+
189
+ // src/cli/index.ts
190
// Directory containing this bundled module (dist/), used as the default
// starting point when locating the package's own package.json.
var __dirname2 = path3.dirname(fileURLToPath(import.meta.url));

/**
 * Resolve the CLI's version by walking up from `startDir` (at most 5 levels)
 * looking for a package.json with a string `version` field.
 *
 * Fixes two defects in the original: a manifest without a `version` field
 * (or with a non-string one) previously made the function return undefined,
 * so `docs-ready --version` printed "undefined"; and the walk could spin at
 * the filesystem root where dirname(dir) === dir.
 *
 * @param {string} [startDir=__dirname2] - Directory to start searching from
 *   (default keeps the original zero-argument call site working).
 * @returns {Promise<string>} The version string, or "0.0.0" if none found.
 */
async function getVersion(startDir = __dirname2) {
  let dir = startDir;
  for (let i = 0; i < 5; i++) {
    try {
      const pkg = JSON.parse(await readFile(path3.join(dir, "package.json"), "utf-8"));
      // Only accept an explicit string version; otherwise keep walking up.
      if (typeof pkg.version === "string") {
        return pkg.version;
      }
    } catch {
      // Missing or unparsable package.json — keep walking up.
    }
    const parent = path3.dirname(dir);
    if (parent === dir) {
      break; // filesystem root reached
    }
    dir = parent;
  }
  return "0.0.0";
}
203
/**
 * CLI entry point: registers commander commands and dispatches argv.
 * The package version is resolved first so `--version` reports correctly.
 *
 * Adds the `--force` flag to `init`, which the "already exists. Use --force
 * to overwrite." warning already promised but the CLI never exposed.
 */
async function main() {
  const version = await getVersion();
  const program = new Command();

  program
    .name("docs-ready")
    .description("Make your docs AI-ready. Keep them that way.")
    .version(version);

  program
    .command("init")
    .description("Initialize docs-ready in your project")
    .option("--force", "Overwrite an existing .docs-ready.yaml")
    .action(async (opts) => {
      await initCommand({ force: opts.force });
    });

  program
    .command("generate")
    .description("Generate AI-facing documentation files")
    .option("--dry-run", "Show what would be generated without writing files")
    .option("--only <type>", "Generate only: llms-txt or llms-full")
    .action(async (opts) => {
      // Lazy-load the generate chunk so `init`/`--help` stay fast.
      const { generateCommand } = await import("./generate-56HFRN5I.js");
      await generateCommand({ dryRun: opts.dryRun, only: opts.only });
    });

  program
    .command("guard")
    .description("Check AI-facing docs for staleness")
    .action(() => {
      console.log("Guard command coming in v0.4.0");
    });

  program
    .command("validate")
    .description("Lint and validate AI-facing docs")
    .action(() => {
      console.log("Validate command coming in v0.6.0");
    });

  await program.parseAsync(process.argv);
}

// Top-level driver: surface any unhandled failure and exit non-zero so
// shell scripts and CI can detect it.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
226
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/cli/index.ts","../src/cli/commands/init.ts","../src/frameworks/detector.ts"],"sourcesContent":["import { Command } from \"commander\";\nimport { readFile } from \"node:fs/promises\";\nimport { fileURLToPath } from \"node:url\";\nimport path from \"node:path\";\nimport { initCommand } from \"./commands/init.js\";\n\nconst __dirname = path.dirname(fileURLToPath(import.meta.url));\n\nasync function getVersion(): Promise<string> {\n let dir = __dirname;\n for (let i = 0; i < 5; i++) {\n try {\n const pkg = JSON.parse(await readFile(path.join(dir, \"package.json\"), \"utf-8\"));\n return pkg.version;\n } catch {\n dir = path.dirname(dir);\n }\n }\n return \"0.0.0\";\n}\n\nasync function main(): Promise<void> {\n const version = await getVersion();\n\n const program = new Command();\n\n program\n .name(\"docs-ready\")\n .description(\"Make your docs AI-ready. Keep them that way.\")\n .version(version);\n\n program\n .command(\"init\")\n .description(\"Initialize docs-ready in your project\")\n .action(async () => {\n await initCommand();\n });\n\n program\n .command(\"generate\")\n .description(\"Generate AI-facing documentation files\")\n .option(\"--dry-run\", \"Show what would be generated without writing files\")\n .option(\"--only <type>\", \"Generate only: llms-txt or llms-full\")\n .action(async (opts) => {\n const { generateCommand } = await import(\"./commands/generate.js\");\n await generateCommand({ dryRun: opts.dryRun, only: opts.only });\n });\n\n program\n .command(\"guard\")\n .description(\"Check AI-facing docs for staleness\")\n .action(() => {\n console.log(\"Guard command coming in v0.4.0\");\n });\n\n program\n .command(\"validate\")\n .description(\"Lint and validate AI-facing docs\")\n .action(() => {\n console.log(\"Validate command coming in v0.6.0\");\n });\n\n await program.parseAsync(process.argv);\n}\n\nmain().catch((err) => {\n console.error(err);\n process.exit(1);\n});\n","import fs from 
\"node:fs/promises\";\nimport path from \"node:path\";\nimport { createInterface } from \"node:readline/promises\";\nimport { stdin, stdout } from \"node:process\";\nimport { detectFramework } from \"../../frameworks/detector.js\";\nimport { log, spinner } from \"../../utils/logger.js\";\n\ninterface InitOptions {\n cwd?: string;\n}\n\nexport async function initCommand(options: InitOptions = {}): Promise<void> {\n const cwd = options.cwd ?? process.cwd();\n const configPath = path.join(cwd, \".docs-ready.yaml\");\n\n // Check if config already exists\n try {\n await fs.access(configPath);\n log.warn(\".docs-ready.yaml already exists. Use --force to overwrite.\");\n return;\n } catch {\n // Config doesn't exist, proceed\n }\n\n const spin = spinner(\"Detecting project structure...\");\n spin.start();\n\n // Detect framework\n const framework = await detectFramework(cwd);\n spin.stop();\n\n log.info(`Detected framework: ${framework.name}`);\n log.info(`Docs directory: ${framework.docsDir}`);\n\n // Detect deployment platform\n const platform = await detectPlatform(cwd);\n if (platform !== \"none\") {\n log.info(`Detected platform: ${platform}`);\n }\n\n // Detect docusaurus-plugin-llms\n const hasLlmsPlugin = await detectLlmsPlugin(cwd);\n if (hasLlmsPlugin) {\n log.warn(\"Detected docusaurus-plugin-llms — disabling llms.txt/llms-full.txt generation.\");\n }\n\n // Prompt for project info\n const rl = createInterface({ input: stdin, output: stdout });\n const title = await rl.question(\"Project title: \");\n const description = await rl.question(\"Description: \");\n const url = await rl.question(\"Docs URL (e.g. 
https://docs.example.com): \");\n rl.close();\n\n // Render config from template (try file first, fall back to inline)\n const templateData: Record<string, string> = {\n title: title || \"My Project\",\n description: description || \"Project documentation\",\n url: url || \"https://docs.example.com\",\n docsDir: framework.docsDir,\n llmsTxt: String(!hasLlmsPlugin),\n llmsFullTxt: String(!hasLlmsPlugin),\n platform,\n };\n\n const template = await loadTemplate();\n const rendered = template.replace(/\\{\\{(\\w+)\\}\\}/g, (_, key) => templateData[key] ?? \"\");\n\n await fs.writeFile(configPath, rendered, \"utf-8\");\n log.success(\"Created .docs-ready.yaml\");\n log.dim(\"Next: run `docs-ready generate` to create AI-facing files.\");\n}\n\nasync function loadTemplate(): Promise<string> {\n // Try to load the template file (works when installed as npm package)\n const candidates = [\n path.join(path.dirname(new URL(import.meta.url).pathname), \"../../templates/config.yaml.tmpl\"),\n path.join(path.dirname(new URL(import.meta.url).pathname), \"../templates/config.yaml.tmpl\"),\n ];\n\n for (const candidate of candidates) {\n try {\n return await fs.readFile(candidate, \"utf-8\");\n } catch {\n // Try next\n }\n }\n\n // Inline fallback\n return `# docs-ready configuration\ntitle: \"{{title}}\"\ndescription: \"{{description}}\"\nurl: \"{{url}}\"\ndocs:\n dir: \"{{docsDir}}\"\n include:\n - \"**/*.md\"\n - \"**/*.mdx\"\n exclude:\n - \"**/node_modules/**\"\n - \"**/_*\"\ngenerate:\n llms_txt: {{llmsTxt}}\n llms_full_txt: {{llmsFullTxt}}\n ai_context: true\n output_dir: \"./build\"\ndeploy:\n platform: \"{{platform}}\"\n`;\n}\n\nasync function detectPlatform(\n cwd: string\n): Promise<\"vercel\" | \"netlify\" | \"cloudflare\" | \"none\"> {\n const checks: Array<{ file: string; platform: \"vercel\" | \"netlify\" | \"cloudflare\" }> = [\n { file: \"vercel.json\", platform: \"vercel\" },\n { file: \"netlify.toml\", platform: \"netlify\" },\n { file: \"_headers\", platform: 
\"cloudflare\" },\n { file: \"wrangler.toml\", platform: \"cloudflare\" },\n ];\n\n for (const { file, platform } of checks) {\n try {\n await fs.access(path.join(cwd, file));\n return platform;\n } catch {\n // Not found, continue\n }\n }\n\n return \"none\";\n}\n\nasync function detectLlmsPlugin(cwd: string): Promise<boolean> {\n try {\n const pkgPath = path.join(cwd, \"package.json\");\n const content = await fs.readFile(pkgPath, \"utf-8\");\n const pkg = JSON.parse(content);\n const allDeps = {\n ...pkg.dependencies,\n ...pkg.devDependencies,\n };\n return \"docusaurus-plugin-llms\" in allDeps;\n } catch {\n return false;\n }\n}\n","import fs from \"node:fs/promises\";\nimport path from \"node:path\";\n\nexport interface FrameworkResult {\n name: \"docusaurus\" | \"vitepress\" | \"mkdocs\" | \"starlight\" | \"generic\";\n configFile: string | null;\n docsDir: string;\n}\n\nconst DETECTORS: Array<{\n name: FrameworkResult[\"name\"];\n files: string[];\n docsDir: string;\n}> = [\n {\n name: \"docusaurus\",\n files: [\n \"docusaurus.config.ts\",\n \"docusaurus.config.js\",\n \"docusaurus.config.mts\",\n \"docusaurus.config.mjs\",\n ],\n docsDir: \"./docs\",\n },\n {\n name: \"vitepress\",\n files: [\n \".vitepress/config.ts\",\n \".vitepress/config.js\",\n \".vitepress/config.mts\",\n \".vitepress/config.mjs\",\n ],\n docsDir: \"./docs\",\n },\n {\n name: \"mkdocs\",\n files: [\"mkdocs.yml\", \"mkdocs.yaml\"],\n docsDir: \"./docs\",\n },\n {\n name: \"starlight\",\n files: [\"astro.config.mjs\", \"astro.config.ts\", \"astro.config.js\"],\n docsDir: \"./src/content/docs\",\n },\n];\n\nexport async function detectFramework(projectDir: string): Promise<FrameworkResult> {\n for (const detector of DETECTORS) {\n for (const file of detector.files) {\n const fullPath = path.join(projectDir, file);\n try {\n await fs.access(fullPath);\n\n if (detector.name === \"starlight\") {\n const content = await fs.readFile(fullPath, \"utf-8\");\n if 
(!content.includes(\"starlight\")) {\n continue;\n }\n }\n\n return {\n name: detector.name,\n configFile: fullPath,\n docsDir: detector.docsDir,\n };\n } catch {\n // File doesn't exist, try next\n }\n }\n }\n\n return {\n name: \"generic\",\n configFile: null,\n docsDir: \"./docs\",\n };\n}\n"],"mappings":";;;;;;;AAAA,SAAS,eAAe;AACxB,SAAS,gBAAgB;AACzB,SAAS,qBAAqB;AAC9B,OAAOA,WAAU;;;ACHjB,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,SAAS,uBAAuB;AAChC,SAAS,OAAO,cAAc;;;ACH9B,OAAO,QAAQ;AACf,OAAO,UAAU;AAQjB,IAAM,YAID;AAAA,EACH;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO,CAAC,cAAc,aAAa;AAAA,IACnC,SAAS;AAAA,EACX;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,OAAO,CAAC,oBAAoB,mBAAmB,iBAAiB;AAAA,IAChE,SAAS;AAAA,EACX;AACF;AAEA,eAAsB,gBAAgB,YAA8C;AAClF,aAAW,YAAY,WAAW;AAChC,eAAW,QAAQ,SAAS,OAAO;AACjC,YAAM,WAAW,KAAK,KAAK,YAAY,IAAI;AAC3C,UAAI;AACF,cAAM,GAAG,OAAO,QAAQ;AAExB,YAAI,SAAS,SAAS,aAAa;AACjC,gBAAM,UAAU,MAAM,GAAG,SAAS,UAAU,OAAO;AACnD,cAAI,CAAC,QAAQ,SAAS,WAAW,GAAG;AAClC;AAAA,UACF;AAAA,QACF;AAEA,eAAO;AAAA,UACL,MAAM,SAAS;AAAA,UACf,YAAY;AAAA,UACZ,SAAS,SAAS;AAAA,QACpB;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,MAAM;AAAA,IACN,YAAY;AAAA,IACZ,SAAS;AAAA,EACX;AACF;;;ADjEA,eAAsB,YAAY,UAAuB,CAAC,GAAkB;AAC1E,QAAM,MAAM,QAAQ,OAAO,QAAQ,IAAI;AACvC,QAAM,aAAaC,MAAK,KAAK,KAAK,kBAAkB;AAGpD,MAAI;AACF,UAAMC,IAAG,OAAO,UAAU;AAC1B,QAAI,KAAK,4DAA4D;AACrE;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,QAAM,OAAO,QAAQ,gCAAgC;AACrD,OAAK,MAAM;AAGX,QAAM,YAAY,MAAM,gBAAgB,GAAG;AAC3C,OAAK,KAAK;AAEV,MAAI,KAAK,uBAAuB,UAAU,IAAI,EAAE;AAChD,MAAI,KAAK,mBAAmB,UAAU,OAAO,EAAE;AAG/C,QAAM,WAAW,MAAM,eAAe,GAAG;AACzC,MAAI,aAAa,QAAQ;AACvB,QAAI,KAAK,sBAAsB,QAAQ,EAAE;AAAA,EAC3C;AAGA,QAAM,gBAAgB,MAAM,iBAAiB,GAAG;AAChD,MAAI,eAAe;AACjB,QAAI,KAAK,qFAAgF;AAAA,EAC3F;AAGA,QAAM,KAAK,gBAAgB,EAAE,OAAO,OAAO,QAAQ,OAAO,CAAC;AAC3D,QAAM,QAAQ,MAAM
,GAAG,SAAS,iBAAiB;AACjD,QAAM,cAAc,MAAM,GAAG,SAAS,eAAe;AACrD,QAAM,MAAM,MAAM,GAAG,SAAS,4CAA4C;AAC1E,KAAG,MAAM;AAGT,QAAM,eAAuC;AAAA,IAC3C,OAAO,SAAS;AAAA,IAChB,aAAa,eAAe;AAAA,IAC5B,KAAK,OAAO;AAAA,IACZ,SAAS,UAAU;AAAA,IACnB,SAAS,OAAO,CAAC,aAAa;AAAA,IAC9B,aAAa,OAAO,CAAC,aAAa;AAAA,IAClC;AAAA,EACF;AAEA,QAAM,WAAW,MAAM,aAAa;AACpC,QAAM,WAAW,SAAS,QAAQ,kBAAkB,CAAC,GAAG,QAAQ,aAAa,GAAG,KAAK,EAAE;AAEvF,QAAMA,IAAG,UAAU,YAAY,UAAU,OAAO;AAChD,MAAI,QAAQ,0BAA0B;AACtC,MAAI,IAAI,4DAA4D;AACtE;AAEA,eAAe,eAAgC;AAE7C,QAAM,aAAa;AAAA,IACjBD,MAAK,KAAKA,MAAK,QAAQ,IAAI,IAAI,YAAY,GAAG,EAAE,QAAQ,GAAG,kCAAkC;AAAA,IAC7FA,MAAK,KAAKA,MAAK,QAAQ,IAAI,IAAI,YAAY,GAAG,EAAE,QAAQ,GAAG,+BAA+B;AAAA,EAC5F;AAEA,aAAW,aAAa,YAAY;AAClC,QAAI;AACF,aAAO,MAAMC,IAAG,SAAS,WAAW,OAAO;AAAA,IAC7C,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBT;AAEA,eAAe,eACb,KACuD;AACvD,QAAM,SAAiF;AAAA,IACrF,EAAE,MAAM,eAAe,UAAU,SAAS;AAAA,IAC1C,EAAE,MAAM,gBAAgB,UAAU,UAAU;AAAA,IAC5C,EAAE,MAAM,YAAY,UAAU,aAAa;AAAA,IAC3C,EAAE,MAAM,iBAAiB,UAAU,aAAa;AAAA,EAClD;AAEA,aAAW,EAAE,MAAM,SAAS,KAAK,QAAQ;AACvC,QAAI;AACF,YAAMA,IAAG,OAAOD,MAAK,KAAK,KAAK,IAAI,CAAC;AACpC,aAAO;AAAA,IACT,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AACT;AAEA,eAAe,iBAAiB,KAA+B;AAC7D,MAAI;AACF,UAAM,UAAUA,MAAK,KAAK,KAAK,cAAc;AAC7C,UAAM,UAAU,MAAMC,IAAG,SAAS,SAAS,OAAO;AAClD,UAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,UAAM,UAAU;AAAA,MACd,GAAG,IAAI;AAAA,MACP,GAAG,IAAI;AAAA,IACT;AACA,WAAO,4BAA4B;AAAA,EACrC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AD3IA,IAAMC,aAAYC,MAAK,QAAQ,cAAc,YAAY,GAAG,CAAC;AAE7D,eAAe,aAA8B;AAC3C,MAAI,MAAMD;AACV,WAAS,IAAI,GAAG,IAAI,GAAG,KAAK;AAC1B,QAAI;AACF,YAAM,MAAM,KAAK,MAAM,MAAM,SAASC,MAAK,KAAK,KAAK,cAAc,GAAG,OAAO,CAAC;AAC9E,aAAO,IAAI;AAAA,IACb,QAAQ;AACN,YAAMA,MAAK,QAAQ,GAAG;AAAA,IACxB;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAe,OAAsB;AACnC,QAAM,UAAU,MAAM,WAAW;AAEjC,QAAM,UAAU,IAAI,QAAQ;AAE5B,UACG,KAAK,YAAY,EACjB,YAAY,8CAA8C,EAC1D,QAAQ,OAAO;AAElB,UACG,QAAQ,MAAM,EACd,YAAY,uCAAuC,EACnD,OAAO,YAAY;AAClB,UAAM,YAAY;AAAA,EACpB,CAAC;
AAEH,UACG,QAAQ,UAAU,EAClB,YAAY,wCAAwC,EACpD,OAAO,aAAa,oDAAoD,EACxE,OAAO,iBAAiB,sCAAsC,EAC9D,OAAO,OAAO,SAAS;AACtB,UAAM,EAAE,gBAAgB,IAAI,MAAM,OAAO,wBAAwB;AACjE,UAAM,gBAAgB,EAAE,QAAQ,KAAK,QAAQ,MAAM,KAAK,KAAK,CAAC;AAAA,EAChE,CAAC;AAEH,UACG,QAAQ,OAAO,EACf,YAAY,oCAAoC,EAChD,OAAO,MAAM;AACZ,YAAQ,IAAI,gCAAgC;AAAA,EAC9C,CAAC;AAEH,UACG,QAAQ,UAAU,EAClB,YAAY,kCAAkC,EAC9C,OAAO,MAAM;AACZ,YAAQ,IAAI,mCAAmC;AAAA,EACjD,CAAC;AAEH,QAAM,QAAQ,WAAW,QAAQ,IAAI;AACvC;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,MAAM,GAAG;AACjB,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":["path","fs","path","path","fs","__dirname","path"]}
package/package.json ADDED
@@ -0,0 +1,65 @@
1
+ {
2
+ "name": "docs-ready",
3
+ "version": "0.2.0",
4
+ "description": "Make your docs AI-ready. Keep them that way.",
5
+ "type": "module",
6
+ "bin": {
7
+ "docs-ready": "./dist/index.js"
8
+ },
9
+ "main": "./dist/index.js",
10
+ "types": "./dist/index.d.ts",
11
+ "exports": {
12
+ ".": {
13
+ "import": "./dist/index.js",
14
+ "types": "./dist/index.d.ts"
15
+ }
16
+ },
17
+ "files": [
18
+ "dist",
19
+ "templates"
20
+ ],
21
+ "scripts": {
22
+ "build": "tsup",
23
+ "dev": "tsup --watch",
24
+ "test": "vitest run",
25
+ "test:watch": "vitest",
26
+ "typecheck": "tsc --noEmit",
27
+ "prepublishOnly": "npm run build"
28
+ },
29
+ "keywords": [
30
+ "docs-ready",
31
+ "llms-txt",
32
+ "llms.txt",
33
+ "ai-context",
34
+ "documentation",
35
+ "ai-ready",
36
+ "llm",
37
+ "docusaurus",
38
+ "vitepress",
39
+ "mkdocs"
40
+ ],
41
+ "author": "udhaykumarbala",
42
+ "license": "MIT",
43
+ "repository": {
44
+ "type": "git",
45
+ "url": "https://github.com/udhaykumarbala/docs-ready"
46
+ },
47
+ "homepage": "https://github.com/udhaykumarbala/docs-ready",
48
+ "engines": {
49
+ "node": ">=18"
50
+ },
51
+ "dependencies": {
52
+ "chalk": "^5.3.0",
53
+ "commander": "^12.1.0",
54
+ "glob": "^11.0.0",
55
+ "gray-matter": "^4.0.3",
56
+ "ora": "^8.1.0",
57
+ "yaml": "^2.5.0"
58
+ },
59
+ "devDependencies": {
60
+ "@types/node": "^25.5.2",
61
+ "tsup": "^8.2.0",
62
+ "typescript": "^5.5.0",
63
+ "vitest": "^2.0.0"
64
+ }
65
+ }
@@ -0,0 +1,31 @@
1
+ # ============================================================
2
+ # docs-ready configuration
3
+ # Generated by `docs-ready init`
4
+ # Full reference: https://github.com/udhaykumarbala/docs-ready
5
+ # ============================================================
6
+
7
+ # --- Project metadata ---
8
+ title: "{{title}}"
9
+ description: "{{description}}"
10
+ url: "{{url}}"
11
+
12
+ # --- Documentation source ---
13
+ docs:
14
+ dir: "{{docsDir}}"
15
+ include:
16
+ - "**/*.md"
17
+ - "**/*.mdx"
18
+ exclude:
19
+ - "**/node_modules/**"
20
+ - "**/_*"
21
+
22
+ # --- Generation ---
23
+ generate:
24
+ llms_txt: {{llmsTxt}}
25
+ llms_full_txt: {{llmsFullTxt}}
26
+ ai_context: true
27
+ output_dir: "./build"
28
+
29
+ # --- Deployment ---
30
+ deploy:
31
+ platform: "{{platform}}"