docs2ai 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,62 @@
1
+ # docs2ai
2
+
3
+ Convert documentation URLs into clean, AI-ready Markdown files. Drop them into your project so AI coding assistants (Cursor, Claude Code, Copilot, etc.) have accurate, up-to-date context.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ # Run directly
9
+ npx docs2ai <url>
10
+
11
+ # Or install globally
12
+ npm install -g docs2ai
13
+ ```
14
+
15
+ ## Usage
16
+
17
+ ```bash
18
+ # Fetch a single page to stdout
19
+ docs2ai https://docs.stripe.com/api/charges
20
+
21
+ # Write to a file
22
+ docs2ai https://docs.stripe.com/api/charges -o .ai/stripe.md
23
+
24
+ # Crawl linked pages
25
+ docs2ai https://docs.stripe.com/api/charges --crawl --max-depth 2 -o .ai/stripe.md
26
+
27
+ # Manage sources in a config file
28
+ docs2ai add https://docs.stripe.com/api/charges --name stripe --crawl
29
+ docs2ai update # refresh all sources
30
+ docs2ai update --name stripe # refresh one
31
+ docs2ai list # show configured sources
32
+ ```
33
+
34
+ ## Features
35
+
36
+ - **Platform detection** — auto-detects Mintlify, Docusaurus, GitBook, ReadMe, and falls back to Readability for generic sites
37
+ - **Code block preservation** — language tags and indentation survive extraction perfectly
38
+ - **Crawl mode** — follows sidebar/nav links with configurable depth
39
+ - **YAML frontmatter** — each output includes source URL, fetch date, platform, and title
40
+ - **Config file** — manage multiple doc sources with `.docs2ai.yaml`
41
+ - **Playwright optional** — uses fast static fetch by default, Playwright only when needed for JS-rendered pages
42
+
43
+ ## Config (.docs2ai.yaml)
44
+
45
+ ```yaml
46
+ version: 1
47
+ output_dir: .ai/docs
48
+ sources:
49
+ - name: stripe
50
+ url: https://docs.stripe.com/api/charges
51
+ crawl: true
52
+ max_depth: 2
53
+ output: stripe.md
54
+ - name: yousign
55
+ url: https://developers.yousign.com/docs/set-up-your-account
56
+ crawl: false
57
+ output: yousign.md
58
+ ```
59
+
60
+ ## License
61
+
62
+ MIT
package/dist/cli.mjs ADDED
@@ -0,0 +1,902 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/cli.ts
4
+ import { defineCommand as defineCommand5, runMain } from "citty";
5
+
6
+ // src/commands/fetch.ts
7
+ import { defineCommand } from "citty";
8
+ import consola from "consola";
9
+ import * as cheerio3 from "cheerio";
10
+
11
+ // src/pipeline/fetcher.ts
12
+ import { ofetch } from "ofetch";
13
// Fetch a URL's raw HTML with a plain HTTP request (no JS execution).
// Returns the response body as a string; network errors propagate.
async function fetchPage(url) {
  return ofetch(url, { responseType: "text" });
}
17
// Fetch a page with a real (headless Chromium) browser, for docs sites that
// only render their content client-side. Playwright is an optional
// dependency: it is imported dynamically so the static-fetch path works
// without it installed.
async function fetchWithBrowser(url) {
  let playwright;
  try {
    playwright = await import("playwright");
  } catch {
    // Surface a tagged error so callers can print install instructions
    // instead of a raw module-resolution failure.
    const err = new Error(
      "Playwright is not installed. Run:\n npm install -D playwright && npx playwright install chromium"
    );
    err.code = "ERR_PLAYWRIGHT_NOT_INSTALLED";
    throw err;
  }
  const browser = await playwright.chromium.launch({ headless: true });
  try {
    const page = await browser.newPage();
    // Wait for the network to go idle, then a short grace period (1e3 ms)
    // for late client-side rendering before snapshotting the DOM.
    await page.goto(url, { waitUntil: "networkidle" });
    await page.waitForTimeout(1e3);
    const html = await page.content();
    return html;
  } finally {
    // Always release the browser, even when navigation throws.
    await browser.close();
  }
}
39
+
40
+ // src/pipeline/extractor.ts
41
+ import * as cheerio from "cheerio";
42
+ import { Readability } from "@mozilla/readability";
43
+ import { parseHTML } from "linkedom";
44
+
45
+ // src/platforms/mintlify.ts
46
// Strategy for Mintlify-hosted docs. Detection relies on the generator meta
// tag, Mintlify script URLs, or data attributes injected by its runtime.
var mintlify = {
  id: "mintlify",
  detect(url, $) {
    return (
      $('meta[name="generator"][content*="Mintlify"]').length > 0 ||
      $("script[src*='mintlify']").length > 0 ||
      $("[data-mintlify]").length > 0
    );
  },
  // Main article container.
  contentSelector() {
    return "article, main";
  },
  // Chrome to strip before extraction (navigation, banners, scripts).
  removeSelectors() {
    return [
      "nav",
      "header",
      "footer",
      "[role='navigation']",
      ".sidebar",
      "[class*='sidebar']",
      "[class*='cookie']",
      "[class*='banner']",
      "script",
      "style"
    ];
  },
  // Links to follow in crawl mode.
  navLinkSelector() {
    return "nav a[href], .sidebar a[href]";
  }
};
75
+
76
+ // src/platforms/docusaurus.ts
77
// Strategy for Docusaurus sites. Detection uses the generator meta tag, the
// theme sidebar container class, or the docusaurus_locale meta tag.
var docusaurus = {
  id: "docusaurus",
  detect(url, $) {
    return (
      $('meta[name="generator"][content*="Docusaurus"]').length > 0 ||
      $(".theme-doc-sidebar-container").length > 0 ||
      $('meta[name="docusaurus_locale"]').length > 0
    );
  },
  // Main article container (theme-doc-markdown is the docs body wrapper).
  contentSelector() {
    return "article, [role='main'], .theme-doc-markdown";
  },
  // Docusaurus theme chrome to strip before extraction.
  removeSelectors() {
    return [
      ".navbar",
      "footer",
      ".theme-doc-toc-desktop",
      ".theme-doc-sidebar-container",
      ".pagination-nav",
      ".theme-doc-breadcrumbs",
      "nav",
      "script",
      "style"
    ];
  },
  // Sidebar menu links to follow in crawl mode.
  navLinkSelector() {
    return ".menu__link[href]";
  }
};
106
+
107
+ // src/platforms/readme.ts
108
// Strategy for ReadMe.com-hosted docs. ReadMe prefixes its component
// classes with "rm-", so several such classes on one page is a strong
// signal even without the specific article containers.
var readme = {
  id: "readme",
  detect(url, $) {
    let rmClassCount = 0;
    $("[class]").each((_, el) => {
      if (/\brm-/.test($(el).attr("class") || "")) rmClassCount += 1;
    });
    return (
      rmClassCount > 2 ||
      $(".rm-Article").length > 0 ||
      $(".rm-Markdown").length > 0
    );
  },
  // Main article container.
  contentSelector() {
    return ".markdown-body, .rm-Article, .rm-Markdown";
  },
  // ReadMe chrome to strip before extraction.
  removeSelectors() {
    return [
      "nav",
      "header",
      "footer",
      ".rm-Sidebar",
      ".rm-TableOfContents",
      "[class*='cookie']",
      "script",
      "style"
    ];
  },
  // Sidebar links to follow in crawl mode.
  navLinkSelector() {
    return ".rm-Sidebar a[href]";
  }
};
140
+
141
+ // src/platforms/gitbook.ts
142
// Strategy for GitBook sites: generator meta tag, *.gitbook.io hostname, or
// GitBook's content-editor test id.
var gitbook = {
  id: "gitbook",
  detect(url, $) {
    if ($('meta[name="generator"][content*="GitBook"]').length > 0) return true;
    let onGitbookHost = false;
    try {
      onGitbookHost = new URL(url).hostname.endsWith(".gitbook.io");
    } catch {
      // Unparseable URL: fall through to DOM-based detection.
    }
    if (onGitbookHost) return true;
    return $('[data-testid="page.contentEditor"]').length > 0;
  },
  // Main content container.
  contentSelector() {
    return '[data-testid="page.contentEditor"], main, article';
  },
  // Chrome to strip before extraction.
  removeSelectors() {
    return [
      "nav",
      "header",
      "footer",
      "[class*='sidebar']",
      "[class*='toc']",
      "[class*='cookie']",
      "script",
      "style"
    ];
  },
  // Links to follow in crawl mode.
  navLinkSelector() {
    return "nav a[href]";
  }
};
173
+
174
+ // src/platforms/generic.ts
175
// Fallback strategy: always matches, so it must remain last in the
// strategy list. Extraction for generic pages is delegated to Readability;
// these selectors are only used as hints.
var generic = {
  id: "generic",
  detect(_url, _$) {
    return true;
  },
  // Common semantic content containers.
  contentSelector() {
    return "article, main, [role='main'], .content";
  },
  // Broad chrome-stripping list for unknown site layouts.
  removeSelectors() {
    return [
      "nav",
      "header",
      "footer",
      "[role='navigation']",
      "[class*='sidebar']",
      "[class*='cookie']",
      "[class*='banner']",
      "script",
      "style",
      "noscript"
    ];
  },
  // No crawl hint: the crawler falls back to all anchors.
  navLinkSelector() {
    return null;
  }
};
201
+
202
+ // src/platforms/registry.ts
203
// Detection order matters: specific platforms first; `generic` always
// matches and therefore terminates the list.
var platformStrategies = [mintlify, docusaurus, readme, gitbook, generic];
// Look up a strategy by id; throws for ids not in the registry.
function getStrategy(id) {
  for (const strategy of platformStrategies) {
    if (strategy.id === id) return strategy;
  }
  throw new Error(`Unknown platform: ${id}`);
}
217
+
218
+ // src/pipeline/resolver.ts
219
// Identify the docs platform for a page: first strategy whose detect()
// accepts it wins. `generic` (always true, last in the list) guarantees a
// match, but "generic" is also returned defensively if nothing matched.
function resolve(url, $) {
  const match = platformStrategies.find((strategy) => strategy.detect(url, $));
  return match ? match.id : "generic";
}
227
+
228
+ // src/pipeline/extractor.ts
229
// Extract the main content of a page as an HTML fragment.
// For a recognized platform: strip that platform's chrome, then take the
// first element matching its content selector — but only trust the result
// when it has some substance (>= 100 chars). Otherwise (and for generic
// pages) fall back to Mozilla Readability, then to the raw <body>.
function extract(html, url) {
  const $ = cheerio.load(html);
  const platform = resolve(url, $);
  const strategy = getStrategy(platform);
  const title = extractTitle($);
  if (platform !== "generic") {
    // Removal happens before selection so chrome nested inside the content
    // container is dropped too.
    strategy.removeSelectors().forEach((sel) => $(sel).remove());
    const selectorContent = $(strategy.contentSelector()).first().html();
    if (selectorContent && selectorContent.trim().length >= 100) {
      return { content: selectorContent, title, platform };
    }
  }
  // Readability needs a real DOM; linkedom parses the original HTML.
  const { document } = parseHTML(html);
  const article = new Readability(document).parse();
  return {
    content: article?.content || $("body").html() || html,
    title: title || article?.title || "",
    platform
  };
}
254
// Best-effort page title: first <h1>, then og:title, then <title>.
function extractTitle($) {
  const candidates = [
    $("h1").first().text().trim(),
    $('meta[property="og:title"]').attr("content")?.trim()
  ];
  for (const candidate of candidates) {
    if (candidate) return candidate;
  }
  return $("title").text().trim();
}
261
+
262
+ // src/pipeline/transformer.ts
263
+ import TurndownService from "turndown";
264
+ import { gfm } from "turndown-plugin-gfm";
265
// Convert an extracted HTML fragment to GitHub-flavored Markdown.
function transform(html) {
  const td = new TurndownService({
    headingStyle: "atx",
    codeBlockStyle: "fenced",
    bulletListMarker: "-"
  });
  td.use(gfm);
  // Custom rules, registered in this order: admonitions -> blockquotes,
  // tab panels -> labeled sections, code-fence languages preserved,
  // display:none elements dropped.
  const rules = [
    addCalloutRule,
    addTabbedContentRule,
    addCodeBlockLangRule,
    addHiddenElementRule
  ];
  for (const register of rules) {
    register(td);
  }
  return td.turndown(html);
}
278
// True when the DOM node is an element (nodeType 1, per the DOM spec).
function isElement(node) {
  return node.nodeType === 1;
}
// Read an attribute from an element node; "" for missing attributes or
// non-element nodes.
function getAttr(node, attr) {
  return isElement(node) ? node.getAttribute(attr) || "" : "";
}
// Lowercased tag name of an element node; "" for non-element nodes.
function getTagName(node) {
  return isElement(node) ? node.tagName.toLowerCase() : "";
}
293
// Turndown rule: convert admonition/callout containers (<aside>, framework
// callout classes, role="alert") into Markdown blockquotes prefixed with a
// bold type label (Note/Warning/Danger/Tip/Info).
function addCalloutRule(td) {
  td.addRule("callouts", {
    filter(node) {
      if (!isElement(node)) return false;
      const tag = getTagName(node);
      if (tag === "aside") return true;
      const cls = getAttr(node, "class");
      // Class-name heuristic covering the common docs frameworks.
      if (/\b(admonition|callout|alert|notice|warning|info|tip|note|caution|danger)\b/i.test(
        cls
      ))
        return true;
      if (getAttr(node, "role") === "alert") return true;
      return false;
    },
    replacement(content, node) {
      const cls = getAttr(node, "class").toLowerCase();
      // Pick the label from the class list; order matters (first keyword
      // match wins) and the default is "Note".
      let type = "Note";
      if (/warning|caution/.test(cls)) type = "Warning";
      else if (/danger|error/.test(cls)) type = "Danger";
      else if (/tip|success/.test(cls)) type = "Tip";
      else if (/info/.test(cls)) type = "Info";
      // Prefix every content line with "> " to form the blockquote body.
      const lines = content.trim().split("\n");
      const quoted = lines.map((line) => `> ${line}`).join("\n");
      return `
> **${type}**
${quoted}

`;
    }
  });
}
324
// Turndown rule: flatten tab panels into sequential sections, using the
// tab's label (aria-label / data-label / data-value) as a bold heading when
// one is available; unlabeled panels keep just their body.
function addTabbedContentRule(td) {
  td.addRule("tabbed-content", {
    filter(node) {
      if (!isElement(node)) return false;
      const cls = getAttr(node, "class");
      // Class names used by Docusaurus tabs and similar frameworks.
      if (/\b(tab-panel|tabpanel|tabs__item)\b/i.test(cls)) return true;
      if (getAttr(node, "role") === "tabpanel") return true;
      return false;
    },
    replacement(content, node) {
      const label = getAttr(node, "aria-label") || getAttr(node, "data-label") || getAttr(node, "data-value") || "";
      if (label) {
        return `
**${label}**

${content.trim()}

`;
      }
      // No label found: emit the panel body on its own.
      return `
${content.trim()}

`;
    }
  });
}
350
// Turndown rule: preserve the code language on <pre><code> blocks when the
// platform exposes it via data-language / data-lang on either element, so
// fenced blocks come out as ```lang instead of plain ```.
function addCodeBlockLangRule(td) {
  td.addRule("code-block-lang", {
    filter(node) {
      if (!isElement(node)) return false;
      if (getTagName(node) !== "pre") return false;
      const codeEl = node.querySelector("code");
      if (!codeEl) return false;
      // Only take over from Turndown's default fenced-code handling when a
      // language tag is actually present (pre first, then code).
      const lang = getAttr(node, "data-language") || getAttr(node, "data-lang") || (codeEl.getAttribute("data-language") || "") || (codeEl.getAttribute("data-lang") || "");
      return lang.length > 0;
    },
    replacement(_content, node) {
      if (!isElement(node)) return _content;
      // filter() guarantees a <code> child and a non-empty language here;
      // the lookup is repeated because Turndown rules are stateless.
      const codeEl = node.querySelector("code");
      const lang = getAttr(node, "data-language") || getAttr(node, "data-lang") || (codeEl.getAttribute("data-language") || "") || (codeEl.getAttribute("data-lang") || "");
      // textContent keeps the original indentation of the code body.
      const code = codeEl.textContent || "";
      return `
\`\`\`${lang}
${code}
\`\`\`
`;
    }
  });
}
373
// Turndown rule: drop elements hidden with inline `display: none` — except
// tab panels, since docs frameworks hide inactive tabs whose content we
// still want to keep (see addTabbedContentRule).
function addHiddenElementRule(td) {
  td.addRule("hidden-elements", {
    filter(node) {
      if (!isElement(node)) return false;
      if (!/display\s*:\s*none/i.test(getAttr(node, "style"))) return false;
      if (/\b(tab-panel|tabpanel)\b/i.test(getAttr(node, "class"))) return false;
      return getAttr(node, "role") !== "tabpanel";
    },
    replacement() {
      return "";
    }
  });
}
389
+
390
+ // src/pipeline/writer.ts
391
+ import { writeFileSync, mkdirSync } from "fs";
392
+ import { dirname } from "path";
393
+ import matter from "gray-matter";
394
// Prepend YAML frontmatter (source URL, fetch timestamp, platform, title,
// tool version) and deliver the result: to `outputPath` (directories
// created as needed) when given, otherwise to stdout.
function write(markdown, outputPath, options) {
  const payload = matter.stringify(markdown, {
    source: options.sourceUrl,
    fetched_at: new Date().toISOString(),
    platform: options.platform,
    title: options.title,
    docs2ai_version: "0.1.0"
  });
  if (!outputPath) {
    process.stdout.write(payload);
    return;
  }
  mkdirSync(dirname(outputPath), { recursive: true });
  writeFileSync(outputPath, payload, "utf-8");
}
409
+
410
+ // src/crawl/crawler.ts
411
+ import * as cheerio2 from "cheerio";
412
+
413
+ // src/utils/url.ts
414
// Canonicalize a URL for visited-set deduplication: drop the fragment and
// query string, and strip a single trailing slash.
function normalizeUrl(url) {
  const parsed = new URL(url);
  parsed.hash = "";
  parsed.search = "";
  const href = parsed.href;
  return href.endsWith("/") ? href.slice(0, -1) : href;
}
420
+
421
+ // src/crawl/boundary.ts
422
// Compute the crawl boundary for a start URL: its origin plus the parent
// path of the start page (e.g. /api/charges -> "/api/"), so a crawl stays
// within the same docs section.
function getCrawlPrefix(url) {
  const parsed = new URL(url);
  const segments = parsed.pathname.split("/");
  segments.pop();
  return { origin: parsed.origin, pathPrefix: `${segments.join("/")}/` };
}
// True when candidateUrl parses and lies inside the boundary (same origin,
// path under the prefix); unparseable candidates are out of bounds.
function isInBounds(candidateUrl, origin, pathPrefix) {
  let parsed;
  try {
    parsed = new URL(candidateUrl);
  } catch {
    return false;
  }
  return parsed.origin === origin && parsed.pathname.startsWith(pathPrefix);
}
437
+
438
+ // src/crawl/crawler.ts
439
// Breadth-first crawl from `startUrl`, bounded to the same origin and the
// start page's parent path (see getCrawlPrefix). Returns { url, html } for
// every page successfully fetched, in visit order. Fetch failures are
// skipped, not fatal.
async function crawl(startUrl, options) {
  const { origin, pathPrefix } = getCrawlPrefix(startUrl);
  const visited = /* @__PURE__ */ new Set();
  const results = [];
  // Queue of [url, depth] pairs; the start page is depth 0.
  const queue = [[startUrl, 0]];
  visited.add(normalizeUrl(startUrl));
  while (queue.length > 0) {
    const [url, depth] = queue.shift();
    let html;
    try {
      html = await fetchPage(url);
    } catch {
      // Best-effort: report progress for the failed page and move on.
      options.onPageFetched?.(url, results.length, results.length + queue.length);
      continue;
    }
    results.push({ url, html });
    // `total` is an estimate: pages done plus pages currently queued.
    options.onPageFetched?.(url, results.length, results.length + queue.length);
    if (depth < options.maxDepth) {
      const links = discoverLinks(
        html,
        url,
        origin,
        pathPrefix,
        options.navLinkSelector
      );
      for (const link of links) {
        // Dedupe on the normalized form (no hash/query/trailing slash) so
        // the same page is not fetched under cosmetic URL variants.
        const normalized = normalizeUrl(link);
        if (!visited.has(normalized)) {
          visited.add(normalized);
          queue.push([link, depth + 1]);
        }
      }
    }
    if (queue.length > 0) {
      // Politeness delay between consecutive requests.
      await delay(200);
    }
  }
  return results;
}
478
// Collect in-bounds links from a page's HTML. When the platform provides a
// nav-link selector it is used; otherwise every anchor is considered.
// Relative hrefs are resolved against `baseUrl`; unparseable hrefs are
// skipped. The result is deduplicated, in first-seen order.
function discoverLinks(html, baseUrl, origin, pathPrefix, navLinkSelector) {
  const $ = cheerio2.load(html);
  const found = new Set();
  $(navLinkSelector || "a[href]").each((_, el) => {
    const href = $(el).attr("href");
    if (!href) return;
    let resolved;
    try {
      resolved = new URL(href, baseUrl).href;
    } catch {
      return;
    }
    if (isInBounds(resolved, origin, pathPrefix)) {
      found.add(resolved);
    }
  });
  return [...found];
}
495
// Promise that resolves after `ms` milliseconds.
function delay(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
498
+
499
+ // src/commands/fetch.ts
500
// `docs2ai fetch <url>`: fetch one page (or crawl a section) and convert it
// to Markdown. Without -o/--output the result goes to stdout and all
// progress logging is suppressed so the stream stays clean.
var fetchCommand = defineCommand({
  meta: {
    name: "fetch",
    description: "Fetch a documentation URL and convert to Markdown"
  },
  args: {
    url: {
      type: "positional",
      description: "Documentation URL to convert",
      required: true
    },
    output: {
      type: "string",
      alias: "o",
      description: "Output file path"
    },
    crawl: {
      type: "boolean",
      description: "Follow sidebar/nav links",
      default: false
    },
    "max-depth": {
      type: "string",
      description: "Maximum crawl depth",
      default: "2"
    }
  },
  async run({ args }) {
    const url = args.url;
    const output = args.output;
    const shouldCrawl = args.crawl;
    // citty delivers the value as a string; NaN if non-numeric was passed.
    const maxDepth = parseInt(args["max-depth"], 10);
    // No output file means Markdown goes to stdout: suppress logs.
    const silent = !output;
    if (shouldCrawl) {
      if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);
      // Fetch the start page once up front to detect the platform and pick
      // its nav-link selector. NOTE(review): the crawler fetches this same
      // page again as its first step — one redundant request.
      const firstHtml = await fetchPage(url);
      const $ = cheerio3.load(firstHtml);
      const platformId = resolve(url, $);
      const strategy = getStrategy(platformId);
      const navLinkSelector = strategy.navLinkSelector();
      const pages = await crawl(url, {
        maxDepth,
        navLinkSelector,
        onPageFetched: (pageUrl, current, total) => {
          if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);
        }
      });
      if (!silent) consola.success(`Crawled ${pages.length} pages`);
      // Concatenate every page into one document, separated by rules;
      // frontmatter takes the first page's title/platform.
      const sections = [];
      let firstTitle = "";
      let firstPlatform = "";
      for (const page of pages) {
        const { content, title, platform } = extract(page.html, page.url);
        if (!firstTitle) {
          firstTitle = title;
          firstPlatform = platform;
        }
        const md = transform(content);
        sections.push(`## ${title}

Source: ${page.url}

${md}`);
      }
      const markdown = sections.join("\n\n---\n\n");
      write(markdown, output, {
        sourceUrl: url,
        title: firstTitle,
        platform: firstPlatform
      });
      if (!silent) consola.success(`Written to ${output}`);
    } else {
      if (!silent) consola.start(`Fetching ${url}...`);
      let html = await fetchPage(url);
      const { content, title, platform } = extract(html, url);
      // Thin result (< 200 chars) suggests a JS-rendered page: retry with
      // a headless browser if Playwright is available.
      if (content.trim().length < 200) {
        if (!silent) consola.warn("Content looks thin, retrying with browser...");
        try {
          html = await fetchWithBrowser(url);
          const result = extract(html, url);
          const markdown2 = transform(result.content);
          write(markdown2, output, {
            sourceUrl: url,
            // Keep the static title if the browser pass found none.
            title: result.title || title,
            platform: result.platform
          });
          if (!silent) consola.success(`Written to ${output}`);
          return;
        } catch (err) {
          // Browser fallback is best-effort: warn and fall through to the
          // static content already extracted above.
          if (err?.code === "ERR_PLAYWRIGHT_NOT_INSTALLED") {
            consola.warn(
              "This page may require a browser to render. Install Playwright:\n npm install -D playwright && npx playwright install chromium"
            );
          } else {
            consola.warn("Browser fallback failed, using static content.");
          }
        }
      }
      if (!silent) consola.success(`Extracted content (platform: ${platform})`);
      const markdown = transform(content);
      write(markdown, output, {
        sourceUrl: url,
        title,
        platform
      });
      if (!silent) consola.success(`Written to ${output}`);
    }
  }
});
609
+
610
+ // src/commands/add.ts
611
+ import { defineCommand as defineCommand2 } from "citty";
612
+ import { join as join2 } from "path";
613
+ import consola2 from "consola";
614
+
615
+ // src/config/manager.ts
616
+ import { readFileSync, writeFileSync as writeFileSync2, existsSync } from "fs";
617
+ import { join, dirname as dirname2 } from "path";
618
+ import yaml from "js-yaml";
619
+ var CONFIG_FILENAME = ".docs2ai.yaml";
620
// Locate and parse the nearest .docs2ai.yaml, walking up from `startDir`
// (defaults to the current working directory). Returns { config,
// configPath }, or null when no config file is found.
// NOTE(review): the yaml.load result is not validated — a file whose root
// is not a mapping would throw on property access below.
function loadConfig(startDir) {
  const configPath = findConfigFile(startDir || process.cwd());
  if (!configPath) return null;
  const raw = readFileSync(configPath, "utf-8");
  const data = yaml.load(raw);
  // Map snake_case file keys to the camelCase in-memory shape, filling
  // defaults for anything missing.
  const config = {
    version: data.version ?? 1,
    outputDir: data.output_dir ?? ".ai/docs",
    sources: (data.sources ?? []).map(snakeToCamelSource)
  };
  return { config, configPath };
}
632
// Serialize the in-memory config back to snake_case YAML at `configPath`.
function saveConfig(config, configPath) {
  const serializable = {
    version: config.version,
    output_dir: config.outputDir,
    sources: config.sources.map(camelToSnakeSource)
  };
  // lineWidth: -1 disables js-yaml's line wrapping so long URLs stay intact.
  const content = yaml.dump(serializable, { lineWidth: -1 });
  writeFileSync2(configPath, content, "utf-8");
}
641
// Insert `source` into config.sources, replacing any existing entry with
// the same name (names act as unique keys). Mutates `config` in place.
function addSource(config, source) {
  const existingIndex = config.sources.findIndex((entry) => entry.name === source.name);
  if (existingIndex === -1) {
    config.sources.push(source);
  } else {
    config.sources[existingIndex] = source;
  }
}
649
// Walk up the directory tree from `startDir` looking for CONFIG_FILENAME;
// return its absolute path, or null once the filesystem root is reached
// (dirname of the root is the root itself).
function findConfigFile(startDir) {
  for (let dir = startDir; ; ) {
    const candidate = join(dir, CONFIG_FILENAME);
    if (existsSync(candidate)) return candidate;
    const parent = dirname2(dir);
    if (parent === dir) return null;
    dir = parent;
  }
}
659
// Convert a source entry from its on-disk snake_case form to the camelCase
// in-memory shape, supplying defaults for missing/null fields.
function snakeToCamelSource(raw) {
  return {
    name: raw.name ?? "",
    url: raw.url ?? "",
    crawl: raw.crawl ?? false,
    maxDepth: raw.max_depth ?? 2,
    output: raw.output ?? ""
  };
}
// Inverse mapping: in-memory camelCase source back to the snake_case keys
// written to .docs2ai.yaml. No defaulting here — values pass through as-is.
function camelToSnakeSource(source) {
  const { name, url, crawl, maxDepth, output } = source;
  return {
    name,
    url,
    crawl,
    max_depth: maxDepth,
    output
  };
}
677
+
678
+ // src/commands/add.ts
679
// `docs2ai add <url>`: register a documentation source in .docs2ai.yaml,
// creating the config file in the current directory if none exists in this
// directory or any ancestor.
var addCommand = defineCommand2({
  meta: {
    name: "add",
    description: "Add a documentation source to .docs2ai.yaml"
  },
  args: {
    url: {
      type: "positional",
      description: "Documentation URL to add",
      required: true
    },
    name: {
      type: "string",
      description: "Name for this source (auto-derived from hostname if omitted)"
    },
    crawl: {
      type: "boolean",
      description: "Enable crawl mode for this source",
      default: false
    },
    "max-depth": {
      type: "string",
      description: "Maximum crawl depth",
      default: "2"
    },
    output: {
      type: "string",
      alias: "o",
      description: "Output filename"
    }
  },
  run({ args }) {
    const url = args.url;
    const crawl2 = args.crawl;
    const maxDepth = parseInt(args["max-depth"], 10);
    // Defaults: name derived from the URL's hostname, output "<name>.md".
    const name = args.name || slugFromUrl(url);
    const output = args.output || `${name}.md`;
    // Reuse an existing config (found by walking up from cwd) or start a
    // fresh one in the current directory.
    const existing = loadConfig();
    let config;
    let configPath;
    if (existing) {
      config = existing.config;
      configPath = existing.configPath;
    } else {
      configPath = join2(process.cwd(), ".docs2ai.yaml");
      config = { version: 1, outputDir: ".ai/docs", sources: [] };
    }
    // addSource replaces an existing entry with the same name.
    addSource(config, { name, url, crawl: crawl2, maxDepth, output });
    saveConfig(config, configPath);
    // \u2192 is a right arrow in the success message.
    consola2.success(`Added source "${name}" \u2192 ${url}`);
    consola2.info(`Config: ${configPath}`);
  }
});
732
// Derive a config-friendly slug from a URL's hostname: dots become dashes
// and a leading "www-" is dropped (e.g. https://www.foo.com -> "foo-com").
// Unparseable URLs fall back to "source".
function slugFromUrl(url) {
  let hostname;
  try {
    hostname = new URL(url).hostname;
  } catch {
    return "source";
  }
  return hostname.replace(/\./g, "-").replace(/^www-/, "");
}
740
+
741
+ // src/commands/update.ts
742
+ import { defineCommand as defineCommand3 } from "citty";
743
+ import { join as join3, dirname as dirname3 } from "path";
744
+ import { mkdirSync as mkdirSync2 } from "fs";
745
+ import consola3 from "consola";
746
// `docs2ai update [--name x]`: re-fetch every configured source (or just
// the named one) and rewrite its Markdown output. Exits with code 1 when
// there is no config, no sources, or the named source is missing.
var updateCommand = defineCommand3({
  meta: {
    name: "update",
    description: "Refresh configured documentation sources"
  },
  args: {
    name: {
      type: "string",
      description: "Update only the named source"
    }
  },
  async run({ args }) {
    const result = loadConfig();
    if (!result) {
      consola3.error("No .docs2ai.yaml found. Run `docs2ai add <url>` first.");
      process.exit(1);
    }
    const { config, configPath } = result;
    // Output paths are resolved relative to the config file's directory,
    // not the current working directory.
    const configDir = dirname3(configPath);
    const filterName = args.name;
    const sources = filterName ? config.sources.filter((s) => s.name === filterName) : config.sources;
    if (sources.length === 0) {
      if (filterName) {
        consola3.error(`Source "${filterName}" not found in config.`);
      } else {
        consola3.error("No sources configured.");
      }
      process.exit(1);
    }
    // Sources are processed sequentially, each written independently.
    for (const source of sources) {
      const outputPath = join3(configDir, config.outputDir, source.output);
      mkdirSync2(dirname3(outputPath), { recursive: true });
      consola3.start(`Updating "${source.name}" from ${source.url}...`);
      if (source.crawl) {
        // Crawl mode: concatenate every crawled page into one Markdown
        // file, separated by horizontal rules; frontmatter takes the first
        // page's title/platform.
        const pages = await crawl(source.url, {
          maxDepth: source.maxDepth,
          onPageFetched: (url, current, total) => {
            consola3.info(`  [${current}/${total}] ${url}`);
          }
        });
        const sections = [];
        let firstTitle = "";
        let firstPlatform = "";
        for (const page of pages) {
          const { content, title, platform } = extract(page.html, page.url);
          if (!firstTitle) {
            firstTitle = title;
            firstPlatform = platform;
          }
          const md = transform(content);
          sections.push(`## ${title}

Source: ${page.url}

${md}`);
        }
        const markdown = sections.join("\n\n---\n\n");
        write(markdown, outputPath, {
          sourceUrl: source.url,
          title: firstTitle,
          platform: firstPlatform
        });
      } else {
        // Single-page mode: fetch, extract, convert, write.
        const html = await fetchPage(source.url);
        const { content, title, platform } = extract(html, source.url);
        const markdown = transform(content);
        write(markdown, outputPath, {
          sourceUrl: source.url,
          title,
          platform
        });
      }
      consola3.success(`Updated "${source.name}" \u2192 ${outputPath}`);
    }
  }
});
822
+
823
+ // src/commands/list.ts
824
+ import { defineCommand as defineCommand4 } from "citty";
825
+ import consola4 from "consola";
826
// `docs2ai list`: print the config file location, output directory, and a
// short summary of every configured source. Purely informational — no
// filesystem writes, no exit-code signaling.
var listCommand = defineCommand4({
  meta: {
    name: "list",
    description: "List configured documentation sources"
  },
  run() {
    const result = loadConfig();
    if (!result) {
      consola4.info("No .docs2ai.yaml found. Run `docs2ai add <url>` to get started.");
      return;
    }
    const { config, configPath } = result;
    consola4.info(`Config: ${configPath}`);
    // Trailing newline in the template separates the header from the list.
    consola4.info(`Output dir: ${config.outputDir}
`);
    if (config.sources.length === 0) {
      consola4.info("No sources configured.");
      return;
    }
    // One indented stanza per source; plain console.log keeps alignment
    // free of consola's prefixes.
    for (const source of config.sources) {
      const crawlInfo = source.crawl ? ` (crawl, depth: ${source.maxDepth})` : "";
      console.log(`  ${source.name}${crawlInfo}`);
      console.log(`    URL: ${source.url}`);
      console.log(`    Output: ${source.output}`);
      console.log();
    }
  }
});
854
+
855
+ // src/cli.ts
856
// Root command: a bare `docs2ai <url>` behaves like the `fetch` subcommand;
// otherwise citty dispatches to add/update/list. With no URL and no
// subcommand, a short usage summary is printed.
var main = defineCommand5({
  meta: {
    name: "docs2ai",
    version: "0.1.0",
    description: "Convert documentation URLs into AI-ready Markdown files"
  },
  // Mirrors fetchCommand's args (url optional here) so the URL shorthand
  // parses identically to an explicit `fetch` invocation.
  args: {
    url: {
      type: "positional",
      description: "Documentation URL to convert",
      required: false
    },
    output: {
      type: "string",
      alias: "o",
      description: "Output file path"
    },
    crawl: {
      type: "boolean",
      description: "Follow sidebar/nav links",
      default: false
    },
    "max-depth": {
      type: "string",
      description: "Maximum crawl depth",
      default: "2"
    }
  },
  subCommands: {
    add: addCommand,
    update: updateCommand,
    list: listCommand
  },
  run({ args }) {
    if (!args.url) {
      console.log("Usage: docs2ai <url> [-o output.md] [--crawl]");
      console.log("       docs2ai add <url> [--name name] [--crawl]");
      console.log("       docs2ai update [--name name]");
      console.log("       docs2ai list");
      console.log("\nRun `docs2ai --help` for full usage.");
      return;
    }
    // Delegate to the fetch command with the already-parsed args.
    return fetchCommand.run({ args });
  }
});
runMain(main);
902
+ //# sourceMappingURL=cli.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/cli.ts","../src/commands/fetch.ts","../src/pipeline/fetcher.ts","../src/pipeline/extractor.ts","../src/platforms/mintlify.ts","../src/platforms/docusaurus.ts","../src/platforms/readme.ts","../src/platforms/gitbook.ts","../src/platforms/generic.ts","../src/platforms/registry.ts","../src/pipeline/resolver.ts","../src/pipeline/transformer.ts","../src/pipeline/writer.ts","../src/crawl/crawler.ts","../src/utils/url.ts","../src/crawl/boundary.ts","../src/commands/add.ts","../src/config/manager.ts","../src/commands/update.ts","../src/commands/list.ts"],"sourcesContent":["import { defineCommand, runMain } from \"citty\";\nimport { fetchCommand } from \"./commands/fetch\";\nimport { addCommand } from \"./commands/add\";\nimport { updateCommand } from \"./commands/update\";\nimport { listCommand } from \"./commands/list\";\n\nconst main = defineCommand({\n meta: {\n name: \"docs2ai\",\n version: \"0.1.0\",\n description: \"Convert documentation URLs into AI-ready Markdown files\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to convert\",\n required: false,\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output file path\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Follow sidebar/nav links\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n },\n subCommands: {\n add: addCommand,\n update: updateCommand,\n list: listCommand,\n },\n run({ args }) {\n if (!args.url) {\n console.log(\"Usage: docs2ai <url> [-o output.md] [--crawl]\");\n console.log(\" docs2ai add <url> [--name name] [--crawl]\");\n console.log(\" docs2ai update [--name name]\");\n console.log(\" docs2ai list\");\n console.log(\"\\nRun `docs2ai --help` for full usage.\");\n return;\n }\n return (fetchCommand as any).run({ args });\n },\n});\n\nrunMain(main);\n","import { defineCommand } from \"citty\";\nimport consola from 
\"consola\";\nimport * as cheerio from \"cheerio\";\nimport { fetchPage, fetchWithBrowser } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\n\nexport const fetchCommand = defineCommand({\n meta: {\n name: \"fetch\",\n description: \"Fetch a documentation URL and convert to Markdown\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to convert\",\n required: true,\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output file path\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Follow sidebar/nav links\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n },\n async run({ args }) {\n const url = args.url as string;\n const output = args.output as string | undefined;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n const silent = !output;\n\n if (shouldCrawl) {\n if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);\n\n // Fetch first page to resolve platform and get navLinkSelector\n const firstHtml = await fetchPage(url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(url, $);\n const strategy = getStrategy(platformId);\n const navLinkSelector = strategy.navLinkSelector();\n\n const pages = await crawl(url, {\n maxDepth,\n navLinkSelector,\n onPageFetched: (pageUrl, current, total) => {\n if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);\n },\n });\n\n if (!silent) consola.success(`Crawled ${pages.length} pages`);\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of 
pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n\n write(markdown, output, {\n sourceUrl: url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n if (!silent) consola.success(`Written to ${output}`);\n } else {\n if (!silent) consola.start(`Fetching ${url}...`);\n let html = await fetchPage(url);\n\n const { content, title, platform } = extract(html, url);\n\n // If content is suspiciously small, try Playwright\n if (content.trim().length < 200) {\n if (!silent) consola.warn(\"Content looks thin, retrying with browser...\");\n try {\n html = await fetchWithBrowser(url);\n const result = extract(html, url);\n const markdown = transform(result.content);\n write(markdown, output, {\n sourceUrl: url,\n title: result.title || title,\n platform: result.platform,\n });\n if (!silent) consola.success(`Written to ${output}`);\n return;\n } catch (err: any) {\n if (err?.code === \"ERR_PLAYWRIGHT_NOT_INSTALLED\") {\n consola.warn(\n \"This page may require a browser to render. 
Install Playwright:\\n\" +\n \" npm install -D playwright && npx playwright install chromium\"\n );\n } else {\n consola.warn(\"Browser fallback failed, using static content.\");\n }\n }\n }\n\n if (!silent) consola.success(`Extracted content (platform: ${platform})`);\n const markdown = transform(content);\n\n write(markdown, output, {\n sourceUrl: url,\n title,\n platform,\n });\n\n if (!silent) consola.success(`Written to ${output}`);\n }\n },\n});\n","import { ofetch } from \"ofetch\";\n\n/**\n * Fetch the raw HTML of a documentation page.\n * Uses static fetch by default.\n */\nexport async function fetchPage(url: string): Promise<string> {\n const html = await ofetch(url, { responseType: \"text\" });\n return html;\n}\n\n/**\n * Fetch a page using Playwright for JS-rendered sites.\n * Playwright is an optional dependency — throws a typed error if not installed.\n */\nexport async function fetchWithBrowser(url: string): Promise<string> {\n let playwright;\n try {\n playwright = await import(\"playwright\");\n } catch {\n const err = new Error(\n \"Playwright is not installed. 
Run:\\n npm install -D playwright && npx playwright install chromium\"\n );\n (err as any).code = \"ERR_PLAYWRIGHT_NOT_INSTALLED\";\n throw err;\n }\n\n const browser = await playwright.chromium.launch({ headless: true });\n try {\n const page = await browser.newPage();\n await page.goto(url, { waitUntil: \"networkidle\" });\n // Extra buffer for late-loading content\n await page.waitForTimeout(1000);\n const html = await page.content();\n return html;\n } finally {\n await browser.close();\n }\n}\n","import * as cheerio from \"cheerio\";\nimport { Readability } from \"@mozilla/readability\";\nimport { parseHTML } from \"linkedom\";\nimport { resolve } from \"./resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport type { PlatformId } from \"../platforms/base\";\n\nexport interface ExtractResult {\n content: string;\n title: string;\n platform: PlatformId;\n}\n\n/**\n * Extract meaningful content from raw HTML.\n * Uses platform-specific selectors when available, falls back to Readability.\n */\nexport function extract(html: string, url: string): ExtractResult {\n const $ = cheerio.load(html);\n const platform = resolve(url, $);\n const strategy = getStrategy(platform);\n\n const title = extractTitle($);\n\n // Non-generic platforms: use selector-based extraction first\n if (platform !== \"generic\") {\n for (const sel of strategy.removeSelectors()) {\n $(sel).remove();\n }\n\n const contentEl = $(strategy.contentSelector()).first();\n const selectorContent = contentEl.html();\n\n if (selectorContent && selectorContent.trim().length >= 100) {\n return { content: selectorContent, title, platform };\n }\n // Fall through to Readability if selector extraction yields too little\n }\n\n // Generic / fallback: Readability extraction\n const { document } = parseHTML(html);\n const reader = new Readability(document as any);\n const article = reader.parse();\n\n const content = article?.content || $(\"body\").html() || html;\n\n return {\n content,\n 
title: title || article?.title || \"\",\n platform,\n };\n}\n\n/**\n * Extract page title from common sources.\n */\nfunction extractTitle($: cheerio.CheerioAPI): string {\n const h1 = $(\"h1\").first().text().trim();\n if (h1) return h1;\n\n const ogTitle = $('meta[property=\"og:title\"]').attr(\"content\")?.trim();\n if (ogTitle) return ogTitle;\n\n return $(\"title\").text().trim();\n}\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const mintlify: PlatformStrategy = {\n id: \"mintlify\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Mintlify\"]').length > 0) return true;\n if ($(\"script[src*='mintlify']\").length > 0) return true;\n if ($(\"[data-mintlify]\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, main\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \".sidebar\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href], .sidebar a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const docusaurus: PlatformStrategy = {\n id: \"docusaurus\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Docusaurus\"]').length > 0)\n return true;\n if ($(\".theme-doc-sidebar-container\").length > 0) return true;\n if ($('meta[name=\"docusaurus_locale\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, [role='main'], .theme-doc-markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \".navbar\",\n \"footer\",\n \".theme-doc-toc-desktop\",\n \".theme-doc-sidebar-container\",\n \".pagination-nav\",\n \".theme-doc-breadcrumbs\",\n \"nav\",\n 
\"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".menu__link[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const readme: PlatformStrategy = {\n id: \"readme\",\n\n detect(url: string, $: CheerioAPI): boolean {\n let rmClassCount = 0;\n $(\"[class]\").each((_, el) => {\n const cls = $(el).attr(\"class\") || \"\";\n if (/\\brm-/.test(cls)) rmClassCount++;\n });\n if (rmClassCount > 2) return true;\n if ($(\".rm-Article\").length > 0) return true;\n if ($(\".rm-Markdown\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \".markdown-body, .rm-Article, .rm-Markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \".rm-Sidebar\",\n \".rm-TableOfContents\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".rm-Sidebar a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const gitbook: PlatformStrategy = {\n id: \"gitbook\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"GitBook\"]').length > 0) return true;\n try {\n const parsed = new URL(url);\n if (parsed.hostname.endsWith(\".gitbook.io\")) return true;\n } catch {\n // invalid URL, skip host check\n }\n if ($('[data-testid=\"page.contentEditor\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return '[data-testid=\"page.contentEditor\"], main, article';\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[class*='sidebar']\",\n \"[class*='toc']\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy 
} from \"./base\";\n\nexport const generic: PlatformStrategy = {\n id: \"generic\",\n\n detect(_url: string, _$: CheerioAPI): boolean {\n return true;\n },\n\n contentSelector(): string {\n return \"article, main, [role='main'], .content\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n \"noscript\",\n ];\n },\n\n navLinkSelector(): string | null {\n return null;\n },\n};\n","import type { PlatformId, PlatformStrategy } from \"./base\";\nimport { mintlify } from \"./mintlify\";\nimport { docusaurus } from \"./docusaurus\";\nimport { readme } from \"./readme\";\nimport { gitbook } from \"./gitbook\";\nimport { generic } from \"./generic\";\n\n/** Ordered list of platform strategies. Generic must be last (always matches). */\nexport const platformStrategies: PlatformStrategy[] = [\n mintlify,\n docusaurus,\n readme,\n gitbook,\n generic,\n];\n\n/** Get a strategy by its platform ID. 
*/\nexport function getStrategy(id: PlatformId): PlatformStrategy {\n const strategy = platformStrategies.find((s) => s.id === id);\n if (!strategy) {\n throw new Error(`Unknown platform: ${id}`);\n }\n return strategy;\n}\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformId } from \"../platforms/base\";\nimport { platformStrategies } from \"../platforms/registry\";\n\n/**\n * Detect which documentation platform a page belongs to.\n * Tries platform-specific strategies in order, falls back to generic.\n */\nexport function resolve(url: string, $: CheerioAPI): PlatformId {\n for (const strategy of platformStrategies) {\n if (strategy.detect(url, $)) {\n return strategy.id;\n }\n }\n return \"generic\";\n}\n","import TurndownService from \"turndown\";\nimport { gfm } from \"turndown-plugin-gfm\";\n\n/**\n * Convert clean HTML to Markdown.\n */\nexport function transform(html: string): string {\n const td = new TurndownService({\n headingStyle: \"atx\",\n codeBlockStyle: \"fenced\",\n bulletListMarker: \"-\",\n });\n\n td.use(gfm);\n\n addCalloutRule(td);\n addTabbedContentRule(td);\n addCodeBlockLangRule(td);\n addHiddenElementRule(td);\n\n return td.turndown(html);\n}\n\nfunction isElement(node: TurndownService.Node): node is HTMLElement {\n return node.nodeType === 1;\n}\n\nfunction getAttr(node: TurndownService.Node, attr: string): string {\n if (isElement(node)) {\n return node.getAttribute(attr) || \"\";\n }\n return \"\";\n}\n\nfunction getTagName(node: TurndownService.Node): string {\n if (isElement(node)) {\n return node.tagName.toLowerCase();\n }\n return \"\";\n}\n\n/**\n * Convert callouts/admonitions to blockquotes.\n * Matches: aside, .admonition, .callout, .alert, [role=\"alert\"]\n */\nfunction addCalloutRule(td: TurndownService): void {\n td.addRule(\"callouts\", {\n filter(node) {\n if (!isElement(node)) return false;\n const tag = getTagName(node);\n if (tag === \"aside\") return true;\n const cls = getAttr(node, \"class\");\n 
if (\n /\\b(admonition|callout|alert|notice|warning|info|tip|note|caution|danger)\\b/i.test(\n cls\n )\n )\n return true;\n if (getAttr(node, \"role\") === \"alert\") return true;\n return false;\n },\n replacement(content, node) {\n const cls = getAttr(node, \"class\").toLowerCase();\n let type = \"Note\";\n if (/warning|caution/.test(cls)) type = \"Warning\";\n else if (/danger|error/.test(cls)) type = \"Danger\";\n else if (/tip|success/.test(cls)) type = \"Tip\";\n else if (/info/.test(cls)) type = \"Info\";\n\n const lines = content.trim().split(\"\\n\");\n const quoted = lines.map((line) => `> ${line}`).join(\"\\n\");\n return `\\n> **${type}**\\n${quoted}\\n\\n`;\n },\n });\n}\n\n/**\n * Convert tabbed content into labeled sections.\n * Matches: .tab-panel, .tabpanel, [role=\"tabpanel\"]\n */\nfunction addTabbedContentRule(td: TurndownService): void {\n td.addRule(\"tabbed-content\", {\n filter(node) {\n if (!isElement(node)) return false;\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel|tabs__item)\\b/i.test(cls)) return true;\n if (getAttr(node, \"role\") === \"tabpanel\") return true;\n return false;\n },\n replacement(content, node) {\n const label =\n getAttr(node, \"aria-label\") ||\n getAttr(node, \"data-label\") ||\n getAttr(node, \"data-value\") ||\n \"\";\n if (label) {\n return `\\n**${label}**\\n\\n${content.trim()}\\n\\n`;\n }\n return `\\n${content.trim()}\\n\\n`;\n },\n });\n}\n\n/**\n * Ensure code blocks with data-language/data-lang produce proper fenced blocks.\n */\nfunction addCodeBlockLangRule(td: TurndownService): void {\n td.addRule(\"code-block-lang\", {\n filter(node) {\n if (!isElement(node)) return false;\n if (getTagName(node) !== \"pre\") return false;\n const codeEl = node.querySelector(\"code\");\n if (!codeEl) return false;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || 
\"\");\n return lang.length > 0;\n },\n replacement(_content, node) {\n if (!isElement(node)) return _content;\n const codeEl = node.querySelector(\"code\")!;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n const code = codeEl.textContent || \"\";\n return `\\n\\`\\`\\`${lang}\\n${code}\\n\\`\\`\\`\\n`;\n },\n });\n}\n\n/**\n * Remove hidden elements (display:none) except tab panels.\n */\nfunction addHiddenElementRule(td: TurndownService): void {\n td.addRule(\"hidden-elements\", {\n filter(node) {\n if (!isElement(node)) return false;\n const style = getAttr(node, \"style\");\n if (!/display\\s*:\\s*none/i.test(style)) return false;\n // Don't remove tab panels — they're hidden but contain valid content\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel)\\b/i.test(cls)) return false;\n if (getAttr(node, \"role\") === \"tabpanel\") return false;\n return true;\n },\n replacement() {\n return \"\";\n },\n });\n}\n","import { writeFileSync, mkdirSync } from \"node:fs\";\nimport { dirname } from \"node:path\";\nimport matter from \"gray-matter\";\n\nexport interface WriterOptions {\n sourceUrl: string;\n title: string;\n platform: string;\n}\n\n/**\n * Write Markdown with frontmatter to a file or stdout.\n */\nexport function write(\n markdown: string,\n outputPath: string | undefined,\n options: WriterOptions\n): void {\n const content = matter.stringify(markdown, {\n source: options.sourceUrl,\n fetched_at: new Date().toISOString(),\n platform: options.platform,\n title: options.title,\n docs2ai_version: \"0.1.0\",\n });\n\n if (outputPath) {\n mkdirSync(dirname(outputPath), { recursive: true });\n writeFileSync(outputPath, content, \"utf-8\");\n } else {\n process.stdout.write(content);\n }\n}\n","import * as cheerio from \"cheerio\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { 
getCrawlPrefix, isInBounds, normalizeUrl } from \"./boundary\";\n\nexport interface CrawledPage {\n url: string;\n html: string;\n}\n\nexport interface CrawlOptions {\n maxDepth: number;\n navLinkSelector?: string | null;\n onPageFetched?: (url: string, current: number, total: number) => void;\n}\n\n/**\n * Crawl documentation pages starting from a URL.\n * Follows in-bounds links via BFS up to maxDepth.\n */\nexport async function crawl(\n startUrl: string,\n options: CrawlOptions\n): Promise<CrawledPage[]> {\n const { origin, pathPrefix } = getCrawlPrefix(startUrl);\n const visited = new Set<string>();\n const results: CrawledPage[] = [];\n\n // BFS queue: [url, depth]\n const queue: [string, number][] = [[startUrl, 0]];\n visited.add(normalizeUrl(startUrl));\n\n while (queue.length > 0) {\n const [url, depth] = queue.shift()!;\n\n let html: string;\n try {\n html = await fetchPage(url);\n } catch {\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n continue;\n }\n results.push({ url, html });\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n\n if (depth < options.maxDepth) {\n const links = discoverLinks(\n html,\n url,\n origin,\n pathPrefix,\n options.navLinkSelector\n );\n for (const link of links) {\n const normalized = normalizeUrl(link);\n if (!visited.has(normalized)) {\n visited.add(normalized);\n queue.push([link, depth + 1]);\n }\n }\n }\n\n // Politeness delay between requests\n if (queue.length > 0) {\n await delay(200);\n }\n }\n\n return results;\n}\n\n/**\n * Extract all in-bounds links from a page's HTML.\n * When navLinkSelector is provided, only links matching that selector are used.\n */\nfunction discoverLinks(\n html: string,\n baseUrl: string,\n origin: string,\n pathPrefix: string,\n navLinkSelector?: string | null\n): string[] {\n const $ = cheerio.load(html);\n const links: string[] = [];\n const selector = navLinkSelector || \"a[href]\";\n\n $(selector).each((_, el) => {\n 
const href = $(el).attr(\"href\");\n if (!href) return;\n\n try {\n const resolved = new URL(href, baseUrl).href;\n if (isInBounds(resolved, origin, pathPrefix)) {\n links.push(resolved);\n }\n } catch {\n // Invalid URL, skip\n }\n });\n\n return [...new Set(links)];\n}\n\nfunction delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Validate whether a string is a valid URL.\n */\nexport function isValidUrl(input: string): boolean {\n try {\n new URL(input);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Normalize a URL for deduplication: strip hash, query, trailing slash.\n */\nexport function normalizeUrl(url: string): string {\n const parsed = new URL(url);\n parsed.hash = \"\";\n parsed.search = \"\";\n return parsed.href.replace(/\\/$/, \"\");\n}\n\n/**\n * Derive a short name/slug from a URL's hostname.\n */\nexport function slugFromUrl(url: string): string {\n try {\n const parsed = new URL(url);\n return parsed.hostname.replace(/\\./g, \"-\").replace(/^www-/, \"\");\n } catch {\n return \"source\";\n }\n}\n","import { normalizeUrl } from \"../utils/url\";\n\nexport { normalizeUrl };\n\n/**\n * Determine the crawl boundary from a starting URL.\n * Links are in-bounds if they share the same origin and path prefix.\n */\nexport function getCrawlPrefix(url: string): {\n origin: string;\n pathPrefix: string;\n} {\n const parsed = new URL(url);\n const pathParts = parsed.pathname.split(\"/\");\n // Remove the last segment (the current page slug)\n pathParts.pop();\n const pathPrefix = pathParts.join(\"/\") + \"/\";\n return { origin: parsed.origin, pathPrefix };\n}\n\n/**\n * Check whether a candidate URL falls within the crawl boundary.\n */\nexport function isInBounds(\n candidateUrl: string,\n origin: string,\n pathPrefix: string\n): boolean {\n try {\n const parsed = new URL(candidateUrl);\n return parsed.origin === origin && parsed.pathname.startsWith(pathPrefix);\n } catch {\n return 
false;\n }\n}\n","import { defineCommand } from \"citty\";\nimport { join } from \"node:path\";\nimport consola from \"consola\";\nimport { loadConfig, saveConfig, addSource } from \"../config/manager\";\nimport type { Docs2aiConfig } from \"../config/schema\";\n\nexport const addCommand = defineCommand({\n meta: {\n name: \"add\",\n description: \"Add a documentation source to .docs2ai.yaml\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to add\",\n required: true,\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Enable crawl mode for this source\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output filename\",\n },\n },\n run({ args }) {\n const url = args.url as string;\n const crawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n\n const name = (args.name as string) || slugFromUrl(url);\n const output = (args.output as string) || `${name}.md`;\n\n const existing = loadConfig();\n let config: Docs2aiConfig;\n let configPath: string;\n\n if (existing) {\n config = existing.config;\n configPath = existing.configPath;\n } else {\n configPath = join(process.cwd(), \".docs2ai.yaml\");\n config = { version: 1, outputDir: \".ai/docs\", sources: [] };\n }\n\n addSource(config, { name, url, crawl, maxDepth, output });\n saveConfig(config, configPath);\n\n consola.success(`Added source \"${name}\" → ${url}`);\n consola.info(`Config: ${configPath}`);\n },\n});\n\nfunction slugFromUrl(url: string): string {\n try {\n const parsed = new URL(url);\n return parsed.hostname.replace(/\\./g, \"-\").replace(/^www-/, \"\");\n } catch {\n return \"source\";\n }\n}\n","import { readFileSync, writeFileSync, existsSync } from 
\"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport yaml from \"js-yaml\";\nimport type { Docs2aiConfig, SourceConfig } from \"./schema\";\n\nconst CONFIG_FILENAME = \".docs2ai.yaml\";\n\n/**\n * Load the .docs2ai.yaml config file, searching up from cwd.\n * Returns null if no config file is found.\n */\nexport function loadConfig(startDir?: string): {\n config: Docs2aiConfig;\n configPath: string;\n} | null {\n const configPath = findConfigFile(startDir || process.cwd());\n if (!configPath) return null;\n\n const raw = readFileSync(configPath, \"utf-8\");\n const data = yaml.load(raw) as Record<string, any>;\n\n const config: Docs2aiConfig = {\n version: data.version ?? 1,\n outputDir: data.output_dir ?? \".ai/docs\",\n sources: (data.sources ?? []).map(snakeToCamelSource),\n };\n\n return { config, configPath };\n}\n\n/**\n * Save configuration to a .docs2ai.yaml file.\n */\nexport function saveConfig(config: Docs2aiConfig, configPath: string): void {\n const data = {\n version: config.version,\n output_dir: config.outputDir,\n sources: config.sources.map(camelToSnakeSource),\n };\n\n const content = yaml.dump(data, { lineWidth: -1 });\n writeFileSync(configPath, content, \"utf-8\");\n}\n\n/**\n * Add or update a source in the config (upsert by name).\n */\nexport function addSource(config: Docs2aiConfig, source: SourceConfig): void {\n const idx = config.sources.findIndex((s) => s.name === source.name);\n if (idx >= 0) {\n config.sources[idx] = source;\n } else {\n config.sources.push(source);\n }\n}\n\n/**\n * Walk up the directory tree looking for .docs2ai.yaml.\n */\nfunction findConfigFile(startDir: string): string | null {\n let dir = startDir;\n while (true) {\n const candidate = join(dir, CONFIG_FILENAME);\n if (existsSync(candidate)) return candidate;\n const parent = dirname(dir);\n if (parent === dir) return null;\n dir = parent;\n }\n}\n\nfunction snakeToCamelSource(s: Record<string, any>): SourceConfig {\n return {\n name: s.name ?? 
\"\",\n url: s.url ?? \"\",\n crawl: s.crawl ?? false,\n maxDepth: s.max_depth ?? 2,\n output: s.output ?? \"\",\n };\n}\n\nfunction camelToSnakeSource(\n s: SourceConfig\n): Record<string, string | number | boolean> {\n return {\n name: s.name,\n url: s.url,\n crawl: s.crawl,\n max_depth: s.maxDepth,\n output: s.output,\n };\n}\n","import { defineCommand } from \"citty\";\nimport { join, dirname } from \"node:path\";\nimport { mkdirSync } from \"node:fs\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\n\nexport const updateCommand = defineCommand({\n meta: {\n name: \"update\",\n description: \"Refresh configured documentation sources\",\n },\n args: {\n name: {\n type: \"string\",\n description: \"Update only the named source\",\n },\n },\n async run({ args }) {\n const result = loadConfig();\n if (!result) {\n consola.error(\"No .docs2ai.yaml found. Run `docs2ai add <url>` first.\");\n process.exit(1);\n }\n\n const { config, configPath } = result;\n const configDir = dirname(configPath);\n const filterName = args.name as string | undefined;\n\n const sources = filterName\n ? 
config.sources.filter((s) => s.name === filterName)\n : config.sources;\n\n if (sources.length === 0) {\n if (filterName) {\n consola.error(`Source \"${filterName}\" not found in config.`);\n } else {\n consola.error(\"No sources configured.\");\n }\n process.exit(1);\n }\n\n for (const source of sources) {\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n consola.start(`Updating \"${source.name}\" from ${source.url}...`);\n\n if (source.crawl) {\n const pages = await crawl(source.url, {\n maxDepth: source.maxDepth,\n onPageFetched: (url, current, total) => {\n consola.info(` [${current}/${total}] ${url}`);\n },\n });\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title: firstTitle,\n platform: firstPlatform,\n });\n } else {\n const html = await fetchPage(source.url);\n const { content, title, platform } = extract(html, source.url);\n const markdown = transform(content);\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title,\n platform,\n });\n }\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\n\nexport const listCommand = defineCommand({\n meta: {\n name: \"list\",\n description: \"List configured documentation sources\",\n },\n run() {\n const result = loadConfig();\n if (!result) {\n consola.info(\"No .docs2ai.yaml found. 
Run `docs2ai add <url>` to get started.\");\n return;\n }\n\n const { config, configPath } = result;\n consola.info(`Config: ${configPath}`);\n consola.info(`Output dir: ${config.outputDir}\\n`);\n\n if (config.sources.length === 0) {\n consola.info(\"No sources configured.\");\n return;\n }\n\n for (const source of config.sources) {\n const crawlInfo = source.crawl\n ? ` (crawl, depth: ${source.maxDepth})`\n : \"\";\n console.log(` ${source.name}${crawlInfo}`);\n console.log(` URL: ${source.url}`);\n console.log(` Output: ${source.output}`);\n console.log();\n }\n },\n});\n"],"mappings":";;;AAAA,SAAS,iBAAAA,gBAAe,eAAe;;;ACAvC,SAAS,qBAAqB;AAC9B,OAAO,aAAa;AACpB,YAAYC,cAAa;;;ACFzB,SAAS,cAAc;AAMvB,eAAsB,UAAU,KAA8B;AAC5D,QAAM,OAAO,MAAM,OAAO,KAAK,EAAE,cAAc,OAAO,CAAC;AACvD,SAAO;AACT;AAMA,eAAsB,iBAAiB,KAA8B;AACnE,MAAI;AACJ,MAAI;AACF,iBAAa,MAAM,OAAO,YAAY;AAAA,EACxC,QAAQ;AACN,UAAM,MAAM,IAAI;AAAA,MACd;AAAA,IACF;AACA,IAAC,IAAY,OAAO;AACpB,UAAM;AAAA,EACR;AAEA,QAAM,UAAU,MAAM,WAAW,SAAS,OAAO,EAAE,UAAU,KAAK,CAAC;AACnE,MAAI;AACF,UAAM,OAAO,MAAM,QAAQ,QAAQ;AACnC,UAAM,KAAK,KAAK,KAAK,EAAE,WAAW,cAAc,CAAC;AAEjD,UAAM,KAAK,eAAe,GAAI;AAC9B,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,WAAO;AAAA,EACT,UAAE;AACA,UAAM,QAAQ,MAAM;AAAA,EACtB;AACF;;;ACtCA,YAAY,aAAa;AACzB,SAAS,mBAAmB;AAC5B,SAAS,iBAAiB;;;ACCnB,IAAM,WAA6B;AAAA,EACxC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,6CAA6C,EAAE,SAAS,EAAG,QAAO;AACxE,QAAI,EAAE,yBAAyB,EAAE,SAAS,EAAG,QAAO;AACpD,QAAI,EAAE,iBAAiB,EAAE,SAAS,EAAG,QAAO;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;AChCO,IAAM,aAA+B;AAAA,EAC1C,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,+CAA+C,EAAE,SAAS;AAC9D,aAAO;AACT,QAAI,EAAE,8BAA8B,EAAE,SAAS,EAAG,QAAO;AACzD,QAAI,EAAE,gCAAgC,EAAE,SAAS,EAAG,QAAO;AAC3D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AA
AA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;AChCO,IAAM,SAA2B;AAAA,EACtC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,eAAe;AACnB,MAAE,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO;AAC3B,YAAM,MAAM,EAAE,EAAE,EAAE,KAAK,OAAO,KAAK;AACnC,UAAI,QAAQ,KAAK,GAAG,EAAG;AAAA,IACzB,CAAC;AACD,QAAI,eAAe,EAAG,QAAO;AAC7B,QAAI,EAAE,aAAa,EAAE,SAAS,EAAG,QAAO;AACxC,QAAI,EAAE,cAAc,EAAE,SAAS,EAAG,QAAO;AACzC,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,4CAA4C,EAAE,SAAS,EAAG,QAAO;AACvE,QAAI;AACF,YAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,UAAI,OAAO,SAAS,SAAS,aAAa,EAAG,QAAO;AAAA,IACtD,QAAQ;AAAA,IAER;AACA,QAAI,EAAE,oCAAoC,EAAE,SAAS,EAAG,QAAO;AAC/D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,MAAc,IAAyB;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACxBO,IAAM,qBAAyC;AAAA,EACpD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAGO,SAAS,YAAY,IAAkC;AAC5D,QAAM,WAAW,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3D,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,qBAAqB,EAAE,EAAE;AAAA,EAC3C;AACA,SAAO;AACT;;;ACfO,SAAS,QAAQ,KAAa,GAA2B;AAC9D,aAAW,YAAY,oBAAoB;AACzC,QAAI,SAAS,OAAO,KAAK,CAAC,GAAG;AAC3B,aAAO,SAAS;AAAA,IAClB;AAAA,EACF;AACA,SAAO;AACT;;;APEO,SAAS,QAAQ,MAAc,KAA4B;AAChE,QAAM,IAAY,aAAK,IAAI;AAC3B,QAAM,WAAW,QAAQ,KAAK,CAAC;AAC/B,QAAM,WAAW,YAAY,QAAQ;AAErC,QAAM,QAAQ,aAAa,CAAC;AAG5B,
MAAI,aAAa,WAAW;AAC1B,eAAW,OAAO,SAAS,gBAAgB,GAAG;AAC5C,QAAE,GAAG,EAAE,OAAO;AAAA,IAChB;AAEA,UAAM,YAAY,EAAE,SAAS,gBAAgB,CAAC,EAAE,MAAM;AACtD,UAAM,kBAAkB,UAAU,KAAK;AAEvC,QAAI,mBAAmB,gBAAgB,KAAK,EAAE,UAAU,KAAK;AAC3D,aAAO,EAAE,SAAS,iBAAiB,OAAO,SAAS;AAAA,IACrD;AAAA,EAEF;AAGA,QAAM,EAAE,SAAS,IAAI,UAAU,IAAI;AACnC,QAAM,SAAS,IAAI,YAAY,QAAe;AAC9C,QAAM,UAAU,OAAO,MAAM;AAE7B,QAAM,UAAU,SAAS,WAAW,EAAE,MAAM,EAAE,KAAK,KAAK;AAExD,SAAO;AAAA,IACL;AAAA,IACA,OAAO,SAAS,SAAS,SAAS;AAAA,IAClC;AAAA,EACF;AACF;AAKA,SAAS,aAAa,GAA+B;AACnD,QAAM,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvC,MAAI,GAAI,QAAO;AAEf,QAAM,UAAU,EAAE,2BAA2B,EAAE,KAAK,SAAS,GAAG,KAAK;AACrE,MAAI,QAAS,QAAO;AAEpB,SAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAChC;;;AQhEA,OAAO,qBAAqB;AAC5B,SAAS,WAAW;AAKb,SAAS,UAAU,MAAsB;AAC9C,QAAM,KAAK,IAAI,gBAAgB;AAAA,IAC7B,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,kBAAkB;AAAA,EACpB,CAAC;AAED,KAAG,IAAI,GAAG;AAEV,iBAAe,EAAE;AACjB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AAEvB,SAAO,GAAG,SAAS,IAAI;AACzB;AAEA,SAAS,UAAU,MAAiD;AAClE,SAAO,KAAK,aAAa;AAC3B;AAEA,SAAS,QAAQ,MAA4B,MAAsB;AACjE,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,aAAa,IAAI,KAAK;AAAA,EACpC;AACA,SAAO;AACT;AAEA,SAAS,WAAW,MAAoC;AACtD,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,QAAQ,YAAY;AAAA,EAClC;AACA,SAAO;AACT;AAMA,SAAS,eAAe,IAA2B;AACjD,KAAG,QAAQ,YAAY;AAAA,IACrB,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,WAAW,IAAI;AAC3B,UAAI,QAAQ,QAAS,QAAO;AAC5B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UACE,8EAA8E;AAAA,QAC5E;AAAA,MACF;AAEA,eAAO;AACT,UAAI,QAAQ,MAAM,MAAM,MAAM,QAAS,QAAO;AAC9C,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,MAAM,QAAQ,MAAM,OAAO,EAAE,YAAY;AAC/C,UAAI,OAAO;AACX,UAAI,kBAAkB,KAAK,GAAG,EAAG,QAAO;AAAA,eAC/B,eAAe,KAAK,GAAG,EAAG,QAAO;AAAA,eACjC,cAAc,KAAK,GAAG,EAAG,QAAO;AAAA,eAChC,OAAO,KAAK,GAAG,EAAG,QAAO;AAElC,YAAM,QAAQ,QAAQ,KAAK,EAAE,MAAM,IAAI;AACvC,YAAM,SAAS,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI,EAAE,EAAE,KAAK,IAAI;AACzD,aAAO;AAAA,MAAS,IAAI;AAAA,EAAO,MAAM;AAAA;AAAA;AAAA,IACnC;AAAA,EACF,CAAC;AACH;AAMA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,kBAAkB;AAAA,IAC3B,OAA
O,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,uCAAuC,KAAK,GAAG,EAAG,QAAO;AAC7D,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,QACJ,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B;AACF,UAAI,OAAO;AACT,eAAO;AAAA,IAAO,KAAK;AAAA;AAAA,EAAS,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,MAC5C;AACA,aAAO;AAAA,EAAK,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,IAC5B;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,UAAI,WAAW,IAAI,MAAM,MAAO,QAAO;AACvC,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAI,CAAC,OAAQ,QAAO;AACpB,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,aAAO,KAAK,SAAS;AAAA,IACvB;AAAA,IACA,YAAY,UAAU,MAAM;AAC1B,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,YAAM,OAAO,OAAO,eAAe;AACnC,aAAO;AAAA,QAAW,IAAI;AAAA,EAAK,IAAI;AAAA;AAAA;AAAA,IACjC;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,QAAQ,QAAQ,MAAM,OAAO;AACnC,UAAI,CAAC,sBAAsB,KAAK,KAAK,EAAG,QAAO;AAE/C,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,4BAA4B,KAAK,GAAG,EAAG,QAAO;AAClD,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,cAAc;AACZ,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;ACzJA,SAAS,eAAe,iBAAiB;AACzC,SAAS,eAAe;AACxB,OAAO,YAAY;AAWZ,SAAS,MACd,UACA,YACA,SACM;AACN,QAAM,UAAU,OAAO,UAAU,UAAU;AAAA,IACzC,QAAQ,QAAQ;AAAA,IAChB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,iBAAiB;AAAA,EACnB,CAAC;AAED,MAAI,YAAY;AACd,cAAU,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,kBAAc,YAAY,SAAS,OAAO;AAAA,EAC5C,OAAO;AACL,YAAQ,OAAO,MAAM,OAAO;AAAA,EAC9B;AACF;;;AChCA,YAAYC,cAAa;;;ACelB,SAAS,aAAa,KAAqB;AAChD,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,SAAO,OAAO;AACd,SAAO,SAAS;AAChB,SAAO,OAAO,KAAK,QAAQ,OAAO,EAAE;AACtC;;;ACZO,SAAS,eAAe,K
AG7B;AACA,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,QAAM,YAAY,OAAO,SAAS,MAAM,GAAG;AAE3C,YAAU,IAAI;AACd,QAAM,aAAa,UAAU,KAAK,GAAG,IAAI;AACzC,SAAO,EAAE,QAAQ,OAAO,QAAQ,WAAW;AAC7C;AAKO,SAAS,WACd,cACA,QACA,YACS;AACT,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,YAAY;AACnC,WAAO,OAAO,WAAW,UAAU,OAAO,SAAS,WAAW,UAAU;AAAA,EAC1E,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AFfA,eAAsB,MACpB,UACA,SACwB;AACxB,QAAM,EAAE,QAAQ,WAAW,IAAI,eAAe,QAAQ;AACtD,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,UAAyB,CAAC;AAGhC,QAAM,QAA4B,CAAC,CAAC,UAAU,CAAC,CAAC;AAChD,UAAQ,IAAI,aAAa,QAAQ,CAAC;AAElC,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,CAAC,KAAK,KAAK,IAAI,MAAM,MAAM;AAEjC,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,UAAU,GAAG;AAAA,IAC5B,QAAQ;AACN,cAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAC1E;AAAA,IACF;AACA,YAAQ,KAAK,EAAE,KAAK,KAAK,CAAC;AAC1B,YAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAE1E,QAAI,QAAQ,QAAQ,UAAU;AAC5B,YAAM,QAAQ;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,MACV;AACA,iBAAW,QAAQ,OAAO;AACxB,cAAM,aAAa,aAAa,IAAI;AACpC,YAAI,CAAC,QAAQ,IAAI,UAAU,GAAG;AAC5B,kBAAQ,IAAI,UAAU;AACtB,gBAAM,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC;AAAA,QAC9B;AAAA,MACF;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,MAAM,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,cACP,MACA,SACA,QACA,YACA,iBACU;AACV,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAW,mBAAmB;AAEpC,IAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,UAAM,OAAO,EAAE,EAAE,EAAE,KAAK,MAAM;AAC9B,QAAI,CAAC,KAAM;AAEX,QAAI;AACF,YAAM,WAAW,IAAI,IAAI,MAAM,OAAO,EAAE;AACxC,UAAI,WAAW,UAAU,QAAQ,UAAU,GAAG;AAC5C,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,CAAC;AAED,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAC3B;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAACC,aAAY,WAAWA,UAAS,EAAE,CAAC;AACzD;;;AZ7FO,IAAM,eAAe,cAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AA
AA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,MAAM,KAAK;AACjB,UAAM,SAAS,KAAK;AACpB,UAAM,cAAc,KAAK;AACzB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AACzD,UAAM,SAAS,CAAC;AAEhB,QAAI,aAAa;AACf,UAAI,CAAC,OAAQ,SAAQ,MAAM,iBAAiB,GAAG,gBAAgB,QAAQ,MAAM;AAG7E,YAAM,YAAY,MAAM,UAAU,GAAG;AACrC,YAAM,IAAY,cAAK,SAAS;AAChC,YAAM,aAAa,QAAQ,KAAK,CAAC;AACjC,YAAM,WAAW,YAAY,UAAU;AACvC,YAAM,kBAAkB,SAAS,gBAAgB;AAEjD,YAAM,QAAQ,MAAM,MAAM,KAAK;AAAA,QAC7B;AAAA,QACA;AAAA,QACA,eAAe,CAAC,SAAS,SAAS,UAAU;AAC1C,cAAI,CAAC,OAAQ,SAAQ,KAAK,IAAI,OAAO,IAAI,KAAK,KAAK,OAAO,EAAE;AAAA,QAC9D;AAAA,MACF,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,WAAW,MAAM,MAAM,QAAQ;AAE5D,YAAM,WAAqB,CAAC;AAC5B,UAAI,aAAa;AACjB,UAAI,gBAAgB;AAEpB,iBAAW,QAAQ,OAAO;AACxB,cAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,YAAI,CAAC,YAAY;AACf,uBAAa;AACb,0BAAgB;AAAA,QAClB;AACA,cAAM,KAAK,UAAU,OAAO;AAC5B,iBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,MAC7D;AAEA,YAAM,WAAW,SAAS,KAAK,aAAa;AAE5C,YAAM,UAAU,QAAQ;AAAA,QACtB,WAAW;AAAA,QACX,OAAO;AAAA,QACP,UAAU;AAAA,MACZ,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,MAAM,EAAE;AAAA,IACrD,OAAO;AACL,UAAI,CAAC,OAAQ,SAAQ,MAAM,YAAY,GAAG,KAAK;AAC/C,UAAI,OAAO,MAAM,UAAU,GAAG;AAE9B,YAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,GAAG;AAGtD,UAAI,QAAQ,KAAK,EAAE,SAAS,KAAK;AAC/B,YAAI,CAAC,OAAQ,SAAQ,KAAK,8CAA8C;AACxE,YAAI;AACF,iBAAO,MAAM,iBAAiB,GAAG;AACjC,gBAAM,SAAS,QAAQ,MAAM,GAAG;AAChC,gBAAMC,YAAW,UAAU,OAAO,OAAO;AACzC,gBAAMA,WAAU,QAAQ;AAAA,YACtB,WAAW;AAAA,YACX,OAAO,OAAO,SAAS;AAAA,YACvB,UAAU,OAAO;AAAA,UACnB,CAAC;AACD,cAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,MAAM,EAAE;AACnD;AAAA,QACF,SAAS,KAAU;AACjB,cAAI,KAAK,SAAS,gCAAgC;AAChD,oBAAQ;AAAA,cACN;AAAA,YAEF;AAAA,UACF,OAAO;AACL,oBAAQ,KAAK,gDAAgD;AAAA,UAC/D;AAAA,QACF;AAAA,MACF;AAEA,UAAI,CAAC,OAAQ,SAAQ,QAAQ,gCAAgC,QAAQ,GAAG;AACxE,YAAM,WAAW,UAAU,OAAO;AAElC,YAAM,UAAU,QAAQ;AAAA,QACtB,WAAW;AAAA,QACX;AAAA,QACA;AAAA,MACF,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,MAAM,EAAE;AAAA,IACrD;AAAA,EACF;AACF,CAAC;;;AepID,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS
,QAAAC,aAAY;AACrB,OAAOC,cAAa;;;ACFpB,SAAS,cAAc,iBAAAC,gBAAe,kBAAkB;AACxD,SAAS,MAAM,WAAAC,gBAAe;AAC9B,OAAO,UAAU;AAGjB,IAAM,kBAAkB;AAMjB,SAAS,WAAW,UAGlB;AACP,QAAM,aAAa,eAAe,YAAY,QAAQ,IAAI,CAAC;AAC3D,MAAI,CAAC,WAAY,QAAO;AAExB,QAAM,MAAM,aAAa,YAAY,OAAO;AAC5C,QAAM,OAAO,KAAK,KAAK,GAAG;AAE1B,QAAM,SAAwB;AAAA,IAC5B,SAAS,KAAK,WAAW;AAAA,IACzB,WAAW,KAAK,cAAc;AAAA,IAC9B,UAAU,KAAK,WAAW,CAAC,GAAG,IAAI,kBAAkB;AAAA,EACtD;AAEA,SAAO,EAAE,QAAQ,WAAW;AAC9B;AAKO,SAAS,WAAW,QAAuB,YAA0B;AAC1E,QAAM,OAAO;AAAA,IACX,SAAS,OAAO;AAAA,IAChB,YAAY,OAAO;AAAA,IACnB,SAAS,OAAO,QAAQ,IAAI,kBAAkB;AAAA,EAChD;AAEA,QAAM,UAAU,KAAK,KAAK,MAAM,EAAE,WAAW,GAAG,CAAC;AACjD,EAAAD,eAAc,YAAY,SAAS,OAAO;AAC5C;AAKO,SAAS,UAAU,QAAuB,QAA4B;AAC3E,QAAM,MAAM,OAAO,QAAQ,UAAU,CAAC,MAAM,EAAE,SAAS,OAAO,IAAI;AAClE,MAAI,OAAO,GAAG;AACZ,WAAO,QAAQ,GAAG,IAAI;AAAA,EACxB,OAAO;AACL,WAAO,QAAQ,KAAK,MAAM;AAAA,EAC5B;AACF;AAKA,SAAS,eAAe,UAAiC;AACvD,MAAI,MAAM;AACV,SAAO,MAAM;AACX,UAAM,YAAY,KAAK,KAAK,eAAe;AAC3C,QAAI,WAAW,SAAS,EAAG,QAAO;AAClC,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,SAAS,mBAAmB,GAAsC;AAChE,SAAO;AAAA,IACL,MAAM,EAAE,QAAQ;AAAA,IAChB,KAAK,EAAE,OAAO;AAAA,IACd,OAAO,EAAE,SAAS;AAAA,IAClB,UAAU,EAAE,aAAa;AAAA,IACzB,QAAQ,EAAE,UAAU;AAAA,EACtB;AACF;AAEA,SAAS,mBACP,GAC2C;AAC3C,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR,KAAK,EAAE;AAAA,IACP,OAAO,EAAE;AAAA,IACT,WAAW,EAAE;AAAA,IACb,QAAQ,EAAE;AAAA,EACZ;AACF;;;ADpFO,IAAM,aAAaC,eAAc;AAAA,EACtC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,IAAI,EAAE,KAAK,GAAG;AACZ,UAAM,MAAM,KAAK;AACjB,UAAMC,SAAQ,KAAK;AACnB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AAEzD,UAAM,OAAQ,KAAK,QAAmB,YAAY,GAAG;AACrD,UAAM,SAAU,KAAK,UAAqB,GAAG,IAAI;AAEjD
,UAAM,WAAW,WAAW;AAC5B,QAAI;AACJ,QAAI;AAEJ,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,mBAAa,SAAS;AAAA,IACxB,OAAO;AACL,mBAAaC,MAAK,QAAQ,IAAI,GAAG,eAAe;AAChD,eAAS,EAAE,SAAS,GAAG,WAAW,YAAY,SAAS,CAAC,EAAE;AAAA,IAC5D;AAEA,cAAU,QAAQ,EAAE,MAAM,KAAK,OAAAD,QAAO,UAAU,OAAO,CAAC;AACxD,eAAW,QAAQ,UAAU;AAE7B,IAAAE,SAAQ,QAAQ,iBAAiB,IAAI,YAAO,GAAG,EAAE;AACjD,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AAAA,EACtC;AACF,CAAC;AAED,SAAS,YAAY,KAAqB;AACxC,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,OAAO,SAAS,QAAQ,OAAO,GAAG,EAAE,QAAQ,SAAS,EAAE;AAAA,EAChE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AExEA,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,QAAAC,OAAM,WAAAC,gBAAe;AAC9B,SAAS,aAAAC,kBAAiB;AAC1B,OAAOC,cAAa;AAQb,IAAM,gBAAgBC,eAAc;AAAA,EACzC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,MAAM,wDAAwD;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,UAAM,YAAYC,SAAQ,UAAU;AACpC,UAAM,aAAa,KAAK;AAExB,UAAM,UAAU,aACZ,OAAO,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,IAClD,OAAO;AAEX,QAAI,QAAQ,WAAW,GAAG;AACxB,UAAI,YAAY;AACd,QAAAD,SAAQ,MAAM,WAAW,UAAU,wBAAwB;AAAA,MAC7D,OAAO;AACL,QAAAA,SAAQ,MAAM,wBAAwB;AAAA,MACxC;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,eAAW,UAAU,SAAS;AAC5B,YAAM,aAAaE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AAClE,MAAAC,WAAUF,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,MAAAD,SAAQ,MAAM,aAAa,OAAO,IAAI,UAAU,OAAO,GAAG,KAAK;AAE/D,UAAI,OAAO,OAAO;AAChB,cAAM,QAAQ,MAAM,MAAM,OAAO,KAAK;AAAA,UACpC,UAAU,OAAO;AAAA,UACjB,eAAe,CAAC,KAAK,SAAS,UAAU;AACtC,YAAAA,SAAQ,KAAK,MAAM,OAAO,IAAI,KAAK,KAAK,GAAG,EAAE;AAAA,UAC/C;AAAA,QACF,CAAC;AAED,cAAM,WAAqB,CAAC;AAC5B,YAAI,aAAa;AACjB,YAAI,gBAAgB;AAEpB,mBAAW,QAAQ,OAAO;AACxB,gBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,cAAI,CAAC,YAAY;AACf,yBAAa;AACb,4BAAgB;AAAA,UAClB;AACA,gBAAM,KAAK,UAAU,OAAO;AAC5B,mBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,QAC7D;AAEA,cAAM,WAAW,SAAS,KAAK,aAAa;AAC5C,cAAM,UAAU,YAAY;AAAA,UAC1B,W
AAW,OAAO;AAAA,UAClB,OAAO;AAAA,UACP,UAAU;AAAA,QACZ,CAAC;AAAA,MACH,OAAO;AACL,cAAM,OAAO,MAAM,UAAU,OAAO,GAAG;AACvC,cAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,OAAO,GAAG;AAC7D,cAAM,WAAW,UAAU,OAAO;AAClC,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW,OAAO;AAAA,UAClB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAEA,MAAAA,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,UAAU,EAAE;AAAA,IAC5D;AAAA,EACF;AACF,CAAC;;;AC9FD,SAAS,iBAAAI,sBAAqB;AAC9B,OAAOC,cAAa;AAGb,IAAM,cAAcC,eAAc;AAAA,EACvC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AACJ,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,KAAK,iEAAiE;AAC9E;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AACpC,IAAAA,SAAQ,KAAK,eAAe,OAAO,SAAS;AAAA,CAAI;AAEhD,QAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,MAAAA,SAAQ,KAAK,wBAAwB;AACrC;AAAA,IACF;AAEA,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,YAAY,OAAO,QACrB,mBAAmB,OAAO,QAAQ,MAClC;AACJ,cAAQ,IAAI,KAAK,OAAO,IAAI,GAAG,SAAS,EAAE;AAC1C,cAAQ,IAAI,eAAe,OAAO,GAAG,EAAE;AACvC,cAAQ,IAAI,eAAe,OAAO,MAAM,EAAE;AAC1C,cAAQ,IAAI;AAAA,IACd;AAAA,EACF;AACF,CAAC;;;AnB7BD,IAAM,OAAOC,eAAc;AAAA,EACzB,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,KAAK;AAAA,IACL,QAAQ;AAAA,IACR,MAAM;AAAA,EACR;AAAA,EACA,IAAI,EAAE,KAAK,GAAG;AACZ,QAAI,CAAC,KAAK,KAAK;AACb,cAAQ,IAAI,+CAA+C;AAC3D,cAAQ,IAAI,kDAAkD;AAC9D,cAAQ,IAAI,qCAAqC;AACjD,cAAQ,IAAI,qBAAqB;AACjC,cAAQ,IAAI,wCAAwC;AACpD;AAAA,IACF;AACA,WAAQ,aAAqB,IAAI,EAAE,KAAK,CAAC;AAAA,EAC3C;AACF,CAAC;AAED,QAAQ,IAAI;","names":["defineCommand","cheerio","cheerio","resolve","markdown","defineCommand","join","consola","writeFileSync","dirname","defineCommand","crawl","join","consola","defineCommand","join","dirname","mkdirSync","consola","defineCommand","consola","dirname","join",
"mkdirSync","defineCommand","consola","defineCommand","consola","defineCommand"]}
package/package.json ADDED
@@ -0,0 +1,72 @@
1
+ {
2
+ "name": "docs2ai",
3
+ "version": "0.1.0",
4
+ "description": "Convert online documentation into AI-ready Markdown context files",
5
+ "type": "module",
6
+ "bin": {
7
+ "docs2ai": "dist/cli.mjs"
8
+ },
9
+ "scripts": {
10
+ "dev": "tsx src/cli.ts",
11
+ "build": "tsup",
12
+ "test": "vitest run",
13
+ "test:watch": "vitest",
14
+ "lint": "eslint src/",
15
+ "format": "prettier --write src/"
16
+ },
17
+ "keywords": [
18
+ "documentation",
19
+ "markdown",
20
+ "ai",
21
+ "context",
22
+ "cli",
23
+ "llm",
24
+ "vibe-coding",
25
+ "cursor",
26
+ "copilot",
27
+ "claude",
28
+ "docs",
29
+ "scraper",
30
+ "turndown",
31
+ "readability"
32
+ ],
33
+ "license": "MIT",
34
+ "repository": {
35
+ "type": "git",
36
+ "url": "git+https://github.com/CharlesGrangerTheveniau/ctxify.git"
37
+ },
38
+ "homepage": "https://github.com/CharlesGrangerTheveniau/ctxify",
39
+ "bugs": {
40
+ "url": "https://github.com/CharlesGrangerTheveniau/ctxify/issues"
41
+ },
42
+ "files": [
43
+ "dist",
44
+ "README.md"
45
+ ],
46
+ "engines": {
47
+ "node": ">=18"
48
+ },
49
+ "dependencies": {
50
+ "@mozilla/readability": "^0.6.0",
51
+ "cheerio": "^1.2.0",
52
+ "citty": "^0.2.0",
53
+ "consola": "^3.4.2",
54
+ "gray-matter": "^4.0.3",
55
+ "js-yaml": "^4.1.1",
56
+ "linkedom": "^0.18.12",
57
+ "ofetch": "^1.5.1",
58
+ "turndown": "^7.2.2",
59
+ "turndown-plugin-gfm": "^1.0.2"
60
+ },
61
+ "devDependencies": {
62
+ "@types/js-yaml": "^4.0.9",
63
+ "@types/node": "^25.2.2",
64
+ "@types/turndown": "^5.0.6",
65
+ "eslint": "^10.0.0",
66
+ "prettier": "^3.8.1",
67
+ "tsup": "^8.5.1",
68
+ "tsx": "^4.21.0",
69
+ "typescript": "^5.9.3",
70
+ "vitest": "^4.0.18"
71
+ }
72
+ }