docs2ai 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
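For orientation before the raw diff: in 0.1.2, crawl mode writes one Markdown file per page into an output directory (default `.ai/docs/<name>/`), plus a per-source `_index.json` and a root `manifest.json`. The TypeScript shapes below are a sketch inferred from `writePages`, `buildSourceManifest`, and `updateRootManifest` in the bundled code that follows; they are not published types, and the example layout is hypothetical.

```ts
// Shapes inferred from the bundled 0.1.2 output in this diff; docs2ai does not
// export these types, so treat them as an illustrative sketch only.

// One entry per crawled page, as collected by writePages().
interface ManifestPageEntry {
  title: string;
  path: string; // relative Markdown path derived by filePathForPage()
}

// Written to <outputDir>/_index.json by writeSourceManifest().
interface SourceManifest {
  name: string;
  url: string;
  platform: string;
  fetched_at: string; // ISO-8601 timestamp
  pages: ManifestPageEntry[];
}

// Written to <rootDir>/manifest.json by updateRootManifest(), upserted by name.
interface RootManifest {
  sources: { name: string; path: string; fetched_at: string }[];
}

// Hypothetical layout for `docs2ai https://example.com/docs --crawl`
// (name defaults to the hostname slug "example-com"):
//   .ai/docs/manifest.json
//   .ai/docs/example-com/_index.json
//   .ai/docs/example-com/index.md
//   .ai/docs/example-com/guides/install.md
```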
package/dist/cli.mjs CHANGED
@@ -1,12 +1,13 @@
  #!/usr/bin/env node

  // src/cli.ts
- import { defineCommand as defineCommand5, runMain } from "citty";
+ import { defineCommand as defineCommand5, runMain, runCommand } from "citty";

  // src/commands/fetch.ts
  import { defineCommand } from "citty";
+ import { dirname as dirname3 } from "path";
  import consola from "consola";
- import * as cheerio3 from "cheerio";
+ import * as cheerio4 from "cheerio";

  // src/pipeline/fetcher.ts
  import { ofetch } from "ofetch";
@@ -38,11 +39,12 @@ async function fetchWithBrowser(url) {
  }

  // src/pipeline/extractor.ts
- import * as cheerio from "cheerio";
+ import * as cheerio2 from "cheerio";
  import { Readability } from "@mozilla/readability";
  import { parseHTML } from "linkedom";

  // src/platforms/mintlify.ts
+ import * as cheerio from "cheerio";
  var mintlify = {
    id: "mintlify",
    detect(url, $) {
@@ -69,7 +71,28 @@ var mintlify = {
      ];
    },
    navLinkSelector() {
-     return "nav a[href], .sidebar a[href]";
+     return "nav a[href], .sidebar a[href], [class*='sidebar'] a[href]";
+   },
+   discoverUrls(html, baseUrl) {
+     const $ = cheerio.load(html);
+     const paths = /* @__PURE__ */ new Set();
+     $("script").each((_, el) => {
+       const text = $(el).html() || "";
+       const escaped = /\\?"href\\?"\s*:\s*\\?"(\/[a-z0-9][a-z0-9\/-]*)\\?"/g;
+       let match = escaped.exec(text);
+       while (match !== null) {
+         paths.add(match[1]);
+         match = escaped.exec(text);
+       }
+     });
+     const origin = new URL(baseUrl).origin;
+     const basePath = new URL(baseUrl).pathname.split("/").slice(0, 2).join("/");
+     return [...paths].map((p) => {
+       if (p.startsWith(basePath)) {
+         return origin + p;
+       }
+       return origin + basePath + p;
+     });
    }
  };

@@ -167,7 +190,7 @@ var gitbook = {
      ];
    },
    navLinkSelector() {
-     return "nav a[href]";
+     return "nav a[href], aside a[href]";
    }
  };

@@ -227,7 +250,7 @@ function resolve(url, $) {

  // src/pipeline/extractor.ts
  function extract(html, url) {
-   const $ = cheerio.load(html);
+   const $ = cheerio2.load(html);
    const platform = resolve(url, $);
    const strategy = getStrategy(platform);
    const title = extractTitle($);
@@ -389,8 +412,23 @@ function addHiddenElementRule(td) {

  // src/pipeline/writer.ts
  import { writeFileSync, mkdirSync } from "fs";
- import { dirname } from "path";
+ import { dirname, join } from "path";
  import matter from "gray-matter";
+
+ // src/utils/slug.ts
+ function filePathForPage(pageUrl, basePrefix) {
+   const parsed = new URL(pageUrl);
+   let pathname = parsed.pathname.replace(/\/+$/, "");
+   const normalizedPrefix = basePrefix.replace(/\/+$/, "");
+   if (pathname.startsWith(normalizedPrefix)) {
+     pathname = pathname.slice(normalizedPrefix.length);
+   }
+   pathname = pathname.replace(/^\/+/, "");
+   if (!pathname) return "index.md";
+   return pathname + ".md";
+ }
+
+ // src/pipeline/writer.ts
  function write(markdown, outputPath, options) {
    const content = matter.stringify(markdown, {
      source: options.sourceUrl,
@@ -406,9 +444,42 @@ function write(markdown, outputPath, options) {
      process.stdout.write(content);
    }
  }
+ function writePage(markdown, filePath, options) {
+   const content = matter.stringify(markdown, {
+     source: options.sourceUrl,
+     fetched_at: (/* @__PURE__ */ new Date()).toISOString(),
+     platform: options.platform,
+     title: options.title,
+     docs2ai_version: "0.1.0"
+   });
+   mkdirSync(dirname(filePath), { recursive: true });
+   writeFileSync(filePath, content, "utf-8");
+ }
+ function writePages(pages, outputDir, basePrefix) {
+   const usedPaths = /* @__PURE__ */ new Set();
+   const entries = [];
+   for (const page of pages) {
+     let relPath = filePathForPage(page.url, basePrefix);
+     if (usedPaths.has(relPath)) {
+       const base = relPath.replace(/\.md$/, "");
+       let i = 2;
+       while (usedPaths.has(`${base}-${i}.md`)) i++;
+       relPath = `${base}-${i}.md`;
+     }
+     usedPaths.add(relPath);
+     const filePath = join(outputDir, relPath);
+     writePage(page.markdown, filePath, {
+       sourceUrl: page.url,
+       title: page.title,
+       platform: page.platform
+     });
+     entries.push({ title: page.title, path: relPath });
+   }
+   return entries;
+ }

  // src/crawl/crawler.ts
- import * as cheerio2 from "cheerio";
+ import * as cheerio3 from "cheerio";

  // src/utils/url.ts
  function normalizeUrl(url) {
@@ -417,6 +488,14 @@ function normalizeUrl(url) {
    parsed.search = "";
    return parsed.href.replace(/\/$/, "");
  }
+ function slugFromUrl(url) {
+   try {
+     const parsed = new URL(url);
+     return parsed.hostname.replace(/\./g, "-").replace(/^www-/, "");
+   } catch {
+     return "source";
+   }
+ }

  // src/crawl/boundary.ts
  function getCrawlPrefix(url) {
@@ -426,6 +505,19 @@ function getCrawlPrefix(url) {
    const pathPrefix = pathParts.join("/") + "/";
    return { origin: parsed.origin, pathPrefix };
  }
+ function computeCommonPrefix(startUrl, navUrls) {
+   const startParts = new URL(startUrl).pathname.split("/").filter(Boolean);
+   const parts = [...startParts];
+   for (const url of navUrls) {
+     const urlParts = new URL(url).pathname.split("/").filter(Boolean);
+     let i = 0;
+     while (i < parts.length && i < urlParts.length && parts[i] === urlParts[i]) {
+       i++;
+     }
+     parts.length = i;
+   }
+   return "/" + (parts.length > 0 ? parts.join("/") + "/" : "");
+ }
  function isInBounds(candidateUrl, origin, pathPrefix) {
    try {
      const parsed = new URL(candidateUrl);
@@ -437,9 +529,11 @@ function isInBounds(candidateUrl, origin, pathPrefix) {

  // src/crawl/crawler.ts
  async function crawl(startUrl, options) {
-   const { origin, pathPrefix } = getCrawlPrefix(startUrl);
+   const { origin } = getCrawlPrefix(startUrl);
+   let { pathPrefix } = getCrawlPrefix(startUrl);
    const visited = /* @__PURE__ */ new Set();
    const results = [];
+   let isFirstPage = true;
    const queue = [[startUrl, 0]];
    visited.add(normalizeUrl(startUrl));
    while (queue.length > 0) {
@@ -454,13 +548,14 @@ async function crawl(startUrl, options) {
      results.push({ url, html });
      options.onPageFetched?.(url, results.length, results.length + queue.length);
      if (depth < options.maxDepth) {
-       const links = discoverLinks(
-         html,
-         url,
-         origin,
-         pathPrefix,
-         options.navLinkSelector
-       );
+       if (isFirstPage) {
+         const allNavUrls = options.discoverUrls ? discoverSameOriginCustom(html, url, origin, options.discoverUrls) : discoverSameOrigin(html, url, origin, options.navLinkSelector);
+         if (allNavUrls.length > 0) {
+           pathPrefix = computeCommonPrefix(startUrl, allNavUrls);
+         }
+         isFirstPage = false;
+       }
+       const links = options.discoverUrls ? discoverLinksCustom(html, url, origin, pathPrefix, options.discoverUrls) : discoverLinks(html, url, origin, pathPrefix, options.navLinkSelector);
        for (const link of links) {
          const normalized = normalizeUrl(link);
          if (!visited.has(normalized)) {
@@ -473,10 +568,10 @@ async function crawl(startUrl, options) {
        await delay(200);
      }
    }
-   return results;
+   return { pages: results, effectivePrefix: pathPrefix };
  }
  function discoverLinks(html, baseUrl, origin, pathPrefix, navLinkSelector) {
-   const $ = cheerio2.load(html);
+   const $ = cheerio3.load(html);
    const links = [];
    const selector = navLinkSelector || "a[href]";
    $(selector).each((_, el) => {
@@ -492,11 +587,103 @@ function discoverLinks(html, baseUrl, origin, pathPrefix, navLinkSelector) {
    });
    return [...new Set(links)];
  }
+ function discoverSameOrigin(html, baseUrl, origin, navLinkSelector) {
+   const $ = cheerio3.load(html);
+   const links = [];
+   const selector = navLinkSelector || "a[href]";
+   $(selector).each((_, el) => {
+     const href = $(el).attr("href");
+     if (!href) return;
+     try {
+       const resolved = new URL(href, baseUrl).href;
+       if (new URL(resolved).origin === origin) {
+         links.push(resolved);
+       }
+     } catch {
+     }
+   });
+   return [...new Set(links)];
+ }
+ function discoverSameOriginCustom(html, baseUrl, origin, discoverUrls) {
+   const urls = discoverUrls(html, baseUrl);
+   return [
+     ...new Set(
+       urls.filter((u) => {
+         try {
+           return new URL(u).origin === origin;
+         } catch {
+           return false;
+         }
+       })
+     )
+   ];
+ }
+ function discoverLinksCustom(html, baseUrl, origin, pathPrefix, discoverUrls) {
+   const urls = discoverUrls(html, baseUrl);
+   return [...new Set(urls.filter((u) => isInBounds(u, origin, pathPrefix)))];
+ }
  function delay(ms) {
    return new Promise((resolve2) => setTimeout(resolve2, ms));
  }

+ // src/pipeline/manifest.ts
+ import { readFileSync, writeFileSync as writeFileSync2, mkdirSync as mkdirSync2 } from "fs";
+ import { join as join2 } from "path";
+ function buildSourceManifest(name, url, platform, pages) {
+   return {
+     name,
+     url,
+     platform,
+     fetched_at: (/* @__PURE__ */ new Date()).toISOString(),
+     pages
+   };
+ }
+ function writeSourceManifest(manifest, outputDir) {
+   mkdirSync2(outputDir, { recursive: true });
+   writeFileSync2(
+     join2(outputDir, "_index.json"),
+     JSON.stringify(manifest, null, 2) + "\n",
+     "utf-8"
+   );
+ }
+ function loadRootManifest(rootDir) {
+   try {
+     const raw = readFileSync(join2(rootDir, "manifest.json"), "utf-8");
+     return JSON.parse(raw);
+   } catch {
+     return { sources: [] };
+   }
+ }
+ function updateRootManifest(rootDir, entry) {
+   const manifest = loadRootManifest(rootDir);
+   const idx = manifest.sources.findIndex((s) => s.name === entry.name);
+   if (idx >= 0) {
+     manifest.sources[idx] = entry;
+   } else {
+     manifest.sources.push(entry);
+   }
+   mkdirSync2(rootDir, { recursive: true });
+   writeFileSync2(
+     join2(rootDir, "manifest.json"),
+     JSON.stringify(manifest, null, 2) + "\n",
+     "utf-8"
+   );
+ }
+
  // src/commands/fetch.ts
+ function resolveOutputMode(output, shouldCrawl, name) {
+   if (!shouldCrawl) {
+     return { mode: "single-file", outputPath: output, outputDir: "" };
+   }
+   if (output && output.endsWith(".md")) {
+     return { mode: "single-file", outputPath: output, outputDir: "" };
+   }
+   if (output) {
+     const dir = output.endsWith("/") ? output : output + "/";
+     return { mode: "directory", outputPath: void 0, outputDir: dir };
+   }
+   return { mode: "directory", outputPath: void 0, outputDir: `.ai/docs/${name}/` };
+ }
  var fetchCommand = defineCommand({
    meta: {
      name: "fetch",
@@ -511,7 +698,11 @@ var fetchCommand = defineCommand({
      output: {
        type: "string",
        alias: "o",
-       description: "Output file path"
+       description: "Output file path or directory"
+     },
+     name: {
+       type: "string",
+       description: "Name for this source (auto-derived from hostname if omitted)"
      },
      crawl: {
        type: "boolean",
@@ -529,45 +720,68 @@ var fetchCommand = defineCommand({
      const output = args.output;
      const shouldCrawl = args.crawl;
      const maxDepth = parseInt(args["max-depth"], 10);
-     const silent = !output;
+     const name = args.name || slugFromUrl(url);
+     const { mode, outputPath, outputDir } = resolveOutputMode(output, shouldCrawl, name);
+     const silent = mode === "single-file" && !outputPath;
      if (shouldCrawl) {
        if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);
        const firstHtml = await fetchPage(url);
-       const $ = cheerio3.load(firstHtml);
+       const $ = cheerio4.load(firstHtml);
        const platformId = resolve(url, $);
        const strategy = getStrategy(platformId);
        const navLinkSelector = strategy.navLinkSelector();
-       const pages = await crawl(url, {
+       const crawlResult = await crawl(url, {
          maxDepth,
          navLinkSelector,
+         discoverUrls: strategy.discoverUrls?.bind(strategy),
          onPageFetched: (pageUrl, current, total) => {
            if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);
          }
        });
+       const { pages, effectivePrefix } = crawlResult;
        if (!silent) consola.success(`Crawled ${pages.length} pages`);
-       const sections = [];
-       let firstTitle = "";
-       let firstPlatform = "";
-       for (const page of pages) {
-         const { content, title, platform } = extract(page.html, page.url);
-         if (!firstTitle) {
-           firstTitle = title;
-           firstPlatform = platform;
-         }
-         const md = transform(content);
-         sections.push(`## ${title}
+       if (mode === "directory") {
+         const pageEntries = pages.map((page) => {
+           const { content, title, platform } = extract(page.html, page.url);
+           const md = transform(content);
+           return { url: page.url, title, platform, markdown: md };
+         });
+         const firstPlatform = pageEntries[0]?.platform || "generic";
+         const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);
+         const sourceManifest = buildSourceManifest(name, url, firstPlatform, manifestPages);
+         writeSourceManifest(sourceManifest, outputDir);
+         const rootDir = dirname3(outputDir.replace(/\/$/, ""));
+         updateRootManifest(rootDir, {
+           name,
+           path: name + "/",
+           fetched_at: sourceManifest.fetched_at
+         });
+         consola.success(`Written ${pages.length} pages to ${outputDir}`);
+       } else {
+         const sections = [];
+         let firstTitle = "";
+         let firstPlatform = "";
+         for (const page of pages) {
+           const { content, title, platform } = extract(page.html, page.url);
+           if (!firstTitle) {
+             firstTitle = title;
+             firstPlatform = platform;
+           }
+           const md = transform(content);
+           sections.push(`## ${title}

  Source: ${page.url}

  ${md}`);
+         }
+         const markdown = sections.join("\n\n---\n\n");
+         write(markdown, outputPath, {
+           sourceUrl: url,
+           title: firstTitle,
+           platform: firstPlatform
+         });
+         if (!silent) consola.success(`Written to ${outputPath}`);
        }
-       const markdown = sections.join("\n\n---\n\n");
-       write(markdown, output, {
-         sourceUrl: url,
-         title: firstTitle,
-         platform: firstPlatform
-       });
-       if (!silent) consola.success(`Written to ${output}`);
      } else {
        if (!silent) consola.start(`Fetching ${url}...`);
        let html = await fetchPage(url);
@@ -578,12 +792,12 @@ ${md}`);
            html = await fetchWithBrowser(url);
            const result = extract(html, url);
            const markdown2 = transform(result.content);
-           write(markdown2, output, {
+           write(markdown2, outputPath, {
              sourceUrl: url,
              title: result.title || title,
              platform: result.platform
            });
-           if (!silent) consola.success(`Written to ${output}`);
+           if (!silent) consola.success(`Written to ${outputPath}`);
            return;
          } catch (err) {
            if (err?.code === "ERR_PLAYWRIGHT_NOT_INSTALLED") {
@@ -597,30 +811,30 @@ ${md}`);
        }
        if (!silent) consola.success(`Extracted content (platform: ${platform})`);
        const markdown = transform(content);
-       write(markdown, output, {
+       write(markdown, outputPath, {
          sourceUrl: url,
          title,
          platform
        });
-       if (!silent) consola.success(`Written to ${output}`);
+       if (!silent) consola.success(`Written to ${outputPath}`);
      }
    }
  });

  // src/commands/add.ts
  import { defineCommand as defineCommand2 } from "citty";
- import { join as join2 } from "path";
+ import { join as join4 } from "path";
  import consola2 from "consola";

  // src/config/manager.ts
- import { readFileSync, writeFileSync as writeFileSync2, existsSync } from "fs";
- import { join, dirname as dirname2 } from "path";
+ import { readFileSync as readFileSync2, writeFileSync as writeFileSync3, existsSync } from "fs";
+ import { join as join3, dirname as dirname4 } from "path";
  import yaml from "js-yaml";
  var CONFIG_FILENAME = ".docs2ai.yaml";
  function loadConfig(startDir) {
    const configPath = findConfigFile(startDir || process.cwd());
    if (!configPath) return null;
-   const raw = readFileSync(configPath, "utf-8");
+   const raw = readFileSync2(configPath, "utf-8");
    const data = yaml.load(raw);
    const config = {
      version: data.version ?? 1,
@@ -636,7 +850,7 @@ function saveConfig(config, configPath) {
      sources: config.sources.map(camelToSnakeSource)
    };
    const content = yaml.dump(data, { lineWidth: -1 });
-   writeFileSync2(configPath, content, "utf-8");
+   writeFileSync3(configPath, content, "utf-8");
  }
  function addSource(config, source) {
    const idx = config.sources.findIndex((s) => s.name === source.name);
@@ -649,9 +863,9 @@ function addSource(config, source) {
  function findConfigFile(startDir) {
    let dir = startDir;
    while (true) {
-     const candidate = join(dir, CONFIG_FILENAME);
+     const candidate = join3(dir, CONFIG_FILENAME);
      if (existsSync(candidate)) return candidate;
-     const parent = dirname2(dir);
+     const parent = dirname4(dir);
      if (parent === dir) return null;
      dir = parent;
    }
@@ -704,15 +918,15 @@ var addCommand = defineCommand2({
      output: {
        type: "string",
        alias: "o",
-       description: "Output filename"
+       description: "Output filename or directory"
      }
    },
    run({ args }) {
      const url = args.url;
-     const crawl2 = args.crawl;
+     const shouldCrawl = args.crawl;
      const maxDepth = parseInt(args["max-depth"], 10);
      const name = args.name || slugFromUrl(url);
-     const output = args.output || `${name}.md`;
+     const output = args.output || (shouldCrawl ? `${name}/` : `${name}.md`);
      const existing = loadConfig();
      let config;
      let configPath;
@@ -720,28 +934,21 @@ var addCommand = defineCommand2({
        config = existing.config;
        configPath = existing.configPath;
      } else {
-       configPath = join2(process.cwd(), ".docs2ai.yaml");
+       configPath = join4(process.cwd(), ".docs2ai.yaml");
        config = { version: 1, outputDir: ".ai/docs", sources: [] };
      }
-     addSource(config, { name, url, crawl: crawl2, maxDepth, output });
+     addSource(config, { name, url, crawl: shouldCrawl, maxDepth, output });
      saveConfig(config, configPath);
      consola2.success(`Added source "${name}" \u2192 ${url}`);
      consola2.info(`Config: ${configPath}`);
    }
  });
- function slugFromUrl(url) {
-   try {
-     const parsed = new URL(url);
-     return parsed.hostname.replace(/\./g, "-").replace(/^www-/, "");
-   } catch {
-     return "source";
-   }
- }

  // src/commands/update.ts
  import { defineCommand as defineCommand3 } from "citty";
- import { join as join3, dirname as dirname3 } from "path";
- import { mkdirSync as mkdirSync2 } from "fs";
+ import { join as join5, dirname as dirname5 } from "path";
+ import { mkdirSync as mkdirSync3 } from "fs";
+ import * as cheerio5 from "cheerio";
  import consola3 from "consola";
  var updateCommand = defineCommand3({
    meta: {
@@ -761,7 +968,7 @@ var updateCommand = defineCommand3({
        process.exit(1);
      }
      const { config, configPath } = result;
-     const configDir = dirname3(configPath);
+     const configDir = dirname5(configPath);
      const filterName = args.name;
      const sources = filterName ? config.sources.filter((s) => s.name === filterName) : config.sources;
      if (sources.length === 0) {
@@ -773,39 +980,75 @@ var updateCommand = defineCommand3({
        process.exit(1);
      }
      for (const source of sources) {
-       const outputPath = join3(configDir, config.outputDir, source.output);
-       mkdirSync2(dirname3(outputPath), { recursive: true });
+       const isDirectoryOutput = !source.output.endsWith(".md");
        consola3.start(`Updating "${source.name}" from ${source.url}...`);
        if (source.crawl) {
-         const pages = await crawl(source.url, {
+         const firstHtml = await fetchPage(source.url);
+         const $ = cheerio5.load(firstHtml);
+         const platformId = resolve(source.url, $);
+         const strategy = getStrategy(platformId);
+         const crawlResult = await crawl(source.url, {
            maxDepth: source.maxDepth,
+           navLinkSelector: strategy.navLinkSelector(),
+           discoverUrls: strategy.discoverUrls?.bind(strategy),
            onPageFetched: (url, current, total) => {
              consola3.info(` [${current}/${total}] ${url}`);
            }
          });
-         const sections = [];
-         let firstTitle = "";
-         let firstPlatform = "";
-         for (const page of pages) {
-           const { content, title, platform } = extract(page.html, page.url);
-           if (!firstTitle) {
-             firstTitle = title;
-             firstPlatform = platform;
-           }
-           const md = transform(content);
-           sections.push(`## ${title}
+         const { pages, effectivePrefix } = crawlResult;
+         if (isDirectoryOutput) {
+           const outputDir = join5(configDir, config.outputDir, source.output);
+           const pageEntries = pages.map((page) => {
+             const { content, title, platform } = extract(page.html, page.url);
+             const md = transform(content);
+             return { url: page.url, title, platform, markdown: md };
+           });
+           const firstPlatform = pageEntries[0]?.platform || "generic";
+           const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);
+           const sourceManifest = buildSourceManifest(
+             source.name,
+             source.url,
+             firstPlatform,
+             manifestPages
+           );
+           writeSourceManifest(sourceManifest, outputDir);
+           const rootDir = join5(configDir, config.outputDir);
+           updateRootManifest(rootDir, {
+             name: source.name,
+             path: source.output,
+             fetched_at: sourceManifest.fetched_at
+           });
+           consola3.success(`Updated "${source.name}" \u2192 ${outputDir} (${pages.length} pages)`);
+         } else {
+           const outputPath = join5(configDir, config.outputDir, source.output);
+           mkdirSync3(dirname5(outputPath), { recursive: true });
+           const sections = [];
+           let firstTitle = "";
+           let firstPlatform = "";
+           for (const page of pages) {
+             const { content, title, platform } = extract(page.html, page.url);
+             if (!firstTitle) {
+               firstTitle = title;
+               firstPlatform = platform;
+             }
+             const md = transform(content);
+             sections.push(`## ${title}

  Source: ${page.url}

  ${md}`);
+           }
+           const markdown = sections.join("\n\n---\n\n");
+           write(markdown, outputPath, {
+             sourceUrl: source.url,
+             title: firstTitle,
+             platform: firstPlatform
+           });
+           consola3.success(`Updated "${source.name}" \u2192 ${outputPath}`);
          }
-         const markdown = sections.join("\n\n---\n\n");
-         write(markdown, outputPath, {
-           sourceUrl: source.url,
-           title: firstTitle,
-           platform: firstPlatform
-         });
        } else {
+         const outputPath = join5(configDir, config.outputDir, source.output);
+         mkdirSync3(dirname5(outputPath), { recursive: true });
          const html = await fetchPage(source.url);
          const { content, title, platform } = extract(html, source.url);
          const markdown = transform(content);
@@ -814,8 +1057,8 @@ ${md}`);
            title,
            platform
          });
+         consola3.success(`Updated "${source.name}" \u2192 ${outputPath}`);
        }
-       consola3.success(`Updated "${source.name}" \u2192 ${outputPath}`);
      }
    }
  });
@@ -853,50 +1096,41 @@ var listCommand = defineCommand4({
  });

  // src/cli.ts
- var main = defineCommand5({
-   meta: {
-     name: "docs2ai",
-     version: "0.1.0",
-     description: "Convert documentation URLs into AI-ready Markdown files"
-   },
-   args: {
-     url: {
-       type: "positional",
-       description: "Documentation URL to convert",
-       required: false
-     },
-     output: {
-       type: "string",
-       alias: "o",
-       description: "Output file path"
+ var subCommands = {
+   add: addCommand,
+   update: updateCommand,
+   list: listCommand
+ };
+ var firstArg = process.argv[2];
+ var isSubCommand = firstArg && firstArg in subCommands;
+ if (isSubCommand) {
+   const main = defineCommand5({
+     meta: {
+       name: "docs2ai",
+       version: "0.1.0",
+       description: "Convert documentation URLs into AI-ready Markdown files"
      },
-     crawl: {
-       type: "boolean",
-       description: "Follow sidebar/nav links",
-       default: false
+     subCommands
+   });
+   runMain(main);
+ } else if (firstArg && !firstArg.startsWith("-") && firstArg !== "--help") {
+   runCommand(fetchCommand, { rawArgs: process.argv.slice(2) });
+ } else {
+   const main = defineCommand5({
+     meta: {
+       name: "docs2ai",
+       version: "0.1.0",
+       description: "Convert documentation URLs into AI-ready Markdown files"
      },
-     "max-depth": {
-       type: "string",
-       description: "Maximum crawl depth",
-       default: "2"
-     }
-   },
-   subCommands: {
-     add: addCommand,
-     update: updateCommand,
-     list: listCommand
-   },
-   run({ args }) {
-     if (!args.url) {
+     subCommands,
+     run() {
        console.log("Usage: docs2ai <url> [-o output.md] [--crawl]");
        console.log(" docs2ai add <url> [--name name] [--crawl]");
        console.log(" docs2ai update [--name name]");
        console.log(" docs2ai list");
        console.log("\nRun `docs2ai --help` for full usage.");
-       return;
      }
-     return fetchCommand.run({ args });
-   }
- });
- runMain(main);
+   });
+   runMain(main);
+ }
  //# sourceMappingURL=cli.mjs.map
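The crawl boundary also changes in this release: `crawl` still seeds the prefix from `getCrawlPrefix`, but after the first page it recomputes the boundary as the longest path prefix shared by the start URL and the discovered nav links (`computeCommonPrefix`), and returns it as `effectivePrefix`. Below is a minimal standalone sketch of that behavior, mirroring the bundled helper; the URLs are illustrative only.

```ts
// Condensed mirror of computeCommonPrefix from the diff above, reproduced only
// to show the new boundary; the example.com URLs are hypothetical.
function computeCommonPrefix(startUrl: string, navUrls: string[]): string {
  const parts = new URL(startUrl).pathname.split("/").filter(Boolean);
  for (const url of navUrls) {
    const urlParts = new URL(url).pathname.split("/").filter(Boolean);
    let i = 0;
    while (i < parts.length && i < urlParts.length && parts[i] === urlParts[i]) i++;
    parts.length = i;
  }
  return "/" + (parts.length > 0 ? parts.join("/") + "/" : "");
}

// In 0.1.0 the boundary was always the start URL's parent path ("/docs/guides/"
// here), so sibling sections were skipped; the recomputed prefix widens it to
// "/docs/", keeping nav links under /docs/api/ in bounds.
const prefix = computeCommonPrefix("https://example.com/docs/guides/install", [
  "https://example.com/docs/guides/configuration",
  "https://example.com/docs/api/authentication",
]);
console.log(prefix); // "/docs/"
```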
package/dist/cli.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/cli.ts","../src/commands/fetch.ts","../src/pipeline/fetcher.ts","../src/pipeline/extractor.ts","../src/platforms/mintlify.ts","../src/platforms/docusaurus.ts","../src/platforms/readme.ts","../src/platforms/gitbook.ts","../src/platforms/generic.ts","../src/platforms/registry.ts","../src/pipeline/resolver.ts","../src/pipeline/transformer.ts","../src/pipeline/writer.ts","../src/crawl/crawler.ts","../src/utils/url.ts","../src/crawl/boundary.ts","../src/commands/add.ts","../src/config/manager.ts","../src/commands/update.ts","../src/commands/list.ts"],"sourcesContent":["import { defineCommand, runMain } from \"citty\";\nimport { fetchCommand } from \"./commands/fetch\";\nimport { addCommand } from \"./commands/add\";\nimport { updateCommand } from \"./commands/update\";\nimport { listCommand } from \"./commands/list\";\n\nconst main = defineCommand({\n meta: {\n name: \"docs2ai\",\n version: \"0.1.0\",\n description: \"Convert documentation URLs into AI-ready Markdown files\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to convert\",\n required: false,\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output file path\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Follow sidebar/nav links\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n },\n subCommands: {\n add: addCommand,\n update: updateCommand,\n list: listCommand,\n },\n run({ args }) {\n if (!args.url) {\n console.log(\"Usage: docs2ai <url> [-o output.md] [--crawl]\");\n console.log(\" docs2ai add <url> [--name name] [--crawl]\");\n console.log(\" docs2ai update [--name name]\");\n console.log(\" docs2ai list\");\n console.log(\"\\nRun `docs2ai --help` for full usage.\");\n return;\n }\n return (fetchCommand as any).run({ args });\n },\n});\n\nrunMain(main);\n","import { defineCommand } from \"citty\";\nimport consola from \"consola\";\nimport * as cheerio from \"cheerio\";\nimport { fetchPage, fetchWithBrowser } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\n\nexport const fetchCommand = defineCommand({\n meta: {\n name: \"fetch\",\n description: \"Fetch a documentation URL and convert to Markdown\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to convert\",\n required: true,\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output file path\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Follow sidebar/nav links\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n },\n async run({ args }) {\n const url = args.url as string;\n const output = args.output as string | undefined;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n const silent = !output;\n\n if (shouldCrawl) {\n if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);\n\n // Fetch first page to resolve platform and get navLinkSelector\n const firstHtml = await fetchPage(url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(url, $);\n const strategy = 
getStrategy(platformId);\n const navLinkSelector = strategy.navLinkSelector();\n\n const pages = await crawl(url, {\n maxDepth,\n navLinkSelector,\n onPageFetched: (pageUrl, current, total) => {\n if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);\n },\n });\n\n if (!silent) consola.success(`Crawled ${pages.length} pages`);\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n\n write(markdown, output, {\n sourceUrl: url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n if (!silent) consola.success(`Written to ${output}`);\n } else {\n if (!silent) consola.start(`Fetching ${url}...`);\n let html = await fetchPage(url);\n\n const { content, title, platform } = extract(html, url);\n\n // If content is suspiciously small, try Playwright\n if (content.trim().length < 200) {\n if (!silent) consola.warn(\"Content looks thin, retrying with browser...\");\n try {\n html = await fetchWithBrowser(url);\n const result = extract(html, url);\n const markdown = transform(result.content);\n write(markdown, output, {\n sourceUrl: url,\n title: result.title || title,\n platform: result.platform,\n });\n if (!silent) consola.success(`Written to ${output}`);\n return;\n } catch (err: any) {\n if (err?.code === \"ERR_PLAYWRIGHT_NOT_INSTALLED\") {\n consola.warn(\n \"This page may require a browser to render. Install Playwright:\\n\" +\n \" npm install -D playwright && npx playwright install chromium\"\n );\n } else {\n consola.warn(\"Browser fallback failed, using static content.\");\n }\n }\n }\n\n if (!silent) consola.success(`Extracted content (platform: ${platform})`);\n const markdown = transform(content);\n\n write(markdown, output, {\n sourceUrl: url,\n title,\n platform,\n });\n\n if (!silent) consola.success(`Written to ${output}`);\n }\n },\n});\n","import { ofetch } from \"ofetch\";\n\n/**\n * Fetch the raw HTML of a documentation page.\n * Uses static fetch by default.\n */\nexport async function fetchPage(url: string): Promise<string> {\n const html = await ofetch(url, { responseType: \"text\" });\n return html;\n}\n\n/**\n * Fetch a page using Playwright for JS-rendered sites.\n * Playwright is an optional dependency — throws a typed error if not installed.\n */\nexport async function fetchWithBrowser(url: string): Promise<string> {\n let playwright;\n try {\n playwright = await import(\"playwright\");\n } catch {\n const err = new Error(\n \"Playwright is not installed. 
Run:\\n npm install -D playwright && npx playwright install chromium\"\n );\n (err as any).code = \"ERR_PLAYWRIGHT_NOT_INSTALLED\";\n throw err;\n }\n\n const browser = await playwright.chromium.launch({ headless: true });\n try {\n const page = await browser.newPage();\n await page.goto(url, { waitUntil: \"networkidle\" });\n // Extra buffer for late-loading content\n await page.waitForTimeout(1000);\n const html = await page.content();\n return html;\n } finally {\n await browser.close();\n }\n}\n","import * as cheerio from \"cheerio\";\nimport { Readability } from \"@mozilla/readability\";\nimport { parseHTML } from \"linkedom\";\nimport { resolve } from \"./resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport type { PlatformId } from \"../platforms/base\";\n\nexport interface ExtractResult {\n content: string;\n title: string;\n platform: PlatformId;\n}\n\n/**\n * Extract meaningful content from raw HTML.\n * Uses platform-specific selectors when available, falls back to Readability.\n */\nexport function extract(html: string, url: string): ExtractResult {\n const $ = cheerio.load(html);\n const platform = resolve(url, $);\n const strategy = getStrategy(platform);\n\n const title = extractTitle($);\n\n // Non-generic platforms: use selector-based extraction first\n if (platform !== \"generic\") {\n for (const sel of strategy.removeSelectors()) {\n $(sel).remove();\n }\n\n const contentEl = $(strategy.contentSelector()).first();\n const selectorContent = contentEl.html();\n\n if (selectorContent && selectorContent.trim().length >= 100) {\n return { content: selectorContent, title, platform };\n }\n // Fall through to Readability if selector extraction yields too little\n }\n\n // Generic / fallback: Readability extraction\n const { document } = parseHTML(html);\n const reader = new Readability(document as any);\n const article = reader.parse();\n\n const content = article?.content || $(\"body\").html() || html;\n\n return {\n content,\n title: title || article?.title || \"\",\n platform,\n };\n}\n\n/**\n * Extract page title from common sources.\n */\nfunction extractTitle($: cheerio.CheerioAPI): string {\n const h1 = $(\"h1\").first().text().trim();\n if (h1) return h1;\n\n const ogTitle = $('meta[property=\"og:title\"]').attr(\"content\")?.trim();\n if (ogTitle) return ogTitle;\n\n return $(\"title\").text().trim();\n}\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const mintlify: PlatformStrategy = {\n id: \"mintlify\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Mintlify\"]').length > 0) return true;\n if ($(\"script[src*='mintlify']\").length > 0) return true;\n if ($(\"[data-mintlify]\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, main\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \".sidebar\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href], .sidebar a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const docusaurus: PlatformStrategy = {\n id: \"docusaurus\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Docusaurus\"]').length > 0)\n return true;\n if 
($(\".theme-doc-sidebar-container\").length > 0) return true;\n if ($('meta[name=\"docusaurus_locale\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, [role='main'], .theme-doc-markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \".navbar\",\n \"footer\",\n \".theme-doc-toc-desktop\",\n \".theme-doc-sidebar-container\",\n \".pagination-nav\",\n \".theme-doc-breadcrumbs\",\n \"nav\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".menu__link[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const readme: PlatformStrategy = {\n id: \"readme\",\n\n detect(url: string, $: CheerioAPI): boolean {\n let rmClassCount = 0;\n $(\"[class]\").each((_, el) => {\n const cls = $(el).attr(\"class\") || \"\";\n if (/\\brm-/.test(cls)) rmClassCount++;\n });\n if (rmClassCount > 2) return true;\n if ($(\".rm-Article\").length > 0) return true;\n if ($(\".rm-Markdown\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \".markdown-body, .rm-Article, .rm-Markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \".rm-Sidebar\",\n \".rm-TableOfContents\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".rm-Sidebar a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const gitbook: PlatformStrategy = {\n id: \"gitbook\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"GitBook\"]').length > 0) return true;\n try {\n const parsed = new URL(url);\n if (parsed.hostname.endsWith(\".gitbook.io\")) return true;\n } catch {\n // invalid URL, skip host check\n }\n if ($('[data-testid=\"page.contentEditor\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return '[data-testid=\"page.contentEditor\"], main, article';\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[class*='sidebar']\",\n \"[class*='toc']\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const generic: PlatformStrategy = {\n id: \"generic\",\n\n detect(_url: string, _$: CheerioAPI): boolean {\n return true;\n },\n\n contentSelector(): string {\n return \"article, main, [role='main'], .content\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n \"noscript\",\n ];\n },\n\n navLinkSelector(): string | null {\n return null;\n },\n};\n","import type { PlatformId, PlatformStrategy } from \"./base\";\nimport { mintlify } from \"./mintlify\";\nimport { docusaurus } from \"./docusaurus\";\nimport { readme } from \"./readme\";\nimport { gitbook } from \"./gitbook\";\nimport { generic } from \"./generic\";\n\n/** Ordered list of platform strategies. Generic must be last (always matches). */\nexport const platformStrategies: PlatformStrategy[] = [\n mintlify,\n docusaurus,\n readme,\n gitbook,\n generic,\n];\n\n/** Get a strategy by its platform ID. 
*/\nexport function getStrategy(id: PlatformId): PlatformStrategy {\n const strategy = platformStrategies.find((s) => s.id === id);\n if (!strategy) {\n throw new Error(`Unknown platform: ${id}`);\n }\n return strategy;\n}\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformId } from \"../platforms/base\";\nimport { platformStrategies } from \"../platforms/registry\";\n\n/**\n * Detect which documentation platform a page belongs to.\n * Tries platform-specific strategies in order, falls back to generic.\n */\nexport function resolve(url: string, $: CheerioAPI): PlatformId {\n for (const strategy of platformStrategies) {\n if (strategy.detect(url, $)) {\n return strategy.id;\n }\n }\n return \"generic\";\n}\n","import TurndownService from \"turndown\";\nimport { gfm } from \"turndown-plugin-gfm\";\n\n/**\n * Convert clean HTML to Markdown.\n */\nexport function transform(html: string): string {\n const td = new TurndownService({\n headingStyle: \"atx\",\n codeBlockStyle: \"fenced\",\n bulletListMarker: \"-\",\n });\n\n td.use(gfm);\n\n addCalloutRule(td);\n addTabbedContentRule(td);\n addCodeBlockLangRule(td);\n addHiddenElementRule(td);\n\n return td.turndown(html);\n}\n\nfunction isElement(node: TurndownService.Node): node is HTMLElement {\n return node.nodeType === 1;\n}\n\nfunction getAttr(node: TurndownService.Node, attr: string): string {\n if (isElement(node)) {\n return node.getAttribute(attr) || \"\";\n }\n return \"\";\n}\n\nfunction getTagName(node: TurndownService.Node): string {\n if (isElement(node)) {\n return node.tagName.toLowerCase();\n }\n return \"\";\n}\n\n/**\n * Convert callouts/admonitions to blockquotes.\n * Matches: aside, .admonition, .callout, .alert, [role=\"alert\"]\n */\nfunction addCalloutRule(td: TurndownService): void {\n td.addRule(\"callouts\", {\n filter(node) {\n if (!isElement(node)) return false;\n const tag = getTagName(node);\n if (tag === \"aside\") return true;\n const cls = getAttr(node, \"class\");\n if (\n /\\b(admonition|callout|alert|notice|warning|info|tip|note|caution|danger)\\b/i.test(\n cls\n )\n )\n return true;\n if (getAttr(node, \"role\") === \"alert\") return true;\n return false;\n },\n replacement(content, node) {\n const cls = getAttr(node, \"class\").toLowerCase();\n let type = \"Note\";\n if (/warning|caution/.test(cls)) type = \"Warning\";\n else if (/danger|error/.test(cls)) type = \"Danger\";\n else if (/tip|success/.test(cls)) type = \"Tip\";\n else if (/info/.test(cls)) type = \"Info\";\n\n const lines = content.trim().split(\"\\n\");\n const quoted = lines.map((line) => `> ${line}`).join(\"\\n\");\n return `\\n> **${type}**\\n${quoted}\\n\\n`;\n },\n });\n}\n\n/**\n * Convert tabbed content into labeled sections.\n * Matches: .tab-panel, .tabpanel, [role=\"tabpanel\"]\n */\nfunction addTabbedContentRule(td: TurndownService): void {\n td.addRule(\"tabbed-content\", {\n filter(node) {\n if (!isElement(node)) return false;\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel|tabs__item)\\b/i.test(cls)) return true;\n if (getAttr(node, \"role\") === \"tabpanel\") return true;\n return false;\n },\n replacement(content, node) {\n const label =\n getAttr(node, \"aria-label\") ||\n getAttr(node, \"data-label\") ||\n getAttr(node, \"data-value\") ||\n \"\";\n if (label) {\n return `\\n**${label}**\\n\\n${content.trim()}\\n\\n`;\n }\n return `\\n${content.trim()}\\n\\n`;\n },\n });\n}\n\n/**\n * Ensure code blocks with data-language/data-lang produce proper fenced blocks.\n */\nfunction 
addCodeBlockLangRule(td: TurndownService): void {\n td.addRule(\"code-block-lang\", {\n filter(node) {\n if (!isElement(node)) return false;\n if (getTagName(node) !== \"pre\") return false;\n const codeEl = node.querySelector(\"code\");\n if (!codeEl) return false;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n return lang.length > 0;\n },\n replacement(_content, node) {\n if (!isElement(node)) return _content;\n const codeEl = node.querySelector(\"code\")!;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n const code = codeEl.textContent || \"\";\n return `\\n\\`\\`\\`${lang}\\n${code}\\n\\`\\`\\`\\n`;\n },\n });\n}\n\n/**\n * Remove hidden elements (display:none) except tab panels.\n */\nfunction addHiddenElementRule(td: TurndownService): void {\n td.addRule(\"hidden-elements\", {\n filter(node) {\n if (!isElement(node)) return false;\n const style = getAttr(node, \"style\");\n if (!/display\\s*:\\s*none/i.test(style)) return false;\n // Don't remove tab panels — they're hidden but contain valid content\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel)\\b/i.test(cls)) return false;\n if (getAttr(node, \"role\") === \"tabpanel\") return false;\n return true;\n },\n replacement() {\n return \"\";\n },\n });\n}\n","import { writeFileSync, mkdirSync } from \"node:fs\";\nimport { dirname } from \"node:path\";\nimport matter from \"gray-matter\";\n\nexport interface WriterOptions {\n sourceUrl: string;\n title: string;\n platform: string;\n}\n\n/**\n * Write Markdown with frontmatter to a file or stdout.\n */\nexport function write(\n markdown: string,\n outputPath: string | undefined,\n options: WriterOptions\n): void {\n const content = matter.stringify(markdown, {\n source: options.sourceUrl,\n fetched_at: new Date().toISOString(),\n platform: options.platform,\n title: options.title,\n docs2ai_version: \"0.1.0\",\n });\n\n if (outputPath) {\n mkdirSync(dirname(outputPath), { recursive: true });\n writeFileSync(outputPath, content, \"utf-8\");\n } else {\n process.stdout.write(content);\n }\n}\n","import * as cheerio from \"cheerio\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { getCrawlPrefix, isInBounds, normalizeUrl } from \"./boundary\";\n\nexport interface CrawledPage {\n url: string;\n html: string;\n}\n\nexport interface CrawlOptions {\n maxDepth: number;\n navLinkSelector?: string | null;\n onPageFetched?: (url: string, current: number, total: number) => void;\n}\n\n/**\n * Crawl documentation pages starting from a URL.\n * Follows in-bounds links via BFS up to maxDepth.\n */\nexport async function crawl(\n startUrl: string,\n options: CrawlOptions\n): Promise<CrawledPage[]> {\n const { origin, pathPrefix } = getCrawlPrefix(startUrl);\n const visited = new Set<string>();\n const results: CrawledPage[] = [];\n\n // BFS queue: [url, depth]\n const queue: [string, number][] = [[startUrl, 0]];\n visited.add(normalizeUrl(startUrl));\n\n while (queue.length > 0) {\n const [url, depth] = queue.shift()!;\n\n let html: string;\n try {\n html = await fetchPage(url);\n } catch {\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n continue;\n }\n results.push({ url, html });\n options.onPageFetched?.(url, results.length, results.length + 
queue.length);\n\n if (depth < options.maxDepth) {\n const links = discoverLinks(\n html,\n url,\n origin,\n pathPrefix,\n options.navLinkSelector\n );\n for (const link of links) {\n const normalized = normalizeUrl(link);\n if (!visited.has(normalized)) {\n visited.add(normalized);\n queue.push([link, depth + 1]);\n }\n }\n }\n\n // Politeness delay between requests\n if (queue.length > 0) {\n await delay(200);\n }\n }\n\n return results;\n}\n\n/**\n * Extract all in-bounds links from a page's HTML.\n * When navLinkSelector is provided, only links matching that selector are used.\n */\nfunction discoverLinks(\n html: string,\n baseUrl: string,\n origin: string,\n pathPrefix: string,\n navLinkSelector?: string | null\n): string[] {\n const $ = cheerio.load(html);\n const links: string[] = [];\n const selector = navLinkSelector || \"a[href]\";\n\n $(selector).each((_, el) => {\n const href = $(el).attr(\"href\");\n if (!href) return;\n\n try {\n const resolved = new URL(href, baseUrl).href;\n if (isInBounds(resolved, origin, pathPrefix)) {\n links.push(resolved);\n }\n } catch {\n // Invalid URL, skip\n }\n });\n\n return [...new Set(links)];\n}\n\nfunction delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Validate whether a string is a valid URL.\n */\nexport function isValidUrl(input: string): boolean {\n try {\n new URL(input);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Normalize a URL for deduplication: strip hash, query, trailing slash.\n */\nexport function normalizeUrl(url: string): string {\n const parsed = new URL(url);\n parsed.hash = \"\";\n parsed.search = \"\";\n return parsed.href.replace(/\\/$/, \"\");\n}\n\n/**\n * Derive a short name/slug from a URL's hostname.\n */\nexport function slugFromUrl(url: string): string {\n try {\n const parsed = new URL(url);\n return parsed.hostname.replace(/\\./g, \"-\").replace(/^www-/, \"\");\n } catch {\n return \"source\";\n }\n}\n","import { normalizeUrl } from \"../utils/url\";\n\nexport { normalizeUrl };\n\n/**\n * Determine the crawl boundary from a starting URL.\n * Links are in-bounds if they share the same origin and path prefix.\n */\nexport function getCrawlPrefix(url: string): {\n origin: string;\n pathPrefix: string;\n} {\n const parsed = new URL(url);\n const pathParts = parsed.pathname.split(\"/\");\n // Remove the last segment (the current page slug)\n pathParts.pop();\n const pathPrefix = pathParts.join(\"/\") + \"/\";\n return { origin: parsed.origin, pathPrefix };\n}\n\n/**\n * Check whether a candidate URL falls within the crawl boundary.\n */\nexport function isInBounds(\n candidateUrl: string,\n origin: string,\n pathPrefix: string\n): boolean {\n try {\n const parsed = new URL(candidateUrl);\n return parsed.origin === origin && parsed.pathname.startsWith(pathPrefix);\n } catch {\n return false;\n }\n}\n","import { defineCommand } from \"citty\";\nimport { join } from \"node:path\";\nimport consola from \"consola\";\nimport { loadConfig, saveConfig, addSource } from \"../config/manager\";\nimport type { Docs2aiConfig } from \"../config/schema\";\n\nexport const addCommand = defineCommand({\n meta: {\n name: \"add\",\n description: \"Add a documentation source to .docs2ai.yaml\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to add\",\n required: true,\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n 
description: \"Enable crawl mode for this source\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output filename\",\n },\n },\n run({ args }) {\n const url = args.url as string;\n const crawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n\n const name = (args.name as string) || slugFromUrl(url);\n const output = (args.output as string) || `${name}.md`;\n\n const existing = loadConfig();\n let config: Docs2aiConfig;\n let configPath: string;\n\n if (existing) {\n config = existing.config;\n configPath = existing.configPath;\n } else {\n configPath = join(process.cwd(), \".docs2ai.yaml\");\n config = { version: 1, outputDir: \".ai/docs\", sources: [] };\n }\n\n addSource(config, { name, url, crawl, maxDepth, output });\n saveConfig(config, configPath);\n\n consola.success(`Added source \"${name}\" → ${url}`);\n consola.info(`Config: ${configPath}`);\n },\n});\n\nfunction slugFromUrl(url: string): string {\n try {\n const parsed = new URL(url);\n return parsed.hostname.replace(/\\./g, \"-\").replace(/^www-/, \"\");\n } catch {\n return \"source\";\n }\n}\n","import { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport yaml from \"js-yaml\";\nimport type { Docs2aiConfig, SourceConfig } from \"./schema\";\n\nconst CONFIG_FILENAME = \".docs2ai.yaml\";\n\n/**\n * Load the .docs2ai.yaml config file, searching up from cwd.\n * Returns null if no config file is found.\n */\nexport function loadConfig(startDir?: string): {\n config: Docs2aiConfig;\n configPath: string;\n} | null {\n const configPath = findConfigFile(startDir || process.cwd());\n if (!configPath) return null;\n\n const raw = readFileSync(configPath, \"utf-8\");\n const data = yaml.load(raw) as Record<string, any>;\n\n const config: Docs2aiConfig = {\n version: data.version ?? 1,\n outputDir: data.output_dir ?? \".ai/docs\",\n sources: (data.sources ?? []).map(snakeToCamelSource),\n };\n\n return { config, configPath };\n}\n\n/**\n * Save configuration to a .docs2ai.yaml file.\n */\nexport function saveConfig(config: Docs2aiConfig, configPath: string): void {\n const data = {\n version: config.version,\n output_dir: config.outputDir,\n sources: config.sources.map(camelToSnakeSource),\n };\n\n const content = yaml.dump(data, { lineWidth: -1 });\n writeFileSync(configPath, content, \"utf-8\");\n}\n\n/**\n * Add or update a source in the config (upsert by name).\n */\nexport function addSource(config: Docs2aiConfig, source: SourceConfig): void {\n const idx = config.sources.findIndex((s) => s.name === source.name);\n if (idx >= 0) {\n config.sources[idx] = source;\n } else {\n config.sources.push(source);\n }\n}\n\n/**\n * Walk up the directory tree looking for .docs2ai.yaml.\n */\nfunction findConfigFile(startDir: string): string | null {\n let dir = startDir;\n while (true) {\n const candidate = join(dir, CONFIG_FILENAME);\n if (existsSync(candidate)) return candidate;\n const parent = dirname(dir);\n if (parent === dir) return null;\n dir = parent;\n }\n}\n\nfunction snakeToCamelSource(s: Record<string, any>): SourceConfig {\n return {\n name: s.name ?? \"\",\n url: s.url ?? \"\",\n crawl: s.crawl ?? false,\n maxDepth: s.max_depth ?? 2,\n output: s.output ?? 
\"\",\n };\n}\n\nfunction camelToSnakeSource(\n s: SourceConfig\n): Record<string, string | number | boolean> {\n return {\n name: s.name,\n url: s.url,\n crawl: s.crawl,\n max_depth: s.maxDepth,\n output: s.output,\n };\n}\n","import { defineCommand } from \"citty\";\nimport { join, dirname } from \"node:path\";\nimport { mkdirSync } from \"node:fs\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\n\nexport const updateCommand = defineCommand({\n meta: {\n name: \"update\",\n description: \"Refresh configured documentation sources\",\n },\n args: {\n name: {\n type: \"string\",\n description: \"Update only the named source\",\n },\n },\n async run({ args }) {\n const result = loadConfig();\n if (!result) {\n consola.error(\"No .docs2ai.yaml found. Run `docs2ai add <url>` first.\");\n process.exit(1);\n }\n\n const { config, configPath } = result;\n const configDir = dirname(configPath);\n const filterName = args.name as string | undefined;\n\n const sources = filterName\n ? config.sources.filter((s) => s.name === filterName)\n : config.sources;\n\n if (sources.length === 0) {\n if (filterName) {\n consola.error(`Source \"${filterName}\" not found in config.`);\n } else {\n consola.error(\"No sources configured.\");\n }\n process.exit(1);\n }\n\n for (const source of sources) {\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n consola.start(`Updating \"${source.name}\" from ${source.url}...`);\n\n if (source.crawl) {\n const pages = await crawl(source.url, {\n maxDepth: source.maxDepth,\n onPageFetched: (url, current, total) => {\n consola.info(` [${current}/${total}] ${url}`);\n },\n });\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title: firstTitle,\n platform: firstPlatform,\n });\n } else {\n const html = await fetchPage(source.url);\n const { content, title, platform } = extract(html, source.url);\n const markdown = transform(content);\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title,\n platform,\n });\n }\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\n\nexport const listCommand = defineCommand({\n meta: {\n name: \"list\",\n description: \"List configured documentation sources\",\n },\n run() {\n const result = loadConfig();\n if (!result) {\n consola.info(\"No .docs2ai.yaml found. 
Run `docs2ai add <url>` to get started.\");\n return;\n }\n\n const { config, configPath } = result;\n consola.info(`Config: ${configPath}`);\n consola.info(`Output dir: ${config.outputDir}\\n`);\n\n if (config.sources.length === 0) {\n consola.info(\"No sources configured.\");\n return;\n }\n\n for (const source of config.sources) {\n const crawlInfo = source.crawl\n ? ` (crawl, depth: ${source.maxDepth})`\n : \"\";\n console.log(` ${source.name}${crawlInfo}`);\n console.log(` URL: ${source.url}`);\n console.log(` Output: ${source.output}`);\n console.log();\n }\n },\n});\n"],"mappings":";;;AAAA,SAAS,iBAAAA,gBAAe,eAAe;;;ACAvC,SAAS,qBAAqB;AAC9B,OAAO,aAAa;AACpB,YAAYC,cAAa;;;ACFzB,SAAS,cAAc;AAMvB,eAAsB,UAAU,KAA8B;AAC5D,QAAM,OAAO,MAAM,OAAO,KAAK,EAAE,cAAc,OAAO,CAAC;AACvD,SAAO;AACT;AAMA,eAAsB,iBAAiB,KAA8B;AACnE,MAAI;AACJ,MAAI;AACF,iBAAa,MAAM,OAAO,YAAY;AAAA,EACxC,QAAQ;AACN,UAAM,MAAM,IAAI;AAAA,MACd;AAAA,IACF;AACA,IAAC,IAAY,OAAO;AACpB,UAAM;AAAA,EACR;AAEA,QAAM,UAAU,MAAM,WAAW,SAAS,OAAO,EAAE,UAAU,KAAK,CAAC;AACnE,MAAI;AACF,UAAM,OAAO,MAAM,QAAQ,QAAQ;AACnC,UAAM,KAAK,KAAK,KAAK,EAAE,WAAW,cAAc,CAAC;AAEjD,UAAM,KAAK,eAAe,GAAI;AAC9B,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,WAAO;AAAA,EACT,UAAE;AACA,UAAM,QAAQ,MAAM;AAAA,EACtB;AACF;;;ACtCA,YAAY,aAAa;AACzB,SAAS,mBAAmB;AAC5B,SAAS,iBAAiB;;;ACCnB,IAAM,WAA6B;AAAA,EACxC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,6CAA6C,EAAE,SAAS,EAAG,QAAO;AACxE,QAAI,EAAE,yBAAyB,EAAE,SAAS,EAAG,QAAO;AACpD,QAAI,EAAE,iBAAiB,EAAE,SAAS,EAAG,QAAO;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;AChCO,IAAM,aAA+B;AAAA,EAC1C,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,+CAA+C,EAAE,SAAS;AAC9D,aAAO;AACT,QAAI,EAAE,8BAA8B,EAAE,SAAS,EAAG,QAAO;AACzD,QAAI,EAAE,gCAAgC,EAAE,SAAS,EAAG,QAAO;AAC3D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;AChCO,IAAM,SAA2B;AAAA,EACtC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,eAAe;AACnB,MAAE,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO;AAC3B,YAAM,MAAM,EAAE,EAAE,EAAE,KAAK,OAAO,KAAK;AACnC,UAAI,QAAQ,KAAK,GAAG,EAAG;AAAA,IACzB,CAAC;AACD,QAAI,eAAe,EAAG,QAAO;AAC7B,QAAI,EAAE,aAAa,EAAE,SAAS,EAAG,QAAO;AACxC,QAAI,EAAE,cAAc,EAAE,SAAS,EAAG,QAAO;AACzC,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,4CAA4C,EAAE,SAAS,EAAG,QAAO;AACvE,QAAI;AACF,YAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,UAAI,OAAO,SAAS,SAAS,aAAa,EAAG,QAAO;AAAA,IACtD,QAAQ;AAAA,IAER;AACA,QAAI,EAAE,oCAAoC,EAAE,SAAS,EAAG,QAAO;AAC/D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,MAAc,IAAyB;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACxBO,IAAM,qBAAyC;AAAA,EACpD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;
AACF;AAGO,SAAS,YAAY,IAAkC;AAC5D,QAAM,WAAW,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3D,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,qBAAqB,EAAE,EAAE;AAAA,EAC3C;AACA,SAAO;AACT;;;ACfO,SAAS,QAAQ,KAAa,GAA2B;AAC9D,aAAW,YAAY,oBAAoB;AACzC,QAAI,SAAS,OAAO,KAAK,CAAC,GAAG;AAC3B,aAAO,SAAS;AAAA,IAClB;AAAA,EACF;AACA,SAAO;AACT;;;APEO,SAAS,QAAQ,MAAc,KAA4B;AAChE,QAAM,IAAY,aAAK,IAAI;AAC3B,QAAM,WAAW,QAAQ,KAAK,CAAC;AAC/B,QAAM,WAAW,YAAY,QAAQ;AAErC,QAAM,QAAQ,aAAa,CAAC;AAG5B,MAAI,aAAa,WAAW;AAC1B,eAAW,OAAO,SAAS,gBAAgB,GAAG;AAC5C,QAAE,GAAG,EAAE,OAAO;AAAA,IAChB;AAEA,UAAM,YAAY,EAAE,SAAS,gBAAgB,CAAC,EAAE,MAAM;AACtD,UAAM,kBAAkB,UAAU,KAAK;AAEvC,QAAI,mBAAmB,gBAAgB,KAAK,EAAE,UAAU,KAAK;AAC3D,aAAO,EAAE,SAAS,iBAAiB,OAAO,SAAS;AAAA,IACrD;AAAA,EAEF;AAGA,QAAM,EAAE,SAAS,IAAI,UAAU,IAAI;AACnC,QAAM,SAAS,IAAI,YAAY,QAAe;AAC9C,QAAM,UAAU,OAAO,MAAM;AAE7B,QAAM,UAAU,SAAS,WAAW,EAAE,MAAM,EAAE,KAAK,KAAK;AAExD,SAAO;AAAA,IACL;AAAA,IACA,OAAO,SAAS,SAAS,SAAS;AAAA,IAClC;AAAA,EACF;AACF;AAKA,SAAS,aAAa,GAA+B;AACnD,QAAM,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvC,MAAI,GAAI,QAAO;AAEf,QAAM,UAAU,EAAE,2BAA2B,EAAE,KAAK,SAAS,GAAG,KAAK;AACrE,MAAI,QAAS,QAAO;AAEpB,SAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAChC;;;AQhEA,OAAO,qBAAqB;AAC5B,SAAS,WAAW;AAKb,SAAS,UAAU,MAAsB;AAC9C,QAAM,KAAK,IAAI,gBAAgB;AAAA,IAC7B,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,kBAAkB;AAAA,EACpB,CAAC;AAED,KAAG,IAAI,GAAG;AAEV,iBAAe,EAAE;AACjB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AAEvB,SAAO,GAAG,SAAS,IAAI;AACzB;AAEA,SAAS,UAAU,MAAiD;AAClE,SAAO,KAAK,aAAa;AAC3B;AAEA,SAAS,QAAQ,MAA4B,MAAsB;AACjE,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,aAAa,IAAI,KAAK;AAAA,EACpC;AACA,SAAO;AACT;AAEA,SAAS,WAAW,MAAoC;AACtD,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,QAAQ,YAAY;AAAA,EAClC;AACA,SAAO;AACT;AAMA,SAAS,eAAe,IAA2B;AACjD,KAAG,QAAQ,YAAY;AAAA,IACrB,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,WAAW,IAAI;AAC3B,UAAI,QAAQ,QAAS,QAAO;AAC5B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UACE,8EAA8E;AAAA,QAC5E;AAAA,MACF;AAEA,eAAO;AACT,UAAI,QAAQ,MAAM,MAAM,MAAM,QAAS,QAAO;AAC9C,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,MAAM,QAAQ,MAAM,OAAO,EAAE,YAAY;AAC/C,UAAI,OAAO;AACX,UAAI,kBAAkB,KAAK,GAAG,EAAG,QAAO;AAAA,eAC/B,eAAe,KAAK,GAAG,EAAG,QAAO;AAAA,eACjC,cAAc,KAAK,GAAG,EAAG,QAAO;AAAA,eAChC,OAAO,KAAK,GAAG,EAAG,QAAO;AAElC,YAAM,QAAQ,QAAQ,KAAK,EAAE,MAAM,IAAI;AACvC,YAAM,SAAS,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI,EAAE,EAAE,KAAK,IAAI;AACzD,aAAO;AAAA,MAAS,IAAI;AAAA,EAAO,MAAM;AAAA;AAAA;AAAA,IACnC;AAAA,EACF,CAAC;AACH;AAMA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,kBAAkB;AAAA,IAC3B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,uCAAuC,KAAK,GAAG,EAAG,QAAO;AAC7D,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,QACJ,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B;AACF,UAAI,OAAO;AACT,eAAO;AAAA,IAAO,KAAK;AAAA;AAAA,EAAS,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,MAC5C;AACA,aAAO;AAAA,EAAK,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,IAC5B;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,UAAI,WAAW,IAAI,MAAM,MAAO,QAAO;AACvC,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAI,CAAC,OAAQ,QAAO;AACpB,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,aAAO,KAAK,SAAS;AAAA,IACvB;AAAA,IACA,YAAY,UAAU,MAAM;AAC1B,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,YAAM,OAAO,OAAO,eAAe;AACnC,aAAO;AAAA,QAAW,IAAI;AAAA,EAAK,IAAI;AAAA;AAAA;AAAA,IACjC;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KA
AG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,QAAQ,QAAQ,MAAM,OAAO;AACnC,UAAI,CAAC,sBAAsB,KAAK,KAAK,EAAG,QAAO;AAE/C,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,4BAA4B,KAAK,GAAG,EAAG,QAAO;AAClD,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,cAAc;AACZ,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;ACzJA,SAAS,eAAe,iBAAiB;AACzC,SAAS,eAAe;AACxB,OAAO,YAAY;AAWZ,SAAS,MACd,UACA,YACA,SACM;AACN,QAAM,UAAU,OAAO,UAAU,UAAU;AAAA,IACzC,QAAQ,QAAQ;AAAA,IAChB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,iBAAiB;AAAA,EACnB,CAAC;AAED,MAAI,YAAY;AACd,cAAU,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,kBAAc,YAAY,SAAS,OAAO;AAAA,EAC5C,OAAO;AACL,YAAQ,OAAO,MAAM,OAAO;AAAA,EAC9B;AACF;;;AChCA,YAAYC,cAAa;;;ACelB,SAAS,aAAa,KAAqB;AAChD,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,SAAO,OAAO;AACd,SAAO,SAAS;AAChB,SAAO,OAAO,KAAK,QAAQ,OAAO,EAAE;AACtC;;;ACZO,SAAS,eAAe,KAG7B;AACA,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,QAAM,YAAY,OAAO,SAAS,MAAM,GAAG;AAE3C,YAAU,IAAI;AACd,QAAM,aAAa,UAAU,KAAK,GAAG,IAAI;AACzC,SAAO,EAAE,QAAQ,OAAO,QAAQ,WAAW;AAC7C;AAKO,SAAS,WACd,cACA,QACA,YACS;AACT,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,YAAY;AACnC,WAAO,OAAO,WAAW,UAAU,OAAO,SAAS,WAAW,UAAU;AAAA,EAC1E,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AFfA,eAAsB,MACpB,UACA,SACwB;AACxB,QAAM,EAAE,QAAQ,WAAW,IAAI,eAAe,QAAQ;AACtD,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,UAAyB,CAAC;AAGhC,QAAM,QAA4B,CAAC,CAAC,UAAU,CAAC,CAAC;AAChD,UAAQ,IAAI,aAAa,QAAQ,CAAC;AAElC,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,CAAC,KAAK,KAAK,IAAI,MAAM,MAAM;AAEjC,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,UAAU,GAAG;AAAA,IAC5B,QAAQ;AACN,cAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAC1E;AAAA,IACF;AACA,YAAQ,KAAK,EAAE,KAAK,KAAK,CAAC;AAC1B,YAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAE1E,QAAI,QAAQ,QAAQ,UAAU;AAC5B,YAAM,QAAQ;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,MACV;AACA,iBAAW,QAAQ,OAAO;AACxB,cAAM,aAAa,aAAa,IAAI;AACpC,YAAI,CAAC,QAAQ,IAAI,UAAU,GAAG;AAC5B,kBAAQ,IAAI,UAAU;AACtB,gBAAM,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC;AAAA,QAC9B;AAAA,MACF;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,MAAM,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,cACP,MACA,SACA,QACA,YACA,iBACU;AACV,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAW,mBAAmB;AAEpC,IAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,UAAM,OAAO,EAAE,EAAE,EAAE,KAAK,MAAM;AAC9B,QAAI,CAAC,KAAM;AAEX,QAAI;AACF,YAAM,WAAW,IAAI,IAAI,MAAM,OAAO,EAAE;AACxC,UAAI,WAAW,UAAU,QAAQ,UAAU,GAAG;AAC5C,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,CAAC;AAED,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAC3B;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAACC,aAAY,WAAWA,UAAS,EAAE,CAAC;AACzD;;;AZ7FO,IAAM,eAAe,cAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,MAAM,KAAK;AACjB,UAAM,SAAS,KAAK;AACpB,UAAM,cAAc,KAAK;AACzB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AACzD,UAAM,SAAS,CAAC;AAEhB,QAAI,aAAa;AACf,UAAI,CAAC,OAAQ,SAAQ,MAAM,iBAAiB,GAAG,gBAAgB,QAAQ,MAAM;AAG7E,YAAM,YAAY,MAAM,UAAU,GAAG;AACrC,YAAM,IAAY,cAAK,SAAS;AAChC,YAAM,aAAa,QAAQ,KAAK,CAAC;AACjC,YAAM,WAAW,YAAY,UAAU;AACvC,YAAM,kBAAkB,SAAS,gBAAgB;AAEjD,YAAM,QAAQ,MAAM,MAAM,KAAK;AAAA,QAC7B;AAAA,QACA;AAAA,QACA,eAAe,CAAC,SAAS,SAAS,UAAU;AAC1C,cAAI,CAAC,OAAQ,SAAQ,KAAK,IAAI,OAAO,IAAI,KAAK,KAAK,OAAO,EAAE;AAAA,QAC9D;AAAA,MACF,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,WAAW,MAAM,MAAM,QAAQ;AAE5D,YAA
M,WAAqB,CAAC;AAC5B,UAAI,aAAa;AACjB,UAAI,gBAAgB;AAEpB,iBAAW,QAAQ,OAAO;AACxB,cAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,YAAI,CAAC,YAAY;AACf,uBAAa;AACb,0BAAgB;AAAA,QAClB;AACA,cAAM,KAAK,UAAU,OAAO;AAC5B,iBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,MAC7D;AAEA,YAAM,WAAW,SAAS,KAAK,aAAa;AAE5C,YAAM,UAAU,QAAQ;AAAA,QACtB,WAAW;AAAA,QACX,OAAO;AAAA,QACP,UAAU;AAAA,MACZ,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,MAAM,EAAE;AAAA,IACrD,OAAO;AACL,UAAI,CAAC,OAAQ,SAAQ,MAAM,YAAY,GAAG,KAAK;AAC/C,UAAI,OAAO,MAAM,UAAU,GAAG;AAE9B,YAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,GAAG;AAGtD,UAAI,QAAQ,KAAK,EAAE,SAAS,KAAK;AAC/B,YAAI,CAAC,OAAQ,SAAQ,KAAK,8CAA8C;AACxE,YAAI;AACF,iBAAO,MAAM,iBAAiB,GAAG;AACjC,gBAAM,SAAS,QAAQ,MAAM,GAAG;AAChC,gBAAMC,YAAW,UAAU,OAAO,OAAO;AACzC,gBAAMA,WAAU,QAAQ;AAAA,YACtB,WAAW;AAAA,YACX,OAAO,OAAO,SAAS;AAAA,YACvB,UAAU,OAAO;AAAA,UACnB,CAAC;AACD,cAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,MAAM,EAAE;AACnD;AAAA,QACF,SAAS,KAAU;AACjB,cAAI,KAAK,SAAS,gCAAgC;AAChD,oBAAQ;AAAA,cACN;AAAA,YAEF;AAAA,UACF,OAAO;AACL,oBAAQ,KAAK,gDAAgD;AAAA,UAC/D;AAAA,QACF;AAAA,MACF;AAEA,UAAI,CAAC,OAAQ,SAAQ,QAAQ,gCAAgC,QAAQ,GAAG;AACxE,YAAM,WAAW,UAAU,OAAO;AAElC,YAAM,UAAU,QAAQ;AAAA,QACtB,WAAW;AAAA,QACX;AAAA,QACA;AAAA,MACF,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,MAAM,EAAE;AAAA,IACrD;AAAA,EACF;AACF,CAAC;;;AepID,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,QAAAC,aAAY;AACrB,OAAOC,cAAa;;;ACFpB,SAAS,cAAc,iBAAAC,gBAAe,kBAAkB;AACxD,SAAS,MAAM,WAAAC,gBAAe;AAC9B,OAAO,UAAU;AAGjB,IAAM,kBAAkB;AAMjB,SAAS,WAAW,UAGlB;AACP,QAAM,aAAa,eAAe,YAAY,QAAQ,IAAI,CAAC;AAC3D,MAAI,CAAC,WAAY,QAAO;AAExB,QAAM,MAAM,aAAa,YAAY,OAAO;AAC5C,QAAM,OAAO,KAAK,KAAK,GAAG;AAE1B,QAAM,SAAwB;AAAA,IAC5B,SAAS,KAAK,WAAW;AAAA,IACzB,WAAW,KAAK,cAAc;AAAA,IAC9B,UAAU,KAAK,WAAW,CAAC,GAAG,IAAI,kBAAkB;AAAA,EACtD;AAEA,SAAO,EAAE,QAAQ,WAAW;AAC9B;AAKO,SAAS,WAAW,QAAuB,YAA0B;AAC1E,QAAM,OAAO;AAAA,IACX,SAAS,OAAO;AAAA,IAChB,YAAY,OAAO;AAAA,IACnB,SAAS,OAAO,QAAQ,IAAI,kBAAkB;AAAA,EAChD;AAEA,QAAM,UAAU,KAAK,KAAK,MAAM,EAAE,WAAW,GAAG,CAAC;AACjD,EAAAD,eAAc,YAAY,SAAS,OAAO;AAC5C;AAKO,SAAS,UAAU,QAAuB,QAA4B;AAC3E,QAAM,MAAM,OAAO,QAAQ,UAAU,CAAC,MAAM,EAAE,SAAS,OAAO,IAAI;AAClE,MAAI,OAAO,GAAG;AACZ,WAAO,QAAQ,GAAG,IAAI;AAAA,EACxB,OAAO;AACL,WAAO,QAAQ,KAAK,MAAM;AAAA,EAC5B;AACF;AAKA,SAAS,eAAe,UAAiC;AACvD,MAAI,MAAM;AACV,SAAO,MAAM;AACX,UAAM,YAAY,KAAK,KAAK,eAAe;AAC3C,QAAI,WAAW,SAAS,EAAG,QAAO;AAClC,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,SAAS,mBAAmB,GAAsC;AAChE,SAAO;AAAA,IACL,MAAM,EAAE,QAAQ;AAAA,IAChB,KAAK,EAAE,OAAO;AAAA,IACd,OAAO,EAAE,SAAS;AAAA,IAClB,UAAU,EAAE,aAAa;AAAA,IACzB,QAAQ,EAAE,UAAU;AAAA,EACtB;AACF;AAEA,SAAS,mBACP,GAC2C;AAC3C,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR,KAAK,EAAE;AAAA,IACP,OAAO,EAAE;AAAA,IACT,WAAW,EAAE;AAAA,IACb,QAAQ,EAAE;AAAA,EACZ;AACF;;;ADpFO,IAAM,aAAaC,eAAc;AAAA,EACtC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,IAAI,EAAE,KAAK,GAAG;AACZ,UAAM,MAAM,KAAK;AACjB,UAAMC,SAAQ,KAAK;AACnB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AAEzD,UAAM,OAAQ,KAAK,QAAmB,YAAY,GAAG;AACrD,UAAM,SAAU,KAAK,UAAqB,GAAG,IAAI;AAEjD,UAAM,WAAW,WAAW;AAC5B,QAAI;AACJ,QAAI;AAEJ,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,mBAAa,SAAS;AAAA,IACxB,OAAO;AACL,mBAAaC,MAAK,QAAQ,IAAI,GAAG,eAAe;AAChD,eAAS,EAAE,SAAS,GAAG,WAAW,YAAY,SAAS,CAAC,EAAE;AAAA,IAC5D;AAEA,cAAU,QAAQ,EAAE,M
AAM,KAAK,OAAAD,QAAO,UAAU,OAAO,CAAC;AACxD,eAAW,QAAQ,UAAU;AAE7B,IAAAE,SAAQ,QAAQ,iBAAiB,IAAI,YAAO,GAAG,EAAE;AACjD,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AAAA,EACtC;AACF,CAAC;AAED,SAAS,YAAY,KAAqB;AACxC,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,OAAO,SAAS,QAAQ,OAAO,GAAG,EAAE,QAAQ,SAAS,EAAE;AAAA,EAChE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AExEA,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,QAAAC,OAAM,WAAAC,gBAAe;AAC9B,SAAS,aAAAC,kBAAiB;AAC1B,OAAOC,cAAa;AAQb,IAAM,gBAAgBC,eAAc;AAAA,EACzC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,MAAM,wDAAwD;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,UAAM,YAAYC,SAAQ,UAAU;AACpC,UAAM,aAAa,KAAK;AAExB,UAAM,UAAU,aACZ,OAAO,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,IAClD,OAAO;AAEX,QAAI,QAAQ,WAAW,GAAG;AACxB,UAAI,YAAY;AACd,QAAAD,SAAQ,MAAM,WAAW,UAAU,wBAAwB;AAAA,MAC7D,OAAO;AACL,QAAAA,SAAQ,MAAM,wBAAwB;AAAA,MACxC;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,eAAW,UAAU,SAAS;AAC5B,YAAM,aAAaE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AAClE,MAAAC,WAAUF,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,MAAAD,SAAQ,MAAM,aAAa,OAAO,IAAI,UAAU,OAAO,GAAG,KAAK;AAE/D,UAAI,OAAO,OAAO;AAChB,cAAM,QAAQ,MAAM,MAAM,OAAO,KAAK;AAAA,UACpC,UAAU,OAAO;AAAA,UACjB,eAAe,CAAC,KAAK,SAAS,UAAU;AACtC,YAAAA,SAAQ,KAAK,MAAM,OAAO,IAAI,KAAK,KAAK,GAAG,EAAE;AAAA,UAC/C;AAAA,QACF,CAAC;AAED,cAAM,WAAqB,CAAC;AAC5B,YAAI,aAAa;AACjB,YAAI,gBAAgB;AAEpB,mBAAW,QAAQ,OAAO;AACxB,gBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,cAAI,CAAC,YAAY;AACf,yBAAa;AACb,4BAAgB;AAAA,UAClB;AACA,gBAAM,KAAK,UAAU,OAAO;AAC5B,mBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,QAC7D;AAEA,cAAM,WAAW,SAAS,KAAK,aAAa;AAC5C,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW,OAAO;AAAA,UAClB,OAAO;AAAA,UACP,UAAU;AAAA,QACZ,CAAC;AAAA,MACH,OAAO;AACL,cAAM,OAAO,MAAM,UAAU,OAAO,GAAG;AACvC,cAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,OAAO,GAAG;AAC7D,cAAM,WAAW,UAAU,OAAO;AAClC,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW,OAAO;AAAA,UAClB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAEA,MAAAA,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,UAAU,EAAE;AAAA,IAC5D;AAAA,EACF;AACF,CAAC;;;AC9FD,SAAS,iBAAAI,sBAAqB;AAC9B,OAAOC,cAAa;AAGb,IAAM,cAAcC,eAAc;AAAA,EACvC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AACJ,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,KAAK,iEAAiE;AAC9E;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AACpC,IAAAA,SAAQ,KAAK,eAAe,OAAO,SAAS;AAAA,CAAI;AAEhD,QAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,MAAAA,SAAQ,KAAK,wBAAwB;AACrC;AAAA,IACF;AAEA,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,YAAY,OAAO,QACrB,mBAAmB,OAAO,QAAQ,MAClC;AACJ,cAAQ,IAAI,KAAK,OAAO,IAAI,GAAG,SAAS,EAAE;AAC1C,cAAQ,IAAI,eAAe,OAAO,GAAG,EAAE;AACvC,cAAQ,IAAI,eAAe,OAAO,MAAM,EAAE;AAC1C,cAAQ,IAAI;AAAA,IACd;AAAA,EACF;AACF,CAAC;;;AnB7BD,IAAM,OAAOC,eAAc;AAAA,EACzB,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,aAAa;AAAA,IACX,KAAK;AAAA,IACL,QAAQ;AAAA,IACR,MAAM;AAAA,EACR;AAAA,EACA,IAAI,EAAE,KAAK,GAAG;AACZ,QAAI,CAAC,KAAK,KAAK;AACb,cAAQ,IAAI,+CAA+C;AAC3D,cAAQ,IAAI,kDAAkD;AAC9D,cAAQ,IAAI,qCAAqC;AACjD,cAAQ,IAAI,qBAAqB;AACjC,cAAQ,IAAI,wCAAwC;AACpD;AAAA,IACF;AACA,WAAQ,aAAqB,IAAI,EAAE,KAAK,CAAC;AAAA,EAC3C;AACF,CAAC;AAED,QAAQ,IAAI;","names":["defineCommand","cheerio","che
erio","resolve","markdown","defineCommand","join","consola","writeFileSync","dirname","defineCommand","crawl","join","consola","defineCommand","join","dirname","mkdirSync","consola","defineCommand","consola","dirname","join","mkdirSync","defineCommand","consola","defineCommand","consola","defineCommand"]}
1
+ {"version":3,"sources":["../src/cli.ts","../src/commands/fetch.ts","../src/pipeline/fetcher.ts","../src/pipeline/extractor.ts","../src/platforms/mintlify.ts","../src/platforms/docusaurus.ts","../src/platforms/readme.ts","../src/platforms/gitbook.ts","../src/platforms/generic.ts","../src/platforms/registry.ts","../src/pipeline/resolver.ts","../src/pipeline/transformer.ts","../src/pipeline/writer.ts","../src/utils/slug.ts","../src/crawl/crawler.ts","../src/utils/url.ts","../src/crawl/boundary.ts","../src/pipeline/manifest.ts","../src/commands/add.ts","../src/config/manager.ts","../src/commands/update.ts","../src/commands/list.ts"],"sourcesContent":["import { defineCommand, runMain, runCommand } from \"citty\";\nimport { fetchCommand } from \"./commands/fetch\";\nimport { addCommand } from \"./commands/add\";\nimport { updateCommand } from \"./commands/update\";\nimport { listCommand } from \"./commands/list\";\n\nconst subCommands: Record<string, any> = {\n add: addCommand,\n update: updateCommand,\n list: listCommand,\n};\n\n// Check if first non-flag arg is a subcommand\nconst firstArg = process.argv[2];\nconst isSubCommand = firstArg && firstArg in subCommands;\n\nif (isSubCommand) {\n // Let citty handle subcommand routing\n const main = defineCommand({\n meta: {\n name: \"docs2ai\",\n version: \"0.1.0\",\n description: \"Convert documentation URLs into AI-ready Markdown files\",\n },\n subCommands,\n });\n runMain(main);\n} else if (firstArg && !firstArg.startsWith(\"-\") && firstArg !== \"--help\") {\n // Treat as a URL — run fetch command directly\n runCommand(fetchCommand, { rawArgs: process.argv.slice(2) });\n} else {\n // No args or --help — show usage\n const main = defineCommand({\n meta: {\n name: \"docs2ai\",\n version: \"0.1.0\",\n description: \"Convert documentation URLs into AI-ready Markdown files\",\n },\n subCommands,\n run() {\n console.log(\"Usage: docs2ai <url> [-o output.md] [--crawl]\");\n console.log(\" docs2ai add <url> [--name name] [--crawl]\");\n console.log(\" docs2ai update [--name name]\");\n console.log(\" docs2ai list\");\n console.log(\"\\nRun `docs2ai --help` for full usage.\");\n },\n });\n runMain(main);\n}\n","import { defineCommand } from \"citty\";\nimport { dirname } from \"node:path\";\nimport consola from \"consola\";\nimport * as cheerio from \"cheerio\";\nimport { fetchPage, fetchWithBrowser } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write, writePages } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport { slugFromUrl } from \"../utils/url\";\nimport {\n buildSourceManifest,\n writeSourceManifest,\n updateRootManifest,\n} from \"../pipeline/manifest\";\n\n/**\n * Determine whether crawl output should go to a directory (one file per page)\n * or a single stitched file.\n */\nfunction resolveOutputMode(\n output: string | undefined,\n shouldCrawl: boolean,\n name: string\n): { mode: \"single-file\" | \"directory\"; outputPath: string | undefined; outputDir: string } {\n if (!shouldCrawl) {\n return { mode: \"single-file\", outputPath: output, outputDir: \"\" };\n }\n\n // Crawl + explicit .md output → single file (backward compat)\n if (output && output.endsWith(\".md\")) {\n return { mode: \"single-file\", outputPath: output, outputDir: \"\" };\n }\n\n // Crawl + explicit directory path\n if (output) {\n 
const dir = output.endsWith(\"/\") ? output : output + \"/\";\n return { mode: \"directory\", outputPath: undefined, outputDir: dir };\n }\n\n // Crawl + no output → default directory\n return { mode: \"directory\", outputPath: undefined, outputDir: `.ai/docs/${name}/` };\n}\n\nexport const fetchCommand = defineCommand({\n meta: {\n name: \"fetch\",\n description: \"Fetch a documentation URL and convert to Markdown\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to convert\",\n required: true,\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output file path or directory\",\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Follow sidebar/nav links\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n },\n async run({ args }) {\n const url = args.url as string;\n const output = args.output as string | undefined;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n const name = (args.name as string) || slugFromUrl(url);\n\n const { mode, outputPath, outputDir } = resolveOutputMode(output, shouldCrawl, name);\n const silent = mode === \"single-file\" && !outputPath;\n\n if (shouldCrawl) {\n if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);\n\n // Fetch first page to resolve platform and get navLinkSelector\n const firstHtml = await fetchPage(url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(url, $);\n const strategy = getStrategy(platformId);\n const navLinkSelector = strategy.navLinkSelector();\n\n const crawlResult = await crawl(url, {\n maxDepth,\n navLinkSelector,\n discoverUrls: strategy.discoverUrls?.bind(strategy),\n onPageFetched: (pageUrl, current, total) => {\n if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);\n },\n });\n\n const { pages, effectivePrefix } = crawlResult;\n if (!silent) consola.success(`Crawled ${pages.length} pages`);\n\n if (mode === \"directory\") {\n // Directory mode: one .md file per page + manifests\n const pageEntries = pages.map((page) => {\n const { content, title, platform } = extract(page.html, page.url);\n const md = transform(content);\n return { url: page.url, title, platform, markdown: md };\n });\n\n const firstPlatform = pageEntries[0]?.platform || \"generic\";\n const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);\n\n const sourceManifest = buildSourceManifest(name, url, firstPlatform, manifestPages);\n writeSourceManifest(sourceManifest, outputDir);\n\n // Update root manifest in the parent directory\n const rootDir = dirname(outputDir.replace(/\\/$/, \"\"));\n updateRootManifest(rootDir, {\n name,\n path: name + \"/\",\n fetched_at: sourceManifest.fetched_at,\n });\n\n consola.success(`Written ${pages.length} pages to ${outputDir}`);\n } else {\n // Single-file mode: stitch all pages together\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n\n write(markdown, outputPath, {\n 
sourceUrl: url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n if (!silent) consola.success(`Written to ${outputPath}`);\n }\n } else {\n if (!silent) consola.start(`Fetching ${url}...`);\n let html = await fetchPage(url);\n\n const { content, title, platform } = extract(html, url);\n\n // If content is suspiciously small, try Playwright\n if (content.trim().length < 200) {\n if (!silent) consola.warn(\"Content looks thin, retrying with browser...\");\n try {\n html = await fetchWithBrowser(url);\n const result = extract(html, url);\n const markdown = transform(result.content);\n write(markdown, outputPath, {\n sourceUrl: url,\n title: result.title || title,\n platform: result.platform,\n });\n if (!silent) consola.success(`Written to ${outputPath}`);\n return;\n } catch (err: any) {\n if (err?.code === \"ERR_PLAYWRIGHT_NOT_INSTALLED\") {\n consola.warn(\n \"This page may require a browser to render. Install Playwright:\\n\" +\n \" npm install -D playwright && npx playwright install chromium\"\n );\n } else {\n consola.warn(\"Browser fallback failed, using static content.\");\n }\n }\n }\n\n if (!silent) consola.success(`Extracted content (platform: ${platform})`);\n const markdown = transform(content);\n\n write(markdown, outputPath, {\n sourceUrl: url,\n title,\n platform,\n });\n\n if (!silent) consola.success(`Written to ${outputPath}`);\n }\n },\n});\n","import { ofetch } from \"ofetch\";\n\n/**\n * Fetch the raw HTML of a documentation page.\n * Uses static fetch by default.\n */\nexport async function fetchPage(url: string): Promise<string> {\n const html = await ofetch(url, { responseType: \"text\" });\n return html;\n}\n\n/**\n * Fetch a page using Playwright for JS-rendered sites.\n * Playwright is an optional dependency — throws a typed error if not installed.\n */\nexport async function fetchWithBrowser(url: string): Promise<string> {\n let playwright;\n try {\n playwright = await import(\"playwright\");\n } catch {\n const err = new Error(\n \"Playwright is not installed. 
Run:\\n npm install -D playwright && npx playwright install chromium\"\n );\n (err as any).code = \"ERR_PLAYWRIGHT_NOT_INSTALLED\";\n throw err;\n }\n\n const browser = await playwright.chromium.launch({ headless: true });\n try {\n const page = await browser.newPage();\n await page.goto(url, { waitUntil: \"networkidle\" });\n // Extra buffer for late-loading content\n await page.waitForTimeout(1000);\n const html = await page.content();\n return html;\n } finally {\n await browser.close();\n }\n}\n","import * as cheerio from \"cheerio\";\nimport { Readability } from \"@mozilla/readability\";\nimport { parseHTML } from \"linkedom\";\nimport { resolve } from \"./resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport type { PlatformId } from \"../platforms/base\";\n\nexport interface ExtractResult {\n content: string;\n title: string;\n platform: PlatformId;\n}\n\n/**\n * Extract meaningful content from raw HTML.\n * Uses platform-specific selectors when available, falls back to Readability.\n */\nexport function extract(html: string, url: string): ExtractResult {\n const $ = cheerio.load(html);\n const platform = resolve(url, $);\n const strategy = getStrategy(platform);\n\n const title = extractTitle($);\n\n // Non-generic platforms: use selector-based extraction first\n if (platform !== \"generic\") {\n for (const sel of strategy.removeSelectors()) {\n $(sel).remove();\n }\n\n const contentEl = $(strategy.contentSelector()).first();\n const selectorContent = contentEl.html();\n\n if (selectorContent && selectorContent.trim().length >= 100) {\n return { content: selectorContent, title, platform };\n }\n // Fall through to Readability if selector extraction yields too little\n }\n\n // Generic / fallback: Readability extraction\n const { document } = parseHTML(html);\n const reader = new Readability(document as any);\n const article = reader.parse();\n\n const content = article?.content || $(\"body\").html() || html;\n\n return {\n content,\n title: title || article?.title || \"\",\n platform,\n };\n}\n\n/**\n * Extract page title from common sources.\n */\nfunction extractTitle($: cheerio.CheerioAPI): string {\n const h1 = $(\"h1\").first().text().trim();\n if (h1) return h1;\n\n const ogTitle = $('meta[property=\"og:title\"]').attr(\"content\")?.trim();\n if (ogTitle) return ogTitle;\n\n return $(\"title\").text().trim();\n}\n","import * as cheerio from \"cheerio\";\nimport type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const mintlify: PlatformStrategy = {\n id: \"mintlify\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Mintlify\"]').length > 0) return true;\n if ($(\"script[src*='mintlify']\").length > 0) return true;\n if ($(\"[data-mintlify]\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, main\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \".sidebar\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href], .sidebar a[href], [class*='sidebar'] a[href]\";\n },\n\n discoverUrls(html: string, baseUrl: string): string[] {\n const $ = cheerio.load(html);\n const paths = new Set<string>();\n\n // Mintlify uses Next.js — sidebar nav is in __next_f script data, not <a> tags.\n // The data contains escaped JSON like 
\\\"href\\\":\\\"/api-reference/checkouts/create\\\"\n $(\"script\").each((_, el) => {\n const text = $(el).html() || \"\";\n // Match escaped JSON paths: \\\"href\\\":\\\"/some-path\\\"\n const escaped = /\\\\?\"href\\\\?\"\\s*:\\s*\\\\?\"(\\/[a-z0-9][a-z0-9\\/-]*)\\\\?\"/g;\n let match = escaped.exec(text);\n while (match !== null) {\n paths.add(match[1]);\n match = escaped.exec(text);\n }\n });\n\n // Resolve to absolute URLs\n const origin = new URL(baseUrl).origin;\n // Determine docs base path (e.g. /docs from /docs/api-reference/intro)\n const basePath = new URL(baseUrl).pathname.split(\"/\").slice(0, 2).join(\"/\");\n\n return [...paths].map((p) => {\n if (p.startsWith(basePath)) {\n return origin + p;\n }\n return origin + basePath + p;\n });\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const docusaurus: PlatformStrategy = {\n id: \"docusaurus\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Docusaurus\"]').length > 0)\n return true;\n if ($(\".theme-doc-sidebar-container\").length > 0) return true;\n if ($('meta[name=\"docusaurus_locale\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, [role='main'], .theme-doc-markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \".navbar\",\n \"footer\",\n \".theme-doc-toc-desktop\",\n \".theme-doc-sidebar-container\",\n \".pagination-nav\",\n \".theme-doc-breadcrumbs\",\n \"nav\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".menu__link[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const readme: PlatformStrategy = {\n id: \"readme\",\n\n detect(url: string, $: CheerioAPI): boolean {\n let rmClassCount = 0;\n $(\"[class]\").each((_, el) => {\n const cls = $(el).attr(\"class\") || \"\";\n if (/\\brm-/.test(cls)) rmClassCount++;\n });\n if (rmClassCount > 2) return true;\n if ($(\".rm-Article\").length > 0) return true;\n if ($(\".rm-Markdown\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \".markdown-body, .rm-Article, .rm-Markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \".rm-Sidebar\",\n \".rm-TableOfContents\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".rm-Sidebar a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const gitbook: PlatformStrategy = {\n id: \"gitbook\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"GitBook\"]').length > 0) return true;\n try {\n const parsed = new URL(url);\n if (parsed.hostname.endsWith(\".gitbook.io\")) return true;\n } catch {\n // invalid URL, skip host check\n }\n if ($('[data-testid=\"page.contentEditor\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return '[data-testid=\"page.contentEditor\"], main, article';\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[class*='sidebar']\",\n \"[class*='toc']\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href], aside a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } 
from \"./base\";\n\nexport const generic: PlatformStrategy = {\n id: \"generic\",\n\n detect(_url: string, _$: CheerioAPI): boolean {\n return true;\n },\n\n contentSelector(): string {\n return \"article, main, [role='main'], .content\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n \"noscript\",\n ];\n },\n\n navLinkSelector(): string | null {\n return null;\n },\n};\n","import type { PlatformId, PlatformStrategy } from \"./base\";\nimport { mintlify } from \"./mintlify\";\nimport { docusaurus } from \"./docusaurus\";\nimport { readme } from \"./readme\";\nimport { gitbook } from \"./gitbook\";\nimport { generic } from \"./generic\";\n\n/** Ordered list of platform strategies. Generic must be last (always matches). */\nexport const platformStrategies: PlatformStrategy[] = [\n mintlify,\n docusaurus,\n readme,\n gitbook,\n generic,\n];\n\n/** Get a strategy by its platform ID. */\nexport function getStrategy(id: PlatformId): PlatformStrategy {\n const strategy = platformStrategies.find((s) => s.id === id);\n if (!strategy) {\n throw new Error(`Unknown platform: ${id}`);\n }\n return strategy;\n}\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformId } from \"../platforms/base\";\nimport { platformStrategies } from \"../platforms/registry\";\n\n/**\n * Detect which documentation platform a page belongs to.\n * Tries platform-specific strategies in order, falls back to generic.\n */\nexport function resolve(url: string, $: CheerioAPI): PlatformId {\n for (const strategy of platformStrategies) {\n if (strategy.detect(url, $)) {\n return strategy.id;\n }\n }\n return \"generic\";\n}\n","import TurndownService from \"turndown\";\nimport { gfm } from \"turndown-plugin-gfm\";\n\n/**\n * Convert clean HTML to Markdown.\n */\nexport function transform(html: string): string {\n const td = new TurndownService({\n headingStyle: \"atx\",\n codeBlockStyle: \"fenced\",\n bulletListMarker: \"-\",\n });\n\n td.use(gfm);\n\n addCalloutRule(td);\n addTabbedContentRule(td);\n addCodeBlockLangRule(td);\n addHiddenElementRule(td);\n\n return td.turndown(html);\n}\n\nfunction isElement(node: TurndownService.Node): node is HTMLElement {\n return node.nodeType === 1;\n}\n\nfunction getAttr(node: TurndownService.Node, attr: string): string {\n if (isElement(node)) {\n return node.getAttribute(attr) || \"\";\n }\n return \"\";\n}\n\nfunction getTagName(node: TurndownService.Node): string {\n if (isElement(node)) {\n return node.tagName.toLowerCase();\n }\n return \"\";\n}\n\n/**\n * Convert callouts/admonitions to blockquotes.\n * Matches: aside, .admonition, .callout, .alert, [role=\"alert\"]\n */\nfunction addCalloutRule(td: TurndownService): void {\n td.addRule(\"callouts\", {\n filter(node) {\n if (!isElement(node)) return false;\n const tag = getTagName(node);\n if (tag === \"aside\") return true;\n const cls = getAttr(node, \"class\");\n if (\n /\\b(admonition|callout|alert|notice|warning|info|tip|note|caution|danger)\\b/i.test(\n cls\n )\n )\n return true;\n if (getAttr(node, \"role\") === \"alert\") return true;\n return false;\n },\n replacement(content, node) {\n const cls = getAttr(node, \"class\").toLowerCase();\n let type = \"Note\";\n if (/warning|caution/.test(cls)) type = \"Warning\";\n else if (/danger|error/.test(cls)) type = \"Danger\";\n else if (/tip|success/.test(cls)) type = \"Tip\";\n else if 
(/info/.test(cls)) type = \"Info\";\n\n const lines = content.trim().split(\"\\n\");\n const quoted = lines.map((line) => `> ${line}`).join(\"\\n\");\n return `\\n> **${type}**\\n${quoted}\\n\\n`;\n },\n });\n}\n\n/**\n * Convert tabbed content into labeled sections.\n * Matches: .tab-panel, .tabpanel, [role=\"tabpanel\"]\n */\nfunction addTabbedContentRule(td: TurndownService): void {\n td.addRule(\"tabbed-content\", {\n filter(node) {\n if (!isElement(node)) return false;\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel|tabs__item)\\b/i.test(cls)) return true;\n if (getAttr(node, \"role\") === \"tabpanel\") return true;\n return false;\n },\n replacement(content, node) {\n const label =\n getAttr(node, \"aria-label\") ||\n getAttr(node, \"data-label\") ||\n getAttr(node, \"data-value\") ||\n \"\";\n if (label) {\n return `\\n**${label}**\\n\\n${content.trim()}\\n\\n`;\n }\n return `\\n${content.trim()}\\n\\n`;\n },\n });\n}\n\n/**\n * Ensure code blocks with data-language/data-lang produce proper fenced blocks.\n */\nfunction addCodeBlockLangRule(td: TurndownService): void {\n td.addRule(\"code-block-lang\", {\n filter(node) {\n if (!isElement(node)) return false;\n if (getTagName(node) !== \"pre\") return false;\n const codeEl = node.querySelector(\"code\");\n if (!codeEl) return false;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n return lang.length > 0;\n },\n replacement(_content, node) {\n if (!isElement(node)) return _content;\n const codeEl = node.querySelector(\"code\")!;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n const code = codeEl.textContent || \"\";\n return `\\n\\`\\`\\`${lang}\\n${code}\\n\\`\\`\\`\\n`;\n },\n });\n}\n\n/**\n * Remove hidden elements (display:none) except tab panels.\n */\nfunction addHiddenElementRule(td: TurndownService): void {\n td.addRule(\"hidden-elements\", {\n filter(node) {\n if (!isElement(node)) return false;\n const style = getAttr(node, \"style\");\n if (!/display\\s*:\\s*none/i.test(style)) return false;\n // Don't remove tab panels — they're hidden but contain valid content\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel)\\b/i.test(cls)) return false;\n if (getAttr(node, \"role\") === \"tabpanel\") return false;\n return true;\n },\n replacement() {\n return \"\";\n },\n });\n}\n","import { writeFileSync, mkdirSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport matter from \"gray-matter\";\nimport { filePathForPage } from \"../utils/slug\";\n\nexport interface WriterOptions {\n sourceUrl: string;\n title: string;\n platform: string;\n}\n\n/**\n * Write Markdown with frontmatter to a file or stdout.\n */\nexport function write(\n markdown: string,\n outputPath: string | undefined,\n options: WriterOptions\n): void {\n const content = matter.stringify(markdown, {\n source: options.sourceUrl,\n fetched_at: new Date().toISOString(),\n platform: options.platform,\n title: options.title,\n docs2ai_version: \"0.1.0\",\n });\n\n if (outputPath) {\n mkdirSync(dirname(outputPath), { recursive: true });\n writeFileSync(outputPath, content, \"utf-8\");\n } else {\n process.stdout.write(content);\n }\n}\n\n/**\n * Write a single page's Markdown with frontmatter to a file path (always writes to 
disk).\n */\nexport function writePage(\n markdown: string,\n filePath: string,\n options: WriterOptions\n): void {\n const content = matter.stringify(markdown, {\n source: options.sourceUrl,\n fetched_at: new Date().toISOString(),\n platform: options.platform,\n title: options.title,\n docs2ai_version: \"0.1.0\",\n });\n\n mkdirSync(dirname(filePath), { recursive: true });\n writeFileSync(filePath, content, \"utf-8\");\n}\n\nexport interface PageEntry {\n url: string;\n title: string;\n platform: string;\n markdown: string;\n}\n\n/**\n * Write multiple crawled pages to a directory, one .md file per page.\n * Returns manifest page entries (title + relative path) for each written page.\n */\nexport function writePages(\n pages: PageEntry[],\n outputDir: string,\n basePrefix: string\n): { title: string; path: string }[] {\n const usedPaths = new Set<string>();\n const entries: { title: string; path: string }[] = [];\n\n for (const page of pages) {\n let relPath = filePathForPage(page.url, basePrefix);\n\n // Handle slug collisions by appending a numeric suffix\n if (usedPaths.has(relPath)) {\n const base = relPath.replace(/\\.md$/, \"\");\n let i = 2;\n while (usedPaths.has(`${base}-${i}.md`)) i++;\n relPath = `${base}-${i}.md`;\n }\n usedPaths.add(relPath);\n\n const filePath = join(outputDir, relPath);\n writePage(page.markdown, filePath, {\n sourceUrl: page.url,\n title: page.title,\n platform: page.platform,\n });\n\n entries.push({ title: page.title, path: relPath });\n }\n\n return entries;\n}\n","/**\n * Derive a filename slug from a URL's pathname.\n * Strips leading/trailing slashes and returns the last segment.\n */\nexport function slugFromPathname(url: string): string {\n const parsed = new URL(url);\n const pathname = parsed.pathname.replace(/\\/+$/, \"\");\n if (!pathname || pathname === \"/\") return \"index\";\n const segments = pathname.split(\"/\").filter(Boolean);\n return segments[segments.length - 1];\n}\n\n/**\n * Compute a relative file path for a crawled page based on its URL and a base prefix.\n * Strips the basePrefix from the pathname, intermediate segments become subdirectories,\n * and the last segment becomes the filename with .md extension.\n *\n * @example filePathForPage(\"https://x.com/docs/guides/auth\", \"/docs/\") → \"guides/auth.md\"\n * @example filePathForPage(\"https://x.com/docs/getting-started\", \"/docs/\") → \"getting-started.md\"\n * @example filePathForPage(\"https://x.com/docs/\", \"/docs/\") → \"index.md\"\n */\nexport function filePathForPage(pageUrl: string, basePrefix: string): string {\n const parsed = new URL(pageUrl);\n let pathname = parsed.pathname.replace(/\\/+$/, \"\");\n\n // Strip the base prefix\n const normalizedPrefix = basePrefix.replace(/\\/+$/, \"\");\n if (pathname.startsWith(normalizedPrefix)) {\n pathname = pathname.slice(normalizedPrefix.length);\n }\n\n // Remove leading slash\n pathname = pathname.replace(/^\\/+/, \"\");\n\n if (!pathname) return \"index.md\";\n\n return pathname + \".md\";\n}\n","import * as cheerio from \"cheerio\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport {\n getCrawlPrefix,\n computeCommonPrefix,\n isInBounds,\n normalizeUrl,\n} from \"./boundary\";\n\nexport interface CrawledPage {\n url: string;\n html: string;\n}\n\nexport interface CrawlResult {\n pages: CrawledPage[];\n effectivePrefix: string;\n}\n\nexport interface CrawlOptions {\n maxDepth: number;\n navLinkSelector?: string | null;\n /** Custom URL discovery for SPA-rendered sidebars (overrides navLinkSelector) */\n 
discoverUrls?: (html: string, baseUrl: string) => string[];\n onPageFetched?: (url: string, current: number, total: number) => void;\n}\n\n/**\n * Crawl documentation pages starting from a URL.\n * Follows in-bounds links via BFS up to maxDepth.\n */\nexport async function crawl(\n startUrl: string,\n options: CrawlOptions\n): Promise<CrawlResult> {\n const { origin } = getCrawlPrefix(startUrl);\n let { pathPrefix } = getCrawlPrefix(startUrl);\n const visited = new Set<string>();\n const results: CrawledPage[] = [];\n let isFirstPage = true;\n\n // BFS queue: [url, depth]\n const queue: [string, number][] = [[startUrl, 0]];\n visited.add(normalizeUrl(startUrl));\n\n while (queue.length > 0) {\n const [url, depth] = queue.shift()!;\n\n let html: string;\n try {\n html = await fetchPage(url);\n } catch {\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n continue;\n }\n results.push({ url, html });\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n\n if (depth < options.maxDepth) {\n // On the first page, discover all same-origin nav links to widen the boundary\n if (isFirstPage) {\n const allNavUrls = options.discoverUrls\n ? discoverSameOriginCustom(html, url, origin, options.discoverUrls)\n : discoverSameOrigin(html, url, origin, options.navLinkSelector);\n if (allNavUrls.length > 0) {\n pathPrefix = computeCommonPrefix(startUrl, allNavUrls);\n }\n isFirstPage = false;\n }\n\n const links = options.discoverUrls\n ? discoverLinksCustom(html, url, origin, pathPrefix, options.discoverUrls)\n : discoverLinks(html, url, origin, pathPrefix, options.navLinkSelector);\n for (const link of links) {\n const normalized = normalizeUrl(link);\n if (!visited.has(normalized)) {\n visited.add(normalized);\n queue.push([link, depth + 1]);\n }\n }\n }\n\n // Politeness delay between requests\n if (queue.length > 0) {\n await delay(200);\n }\n }\n\n return { pages: results, effectivePrefix: pathPrefix };\n}\n\n/**\n * Extract all in-bounds links from a page's HTML.\n * When navLinkSelector is provided, only links matching that selector are used.\n */\nfunction discoverLinks(\n html: string,\n baseUrl: string,\n origin: string,\n pathPrefix: string,\n navLinkSelector?: string | null\n): string[] {\n const $ = cheerio.load(html);\n const links: string[] = [];\n const selector = navLinkSelector || \"a[href]\";\n\n $(selector).each((_, el) => {\n const href = $(el).attr(\"href\");\n if (!href) return;\n\n try {\n const resolved = new URL(href, baseUrl).href;\n if (isInBounds(resolved, origin, pathPrefix)) {\n links.push(resolved);\n }\n } catch {\n // Invalid URL, skip\n }\n });\n\n return [...new Set(links)];\n}\n\n/**\n * Discover all same-origin links from nav (no path prefix filter).\n * Used on the first page to compute the crawl boundary from nav structure.\n */\nfunction discoverSameOrigin(\n html: string,\n baseUrl: string,\n origin: string,\n navLinkSelector?: string | null\n): string[] {\n const $ = cheerio.load(html);\n const links: string[] = [];\n const selector = navLinkSelector || \"a[href]\";\n\n $(selector).each((_, el) => {\n const href = $(el).attr(\"href\");\n if (!href) return;\n try {\n const resolved = new URL(href, baseUrl).href;\n if (new URL(resolved).origin === origin) {\n links.push(resolved);\n }\n } catch {\n // Invalid URL, skip\n }\n });\n\n return [...new Set(links)];\n}\n\n/**\n * Discover all same-origin links via custom discovery (no path prefix filter).\n */\nfunction discoverSameOriginCustom(\n html: string,\n 
baseUrl: string,\n origin: string,\n discoverUrls: (html: string, baseUrl: string) => string[]\n): string[] {\n const urls = discoverUrls(html, baseUrl);\n return [\n ...new Set(\n urls.filter((u) => {\n try {\n return new URL(u).origin === origin;\n } catch {\n return false;\n }\n })\n ),\n ];\n}\n\n/**\n * Extract in-bounds links using a custom discovery function from the platform strategy.\n */\nfunction discoverLinksCustom(\n html: string,\n baseUrl: string,\n origin: string,\n pathPrefix: string,\n discoverUrls: (html: string, baseUrl: string) => string[]\n): string[] {\n const urls = discoverUrls(html, baseUrl);\n return [...new Set(urls.filter((u) => isInBounds(u, origin, pathPrefix)))];\n}\n\nfunction delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Validate whether a string is a valid URL.\n */\nexport function isValidUrl(input: string): boolean {\n try {\n new URL(input);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Normalize a URL for deduplication: strip hash, query, trailing slash.\n */\nexport function normalizeUrl(url: string): string {\n const parsed = new URL(url);\n parsed.hash = \"\";\n parsed.search = \"\";\n return parsed.href.replace(/\\/$/, \"\");\n}\n\n/**\n * Derive a short name/slug from a URL's hostname.\n */\nexport function slugFromUrl(url: string): string {\n try {\n const parsed = new URL(url);\n return parsed.hostname.replace(/\\./g, \"-\").replace(/^www-/, \"\");\n } catch {\n return \"source\";\n }\n}\n","import { normalizeUrl } from \"../utils/url\";\n\nexport { normalizeUrl };\n\n/**\n * Determine the crawl boundary from a starting URL.\n * Links are in-bounds if they share the same origin and path prefix.\n */\nexport function getCrawlPrefix(url: string): {\n origin: string;\n pathPrefix: string;\n} {\n const parsed = new URL(url);\n const pathParts = parsed.pathname.split(\"/\");\n // Remove the last segment (the current page slug)\n pathParts.pop();\n const pathPrefix = pathParts.join(\"/\") + \"/\";\n return { origin: parsed.origin, pathPrefix };\n}\n\n/**\n * Compute the longest common path prefix between a start URL and discovered nav URLs.\n * Used to widen the crawl boundary when sidebar links span multiple sections.\n */\nexport function computeCommonPrefix(\n startUrl: string,\n navUrls: string[]\n): string {\n const startParts = new URL(startUrl).pathname.split(\"/\").filter(Boolean);\n const parts = [...startParts];\n\n for (const url of navUrls) {\n const urlParts = new URL(url).pathname.split(\"/\").filter(Boolean);\n let i = 0;\n while (i < parts.length && i < urlParts.length && parts[i] === urlParts[i]) {\n i++;\n }\n parts.length = i;\n }\n\n return \"/\" + (parts.length > 0 ? parts.join(\"/\") + \"/\" : \"\");\n}\n\n/**\n * Check whether a candidate URL falls within the crawl boundary.\n */\nexport function isInBounds(\n candidateUrl: string,\n origin: string,\n pathPrefix: string\n): boolean {\n try {\n const parsed = new URL(candidateUrl);\n return parsed.origin === origin && parsed.pathname.startsWith(pathPrefix);\n } catch {\n return false;\n }\n}\n","import { readFileSync, writeFileSync, mkdirSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\n\n/** Manifest for a single documentation source (written as _index.json). 
*/\nexport interface SourceManifest {\n name: string;\n url: string;\n platform: string;\n fetched_at: string;\n pages: { title: string; path: string }[];\n}\n\n/** Root manifest listing all sources (written as manifest.json). */\nexport interface RootManifest {\n sources: { name: string; path: string; fetched_at: string }[];\n}\n\n/**\n * Build a source manifest object.\n */\nexport function buildSourceManifest(\n name: string,\n url: string,\n platform: string,\n pages: { title: string; path: string }[]\n): SourceManifest {\n return {\n name,\n url,\n platform,\n fetched_at: new Date().toISOString(),\n pages,\n };\n}\n\n/**\n * Write a source manifest (_index.json) to a directory.\n */\nexport function writeSourceManifest(\n manifest: SourceManifest,\n outputDir: string\n): void {\n mkdirSync(outputDir, { recursive: true });\n writeFileSync(\n join(outputDir, \"_index.json\"),\n JSON.stringify(manifest, null, 2) + \"\\n\",\n \"utf-8\"\n );\n}\n\n/**\n * Load the root manifest (manifest.json) from a directory.\n * Returns an empty manifest if the file doesn't exist.\n */\nexport function loadRootManifest(rootDir: string): RootManifest {\n try {\n const raw = readFileSync(join(rootDir, \"manifest.json\"), \"utf-8\");\n return JSON.parse(raw) as RootManifest;\n } catch {\n return { sources: [] };\n }\n}\n\n/**\n * Upsert a source entry in the root manifest and write it to disk.\n */\nexport function updateRootManifest(\n rootDir: string,\n entry: { name: string; path: string; fetched_at: string }\n): void {\n const manifest = loadRootManifest(rootDir);\n const idx = manifest.sources.findIndex((s) => s.name === entry.name);\n if (idx >= 0) {\n manifest.sources[idx] = entry;\n } else {\n manifest.sources.push(entry);\n }\n mkdirSync(rootDir, { recursive: true });\n writeFileSync(\n join(rootDir, \"manifest.json\"),\n JSON.stringify(manifest, null, 2) + \"\\n\",\n \"utf-8\"\n );\n}\n","import { defineCommand } from \"citty\";\nimport { join } from \"node:path\";\nimport consola from \"consola\";\nimport { loadConfig, saveConfig, addSource } from \"../config/manager\";\nimport type { Docs2aiConfig } from \"../config/schema\";\nimport { slugFromUrl } from \"../utils/url\";\n\nexport const addCommand = defineCommand({\n meta: {\n name: \"add\",\n description: \"Add a documentation source to .docs2ai.yaml\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to add\",\n required: true,\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Enable crawl mode for this source\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output filename or directory\",\n },\n },\n run({ args }) {\n const url = args.url as string;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n\n const name = (args.name as string) || slugFromUrl(url);\n const output =\n (args.output as string) || (shouldCrawl ? 
`${name}/` : `${name}.md`);\n\n const existing = loadConfig();\n let config: Docs2aiConfig;\n let configPath: string;\n\n if (existing) {\n config = existing.config;\n configPath = existing.configPath;\n } else {\n configPath = join(process.cwd(), \".docs2ai.yaml\");\n config = { version: 1, outputDir: \".ai/docs\", sources: [] };\n }\n\n addSource(config, { name, url, crawl: shouldCrawl, maxDepth, output });\n saveConfig(config, configPath);\n\n consola.success(`Added source \"${name}\" → ${url}`);\n consola.info(`Config: ${configPath}`);\n },\n});\n","import { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport yaml from \"js-yaml\";\nimport type { Docs2aiConfig, SourceConfig } from \"./schema\";\n\nconst CONFIG_FILENAME = \".docs2ai.yaml\";\n\n/**\n * Load the .docs2ai.yaml config file, searching up from cwd.\n * Returns null if no config file is found.\n */\nexport function loadConfig(startDir?: string): {\n config: Docs2aiConfig;\n configPath: string;\n} | null {\n const configPath = findConfigFile(startDir || process.cwd());\n if (!configPath) return null;\n\n const raw = readFileSync(configPath, \"utf-8\");\n const data = yaml.load(raw) as Record<string, any>;\n\n const config: Docs2aiConfig = {\n version: data.version ?? 1,\n outputDir: data.output_dir ?? \".ai/docs\",\n sources: (data.sources ?? []).map(snakeToCamelSource),\n };\n\n return { config, configPath };\n}\n\n/**\n * Save configuration to a .docs2ai.yaml file.\n */\nexport function saveConfig(config: Docs2aiConfig, configPath: string): void {\n const data = {\n version: config.version,\n output_dir: config.outputDir,\n sources: config.sources.map(camelToSnakeSource),\n };\n\n const content = yaml.dump(data, { lineWidth: -1 });\n writeFileSync(configPath, content, \"utf-8\");\n}\n\n/**\n * Add or update a source in the config (upsert by name).\n */\nexport function addSource(config: Docs2aiConfig, source: SourceConfig): void {\n const idx = config.sources.findIndex((s) => s.name === source.name);\n if (idx >= 0) {\n config.sources[idx] = source;\n } else {\n config.sources.push(source);\n }\n}\n\n/**\n * Walk up the directory tree looking for .docs2ai.yaml.\n */\nfunction findConfigFile(startDir: string): string | null {\n let dir = startDir;\n while (true) {\n const candidate = join(dir, CONFIG_FILENAME);\n if (existsSync(candidate)) return candidate;\n const parent = dirname(dir);\n if (parent === dir) return null;\n dir = parent;\n }\n}\n\nfunction snakeToCamelSource(s: Record<string, any>): SourceConfig {\n return {\n name: s.name ?? \"\",\n url: s.url ?? \"\",\n crawl: s.crawl ?? false,\n maxDepth: s.max_depth ?? 2,\n output: s.output ?? 
\"\",\n };\n}\n\nfunction camelToSnakeSource(\n s: SourceConfig\n): Record<string, string | number | boolean> {\n return {\n name: s.name,\n url: s.url,\n crawl: s.crawl,\n max_depth: s.maxDepth,\n output: s.output,\n };\n}\n","import { defineCommand } from \"citty\";\nimport { join, dirname } from \"node:path\";\nimport { mkdirSync } from \"node:fs\";\nimport * as cheerio from \"cheerio\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write, writePages } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport {\n buildSourceManifest,\n writeSourceManifest,\n updateRootManifest,\n} from \"../pipeline/manifest\";\n\nexport const updateCommand = defineCommand({\n meta: {\n name: \"update\",\n description: \"Refresh configured documentation sources\",\n },\n args: {\n name: {\n type: \"string\",\n description: \"Update only the named source\",\n },\n },\n async run({ args }) {\n const result = loadConfig();\n if (!result) {\n consola.error(\"No .docs2ai.yaml found. Run `docs2ai add <url>` first.\");\n process.exit(1);\n }\n\n const { config, configPath } = result;\n const configDir = dirname(configPath);\n const filterName = args.name as string | undefined;\n\n const sources = filterName\n ? config.sources.filter((s) => s.name === filterName)\n : config.sources;\n\n if (sources.length === 0) {\n if (filterName) {\n consola.error(`Source \"${filterName}\" not found in config.`);\n } else {\n consola.error(\"No sources configured.\");\n }\n process.exit(1);\n }\n\n for (const source of sources) {\n const isDirectoryOutput = !source.output.endsWith(\".md\");\n\n consola.start(`Updating \"${source.name}\" from ${source.url}...`);\n\n if (source.crawl) {\n // Fetch first page to resolve platform and get link discovery\n const firstHtml = await fetchPage(source.url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(source.url, $);\n const strategy = getStrategy(platformId);\n\n const crawlResult = await crawl(source.url, {\n maxDepth: source.maxDepth,\n navLinkSelector: strategy.navLinkSelector(),\n discoverUrls: strategy.discoverUrls?.bind(strategy),\n onPageFetched: (url, current, total) => {\n consola.info(` [${current}/${total}] ${url}`);\n },\n });\n\n const { pages, effectivePrefix } = crawlResult;\n\n if (isDirectoryOutput) {\n // Directory mode: one .md file per page + manifests\n const outputDir = join(configDir, config.outputDir, source.output);\n const pageEntries = pages.map((page) => {\n const { content, title, platform } = extract(page.html, page.url);\n const md = transform(content);\n return { url: page.url, title, platform, markdown: md };\n });\n\n const firstPlatform = pageEntries[0]?.platform || \"generic\";\n const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);\n\n const sourceManifest = buildSourceManifest(\n source.name,\n source.url,\n firstPlatform,\n manifestPages\n );\n writeSourceManifest(sourceManifest, outputDir);\n\n const rootDir = join(configDir, config.outputDir);\n updateRootManifest(rootDir, {\n name: source.name,\n path: source.output,\n fetched_at: sourceManifest.fetched_at,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputDir} (${pages.length} pages)`);\n } else {\n // 
Single-file mode: stitch all pages together\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n } else {\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n const html = await fetchPage(source.url);\n const { content, title, platform } = extract(html, source.url);\n const markdown = transform(content);\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title,\n platform,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\n\nexport const listCommand = defineCommand({\n meta: {\n name: \"list\",\n description: \"List configured documentation sources\",\n },\n run() {\n const result = loadConfig();\n if (!result) {\n consola.info(\"No .docs2ai.yaml found. Run `docs2ai add <url>` to get started.\");\n return;\n }\n\n const { config, configPath } = result;\n consola.info(`Config: ${configPath}`);\n consola.info(`Output dir: ${config.outputDir}\\n`);\n\n if (config.sources.length === 0) {\n consola.info(\"No sources configured.\");\n return;\n }\n\n for (const source of config.sources) {\n const crawlInfo = source.crawl\n ? 
` (crawl, depth: ${source.maxDepth})`\n : \"\";\n console.log(` ${source.name}${crawlInfo}`);\n console.log(` URL: ${source.url}`);\n console.log(` Output: ${source.output}`);\n console.log();\n }\n },\n});\n"],
[remaining "mappings" (base64 VLQ) and "names" fields of the generated source map omitted: machine-encoded bundler data with no human-readable content]
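The embedded sources above introduce the new config layer: src/config/manager.ts resolves .docs2ai.yaml by walking up from the working directory, maps between the camelCase fields used in memory and the snake_case keys written to disk (output_dir, max_depth), and upserts sources by name, while src/commands/add.ts builds a fresh config when none exists. Below is a minimal sketch of that round-trip, assuming it runs inside the package's own source tree; the import paths mirror those in add.ts, and the source name and URL are illustrative.

// Sketch only: mirrors the config round-trip in src/config/manager.ts as used
// by the add command. The source name and URL are illustrative.
import { join } from "node:path";
import { loadConfig, saveConfig, addSource } from "../config/manager";
import type { Docs2aiConfig } from "../config/schema";

// loadConfig() walks up from process.cwd() looking for .docs2ai.yaml and
// returns null when no config file exists yet.
const existing = loadConfig();

const configPath = existing
  ? existing.configPath
  : join(process.cwd(), ".docs2ai.yaml");
const config: Docs2aiConfig = existing
  ? existing.config
  : { version: 1, outputDir: ".ai/docs", sources: [] };

// Upsert by name: re-adding "example-docs" replaces the entry instead of
// appending a duplicate.
addSource(config, {
  name: "example-docs",
  url: "https://docs.example.com",
  crawl: true,
  maxDepth: 2,
  output: "example-docs/",
});

// Serialized back to snake_case YAML keys (output_dir, max_depth, ...).
saveConfig(config, configPath);

Because addSource upserts by name, running `docs2ai add` twice for the same source rewrites its entry rather than appending a duplicate.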
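For the crawl-to-directory path, src/pipeline/manifest.ts (also visible above) adds a two-level index: each source directory gets an _index.json listing its pages, and the root output directory gets a manifest.json that is upserted per source. The following sketch shows how the update command wires those helpers together, again assuming the package's own source tree; every name, URL, and path below is illustrative.

// Sketch only: the manifest flow used by the update command for directory
// output. All names, URLs, and paths are illustrative.
import { join } from "node:path";
import {
  buildSourceManifest,
  writeSourceManifest,
  updateRootManifest,
} from "../pipeline/manifest";

// Page entries as produced by writePages(): title plus the relative .md path.
const pages = [
  { title: "Getting Started", path: "getting-started.md" },
  { title: "Configuration", path: "guides/configuration.md" },
];

// Per-source manifest, written to <outputDir>/<name>/_index.json.
const sourceManifest = buildSourceManifest(
  "example-docs",
  "https://docs.example.com",
  "mintlify",
  pages
);
const sourceDir = join(".ai/docs", "example-docs");
writeSourceManifest(sourceManifest, sourceDir);

// Root manifest (.ai/docs/manifest.json), upserted by source name.
updateRootManifest(".ai/docs", {
  name: "example-docs",
  path: "example-docs/",
  fetched_at: sourceManifest.fetched_at,
});

updateRootManifest reads any existing manifest.json first (falling back to an empty sources list), so repeated updates keep one entry per source with a refreshed fetched_at timestamp.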
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "docs2ai",
-   "version": "0.1.0",
+   "version": "0.1.2",
    "description": "Convert online documentation into AI-ready Markdown context files",
    "type": "module",
    "bin": {