docs2ai 0.1.4 → 0.1.7

This diff shows the changes between publicly released versions of the package as they appear in the public registry; it is provided for informational purposes only.
package/README.md CHANGED
@@ -45,19 +45,37 @@ docs2ai list # show configured sources
 
  Once you've crawled documentation, `docs2ai serve` starts an MCP server that lets AI coding tools query your docs directly.
 
+ > **Prerequisite:** Install docs2ai globally (`npm install -g docs2ai`) or use `npx` to run it without installing. The setup examples below use `npx`, which downloads the package automatically if needed.
+
  ### Quick start
 
  ```bash
  # 1. Crawl some docs
- docs2ai https://docs.stripe.com/api/charges --crawl --name stripe
+ npx docs2ai https://docs.stripe.com/api/charges --crawl --name stripe
 
  # 2. Start the MCP server
- docs2ai serve
+ npx docs2ai serve
  ```
 
  ### Claude Code
 
- Add to `.mcp.json` in your project root:
+ ```bash
+ claude mcp add --scope project docs2ai -- npx docs2ai serve -d .ai/docs/
+ ```
+
+ That's it. Run `/mcp` inside Claude Code to verify the server is connected.
+
+ Use `--scope user` instead to make it available across all your projects.
+
+ ### Cursor
+
+ Open Cursor Settings (`Cmd+,` / `Ctrl+,`) → **MCP** → **+ Add new MCP server**, then:
+
+ - **Name**: `docs2ai`
+ - **Type**: `command`
+ - **Command**: `npx docs2ai serve -d .ai/docs/`
+
+ Alternatively, create a `.cursor/mcp.json` file at your project root:
 
  ```json
  {
@@ -70,7 +88,11 @@ Add to `.mcp.json` in your project root:
  }
  ```
 
- Claude Code will then have access to 4 tools:
+ Restart Cursor for the server to be picked up. A green dot next to the server name in Settings → MCP confirms it's running.
+
+ ### Available tools
+
+ Once connected, your AI assistant has access to:
 
  - **`list_sources`** — see all available documentation sources
  - **`list_pages`** — list pages within a source
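Both the new Claude Code one-liner and the Cursor settings entry register the same MCP server definition. As a rough sketch only (the README's own JSON block is elided in the hunk above, and its exact fields may differ), a `.mcp.json` or `.cursor/mcp.json` entry matching `npx docs2ai serve -d .ai/docs/` would typically look like:

```json
{
  "mcpServers": {
    "docs2ai": {
      "command": "npx",
      "args": ["docs2ai", "serve", "-d", ".ai/docs/"]
    }
  }
}
```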
package/dist/cli.mjs CHANGED
@@ -51,13 +51,17 @@ function loadDocs(docsDir) {
  continue;
  }
  }
- sources.push({
+ const loadedSource = {
  name: sourceManifest.name,
  url: sourceManifest.url,
  platform: sourceManifest.platform,
  fetchedAt: sourceManifest.fetched_at,
  pageCount
- });
+ };
+ if (sourceManifest.display_name) loadedSource.displayName = sourceManifest.display_name;
+ if (sourceManifest.description) loadedSource.description = sourceManifest.description;
+ if (sourceManifest.icon_url !== void 0) loadedSource.iconUrl = sourceManifest.icon_url;
+ sources.push(loadedSource);
  }
  return { sources, pages };
  }
@@ -135,7 +139,10 @@ function createMcpServer(docsDir) {
  url: s.url,
  platform: s.platform,
  fetchedAt: s.fetchedAt,
- pageCount: s.pageCount
+ pageCount: s.pageCount,
+ ...s.displayName && { displayName: s.displayName },
+ ...s.description && { description: s.description },
+ ...s.iconUrl !== void 0 && { iconUrl: s.iconUrl }
  })),
  null,
  2
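With the loader now carrying optional site metadata, the MCP `list_sources` tool includes `displayName`, `description`, and `iconUrl` whenever a source manifest provides them. A sketch of one entry in the tool's JSON output — the field names come from the code above, all values are illustrative:

```json
{
  "name": "stripe",
  "url": "https://docs.stripe.com/api/charges",
  "platform": "generic",
  "fetchedAt": "2025-01-01T00:00:00.000Z",
  "pageCount": 42,
  "displayName": "Stripe",
  "description": "Example description pulled from og:description",
  "iconUrl": "https://docs.stripe.com/favicon.ico"
}
```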
@@ -230,37 +237,80 @@ var init_server = __esm({
 
  // src/cli.ts
  import { defineCommand as defineCommand6, runMain, runCommand } from "citty";
+ import consola6 from "consola";
 
  // src/commands/fetch.ts
  import { defineCommand } from "citty";
  import { dirname as dirname3 } from "path";
- import consola from "consola";
- import * as cheerio4 from "cheerio";
+ import consola2 from "consola";
+ import * as cheerio5 from "cheerio";
 
  // src/pipeline/fetcher.ts
  import { ofetch } from "ofetch";
+ import { execSync } from "child_process";
+ import consola from "consola";
+ var BROWSER_RETRY_CODES = /* @__PURE__ */ new Set([403, 406, 429]);
+ var CHALLENGE_PATTERNS = [
+ "verify you are human",
+ "just a moment",
+ "checking your browser",
+ "attention required",
+ "enable javascript and cookies"
+ ];
+ function isChallengeContent(html) {
+ const lower = html.toLowerCase();
+ return CHALLENGE_PATTERNS.some((p) => lower.includes(p));
+ }
  async function fetchPage(url) {
- const html = await ofetch(url, { responseType: "text" });
- return html;
+ try {
+ return await ofetch(url, { responseType: "text" });
+ } catch (err) {
+ const status = err?.response?.status ?? err?.statusCode;
+ if (status && BROWSER_RETRY_CODES.has(status)) {
+ consola.warn(
+ `Static fetch returned ${status}, retrying with browser...`
+ );
+ return fetchWithBrowser(url);
+ }
+ throw err;
+ }
  }
  async function fetchWithBrowser(url) {
- let playwright;
+ const playwright = await loadPlaywright();
+ let html = await launchAndFetch(playwright, url, true);
+ if (isChallengeContent(html)) {
+ consola.warn("Bot protection detected, retrying with visible browser...");
+ html = await launchAndFetch(playwright, url, false);
+ }
+ return html;
+ }
+ async function loadPlaywright() {
  try {
- playwright = await import("playwright");
+ return await import("playwright");
  } catch {
- const err = new Error(
- "Playwright is not installed. Run:\n npm install -D playwright && npx playwright install chromium"
+ consola.info(
+ "This site requires a browser to fetch. Installing Playwright..."
  );
- err.code = "ERR_PLAYWRIGHT_NOT_INSTALLED";
- throw err;
+ try {
+ execSync("npm install -g playwright", { stdio: "inherit" });
+ execSync("npx playwright install chromium", { stdio: "inherit" });
+ return await import("playwright");
+ } catch {
+ const err = new Error(
+ "Failed to auto-install Playwright. Install it manually:\n\n npm install -g playwright && npx playwright install chromium\n"
+ );
+ err.code = "ERR_PLAYWRIGHT_NOT_INSTALLED";
+ throw err;
+ }
  }
- const browser = await playwright.chromium.launch({ headless: true });
+ }
+ async function launchAndFetch(playwright, url, headless) {
+ const browser = await playwright.chromium.launch({ headless });
  try {
  const page = await browser.newPage();
- await page.goto(url, { waitUntil: "networkidle" });
- await page.waitForTimeout(1e3);
- const html = await page.content();
- return html;
+ await page.goto(url, { waitUntil: "domcontentloaded", timeout: 3e4 });
+ await page.waitForTimeout(2e3);
+ return await page.content();
  } finally {
  await browser.close();
  }
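Taken together, the fetcher now escalates in three steps instead of failing outright: a plain `ofetch` request, then headless Chromium on a 403/406/429 response, then a visible browser if the rendered HTML still matches a bot-challenge phrase — installing Playwright globally on first use if it is missing. A hypothetical run against a protected site (URL and outcome are illustrative; the quoted log lines echo the `consola` messages added above):

```bash
# A docs site whose CDN rejects plain HTTP clients with 403.
# 0.1.4 never retried the static fetch; 0.1.7 escalates automatically:
npx docs2ai https://docs.example.com/guide --crawl --name example
#   Static fetch returned 403, retrying with browser...
#   This site requires a browser to fetch. Installing Playwright...   # only if Playwright is missing
#   Bot protection detected, retrying with visible browser...         # only if a challenge page is served
```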
@@ -492,9 +542,13 @@ function extract(html, url) {
  return { content: selectorContent, title, platform };
  }
  }
- const { document } = parseHTML(html);
- const reader = new Readability(document);
- const article = reader.parse();
+ let article = null;
+ try {
+ const { document } = parseHTML(html);
+ const reader = new Readability(document);
+ article = reader.parse();
+ } catch {
+ }
  const content = article?.content || $("body").html() || html;
  return {
  content,
@@ -857,14 +911,23 @@ function delay(ms) {
  // src/pipeline/manifest.ts
  import { readFileSync, writeFileSync as writeFileSync2, mkdirSync as mkdirSync2 } from "fs";
  import { join as join2 } from "path";
- function buildSourceManifest(name, url, platform, pages) {
- return {
+ function buildSourceManifest(name, url, platform, pages, siteMeta) {
+ const manifest = {
  name,
  url,
  platform,
  fetched_at: (/* @__PURE__ */ new Date()).toISOString(),
  pages
  };
+ if (siteMeta) {
+ manifest.display_name = siteMeta.displayName;
+ manifest.description = siteMeta.description;
+ manifest.icon_url = siteMeta.iconUrl;
+ manifest.og_image = siteMeta.ogImage;
+ manifest.language = siteMeta.language;
+ manifest.page_count = pages.length;
+ }
+ return manifest;
  }
  function writeSourceManifest(manifest, outputDir) {
  mkdirSync2(outputDir, { recursive: true });
@@ -898,6 +961,73 @@ function updateRootManifest(rootDir, entry) {
  );
  }
 
+ // src/pipeline/meta-extractor.ts
+ import * as cheerio4 from "cheerio";
+ function extractSiteMeta(html, url) {
+ const $ = cheerio4.load(html);
+ const origin = new URL(url).origin;
+ return {
+ displayName: extractDisplayName($, url),
+ description: extractDescription($),
+ iconUrl: extractIconUrl($, origin),
+ ogImage: extractOgImage($, origin),
+ language: extractLanguage($)
+ };
+ }
+ function nonEmpty(value) {
+ return value?.trim() || void 0;
+ }
+ function extractDisplayName($, url) {
+ const ogSiteName = nonEmpty($('meta[property="og:site_name"]').attr("content"));
+ if (ogSiteName) return ogSiteName;
+ const appName = nonEmpty($('meta[name="application-name"]').attr("content"));
+ if (appName) return appName;
+ const title = nonEmpty($("title").text());
+ if (title) {
+ const parts = title.split(/\s[-|—]\s/);
+ return parts[0].trim();
+ }
+ return new URL(url).hostname;
+ }
+ function extractDescription($) {
+ const ogDesc = nonEmpty($('meta[property="og:description"]').attr("content"));
+ if (ogDesc) return ogDesc;
+ const metaDesc = nonEmpty($('meta[name="description"]').attr("content"));
+ if (metaDesc) return metaDesc;
+ return "";
+ }
+ function extractIconUrl($, origin) {
+ const selectors = [
+ 'link[rel="apple-touch-icon"]',
+ 'link[rel="icon"][type="image/svg+xml"]',
+ 'link[rel="icon"]',
+ 'link[rel="shortcut icon"]'
+ ];
+ for (const selector of selectors) {
+ const href = nonEmpty($(selector).attr("href"));
+ if (href) return resolveUrl(href, origin);
+ }
+ return `${origin}/favicon.ico`;
+ }
+ function extractOgImage($, origin) {
+ const ogImage = nonEmpty($('meta[property="og:image"]').attr("content"));
+ if (ogImage) return resolveUrl(ogImage, origin);
+ return null;
+ }
+ function extractLanguage($) {
+ const htmlLang = nonEmpty($("html").attr("lang"));
+ if (htmlLang) return htmlLang;
+ const ogLocale = nonEmpty($('meta[property="og:locale"]').attr("content"));
+ if (ogLocale) return ogLocale;
+ return null;
+ }
+ function resolveUrl(href, origin) {
+ if (href.startsWith("http://") || href.startsWith("https://")) return href;
+ if (href.startsWith("//")) return `https:${href}`;
+ if (href.startsWith("/")) return `${origin}${href}`;
+ return `${origin}/${href}`;
+ }
+
 
  // src/commands/fetch.ts
  function resolveOutputMode(output, shouldCrawl, name) {
  if (!shouldCrawl) {
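Because `extractSiteMeta` now feeds `buildSourceManifest`, each crawled source's `_index.json` records display metadata next to its page list, and the root-manifest entries written in the command hunks below gain `display_name`, `description`, `icon_url`, and `page_count`. A sketch of an enriched `_index.json` — the field names mirror the code above, the values are made up:

```json
{
  "name": "stripe",
  "url": "https://docs.stripe.com/api/charges",
  "platform": "generic",
  "fetched_at": "2025-01-01T00:00:00.000Z",
  "pages": [{ "title": "Charges", "path": "charges.md" }],
  "display_name": "Stripe",
  "description": "Description taken from og:description or the meta description tag",
  "icon_url": "https://docs.stripe.com/favicon.ico",
  "og_image": null,
  "language": "en",
  "page_count": 1
}
```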
@@ -952,9 +1082,9 @@ var fetchCommand = defineCommand({
  const { mode, outputPath, outputDir } = resolveOutputMode(output, shouldCrawl, name);
  const silent = mode === "single-file" && !outputPath;
  if (shouldCrawl) {
- if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);
+ if (!silent) consola2.start(`Crawling from ${url} (max depth: ${maxDepth})...`);
  const firstHtml = await fetchPage(url);
- const $ = cheerio4.load(firstHtml);
+ const $ = cheerio5.load(firstHtml);
  const platformId = resolve(url, $);
  const strategy = getStrategy(platformId);
  const navLinkSelector = strategy.navLinkSelector();
@@ -963,11 +1093,11 @@ var fetchCommand = defineCommand({
  navLinkSelector,
  discoverUrls: strategy.discoverUrls?.bind(strategy),
  onPageFetched: (pageUrl, current, total) => {
- if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);
+ if (!silent) consola2.info(`[${current}/${total}] ${pageUrl}`);
  }
  });
  const { pages, effectivePrefix } = crawlResult;
- if (!silent) consola.success(`Crawled ${pages.length} pages`);
+ if (!silent) consola2.success(`Crawled ${pages.length} pages`);
  if (mode === "directory") {
  const pageEntries = pages.map((page) => {
  const { content, title, platform } = extract(page.html, page.url);
@@ -976,15 +1106,20 @@ var fetchCommand = defineCommand({
  });
  const firstPlatform = pageEntries[0]?.platform || "generic";
  const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);
- const sourceManifest = buildSourceManifest(name, url, firstPlatform, manifestPages);
+ const siteMeta = extractSiteMeta(firstHtml, url);
+ const sourceManifest = buildSourceManifest(name, url, firstPlatform, manifestPages, siteMeta);
  writeSourceManifest(sourceManifest, outputDir);
  const rootDir = dirname3(outputDir.replace(/\/$/, ""));
  updateRootManifest(rootDir, {
  name,
  path: name + "/",
- fetched_at: sourceManifest.fetched_at
+ fetched_at: sourceManifest.fetched_at,
+ display_name: siteMeta.displayName,
+ description: siteMeta.description,
+ icon_url: siteMeta.iconUrl,
+ page_count: manifestPages.length
  });
- consola.success(`Written ${pages.length} pages to ${outputDir}`);
+ consola2.success(`Written ${pages.length} pages to ${outputDir}`);
  } else {
  const sections = [];
  let firstTitle = "";
@@ -1008,14 +1143,14 @@ ${md}`);
  title: firstTitle,
  platform: firstPlatform
  });
- if (!silent) consola.success(`Written to ${outputPath}`);
+ if (!silent) consola2.success(`Written to ${outputPath}`);
  }
  } else {
- if (!silent) consola.start(`Fetching ${url}...`);
+ if (!silent) consola2.start(`Fetching ${url}...`);
  let html = await fetchPage(url);
  const { content, title, platform } = extract(html, url);
  if (content.trim().length < 200) {
- if (!silent) consola.warn("Content looks thin, retrying with browser...");
+ if (!silent) consola2.warn("Content looks thin, retrying with browser...");
  try {
  html = await fetchWithBrowser(url);
  const result = extract(html, url);
@@ -1025,26 +1160,26 @@ ${md}`);
  title: result.title || title,
  platform: result.platform
  });
- if (!silent) consola.success(`Written to ${outputPath}`);
+ if (!silent) consola2.success(`Written to ${outputPath}`);
  return;
  } catch (err) {
  if (err?.code === "ERR_PLAYWRIGHT_NOT_INSTALLED") {
- consola.warn(
+ consola2.warn(
  "This page may require a browser to render. Install Playwright:\n npm install -D playwright && npx playwright install chromium"
  );
  } else {
- consola.warn("Browser fallback failed, using static content.");
+ consola2.warn("Browser fallback failed, using static content.");
  }
  }
  }
- if (!silent) consola.success(`Extracted content (platform: ${platform})`);
+ if (!silent) consola2.success(`Extracted content (platform: ${platform})`);
  const markdown = transform(content);
  write(markdown, outputPath, {
  sourceUrl: url,
  title,
  platform
  });
- if (!silent) consola.success(`Written to ${outputPath}`);
+ if (!silent) consola2.success(`Written to ${outputPath}`);
  }
  }
  });
@@ -1052,7 +1187,7 @@ ${md}`);
  // src/commands/add.ts
  import { defineCommand as defineCommand2 } from "citty";
  import { join as join4 } from "path";
- import consola2 from "consola";
+ import consola3 from "consola";
 
  // src/config/manager.ts
  import { readFileSync as readFileSync2, writeFileSync as writeFileSync3, existsSync } from "fs";
@@ -1167,8 +1302,8 @@ var addCommand = defineCommand2({
  }
  addSource(config, { name, url, crawl: shouldCrawl, maxDepth, output });
  saveConfig(config, configPath);
- consola2.success(`Added source "${name}" \u2192 ${url}`);
- consola2.info(`Config: ${configPath}`);
+ consola3.success(`Added source "${name}" \u2192 ${url}`);
+ consola3.info(`Config: ${configPath}`);
  }
  });
 
@@ -1176,8 +1311,8 @@ var addCommand = defineCommand2({
  import { defineCommand as defineCommand3 } from "citty";
  import { join as join5, dirname as dirname5 } from "path";
  import { mkdirSync as mkdirSync3 } from "fs";
- import * as cheerio5 from "cheerio";
- import consola3 from "consola";
+ import * as cheerio6 from "cheerio";
+ import consola4 from "consola";
  var updateCommand = defineCommand3({
  meta: {
  name: "update",
@@ -1192,7 +1327,7 @@ var updateCommand = defineCommand3({
  async run({ args }) {
  const result = loadConfig();
  if (!result) {
- consola3.error("No .docs2ai.yaml found. Run `docs2ai add <url>` first.");
+ consola4.error("No .docs2ai.yaml found. Run `docs2ai add <url>` first.");
  process.exit(1);
  }
  const { config, configPath } = result;
@@ -1201,18 +1336,18 @@ var updateCommand = defineCommand3({
  const sources = filterName ? config.sources.filter((s) => s.name === filterName) : config.sources;
  if (sources.length === 0) {
  if (filterName) {
- consola3.error(`Source "${filterName}" not found in config.`);
+ consola4.error(`Source "${filterName}" not found in config.`);
  } else {
- consola3.error("No sources configured.");
+ consola4.error("No sources configured.");
  }
  process.exit(1);
  }
  for (const source of sources) {
  const isDirectoryOutput = !source.output.endsWith(".md");
- consola3.start(`Updating "${source.name}" from ${source.url}...`);
+ consola4.start(`Updating "${source.name}" from ${source.url}...`);
  if (source.crawl) {
  const firstHtml = await fetchPage(source.url);
- const $ = cheerio5.load(firstHtml);
+ const $ = cheerio6.load(firstHtml);
  const platformId = resolve(source.url, $);
  const strategy = getStrategy(platformId);
  const crawlResult = await crawl(source.url, {
@@ -1220,7 +1355,7 @@ var updateCommand = defineCommand3({
  navLinkSelector: strategy.navLinkSelector(),
  discoverUrls: strategy.discoverUrls?.bind(strategy),
  onPageFetched: (url, current, total) => {
- consola3.info(` [${current}/${total}] ${url}`);
+ consola4.info(` [${current}/${total}] ${url}`);
  }
  });
  const { pages, effectivePrefix } = crawlResult;
@@ -1233,20 +1368,26 @@ var updateCommand = defineCommand3({
  });
  const firstPlatform = pageEntries[0]?.platform || "generic";
  const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);
+ const siteMeta = extractSiteMeta(firstHtml, source.url);
  const sourceManifest = buildSourceManifest(
  source.name,
  source.url,
  firstPlatform,
- manifestPages
+ manifestPages,
+ siteMeta
  );
  writeSourceManifest(sourceManifest, outputDir);
  const rootDir = join5(configDir, config.outputDir);
  updateRootManifest(rootDir, {
  name: source.name,
  path: source.output,
- fetched_at: sourceManifest.fetched_at
+ fetched_at: sourceManifest.fetched_at,
+ display_name: siteMeta.displayName,
+ description: siteMeta.description,
+ icon_url: siteMeta.iconUrl,
+ page_count: manifestPages.length
  });
- consola3.success(`Updated "${source.name}" \u2192 ${outputDir} (${pages.length} pages)`);
+ consola4.success(`Updated "${source.name}" \u2192 ${outputDir} (${pages.length} pages)`);
  } else {
  const outputPath = join5(configDir, config.outputDir, source.output);
  mkdirSync3(dirname5(outputPath), { recursive: true });
@@ -1272,7 +1413,7 @@ ${md}`);
  title: firstTitle,
  platform: firstPlatform
  });
- consola3.success(`Updated "${source.name}" \u2192 ${outputPath}`);
+ consola4.success(`Updated "${source.name}" \u2192 ${outputPath}`);
  }
  } else {
  const outputPath = join5(configDir, config.outputDir, source.output);
@@ -1285,7 +1426,7 @@ ${md}`);
  title,
  platform
  });
- consola3.success(`Updated "${source.name}" \u2192 ${outputPath}`);
+ consola4.success(`Updated "${source.name}" \u2192 ${outputPath}`);
  }
  }
  }
@@ -1293,7 +1434,7 @@ ${md}`);
 
  // src/commands/list.ts
  import { defineCommand as defineCommand4 } from "citty";
- import consola4 from "consola";
+ import consola5 from "consola";
  var listCommand = defineCommand4({
  meta: {
  name: "list",
@@ -1302,15 +1443,15 @@ var listCommand = defineCommand4({
  run() {
  const result = loadConfig();
  if (!result) {
- consola4.info("No .docs2ai.yaml found. Run `docs2ai add <url>` to get started.");
+ consola5.info("No .docs2ai.yaml found. Run `docs2ai add <url>` to get started.");
  return;
  }
  const { config, configPath } = result;
- consola4.info(`Config: ${configPath}`);
- consola4.info(`Output dir: ${config.outputDir}
+ consola5.info(`Config: ${configPath}`);
+ consola5.info(`Output dir: ${config.outputDir}
  `);
  if (config.sources.length === 0) {
- consola4.info("No sources configured.");
+ consola5.info("No sources configured.");
  return;
  }
  for (const source of config.sources) {
@@ -1350,6 +1491,22 @@ var serveCommand = defineCommand5({
  });
 
  // src/cli.ts
+ process.on("uncaughtException", (err) => {
+ if (err.code === "ERR_PLAYWRIGHT_NOT_INSTALLED") {
+ consola6.error(err.message);
+ } else {
+ consola6.error(err.message || err);
+ }
+ process.exit(1);
+ });
+ process.on("unhandledRejection", (err) => {
+ if (err?.code === "ERR_PLAYWRIGHT_NOT_INSTALLED") {
+ consola6.error(err.message);
+ } else {
+ consola6.error(err?.message || err);
+ }
+ process.exit(1);
+ });
  var subCommands = {
  add: addCommand,
  update: updateCommand,
package/dist/cli.mjs.map CHANGED
@@ -1 +1 @@
- (source map for dist/cli.mjs: a single minified JSON line listing the src/ TypeScript sources with embedded sourcesContent; the regenerated map is truncated in this diff and omitted here)
*/\nexport interface SourceManifest {\n name: string;\n url: string;\n platform: string;\n fetched_at: string;\n pages: { title: string; path: string }[];\n}\n\n/** Root manifest listing all sources (written as manifest.json). */\nexport interface RootManifest {\n sources: { name: string; path: string; fetched_at: string }[];\n}\n\n/**\n * Build a source manifest object.\n */\nexport function buildSourceManifest(\n name: string,\n url: string,\n platform: string,\n pages: { title: string; path: string }[]\n): SourceManifest {\n return {\n name,\n url,\n platform,\n fetched_at: new Date().toISOString(),\n pages,\n };\n}\n\n/**\n * Write a source manifest (_index.json) to a directory.\n */\nexport function writeSourceManifest(\n manifest: SourceManifest,\n outputDir: string\n): void {\n mkdirSync(outputDir, { recursive: true });\n writeFileSync(\n join(outputDir, \"_index.json\"),\n JSON.stringify(manifest, null, 2) + \"\\n\",\n \"utf-8\"\n );\n}\n\n/**\n * Load the root manifest (manifest.json) from a directory.\n * Returns an empty manifest if the file doesn't exist.\n */\nexport function loadRootManifest(rootDir: string): RootManifest {\n try {\n const raw = readFileSync(join(rootDir, \"manifest.json\"), \"utf-8\");\n return JSON.parse(raw) as RootManifest;\n } catch {\n return { sources: [] };\n }\n}\n\n/**\n * Upsert a source entry in the root manifest and write it to disk.\n */\nexport function updateRootManifest(\n rootDir: string,\n entry: { name: string; path: string; fetched_at: string }\n): void {\n const manifest = loadRootManifest(rootDir);\n const idx = manifest.sources.findIndex((s) => s.name === entry.name);\n if (idx >= 0) {\n manifest.sources[idx] = entry;\n } else {\n manifest.sources.push(entry);\n }\n mkdirSync(rootDir, { recursive: true });\n writeFileSync(\n join(rootDir, \"manifest.json\"),\n JSON.stringify(manifest, null, 2) + \"\\n\",\n \"utf-8\"\n );\n}\n","import { defineCommand } from \"citty\";\nimport { join } from \"node:path\";\nimport consola from \"consola\";\nimport { loadConfig, saveConfig, addSource } from \"../config/manager\";\nimport type { Docs2aiConfig } from \"../config/schema\";\nimport { slugFromUrl } from \"../utils/url\";\n\nexport const addCommand = defineCommand({\n meta: {\n name: \"add\",\n description: \"Add a documentation source to .docs2ai.yaml\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to add\",\n required: true,\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Enable crawl mode for this source\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output filename or directory\",\n },\n },\n run({ args }) {\n const url = args.url as string;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n\n const name = (args.name as string) || slugFromUrl(url);\n const output =\n (args.output as string) || (shouldCrawl ? 
`${name}/` : `${name}.md`);\n\n const existing = loadConfig();\n let config: Docs2aiConfig;\n let configPath: string;\n\n if (existing) {\n config = existing.config;\n configPath = existing.configPath;\n } else {\n configPath = join(process.cwd(), \".docs2ai.yaml\");\n config = { version: 1, outputDir: \".ai/docs\", sources: [] };\n }\n\n addSource(config, { name, url, crawl: shouldCrawl, maxDepth, output });\n saveConfig(config, configPath);\n\n consola.success(`Added source \"${name}\" → ${url}`);\n consola.info(`Config: ${configPath}`);\n },\n});\n","import { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport yaml from \"js-yaml\";\nimport type { Docs2aiConfig, SourceConfig } from \"./schema\";\n\nconst CONFIG_FILENAME = \".docs2ai.yaml\";\n\n/**\n * Load the .docs2ai.yaml config file, searching up from cwd.\n * Returns null if no config file is found.\n */\nexport function loadConfig(startDir?: string): {\n config: Docs2aiConfig;\n configPath: string;\n} | null {\n const configPath = findConfigFile(startDir || process.cwd());\n if (!configPath) return null;\n\n const raw = readFileSync(configPath, \"utf-8\");\n const data = yaml.load(raw) as Record<string, any>;\n\n const config: Docs2aiConfig = {\n version: data.version ?? 1,\n outputDir: data.output_dir ?? \".ai/docs\",\n sources: (data.sources ?? []).map(snakeToCamelSource),\n };\n\n return { config, configPath };\n}\n\n/**\n * Save configuration to a .docs2ai.yaml file.\n */\nexport function saveConfig(config: Docs2aiConfig, configPath: string): void {\n const data = {\n version: config.version,\n output_dir: config.outputDir,\n sources: config.sources.map(camelToSnakeSource),\n };\n\n const content = yaml.dump(data, { lineWidth: -1 });\n writeFileSync(configPath, content, \"utf-8\");\n}\n\n/**\n * Add or update a source in the config (upsert by name).\n */\nexport function addSource(config: Docs2aiConfig, source: SourceConfig): void {\n const idx = config.sources.findIndex((s) => s.name === source.name);\n if (idx >= 0) {\n config.sources[idx] = source;\n } else {\n config.sources.push(source);\n }\n}\n\n/**\n * Walk up the directory tree looking for .docs2ai.yaml.\n */\nfunction findConfigFile(startDir: string): string | null {\n let dir = startDir;\n while (true) {\n const candidate = join(dir, CONFIG_FILENAME);\n if (existsSync(candidate)) return candidate;\n const parent = dirname(dir);\n if (parent === dir) return null;\n dir = parent;\n }\n}\n\nfunction snakeToCamelSource(s: Record<string, any>): SourceConfig {\n return {\n name: s.name ?? \"\",\n url: s.url ?? \"\",\n crawl: s.crawl ?? false,\n maxDepth: s.max_depth ?? 2,\n output: s.output ?? 
\"\",\n };\n}\n\nfunction camelToSnakeSource(\n s: SourceConfig\n): Record<string, string | number | boolean> {\n return {\n name: s.name,\n url: s.url,\n crawl: s.crawl,\n max_depth: s.maxDepth,\n output: s.output,\n };\n}\n","import { defineCommand } from \"citty\";\nimport { join, dirname } from \"node:path\";\nimport { mkdirSync } from \"node:fs\";\nimport * as cheerio from \"cheerio\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write, writePages } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport {\n buildSourceManifest,\n writeSourceManifest,\n updateRootManifest,\n} from \"../pipeline/manifest\";\n\nexport const updateCommand = defineCommand({\n meta: {\n name: \"update\",\n description: \"Refresh configured documentation sources\",\n },\n args: {\n name: {\n type: \"string\",\n description: \"Update only the named source\",\n },\n },\n async run({ args }) {\n const result = loadConfig();\n if (!result) {\n consola.error(\"No .docs2ai.yaml found. Run `docs2ai add <url>` first.\");\n process.exit(1);\n }\n\n const { config, configPath } = result;\n const configDir = dirname(configPath);\n const filterName = args.name as string | undefined;\n\n const sources = filterName\n ? config.sources.filter((s) => s.name === filterName)\n : config.sources;\n\n if (sources.length === 0) {\n if (filterName) {\n consola.error(`Source \"${filterName}\" not found in config.`);\n } else {\n consola.error(\"No sources configured.\");\n }\n process.exit(1);\n }\n\n for (const source of sources) {\n const isDirectoryOutput = !source.output.endsWith(\".md\");\n\n consola.start(`Updating \"${source.name}\" from ${source.url}...`);\n\n if (source.crawl) {\n // Fetch first page to resolve platform and get link discovery\n const firstHtml = await fetchPage(source.url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(source.url, $);\n const strategy = getStrategy(platformId);\n\n const crawlResult = await crawl(source.url, {\n maxDepth: source.maxDepth,\n navLinkSelector: strategy.navLinkSelector(),\n discoverUrls: strategy.discoverUrls?.bind(strategy),\n onPageFetched: (url, current, total) => {\n consola.info(` [${current}/${total}] ${url}`);\n },\n });\n\n const { pages, effectivePrefix } = crawlResult;\n\n if (isDirectoryOutput) {\n // Directory mode: one .md file per page + manifests\n const outputDir = join(configDir, config.outputDir, source.output);\n const pageEntries = pages.map((page) => {\n const { content, title, platform } = extract(page.html, page.url);\n const md = transform(content);\n return { url: page.url, title, platform, markdown: md };\n });\n\n const firstPlatform = pageEntries[0]?.platform || \"generic\";\n const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);\n\n const sourceManifest = buildSourceManifest(\n source.name,\n source.url,\n firstPlatform,\n manifestPages\n );\n writeSourceManifest(sourceManifest, outputDir);\n\n const rootDir = join(configDir, config.outputDir);\n updateRootManifest(rootDir, {\n name: source.name,\n path: source.output,\n fetched_at: sourceManifest.fetched_at,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputDir} (${pages.length} pages)`);\n } else {\n // 
Single-file mode: stitch all pages together\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n } else {\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n const html = await fetchPage(source.url);\n const { content, title, platform } = extract(html, source.url);\n const markdown = transform(content);\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title,\n platform,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\n\nexport const listCommand = defineCommand({\n meta: {\n name: \"list\",\n description: \"List configured documentation sources\",\n },\n run() {\n const result = loadConfig();\n if (!result) {\n consola.info(\"No .docs2ai.yaml found. Run `docs2ai add <url>` to get started.\");\n return;\n }\n\n const { config, configPath } = result;\n consola.info(`Config: ${configPath}`);\n consola.info(`Output dir: ${config.outputDir}\\n`);\n\n if (config.sources.length === 0) {\n consola.info(\"No sources configured.\");\n return;\n }\n\n for (const source of config.sources) {\n const crawlInfo = source.crawl\n ? 
` (crawl, depth: ${source.maxDepth})`\n : \"\";\n console.log(` ${source.name}${crawlInfo}`);\n console.log(` URL: ${source.url}`);\n console.log(` Output: ${source.output}`);\n console.log();\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport { resolve } from \"node:path\";\n\nexport const serveCommand = defineCommand({\n meta: {\n name: \"serve\",\n description: \"Start an MCP server exposing documentation tools\",\n },\n args: {\n dir: {\n type: \"string\",\n alias: \"d\",\n description: \"Documentation directory to serve\",\n default: \".ai/docs/\",\n },\n },\n async run({ args }) {\n const docsDir = resolve(process.cwd(), args.dir as string);\n const { createMcpServer } = await import(\"../mcp/server\");\n const { StdioServerTransport } = await import(\n \"@modelcontextprotocol/sdk/server/stdio.js\"\n );\n const server = createMcpServer(docsDir);\n const transport = new StdioServerTransport();\n await server.connect(transport);\n },\n});\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,gBAAAA,qBAAoB;AAC7B,SAAS,QAAAC,aAAY;AACrB,OAAOC,aAAY;AA6CZ,SAAS,SAAS,SAA6B;AACpD,QAAM,UAA0B,CAAC;AACjC,QAAM,QAAsB,CAAC;AAE7B,MAAI;AACJ,MAAI;AACF,UAAM,MAAMF,cAAaC,MAAK,SAAS,eAAe,GAAG,OAAO;AAChE,mBAAe,KAAK,MAAM,GAAG;AAAA,EAC/B,QAAQ;AACN,WAAO,EAAE,SAAS,CAAC,GAAG,OAAO,CAAC,EAAE;AAAA,EAClC;AAEA,aAAW,eAAe,aAAa,SAAS;AAC9C,UAAM,YAAYA,MAAK,SAAS,YAAY,IAAI;AAChD,QAAI;AACJ,QAAI;AACF,YAAM,MAAMD,cAAaC,MAAK,WAAW,aAAa,GAAG,OAAO;AAChE,uBAAiB,KAAK,MAAM,GAAG;AAAA,IACjC,QAAQ;AACN;AAAA,IACF;AAEA,QAAI,YAAY;AAEhB,eAAW,aAAa,eAAe,OAAO;AAC5C,UAAI;AACF,cAAM,MAAMD,cAAaC,MAAK,WAAW,UAAU,IAAI,GAAG,OAAO;AACjE,cAAM,SAASC,QAAO,GAAG;AACzB,cAAM,KAAK;AAAA,UACT,QAAQ,eAAe;AAAA,UACvB,MAAM,UAAU;AAAA,UAChB,OAAO,UAAU;AAAA,UACjB,KAAK,OAAO,OAAO,KAAK,UAAU,EAAE;AAAA,UACpC,UAAU,OAAO,OAAO,KAAK,YAAY,eAAe,QAAQ;AAAA,UAChE,WAAW,OAAO,OAAO,KAAK,cAAc,eAAe,UAAU;AAAA,UACrE,SAAS,OAAO,QAAQ,KAAK;AAAA,QAC/B,CAAC;AACD;AAAA,MACF,QAAQ;AACN;AAAA,MACF;AAAA,IACF;AAEA,YAAQ,KAAK;AAAA,MACX,MAAM,eAAe;AAAA,MACrB,KAAK,eAAe;AAAA,MACpB,UAAU,eAAe;AAAA,MACzB,WAAW,eAAe;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAEA,SAAO,EAAE,SAAS,MAAM;AAC1B;AApGA;AAAA;AAAA;AAAA;AAAA;;;ACAA,OAAO,gBAAgB;AA2BhB,SAAS,iBAAiB,OAAkC;AACjE,QAAM,aAAa,IAAI,WAAuB;AAAA,IAC5C,QAAQ,CAAC,SAAS,SAAS;AAAA,IAC3B,aAAa,CAAC,UAAU,QAAQ,SAAS,KAAK;AAAA,IAC9C,SAAS;AAAA,EACX,CAAC;AAED,QAAM,YAAY,MAAM,IAAI,CAAC,MAAM,OAAO;AAAA,IACxC,IAAI,OAAO,CAAC;AAAA,IACZ,GAAG;AAAA,EACL,EAAE;AAEF,aAAW,OAAO,SAAS;AAE3B,SAAO;AAAA,IACL,OAAO,OAAe,SAAyC;AAC7D,UAAI,CAAC,MAAM,KAAK,EAAG,QAAO,CAAC;AAE3B,YAAM,SAAS,SAAS,SACpB,CAAC,WAA+B,OAAO,WAAW,QAAQ,SAC1D;AAEJ,YAAM,UAAU,WAAW,OAAO,OAAO;AAAA,QACvC,QAAQ;AAAA,QACR,OAAO;AAAA,QACP;AAAA,MACF,CAAC;AAED,YAAM,QAAQ,SAAS,SAAS;AAEhC,aAAO,QAAQ,MAAM,GAAG,KAAK,EAAE,IAAI,CAAC,OAAO;AAAA,QACzC,QAAQ,EAAE;AAAA,QACV,MAAM,EAAE;AAAA,QACR,OAAO,EAAE;AAAA,QACT,KAAK,EAAE;AAAA,QACP,OAAO,EAAE;AAAA,MACX,EAAE;AAAA,IACJ;AAAA,EACF;AACF;AAlEA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA,SAAS,iBAAiB;AAC1B,SAAS,SAAS;AAQX,SAAS,gBAAgB,SAA4B;AAC1D,QAAM,OAAO,SAAS,OAAO;AAC7B,QAAM,cAAc,iBAAiB,KAAK,KAAK;AAE/C,QAAM,SAAS,IAAI,UAAU;AAAA,IAC3B,MAAM;AAAA,IACN,SAAS;AAAA,EACX,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,CAAC;AAAA,IACD,YAAY;AACV,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,KAAK,QAAQ,IAAI,CAAC,OAAO;AAAA,gBACvB,MAAM,EAAE;AAAA,gBACR,KAAK,EAAE;AAAA,gBACP,UAAU,EAAE;AAAA,gBACZ,WAAW,EAAE;AAAA,gBACb,WAAW,EAAE;AAAA,cACf,EAAE;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,EAAE,QAAQ,EAAE,OAAO,EAAE,SAAS,kCAAkC,EAAE;AAAA,IAClE,OAAO,EAAE,OAAO,MAAM;
AACpB,YAAM,cAAc,KAAK,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAC9D,UAAI,CAAC,aAAa;AAChB,eAAO;AAAA,UACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,0DAA0D,CAAC;AAAA,UAC5G,SAAS;AAAA,QACX;AAAA,MACF;AACA,YAAM,QAAQ,KAAK,MAChB,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EACjC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,MAAM,EAAE,KAAK,EAAE;AAChD,aAAO;AAAA,QACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,KAAK,UAAU,OAAO,MAAM,CAAC,EAAE,CAAC;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,MACE,QAAQ,EAAE,OAAO,EAAE,SAAS,kCAAkC;AAAA,MAC9D,MAAM,EAAE,OAAO,EAAE,SAAS,sDAAsD;AAAA,IAClF;AAAA,IACA,OAAO,EAAE,QAAQ,KAAK,MAAM;AAC1B,YAAM,OAAO,KAAK,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,UAAU,EAAE,SAAS,IAAI;AAC1E,UAAI,CAAC,MAAM;AACT,eAAO;AAAA,UACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,SAAS,IAAI,0BAA0B,MAAM,4CAA4C,CAAC;AAAA,UAC1H,SAAS;AAAA,QACX;AAAA,MACF;AACA,aAAO;AAAA,QACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,KAAK,QAAQ,CAAC;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,MACE,OAAO,EAAE,OAAO,EAAE,SAAS,cAAc;AAAA,MACzC,QAAQ,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,qCAAqC;AAAA,MAC5E,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,wCAAwC;AAAA,IAChF;AAAA,IACA,OAAO,EAAE,OAAO,QAAQ,MAAM,MAAM;AAClC,UAAI,CAAC,MAAM,KAAK,GAAG;AACjB,eAAO;AAAA,UACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,gCAAgC,CAAC;AAAA,UACjE,SAAS;AAAA,QACX;AAAA,MACF;AACA,YAAM,UAAU,YAAY,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,QAAQ,IAAI,CAAC,OAAO;AAAA,gBAClB,QAAQ,EAAE;AAAA,gBACV,MAAM,EAAE;AAAA,gBACR,OAAO,EAAE;AAAA,gBACT,OAAO,EAAE;AAAA,cACX,EAAE;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AA3HA;AAAA;AAAA;AAEA;AACA;AAAA;AAAA;;;ACHA,SAAS,iBAAAC,gBAAe,SAAS,kBAAkB;;;ACAnD,SAAS,qBAAqB;AAC9B,SAAS,WAAAC,gBAAe;AACxB,OAAO,aAAa;AACpB,YAAYC,cAAa;;;ACHzB,SAAS,cAAc;AAMvB,eAAsB,UAAU,KAA8B;AAC5D,QAAM,OAAO,MAAM,OAAO,KAAK,EAAE,cAAc,OAAO,CAAC;AACvD,SAAO;AACT;AAMA,eAAsB,iBAAiB,KAA8B;AACnE,MAAI;AACJ,MAAI;AACF,iBAAa,MAAM,OAAO,YAAY;AAAA,EACxC,QAAQ;AACN,UAAM,MAAM,IAAI;AAAA,MACd;AAAA,IACF;AACA,IAAC,IAAY,OAAO;AACpB,UAAM;AAAA,EACR;AAEA,QAAM,UAAU,MAAM,WAAW,SAAS,OAAO,EAAE,UAAU,KAAK,CAAC;AACnE,MAAI;AACF,UAAM,OAAO,MAAM,QAAQ,QAAQ;AACnC,UAAM,KAAK,KAAK,KAAK,EAAE,WAAW,cAAc,CAAC;AAEjD,UAAM,KAAK,eAAe,GAAI;AAC9B,UAAM,OAAO,MAAM,KAAK,QAAQ;AAChC,WAAO;AAAA,EACT,UAAE;AACA,UAAM,QAAQ,MAAM;AAAA,EACtB;AACF;;;ACtCA,YAAYC,cAAa;AACzB,SAAS,mBAAmB;AAC5B,SAAS,iBAAiB;;;ACF1B,YAAY,aAAa;AAIlB,IAAM,WAA6B;AAAA,EACxC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,6CAA6C,EAAE,SAAS,EAAG,QAAO;AACxE,QAAI,EAAE,yBAAyB,EAAE,SAAS,EAAG,QAAO;AACpD,QAAI,EAAE,iBAAiB,EAAE,SAAS,EAAG,QAAO;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AAAA,EAEA,aAAa,MAAc,SAA2B;AACpD,UAAM,IAAY,aAAK,IAAI;AAC3B,UAAM,QAAQ,oBAAI,IAAY;AAI9B,MAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,YAAM,OAAO,EAAE,EAAE,EAAE,KAAK,KAAK;AAE7B,YAAM,UAAU;AAChB,UAAI,QAAQ,QAAQ,KAAK,IAAI;AAC7B,aAAO,UAAU,MAAM;AACrB,cAAM,IAAI,MAAM,CAAC,CAAC;AAClB,gBAAQ,QAAQ,KAAK,IAAI;AAAA,MAC3B;AAAA,IACF,CAAC;AAGD,UAAM,SAAS,IAAI,IAAI,OAAO,EAAE;AAEhC,UAAM,WAAW,IAAI,IAAI,OAAO,EAAE,SAAS,MAAM,GAAG,EAAE,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAE1E,WAAO,CAAC,GAAG,KAAK,EAAE,IAAI,CAAC,MAAM;AAC3B,UAAI,EAAE,WAAW,QAAQ,GAAG;AAC1B,eAAO,SAAS;AAAA,MAClB;AACA,aAAO,SAAS,WAAW;AAAA,IAC7B,CAAC;AAAA,EACH;AACF;;;AC/DO,IAAM,aAA+B;AAAA,EAC1C,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,+CAA+C,EAAE,SAAS;AAC9D,aAAO;AACT,QAAI,EAAE,8BAA8B,EAAE,SAAS,EAAG,QAAO;AACzD,QAAI,EAAE
,gCAAgC,EAAE,SAAS,EAAG,QAAO;AAC3D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;AChCO,IAAM,SAA2B;AAAA,EACtC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,eAAe;AACnB,MAAE,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO;AAC3B,YAAM,MAAM,EAAE,EAAE,EAAE,KAAK,OAAO,KAAK;AACnC,UAAI,QAAQ,KAAK,GAAG,EAAG;AAAA,IACzB,CAAC;AACD,QAAI,eAAe,EAAG,QAAO;AAC7B,QAAI,EAAE,aAAa,EAAE,SAAS,EAAG,QAAO;AACxC,QAAI,EAAE,cAAc,EAAE,SAAS,EAAG,QAAO;AACzC,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,4CAA4C,EAAE,SAAS,EAAG,QAAO;AACvE,QAAI;AACF,YAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,UAAI,OAAO,SAAS,SAAS,aAAa,EAAG,QAAO;AAAA,IACtD,QAAQ;AAAA,IAER;AACA,QAAI,EAAE,oCAAoC,EAAE,SAAS,EAAG,QAAO;AAC/D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,MAAc,IAAyB;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACxBO,IAAM,qBAAyC;AAAA,EACpD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAGO,SAAS,YAAY,IAAkC;AAC5D,QAAM,WAAW,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3D,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,qBAAqB,EAAE,EAAE;AAAA,EAC3C;AACA,SAAO;AACT;;;ACfO,SAAS,QAAQ,KAAa,GAA2B;AAC9D,aAAW,YAAY,oBAAoB;AACzC,QAAI,SAAS,OAAO,KAAK,CAAC,GAAG;AAC3B,aAAO,SAAS;AAAA,IAClB;AAAA,EACF;AACA,SAAO;AACT;;;APEO,SAAS,QAAQ,MAAc,KAA4B;AAChE,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,WAAW,QAAQ,KAAK,CAAC;AAC/B,QAAM,WAAW,YAAY,QAAQ;AAErC,QAAM,QAAQ,aAAa,CAAC;AAG5B,MAAI,aAAa,WAAW;AAC1B,eAAW,OAAO,SAAS,gBAAgB,GAAG;AAC5C,QAAE,GAAG,EAAE,OAAO;AAAA,IAChB;AAEA,UAAM,YAAY,EAAE,SAAS,gBAAgB,CAAC,EAAE,MAAM;AACtD,UAAM,kBAAkB,UAAU,KAAK;AAEvC,QAAI,mBAAmB,gBAAgB,KAAK,EAAE,UAAU,KAAK;AAC3D,aAAO,EAAE,SAAS,iBAAiB,OAAO,SAAS;AAAA,IACrD;AAAA,EAEF;AAGA,QAAM,EAAE,SAAS,IAAI,UAAU,IAAI;AACnC,QAAM,SAAS,IAAI,YAAY,QAAe;AAC9C,QAAM,UAAU,OAAO,MAAM;AAE7B,QAAM,UAAU,SAAS,WAAW,EAAE,MAAM,EAAE,KAAK,KAAK;AAExD,SAAO;AAAA,IACL;AAAA,IACA,OAAO,SAAS,SAAS,SAAS;AAAA,IAClC;AAAA,EACF;AACF;AAKA,SAAS,aAAa,GAA+B;AACnD,QAAM,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvC,MAAI,GAAI,QAAO;AAEf,QAAM,UAAU,EAAE,2BAA2B,EAAE,KAAK,SAAS,GAAG,KAAK;AACrE,MAAI,QAAS,QAAO;AAEpB,SAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAChC;;;AQhEA,OAAO,qBAAqB;AAC5B,SAAS,WAAW;AAKb,SAAS,UAAU,MAAsB;AAC9C,QAAM,KAAK,IAAI,gBAAgB;AAAA,IAC7B,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,kBAAkB;AAAA,EACpB,CAAC;AAED,KAAG,IAAI,GAAG;AAEV,iBAAe,EAAE;AACjB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AAEvB,SAAO,GAAG,SAAS,IAAI;AACzB;AAEA,SAAS,UAAU,MAAiD;AAClE,SAAO,KAAK,aAAa;AAC3B;AAEA,SAAS,QAAQ,MAA4B,MAAsB;AACjE,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,aAAa,IAAI,KAAK;AAAA,EACpC;AACA,SAAO;AACT;AAEA,SAAS,WAAW,MAAoC;AACtD,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,QAAQ,YAAY;AAAA,EAClC;AACA,SAAO;AACT;AAMA,SAAS,eAAe,IAA2B;AACjD,KAAG,QAAQ,YAAY;AAAA,IACrB,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,WAAW,IAAI;AAC3B,UAAI,QAAQ,QAAS,QAAO;AAC5B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UACE,8EA
A8E;AAAA,QAC5E;AAAA,MACF;AAEA,eAAO;AACT,UAAI,QAAQ,MAAM,MAAM,MAAM,QAAS,QAAO;AAC9C,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,MAAM,QAAQ,MAAM,OAAO,EAAE,YAAY;AAC/C,UAAI,OAAO;AACX,UAAI,kBAAkB,KAAK,GAAG,EAAG,QAAO;AAAA,eAC/B,eAAe,KAAK,GAAG,EAAG,QAAO;AAAA,eACjC,cAAc,KAAK,GAAG,EAAG,QAAO;AAAA,eAChC,OAAO,KAAK,GAAG,EAAG,QAAO;AAElC,YAAM,QAAQ,QAAQ,KAAK,EAAE,MAAM,IAAI;AACvC,YAAM,SAAS,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI,EAAE,EAAE,KAAK,IAAI;AACzD,aAAO;AAAA,MAAS,IAAI;AAAA,EAAO,MAAM;AAAA;AAAA;AAAA,IACnC;AAAA,EACF,CAAC;AACH;AAMA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,kBAAkB;AAAA,IAC3B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,uCAAuC,KAAK,GAAG,EAAG,QAAO;AAC7D,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,QACJ,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B;AACF,UAAI,OAAO;AACT,eAAO;AAAA,IAAO,KAAK;AAAA;AAAA,EAAS,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,MAC5C;AACA,aAAO;AAAA,EAAK,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,IAC5B;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,UAAI,WAAW,IAAI,MAAM,MAAO,QAAO;AACvC,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAI,CAAC,OAAQ,QAAO;AACpB,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,aAAO,KAAK,SAAS;AAAA,IACvB;AAAA,IACA,YAAY,UAAU,MAAM;AAC1B,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,YAAM,OAAO,OAAO,eAAe;AACnC,aAAO;AAAA,QAAW,IAAI;AAAA,EAAK,IAAI;AAAA;AAAA;AAAA,IACjC;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,QAAQ,QAAQ,MAAM,OAAO;AACnC,UAAI,CAAC,sBAAsB,KAAK,KAAK,EAAG,QAAO;AAE/C,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,4BAA4B,KAAK,GAAG,EAAG,QAAO;AAClD,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,cAAc;AACZ,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;ACzJA,SAAS,eAAe,iBAAiB;AACzC,SAAS,SAAS,YAAY;AAC9B,OAAO,YAAY;;;ACmBZ,SAAS,gBAAgB,SAAiB,YAA4B;AAC3E,QAAM,SAAS,IAAI,IAAI,OAAO;AAC9B,MAAI,WAAW,OAAO,SAAS,QAAQ,QAAQ,EAAE;AAGjD,QAAM,mBAAmB,WAAW,QAAQ,QAAQ,EAAE;AACtD,MAAI,SAAS,WAAW,gBAAgB,GAAG;AACzC,eAAW,SAAS,MAAM,iBAAiB,MAAM;AAAA,EACnD;AAGA,aAAW,SAAS,QAAQ,QAAQ,EAAE;AAEtC,MAAI,CAAC,SAAU,QAAO;AAEtB,SAAO,WAAW;AACpB;;;ADvBO,SAAS,MACd,UACA,YACA,SACM;AACN,QAAM,UAAU,OAAO,UAAU,UAAU;AAAA,IACzC,QAAQ,QAAQ;AAAA,IAChB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,iBAAiB;AAAA,EACnB,CAAC;AAED,MAAI,YAAY;AACd,cAAU,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,kBAAc,YAAY,SAAS,OAAO;AAAA,EAC5C,OAAO;AACL,YAAQ,OAAO,MAAM,OAAO;AAAA,EAC9B;AACF;AAKO,SAAS,UACd,UACA,UACA,SACM;AACN,QAAM,UAAU,OAAO,UAAU,UAAU;AAAA,IACzC,QAAQ,QAAQ;AAAA,IAChB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,iBAAiB;AAAA,EACnB,CAAC;AAED,YAAU,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAChD,gBAAc,UAAU,SAAS,OAAO;AAC1C;AAaO,SAAS,WACd,OACA,WACA,YACmC;AACnC,QAAM,YAAY,oBAAI,IAAY;AAClC,QAAM,UAA6C,CAAC;AAEpD,aAAW,QAAQ,OAAO;AACxB,QAAI,UAAU,gBAAgB,KAAK,KAAK,UAAU;AAGlD,QAAI,UAAU,IAAI,OAAO,GAAG;AAC1B,YAAM,OAAO,QAAQ,QAAQ,SAAS,EAAE;AACxC,UAAI,IAAI;AACR,aAAO,UAAU,IAAI,GAAG,IAAI,IAAI,CAAC,KAAK,EAAG;AACzC,gBAAU,GAAG,IAAI,IAAI,CAAC;AAAA,IACxB;AACA,cAAU,IAAI,OAAO;AAErB,UAAM,WAAW,KAAK,WAAW,OAAO;AACxC,cAAU,KAAK,UAAU,UAAU;AAAA,MACjC,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ,UAAU,KAAK;AAAA,IACjB,CAAC;AAED,YAAQ,KAAK,EAAE,OAAO,KAAK,OAAO,MAAM,QAAQ,CAAC;AAAA,EACnD;AAEA,SAAO;AACT;;;AEjGA,YAAYC,cAAa;;;ACelB
,SAAS,aAAa,KAAqB;AAChD,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,SAAO,OAAO;AACd,SAAO,SAAS;AAChB,SAAO,OAAO,KAAK,QAAQ,OAAO,EAAE;AACtC;AAKO,SAAS,YAAY,KAAqB;AAC/C,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,OAAO,SAAS,QAAQ,OAAO,GAAG,EAAE,QAAQ,SAAS,EAAE;AAAA,EAChE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACxBO,SAAS,eAAe,KAG7B;AACA,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,QAAM,YAAY,OAAO,SAAS,MAAM,GAAG;AAE3C,YAAU,IAAI;AACd,QAAM,aAAa,UAAU,KAAK,GAAG,IAAI;AACzC,SAAO,EAAE,QAAQ,OAAO,QAAQ,WAAW;AAC7C;AAMO,SAAS,oBACd,UACA,SACQ;AACR,QAAM,aAAa,IAAI,IAAI,QAAQ,EAAE,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AACvE,QAAM,QAAQ,CAAC,GAAG,UAAU;AAE5B,aAAW,OAAO,SAAS;AACzB,UAAM,WAAW,IAAI,IAAI,GAAG,EAAE,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AAChE,QAAI,IAAI;AACR,WAAO,IAAI,MAAM,UAAU,IAAI,SAAS,UAAU,MAAM,CAAC,MAAM,SAAS,CAAC,GAAG;AAC1E;AAAA,IACF;AACA,UAAM,SAAS;AAAA,EACjB;AAEA,SAAO,OAAO,MAAM,SAAS,IAAI,MAAM,KAAK,GAAG,IAAI,MAAM;AAC3D;AAKO,SAAS,WACd,cACA,QACA,YACS;AACT,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,YAAY;AACnC,WAAO,OAAO,WAAW,UAAU,OAAO,SAAS,WAAW,UAAU;AAAA,EAC1E,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AF1BA,eAAsB,MACpB,UACA,SACsB;AACtB,QAAM,EAAE,OAAO,IAAI,eAAe,QAAQ;AAC1C,MAAI,EAAE,WAAW,IAAI,eAAe,QAAQ;AAC5C,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,UAAyB,CAAC;AAChC,MAAI,cAAc;AAGlB,QAAM,QAA4B,CAAC,CAAC,UAAU,CAAC,CAAC;AAChD,UAAQ,IAAI,aAAa,QAAQ,CAAC;AAElC,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,CAAC,KAAK,KAAK,IAAI,MAAM,MAAM;AAEjC,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,UAAU,GAAG;AAAA,IAC5B,QAAQ;AACN,cAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAC1E;AAAA,IACF;AACA,YAAQ,KAAK,EAAE,KAAK,KAAK,CAAC;AAC1B,YAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAE1E,QAAI,QAAQ,QAAQ,UAAU;AAE5B,UAAI,aAAa;AACf,cAAM,aAAa,QAAQ,eACvB,yBAAyB,MAAM,KAAK,QAAQ,QAAQ,YAAY,IAChE,mBAAmB,MAAM,KAAK,QAAQ,QAAQ,eAAe;AACjE,YAAI,WAAW,SAAS,GAAG;AACzB,uBAAa,oBAAoB,UAAU,UAAU;AAAA,QACvD;AACA,sBAAc;AAAA,MAChB;AAEA,YAAM,QAAQ,QAAQ,eAClB,oBAAoB,MAAM,KAAK,QAAQ,YAAY,QAAQ,YAAY,IACvE,cAAc,MAAM,KAAK,QAAQ,YAAY,QAAQ,eAAe;AACxE,iBAAW,QAAQ,OAAO;AACxB,cAAM,aAAa,aAAa,IAAI;AACpC,YAAI,CAAC,QAAQ,IAAI,UAAU,GAAG;AAC5B,kBAAQ,IAAI,UAAU;AACtB,gBAAM,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC;AAAA,QAC9B;AAAA,MACF;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,MAAM,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,SAAS,iBAAiB,WAAW;AACvD;AAMA,SAAS,cACP,MACA,SACA,QACA,YACA,iBACU;AACV,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAW,mBAAmB;AAEpC,IAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,UAAM,OAAO,EAAE,EAAE,EAAE,KAAK,MAAM;AAC9B,QAAI,CAAC,KAAM;AAEX,QAAI;AACF,YAAM,WAAW,IAAI,IAAI,MAAM,OAAO,EAAE;AACxC,UAAI,WAAW,UAAU,QAAQ,UAAU,GAAG;AAC5C,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,CAAC;AAED,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAC3B;AAMA,SAAS,mBACP,MACA,SACA,QACA,iBACU;AACV,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAW,mBAAmB;AAEpC,IAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,UAAM,OAAO,EAAE,EAAE,EAAE,KAAK,MAAM;AAC9B,QAAI,CAAC,KAAM;AACX,QAAI;AACF,YAAM,WAAW,IAAI,IAAI,MAAM,OAAO,EAAE;AACxC,UAAI,IAAI,IAAI,QAAQ,EAAE,WAAW,QAAQ;AACvC,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,CAAC;AAED,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAC3B;AAKA,SAAS,yBACP,MACA,SACA,QACA,cACU;AACV,QAAM,OAAO,aAAa,MAAM,OAAO;AACvC,SAAO;AAAA,IACL,GAAG,IAAI;AAAA,MACL,KAAK,OAAO,CAAC,MAAM;AACjB,YAAI;AACF,iBAAO,IAAI,IAAI,CAAC,EAAE,WAAW;AAAA,QAC/B,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAKA,SAAS,oBACP,MACA,SACA,QACA,YACA,cACU;AACV,QAAM,OAAO,aAAa,MAAM,OAAO;AACvC,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,OAAO,CAAC,MAAM,WAAW,GAAG,QAAQ,UAAU,CAAC,CAAC,CAAC;AAC3E;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAACC,aAAY,WAAWA,UAAS,EAAE,CAAC;AACzD;;;AGhMA,SAAS,cAAc,iBAAAC,gBAAe,aAAAC,kBAAiB;AACvD,SAAS,QAAAC,
aAAqB;AAmBvB,SAAS,oBACd,MACA,KACA,UACA,OACgB;AAChB,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC;AAAA,EACF;AACF;AAKO,SAAS,oBACd,UACA,WACM;AACN,EAAAD,WAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AACxC,EAAAD;AAAA,IACEE,MAAK,WAAW,aAAa;AAAA,IAC7B,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI;AAAA,IACpC;AAAA,EACF;AACF;AAMO,SAAS,iBAAiB,SAA+B;AAC9D,MAAI;AACF,UAAM,MAAM,aAAaA,MAAK,SAAS,eAAe,GAAG,OAAO;AAChE,WAAO,KAAK,MAAM,GAAG;AAAA,EACvB,QAAQ;AACN,WAAO,EAAE,SAAS,CAAC,EAAE;AAAA,EACvB;AACF;AAKO,SAAS,mBACd,SACA,OACM;AACN,QAAM,WAAW,iBAAiB,OAAO;AACzC,QAAM,MAAM,SAAS,QAAQ,UAAU,CAAC,MAAM,EAAE,SAAS,MAAM,IAAI;AACnE,MAAI,OAAO,GAAG;AACZ,aAAS,QAAQ,GAAG,IAAI;AAAA,EAC1B,OAAO;AACL,aAAS,QAAQ,KAAK,KAAK;AAAA,EAC7B;AACA,EAAAD,WAAU,SAAS,EAAE,WAAW,KAAK,CAAC;AACtC,EAAAD;AAAA,IACEE,MAAK,SAAS,eAAe;AAAA,IAC7B,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI;AAAA,IACpC;AAAA,EACF;AACF;;;AhB7DA,SAAS,kBACP,QACA,aACA,MAC0F;AAC1F,MAAI,CAAC,aAAa;AAChB,WAAO,EAAE,MAAM,eAAe,YAAY,QAAQ,WAAW,GAAG;AAAA,EAClE;AAGA,MAAI,UAAU,OAAO,SAAS,KAAK,GAAG;AACpC,WAAO,EAAE,MAAM,eAAe,YAAY,QAAQ,WAAW,GAAG;AAAA,EAClE;AAGA,MAAI,QAAQ;AACV,UAAM,MAAM,OAAO,SAAS,GAAG,IAAI,SAAS,SAAS;AACrD,WAAO,EAAE,MAAM,aAAa,YAAY,QAAW,WAAW,IAAI;AAAA,EACpE;AAGA,SAAO,EAAE,MAAM,aAAa,YAAY,QAAW,WAAW,YAAY,IAAI,IAAI;AACpF;AAEO,IAAM,eAAe,cAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,MAAM,KAAK;AACjB,UAAM,SAAS,KAAK;AACpB,UAAM,cAAc,KAAK;AACzB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AACzD,UAAM,OAAQ,KAAK,QAAmB,YAAY,GAAG;AAErD,UAAM,EAAE,MAAM,YAAY,UAAU,IAAI,kBAAkB,QAAQ,aAAa,IAAI;AACnF,UAAM,SAAS,SAAS,iBAAiB,CAAC;AAE1C,QAAI,aAAa;AACf,UAAI,CAAC,OAAQ,SAAQ,MAAM,iBAAiB,GAAG,gBAAgB,QAAQ,MAAM;AAG7E,YAAM,YAAY,MAAM,UAAU,GAAG;AACrC,YAAM,IAAY,cAAK,SAAS;AAChC,YAAM,aAAa,QAAQ,KAAK,CAAC;AACjC,YAAM,WAAW,YAAY,UAAU;AACvC,YAAM,kBAAkB,SAAS,gBAAgB;AAEjD,YAAM,cAAc,MAAM,MAAM,KAAK;AAAA,QACnC;AAAA,QACA;AAAA,QACA,cAAc,SAAS,cAAc,KAAK,QAAQ;AAAA,QAClD,eAAe,CAAC,SAAS,SAAS,UAAU;AAC1C,cAAI,CAAC,OAAQ,SAAQ,KAAK,IAAI,OAAO,IAAI,KAAK,KAAK,OAAO,EAAE;AAAA,QAC9D;AAAA,MACF,CAAC;AAED,YAAM,EAAE,OAAO,gBAAgB,IAAI;AACnC,UAAI,CAAC,OAAQ,SAAQ,QAAQ,WAAW,MAAM,MAAM,QAAQ;AAE5D,UAAI,SAAS,aAAa;AAExB,cAAM,cAAc,MAAM,IAAI,CAAC,SAAS;AACtC,gBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,gBAAM,KAAK,UAAU,OAAO;AAC5B,iBAAO,EAAE,KAAK,KAAK,KAAK,OAAO,UAAU,UAAU,GAAG;AAAA,QACxD,CAAC;AAED,cAAM,gBAAgB,YAAY,CAAC,GAAG,YAAY;AAClD,cAAM,gBAAgB,WAAW,aAAa,WAAW,eAAe;AAExE,cAAM,iBAAiB,oBAAoB,MAAM,KAAK,eAAe,aAAa;AAClF,4BAAoB,gBAAgB,SAAS;AAG7C,cAAM,UAAUC,SAAQ,UAAU,QAAQ,OAAO,EAAE,CAAC;AACpD,2BAAmB,SAAS;AAAA,UAC1B;AAAA,UACA,MAAM,OAAO;AAAA,UACb,YAAY,eAAe;AAAA,QAC7B,CAAC;AAED,gBAAQ,QAAQ,WAAW,MAAM,MAAM,aAAa,SAAS,EAAE;AAAA,MACjE,OAAO;AAEL,cAAM,WAAqB,CAAC;AAC5B,YAAI,aAAa;AACjB,YAAI,gBAAgB;AAEpB,mBAAW,QAAQ,OAAO;AACxB,gBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,cAAI,CAAC,YAAY;AACf,yBAAa;AACb,4BAAgB;AAAA,UAClB;AACA,gBAAM,KAAK,UAAU,OAAO;AAC5B,mBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,QAC7D;AAEA,cAAM,WAAW,SAAS,KAAK,aAAa;AAE5C,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW;AAAA,UACX,OAAO;AAAA,UACP,UAAU;AAAA,QACZ,CAAC;AAED,YAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,UAAU,EAAE;AAAA,MACzD;AAAA,IACF,OAAO;AACL,UAAI,CAAC,OAAQ,SAAQ,MAAM,YAAY,GAAG,KAAK;AAC/C,UAAI,OAAO,
MAAM,UAAU,GAAG;AAE9B,YAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,GAAG;AAGtD,UAAI,QAAQ,KAAK,EAAE,SAAS,KAAK;AAC/B,YAAI,CAAC,OAAQ,SAAQ,KAAK,8CAA8C;AACxE,YAAI;AACF,iBAAO,MAAM,iBAAiB,GAAG;AACjC,gBAAM,SAAS,QAAQ,MAAM,GAAG;AAChC,gBAAMC,YAAW,UAAU,OAAO,OAAO;AACzC,gBAAMA,WAAU,YAAY;AAAA,YAC1B,WAAW;AAAA,YACX,OAAO,OAAO,SAAS;AAAA,YACvB,UAAU,OAAO;AAAA,UACnB,CAAC;AACD,cAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,UAAU,EAAE;AACvD;AAAA,QACF,SAAS,KAAU;AACjB,cAAI,KAAK,SAAS,gCAAgC;AAChD,oBAAQ;AAAA,cACN;AAAA,YAEF;AAAA,UACF,OAAO;AACL,oBAAQ,KAAK,gDAAgD;AAAA,UAC/D;AAAA,QACF;AAAA,MACF;AAEA,UAAI,CAAC,OAAQ,SAAQ,QAAQ,gCAAgC,QAAQ,GAAG;AACxE,YAAM,WAAW,UAAU,OAAO;AAElC,YAAM,UAAU,YAAY;AAAA,QAC1B,WAAW;AAAA,QACX;AAAA,QACA;AAAA,MACF,CAAC;AAED,UAAI,CAAC,OAAQ,SAAQ,QAAQ,cAAc,UAAU,EAAE;AAAA,IACzD;AAAA,EACF;AACF,CAAC;;;AiB1MD,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,QAAAC,aAAY;AACrB,OAAOC,cAAa;;;ACFpB,SAAS,gBAAAC,eAAc,iBAAAC,gBAAe,kBAAkB;AACxD,SAAS,QAAAC,OAAM,WAAAC,gBAAe;AAC9B,OAAO,UAAU;AAGjB,IAAM,kBAAkB;AAMjB,SAAS,WAAW,UAGlB;AACP,QAAM,aAAa,eAAe,YAAY,QAAQ,IAAI,CAAC;AAC3D,MAAI,CAAC,WAAY,QAAO;AAExB,QAAM,MAAMH,cAAa,YAAY,OAAO;AAC5C,QAAM,OAAO,KAAK,KAAK,GAAG;AAE1B,QAAM,SAAwB;AAAA,IAC5B,SAAS,KAAK,WAAW;AAAA,IACzB,WAAW,KAAK,cAAc;AAAA,IAC9B,UAAU,KAAK,WAAW,CAAC,GAAG,IAAI,kBAAkB;AAAA,EACtD;AAEA,SAAO,EAAE,QAAQ,WAAW;AAC9B;AAKO,SAAS,WAAW,QAAuB,YAA0B;AAC1E,QAAM,OAAO;AAAA,IACX,SAAS,OAAO;AAAA,IAChB,YAAY,OAAO;AAAA,IACnB,SAAS,OAAO,QAAQ,IAAI,kBAAkB;AAAA,EAChD;AAEA,QAAM,UAAU,KAAK,KAAK,MAAM,EAAE,WAAW,GAAG,CAAC;AACjD,EAAAC,eAAc,YAAY,SAAS,OAAO;AAC5C;AAKO,SAAS,UAAU,QAAuB,QAA4B;AAC3E,QAAM,MAAM,OAAO,QAAQ,UAAU,CAAC,MAAM,EAAE,SAAS,OAAO,IAAI;AAClE,MAAI,OAAO,GAAG;AACZ,WAAO,QAAQ,GAAG,IAAI;AAAA,EACxB,OAAO;AACL,WAAO,QAAQ,KAAK,MAAM;AAAA,EAC5B;AACF;AAKA,SAAS,eAAe,UAAiC;AACvD,MAAI,MAAM;AACV,SAAO,MAAM;AACX,UAAM,YAAYC,MAAK,KAAK,eAAe;AAC3C,QAAI,WAAW,SAAS,EAAG,QAAO;AAClC,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,SAAS,mBAAmB,GAAsC;AAChE,SAAO;AAAA,IACL,MAAM,EAAE,QAAQ;AAAA,IAChB,KAAK,EAAE,OAAO;AAAA,IACd,OAAO,EAAE,SAAS;AAAA,IAClB,UAAU,EAAE,aAAa;AAAA,IACzB,QAAQ,EAAE,UAAU;AAAA,EACtB;AACF;AAEA,SAAS,mBACP,GAC2C;AAC3C,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR,KAAK,EAAE;AAAA,IACP,OAAO,EAAE;AAAA,IACT,WAAW,EAAE;AAAA,IACb,QAAQ,EAAE;AAAA,EACZ;AACF;;;ADnFO,IAAM,aAAaC,eAAc;AAAA,EACtC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,IAAI,EAAE,KAAK,GAAG;AACZ,UAAM,MAAM,KAAK;AACjB,UAAM,cAAc,KAAK;AACzB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AAEzD,UAAM,OAAQ,KAAK,QAAmB,YAAY,GAAG;AACrD,UAAM,SACH,KAAK,WAAsB,cAAc,GAAG,IAAI,MAAM,GAAG,IAAI;AAEhE,UAAM,WAAW,WAAW;AAC5B,QAAI;AACJ,QAAI;AAEJ,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,mBAAa,SAAS;AAAA,IACxB,OAAO;AACL,mBAAaC,MAAK,QAAQ,IAAI,GAAG,eAAe;AAChD,eAAS,EAAE,SAAS,GAAG,WAAW,YAAY,SAAS,CAAC,EAAE;AAAA,IAC5D;AAEA,cAAU,QAAQ,EAAE,MAAM,KAAK,OAAO,aAAa,UAAU,OAAO,CAAC;AACrE,eAAW,QAAQ,UAAU;AAE7B,IAAAC,SAAQ,QAAQ,iBAAiB,IAAI,YAAO,GAAG,EAAE;AACjD,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AAAA,EACtC;AACF,CAAC;;;AEjED,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,QAAAC,OAAM,WAAAC,gBAAe;AAC9B,SAAS,aAAAC,kBAAiB;AAC1B,YAAYC,cAAa;AACzB,OAAOC,cAAa;AAeb,IAAM,gBAAgBC,eAAc;AAAA,EACzC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,SAAS,WAAW;A
AC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,MAAM,wDAAwD;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,UAAM,YAAYC,SAAQ,UAAU;AACpC,UAAM,aAAa,KAAK;AAExB,UAAM,UAAU,aACZ,OAAO,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,IAClD,OAAO;AAEX,QAAI,QAAQ,WAAW,GAAG;AACxB,UAAI,YAAY;AACd,QAAAD,SAAQ,MAAM,WAAW,UAAU,wBAAwB;AAAA,MAC7D,OAAO;AACL,QAAAA,SAAQ,MAAM,wBAAwB;AAAA,MACxC;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,eAAW,UAAU,SAAS;AAC5B,YAAM,oBAAoB,CAAC,OAAO,OAAO,SAAS,KAAK;AAEvD,MAAAA,SAAQ,MAAM,aAAa,OAAO,IAAI,UAAU,OAAO,GAAG,KAAK;AAE/D,UAAI,OAAO,OAAO;AAEhB,cAAM,YAAY,MAAM,UAAU,OAAO,GAAG;AAC5C,cAAM,IAAY,cAAK,SAAS;AAChC,cAAM,aAAa,QAAQ,OAAO,KAAK,CAAC;AACxC,cAAM,WAAW,YAAY,UAAU;AAEvC,cAAM,cAAc,MAAM,MAAM,OAAO,KAAK;AAAA,UAC1C,UAAU,OAAO;AAAA,UACjB,iBAAiB,SAAS,gBAAgB;AAAA,UAC1C,cAAc,SAAS,cAAc,KAAK,QAAQ;AAAA,UAClD,eAAe,CAAC,KAAK,SAAS,UAAU;AACtC,YAAAA,SAAQ,KAAK,MAAM,OAAO,IAAI,KAAK,KAAK,GAAG,EAAE;AAAA,UAC/C;AAAA,QACF,CAAC;AAED,cAAM,EAAE,OAAO,gBAAgB,IAAI;AAEnC,YAAI,mBAAmB;AAErB,gBAAM,YAAYE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AACjE,gBAAM,cAAc,MAAM,IAAI,CAAC,SAAS;AACtC,kBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,kBAAM,KAAK,UAAU,OAAO;AAC5B,mBAAO,EAAE,KAAK,KAAK,KAAK,OAAO,UAAU,UAAU,GAAG;AAAA,UACxD,CAAC;AAED,gBAAM,gBAAgB,YAAY,CAAC,GAAG,YAAY;AAClD,gBAAM,gBAAgB,WAAW,aAAa,WAAW,eAAe;AAExE,gBAAM,iBAAiB;AAAA,YACrB,OAAO;AAAA,YACP,OAAO;AAAA,YACP;AAAA,YACA;AAAA,UACF;AACA,8BAAoB,gBAAgB,SAAS;AAE7C,gBAAM,UAAUA,MAAK,WAAW,OAAO,SAAS;AAChD,6BAAmB,SAAS;AAAA,YAC1B,MAAM,OAAO;AAAA,YACb,MAAM,OAAO;AAAA,YACb,YAAY,eAAe;AAAA,UAC7B,CAAC;AAED,UAAAF,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,SAAS,KAAK,MAAM,MAAM,SAAS;AAAA,QACnF,OAAO;AAEL,gBAAM,aAAaE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AAClE,UAAAC,WAAUF,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,gBAAM,WAAqB,CAAC;AAC5B,cAAI,aAAa;AACjB,cAAI,gBAAgB;AAEpB,qBAAW,QAAQ,OAAO;AACxB,kBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,gBAAI,CAAC,YAAY;AACf,2BAAa;AACb,8BAAgB;AAAA,YAClB;AACA,kBAAM,KAAK,UAAU,OAAO;AAC5B,qBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,UAC7D;AAEA,gBAAM,WAAW,SAAS,KAAK,aAAa;AAC5C,gBAAM,UAAU,YAAY;AAAA,YAC1B,WAAW,OAAO;AAAA,YAClB,OAAO;AAAA,YACP,UAAU;AAAA,UACZ,CAAC;AAED,UAAAD,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,UAAU,EAAE;AAAA,QAC5D;AAAA,MACF,OAAO;AACL,cAAM,aAAaE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AAClE,QAAAC,WAAUF,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,cAAM,OAAO,MAAM,UAAU,OAAO,GAAG;AACvC,cAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,OAAO,GAAG;AAC7D,cAAM,WAAW,UAAU,OAAO;AAClC,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW,OAAO;AAAA,UAClB;AAAA,UACA;AAAA,QACF,CAAC;AAED,QAAAD,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,UAAU,EAAE;AAAA,MAC5D;AAAA,IACF;AAAA,EACF;AACF,CAAC;;;ACtJD,SAAS,iBAAAI,sBAAqB;AAC9B,OAAOC,cAAa;AAGb,IAAM,cAAcC,eAAc;AAAA,EACvC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AACJ,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,KAAK,iEAAiE;AAC9E;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AACpC,IAAAA,SAAQ,KAAK,eAAe,OAAO,SAAS;AAAA,CAAI;AAEhD,QAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,MAAAA,SAAQ,KAAK,wBAAwB;AACrC;AAAA,IACF;AAEA,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,YAAY,OAAO,QACrB,mBAAmB,OAAO,QAAQ,MAClC;AACJ,cAAQ,IAAI,KAAK,OAAO,IAAI,GAAG,SAAS,EAAE;AAC1C,cAAQ,IAAI,eAAe,OAAO,GAAG,EAAE;AACvC,cAAQ,IAAI,eAAe,OAAO,MAAM,EAAE;AAC1C,cAAQ,IAAI;AAAA,IACd;AAAA,EACF;AACF,CAAC;;;ACnCD,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,gBAAe;AAEjB,IAAM,eAAeD,eAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,UAAUC,SAAQ,QAAQ,IAAI,GAAG,KAAK,GAAa;A
ACzD,UAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,UAAM,EAAE,qBAAqB,IAAI,MAAM,OACrC,2CACF;AACA,UAAM,SAASA,iBAAgB,OAAO;AACtC,UAAM,YAAY,IAAI,qBAAqB;AAC3C,UAAM,OAAO,QAAQ,SAAS;AAAA,EAChC;AACF,CAAC;;;AtBnBD,IAAM,cAAmC;AAAA,EACvC,KAAK;AAAA,EACL,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,OAAO;AACT;AAGA,IAAM,WAAW,QAAQ,KAAK,CAAC;AAC/B,IAAM,eAAe,YAAY,YAAY;AAE7C,IAAI,cAAc;AAEhB,QAAM,OAAOC,eAAc;AAAA,IACzB,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,IACf;AAAA,IACA;AAAA,EACF,CAAC;AACD,UAAQ,IAAI;AACd,WAAW,YAAY,CAAC,SAAS,WAAW,GAAG,KAAK,aAAa,UAAU;AAEzE,aAAW,cAAc,EAAE,SAAS,QAAQ,KAAK,MAAM,CAAC,EAAE,CAAC;AAC7D,OAAO;AAEL,QAAM,OAAOA,eAAc;AAAA,IACzB,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,IACf;AAAA,IACA;AAAA,IACA,MAAM;AACJ,cAAQ,IAAI,+CAA+C;AAC3D,cAAQ,IAAI,kDAAkD;AAC9D,cAAQ,IAAI,qCAAqC;AACjD,cAAQ,IAAI,qBAAqB;AACjC,cAAQ,IAAI,+BAA+B;AAC3C,cAAQ,IAAI,wCAAwC;AAAA,IACtD;AAAA,EACF,CAAC;AACD,UAAQ,IAAI;AACd;","names":["readFileSync","join","matter","defineCommand","dirname","cheerio","cheerio","cheerio","resolve","writeFileSync","mkdirSync","join","dirname","markdown","defineCommand","join","consola","readFileSync","writeFileSync","join","dirname","defineCommand","join","consola","defineCommand","join","dirname","mkdirSync","cheerio","consola","defineCommand","consola","dirname","join","mkdirSync","defineCommand","consola","defineCommand","consola","defineCommand","resolve","createMcpServer","defineCommand"]}
1
+ {"version":3,"sources":["../src/mcp/loader.ts","../src/mcp/search.ts","../src/mcp/server.ts","../src/cli.ts","../src/commands/fetch.ts","../src/pipeline/fetcher.ts","../src/pipeline/extractor.ts","../src/platforms/mintlify.ts","../src/platforms/docusaurus.ts","../src/platforms/readme.ts","../src/platforms/gitbook.ts","../src/platforms/generic.ts","../src/platforms/registry.ts","../src/pipeline/resolver.ts","../src/pipeline/transformer.ts","../src/pipeline/writer.ts","../src/utils/slug.ts","../src/crawl/crawler.ts","../src/utils/url.ts","../src/crawl/boundary.ts","../src/pipeline/manifest.ts","../src/pipeline/meta-extractor.ts","../src/commands/add.ts","../src/config/manager.ts","../src/commands/update.ts","../src/commands/list.ts","../src/commands/serve.ts"],"sourcesContent":["import { readFileSync } from \"node:fs\";\nimport { join } from \"node:path\";\nimport matter from \"gray-matter\";\n\n/** A single loaded documentation page. */\nexport interface LoadedPage {\n source: string;\n path: string;\n title: string;\n url: string;\n platform: string;\n fetchedAt: string;\n content: string;\n}\n\n/** A loaded documentation source (from _index.json). */\nexport interface LoadedSource {\n name: string;\n url: string;\n platform: string;\n fetchedAt: string;\n pageCount: number;\n displayName?: string;\n description?: string;\n iconUrl?: string | null;\n}\n\n/** All loaded documentation data. */\nexport interface LoadedDocs {\n sources: LoadedSource[];\n pages: LoadedPage[];\n}\n\ninterface RootManifest {\n sources: { name: string; path: string; fetched_at: string }[];\n}\n\ninterface SourceManifest {\n name: string;\n url: string;\n platform: string;\n fetched_at: string;\n pages: { title: string; path: string }[];\n display_name?: string;\n description?: string;\n icon_url?: string | null;\n}\n\n/**\n * Load all documentation from a docs directory into memory.\n * Reads manifest.json → each source's _index.json → each page's .md file.\n * Silently skips missing files (no console output — critical for stdio transport).\n */\nexport function loadDocs(docsDir: string): LoadedDocs {\n const sources: LoadedSource[] = [];\n const pages: LoadedPage[] = [];\n\n let rootManifest: RootManifest;\n try {\n const raw = readFileSync(join(docsDir, \"manifest.json\"), \"utf-8\");\n rootManifest = JSON.parse(raw);\n } catch {\n return { sources: [], pages: [] };\n }\n\n for (const sourceEntry of rootManifest.sources) {\n const sourceDir = join(docsDir, sourceEntry.path);\n let sourceManifest: SourceManifest;\n try {\n const raw = readFileSync(join(sourceDir, \"_index.json\"), \"utf-8\");\n sourceManifest = JSON.parse(raw);\n } catch {\n continue;\n }\n\n let pageCount = 0;\n\n for (const pageEntry of sourceManifest.pages) {\n try {\n const raw = readFileSync(join(sourceDir, pageEntry.path), \"utf-8\");\n const parsed = matter(raw);\n pages.push({\n source: sourceManifest.name,\n path: pageEntry.path,\n title: pageEntry.title,\n url: String(parsed.data.source || \"\"),\n platform: String(parsed.data.platform || sourceManifest.platform),\n fetchedAt: String(parsed.data.fetched_at || sourceManifest.fetched_at),\n content: parsed.content.trim(),\n });\n pageCount++;\n } catch {\n continue;\n }\n }\n\n const loadedSource: LoadedSource = {\n name: sourceManifest.name,\n url: sourceManifest.url,\n platform: sourceManifest.platform,\n fetchedAt: sourceManifest.fetched_at,\n pageCount,\n };\n if (sourceManifest.display_name) loadedSource.displayName = sourceManifest.display_name;\n if (sourceManifest.description) 
loadedSource.description = sourceManifest.description;\n if (sourceManifest.icon_url !== undefined) loadedSource.iconUrl = sourceManifest.icon_url;\n sources.push(loadedSource);\n }\n\n return { sources, pages };\n}\n","import MiniSearch from \"minisearch\";\nimport type { LoadedPage } from \"./loader\";\n\n/** A search result with metadata (no content). */\nexport interface SearchResult {\n source: string;\n path: string;\n title: string;\n url: string;\n score: number;\n}\n\n/** Options for searching the index. */\nexport interface SearchOptions {\n source?: string;\n limit?: number;\n}\n\n/** Search index over loaded documentation pages. */\nexport interface SearchIndex {\n search(query: string, options?: SearchOptions): SearchResult[];\n}\n\n/**\n * Build a full-text search index over all loaded pages.\n * Indexes title and content fields with prefix and fuzzy matching.\n */\nexport function buildSearchIndex(pages: LoadedPage[]): SearchIndex {\n const miniSearch = new MiniSearch<LoadedPage>({\n fields: [\"title\", \"content\"],\n storeFields: [\"source\", \"path\", \"title\", \"url\"],\n idField: \"id\",\n });\n\n const documents = pages.map((page, i) => ({\n id: String(i),\n ...page,\n }));\n\n miniSearch.addAll(documents);\n\n return {\n search(query: string, options?: SearchOptions): SearchResult[] {\n if (!query.trim()) return [];\n\n const filter = options?.source\n ? (result: { source: string }) => result.source === options.source\n : undefined;\n\n const results = miniSearch.search(query, {\n prefix: true,\n fuzzy: 0.2,\n filter: filter as any,\n });\n\n const limit = options?.limit ?? 10;\n\n return results.slice(0, limit).map((r) => ({\n source: r.source as string,\n path: r.path as string,\n title: r.title as string,\n url: r.url as string,\n score: r.score,\n }));\n },\n };\n}\n","import { McpServer } from \"@modelcontextprotocol/sdk/server/mcp.js\";\nimport { z } from \"zod\";\nimport { loadDocs } from \"./loader\";\nimport { buildSearchIndex } from \"./search\";\n\n/**\n * Create an MCP server that exposes documentation tools.\n * Eagerly loads all docs and builds a search index at creation time.\n */\nexport function createMcpServer(docsDir: string): McpServer {\n const docs = loadDocs(docsDir);\n const searchIndex = buildSearchIndex(docs.pages);\n\n const server = new McpServer({\n name: \"doc2ctx\",\n version: \"0.1.0\",\n });\n\n server.tool(\n \"list_sources\",\n \"List all documentation sources available in the docs directory\",\n {},\n async () => {\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify(\n docs.sources.map((s) => ({\n name: s.name,\n url: s.url,\n platform: s.platform,\n fetchedAt: s.fetchedAt,\n pageCount: s.pageCount,\n ...(s.displayName && { displayName: s.displayName }),\n ...(s.description && { description: s.description }),\n ...(s.iconUrl !== undefined && { iconUrl: s.iconUrl }),\n })),\n null,\n 2\n ),\n },\n ],\n };\n }\n );\n\n server.tool(\n \"list_pages\",\n \"List all pages for a specific documentation source\",\n { source: z.string().describe(\"Name of the documentation source\") },\n async ({ source }) => {\n const sourceEntry = docs.sources.find((s) => s.name === source);\n if (!sourceEntry) {\n return {\n content: [{ type: \"text\", text: `Source \"${source}\" not found. 
Use list_sources to see available sources.` }],\n isError: true,\n };\n }\n const pages = docs.pages\n .filter((p) => p.source === source)\n .map((p) => ({ title: p.title, path: p.path }));\n return {\n content: [{ type: \"text\", text: JSON.stringify(pages, null, 2) }],\n };\n }\n );\n\n server.tool(\n \"read_page\",\n \"Read the full markdown content of a documentation page\",\n {\n source: z.string().describe(\"Name of the documentation source\"),\n path: z.string().describe(\"Path of the page within the source (from list_pages)\"),\n },\n async ({ source, path }) => {\n const page = docs.pages.find((p) => p.source === source && p.path === path);\n if (!page) {\n return {\n content: [{ type: \"text\", text: `Page \"${path}\" not found in source \"${source}\". Use list_pages to see available pages.` }],\n isError: true,\n };\n }\n return {\n content: [{ type: \"text\", text: page.content }],\n };\n }\n );\n\n server.tool(\n \"search_docs\",\n \"Search across all documentation pages by keyword\",\n {\n query: z.string().describe(\"Search query\"),\n source: z.string().optional().describe(\"Filter results to a specific source\"),\n limit: z.number().optional().describe(\"Maximum number of results (default 10)\"),\n },\n async ({ query, source, limit }) => {\n if (!query.trim()) {\n return {\n content: [{ type: \"text\", text: \"Search query cannot be empty.\" }],\n isError: true,\n };\n }\n const results = searchIndex.search(query, { source, limit });\n return {\n content: [\n {\n type: \"text\",\n text: JSON.stringify(\n results.map((r) => ({\n source: r.source,\n path: r.path,\n title: r.title,\n score: r.score,\n })),\n null,\n 2\n ),\n },\n ],\n };\n }\n );\n\n return server;\n}\n","import { defineCommand, runMain, runCommand } from \"citty\";\nimport consola from \"consola\";\nimport { fetchCommand } from \"./commands/fetch\";\nimport { addCommand } from \"./commands/add\";\nimport { updateCommand } from \"./commands/update\";\nimport { listCommand } from \"./commands/list\";\nimport { serveCommand } from \"./commands/serve\";\n\nprocess.on(\"uncaughtException\", (err: any) => {\n if (err.code === \"ERR_PLAYWRIGHT_NOT_INSTALLED\") {\n consola.error(err.message);\n } else {\n consola.error(err.message || err);\n }\n process.exit(1);\n});\n\nprocess.on(\"unhandledRejection\", (err: any) => {\n if (err?.code === \"ERR_PLAYWRIGHT_NOT_INSTALLED\") {\n consola.error(err.message);\n } else {\n consola.error(err?.message || err);\n }\n process.exit(1);\n});\n\nconst subCommands: Record<string, any> = {\n add: addCommand,\n update: updateCommand,\n list: listCommand,\n serve: serveCommand,\n};\n\n// Check if first non-flag arg is a subcommand\nconst firstArg = process.argv[2];\nconst isSubCommand = firstArg && firstArg in subCommands;\n\nif (isSubCommand) {\n // Let citty handle subcommand routing\n const main = defineCommand({\n meta: {\n name: \"docs2ai\",\n version: \"0.1.0\",\n description: \"Convert documentation URLs into AI-ready Markdown files\",\n },\n subCommands,\n });\n runMain(main);\n} else if (firstArg && !firstArg.startsWith(\"-\") && firstArg !== \"--help\") {\n // Treat as a URL — run fetch command directly\n runCommand(fetchCommand, { rawArgs: process.argv.slice(2) });\n} else {\n // No args or --help — show usage\n const main = defineCommand({\n meta: {\n name: \"docs2ai\",\n version: \"0.1.0\",\n description: \"Convert documentation URLs into AI-ready Markdown files\",\n },\n subCommands,\n run() {\n console.log(\"Usage: docs2ai <url> [-o output.md] [--crawl]\");\n 
console.log(\" docs2ai add <url> [--name name] [--crawl]\");\n console.log(\" docs2ai update [--name name]\");\n console.log(\" docs2ai list\");\n console.log(\" docs2ai serve [-d dir]\");\n console.log(\"\\nRun `docs2ai --help` for full usage.\");\n },\n });\n runMain(main);\n}\n","import { defineCommand } from \"citty\";\nimport { dirname } from \"node:path\";\nimport consola from \"consola\";\nimport * as cheerio from \"cheerio\";\nimport { fetchPage, fetchWithBrowser } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write, writePages } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport { slugFromUrl } from \"../utils/url\";\nimport {\n buildSourceManifest,\n writeSourceManifest,\n updateRootManifest,\n} from \"../pipeline/manifest\";\nimport { extractSiteMeta } from \"../pipeline/meta-extractor\";\n\n/**\n * Determine whether crawl output should go to a directory (one file per page)\n * or a single stitched file.\n */\nfunction resolveOutputMode(\n output: string | undefined,\n shouldCrawl: boolean,\n name: string\n): { mode: \"single-file\" | \"directory\"; outputPath: string | undefined; outputDir: string } {\n if (!shouldCrawl) {\n return { mode: \"single-file\", outputPath: output, outputDir: \"\" };\n }\n\n // Crawl + explicit .md output → single file (backward compat)\n if (output && output.endsWith(\".md\")) {\n return { mode: \"single-file\", outputPath: output, outputDir: \"\" };\n }\n\n // Crawl + explicit directory path\n if (output) {\n const dir = output.endsWith(\"/\") ? output : output + \"/\";\n return { mode: \"directory\", outputPath: undefined, outputDir: dir };\n }\n\n // Crawl + no output → default directory\n return { mode: \"directory\", outputPath: undefined, outputDir: `.ai/docs/${name}/` };\n}\n\nexport const fetchCommand = defineCommand({\n meta: {\n name: \"fetch\",\n description: \"Fetch a documentation URL and convert to Markdown\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to convert\",\n required: true,\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output file path or directory\",\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Follow sidebar/nav links\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n },\n async run({ args }) {\n const url = args.url as string;\n const output = args.output as string | undefined;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n const name = (args.name as string) || slugFromUrl(url);\n\n const { mode, outputPath, outputDir } = resolveOutputMode(output, shouldCrawl, name);\n const silent = mode === \"single-file\" && !outputPath;\n\n if (shouldCrawl) {\n if (!silent) consola.start(`Crawling from ${url} (max depth: ${maxDepth})...`);\n\n // Fetch first page to resolve platform and get navLinkSelector\n const firstHtml = await fetchPage(url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(url, $);\n const strategy = getStrategy(platformId);\n const navLinkSelector = strategy.navLinkSelector();\n\n const crawlResult = await crawl(url, {\n maxDepth,\n 
navLinkSelector,\n discoverUrls: strategy.discoverUrls?.bind(strategy),\n onPageFetched: (pageUrl, current, total) => {\n if (!silent) consola.info(`[${current}/${total}] ${pageUrl}`);\n },\n });\n\n const { pages, effectivePrefix } = crawlResult;\n if (!silent) consola.success(`Crawled ${pages.length} pages`);\n\n if (mode === \"directory\") {\n // Directory mode: one .md file per page + manifests\n const pageEntries = pages.map((page) => {\n const { content, title, platform } = extract(page.html, page.url);\n const md = transform(content);\n return { url: page.url, title, platform, markdown: md };\n });\n\n const firstPlatform = pageEntries[0]?.platform || \"generic\";\n const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);\n\n const siteMeta = extractSiteMeta(firstHtml, url);\n const sourceManifest = buildSourceManifest(name, url, firstPlatform, manifestPages, siteMeta);\n writeSourceManifest(sourceManifest, outputDir);\n\n // Update root manifest in the parent directory\n const rootDir = dirname(outputDir.replace(/\\/$/, \"\"));\n updateRootManifest(rootDir, {\n name,\n path: name + \"/\",\n fetched_at: sourceManifest.fetched_at,\n display_name: siteMeta.displayName,\n description: siteMeta.description,\n icon_url: siteMeta.iconUrl,\n page_count: manifestPages.length,\n });\n\n consola.success(`Written ${pages.length} pages to ${outputDir}`);\n } else {\n // Single-file mode: stitch all pages together\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n\n write(markdown, outputPath, {\n sourceUrl: url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n if (!silent) consola.success(`Written to ${outputPath}`);\n }\n } else {\n if (!silent) consola.start(`Fetching ${url}...`);\n let html = await fetchPage(url);\n\n const { content, title, platform } = extract(html, url);\n\n // If content is suspiciously small, try Playwright\n if (content.trim().length < 200) {\n if (!silent) consola.warn(\"Content looks thin, retrying with browser...\");\n try {\n html = await fetchWithBrowser(url);\n const result = extract(html, url);\n const markdown = transform(result.content);\n write(markdown, outputPath, {\n sourceUrl: url,\n title: result.title || title,\n platform: result.platform,\n });\n if (!silent) consola.success(`Written to ${outputPath}`);\n return;\n } catch (err: any) {\n if (err?.code === \"ERR_PLAYWRIGHT_NOT_INSTALLED\") {\n consola.warn(\n \"This page may require a browser to render. 
Install Playwright:\\n\" +\n \" npm install -D playwright && npx playwright install chromium\"\n );\n } else {\n consola.warn(\"Browser fallback failed, using static content.\");\n }\n }\n }\n\n if (!silent) consola.success(`Extracted content (platform: ${platform})`);\n const markdown = transform(content);\n\n write(markdown, outputPath, {\n sourceUrl: url,\n title,\n platform,\n });\n\n if (!silent) consola.success(`Written to ${outputPath}`);\n }\n },\n});\n","import { ofetch } from \"ofetch\";\nimport { execSync } from \"node:child_process\";\nimport consola from \"consola\";\n\n/** HTTP status codes that warrant a Playwright retry */\nconst BROWSER_RETRY_CODES = new Set([403, 406, 429]);\n\n/** Patterns that indicate a bot-protection challenge page */\nconst CHALLENGE_PATTERNS = [\n \"verify you are human\",\n \"just a moment\",\n \"checking your browser\",\n \"attention required\",\n \"enable javascript and cookies\",\n];\n\n/**\n * Detect if fetched HTML is a bot-protection challenge page\n * rather than real content.\n */\nfunction isChallengeContent(html: string): boolean {\n const lower = html.toLowerCase();\n return CHALLENGE_PATTERNS.some((p) => lower.includes(p));\n}\n\n/**\n * Fetch the raw HTML of a documentation page.\n * Uses static fetch by default, falls back to Playwright on blocked responses.\n */\nexport async function fetchPage(url: string): Promise<string> {\n try {\n return await ofetch(url, { responseType: \"text\" });\n } catch (err: any) {\n const status = err?.response?.status ?? err?.statusCode;\n if (status && BROWSER_RETRY_CODES.has(status)) {\n consola.warn(\n `Static fetch returned ${status}, retrying with browser...`\n );\n return fetchWithBrowser(url);\n }\n throw err;\n }\n}\n\n/**\n * Fetch a page using Playwright for JS-rendered sites.\n * Tries headless first, falls back to non-headless if a bot challenge is detected.\n * Playwright is an optional dependency — throws a typed error if not installed.\n */\nexport async function fetchWithBrowser(url: string): Promise<string> {\n const playwright = await loadPlaywright();\n\n // Try headless first\n let html = await launchAndFetch(playwright, url, true);\n\n // If we got a challenge page, retry with visible browser\n if (isChallengeContent(html)) {\n consola.warn(\"Bot protection detected, retrying with visible browser...\");\n html = await launchAndFetch(playwright, url, false);\n }\n\n return html;\n}\n\nasync function loadPlaywright() {\n try {\n return await import(\"playwright\");\n } catch {\n consola.info(\n \"This site requires a browser to fetch. Installing Playwright...\"\n );\n try {\n execSync(\"npm install -g playwright\", { stdio: \"inherit\" });\n execSync(\"npx playwright install chromium\", { stdio: \"inherit\" });\n return await import(\"playwright\");\n } catch {\n const err = new Error(\n \"Failed to auto-install Playwright. 
Install it manually:\\n\\n\" +\n \" npm install -g playwright && npx playwright install chromium\\n\"\n );\n (err as any).code = \"ERR_PLAYWRIGHT_NOT_INSTALLED\";\n throw err;\n }\n }\n}\n\nasync function launchAndFetch(\n playwright: typeof import(\"playwright\"),\n url: string,\n headless: boolean\n): Promise<string> {\n const browser = await playwright.chromium.launch({ headless });\n try {\n const page = await browser.newPage();\n await page.goto(url, { waitUntil: \"domcontentloaded\", timeout: 30000 });\n // Wait for JS-rendered content to settle\n await page.waitForTimeout(2000);\n return await page.content();\n } finally {\n await browser.close();\n }\n}\n","import * as cheerio from \"cheerio\";\nimport { Readability } from \"@mozilla/readability\";\nimport { parseHTML } from \"linkedom\";\nimport { resolve } from \"./resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport type { PlatformId } from \"../platforms/base\";\n\nexport interface ExtractResult {\n content: string;\n title: string;\n platform: PlatformId;\n}\n\n/**\n * Extract meaningful content from raw HTML.\n * Uses platform-specific selectors when available, falls back to Readability.\n */\nexport function extract(html: string, url: string): ExtractResult {\n const $ = cheerio.load(html);\n const platform = resolve(url, $);\n const strategy = getStrategy(platform);\n\n const title = extractTitle($);\n\n // Non-generic platforms: use selector-based extraction first\n if (platform !== \"generic\") {\n for (const sel of strategy.removeSelectors()) {\n $(sel).remove();\n }\n\n const contentEl = $(strategy.contentSelector()).first();\n const selectorContent = contentEl.html();\n\n if (selectorContent && selectorContent.trim().length >= 100) {\n return { content: selectorContent, title, platform };\n }\n // Fall through to Readability if selector extraction yields too little\n }\n\n // Generic / fallback: Readability extraction\n let article: ReturnType<Readability[\"parse\"]> = null;\n try {\n const { document } = parseHTML(html);\n const reader = new Readability(document as any);\n article = reader.parse();\n } catch {\n // Readability can throw on empty/malformed HTML — fall through to body/raw\n }\n\n const content = article?.content || $(\"body\").html() || html;\n\n return {\n content,\n title: title || article?.title || \"\",\n platform,\n };\n}\n\n/**\n * Extract page title from common sources.\n */\nfunction extractTitle($: cheerio.CheerioAPI): string {\n const h1 = $(\"h1\").first().text().trim();\n if (h1) return h1;\n\n const ogTitle = $('meta[property=\"og:title\"]').attr(\"content\")?.trim();\n if (ogTitle) return ogTitle;\n\n return $(\"title\").text().trim();\n}\n","import * as cheerio from \"cheerio\";\nimport type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const mintlify: PlatformStrategy = {\n id: \"mintlify\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Mintlify\"]').length > 0) return true;\n if ($(\"script[src*='mintlify']\").length > 0) return true;\n if ($(\"[data-mintlify]\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, main\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \".sidebar\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href], 
.sidebar a[href], [class*='sidebar'] a[href]\";\n },\n\n discoverUrls(html: string, baseUrl: string): string[] {\n const $ = cheerio.load(html);\n const paths = new Set<string>();\n\n // Mintlify uses Next.js — sidebar nav is in __next_f script data, not <a> tags.\n // The data contains escaped JSON like \\\"href\\\":\\\"/api-reference/checkouts/create\\\"\n $(\"script\").each((_, el) => {\n const text = $(el).html() || \"\";\n // Match escaped JSON paths: \\\"href\\\":\\\"/some-path\\\"\n const escaped = /\\\\?\"href\\\\?\"\\s*:\\s*\\\\?\"(\\/[a-z0-9][a-z0-9\\/-]*)\\\\?\"/g;\n let match = escaped.exec(text);\n while (match !== null) {\n paths.add(match[1]);\n match = escaped.exec(text);\n }\n });\n\n // Resolve to absolute URLs\n const origin = new URL(baseUrl).origin;\n // Determine docs base path (e.g. /docs from /docs/api-reference/intro)\n const basePath = new URL(baseUrl).pathname.split(\"/\").slice(0, 2).join(\"/\");\n\n return [...paths].map((p) => {\n if (p.startsWith(basePath)) {\n return origin + p;\n }\n return origin + basePath + p;\n });\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const docusaurus: PlatformStrategy = {\n id: \"docusaurus\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"Docusaurus\"]').length > 0)\n return true;\n if ($(\".theme-doc-sidebar-container\").length > 0) return true;\n if ($('meta[name=\"docusaurus_locale\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \"article, [role='main'], .theme-doc-markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \".navbar\",\n \"footer\",\n \".theme-doc-toc-desktop\",\n \".theme-doc-sidebar-container\",\n \".pagination-nav\",\n \".theme-doc-breadcrumbs\",\n \"nav\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".menu__link[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const readme: PlatformStrategy = {\n id: \"readme\",\n\n detect(url: string, $: CheerioAPI): boolean {\n let rmClassCount = 0;\n $(\"[class]\").each((_, el) => {\n const cls = $(el).attr(\"class\") || \"\";\n if (/\\brm-/.test(cls)) rmClassCount++;\n });\n if (rmClassCount > 2) return true;\n if ($(\".rm-Article\").length > 0) return true;\n if ($(\".rm-Markdown\").length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return \".markdown-body, .rm-Article, .rm-Markdown\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \".rm-Sidebar\",\n \".rm-TableOfContents\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \".rm-Sidebar a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const gitbook: PlatformStrategy = {\n id: \"gitbook\",\n\n detect(url: string, $: CheerioAPI): boolean {\n if ($('meta[name=\"generator\"][content*=\"GitBook\"]').length > 0) return true;\n try {\n const parsed = new URL(url);\n if (parsed.hostname.endsWith(\".gitbook.io\")) return true;\n } catch {\n // invalid URL, skip host check\n }\n if ($('[data-testid=\"page.contentEditor\"]').length > 0) return true;\n return false;\n },\n\n contentSelector(): string {\n return '[data-testid=\"page.contentEditor\"], main, article';\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n 
\"header\",\n \"footer\",\n \"[class*='sidebar']\",\n \"[class*='toc']\",\n \"[class*='cookie']\",\n \"script\",\n \"style\",\n ];\n },\n\n navLinkSelector(): string | null {\n return \"nav a[href], aside a[href]\";\n },\n};\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformStrategy } from \"./base\";\n\nexport const generic: PlatformStrategy = {\n id: \"generic\",\n\n detect(_url: string, _$: CheerioAPI): boolean {\n return true;\n },\n\n contentSelector(): string {\n return \"article, main, [role='main'], .content\";\n },\n\n removeSelectors(): string[] {\n return [\n \"nav\",\n \"header\",\n \"footer\",\n \"[role='navigation']\",\n \"[class*='sidebar']\",\n \"[class*='cookie']\",\n \"[class*='banner']\",\n \"script\",\n \"style\",\n \"noscript\",\n ];\n },\n\n navLinkSelector(): string | null {\n return null;\n },\n};\n","import type { PlatformId, PlatformStrategy } from \"./base\";\nimport { mintlify } from \"./mintlify\";\nimport { docusaurus } from \"./docusaurus\";\nimport { readme } from \"./readme\";\nimport { gitbook } from \"./gitbook\";\nimport { generic } from \"./generic\";\n\n/** Ordered list of platform strategies. Generic must be last (always matches). */\nexport const platformStrategies: PlatformStrategy[] = [\n mintlify,\n docusaurus,\n readme,\n gitbook,\n generic,\n];\n\n/** Get a strategy by its platform ID. */\nexport function getStrategy(id: PlatformId): PlatformStrategy {\n const strategy = platformStrategies.find((s) => s.id === id);\n if (!strategy) {\n throw new Error(`Unknown platform: ${id}`);\n }\n return strategy;\n}\n","import type { CheerioAPI } from \"cheerio\";\nimport type { PlatformId } from \"../platforms/base\";\nimport { platformStrategies } from \"../platforms/registry\";\n\n/**\n * Detect which documentation platform a page belongs to.\n * Tries platform-specific strategies in order, falls back to generic.\n */\nexport function resolve(url: string, $: CheerioAPI): PlatformId {\n for (const strategy of platformStrategies) {\n if (strategy.detect(url, $)) {\n return strategy.id;\n }\n }\n return \"generic\";\n}\n","import TurndownService from \"turndown\";\nimport { gfm } from \"turndown-plugin-gfm\";\n\n/**\n * Convert clean HTML to Markdown.\n */\nexport function transform(html: string): string {\n const td = new TurndownService({\n headingStyle: \"atx\",\n codeBlockStyle: \"fenced\",\n bulletListMarker: \"-\",\n });\n\n td.use(gfm);\n\n addCalloutRule(td);\n addTabbedContentRule(td);\n addCodeBlockLangRule(td);\n addHiddenElementRule(td);\n\n return td.turndown(html);\n}\n\nfunction isElement(node: TurndownService.Node): node is HTMLElement {\n return node.nodeType === 1;\n}\n\nfunction getAttr(node: TurndownService.Node, attr: string): string {\n if (isElement(node)) {\n return node.getAttribute(attr) || \"\";\n }\n return \"\";\n}\n\nfunction getTagName(node: TurndownService.Node): string {\n if (isElement(node)) {\n return node.tagName.toLowerCase();\n }\n return \"\";\n}\n\n/**\n * Convert callouts/admonitions to blockquotes.\n * Matches: aside, .admonition, .callout, .alert, [role=\"alert\"]\n */\nfunction addCalloutRule(td: TurndownService): void {\n td.addRule(\"callouts\", {\n filter(node) {\n if (!isElement(node)) return false;\n const tag = getTagName(node);\n if (tag === \"aside\") return true;\n const cls = getAttr(node, \"class\");\n if (\n /\\b(admonition|callout|alert|notice|warning|info|tip|note|caution|danger)\\b/i.test(\n cls\n )\n )\n return true;\n if (getAttr(node, \"role\") === \"alert\") return true;\n 
return false;\n },\n replacement(content, node) {\n const cls = getAttr(node, \"class\").toLowerCase();\n let type = \"Note\";\n if (/warning|caution/.test(cls)) type = \"Warning\";\n else if (/danger|error/.test(cls)) type = \"Danger\";\n else if (/tip|success/.test(cls)) type = \"Tip\";\n else if (/info/.test(cls)) type = \"Info\";\n\n const lines = content.trim().split(\"\\n\");\n const quoted = lines.map((line) => `> ${line}`).join(\"\\n\");\n return `\\n> **${type}**\\n${quoted}\\n\\n`;\n },\n });\n}\n\n/**\n * Convert tabbed content into labeled sections.\n * Matches: .tab-panel, .tabpanel, [role=\"tabpanel\"]\n */\nfunction addTabbedContentRule(td: TurndownService): void {\n td.addRule(\"tabbed-content\", {\n filter(node) {\n if (!isElement(node)) return false;\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel|tabs__item)\\b/i.test(cls)) return true;\n if (getAttr(node, \"role\") === \"tabpanel\") return true;\n return false;\n },\n replacement(content, node) {\n const label =\n getAttr(node, \"aria-label\") ||\n getAttr(node, \"data-label\") ||\n getAttr(node, \"data-value\") ||\n \"\";\n if (label) {\n return `\\n**${label}**\\n\\n${content.trim()}\\n\\n`;\n }\n return `\\n${content.trim()}\\n\\n`;\n },\n });\n}\n\n/**\n * Ensure code blocks with data-language/data-lang produce proper fenced blocks.\n */\nfunction addCodeBlockLangRule(td: TurndownService): void {\n td.addRule(\"code-block-lang\", {\n filter(node) {\n if (!isElement(node)) return false;\n if (getTagName(node) !== \"pre\") return false;\n const codeEl = node.querySelector(\"code\");\n if (!codeEl) return false;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n return lang.length > 0;\n },\n replacement(_content, node) {\n if (!isElement(node)) return _content;\n const codeEl = node.querySelector(\"code\")!;\n const lang =\n getAttr(node, \"data-language\") ||\n getAttr(node, \"data-lang\") ||\n (codeEl.getAttribute(\"data-language\") || \"\") ||\n (codeEl.getAttribute(\"data-lang\") || \"\");\n const code = codeEl.textContent || \"\";\n return `\\n\\`\\`\\`${lang}\\n${code}\\n\\`\\`\\`\\n`;\n },\n });\n}\n\n/**\n * Remove hidden elements (display:none) except tab panels.\n */\nfunction addHiddenElementRule(td: TurndownService): void {\n td.addRule(\"hidden-elements\", {\n filter(node) {\n if (!isElement(node)) return false;\n const style = getAttr(node, \"style\");\n if (!/display\\s*:\\s*none/i.test(style)) return false;\n // Don't remove tab panels — they're hidden but contain valid content\n const cls = getAttr(node, \"class\");\n if (/\\b(tab-panel|tabpanel)\\b/i.test(cls)) return false;\n if (getAttr(node, \"role\") === \"tabpanel\") return false;\n return true;\n },\n replacement() {\n return \"\";\n },\n });\n}\n","import { writeFileSync, mkdirSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport matter from \"gray-matter\";\nimport { filePathForPage } from \"../utils/slug\";\n\nexport interface WriterOptions {\n sourceUrl: string;\n title: string;\n platform: string;\n}\n\n/**\n * Write Markdown with frontmatter to a file or stdout.\n */\nexport function write(\n markdown: string,\n outputPath: string | undefined,\n options: WriterOptions\n): void {\n const content = matter.stringify(markdown, {\n source: options.sourceUrl,\n fetched_at: new Date().toISOString(),\n platform: options.platform,\n title: options.title,\n 
docs2ai_version: \"0.1.0\",\n });\n\n if (outputPath) {\n mkdirSync(dirname(outputPath), { recursive: true });\n writeFileSync(outputPath, content, \"utf-8\");\n } else {\n process.stdout.write(content);\n }\n}\n\n/**\n * Write a single page's Markdown with frontmatter to a file path (always writes to disk).\n */\nexport function writePage(\n markdown: string,\n filePath: string,\n options: WriterOptions\n): void {\n const content = matter.stringify(markdown, {\n source: options.sourceUrl,\n fetched_at: new Date().toISOString(),\n platform: options.platform,\n title: options.title,\n docs2ai_version: \"0.1.0\",\n });\n\n mkdirSync(dirname(filePath), { recursive: true });\n writeFileSync(filePath, content, \"utf-8\");\n}\n\nexport interface PageEntry {\n url: string;\n title: string;\n platform: string;\n markdown: string;\n}\n\n/**\n * Write multiple crawled pages to a directory, one .md file per page.\n * Returns manifest page entries (title + relative path) for each written page.\n */\nexport function writePages(\n pages: PageEntry[],\n outputDir: string,\n basePrefix: string\n): { title: string; path: string }[] {\n const usedPaths = new Set<string>();\n const entries: { title: string; path: string }[] = [];\n\n for (const page of pages) {\n let relPath = filePathForPage(page.url, basePrefix);\n\n // Handle slug collisions by appending a numeric suffix\n if (usedPaths.has(relPath)) {\n const base = relPath.replace(/\\.md$/, \"\");\n let i = 2;\n while (usedPaths.has(`${base}-${i}.md`)) i++;\n relPath = `${base}-${i}.md`;\n }\n usedPaths.add(relPath);\n\n const filePath = join(outputDir, relPath);\n writePage(page.markdown, filePath, {\n sourceUrl: page.url,\n title: page.title,\n platform: page.platform,\n });\n\n entries.push({ title: page.title, path: relPath });\n }\n\n return entries;\n}\n","/**\n * Derive a filename slug from a URL's pathname.\n * Strips leading/trailing slashes and returns the last segment.\n */\nexport function slugFromPathname(url: string): string {\n const parsed = new URL(url);\n const pathname = parsed.pathname.replace(/\\/+$/, \"\");\n if (!pathname || pathname === \"/\") return \"index\";\n const segments = pathname.split(\"/\").filter(Boolean);\n return segments[segments.length - 1];\n}\n\n/**\n * Compute a relative file path for a crawled page based on its URL and a base prefix.\n * Strips the basePrefix from the pathname, intermediate segments become subdirectories,\n * and the last segment becomes the filename with .md extension.\n *\n * @example filePathForPage(\"https://x.com/docs/guides/auth\", \"/docs/\") → \"guides/auth.md\"\n * @example filePathForPage(\"https://x.com/docs/getting-started\", \"/docs/\") → \"getting-started.md\"\n * @example filePathForPage(\"https://x.com/docs/\", \"/docs/\") → \"index.md\"\n */\nexport function filePathForPage(pageUrl: string, basePrefix: string): string {\n const parsed = new URL(pageUrl);\n let pathname = parsed.pathname.replace(/\\/+$/, \"\");\n\n // Strip the base prefix\n const normalizedPrefix = basePrefix.replace(/\\/+$/, \"\");\n if (pathname.startsWith(normalizedPrefix)) {\n pathname = pathname.slice(normalizedPrefix.length);\n }\n\n // Remove leading slash\n pathname = pathname.replace(/^\\/+/, \"\");\n\n if (!pathname) return \"index.md\";\n\n return pathname + \".md\";\n}\n","import * as cheerio from \"cheerio\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport {\n getCrawlPrefix,\n computeCommonPrefix,\n isInBounds,\n normalizeUrl,\n} from \"./boundary\";\n\nexport interface CrawledPage 
{\n url: string;\n html: string;\n}\n\nexport interface CrawlResult {\n pages: CrawledPage[];\n effectivePrefix: string;\n}\n\nexport interface CrawlOptions {\n maxDepth: number;\n navLinkSelector?: string | null;\n /** Custom URL discovery for SPA-rendered sidebars (overrides navLinkSelector) */\n discoverUrls?: (html: string, baseUrl: string) => string[];\n onPageFetched?: (url: string, current: number, total: number) => void;\n}\n\n/**\n * Crawl documentation pages starting from a URL.\n * Follows in-bounds links via BFS up to maxDepth.\n */\nexport async function crawl(\n startUrl: string,\n options: CrawlOptions\n): Promise<CrawlResult> {\n const { origin } = getCrawlPrefix(startUrl);\n let { pathPrefix } = getCrawlPrefix(startUrl);\n const visited = new Set<string>();\n const results: CrawledPage[] = [];\n let isFirstPage = true;\n\n // BFS queue: [url, depth]\n const queue: [string, number][] = [[startUrl, 0]];\n visited.add(normalizeUrl(startUrl));\n\n while (queue.length > 0) {\n const [url, depth] = queue.shift()!;\n\n let html: string;\n try {\n html = await fetchPage(url);\n } catch {\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n continue;\n }\n results.push({ url, html });\n options.onPageFetched?.(url, results.length, results.length + queue.length);\n\n if (depth < options.maxDepth) {\n // On the first page, discover all same-origin nav links to widen the boundary\n if (isFirstPage) {\n const allNavUrls = options.discoverUrls\n ? discoverSameOriginCustom(html, url, origin, options.discoverUrls)\n : discoverSameOrigin(html, url, origin, options.navLinkSelector);\n if (allNavUrls.length > 0) {\n pathPrefix = computeCommonPrefix(startUrl, allNavUrls);\n }\n isFirstPage = false;\n }\n\n const links = options.discoverUrls\n ? 
discoverLinksCustom(html, url, origin, pathPrefix, options.discoverUrls)\n : discoverLinks(html, url, origin, pathPrefix, options.navLinkSelector);\n for (const link of links) {\n const normalized = normalizeUrl(link);\n if (!visited.has(normalized)) {\n visited.add(normalized);\n queue.push([link, depth + 1]);\n }\n }\n }\n\n // Politeness delay between requests\n if (queue.length > 0) {\n await delay(200);\n }\n }\n\n return { pages: results, effectivePrefix: pathPrefix };\n}\n\n/**\n * Extract all in-bounds links from a page's HTML.\n * When navLinkSelector is provided, only links matching that selector are used.\n */\nfunction discoverLinks(\n html: string,\n baseUrl: string,\n origin: string,\n pathPrefix: string,\n navLinkSelector?: string | null\n): string[] {\n const $ = cheerio.load(html);\n const links: string[] = [];\n const selector = navLinkSelector || \"a[href]\";\n\n $(selector).each((_, el) => {\n const href = $(el).attr(\"href\");\n if (!href) return;\n\n try {\n const resolved = new URL(href, baseUrl).href;\n if (isInBounds(resolved, origin, pathPrefix)) {\n links.push(resolved);\n }\n } catch {\n // Invalid URL, skip\n }\n });\n\n return [...new Set(links)];\n}\n\n/**\n * Discover all same-origin links from nav (no path prefix filter).\n * Used on the first page to compute the crawl boundary from nav structure.\n */\nfunction discoverSameOrigin(\n html: string,\n baseUrl: string,\n origin: string,\n navLinkSelector?: string | null\n): string[] {\n const $ = cheerio.load(html);\n const links: string[] = [];\n const selector = navLinkSelector || \"a[href]\";\n\n $(selector).each((_, el) => {\n const href = $(el).attr(\"href\");\n if (!href) return;\n try {\n const resolved = new URL(href, baseUrl).href;\n if (new URL(resolved).origin === origin) {\n links.push(resolved);\n }\n } catch {\n // Invalid URL, skip\n }\n });\n\n return [...new Set(links)];\n}\n\n/**\n * Discover all same-origin links via custom discovery (no path prefix filter).\n */\nfunction discoverSameOriginCustom(\n html: string,\n baseUrl: string,\n origin: string,\n discoverUrls: (html: string, baseUrl: string) => string[]\n): string[] {\n const urls = discoverUrls(html, baseUrl);\n return [\n ...new Set(\n urls.filter((u) => {\n try {\n return new URL(u).origin === origin;\n } catch {\n return false;\n }\n })\n ),\n ];\n}\n\n/**\n * Extract in-bounds links using a custom discovery function from the platform strategy.\n */\nfunction discoverLinksCustom(\n html: string,\n baseUrl: string,\n origin: string,\n pathPrefix: string,\n discoverUrls: (html: string, baseUrl: string) => string[]\n): string[] {\n const urls = discoverUrls(html, baseUrl);\n return [...new Set(urls.filter((u) => isInBounds(u, origin, pathPrefix)))];\n}\n\nfunction delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Validate whether a string is a valid URL.\n */\nexport function isValidUrl(input: string): boolean {\n try {\n new URL(input);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Normalize a URL for deduplication: strip hash, query, trailing slash.\n */\nexport function normalizeUrl(url: string): string {\n const parsed = new URL(url);\n parsed.hash = \"\";\n parsed.search = \"\";\n return parsed.href.replace(/\\/$/, \"\");\n}\n\n/**\n * Derive a short name/slug from a URL's hostname.\n */\nexport function slugFromUrl(url: string): string {\n try {\n const parsed = new URL(url);\n return parsed.hostname.replace(/\\./g, \"-\").replace(/^www-/, \"\");\n } 
catch {\n return \"source\";\n }\n}\n","import { normalizeUrl } from \"../utils/url\";\n\nexport { normalizeUrl };\n\n/**\n * Determine the crawl boundary from a starting URL.\n * Links are in-bounds if they share the same origin and path prefix.\n */\nexport function getCrawlPrefix(url: string): {\n origin: string;\n pathPrefix: string;\n} {\n const parsed = new URL(url);\n const pathParts = parsed.pathname.split(\"/\");\n // Remove the last segment (the current page slug)\n pathParts.pop();\n const pathPrefix = pathParts.join(\"/\") + \"/\";\n return { origin: parsed.origin, pathPrefix };\n}\n\n/**\n * Compute the longest common path prefix between a start URL and discovered nav URLs.\n * Used to widen the crawl boundary when sidebar links span multiple sections.\n */\nexport function computeCommonPrefix(\n startUrl: string,\n navUrls: string[]\n): string {\n const startParts = new URL(startUrl).pathname.split(\"/\").filter(Boolean);\n const parts = [...startParts];\n\n for (const url of navUrls) {\n const urlParts = new URL(url).pathname.split(\"/\").filter(Boolean);\n let i = 0;\n while (i < parts.length && i < urlParts.length && parts[i] === urlParts[i]) {\n i++;\n }\n parts.length = i;\n }\n\n return \"/\" + (parts.length > 0 ? parts.join(\"/\") + \"/\" : \"\");\n}\n\n/**\n * Check whether a candidate URL falls within the crawl boundary.\n */\nexport function isInBounds(\n candidateUrl: string,\n origin: string,\n pathPrefix: string\n): boolean {\n try {\n const parsed = new URL(candidateUrl);\n return parsed.origin === origin && parsed.pathname.startsWith(pathPrefix);\n } catch {\n return false;\n }\n}\n","import { readFileSync, writeFileSync, mkdirSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport type { SiteMeta } from \"./meta-extractor\";\n\n/** Manifest for a single documentation source (written as _index.json). */\nexport interface SourceManifest {\n name: string;\n url: string;\n platform: string;\n fetched_at: string;\n pages: { title: string; path: string }[];\n display_name?: string;\n description?: string;\n icon_url?: string | null;\n og_image?: string | null;\n language?: string | null;\n page_count?: number;\n}\n\n/** Entry in the root manifest's sources array. */\nexport interface RootManifestEntry {\n name: string;\n path: string;\n fetched_at: string;\n display_name?: string;\n description?: string;\n icon_url?: string | null;\n page_count?: number;\n}\n\n/** Root manifest listing all sources (written as manifest.json). 
*/\nexport interface RootManifest {\n sources: RootManifestEntry[];\n}\n\n/**\n * Build a source manifest object.\n * When siteMeta is provided, its fields are included in the manifest.\n */\nexport function buildSourceManifest(\n name: string,\n url: string,\n platform: string,\n pages: { title: string; path: string }[],\n siteMeta?: SiteMeta\n): SourceManifest {\n const manifest: SourceManifest = {\n name,\n url,\n platform,\n fetched_at: new Date().toISOString(),\n pages,\n };\n\n if (siteMeta) {\n manifest.display_name = siteMeta.displayName;\n manifest.description = siteMeta.description;\n manifest.icon_url = siteMeta.iconUrl;\n manifest.og_image = siteMeta.ogImage;\n manifest.language = siteMeta.language;\n manifest.page_count = pages.length;\n }\n\n return manifest;\n}\n\n/**\n * Write a source manifest (_index.json) to a directory.\n */\nexport function writeSourceManifest(\n manifest: SourceManifest,\n outputDir: string\n): void {\n mkdirSync(outputDir, { recursive: true });\n writeFileSync(\n join(outputDir, \"_index.json\"),\n JSON.stringify(manifest, null, 2) + \"\\n\",\n \"utf-8\"\n );\n}\n\n/**\n * Load the root manifest (manifest.json) from a directory.\n * Returns an empty manifest if the file doesn't exist.\n */\nexport function loadRootManifest(rootDir: string): RootManifest {\n try {\n const raw = readFileSync(join(rootDir, \"manifest.json\"), \"utf-8\");\n return JSON.parse(raw) as RootManifest;\n } catch {\n return { sources: [] };\n }\n}\n\n/**\n * Upsert a source entry in the root manifest and write it to disk.\n */\nexport function updateRootManifest(\n rootDir: string,\n entry: RootManifestEntry\n): void {\n const manifest = loadRootManifest(rootDir);\n const idx = manifest.sources.findIndex((s) => s.name === entry.name);\n if (idx >= 0) {\n manifest.sources[idx] = entry;\n } else {\n manifest.sources.push(entry);\n }\n mkdirSync(rootDir, { recursive: true });\n writeFileSync(\n join(rootDir, \"manifest.json\"),\n JSON.stringify(manifest, null, 2) + \"\\n\",\n \"utf-8\"\n );\n}\n","import * as cheerio from \"cheerio\";\n\n/** Site-level metadata extracted from a page's <head>. 
*/\nexport interface SiteMeta {\n displayName: string;\n description: string;\n iconUrl: string | null;\n ogImage: string | null;\n language: string | null;\n}\n\n/**\n * Extract site metadata from an HTML page's <head> element.\n * Pure function — no side effects or network calls.\n */\nexport function extractSiteMeta(html: string, url: string): SiteMeta {\n const $ = cheerio.load(html);\n const origin = new URL(url).origin;\n\n return {\n displayName: extractDisplayName($, url),\n description: extractDescription($),\n iconUrl: extractIconUrl($, origin),\n ogImage: extractOgImage($, origin),\n language: extractLanguage($),\n };\n}\n\nfunction nonEmpty(value: string | undefined): string | undefined {\n return value?.trim() || undefined;\n}\n\nfunction extractDisplayName($: cheerio.CheerioAPI, url: string): string {\n const ogSiteName = nonEmpty($('meta[property=\"og:site_name\"]').attr(\"content\"));\n if (ogSiteName) return ogSiteName;\n\n const appName = nonEmpty($('meta[name=\"application-name\"]').attr(\"content\"));\n if (appName) return appName;\n\n const title = nonEmpty($(\"title\").text());\n if (title) {\n const parts = title.split(/\\s[-|—]\\s/);\n return parts[0].trim();\n }\n\n return new URL(url).hostname;\n}\n\nfunction extractDescription($: cheerio.CheerioAPI): string {\n const ogDesc = nonEmpty($('meta[property=\"og:description\"]').attr(\"content\"));\n if (ogDesc) return ogDesc;\n\n const metaDesc = nonEmpty($('meta[name=\"description\"]').attr(\"content\"));\n if (metaDesc) return metaDesc;\n\n return \"\";\n}\n\nfunction extractIconUrl($: cheerio.CheerioAPI, origin: string): string | null {\n const selectors = [\n 'link[rel=\"apple-touch-icon\"]',\n 'link[rel=\"icon\"][type=\"image/svg+xml\"]',\n 'link[rel=\"icon\"]',\n 'link[rel=\"shortcut icon\"]',\n ];\n\n for (const selector of selectors) {\n const href = nonEmpty($(selector).attr(\"href\"));\n if (href) return resolveUrl(href, origin);\n }\n\n return `${origin}/favicon.ico`;\n}\n\nfunction extractOgImage($: cheerio.CheerioAPI, origin: string): string | null {\n const ogImage = nonEmpty($('meta[property=\"og:image\"]').attr(\"content\"));\n if (ogImage) return resolveUrl(ogImage, origin);\n\n return null;\n}\n\nfunction extractLanguage($: cheerio.CheerioAPI): string | null {\n const htmlLang = nonEmpty($(\"html\").attr(\"lang\"));\n if (htmlLang) return htmlLang;\n\n const ogLocale = nonEmpty($('meta[property=\"og:locale\"]').attr(\"content\"));\n if (ogLocale) return ogLocale;\n\n return null;\n}\n\nfunction resolveUrl(href: string, origin: string): string {\n if (href.startsWith(\"http://\") || href.startsWith(\"https://\")) return href;\n if (href.startsWith(\"//\")) return `https:${href}`;\n if (href.startsWith(\"/\")) return `${origin}${href}`;\n return `${origin}/${href}`;\n}\n","import { defineCommand } from \"citty\";\nimport { join } from \"node:path\";\nimport consola from \"consola\";\nimport { loadConfig, saveConfig, addSource } from \"../config/manager\";\nimport type { Docs2aiConfig } from \"../config/schema\";\nimport { slugFromUrl } from \"../utils/url\";\n\nexport const addCommand = defineCommand({\n meta: {\n name: \"add\",\n description: \"Add a documentation source to .docs2ai.yaml\",\n },\n args: {\n url: {\n type: \"positional\",\n description: \"Documentation URL to add\",\n required: true,\n },\n name: {\n type: \"string\",\n description: \"Name for this source (auto-derived from hostname if omitted)\",\n },\n crawl: {\n type: \"boolean\",\n description: \"Enable crawl mode for this 
source\",\n default: false,\n },\n \"max-depth\": {\n type: \"string\",\n description: \"Maximum crawl depth\",\n default: \"2\",\n },\n output: {\n type: \"string\",\n alias: \"o\",\n description: \"Output filename or directory\",\n },\n },\n run({ args }) {\n const url = args.url as string;\n const shouldCrawl = args.crawl as boolean;\n const maxDepth = parseInt(args[\"max-depth\"] as string, 10);\n\n const name = (args.name as string) || slugFromUrl(url);\n const output =\n (args.output as string) || (shouldCrawl ? `${name}/` : `${name}.md`);\n\n const existing = loadConfig();\n let config: Docs2aiConfig;\n let configPath: string;\n\n if (existing) {\n config = existing.config;\n configPath = existing.configPath;\n } else {\n configPath = join(process.cwd(), \".docs2ai.yaml\");\n config = { version: 1, outputDir: \".ai/docs\", sources: [] };\n }\n\n addSource(config, { name, url, crawl: shouldCrawl, maxDepth, output });\n saveConfig(config, configPath);\n\n consola.success(`Added source \"${name}\" → ${url}`);\n consola.info(`Config: ${configPath}`);\n },\n});\n","import { readFileSync, writeFileSync, existsSync } from \"node:fs\";\nimport { join, dirname } from \"node:path\";\nimport yaml from \"js-yaml\";\nimport type { Docs2aiConfig, SourceConfig } from \"./schema\";\n\nconst CONFIG_FILENAME = \".docs2ai.yaml\";\n\n/**\n * Load the .docs2ai.yaml config file, searching up from cwd.\n * Returns null if no config file is found.\n */\nexport function loadConfig(startDir?: string): {\n config: Docs2aiConfig;\n configPath: string;\n} | null {\n const configPath = findConfigFile(startDir || process.cwd());\n if (!configPath) return null;\n\n const raw = readFileSync(configPath, \"utf-8\");\n const data = yaml.load(raw) as Record<string, any>;\n\n const config: Docs2aiConfig = {\n version: data.version ?? 1,\n outputDir: data.output_dir ?? \".ai/docs\",\n sources: (data.sources ?? []).map(snakeToCamelSource),\n };\n\n return { config, configPath };\n}\n\n/**\n * Save configuration to a .docs2ai.yaml file.\n */\nexport function saveConfig(config: Docs2aiConfig, configPath: string): void {\n const data = {\n version: config.version,\n output_dir: config.outputDir,\n sources: config.sources.map(camelToSnakeSource),\n };\n\n const content = yaml.dump(data, { lineWidth: -1 });\n writeFileSync(configPath, content, \"utf-8\");\n}\n\n/**\n * Add or update a source in the config (upsert by name).\n */\nexport function addSource(config: Docs2aiConfig, source: SourceConfig): void {\n const idx = config.sources.findIndex((s) => s.name === source.name);\n if (idx >= 0) {\n config.sources[idx] = source;\n } else {\n config.sources.push(source);\n }\n}\n\n/**\n * Walk up the directory tree looking for .docs2ai.yaml.\n */\nfunction findConfigFile(startDir: string): string | null {\n let dir = startDir;\n while (true) {\n const candidate = join(dir, CONFIG_FILENAME);\n if (existsSync(candidate)) return candidate;\n const parent = dirname(dir);\n if (parent === dir) return null;\n dir = parent;\n }\n}\n\nfunction snakeToCamelSource(s: Record<string, any>): SourceConfig {\n return {\n name: s.name ?? \"\",\n url: s.url ?? \"\",\n crawl: s.crawl ?? false,\n maxDepth: s.max_depth ?? 2,\n output: s.output ?? 
\"\",\n };\n}\n\nfunction camelToSnakeSource(\n s: SourceConfig\n): Record<string, string | number | boolean> {\n return {\n name: s.name,\n url: s.url,\n crawl: s.crawl,\n max_depth: s.maxDepth,\n output: s.output,\n };\n}\n","import { defineCommand } from \"citty\";\nimport { join, dirname } from \"node:path\";\nimport { mkdirSync } from \"node:fs\";\nimport * as cheerio from \"cheerio\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\nimport { fetchPage } from \"../pipeline/fetcher\";\nimport { extract } from \"../pipeline/extractor\";\nimport { transform } from \"../pipeline/transformer\";\nimport { write, writePages } from \"../pipeline/writer\";\nimport { crawl } from \"../crawl/crawler\";\nimport { resolve } from \"../pipeline/resolver\";\nimport { getStrategy } from \"../platforms/registry\";\nimport {\n buildSourceManifest,\n writeSourceManifest,\n updateRootManifest,\n} from \"../pipeline/manifest\";\nimport { extractSiteMeta } from \"../pipeline/meta-extractor\";\n\nexport const updateCommand = defineCommand({\n meta: {\n name: \"update\",\n description: \"Refresh configured documentation sources\",\n },\n args: {\n name: {\n type: \"string\",\n description: \"Update only the named source\",\n },\n },\n async run({ args }) {\n const result = loadConfig();\n if (!result) {\n consola.error(\"No .docs2ai.yaml found. Run `docs2ai add <url>` first.\");\n process.exit(1);\n }\n\n const { config, configPath } = result;\n const configDir = dirname(configPath);\n const filterName = args.name as string | undefined;\n\n const sources = filterName\n ? config.sources.filter((s) => s.name === filterName)\n : config.sources;\n\n if (sources.length === 0) {\n if (filterName) {\n consola.error(`Source \"${filterName}\" not found in config.`);\n } else {\n consola.error(\"No sources configured.\");\n }\n process.exit(1);\n }\n\n for (const source of sources) {\n const isDirectoryOutput = !source.output.endsWith(\".md\");\n\n consola.start(`Updating \"${source.name}\" from ${source.url}...`);\n\n if (source.crawl) {\n // Fetch first page to resolve platform and get link discovery\n const firstHtml = await fetchPage(source.url);\n const $ = cheerio.load(firstHtml);\n const platformId = resolve(source.url, $);\n const strategy = getStrategy(platformId);\n\n const crawlResult = await crawl(source.url, {\n maxDepth: source.maxDepth,\n navLinkSelector: strategy.navLinkSelector(),\n discoverUrls: strategy.discoverUrls?.bind(strategy),\n onPageFetched: (url, current, total) => {\n consola.info(` [${current}/${total}] ${url}`);\n },\n });\n\n const { pages, effectivePrefix } = crawlResult;\n\n if (isDirectoryOutput) {\n // Directory mode: one .md file per page + manifests\n const outputDir = join(configDir, config.outputDir, source.output);\n const pageEntries = pages.map((page) => {\n const { content, title, platform } = extract(page.html, page.url);\n const md = transform(content);\n return { url: page.url, title, platform, markdown: md };\n });\n\n const firstPlatform = pageEntries[0]?.platform || \"generic\";\n const manifestPages = writePages(pageEntries, outputDir, effectivePrefix);\n\n const siteMeta = extractSiteMeta(firstHtml, source.url);\n const sourceManifest = buildSourceManifest(\n source.name,\n source.url,\n firstPlatform,\n manifestPages,\n siteMeta\n );\n writeSourceManifest(sourceManifest, outputDir);\n\n const rootDir = join(configDir, config.outputDir);\n updateRootManifest(rootDir, {\n name: source.name,\n path: source.output,\n fetched_at: 
sourceManifest.fetched_at,\n display_name: siteMeta.displayName,\n description: siteMeta.description,\n icon_url: siteMeta.iconUrl,\n page_count: manifestPages.length,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputDir} (${pages.length} pages)`);\n } else {\n // Single-file mode: stitch all pages together\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n const sections: string[] = [];\n let firstTitle = \"\";\n let firstPlatform = \"\";\n\n for (const page of pages) {\n const { content, title, platform } = extract(page.html, page.url);\n if (!firstTitle) {\n firstTitle = title;\n firstPlatform = platform;\n }\n const md = transform(content);\n sections.push(`## ${title}\\n\\nSource: ${page.url}\\n\\n${md}`);\n }\n\n const markdown = sections.join(\"\\n\\n---\\n\\n\");\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title: firstTitle,\n platform: firstPlatform,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n } else {\n const outputPath = join(configDir, config.outputDir, source.output);\n mkdirSync(dirname(outputPath), { recursive: true });\n\n const html = await fetchPage(source.url);\n const { content, title, platform } = extract(html, source.url);\n const markdown = transform(content);\n write(markdown, outputPath, {\n sourceUrl: source.url,\n title,\n platform,\n });\n\n consola.success(`Updated \"${source.name}\" → ${outputPath}`);\n }\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport consola from \"consola\";\nimport { loadConfig } from \"../config/manager\";\n\nexport const listCommand = defineCommand({\n meta: {\n name: \"list\",\n description: \"List configured documentation sources\",\n },\n run() {\n const result = loadConfig();\n if (!result) {\n consola.info(\"No .docs2ai.yaml found. Run `docs2ai add <url>` to get started.\");\n return;\n }\n\n const { config, configPath } = result;\n consola.info(`Config: ${configPath}`);\n consola.info(`Output dir: ${config.outputDir}\\n`);\n\n if (config.sources.length === 0) {\n consola.info(\"No sources configured.\");\n return;\n }\n\n for (const source of config.sources) {\n const crawlInfo = source.crawl\n ? 
` (crawl, depth: ${source.maxDepth})`\n : \"\";\n console.log(` ${source.name}${crawlInfo}`);\n console.log(` URL: ${source.url}`);\n console.log(` Output: ${source.output}`);\n console.log();\n }\n },\n});\n","import { defineCommand } from \"citty\";\nimport { resolve } from \"node:path\";\n\nexport const serveCommand = defineCommand({\n meta: {\n name: \"serve\",\n description: \"Start an MCP server exposing documentation tools\",\n },\n args: {\n dir: {\n type: \"string\",\n alias: \"d\",\n description: \"Documentation directory to serve\",\n default: \".ai/docs/\",\n },\n },\n async run({ args }) {\n const docsDir = resolve(process.cwd(), args.dir as string);\n const { createMcpServer } = await import(\"../mcp/server\");\n const { StdioServerTransport } = await import(\n \"@modelcontextprotocol/sdk/server/stdio.js\"\n );\n const server = createMcpServer(docsDir);\n const transport = new StdioServerTransport();\n await server.connect(transport);\n },\n});\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,gBAAAA,qBAAoB;AAC7B,SAAS,QAAAC,aAAY;AACrB,OAAOC,aAAY;AAmDZ,SAAS,SAAS,SAA6B;AACpD,QAAM,UAA0B,CAAC;AACjC,QAAM,QAAsB,CAAC;AAE7B,MAAI;AACJ,MAAI;AACF,UAAM,MAAMF,cAAaC,MAAK,SAAS,eAAe,GAAG,OAAO;AAChE,mBAAe,KAAK,MAAM,GAAG;AAAA,EAC/B,QAAQ;AACN,WAAO,EAAE,SAAS,CAAC,GAAG,OAAO,CAAC,EAAE;AAAA,EAClC;AAEA,aAAW,eAAe,aAAa,SAAS;AAC9C,UAAM,YAAYA,MAAK,SAAS,YAAY,IAAI;AAChD,QAAI;AACJ,QAAI;AACF,YAAM,MAAMD,cAAaC,MAAK,WAAW,aAAa,GAAG,OAAO;AAChE,uBAAiB,KAAK,MAAM,GAAG;AAAA,IACjC,QAAQ;AACN;AAAA,IACF;AAEA,QAAI,YAAY;AAEhB,eAAW,aAAa,eAAe,OAAO;AAC5C,UAAI;AACF,cAAM,MAAMD,cAAaC,MAAK,WAAW,UAAU,IAAI,GAAG,OAAO;AACjE,cAAM,SAASC,QAAO,GAAG;AACzB,cAAM,KAAK;AAAA,UACT,QAAQ,eAAe;AAAA,UACvB,MAAM,UAAU;AAAA,UAChB,OAAO,UAAU;AAAA,UACjB,KAAK,OAAO,OAAO,KAAK,UAAU,EAAE;AAAA,UACpC,UAAU,OAAO,OAAO,KAAK,YAAY,eAAe,QAAQ;AAAA,UAChE,WAAW,OAAO,OAAO,KAAK,cAAc,eAAe,UAAU;AAAA,UACrE,SAAS,OAAO,QAAQ,KAAK;AAAA,QAC/B,CAAC;AACD;AAAA,MACF,QAAQ;AACN;AAAA,MACF;AAAA,IACF;AAEA,UAAM,eAA6B;AAAA,MACjC,MAAM,eAAe;AAAA,MACrB,KAAK,eAAe;AAAA,MACpB,UAAU,eAAe;AAAA,MACzB,WAAW,eAAe;AAAA,MAC1B;AAAA,IACF;AACA,QAAI,eAAe,aAAc,cAAa,cAAc,eAAe;AAC3E,QAAI,eAAe,YAAa,cAAa,cAAc,eAAe;AAC1E,QAAI,eAAe,aAAa,OAAW,cAAa,UAAU,eAAe;AACjF,YAAQ,KAAK,YAAY;AAAA,EAC3B;AAEA,SAAO,EAAE,SAAS,MAAM;AAC1B;AA9GA;AAAA;AAAA;AAAA;AAAA;;;ACAA,OAAO,gBAAgB;AA2BhB,SAAS,iBAAiB,OAAkC;AACjE,QAAM,aAAa,IAAI,WAAuB;AAAA,IAC5C,QAAQ,CAAC,SAAS,SAAS;AAAA,IAC3B,aAAa,CAAC,UAAU,QAAQ,SAAS,KAAK;AAAA,IAC9C,SAAS;AAAA,EACX,CAAC;AAED,QAAM,YAAY,MAAM,IAAI,CAAC,MAAM,OAAO;AAAA,IACxC,IAAI,OAAO,CAAC;AAAA,IACZ,GAAG;AAAA,EACL,EAAE;AAEF,aAAW,OAAO,SAAS;AAE3B,SAAO;AAAA,IACL,OAAO,OAAe,SAAyC;AAC7D,UAAI,CAAC,MAAM,KAAK,EAAG,QAAO,CAAC;AAE3B,YAAM,SAAS,SAAS,SACpB,CAAC,WAA+B,OAAO,WAAW,QAAQ,SAC1D;AAEJ,YAAM,UAAU,WAAW,OAAO,OAAO;AAAA,QACvC,QAAQ;AAAA,QACR,OAAO;AAAA,QACP;AAAA,MACF,CAAC;AAED,YAAM,QAAQ,SAAS,SAAS;AAEhC,aAAO,QAAQ,MAAM,GAAG,KAAK,EAAE,IAAI,CAAC,OAAO;AAAA,QACzC,QAAQ,EAAE;AAAA,QACV,MAAM,EAAE;AAAA,QACR,OAAO,EAAE;AAAA,QACT,KAAK,EAAE;AAAA,QACP,OAAO,EAAE;AAAA,MACX,EAAE;AAAA,IACJ;AAAA,EACF;AACF;AAlEA;AAAA;AAAA;AAAA;AAAA;;;ACAA;AAAA;AAAA;AAAA;AAAA,SAAS,iBAAiB;AAC1B,SAAS,SAAS;AAQX,SAAS,gBAAgB,SAA4B;AAC1D,QAAM,OAAO,SAAS,OAAO;AAC7B,QAAM,cAAc,iBAAiB,KAAK,KAAK;AAE/C,QAAM,SAAS,IAAI,UAAU;AAAA,IAC3B,MAAM;AAAA,IACN,SAAS;AAAA,EACX,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,CAAC;AAAA,IACD,YAAY;AACV,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,KAAK,QAAQ,IAAI,CAAC,OAAO;AAAA,gBACvB,MAAM,EAAE;AAAA,gBACR,KAAK,EAAE;AAAA,gBACP,UAAU,EAAE;AAAA,gBACZ,WAAW,EAAE;AAAA,gBACb,WAAW,EAAE;AAAA,gBACb,GAAI,EAAE,eAAe,EAAE,aAAa,EAAE,YAAY;AAAA,gBAClD,GAAI,EAAE,eAAe,EAAE
,aAAa,EAAE,YAAY;AAAA,gBAClD,GAAI,EAAE,YAAY,UAAa,EAAE,SAAS,EAAE,QAAQ;AAAA,cACtD,EAAE;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,EAAE,QAAQ,EAAE,OAAO,EAAE,SAAS,kCAAkC,EAAE;AAAA,IAClE,OAAO,EAAE,OAAO,MAAM;AACpB,YAAM,cAAc,KAAK,QAAQ,KAAK,CAAC,MAAM,EAAE,SAAS,MAAM;AAC9D,UAAI,CAAC,aAAa;AAChB,eAAO;AAAA,UACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,WAAW,MAAM,0DAA0D,CAAC;AAAA,UAC5G,SAAS;AAAA,QACX;AAAA,MACF;AACA,YAAM,QAAQ,KAAK,MAChB,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EACjC,IAAI,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,MAAM,EAAE,KAAK,EAAE;AAChD,aAAO;AAAA,QACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,KAAK,UAAU,OAAO,MAAM,CAAC,EAAE,CAAC;AAAA,MAClE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,MACE,QAAQ,EAAE,OAAO,EAAE,SAAS,kCAAkC;AAAA,MAC9D,MAAM,EAAE,OAAO,EAAE,SAAS,sDAAsD;AAAA,IAClF;AAAA,IACA,OAAO,EAAE,QAAQ,KAAK,MAAM;AAC1B,YAAM,OAAO,KAAK,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,UAAU,EAAE,SAAS,IAAI;AAC1E,UAAI,CAAC,MAAM;AACT,eAAO;AAAA,UACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,SAAS,IAAI,0BAA0B,MAAM,4CAA4C,CAAC;AAAA,UAC1H,SAAS;AAAA,QACX;AAAA,MACF;AACA,aAAO;AAAA,QACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,KAAK,QAAQ,CAAC;AAAA,MAChD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,MACE,OAAO,EAAE,OAAO,EAAE,SAAS,cAAc;AAAA,MACzC,QAAQ,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,qCAAqC;AAAA,MAC5E,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,SAAS,wCAAwC;AAAA,IAChF;AAAA,IACA,OAAO,EAAE,OAAO,QAAQ,MAAM,MAAM;AAClC,UAAI,CAAC,MAAM,KAAK,GAAG;AACjB,eAAO;AAAA,UACL,SAAS,CAAC,EAAE,MAAM,QAAQ,MAAM,gCAAgC,CAAC;AAAA,UACjE,SAAS;AAAA,QACX;AAAA,MACF;AACA,YAAM,UAAU,YAAY,OAAO,OAAO,EAAE,QAAQ,MAAM,CAAC;AAC3D,aAAO;AAAA,QACL,SAAS;AAAA,UACP;AAAA,YACE,MAAM;AAAA,YACN,MAAM,KAAK;AAAA,cACT,QAAQ,IAAI,CAAC,OAAO;AAAA,gBAClB,QAAQ,EAAE;AAAA,gBACV,MAAM,EAAE;AAAA,gBACR,OAAO,EAAE;AAAA,gBACT,OAAO,EAAE;AAAA,cACX,EAAE;AAAA,cACF;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AA9HA;AAAA;AAAA;AAEA;AACA;AAAA;AAAA;;;ACHA,SAAS,iBAAAC,gBAAe,SAAS,kBAAkB;AACnD,OAAOC,cAAa;;;ACDpB,SAAS,qBAAqB;AAC9B,SAAS,WAAAC,gBAAe;AACxB,OAAOC,cAAa;AACpB,YAAYC,cAAa;;;ACHzB,SAAS,cAAc;AACvB,SAAS,gBAAgB;AACzB,OAAO,aAAa;AAGpB,IAAM,sBAAsB,oBAAI,IAAI,CAAC,KAAK,KAAK,GAAG,CAAC;AAGnD,IAAM,qBAAqB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAMA,SAAS,mBAAmB,MAAuB;AACjD,QAAM,QAAQ,KAAK,YAAY;AAC/B,SAAO,mBAAmB,KAAK,CAAC,MAAM,MAAM,SAAS,CAAC,CAAC;AACzD;AAMA,eAAsB,UAAU,KAA8B;AAC5D,MAAI;AACF,WAAO,MAAM,OAAO,KAAK,EAAE,cAAc,OAAO,CAAC;AAAA,EACnD,SAAS,KAAU;AACjB,UAAM,SAAS,KAAK,UAAU,UAAU,KAAK;AAC7C,QAAI,UAAU,oBAAoB,IAAI,MAAM,GAAG;AAC7C,cAAQ;AAAA,QACN,yBAAyB,MAAM;AAAA,MACjC;AACA,aAAO,iBAAiB,GAAG;AAAA,IAC7B;AACA,UAAM;AAAA,EACR;AACF;AAOA,eAAsB,iBAAiB,KAA8B;AACnE,QAAM,aAAa,MAAM,eAAe;AAGxC,MAAI,OAAO,MAAM,eAAe,YAAY,KAAK,IAAI;AAGrD,MAAI,mBAAmB,IAAI,GAAG;AAC5B,YAAQ,KAAK,2DAA2D;AACxE,WAAO,MAAM,eAAe,YAAY,KAAK,KAAK;AAAA,EACpD;AAEA,SAAO;AACT;AAEA,eAAe,iBAAiB;AAC9B,MAAI;AACF,WAAO,MAAM,OAAO,YAAY;AAAA,EAClC,QAAQ;AACN,YAAQ;AAAA,MACN;AAAA,IACF;AACA,QAAI;AACF,eAAS,6BAA6B,EAAE,OAAO,UAAU,CAAC;AAC1D,eAAS,mCAAmC,EAAE,OAAO,UAAU,CAAC;AAChE,aAAO,MAAM,OAAO,YAAY;AAAA,IAClC,QAAQ;AACN,YAAM,MAAM,IAAI;AAAA,QACd;AAAA,MAEF;AACA,MAAC,IAAY,OAAO;AACpB,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,eAAe,eACb,YACA,KACA,UACiB;AACjB,QAAM,UAAU,MAAM,WAAW,SAAS,OAAO,EAAE,SAAS,CAAC;AAC7D,MAAI;AACF,UAAM,OAAO,MAAM,QAAQ,QAAQ;AACnC,UAAM,KAAK,KAAK,KAAK,EAAE,WAAW,oBAAoB,SAAS,IAAM,CAAC;AAEtE,UAAM,KAAK,eAAe,GAAI;AAC9B,WAAO,MAAM,KAAK,QAAQ;AAAA,EAC5B,UAAE;AACA,UAAM,QAAQ,MAAM;AAAA,EACtB;AACF;;;ACrGA,YAAYC,cAAa;AACzB,SAAS,mBAAmB;AAC5B,SAAS,iBAAiB;;;ACF1B,YAAY,aAAa;AAIlB,IAAM,WAA6B;AAAA,EACxC,IAAI;AAAA,EAEJ,OAAO,KAAa
,GAAwB;AAC1C,QAAI,EAAE,6CAA6C,EAAE,SAAS,EAAG,QAAO;AACxE,QAAI,EAAE,yBAAyB,EAAE,SAAS,EAAG,QAAO;AACpD,QAAI,EAAE,iBAAiB,EAAE,SAAS,EAAG,QAAO;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AAAA,EAEA,aAAa,MAAc,SAA2B;AACpD,UAAM,IAAY,aAAK,IAAI;AAC3B,UAAM,QAAQ,oBAAI,IAAY;AAI9B,MAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,YAAM,OAAO,EAAE,EAAE,EAAE,KAAK,KAAK;AAE7B,YAAM,UAAU;AAChB,UAAI,QAAQ,QAAQ,KAAK,IAAI;AAC7B,aAAO,UAAU,MAAM;AACrB,cAAM,IAAI,MAAM,CAAC,CAAC;AAClB,gBAAQ,QAAQ,KAAK,IAAI;AAAA,MAC3B;AAAA,IACF,CAAC;AAGD,UAAM,SAAS,IAAI,IAAI,OAAO,EAAE;AAEhC,UAAM,WAAW,IAAI,IAAI,OAAO,EAAE,SAAS,MAAM,GAAG,EAAE,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG;AAE1E,WAAO,CAAC,GAAG,KAAK,EAAE,IAAI,CAAC,MAAM;AAC3B,UAAI,EAAE,WAAW,QAAQ,GAAG;AAC1B,eAAO,SAAS;AAAA,MAClB;AACA,aAAO,SAAS,WAAW;AAAA,IAC7B,CAAC;AAAA,EACH;AACF;;;AC/DO,IAAM,aAA+B;AAAA,EAC1C,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,+CAA+C,EAAE,SAAS;AAC9D,aAAO;AACT,QAAI,EAAE,8BAA8B,EAAE,SAAS,EAAG,QAAO;AACzD,QAAI,EAAE,gCAAgC,EAAE,SAAS,EAAG,QAAO;AAC3D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;AChCO,IAAM,SAA2B;AAAA,EACtC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,eAAe;AACnB,MAAE,SAAS,EAAE,KAAK,CAAC,GAAG,OAAO;AAC3B,YAAM,MAAM,EAAE,EAAE,EAAE,KAAK,OAAO,KAAK;AACnC,UAAI,QAAQ,KAAK,GAAG,EAAG;AAAA,IACzB,CAAC;AACD,QAAI,eAAe,EAAG,QAAO;AAC7B,QAAI,EAAE,aAAa,EAAE,SAAS,EAAG,QAAO;AACxC,QAAI,EAAE,cAAc,EAAE,SAAS,EAAG,QAAO;AACzC,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,KAAa,GAAwB;AAC1C,QAAI,EAAE,4CAA4C,EAAE,SAAS,EAAG,QAAO;AACvE,QAAI;AACF,YAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,UAAI,OAAO,SAAS,SAAS,aAAa,EAAG,QAAO;AAAA,IACtD,QAAQ;AAAA,IAER;AACA,QAAI,EAAE,oCAAoC,EAAE,SAAS,EAAG,QAAO;AAC/D,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACnCO,IAAM,UAA4B;AAAA,EACvC,IAAI;AAAA,EAEJ,OAAO,MAAc,IAAyB;AAC5C,WAAO;AAAA,EACT;AAAA,EAEA,kBAA0B;AACxB,WAAO;AAAA,EACT;AAAA,EAEA,kBAA4B;AAC1B,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EAEA,kBAAiC;AAC/B,WAAO;AAAA,EACT;AACF;;;ACxBO,IAAM,qBAAyC;AAAA,EACpD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAGO,SAAS,YAAY,IAAkC;AAC5D,QAAM,WAAW,mBAAmB,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAC3D,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,qBAAqB,EAAE,EAAE;AAAA,EAC3C;AACA,SAAO;AACT;;;ACfO,SAAS,QAAQ,KAAa,GAA2B;AAC9D,aAAW,YAAY,oBAAoB;AACzC,QAAI,SAAS,OAAO,KAAK,CAAC,GAAG;AAC3B,aAAO,SAAS;AAAA,IAClB;AAAA,EACF;AACA,SAAO;AACT;;;APEO,SAAS,QAAQ,MAAc,KAA4B;AAChE,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,WAAW,QAAQ,KAAK,CAAC;AAC/B,QAAM,WAAW,YAAY,QAAQ;AAErC,QAAM,QAAQ,aAAa,CAAC;AAG5B,MAAI,aAAa,WAAW;AAC1B,eAAW,OAAO,SAAS,gBAAgB,GAAG;AAC5C,QAAE,GAAG,EAAE,OAAO;AAAA,IAChB;AAEA,UAAM,YAAY,EAAE,SAAS,gBAAgB,CAAC,EAAE,MAAM;AACtD,UAAM,kBAAkB,UAAU,KAAK;AAEvC,QAAI,mBAAmB,gBAAgB,KAAK,EAAE,UAAU,KAAK;AAC3D,aAAO,EAAE,SAAS,iBAAiB,OAAO,SAAS;AAAA,IACrD;AAAA,EAEF;AAGA,MAAI,UAA4C;AAChD,MAAI;AACF,
UAAM,EAAE,SAAS,IAAI,UAAU,IAAI;AACnC,UAAM,SAAS,IAAI,YAAY,QAAe;AAC9C,cAAU,OAAO,MAAM;AAAA,EACzB,QAAQ;AAAA,EAER;AAEA,QAAM,UAAU,SAAS,WAAW,EAAE,MAAM,EAAE,KAAK,KAAK;AAExD,SAAO;AAAA,IACL;AAAA,IACA,OAAO,SAAS,SAAS,SAAS;AAAA,IAClC;AAAA,EACF;AACF;AAKA,SAAS,aAAa,GAA+B;AACnD,QAAM,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK;AACvC,MAAI,GAAI,QAAO;AAEf,QAAM,UAAU,EAAE,2BAA2B,EAAE,KAAK,SAAS,GAAG,KAAK;AACrE,MAAI,QAAS,QAAO;AAEpB,SAAO,EAAE,OAAO,EAAE,KAAK,EAAE,KAAK;AAChC;;;AQrEA,OAAO,qBAAqB;AAC5B,SAAS,WAAW;AAKb,SAAS,UAAU,MAAsB;AAC9C,QAAM,KAAK,IAAI,gBAAgB;AAAA,IAC7B,cAAc;AAAA,IACd,gBAAgB;AAAA,IAChB,kBAAkB;AAAA,EACpB,CAAC;AAED,KAAG,IAAI,GAAG;AAEV,iBAAe,EAAE;AACjB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AACvB,uBAAqB,EAAE;AAEvB,SAAO,GAAG,SAAS,IAAI;AACzB;AAEA,SAAS,UAAU,MAAiD;AAClE,SAAO,KAAK,aAAa;AAC3B;AAEA,SAAS,QAAQ,MAA4B,MAAsB;AACjE,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,aAAa,IAAI,KAAK;AAAA,EACpC;AACA,SAAO;AACT;AAEA,SAAS,WAAW,MAAoC;AACtD,MAAI,UAAU,IAAI,GAAG;AACnB,WAAO,KAAK,QAAQ,YAAY;AAAA,EAClC;AACA,SAAO;AACT;AAMA,SAAS,eAAe,IAA2B;AACjD,KAAG,QAAQ,YAAY;AAAA,IACrB,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,WAAW,IAAI;AAC3B,UAAI,QAAQ,QAAS,QAAO;AAC5B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UACE,8EAA8E;AAAA,QAC5E;AAAA,MACF;AAEA,eAAO;AACT,UAAI,QAAQ,MAAM,MAAM,MAAM,QAAS,QAAO;AAC9C,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,MAAM,QAAQ,MAAM,OAAO,EAAE,YAAY;AAC/C,UAAI,OAAO;AACX,UAAI,kBAAkB,KAAK,GAAG,EAAG,QAAO;AAAA,eAC/B,eAAe,KAAK,GAAG,EAAG,QAAO;AAAA,eACjC,cAAc,KAAK,GAAG,EAAG,QAAO;AAAA,eAChC,OAAO,KAAK,GAAG,EAAG,QAAO;AAElC,YAAM,QAAQ,QAAQ,KAAK,EAAE,MAAM,IAAI;AACvC,YAAM,SAAS,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI,EAAE,EAAE,KAAK,IAAI;AACzD,aAAO;AAAA,MAAS,IAAI;AAAA,EAAO,MAAM;AAAA;AAAA;AAAA,IACnC;AAAA,EACF,CAAC;AACH;AAMA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,kBAAkB;AAAA,IAC3B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,uCAAuC,KAAK,GAAG,EAAG,QAAO;AAC7D,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,YAAY,SAAS,MAAM;AACzB,YAAM,QACJ,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B,QAAQ,MAAM,YAAY,KAC1B;AACF,UAAI,OAAO;AACT,eAAO;AAAA,IAAO,KAAK;AAAA;AAAA,EAAS,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,MAC5C;AACA,aAAO;AAAA,EAAK,QAAQ,KAAK,CAAC;AAAA;AAAA;AAAA,IAC5B;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,UAAI,WAAW,IAAI,MAAM,MAAO,QAAO;AACvC,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,UAAI,CAAC,OAAQ,QAAO;AACpB,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,aAAO,KAAK,SAAS;AAAA,IACvB;AAAA,IACA,YAAY,UAAU,MAAM;AAC1B,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,SAAS,KAAK,cAAc,MAAM;AACxC,YAAM,OACJ,QAAQ,MAAM,eAAe,KAC7B,QAAQ,MAAM,WAAW,MACxB,OAAO,aAAa,eAAe,KAAK,QACxC,OAAO,aAAa,WAAW,KAAK;AACvC,YAAM,OAAO,OAAO,eAAe;AACnC,aAAO;AAAA,QAAW,IAAI;AAAA,EAAK,IAAI;AAAA;AAAA;AAAA,IACjC;AAAA,EACF,CAAC;AACH;AAKA,SAAS,qBAAqB,IAA2B;AACvD,KAAG,QAAQ,mBAAmB;AAAA,IAC5B,OAAO,MAAM;AACX,UAAI,CAAC,UAAU,IAAI,EAAG,QAAO;AAC7B,YAAM,QAAQ,QAAQ,MAAM,OAAO;AACnC,UAAI,CAAC,sBAAsB,KAAK,KAAK,EAAG,QAAO;AAE/C,YAAM,MAAM,QAAQ,MAAM,OAAO;AACjC,UAAI,4BAA4B,KAAK,GAAG,EAAG,QAAO;AAClD,UAAI,QAAQ,MAAM,MAAM,MAAM,WAAY,QAAO;AACjD,aAAO;AAAA,IACT;AAAA,IACA,cAAc;AACZ,aAAO;AAAA,IACT;AAAA,EACF,CAAC;AACH;;;ACzJA,SAAS,eAAe,iBAAiB;AACzC,SAAS,SAAS,YAAY;AAC9B,OAAO,YAAY;;;ACmBZ,SAAS,gBAAgB,SAAiB,YAA4B;AAC3E,QAAM,SAAS,IAAI,IAAI,OAAO;AAC9B,MAAI,WAAW,OAAO,SAAS,QAAQ,QAAQ,EAAE;AAGjD,QAAM,mBAAmB,WAAW,QAAQ,QAAQ,EAAE;AACtD,MAAI,SAAS,WAAW,gBAAgB,GAAG;AACzC,eAAW,SAAS,MAAM,iBAAiB,MAAM;AAAA,EACnD;AAGA,aAAW,SAAS,QAAQ,QAAQ,EAAE;AAEtC,MAAI,CAAC,SAAU,QAAO;AAEtB,SAAO,WAAW;AACpB;;;AD
vBO,SAAS,MACd,UACA,YACA,SACM;AACN,QAAM,UAAU,OAAO,UAAU,UAAU;AAAA,IACzC,QAAQ,QAAQ;AAAA,IAChB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,iBAAiB;AAAA,EACnB,CAAC;AAED,MAAI,YAAY;AACd,cAAU,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAClD,kBAAc,YAAY,SAAS,OAAO;AAAA,EAC5C,OAAO;AACL,YAAQ,OAAO,MAAM,OAAO;AAAA,EAC9B;AACF;AAKO,SAAS,UACd,UACA,UACA,SACM;AACN,QAAM,UAAU,OAAO,UAAU,UAAU;AAAA,IACzC,QAAQ,QAAQ;AAAA,IAChB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,iBAAiB;AAAA,EACnB,CAAC;AAED,YAAU,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAChD,gBAAc,UAAU,SAAS,OAAO;AAC1C;AAaO,SAAS,WACd,OACA,WACA,YACmC;AACnC,QAAM,YAAY,oBAAI,IAAY;AAClC,QAAM,UAA6C,CAAC;AAEpD,aAAW,QAAQ,OAAO;AACxB,QAAI,UAAU,gBAAgB,KAAK,KAAK,UAAU;AAGlD,QAAI,UAAU,IAAI,OAAO,GAAG;AAC1B,YAAM,OAAO,QAAQ,QAAQ,SAAS,EAAE;AACxC,UAAI,IAAI;AACR,aAAO,UAAU,IAAI,GAAG,IAAI,IAAI,CAAC,KAAK,EAAG;AACzC,gBAAU,GAAG,IAAI,IAAI,CAAC;AAAA,IACxB;AACA,cAAU,IAAI,OAAO;AAErB,UAAM,WAAW,KAAK,WAAW,OAAO;AACxC,cAAU,KAAK,UAAU,UAAU;AAAA,MACjC,WAAW,KAAK;AAAA,MAChB,OAAO,KAAK;AAAA,MACZ,UAAU,KAAK;AAAA,IACjB,CAAC;AAED,YAAQ,KAAK,EAAE,OAAO,KAAK,OAAO,MAAM,QAAQ,CAAC;AAAA,EACnD;AAEA,SAAO;AACT;;;AEjGA,YAAYC,cAAa;;;ACelB,SAAS,aAAa,KAAqB;AAChD,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,SAAO,OAAO;AACd,SAAO,SAAS;AAChB,SAAO,OAAO,KAAK,QAAQ,OAAO,EAAE;AACtC;AAKO,SAAS,YAAY,KAAqB;AAC/C,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,WAAO,OAAO,SAAS,QAAQ,OAAO,GAAG,EAAE,QAAQ,SAAS,EAAE;AAAA,EAChE,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;ACxBO,SAAS,eAAe,KAG7B;AACA,QAAM,SAAS,IAAI,IAAI,GAAG;AAC1B,QAAM,YAAY,OAAO,SAAS,MAAM,GAAG;AAE3C,YAAU,IAAI;AACd,QAAM,aAAa,UAAU,KAAK,GAAG,IAAI;AACzC,SAAO,EAAE,QAAQ,OAAO,QAAQ,WAAW;AAC7C;AAMO,SAAS,oBACd,UACA,SACQ;AACR,QAAM,aAAa,IAAI,IAAI,QAAQ,EAAE,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AACvE,QAAM,QAAQ,CAAC,GAAG,UAAU;AAE5B,aAAW,OAAO,SAAS;AACzB,UAAM,WAAW,IAAI,IAAI,GAAG,EAAE,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO;AAChE,QAAI,IAAI;AACR,WAAO,IAAI,MAAM,UAAU,IAAI,SAAS,UAAU,MAAM,CAAC,MAAM,SAAS,CAAC,GAAG;AAC1E;AAAA,IACF;AACA,UAAM,SAAS;AAAA,EACjB;AAEA,SAAO,OAAO,MAAM,SAAS,IAAI,MAAM,KAAK,GAAG,IAAI,MAAM;AAC3D;AAKO,SAAS,WACd,cACA,QACA,YACS;AACT,MAAI;AACF,UAAM,SAAS,IAAI,IAAI,YAAY;AACnC,WAAO,OAAO,WAAW,UAAU,OAAO,SAAS,WAAW,UAAU;AAAA,EAC1E,QAAQ;AACN,WAAO;AAAA,EACT;AACF;;;AF1BA,eAAsB,MACpB,UACA,SACsB;AACtB,QAAM,EAAE,OAAO,IAAI,eAAe,QAAQ;AAC1C,MAAI,EAAE,WAAW,IAAI,eAAe,QAAQ;AAC5C,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,UAAyB,CAAC;AAChC,MAAI,cAAc;AAGlB,QAAM,QAA4B,CAAC,CAAC,UAAU,CAAC,CAAC;AAChD,UAAQ,IAAI,aAAa,QAAQ,CAAC;AAElC,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,CAAC,KAAK,KAAK,IAAI,MAAM,MAAM;AAEjC,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,UAAU,GAAG;AAAA,IAC5B,QAAQ;AACN,cAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAC1E;AAAA,IACF;AACA,YAAQ,KAAK,EAAE,KAAK,KAAK,CAAC;AAC1B,YAAQ,gBAAgB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,MAAM,MAAM;AAE1E,QAAI,QAAQ,QAAQ,UAAU;AAE5B,UAAI,aAAa;AACf,cAAM,aAAa,QAAQ,eACvB,yBAAyB,MAAM,KAAK,QAAQ,QAAQ,YAAY,IAChE,mBAAmB,MAAM,KAAK,QAAQ,QAAQ,eAAe;AACjE,YAAI,WAAW,SAAS,GAAG;AACzB,uBAAa,oBAAoB,UAAU,UAAU;AAAA,QACvD;AACA,sBAAc;AAAA,MAChB;AAEA,YAAM,QAAQ,QAAQ,eAClB,oBAAoB,MAAM,KAAK,QAAQ,YAAY,QAAQ,YAAY,IACvE,cAAc,MAAM,KAAK,QAAQ,YAAY,QAAQ,eAAe;AACxE,iBAAW,QAAQ,OAAO;AACxB,cAAM,aAAa,aAAa,IAAI;AACpC,YAAI,CAAC,QAAQ,IAAI,UAAU,GAAG;AAC5B,kBAAQ,IAAI,UAAU;AACtB,gBAAM,KAAK,CAAC,MAAM,QAAQ,CAAC,CAAC;AAAA,QAC9B;AAAA,MACF;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,GAAG;AACpB,YAAM,MAAM,GAAG;AAAA,IACjB;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,SAAS,iBAAiB,WAAW;AACvD;AAMA,SAAS,cACP,MACA,SACA,QACA,YACA,iBACU;AACV,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAW,mBAAmB;AAEpC,IAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,UAAM,OAAO,EAAE,EAAE,EAAE,KAAK,MAAM;AAC9B,QAAI,CAAC,KAAM
;AAEX,QAAI;AACF,YAAM,WAAW,IAAI,IAAI,MAAM,OAAO,EAAE;AACxC,UAAI,WAAW,UAAU,QAAQ,UAAU,GAAG;AAC5C,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,CAAC;AAED,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAC3B;AAMA,SAAS,mBACP,MACA,SACA,QACA,iBACU;AACV,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAW,mBAAmB;AAEpC,IAAE,QAAQ,EAAE,KAAK,CAAC,GAAG,OAAO;AAC1B,UAAM,OAAO,EAAE,EAAE,EAAE,KAAK,MAAM;AAC9B,QAAI,CAAC,KAAM;AACX,QAAI;AACF,YAAM,WAAW,IAAI,IAAI,MAAM,OAAO,EAAE;AACxC,UAAI,IAAI,IAAI,QAAQ,EAAE,WAAW,QAAQ;AACvC,cAAM,KAAK,QAAQ;AAAA,MACrB;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF,CAAC;AAED,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,CAAC;AAC3B;AAKA,SAAS,yBACP,MACA,SACA,QACA,cACU;AACV,QAAM,OAAO,aAAa,MAAM,OAAO;AACvC,SAAO;AAAA,IACL,GAAG,IAAI;AAAA,MACL,KAAK,OAAO,CAAC,MAAM;AACjB,YAAI;AACF,iBAAO,IAAI,IAAI,CAAC,EAAE,WAAW;AAAA,QAC/B,QAAQ;AACN,iBAAO;AAAA,QACT;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAKA,SAAS,oBACP,MACA,SACA,QACA,YACA,cACU;AACV,QAAM,OAAO,aAAa,MAAM,OAAO;AACvC,SAAO,CAAC,GAAG,IAAI,IAAI,KAAK,OAAO,CAAC,MAAM,WAAW,GAAG,QAAQ,UAAU,CAAC,CAAC,CAAC;AAC3E;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAACC,aAAY,WAAWA,UAAS,EAAE,CAAC;AACzD;;;AGhMA,SAAS,cAAc,iBAAAC,gBAAe,aAAAC,kBAAiB;AACvD,SAAS,QAAAC,aAAqB;AAsCvB,SAAS,oBACd,MACA,KACA,UACA,OACA,UACgB;AAChB,QAAM,WAA2B;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,IACA,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,IACnC;AAAA,EACF;AAEA,MAAI,UAAU;AACZ,aAAS,eAAe,SAAS;AACjC,aAAS,cAAc,SAAS;AAChC,aAAS,WAAW,SAAS;AAC7B,aAAS,WAAW,SAAS;AAC7B,aAAS,WAAW,SAAS;AAC7B,aAAS,aAAa,MAAM;AAAA,EAC9B;AAEA,SAAO;AACT;AAKO,SAAS,oBACd,UACA,WACM;AACN,EAAAD,WAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AACxC,EAAAD;AAAA,IACEE,MAAK,WAAW,aAAa;AAAA,IAC7B,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI;AAAA,IACpC;AAAA,EACF;AACF;AAMO,SAAS,iBAAiB,SAA+B;AAC9D,MAAI;AACF,UAAM,MAAM,aAAaA,MAAK,SAAS,eAAe,GAAG,OAAO;AAChE,WAAO,KAAK,MAAM,GAAG;AAAA,EACvB,QAAQ;AACN,WAAO,EAAE,SAAS,CAAC,EAAE;AAAA,EACvB;AACF;AAKO,SAAS,mBACd,SACA,OACM;AACN,QAAM,WAAW,iBAAiB,OAAO;AACzC,QAAM,MAAM,SAAS,QAAQ,UAAU,CAAC,MAAM,EAAE,SAAS,MAAM,IAAI;AACnE,MAAI,OAAO,GAAG;AACZ,aAAS,QAAQ,GAAG,IAAI;AAAA,EAC1B,OAAO;AACL,aAAS,QAAQ,KAAK,KAAK;AAAA,EAC7B;AACA,EAAAD,WAAU,SAAS,EAAE,WAAW,KAAK,CAAC;AACtC,EAAAD;AAAA,IACEE,MAAK,SAAS,eAAe;AAAA,IAC7B,KAAK,UAAU,UAAU,MAAM,CAAC,IAAI;AAAA,IACpC;AAAA,EACF;AACF;;;AClHA,YAAYC,cAAa;AAelB,SAAS,gBAAgB,MAAc,KAAuB;AACnE,QAAM,IAAY,cAAK,IAAI;AAC3B,QAAM,SAAS,IAAI,IAAI,GAAG,EAAE;AAE5B,SAAO;AAAA,IACL,aAAa,mBAAmB,GAAG,GAAG;AAAA,IACtC,aAAa,mBAAmB,CAAC;AAAA,IACjC,SAAS,eAAe,GAAG,MAAM;AAAA,IACjC,SAAS,eAAe,GAAG,MAAM;AAAA,IACjC,UAAU,gBAAgB,CAAC;AAAA,EAC7B;AACF;AAEA,SAAS,SAAS,OAA+C;AAC/D,SAAO,OAAO,KAAK,KAAK;AAC1B;AAEA,SAAS,mBAAmB,GAAuB,KAAqB;AACtE,QAAM,aAAa,SAAS,EAAE,+BAA+B,EAAE,KAAK,SAAS,CAAC;AAC9E,MAAI,WAAY,QAAO;AAEvB,QAAM,UAAU,SAAS,EAAE,+BAA+B,EAAE,KAAK,SAAS,CAAC;AAC3E,MAAI,QAAS,QAAO;AAEpB,QAAM,QAAQ,SAAS,EAAE,OAAO,EAAE,KAAK,CAAC;AACxC,MAAI,OAAO;AACT,UAAM,QAAQ,MAAM,MAAM,WAAW;AACrC,WAAO,MAAM,CAAC,EAAE,KAAK;AAAA,EACvB;AAEA,SAAO,IAAI,IAAI,GAAG,EAAE;AACtB;AAEA,SAAS,mBAAmB,GAA+B;AACzD,QAAM,SAAS,SAAS,EAAE,iCAAiC,EAAE,KAAK,SAAS,CAAC;AAC5E,MAAI,OAAQ,QAAO;AAEnB,QAAM,WAAW,SAAS,EAAE,0BAA0B,EAAE,KAAK,SAAS,CAAC;AACvE,MAAI,SAAU,QAAO;AAErB,SAAO;AACT;AAEA,SAAS,eAAe,GAAuB,QAA+B;AAC5E,QAAM,YAAY;AAAA,IAChB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,aAAW,YAAY,WAAW;AAChC,UAAM,OAAO,SAAS,EAAE,QAAQ,EAAE,KAAK,MAAM,CAAC;AAC9C,QAAI,KAAM,QAAO,WAAW,MAAM,MAAM;AAAA,EAC1C;AAEA,SAAO,GAAG,MAAM;AAClB;AAEA,SAAS,eAAe,GAAuB,QAA+B;AAC5E,QAAM,UAAU,SAAS,EAAE,2BAA2B,EAAE,KAAK,SAAS,CAAC;AACvE,MAAI,QAAS,QAAO,WAAW,SAAS,MAAM;AAE9C,SAAO;AACT;AAEA,SAAS,gBAAgB,GAAsC;AAC7D,QAAM,WAAW,SAAS,EAAE,MAAM,EAAE,KAAK,MAAM,CAAC;AAChD,MAAI,SAAU,QAAO;AAErB,QAAM,WAAW,SA
AS,EAAE,4BAA4B,EAAE,KAAK,SAAS,CAAC;AACzE,MAAI,SAAU,QAAO;AAErB,SAAO;AACT;AAEA,SAAS,WAAW,MAAc,QAAwB;AACxD,MAAI,KAAK,WAAW,SAAS,KAAK,KAAK,WAAW,UAAU,EAAG,QAAO;AACtE,MAAI,KAAK,WAAW,IAAI,EAAG,QAAO,SAAS,IAAI;AAC/C,MAAI,KAAK,WAAW,GAAG,EAAG,QAAO,GAAG,MAAM,GAAG,IAAI;AACjD,SAAO,GAAG,MAAM,IAAI,IAAI;AAC1B;;;AjBzEA,SAAS,kBACP,QACA,aACA,MAC0F;AAC1F,MAAI,CAAC,aAAa;AAChB,WAAO,EAAE,MAAM,eAAe,YAAY,QAAQ,WAAW,GAAG;AAAA,EAClE;AAGA,MAAI,UAAU,OAAO,SAAS,KAAK,GAAG;AACpC,WAAO,EAAE,MAAM,eAAe,YAAY,QAAQ,WAAW,GAAG;AAAA,EAClE;AAGA,MAAI,QAAQ;AACV,UAAM,MAAM,OAAO,SAAS,GAAG,IAAI,SAAS,SAAS;AACrD,WAAO,EAAE,MAAM,aAAa,YAAY,QAAW,WAAW,IAAI;AAAA,EACpE;AAGA,SAAO,EAAE,MAAM,aAAa,YAAY,QAAW,WAAW,YAAY,IAAI,IAAI;AACpF;AAEO,IAAM,eAAe,cAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,MAAM,KAAK;AACjB,UAAM,SAAS,KAAK;AACpB,UAAM,cAAc,KAAK;AACzB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AACzD,UAAM,OAAQ,KAAK,QAAmB,YAAY,GAAG;AAErD,UAAM,EAAE,MAAM,YAAY,UAAU,IAAI,kBAAkB,QAAQ,aAAa,IAAI;AACnF,UAAM,SAAS,SAAS,iBAAiB,CAAC;AAE1C,QAAI,aAAa;AACf,UAAI,CAAC,OAAQ,CAAAC,SAAQ,MAAM,iBAAiB,GAAG,gBAAgB,QAAQ,MAAM;AAG7E,YAAM,YAAY,MAAM,UAAU,GAAG;AACrC,YAAM,IAAY,cAAK,SAAS;AAChC,YAAM,aAAa,QAAQ,KAAK,CAAC;AACjC,YAAM,WAAW,YAAY,UAAU;AACvC,YAAM,kBAAkB,SAAS,gBAAgB;AAEjD,YAAM,cAAc,MAAM,MAAM,KAAK;AAAA,QACnC;AAAA,QACA;AAAA,QACA,cAAc,SAAS,cAAc,KAAK,QAAQ;AAAA,QAClD,eAAe,CAAC,SAAS,SAAS,UAAU;AAC1C,cAAI,CAAC,OAAQ,CAAAA,SAAQ,KAAK,IAAI,OAAO,IAAI,KAAK,KAAK,OAAO,EAAE;AAAA,QAC9D;AAAA,MACF,CAAC;AAED,YAAM,EAAE,OAAO,gBAAgB,IAAI;AACnC,UAAI,CAAC,OAAQ,CAAAA,SAAQ,QAAQ,WAAW,MAAM,MAAM,QAAQ;AAE5D,UAAI,SAAS,aAAa;AAExB,cAAM,cAAc,MAAM,IAAI,CAAC,SAAS;AACtC,gBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,gBAAM,KAAK,UAAU,OAAO;AAC5B,iBAAO,EAAE,KAAK,KAAK,KAAK,OAAO,UAAU,UAAU,GAAG;AAAA,QACxD,CAAC;AAED,cAAM,gBAAgB,YAAY,CAAC,GAAG,YAAY;AAClD,cAAM,gBAAgB,WAAW,aAAa,WAAW,eAAe;AAExE,cAAM,WAAW,gBAAgB,WAAW,GAAG;AAC/C,cAAM,iBAAiB,oBAAoB,MAAM,KAAK,eAAe,eAAe,QAAQ;AAC5F,4BAAoB,gBAAgB,SAAS;AAG7C,cAAM,UAAUC,SAAQ,UAAU,QAAQ,OAAO,EAAE,CAAC;AACpD,2BAAmB,SAAS;AAAA,UAC1B;AAAA,UACA,MAAM,OAAO;AAAA,UACb,YAAY,eAAe;AAAA,UAC3B,cAAc,SAAS;AAAA,UACvB,aAAa,SAAS;AAAA,UACtB,UAAU,SAAS;AAAA,UACnB,YAAY,cAAc;AAAA,QAC5B,CAAC;AAED,QAAAD,SAAQ,QAAQ,WAAW,MAAM,MAAM,aAAa,SAAS,EAAE;AAAA,MACjE,OAAO;AAEL,cAAM,WAAqB,CAAC;AAC5B,YAAI,aAAa;AACjB,YAAI,gBAAgB;AAEpB,mBAAW,QAAQ,OAAO;AACxB,gBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,cAAI,CAAC,YAAY;AACf,yBAAa;AACb,4BAAgB;AAAA,UAClB;AACA,gBAAM,KAAK,UAAU,OAAO;AAC5B,mBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,QAC7D;AAEA,cAAM,WAAW,SAAS,KAAK,aAAa;AAE5C,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW;AAAA,UACX,OAAO;AAAA,UACP,UAAU;AAAA,QACZ,CAAC;AAED,YAAI,CAAC,OAAQ,CAAAA,SAAQ,QAAQ,cAAc,UAAU,EAAE;AAAA,MACzD;AAAA,IACF,OAAO;AACL,UAAI,CAAC,OAAQ,CAAAA,SAAQ,MAAM,YAAY,GAAG,KAAK;AAC/C,UAAI,OAAO,MAAM,UAAU,GAAG;AAE9B,YAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,GAAG;AAGtD,UAAI,QAAQ,KAAK,EAAE,SAAS,KAAK;AAC/B,YAAI,CAAC,OAAQ,CAAAA,SAAQ,KAAK,8CAA8C;AACxE,YAAI;AACF,iBAAO,MAAM,iBAAiB,GAAG;AACjC,gBAAM,SAAS,QAAQ,MAAM,GAAG;AAChC,gBAAME,YAAW,UAAU,OAAO,OAAO;AACzC,gBAAMA,WAAU,YAAY;AAAA,YAC1B,WAAW;AAAA,YACX,OAAO,OAAO,SAAS;AAAA,YACvB,UAAU,OAAO;AAAA,UACnB,CAAC;AACD,cAAI,CAAC,OAAQ,CAAAF,SAAQ,QAAQ,cAAc,UAAU
,EAAE;AACvD;AAAA,QACF,SAAS,KAAU;AACjB,cAAI,KAAK,SAAS,gCAAgC;AAChD,YAAAA,SAAQ;AAAA,cACN;AAAA,YAEF;AAAA,UACF,OAAO;AACL,YAAAA,SAAQ,KAAK,gDAAgD;AAAA,UAC/D;AAAA,QACF;AAAA,MACF;AAEA,UAAI,CAAC,OAAQ,CAAAA,SAAQ,QAAQ,gCAAgC,QAAQ,GAAG;AACxE,YAAM,WAAW,UAAU,OAAO;AAElC,YAAM,UAAU,YAAY;AAAA,QAC1B,WAAW;AAAA,QACX;AAAA,QACA;AAAA,MACF,CAAC;AAED,UAAI,CAAC,OAAQ,CAAAA,SAAQ,QAAQ,cAAc,UAAU,EAAE;AAAA,IACzD;AAAA,EACF;AACF,CAAC;;;AkBhND,SAAS,iBAAAG,sBAAqB;AAC9B,SAAS,QAAAC,aAAY;AACrB,OAAOC,cAAa;;;ACFpB,SAAS,gBAAAC,eAAc,iBAAAC,gBAAe,kBAAkB;AACxD,SAAS,QAAAC,OAAM,WAAAC,gBAAe;AAC9B,OAAO,UAAU;AAGjB,IAAM,kBAAkB;AAMjB,SAAS,WAAW,UAGlB;AACP,QAAM,aAAa,eAAe,YAAY,QAAQ,IAAI,CAAC;AAC3D,MAAI,CAAC,WAAY,QAAO;AAExB,QAAM,MAAMH,cAAa,YAAY,OAAO;AAC5C,QAAM,OAAO,KAAK,KAAK,GAAG;AAE1B,QAAM,SAAwB;AAAA,IAC5B,SAAS,KAAK,WAAW;AAAA,IACzB,WAAW,KAAK,cAAc;AAAA,IAC9B,UAAU,KAAK,WAAW,CAAC,GAAG,IAAI,kBAAkB;AAAA,EACtD;AAEA,SAAO,EAAE,QAAQ,WAAW;AAC9B;AAKO,SAAS,WAAW,QAAuB,YAA0B;AAC1E,QAAM,OAAO;AAAA,IACX,SAAS,OAAO;AAAA,IAChB,YAAY,OAAO;AAAA,IACnB,SAAS,OAAO,QAAQ,IAAI,kBAAkB;AAAA,EAChD;AAEA,QAAM,UAAU,KAAK,KAAK,MAAM,EAAE,WAAW,GAAG,CAAC;AACjD,EAAAC,eAAc,YAAY,SAAS,OAAO;AAC5C;AAKO,SAAS,UAAU,QAAuB,QAA4B;AAC3E,QAAM,MAAM,OAAO,QAAQ,UAAU,CAAC,MAAM,EAAE,SAAS,OAAO,IAAI;AAClE,MAAI,OAAO,GAAG;AACZ,WAAO,QAAQ,GAAG,IAAI;AAAA,EACxB,OAAO;AACL,WAAO,QAAQ,KAAK,MAAM;AAAA,EAC5B;AACF;AAKA,SAAS,eAAe,UAAiC;AACvD,MAAI,MAAM;AACV,SAAO,MAAM;AACX,UAAM,YAAYC,MAAK,KAAK,eAAe;AAC3C,QAAI,WAAW,SAAS,EAAG,QAAO;AAClC,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,SAAS,mBAAmB,GAAsC;AAChE,SAAO;AAAA,IACL,MAAM,EAAE,QAAQ;AAAA,IAChB,KAAK,EAAE,OAAO;AAAA,IACd,OAAO,EAAE,SAAS;AAAA,IAClB,UAAU,EAAE,aAAa;AAAA,IACzB,QAAQ,EAAE,UAAU;AAAA,EACtB;AACF;AAEA,SAAS,mBACP,GAC2C;AAC3C,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR,KAAK,EAAE;AAAA,IACP,OAAO,EAAE;AAAA,IACT,WAAW,EAAE;AAAA,IACb,QAAQ,EAAE;AAAA,EACZ;AACF;;;ADnFO,IAAM,aAAaC,eAAc;AAAA,EACtC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,aAAa;AAAA,MACb,UAAU;AAAA,IACZ;AAAA,IACA,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,IACA,OAAO;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,aAAa;AAAA,MACX,MAAM;AAAA,MACN,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,IAAI,EAAE,KAAK,GAAG;AACZ,UAAM,MAAM,KAAK;AACjB,UAAM,cAAc,KAAK;AACzB,UAAM,WAAW,SAAS,KAAK,WAAW,GAAa,EAAE;AAEzD,UAAM,OAAQ,KAAK,QAAmB,YAAY,GAAG;AACrD,UAAM,SACH,KAAK,WAAsB,cAAc,GAAG,IAAI,MAAM,GAAG,IAAI;AAEhE,UAAM,WAAW,WAAW;AAC5B,QAAI;AACJ,QAAI;AAEJ,QAAI,UAAU;AACZ,eAAS,SAAS;AAClB,mBAAa,SAAS;AAAA,IACxB,OAAO;AACL,mBAAaC,MAAK,QAAQ,IAAI,GAAG,eAAe;AAChD,eAAS,EAAE,SAAS,GAAG,WAAW,YAAY,SAAS,CAAC,EAAE;AAAA,IAC5D;AAEA,cAAU,QAAQ,EAAE,MAAM,KAAK,OAAO,aAAa,UAAU,OAAO,CAAC;AACrE,eAAW,QAAQ,UAAU;AAE7B,IAAAC,SAAQ,QAAQ,iBAAiB,IAAI,YAAO,GAAG,EAAE;AACjD,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AAAA,EACtC;AACF,CAAC;;;AEjED,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,QAAAC,OAAM,WAAAC,gBAAe;AAC9B,SAAS,aAAAC,kBAAiB;AAC1B,YAAYC,cAAa;AACzB,OAAOC,cAAa;AAgBb,IAAM,gBAAgBC,eAAc;AAAA,EACzC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,aAAa;AAAA,IACf;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,MAAM,wDAAwD;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,UAAM,YAAYC,SAAQ,UAAU;AACpC,UAAM,aAAa,KAAK;AAExB,UAAM,UAAU,aACZ,OAAO,QAAQ,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU,IAClD,OAAO;AAEX,QAAI,QAAQ,WAAW,GAAG;AACxB,UAAI,YAAY;AACd,QAAAD,SAAQ,MAAM,WAAW,UAAU,wBAAwB;AAAA,MAC7D,OAAO;AACL,QAAAA,SAAQ,MAAM,wBAAwB;AAAA,MAC
xC;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,eAAW,UAAU,SAAS;AAC5B,YAAM,oBAAoB,CAAC,OAAO,OAAO,SAAS,KAAK;AAEvD,MAAAA,SAAQ,MAAM,aAAa,OAAO,IAAI,UAAU,OAAO,GAAG,KAAK;AAE/D,UAAI,OAAO,OAAO;AAEhB,cAAM,YAAY,MAAM,UAAU,OAAO,GAAG;AAC5C,cAAM,IAAY,cAAK,SAAS;AAChC,cAAM,aAAa,QAAQ,OAAO,KAAK,CAAC;AACxC,cAAM,WAAW,YAAY,UAAU;AAEvC,cAAM,cAAc,MAAM,MAAM,OAAO,KAAK;AAAA,UAC1C,UAAU,OAAO;AAAA,UACjB,iBAAiB,SAAS,gBAAgB;AAAA,UAC1C,cAAc,SAAS,cAAc,KAAK,QAAQ;AAAA,UAClD,eAAe,CAAC,KAAK,SAAS,UAAU;AACtC,YAAAA,SAAQ,KAAK,MAAM,OAAO,IAAI,KAAK,KAAK,GAAG,EAAE;AAAA,UAC/C;AAAA,QACF,CAAC;AAED,cAAM,EAAE,OAAO,gBAAgB,IAAI;AAEnC,YAAI,mBAAmB;AAErB,gBAAM,YAAYE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AACjE,gBAAM,cAAc,MAAM,IAAI,CAAC,SAAS;AACtC,kBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,kBAAM,KAAK,UAAU,OAAO;AAC5B,mBAAO,EAAE,KAAK,KAAK,KAAK,OAAO,UAAU,UAAU,GAAG;AAAA,UACxD,CAAC;AAED,gBAAM,gBAAgB,YAAY,CAAC,GAAG,YAAY;AAClD,gBAAM,gBAAgB,WAAW,aAAa,WAAW,eAAe;AAExE,gBAAM,WAAW,gBAAgB,WAAW,OAAO,GAAG;AACtD,gBAAM,iBAAiB;AAAA,YACrB,OAAO;AAAA,YACP,OAAO;AAAA,YACP;AAAA,YACA;AAAA,YACA;AAAA,UACF;AACA,8BAAoB,gBAAgB,SAAS;AAE7C,gBAAM,UAAUA,MAAK,WAAW,OAAO,SAAS;AAChD,6BAAmB,SAAS;AAAA,YAC1B,MAAM,OAAO;AAAA,YACb,MAAM,OAAO;AAAA,YACb,YAAY,eAAe;AAAA,YAC3B,cAAc,SAAS;AAAA,YACvB,aAAa,SAAS;AAAA,YACtB,UAAU,SAAS;AAAA,YACnB,YAAY,cAAc;AAAA,UAC5B,CAAC;AAED,UAAAF,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,SAAS,KAAK,MAAM,MAAM,SAAS;AAAA,QACnF,OAAO;AAEL,gBAAM,aAAaE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AAClE,UAAAC,WAAUF,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,gBAAM,WAAqB,CAAC;AAC5B,cAAI,aAAa;AACjB,cAAI,gBAAgB;AAEpB,qBAAW,QAAQ,OAAO;AACxB,kBAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,KAAK,MAAM,KAAK,GAAG;AAChE,gBAAI,CAAC,YAAY;AACf,2BAAa;AACb,8BAAgB;AAAA,YAClB;AACA,kBAAM,KAAK,UAAU,OAAO;AAC5B,qBAAS,KAAK,MAAM,KAAK;AAAA;AAAA,UAAe,KAAK,GAAG;AAAA;AAAA,EAAO,EAAE,EAAE;AAAA,UAC7D;AAEA,gBAAM,WAAW,SAAS,KAAK,aAAa;AAC5C,gBAAM,UAAU,YAAY;AAAA,YAC1B,WAAW,OAAO;AAAA,YAClB,OAAO;AAAA,YACP,UAAU;AAAA,UACZ,CAAC;AAED,UAAAD,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,UAAU,EAAE;AAAA,QAC5D;AAAA,MACF,OAAO;AACL,cAAM,aAAaE,MAAK,WAAW,OAAO,WAAW,OAAO,MAAM;AAClE,QAAAC,WAAUF,SAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,cAAM,OAAO,MAAM,UAAU,OAAO,GAAG;AACvC,cAAM,EAAE,SAAS,OAAO,SAAS,IAAI,QAAQ,MAAM,OAAO,GAAG;AAC7D,cAAM,WAAW,UAAU,OAAO;AAClC,cAAM,UAAU,YAAY;AAAA,UAC1B,WAAW,OAAO;AAAA,UAClB;AAAA,UACA;AAAA,QACF,CAAC;AAED,QAAAD,SAAQ,QAAQ,YAAY,OAAO,IAAI,YAAO,UAAU,EAAE;AAAA,MAC5D;AAAA,IACF;AAAA,EACF;AACF,CAAC;;;AC7JD,SAAS,iBAAAI,sBAAqB;AAC9B,OAAOC,cAAa;AAGb,IAAM,cAAcC,eAAc;AAAA,EACvC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AACJ,UAAM,SAAS,WAAW;AAC1B,QAAI,CAAC,QAAQ;AACX,MAAAC,SAAQ,KAAK,iEAAiE;AAC9E;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,WAAW,IAAI;AAC/B,IAAAA,SAAQ,KAAK,WAAW,UAAU,EAAE;AACpC,IAAAA,SAAQ,KAAK,eAAe,OAAO,SAAS;AAAA,CAAI;AAEhD,QAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,MAAAA,SAAQ,KAAK,wBAAwB;AACrC;AAAA,IACF;AAEA,eAAW,UAAU,OAAO,SAAS;AACnC,YAAM,YAAY,OAAO,QACrB,mBAAmB,OAAO,QAAQ,MAClC;AACJ,cAAQ,IAAI,KAAK,OAAO,IAAI,GAAG,SAAS,EAAE;AAC1C,cAAQ,IAAI,eAAe,OAAO,GAAG,EAAE;AACvC,cAAQ,IAAI,eAAe,OAAO,MAAM,EAAE;AAC1C,cAAQ,IAAI;AAAA,IACd;AAAA,EACF;AACF,CAAC;;;ACnCD,SAAS,iBAAAC,sBAAqB;AAC9B,SAAS,WAAAC,gBAAe;AAEjB,IAAM,eAAeD,eAAc;AAAA,EACxC,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,aAAa;AAAA,EACf;AAAA,EACA,MAAM;AAAA,IACJ,KAAK;AAAA,MACH,MAAM;AAAA,MACN,OAAO;AAAA,MACP,aAAa;AAAA,MACb,SAAS;AAAA,IACX;AAAA,EACF;AAAA,EACA,MAAM,IAAI,EAAE,KAAK,GAAG;AAClB,UAAM,UAAUC,SAAQ,QAAQ,IAAI,GAAG,KAAK,GAAa;AACzD,UAAM,EAAE,iBAAAC,iBAAgB,IAAI,MAAM;AAClC,UAAM,EAAE,qBAAqB,IAAI,MAAM,OACrC,2CACF;AACA,UAAM,SAASA,iBAAgB,OAAO;AACtC,UAAM,YAAY,IAAI,qBAAqB;AAC3C,UAAM,OAAO,QAAQ,SAAS;AAAA,EAChC;AACF,CAAC;;;AvBlBD,QAAQ,GAAG,qBAAqB,CAAC,QAAa;AAC5C
,MAAI,IAAI,SAAS,gCAAgC;AAC/C,IAAAC,SAAQ,MAAM,IAAI,OAAO;AAAA,EAC3B,OAAO;AACL,IAAAA,SAAQ,MAAM,IAAI,WAAW,GAAG;AAAA,EAClC;AACA,UAAQ,KAAK,CAAC;AAChB,CAAC;AAED,QAAQ,GAAG,sBAAsB,CAAC,QAAa;AAC7C,MAAI,KAAK,SAAS,gCAAgC;AAChD,IAAAA,SAAQ,MAAM,IAAI,OAAO;AAAA,EAC3B,OAAO;AACL,IAAAA,SAAQ,MAAM,KAAK,WAAW,GAAG;AAAA,EACnC;AACA,UAAQ,KAAK,CAAC;AAChB,CAAC;AAED,IAAM,cAAmC;AAAA,EACvC,KAAK;AAAA,EACL,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,OAAO;AACT;AAGA,IAAM,WAAW,QAAQ,KAAK,CAAC;AAC/B,IAAM,eAAe,YAAY,YAAY;AAE7C,IAAI,cAAc;AAEhB,QAAM,OAAOC,eAAc;AAAA,IACzB,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,IACf;AAAA,IACA;AAAA,EACF,CAAC;AACD,UAAQ,IAAI;AACd,WAAW,YAAY,CAAC,SAAS,WAAW,GAAG,KAAK,aAAa,UAAU;AAEzE,aAAW,cAAc,EAAE,SAAS,QAAQ,KAAK,MAAM,CAAC,EAAE,CAAC;AAC7D,OAAO;AAEL,QAAM,OAAOA,eAAc;AAAA,IACzB,MAAM;AAAA,MACJ,MAAM;AAAA,MACN,SAAS;AAAA,MACT,aAAa;AAAA,IACf;AAAA,IACA;AAAA,IACA,MAAM;AACJ,cAAQ,IAAI,+CAA+C;AAC3D,cAAQ,IAAI,kDAAkD;AAC9D,cAAQ,IAAI,qCAAqC;AACjD,cAAQ,IAAI,qBAAqB;AACjC,cAAQ,IAAI,+BAA+B;AAC3C,cAAQ,IAAI,wCAAwC;AAAA,IACtD;AAAA,EACF,CAAC;AACD,UAAQ,IAAI;AACd;","names":["readFileSync","join","matter","defineCommand","consola","dirname","consola","cheerio","cheerio","cheerio","resolve","writeFileSync","mkdirSync","join","cheerio","consola","dirname","markdown","defineCommand","join","consola","readFileSync","writeFileSync","join","dirname","defineCommand","join","consola","defineCommand","join","dirname","mkdirSync","cheerio","consola","defineCommand","consola","dirname","join","mkdirSync","defineCommand","consola","defineCommand","consola","defineCommand","resolve","createMcpServer","consola","defineCommand"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "docs2ai",
3
- "version": "0.1.4",
3
+ "version": "0.1.7",
4
4
  "description": "Convert online documentation into AI-ready Markdown context files",
5
5
  "type": "module",
6
6
  "bin": {
@@ -66,6 +66,7 @@
66
66
  "@types/node": "^25.2.2",
67
67
  "@types/turndown": "^5.0.6",
68
68
  "eslint": "^10.0.0",
69
+ "playwright": "^1.58.2",
69
70
  "prettier": "^3.8.1",
70
71
  "tsup": "^8.5.1",
71
72
  "tsx": "^4.21.0",