@lexbuild/fr 1.15.2 → 1.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -332,6 +332,70 @@ interface FrConvertResult {
332
332
  */
333
333
  declare function convertFrDocuments(options: FrConvertOptions): Promise<FrConvertResult>;
334
334
 
335
+ /**
336
+ * Federal Register frontmatter enricher.
337
+ *
338
+ * Fetches rich JSON metadata from the FederalRegister.gov API listing endpoint
339
+ * and patches frontmatter in existing converted Markdown files. This is used to
340
+ * backfill metadata (agencies, CFR references, docket IDs, citations, etc.) into
341
+ * files originally converted from govinfo bulk XML, which lacks this data.
342
+ *
343
+ * The enricher does NOT re-parse XML or re-render Markdown — it only updates the
344
+ * YAML frontmatter block while preserving the body content exactly as-is.
345
+ */
346
+ /** Options for enriching FR documents */
347
+ interface EnrichFrOptions {
348
+ /** Output root directory where .md files live (e.g., "./output") */
349
+ output: string;
350
+ /** Start date (YYYY-MM-DD, inclusive) */
351
+ from: string;
352
+ /** End date (YYYY-MM-DD, inclusive). Defaults to today. */
353
+ to?: string | undefined;
354
+ /** Overwrite files that are already enriched (have fr_citation) */
355
+ force?: boolean | undefined;
356
+ /** Progress callback */
357
+ onProgress?: ((progress: EnrichFrProgress) => void) | undefined;
358
+ }
359
+ /** Progress info for enrichment callback */
360
+ interface EnrichFrProgress {
361
+ /** Documents whose frontmatter was updated */
362
+ enriched: number;
363
+ /** Documents skipped (already enriched or no frontmatter) */
364
+ skipped: number;
365
+ /** Documents in API but no .md file found locally */
366
+ notFound: number;
367
+ /** Total documents seen in API responses */
368
+ total: number;
369
+ /** Current month chunk being processed (YYYY-MM) */
370
+ currentChunk: string;
371
+ /** Current document number */
372
+ currentDocument: string;
373
+ }
374
+ /** Result of an enrichment operation */
375
+ interface EnrichFrResult {
376
+ /** Documents whose frontmatter was updated */
377
+ enriched: number;
378
+ /** Documents skipped (already enriched or unparseable) */
379
+ skipped: number;
380
+ /** Documents in API but no .md file found locally */
381
+ notFound: number;
382
+ /** Total documents seen in API responses */
383
+ total: number;
384
+ /** Date range covered */
385
+ dateRange: {
386
+ from: string;
387
+ to: string;
388
+ };
389
+ }
390
+ /**
391
+ * Enrich existing FR Markdown files with metadata from the FederalRegister.gov API.
392
+ *
393
+ * Paginates through the API listing endpoint (200 docs/page), matches each document
394
+ * to its .md file by document number + publication date, and patches the YAML
395
+ * frontmatter with enriched fields (citation, agencies, CFR references, etc.).
396
+ */
397
+ declare function enrichFrDocuments(options: EnrichFrOptions): Promise<EnrichFrResult>;
398
+
335
399
  /**
336
400
  * Federal Register API downloader.
337
401
  *
@@ -427,6 +491,16 @@ declare function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDown
427
491
  * Fetches both the JSON metadata and XML full text.
428
492
  */
429
493
  declare function downloadSingleFrDocument(documentNumber: string, output: string): Promise<FrDownloadedFile>;
494
+ /**
495
+ * Break a date range into month-sized chunks.
496
+ * Each chunk covers one calendar month (or partial month at boundaries).
497
+ */
498
+ declare function buildMonthChunks(from: string, to: string): Array<{
499
+ from: string;
500
+ to: string;
501
+ }>;
502
+ /** Fetch with retry on transient HTTP and network errors */
503
+ declare function fetchWithRetry(url: string, attempt?: number): Promise<Response>;
430
504
 
431
505
  /**
432
506
  * Federal Register govinfo bulk downloader.
@@ -509,4 +583,4 @@ declare function buildGovinfoBulkPath(date: string, outputDir: string): string;
509
583
  */
510
584
  declare function downloadFrBulk(options: FrGovinfoBulkOptions): Promise<FrGovinfoResult>;
511
585
 
512
- export { FR_BLOCK_ELEMENTS, FR_CONTENT_ELEMENTS, FR_DOCUMENT_ELEMENTS, FR_DOCUMENT_TYPE_KEYS, FR_DOCUMENT_TYPE_MAP, FR_EMPHASIS_MAP, FR_HD_SOURCE_TO_DEPTH, FR_HEADING_ELEMENT, FR_IGNORE_ELEMENTS, FR_INLINE_ELEMENTS, FR_NOTE_ELEMENTS, FR_PASSTHROUGH_ELEMENTS, FR_PREAMBLE_META_ELEMENTS, FR_PREAMBLE_SECTIONS, FR_PRESIDENTIAL_SUBTYPES, FR_REGTEXT_ELEMENTS, FR_SECTION_CONTAINERS, FR_SIGNATURE_ELEMENTS, FR_SKIP_ELEMENTS, FR_TABLE_ELEMENTS, FrASTBuilder, type FrASTBuilderOptions, type FrConvertOptions, type FrConvertProgress, type FrConvertResult, type FrDocumentJsonMeta, type FrDocumentType, type FrDocumentXmlMeta, type FrDownloadFailure, type FrDownloadOptions, type FrDownloadProgress, type FrDownloadResult, type FrDownloadedFile, type FrGovinfoBulkOptions, type FrGovinfoDownloadedFile, type FrGovinfoProgress, type FrGovinfoResult, buildFrApiListUrl, buildFrDownloadJsonPath, buildFrDownloadXmlPath, buildFrFrontmatter, buildFrOutputPath, buildGovinfoBulkPath, buildGovinfoFrUrl, buildMonthDir, buildYearDir, convertFrDocuments, downloadFrBulk, downloadFrDocuments, downloadSingleFrDocument };
586
+ export { type EnrichFrOptions, type EnrichFrProgress, type EnrichFrResult, FR_BLOCK_ELEMENTS, FR_CONTENT_ELEMENTS, FR_DOCUMENT_ELEMENTS, FR_DOCUMENT_TYPE_KEYS, FR_DOCUMENT_TYPE_MAP, FR_EMPHASIS_MAP, FR_HD_SOURCE_TO_DEPTH, FR_HEADING_ELEMENT, FR_IGNORE_ELEMENTS, FR_INLINE_ELEMENTS, FR_NOTE_ELEMENTS, FR_PASSTHROUGH_ELEMENTS, FR_PREAMBLE_META_ELEMENTS, FR_PREAMBLE_SECTIONS, FR_PRESIDENTIAL_SUBTYPES, FR_REGTEXT_ELEMENTS, FR_SECTION_CONTAINERS, FR_SIGNATURE_ELEMENTS, FR_SKIP_ELEMENTS, FR_TABLE_ELEMENTS, FrASTBuilder, type FrASTBuilderOptions, type FrConvertOptions, type FrConvertProgress, type FrConvertResult, type FrDocumentJsonMeta, type FrDocumentType, type FrDocumentXmlMeta, type FrDownloadFailure, type FrDownloadOptions, type FrDownloadProgress, type FrDownloadResult, type FrDownloadedFile, type FrGovinfoBulkOptions, type FrGovinfoDownloadedFile, type FrGovinfoProgress, type FrGovinfoResult, buildFrApiListUrl, buildFrDownloadJsonPath, buildFrDownloadXmlPath, buildFrFrontmatter, buildFrOutputPath, buildGovinfoBulkPath, buildGovinfoFrUrl, buildMonthChunks, buildMonthDir, buildYearDir, convertFrDocuments, downloadFrBulk, downloadFrDocuments, downloadSingleFrDocument, enrichFrDocuments, fetchWithRetry };
package/dist/index.js CHANGED
@@ -1,12 +1,7 @@
1
1
  // src/fr-elements.ts
2
2
  var FR_DOCUMENT_TYPE_KEYS = ["RULE", "PRORULE", "NOTICE", "PRESDOCU"];
3
3
  var FR_DOCUMENT_ELEMENTS = new Set(FR_DOCUMENT_TYPE_KEYS);
4
- var FR_SECTION_CONTAINERS = /* @__PURE__ */ new Set([
5
- "RULES",
6
- "PRORULES",
7
- "NOTICES",
8
- "PRESDOCS"
9
- ]);
4
+ var FR_SECTION_CONTAINERS = /* @__PURE__ */ new Set(["RULES", "PRORULES", "NOTICES", "PRESDOCS"]);
10
5
  var FR_DOCUMENT_TYPE_MAP = {
11
6
  RULE: "rule",
12
7
  PRORULE: "proposed_rule",
@@ -1074,13 +1069,7 @@ function parseDateComponents(date) {
1074
1069
  import { createReadStream, existsSync } from "fs";
1075
1070
  import { readFile, readdir, stat } from "fs/promises";
1076
1071
  import { join as join2, dirname } from "path";
1077
- import {
1078
- XMLParser,
1079
- renderDocument,
1080
- createLinkResolver,
1081
- writeFile,
1082
- mkdir
1083
- } from "@lexbuild/core";
1072
+ import { XMLParser, renderDocument, createLinkResolver, writeFile, mkdir } from "@lexbuild/core";
1084
1073
  var FR_DOC_TYPE_SET = new Set(FR_DOCUMENT_TYPE_KEYS);
1085
1074
  async function convertFrDocuments(options) {
1086
1075
  const xmlFiles = await discoverXmlFiles(options.input, options.from, options.to);
@@ -1109,11 +1098,7 @@ async function convertFrDocuments(options) {
1109
1098
  documentsConverted++;
1110
1099
  continue;
1111
1100
  }
1112
- const outputPath = buildFrOutputPath(
1113
- doc.documentNumber,
1114
- doc.publicationDate,
1115
- options.output
1116
- );
1101
+ const outputPath = buildFrOutputPath(doc.documentNumber, doc.publicationDate, options.output);
1117
1102
  const frontmatter = buildFrFrontmatter(doc.node, doc.context, doc.xmlMeta, doc.jsonMeta);
1118
1103
  const markdown = renderDocument(doc.node, frontmatter, {
1119
1104
  headingOffset: 0,
@@ -1253,6 +1238,11 @@ function inferDateFromPath(filePath) {
1253
1238
  return "";
1254
1239
  }
1255
1240
 
1241
+ // src/enricher.ts
1242
+ import { readFile as readFile2, writeFile as writeFile2 } from "fs/promises";
1243
+ import { existsSync as existsSync2 } from "fs";
1244
+ import { parse, stringify } from "yaml";
1245
+
1256
1246
  // src/downloader.ts
1257
1247
  import { createWriteStream } from "fs";
1258
1248
  import { mkdir as mkdir2, stat as stat2, writeFile as fsWriteFile } from "fs/promises";
@@ -1440,14 +1430,10 @@ function buildMonthChunks(from, to) {
1440
1430
  const end = /* @__PURE__ */ new Date(to + "T00:00:00Z");
1441
1431
  while (current <= end) {
1442
1432
  const chunkStart = current.toISOString().slice(0, 10);
1443
- const monthEnd = new Date(
1444
- Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 0)
1445
- );
1433
+ const monthEnd = new Date(Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 0));
1446
1434
  const chunkEnd = monthEnd <= end ? monthEnd.toISOString().slice(0, 10) : to;
1447
1435
  chunks.push({ from: chunkStart, to: chunkEnd });
1448
- current = new Date(
1449
- Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 1)
1450
- );
1436
+ current = new Date(Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 1));
1451
1437
  }
1452
1438
  return chunks;
1453
1439
  }
@@ -1486,6 +1472,153 @@ function sleep(ms) {
1486
1472
  return new Promise((resolve) => setTimeout(resolve, ms));
1487
1473
  }
1488
1474
 
1475
+ // src/enricher.ts
1476
+ async function enrichFrDocuments(options) {
1477
+ const to = options.to ?? (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
1478
+ const force = options.force ?? false;
1479
+ let enriched = 0;
1480
+ let skipped = 0;
1481
+ let notFound = 0;
1482
+ let total = 0;
1483
+ const chunks = buildMonthChunks(options.from, to);
1484
+ for (const chunk of chunks) {
1485
+ let page = 1;
1486
+ let hasMore = true;
1487
+ const chunkLabel = chunk.from.slice(0, 7);
1488
+ while (hasMore) {
1489
+ const listUrl = buildFrApiListUrl(chunk.from, chunk.to, page);
1490
+ const response = await fetchWithRetry(listUrl);
1491
+ const data = await response.json();
1492
+ if (typeof data.count !== "number") {
1493
+ throw new Error(
1494
+ `Unexpected API response for ${listUrl}: missing or invalid 'count' field.`
1495
+ );
1496
+ }
1497
+ if (page === 1) {
1498
+ total += data.count;
1499
+ }
1500
+ const results = data.results ?? [];
1501
+ for (const doc of results) {
1502
+ if (!doc.document_number || !doc.publication_date) continue;
1503
+ const mdPath = buildFrOutputPath(
1504
+ doc.document_number,
1505
+ doc.publication_date,
1506
+ options.output
1507
+ );
1508
+ if (!existsSync2(mdPath)) {
1509
+ notFound++;
1510
+ options.onProgress?.({
1511
+ enriched,
1512
+ skipped,
1513
+ notFound,
1514
+ total,
1515
+ currentChunk: chunkLabel,
1516
+ currentDocument: doc.document_number
1517
+ });
1518
+ continue;
1519
+ }
1520
+ const content = await readFile2(mdPath, "utf-8");
1521
+ const fmEnd = content.indexOf("\n---\n", 4);
1522
+ if (!content.startsWith("---\n") || fmEnd === -1) {
1523
+ skipped++;
1524
+ continue;
1525
+ }
1526
+ const yamlStr = content.slice(4, fmEnd);
1527
+ const body = content.slice(fmEnd + 5);
1528
+ const fm = parse(yamlStr);
1529
+ if (!force && fm["fr_citation"]) {
1530
+ skipped++;
1531
+ options.onProgress?.({
1532
+ enriched,
1533
+ skipped,
1534
+ notFound,
1535
+ total,
1536
+ currentChunk: chunkLabel,
1537
+ currentDocument: doc.document_number
1538
+ });
1539
+ continue;
1540
+ }
1541
+ applyEnrichment(fm, doc);
1542
+ const newYaml = stringify(fm, {
1543
+ lineWidth: 0,
1544
+ defaultStringType: "QUOTE_DOUBLE",
1545
+ defaultKeyType: "PLAIN"
1546
+ });
1547
+ const newContent = `---
1548
+ ${newYaml}---
1549
+ ${body}`;
1550
+ await writeFile2(mdPath, newContent, "utf-8");
1551
+ enriched++;
1552
+ options.onProgress?.({
1553
+ enriched,
1554
+ skipped,
1555
+ notFound,
1556
+ total,
1557
+ currentChunk: chunkLabel,
1558
+ currentDocument: doc.document_number
1559
+ });
1560
+ }
1561
+ hasMore = page < (data.total_pages ?? 0);
1562
+ page++;
1563
+ }
1564
+ }
1565
+ return { enriched, skipped, notFound, total, dateRange: { from: options.from, to } };
1566
+ }
1567
+ function normalizeDocumentType2(apiType) {
1568
+ const map = {
1569
+ Rule: "rule",
1570
+ "Proposed Rule": "proposed_rule",
1571
+ Notice: "notice",
1572
+ "Presidential Document": "presidential_document"
1573
+ };
1574
+ return map[apiType] ?? apiType.toLowerCase().replace(/\s+/g, "_");
1575
+ }
1576
+ function applyEnrichment(fm, doc) {
1577
+ if (doc.type) {
1578
+ fm["document_type"] = normalizeDocumentType2(doc.type);
1579
+ }
1580
+ if (doc.citation) {
1581
+ fm["fr_citation"] = doc.citation;
1582
+ }
1583
+ if (doc.volume) {
1584
+ fm["fr_volume"] = doc.volume;
1585
+ }
1586
+ if (doc.publication_date) {
1587
+ fm["publication_date"] = doc.publication_date;
1588
+ fm["currency"] = doc.publication_date;
1589
+ fm["last_updated"] = doc.publication_date;
1590
+ }
1591
+ if (doc.agencies && doc.agencies.length > 0) {
1592
+ const [primary] = doc.agencies;
1593
+ if (primary) fm["agency"] = primary.name;
1594
+ fm["agencies"] = doc.agencies.map((a) => a.name);
1595
+ }
1596
+ if (doc.cfr_references && doc.cfr_references.length > 0) {
1597
+ fm["cfr_references"] = doc.cfr_references.map(
1598
+ (r) => `${r.title} CFR Part ${r.part}`
1599
+ );
1600
+ }
1601
+ if (doc.docket_ids && doc.docket_ids.length > 0) {
1602
+ fm["docket_ids"] = doc.docket_ids;
1603
+ }
1604
+ if (doc.regulation_id_numbers && doc.regulation_id_numbers.length > 0) {
1605
+ fm["rin"] = doc.regulation_id_numbers[0];
1606
+ }
1607
+ if (doc.effective_on) {
1608
+ fm["effective_date"] = doc.effective_on;
1609
+ }
1610
+ if (doc.comments_close_on) {
1611
+ fm["comments_close_date"] = doc.comments_close_on;
1612
+ }
1613
+ if (doc.action) {
1614
+ fm["fr_action"] = doc.action;
1615
+ }
1616
+ if (doc.title) {
1617
+ fm["title"] = doc.title;
1618
+ fm["section_name"] = doc.title;
1619
+ }
1620
+ }
1621
+
1489
1622
  // src/govinfo-downloader.ts
1490
1623
  import { createWriteStream as createWriteStream2 } from "fs";
1491
1624
  import { mkdir as mkdir3, stat as stat3 } from "fs/promises";
@@ -1535,7 +1668,9 @@ async function downloadFrBulk(options) {
1535
1668
  skipped++;
1536
1669
  }
1537
1670
  } catch (err) {
1538
- console.warn(`Warning: Failed to download ${date}: ${err instanceof Error ? err.message : String(err)}`);
1671
+ console.warn(
1672
+ `Warning: Failed to download ${date}: ${err instanceof Error ? err.message : String(err)}`
1673
+ );
1539
1674
  failed++;
1540
1675
  }
1541
1676
  }
@@ -1646,11 +1781,14 @@ export {
1646
1781
  buildFrOutputPath,
1647
1782
  buildGovinfoBulkPath,
1648
1783
  buildGovinfoFrUrl,
1784
+ buildMonthChunks,
1649
1785
  buildMonthDir,
1650
1786
  buildYearDir,
1651
1787
  convertFrDocuments,
1652
1788
  downloadFrBulk,
1653
1789
  downloadFrDocuments,
1654
- downloadSingleFrDocument
1790
+ downloadSingleFrDocument,
1791
+ enrichFrDocuments,
1792
+ fetchWithRetry
1655
1793
  };
1656
1794
  //# sourceMappingURL=index.js.map
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/fr-elements.ts","../src/fr-builder.ts","../src/fr-frontmatter.ts","../src/fr-path.ts","../src/converter.ts","../src/downloader.ts","../src/govinfo-downloader.ts"],"sourcesContent":["/**\n * Federal Register XML element classification.\n *\n * The FR XML is GPO/SGML-derived with no namespace. It shares many\n * inline formatting elements with eCFR (E T=\"nn\", SU, FTNT) but uses\n * a flat document-centric structure rather than a hierarchical DIV system.\n *\n * Each FR document (RULE, PRORULE, NOTICE, PRESDOCU) contains a preamble\n * (PREAMB) with structured metadata, supplementary information (SUPLINF)\n * with the document body, and optional regulatory text (REGTEXT).\n */\n\nimport type { InlineType } from \"@lexbuild/core\";\n\n// ── Document type elements ──\n\n/** FR document type element names as a const tuple — single source of truth */\nexport const FR_DOCUMENT_TYPE_KEYS = [\"RULE\", \"PRORULE\", \"NOTICE\", \"PRESDOCU\"] as const;\n\n/** FR document types supported by the API and XML */\nexport type FrDocumentType = (typeof FR_DOCUMENT_TYPE_KEYS)[number];\n\n/** Top-level document elements — each becomes an emitted section-level node */\nexport const FR_DOCUMENT_ELEMENTS = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n/** Container elements that group documents within daily issues */\nexport const FR_SECTION_CONTAINERS = new Set([\n \"RULES\",\n \"PRORULES\",\n \"NOTICES\",\n \"PRESDOCS\",\n]);\n\n/** Map from document element name to normalized document type string */\nexport const FR_DOCUMENT_TYPE_MAP: Readonly<Record<string, string>> = {\n RULE: \"rule\",\n PRORULE: \"proposed_rule\",\n NOTICE: \"notice\",\n PRESDOCU: \"presidential_document\",\n};\n\n// ── Preamble elements ──\n\n/** Preamble section elements containing structured content */\nexport const FR_PREAMBLE_SECTIONS = new Set([\n \"AGY\", // Agency section (HD + P)\n \"ACT\", // Action section (HD + P)\n \"SUM\", // Summary section (HD + P)\n \"DATES\", // 
Dates section (HD + P)\n \"EFFDATE\", // Effective date section (HD + P)\n \"ADD\", // Addresses section (HD + P)\n \"FURINF\", // Further information section (HD + P)\n]);\n\n/** Preamble metadata elements — text extracted for frontmatter */\nexport const FR_PREAMBLE_META_ELEMENTS = new Set([\n \"AGENCY\", // Issuing agency name (attrs: TYPE)\n \"SUBAGY\", // Sub-agency name\n \"CFR\", // CFR citation affected (e.g., \"10 CFR Part 2\")\n \"SUBJECT\", // Document title/subject\n \"DEPDOC\", // Department document number\n \"RIN\", // Regulation Identifier Number\n]);\n\n// ── Content elements ──\n\n/** Elements that contain paragraph text */\nexport const FR_CONTENT_ELEMENTS = new Set([\n \"P\", // Paragraph\n \"FP\", // Flush paragraph (attrs: SOURCE for indent level)\n]);\n\n/** Heading element — level determined by SOURCE attribute */\nexport const FR_HEADING_ELEMENT = \"HD\";\n\n/**\n * Map from HD SOURCE attribute to heading depth.\n * HED = top-level (section-like), HD1 = subsection, etc.\n */\nexport const FR_HD_SOURCE_TO_DEPTH: Readonly<Record<string, number>> = {\n HED: 1,\n HD1: 2,\n HD2: 3,\n HD3: 4,\n HD4: 5,\n HD5: 6,\n HD6: 6,\n HD8: 6,\n};\n\n// ── Inline formatting ──\n\n/** Inline formatting elements */\nexport const FR_INLINE_ELEMENTS = new Set([\n \"I\", // Italic\n \"B\", // Bold\n \"E\", // Emphasis (type varies by T attribute)\n \"SU\", // Superscript / footnote marker\n \"FR\", // Fraction\n \"AC\", // Accent/diacritical\n]);\n\n/**\n * Map from E element T attribute to InlineType.\n * Duplicated from eCFR — source packages must not import each other.\n */\nexport const FR_EMPHASIS_MAP: Readonly<Record<string, InlineType>> = {\n \"01\": \"bold\",\n \"02\": \"italic\",\n \"03\": \"italic\", // bold italic in print — FR uses T=\"03\" for case names, citations, and publication titles which render as italic\n \"04\": \"italic\", // italic in headings\n \"05\": \"italic\", // small caps — render as italic\n \"51\": \"sub\", // subscript\n \"52\": 
\"sub\", // subscript\n \"54\": \"sub\", // subscript (math)\n \"7462\": \"italic\", // special terms (et seq., De minimis)\n};\n\n// ── Regulatory text elements ──\n\n/** Regulatory text amendment elements (within SUPLINF) */\nexport const FR_REGTEXT_ELEMENTS = new Set([\n \"REGTEXT\", // Regulatory text container (attrs: TITLE, PART)\n \"AMDPAR\", // Amendment instruction paragraph\n \"SECTION\", // Section container\n \"SECTNO\", // Section number designation\n \"PART\", // Part container within REGTEXT\n \"AUTH\", // Authority citation in REGTEXT\n]);\n\n/** LSTSUB — List of subjects (CFR parts affected) */\nexport const FR_LSTSUB_ELEMENT = \"LSTSUB\";\n\n// ── Signature block ──\n\n/** Signature block elements */\nexport const FR_SIGNATURE_ELEMENTS = new Set([\n \"SIG\", // Signature block container\n \"NAME\", // Signer name\n \"TITLE\", // Signer title\n \"DATED\", // Date of signature\n]);\n\n// ── Presidential document subtypes ──\n\n/** Presidential document subtype containers */\nexport const FR_PRESIDENTIAL_SUBTYPES = new Set([\n \"EXECORD\", // Executive Order\n \"PRMEMO\", // Presidential Memorandum\n \"PROCLA\", // Proclamation\n \"DETERM\", // Presidential Determination\n \"PRNOTICE\", // Presidential Notice\n \"PRORDER\", // Presidential Order\n]);\n\n/** Presidential document metadata elements */\nexport const FR_PRESIDENTIAL_META_ELEMENTS = new Set([\n \"PSIG\", // Presidential signature (initials)\n \"PLACE\", // Place of issuance\n \"TITLE3\", // CFR Title 3 marker\n \"PRES\", // President name\n]);\n\n// ── Note elements ──\n\n/** Footnote and editorial note elements */\nexport const FR_NOTE_ELEMENTS = new Set([\n \"FTNT\", // Footnote\n \"EDNOTE\", // Editorial note\n \"OLNOTE1\", // Overlay note\n]);\n\n/** Footnote reference marker */\nexport const FR_FTREF_ELEMENT = \"FTREF\";\n\n// ── Block elements ──\n\n/** Block-level content wrappers */\nexport const FR_BLOCK_ELEMENTS = new Set([\n \"EXTRACT\", // Extracted/quoted text\n \"EXAMPLE\", 
// Illustrative example\n]);\n\n// ── Table elements (GPOTABLE format) ──\n\n/** GPOTABLE elements */\nexport const FR_TABLE_ELEMENTS = new Set([\n \"GPOTABLE\", // Table root\n \"TTITLE\", // Table title\n \"BOXHD\", // Header box container\n \"CHED\", // Column header entry (attrs: H for level)\n \"ROW\", // Data row (attrs: RUL for horizontal rules)\n \"ENT\", // Cell entry (attrs: I for indent, A for alignment)\n]);\n\n// ── Elements to ignore (skip entire subtree) ──\n\n/** Elements whose entire subtree should be skipped */\nexport const FR_IGNORE_ELEMENTS = new Set([\n \"CNTNTS\", // Table of contents in daily issue\n \"GPH\", // Graphics (not available in XML)\n \"GID\", // Graphics ID\n]);\n\n// ── Elements to skip (self only, no subtree) ──\n\n/** Self-contained elements to skip — metadata extracted elsewhere or irrelevant */\nexport const FR_SKIP_ELEMENTS = new Set([\n \"PRTPAGE\", // Page number reference (attrs: P for page)\n \"STARS\", // Visual separator (****)\n \"FILED\", // Filing info\n \"UNITNAME\", // Section name in daily issue\n \"VOL\", // Volume number (daily issue metadata)\n \"NO\", // Issue number (daily issue metadata)\n \"DATE\", // Date (daily issue level — document dates from preamble)\n \"NEWPART\", // New part container in daily issue\n \"PTITLE\", // Part title in daily issue\n \"PARTNO\", // Part number in daily issue\n \"PNOTICE\", // Part notice text\n]);\n\n// ── Passthrough elements ──\n\n/** Transparent wrappers — pass through without creating frames */\nexport const FR_PASSTHROUGH_ELEMENTS = new Set([\n \"FEDREG\", // Daily issue root element\n \"PREAMB\", // Preamble — children are handled individually\n \"SUPLINF\", // Supplementary information — children are handled individually\n]);\n\n// ── Metadata extraction elements ──\n\n/** FRDOC — Federal Register document citation, e.g., \"[FR Doc. 
2026-06029 ...]\" */\nexport const FR_FRDOC_ELEMENT = \"FRDOC\";\n\n/** BILCOD — Billing code (skip) */\nexport const FR_BILCOD_ELEMENT = \"BILCOD\";\n","/**\n * Federal Register AST Builder — converts SAX events from FR XML into AST nodes.\n *\n * Follows the stack-based pattern from the eCFR builder but adapted for FR's\n * flat, document-centric structure. Each FR document (RULE, NOTICE, PRORULE,\n * PRESDOCU) becomes a single section-level LevelNode emitted via onEmit.\n *\n * FR XML is GPO/SGML-derived with no namespace. It shares inline formatting\n * (E T=\"nn\", SU, FTNT) with eCFR but uses a different document structure:\n * preamble (PREAMB) → supplementary info (SUPLINF) → signature (SIG).\n */\n\nimport type { Attributes } from \"@lexbuild/core\";\nimport type {\n LevelNode,\n ContentNode,\n InlineNode,\n InlineType,\n NoteNode,\n TableNode,\n ASTNode,\n AncestorInfo,\n EmitContext,\n} from \"@lexbuild/core\";\nimport {\n FR_DOCUMENT_ELEMENTS,\n FR_SECTION_CONTAINERS,\n FR_DOCUMENT_TYPE_MAP,\n FR_PREAMBLE_SECTIONS,\n FR_PREAMBLE_META_ELEMENTS,\n FR_CONTENT_ELEMENTS,\n FR_HEADING_ELEMENT,\n FR_HD_SOURCE_TO_DEPTH,\n FR_INLINE_ELEMENTS,\n FR_EMPHASIS_MAP,\n FR_REGTEXT_ELEMENTS,\n FR_LSTSUB_ELEMENT,\n FR_SIGNATURE_ELEMENTS,\n FR_PRESIDENTIAL_SUBTYPES,\n FR_PRESIDENTIAL_META_ELEMENTS,\n FR_NOTE_ELEMENTS,\n FR_FTREF_ELEMENT,\n FR_BLOCK_ELEMENTS,\n FR_TABLE_ELEMENTS,\n FR_IGNORE_ELEMENTS,\n FR_SKIP_ELEMENTS,\n FR_PASSTHROUGH_ELEMENTS,\n FR_FRDOC_ELEMENT,\n FR_BILCOD_ELEMENT,\n} from \"./fr-elements.js\";\n\n/** Options for configuring the FR AST builder */\nexport interface FrASTBuilderOptions {\n /** Callback when a completed document node is ready */\n onEmit: (node: LevelNode, context: EmitContext) => void | Promise<void>;\n}\n\n/** Metadata extracted from the FR document XML during parsing */\nexport interface FrDocumentXmlMeta {\n /** Document type element name (RULE, NOTICE, etc.) 
*/\n documentType: string;\n /** Normalized document type (rule, proposed_rule, etc.) */\n documentTypeNormalized: string;\n /** Agency name from AGENCY element */\n agency?: string | undefined;\n /** Sub-agency name from SUBAGY element */\n subAgency?: string | undefined;\n /** Subject/title from SUBJECT element */\n subject?: string | undefined;\n /** CFR citation from CFR element */\n cfrCitation?: string | undefined;\n /** Regulation Identifier Number from RIN element */\n rin?: string | undefined;\n /** FR document number extracted from FRDOC text */\n documentNumber?: string | undefined;\n /** Publication date inferred from FRDOC filing date (YYYY-MM-DD) */\n publicationDate?: string | undefined;\n}\n\n/** Frame kinds for the stack */\ntype FrameKind =\n | \"document\"\n | \"content\"\n | \"inline\"\n | \"heading\"\n | \"preambleSection\"\n | \"preambleMeta\"\n | \"note\"\n | \"signature\"\n | \"signatureField\"\n | \"table\"\n | \"tableHeader\"\n | \"tableRow\"\n | \"tableCell\"\n | \"block\"\n | \"regtext\"\n | \"frdoc\"\n | \"ignore\";\n\n/** A stack frame tracking an in-progress element */\ninterface StackFrame {\n kind: FrameKind;\n elementName: string;\n node?: ASTNode;\n textBuffer: string;\n /** For GPOTABLE collection */\n headers?: string[][];\n rows?: string[][];\n currentRow?: string[];\n headerLevel?: number;\n}\n\n/**\n * Federal Register AST Builder.\n *\n * Consumes SAX events and produces LexBuild AST nodes. 
Each FR document\n * (RULE, NOTICE, PRORULE, PRESDOCU) is emitted as a single section-level\n * LevelNode via the onEmit callback.\n */\nexport class FrASTBuilder {\n private readonly options: FrASTBuilderOptions;\n private readonly stack: StackFrame[] = [];\n /** Depth inside fully-ignored elements (CNTNTS, GPH) */\n private ignoredContainerDepth = 0;\n /** Metadata extracted from current document */\n private currentDocMeta: FrDocumentXmlMeta = {\n documentType: \"\",\n documentTypeNormalized: \"\",\n };\n /** All document metadata collected during parsing */\n private readonly documentMetas: FrDocumentXmlMeta[] = [];\n\n constructor(options: FrASTBuilderOptions) {\n this.options = options;\n }\n\n /** Get metadata for all documents parsed so far */\n getDocumentMetas(): readonly FrDocumentXmlMeta[] {\n return this.documentMetas;\n }\n\n /** Handle SAX open element */\n onOpenElement(name: string, attrs: Attributes): void {\n // Track ignored containers (skip entire subtree)\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth++;\n return;\n }\n\n // Full-subtree ignore elements (CNTNTS, GPH, GID)\n if (FR_IGNORE_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Self-contained skip elements\n if (FR_SKIP_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Transparent pass-through wrappers (FEDREG, PREAMB, SUPLINF)\n if (FR_PASSTHROUGH_ELEMENTS.has(name)) {\n return;\n }\n\n // Section containers (RULES, PRORULES, NOTICES, PRESDOCS) — pass through\n if (FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Document elements (RULE, NOTICE, PRORULE, PRESDOCU) → open document-level node\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.openDocument(name);\n return;\n }\n\n // Presidential document subtypes (EXECORD, PRMEMO, etc.) 
— pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Presidential metadata (PSIG, PLACE, TITLE3, PRES)\n if (FR_PRESIDENTIAL_META_ELEMENTS.has(name)) {\n // PSIG and PLACE contain text we want to capture as content\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.openContent(name);\n return;\n }\n // TITLE3, PRES — skip\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble metadata elements (AGENCY, SUBAGY, CFR, SUBJECT, RIN, DEPDOC)\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"preambleMeta\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble sections (AGY, ACT, SUM, DATES, EFFDATE, ADD, FURINF)\n if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.stack.push({ kind: \"preambleSection\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Heading element (HD) — level from SOURCE attribute\n if (name === FR_HEADING_ELEMENT) {\n this.openHeading(name, attrs);\n return;\n }\n\n // Content elements (P, FP)\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.openContent(name);\n return;\n }\n\n // Inline elements (I, B, E, SU, FR, AC)\n if (FR_INLINE_ELEMENTS.has(name)) {\n this.openInline(name, attrs);\n return;\n }\n\n // Footnote reference marker — FTREF is empty and follows <SU>N</SU>.\n // Convert the preceding SU (rendered as sup) to a footnoteRef.\n if (name === FR_FTREF_ELEMENT) {\n const parentFrame = this.stack[this.stack.length - 1];\n if (parentFrame?.kind === \"content\" && parentFrame.node?.type === \"content\") {\n const contentNode = parentFrame.node as ContentNode;\n // Find the last sup child and convert it to footnoteRef\n for (let i = contentNode.children.length - 1; i >= 0; i--) {\n const child = contentNode.children[i];\n if (child?.type === \"inline\" && (child as InlineNode).inlineType === \"sup\") {\n (child as InlineNode).inlineType = \"footnoteRef\";\n break;\n }\n }\n }\n // FTREF is self-closing, push+pop to 
maintain balance\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Note elements (FTNT, EDNOTE, OLNOTE1)\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.openNote(name);\n return;\n }\n\n // REGTEXT and related elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.openRegtext(name, attrs);\n return;\n }\n\n // LSTSUB — List of subjects\n if (name === FR_LSTSUB_ELEMENT) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.openSignature(name);\n return;\n }\n\n // Block elements (EXTRACT, EXAMPLE)\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.openTableElement(name, attrs);\n return;\n }\n\n // FRDOC — extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.stack.push({ kind: \"frdoc\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // BILCOD — skip\n if (name === FR_BILCOD_ELEMENT) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Unknown elements — push as ignore to maintain stack balance\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n }\n\n /** Handle SAX close element */\n onCloseElement(name: string): void {\n // Track ignored containers\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth--;\n return;\n }\n\n // Pass-through elements — no frame to pop\n if (FR_PASSTHROUGH_ELEMENTS.has(name) || FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Presidential subtypes — pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Document elements → emit\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.closeDocument(name);\n return;\n }\n\n // Preamble metadata → extract text\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.closePreambleMeta(name);\n return;\n }\n\n // Preamble sections → just pop the frame\n 
if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // Heading\n if (name === FR_HEADING_ELEMENT) {\n this.closeHeading(name);\n return;\n }\n\n // Content elements\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.closeContent(name);\n return;\n }\n\n // Presidential metadata content (PSIG, PLACE)\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.closeContent(name);\n return;\n }\n\n // Inline elements\n if (FR_INLINE_ELEMENTS.has(name) || name === FR_FTREF_ELEMENT) {\n this.closeInline(name);\n return;\n }\n\n // Note elements\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.closeNote(name);\n return;\n }\n\n // REGTEXT elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.closeRegtext(name);\n return;\n }\n\n // LSTSUB\n if (name === FR_LSTSUB_ELEMENT) {\n this.popFrame(name);\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.closeSignature(name);\n return;\n }\n\n // Block elements\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.closeTableElement(name);\n return;\n }\n\n // FRDOC → extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.closeFrdoc();\n return;\n }\n\n // Pop any remaining frames (ignore, etc.)\n if (this.stack.length > 0 && this.stack[this.stack.length - 1]?.elementName === name) {\n this.stack.pop();\n }\n }\n\n /** Handle SAX text content */\n onText(text: string): void {\n if (this.ignoredContainerDepth > 0) return;\n\n const frame = this.stack[this.stack.length - 1];\n if (!frame) return;\n\n // Accumulate text in text-collecting frames\n if (\n frame.kind === \"heading\" ||\n frame.kind === \"preambleMeta\" ||\n frame.kind === \"signatureField\" ||\n frame.kind === \"tableCell\" ||\n frame.kind === \"tableHeader\" ||\n frame.kind === \"frdoc\"\n ) {\n frame.textBuffer += text;\n return;\n }\n\n // Content frames → create inline text node\n if (frame.kind === \"content\" && 
frame.node?.type === \"content\") {\n const contentNode = frame.node as ContentNode;\n // Normalize XML indentation whitespace: collapse runs of whitespace to single spaces\n const normalized = text.replace(/\\s+/g, \" \");\n if (normalized && normalized !== \" \") {\n contentNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text: normalized,\n });\n }\n return;\n }\n\n // Inline frames → set text or add child\n if (frame.kind === \"inline\" && frame.node?.type === \"inline\") {\n const inlineNode = frame.node as InlineNode;\n const normalized = text.replace(/\\s+/g, \" \");\n if (inlineNode.children) {\n if (normalized && normalized !== \" \") {\n inlineNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text: normalized,\n });\n }\n } else {\n inlineNode.text = (inlineNode.text ?? \"\") + normalized;\n }\n return;\n }\n\n // Note frames with direct text\n if (frame.kind === \"note\" && frame.node?.type === \"note\") {\n frame.textBuffer += text;\n return;\n }\n\n // Document-level, preambleSection, block, regtext — ignore stray text\n }\n\n // ── Private helpers: Document ──\n\n private openDocument(elementName: string): void {\n this.currentDocMeta = {\n documentType: elementName,\n documentTypeNormalized: FR_DOCUMENT_TYPE_MAP[elementName] ?? 
elementName.toLowerCase(),\n };\n\n const node: LevelNode = {\n type: \"level\",\n levelType: \"section\",\n children: [],\n sourceElement: elementName,\n };\n\n this.stack.push({ kind: \"document\", elementName, node, textBuffer: \"\" });\n }\n\n private closeDocument(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"document\" || !frame.node) return;\n\n const levelNode = frame.node as LevelNode;\n\n // Set heading from subject\n if (this.currentDocMeta.subject) {\n levelNode.heading = this.currentDocMeta.subject;\n }\n\n // Set identifier from document number\n if (this.currentDocMeta.documentNumber) {\n levelNode.identifier = `/us/fr/${this.currentDocMeta.documentNumber}`;\n levelNode.numValue = this.currentDocMeta.documentNumber;\n }\n\n // Build emit context\n const ancestors: AncestorInfo[] = [];\n for (const f of this.stack) {\n if (f.kind === \"document\" && f.node?.type === \"level\") {\n const ln = f.node as LevelNode;\n ancestors.push({\n levelType: ln.levelType,\n numValue: ln.numValue,\n heading: ln.heading,\n identifier: ln.identifier,\n });\n }\n }\n\n const context: EmitContext = {\n ancestors,\n documentMeta: {\n dcTitle: this.currentDocMeta.subject,\n dcType: this.currentDocMeta.documentTypeNormalized,\n },\n };\n\n // Save metadata before emitting\n this.documentMetas.push({ ...this.currentDocMeta });\n\n this.options.onEmit(levelNode, context);\n }\n\n // ── Private helpers: Preamble ──\n\n private closePreambleMeta(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"preambleMeta\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n switch (elementName) {\n case \"AGENCY\":\n this.currentDocMeta.agency = text;\n break;\n case \"SUBAGY\":\n this.currentDocMeta.subAgency = text;\n break;\n case \"CFR\":\n this.currentDocMeta.cfrCitation = text;\n break;\n case \"SUBJECT\":\n this.currentDocMeta.subject = text;\n 
break;\n case \"RIN\":\n this.currentDocMeta.rin = text.replace(/^RIN\\s+/i, \"\").trim();\n break;\n case \"DEPDOC\":\n // Department document number — store for potential use\n break;\n }\n }\n\n // ── Private helpers: Heading ──\n\n private openHeading(_elementName: string, attrs: Attributes): void {\n const source = attrs[\"SOURCE\"] ?? \"HD1\";\n const depth = FR_HD_SOURCE_TO_DEPTH[source] ?? 3;\n\n this.stack.push({\n kind: \"heading\",\n elementName: FR_HEADING_ELEMENT,\n textBuffer: \"\",\n headerLevel: depth,\n });\n }\n\n private closeHeading(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"heading\") return;\n\n const headingText = frame.textBuffer.trim();\n if (!headingText) return;\n\n // In preamble sections (AGY, ACT, SUM, etc.), the HD contains the label\n // like \"AGENCY:\", \"ACTION:\", \"SUMMARY:\". We render these as bold labels.\n const parentFrame = this.stack[this.stack.length - 1];\n\n if (parentFrame?.kind === \"preambleSection\") {\n // Create a bold label content node\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n return;\n }\n\n // Outside preamble: render as a bold heading content node\n // The depth from SOURCE attribute determines visual weight\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n }\n\n // ── Private helpers: Content ──\n\n private openContent(elementName: string): void {\n const node: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [],\n };\n this.stack.push({ kind: \"content\", elementName, node, textBuffer: \"\" });\n }\n\n private closeContent(elementName: string): void {\n const frame = 
this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const contentNode = frame.node as ContentNode;\n\n // Skip empty content nodes\n if (contentNode.children.length === 0) return;\n\n // Add to parent: document, note, or block\n const parent = this.findParentDocument() ?? this.findParentNote();\n if (parent?.node) {\n if (parent.node.type === \"level\") {\n (parent.node as LevelNode).children.push(contentNode);\n } else if (parent.node.type === \"note\") {\n (parent.node as NoteNode).children.push(contentNode);\n }\n }\n }\n\n // ── Private helpers: Inline ──\n\n private openInline(elementName: string, attrs: Attributes): void {\n let inlineType: InlineType = \"text\";\n\n if (elementName === \"I\") {\n inlineType = \"italic\";\n } else if (elementName === \"B\") {\n inlineType = \"bold\";\n } else if (elementName === \"SU\") {\n // SU inside a footnote (FTNT) is the footnote marker, not a generic superscript.\n // Check if we're inside a note frame to determine the correct type.\n const insideFootnote = this.findFrame(\"note\") !== undefined;\n inlineType = insideFootnote ? \"footnoteRef\" : \"sup\";\n } else if (elementName === \"FR\") {\n inlineType = \"text\"; // Fractions render as text\n } else if (elementName === \"E\") {\n const tValue = attrs[\"T\"] ?? \"\";\n inlineType = FR_EMPHASIS_MAP[tValue] ?? 
\"italic\";\n }\n\n const node: InlineNode = {\n type: \"inline\",\n inlineType,\n children: [],\n };\n\n this.stack.push({ kind: \"inline\", elementName, node, textBuffer: \"\" });\n }\n\n private closeInline(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const inlineNode = frame.node as InlineNode;\n\n // For footnoteRef, set text from buffer\n if (inlineNode.inlineType === \"footnoteRef\" && frame.textBuffer) {\n inlineNode.text = frame.textBuffer.trim();\n }\n\n // Find parent to attach to\n const parentFrame = this.stack[this.stack.length - 1];\n if (!parentFrame) return;\n\n if (parentFrame.kind === \"content\" && parentFrame.node?.type === \"content\") {\n (parentFrame.node as ContentNode).children.push(inlineNode);\n } else if (parentFrame.kind === \"inline\" && parentFrame.node?.type === \"inline\") {\n const parentInline = parentFrame.node as InlineNode;\n if (parentInline.children) {\n parentInline.children.push(inlineNode);\n }\n } else if (parentFrame.kind === \"heading\" || parentFrame.kind === \"preambleMeta\") {\n // Inline inside heading or preamble metadata — accumulate text\n if (inlineNode.text) {\n parentFrame.textBuffer += inlineNode.text;\n } else if (inlineNode.children) {\n for (const child of inlineNode.children) {\n if (child.text) parentFrame.textBuffer += child.text;\n }\n }\n }\n }\n\n // ── Private helpers: Notes ──\n\n private openNote(elementName: string): void {\n const noteTypeMap: Record<string, string> = {\n FTNT: \"footnote\",\n EDNOTE: \"editorial\",\n OLNOTE1: \"general\",\n };\n\n const noteType = noteTypeMap[elementName] ?? 
elementName.toLowerCase();\n const node: NoteNode = {\n type: \"note\",\n noteType,\n children: [],\n };\n\n this.stack.push({ kind: \"note\", elementName, node, textBuffer: \"\" });\n }\n\n private closeNote(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const noteNode = frame.node as NoteNode;\n\n // If text was collected directly (no child content nodes), create one\n if (frame.textBuffer.trim() && noteNode.children.length === 0) {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text: frame.textBuffer.trim(),\n },\n ],\n };\n noteNode.children.push(contentNode);\n }\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(noteNode);\n }\n }\n\n // ── Private helpers: Regulatory text ──\n\n private openRegtext(elementName: string, attrs: Attributes): void {\n if (elementName === \"REGTEXT\") {\n // REGTEXT container with TITLE and PART attributes\n const title = attrs[\"TITLE\"] ?? \"\";\n const part = attrs[\"PART\"] ?? \"\";\n const label = title && part ? 
`${title} CFR Part ${part}` : \"\";\n\n // Create a bold label if we have CFR reference info\n if (label) {\n const labelNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: label,\n },\n ],\n };\n this.addToDocument(labelNode);\n }\n\n this.stack.push({ kind: \"regtext\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AMDPAR\") {\n // Amendment instruction paragraph — render as italic content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\") {\n // Section container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"SECTNO\") {\n // Section number — collect as content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"PART\") {\n // Part container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AUTH\") {\n // Authority citation in REGTEXT\n this.openNote(elementName);\n return;\n }\n }\n\n private closeRegtext(elementName: string): void {\n if (elementName === \"REGTEXT\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AMDPAR\" || elementName === \"SECTNO\") {\n this.closeContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\" || elementName === \"PART\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AUTH\") {\n this.closeNote(elementName);\n return;\n }\n }\n\n // ── Private helpers: Signature block ──\n\n private openSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n // Signature container\n const node: NoteNode = {\n type: \"note\",\n noteType: \"signature\",\n children: [],\n };\n this.stack.push({ kind: \"signature\", elementName, node, textBuffer: \"\" });\n return;\n }\n\n // NAME, TITLE, DATED — collect text\n 
this.stack.push({ kind: \"signatureField\", elementName, textBuffer: \"\" });\n }\n\n private closeSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const sigNode = frame.node as NoteNode;\n\n // Add signature to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(sigNode);\n }\n return;\n }\n\n // NAME, TITLE, DATED fields\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"signatureField\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n // Add as content to parent signature node\n const sigFrame = this.findFrame(\"signature\");\n if (sigFrame?.node && sigFrame.node.type === \"note\") {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text,\n },\n ],\n };\n (sigFrame.node as NoteNode).children.push(contentNode);\n }\n }\n\n // ── Private helpers: GPOTABLE ──\n\n private openTableElement(elementName: string, _attrs: Attributes): void {\n if (elementName === \"GPOTABLE\") {\n this.stack.push({\n kind: \"table\",\n elementName,\n textBuffer: \"\",\n headers: [],\n rows: [],\n currentRow: [],\n });\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — collect text as heading\n this.stack.push({ kind: \"heading\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // Header container — no frame needed, children (CHED) handle themselves\n return;\n }\n\n if (elementName === \"CHED\") {\n // Column header entry\n this.stack.push({ kind: \"tableHeader\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ROW\") {\n const tableFrame = this.findTableFrame();\n if (tableFrame) {\n tableFrame.currentRow = [];\n }\n this.stack.push({ 
kind: \"tableRow\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ENT\") {\n // Cell entry\n this.stack.push({ kind: \"tableCell\", elementName, textBuffer: \"\" });\n return;\n }\n }\n\n private closeTableElement(elementName: string): void {\n if (elementName === \"GPOTABLE\") {\n this.closeGpoTable();\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — drop the heading frame (title is informational)\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // No frame to pop\n return;\n }\n\n if (elementName === \"CHED\") {\n this.closeTableHeader();\n return;\n }\n\n if (elementName === \"ROW\") {\n this.closeTableRow();\n return;\n }\n\n if (elementName === \"ENT\") {\n this.closeTableCell();\n return;\n }\n }\n\n private closeGpoTable(): void {\n const frame = this.popFrame(\"GPOTABLE\");\n if (!frame || frame.kind !== \"table\") return;\n\n const tableNode: TableNode = {\n type: \"table\",\n variant: \"xhtml\", // Reuse the same variant for rendering\n headers: frame.headers ?? [],\n rows: frame.rows ?? 
[],\n };\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(tableNode);\n }\n }\n\n private closeTableHeader(): void {\n const headerFrame = this.popFrame(\"CHED\");\n if (!headerFrame || headerFrame.kind !== \"tableHeader\") return;\n\n const tableFrame = this.findTableFrame();\n if (!tableFrame) return;\n\n const text = headerFrame.textBuffer.trim();\n\n // GPOTABLE headers are flat — each CHED is one column header.\n // We build a single header row from all CHED elements.\n if (!tableFrame.headers || tableFrame.headers.length === 0) {\n tableFrame.headers = [[]];\n }\n const headerRow = tableFrame.headers[0];\n if (headerRow) {\n headerRow.push(text);\n }\n }\n\n private closeTableRow(): void {\n const rowFrame = this.popFrame(\"ROW\");\n if (!rowFrame) return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.rows?.push([...tableFrame.currentRow]);\n tableFrame.currentRow = [];\n }\n }\n\n private closeTableCell(): void {\n const cellFrame = this.stack.pop();\n if (!cellFrame || cellFrame.kind !== \"tableCell\") return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.currentRow.push(cellFrame.textBuffer.trim());\n }\n }\n\n // ── Private helpers: FRDOC ──\n\n private closeFrdoc(): void {\n const frame = this.popFrame(FR_FRDOC_ELEMENT);\n if (!frame || frame.kind !== \"frdoc\") return;\n\n const text = frame.textBuffer.trim();\n // Extract document number from FRDOC text. Formats vary by era:\n // Modern: \"[FR Doc. 2026-06029 Filed 3-27-26; 8:45 am]\"\n // Pre-2009: \"[FR Doc. E8-17594 Filed 7-31-08; 8:45 am]\"\n // Very old: \"[FR Doc. 
00-123 Filed 1-2-00; 8:45 am]\"\n const docMatch = /FR\\s+Doc\\.\\s+([\\w-]+)/i.exec(text);\n if (docMatch) {\n this.currentDocMeta.documentNumber = docMatch[1];\n }\n\n // Extract publication date from filing date (Filed M-D-YY).\n // Publication = filing date + 1 calendar day (FR publishes the morning after).\n const dateMatch = /Filed\\s+(\\d{1,2})-(\\d{1,2})-(\\d{2})\\b/.exec(text);\n if (dateMatch) {\n const [, mmStr, ddStr, yyStr] = dateMatch;\n const mm = parseInt(mmStr ?? \"0\", 10);\n const dd = parseInt(ddStr ?? \"0\", 10);\n const yy = parseInt(yyStr ?? \"0\", 10);\n // 2-digit year: 00-49 → 2000s, 50-99 → 1900s\n const fullYear = yy < 50 ? 2000 + yy : 1900 + yy;\n const filed = new Date(fullYear, mm - 1, dd);\n // Validate — Date constructor silently wraps invalid values (month 13 → next year)\n if (filed.getMonth() !== mm - 1 || filed.getDate() !== dd) {\n return; // Invalid filing date — skip rather than produce wrong date\n }\n // Publication date = next calendar day\n filed.setDate(filed.getDate() + 1);\n const pubYear = filed.getFullYear();\n const pubMonth = String(filed.getMonth() + 1).padStart(2, \"0\");\n const pubDay = String(filed.getDate()).padStart(2, \"0\");\n this.currentDocMeta.publicationDate = `${pubYear}-${pubMonth}-${pubDay}`;\n }\n }\n\n // ── Private helpers: Stack navigation ──\n\n private addToDocument(node: ASTNode): void {\n const docFrame = this.findParentDocument();\n if (docFrame?.node && docFrame.node.type === \"level\") {\n (docFrame.node as LevelNode).children.push(node);\n }\n }\n\n private findParentDocument(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"document\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findParentNote(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"note\" || this.stack[i]?.kind === \"signature\") {\n return this.stack[i];\n }\n }\n return 
undefined;\n }\n\n private findTableFrame(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"table\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findFrame(kind: FrameKind): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === kind) {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private popFrame(elementName: string): StackFrame | undefined {\n if (this.stack.length === 0) return undefined;\n\n // Find the matching frame (may not be exactly on top)\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.elementName === elementName) {\n return this.stack.splice(i, 1)[0];\n }\n }\n\n // No matching frame found — warn rather than popping an unrelated frame\n console.warn(\n `FrASTBuilder: no matching frame for closing element </${elementName}>, ` +\n `stack has: [${this.stack.map((f) => f.elementName).join(\", \")}]`,\n );\n return undefined;\n }\n}\n","/**\n * Federal Register frontmatter builder.\n *\n * Constructs FrontmatterData from an emitted FR AST node, its context,\n * and optional JSON metadata from the FederalRegister.gov API.\n */\n\nimport type { LevelNode, EmitContext, FrontmatterData } from \"@lexbuild/core\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\n\n/**\n * Metadata from the FederalRegister.gov API JSON response.\n * Stored as a sidecar `.json` file alongside each `.xml` download.\n */\nexport interface FrDocumentJsonMeta {\n /** FR document number (e.g., \"2026-06029\") */\n document_number: string;\n /** Document type (Rule, Proposed Rule, Notice, Presidential Document) */\n type: string;\n /** Document title */\n title: string;\n /** Publication date (YYYY-MM-DD) */\n publication_date: string;\n /** Full FR citation (e.g., \"91 FR 14523\") */\n citation: string;\n /** FR volume number */\n volume: number;\n /** Start page number */\n start_page: number;\n 
/** End page number */\n end_page: number;\n /** Agencies with hierarchy info */\n agencies: Array<{\n name: string;\n id: number;\n slug: string;\n parent_id?: number | null;\n raw_name?: string;\n }>;\n /** CFR title/part references */\n cfr_references: Array<{ title: number; part: number }>;\n /** Docket identifiers */\n docket_ids: string[];\n /** Regulation Identifier Numbers */\n regulation_id_numbers: string[];\n /** Effective date (YYYY-MM-DD) */\n effective_on?: string | null;\n /** Comment period end date (YYYY-MM-DD) */\n comments_close_on?: string | null;\n /** Action description (e.g., \"Final rule.\") */\n action?: string | null;\n /** Document abstract */\n abstract?: string | null;\n /** Whether the document is significant */\n significant?: boolean | null;\n /** Topics/keywords */\n topics: string[];\n /** URL to full text XML */\n full_text_xml_url: string;\n}\n\n/** Normalize API document type to lowercase snake_case */\nfunction normalizeDocumentType(apiType: string): string {\n const map: Record<string, string> = {\n Rule: \"rule\",\n \"Proposed Rule\": \"proposed_rule\",\n Notice: \"notice\",\n \"Presidential Document\": \"presidential_document\",\n };\n return map[apiType] ?? apiType.toLowerCase().replace(/\\s+/g, \"_\");\n}\n\n/**\n * Build FrontmatterData from an FR document node with optional JSON metadata.\n *\n * If JSON metadata is available (from the API sidecar file), it enriches\n * the frontmatter with structured agency, CFR reference, docket, and\n * date information that isn't available in the XML alone.\n */\nexport function buildFrFrontmatter(\n node: LevelNode,\n _context: EmitContext,\n xmlMeta: FrDocumentXmlMeta,\n jsonMeta?: FrDocumentJsonMeta,\n): FrontmatterData {\n const documentNumber = jsonMeta?.document_number ?? xmlMeta.documentNumber ?? \"\";\n const subject = jsonMeta?.title ?? xmlMeta.subject ?? node.heading ?? \"\";\n const publicationDate = jsonMeta?.publication_date ?? xmlMeta.publicationDate ?? 
\"\";\n const documentType =\n jsonMeta ? normalizeDocumentType(jsonMeta.type) : xmlMeta.documentTypeNormalized;\n\n // Build agencies list\n let agencies: string[] | undefined;\n if (jsonMeta?.agencies && jsonMeta.agencies.length > 0) {\n agencies = jsonMeta.agencies.map((a) => a.name);\n } else if (xmlMeta.agency) {\n agencies = [xmlMeta.agency];\n if (xmlMeta.subAgency) {\n agencies.push(xmlMeta.subAgency);\n }\n }\n\n // Build CFR references list\n let cfrReferences: string[] | undefined;\n if (jsonMeta?.cfr_references && jsonMeta.cfr_references.length > 0) {\n cfrReferences = jsonMeta.cfr_references.map((r) => `${r.title} CFR Part ${r.part}`);\n } else if (xmlMeta.cfrCitation) {\n cfrReferences = [xmlMeta.cfrCitation];\n }\n\n // Build docket IDs list\n let docketIds: string[] | undefined;\n if (jsonMeta?.docket_ids && jsonMeta.docket_ids.length > 0) {\n docketIds = jsonMeta.docket_ids;\n }\n\n // Primary agency for the existing `agency` field\n const primaryAgency =\n agencies && agencies.length > 0 ? agencies[0] : undefined;\n\n // FR citation\n const frCitation = jsonMeta?.citation;\n\n // RIN\n const rin = jsonMeta?.regulation_id_numbers?.[0] ?? xmlMeta.rin;\n\n const fm: FrontmatterData = {\n source: \"fr\",\n legal_status: \"authoritative_unofficial\",\n identifier: node.identifier ?? `/us/fr/${documentNumber}`,\n title: subject,\n title_number: 0, // FR documents don't belong to a USC/CFR title\n title_name: \"Federal Register\",\n section_number: documentNumber,\n section_name: subject,\n positive_law: false,\n currency: publicationDate,\n last_updated: publicationDate,\n\n // Shared optional fields\n agency: primaryAgency,\n\n // FR-specific fields\n document_number: documentNumber || undefined,\n document_type: documentType || undefined,\n fr_citation: frCitation,\n fr_volume: jsonMeta?.volume,\n publication_date: publicationDate || undefined,\n agencies: agencies && agencies.length > 0 ? 
agencies : undefined,\n cfr_references: cfrReferences && cfrReferences.length > 0 ? cfrReferences : undefined,\n docket_ids: docketIds && docketIds.length > 0 ? docketIds : undefined,\n rin: rin || undefined,\n effective_date: jsonMeta?.effective_on ?? undefined,\n comments_close_date: jsonMeta?.comments_close_on ?? undefined,\n fr_action: jsonMeta?.action ?? undefined,\n };\n\n return fm;\n}\n","/**\n * Output path builder for Federal Register directory structure.\n *\n * FR path structure:\n * output/fr/{YYYY}/{MM}/{document_number}.md\n *\n * Downloads path structure:\n * downloads/fr/{YYYY}/{MM}/{document_number}.xml\n * downloads/fr/{YYYY}/{MM}/{document_number}.json\n */\n\nimport { join } from \"node:path\";\n\n/**\n * Build the output file path for an FR document.\n *\n * @param documentNumber - FR document number (e.g., \"2026-06029\")\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param outputRoot - Output root directory (e.g., \"./output\")\n * @returns Full output file path (e.g., \"output/fr/2026/03/2026-06029.md\")\n */\nexport function buildFrOutputPath(\n documentNumber: string,\n publicationDate: string,\n outputRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(outputRoot, \"fr\", year, month, `${documentNumber}.md`);\n}\n\n/**\n * Build the download file path for an FR document XML.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory (e.g., \"./downloads/fr\")\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.xml\")\n */\nexport function buildFrDownloadXmlPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.xml`);\n}\n\n/**\n * Build the download file path for an 
FR document JSON metadata.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.json\")\n */\nexport function buildFrDownloadJsonPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.json`);\n}\n\n/**\n * Build the directory path for a year/month within the FR output structure.\n */\nexport function buildMonthDir(year: string, month: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year, month);\n}\n\n/**\n * Build the directory path for a year.\n */\nexport function buildYearDir(year: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year);\n}\n\n/**\n * Parse a YYYY-MM-DD date string into year and month components.\n */\nfunction parseDateComponents(date: string): { year: string; month: string } {\n const parts = date.split(\"-\");\n return {\n year: parts[0] || \"0000\",\n month: parts[1] || \"00\",\n };\n}\n","/**\n * Federal Register conversion orchestrator.\n *\n * Discovers downloaded FR XML files, parses them with FrASTBuilder,\n * enriches frontmatter with JSON sidecar metadata, renders via core's\n * renderDocument, and writes structured Markdown output.\n *\n * Processes FR documents in a single streaming pass: parse each XML file,\n * render Markdown, and write output immediately. 
No link pre-registration\n * since FR documents rarely cross-reference each other.\n */\n\nimport { createReadStream, existsSync } from \"node:fs\";\nimport { readFile, readdir, stat } from \"node:fs/promises\";\nimport { join, dirname } from \"node:path\";\nimport {\n XMLParser,\n renderDocument,\n createLinkResolver,\n writeFile,\n mkdir,\n} from \"@lexbuild/core\";\nimport type { LevelNode, EmitContext } from \"@lexbuild/core\";\nimport { FrASTBuilder } from \"./fr-builder.js\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\nimport { buildFrFrontmatter } from \"./fr-frontmatter.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport { buildFrOutputPath } from \"./fr-path.js\";\nimport { FR_DOCUMENT_TYPE_KEYS } from \"./fr-elements.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n// ── Public types ──\n\n/** Progress info for conversion callback */\nexport interface FrConvertProgress {\n /** Documents converted so far */\n documentsConverted: number;\n /** XML files processed so far */\n filesProcessed: number;\n /** Total XML files to process */\n totalFiles: number;\n /** Current XML file being processed */\n currentFile: string;\n}\n\n/** Options for converting FR documents */\nexport interface FrConvertOptions {\n /** Path to input file or directory containing .xml/.json files */\n input: string;\n /** Output root directory */\n output: string;\n /** Link style for cross-references */\n linkStyle: \"relative\" | \"canonical\" | \"plaintext\";\n /** Parse only, don't write files */\n dryRun: boolean;\n /** Filter: start date (YYYY-MM-DD) */\n from?: string | undefined;\n /** Filter: end date (YYYY-MM-DD) */\n to?: string | undefined;\n /** Filter: document types */\n types?: FrDocumentType[] | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrConvertProgress) => void) | undefined;\n}\n\n/** Result of a conversion operation */\nexport interface FrConvertResult {\n /** Number of documents 
converted */\n documentsConverted: number;\n /** Paths of written files */\n files: string[];\n /** Total estimated tokens */\n totalTokenEstimate: number;\n /** Peak RSS in bytes */\n peakMemoryBytes: number;\n /** Whether this was a dry run */\n dryRun: boolean;\n}\n\n/** Collected document info during parsing */\ninterface CollectedDoc {\n node: LevelNode;\n context: EmitContext;\n xmlMeta: FrDocumentXmlMeta;\n jsonMeta?: FrDocumentJsonMeta;\n publicationDate: string;\n documentNumber: string;\n}\n\n/** Set of valid FR document type element names for filtering */\nconst FR_DOC_TYPE_SET = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n// ── Public function ──\n\n/**\n * Convert FR XML documents to Markdown.\n *\n * Supports both single-file mode (input is a .xml path) and batch mode\n * (input is a directory containing year/month/doc.xml structure).\n */\nexport async function convertFrDocuments(options: FrConvertOptions): Promise<FrConvertResult> {\n const xmlFiles = await discoverXmlFiles(options.input, options.from, options.to);\n\n let documentsConverted = 0;\n let totalTokenEstimate = 0;\n let peakMemoryBytes = 0;\n\n const linkResolver = createLinkResolver();\n\n // Stream: parse each file, render, and write immediately.\n // FR documents rarely cross-reference each other, so we skip the two-pass\n // link registration that USC/eCFR use. This keeps memory bounded for\n // bulk XML processing (750k+ documents across 9,500+ files).\n let filesProcessed = 0;\n for (const xmlPath of xmlFiles) {\n let collected: CollectedDoc[];\n try {\n collected = await parseXmlFile(xmlPath);\n } catch (err) {\n console.warn(\n `Warning: Failed to parse ${xmlPath}: ${err instanceof Error ? err.message : String(err)}. 
Skipping.`,\n );\n continue;\n }\n\n for (const doc of collected) {\n // Apply type filter\n if (options.types && options.types.length > 0) {\n if (\n !FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) ||\n !options.types.includes(doc.xmlMeta.documentType as FrDocumentType)\n ) {\n continue;\n }\n }\n\n if (options.dryRun) {\n documentsConverted++;\n continue;\n }\n\n const outputPath = buildFrOutputPath(\n doc.documentNumber,\n doc.publicationDate,\n options.output,\n );\n\n const frontmatter = buildFrFrontmatter(doc.node, doc.context, doc.xmlMeta, doc.jsonMeta);\n\n const markdown = renderDocument(doc.node, frontmatter, {\n headingOffset: 0,\n linkStyle: options.linkStyle,\n resolveLink:\n options.linkStyle === \"relative\"\n ? (id) => linkResolver.resolve(id, outputPath)\n : undefined,\n });\n\n await mkdir(dirname(outputPath), { recursive: true });\n await writeFile(outputPath, markdown, \"utf-8\");\n\n documentsConverted++;\n totalTokenEstimate += Math.round(markdown.length / 4);\n\n // Track memory\n const mem = process.memoryUsage().rss;\n if (mem > peakMemoryBytes) {\n peakMemoryBytes = mem;\n }\n }\n\n filesProcessed++;\n\n options.onProgress?.({\n documentsConverted,\n filesProcessed,\n totalFiles: xmlFiles.length,\n currentFile: xmlPath,\n });\n }\n\n return {\n documentsConverted,\n files: [], // Don't accumulate 750k+ file paths in memory\n totalTokenEstimate,\n peakMemoryBytes,\n dryRun: options.dryRun,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Parse a single XML file and collect document nodes + metadata.\n */\nasync function parseXmlFile(xmlPath: string): Promise<CollectedDoc[]> {\n const collected: CollectedDoc[] = [];\n\n const builder = new FrASTBuilder({\n onEmit: (node, context) => {\n // Snapshot metas at emit time\n const currentMetas = builder.getDocumentMetas();\n const meta = currentMetas[currentMetas.length - 1];\n if (!meta) {\n console.warn(\n `Warning: No XML metadata extracted for emitted document in ${xmlPath}. 
` +\n `Frontmatter will have empty document_type and document_number.`,\n );\n }\n collected.push({\n node,\n context,\n xmlMeta: meta ?? { documentType: \"\", documentTypeNormalized: \"\" },\n publicationDate: \"\",\n documentNumber: meta?.documentNumber ?? \"\",\n });\n },\n });\n\n const parser = new XMLParser({ defaultNamespace: \"\" });\n parser.on(\"openElement\", (name, attrs) => builder.onOpenElement(name, attrs));\n parser.on(\"closeElement\", (name) => builder.onCloseElement(name));\n parser.on(\"text\", (text) => builder.onText(text));\n\n const stream = createReadStream(xmlPath, \"utf-8\");\n await parser.parseStream(stream);\n\n // Try to load JSON sidecar\n const jsonPath = xmlPath.replace(/\\.xml$/, \".json\");\n let jsonMeta: FrDocumentJsonMeta | undefined;\n if (existsSync(jsonPath)) {\n try {\n const raw = await readFile(jsonPath, \"utf-8\");\n jsonMeta = JSON.parse(raw) as FrDocumentJsonMeta;\n } catch (err) {\n console.warn(\n `Warning: Failed to parse JSON sidecar ${jsonPath}: ${err instanceof Error ? err.message : String(err)}. Continuing without enriched metadata.`,\n );\n }\n }\n\n // Enrich collected docs with JSON metadata and publication date\n for (const doc of collected) {\n if (jsonMeta && jsonMeta.document_number === doc.documentNumber) {\n doc.jsonMeta = jsonMeta;\n doc.publicationDate = jsonMeta.publication_date;\n } else {\n // Infer date from file path (downloads/fr/YYYY/MM/doc.xml)\n const inferredDate = inferDateFromPath(xmlPath);\n if (!inferredDate) {\n console.warn(\n `Warning: No publication date for document ${doc.documentNumber || \"(unknown)\"} — ` +\n `no JSON sidecar and path ${xmlPath} has no YYYY/MM/ pattern. 
Output will be in 0000/00/.`,\n );\n }\n doc.publicationDate = inferredDate;\n }\n }\n\n return collected;\n}\n\n/**\n * Discover XML files in a directory or return the single file path.\n */\nasync function discoverXmlFiles(\n input: string,\n from?: string,\n to?: string,\n): Promise<string[]> {\n let inputStat;\n try {\n inputStat = await stat(input);\n } catch (err) {\n throw new Error(\n `Cannot access input path \"${input}\": ${err instanceof Error ? err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (inputStat.isFile()) {\n return [input];\n }\n\n if (!inputStat.isDirectory()) {\n throw new Error(`Input path \"${input}\" is not a file or directory`);\n }\n\n // Recursively find all .xml files\n const xmlFiles: string[] = [];\n await walkDir(input, xmlFiles);\n\n // Apply date range filter based on file path structure (YYYY/MM/)\n let filtered = xmlFiles;\n if (from || to) {\n filtered = xmlFiles.filter((f) => {\n const date = inferDateFromPath(f);\n if (!date) return true; // Can't filter if no date in path\n if (from && date < from) return false;\n if (to && date > to + \"-32\") return false; // Month-level comparison\n return true;\n });\n }\n\n return filtered.sort();\n}\n\n/** Recursively walk a directory collecting .xml files */\nasync function walkDir(dir: string, results: string[]): Promise<void> {\n const entries = await readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n await walkDir(fullPath, results);\n } else if (entry.isFile() && entry.name.endsWith(\".xml\")) {\n results.push(fullPath);\n }\n }\n}\n\n/**\n * Infer a date string from the file path. Used when no JSON sidecar is available.\n *\n * Supports two patterns:\n * - Per-document: \"downloads/fr/2026/03/doc.xml\" → \"2026-03-01\"\n * - Govinfo bulk: \"downloads/fr/bulk/2026/FR-2026-03-02.xml\" → \"2026-03-02\"\n */\n/** @internal Exported for testing. 
*/\nexport function inferDateFromPath(filePath: string): string {\n // Govinfo bulk: FR-YYYY-MM-DD.xml\n const bulkMatch = /FR-(\\d{4})-(\\d{2})-(\\d{2})\\.xml$/.exec(filePath);\n if (bulkMatch) {\n return `${bulkMatch[1]}-${bulkMatch[2]}-${bulkMatch[3]}`;\n }\n\n // Per-document: YYYY/MM/doc.xml\n const perDocMatch = /(\\d{4})\\/(\\d{2})\\/[^/]+\\.xml$/.exec(filePath);\n if (perDocMatch) {\n return `${perDocMatch[1]}-${perDocMatch[2]}-01`;\n }\n\n return \"\";\n}\n","/**\n * Federal Register API downloader.\n *\n * Downloads FR documents (XML + JSON metadata) from the FederalRegister.gov API.\n * The API provides per-document endpoints, rich JSON metadata, and requires no\n * authentication. Results are paginated (max 200/page) with a 10,000 result cap\n * per query — the downloader auto-chunks by month for large date ranges.\n *\n * API base: https://www.federalregister.gov/api/v1/\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat, writeFile as fsWriteFile } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\nimport { buildFrDownloadXmlPath, buildFrDownloadJsonPath } from \"./fr-path.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n/** Base URL for the FederalRegister.gov API */\nconst FR_API_BASE = \"https://www.federalregister.gov/api/v1\";\n\n/** Maximum results per page (API max) */\nconst PER_PAGE = 200;\n\n/** Default number of concurrent XML downloads */\nconst DEFAULT_CONCURRENCY = 10;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n/** Fields to request from the API documents endpoint */\nconst API_FIELDS = [\n \"document_number\",\n \"type\",\n \"title\",\n \"publication_date\",\n \"citation\",\n \"volume\",\n 
\"start_page\",\n \"end_page\",\n \"agencies\",\n \"cfr_references\",\n \"docket_ids\",\n \"regulation_id_numbers\",\n \"effective_on\",\n \"comments_close_on\",\n \"action\",\n \"abstract\",\n \"significant\",\n \"topics\",\n \"full_text_xml_url\",\n];\n\n// ── Public types ──\n\n/** Options for downloading FR documents */\nexport interface FrDownloadOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Document types to download. All types if omitted. */\n types?: FrDocumentType[] | undefined;\n /** Maximum number of documents to download (for testing) */\n limit?: number | undefined;\n /** Number of concurrent XML downloads (default 10) */\n concurrency?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrDownloadProgress) => void) | undefined;\n}\n\n/** Progress info for download callback */\nexport interface FrDownloadProgress {\n /** Documents downloaded so far */\n documentsDownloaded: number;\n /** Total documents found across all pages */\n totalDocuments: number;\n /** Current document number being downloaded */\n currentDocument: string;\n /** Current date chunk being processed (YYYY-MM) */\n currentChunk: string;\n}\n\n/** A successfully downloaded FR document */\nexport interface FrDownloadedFile {\n /** Absolute path to the XML file */\n xmlPath: string;\n /** Absolute path to the JSON metadata file */\n jsonPath: string;\n /** Document number */\n documentNumber: string;\n /** Publication date */\n publicationDate: string;\n /** Combined size in bytes (XML + JSON) */\n size: number;\n}\n\n/** A failed download */\nexport interface FrDownloadFailure {\n /** Document number */\n documentNumber: string;\n /** Error message */\n error: string;\n}\n\n/** Result of a download operation */\nexport interface FrDownloadResult {\n /** Number of documents 
// ── Public functions ──

/**
 * Build the API documents listing URL for a date range.
 *
 * Encodes the publication-date window, page size/number, oldest-first order,
 * the requested metadata fields, and (optionally) a document-type filter as
 * FederalRegister.gov API query conditions.
 */
export function buildFrApiListUrl(
  from: string,
  to: string,
  page: number,
  types?: FrDocumentType[],
): string {
  const params = new URLSearchParams();
  params.set("conditions[publication_date][gte]", from);
  params.set("conditions[publication_date][lte]", to);
  params.set("per_page", String(PER_PAGE));
  params.set("page", String(page));
  params.set("order", "oldest");

  // Request each metadata field explicitly — the API returns a trimmed
  // default set otherwise.
  for (const field of API_FIELDS) {
    params.append("fields[]", field);
  }

  if (types && types.length > 0) {
    for (const t of types) {
      params.append("conditions[type][]", t);
    }
  }

  return `${FR_API_BASE}/documents.json?${params.toString()}`;
}

/**
 * Download FR documents for a date range.
 *
 * Automatically chunks large date ranges into month-sized windows to stay
 * under the API's 10,000 result cap per query. Within each chunk, document
 * XML files are downloaded concurrently (default 10 at a time).
 */
export async function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDownloadResult> {
  // Default the end of the range to today (UTC date).
  const to = options.to ?? new Date().toISOString().slice(0, 10);
  const concurrency = options.concurrency ?? DEFAULT_CONCURRENCY;

  const files: FrDownloadedFile[] = [];
  const failed: FrDownloadFailure[] = [];
  let totalBytes = 0;
  let skipped = 0;
  let totalDocumentsFound = 0;

  // Break date range into month-sized chunks
  const chunks = buildMonthChunks(options.from, to);

  for (const chunk of chunks) {
    // Stop paginating new chunks once the caller-imposed limit is reached.
    if (options.limit !== undefined && files.length >= options.limit) break;

    // Phase 1: Collect all document metadata for this chunk (pagination is fast, JSON only)
    const chunkDocs: FrDocumentJsonMeta[] = [];
    let page = 1;
    let hasMore = true;

    while (hasMore) {
      const listUrl = buildFrApiListUrl(chunk.from, chunk.to, page, options.types);
      const response = await fetchWithRetry(listUrl);
      const data = (await response.json()) as FrApiListResponse;

      // Guard against silent API contract drift before trusting the payload.
      if (typeof data.count !== "number") {
        throw new Error(
          `Unexpected API response for ${listUrl}: missing or invalid 'count' field. ` +
            `The FederalRegister.gov API may have changed its response format.`,
        );
      }

      // Each chunk has its own count — accumulate on the first page of each chunk
      if (page === 1) {
        totalDocumentsFound += data.count;
      }

      // `results` can be absent when a chunk has zero documents
      // (weekends/holidays).
      const results = data.results ?? [];

      for (const doc of results) {
        // Documents without XML (pre-2000) cannot be downloaded — count as skipped.
        if (!doc.full_text_xml_url) {
          skipped++;
          continue;
        }
        chunkDocs.push(doc);
      }

      hasMore = page < (data.total_pages ?? 0);
      page++;
    }

    // Apply limit to this chunk
    const remaining = options.limit !== undefined ? options.limit - files.length : chunkDocs.length;
    const docsToDownload = chunkDocs.slice(0, remaining);
    // Progress label is the chunk's month ("YYYY-MM").
    const chunkLabel = chunk.from.slice(0, 7);

    // Phase 2: Download XML files concurrently
    await downloadPool(docsToDownload, concurrency, options.output, (doc, result, error) => {
      if (result) {
        files.push(result);
        totalBytes += result.size;
      } else if (error) {
        failed.push({ documentNumber: doc.document_number, error });
      }
      options.onProgress?.({
        documentsDownloaded: files.length,
        totalDocuments: totalDocumentsFound,
        currentDocument: doc.document_number,
        currentChunk: chunkLabel,
      });
    });
  }

  return {
    documentsDownloaded: files.length,
    files,
    totalBytes,
    dateRange: { from: options.from, to },
    skipped,
    failed,
  };
}

/**
 * Download a single FR document by document number.
 *
 * Fetches both the JSON metadata and XML full text.
 *
 * @throws When the API response lacks a document number or publication date,
 *         or when the document has no XML (see downloadSingleDocument).
 */
export async function downloadSingleFrDocument(
  documentNumber: string,
  output: string,
): Promise<FrDownloadedFile> {
  // Fetch JSON metadata first to get publication date and XML URL
  const metaUrl = `${FR_API_BASE}/documents/${documentNumber}.json?${new URLSearchParams(API_FIELDS.map((f) => ["fields[]", f])).toString()}`;
  const metaResponse = await fetchWithRetry(metaUrl);
  const doc = (await metaResponse.json()) as FrDocumentJsonMeta;

  if (!doc.document_number || !doc.publication_date) {
    throw new Error(
      `Invalid API response for document ${documentNumber}: missing document_number or publication_date`,
    );
  }

  return downloadSingleDocument(doc, output);
}

// ── Private helpers ──

/**
 * Download multiple documents concurrently using a worker pool.
 * Workers pull from a shared index, so concurrency is bounded without batching.
 *
 * Per-document failures are reported through `onComplete` (result null,
 * error set) rather than thrown, so one bad document never stalls the pool.
 */
async function downloadPool(
  docs: FrDocumentJsonMeta[],
  concurrency: number,
  outputDir: string,
  onComplete: (doc: FrDocumentJsonMeta, result: FrDownloadedFile | null, error: string | null) => void,
): Promise<void> {
  let nextIndex = 0;

  async function worker(): Promise<void> {
    // `nextIndex++` is safe without locking: JS is single-threaded and the
    // read-increment happens synchronously between awaits.
    while (nextIndex < docs.length) {
      const i = nextIndex++;
      const doc = docs[i];
      if (!doc) break;
      try {
        const result = await downloadSingleDocument(doc, outputDir);
        onComplete(doc, result, null);
      } catch (err) {
        onComplete(doc, null, err instanceof Error ? err.message : String(err));
      }
    }
  }

  // Never spawn more workers than there are documents.
  const workerCount = Math.min(concurrency, docs.length);
  await Promise.all(Array.from({ length: workerCount }, () => worker()));
}
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n // Get file sizes\n const xmlStat = await stat(xmlPath);\n const jsonSize = Buffer.byteLength(jsonContent, \"utf-8\");\n\n return {\n xmlPath,\n jsonPath,\n documentNumber: doc.document_number,\n publicationDate: doc.publication_date,\n size: Number(xmlStat.size) + jsonSize,\n };\n}\n\n/**\n * Break a date range into month-sized chunks.\n * Each chunk covers one calendar month (or partial month at boundaries).\n */\nfunction buildMonthChunks(from: string, to: string): Array<{ from: string; to: string }> {\n const chunks: Array<{ from: string; to: string }> = [];\n\n let current = new Date(from + \"T00:00:00Z\");\n const end = new Date(to + \"T00:00:00Z\");\n\n while (current <= end) {\n const chunkStart = current.toISOString().slice(0, 10);\n\n // End of this month\n const monthEnd = new Date(\n Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 0),\n );\n const chunkEnd = monthEnd <= end ? monthEnd.toISOString().slice(0, 10) : to;\n\n chunks.push({ from: chunkStart, to: chunkEnd });\n\n // Move to first day of next month\n current = new Date(\n Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 1),\n );\n }\n\n return chunks;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nasync function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n // Network-level error (DNS, TLS, connection reset) — retry\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `Network error for ${url}: ${err instanceof Error ? err.message : String(err)}. ` +\n `Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok) return response;\n\n // Retry on transient HTTP errors\n if ((response.status === 429 || response.status === 503 || response.status === 504) && attempt < MAX_RETRIES) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay = !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `HTTP ${response.status} for ${url}. Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Federal Register govinfo bulk downloader.\n *\n * Downloads complete daily-issue XML files from govinfo.gov. Each file contains\n * all FR documents published on a single day (~150 documents, ~2.4 MB average).\n * This is dramatically faster than the per-document API for historical backfill.\n *\n * URL pattern: https://www.govinfo.gov/content/pkg/FR-{YYYY-MM-DD}/xml/FR-{YYYY-MM-DD}.xml\n *\n * The existing FrASTBuilder handles daily-issue XML natively: FEDREG root is a\n * passthrough, section containers (RULES, NOTICES, etc.) are passthroughs, and\n * individual document elements emit via onEmit. 
No splitter needed.\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat } from \"node:fs/promises\";\nimport { dirname, join } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\n\n/** Base URL for govinfo FR bulk data */\nconst GOVINFO_BASE = \"https://www.govinfo.gov/content/pkg\";\n\n/** Default number of concurrent downloads */\nconst DEFAULT_CONCURRENCY = 10;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n// ── Public types ──\n\n/** Options for downloading FR bulk XML from govinfo */\nexport interface FrGovinfoBulkOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Number of concurrent downloads (default 10) */\n concurrency?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrGovinfoProgress) => void) | undefined;\n}\n\n/** Progress info for govinfo download callback */\nexport interface FrGovinfoProgress {\n /** Files downloaded so far */\n downloaded: number;\n /** Total publishing days in date range */\n totalDays: number;\n /** Skipped days (weekends/holidays — 404) */\n skipped: number;\n /** Failed downloads */\n failed: number;\n /** Current date being downloaded */\n currentDate: string;\n}\n\n/** A successfully downloaded bulk file */\nexport interface FrGovinfoDownloadedFile {\n /** Absolute path to the downloaded XML file */\n path: string;\n /** Publication date (YYYY-MM-DD) */\n date: string;\n /** File size in bytes */\n size: number;\n}\n\n/** Result of a govinfo bulk download */\nexport interface FrGovinfoResult {\n /** Number of daily files downloaded */\n filesDownloaded: number;\n /** Downloaded files */\n files: 
FrGovinfoDownloadedFile[];\n /** Total bytes downloaded */\n totalBytes: number;\n /** Date range covered */\n dateRange: { from: string; to: string };\n /** Days skipped (no issue published — weekends/holidays) */\n skipped: number;\n /** Days that failed to download */\n failed: number;\n}\n\n// ── Public functions ──\n\n/**\n * Build the govinfo download URL for a single day's FR issue.\n */\nexport function buildGovinfoFrUrl(date: string): string {\n return `${GOVINFO_BASE}/FR-${date}/xml/FR-${date}.xml`;\n}\n\n/**\n * Build the local file path for a downloaded daily-issue XML.\n * Stored as: {output}/bulk/{YYYY}/FR-{YYYY-MM-DD}.xml\n */\nexport function buildGovinfoBulkPath(date: string, outputDir: string): string {\n const year = date.slice(0, 4);\n return join(outputDir, \"bulk\", year, `FR-${date}.xml`);\n}\n\n/**\n * Download FR daily-issue XML files from govinfo for a date range.\n * Skips weekends/holidays (404 responses) and retries transient errors.\n */\nexport async function downloadFrBulk(options: FrGovinfoBulkOptions): Promise<FrGovinfoResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const concurrency = options.concurrency ?? 
DEFAULT_CONCURRENCY;\n\n // Generate all dates in range\n const dates = generateDateRange(options.from, to);\n\n const files: FrGovinfoDownloadedFile[] = [];\n let totalBytes = 0;\n let skipped = 0;\n let failed = 0;\n\n // Download concurrently using a worker pool\n let nextIndex = 0;\n\n async function worker(): Promise<void> {\n while (nextIndex < dates.length) {\n const i = nextIndex++;\n const date = dates[i];\n if (!date) break;\n\n options.onProgress?.({\n downloaded: files.length,\n totalDays: dates.length,\n skipped,\n failed,\n currentDate: date,\n });\n\n const url = buildGovinfoFrUrl(date);\n const filePath = buildGovinfoBulkPath(date, options.output);\n\n try {\n const result = await downloadSingleDay(url, filePath, date);\n if (result) {\n files.push(result);\n totalBytes += result.size;\n } else {\n // null means 404 — no issue published on this date\n skipped++;\n }\n } catch (err) {\n console.warn(`Warning: Failed to download ${date}: ${err instanceof Error ? err.message : String(err)}`);\n failed++;\n }\n }\n }\n\n const workerCount = Math.min(concurrency, dates.length);\n await Promise.all(Array.from({ length: workerCount }, () => worker()));\n\n // Final progress update\n options.onProgress?.({\n downloaded: files.length,\n totalDays: dates.length,\n skipped,\n failed,\n currentDate: \"done\",\n });\n\n return {\n filesDownloaded: files.length,\n files,\n totalBytes,\n dateRange: { from: options.from, to },\n skipped,\n failed,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Download a single day's FR issue XML. 
Returns null if 404 (no issue).\n */\nasync function downloadSingleDay(\n url: string,\n filePath: string,\n date: string,\n): Promise<FrGovinfoDownloadedFile | null> {\n const response = await fetchWithRetry(url);\n\n if (response.status === 404) {\n return null; // No issue published on this date (weekend/holiday)\n }\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status} for ${url}`);\n }\n\n if (!response.body) {\n throw new Error(`No response body for ${url}`);\n }\n\n await mkdir(dirname(filePath), { recursive: true });\n\n const dest = createWriteStream(filePath);\n await pipeline(Readable.fromWeb(response.body as never), dest);\n\n const fileStat = await stat(filePath);\n\n return {\n path: filePath,\n date,\n size: Number(fileStat.size),\n };\n}\n\n/**\n * Generate all dates (YYYY-MM-DD) in a range, inclusive.\n */\nfunction generateDateRange(from: string, to: string): string[] {\n const dates: string[] = [];\n const current = new Date(from + \"T12:00:00Z\"); // Noon UTC to avoid DST issues\n const end = new Date(to + \"T12:00:00Z\");\n\n while (current <= end) {\n dates.push(current.toISOString().slice(0, 10));\n current.setUTCDate(current.getUTCDate() + 1);\n }\n\n return dates;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nasync function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok || response.status === 404) return response;\n\n if (\n (response.status === 429 || response.status === 503 || response.status === 504) &&\n attempt < MAX_RETRIES\n ) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay =\n !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n"],"mappings":";AAiBO,IAAM,wBAAwB,CAAC,QAAQ,WAAW,UAAU,UAAU;AAMtE,IAAM,uBAAuB,IAAI,IAAY,qBAAqB;AAGlE,IAAM,wBAAwB,oBAAI,IAAI;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGM,IAAM,uBAAyD;AAAA,EACpE,MAAM;AAAA,EACN,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AACZ;AAKO,IAAM,uBAAuB,oBAAI,IAAI;AAAA,EAC1C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,4BAA4B,oBAAI,IAAI;AAAA,EAC/C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,qBAAqB;AAM3B,IAAM,wBAA0D;AAAA,EACrE,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AACP;AAKO,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAMM,IAAM,kBAAwD;AAAA,EACnE,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,QAAQ;AAAA;AACV;AAKO,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,oBAAoB;AAK1B,
IAAM,wBAAwB,oBAAI,IAAI;AAAA,EAC3C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,2BAA2B,oBAAI,IAAI;AAAA,EAC9C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,gCAAgC,oBAAI,IAAI;AAAA,EACnD;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,mBAAmB;AAKzB,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,0BAA0B,oBAAI,IAAI;AAAA,EAC7C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB;AAGzB,IAAM,oBAAoB;;;ACrH1B,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA,QAAsB,CAAC;AAAA;AAAA,EAEhC,wBAAwB;AAAA;AAAA,EAExB,iBAAoC;AAAA,IAC1C,cAAc;AAAA,IACd,wBAAwB;AAAA,EAC1B;AAAA;AAAA,EAEiB,gBAAqC,CAAC;AAAA,EAEvD,YAAY,SAA8B;AACxC,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA,EAGA,mBAAiD;AAC/C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,cAAc,MAAc,OAAyB;AAEnD,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,GAAG;AACrC;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,8BAA8B,IAAI,IAAI,GAAG;AAE3C,UAAI,SAAS,UAAU,SAAS,SAAS;AACvC,aAAK,YAAY,IAAI;AACrB;AAAA,MACF;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AACrE;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,MAAM,KAAK,EAAE,MAAM,gBAAgB,aAAa,MAAM,YAAY,GAAG,CAAC
;AAC3E;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,MAAM,KAAK,EAAE,MAAM,mBAAmB,aAAa,MAAM,YAAY,GAAG,CAAC;AAC9E;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,WAAW,MAAM,KAAK;AAC3B;AAAA,IACF;AAIA,QAAI,SAAS,kBAAkB;AAC7B,YAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,UAAI,aAAa,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC3E,cAAM,cAAc,YAAY;AAEhC,iBAAS,IAAI,YAAY,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AACzD,gBAAM,QAAQ,YAAY,SAAS,CAAC;AACpC,cAAI,OAAO,SAAS,YAAa,MAAqB,eAAe,OAAO;AAC1E,YAAC,MAAqB,aAAa;AACnC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,iBAAiB,MAAM,KAAK;AACjC;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA;AAAA,EAGA,eAAe,MAAoB;AAEjC,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,KAAK,sBAAsB,IAAI,IAAI,GAAG;AACxE;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,UAAU,SAAS,SAAS;AACvC,WAAK,aAAa,IAAI;AACtB
;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,KAAK,SAAS,kBAAkB;AAC7D,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,UAAU,IAAI;AACnB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,eAAe,IAAI;AACxB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,WAAW;AAChB;AAAA,IACF;AAGA,QAAI,KAAK,MAAM,SAAS,KAAK,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC,GAAG,gBAAgB,MAAM;AACpF,WAAK,MAAM,IAAI;AAAA,IACjB;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,MAAoB;AACzB,QAAI,KAAK,wBAAwB,EAAG;AAEpC,UAAM,QAAQ,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAC9C,QAAI,CAAC,MAAO;AAGZ,QACE,MAAM,SAAS,aACf,MAAM,SAAS,kBACf,MAAM,SAAS,oBACf,MAAM,SAAS,eACf,MAAM,SAAS,iBACf,MAAM,SAAS,SACf;AACA,YAAM,cAAc;AACpB;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,aAAa,MAAM,MAAM,SAAS,WAAW;AAC9D,YAAM,cAAc,MAAM;AAE1B,YAAM,aAAa,KAAK,QAAQ,QAAQ,GAAG;AAC3C,UAAI,cAAc,eAAe,KAAK;AACpC,oBAAY,SAAS,KAAK;AAAA,UACxB,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,YAAY,MAAM,MAAM,SAAS,UAAU;AAC5D,YAAM,aAAa,MAAM;AACzB,YAAM,aAAa,KAAK,QAAQ,QAAQ,GAAG;AAC3C,UAAI,WAAW,UAAU;AACvB,YAAI,cAAc,eAAe,KAAK;AACpC,qBAAW,SAAS,KAAK;AAAA,YACvB,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AACL,mBAAW,QAAQ,WAAW,QAAQ,MAAM;AAAA,MAC9C;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,UAAU,MAAM,MAAM,SAAS,QAAQ;AACxD,YAAM,cAAc;AACpB;AAAA,IACF;AAAA,EAGF;AAAA;AAAA,EAIQ,aAAa,aAA2B;AAC9C,SAAK,iBAAiB;AAAA,MACpB,cAAc;AAAA,MACd,wBAAwB,qBAAqB,WAAW,KAAK,YAAY,YAAY;AAAA,IACvF;AAEA,UAAM,OAAkB;AAAA,MACtB,MAAM;AAAA,MACN,WAAW;AAAA,MACX,UAAU,CAAC;AAAA,MACX,eAAe;AAAA,IACjB;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,cAAc,aAA2B;AAC/C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,cAAc,CAAC,MAAM,KAAM;AAExD,UAAM,YAAY,MAAM;AAGxB,QAAI,KAAK,eAAe,SAAS;AAC/B,gB
AAU,UAAU,KAAK,eAAe;AAAA,IAC1C;AAGA,QAAI,KAAK,eAAe,gBAAgB;AACtC,gBAAU,aAAa,UAAU,KAAK,eAAe,cAAc;AACnE,gBAAU,WAAW,KAAK,eAAe;AAAA,IAC3C;AAGA,UAAM,YAA4B,CAAC;AACnC,eAAW,KAAK,KAAK,OAAO;AAC1B,UAAI,EAAE,SAAS,cAAc,EAAE,MAAM,SAAS,SAAS;AACrD,cAAM,KAAK,EAAE;AACb,kBAAU,KAAK;AAAA,UACb,WAAW,GAAG;AAAA,UACd,UAAU,GAAG;AAAA,UACb,SAAS,GAAG;AAAA,UACZ,YAAY,GAAG;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,UAAuB;AAAA,MAC3B;AAAA,MACA,cAAc;AAAA,QACZ,SAAS,KAAK,eAAe;AAAA,QAC7B,QAAQ,KAAK,eAAe;AAAA,MAC9B;AAAA,IACF;AAGA,SAAK,cAAc,KAAK,EAAE,GAAG,KAAK,eAAe,CAAC;AAElD,SAAK,QAAQ,OAAO,WAAW,OAAO;AAAA,EACxC;AAAA;AAAA,EAIQ,kBAAkB,aAA2B;AACnD,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,eAAgB;AAE7C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAEX,YAAQ,aAAa;AAAA,MACnB,KAAK;AACH,aAAK,eAAe,SAAS;AAC7B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,YAAY;AAChC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,cAAc;AAClC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,UAAU;AAC9B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,MAAM,KAAK,QAAQ,YAAY,EAAE,EAAE,KAAK;AAC5D;AAAA,MACF,KAAK;AAEH;AAAA,IACJ;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,cAAsB,OAAyB;AACjE,UAAM,SAAS,MAAM,QAAQ,KAAK;AAClC,UAAM,QAAQ,sBAAsB,MAAM,KAAK;AAE/C,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAAA,EAEQ,aAAa,aAA2B;AAC9C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,UAAW;AAExC,UAAM,cAAc,MAAM,WAAW,KAAK;AAC1C,QAAI,CAAC,YAAa;AAIlB,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAEpD,QAAI,aAAa,SAAS,mBAAmB;AAE3C,YAAMA,eAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,WAAK,cAAcA,YAAW;AAC9B;AAAA,IACF;AAIA,UAAM,cAA2B;AAAA,MAC/B,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,SAAK,cAAc,WAAW;AAAA,EAChC;AAAA;AAAA,EAIQ,YAAY,aAA2B;AAC7C,UAAM,OAAoB;AAAA,MACxB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU,CAAC;AAAA,IACb;AACA,SAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACxE;AAAA,EAEQ,aAAa,aAA2B;AAC9C,
UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,cAAc,MAAM;AAG1B,QAAI,YAAY,SAAS,WAAW,EAAG;AAGvC,UAAM,SAAS,KAAK,mBAAmB,KAAK,KAAK,eAAe;AAChE,QAAI,QAAQ,MAAM;AAChB,UAAI,OAAO,KAAK,SAAS,SAAS;AAChC,QAAC,OAAO,KAAmB,SAAS,KAAK,WAAW;AAAA,MACtD,WAAW,OAAO,KAAK,SAAS,QAAQ;AACtC,QAAC,OAAO,KAAkB,SAAS,KAAK,WAAW;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,WAAW,aAAqB,OAAyB;AAC/D,QAAI,aAAyB;AAE7B,QAAI,gBAAgB,KAAK;AACvB,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,mBAAa;AAAA,IACf,WAAW,gBAAgB,MAAM;AAG/B,YAAM,iBAAiB,KAAK,UAAU,MAAM,MAAM;AAClD,mBAAa,iBAAiB,gBAAgB;AAAA,IAChD,WAAW,gBAAgB,MAAM;AAC/B,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,YAAM,SAAS,MAAM,GAAG,KAAK;AAC7B,mBAAa,gBAAgB,MAAM,KAAK;AAAA,IAC1C;AAEA,UAAM,OAAmB;AAAA,MACvB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA,EAEQ,YAAY,aAA2B;AAC7C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,aAAa,MAAM;AAGzB,QAAI,WAAW,eAAe,iBAAiB,MAAM,YAAY;AAC/D,iBAAW,OAAO,MAAM,WAAW,KAAK;AAAA,IAC1C;AAGA,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,QAAI,CAAC,YAAa;AAElB,QAAI,YAAY,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC1E,MAAC,YAAY,KAAqB,SAAS,KAAK,UAAU;AAAA,IAC5D,WAAW,YAAY,SAAS,YAAY,YAAY,MAAM,SAAS,UAAU;AAC/E,YAAM,eAAe,YAAY;AACjC,UAAI,aAAa,UAAU;AACzB,qBAAa,SAAS,KAAK,UAAU;AAAA,MACvC;AAAA,IACF,WAAW,YAAY,SAAS,aAAa,YAAY,SAAS,gBAAgB;AAEhF,UAAI,WAAW,MAAM;AACnB,oBAAY,cAAc,WAAW;AAAA,MACvC,WAAW,WAAW,UAAU;AAC9B,mBAAW,SAAS,WAAW,UAAU;AACvC,cAAI,MAAM,KAAM,aAAY,cAAc,MAAM;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,SAAS,aAA2B;AAC1C,UAAM,cAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,UAAM,WAAW,YAAY,WAAW,KAAK,YAAY,YAAY;AACrE,UAAM,OAAiB;AAAA,MACrB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,QAAQ,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACrE;AAAA,EAEQ,UAAU,aAA2B;AAC3C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,WAAW,MAAM;AAGvB,QAAI,MAAM,WAAW,KAAK,KAAK,SAAS,SAAS,WAAW,GAAG;AAC7D,YAAM,cAA2B;AAAA,Q
AC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM,MAAM,WAAW,KAAK;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AACA,eAAS,SAAS,KAAK,WAAW;AAAA,IACpC;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,QAAQ;AAAA,IACtD;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,aAAqB,OAAyB;AAChE,QAAI,gBAAgB,WAAW;AAE7B,YAAM,QAAQ,MAAM,OAAO,KAAK;AAChC,YAAM,OAAO,MAAM,MAAM,KAAK;AAC9B,YAAM,QAAQ,SAAS,OAAO,GAAG,KAAK,aAAa,IAAI,KAAK;AAG5D,UAAI,OAAO;AACT,cAAM,YAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,YACR;AAAA,cACE,MAAM;AAAA,cACN,YAAY;AAAA,cACZ,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AACA,aAAK,cAAc,SAAS;AAAA,MAC9B;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,WAAW;AAE7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,aAAa,aAA2B;AAC9C,QAAI,gBAAgB,WAAW;AAC7B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,YAAY,gBAAgB,UAAU;AACxD,WAAK,aAAa,WAAW;AAC7B;AAAA,IACF;AAEA,QAAI,gBAAgB,aAAa,gBAAgB,QAAQ;AACvD,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,UAAU,WAAW;AAC1B;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,aAA2B;AAC/C,QAAI,gBAAgB,OAAO;AAEzB,YAAM,OAAiB;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,QACV,UAAU,CAAC;AAAA,MACb;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,MAAM,YAAY,GAAG,CAAC;AACxE;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,kBAAkB,aAAa,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,eAAe,aAA2B;AAChD,QAAI,gBAAgB,OAAO;AACzB,YAAMC,SAAQ,KAAK,SAAS,WAAW;AACvC,UAAI,CAACA,UAAS,CAACA,OAAM,KAAM;AAE3B,YAAM,UAAUA,OAAM;AAGtB,YAAM,YAAY,KAAK,mBAAmB;AAC1C,UAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,QAAC,UAAU,KAAmB,SAAS,KAAK,OAAO;AAAA,MACrD;AACA;AAAA,IACF;AAGA,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SA
AS,iBAAkB;AAE/C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAGX,UAAM,WAAW,KAAK,UAAU,WAAW;AAC3C,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,QAAQ;AACnD,YAAM,cAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,MAAC,SAAS,KAAkB,SAAS,KAAK,WAAW;AAAA,IACvD;AAAA,EACF;AAAA;AAAA,EAIQ,iBAAiB,aAAqB,QAA0B;AACtE,QAAI,gBAAgB,YAAY;AAC9B,WAAK,MAAM,KAAK;AAAA,QACd,MAAM;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,MAAM,CAAC;AAAA,QACP,YAAY,CAAC;AAAA,MACf,CAAC;AACD;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,eAAe,aAAa,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,YAAM,aAAa,KAAK,eAAe;AACvC,UAAI,YAAY;AACd,mBAAW,aAAa,CAAC;AAAA,MAC3B;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,YAAY,GAAG,CAAC;AACjE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AAEzB,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,YAAY,GAAG,CAAC;AAClE;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,aAA2B;AACnD,QAAI,gBAAgB,YAAY;AAC9B,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,iBAAiB;AACtB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,eAAe;AACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,QAAQ,KAAK,SAAS,UAAU;AACtC,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,YAAuB;AAAA,MAC3B,MAAM;AAAA,MACN,SAAS;AAAA;AAAA,MACT,SAAS,MAAM,WAAW,CAAC;AAAA,MAC3B,MAAM,MAAM,QAAQ,CAAC;AAAA,IACvB;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,SAAS;AAAA,IACvD;AAAA,EACF;AAAA,EAEQ,mBAAyB;AAC/B,UAAM,cAAc,KAAK,SAAS,MAAM;AACxC,QAAI,CAAC,eAAe,YAAY,SAAS,cAAe;AAExD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,CAAC,WAAY;AAEjB,UAAM,OAAO,YAAY,WAAW,KAAK;AAIzC,QAAI,CAAC,WAAW,WAAW,WAAW,QAAQ,WAAW,GAAG;AAC1D,iBAAW,UAAU,CAAC,CAAC,CAAC;AAAA,IAC1B;AACA,UAAM,YAAY,WAAW,QAAQ,CA
AC;AACtC,QAAI,WAAW;AACb,gBAAU,KAAK,IAAI;AAAA,IACrB;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,WAAW,KAAK,SAAS,KAAK;AACpC,QAAI,CAAC,SAAU;AAEf,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,MAAM,KAAK,CAAC,GAAG,WAAW,UAAU,CAAC;AAChD,iBAAW,aAAa,CAAC;AAAA,IAC3B;AAAA,EACF;AAAA,EAEQ,iBAAuB;AAC7B,UAAM,YAAY,KAAK,MAAM,IAAI;AACjC,QAAI,CAAC,aAAa,UAAU,SAAS,YAAa;AAElD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,WAAW,KAAK,UAAU,WAAW,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAAA;AAAA,EAIQ,aAAmB;AACzB,UAAM,QAAQ,KAAK,SAAS,gBAAgB;AAC5C,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,OAAO,MAAM,WAAW,KAAK;AAKnC,UAAM,WAAW,yBAAyB,KAAK,IAAI;AACnD,QAAI,UAAU;AACZ,WAAK,eAAe,iBAAiB,SAAS,CAAC;AAAA,IACjD;AAIA,UAAM,YAAY,wCAAwC,KAAK,IAAI;AACnE,QAAI,WAAW;AACb,YAAM,CAAC,EAAE,OAAO,OAAO,KAAK,IAAI;AAChC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AACpC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AACpC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AAEpC,YAAM,WAAW,KAAK,KAAK,MAAO,KAAK,OAAO;AAC9C,YAAM,QAAQ,IAAI,KAAK,UAAU,KAAK,GAAG,EAAE;AAE3C,UAAI,MAAM,SAAS,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM,IAAI;AACzD;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM,QAAQ,IAAI,CAAC;AACjC,YAAM,UAAU,MAAM,YAAY;AAClC,YAAM,WAAW,OAAO,MAAM,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAC7D,YAAM,SAAS,OAAO,MAAM,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AACtD,WAAK,eAAe,kBAAkB,GAAG,OAAO,IAAI,QAAQ,IAAI,MAAM;AAAA,IACxE;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,MAAqB;AACzC,UAAM,WAAW,KAAK,mBAAmB;AACzC,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,SAAS;AACpD,MAAC,SAAS,KAAmB,SAAS,KAAK,IAAI;AAAA,IACjD;AAAA,EACF;AAAA,EAEQ,qBAA6C;AACnD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,YAAY;AACtC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,UAAU,KAAK,MAAM,CAAC,GAAG,SAAS,aAAa;AACzE,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,SAAS;AACnC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ
,UAAU,MAAyC;AACzD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,MAAM;AAChC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,SAAS,aAA6C;AAC5D,QAAI,KAAK,MAAM,WAAW,EAAG,QAAO;AAGpC,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,gBAAgB,aAAa;AAC9C,eAAO,KAAK,MAAM,OAAO,GAAG,CAAC,EAAE,CAAC;AAAA,MAClC;AAAA,IACF;AAGA,YAAQ;AAAA,MACN,yDAAyD,WAAW,kBACnD,KAAK,MAAM,IAAI,CAAC,MAAM,EAAE,WAAW,EAAE,KAAK,IAAI,CAAC;AAAA,IAClE;AACA,WAAO;AAAA,EACT;AACF;;;ACzjCA,SAAS,sBAAsB,SAAyB;AACtD,QAAM,MAA8B;AAAA,IAClC,MAAM;AAAA,IACN,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,yBAAyB;AAAA,EAC3B;AACA,SAAO,IAAI,OAAO,KAAK,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG;AAClE;AASO,SAAS,mBACd,MACA,UACA,SACA,UACiB;AACjB,QAAM,iBAAiB,UAAU,mBAAmB,QAAQ,kBAAkB;AAC9E,QAAM,UAAU,UAAU,SAAS,QAAQ,WAAW,KAAK,WAAW;AACtE,QAAM,kBAAkB,UAAU,oBAAoB,QAAQ,mBAAmB;AACjF,QAAM,eACJ,WAAW,sBAAsB,SAAS,IAAI,IAAI,QAAQ;AAG5D,MAAI;AACJ,MAAI,UAAU,YAAY,SAAS,SAAS,SAAS,GAAG;AACtD,eAAW,SAAS,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EAChD,WAAW,QAAQ,QAAQ;AACzB,eAAW,CAAC,QAAQ,MAAM;AAC1B,QAAI,QAAQ,WAAW;AACrB,eAAS,KAAK,QAAQ,SAAS;AAAA,IACjC;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,UAAU,kBAAkB,SAAS,eAAe,SAAS,GAAG;AAClE,oBAAgB,SAAS,eAAe,IAAI,CAAC,MAAM,GAAG,EAAE,KAAK,aAAa,EAAE,IAAI,EAAE;AAAA,EACpF,WAAW,QAAQ,aAAa;AAC9B,oBAAgB,CAAC,QAAQ,WAAW;AAAA,EACtC;AAGA,MAAI;AACJ,MAAI,UAAU,cAAc,SAAS,WAAW,SAAS,GAAG;AAC1D,gBAAY,SAAS;AAAA,EACvB;AAGA,QAAM,gBACJ,YAAY,SAAS,SAAS,IAAI,SAAS,CAAC,IAAI;AAGlD,QAAM,aAAa,UAAU;AAG7B,QAAM,MAAM,UAAU,wBAAwB,CAAC,KAAK,QAAQ;AAE5D,QAAM,KAAsB;AAAA,IAC1B,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY,KAAK,cAAc,UAAU,cAAc;AAAA,IACvD,OAAO;AAAA,IACP,cAAc;AAAA;AAAA,IACd,YAAY;AAAA,IACZ,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,cAAc;AAAA,IACd,UAAU;AAAA,IACV,cAAc;AAAA;AAAA,IAGd,QAAQ;AAAA;AAAA,IAGR,iBAAiB,kBAAkB;AAAA,IACnC,eAAe,gBAAgB;AAAA,IAC/B,aAAa;AAAA,IACb,WAAW,UAAU;AAAA,IACrB,kBAAkB,mBAAmB;AAAA,IACrC,UAAU,YAAY,SAAS,SAAS,IAAI,WAAW;AAAA,IACvD,gBAAgB,iBAAiB,cAAc,SAAS,IAAI,gBAAgB;AAAA,IAC5E,YAAY,aAAa,UAAU,SAAS,IAAI,YAAY;AAAA,IAC5D
,KAAK,OAAO;AAAA,IACZ,gBAAgB,UAAU,gBAAgB;AAAA,IAC1C,qBAAqB,UAAU,qBAAqB;AAAA,IACpD,WAAW,UAAU,UAAU;AAAA,EACjC;AAEA,SAAO;AACT;;;ACnJA,SAAS,YAAY;AAUd,SAAS,kBACd,gBACA,iBACA,YACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,YAAY,MAAM,MAAM,OAAO,GAAG,cAAc,KAAK;AACnE;AAUO,SAAS,uBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,MAAM;AAChE;AAUO,SAAS,wBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,OAAO;AACjE;AAKO,SAAS,cAAc,MAAc,OAAe,YAA4B;AACrF,SAAO,KAAK,YAAY,MAAM,MAAM,KAAK;AAC3C;AAKO,SAAS,aAAa,MAAc,YAA4B;AACrE,SAAO,KAAK,YAAY,MAAM,IAAI;AACpC;AAKA,SAAS,oBAAoB,MAA+C;AAC1E,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,SAAO;AAAA,IACL,MAAM,MAAM,CAAC,KAAK;AAAA,IAClB,OAAO,MAAM,CAAC,KAAK;AAAA,EACrB;AACF;;;AC3EA,SAAS,kBAAkB,kBAAkB;AAC7C,SAAS,UAAU,SAAS,YAAY;AACxC,SAAS,QAAAC,OAAM,eAAe;AAC9B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAqEP,IAAM,kBAAkB,IAAI,IAAY,qBAAqB;AAU7D,eAAsB,mBAAmB,SAAqD;AAC5F,QAAM,WAAW,MAAM,iBAAiB,QAAQ,OAAO,QAAQ,MAAM,QAAQ,EAAE;AAE/E,MAAI,qBAAqB;AACzB,MAAI,qBAAqB;AACzB,MAAI,kBAAkB;AAEtB,QAAM,eAAe,mBAAmB;AAMxC,MAAI,iBAAiB;AACrB,aAAW,WAAW,UAAU;AAC9B,QAAI;AACJ,QAAI;AACF,kBAAY,MAAM,aAAa,OAAO;AAAA,IACxC,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,4BAA4B,OAAO,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1F;AACA;AAAA,IACF;AAEA,eAAW,OAAO,WAAW;AAE3B,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,YACE,CAAC,gBAAgB,IAAI,IAAI,QAAQ,YAAY,KAC7C,CAAC,QAAQ,MAAM,SAAS,IAAI,QAAQ,YAA8B,GAClE;AACA;AAAA,QACF;AAAA,MACF;AAEA,UAAI,QAAQ,QAAQ;AAClB;AACA;AAAA,MACF;AAEA,YAAM,aAAa;AAAA,QACjB,IAAI;AAAA,QACJ,IAAI;AAAA,QACJ,QAAQ;AAAA,MACV;AAEA,YAAM,cAAc,mBAAmB,IAAI,MAAM,IAAI,SAAS,IAAI,SAAS,IAAI,QAAQ;AAEvF,YAAM,WAAW,eAAe,IAAI,MAAM,aAAa;AAAA,QACrD,eAAe;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,aACE,QAAQ,cAAc,aAClB,CAAC,OAAO,aAAa,QAAQ,IAAI,UAAU,IAC3C;AAAA,MACR,CAAC;AAED,YAAM,MAAM,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AACpD,YAAM,UAAU,YAAY,UAAU,OAAO;AAE7C;AACA,4BAAsB,KAAK,MAAM,SAAS,SAAS,CAAC;AAGpD,YAAM,MAAM,QAAQ
,YAAY,EAAE;AAClC,UAAI,MAAM,iBAAiB;AACzB,0BAAkB;AAAA,MACpB;AAAA,IACF;AAEA;AAEA,YAAQ,aAAa;AAAA,MACnB;AAAA,MACA;AAAA,MACA,YAAY,SAAS;AAAA,MACrB,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL;AAAA,IACA,OAAO,CAAC;AAAA;AAAA,IACR;AAAA,IACA;AAAA,IACA,QAAQ,QAAQ;AAAA,EAClB;AACF;AAOA,eAAe,aAAa,SAA0C;AACpE,QAAM,YAA4B,CAAC;AAEnC,QAAM,UAAU,IAAI,aAAa;AAAA,IAC/B,QAAQ,CAAC,MAAM,YAAY;AAEzB,YAAM,eAAe,QAAQ,iBAAiB;AAC9C,YAAM,OAAO,aAAa,aAAa,SAAS,CAAC;AACjD,UAAI,CAAC,MAAM;AACT,gBAAQ;AAAA,UACN,8DAA8D,OAAO;AAAA,QAEvE;AAAA,MACF;AACA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA;AAAA,QACA,SAAS,QAAQ,EAAE,cAAc,IAAI,wBAAwB,GAAG;AAAA,QAChE,iBAAiB;AAAA,QACjB,gBAAgB,MAAM,kBAAkB;AAAA,MAC1C,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,QAAM,SAAS,IAAI,UAAU,EAAE,kBAAkB,GAAG,CAAC;AACrD,SAAO,GAAG,eAAe,CAAC,MAAM,UAAU,QAAQ,cAAc,MAAM,KAAK,CAAC;AAC5E,SAAO,GAAG,gBAAgB,CAAC,SAAS,QAAQ,eAAe,IAAI,CAAC;AAChE,SAAO,GAAG,QAAQ,CAAC,SAAS,QAAQ,OAAO,IAAI,CAAC;AAEhD,QAAM,SAAS,iBAAiB,SAAS,OAAO;AAChD,QAAM,OAAO,YAAY,MAAM;AAG/B,QAAM,WAAW,QAAQ,QAAQ,UAAU,OAAO;AAClD,MAAI;AACJ,MAAI,WAAW,QAAQ,GAAG;AACxB,QAAI;AACF,YAAM,MAAM,MAAM,SAAS,UAAU,OAAO;AAC5C,iBAAW,KAAK,MAAM,GAAG;AAAA,IAC3B,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,yCAAyC,QAAQ,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxG;AAAA,IACF;AAAA,EACF;AAGA,aAAW,OAAO,WAAW;AAC3B,QAAI,YAAY,SAAS,oBAAoB,IAAI,gBAAgB;AAC/D,UAAI,WAAW;AACf,UAAI,kBAAkB,SAAS;AAAA,IACjC,OAAO;AAEL,YAAM,eAAe,kBAAkB,OAAO;AAC9C,UAAI,CAAC,cAAc;AACjB,gBAAQ;AAAA,UACN,6CAA6C,IAAI,kBAAkB,WAAW,oCAChD,OAAO;AAAA,QACvC;AAAA,MACF;AACA,UAAI,kBAAkB;AAAA,IACxB;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,iBACb,OACA,MACA,IACmB;AACnB,MAAI;AACJ,MAAI;AACF,gBAAY,MAAM,KAAK,KAAK;AAAA,EAC9B,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,6BAA6B,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxF,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,UAAU,OAAO,GAAG;AACtB,WAAO,CAAC,KAAK;AAAA,EACf;AAEA,MAAI,CAAC,UAAU,YAAY,GAAG;AAC5B,UAAM,IAAI,MAAM,eAAe,KAAK,8BAA8B;AAAA,EACpE;AAGA,QAAM,WAAqB,CAAC;AAC5B,QAAM,QAAQ,OAAO,QAAQ;AAG7B,MAAI,WAAW;AACf,MAAI,QAAQ,IAAI;AACd,eAAW,SAAS,OAAO,CAAC,MAAM;AAChC,YAAM,OA
AO,kBAAkB,CAAC;AAChC,UAAI,CAAC,KAAM,QAAO;AAClB,UAAI,QAAQ,OAAO,KAAM,QAAO;AAChC,UAAI,MAAM,OAAO,KAAK,MAAO,QAAO;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAEA,SAAO,SAAS,KAAK;AACvB;AAGA,eAAe,QAAQ,KAAa,SAAkC;AACpE,QAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAWC,MAAK,KAAK,MAAM,IAAI;AACrC,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,QAAQ,UAAU,OAAO;AAAA,IACjC,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACxD,cAAQ,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AACF;AAUO,SAAS,kBAAkB,UAA0B;AAE1D,QAAM,YAAY,mCAAmC,KAAK,QAAQ;AAClE,MAAI,WAAW;AACb,WAAO,GAAG,UAAU,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC;AAAA,EACxD;AAGA,QAAM,cAAc,gCAAgC,KAAK,QAAQ;AACjE,MAAI,aAAa;AACf,WAAO,GAAG,YAAY,CAAC,CAAC,IAAI,YAAY,CAAC,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;;;AC3UA,SAAS,yBAAyB;AAClC,SAAS,SAAAC,QAAO,QAAAC,OAAM,aAAa,mBAAmB;AACtD,SAAS,WAAAC,gBAAe;AACxB,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AAMzB,IAAM,cAAc;AAGpB,IAAM,WAAW;AAGjB,IAAM,sBAAsB;AAG5B,IAAM,cAAc;AAGpB,IAAM,sBAAsB;AAG5B,IAAM,aAAa;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAsFO,SAAS,kBACd,MACA,IACA,MACA,OACQ;AACR,QAAM,SAAS,IAAI,gBAAgB;AACnC,SAAO,IAAI,qCAAqC,IAAI;AACpD,SAAO,IAAI,qCAAqC,EAAE;AAClD,SAAO,IAAI,YAAY,OAAO,QAAQ,CAAC;AACvC,SAAO,IAAI,QAAQ,OAAO,IAAI,CAAC;AAC/B,SAAO,IAAI,SAAS,QAAQ;AAE5B,aAAW,SAAS,YAAY;AAC9B,WAAO,OAAO,YAAY,KAAK;AAAA,EACjC;AAEA,MAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAW,KAAK,OAAO;AACrB,aAAO,OAAO,sBAAsB,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,SAAO,GAAG,WAAW,mBAAmB,OAAO,SAAS,CAAC;AAC3D;AASA,eAAsB,oBAAoB,SAAuD;AAC/F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,cAAc,QAAQ,eAAe;AAE3C,QAAM,QAA4B,CAAC;AACnC,QAAM,SAA8B,CAAC;AACrC,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,sBAAsB;AAG1B,QAAM,SAAS,iBAAiB,QAAQ,MAAM,EAAE;AAEhD,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,UAAU,UAAa,MAAM,UAAU,QAAQ,MAAO;AAGlE,UAAM,YAAkC,CAAC;AACzC,QAAI,OAAO;AACX,QAAI,UAAU;AAEd,WAAO,SAAS;AACd,YAAM,UAAU,kBAAkB,MAAM,MAAM,MAAM
,IAAI,MAAM,QAAQ,KAAK;AAC3E,YAAM,WAAW,MAAM,eAAe,OAAO;AAC7C,YAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,UAAI,OAAO,KAAK,UAAU,UAAU;AAClC,cAAM,IAAI;AAAA,UACR,+BAA+B,OAAO;AAAA,QAExC;AAAA,MACF;AAGA,UAAI,SAAS,GAAG;AACd,+BAAuB,KAAK;AAAA,MAC9B;AAEA,YAAM,UAAU,KAAK,WAAW,CAAC;AAEjC,iBAAW,OAAO,SAAS;AACzB,YAAI,CAAC,IAAI,mBAAmB;AAC1B;AACA;AAAA,QACF;AACA,kBAAU,KAAK,GAAG;AAAA,MACpB;AAEA,gBAAU,QAAQ,KAAK,eAAe;AACtC;AAAA,IACF;AAGA,UAAM,YAAY,QAAQ,UAAU,SAAY,QAAQ,QAAQ,MAAM,SAAS,UAAU;AACzF,UAAM,iBAAiB,UAAU,MAAM,GAAG,SAAS;AACnD,UAAM,aAAa,MAAM,KAAK,MAAM,GAAG,CAAC;AAGxC,UAAM,aAAa,gBAAgB,aAAa,QAAQ,QAAQ,CAAC,KAAK,QAAQ,UAAU;AACtF,UAAI,QAAQ;AACV,cAAM,KAAK,MAAM;AACjB,sBAAc,OAAO;AAAA,MACvB,WAAW,OAAO;AAChB,eAAO,KAAK,EAAE,gBAAgB,IAAI,iBAAiB,MAAM,CAAC;AAAA,MAC5D;AACA,cAAQ,aAAa;AAAA,QACnB,qBAAqB,MAAM;AAAA,QAC3B,gBAAgB;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,cAAc;AAAA,MAChB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,qBAAqB,MAAM;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAsB,yBACpB,gBACA,QAC2B;AAE3B,QAAM,UAAU,GAAG,WAAW,cAAc,cAAc,SAAS,IAAI,gBAAgB,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC;AACzI,QAAM,eAAe,MAAM,eAAe,OAAO;AACjD,QAAM,MAAO,MAAM,aAAa,KAAK;AAErC,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR,qCAAqC,cAAc;AAAA,IACrD;AAAA,EACF;AAEA,SAAO,uBAAuB,KAAK,MAAM;AAC3C;AAQA,eAAe,aACb,MACA,aACA,WACA,YACe;AACf,MAAI,YAAY;AAEhB,iBAAe,SAAwB;AACrC,WAAO,YAAY,KAAK,QAAQ;AAC9B,YAAM,IAAI;AACV,YAAM,MAAM,KAAK,CAAC;AAClB,UAAI,CAAC,IAAK;AACV,UAAI;AACF,cAAM,SAAS,MAAM,uBAAuB,KAAK,SAAS;AAC1D,mBAAW,KAAK,QAAQ,IAAI;AAAA,MAC9B,SAAS,KAAK;AACZ,mBAAW,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,KAAK,IAAI,aAAa,KAAK,MAAM;AACrD,QAAM,QAAQ,IAAI,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM,OAAO,CAAC,CAAC;AACvE;AAEA,eAAe,uBACb,KACA,WAC2B;AAC3B,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,IAAI,mBAAmB;AAC1B,UAAM,IAAI;AAAA,MACR,YAAY,IAAI,eAAe;AAAA,IACjC;AAAA,EACF;AAEA,QAA
M,UAAU,uBAAuB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAC3F,QAAM,WAAW,wBAAwB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAG7F,QAAMC,OAAMC,SAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AAGjD,QAAM,cAAc,KAAK,UAAU,KAAK,MAAM,CAAC;AAC/C,QAAM,YAAY,UAAU,aAAa,OAAO;AAGhD,QAAM,cAAc,MAAM,eAAe,IAAI,iBAAiB;AAC9D,MAAI,CAAC,YAAY,MAAM;AACrB,UAAM,IAAI,MAAM,wBAAwB,IAAI,eAAe,MAAM;AAAA,EACnE;AAEA,QAAM,OAAO,kBAAkB,OAAO;AACtC,MAAI;AACF,UAAM,SAAS,SAAS,QAAQ,YAAY,IAAa,GAAG,IAAI;AAAA,EAClE,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,oCAAoC,IAAI,eAAe,SAAS,IAAI,iBAAiB,KAChF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACrD,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAGA,QAAM,UAAU,MAAMC,MAAK,OAAO;AAClC,QAAM,WAAW,OAAO,WAAW,aAAa,OAAO;AAEvD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,gBAAgB,IAAI;AAAA,IACpB,iBAAiB,IAAI;AAAA,IACrB,MAAM,OAAO,QAAQ,IAAI,IAAI;AAAA,EAC/B;AACF;AAMA,SAAS,iBAAiB,MAAc,IAAiD;AACvF,QAAM,SAA8C,CAAC;AAErD,MAAI,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC1C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,aAAa,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE;AAGpD,UAAM,WAAW,IAAI;AAAA,MACnB,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC;AAAA,IACjE;AACA,UAAM,WAAW,YAAY,MAAM,SAAS,YAAY,EAAE,MAAM,GAAG,EAAE,IAAI;AAEzE,WAAO,KAAK,EAAE,MAAM,YAAY,IAAI,SAAS,CAAC;AAG9C,cAAU,IAAI;AAAA,MACZ,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,SAAO;AACT;AAGA,eAAe,eAAe,KAAa,UAAU,GAAsB;AACzE,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AAEZ,QAAI,UAAU,aAAa;AACzB,YAAM,QAAQ,sBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,cAAQ;AAAA,QACN,qBAAqB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,iBAC5D,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,MACjE;AACA,YAAM,MAAM,KAAK;AACjB,aAAO,eAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuB,cAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,GAAI,QAAO;AAGxB,OAAK,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAAQ,UAAU,aAAa;AAC5G,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QAAQ,CAAC,MAAM,WAAW,KAAK,cAAc,IAC/C,cAAc,MACd,sBAAsB
,KAAK,IAAI,GAAG,OAAO;AAC7C,YAAQ;AAAA,MACN,QAAQ,SAAS,MAAM,QAAQ,GAAG,iBAAiB,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,IACnG;AACA,UAAM,MAAM,KAAK;AACjB,WAAO,eAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;;;AC5aA,SAAS,qBAAAC,0BAAyB;AAClC,SAAS,SAAAC,QAAO,QAAAC,aAAY;AAC5B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAC9B,SAAS,YAAAC,iBAAgB;AACzB,SAAS,YAAAC,iBAAgB;AAGzB,IAAM,eAAe;AAGrB,IAAMC,uBAAsB;AAG5B,IAAMC,eAAc;AAGpB,IAAMC,uBAAsB;AA+DrB,SAAS,kBAAkB,MAAsB;AACtD,SAAO,GAAG,YAAY,OAAO,IAAI,WAAW,IAAI;AAClD;AAMO,SAAS,qBAAqB,MAAc,WAA2B;AAC5E,QAAM,OAAO,KAAK,MAAM,GAAG,CAAC;AAC5B,SAAOL,MAAK,WAAW,QAAQ,MAAM,MAAM,IAAI,MAAM;AACvD;AAMA,eAAsB,eAAe,SAAyD;AAC5F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,cAAc,QAAQ,eAAeG;AAG3C,QAAM,QAAQ,kBAAkB,QAAQ,MAAM,EAAE;AAEhD,QAAM,QAAmC,CAAC;AAC1C,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,SAAS;AAGb,MAAI,YAAY;AAEhB,iBAAe,SAAwB;AACrC,WAAO,YAAY,MAAM,QAAQ;AAC/B,YAAM,IAAI;AACV,YAAM,OAAO,MAAM,CAAC;AACpB,UAAI,CAAC,KAAM;AAEX,cAAQ,aAAa;AAAA,QACnB,YAAY,MAAM;AAAA,QAClB,WAAW,MAAM;AAAA,QACjB;AAAA,QACA;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AAED,YAAM,MAAM,kBAAkB,IAAI;AAClC,YAAM,WAAW,qBAAqB,MAAM,QAAQ,MAAM;AAE1D,UAAI;AACF,cAAM,SAAS,MAAM,kBAAkB,KAAK,UAAU,IAAI;AAC1D,YAAI,QAAQ;AACV,gBAAM,KAAK,MAAM;AACjB,wBAAc,OAAO;AAAA,QACvB,OAAO;AAEL;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,gBAAQ,KAAK,+BAA+B,IAAI,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AACvG;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,KAAK,IAAI,aAAa,MAAM,MAAM;AACtD,QAAM,QAAQ,IAAI,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM,OAAO,CAAC,CAAC;AAGrE,UAAQ,aAAa;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,WAAW,MAAM;AAAA,IACjB;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf,CAAC;AAED,SAAO;AAAA,IACL,iBAAiB,MAAM;AAAA,IACvB;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAe,kBACb,KACA,UACA,MACyC;AACzC,QAAM,WAAW,MAAMG,gBAAe,GAAG;AAEzC,MAAI,SAAS,WAAW,KAAK;AAC3B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,IAAI;AAC
hB,UAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,EAAE;AAAA,EACtD;AAEA,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,MAAM,wBAAwB,GAAG,EAAE;AAAA,EAC/C;AAEA,QAAMT,OAAME,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,QAAM,OAAOH,mBAAkB,QAAQ;AACvC,QAAMK,UAASC,UAAS,QAAQ,SAAS,IAAa,GAAG,IAAI;AAE7D,QAAM,WAAW,MAAMJ,MAAK,QAAQ;AAEpC,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA,MAAM,OAAO,SAAS,IAAI;AAAA,EAC5B;AACF;AAKA,SAAS,kBAAkB,MAAc,IAAsB;AAC7D,QAAM,QAAkB,CAAC;AACzB,QAAM,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC5C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,KAAK,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAC7C,YAAQ,WAAW,QAAQ,WAAW,IAAI,CAAC;AAAA,EAC7C;AAEA,SAAO;AACT;AAGA,eAAeQ,gBAAe,KAAa,UAAU,GAAsB;AACzE,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AACZ,QAAI,UAAUF,cAAa;AACzB,YAAM,QAAQC,uBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,YAAME,OAAM,KAAK;AACjB,aAAOD,gBAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuBF,eAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,MAAM,SAAS,WAAW,IAAK,QAAO;AAEnD,OACG,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAC3E,UAAUA,cACV;AACA,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QACJ,CAAC,MAAM,WAAW,KAAK,cAAc,IACjC,cAAc,MACdC,uBAAsB,KAAK,IAAI,GAAG,OAAO;AAC/C,UAAME,OAAM,KAAK;AACjB,WAAOD,gBAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAASC,OAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;","names":["contentNode","frame","join","join","mkdir","stat","dirname","mkdir","dirname","stat","createWriteStream","mkdir","stat","dirname","join","pipeline","Readable","DEFAULT_CONCURRENCY","MAX_RETRIES","RETRY_BASE_DELAY_MS","fetchWithRetry","sleep"]}
1
+ {"version":3,"sources":["../src/fr-elements.ts","../src/fr-builder.ts","../src/fr-frontmatter.ts","../src/fr-path.ts","../src/converter.ts","../src/enricher.ts","../src/downloader.ts","../src/govinfo-downloader.ts"],"sourcesContent":["/**\n * Federal Register XML element classification.\n *\n * The FR XML is GPO/SGML-derived with no namespace. It shares many\n * inline formatting elements with eCFR (E T=\"nn\", SU, FTNT) but uses\n * a flat document-centric structure rather than a hierarchical DIV system.\n *\n * Each FR document (RULE, PRORULE, NOTICE, PRESDOCU) contains a preamble\n * (PREAMB) with structured metadata, supplementary information (SUPLINF)\n * with the document body, and optional regulatory text (REGTEXT).\n */\n\nimport type { InlineType } from \"@lexbuild/core\";\n\n// ── Document type elements ──\n\n/** FR document type element names as a const tuple — single source of truth */\nexport const FR_DOCUMENT_TYPE_KEYS = [\"RULE\", \"PRORULE\", \"NOTICE\", \"PRESDOCU\"] as const;\n\n/** FR document types supported by the API and XML */\nexport type FrDocumentType = (typeof FR_DOCUMENT_TYPE_KEYS)[number];\n\n/** Top-level document elements — each becomes an emitted section-level node */\nexport const FR_DOCUMENT_ELEMENTS = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n/** Container elements that group documents within daily issues */\nexport const FR_SECTION_CONTAINERS = new Set([\"RULES\", \"PRORULES\", \"NOTICES\", \"PRESDOCS\"]);\n\n/** Map from document element name to normalized document type string */\nexport const FR_DOCUMENT_TYPE_MAP: Readonly<Record<string, string>> = {\n RULE: \"rule\",\n PRORULE: \"proposed_rule\",\n NOTICE: \"notice\",\n PRESDOCU: \"presidential_document\",\n};\n\n// ── Preamble elements ──\n\n/** Preamble section elements containing structured content */\nexport const FR_PREAMBLE_SECTIONS = new Set([\n \"AGY\", // Agency section (HD + P)\n \"ACT\", // Action section (HD + P)\n \"SUM\", // Summary section (HD + P)\n 
\"DATES\", // Dates section (HD + P)\n \"EFFDATE\", // Effective date section (HD + P)\n \"ADD\", // Addresses section (HD + P)\n \"FURINF\", // Further information section (HD + P)\n]);\n\n/** Preamble metadata elements — text extracted for frontmatter */\nexport const FR_PREAMBLE_META_ELEMENTS = new Set([\n \"AGENCY\", // Issuing agency name (attrs: TYPE)\n \"SUBAGY\", // Sub-agency name\n \"CFR\", // CFR citation affected (e.g., \"10 CFR Part 2\")\n \"SUBJECT\", // Document title/subject\n \"DEPDOC\", // Department document number\n \"RIN\", // Regulation Identifier Number\n]);\n\n// ── Content elements ──\n\n/** Elements that contain paragraph text */\nexport const FR_CONTENT_ELEMENTS = new Set([\n \"P\", // Paragraph\n \"FP\", // Flush paragraph (attrs: SOURCE for indent level)\n]);\n\n/** Heading element — level determined by SOURCE attribute */\nexport const FR_HEADING_ELEMENT = \"HD\";\n\n/**\n * Map from HD SOURCE attribute to heading depth.\n * HED = top-level (section-like), HD1 = subsection, etc.\n */\nexport const FR_HD_SOURCE_TO_DEPTH: Readonly<Record<string, number>> = {\n HED: 1,\n HD1: 2,\n HD2: 3,\n HD3: 4,\n HD4: 5,\n HD5: 6,\n HD6: 6,\n HD8: 6,\n};\n\n// ── Inline formatting ──\n\n/** Inline formatting elements */\nexport const FR_INLINE_ELEMENTS = new Set([\n \"I\", // Italic\n \"B\", // Bold\n \"E\", // Emphasis (type varies by T attribute)\n \"SU\", // Superscript / footnote marker\n \"FR\", // Fraction\n \"AC\", // Accent/diacritical\n]);\n\n/**\n * Map from E element T attribute to InlineType.\n * Duplicated from eCFR — source packages must not import each other.\n */\nexport const FR_EMPHASIS_MAP: Readonly<Record<string, InlineType>> = {\n \"01\": \"bold\",\n \"02\": \"italic\",\n \"03\": \"italic\", // bold italic in print — FR uses T=\"03\" for case names, citations, and publication titles which render as italic\n \"04\": \"italic\", // italic in headings\n \"05\": \"italic\", // small caps — render as italic\n \"51\": \"sub\", // 
subscript\n \"52\": \"sub\", // subscript\n \"54\": \"sub\", // subscript (math)\n \"7462\": \"italic\", // special terms (et seq., De minimis)\n};\n\n// ── Regulatory text elements ──\n\n/** Regulatory text amendment elements (within SUPLINF) */\nexport const FR_REGTEXT_ELEMENTS = new Set([\n \"REGTEXT\", // Regulatory text container (attrs: TITLE, PART)\n \"AMDPAR\", // Amendment instruction paragraph\n \"SECTION\", // Section container\n \"SECTNO\", // Section number designation\n \"PART\", // Part container within REGTEXT\n \"AUTH\", // Authority citation in REGTEXT\n]);\n\n/** LSTSUB — List of subjects (CFR parts affected) */\nexport const FR_LSTSUB_ELEMENT = \"LSTSUB\";\n\n// ── Signature block ──\n\n/** Signature block elements */\nexport const FR_SIGNATURE_ELEMENTS = new Set([\n \"SIG\", // Signature block container\n \"NAME\", // Signer name\n \"TITLE\", // Signer title\n \"DATED\", // Date of signature\n]);\n\n// ── Presidential document subtypes ──\n\n/** Presidential document subtype containers */\nexport const FR_PRESIDENTIAL_SUBTYPES = new Set([\n \"EXECORD\", // Executive Order\n \"PRMEMO\", // Presidential Memorandum\n \"PROCLA\", // Proclamation\n \"DETERM\", // Presidential Determination\n \"PRNOTICE\", // Presidential Notice\n \"PRORDER\", // Presidential Order\n]);\n\n/** Presidential document metadata elements */\nexport const FR_PRESIDENTIAL_META_ELEMENTS = new Set([\n \"PSIG\", // Presidential signature (initials)\n \"PLACE\", // Place of issuance\n \"TITLE3\", // CFR Title 3 marker\n \"PRES\", // President name\n]);\n\n// ── Note elements ──\n\n/** Footnote and editorial note elements */\nexport const FR_NOTE_ELEMENTS = new Set([\n \"FTNT\", // Footnote\n \"EDNOTE\", // Editorial note\n \"OLNOTE1\", // Overlay note\n]);\n\n/** Footnote reference marker */\nexport const FR_FTREF_ELEMENT = \"FTREF\";\n\n// ── Block elements ──\n\n/** Block-level content wrappers */\nexport const FR_BLOCK_ELEMENTS = new Set([\n \"EXTRACT\", // Extracted/quoted 
text\n \"EXAMPLE\", // Illustrative example\n]);\n\n// ── Table elements (GPOTABLE format) ──\n\n/** GPOTABLE elements */\nexport const FR_TABLE_ELEMENTS = new Set([\n \"GPOTABLE\", // Table root\n \"TTITLE\", // Table title\n \"BOXHD\", // Header box container\n \"CHED\", // Column header entry (attrs: H for level)\n \"ROW\", // Data row (attrs: RUL for horizontal rules)\n \"ENT\", // Cell entry (attrs: I for indent, A for alignment)\n]);\n\n// ── Elements to ignore (skip entire subtree) ──\n\n/** Elements whose entire subtree should be skipped */\nexport const FR_IGNORE_ELEMENTS = new Set([\n \"CNTNTS\", // Table of contents in daily issue\n \"GPH\", // Graphics (not available in XML)\n \"GID\", // Graphics ID\n]);\n\n// ── Elements to skip (self only, no subtree) ──\n\n/** Self-contained elements to skip — metadata extracted elsewhere or irrelevant */\nexport const FR_SKIP_ELEMENTS = new Set([\n \"PRTPAGE\", // Page number reference (attrs: P for page)\n \"STARS\", // Visual separator (****)\n \"FILED\", // Filing info\n \"UNITNAME\", // Section name in daily issue\n \"VOL\", // Volume number (daily issue metadata)\n \"NO\", // Issue number (daily issue metadata)\n \"DATE\", // Date (daily issue level — document dates from preamble)\n \"NEWPART\", // New part container in daily issue\n \"PTITLE\", // Part title in daily issue\n \"PARTNO\", // Part number in daily issue\n \"PNOTICE\", // Part notice text\n]);\n\n// ── Passthrough elements ──\n\n/** Transparent wrappers — pass through without creating frames */\nexport const FR_PASSTHROUGH_ELEMENTS = new Set([\n \"FEDREG\", // Daily issue root element\n \"PREAMB\", // Preamble — children are handled individually\n \"SUPLINF\", // Supplementary information — children are handled individually\n]);\n\n// ── Metadata extraction elements ──\n\n/** FRDOC — Federal Register document citation, e.g., \"[FR Doc. 
2026-06029 ...]\" */\nexport const FR_FRDOC_ELEMENT = \"FRDOC\";\n\n/** BILCOD — Billing code (skip) */\nexport const FR_BILCOD_ELEMENT = \"BILCOD\";\n","/**\n * Federal Register AST Builder — converts SAX events from FR XML into AST nodes.\n *\n * Follows the stack-based pattern from the eCFR builder but adapted for FR's\n * flat, document-centric structure. Each FR document (RULE, NOTICE, PRORULE,\n * PRESDOCU) becomes a single section-level LevelNode emitted via onEmit.\n *\n * FR XML is GPO/SGML-derived with no namespace. It shares inline formatting\n * (E T=\"nn\", SU, FTNT) with eCFR but uses a different document structure:\n * preamble (PREAMB) → supplementary info (SUPLINF) → signature (SIG).\n */\n\nimport type { Attributes } from \"@lexbuild/core\";\nimport type {\n LevelNode,\n ContentNode,\n InlineNode,\n InlineType,\n NoteNode,\n TableNode,\n ASTNode,\n AncestorInfo,\n EmitContext,\n} from \"@lexbuild/core\";\nimport {\n FR_DOCUMENT_ELEMENTS,\n FR_SECTION_CONTAINERS,\n FR_DOCUMENT_TYPE_MAP,\n FR_PREAMBLE_SECTIONS,\n FR_PREAMBLE_META_ELEMENTS,\n FR_CONTENT_ELEMENTS,\n FR_HEADING_ELEMENT,\n FR_HD_SOURCE_TO_DEPTH,\n FR_INLINE_ELEMENTS,\n FR_EMPHASIS_MAP,\n FR_REGTEXT_ELEMENTS,\n FR_LSTSUB_ELEMENT,\n FR_SIGNATURE_ELEMENTS,\n FR_PRESIDENTIAL_SUBTYPES,\n FR_PRESIDENTIAL_META_ELEMENTS,\n FR_NOTE_ELEMENTS,\n FR_FTREF_ELEMENT,\n FR_BLOCK_ELEMENTS,\n FR_TABLE_ELEMENTS,\n FR_IGNORE_ELEMENTS,\n FR_SKIP_ELEMENTS,\n FR_PASSTHROUGH_ELEMENTS,\n FR_FRDOC_ELEMENT,\n FR_BILCOD_ELEMENT,\n} from \"./fr-elements.js\";\n\n/** Options for configuring the FR AST builder */\nexport interface FrASTBuilderOptions {\n /** Callback when a completed document node is ready */\n onEmit: (node: LevelNode, context: EmitContext) => void | Promise<void>;\n}\n\n/** Metadata extracted from the FR document XML during parsing */\nexport interface FrDocumentXmlMeta {\n /** Document type element name (RULE, NOTICE, etc.) 
*/\n documentType: string;\n /** Normalized document type (rule, proposed_rule, etc.) */\n documentTypeNormalized: string;\n /** Agency name from AGENCY element */\n agency?: string | undefined;\n /** Sub-agency name from SUBAGY element */\n subAgency?: string | undefined;\n /** Subject/title from SUBJECT element */\n subject?: string | undefined;\n /** CFR citation from CFR element */\n cfrCitation?: string | undefined;\n /** Regulation Identifier Number from RIN element */\n rin?: string | undefined;\n /** FR document number extracted from FRDOC text */\n documentNumber?: string | undefined;\n /** Publication date inferred from FRDOC filing date (YYYY-MM-DD) */\n publicationDate?: string | undefined;\n}\n\n/** Frame kinds for the stack */\ntype FrameKind =\n | \"document\"\n | \"content\"\n | \"inline\"\n | \"heading\"\n | \"preambleSection\"\n | \"preambleMeta\"\n | \"note\"\n | \"signature\"\n | \"signatureField\"\n | \"table\"\n | \"tableHeader\"\n | \"tableRow\"\n | \"tableCell\"\n | \"block\"\n | \"regtext\"\n | \"frdoc\"\n | \"ignore\";\n\n/** A stack frame tracking an in-progress element */\ninterface StackFrame {\n kind: FrameKind;\n elementName: string;\n node?: ASTNode;\n textBuffer: string;\n /** For GPOTABLE collection */\n headers?: string[][];\n rows?: string[][];\n currentRow?: string[];\n headerLevel?: number;\n}\n\n/**\n * Federal Register AST Builder.\n *\n * Consumes SAX events and produces LexBuild AST nodes. 
Each FR document\n * (RULE, NOTICE, PRORULE, PRESDOCU) is emitted as a single section-level\n * LevelNode via the onEmit callback.\n */\nexport class FrASTBuilder {\n private readonly options: FrASTBuilderOptions;\n private readonly stack: StackFrame[] = [];\n /** Depth inside fully-ignored elements (CNTNTS, GPH) */\n private ignoredContainerDepth = 0;\n /** Metadata extracted from current document */\n private currentDocMeta: FrDocumentXmlMeta = {\n documentType: \"\",\n documentTypeNormalized: \"\",\n };\n /** All document metadata collected during parsing */\n private readonly documentMetas: FrDocumentXmlMeta[] = [];\n\n constructor(options: FrASTBuilderOptions) {\n this.options = options;\n }\n\n /** Get metadata for all documents parsed so far */\n getDocumentMetas(): readonly FrDocumentXmlMeta[] {\n return this.documentMetas;\n }\n\n /** Handle SAX open element */\n onOpenElement(name: string, attrs: Attributes): void {\n // Track ignored containers (skip entire subtree)\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth++;\n return;\n }\n\n // Full-subtree ignore elements (CNTNTS, GPH, GID)\n if (FR_IGNORE_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Self-contained skip elements\n if (FR_SKIP_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Transparent pass-through wrappers (FEDREG, PREAMB, SUPLINF)\n if (FR_PASSTHROUGH_ELEMENTS.has(name)) {\n return;\n }\n\n // Section containers (RULES, PRORULES, NOTICES, PRESDOCS) — pass through\n if (FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Document elements (RULE, NOTICE, PRORULE, PRESDOCU) → open document-level node\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.openDocument(name);\n return;\n }\n\n // Presidential document subtypes (EXECORD, PRMEMO, etc.) 
— pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Presidential metadata (PSIG, PLACE, TITLE3, PRES)\n if (FR_PRESIDENTIAL_META_ELEMENTS.has(name)) {\n // PSIG and PLACE contain text we want to capture as content\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.openContent(name);\n return;\n }\n // TITLE3, PRES — skip\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble metadata elements (AGENCY, SUBAGY, CFR, SUBJECT, RIN, DEPDOC)\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"preambleMeta\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble sections (AGY, ACT, SUM, DATES, EFFDATE, ADD, FURINF)\n if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.stack.push({ kind: \"preambleSection\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Heading element (HD) — level from SOURCE attribute\n if (name === FR_HEADING_ELEMENT) {\n this.openHeading(name, attrs);\n return;\n }\n\n // Content elements (P, FP)\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.openContent(name);\n return;\n }\n\n // Inline elements (I, B, E, SU, FR, AC)\n if (FR_INLINE_ELEMENTS.has(name)) {\n this.openInline(name, attrs);\n return;\n }\n\n // Footnote reference marker — FTREF is empty and follows <SU>N</SU>.\n // Convert the preceding SU (rendered as sup) to a footnoteRef.\n if (name === FR_FTREF_ELEMENT) {\n const parentFrame = this.stack[this.stack.length - 1];\n if (parentFrame?.kind === \"content\" && parentFrame.node?.type === \"content\") {\n const contentNode = parentFrame.node as ContentNode;\n // Find the last sup child and convert it to footnoteRef\n for (let i = contentNode.children.length - 1; i >= 0; i--) {\n const child = contentNode.children[i];\n if (child?.type === \"inline\" && (child as InlineNode).inlineType === \"sup\") {\n (child as InlineNode).inlineType = \"footnoteRef\";\n break;\n }\n }\n }\n // FTREF is self-closing, push+pop to 
maintain balance\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Note elements (FTNT, EDNOTE, OLNOTE1)\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.openNote(name);\n return;\n }\n\n // REGTEXT and related elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.openRegtext(name, attrs);\n return;\n }\n\n // LSTSUB — List of subjects\n if (name === FR_LSTSUB_ELEMENT) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.openSignature(name);\n return;\n }\n\n // Block elements (EXTRACT, EXAMPLE)\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.openTableElement(name, attrs);\n return;\n }\n\n // FRDOC — extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.stack.push({ kind: \"frdoc\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // BILCOD — skip\n if (name === FR_BILCOD_ELEMENT) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Unknown elements — push as ignore to maintain stack balance\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n }\n\n /** Handle SAX close element */\n onCloseElement(name: string): void {\n // Track ignored containers\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth--;\n return;\n }\n\n // Pass-through elements — no frame to pop\n if (FR_PASSTHROUGH_ELEMENTS.has(name) || FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Presidential subtypes — pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Document elements → emit\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.closeDocument(name);\n return;\n }\n\n // Preamble metadata → extract text\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.closePreambleMeta(name);\n return;\n }\n\n // Preamble sections → just pop the frame\n 
if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // Heading\n if (name === FR_HEADING_ELEMENT) {\n this.closeHeading(name);\n return;\n }\n\n // Content elements\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.closeContent(name);\n return;\n }\n\n // Presidential metadata content (PSIG, PLACE)\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.closeContent(name);\n return;\n }\n\n // Inline elements\n if (FR_INLINE_ELEMENTS.has(name) || name === FR_FTREF_ELEMENT) {\n this.closeInline(name);\n return;\n }\n\n // Note elements\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.closeNote(name);\n return;\n }\n\n // REGTEXT elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.closeRegtext(name);\n return;\n }\n\n // LSTSUB\n if (name === FR_LSTSUB_ELEMENT) {\n this.popFrame(name);\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.closeSignature(name);\n return;\n }\n\n // Block elements\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.closeTableElement(name);\n return;\n }\n\n // FRDOC → extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.closeFrdoc();\n return;\n }\n\n // Pop any remaining frames (ignore, etc.)\n if (this.stack.length > 0 && this.stack[this.stack.length - 1]?.elementName === name) {\n this.stack.pop();\n }\n }\n\n /** Handle SAX text content */\n onText(text: string): void {\n if (this.ignoredContainerDepth > 0) return;\n\n const frame = this.stack[this.stack.length - 1];\n if (!frame) return;\n\n // Accumulate text in text-collecting frames\n if (\n frame.kind === \"heading\" ||\n frame.kind === \"preambleMeta\" ||\n frame.kind === \"signatureField\" ||\n frame.kind === \"tableCell\" ||\n frame.kind === \"tableHeader\" ||\n frame.kind === \"frdoc\"\n ) {\n frame.textBuffer += text;\n return;\n }\n\n // Content frames → create inline text node\n if (frame.kind === \"content\" && 
frame.node?.type === \"content\") {\n const contentNode = frame.node as ContentNode;\n // Normalize XML indentation whitespace: collapse runs of whitespace to single spaces\n const normalized = text.replace(/\\s+/g, \" \");\n if (normalized && normalized !== \" \") {\n contentNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text: normalized,\n });\n }\n return;\n }\n\n // Inline frames → set text or add child\n if (frame.kind === \"inline\" && frame.node?.type === \"inline\") {\n const inlineNode = frame.node as InlineNode;\n const normalized = text.replace(/\\s+/g, \" \");\n if (inlineNode.children) {\n if (normalized && normalized !== \" \") {\n inlineNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text: normalized,\n });\n }\n } else {\n inlineNode.text = (inlineNode.text ?? \"\") + normalized;\n }\n return;\n }\n\n // Note frames with direct text\n if (frame.kind === \"note\" && frame.node?.type === \"note\") {\n frame.textBuffer += text;\n return;\n }\n\n // Document-level, preambleSection, block, regtext — ignore stray text\n }\n\n // ── Private helpers: Document ──\n\n private openDocument(elementName: string): void {\n this.currentDocMeta = {\n documentType: elementName,\n documentTypeNormalized: FR_DOCUMENT_TYPE_MAP[elementName] ?? 
elementName.toLowerCase(),\n };\n\n const node: LevelNode = {\n type: \"level\",\n levelType: \"section\",\n children: [],\n sourceElement: elementName,\n };\n\n this.stack.push({ kind: \"document\", elementName, node, textBuffer: \"\" });\n }\n\n private closeDocument(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"document\" || !frame.node) return;\n\n const levelNode = frame.node as LevelNode;\n\n // Set heading from subject\n if (this.currentDocMeta.subject) {\n levelNode.heading = this.currentDocMeta.subject;\n }\n\n // Set identifier from document number\n if (this.currentDocMeta.documentNumber) {\n levelNode.identifier = `/us/fr/${this.currentDocMeta.documentNumber}`;\n levelNode.numValue = this.currentDocMeta.documentNumber;\n }\n\n // Build emit context\n const ancestors: AncestorInfo[] = [];\n for (const f of this.stack) {\n if (f.kind === \"document\" && f.node?.type === \"level\") {\n const ln = f.node as LevelNode;\n ancestors.push({\n levelType: ln.levelType,\n numValue: ln.numValue,\n heading: ln.heading,\n identifier: ln.identifier,\n });\n }\n }\n\n const context: EmitContext = {\n ancestors,\n documentMeta: {\n dcTitle: this.currentDocMeta.subject,\n dcType: this.currentDocMeta.documentTypeNormalized,\n },\n };\n\n // Save metadata before emitting\n this.documentMetas.push({ ...this.currentDocMeta });\n\n this.options.onEmit(levelNode, context);\n }\n\n // ── Private helpers: Preamble ──\n\n private closePreambleMeta(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"preambleMeta\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n switch (elementName) {\n case \"AGENCY\":\n this.currentDocMeta.agency = text;\n break;\n case \"SUBAGY\":\n this.currentDocMeta.subAgency = text;\n break;\n case \"CFR\":\n this.currentDocMeta.cfrCitation = text;\n break;\n case \"SUBJECT\":\n this.currentDocMeta.subject = text;\n 
break;\n case \"RIN\":\n this.currentDocMeta.rin = text.replace(/^RIN\\s+/i, \"\").trim();\n break;\n case \"DEPDOC\":\n // Department document number — store for potential use\n break;\n }\n }\n\n // ── Private helpers: Heading ──\n\n private openHeading(_elementName: string, attrs: Attributes): void {\n const source = attrs[\"SOURCE\"] ?? \"HD1\";\n const depth = FR_HD_SOURCE_TO_DEPTH[source] ?? 3;\n\n this.stack.push({\n kind: \"heading\",\n elementName: FR_HEADING_ELEMENT,\n textBuffer: \"\",\n headerLevel: depth,\n });\n }\n\n private closeHeading(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"heading\") return;\n\n const headingText = frame.textBuffer.trim();\n if (!headingText) return;\n\n // In preamble sections (AGY, ACT, SUM, etc.), the HD contains the label\n // like \"AGENCY:\", \"ACTION:\", \"SUMMARY:\". We render these as bold labels.\n const parentFrame = this.stack[this.stack.length - 1];\n\n if (parentFrame?.kind === \"preambleSection\") {\n // Create a bold label content node\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n return;\n }\n\n // Outside preamble: render as a bold heading content node\n // The depth from SOURCE attribute determines visual weight\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n }\n\n // ── Private helpers: Content ──\n\n private openContent(elementName: string): void {\n const node: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [],\n };\n this.stack.push({ kind: \"content\", elementName, node, textBuffer: \"\" });\n }\n\n private closeContent(elementName: string): void {\n const frame = 
this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const contentNode = frame.node as ContentNode;\n\n // Skip empty content nodes\n if (contentNode.children.length === 0) return;\n\n // Add to parent: document, note, or block\n const parent = this.findParentDocument() ?? this.findParentNote();\n if (parent?.node) {\n if (parent.node.type === \"level\") {\n (parent.node as LevelNode).children.push(contentNode);\n } else if (parent.node.type === \"note\") {\n (parent.node as NoteNode).children.push(contentNode);\n }\n }\n }\n\n // ── Private helpers: Inline ──\n\n private openInline(elementName: string, attrs: Attributes): void {\n let inlineType: InlineType = \"text\";\n\n if (elementName === \"I\") {\n inlineType = \"italic\";\n } else if (elementName === \"B\") {\n inlineType = \"bold\";\n } else if (elementName === \"SU\") {\n // SU inside a footnote (FTNT) is the footnote marker, not a generic superscript.\n // Check if we're inside a note frame to determine the correct type.\n const insideFootnote = this.findFrame(\"note\") !== undefined;\n inlineType = insideFootnote ? \"footnoteRef\" : \"sup\";\n } else if (elementName === \"FR\") {\n inlineType = \"text\"; // Fractions render as text\n } else if (elementName === \"E\") {\n const tValue = attrs[\"T\"] ?? \"\";\n inlineType = FR_EMPHASIS_MAP[tValue] ?? 
\"italic\";\n }\n\n const node: InlineNode = {\n type: \"inline\",\n inlineType,\n children: [],\n };\n\n this.stack.push({ kind: \"inline\", elementName, node, textBuffer: \"\" });\n }\n\n private closeInline(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const inlineNode = frame.node as InlineNode;\n\n // For footnoteRef, set text from buffer\n if (inlineNode.inlineType === \"footnoteRef\" && frame.textBuffer) {\n inlineNode.text = frame.textBuffer.trim();\n }\n\n // Find parent to attach to\n const parentFrame = this.stack[this.stack.length - 1];\n if (!parentFrame) return;\n\n if (parentFrame.kind === \"content\" && parentFrame.node?.type === \"content\") {\n (parentFrame.node as ContentNode).children.push(inlineNode);\n } else if (parentFrame.kind === \"inline\" && parentFrame.node?.type === \"inline\") {\n const parentInline = parentFrame.node as InlineNode;\n if (parentInline.children) {\n parentInline.children.push(inlineNode);\n }\n } else if (parentFrame.kind === \"heading\" || parentFrame.kind === \"preambleMeta\") {\n // Inline inside heading or preamble metadata — accumulate text\n if (inlineNode.text) {\n parentFrame.textBuffer += inlineNode.text;\n } else if (inlineNode.children) {\n for (const child of inlineNode.children) {\n if (child.text) parentFrame.textBuffer += child.text;\n }\n }\n }\n }\n\n // ── Private helpers: Notes ──\n\n private openNote(elementName: string): void {\n const noteTypeMap: Record<string, string> = {\n FTNT: \"footnote\",\n EDNOTE: \"editorial\",\n OLNOTE1: \"general\",\n };\n\n const noteType = noteTypeMap[elementName] ?? 
elementName.toLowerCase();\n const node: NoteNode = {\n type: \"note\",\n noteType,\n children: [],\n };\n\n this.stack.push({ kind: \"note\", elementName, node, textBuffer: \"\" });\n }\n\n private closeNote(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const noteNode = frame.node as NoteNode;\n\n // If text was collected directly (no child content nodes), create one\n if (frame.textBuffer.trim() && noteNode.children.length === 0) {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text: frame.textBuffer.trim(),\n },\n ],\n };\n noteNode.children.push(contentNode);\n }\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(noteNode);\n }\n }\n\n // ── Private helpers: Regulatory text ──\n\n private openRegtext(elementName: string, attrs: Attributes): void {\n if (elementName === \"REGTEXT\") {\n // REGTEXT container with TITLE and PART attributes\n const title = attrs[\"TITLE\"] ?? \"\";\n const part = attrs[\"PART\"] ?? \"\";\n const label = title && part ? 
`${title} CFR Part ${part}` : \"\";\n\n // Create a bold label if we have CFR reference info\n if (label) {\n const labelNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: label,\n },\n ],\n };\n this.addToDocument(labelNode);\n }\n\n this.stack.push({ kind: \"regtext\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AMDPAR\") {\n // Amendment instruction paragraph — render as italic content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\") {\n // Section container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"SECTNO\") {\n // Section number — collect as content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"PART\") {\n // Part container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AUTH\") {\n // Authority citation in REGTEXT\n this.openNote(elementName);\n return;\n }\n }\n\n private closeRegtext(elementName: string): void {\n if (elementName === \"REGTEXT\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AMDPAR\" || elementName === \"SECTNO\") {\n this.closeContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\" || elementName === \"PART\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AUTH\") {\n this.closeNote(elementName);\n return;\n }\n }\n\n // ── Private helpers: Signature block ──\n\n private openSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n // Signature container\n const node: NoteNode = {\n type: \"note\",\n noteType: \"signature\",\n children: [],\n };\n this.stack.push({ kind: \"signature\", elementName, node, textBuffer: \"\" });\n return;\n }\n\n // NAME, TITLE, DATED — collect text\n 
this.stack.push({ kind: \"signatureField\", elementName, textBuffer: \"\" });\n }\n\n private closeSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const sigNode = frame.node as NoteNode;\n\n // Add signature to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(sigNode);\n }\n return;\n }\n\n // NAME, TITLE, DATED fields\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"signatureField\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n // Add as content to parent signature node\n const sigFrame = this.findFrame(\"signature\");\n if (sigFrame?.node && sigFrame.node.type === \"note\") {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text,\n },\n ],\n };\n (sigFrame.node as NoteNode).children.push(contentNode);\n }\n }\n\n // ── Private helpers: GPOTABLE ──\n\n private openTableElement(elementName: string, _attrs: Attributes): void {\n if (elementName === \"GPOTABLE\") {\n this.stack.push({\n kind: \"table\",\n elementName,\n textBuffer: \"\",\n headers: [],\n rows: [],\n currentRow: [],\n });\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — collect text as heading\n this.stack.push({ kind: \"heading\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // Header container — no frame needed, children (CHED) handle themselves\n return;\n }\n\n if (elementName === \"CHED\") {\n // Column header entry\n this.stack.push({ kind: \"tableHeader\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ROW\") {\n const tableFrame = this.findTableFrame();\n if (tableFrame) {\n tableFrame.currentRow = [];\n }\n this.stack.push({ 
kind: \"tableRow\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ENT\") {\n // Cell entry\n this.stack.push({ kind: \"tableCell\", elementName, textBuffer: \"\" });\n return;\n }\n }\n\n private closeTableElement(elementName: string): void {\n if (elementName === \"GPOTABLE\") {\n this.closeGpoTable();\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — drop the heading frame (title is informational)\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // No frame to pop\n return;\n }\n\n if (elementName === \"CHED\") {\n this.closeTableHeader();\n return;\n }\n\n if (elementName === \"ROW\") {\n this.closeTableRow();\n return;\n }\n\n if (elementName === \"ENT\") {\n this.closeTableCell();\n return;\n }\n }\n\n private closeGpoTable(): void {\n const frame = this.popFrame(\"GPOTABLE\");\n if (!frame || frame.kind !== \"table\") return;\n\n const tableNode: TableNode = {\n type: \"table\",\n variant: \"xhtml\", // Reuse the same variant for rendering\n headers: frame.headers ?? [],\n rows: frame.rows ?? 
[],\n };\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(tableNode);\n }\n }\n\n private closeTableHeader(): void {\n const headerFrame = this.popFrame(\"CHED\");\n if (!headerFrame || headerFrame.kind !== \"tableHeader\") return;\n\n const tableFrame = this.findTableFrame();\n if (!tableFrame) return;\n\n const text = headerFrame.textBuffer.trim();\n\n // GPOTABLE headers are flat — each CHED is one column header.\n // We build a single header row from all CHED elements.\n if (!tableFrame.headers || tableFrame.headers.length === 0) {\n tableFrame.headers = [[]];\n }\n const headerRow = tableFrame.headers[0];\n if (headerRow) {\n headerRow.push(text);\n }\n }\n\n private closeTableRow(): void {\n const rowFrame = this.popFrame(\"ROW\");\n if (!rowFrame) return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.rows?.push([...tableFrame.currentRow]);\n tableFrame.currentRow = [];\n }\n }\n\n private closeTableCell(): void {\n const cellFrame = this.stack.pop();\n if (!cellFrame || cellFrame.kind !== \"tableCell\") return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.currentRow.push(cellFrame.textBuffer.trim());\n }\n }\n\n // ── Private helpers: FRDOC ──\n\n private closeFrdoc(): void {\n const frame = this.popFrame(FR_FRDOC_ELEMENT);\n if (!frame || frame.kind !== \"frdoc\") return;\n\n const text = frame.textBuffer.trim();\n // Extract document number from FRDOC text. Formats vary by era:\n // Modern: \"[FR Doc. 2026-06029 Filed 3-27-26; 8:45 am]\"\n // Pre-2009: \"[FR Doc. E8-17594 Filed 7-31-08; 8:45 am]\"\n // Very old: \"[FR Doc. 
00-123 Filed 1-2-00; 8:45 am]\"\n const docMatch = /FR\\s+Doc\\.\\s+([\\w-]+)/i.exec(text);\n if (docMatch) {\n this.currentDocMeta.documentNumber = docMatch[1];\n }\n\n // Extract publication date from filing date (Filed M-D-YY).\n // Publication = filing date + 1 calendar day (FR publishes the morning after).\n const dateMatch = /Filed\\s+(\\d{1,2})-(\\d{1,2})-(\\d{2})\\b/.exec(text);\n if (dateMatch) {\n const [, mmStr, ddStr, yyStr] = dateMatch;\n const mm = parseInt(mmStr ?? \"0\", 10);\n const dd = parseInt(ddStr ?? \"0\", 10);\n const yy = parseInt(yyStr ?? \"0\", 10);\n // 2-digit year: 00-49 → 2000s, 50-99 → 1900s\n const fullYear = yy < 50 ? 2000 + yy : 1900 + yy;\n const filed = new Date(fullYear, mm - 1, dd);\n // Validate — Date constructor silently wraps invalid values (month 13 → next year)\n if (filed.getMonth() !== mm - 1 || filed.getDate() !== dd) {\n return; // Invalid filing date — skip rather than produce wrong date\n }\n // Publication date = next calendar day\n filed.setDate(filed.getDate() + 1);\n const pubYear = filed.getFullYear();\n const pubMonth = String(filed.getMonth() + 1).padStart(2, \"0\");\n const pubDay = String(filed.getDate()).padStart(2, \"0\");\n this.currentDocMeta.publicationDate = `${pubYear}-${pubMonth}-${pubDay}`;\n }\n }\n\n // ── Private helpers: Stack navigation ──\n\n private addToDocument(node: ASTNode): void {\n const docFrame = this.findParentDocument();\n if (docFrame?.node && docFrame.node.type === \"level\") {\n (docFrame.node as LevelNode).children.push(node);\n }\n }\n\n private findParentDocument(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"document\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findParentNote(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"note\" || this.stack[i]?.kind === \"signature\") {\n return this.stack[i];\n }\n }\n return 
undefined;\n }\n\n private findTableFrame(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"table\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findFrame(kind: FrameKind): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === kind) {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private popFrame(elementName: string): StackFrame | undefined {\n if (this.stack.length === 0) return undefined;\n\n // Find the matching frame (may not be exactly on top)\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.elementName === elementName) {\n return this.stack.splice(i, 1)[0];\n }\n }\n\n // No matching frame found — warn rather than popping an unrelated frame\n console.warn(\n `FrASTBuilder: no matching frame for closing element </${elementName}>, ` +\n `stack has: [${this.stack.map((f) => f.elementName).join(\", \")}]`,\n );\n return undefined;\n }\n}\n","/**\n * Federal Register frontmatter builder.\n *\n * Constructs FrontmatterData from an emitted FR AST node, its context,\n * and optional JSON metadata from the FederalRegister.gov API.\n */\n\nimport type { LevelNode, EmitContext, FrontmatterData } from \"@lexbuild/core\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\n\n/**\n * Metadata from the FederalRegister.gov API JSON response.\n * Stored as a sidecar `.json` file alongside each `.xml` download.\n */\nexport interface FrDocumentJsonMeta {\n /** FR document number (e.g., \"2026-06029\") */\n document_number: string;\n /** Document type (Rule, Proposed Rule, Notice, Presidential Document) */\n type: string;\n /** Document title */\n title: string;\n /** Publication date (YYYY-MM-DD) */\n publication_date: string;\n /** Full FR citation (e.g., \"91 FR 14523\") */\n citation: string;\n /** FR volume number */\n volume: number;\n /** Start page number */\n start_page: number;\n 
/** End page number */\n end_page: number;\n /** Agencies with hierarchy info */\n agencies: Array<{\n name: string;\n id: number;\n slug: string;\n parent_id?: number | null;\n raw_name?: string;\n }>;\n /** CFR title/part references */\n cfr_references: Array<{ title: number; part: number }>;\n /** Docket identifiers */\n docket_ids: string[];\n /** Regulation Identifier Numbers */\n regulation_id_numbers: string[];\n /** Effective date (YYYY-MM-DD) */\n effective_on?: string | null;\n /** Comment period end date (YYYY-MM-DD) */\n comments_close_on?: string | null;\n /** Action description (e.g., \"Final rule.\") */\n action?: string | null;\n /** Document abstract */\n abstract?: string | null;\n /** Whether the document is significant */\n significant?: boolean | null;\n /** Topics/keywords */\n topics: string[];\n /** URL to full text XML */\n full_text_xml_url: string;\n}\n\n/** Normalize API document type to lowercase snake_case */\nfunction normalizeDocumentType(apiType: string): string {\n const map: Record<string, string> = {\n Rule: \"rule\",\n \"Proposed Rule\": \"proposed_rule\",\n Notice: \"notice\",\n \"Presidential Document\": \"presidential_document\",\n };\n return map[apiType] ?? apiType.toLowerCase().replace(/\\s+/g, \"_\");\n}\n\n/**\n * Build FrontmatterData from an FR document node with optional JSON metadata.\n *\n * If JSON metadata is available (from the API sidecar file), it enriches\n * the frontmatter with structured agency, CFR reference, docket, and\n * date information that isn't available in the XML alone.\n */\nexport function buildFrFrontmatter(\n node: LevelNode,\n _context: EmitContext,\n xmlMeta: FrDocumentXmlMeta,\n jsonMeta?: FrDocumentJsonMeta,\n): FrontmatterData {\n const documentNumber = jsonMeta?.document_number ?? xmlMeta.documentNumber ?? \"\";\n const subject = jsonMeta?.title ?? xmlMeta.subject ?? node.heading ?? \"\";\n const publicationDate = jsonMeta?.publication_date ?? xmlMeta.publicationDate ?? 
\"\";\n const documentType = jsonMeta\n ? normalizeDocumentType(jsonMeta.type)\n : xmlMeta.documentTypeNormalized;\n\n // Build agencies list\n let agencies: string[] | undefined;\n if (jsonMeta?.agencies && jsonMeta.agencies.length > 0) {\n agencies = jsonMeta.agencies.map((a) => a.name);\n } else if (xmlMeta.agency) {\n agencies = [xmlMeta.agency];\n if (xmlMeta.subAgency) {\n agencies.push(xmlMeta.subAgency);\n }\n }\n\n // Build CFR references list\n let cfrReferences: string[] | undefined;\n if (jsonMeta?.cfr_references && jsonMeta.cfr_references.length > 0) {\n cfrReferences = jsonMeta.cfr_references.map((r) => `${r.title} CFR Part ${r.part}`);\n } else if (xmlMeta.cfrCitation) {\n cfrReferences = [xmlMeta.cfrCitation];\n }\n\n // Build docket IDs list\n let docketIds: string[] | undefined;\n if (jsonMeta?.docket_ids && jsonMeta.docket_ids.length > 0) {\n docketIds = jsonMeta.docket_ids;\n }\n\n // Primary agency for the existing `agency` field\n const primaryAgency = agencies && agencies.length > 0 ? agencies[0] : undefined;\n\n // FR citation\n const frCitation = jsonMeta?.citation;\n\n // RIN\n const rin = jsonMeta?.regulation_id_numbers?.[0] ?? xmlMeta.rin;\n\n const fm: FrontmatterData = {\n source: \"fr\",\n legal_status: \"authoritative_unofficial\",\n identifier: node.identifier ?? `/us/fr/${documentNumber}`,\n title: subject,\n title_number: 0, // FR documents don't belong to a USC/CFR title\n title_name: \"Federal Register\",\n section_number: documentNumber,\n section_name: subject,\n positive_law: false,\n currency: publicationDate,\n last_updated: publicationDate,\n\n // Shared optional fields\n agency: primaryAgency,\n\n // FR-specific fields\n document_number: documentNumber || undefined,\n document_type: documentType || undefined,\n fr_citation: frCitation,\n fr_volume: jsonMeta?.volume,\n publication_date: publicationDate || undefined,\n agencies: agencies && agencies.length > 0 ? 
agencies : undefined,\n cfr_references: cfrReferences && cfrReferences.length > 0 ? cfrReferences : undefined,\n docket_ids: docketIds && docketIds.length > 0 ? docketIds : undefined,\n rin: rin || undefined,\n effective_date: jsonMeta?.effective_on ?? undefined,\n comments_close_date: jsonMeta?.comments_close_on ?? undefined,\n fr_action: jsonMeta?.action ?? undefined,\n };\n\n return fm;\n}\n","/**\n * Output path builder for Federal Register directory structure.\n *\n * FR path structure:\n * output/fr/{YYYY}/{MM}/{document_number}.md\n *\n * Downloads path structure:\n * downloads/fr/{YYYY}/{MM}/{document_number}.xml\n * downloads/fr/{YYYY}/{MM}/{document_number}.json\n */\n\nimport { join } from \"node:path\";\n\n/**\n * Build the output file path for an FR document.\n *\n * @param documentNumber - FR document number (e.g., \"2026-06029\")\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param outputRoot - Output root directory (e.g., \"./output\")\n * @returns Full output file path (e.g., \"output/fr/2026/03/2026-06029.md\")\n */\nexport function buildFrOutputPath(\n documentNumber: string,\n publicationDate: string,\n outputRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(outputRoot, \"fr\", year, month, `${documentNumber}.md`);\n}\n\n/**\n * Build the download file path for an FR document XML.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory (e.g., \"./downloads/fr\")\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.xml\")\n */\nexport function buildFrDownloadXmlPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.xml`);\n}\n\n/**\n * Build the download file path for an 
FR document JSON metadata.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.json\")\n */\nexport function buildFrDownloadJsonPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.json`);\n}\n\n/**\n * Build the directory path for a year/month within the FR output structure.\n */\nexport function buildMonthDir(year: string, month: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year, month);\n}\n\n/**\n * Build the directory path for a year.\n */\nexport function buildYearDir(year: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year);\n}\n\n/**\n * Parse a YYYY-MM-DD date string into year and month components.\n */\nfunction parseDateComponents(date: string): { year: string; month: string } {\n const parts = date.split(\"-\");\n return {\n year: parts[0] || \"0000\",\n month: parts[1] || \"00\",\n };\n}\n","/**\n * Federal Register conversion orchestrator.\n *\n * Discovers downloaded FR XML files, parses them with FrASTBuilder,\n * enriches frontmatter with JSON sidecar metadata, renders via core's\n * renderDocument, and writes structured Markdown output.\n *\n * Processes FR documents in a single streaming pass: parse each XML file,\n * render Markdown, and write output immediately. 
No link pre-registration\n * since FR documents rarely cross-reference each other.\n */\n\nimport { createReadStream, existsSync } from \"node:fs\";\nimport { readFile, readdir, stat } from \"node:fs/promises\";\nimport { join, dirname } from \"node:path\";\nimport { XMLParser, renderDocument, createLinkResolver, writeFile, mkdir } from \"@lexbuild/core\";\nimport type { LevelNode, EmitContext } from \"@lexbuild/core\";\nimport { FrASTBuilder } from \"./fr-builder.js\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\nimport { buildFrFrontmatter } from \"./fr-frontmatter.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport { buildFrOutputPath } from \"./fr-path.js\";\nimport { FR_DOCUMENT_TYPE_KEYS } from \"./fr-elements.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n// ── Public types ──\n\n/** Progress info for conversion callback */\nexport interface FrConvertProgress {\n /** Documents converted so far */\n documentsConverted: number;\n /** XML files processed so far */\n filesProcessed: number;\n /** Total XML files to process */\n totalFiles: number;\n /** Current XML file being processed */\n currentFile: string;\n}\n\n/** Options for converting FR documents */\nexport interface FrConvertOptions {\n /** Path to input file or directory containing .xml/.json files */\n input: string;\n /** Output root directory */\n output: string;\n /** Link style for cross-references */\n linkStyle: \"relative\" | \"canonical\" | \"plaintext\";\n /** Parse only, don't write files */\n dryRun: boolean;\n /** Filter: start date (YYYY-MM-DD) */\n from?: string | undefined;\n /** Filter: end date (YYYY-MM-DD) */\n to?: string | undefined;\n /** Filter: document types */\n types?: FrDocumentType[] | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrConvertProgress) => void) | undefined;\n}\n\n/** Result of a conversion operation */\nexport interface FrConvertResult {\n /** Number of documents converted 
*/\n documentsConverted: number;\n /** Paths of written files */\n files: string[];\n /** Total estimated tokens */\n totalTokenEstimate: number;\n /** Peak RSS in bytes */\n peakMemoryBytes: number;\n /** Whether this was a dry run */\n dryRun: boolean;\n}\n\n/** Collected document info during parsing */\ninterface CollectedDoc {\n node: LevelNode;\n context: EmitContext;\n xmlMeta: FrDocumentXmlMeta;\n jsonMeta?: FrDocumentJsonMeta;\n publicationDate: string;\n documentNumber: string;\n}\n\n/** Set of valid FR document type element names for filtering */\nconst FR_DOC_TYPE_SET = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n// ── Public function ──\n\n/**\n * Convert FR XML documents to Markdown.\n *\n * Supports both single-file mode (input is a .xml path) and batch mode\n * (input is a directory containing year/month/doc.xml structure).\n */\nexport async function convertFrDocuments(options: FrConvertOptions): Promise<FrConvertResult> {\n const xmlFiles = await discoverXmlFiles(options.input, options.from, options.to);\n\n let documentsConverted = 0;\n let totalTokenEstimate = 0;\n let peakMemoryBytes = 0;\n\n const linkResolver = createLinkResolver();\n\n // Stream: parse each file, render, and write immediately.\n // FR documents rarely cross-reference each other, so we skip the two-pass\n // link registration that USC/eCFR use. This keeps memory bounded for\n // bulk XML processing (750k+ documents across 9,500+ files).\n let filesProcessed = 0;\n for (const xmlPath of xmlFiles) {\n let collected: CollectedDoc[];\n try {\n collected = await parseXmlFile(xmlPath);\n } catch (err) {\n console.warn(\n `Warning: Failed to parse ${xmlPath}: ${err instanceof Error ? err.message : String(err)}. 
Skipping.`,\n );\n continue;\n }\n\n for (const doc of collected) {\n // Apply type filter\n if (options.types && options.types.length > 0) {\n if (\n !FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) ||\n !options.types.includes(doc.xmlMeta.documentType as FrDocumentType)\n ) {\n continue;\n }\n }\n\n if (options.dryRun) {\n documentsConverted++;\n continue;\n }\n\n const outputPath = buildFrOutputPath(doc.documentNumber, doc.publicationDate, options.output);\n\n const frontmatter = buildFrFrontmatter(doc.node, doc.context, doc.xmlMeta, doc.jsonMeta);\n\n const markdown = renderDocument(doc.node, frontmatter, {\n headingOffset: 0,\n linkStyle: options.linkStyle,\n resolveLink:\n options.linkStyle === \"relative\"\n ? (id) => linkResolver.resolve(id, outputPath)\n : undefined,\n });\n\n await mkdir(dirname(outputPath), { recursive: true });\n await writeFile(outputPath, markdown, \"utf-8\");\n\n documentsConverted++;\n totalTokenEstimate += Math.round(markdown.length / 4);\n\n // Track memory\n const mem = process.memoryUsage().rss;\n if (mem > peakMemoryBytes) {\n peakMemoryBytes = mem;\n }\n }\n\n filesProcessed++;\n\n options.onProgress?.({\n documentsConverted,\n filesProcessed,\n totalFiles: xmlFiles.length,\n currentFile: xmlPath,\n });\n }\n\n return {\n documentsConverted,\n files: [], // Don't accumulate 750k+ file paths in memory\n totalTokenEstimate,\n peakMemoryBytes,\n dryRun: options.dryRun,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Parse a single XML file and collect document nodes + metadata.\n */\nasync function parseXmlFile(xmlPath: string): Promise<CollectedDoc[]> {\n const collected: CollectedDoc[] = [];\n\n const builder = new FrASTBuilder({\n onEmit: (node, context) => {\n // Snapshot metas at emit time\n const currentMetas = builder.getDocumentMetas();\n const meta = currentMetas[currentMetas.length - 1];\n if (!meta) {\n console.warn(\n `Warning: No XML metadata extracted for emitted document in ${xmlPath}. 
` +\n `Frontmatter will have empty document_type and document_number.`,\n );\n }\n collected.push({\n node,\n context,\n xmlMeta: meta ?? { documentType: \"\", documentTypeNormalized: \"\" },\n publicationDate: \"\",\n documentNumber: meta?.documentNumber ?? \"\",\n });\n },\n });\n\n const parser = new XMLParser({ defaultNamespace: \"\" });\n parser.on(\"openElement\", (name, attrs) => builder.onOpenElement(name, attrs));\n parser.on(\"closeElement\", (name) => builder.onCloseElement(name));\n parser.on(\"text\", (text) => builder.onText(text));\n\n const stream = createReadStream(xmlPath, \"utf-8\");\n await parser.parseStream(stream);\n\n // Try to load JSON sidecar\n const jsonPath = xmlPath.replace(/\\.xml$/, \".json\");\n let jsonMeta: FrDocumentJsonMeta | undefined;\n if (existsSync(jsonPath)) {\n try {\n const raw = await readFile(jsonPath, \"utf-8\");\n jsonMeta = JSON.parse(raw) as FrDocumentJsonMeta;\n } catch (err) {\n console.warn(\n `Warning: Failed to parse JSON sidecar ${jsonPath}: ${err instanceof Error ? err.message : String(err)}. Continuing without enriched metadata.`,\n );\n }\n }\n\n // Enrich collected docs with JSON metadata and publication date\n for (const doc of collected) {\n if (jsonMeta && jsonMeta.document_number === doc.documentNumber) {\n doc.jsonMeta = jsonMeta;\n doc.publicationDate = jsonMeta.publication_date;\n } else {\n // Infer date from file path (downloads/fr/YYYY/MM/doc.xml)\n const inferredDate = inferDateFromPath(xmlPath);\n if (!inferredDate) {\n console.warn(\n `Warning: No publication date for document ${doc.documentNumber || \"(unknown)\"} — ` +\n `no JSON sidecar and path ${xmlPath} has no YYYY/MM/ pattern. 
Output will be in 0000/00/.`,\n );\n }\n doc.publicationDate = inferredDate;\n }\n }\n\n return collected;\n}\n\n/**\n * Discover XML files in a directory or return the single file path.\n */\nasync function discoverXmlFiles(input: string, from?: string, to?: string): Promise<string[]> {\n let inputStat;\n try {\n inputStat = await stat(input);\n } catch (err) {\n throw new Error(\n `Cannot access input path \"${input}\": ${err instanceof Error ? err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (inputStat.isFile()) {\n return [input];\n }\n\n if (!inputStat.isDirectory()) {\n throw new Error(`Input path \"${input}\" is not a file or directory`);\n }\n\n // Recursively find all .xml files\n const xmlFiles: string[] = [];\n await walkDir(input, xmlFiles);\n\n // Apply date range filter based on file path structure (YYYY/MM/)\n let filtered = xmlFiles;\n if (from || to) {\n filtered = xmlFiles.filter((f) => {\n const date = inferDateFromPath(f);\n if (!date) return true; // Can't filter if no date in path\n if (from && date < from) return false;\n if (to && date > to + \"-32\") return false; // Month-level comparison\n return true;\n });\n }\n\n return filtered.sort();\n}\n\n/** Recursively walk a directory collecting .xml files */\nasync function walkDir(dir: string, results: string[]): Promise<void> {\n const entries = await readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n await walkDir(fullPath, results);\n } else if (entry.isFile() && entry.name.endsWith(\".xml\")) {\n results.push(fullPath);\n }\n }\n}\n\n/**\n * Infer a date string from the file path. Used when no JSON sidecar is available.\n *\n * Supports two patterns:\n * - Per-document: \"downloads/fr/2026/03/doc.xml\" → \"2026-03-01\"\n * - Govinfo bulk: \"downloads/fr/bulk/2026/FR-2026-03-02.xml\" → \"2026-03-02\"\n */\n/** @internal Exported for testing. 
*/\nexport function inferDateFromPath(filePath: string): string {\n // Govinfo bulk: FR-YYYY-MM-DD.xml\n const bulkMatch = /FR-(\\d{4})-(\\d{2})-(\\d{2})\\.xml$/.exec(filePath);\n if (bulkMatch) {\n return `${bulkMatch[1]}-${bulkMatch[2]}-${bulkMatch[3]}`;\n }\n\n // Per-document: YYYY/MM/doc.xml\n const perDocMatch = /(\\d{4})\\/(\\d{2})\\/[^/]+\\.xml$/.exec(filePath);\n if (perDocMatch) {\n return `${perDocMatch[1]}-${perDocMatch[2]}-01`;\n }\n\n return \"\";\n}\n","/**\n * Federal Register frontmatter enricher.\n *\n * Fetches rich JSON metadata from the FederalRegister.gov API listing endpoint\n * and patches frontmatter in existing converted Markdown files. This is used to\n * backfill metadata (agencies, CFR references, docket IDs, citations, etc.) into\n * files originally converted from govinfo bulk XML, which lacks this data.\n *\n * The enricher does NOT re-parse XML or re-render Markdown — it only updates the\n * YAML frontmatter block while preserving the body content exactly as-is.\n */\n\nimport { readFile, writeFile } from \"node:fs/promises\";\nimport { existsSync } from \"node:fs\";\nimport { parse, stringify } from \"yaml\";\nimport { buildFrOutputPath } from \"./fr-path.js\";\nimport {\n buildFrApiListUrl,\n fetchWithRetry,\n buildMonthChunks,\n} from \"./downloader.js\";\nimport type { FrApiListResponse } from \"./downloader.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\n\n// --- Public types ---\n\n/** Options for enriching FR documents */\nexport interface EnrichFrOptions {\n /** Output root directory where .md files live (e.g., \"./output\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. 
*/\n to?: string | undefined;\n /** Overwrite files that are already enriched (have fr_citation) */\n force?: boolean | undefined;\n /** Progress callback */\n onProgress?: ((progress: EnrichFrProgress) => void) | undefined;\n}\n\n/** Progress info for enrichment callback */\nexport interface EnrichFrProgress {\n /** Documents whose frontmatter was updated */\n enriched: number;\n /** Documents skipped (already enriched or no frontmatter) */\n skipped: number;\n /** Documents in API but no .md file found locally */\n notFound: number;\n /** Total documents seen in API responses */\n total: number;\n /** Current month chunk being processed (YYYY-MM) */\n currentChunk: string;\n /** Current document number */\n currentDocument: string;\n}\n\n/** Result of an enrichment operation */\nexport interface EnrichFrResult {\n /** Documents whose frontmatter was updated */\n enriched: number;\n /** Documents skipped (already enriched or unparseable) */\n skipped: number;\n /** Documents in API but no .md file found locally */\n notFound: number;\n /** Total documents seen in API responses */\n total: number;\n /** Date range covered */\n dateRange: { from: string; to: string };\n}\n\n// --- Public function ---\n\n/**\n * Enrich existing FR Markdown files with metadata from the FederalRegister.gov API.\n *\n * Paginates through the API listing endpoint (200 docs/page), matches each document\n * to its .md file by document number + publication date, and patches the YAML\n * frontmatter with enriched fields (citation, agencies, CFR references, etc.).\n */\nexport async function enrichFrDocuments(options: EnrichFrOptions): Promise<EnrichFrResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const force = options.force ?? 
false;\n\n let enriched = 0;\n let skipped = 0;\n let notFound = 0;\n let total = 0;\n\n const chunks = buildMonthChunks(options.from, to);\n\n for (const chunk of chunks) {\n let page = 1;\n let hasMore = true;\n const chunkLabel = chunk.from.slice(0, 7);\n\n while (hasMore) {\n const listUrl = buildFrApiListUrl(chunk.from, chunk.to, page);\n const response = await fetchWithRetry(listUrl);\n const data = (await response.json()) as FrApiListResponse;\n\n if (typeof data.count !== \"number\") {\n throw new Error(\n `Unexpected API response for ${listUrl}: missing or invalid 'count' field.`,\n );\n }\n\n if (page === 1) {\n total += data.count;\n }\n\n const results = data.results ?? [];\n\n for (const doc of results) {\n if (!doc.document_number || !doc.publication_date) continue;\n\n const mdPath = buildFrOutputPath(\n doc.document_number,\n doc.publication_date,\n options.output,\n );\n\n if (!existsSync(mdPath)) {\n notFound++;\n options.onProgress?.({\n enriched,\n skipped,\n notFound,\n total,\n currentChunk: chunkLabel,\n currentDocument: doc.document_number,\n });\n continue;\n }\n\n const content = await readFile(mdPath, \"utf-8\");\n\n // Split on frontmatter delimiters: ---\\n...\\n---\\n\n const fmEnd = content.indexOf(\"\\n---\\n\", 4);\n if (!content.startsWith(\"---\\n\") || fmEnd === -1) {\n skipped++;\n continue;\n }\n\n const yamlStr = content.slice(4, fmEnd);\n const body = content.slice(fmEnd + 5); // after \"\\n---\\n\"\n\n const fm = parse(yamlStr) as Record<string, unknown>;\n\n // Skip already-enriched files unless --force\n if (!force && fm[\"fr_citation\"]) {\n skipped++;\n options.onProgress?.({\n enriched,\n skipped,\n notFound,\n total,\n currentChunk: chunkLabel,\n currentDocument: doc.document_number,\n });\n continue;\n }\n\n applyEnrichment(fm, doc);\n\n const newYaml = stringify(fm, {\n lineWidth: 0,\n defaultStringType: \"QUOTE_DOUBLE\",\n defaultKeyType: \"PLAIN\",\n });\n\n const newContent = `---\\n${newYaml}---\\n${body}`;\n 
await writeFile(mdPath, newContent, \"utf-8\");\n\n enriched++;\n options.onProgress?.({\n enriched,\n skipped,\n notFound,\n total,\n currentChunk: chunkLabel,\n currentDocument: doc.document_number,\n });\n }\n\n hasMore = page < (data.total_pages ?? 0);\n page++;\n }\n }\n\n return { enriched, skipped, notFound, total, dateRange: { from: options.from, to } };\n}\n\n// --- Private helpers ---\n\n/** Normalize API document type to lowercase snake_case */\nfunction normalizeDocumentType(apiType: string): string {\n const map: Record<string, string> = {\n Rule: \"rule\",\n \"Proposed Rule\": \"proposed_rule\",\n Notice: \"notice\",\n \"Presidential Document\": \"presidential_document\",\n };\n return map[apiType] ?? apiType.toLowerCase().replace(/\\s+/g, \"_\");\n}\n\n/** Merge API metadata fields into an existing frontmatter object */\nfunction applyEnrichment(fm: Record<string, unknown>, doc: FrDocumentJsonMeta): void {\n if (doc.type) {\n fm[\"document_type\"] = normalizeDocumentType(doc.type);\n }\n\n if (doc.citation) {\n fm[\"fr_citation\"] = doc.citation;\n }\n\n if (doc.volume) {\n fm[\"fr_volume\"] = doc.volume;\n }\n\n if (doc.publication_date) {\n fm[\"publication_date\"] = doc.publication_date;\n fm[\"currency\"] = doc.publication_date;\n fm[\"last_updated\"] = doc.publication_date;\n }\n\n if (doc.agencies && doc.agencies.length > 0) {\n const [primary] = doc.agencies;\n if (primary) fm[\"agency\"] = primary.name;\n fm[\"agencies\"] = doc.agencies.map((a) => a.name);\n }\n\n if (doc.cfr_references && doc.cfr_references.length > 0) {\n fm[\"cfr_references\"] = doc.cfr_references.map(\n (r) => `${r.title} CFR Part ${r.part}`,\n );\n }\n\n if (doc.docket_ids && doc.docket_ids.length > 0) {\n fm[\"docket_ids\"] = doc.docket_ids;\n }\n\n if (doc.regulation_id_numbers && doc.regulation_id_numbers.length > 0) {\n fm[\"rin\"] = doc.regulation_id_numbers[0];\n }\n\n if (doc.effective_on) {\n fm[\"effective_date\"] = doc.effective_on;\n }\n\n if 
(doc.comments_close_on) {\n fm[\"comments_close_date\"] = doc.comments_close_on;\n }\n\n if (doc.action) {\n fm[\"fr_action\"] = doc.action;\n }\n\n // Update title from API if available (often more descriptive than XML subject)\n if (doc.title) {\n fm[\"title\"] = doc.title;\n fm[\"section_name\"] = doc.title;\n }\n}\n","/**\n * Federal Register API downloader.\n *\n * Downloads FR documents (XML + JSON metadata) from the FederalRegister.gov API.\n * The API provides per-document endpoints, rich JSON metadata, and requires no\n * authentication. Results are paginated (max 200/page) with a 10,000 result cap\n * per query — the downloader auto-chunks by month for large date ranges.\n *\n * API base: https://www.federalregister.gov/api/v1/\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat, writeFile as fsWriteFile } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\nimport { buildFrDownloadXmlPath, buildFrDownloadJsonPath } from \"./fr-path.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n/** Base URL for the FederalRegister.gov API */\nconst FR_API_BASE = \"https://www.federalregister.gov/api/v1\";\n\n/** Maximum results per page (API max) */\nconst PER_PAGE = 200;\n\n/** Default number of concurrent XML downloads */\nconst DEFAULT_CONCURRENCY = 10;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n/** Fields to request from the API documents endpoint */\nconst API_FIELDS = [\n \"document_number\",\n \"type\",\n \"title\",\n \"publication_date\",\n \"citation\",\n \"volume\",\n \"start_page\",\n \"end_page\",\n \"agencies\",\n \"cfr_references\",\n \"docket_ids\",\n \"regulation_id_numbers\",\n \"effective_on\",\n \"comments_close_on\",\n 
\"action\",\n \"abstract\",\n \"significant\",\n \"topics\",\n \"full_text_xml_url\",\n];\n\n// ── Public types ──\n\n/** Options for downloading FR documents */\nexport interface FrDownloadOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Document types to download. All types if omitted. */\n types?: FrDocumentType[] | undefined;\n /** Maximum number of documents to download (for testing) */\n limit?: number | undefined;\n /** Number of concurrent XML downloads (default 10) */\n concurrency?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrDownloadProgress) => void) | undefined;\n}\n\n/** Progress info for download callback */\nexport interface FrDownloadProgress {\n /** Documents downloaded so far */\n documentsDownloaded: number;\n /** Total documents found across all pages */\n totalDocuments: number;\n /** Current document number being downloaded */\n currentDocument: string;\n /** Current date chunk being processed (YYYY-MM) */\n currentChunk: string;\n}\n\n/** A successfully downloaded FR document */\nexport interface FrDownloadedFile {\n /** Absolute path to the XML file */\n xmlPath: string;\n /** Absolute path to the JSON metadata file */\n jsonPath: string;\n /** Document number */\n documentNumber: string;\n /** Publication date */\n publicationDate: string;\n /** Combined size in bytes (XML + JSON) */\n size: number;\n}\n\n/** A failed download */\nexport interface FrDownloadFailure {\n /** Document number */\n documentNumber: string;\n /** Error message */\n error: string;\n}\n\n/** Result of a download operation */\nexport interface FrDownloadResult {\n /** Number of documents downloaded */\n documentsDownloaded: number;\n /** Paths of downloaded files */\n files: FrDownloadedFile[];\n /** Total bytes downloaded */\n totalBytes: number;\n 
/** Date range covered */\n dateRange: { from: string; to: string };\n /** Documents without XML (pre-2000) */\n skipped: number;\n /** Documents that failed to download */\n failed: FrDownloadFailure[];\n}\n\n/** API listing response */\nexport interface FrApiListResponse {\n count: number;\n total_pages: number;\n next_page_url?: string | null;\n /** Can be absent on weekends/holidays when count is 0 */\n results?: FrDocumentJsonMeta[];\n}\n\n// ── Public functions ──\n\n/**\n * Build the API documents listing URL for a date range.\n */\nexport function buildFrApiListUrl(\n from: string,\n to: string,\n page: number,\n types?: FrDocumentType[],\n): string {\n const params = new URLSearchParams();\n params.set(\"conditions[publication_date][gte]\", from);\n params.set(\"conditions[publication_date][lte]\", to);\n params.set(\"per_page\", String(PER_PAGE));\n params.set(\"page\", String(page));\n params.set(\"order\", \"oldest\");\n\n for (const field of API_FIELDS) {\n params.append(\"fields[]\", field);\n }\n\n if (types && types.length > 0) {\n for (const t of types) {\n params.append(\"conditions[type][]\", t);\n }\n }\n\n return `${FR_API_BASE}/documents.json?${params.toString()}`;\n}\n\n/**\n * Download FR documents for a date range.\n *\n * Automatically chunks large date ranges into month-sized windows to stay\n * under the API's 10,000 result cap per query. Within each chunk, document\n * XML files are downloaded concurrently (default 10 at a time).\n */\nexport async function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDownloadResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const concurrency = options.concurrency ?? 
DEFAULT_CONCURRENCY;\n\n const files: FrDownloadedFile[] = [];\n const failed: FrDownloadFailure[] = [];\n let totalBytes = 0;\n let skipped = 0;\n let totalDocumentsFound = 0;\n\n // Break date range into month-sized chunks\n const chunks = buildMonthChunks(options.from, to);\n\n for (const chunk of chunks) {\n if (options.limit !== undefined && files.length >= options.limit) break;\n\n // Phase 1: Collect all document metadata for this chunk (pagination is fast, JSON only)\n const chunkDocs: FrDocumentJsonMeta[] = [];\n let page = 1;\n let hasMore = true;\n\n while (hasMore) {\n const listUrl = buildFrApiListUrl(chunk.from, chunk.to, page, options.types);\n const response = await fetchWithRetry(listUrl);\n const data = (await response.json()) as FrApiListResponse;\n\n if (typeof data.count !== \"number\") {\n throw new Error(\n `Unexpected API response for ${listUrl}: missing or invalid 'count' field. ` +\n `The FederalRegister.gov API may have changed its response format.`,\n );\n }\n\n // Each chunk has its own count — accumulate on the first page of each chunk\n if (page === 1) {\n totalDocumentsFound += data.count;\n }\n\n const results = data.results ?? [];\n\n for (const doc of results) {\n if (!doc.full_text_xml_url) {\n skipped++;\n continue;\n }\n chunkDocs.push(doc);\n }\n\n hasMore = page < (data.total_pages ?? 0);\n page++;\n }\n\n // Apply limit to this chunk\n const remaining = options.limit !== undefined ? 
options.limit - files.length : chunkDocs.length;\n const docsToDownload = chunkDocs.slice(0, remaining);\n const chunkLabel = chunk.from.slice(0, 7);\n\n // Phase 2: Download XML files concurrently\n await downloadPool(docsToDownload, concurrency, options.output, (doc, result, error) => {\n if (result) {\n files.push(result);\n totalBytes += result.size;\n } else if (error) {\n failed.push({ documentNumber: doc.document_number, error });\n }\n options.onProgress?.({\n documentsDownloaded: files.length,\n totalDocuments: totalDocumentsFound,\n currentDocument: doc.document_number,\n currentChunk: chunkLabel,\n });\n });\n }\n\n return {\n documentsDownloaded: files.length,\n files,\n totalBytes,\n dateRange: { from: options.from, to },\n skipped,\n failed,\n };\n}\n\n/**\n * Download a single FR document by document number.\n *\n * Fetches both the JSON metadata and XML full text.\n */\nexport async function downloadSingleFrDocument(\n documentNumber: string,\n output: string,\n): Promise<FrDownloadedFile> {\n // Fetch JSON metadata first to get publication date and XML URL\n const metaUrl = `${FR_API_BASE}/documents/${documentNumber}.json?${new URLSearchParams(API_FIELDS.map((f) => [\"fields[]\", f])).toString()}`;\n const metaResponse = await fetchWithRetry(metaUrl);\n const doc = (await metaResponse.json()) as FrDocumentJsonMeta;\n\n if (!doc.document_number || !doc.publication_date) {\n throw new Error(\n `Invalid API response for document ${documentNumber}: missing document_number or publication_date`,\n );\n }\n\n return downloadSingleDocument(doc, output);\n}\n\n// ── Private helpers ──\n\n/**\n * Download multiple documents concurrently using a worker pool.\n * Workers pull from a shared index, so concurrency is bounded without batching.\n */\nasync function downloadPool(\n docs: FrDocumentJsonMeta[],\n concurrency: number,\n outputDir: string,\n onComplete: (\n doc: FrDocumentJsonMeta,\n result: FrDownloadedFile | null,\n error: string | null,\n ) => 
void,\n): Promise<void> {\n let nextIndex = 0;\n\n async function worker(): Promise<void> {\n while (nextIndex < docs.length) {\n const i = nextIndex++;\n const doc = docs[i];\n if (!doc) break;\n try {\n const result = await downloadSingleDocument(doc, outputDir);\n onComplete(doc, result, null);\n } catch (err) {\n onComplete(doc, null, err instanceof Error ? err.message : String(err));\n }\n }\n }\n\n const workerCount = Math.min(concurrency, docs.length);\n await Promise.all(Array.from({ length: workerCount }, () => worker()));\n}\n\nasync function downloadSingleDocument(\n doc: FrDocumentJsonMeta,\n outputDir: string,\n): Promise<FrDownloadedFile> {\n if (!doc.document_number || !doc.publication_date) {\n throw new Error(\n `Invalid document in API response: missing document_number or publication_date`,\n );\n }\n if (!doc.full_text_xml_url) {\n throw new Error(\n `Document ${doc.document_number} has no full_text_xml_url — cannot download XML`,\n );\n }\n\n const xmlPath = buildFrDownloadXmlPath(doc.document_number, doc.publication_date, outputDir);\n const jsonPath = buildFrDownloadJsonPath(doc.document_number, doc.publication_date, outputDir);\n\n // Ensure directory exists\n await mkdir(dirname(xmlPath), { recursive: true });\n\n // Write JSON metadata\n const jsonContent = JSON.stringify(doc, null, 2);\n await fsWriteFile(jsonPath, jsonContent, \"utf-8\");\n\n // Fetch and write XML\n const xmlResponse = await fetchWithRetry(doc.full_text_xml_url);\n if (!xmlResponse.body) {\n throw new Error(`No response body for ${doc.document_number} XML`);\n }\n\n const dest = createWriteStream(xmlPath);\n try {\n await pipeline(Readable.fromWeb(xmlResponse.body as never), dest);\n } catch (err) {\n throw new Error(\n `Failed to write XML for document ${doc.document_number} from ${doc.full_text_xml_url}: ` +\n `${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n // Get file sizes\n const xmlStat = await stat(xmlPath);\n const jsonSize = Buffer.byteLength(jsonContent, \"utf-8\");\n\n return {\n xmlPath,\n jsonPath,\n documentNumber: doc.document_number,\n publicationDate: doc.publication_date,\n size: Number(xmlStat.size) + jsonSize,\n };\n}\n\n/**\n * Break a date range into month-sized chunks.\n * Each chunk covers one calendar month (or partial month at boundaries).\n */\nexport function buildMonthChunks(from: string, to: string): Array<{ from: string; to: string }> {\n const chunks: Array<{ from: string; to: string }> = [];\n\n let current = new Date(from + \"T00:00:00Z\");\n const end = new Date(to + \"T00:00:00Z\");\n\n while (current <= end) {\n const chunkStart = current.toISOString().slice(0, 10);\n\n // End of this month\n const monthEnd = new Date(Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 0));\n const chunkEnd = monthEnd <= end ? monthEnd.toISOString().slice(0, 10) : to;\n\n chunks.push({ from: chunkStart, to: chunkEnd });\n\n // Move to first day of next month\n current = new Date(Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 1));\n }\n\n return chunks;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nexport async function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n // Network-level error (DNS, TLS, connection reset) — retry\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `Network error for ${url}: ${err instanceof Error ? err.message : String(err)}. ` +\n `Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok) return response;\n\n // Retry on transient HTTP errors\n if (\n (response.status === 429 || response.status === 503 || response.status === 504) &&\n attempt < MAX_RETRIES\n ) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay =\n !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `HTTP ${response.status} for ${url}. Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Federal Register govinfo bulk downloader.\n *\n * Downloads complete daily-issue XML files from govinfo.gov. Each file contains\n * all FR documents published on a single day (~150 documents, ~2.4 MB average).\n * This is dramatically faster than the per-document API for historical backfill.\n *\n * URL pattern: https://www.govinfo.gov/content/pkg/FR-{YYYY-MM-DD}/xml/FR-{YYYY-MM-DD}.xml\n *\n * The existing FrASTBuilder handles daily-issue XML natively: FEDREG root is a\n * passthrough, section containers (RULES, NOTICES, etc.) are passthroughs, and\n * individual document elements emit via onEmit. 
No splitter needed.\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat } from \"node:fs/promises\";\nimport { dirname, join } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\n\n/** Base URL for govinfo FR bulk data */\nconst GOVINFO_BASE = \"https://www.govinfo.gov/content/pkg\";\n\n/** Default number of concurrent downloads */\nconst DEFAULT_CONCURRENCY = 10;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n// ── Public types ──\n\n/** Options for downloading FR bulk XML from govinfo */\nexport interface FrGovinfoBulkOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Number of concurrent downloads (default 10) */\n concurrency?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrGovinfoProgress) => void) | undefined;\n}\n\n/** Progress info for govinfo download callback */\nexport interface FrGovinfoProgress {\n /** Files downloaded so far */\n downloaded: number;\n /** Total publishing days in date range */\n totalDays: number;\n /** Skipped days (weekends/holidays — 404) */\n skipped: number;\n /** Failed downloads */\n failed: number;\n /** Current date being downloaded */\n currentDate: string;\n}\n\n/** A successfully downloaded bulk file */\nexport interface FrGovinfoDownloadedFile {\n /** Absolute path to the downloaded XML file */\n path: string;\n /** Publication date (YYYY-MM-DD) */\n date: string;\n /** File size in bytes */\n size: number;\n}\n\n/** Result of a govinfo bulk download */\nexport interface FrGovinfoResult {\n /** Number of daily files downloaded */\n filesDownloaded: number;\n /** Downloaded files */\n files: 
FrGovinfoDownloadedFile[];\n /** Total bytes downloaded */\n totalBytes: number;\n /** Date range covered */\n dateRange: { from: string; to: string };\n /** Days skipped (no issue published — weekends/holidays) */\n skipped: number;\n /** Days that failed to download */\n failed: number;\n}\n\n// ── Public functions ──\n\n/**\n * Build the govinfo download URL for a single day's FR issue.\n */\nexport function buildGovinfoFrUrl(date: string): string {\n return `${GOVINFO_BASE}/FR-${date}/xml/FR-${date}.xml`;\n}\n\n/**\n * Build the local file path for a downloaded daily-issue XML.\n * Stored as: {output}/bulk/{YYYY}/FR-{YYYY-MM-DD}.xml\n */\nexport function buildGovinfoBulkPath(date: string, outputDir: string): string {\n const year = date.slice(0, 4);\n return join(outputDir, \"bulk\", year, `FR-${date}.xml`);\n}\n\n/**\n * Download FR daily-issue XML files from govinfo for a date range.\n * Skips weekends/holidays (404 responses) and retries transient errors.\n */\nexport async function downloadFrBulk(options: FrGovinfoBulkOptions): Promise<FrGovinfoResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const concurrency = options.concurrency ?? 
DEFAULT_CONCURRENCY;\n\n // Generate all dates in range\n const dates = generateDateRange(options.from, to);\n\n const files: FrGovinfoDownloadedFile[] = [];\n let totalBytes = 0;\n let skipped = 0;\n let failed = 0;\n\n // Download concurrently using a worker pool\n let nextIndex = 0;\n\n async function worker(): Promise<void> {\n while (nextIndex < dates.length) {\n const i = nextIndex++;\n const date = dates[i];\n if (!date) break;\n\n options.onProgress?.({\n downloaded: files.length,\n totalDays: dates.length,\n skipped,\n failed,\n currentDate: date,\n });\n\n const url = buildGovinfoFrUrl(date);\n const filePath = buildGovinfoBulkPath(date, options.output);\n\n try {\n const result = await downloadSingleDay(url, filePath, date);\n if (result) {\n files.push(result);\n totalBytes += result.size;\n } else {\n // null means 404 — no issue published on this date\n skipped++;\n }\n } catch (err) {\n console.warn(\n `Warning: Failed to download ${date}: ${err instanceof Error ? err.message : String(err)}`,\n );\n failed++;\n }\n }\n }\n\n const workerCount = Math.min(concurrency, dates.length);\n await Promise.all(Array.from({ length: workerCount }, () => worker()));\n\n // Final progress update\n options.onProgress?.({\n downloaded: files.length,\n totalDays: dates.length,\n skipped,\n failed,\n currentDate: \"done\",\n });\n\n return {\n filesDownloaded: files.length,\n files,\n totalBytes,\n dateRange: { from: options.from, to },\n skipped,\n failed,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Download a single day's FR issue XML. 
Returns null if 404 (no issue).\n */\nasync function downloadSingleDay(\n url: string,\n filePath: string,\n date: string,\n): Promise<FrGovinfoDownloadedFile | null> {\n const response = await fetchWithRetry(url);\n\n if (response.status === 404) {\n return null; // No issue published on this date (weekend/holiday)\n }\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status} for ${url}`);\n }\n\n if (!response.body) {\n throw new Error(`No response body for ${url}`);\n }\n\n await mkdir(dirname(filePath), { recursive: true });\n\n const dest = createWriteStream(filePath);\n await pipeline(Readable.fromWeb(response.body as never), dest);\n\n const fileStat = await stat(filePath);\n\n return {\n path: filePath,\n date,\n size: Number(fileStat.size),\n };\n}\n\n/**\n * Generate all dates (YYYY-MM-DD) in a range, inclusive.\n */\nfunction generateDateRange(from: string, to: string): string[] {\n const dates: string[] = [];\n const current = new Date(from + \"T12:00:00Z\"); // Noon UTC to avoid DST issues\n const end = new Date(to + \"T12:00:00Z\");\n\n while (current <= end) {\n dates.push(current.toISOString().slice(0, 10));\n current.setUTCDate(current.getUTCDate() + 1);\n }\n\n return dates;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nasync function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok || response.status === 404) return response;\n\n if (\n (response.status === 429 || response.status === 503 || response.status === 504) &&\n attempt < MAX_RETRIES\n ) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay =\n !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n"],"mappings":";AAiBO,IAAM,wBAAwB,CAAC,QAAQ,WAAW,UAAU,UAAU;AAMtE,IAAM,uBAAuB,IAAI,IAAY,qBAAqB;AAGlE,IAAM,wBAAwB,oBAAI,IAAI,CAAC,SAAS,YAAY,WAAW,UAAU,CAAC;AAGlF,IAAM,uBAAyD;AAAA,EACpE,MAAM;AAAA,EACN,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AACZ;AAKO,IAAM,uBAAuB,oBAAI,IAAI;AAAA,EAC1C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,4BAA4B,oBAAI,IAAI;AAAA,EAC/C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,qBAAqB;AAM3B,IAAM,wBAA0D;AAAA,EACrE,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AACP;AAKO,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAMM,IAAM,kBAAwD;AAAA,EACnE,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,QAAQ;AAAA;AACV;AAKO,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,oBAAoB;AAK1B,IAAM,wBAAwB,oBAAI,IA
AI;AAAA,EAC3C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,2BAA2B,oBAAI,IAAI;AAAA,EAC9C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,gCAAgC,oBAAI,IAAI;AAAA,EACnD;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,mBAAmB;AAKzB,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,0BAA0B,oBAAI,IAAI;AAAA,EAC7C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB;AAGzB,IAAM,oBAAoB;;;AChH1B,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA,QAAsB,CAAC;AAAA;AAAA,EAEhC,wBAAwB;AAAA;AAAA,EAExB,iBAAoC;AAAA,IAC1C,cAAc;AAAA,IACd,wBAAwB;AAAA,EAC1B;AAAA;AAAA,EAEiB,gBAAqC,CAAC;AAAA,EAEvD,YAAY,SAA8B;AACxC,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA,EAGA,mBAAiD;AAC/C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,cAAc,MAAc,OAAyB;AAEnD,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,GAAG;AACrC;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,8BAA8B,IAAI,IAAI,GAAG;AAE3C,UAAI,SAAS,UAAU,SAAS,SAAS;AACvC,aAAK,YAAY,IAAI;AACrB;AAAA,MACF;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AACrE;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,MAAM,KAAK,EAAE,MAAM,gBAAgB,aAAa,MAAM,YAAY,GAAG,CAAC;AAC3E;AAAA,IACF;AAG
A,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,MAAM,KAAK,EAAE,MAAM,mBAAmB,aAAa,MAAM,YAAY,GAAG,CAAC;AAC9E;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,WAAW,MAAM,KAAK;AAC3B;AAAA,IACF;AAIA,QAAI,SAAS,kBAAkB;AAC7B,YAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,UAAI,aAAa,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC3E,cAAM,cAAc,YAAY;AAEhC,iBAAS,IAAI,YAAY,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AACzD,gBAAM,QAAQ,YAAY,SAAS,CAAC;AACpC,cAAI,OAAO,SAAS,YAAa,MAAqB,eAAe,OAAO;AAC1E,YAAC,MAAqB,aAAa;AACnC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,iBAAiB,MAAM,KAAK;AACjC;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA;AAAA,EAGA,eAAe,MAAoB;AAEjC,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,KAAK,sBAAsB,IAAI,IAAI,GAAG;AACxE;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,UAAU,SAAS,SAAS;AACvC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI
,mBAAmB,IAAI,IAAI,KAAK,SAAS,kBAAkB;AAC7D,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,UAAU,IAAI;AACnB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,eAAe,IAAI;AACxB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,WAAW;AAChB;AAAA,IACF;AAGA,QAAI,KAAK,MAAM,SAAS,KAAK,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC,GAAG,gBAAgB,MAAM;AACpF,WAAK,MAAM,IAAI;AAAA,IACjB;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,MAAoB;AACzB,QAAI,KAAK,wBAAwB,EAAG;AAEpC,UAAM,QAAQ,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAC9C,QAAI,CAAC,MAAO;AAGZ,QACE,MAAM,SAAS,aACf,MAAM,SAAS,kBACf,MAAM,SAAS,oBACf,MAAM,SAAS,eACf,MAAM,SAAS,iBACf,MAAM,SAAS,SACf;AACA,YAAM,cAAc;AACpB;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,aAAa,MAAM,MAAM,SAAS,WAAW;AAC9D,YAAM,cAAc,MAAM;AAE1B,YAAM,aAAa,KAAK,QAAQ,QAAQ,GAAG;AAC3C,UAAI,cAAc,eAAe,KAAK;AACpC,oBAAY,SAAS,KAAK;AAAA,UACxB,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,YAAY,MAAM,MAAM,SAAS,UAAU;AAC5D,YAAM,aAAa,MAAM;AACzB,YAAM,aAAa,KAAK,QAAQ,QAAQ,GAAG;AAC3C,UAAI,WAAW,UAAU;AACvB,YAAI,cAAc,eAAe,KAAK;AACpC,qBAAW,SAAS,KAAK;AAAA,YACvB,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AACL,mBAAW,QAAQ,WAAW,QAAQ,MAAM;AAAA,MAC9C;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,UAAU,MAAM,MAAM,SAAS,QAAQ;AACxD,YAAM,cAAc;AACpB;AAAA,IACF;AAAA,EAGF;AAAA;AAAA,EAIQ,aAAa,aAA2B;AAC9C,SAAK,iBAAiB;AAAA,MACpB,cAAc;AAAA,MACd,wBAAwB,qBAAqB,WAAW,KAAK,YAAY,YAAY;AAAA,IACvF;AAEA,UAAM,OAAkB;AAAA,MACtB,MAAM;AAAA,MACN,WAAW;AAAA,MACX,UAAU,CAAC;AAAA,MACX,eAAe;AAAA,IACjB;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,cAAc,aAA2B;AAC/C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,cAAc,CAAC,MAAM,KAAM;AAExD,UAAM,YAAY,MAAM;AAGxB,QAAI,KAAK,eAAe,SAAS;AAC/B,gBAAU,UAAU,KAAK,eAAe;A
AAA,IAC1C;AAGA,QAAI,KAAK,eAAe,gBAAgB;AACtC,gBAAU,aAAa,UAAU,KAAK,eAAe,cAAc;AACnE,gBAAU,WAAW,KAAK,eAAe;AAAA,IAC3C;AAGA,UAAM,YAA4B,CAAC;AACnC,eAAW,KAAK,KAAK,OAAO;AAC1B,UAAI,EAAE,SAAS,cAAc,EAAE,MAAM,SAAS,SAAS;AACrD,cAAM,KAAK,EAAE;AACb,kBAAU,KAAK;AAAA,UACb,WAAW,GAAG;AAAA,UACd,UAAU,GAAG;AAAA,UACb,SAAS,GAAG;AAAA,UACZ,YAAY,GAAG;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,UAAuB;AAAA,MAC3B;AAAA,MACA,cAAc;AAAA,QACZ,SAAS,KAAK,eAAe;AAAA,QAC7B,QAAQ,KAAK,eAAe;AAAA,MAC9B;AAAA,IACF;AAGA,SAAK,cAAc,KAAK,EAAE,GAAG,KAAK,eAAe,CAAC;AAElD,SAAK,QAAQ,OAAO,WAAW,OAAO;AAAA,EACxC;AAAA;AAAA,EAIQ,kBAAkB,aAA2B;AACnD,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,eAAgB;AAE7C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAEX,YAAQ,aAAa;AAAA,MACnB,KAAK;AACH,aAAK,eAAe,SAAS;AAC7B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,YAAY;AAChC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,cAAc;AAClC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,UAAU;AAC9B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,MAAM,KAAK,QAAQ,YAAY,EAAE,EAAE,KAAK;AAC5D;AAAA,MACF,KAAK;AAEH;AAAA,IACJ;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,cAAsB,OAAyB;AACjE,UAAM,SAAS,MAAM,QAAQ,KAAK;AAClC,UAAM,QAAQ,sBAAsB,MAAM,KAAK;AAE/C,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAAA,EAEQ,aAAa,aAA2B;AAC9C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,UAAW;AAExC,UAAM,cAAc,MAAM,WAAW,KAAK;AAC1C,QAAI,CAAC,YAAa;AAIlB,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAEpD,QAAI,aAAa,SAAS,mBAAmB;AAE3C,YAAMA,eAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,WAAK,cAAcA,YAAW;AAC9B;AAAA,IACF;AAIA,UAAM,cAA2B;AAAA,MAC/B,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,SAAK,cAAc,WAAW;AAAA,EAChC;AAAA;AAAA,EAIQ,YAAY,aAA2B;AAC7C,UAAM,OAAoB;AAAA,MACxB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU,CAAC;AAAA,IACb;AACA,SAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACxE;AAAA,EAEQ,aAAa,aAA2B;AAC9C,UAAM,QAAQ,KAAK,SAAS,
WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,cAAc,MAAM;AAG1B,QAAI,YAAY,SAAS,WAAW,EAAG;AAGvC,UAAM,SAAS,KAAK,mBAAmB,KAAK,KAAK,eAAe;AAChE,QAAI,QAAQ,MAAM;AAChB,UAAI,OAAO,KAAK,SAAS,SAAS;AAChC,QAAC,OAAO,KAAmB,SAAS,KAAK,WAAW;AAAA,MACtD,WAAW,OAAO,KAAK,SAAS,QAAQ;AACtC,QAAC,OAAO,KAAkB,SAAS,KAAK,WAAW;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,WAAW,aAAqB,OAAyB;AAC/D,QAAI,aAAyB;AAE7B,QAAI,gBAAgB,KAAK;AACvB,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,mBAAa;AAAA,IACf,WAAW,gBAAgB,MAAM;AAG/B,YAAM,iBAAiB,KAAK,UAAU,MAAM,MAAM;AAClD,mBAAa,iBAAiB,gBAAgB;AAAA,IAChD,WAAW,gBAAgB,MAAM;AAC/B,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,YAAM,SAAS,MAAM,GAAG,KAAK;AAC7B,mBAAa,gBAAgB,MAAM,KAAK;AAAA,IAC1C;AAEA,UAAM,OAAmB;AAAA,MACvB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA,EAEQ,YAAY,aAA2B;AAC7C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,aAAa,MAAM;AAGzB,QAAI,WAAW,eAAe,iBAAiB,MAAM,YAAY;AAC/D,iBAAW,OAAO,MAAM,WAAW,KAAK;AAAA,IAC1C;AAGA,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,QAAI,CAAC,YAAa;AAElB,QAAI,YAAY,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC1E,MAAC,YAAY,KAAqB,SAAS,KAAK,UAAU;AAAA,IAC5D,WAAW,YAAY,SAAS,YAAY,YAAY,MAAM,SAAS,UAAU;AAC/E,YAAM,eAAe,YAAY;AACjC,UAAI,aAAa,UAAU;AACzB,qBAAa,SAAS,KAAK,UAAU;AAAA,MACvC;AAAA,IACF,WAAW,YAAY,SAAS,aAAa,YAAY,SAAS,gBAAgB;AAEhF,UAAI,WAAW,MAAM;AACnB,oBAAY,cAAc,WAAW;AAAA,MACvC,WAAW,WAAW,UAAU;AAC9B,mBAAW,SAAS,WAAW,UAAU;AACvC,cAAI,MAAM,KAAM,aAAY,cAAc,MAAM;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,SAAS,aAA2B;AAC1C,UAAM,cAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,UAAM,WAAW,YAAY,WAAW,KAAK,YAAY,YAAY;AACrE,UAAM,OAAiB;AAAA,MACrB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,QAAQ,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACrE;AAAA,EAEQ,UAAU,aAA2B;AAC3C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,WAAW,MAAM;AAGvB,QAAI,MAAM,WAAW,KAAK,KAAK,SAAS,SAAS,WAAW,GAAG;AAC7D,YAAM,cAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,
SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM,MAAM,WAAW,KAAK;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AACA,eAAS,SAAS,KAAK,WAAW;AAAA,IACpC;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,QAAQ;AAAA,IACtD;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,aAAqB,OAAyB;AAChE,QAAI,gBAAgB,WAAW;AAE7B,YAAM,QAAQ,MAAM,OAAO,KAAK;AAChC,YAAM,OAAO,MAAM,MAAM,KAAK;AAC9B,YAAM,QAAQ,SAAS,OAAO,GAAG,KAAK,aAAa,IAAI,KAAK;AAG5D,UAAI,OAAO;AACT,cAAM,YAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,YACR;AAAA,cACE,MAAM;AAAA,cACN,YAAY;AAAA,cACZ,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AACA,aAAK,cAAc,SAAS;AAAA,MAC9B;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,WAAW;AAE7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,aAAa,aAA2B;AAC9C,QAAI,gBAAgB,WAAW;AAC7B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,YAAY,gBAAgB,UAAU;AACxD,WAAK,aAAa,WAAW;AAC7B;AAAA,IACF;AAEA,QAAI,gBAAgB,aAAa,gBAAgB,QAAQ;AACvD,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,UAAU,WAAW;AAC1B;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,aAA2B;AAC/C,QAAI,gBAAgB,OAAO;AAEzB,YAAM,OAAiB;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,QACV,UAAU,CAAC;AAAA,MACb;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,MAAM,YAAY,GAAG,CAAC;AACxE;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,kBAAkB,aAAa,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,eAAe,aAA2B;AAChD,QAAI,gBAAgB,OAAO;AACzB,YAAMC,SAAQ,KAAK,SAAS,WAAW;AACvC,UAAI,CAACA,UAAS,CAACA,OAAM,KAAM;AAE3B,YAAM,UAAUA,OAAM;AAGtB,YAAM,YAAY,KAAK,mBAAmB;AAC1C,UAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,QAAC,UAAU,KAAmB,SAAS,KAAK,OAAO;AAAA,MACrD;AACA;AAAA,IACF;AAGA,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,iBAAkB;AAE/C,UAAM
,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAGX,UAAM,WAAW,KAAK,UAAU,WAAW;AAC3C,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,QAAQ;AACnD,YAAM,cAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,MAAC,SAAS,KAAkB,SAAS,KAAK,WAAW;AAAA,IACvD;AAAA,EACF;AAAA;AAAA,EAIQ,iBAAiB,aAAqB,QAA0B;AACtE,QAAI,gBAAgB,YAAY;AAC9B,WAAK,MAAM,KAAK;AAAA,QACd,MAAM;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,MAAM,CAAC;AAAA,QACP,YAAY,CAAC;AAAA,MACf,CAAC;AACD;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,eAAe,aAAa,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,YAAM,aAAa,KAAK,eAAe;AACvC,UAAI,YAAY;AACd,mBAAW,aAAa,CAAC;AAAA,MAC3B;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,YAAY,GAAG,CAAC;AACjE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AAEzB,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,YAAY,GAAG,CAAC;AAClE;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,aAA2B;AACnD,QAAI,gBAAgB,YAAY;AAC9B,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,iBAAiB;AACtB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,eAAe;AACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,QAAQ,KAAK,SAAS,UAAU;AACtC,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,YAAuB;AAAA,MAC3B,MAAM;AAAA,MACN,SAAS;AAAA;AAAA,MACT,SAAS,MAAM,WAAW,CAAC;AAAA,MAC3B,MAAM,MAAM,QAAQ,CAAC;AAAA,IACvB;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,SAAS;AAAA,IACvD;AAAA,EACF;AAAA,EAEQ,mBAAyB;AAC/B,UAAM,cAAc,KAAK,SAAS,MAAM;AACxC,QAAI,CAAC,eAAe,YAAY,SAAS,cAAe;AAExD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,CAAC,WAAY;AAEjB,UAAM,OAAO,YAAY,WAAW,KAAK;AAIzC,QAAI,CAAC,WAAW,WAAW,WAAW,QAAQ,WAAW,GAAG;AAC1D,iBAAW,UAAU,CAAC,CAAC,CAAC;AAAA,IAC1B;AACA,UAAM,YAAY,WAAW,QAAQ,CAAC;AACtC,QAAI,WAAW;A
ACb,gBAAU,KAAK,IAAI;AAAA,IACrB;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,WAAW,KAAK,SAAS,KAAK;AACpC,QAAI,CAAC,SAAU;AAEf,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,MAAM,KAAK,CAAC,GAAG,WAAW,UAAU,CAAC;AAChD,iBAAW,aAAa,CAAC;AAAA,IAC3B;AAAA,EACF;AAAA,EAEQ,iBAAuB;AAC7B,UAAM,YAAY,KAAK,MAAM,IAAI;AACjC,QAAI,CAAC,aAAa,UAAU,SAAS,YAAa;AAElD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,WAAW,KAAK,UAAU,WAAW,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAAA;AAAA,EAIQ,aAAmB;AACzB,UAAM,QAAQ,KAAK,SAAS,gBAAgB;AAC5C,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,OAAO,MAAM,WAAW,KAAK;AAKnC,UAAM,WAAW,yBAAyB,KAAK,IAAI;AACnD,QAAI,UAAU;AACZ,WAAK,eAAe,iBAAiB,SAAS,CAAC;AAAA,IACjD;AAIA,UAAM,YAAY,wCAAwC,KAAK,IAAI;AACnE,QAAI,WAAW;AACb,YAAM,CAAC,EAAE,OAAO,OAAO,KAAK,IAAI;AAChC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AACpC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AACpC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AAEpC,YAAM,WAAW,KAAK,KAAK,MAAO,KAAK,OAAO;AAC9C,YAAM,QAAQ,IAAI,KAAK,UAAU,KAAK,GAAG,EAAE;AAE3C,UAAI,MAAM,SAAS,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM,IAAI;AACzD;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM,QAAQ,IAAI,CAAC;AACjC,YAAM,UAAU,MAAM,YAAY;AAClC,YAAM,WAAW,OAAO,MAAM,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAC7D,YAAM,SAAS,OAAO,MAAM,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AACtD,WAAK,eAAe,kBAAkB,GAAG,OAAO,IAAI,QAAQ,IAAI,MAAM;AAAA,IACxE;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,MAAqB;AACzC,UAAM,WAAW,KAAK,mBAAmB;AACzC,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,SAAS;AACpD,MAAC,SAAS,KAAmB,SAAS,KAAK,IAAI;AAAA,IACjD;AAAA,EACF;AAAA,EAEQ,qBAA6C;AACnD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,YAAY;AACtC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,UAAU,KAAK,MAAM,CAAC,GAAG,SAAS,aAAa;AACzE,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,SAAS;AACnC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAyC;AACzD,aA
AS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,MAAM;AAChC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,SAAS,aAA6C;AAC5D,QAAI,KAAK,MAAM,WAAW,EAAG,QAAO;AAGpC,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,gBAAgB,aAAa;AAC9C,eAAO,KAAK,MAAM,OAAO,GAAG,CAAC,EAAE,CAAC;AAAA,MAClC;AAAA,IACF;AAGA,YAAQ;AAAA,MACN,yDAAyD,WAAW,kBACnD,KAAK,MAAM,IAAI,CAAC,MAAM,EAAE,WAAW,EAAE,KAAK,IAAI,CAAC;AAAA,IAClE;AACA,WAAO;AAAA,EACT;AACF;;;ACzjCA,SAAS,sBAAsB,SAAyB;AACtD,QAAM,MAA8B;AAAA,IAClC,MAAM;AAAA,IACN,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,yBAAyB;AAAA,EAC3B;AACA,SAAO,IAAI,OAAO,KAAK,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG;AAClE;AASO,SAAS,mBACd,MACA,UACA,SACA,UACiB;AACjB,QAAM,iBAAiB,UAAU,mBAAmB,QAAQ,kBAAkB;AAC9E,QAAM,UAAU,UAAU,SAAS,QAAQ,WAAW,KAAK,WAAW;AACtE,QAAM,kBAAkB,UAAU,oBAAoB,QAAQ,mBAAmB;AACjF,QAAM,eAAe,WACjB,sBAAsB,SAAS,IAAI,IACnC,QAAQ;AAGZ,MAAI;AACJ,MAAI,UAAU,YAAY,SAAS,SAAS,SAAS,GAAG;AACtD,eAAW,SAAS,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EAChD,WAAW,QAAQ,QAAQ;AACzB,eAAW,CAAC,QAAQ,MAAM;AAC1B,QAAI,QAAQ,WAAW;AACrB,eAAS,KAAK,QAAQ,SAAS;AAAA,IACjC;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,UAAU,kBAAkB,SAAS,eAAe,SAAS,GAAG;AAClE,oBAAgB,SAAS,eAAe,IAAI,CAAC,MAAM,GAAG,EAAE,KAAK,aAAa,EAAE,IAAI,EAAE;AAAA,EACpF,WAAW,QAAQ,aAAa;AAC9B,oBAAgB,CAAC,QAAQ,WAAW;AAAA,EACtC;AAGA,MAAI;AACJ,MAAI,UAAU,cAAc,SAAS,WAAW,SAAS,GAAG;AAC1D,gBAAY,SAAS;AAAA,EACvB;AAGA,QAAM,gBAAgB,YAAY,SAAS,SAAS,IAAI,SAAS,CAAC,IAAI;AAGtE,QAAM,aAAa,UAAU;AAG7B,QAAM,MAAM,UAAU,wBAAwB,CAAC,KAAK,QAAQ;AAE5D,QAAM,KAAsB;AAAA,IAC1B,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY,KAAK,cAAc,UAAU,cAAc;AAAA,IACvD,OAAO;AAAA,IACP,cAAc;AAAA;AAAA,IACd,YAAY;AAAA,IACZ,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,cAAc;AAAA,IACd,UAAU;AAAA,IACV,cAAc;AAAA;AAAA,IAGd,QAAQ;AAAA;AAAA,IAGR,iBAAiB,kBAAkB;AAAA,IACnC,eAAe,gBAAgB;AAAA,IAC/B,aAAa;AAAA,IACb,WAAW,UAAU;AAAA,IACrB,kBAAkB,mBAAmB;AAAA,IACrC,UAAU,YAAY,SAAS,SAAS,IAAI,WAAW;AAAA,IACvD,gBAAgB,iBAAiB,cAAc,SAAS,IAAI,gBAAgB;AAAA,IAC5E,YAAY,aAAa,UAAU,SAAS,IAAI,YAAY;AAAA,IAC5D,KAAK,OAAO;AAAA,IA
CZ,gBAAgB,UAAU,gBAAgB;AAAA,IAC1C,qBAAqB,UAAU,qBAAqB;AAAA,IACpD,WAAW,UAAU,UAAU;AAAA,EACjC;AAEA,SAAO;AACT;;;ACnJA,SAAS,YAAY;AAUd,SAAS,kBACd,gBACA,iBACA,YACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,YAAY,MAAM,MAAM,OAAO,GAAG,cAAc,KAAK;AACnE;AAUO,SAAS,uBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,MAAM;AAChE;AAUO,SAAS,wBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,OAAO;AACjE;AAKO,SAAS,cAAc,MAAc,OAAe,YAA4B;AACrF,SAAO,KAAK,YAAY,MAAM,MAAM,KAAK;AAC3C;AAKO,SAAS,aAAa,MAAc,YAA4B;AACrE,SAAO,KAAK,YAAY,MAAM,IAAI;AACpC;AAKA,SAAS,oBAAoB,MAA+C;AAC1E,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,SAAO;AAAA,IACL,MAAM,MAAM,CAAC,KAAK;AAAA,IAClB,OAAO,MAAM,CAAC,KAAK;AAAA,EACrB;AACF;;;AC3EA,SAAS,kBAAkB,kBAAkB;AAC7C,SAAS,UAAU,SAAS,YAAY;AACxC,SAAS,QAAAC,OAAM,eAAe;AAC9B,SAAS,WAAW,gBAAgB,oBAAoB,WAAW,aAAa;AAqEhF,IAAM,kBAAkB,IAAI,IAAY,qBAAqB;AAU7D,eAAsB,mBAAmB,SAAqD;AAC5F,QAAM,WAAW,MAAM,iBAAiB,QAAQ,OAAO,QAAQ,MAAM,QAAQ,EAAE;AAE/E,MAAI,qBAAqB;AACzB,MAAI,qBAAqB;AACzB,MAAI,kBAAkB;AAEtB,QAAM,eAAe,mBAAmB;AAMxC,MAAI,iBAAiB;AACrB,aAAW,WAAW,UAAU;AAC9B,QAAI;AACJ,QAAI;AACF,kBAAY,MAAM,aAAa,OAAO;AAAA,IACxC,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,4BAA4B,OAAO,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1F;AACA;AAAA,IACF;AAEA,eAAW,OAAO,WAAW;AAE3B,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,YACE,CAAC,gBAAgB,IAAI,IAAI,QAAQ,YAAY,KAC7C,CAAC,QAAQ,MAAM,SAAS,IAAI,QAAQ,YAA8B,GAClE;AACA;AAAA,QACF;AAAA,MACF;AAEA,UAAI,QAAQ,QAAQ;AAClB;AACA;AAAA,MACF;AAEA,YAAM,aAAa,kBAAkB,IAAI,gBAAgB,IAAI,iBAAiB,QAAQ,MAAM;AAE5F,YAAM,cAAc,mBAAmB,IAAI,MAAM,IAAI,SAAS,IAAI,SAAS,IAAI,QAAQ;AAEvF,YAAM,WAAW,eAAe,IAAI,MAAM,aAAa;AAAA,QACrD,eAAe;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,aACE,QAAQ,cAAc,aAClB,CAAC,OAAO,aAAa,QAAQ,IAAI,UAAU,IAC3C;AAAA,MACR,CAAC;AAED,YAAM,MAAM,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AACpD,YAAM,UAAU,YAAY,UAAU,OAAO;AAE7C;AACA,4BAAsB,KAAK,MAAM,SAAS,SAAS,CAAC;AAGpD,YAAM,MAAM,QAAQ,YAAY,EAAE;AAClC,UAAI,MAAM,iBAAiB;AACzB,0BAAkB;AAAA,MACpB
;AAAA,IACF;AAEA;AAEA,YAAQ,aAAa;AAAA,MACnB;AAAA,MACA;AAAA,MACA,YAAY,SAAS;AAAA,MACrB,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL;AAAA,IACA,OAAO,CAAC;AAAA;AAAA,IACR;AAAA,IACA;AAAA,IACA,QAAQ,QAAQ;AAAA,EAClB;AACF;AAOA,eAAe,aAAa,SAA0C;AACpE,QAAM,YAA4B,CAAC;AAEnC,QAAM,UAAU,IAAI,aAAa;AAAA,IAC/B,QAAQ,CAAC,MAAM,YAAY;AAEzB,YAAM,eAAe,QAAQ,iBAAiB;AAC9C,YAAM,OAAO,aAAa,aAAa,SAAS,CAAC;AACjD,UAAI,CAAC,MAAM;AACT,gBAAQ;AAAA,UACN,8DAA8D,OAAO;AAAA,QAEvE;AAAA,MACF;AACA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA;AAAA,QACA,SAAS,QAAQ,EAAE,cAAc,IAAI,wBAAwB,GAAG;AAAA,QAChE,iBAAiB;AAAA,QACjB,gBAAgB,MAAM,kBAAkB;AAAA,MAC1C,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,QAAM,SAAS,IAAI,UAAU,EAAE,kBAAkB,GAAG,CAAC;AACrD,SAAO,GAAG,eAAe,CAAC,MAAM,UAAU,QAAQ,cAAc,MAAM,KAAK,CAAC;AAC5E,SAAO,GAAG,gBAAgB,CAAC,SAAS,QAAQ,eAAe,IAAI,CAAC;AAChE,SAAO,GAAG,QAAQ,CAAC,SAAS,QAAQ,OAAO,IAAI,CAAC;AAEhD,QAAM,SAAS,iBAAiB,SAAS,OAAO;AAChD,QAAM,OAAO,YAAY,MAAM;AAG/B,QAAM,WAAW,QAAQ,QAAQ,UAAU,OAAO;AAClD,MAAI;AACJ,MAAI,WAAW,QAAQ,GAAG;AACxB,QAAI;AACF,YAAM,MAAM,MAAM,SAAS,UAAU,OAAO;AAC5C,iBAAW,KAAK,MAAM,GAAG;AAAA,IAC3B,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,yCAAyC,QAAQ,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxG;AAAA,IACF;AAAA,EACF;AAGA,aAAW,OAAO,WAAW;AAC3B,QAAI,YAAY,SAAS,oBAAoB,IAAI,gBAAgB;AAC/D,UAAI,WAAW;AACf,UAAI,kBAAkB,SAAS;AAAA,IACjC,OAAO;AAEL,YAAM,eAAe,kBAAkB,OAAO;AAC9C,UAAI,CAAC,cAAc;AACjB,gBAAQ;AAAA,UACN,6CAA6C,IAAI,kBAAkB,WAAW,oCAChD,OAAO;AAAA,QACvC;AAAA,MACF;AACA,UAAI,kBAAkB;AAAA,IACxB;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,iBAAiB,OAAe,MAAe,IAAgC;AAC5F,MAAI;AACJ,MAAI;AACF,gBAAY,MAAM,KAAK,KAAK;AAAA,EAC9B,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,6BAA6B,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxF,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,UAAU,OAAO,GAAG;AACtB,WAAO,CAAC,KAAK;AAAA,EACf;AAEA,MAAI,CAAC,UAAU,YAAY,GAAG;AAC5B,UAAM,IAAI,MAAM,eAAe,KAAK,8BAA8B;AAAA,EACpE;AAGA,QAAM,WAAqB,CAAC;AAC5B,QAAM,QAAQ,OAAO,QAAQ;AAG7B,MAAI,WAAW;AACf,MAAI,QAAQ,IAAI;AACd,eAAW,SAAS,OAAO,CAAC,MAAM;AAChC,YAAM,OAAO,kBAAkB,CAAC;AAChC,UAAI,CAAC,KAAM,QAAO;AAClB,UAAI,QAAQ
,OAAO,KAAM,QAAO;AAChC,UAAI,MAAM,OAAO,KAAK,MAAO,QAAO;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAEA,SAAO,SAAS,KAAK;AACvB;AAGA,eAAe,QAAQ,KAAa,SAAkC;AACpE,QAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAWC,MAAK,KAAK,MAAM,IAAI;AACrC,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,QAAQ,UAAU,OAAO;AAAA,IACjC,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACxD,cAAQ,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AACF;AAUO,SAAS,kBAAkB,UAA0B;AAE1D,QAAM,YAAY,mCAAmC,KAAK,QAAQ;AAClE,MAAI,WAAW;AACb,WAAO,GAAG,UAAU,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC;AAAA,EACxD;AAGA,QAAM,cAAc,gCAAgC,KAAK,QAAQ;AACjE,MAAI,aAAa;AACf,WAAO,GAAG,YAAY,CAAC,CAAC,IAAI,YAAY,CAAC,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;;;AC5TA,SAAS,YAAAC,WAAU,aAAAC,kBAAiB;AACpC,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,OAAO,iBAAiB;;;ACHjC,SAAS,yBAAyB;AAClC,SAAS,SAAAC,QAAO,QAAAC,OAAM,aAAa,mBAAmB;AACtD,SAAS,WAAAC,gBAAe;AACxB,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AAMzB,IAAM,cAAc;AAGpB,IAAM,WAAW;AAGjB,IAAM,sBAAsB;AAG5B,IAAM,cAAc;AAGpB,IAAM,sBAAsB;AAG5B,IAAM,aAAa;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAsFO,SAAS,kBACd,MACA,IACA,MACA,OACQ;AACR,QAAM,SAAS,IAAI,gBAAgB;AACnC,SAAO,IAAI,qCAAqC,IAAI;AACpD,SAAO,IAAI,qCAAqC,EAAE;AAClD,SAAO,IAAI,YAAY,OAAO,QAAQ,CAAC;AACvC,SAAO,IAAI,QAAQ,OAAO,IAAI,CAAC;AAC/B,SAAO,IAAI,SAAS,QAAQ;AAE5B,aAAW,SAAS,YAAY;AAC9B,WAAO,OAAO,YAAY,KAAK;AAAA,EACjC;AAEA,MAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAW,KAAK,OAAO;AACrB,aAAO,OAAO,sBAAsB,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,SAAO,GAAG,WAAW,mBAAmB,OAAO,SAAS,CAAC;AAC3D;AASA,eAAsB,oBAAoB,SAAuD;AAC/F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,cAAc,QAAQ,eAAe;AAE3C,QAAM,QAA4B,CAAC;AACnC,QAAM,SAA8B,CAAC;AACrC,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,sBAAsB;AAG1B,QAAM,SAAS,iBAAiB,QAAQ,MAAM,EAAE;AAEhD,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,UAAU,UAAa,MAAM,UAAU,QAAQ,MAAO;AAGlE,UAAM,YAAkC,CAAC;AACzC,QAAI,OAAO;AACX,QAAI,UAAU;AAEd,WAAO,SAAS;AACd,YAA
M,UAAU,kBAAkB,MAAM,MAAM,MAAM,IAAI,MAAM,QAAQ,KAAK;AAC3E,YAAM,WAAW,MAAM,eAAe,OAAO;AAC7C,YAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,UAAI,OAAO,KAAK,UAAU,UAAU;AAClC,cAAM,IAAI;AAAA,UACR,+BAA+B,OAAO;AAAA,QAExC;AAAA,MACF;AAGA,UAAI,SAAS,GAAG;AACd,+BAAuB,KAAK;AAAA,MAC9B;AAEA,YAAM,UAAU,KAAK,WAAW,CAAC;AAEjC,iBAAW,OAAO,SAAS;AACzB,YAAI,CAAC,IAAI,mBAAmB;AAC1B;AACA;AAAA,QACF;AACA,kBAAU,KAAK,GAAG;AAAA,MACpB;AAEA,gBAAU,QAAQ,KAAK,eAAe;AACtC;AAAA,IACF;AAGA,UAAM,YAAY,QAAQ,UAAU,SAAY,QAAQ,QAAQ,MAAM,SAAS,UAAU;AACzF,UAAM,iBAAiB,UAAU,MAAM,GAAG,SAAS;AACnD,UAAM,aAAa,MAAM,KAAK,MAAM,GAAG,CAAC;AAGxC,UAAM,aAAa,gBAAgB,aAAa,QAAQ,QAAQ,CAAC,KAAK,QAAQ,UAAU;AACtF,UAAI,QAAQ;AACV,cAAM,KAAK,MAAM;AACjB,sBAAc,OAAO;AAAA,MACvB,WAAW,OAAO;AAChB,eAAO,KAAK,EAAE,gBAAgB,IAAI,iBAAiB,MAAM,CAAC;AAAA,MAC5D;AACA,cAAQ,aAAa;AAAA,QACnB,qBAAqB,MAAM;AAAA,QAC3B,gBAAgB;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,cAAc;AAAA,MAChB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,qBAAqB,MAAM;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAsB,yBACpB,gBACA,QAC2B;AAE3B,QAAM,UAAU,GAAG,WAAW,cAAc,cAAc,SAAS,IAAI,gBAAgB,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC;AACzI,QAAM,eAAe,MAAM,eAAe,OAAO;AACjD,QAAM,MAAO,MAAM,aAAa,KAAK;AAErC,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR,qCAAqC,cAAc;AAAA,IACrD;AAAA,EACF;AAEA,SAAO,uBAAuB,KAAK,MAAM;AAC3C;AAQA,eAAe,aACb,MACA,aACA,WACA,YAKe;AACf,MAAI,YAAY;AAEhB,iBAAe,SAAwB;AACrC,WAAO,YAAY,KAAK,QAAQ;AAC9B,YAAM,IAAI;AACV,YAAM,MAAM,KAAK,CAAC;AAClB,UAAI,CAAC,IAAK;AACV,UAAI;AACF,cAAM,SAAS,MAAM,uBAAuB,KAAK,SAAS;AAC1D,mBAAW,KAAK,QAAQ,IAAI;AAAA,MAC9B,SAAS,KAAK;AACZ,mBAAW,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,KAAK,IAAI,aAAa,KAAK,MAAM;AACrD,QAAM,QAAQ,IAAI,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM,OAAO,CAAC,CAAC;AACvE;AAEA,eAAe,uBACb,KACA,WAC2B;AAC3B,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,IAAI,mBAAmB;AAC1B,UAAM,IAAI;AAAA,MACR,YAAY,IAAI,eAAe;A
AAA,IACjC;AAAA,EACF;AAEA,QAAM,UAAU,uBAAuB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAC3F,QAAM,WAAW,wBAAwB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAG7F,QAAMC,OAAMC,SAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AAGjD,QAAM,cAAc,KAAK,UAAU,KAAK,MAAM,CAAC;AAC/C,QAAM,YAAY,UAAU,aAAa,OAAO;AAGhD,QAAM,cAAc,MAAM,eAAe,IAAI,iBAAiB;AAC9D,MAAI,CAAC,YAAY,MAAM;AACrB,UAAM,IAAI,MAAM,wBAAwB,IAAI,eAAe,MAAM;AAAA,EACnE;AAEA,QAAM,OAAO,kBAAkB,OAAO;AACtC,MAAI;AACF,UAAM,SAAS,SAAS,QAAQ,YAAY,IAAa,GAAG,IAAI;AAAA,EAClE,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,oCAAoC,IAAI,eAAe,SAAS,IAAI,iBAAiB,KAChF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACrD,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAGA,QAAM,UAAU,MAAMC,MAAK,OAAO;AAClC,QAAM,WAAW,OAAO,WAAW,aAAa,OAAO;AAEvD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,gBAAgB,IAAI;AAAA,IACpB,iBAAiB,IAAI;AAAA,IACrB,MAAM,OAAO,QAAQ,IAAI,IAAI;AAAA,EAC/B;AACF;AAMO,SAAS,iBAAiB,MAAc,IAAiD;AAC9F,QAAM,SAA8C,CAAC;AAErD,MAAI,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC1C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,aAAa,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE;AAGpD,UAAM,WAAW,IAAI,KAAK,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC,CAAC;AAC1F,UAAM,WAAW,YAAY,MAAM,SAAS,YAAY,EAAE,MAAM,GAAG,EAAE,IAAI;AAEzE,WAAO,KAAK,EAAE,MAAM,YAAY,IAAI,SAAS,CAAC;AAG9C,cAAU,IAAI,KAAK,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC,CAAC;AAAA,EACrF;AAEA,SAAO;AACT;AAGA,eAAsB,eAAe,KAAa,UAAU,GAAsB;AAChF,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AAEZ,QAAI,UAAU,aAAa;AACzB,YAAM,QAAQ,sBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,cAAQ;AAAA,QACN,qBAAqB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,iBAC5D,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,MACjE;AACA,YAAM,MAAM,KAAK;AACjB,aAAO,eAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuB,cAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,GAAI,QAAO;AAGxB,OACG,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAC3E,UAAU,aACV;AACA,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QACJ,CAAC,MAAM,WAAW,KAAK,cAAc,IACjC,cAAc,MAC
d,sBAAsB,KAAK,IAAI,GAAG,OAAO;AAC/C,YAAQ;AAAA,MACN,QAAQ,SAAS,MAAM,QAAQ,GAAG,iBAAiB,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,IACnG;AACA,UAAM,MAAM,KAAK;AACjB,WAAO,eAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;;;AD/WA,eAAsB,kBAAkB,SAAmD;AACzF,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,QAAQ,QAAQ,SAAS;AAE/B,MAAI,WAAW;AACf,MAAI,UAAU;AACd,MAAI,WAAW;AACf,MAAI,QAAQ;AAEZ,QAAM,SAAS,iBAAiB,QAAQ,MAAM,EAAE;AAEhD,aAAW,SAAS,QAAQ;AAC1B,QAAI,OAAO;AACX,QAAI,UAAU;AACd,UAAM,aAAa,MAAM,KAAK,MAAM,GAAG,CAAC;AAExC,WAAO,SAAS;AACd,YAAM,UAAU,kBAAkB,MAAM,MAAM,MAAM,IAAI,IAAI;AAC5D,YAAM,WAAW,MAAM,eAAe,OAAO;AAC7C,YAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,UAAI,OAAO,KAAK,UAAU,UAAU;AAClC,cAAM,IAAI;AAAA,UACR,+BAA+B,OAAO;AAAA,QACxC;AAAA,MACF;AAEA,UAAI,SAAS,GAAG;AACd,iBAAS,KAAK;AAAA,MAChB;AAEA,YAAM,UAAU,KAAK,WAAW,CAAC;AAEjC,iBAAW,OAAO,SAAS;AACzB,YAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,iBAAkB;AAEnD,cAAM,SAAS;AAAA,UACb,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AAEA,YAAI,CAACC,YAAW,MAAM,GAAG;AACvB;AACA,kBAAQ,aAAa;AAAA,YACnB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA,cAAc;AAAA,YACd,iBAAiB,IAAI;AAAA,UACvB,CAAC;AACD;AAAA,QACF;AAEA,cAAM,UAAU,MAAMC,UAAS,QAAQ,OAAO;AAG9C,cAAM,QAAQ,QAAQ,QAAQ,WAAW,CAAC;AAC1C,YAAI,CAAC,QAAQ,WAAW,OAAO,KAAK,UAAU,IAAI;AAChD;AACA;AAAA,QACF;AAEA,cAAM,UAAU,QAAQ,MAAM,GAAG,KAAK;AACtC,cAAM,OAAO,QAAQ,MAAM,QAAQ,CAAC;AAEpC,cAAM,KAAK,MAAM,OAAO;AAGxB,YAAI,CAAC,SAAS,GAAG,aAAa,GAAG;AAC/B;AACA,kBAAQ,aAAa;AAAA,YACnB;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA,cAAc;AAAA,YACd,iBAAiB,IAAI;AAAA,UACvB,CAAC;AACD;AAAA,QACF;AAEA,wBAAgB,IAAI,GAAG;AAEvB,cAAM,UAAU,UAAU,IAAI;AAAA,UAC5B,WAAW;AAAA,UACX,mBAAmB;AAAA,UACnB,gBAAgB;AAAA,QAClB,CAAC;AAED,cAAM,aAAa;AAAA,EAAQ,OAAO;AAAA,EAAQ,IAAI;AAC9C,cAAMC,WAAU,QAAQ,YAAY,OAAO;AAE3C;AACA,gBAAQ,aAAa;AAAA,UACnB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,cAAc;AAAA,UACd,iBAAiB,IAAI;AAAA,QACvB,CAAC;AAAA,MACH;AAEA,gBAAU,QAAQ,KAAK,eAAe;AACtC;AAAA,IACF;A
AAA,EACF;AAEA,SAAO,EAAE,UAAU,SAAS,UAAU,OAAO,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG,EAAE;AACrF;AAKA,SAASC,uBAAsB,SAAyB;AACtD,QAAM,MAA8B;AAAA,IAClC,MAAM;AAAA,IACN,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,yBAAyB;AAAA,EAC3B;AACA,SAAO,IAAI,OAAO,KAAK,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG;AAClE;AAGA,SAAS,gBAAgB,IAA6B,KAA+B;AACnF,MAAI,IAAI,MAAM;AACZ,OAAG,eAAe,IAAIA,uBAAsB,IAAI,IAAI;AAAA,EACtD;AAEA,MAAI,IAAI,UAAU;AAChB,OAAG,aAAa,IAAI,IAAI;AAAA,EAC1B;AAEA,MAAI,IAAI,QAAQ;AACd,OAAG,WAAW,IAAI,IAAI;AAAA,EACxB;AAEA,MAAI,IAAI,kBAAkB;AACxB,OAAG,kBAAkB,IAAI,IAAI;AAC7B,OAAG,UAAU,IAAI,IAAI;AACrB,OAAG,cAAc,IAAI,IAAI;AAAA,EAC3B;AAEA,MAAI,IAAI,YAAY,IAAI,SAAS,SAAS,GAAG;AAC3C,UAAM,CAAC,OAAO,IAAI,IAAI;AACtB,QAAI,QAAS,IAAG,QAAQ,IAAI,QAAQ;AACpC,OAAG,UAAU,IAAI,IAAI,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACjD;AAEA,MAAI,IAAI,kBAAkB,IAAI,eAAe,SAAS,GAAG;AACvD,OAAG,gBAAgB,IAAI,IAAI,eAAe;AAAA,MACxC,CAAC,MAAM,GAAG,EAAE,KAAK,aAAa,EAAE,IAAI;AAAA,IACtC;AAAA,EACF;AAEA,MAAI,IAAI,cAAc,IAAI,WAAW,SAAS,GAAG;AAC/C,OAAG,YAAY,IAAI,IAAI;AAAA,EACzB;AAEA,MAAI,IAAI,yBAAyB,IAAI,sBAAsB,SAAS,GAAG;AACrE,OAAG,KAAK,IAAI,IAAI,sBAAsB,CAAC;AAAA,EACzC;AAEA,MAAI,IAAI,cAAc;AACpB,OAAG,gBAAgB,IAAI,IAAI;AAAA,EAC7B;AAEA,MAAI,IAAI,mBAAmB;AACzB,OAAG,qBAAqB,IAAI,IAAI;AAAA,EAClC;AAEA,MAAI,IAAI,QAAQ;AACd,OAAG,WAAW,IAAI,IAAI;AAAA,EACxB;AAGA,MAAI,IAAI,OAAO;AACb,OAAG,OAAO,IAAI,IAAI;AAClB,OAAG,cAAc,IAAI,IAAI;AAAA,EAC3B;AACF;;;AExPA,SAAS,qBAAAC,0BAAyB;AAClC,SAAS,SAAAC,QAAO,QAAAC,aAAY;AAC5B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAC9B,SAAS,YAAAC,iBAAgB;AACzB,SAAS,YAAAC,iBAAgB;AAGzB,IAAM,eAAe;AAGrB,IAAMC,uBAAsB;AAG5B,IAAMC,eAAc;AAGpB,IAAMC,uBAAsB;AA+DrB,SAAS,kBAAkB,MAAsB;AACtD,SAAO,GAAG,YAAY,OAAO,IAAI,WAAW,IAAI;AAClD;AAMO,SAAS,qBAAqB,MAAc,WAA2B;AAC5E,QAAM,OAAO,KAAK,MAAM,GAAG,CAAC;AAC5B,SAAOL,MAAK,WAAW,QAAQ,MAAM,MAAM,IAAI,MAAM;AACvD;AAMA,eAAsB,eAAe,SAAyD;AAC5F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,cAAc,QAAQ,eAAeG;AAG3C,QAAM,QAAQ,kBAAkB,QAAQ,MAAM,EAAE;AAEhD,QAAM,QAAmC,CAAC;AAC1C,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,SAAS;AAGb,MAAI,YAAY;AAEhB,iBAAe,SAAwB;
AACrC,WAAO,YAAY,MAAM,QAAQ;AAC/B,YAAM,IAAI;AACV,YAAM,OAAO,MAAM,CAAC;AACpB,UAAI,CAAC,KAAM;AAEX,cAAQ,aAAa;AAAA,QACnB,YAAY,MAAM;AAAA,QAClB,WAAW,MAAM;AAAA,QACjB;AAAA,QACA;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AAED,YAAM,MAAM,kBAAkB,IAAI;AAClC,YAAM,WAAW,qBAAqB,MAAM,QAAQ,MAAM;AAE1D,UAAI;AACF,cAAM,SAAS,MAAM,kBAAkB,KAAK,UAAU,IAAI;AAC1D,YAAI,QAAQ;AACV,gBAAM,KAAK,MAAM;AACjB,wBAAc,OAAO;AAAA,QACvB,OAAO;AAEL;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,gBAAQ;AAAA,UACN,+BAA+B,IAAI,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,QAC1F;AACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,KAAK,IAAI,aAAa,MAAM,MAAM;AACtD,QAAM,QAAQ,IAAI,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM,OAAO,CAAC,CAAC;AAGrE,UAAQ,aAAa;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,WAAW,MAAM;AAAA,IACjB;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf,CAAC;AAED,SAAO;AAAA,IACL,iBAAiB,MAAM;AAAA,IACvB;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAe,kBACb,KACA,UACA,MACyC;AACzC,QAAM,WAAW,MAAMG,gBAAe,GAAG;AAEzC,MAAI,SAAS,WAAW,KAAK;AAC3B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,EAAE;AAAA,EACtD;AAEA,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,MAAM,wBAAwB,GAAG,EAAE;AAAA,EAC/C;AAEA,QAAMT,OAAME,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,QAAM,OAAOH,mBAAkB,QAAQ;AACvC,QAAMK,UAASC,UAAS,QAAQ,SAAS,IAAa,GAAG,IAAI;AAE7D,QAAM,WAAW,MAAMJ,MAAK,QAAQ;AAEpC,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA,MAAM,OAAO,SAAS,IAAI;AAAA,EAC5B;AACF;AAKA,SAAS,kBAAkB,MAAc,IAAsB;AAC7D,QAAM,QAAkB,CAAC;AACzB,QAAM,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC5C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,KAAK,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAC7C,YAAQ,WAAW,QAAQ,WAAW,IAAI,CAAC;AAAA,EAC7C;AAEA,SAAO;AACT;AAGA,eAAeQ,gBAAe,KAAa,UAAU,GAAsB;AACzE,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AACZ,QAAI,UAAUF,cAAa;AACzB,YAAM,QAAQC,uBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,YAAME,OAAM,KAAK;AACjB,aAAOD,gBAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuBF,eAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EA
AE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,MAAM,SAAS,WAAW,IAAK,QAAO;AAEnD,OACG,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAC3E,UAAUA,cACV;AACA,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QACJ,CAAC,MAAM,WAAW,KAAK,cAAc,IACjC,cAAc,MACdC,uBAAsB,KAAK,IAAI,GAAG,OAAO;AAC/C,UAAME,OAAM,KAAK;AACjB,WAAOD,gBAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAASC,OAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;","names":["contentNode","frame","join","join","readFile","writeFile","existsSync","mkdir","stat","dirname","mkdir","dirname","stat","existsSync","readFile","writeFile","normalizeDocumentType","createWriteStream","mkdir","stat","dirname","join","pipeline","Readable","DEFAULT_CONCURRENCY","MAX_RETRIES","RETRY_BASE_DELAY_MS","fetchWithRetry","sleep"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lexbuild/fr",
3
- "version": "1.15.2",
3
+ "version": "1.16.0",
4
4
  "description": "Federal Register XML to Markdown converter for LexBuild",
5
5
  "author": "Chris Thomas",
6
6
  "license": "MIT",
@@ -41,7 +41,8 @@
41
41
  "dist"
42
42
  ],
43
43
  "dependencies": {
44
- "@lexbuild/core": "1.15.2"
44
+ "yaml": "^2.8.3",
45
+ "@lexbuild/core": "1.16.0"
45
46
  },
46
47
  "devDependencies": {
47
48
  "@types/node": "^25.3.2",