@lexbuild/fr 1.14.1 → 1.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -35,6 +35,8 @@ interface FrDocumentXmlMeta {
35
35
  rin?: string | undefined;
36
36
  /** FR document number extracted from FRDOC text */
37
37
  documentNumber?: string | undefined;
38
+ /** Publication date inferred from FRDOC filing date (YYYY-MM-DD) */
39
+ publicationDate?: string | undefined;
38
40
  }
39
41
  /**
40
42
  * Federal Register AST Builder.
@@ -274,10 +276,22 @@ declare function buildYearDir(year: string, outputRoot: string): string;
274
276
  * enriches frontmatter with JSON sidecar metadata, renders via core's
275
277
  * renderDocument, and writes structured Markdown output.
276
278
  *
277
- * Processes FR documents in two passes: (1) parse all files and register
278
- * identifiers for link resolution, (2) render and write output files.
279
+ * Processes FR documents in a single streaming pass: parse each XML file,
280
+ * render Markdown, and write output immediately. No link pre-registration
281
+ * since FR documents rarely cross-reference each other.
279
282
  */
280
283
 
284
+ /** Progress info for conversion callback */
285
+ interface FrConvertProgress {
286
+ /** Documents converted so far */
287
+ documentsConverted: number;
288
+ /** XML files processed so far */
289
+ filesProcessed: number;
290
+ /** Total XML files to process */
291
+ totalFiles: number;
292
+ /** Current XML file being processed */
293
+ currentFile: string;
294
+ }
281
295
  /** Options for converting FR documents */
282
296
  interface FrConvertOptions {
283
297
  /** Path to input file or directory containing .xml/.json files */
@@ -294,6 +308,8 @@ interface FrConvertOptions {
294
308
  to?: string | undefined;
295
309
  /** Filter: document types */
296
310
  types?: FrDocumentType[] | undefined;
311
+ /** Progress callback */
312
+ onProgress?: ((progress: FrConvertProgress) => void) | undefined;
297
313
  }
298
314
  /** Result of a conversion operation */
299
315
  interface FrConvertResult {
@@ -339,8 +355,8 @@ interface FrDownloadOptions {
339
355
  types?: FrDocumentType[] | undefined;
340
356
  /** Maximum number of documents to download (for testing) */
341
357
  limit?: number | undefined;
342
- /** Delay between XML fetches in milliseconds */
343
- fetchDelayMs?: number | undefined;
358
+ /** Number of concurrent XML downloads (default 10) */
359
+ concurrency?: number | undefined;
344
360
  /** Progress callback */
345
361
  onProgress?: ((progress: FrDownloadProgress) => void) | undefined;
346
362
  }
@@ -401,7 +417,8 @@ declare function buildFrApiListUrl(from: string, to: string, page: number, types
401
417
  * Download FR documents for a date range.
402
418
  *
403
419
  * Automatically chunks large date ranges into month-sized windows to stay
404
- * under the API's 10,000 result cap per query.
420
+ * under the API's 10,000 result cap per query. Within each chunk, document
421
+ * XML files are downloaded concurrently (default 10 at a time).
405
422
  */
406
423
  declare function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDownloadResult>;
407
424
  /**
@@ -411,4 +428,85 @@ declare function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDown
411
428
  */
412
429
  declare function downloadSingleFrDocument(documentNumber: string, output: string): Promise<FrDownloadedFile>;
413
430
 
414
- export { FR_BLOCK_ELEMENTS, FR_CONTENT_ELEMENTS, FR_DOCUMENT_ELEMENTS, FR_DOCUMENT_TYPE_KEYS, FR_DOCUMENT_TYPE_MAP, FR_EMPHASIS_MAP, FR_HD_SOURCE_TO_DEPTH, FR_HEADING_ELEMENT, FR_IGNORE_ELEMENTS, FR_INLINE_ELEMENTS, FR_NOTE_ELEMENTS, FR_PASSTHROUGH_ELEMENTS, FR_PREAMBLE_META_ELEMENTS, FR_PREAMBLE_SECTIONS, FR_PRESIDENTIAL_SUBTYPES, FR_REGTEXT_ELEMENTS, FR_SECTION_CONTAINERS, FR_SIGNATURE_ELEMENTS, FR_SKIP_ELEMENTS, FR_TABLE_ELEMENTS, FrASTBuilder, type FrASTBuilderOptions, type FrConvertOptions, type FrConvertResult, type FrDocumentJsonMeta, type FrDocumentType, type FrDocumentXmlMeta, type FrDownloadFailure, type FrDownloadOptions, type FrDownloadProgress, type FrDownloadResult, type FrDownloadedFile, buildFrApiListUrl, buildFrDownloadJsonPath, buildFrDownloadXmlPath, buildFrFrontmatter, buildFrOutputPath, buildMonthDir, buildYearDir, convertFrDocuments, downloadFrDocuments, downloadSingleFrDocument };
431
+ /**
432
+ * Federal Register govinfo bulk downloader.
433
+ *
434
+ * Downloads complete daily-issue XML files from govinfo.gov. Each file contains
435
+ * all FR documents published on a single day (~150 documents, ~2.4 MB average).
436
+ * This is dramatically faster than the per-document API for historical backfill.
437
+ *
438
+ * URL pattern: https://www.govinfo.gov/content/pkg/FR-{YYYY-MM-DD}/xml/FR-{YYYY-MM-DD}.xml
439
+ *
440
+ * The existing FrASTBuilder handles daily-issue XML natively: FEDREG root is a
441
+ * passthrough, section containers (RULES, NOTICES, etc.) are passthroughs, and
442
+ * individual document elements emit via onEmit. No splitter needed.
443
+ */
444
+ /** Options for downloading FR bulk XML from govinfo */
445
+ interface FrGovinfoBulkOptions {
446
+ /** Download directory (e.g., "./downloads/fr") */
447
+ output: string;
448
+ /** Start date (YYYY-MM-DD, inclusive) */
449
+ from: string;
450
+ /** End date (YYYY-MM-DD, inclusive). Defaults to today. */
451
+ to?: string | undefined;
452
+ /** Number of concurrent downloads (default 10) */
453
+ concurrency?: number | undefined;
454
+ /** Progress callback */
455
+ onProgress?: ((progress: FrGovinfoProgress) => void) | undefined;
456
+ }
457
+ /** Progress info for govinfo download callback */
458
+ interface FrGovinfoProgress {
459
+ /** Files downloaded so far */
460
+ downloaded: number;
461
+ /** Total publishing days in date range */
462
+ totalDays: number;
463
+ /** Skipped days (weekends/holidays — 404) */
464
+ skipped: number;
465
+ /** Failed downloads */
466
+ failed: number;
467
+ /** Current date being downloaded */
468
+ currentDate: string;
469
+ }
470
+ /** A successfully downloaded bulk file */
471
+ interface FrGovinfoDownloadedFile {
472
+ /** Absolute path to the downloaded XML file */
473
+ path: string;
474
+ /** Publication date (YYYY-MM-DD) */
475
+ date: string;
476
+ /** File size in bytes */
477
+ size: number;
478
+ }
479
+ /** Result of a govinfo bulk download */
480
+ interface FrGovinfoResult {
481
+ /** Number of daily files downloaded */
482
+ filesDownloaded: number;
483
+ /** Downloaded files */
484
+ files: FrGovinfoDownloadedFile[];
485
+ /** Total bytes downloaded */
486
+ totalBytes: number;
487
+ /** Date range covered */
488
+ dateRange: {
489
+ from: string;
490
+ to: string;
491
+ };
492
+ /** Days skipped (no issue published — weekends/holidays) */
493
+ skipped: number;
494
+ /** Days that failed to download */
495
+ failed: number;
496
+ }
497
+ /**
498
+ * Build the govinfo download URL for a single day's FR issue.
499
+ */
500
+ declare function buildGovinfoFrUrl(date: string): string;
501
+ /**
502
+ * Build the local file path for a downloaded daily-issue XML.
503
+ * Stored as: {output}/bulk/{YYYY}/FR-{YYYY-MM-DD}.xml
504
+ */
505
+ declare function buildGovinfoBulkPath(date: string, outputDir: string): string;
506
+ /**
507
+ * Download FR daily-issue XML files from govinfo for a date range.
508
+ * Skips weekends/holidays (404 responses) and retries transient errors.
509
+ */
510
+ declare function downloadFrBulk(options: FrGovinfoBulkOptions): Promise<FrGovinfoResult>;
511
+
512
+ export { FR_BLOCK_ELEMENTS, FR_CONTENT_ELEMENTS, FR_DOCUMENT_ELEMENTS, FR_DOCUMENT_TYPE_KEYS, FR_DOCUMENT_TYPE_MAP, FR_EMPHASIS_MAP, FR_HD_SOURCE_TO_DEPTH, FR_HEADING_ELEMENT, FR_IGNORE_ELEMENTS, FR_INLINE_ELEMENTS, FR_NOTE_ELEMENTS, FR_PASSTHROUGH_ELEMENTS, FR_PREAMBLE_META_ELEMENTS, FR_PREAMBLE_SECTIONS, FR_PRESIDENTIAL_SUBTYPES, FR_REGTEXT_ELEMENTS, FR_SECTION_CONTAINERS, FR_SIGNATURE_ELEMENTS, FR_SKIP_ELEMENTS, FR_TABLE_ELEMENTS, FrASTBuilder, type FrASTBuilderOptions, type FrConvertOptions, type FrConvertProgress, type FrConvertResult, type FrDocumentJsonMeta, type FrDocumentType, type FrDocumentXmlMeta, type FrDownloadFailure, type FrDownloadOptions, type FrDownloadProgress, type FrDownloadResult, type FrDownloadedFile, type FrGovinfoBulkOptions, type FrGovinfoDownloadedFile, type FrGovinfoProgress, type FrGovinfoResult, buildFrApiListUrl, buildFrDownloadJsonPath, buildFrDownloadXmlPath, buildFrFrontmatter, buildFrOutputPath, buildGovinfoBulkPath, buildGovinfoFrUrl, buildMonthDir, buildYearDir, convertFrDocuments, downloadFrBulk, downloadFrDocuments, downloadSingleFrDocument };
package/dist/index.js CHANGED
@@ -77,8 +77,8 @@ var FR_INLINE_ELEMENTS = /* @__PURE__ */ new Set([
77
77
  var FR_EMPHASIS_MAP = {
78
78
  "01": "bold",
79
79
  "02": "italic",
80
- "03": "bold",
81
- // bold italic in print — treat as bold for Markdown
80
+ "03": "italic",
81
+ // bold italic in print — FR uses T="03" for case names, citations, and publication titles which render as italic
82
82
  "04": "italic",
83
83
  // italic in headings
84
84
  "05": "italic",
@@ -289,12 +289,18 @@ var FrASTBuilder = class {
289
289
  return;
290
290
  }
291
291
  if (name === FR_FTREF_ELEMENT) {
292
- const node = {
293
- type: "inline",
294
- inlineType: "footnoteRef",
295
- idref: attrs["ID"]
296
- };
297
- this.stack.push({ kind: "inline", elementName: name, node, textBuffer: "" });
292
+ const parentFrame = this.stack[this.stack.length - 1];
293
+ if (parentFrame?.kind === "content" && parentFrame.node?.type === "content") {
294
+ const contentNode = parentFrame.node;
295
+ for (let i = contentNode.children.length - 1; i >= 0; i--) {
296
+ const child = contentNode.children[i];
297
+ if (child?.type === "inline" && child.inlineType === "sup") {
298
+ child.inlineType = "footnoteRef";
299
+ break;
300
+ }
301
+ }
302
+ }
303
+ this.ignoredContainerDepth = 1;
298
304
  return;
299
305
  }
300
306
  if (FR_NOTE_ELEMENTS.has(name)) {
@@ -414,25 +420,29 @@ var FrASTBuilder = class {
414
420
  }
415
421
  if (frame.kind === "content" && frame.node?.type === "content") {
416
422
  const contentNode = frame.node;
417
- if (text) {
423
+ const normalized = text.replace(/\s+/g, " ");
424
+ if (normalized && normalized !== " ") {
418
425
  contentNode.children.push({
419
426
  type: "inline",
420
427
  inlineType: "text",
421
- text
428
+ text: normalized
422
429
  });
423
430
  }
424
431
  return;
425
432
  }
426
433
  if (frame.kind === "inline" && frame.node?.type === "inline") {
427
434
  const inlineNode = frame.node;
435
+ const normalized = text.replace(/\s+/g, " ");
428
436
  if (inlineNode.children) {
429
- inlineNode.children.push({
430
- type: "inline",
431
- inlineType: "text",
432
- text
433
- });
437
+ if (normalized && normalized !== " ") {
438
+ inlineNode.children.push({
439
+ type: "inline",
440
+ inlineType: "text",
441
+ text: normalized
442
+ });
443
+ }
434
444
  } else {
435
- inlineNode.text = (inlineNode.text ?? "") + text;
445
+ inlineNode.text = (inlineNode.text ?? "") + normalized;
436
446
  }
437
447
  return;
438
448
  }
@@ -590,7 +600,8 @@ var FrASTBuilder = class {
590
600
  } else if (elementName === "B") {
591
601
  inlineType = "bold";
592
602
  } else if (elementName === "SU") {
593
- inlineType = "sup";
603
+ const insideFootnote = this.findFrame("note") !== void 0;
604
+ inlineType = insideFootnote ? "footnoteRef" : "sup";
594
605
  } else if (elementName === "FR") {
595
606
  inlineType = "text";
596
607
  } else if (elementName === "E") {
@@ -887,9 +898,26 @@ var FrASTBuilder = class {
887
898
  const frame = this.popFrame(FR_FRDOC_ELEMENT);
888
899
  if (!frame || frame.kind !== "frdoc") return;
889
900
  const text = frame.textBuffer.trim();
890
- const match = /FR\s+Doc\.\s+([\d-]+)/i.exec(text);
891
- if (match) {
892
- this.currentDocMeta.documentNumber = match[1];
901
+ const docMatch = /FR\s+Doc\.\s+([\w-]+)/i.exec(text);
902
+ if (docMatch) {
903
+ this.currentDocMeta.documentNumber = docMatch[1];
904
+ }
905
+ const dateMatch = /Filed\s+(\d{1,2})-(\d{1,2})-(\d{2})\b/.exec(text);
906
+ if (dateMatch) {
907
+ const [, mmStr, ddStr, yyStr] = dateMatch;
908
+ const mm = parseInt(mmStr ?? "0", 10);
909
+ const dd = parseInt(ddStr ?? "0", 10);
910
+ const yy = parseInt(yyStr ?? "0", 10);
911
+ const fullYear = yy < 50 ? 2e3 + yy : 1900 + yy;
912
+ const filed = new Date(fullYear, mm - 1, dd);
913
+ if (filed.getMonth() !== mm - 1 || filed.getDate() !== dd) {
914
+ return;
915
+ }
916
+ filed.setDate(filed.getDate() + 1);
917
+ const pubYear = filed.getFullYear();
918
+ const pubMonth = String(filed.getMonth() + 1).padStart(2, "0");
919
+ const pubDay = String(filed.getDate()).padStart(2, "0");
920
+ this.currentDocMeta.publicationDate = `${pubYear}-${pubMonth}-${pubDay}`;
893
921
  }
894
922
  }
895
923
  // ── Private helpers: Stack navigation ──
@@ -958,7 +986,7 @@ function normalizeDocumentType(apiType) {
958
986
  function buildFrFrontmatter(node, _context, xmlMeta, jsonMeta) {
959
987
  const documentNumber = jsonMeta?.document_number ?? xmlMeta.documentNumber ?? "";
960
988
  const subject = jsonMeta?.title ?? xmlMeta.subject ?? node.heading ?? "";
961
- const publicationDate = jsonMeta?.publication_date ?? "";
989
+ const publicationDate = jsonMeta?.publication_date ?? xmlMeta.publicationDate ?? "";
962
990
  const documentType = jsonMeta ? normalizeDocumentType(jsonMeta.type) : xmlMeta.documentTypeNormalized;
963
991
  let agencies;
964
992
  if (jsonMeta?.agencies && jsonMeta.agencies.length > 0) {
@@ -1056,64 +1084,30 @@ import {
1056
1084
  var FR_DOC_TYPE_SET = new Set(FR_DOCUMENT_TYPE_KEYS);
1057
1085
  async function convertFrDocuments(options) {
1058
1086
  const xmlFiles = await discoverXmlFiles(options.input, options.from, options.to);
1059
- const files = [];
1087
+ let documentsConverted = 0;
1060
1088
  let totalTokenEstimate = 0;
1061
1089
  let peakMemoryBytes = 0;
1062
1090
  const linkResolver = createLinkResolver();
1063
- const parsedFiles = /* @__PURE__ */ new Map();
1091
+ let filesProcessed = 0;
1064
1092
  for (const xmlPath of xmlFiles) {
1093
+ let collected;
1065
1094
  try {
1066
- const collected = await parseXmlFile(xmlPath);
1067
- parsedFiles.set(xmlPath, collected);
1095
+ collected = await parseXmlFile(xmlPath);
1068
1096
  } catch (err) {
1069
1097
  console.warn(
1070
1098
  `Warning: Failed to parse ${xmlPath}: ${err instanceof Error ? err.message : String(err)}. Skipping.`
1071
1099
  );
1100
+ continue;
1072
1101
  }
1073
- }
1074
- for (const [, collected] of parsedFiles) {
1075
1102
  for (const doc of collected) {
1076
1103
  if (options.types && options.types.length > 0) {
1077
1104
  if (!FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) || !options.types.includes(doc.xmlMeta.documentType)) {
1078
1105
  continue;
1079
1106
  }
1080
1107
  }
1081
- if (doc.node.identifier) {
1082
- const outputPath = buildFrOutputPath(
1083
- doc.documentNumber,
1084
- doc.publicationDate,
1085
- options.output
1086
- );
1087
- linkResolver.register(doc.node.identifier, outputPath);
1088
- }
1089
- }
1090
- }
1091
- if (options.dryRun) {
1092
- let count = 0;
1093
- for (const [, collected] of parsedFiles) {
1094
- for (const doc of collected) {
1095
- if (options.types && options.types.length > 0) {
1096
- if (!FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) || !options.types.includes(doc.xmlMeta.documentType)) {
1097
- continue;
1098
- }
1099
- }
1100
- count++;
1101
- }
1102
- }
1103
- return {
1104
- documentsConverted: count,
1105
- files: [],
1106
- totalTokenEstimate: 0,
1107
- peakMemoryBytes: 0,
1108
- dryRun: true
1109
- };
1110
- }
1111
- for (const [, collected] of parsedFiles) {
1112
- for (const doc of collected) {
1113
- if (options.types && options.types.length > 0) {
1114
- if (!FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) || !options.types.includes(doc.xmlMeta.documentType)) {
1115
- continue;
1116
- }
1108
+ if (options.dryRun) {
1109
+ documentsConverted++;
1110
+ continue;
1117
1111
  }
1118
1112
  const outputPath = buildFrOutputPath(
1119
1113
  doc.documentNumber,
@@ -1128,21 +1122,28 @@ async function convertFrDocuments(options) {
1128
1122
  });
1129
1123
  await mkdir(dirname(outputPath), { recursive: true });
1130
1124
  await writeFile(outputPath, markdown, "utf-8");
1131
- files.push(outputPath);
1132
- const tokenEstimate = Math.round(markdown.length / 4);
1133
- totalTokenEstimate += tokenEstimate;
1125
+ documentsConverted++;
1126
+ totalTokenEstimate += Math.round(markdown.length / 4);
1134
1127
  const mem = process.memoryUsage().rss;
1135
1128
  if (mem > peakMemoryBytes) {
1136
1129
  peakMemoryBytes = mem;
1137
1130
  }
1138
1131
  }
1132
+ filesProcessed++;
1133
+ options.onProgress?.({
1134
+ documentsConverted,
1135
+ filesProcessed,
1136
+ totalFiles: xmlFiles.length,
1137
+ currentFile: xmlPath
1138
+ });
1139
1139
  }
1140
1140
  return {
1141
- documentsConverted: files.length,
1142
- files,
1141
+ documentsConverted,
1142
+ files: [],
1143
+ // Don't accumulate 750k+ file paths in memory
1143
1144
  totalTokenEstimate,
1144
1145
  peakMemoryBytes,
1145
- dryRun: false
1146
+ dryRun: options.dryRun
1146
1147
  };
1147
1148
  }
1148
1149
  async function parseXmlFile(xmlPath) {
@@ -1241,9 +1242,13 @@ async function walkDir(dir, results) {
1241
1242
  }
1242
1243
  }
1243
1244
  function inferDateFromPath(filePath) {
1244
- const match = /(\d{4})\/(\d{2})\/[^/]+\.xml$/.exec(filePath);
1245
- if (match) {
1246
- return `${match[1]}-${match[2]}-01`;
1245
+ const bulkMatch = /FR-(\d{4})-(\d{2})-(\d{2})\.xml$/.exec(filePath);
1246
+ if (bulkMatch) {
1247
+ return `${bulkMatch[1]}-${bulkMatch[2]}-${bulkMatch[3]}`;
1248
+ }
1249
+ const perDocMatch = /(\d{4})\/(\d{2})\/[^/]+\.xml$/.exec(filePath);
1250
+ if (perDocMatch) {
1251
+ return `${perDocMatch[1]}-${perDocMatch[2]}-01`;
1247
1252
  }
1248
1253
  return "";
1249
1254
  }
@@ -1256,7 +1261,7 @@ import { pipeline } from "stream/promises";
1256
1261
  import { Readable } from "stream";
1257
1262
  var FR_API_BASE = "https://www.federalregister.gov/api/v1";
1258
1263
  var PER_PAGE = 200;
1259
- var DEFAULT_FETCH_DELAY_MS = 100;
1264
+ var DEFAULT_CONCURRENCY = 10;
1260
1265
  var MAX_RETRIES = 2;
1261
1266
  var RETRY_BASE_DELAY_MS = 2e3;
1262
1267
  var API_FIELDS = [
@@ -1299,7 +1304,7 @@ function buildFrApiListUrl(from, to, page, types) {
1299
1304
  }
1300
1305
  async function downloadFrDocuments(options) {
1301
1306
  const to = options.to ?? (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
1302
- const fetchDelay = options.fetchDelayMs ?? DEFAULT_FETCH_DELAY_MS;
1307
+ const concurrency = options.concurrency ?? DEFAULT_CONCURRENCY;
1303
1308
  const files = [];
1304
1309
  const failed = [];
1305
1310
  let totalBytes = 0;
@@ -1308,6 +1313,7 @@ async function downloadFrDocuments(options) {
1308
1313
  const chunks = buildMonthChunks(options.from, to);
1309
1314
  for (const chunk of chunks) {
1310
1315
  if (options.limit !== void 0 && files.length >= options.limit) break;
1316
+ const chunkDocs = [];
1311
1317
  let page = 1;
1312
1318
  let hasMore = true;
1313
1319
  while (hasMore) {
@@ -1319,39 +1325,37 @@ async function downloadFrDocuments(options) {
1319
1325
  `Unexpected API response for ${listUrl}: missing or invalid 'count' field. The FederalRegister.gov API may have changed its response format.`
1320
1326
  );
1321
1327
  }
1322
- if (page === 1 && totalDocumentsFound === 0) {
1323
- totalDocumentsFound = data.count;
1328
+ if (page === 1) {
1329
+ totalDocumentsFound += data.count;
1324
1330
  }
1325
1331
  const results = data.results ?? [];
1326
1332
  for (const doc of results) {
1327
- if (options.limit !== void 0 && files.length >= options.limit) {
1328
- hasMore = false;
1329
- break;
1330
- }
1331
- options.onProgress?.({
1332
- documentsDownloaded: files.length,
1333
- totalDocuments: totalDocumentsFound,
1334
- currentDocument: doc.document_number,
1335
- currentChunk: `${chunk.from.slice(0, 7)}`
1336
- });
1337
1333
  if (!doc.full_text_xml_url) {
1338
1334
  skipped++;
1339
1335
  continue;
1340
1336
  }
1341
- try {
1342
- const result = await downloadSingleDocument(doc, options.output, fetchDelay);
1343
- files.push(result);
1344
- totalBytes += result.size;
1345
- } catch (err) {
1346
- failed.push({
1347
- documentNumber: doc.document_number,
1348
- error: err instanceof Error ? err.message : String(err)
1349
- });
1350
- }
1337
+ chunkDocs.push(doc);
1351
1338
  }
1352
- hasMore = hasMore && page < (data.total_pages ?? 0);
1339
+ hasMore = page < (data.total_pages ?? 0);
1353
1340
  page++;
1354
1341
  }
1342
+ const remaining = options.limit !== void 0 ? options.limit - files.length : chunkDocs.length;
1343
+ const docsToDownload = chunkDocs.slice(0, remaining);
1344
+ const chunkLabel = chunk.from.slice(0, 7);
1345
+ await downloadPool(docsToDownload, concurrency, options.output, (doc, result, error) => {
1346
+ if (result) {
1347
+ files.push(result);
1348
+ totalBytes += result.size;
1349
+ } else if (error) {
1350
+ failed.push({ documentNumber: doc.document_number, error });
1351
+ }
1352
+ options.onProgress?.({
1353
+ documentsDownloaded: files.length,
1354
+ totalDocuments: totalDocumentsFound,
1355
+ currentDocument: doc.document_number,
1356
+ currentChunk: chunkLabel
1357
+ });
1358
+ });
1355
1359
  }
1356
1360
  return {
1357
1361
  documentsDownloaded: files.length,
@@ -1371,9 +1375,27 @@ async function downloadSingleFrDocument(documentNumber, output) {
1371
1375
  `Invalid API response for document ${documentNumber}: missing document_number or publication_date`
1372
1376
  );
1373
1377
  }
1374
- return downloadSingleDocument(doc, output, 0);
1378
+ return downloadSingleDocument(doc, output);
1375
1379
  }
1376
- async function downloadSingleDocument(doc, outputDir, fetchDelay) {
1380
+ async function downloadPool(docs, concurrency, outputDir, onComplete) {
1381
+ let nextIndex = 0;
1382
+ async function worker() {
1383
+ while (nextIndex < docs.length) {
1384
+ const i = nextIndex++;
1385
+ const doc = docs[i];
1386
+ if (!doc) break;
1387
+ try {
1388
+ const result = await downloadSingleDocument(doc, outputDir);
1389
+ onComplete(doc, result, null);
1390
+ } catch (err) {
1391
+ onComplete(doc, null, err instanceof Error ? err.message : String(err));
1392
+ }
1393
+ }
1394
+ }
1395
+ const workerCount = Math.min(concurrency, docs.length);
1396
+ await Promise.all(Array.from({ length: workerCount }, () => worker()));
1397
+ }
1398
+ async function downloadSingleDocument(doc, outputDir) {
1377
1399
  if (!doc.document_number || !doc.publication_date) {
1378
1400
  throw new Error(
1379
1401
  `Invalid document in API response: missing document_number or publication_date`
@@ -1389,9 +1411,6 @@ async function downloadSingleDocument(doc, outputDir, fetchDelay) {
1389
1411
  await mkdir2(dirname2(xmlPath), { recursive: true });
1390
1412
  const jsonContent = JSON.stringify(doc, null, 2);
1391
1413
  await fsWriteFile(jsonPath, jsonContent, "utf-8");
1392
- if (fetchDelay > 0) {
1393
- await sleep(fetchDelay);
1394
- }
1395
1414
  const xmlResponse = await fetchWithRetry(doc.full_text_xml_url);
1396
1415
  if (!xmlResponse.body) {
1397
1416
  throw new Error(`No response body for ${doc.document_number} XML`);
@@ -1466,6 +1485,138 @@ async function fetchWithRetry(url, attempt = 0) {
1466
1485
  function sleep(ms) {
1467
1486
  return new Promise((resolve) => setTimeout(resolve, ms));
1468
1487
  }
1488
+
1489
+ // src/govinfo-downloader.ts
1490
+ import { createWriteStream as createWriteStream2 } from "fs";
1491
+ import { mkdir as mkdir3, stat as stat3 } from "fs/promises";
1492
+ import { dirname as dirname3, join as join3 } from "path";
1493
+ import { pipeline as pipeline2 } from "stream/promises";
1494
+ import { Readable as Readable2 } from "stream";
1495
+ var GOVINFO_BASE = "https://www.govinfo.gov/content/pkg";
1496
+ var DEFAULT_CONCURRENCY2 = 10;
1497
+ var MAX_RETRIES2 = 2;
1498
+ var RETRY_BASE_DELAY_MS2 = 2e3;
1499
+ function buildGovinfoFrUrl(date) {
1500
+ return `${GOVINFO_BASE}/FR-${date}/xml/FR-${date}.xml`;
1501
+ }
1502
+ function buildGovinfoBulkPath(date, outputDir) {
1503
+ const year = date.slice(0, 4);
1504
+ return join3(outputDir, "bulk", year, `FR-${date}.xml`);
1505
+ }
1506
+ async function downloadFrBulk(options) {
1507
+ const to = options.to ?? (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
1508
+ const concurrency = options.concurrency ?? DEFAULT_CONCURRENCY2;
1509
+ const dates = generateDateRange(options.from, to);
1510
+ const files = [];
1511
+ let totalBytes = 0;
1512
+ let skipped = 0;
1513
+ let failed = 0;
1514
+ let nextIndex = 0;
1515
+ async function worker() {
1516
+ while (nextIndex < dates.length) {
1517
+ const i = nextIndex++;
1518
+ const date = dates[i];
1519
+ if (!date) break;
1520
+ options.onProgress?.({
1521
+ downloaded: files.length,
1522
+ totalDays: dates.length,
1523
+ skipped,
1524
+ failed,
1525
+ currentDate: date
1526
+ });
1527
+ const url = buildGovinfoFrUrl(date);
1528
+ const filePath = buildGovinfoBulkPath(date, options.output);
1529
+ try {
1530
+ const result = await downloadSingleDay(url, filePath, date);
1531
+ if (result) {
1532
+ files.push(result);
1533
+ totalBytes += result.size;
1534
+ } else {
1535
+ skipped++;
1536
+ }
1537
+ } catch (err) {
1538
+ console.warn(`Warning: Failed to download ${date}: ${err instanceof Error ? err.message : String(err)}`);
1539
+ failed++;
1540
+ }
1541
+ }
1542
+ }
1543
+ const workerCount = Math.min(concurrency, dates.length);
1544
+ await Promise.all(Array.from({ length: workerCount }, () => worker()));
1545
+ options.onProgress?.({
1546
+ downloaded: files.length,
1547
+ totalDays: dates.length,
1548
+ skipped,
1549
+ failed,
1550
+ currentDate: "done"
1551
+ });
1552
+ return {
1553
+ filesDownloaded: files.length,
1554
+ files,
1555
+ totalBytes,
1556
+ dateRange: { from: options.from, to },
1557
+ skipped,
1558
+ failed
1559
+ };
1560
+ }
1561
+ async function downloadSingleDay(url, filePath, date) {
1562
+ const response = await fetchWithRetry2(url);
1563
+ if (response.status === 404) {
1564
+ return null;
1565
+ }
1566
+ if (!response.ok) {
1567
+ throw new Error(`HTTP ${response.status} for ${url}`);
1568
+ }
1569
+ if (!response.body) {
1570
+ throw new Error(`No response body for ${url}`);
1571
+ }
1572
+ await mkdir3(dirname3(filePath), { recursive: true });
1573
+ const dest = createWriteStream2(filePath);
1574
+ await pipeline2(Readable2.fromWeb(response.body), dest);
1575
+ const fileStat = await stat3(filePath);
1576
+ return {
1577
+ path: filePath,
1578
+ date,
1579
+ size: Number(fileStat.size)
1580
+ };
1581
+ }
1582
+ function generateDateRange(from, to) {
1583
+ const dates = [];
1584
+ const current = /* @__PURE__ */ new Date(from + "T12:00:00Z");
1585
+ const end = /* @__PURE__ */ new Date(to + "T12:00:00Z");
1586
+ while (current <= end) {
1587
+ dates.push(current.toISOString().slice(0, 10));
1588
+ current.setUTCDate(current.getUTCDate() + 1);
1589
+ }
1590
+ return dates;
1591
+ }
1592
+ async function fetchWithRetry2(url, attempt = 0) {
1593
+ let response;
1594
+ try {
1595
+ response = await fetch(url);
1596
+ } catch (err) {
1597
+ if (attempt < MAX_RETRIES2) {
1598
+ const delay = RETRY_BASE_DELAY_MS2 * Math.pow(2, attempt);
1599
+ await sleep2(delay);
1600
+ return fetchWithRetry2(url, attempt + 1);
1601
+ }
1602
+ throw new Error(
1603
+ `Network error after ${MAX_RETRIES2 + 1} attempts for ${url}: ${err instanceof Error ? err.message : String(err)}`,
1604
+ { cause: err }
1605
+ );
1606
+ }
1607
+ if (response.ok || response.status === 404) return response;
1608
+ if ((response.status === 429 || response.status === 503 || response.status === 504) && attempt < MAX_RETRIES2) {
1609
+ const retryAfter = response.headers.get("Retry-After");
1610
+ const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;
1611
+ const delay = !isNaN(parsedRetry) && parsedRetry > 0 ? parsedRetry * 1e3 : RETRY_BASE_DELAY_MS2 * Math.pow(2, attempt);
1612
+ await sleep2(delay);
1613
+ return fetchWithRetry2(url, attempt + 1);
1614
+ }
1615
+ throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);
1616
+ }
1617
+ function sleep2(ms) {
1618
+ return new Promise((resolve) => setTimeout(resolve, ms));
1619
+ }
1469
1620
  export {
1470
1621
  FR_BLOCK_ELEMENTS,
1471
1622
  FR_CONTENT_ELEMENTS,
@@ -1493,9 +1644,12 @@ export {
1493
1644
  buildFrDownloadXmlPath,
1494
1645
  buildFrFrontmatter,
1495
1646
  buildFrOutputPath,
1647
+ buildGovinfoBulkPath,
1648
+ buildGovinfoFrUrl,
1496
1649
  buildMonthDir,
1497
1650
  buildYearDir,
1498
1651
  convertFrDocuments,
1652
+ downloadFrBulk,
1499
1653
  downloadFrDocuments,
1500
1654
  downloadSingleFrDocument
1501
1655
  };
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/fr-elements.ts","../src/fr-builder.ts","../src/fr-frontmatter.ts","../src/fr-path.ts","../src/converter.ts","../src/downloader.ts"],"sourcesContent":["/**\n * Federal Register XML element classification.\n *\n * The FR XML is GPO/SGML-derived with no namespace. It shares many\n * inline formatting elements with eCFR (E T=\"nn\", SU, FTNT) but uses\n * a flat document-centric structure rather than a hierarchical DIV system.\n *\n * Each FR document (RULE, PRORULE, NOTICE, PRESDOCU) contains a preamble\n * (PREAMB) with structured metadata, supplementary information (SUPLINF)\n * with the document body, and optional regulatory text (REGTEXT).\n */\n\nimport type { InlineType } from \"@lexbuild/core\";\n\n// ── Document type elements ──\n\n/** FR document type element names as a const tuple — single source of truth */\nexport const FR_DOCUMENT_TYPE_KEYS = [\"RULE\", \"PRORULE\", \"NOTICE\", \"PRESDOCU\"] as const;\n\n/** FR document types supported by the API and XML */\nexport type FrDocumentType = (typeof FR_DOCUMENT_TYPE_KEYS)[number];\n\n/** Top-level document elements — each becomes an emitted section-level node */\nexport const FR_DOCUMENT_ELEMENTS = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n/** Container elements that group documents within daily issues */\nexport const FR_SECTION_CONTAINERS = new Set([\n \"RULES\",\n \"PRORULES\",\n \"NOTICES\",\n \"PRESDOCS\",\n]);\n\n/** Map from document element name to normalized document type string */\nexport const FR_DOCUMENT_TYPE_MAP: Readonly<Record<string, string>> = {\n RULE: \"rule\",\n PRORULE: \"proposed_rule\",\n NOTICE: \"notice\",\n PRESDOCU: \"presidential_document\",\n};\n\n// ── Preamble elements ──\n\n/** Preamble section elements containing structured content */\nexport const FR_PREAMBLE_SECTIONS = new Set([\n \"AGY\", // Agency section (HD + P)\n \"ACT\", // Action section (HD + P)\n \"SUM\", // Summary section (HD + P)\n \"DATES\", // Dates section (HD + P)\n 
\"EFFDATE\", // Effective date section (HD + P)\n \"ADD\", // Addresses section (HD + P)\n \"FURINF\", // Further information section (HD + P)\n]);\n\n/** Preamble metadata elements — text extracted for frontmatter */\nexport const FR_PREAMBLE_META_ELEMENTS = new Set([\n \"AGENCY\", // Issuing agency name (attrs: TYPE)\n \"SUBAGY\", // Sub-agency name\n \"CFR\", // CFR citation affected (e.g., \"10 CFR Part 2\")\n \"SUBJECT\", // Document title/subject\n \"DEPDOC\", // Department document number\n \"RIN\", // Regulation Identifier Number\n]);\n\n// ── Content elements ──\n\n/** Elements that contain paragraph text */\nexport const FR_CONTENT_ELEMENTS = new Set([\n \"P\", // Paragraph\n \"FP\", // Flush paragraph (attrs: SOURCE for indent level)\n]);\n\n/** Heading element — level determined by SOURCE attribute */\nexport const FR_HEADING_ELEMENT = \"HD\";\n\n/**\n * Map from HD SOURCE attribute to heading depth.\n * HED = top-level (section-like), HD1 = subsection, etc.\n */\nexport const FR_HD_SOURCE_TO_DEPTH: Readonly<Record<string, number>> = {\n HED: 1,\n HD1: 2,\n HD2: 3,\n HD3: 4,\n HD4: 5,\n HD5: 6,\n HD6: 6,\n HD8: 6,\n};\n\n// ── Inline formatting ──\n\n/** Inline formatting elements */\nexport const FR_INLINE_ELEMENTS = new Set([\n \"I\", // Italic\n \"B\", // Bold\n \"E\", // Emphasis (type varies by T attribute)\n \"SU\", // Superscript / footnote marker\n \"FR\", // Fraction\n \"AC\", // Accent/diacritical\n]);\n\n/**\n * Map from E element T attribute to InlineType.\n * Duplicated from eCFR — source packages must not import each other.\n */\nexport const FR_EMPHASIS_MAP: Readonly<Record<string, InlineType>> = {\n \"01\": \"bold\",\n \"02\": \"italic\",\n \"03\": \"bold\", // bold italic in print — treat as bold for Markdown\n \"04\": \"italic\", // italic in headings\n \"05\": \"italic\", // small caps — render as italic\n \"51\": \"sub\", // subscript\n \"52\": \"sub\", // subscript\n \"54\": \"sub\", // subscript (math)\n \"7462\": \"italic\", // 
special terms (et seq., De minimis)\n};\n\n// ── Regulatory text elements ──\n\n/** Regulatory text amendment elements (within SUPLINF) */\nexport const FR_REGTEXT_ELEMENTS = new Set([\n \"REGTEXT\", // Regulatory text container (attrs: TITLE, PART)\n \"AMDPAR\", // Amendment instruction paragraph\n \"SECTION\", // Section container\n \"SECTNO\", // Section number designation\n \"PART\", // Part container within REGTEXT\n \"AUTH\", // Authority citation in REGTEXT\n]);\n\n/** LSTSUB — List of subjects (CFR parts affected) */\nexport const FR_LSTSUB_ELEMENT = \"LSTSUB\";\n\n// ── Signature block ──\n\n/** Signature block elements */\nexport const FR_SIGNATURE_ELEMENTS = new Set([\n \"SIG\", // Signature block container\n \"NAME\", // Signer name\n \"TITLE\", // Signer title\n \"DATED\", // Date of signature\n]);\n\n// ── Presidential document subtypes ──\n\n/** Presidential document subtype containers */\nexport const FR_PRESIDENTIAL_SUBTYPES = new Set([\n \"EXECORD\", // Executive Order\n \"PRMEMO\", // Presidential Memorandum\n \"PROCLA\", // Proclamation\n \"DETERM\", // Presidential Determination\n \"PRNOTICE\", // Presidential Notice\n \"PRORDER\", // Presidential Order\n]);\n\n/** Presidential document metadata elements */\nexport const FR_PRESIDENTIAL_META_ELEMENTS = new Set([\n \"PSIG\", // Presidential signature (initials)\n \"PLACE\", // Place of issuance\n \"TITLE3\", // CFR Title 3 marker\n \"PRES\", // President name\n]);\n\n// ── Note elements ──\n\n/** Footnote and editorial note elements */\nexport const FR_NOTE_ELEMENTS = new Set([\n \"FTNT\", // Footnote\n \"EDNOTE\", // Editorial note\n \"OLNOTE1\", // Overlay note\n]);\n\n/** Footnote reference marker */\nexport const FR_FTREF_ELEMENT = \"FTREF\";\n\n// ── Block elements ──\n\n/** Block-level content wrappers */\nexport const FR_BLOCK_ELEMENTS = new Set([\n \"EXTRACT\", // Extracted/quoted text\n \"EXAMPLE\", // Illustrative example\n]);\n\n// ── Table elements (GPOTABLE format) ──\n\n/** 
GPOTABLE elements */\nexport const FR_TABLE_ELEMENTS = new Set([\n \"GPOTABLE\", // Table root\n \"TTITLE\", // Table title\n \"BOXHD\", // Header box container\n \"CHED\", // Column header entry (attrs: H for level)\n \"ROW\", // Data row (attrs: RUL for horizontal rules)\n \"ENT\", // Cell entry (attrs: I for indent, A for alignment)\n]);\n\n// ── Elements to ignore (skip entire subtree) ──\n\n/** Elements whose entire subtree should be skipped */\nexport const FR_IGNORE_ELEMENTS = new Set([\n \"CNTNTS\", // Table of contents in daily issue\n \"GPH\", // Graphics (not available in XML)\n \"GID\", // Graphics ID\n]);\n\n// ── Elements to skip (self only, no subtree) ──\n\n/** Self-contained elements to skip — metadata extracted elsewhere or irrelevant */\nexport const FR_SKIP_ELEMENTS = new Set([\n \"PRTPAGE\", // Page number reference (attrs: P for page)\n \"STARS\", // Visual separator (****)\n \"FILED\", // Filing info\n \"UNITNAME\", // Section name in daily issue\n \"VOL\", // Volume number (daily issue metadata)\n \"NO\", // Issue number (daily issue metadata)\n \"DATE\", // Date (daily issue level — document dates from preamble)\n \"NEWPART\", // New part container in daily issue\n \"PTITLE\", // Part title in daily issue\n \"PARTNO\", // Part number in daily issue\n \"PNOTICE\", // Part notice text\n]);\n\n// ── Passthrough elements ──\n\n/** Transparent wrappers — pass through without creating frames */\nexport const FR_PASSTHROUGH_ELEMENTS = new Set([\n \"FEDREG\", // Daily issue root element\n \"PREAMB\", // Preamble — children are handled individually\n \"SUPLINF\", // Supplementary information — children are handled individually\n]);\n\n// ── Metadata extraction elements ──\n\n/** FRDOC — Federal Register document citation, e.g., \"[FR Doc. 
2026-06029 ...]\" */\nexport const FR_FRDOC_ELEMENT = \"FRDOC\";\n\n/** BILCOD — Billing code (skip) */\nexport const FR_BILCOD_ELEMENT = \"BILCOD\";\n","/**\n * Federal Register AST Builder — converts SAX events from FR XML into AST nodes.\n *\n * Follows the stack-based pattern from the eCFR builder but adapted for FR's\n * flat, document-centric structure. Each FR document (RULE, NOTICE, PRORULE,\n * PRESDOCU) becomes a single section-level LevelNode emitted via onEmit.\n *\n * FR XML is GPO/SGML-derived with no namespace. It shares inline formatting\n * (E T=\"nn\", SU, FTNT) with eCFR but uses a different document structure:\n * preamble (PREAMB) → supplementary info (SUPLINF) → signature (SIG).\n */\n\nimport type { Attributes } from \"@lexbuild/core\";\nimport type {\n LevelNode,\n ContentNode,\n InlineNode,\n InlineType,\n NoteNode,\n TableNode,\n ASTNode,\n AncestorInfo,\n EmitContext,\n} from \"@lexbuild/core\";\nimport {\n FR_DOCUMENT_ELEMENTS,\n FR_SECTION_CONTAINERS,\n FR_DOCUMENT_TYPE_MAP,\n FR_PREAMBLE_SECTIONS,\n FR_PREAMBLE_META_ELEMENTS,\n FR_CONTENT_ELEMENTS,\n FR_HEADING_ELEMENT,\n FR_HD_SOURCE_TO_DEPTH,\n FR_INLINE_ELEMENTS,\n FR_EMPHASIS_MAP,\n FR_REGTEXT_ELEMENTS,\n FR_LSTSUB_ELEMENT,\n FR_SIGNATURE_ELEMENTS,\n FR_PRESIDENTIAL_SUBTYPES,\n FR_PRESIDENTIAL_META_ELEMENTS,\n FR_NOTE_ELEMENTS,\n FR_FTREF_ELEMENT,\n FR_BLOCK_ELEMENTS,\n FR_TABLE_ELEMENTS,\n FR_IGNORE_ELEMENTS,\n FR_SKIP_ELEMENTS,\n FR_PASSTHROUGH_ELEMENTS,\n FR_FRDOC_ELEMENT,\n FR_BILCOD_ELEMENT,\n} from \"./fr-elements.js\";\n\n/** Options for configuring the FR AST builder */\nexport interface FrASTBuilderOptions {\n /** Callback when a completed document node is ready */\n onEmit: (node: LevelNode, context: EmitContext) => void | Promise<void>;\n}\n\n/** Metadata extracted from the FR document XML during parsing */\nexport interface FrDocumentXmlMeta {\n /** Document type element name (RULE, NOTICE, etc.) 
*/\n documentType: string;\n /** Normalized document type (rule, proposed_rule, etc.) */\n documentTypeNormalized: string;\n /** Agency name from AGENCY element */\n agency?: string | undefined;\n /** Sub-agency name from SUBAGY element */\n subAgency?: string | undefined;\n /** Subject/title from SUBJECT element */\n subject?: string | undefined;\n /** CFR citation from CFR element */\n cfrCitation?: string | undefined;\n /** Regulation Identifier Number from RIN element */\n rin?: string | undefined;\n /** FR document number extracted from FRDOC text */\n documentNumber?: string | undefined;\n}\n\n/** Frame kinds for the stack */\ntype FrameKind =\n | \"document\"\n | \"content\"\n | \"inline\"\n | \"heading\"\n | \"preambleSection\"\n | \"preambleMeta\"\n | \"note\"\n | \"signature\"\n | \"signatureField\"\n | \"table\"\n | \"tableHeader\"\n | \"tableRow\"\n | \"tableCell\"\n | \"block\"\n | \"regtext\"\n | \"frdoc\"\n | \"ignore\";\n\n/** A stack frame tracking an in-progress element */\ninterface StackFrame {\n kind: FrameKind;\n elementName: string;\n node?: ASTNode;\n textBuffer: string;\n /** For GPOTABLE collection */\n headers?: string[][];\n rows?: string[][];\n currentRow?: string[];\n headerLevel?: number;\n}\n\n/**\n * Federal Register AST Builder.\n *\n * Consumes SAX events and produces LexBuild AST nodes. 
Each FR document\n * (RULE, NOTICE, PRORULE, PRESDOCU) is emitted as a single section-level\n * LevelNode via the onEmit callback.\n */\nexport class FrASTBuilder {\n private readonly options: FrASTBuilderOptions;\n private readonly stack: StackFrame[] = [];\n /** Depth inside fully-ignored elements (CNTNTS, GPH) */\n private ignoredContainerDepth = 0;\n /** Metadata extracted from current document */\n private currentDocMeta: FrDocumentXmlMeta = {\n documentType: \"\",\n documentTypeNormalized: \"\",\n };\n /** All document metadata collected during parsing */\n private readonly documentMetas: FrDocumentXmlMeta[] = [];\n\n constructor(options: FrASTBuilderOptions) {\n this.options = options;\n }\n\n /** Get metadata for all documents parsed so far */\n getDocumentMetas(): readonly FrDocumentXmlMeta[] {\n return this.documentMetas;\n }\n\n /** Handle SAX open element */\n onOpenElement(name: string, attrs: Attributes): void {\n // Track ignored containers (skip entire subtree)\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth++;\n return;\n }\n\n // Full-subtree ignore elements (CNTNTS, GPH, GID)\n if (FR_IGNORE_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Self-contained skip elements\n if (FR_SKIP_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Transparent pass-through wrappers (FEDREG, PREAMB, SUPLINF)\n if (FR_PASSTHROUGH_ELEMENTS.has(name)) {\n return;\n }\n\n // Section containers (RULES, PRORULES, NOTICES, PRESDOCS) — pass through\n if (FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Document elements (RULE, NOTICE, PRORULE, PRESDOCU) → open document-level node\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.openDocument(name);\n return;\n }\n\n // Presidential document subtypes (EXECORD, PRMEMO, etc.) 
— pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Presidential metadata (PSIG, PLACE, TITLE3, PRES)\n if (FR_PRESIDENTIAL_META_ELEMENTS.has(name)) {\n // PSIG and PLACE contain text we want to capture as content\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.openContent(name);\n return;\n }\n // TITLE3, PRES — skip\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble metadata elements (AGENCY, SUBAGY, CFR, SUBJECT, RIN, DEPDOC)\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"preambleMeta\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble sections (AGY, ACT, SUM, DATES, EFFDATE, ADD, FURINF)\n if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.stack.push({ kind: \"preambleSection\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Heading element (HD) — level from SOURCE attribute\n if (name === FR_HEADING_ELEMENT) {\n this.openHeading(name, attrs);\n return;\n }\n\n // Content elements (P, FP)\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.openContent(name);\n return;\n }\n\n // Inline elements (I, B, E, SU, FR, AC)\n if (FR_INLINE_ELEMENTS.has(name)) {\n this.openInline(name, attrs);\n return;\n }\n\n // Footnote reference\n if (name === FR_FTREF_ELEMENT) {\n const node: InlineNode = {\n type: \"inline\",\n inlineType: \"footnoteRef\",\n idref: attrs[\"ID\"],\n };\n this.stack.push({ kind: \"inline\", elementName: name, node, textBuffer: \"\" });\n return;\n }\n\n // Note elements (FTNT, EDNOTE, OLNOTE1)\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.openNote(name);\n return;\n }\n\n // REGTEXT and related elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.openRegtext(name, attrs);\n return;\n }\n\n // LSTSUB — List of subjects\n if (name === FR_LSTSUB_ELEMENT) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n 
this.openSignature(name);\n return;\n }\n\n // Block elements (EXTRACT, EXAMPLE)\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.openTableElement(name, attrs);\n return;\n }\n\n // FRDOC — extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.stack.push({ kind: \"frdoc\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // BILCOD — skip\n if (name === FR_BILCOD_ELEMENT) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Unknown elements — push as ignore to maintain stack balance\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n }\n\n /** Handle SAX close element */\n onCloseElement(name: string): void {\n // Track ignored containers\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth--;\n return;\n }\n\n // Pass-through elements — no frame to pop\n if (FR_PASSTHROUGH_ELEMENTS.has(name) || FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Presidential subtypes — pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Document elements → emit\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.closeDocument(name);\n return;\n }\n\n // Preamble metadata → extract text\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.closePreambleMeta(name);\n return;\n }\n\n // Preamble sections → just pop the frame\n if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // Heading\n if (name === FR_HEADING_ELEMENT) {\n this.closeHeading(name);\n return;\n }\n\n // Content elements\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.closeContent(name);\n return;\n }\n\n // Presidential metadata content (PSIG, PLACE)\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.closeContent(name);\n return;\n }\n\n // Inline elements\n if (FR_INLINE_ELEMENTS.has(name) || name === FR_FTREF_ELEMENT) {\n this.closeInline(name);\n 
return;\n }\n\n // Note elements\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.closeNote(name);\n return;\n }\n\n // REGTEXT elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.closeRegtext(name);\n return;\n }\n\n // LSTSUB\n if (name === FR_LSTSUB_ELEMENT) {\n this.popFrame(name);\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.closeSignature(name);\n return;\n }\n\n // Block elements\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.closeTableElement(name);\n return;\n }\n\n // FRDOC → extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.closeFrdoc();\n return;\n }\n\n // Pop any remaining frames (ignore, etc.)\n if (this.stack.length > 0 && this.stack[this.stack.length - 1]?.elementName === name) {\n this.stack.pop();\n }\n }\n\n /** Handle SAX text content */\n onText(text: string): void {\n if (this.ignoredContainerDepth > 0) return;\n\n const frame = this.stack[this.stack.length - 1];\n if (!frame) return;\n\n // Accumulate text in text-collecting frames\n if (\n frame.kind === \"heading\" ||\n frame.kind === \"preambleMeta\" ||\n frame.kind === \"signatureField\" ||\n frame.kind === \"tableCell\" ||\n frame.kind === \"tableHeader\" ||\n frame.kind === \"frdoc\"\n ) {\n frame.textBuffer += text;\n return;\n }\n\n // Content frames → create inline text node\n if (frame.kind === \"content\" && frame.node?.type === \"content\") {\n const contentNode = frame.node as ContentNode;\n if (text) {\n contentNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text,\n });\n }\n return;\n }\n\n // Inline frames → set text or add child\n if (frame.kind === \"inline\" && frame.node?.type === \"inline\") {\n const inlineNode = frame.node as InlineNode;\n if (inlineNode.children) {\n inlineNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text,\n });\n } else {\n inlineNode.text = (inlineNode.text ?? 
\"\") + text;\n }\n return;\n }\n\n // Note frames with direct text\n if (frame.kind === \"note\" && frame.node?.type === \"note\") {\n frame.textBuffer += text;\n return;\n }\n\n // Document-level, preambleSection, block, regtext — ignore stray text\n }\n\n // ── Private helpers: Document ──\n\n private openDocument(elementName: string): void {\n this.currentDocMeta = {\n documentType: elementName,\n documentTypeNormalized: FR_DOCUMENT_TYPE_MAP[elementName] ?? elementName.toLowerCase(),\n };\n\n const node: LevelNode = {\n type: \"level\",\n levelType: \"section\",\n children: [],\n sourceElement: elementName,\n };\n\n this.stack.push({ kind: \"document\", elementName, node, textBuffer: \"\" });\n }\n\n private closeDocument(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"document\" || !frame.node) return;\n\n const levelNode = frame.node as LevelNode;\n\n // Set heading from subject\n if (this.currentDocMeta.subject) {\n levelNode.heading = this.currentDocMeta.subject;\n }\n\n // Set identifier from document number\n if (this.currentDocMeta.documentNumber) {\n levelNode.identifier = `/us/fr/${this.currentDocMeta.documentNumber}`;\n levelNode.numValue = this.currentDocMeta.documentNumber;\n }\n\n // Build emit context\n const ancestors: AncestorInfo[] = [];\n for (const f of this.stack) {\n if (f.kind === \"document\" && f.node?.type === \"level\") {\n const ln = f.node as LevelNode;\n ancestors.push({\n levelType: ln.levelType,\n numValue: ln.numValue,\n heading: ln.heading,\n identifier: ln.identifier,\n });\n }\n }\n\n const context: EmitContext = {\n ancestors,\n documentMeta: {\n dcTitle: this.currentDocMeta.subject,\n dcType: this.currentDocMeta.documentTypeNormalized,\n },\n };\n\n // Save metadata before emitting\n this.documentMetas.push({ ...this.currentDocMeta });\n\n this.options.onEmit(levelNode, context);\n }\n\n // ── Private helpers: Preamble ──\n\n private closePreambleMeta(elementName: 
string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"preambleMeta\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n switch (elementName) {\n case \"AGENCY\":\n this.currentDocMeta.agency = text;\n break;\n case \"SUBAGY\":\n this.currentDocMeta.subAgency = text;\n break;\n case \"CFR\":\n this.currentDocMeta.cfrCitation = text;\n break;\n case \"SUBJECT\":\n this.currentDocMeta.subject = text;\n break;\n case \"RIN\":\n this.currentDocMeta.rin = text.replace(/^RIN\\s+/i, \"\").trim();\n break;\n case \"DEPDOC\":\n // Department document number — store for potential use\n break;\n }\n }\n\n // ── Private helpers: Heading ──\n\n private openHeading(_elementName: string, attrs: Attributes): void {\n const source = attrs[\"SOURCE\"] ?? \"HD1\";\n const depth = FR_HD_SOURCE_TO_DEPTH[source] ?? 3;\n\n this.stack.push({\n kind: \"heading\",\n elementName: FR_HEADING_ELEMENT,\n textBuffer: \"\",\n headerLevel: depth,\n });\n }\n\n private closeHeading(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"heading\") return;\n\n const headingText = frame.textBuffer.trim();\n if (!headingText) return;\n\n // In preamble sections (AGY, ACT, SUM, etc.), the HD contains the label\n // like \"AGENCY:\", \"ACTION:\", \"SUMMARY:\". 
We render these as bold labels.\n const parentFrame = this.stack[this.stack.length - 1];\n\n if (parentFrame?.kind === \"preambleSection\") {\n // Create a bold label content node\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n return;\n }\n\n // Outside preamble: render as a bold heading content node\n // The depth from SOURCE attribute determines visual weight\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n }\n\n // ── Private helpers: Content ──\n\n private openContent(elementName: string): void {\n const node: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [],\n };\n this.stack.push({ kind: \"content\", elementName, node, textBuffer: \"\" });\n }\n\n private closeContent(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const contentNode = frame.node as ContentNode;\n\n // Skip empty content nodes\n if (contentNode.children.length === 0) return;\n\n // Add to parent: document, note, or block\n const parent = this.findParentDocument() ?? 
this.findParentNote();\n if (parent?.node) {\n if (parent.node.type === \"level\") {\n (parent.node as LevelNode).children.push(contentNode);\n } else if (parent.node.type === \"note\") {\n (parent.node as NoteNode).children.push(contentNode);\n }\n }\n }\n\n // ── Private helpers: Inline ──\n\n private openInline(elementName: string, attrs: Attributes): void {\n let inlineType: InlineType = \"text\";\n\n if (elementName === \"I\") {\n inlineType = \"italic\";\n } else if (elementName === \"B\") {\n inlineType = \"bold\";\n } else if (elementName === \"SU\") {\n inlineType = \"sup\";\n } else if (elementName === \"FR\") {\n inlineType = \"text\"; // Fractions render as text\n } else if (elementName === \"E\") {\n const tValue = attrs[\"T\"] ?? \"\";\n inlineType = FR_EMPHASIS_MAP[tValue] ?? \"italic\";\n }\n\n const node: InlineNode = {\n type: \"inline\",\n inlineType,\n children: [],\n };\n\n this.stack.push({ kind: \"inline\", elementName, node, textBuffer: \"\" });\n }\n\n private closeInline(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const inlineNode = frame.node as InlineNode;\n\n // For footnoteRef, set text from buffer\n if (inlineNode.inlineType === \"footnoteRef\" && frame.textBuffer) {\n inlineNode.text = frame.textBuffer.trim();\n }\n\n // Find parent to attach to\n const parentFrame = this.stack[this.stack.length - 1];\n if (!parentFrame) return;\n\n if (parentFrame.kind === \"content\" && parentFrame.node?.type === \"content\") {\n (parentFrame.node as ContentNode).children.push(inlineNode);\n } else if (parentFrame.kind === \"inline\" && parentFrame.node?.type === \"inline\") {\n const parentInline = parentFrame.node as InlineNode;\n if (parentInline.children) {\n parentInline.children.push(inlineNode);\n }\n } else if (parentFrame.kind === \"heading\" || parentFrame.kind === \"preambleMeta\") {\n // Inline inside heading or preamble metadata — accumulate text\n if 
(inlineNode.text) {\n parentFrame.textBuffer += inlineNode.text;\n } else if (inlineNode.children) {\n for (const child of inlineNode.children) {\n if (child.text) parentFrame.textBuffer += child.text;\n }\n }\n }\n }\n\n // ── Private helpers: Notes ──\n\n private openNote(elementName: string): void {\n const noteTypeMap: Record<string, string> = {\n FTNT: \"footnote\",\n EDNOTE: \"editorial\",\n OLNOTE1: \"general\",\n };\n\n const noteType = noteTypeMap[elementName] ?? elementName.toLowerCase();\n const node: NoteNode = {\n type: \"note\",\n noteType,\n children: [],\n };\n\n this.stack.push({ kind: \"note\", elementName, node, textBuffer: \"\" });\n }\n\n private closeNote(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const noteNode = frame.node as NoteNode;\n\n // If text was collected directly (no child content nodes), create one\n if (frame.textBuffer.trim() && noteNode.children.length === 0) {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text: frame.textBuffer.trim(),\n },\n ],\n };\n noteNode.children.push(contentNode);\n }\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(noteNode);\n }\n }\n\n // ── Private helpers: Regulatory text ──\n\n private openRegtext(elementName: string, attrs: Attributes): void {\n if (elementName === \"REGTEXT\") {\n // REGTEXT container with TITLE and PART attributes\n const title = attrs[\"TITLE\"] ?? \"\";\n const part = attrs[\"PART\"] ?? \"\";\n const label = title && part ? 
`${title} CFR Part ${part}` : \"\";\n\n // Create a bold label if we have CFR reference info\n if (label) {\n const labelNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: label,\n },\n ],\n };\n this.addToDocument(labelNode);\n }\n\n this.stack.push({ kind: \"regtext\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AMDPAR\") {\n // Amendment instruction paragraph — render as italic content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\") {\n // Section container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"SECTNO\") {\n // Section number — collect as content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"PART\") {\n // Part container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AUTH\") {\n // Authority citation in REGTEXT\n this.openNote(elementName);\n return;\n }\n }\n\n private closeRegtext(elementName: string): void {\n if (elementName === \"REGTEXT\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AMDPAR\" || elementName === \"SECTNO\") {\n this.closeContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\" || elementName === \"PART\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AUTH\") {\n this.closeNote(elementName);\n return;\n }\n }\n\n // ── Private helpers: Signature block ──\n\n private openSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n // Signature container\n const node: NoteNode = {\n type: \"note\",\n noteType: \"signature\",\n children: [],\n };\n this.stack.push({ kind: \"signature\", elementName, node, textBuffer: \"\" });\n return;\n }\n\n // NAME, TITLE, DATED — collect text\n 
this.stack.push({ kind: \"signatureField\", elementName, textBuffer: \"\" });\n }\n\n private closeSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const sigNode = frame.node as NoteNode;\n\n // Add signature to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(sigNode);\n }\n return;\n }\n\n // NAME, TITLE, DATED fields\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"signatureField\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n // Add as content to parent signature node\n const sigFrame = this.findFrame(\"signature\");\n if (sigFrame?.node && sigFrame.node.type === \"note\") {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text,\n },\n ],\n };\n (sigFrame.node as NoteNode).children.push(contentNode);\n }\n }\n\n // ── Private helpers: GPOTABLE ──\n\n private openTableElement(elementName: string, _attrs: Attributes): void {\n if (elementName === \"GPOTABLE\") {\n this.stack.push({\n kind: \"table\",\n elementName,\n textBuffer: \"\",\n headers: [],\n rows: [],\n currentRow: [],\n });\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — collect text as heading\n this.stack.push({ kind: \"heading\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // Header container — no frame needed, children (CHED) handle themselves\n return;\n }\n\n if (elementName === \"CHED\") {\n // Column header entry\n this.stack.push({ kind: \"tableHeader\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ROW\") {\n const tableFrame = this.findTableFrame();\n if (tableFrame) {\n tableFrame.currentRow = [];\n }\n this.stack.push({ 
kind: \"tableRow\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ENT\") {\n // Cell entry\n this.stack.push({ kind: \"tableCell\", elementName, textBuffer: \"\" });\n return;\n }\n }\n\n private closeTableElement(elementName: string): void {\n if (elementName === \"GPOTABLE\") {\n this.closeGpoTable();\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — drop the heading frame (title is informational)\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // No frame to pop\n return;\n }\n\n if (elementName === \"CHED\") {\n this.closeTableHeader();\n return;\n }\n\n if (elementName === \"ROW\") {\n this.closeTableRow();\n return;\n }\n\n if (elementName === \"ENT\") {\n this.closeTableCell();\n return;\n }\n }\n\n private closeGpoTable(): void {\n const frame = this.popFrame(\"GPOTABLE\");\n if (!frame || frame.kind !== \"table\") return;\n\n const tableNode: TableNode = {\n type: \"table\",\n variant: \"xhtml\", // Reuse the same variant for rendering\n headers: frame.headers ?? [],\n rows: frame.rows ?? 
[],\n };\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(tableNode);\n }\n }\n\n private closeTableHeader(): void {\n const headerFrame = this.popFrame(\"CHED\");\n if (!headerFrame || headerFrame.kind !== \"tableHeader\") return;\n\n const tableFrame = this.findTableFrame();\n if (!tableFrame) return;\n\n const text = headerFrame.textBuffer.trim();\n\n // GPOTABLE headers are flat — each CHED is one column header.\n // We build a single header row from all CHED elements.\n if (!tableFrame.headers || tableFrame.headers.length === 0) {\n tableFrame.headers = [[]];\n }\n const headerRow = tableFrame.headers[0];\n if (headerRow) {\n headerRow.push(text);\n }\n }\n\n private closeTableRow(): void {\n const rowFrame = this.popFrame(\"ROW\");\n if (!rowFrame) return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.rows?.push([...tableFrame.currentRow]);\n tableFrame.currentRow = [];\n }\n }\n\n private closeTableCell(): void {\n const cellFrame = this.stack.pop();\n if (!cellFrame || cellFrame.kind !== \"tableCell\") return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.currentRow.push(cellFrame.textBuffer.trim());\n }\n }\n\n // ── Private helpers: FRDOC ──\n\n private closeFrdoc(): void {\n const frame = this.popFrame(FR_FRDOC_ELEMENT);\n if (!frame || frame.kind !== \"frdoc\") return;\n\n const text = frame.textBuffer.trim();\n // Extract document number from \"[FR Doc. 2026-06029 Filed 3-27-26; 8:45 am]\"\n // or \"[FR Doc. 
2026-06029]\"\n const match = /FR\\s+Doc\\.\\s+([\\d-]+)/i.exec(text);\n if (match) {\n this.currentDocMeta.documentNumber = match[1];\n }\n }\n\n // ── Private helpers: Stack navigation ──\n\n private addToDocument(node: ASTNode): void {\n const docFrame = this.findParentDocument();\n if (docFrame?.node && docFrame.node.type === \"level\") {\n (docFrame.node as LevelNode).children.push(node);\n }\n }\n\n private findParentDocument(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"document\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findParentNote(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"note\" || this.stack[i]?.kind === \"signature\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findTableFrame(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"table\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findFrame(kind: FrameKind): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === kind) {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private popFrame(elementName: string): StackFrame | undefined {\n if (this.stack.length === 0) return undefined;\n\n // Find the matching frame (may not be exactly on top)\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.elementName === elementName) {\n return this.stack.splice(i, 1)[0];\n }\n }\n\n // No matching frame found — warn rather than popping an unrelated frame\n console.warn(\n `FrASTBuilder: no matching frame for closing element </${elementName}>, ` +\n `stack has: [${this.stack.map((f) => f.elementName).join(\", \")}]`,\n );\n return undefined;\n }\n}\n","/**\n * Federal Register frontmatter builder.\n *\n * Constructs FrontmatterData from an emitted FR AST node, 
its context,\n * and optional JSON metadata from the FederalRegister.gov API.\n */\n\nimport type { LevelNode, EmitContext, FrontmatterData } from \"@lexbuild/core\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\n\n/**\n * Metadata from the FederalRegister.gov API JSON response.\n * Stored as a sidecar `.json` file alongside each `.xml` download.\n */\nexport interface FrDocumentJsonMeta {\n /** FR document number (e.g., \"2026-06029\") */\n document_number: string;\n /** Document type (Rule, Proposed Rule, Notice, Presidential Document) */\n type: string;\n /** Document title */\n title: string;\n /** Publication date (YYYY-MM-DD) */\n publication_date: string;\n /** Full FR citation (e.g., \"91 FR 14523\") */\n citation: string;\n /** FR volume number */\n volume: number;\n /** Start page number */\n start_page: number;\n /** End page number */\n end_page: number;\n /** Agencies with hierarchy info */\n agencies: Array<{\n name: string;\n id: number;\n slug: string;\n parent_id?: number | null;\n raw_name?: string;\n }>;\n /** CFR title/part references */\n cfr_references: Array<{ title: number; part: number }>;\n /** Docket identifiers */\n docket_ids: string[];\n /** Regulation Identifier Numbers */\n regulation_id_numbers: string[];\n /** Effective date (YYYY-MM-DD) */\n effective_on?: string | null;\n /** Comment period end date (YYYY-MM-DD) */\n comments_close_on?: string | null;\n /** Action description (e.g., \"Final rule.\") */\n action?: string | null;\n /** Document abstract */\n abstract?: string | null;\n /** Whether the document is significant */\n significant?: boolean | null;\n /** Topics/keywords */\n topics: string[];\n /** URL to full text XML */\n full_text_xml_url: string;\n}\n\n/** Normalize API document type to lowercase snake_case */\nfunction normalizeDocumentType(apiType: string): string {\n const map: Record<string, string> = {\n Rule: \"rule\",\n \"Proposed Rule\": \"proposed_rule\",\n Notice: \"notice\",\n 
\"Presidential Document\": \"presidential_document\",\n };\n return map[apiType] ?? apiType.toLowerCase().replace(/\\s+/g, \"_\");\n}\n\n/**\n * Build FrontmatterData from an FR document node with optional JSON metadata.\n *\n * If JSON metadata is available (from the API sidecar file), it enriches\n * the frontmatter with structured agency, CFR reference, docket, and\n * date information that isn't available in the XML alone.\n */\nexport function buildFrFrontmatter(\n node: LevelNode,\n _context: EmitContext,\n xmlMeta: FrDocumentXmlMeta,\n jsonMeta?: FrDocumentJsonMeta,\n): FrontmatterData {\n const documentNumber = jsonMeta?.document_number ?? xmlMeta.documentNumber ?? \"\";\n const subject = jsonMeta?.title ?? xmlMeta.subject ?? node.heading ?? \"\";\n const publicationDate = jsonMeta?.publication_date ?? \"\";\n const documentType =\n jsonMeta ? normalizeDocumentType(jsonMeta.type) : xmlMeta.documentTypeNormalized;\n\n // Build agencies list\n let agencies: string[] | undefined;\n if (jsonMeta?.agencies && jsonMeta.agencies.length > 0) {\n agencies = jsonMeta.agencies.map((a) => a.name);\n } else if (xmlMeta.agency) {\n agencies = [xmlMeta.agency];\n if (xmlMeta.subAgency) {\n agencies.push(xmlMeta.subAgency);\n }\n }\n\n // Build CFR references list\n let cfrReferences: string[] | undefined;\n if (jsonMeta?.cfr_references && jsonMeta.cfr_references.length > 0) {\n cfrReferences = jsonMeta.cfr_references.map((r) => `${r.title} CFR Part ${r.part}`);\n } else if (xmlMeta.cfrCitation) {\n cfrReferences = [xmlMeta.cfrCitation];\n }\n\n // Build docket IDs list\n let docketIds: string[] | undefined;\n if (jsonMeta?.docket_ids && jsonMeta.docket_ids.length > 0) {\n docketIds = jsonMeta.docket_ids;\n }\n\n // Primary agency for the existing `agency` field\n const primaryAgency =\n agencies && agencies.length > 0 ? agencies[0] : undefined;\n\n // FR citation\n const frCitation = jsonMeta?.citation;\n\n // RIN\n const rin = jsonMeta?.regulation_id_numbers?.[0] ?? 
xmlMeta.rin;\n\n const fm: FrontmatterData = {\n source: \"fr\",\n legal_status: \"authoritative_unofficial\",\n identifier: node.identifier ?? `/us/fr/${documentNumber}`,\n title: subject,\n title_number: 0, // FR documents don't belong to a USC/CFR title\n title_name: \"Federal Register\",\n section_number: documentNumber,\n section_name: subject,\n positive_law: false,\n currency: publicationDate,\n last_updated: publicationDate,\n\n // Shared optional fields\n agency: primaryAgency,\n\n // FR-specific fields\n document_number: documentNumber || undefined,\n document_type: documentType || undefined,\n fr_citation: frCitation,\n fr_volume: jsonMeta?.volume,\n publication_date: publicationDate || undefined,\n agencies: agencies && agencies.length > 0 ? agencies : undefined,\n cfr_references: cfrReferences && cfrReferences.length > 0 ? cfrReferences : undefined,\n docket_ids: docketIds && docketIds.length > 0 ? docketIds : undefined,\n rin: rin || undefined,\n effective_date: jsonMeta?.effective_on ?? undefined,\n comments_close_date: jsonMeta?.comments_close_on ?? undefined,\n fr_action: jsonMeta?.action ?? 
undefined,\n };\n\n return fm;\n}\n","/**\n * Output path builder for Federal Register directory structure.\n *\n * FR path structure:\n * output/fr/{YYYY}/{MM}/{document_number}.md\n *\n * Downloads path structure:\n * downloads/fr/{YYYY}/{MM}/{document_number}.xml\n * downloads/fr/{YYYY}/{MM}/{document_number}.json\n */\n\nimport { join } from \"node:path\";\n\n/**\n * Build the output file path for an FR document.\n *\n * @param documentNumber - FR document number (e.g., \"2026-06029\")\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param outputRoot - Output root directory (e.g., \"./output\")\n * @returns Full output file path (e.g., \"output/fr/2026/03/2026-06029.md\")\n */\nexport function buildFrOutputPath(\n documentNumber: string,\n publicationDate: string,\n outputRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(outputRoot, \"fr\", year, month, `${documentNumber}.md`);\n}\n\n/**\n * Build the download file path for an FR document XML.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory (e.g., \"./downloads/fr\")\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.xml\")\n */\nexport function buildFrDownloadXmlPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.xml`);\n}\n\n/**\n * Build the download file path for an FR document JSON metadata.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.json\")\n */\nexport function buildFrDownloadJsonPath(\n documentNumber: string,\n 
publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.json`);\n}\n\n/**\n * Build the directory path for a year/month within the FR output structure.\n */\nexport function buildMonthDir(year: string, month: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year, month);\n}\n\n/**\n * Build the directory path for a year.\n */\nexport function buildYearDir(year: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year);\n}\n\n/**\n * Parse a YYYY-MM-DD date string into year and month components.\n */\nfunction parseDateComponents(date: string): { year: string; month: string } {\n const parts = date.split(\"-\");\n return {\n year: parts[0] || \"0000\",\n month: parts[1] || \"00\",\n };\n}\n","/**\n * Federal Register conversion orchestrator.\n *\n * Discovers downloaded FR XML files, parses them with FrASTBuilder,\n * enriches frontmatter with JSON sidecar metadata, renders via core's\n * renderDocument, and writes structured Markdown output.\n *\n * Processes FR documents in two passes: (1) parse all files and register\n * identifiers for link resolution, (2) render and write output files.\n */\n\nimport { createReadStream, existsSync } from \"node:fs\";\nimport { readFile, readdir, stat } from \"node:fs/promises\";\nimport { join, dirname } from \"node:path\";\nimport {\n XMLParser,\n renderDocument,\n createLinkResolver,\n writeFile,\n mkdir,\n} from \"@lexbuild/core\";\nimport type { LevelNode, EmitContext } from \"@lexbuild/core\";\nimport { FrASTBuilder } from \"./fr-builder.js\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\nimport { buildFrFrontmatter } from \"./fr-frontmatter.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport { buildFrOutputPath } from \"./fr-path.js\";\nimport { FR_DOCUMENT_TYPE_KEYS } from \"./fr-elements.js\";\nimport type 
{ FrDocumentType } from \"./fr-elements.js\";\n\n// ── Public types ──\n\n/** Options for converting FR documents */\nexport interface FrConvertOptions {\n /** Path to input file or directory containing .xml/.json files */\n input: string;\n /** Output root directory */\n output: string;\n /** Link style for cross-references */\n linkStyle: \"relative\" | \"canonical\" | \"plaintext\";\n /** Parse only, don't write files */\n dryRun: boolean;\n /** Filter: start date (YYYY-MM-DD) */\n from?: string | undefined;\n /** Filter: end date (YYYY-MM-DD) */\n to?: string | undefined;\n /** Filter: document types */\n types?: FrDocumentType[] | undefined;\n}\n\n/** Result of a conversion operation */\nexport interface FrConvertResult {\n /** Number of documents converted */\n documentsConverted: number;\n /** Paths of written files */\n files: string[];\n /** Total estimated tokens */\n totalTokenEstimate: number;\n /** Peak RSS in bytes */\n peakMemoryBytes: number;\n /** Whether this was a dry run */\n dryRun: boolean;\n}\n\n/** Collected document info during parsing */\ninterface CollectedDoc {\n node: LevelNode;\n context: EmitContext;\n xmlMeta: FrDocumentXmlMeta;\n jsonMeta?: FrDocumentJsonMeta;\n publicationDate: string;\n documentNumber: string;\n}\n\n/** Set of valid FR document type element names for filtering */\nconst FR_DOC_TYPE_SET = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n// ── Public function ──\n\n/**\n * Convert FR XML documents to Markdown.\n *\n * Supports both single-file mode (input is a .xml path) and batch mode\n * (input is a directory containing year/month/doc.xml structure).\n */\nexport async function convertFrDocuments(options: FrConvertOptions): Promise<FrConvertResult> {\n const xmlFiles = await discoverXmlFiles(options.input, options.from, options.to);\n\n const files: string[] = [];\n let totalTokenEstimate = 0;\n let peakMemoryBytes = 0;\n\n const linkResolver = createLinkResolver();\n\n // Parse all files once and cache results\n const 
parsedFiles = new Map<string, CollectedDoc[]>();\n for (const xmlPath of xmlFiles) {\n try {\n const collected = await parseXmlFile(xmlPath);\n parsedFiles.set(xmlPath, collected);\n } catch (err) {\n console.warn(\n `Warning: Failed to parse ${xmlPath}: ${err instanceof Error ? err.message : String(err)}. Skipping.`,\n );\n }\n }\n\n // Register identifiers for link resolution using cached results\n for (const [, collected] of parsedFiles) {\n for (const doc of collected) {\n if (options.types && options.types.length > 0) {\n if (!FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) || !options.types.includes(doc.xmlMeta.documentType as FrDocumentType)) {\n continue;\n }\n }\n\n if (doc.node.identifier) {\n const outputPath = buildFrOutputPath(\n doc.documentNumber,\n doc.publicationDate,\n options.output,\n );\n linkResolver.register(doc.node.identifier, outputPath);\n }\n }\n }\n\n if (options.dryRun) {\n let count = 0;\n for (const [, collected] of parsedFiles) {\n for (const doc of collected) {\n if (options.types && options.types.length > 0) {\n if (!FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) || !options.types.includes(doc.xmlMeta.documentType as FrDocumentType)) {\n continue;\n }\n }\n count++;\n }\n }\n return {\n documentsConverted: count,\n files: [],\n totalTokenEstimate: 0,\n peakMemoryBytes: 0,\n dryRun: true,\n };\n }\n\n // Render and write using cached results\n for (const [, collected] of parsedFiles) {\n for (const doc of collected) {\n // Apply type filter\n if (options.types && options.types.length > 0) {\n if (!FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) || !options.types.includes(doc.xmlMeta.documentType as FrDocumentType)) {\n continue;\n }\n }\n\n const outputPath = buildFrOutputPath(\n doc.documentNumber,\n doc.publicationDate,\n options.output,\n );\n\n const frontmatter = buildFrFrontmatter(doc.node, doc.context, doc.xmlMeta, doc.jsonMeta);\n\n const markdown = renderDocument(doc.node, frontmatter, {\n headingOffset: 0,\n linkStyle: 
options.linkStyle,\n resolveLink: options.linkStyle === \"relative\"\n ? (id) => linkResolver.resolve(id, outputPath)\n : undefined,\n });\n\n await mkdir(dirname(outputPath), { recursive: true });\n await writeFile(outputPath, markdown, \"utf-8\");\n\n files.push(outputPath);\n\n // Estimate tokens (character/4 heuristic)\n const tokenEstimate = Math.round(markdown.length / 4);\n totalTokenEstimate += tokenEstimate;\n\n // Track memory\n const mem = process.memoryUsage().rss;\n if (mem > peakMemoryBytes) {\n peakMemoryBytes = mem;\n }\n }\n }\n\n return {\n documentsConverted: files.length,\n files,\n totalTokenEstimate,\n peakMemoryBytes,\n dryRun: false,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Parse a single XML file and collect document nodes + metadata.\n */\nasync function parseXmlFile(xmlPath: string): Promise<CollectedDoc[]> {\n const collected: CollectedDoc[] = [];\n\n const builder = new FrASTBuilder({\n onEmit: (node, context) => {\n // Snapshot metas at emit time\n const currentMetas = builder.getDocumentMetas();\n const meta = currentMetas[currentMetas.length - 1];\n if (!meta) {\n console.warn(\n `Warning: No XML metadata extracted for emitted document in ${xmlPath}. ` +\n `Frontmatter will have empty document_type and document_number.`,\n );\n }\n collected.push({\n node,\n context,\n xmlMeta: meta ?? { documentType: \"\", documentTypeNormalized: \"\" },\n publicationDate: \"\",\n documentNumber: meta?.documentNumber ?? 
\"\",\n });\n },\n });\n\n const parser = new XMLParser({ defaultNamespace: \"\" });\n parser.on(\"openElement\", (name, attrs) => builder.onOpenElement(name, attrs));\n parser.on(\"closeElement\", (name) => builder.onCloseElement(name));\n parser.on(\"text\", (text) => builder.onText(text));\n\n const stream = createReadStream(xmlPath, \"utf-8\");\n await parser.parseStream(stream);\n\n // Try to load JSON sidecar\n const jsonPath = xmlPath.replace(/\\.xml$/, \".json\");\n let jsonMeta: FrDocumentJsonMeta | undefined;\n if (existsSync(jsonPath)) {\n try {\n const raw = await readFile(jsonPath, \"utf-8\");\n jsonMeta = JSON.parse(raw) as FrDocumentJsonMeta;\n } catch (err) {\n console.warn(\n `Warning: Failed to parse JSON sidecar ${jsonPath}: ${err instanceof Error ? err.message : String(err)}. Continuing without enriched metadata.`,\n );\n }\n }\n\n // Enrich collected docs with JSON metadata and publication date\n for (const doc of collected) {\n if (jsonMeta && jsonMeta.document_number === doc.documentNumber) {\n doc.jsonMeta = jsonMeta;\n doc.publicationDate = jsonMeta.publication_date;\n } else {\n // Infer date from file path (downloads/fr/YYYY/MM/doc.xml)\n const inferredDate = inferDateFromPath(xmlPath);\n if (!inferredDate) {\n console.warn(\n `Warning: No publication date for document ${doc.documentNumber || \"(unknown)\"} — ` +\n `no JSON sidecar and path ${xmlPath} has no YYYY/MM/ pattern. Output will be in 0000/00/.`,\n );\n }\n doc.publicationDate = inferredDate;\n }\n }\n\n return collected;\n}\n\n/**\n * Discover XML files in a directory or return the single file path.\n */\nasync function discoverXmlFiles(\n input: string,\n from?: string,\n to?: string,\n): Promise<string[]> {\n let inputStat;\n try {\n inputStat = await stat(input);\n } catch (err) {\n throw new Error(\n `Cannot access input path \"${input}\": ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (inputStat.isFile()) {\n return [input];\n }\n\n if (!inputStat.isDirectory()) {\n throw new Error(`Input path \"${input}\" is not a file or directory`);\n }\n\n // Recursively find all .xml files\n const xmlFiles: string[] = [];\n await walkDir(input, xmlFiles);\n\n // Apply date range filter based on file path structure (YYYY/MM/)\n let filtered = xmlFiles;\n if (from || to) {\n filtered = xmlFiles.filter((f) => {\n const date = inferDateFromPath(f);\n if (!date) return true; // Can't filter if no date in path\n if (from && date < from) return false;\n if (to && date > to + \"-32\") return false; // Month-level comparison\n return true;\n });\n }\n\n return filtered.sort();\n}\n\n/** Recursively walk a directory collecting .xml files */\nasync function walkDir(dir: string, results: string[]): Promise<void> {\n const entries = await readdir(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n await walkDir(fullPath, results);\n } else if (entry.isFile() && entry.name.endsWith(\".xml\")) {\n results.push(fullPath);\n }\n }\n}\n\n/**\n * Infer a date string from the file path (e.g., \"downloads/fr/2026/03/doc.xml\" → \"2026-03-01\").\n * Used when no JSON sidecar is available.\n */\nfunction inferDateFromPath(filePath: string): string {\n const match = /(\\d{4})\\/(\\d{2})\\/[^/]+\\.xml$/.exec(filePath);\n if (match) {\n return `${match[1]}-${match[2]}-01`;\n }\n return \"\";\n}\n","/**\n * Federal Register API downloader.\n *\n * Downloads FR documents (XML + JSON metadata) from the FederalRegister.gov API.\n * The API provides per-document endpoints, rich JSON metadata, and requires no\n * authentication. 
Results are paginated (max 200/page) with a 10,000 result cap\n * per query — the downloader auto-chunks by month for large date ranges.\n *\n * API base: https://www.federalregister.gov/api/v1/\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat, writeFile as fsWriteFile } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\nimport { buildFrDownloadXmlPath, buildFrDownloadJsonPath } from \"./fr-path.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n/** Base URL for the FederalRegister.gov API */\nconst FR_API_BASE = \"https://www.federalregister.gov/api/v1\";\n\n/** Maximum results per page (API max) */\nconst PER_PAGE = 200;\n\n/** Default delay between individual document XML fetches (ms) */\nconst DEFAULT_FETCH_DELAY_MS = 100;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n/** Fields to request from the API documents endpoint */\nconst API_FIELDS = [\n \"document_number\",\n \"type\",\n \"title\",\n \"publication_date\",\n \"citation\",\n \"volume\",\n \"start_page\",\n \"end_page\",\n \"agencies\",\n \"cfr_references\",\n \"docket_ids\",\n \"regulation_id_numbers\",\n \"effective_on\",\n \"comments_close_on\",\n \"action\",\n \"abstract\",\n \"significant\",\n \"topics\",\n \"full_text_xml_url\",\n];\n\n// ── Public types ──\n\n/** Options for downloading FR documents */\nexport interface FrDownloadOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Document types to download. All types if omitted. 
*/\n types?: FrDocumentType[] | undefined;\n /** Maximum number of documents to download (for testing) */\n limit?: number | undefined;\n /** Delay between XML fetches in milliseconds */\n fetchDelayMs?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrDownloadProgress) => void) | undefined;\n}\n\n/** Progress info for download callback */\nexport interface FrDownloadProgress {\n /** Documents downloaded so far */\n documentsDownloaded: number;\n /** Total documents found across all pages */\n totalDocuments: number;\n /** Current document number being downloaded */\n currentDocument: string;\n /** Current date chunk being processed (YYYY-MM) */\n currentChunk: string;\n}\n\n/** A successfully downloaded FR document */\nexport interface FrDownloadedFile {\n /** Absolute path to the XML file */\n xmlPath: string;\n /** Absolute path to the JSON metadata file */\n jsonPath: string;\n /** Document number */\n documentNumber: string;\n /** Publication date */\n publicationDate: string;\n /** Combined size in bytes (XML + JSON) */\n size: number;\n}\n\n/** A failed download */\nexport interface FrDownloadFailure {\n /** Document number */\n documentNumber: string;\n /** Error message */\n error: string;\n}\n\n/** Result of a download operation */\nexport interface FrDownloadResult {\n /** Number of documents downloaded */\n documentsDownloaded: number;\n /** Paths of downloaded files */\n files: FrDownloadedFile[];\n /** Total bytes downloaded */\n totalBytes: number;\n /** Date range covered */\n dateRange: { from: string; to: string };\n /** Documents without XML (pre-2000) */\n skipped: number;\n /** Documents that failed to download */\n failed: FrDownloadFailure[];\n}\n\n/** API listing response */\ninterface FrApiListResponse {\n count: number;\n total_pages: number;\n next_page_url?: string | null;\n /** Can be absent on weekends/holidays when count is 0 */\n results?: FrDocumentJsonMeta[];\n}\n\n// ── Public functions ──\n\n/**\n * 
Build the API documents listing URL for a date range.\n */\nexport function buildFrApiListUrl(\n from: string,\n to: string,\n page: number,\n types?: FrDocumentType[],\n): string {\n const params = new URLSearchParams();\n params.set(\"conditions[publication_date][gte]\", from);\n params.set(\"conditions[publication_date][lte]\", to);\n params.set(\"per_page\", String(PER_PAGE));\n params.set(\"page\", String(page));\n params.set(\"order\", \"oldest\");\n\n for (const field of API_FIELDS) {\n params.append(\"fields[]\", field);\n }\n\n if (types && types.length > 0) {\n for (const t of types) {\n params.append(\"conditions[type][]\", t);\n }\n }\n\n return `${FR_API_BASE}/documents.json?${params.toString()}`;\n}\n\n/**\n * Download FR documents for a date range.\n *\n * Automatically chunks large date ranges into month-sized windows to stay\n * under the API's 10,000 result cap per query.\n */\nexport async function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDownloadResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const fetchDelay = options.fetchDelayMs ?? DEFAULT_FETCH_DELAY_MS;\n\n const files: FrDownloadedFile[] = [];\n const failed: FrDownloadFailure[] = [];\n let totalBytes = 0;\n let skipped = 0;\n let totalDocumentsFound = 0;\n\n // Break date range into month-sized chunks\n const chunks = buildMonthChunks(options.from, to);\n\n for (const chunk of chunks) {\n // Check limit\n if (options.limit !== undefined && files.length >= options.limit) break;\n\n // Paginate through this chunk\n let page = 1;\n let hasMore = true;\n\n while (hasMore) {\n const listUrl = buildFrApiListUrl(chunk.from, chunk.to, page, options.types);\n const response = await fetchWithRetry(listUrl);\n const data = (await response.json()) as FrApiListResponse;\n\n if (typeof data.count !== \"number\") {\n throw new Error(\n `Unexpected API response for ${listUrl}: missing or invalid 'count' field. 
` +\n `The FederalRegister.gov API may have changed its response format.`,\n );\n }\n\n if (page === 1 && totalDocumentsFound === 0) {\n totalDocumentsFound = data.count;\n }\n\n const results = data.results ?? [];\n\n for (const doc of results) {\n // Check limit\n if (options.limit !== undefined && files.length >= options.limit) {\n hasMore = false;\n break;\n }\n\n // Report progress\n options.onProgress?.({\n documentsDownloaded: files.length,\n totalDocuments: totalDocumentsFound,\n currentDocument: doc.document_number,\n currentChunk: `${chunk.from.slice(0, 7)}`,\n });\n\n // Skip documents without XML (pre-2000)\n if (!doc.full_text_xml_url) {\n skipped++;\n continue;\n }\n\n try {\n const result = await downloadSingleDocument(doc, options.output, fetchDelay);\n files.push(result);\n totalBytes += result.size;\n } catch (err) {\n failed.push({\n documentNumber: doc.document_number,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n }\n\n // Check for more pages\n hasMore = hasMore && page < (data.total_pages ?? 
0);\n page++;\n }\n }\n\n return {\n documentsDownloaded: files.length,\n files,\n totalBytes,\n dateRange: { from: options.from, to },\n skipped,\n failed,\n };\n}\n\n/**\n * Download a single FR document by document number.\n *\n * Fetches both the JSON metadata and XML full text.\n */\nexport async function downloadSingleFrDocument(\n documentNumber: string,\n output: string,\n): Promise<FrDownloadedFile> {\n // Fetch JSON metadata first to get publication date and XML URL\n const metaUrl = `${FR_API_BASE}/documents/${documentNumber}.json?${new URLSearchParams(API_FIELDS.map((f) => [\"fields[]\", f])).toString()}`;\n const metaResponse = await fetchWithRetry(metaUrl);\n const doc = (await metaResponse.json()) as FrDocumentJsonMeta;\n\n if (!doc.document_number || !doc.publication_date) {\n throw new Error(\n `Invalid API response for document ${documentNumber}: missing document_number or publication_date`,\n );\n }\n\n return downloadSingleDocument(doc, output, 0);\n}\n\n// ── Private helpers ──\n\nasync function downloadSingleDocument(\n doc: FrDocumentJsonMeta,\n outputDir: string,\n fetchDelay: number,\n): Promise<FrDownloadedFile> {\n if (!doc.document_number || !doc.publication_date) {\n throw new Error(\n `Invalid document in API response: missing document_number or publication_date`,\n );\n }\n if (!doc.full_text_xml_url) {\n throw new Error(\n `Document ${doc.document_number} has no full_text_xml_url — cannot download XML`,\n );\n }\n\n const xmlPath = buildFrDownloadXmlPath(doc.document_number, doc.publication_date, outputDir);\n const jsonPath = buildFrDownloadJsonPath(doc.document_number, doc.publication_date, outputDir);\n\n // Ensure directory exists\n await mkdir(dirname(xmlPath), { recursive: true });\n\n // Write JSON metadata\n const jsonContent = JSON.stringify(doc, null, 2);\n await fsWriteFile(jsonPath, jsonContent, \"utf-8\");\n\n // Fetch and write XML\n if (fetchDelay > 0) {\n await sleep(fetchDelay);\n }\n\n const xmlResponse = await 
fetchWithRetry(doc.full_text_xml_url);\n if (!xmlResponse.body) {\n throw new Error(`No response body for ${doc.document_number} XML`);\n }\n\n const dest = createWriteStream(xmlPath);\n try {\n await pipeline(Readable.fromWeb(xmlResponse.body as never), dest);\n } catch (err) {\n throw new Error(\n `Failed to write XML for document ${doc.document_number} from ${doc.full_text_xml_url}: ` +\n `${err instanceof Error ? err.message : String(err)}`,\n { cause: err },\n );\n }\n\n // Get file sizes\n const xmlStat = await stat(xmlPath);\n const jsonSize = Buffer.byteLength(jsonContent, \"utf-8\");\n\n return {\n xmlPath,\n jsonPath,\n documentNumber: doc.document_number,\n publicationDate: doc.publication_date,\n size: Number(xmlStat.size) + jsonSize,\n };\n}\n\n/**\n * Break a date range into month-sized chunks.\n * Each chunk covers one calendar month (or partial month at boundaries).\n */\nfunction buildMonthChunks(from: string, to: string): Array<{ from: string; to: string }> {\n const chunks: Array<{ from: string; to: string }> = [];\n\n let current = new Date(from + \"T00:00:00Z\");\n const end = new Date(to + \"T00:00:00Z\");\n\n while (current <= end) {\n const chunkStart = current.toISOString().slice(0, 10);\n\n // End of this month\n const monthEnd = new Date(\n Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 0),\n );\n const chunkEnd = monthEnd <= end ? 
monthEnd.toISOString().slice(0, 10) : to;\n\n chunks.push({ from: chunkStart, to: chunkEnd });\n\n // Move to first day of next month\n current = new Date(\n Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 1),\n );\n }\n\n return chunks;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nasync function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n // Network-level error (DNS, TLS, connection reset) — retry\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `Network error for ${url}: ${err instanceof Error ? err.message : String(err)}. ` +\n `Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok) return response;\n\n // Retry on transient HTTP errors\n if ((response.status === 429 || response.status === 503 || response.status === 504) && attempt < MAX_RETRIES) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay = !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `HTTP ${response.status} for ${url}. 
Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n"],"mappings":";AAiBO,IAAM,wBAAwB,CAAC,QAAQ,WAAW,UAAU,UAAU;AAMtE,IAAM,uBAAuB,IAAI,IAAY,qBAAqB;AAGlE,IAAM,wBAAwB,oBAAI,IAAI;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGM,IAAM,uBAAyD;AAAA,EACpE,MAAM;AAAA,EACN,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AACZ;AAKO,IAAM,uBAAuB,oBAAI,IAAI;AAAA,EAC1C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,4BAA4B,oBAAI,IAAI;AAAA,EAC/C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,qBAAqB;AAM3B,IAAM,wBAA0D;AAAA,EACrE,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AACP;AAKO,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAMM,IAAM,kBAAwD;AAAA,EACnE,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,QAAQ;AAAA;AACV;AAKO,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,oBAAoB;AAK1B,IAAM,wBAAwB,oBAAI,IAAI;AAAA,EAC3C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,2BAA2B,oBAAI,IAAI;AAAA,EAC9C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,gCAAgC,oBAAI,IAAI;AAAA,EACnD;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,mBAAmB;AAKz
B,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,0BAA0B,oBAAI,IAAI;AAAA,EAC7C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB;AAGzB,IAAM,oBAAoB;;;ACvH1B,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA,QAAsB,CAAC;AAAA;AAAA,EAEhC,wBAAwB;AAAA;AAAA,EAExB,iBAAoC;AAAA,IAC1C,cAAc;AAAA,IACd,wBAAwB;AAAA,EAC1B;AAAA;AAAA,EAEiB,gBAAqC,CAAC;AAAA,EAEvD,YAAY,SAA8B;AACxC,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA,EAGA,mBAAiD;AAC/C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,cAAc,MAAc,OAAyB;AAEnD,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,GAAG;AACrC;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,8BAA8B,IAAI,IAAI,GAAG;AAE3C,UAAI,SAAS,UAAU,SAAS,SAAS;AACvC,aAAK,YAAY,IAAI;AACrB;AAAA,MACF;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AACrE;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,MAAM,KAAK,EAAE,MAAM,gBAAgB,aAAa,MAAM,YAAY,GAAG,CAAC;AAC3E;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,MAAM,KAAK,EAAE,MAAM,mBAAmB,aAAa,MAAM,YAAY,GAAG,CAAC;AAC9E;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,WAAW,MAAM,KAAK;AAC3B;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,YAAM,OAAmB;AAAA,QACvB,MAAM;AAAA,QACN,YAAY;AAAA,QACZ,OAAO,MAAM,IA
AI;AAAA,MACnB;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,MAAM,YAAY,GAAG,CAAC;AAC3E;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,iBAAiB,MAAM,KAAK;AACjC;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA;AAAA,EAGA,eAAe,MAAoB;AAEjC,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,KAAK,sBAAsB,IAAI,IAAI,GAAG;AACxE;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,UAAU,SAAS,SAAS;AACvC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,KAAK,SAAS,kBAAkB;AAC7D,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,UAAU,IAAI;AACnB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,eAAe,IAAI;AACxB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,WAAW;AAChB;AAAA,IACF;AAGA,QAAI,KAAK,MAAM,SAAS,KAAK,KAAK,MAAM,
KAAK,MAAM,SAAS,CAAC,GAAG,gBAAgB,MAAM;AACpF,WAAK,MAAM,IAAI;AAAA,IACjB;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,MAAoB;AACzB,QAAI,KAAK,wBAAwB,EAAG;AAEpC,UAAM,QAAQ,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAC9C,QAAI,CAAC,MAAO;AAGZ,QACE,MAAM,SAAS,aACf,MAAM,SAAS,kBACf,MAAM,SAAS,oBACf,MAAM,SAAS,eACf,MAAM,SAAS,iBACf,MAAM,SAAS,SACf;AACA,YAAM,cAAc;AACpB;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,aAAa,MAAM,MAAM,SAAS,WAAW;AAC9D,YAAM,cAAc,MAAM;AAC1B,UAAI,MAAM;AACR,oBAAY,SAAS,KAAK;AAAA,UACxB,MAAM;AAAA,UACN,YAAY;AAAA,UACZ;AAAA,QACF,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,YAAY,MAAM,MAAM,SAAS,UAAU;AAC5D,YAAM,aAAa,MAAM;AACzB,UAAI,WAAW,UAAU;AACvB,mBAAW,SAAS,KAAK;AAAA,UACvB,MAAM;AAAA,UACN,YAAY;AAAA,UACZ;AAAA,QACF,CAAC;AAAA,MACH,OAAO;AACL,mBAAW,QAAQ,WAAW,QAAQ,MAAM;AAAA,MAC9C;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,UAAU,MAAM,MAAM,SAAS,QAAQ;AACxD,YAAM,cAAc;AACpB;AAAA,IACF;AAAA,EAGF;AAAA;AAAA,EAIQ,aAAa,aAA2B;AAC9C,SAAK,iBAAiB;AAAA,MACpB,cAAc;AAAA,MACd,wBAAwB,qBAAqB,WAAW,KAAK,YAAY,YAAY;AAAA,IACvF;AAEA,UAAM,OAAkB;AAAA,MACtB,MAAM;AAAA,MACN,WAAW;AAAA,MACX,UAAU,CAAC;AAAA,MACX,eAAe;AAAA,IACjB;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,cAAc,aAA2B;AAC/C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,cAAc,CAAC,MAAM,KAAM;AAExD,UAAM,YAAY,MAAM;AAGxB,QAAI,KAAK,eAAe,SAAS;AAC/B,gBAAU,UAAU,KAAK,eAAe;AAAA,IAC1C;AAGA,QAAI,KAAK,eAAe,gBAAgB;AACtC,gBAAU,aAAa,UAAU,KAAK,eAAe,cAAc;AACnE,gBAAU,WAAW,KAAK,eAAe;AAAA,IAC3C;AAGA,UAAM,YAA4B,CAAC;AACnC,eAAW,KAAK,KAAK,OAAO;AAC1B,UAAI,EAAE,SAAS,cAAc,EAAE,MAAM,SAAS,SAAS;AACrD,cAAM,KAAK,EAAE;AACb,kBAAU,KAAK;AAAA,UACb,WAAW,GAAG;AAAA,UACd,UAAU,GAAG;AAAA,UACb,SAAS,GAAG;AAAA,UACZ,YAAY,GAAG;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,UAAuB;AAAA,MAC3B;AAAA,MACA,cAAc;AAAA,QACZ,SAAS,KAAK,eAAe;AAAA,QAC7B,QAAQ,KAAK,eAAe;AAAA,MAC9B;AAAA,IACF;AAGA,SAAK,cAAc,KAAK,EAAE,GAAG,KAAK,eAAe,CAAC;AAElD,SAAK,QAAQ,OAAO,WAAW,OAAO;AAAA,EACxC;AAAA;AAAA,EAIQ,kBAAkB,aAA2B;AACnD,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,eAAgB;AAE7C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC
,KAAM;AAEX,YAAQ,aAAa;AAAA,MACnB,KAAK;AACH,aAAK,eAAe,SAAS;AAC7B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,YAAY;AAChC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,cAAc;AAClC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,UAAU;AAC9B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,MAAM,KAAK,QAAQ,YAAY,EAAE,EAAE,KAAK;AAC5D;AAAA,MACF,KAAK;AAEH;AAAA,IACJ;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,cAAsB,OAAyB;AACjE,UAAM,SAAS,MAAM,QAAQ,KAAK;AAClC,UAAM,QAAQ,sBAAsB,MAAM,KAAK;AAE/C,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAAA,EAEQ,aAAa,aAA2B;AAC9C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,UAAW;AAExC,UAAM,cAAc,MAAM,WAAW,KAAK;AAC1C,QAAI,CAAC,YAAa;AAIlB,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAEpD,QAAI,aAAa,SAAS,mBAAmB;AAE3C,YAAMA,eAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,WAAK,cAAcA,YAAW;AAC9B;AAAA,IACF;AAIA,UAAM,cAA2B;AAAA,MAC/B,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,SAAK,cAAc,WAAW;AAAA,EAChC;AAAA;AAAA,EAIQ,YAAY,aAA2B;AAC7C,UAAM,OAAoB;AAAA,MACxB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU,CAAC;AAAA,IACb;AACA,SAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACxE;AAAA,EAEQ,aAAa,aAA2B;AAC9C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,cAAc,MAAM;AAG1B,QAAI,YAAY,SAAS,WAAW,EAAG;AAGvC,UAAM,SAAS,KAAK,mBAAmB,KAAK,KAAK,eAAe;AAChE,QAAI,QAAQ,MAAM;AAChB,UAAI,OAAO,KAAK,SAAS,SAAS;AAChC,QAAC,OAAO,KAAmB,SAAS,KAAK,WAAW;AAAA,MACtD,WAAW,OAAO,KAAK,SAAS,QAAQ;AACtC,QAAC,OAAO,KAAkB,SAAS,KAAK,WAAW;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,WAAW,aAAqB,OAAyB;AAC/D,QAAI,aAAyB;AAE7B,QAAI,gBAAgB,KAAK;AACvB,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,mBAAa;AAAA,IACf,WAAW,gBAAgB,MAAM;AAC/B,mBAAa;AAAA,IACf,WAAW,gBAAgB,MAAM;AAC/B,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,YAAM,SAAS,MAAM,GAAG,KAAK;AAC7B,mBAAa,gBAAgB,MAAM,KAAK;AAAA,IAC1C;AAEA,UAAM,OAAmB;AAAA,MACvB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;A
AEA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA,EAEQ,YAAY,aAA2B;AAC7C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,aAAa,MAAM;AAGzB,QAAI,WAAW,eAAe,iBAAiB,MAAM,YAAY;AAC/D,iBAAW,OAAO,MAAM,WAAW,KAAK;AAAA,IAC1C;AAGA,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,QAAI,CAAC,YAAa;AAElB,QAAI,YAAY,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC1E,MAAC,YAAY,KAAqB,SAAS,KAAK,UAAU;AAAA,IAC5D,WAAW,YAAY,SAAS,YAAY,YAAY,MAAM,SAAS,UAAU;AAC/E,YAAM,eAAe,YAAY;AACjC,UAAI,aAAa,UAAU;AACzB,qBAAa,SAAS,KAAK,UAAU;AAAA,MACvC;AAAA,IACF,WAAW,YAAY,SAAS,aAAa,YAAY,SAAS,gBAAgB;AAEhF,UAAI,WAAW,MAAM;AACnB,oBAAY,cAAc,WAAW;AAAA,MACvC,WAAW,WAAW,UAAU;AAC9B,mBAAW,SAAS,WAAW,UAAU;AACvC,cAAI,MAAM,KAAM,aAAY,cAAc,MAAM;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,SAAS,aAA2B;AAC1C,UAAM,cAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,UAAM,WAAW,YAAY,WAAW,KAAK,YAAY,YAAY;AACrE,UAAM,OAAiB;AAAA,MACrB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,QAAQ,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACrE;AAAA,EAEQ,UAAU,aAA2B;AAC3C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,WAAW,MAAM;AAGvB,QAAI,MAAM,WAAW,KAAK,KAAK,SAAS,SAAS,WAAW,GAAG;AAC7D,YAAM,cAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM,MAAM,WAAW,KAAK;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AACA,eAAS,SAAS,KAAK,WAAW;AAAA,IACpC;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,QAAQ;AAAA,IACtD;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,aAAqB,OAAyB;AAChE,QAAI,gBAAgB,WAAW;AAE7B,YAAM,QAAQ,MAAM,OAAO,KAAK;AAChC,YAAM,OAAO,MAAM,MAAM,KAAK;AAC9B,YAAM,QAAQ,SAAS,OAAO,GAAG,KAAK,aAAa,IAAI,KAAK;AAG5D,UAAI,OAAO;AACT,cAAM,YAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,YACR;AAAA,cACE,MAAM;AAAA,cACN,YAAY;AAAA,cACZ,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AACA,aAAK,cAAc,SAAS;AAAA,MAC9B;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,
WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,WAAW;AAE7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,aAAa,aAA2B;AAC9C,QAAI,gBAAgB,WAAW;AAC7B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,YAAY,gBAAgB,UAAU;AACxD,WAAK,aAAa,WAAW;AAC7B;AAAA,IACF;AAEA,QAAI,gBAAgB,aAAa,gBAAgB,QAAQ;AACvD,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,UAAU,WAAW;AAC1B;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,aAA2B;AAC/C,QAAI,gBAAgB,OAAO;AAEzB,YAAM,OAAiB;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,QACV,UAAU,CAAC;AAAA,MACb;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,MAAM,YAAY,GAAG,CAAC;AACxE;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,kBAAkB,aAAa,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,eAAe,aAA2B;AAChD,QAAI,gBAAgB,OAAO;AACzB,YAAMC,SAAQ,KAAK,SAAS,WAAW;AACvC,UAAI,CAACA,UAAS,CAACA,OAAM,KAAM;AAE3B,YAAM,UAAUA,OAAM;AAGtB,YAAM,YAAY,KAAK,mBAAmB;AAC1C,UAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,QAAC,UAAU,KAAmB,SAAS,KAAK,OAAO;AAAA,MACrD;AACA;AAAA,IACF;AAGA,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,iBAAkB;AAE/C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAGX,UAAM,WAAW,KAAK,UAAU,WAAW;AAC3C,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,QAAQ;AACnD,YAAM,cAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,MAAC,SAAS,KAAkB,SAAS,KAAK,WAAW;AAAA,IACvD;AAAA,EACF;AAAA;AAAA,EAIQ,iBAAiB,aAAqB,QAA0B;AACtE,QAAI,gBAAgB,YAAY;AAC9B,WAAK,MAAM,KAAK;AAAA,QACd,MAAM;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,MAAM,CAAC;AAAA,QACP,YAAY,CAAC;AAAA,MACf,CAAC;AACD;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,eAAe,aAAa,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAEA,QA
AI,gBAAgB,OAAO;AACzB,YAAM,aAAa,KAAK,eAAe;AACvC,UAAI,YAAY;AACd,mBAAW,aAAa,CAAC;AAAA,MAC3B;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,YAAY,GAAG,CAAC;AACjE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AAEzB,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,YAAY,GAAG,CAAC;AAClE;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,aAA2B;AACnD,QAAI,gBAAgB,YAAY;AAC9B,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,iBAAiB;AACtB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,eAAe;AACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,QAAQ,KAAK,SAAS,UAAU;AACtC,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,YAAuB;AAAA,MAC3B,MAAM;AAAA,MACN,SAAS;AAAA;AAAA,MACT,SAAS,MAAM,WAAW,CAAC;AAAA,MAC3B,MAAM,MAAM,QAAQ,CAAC;AAAA,IACvB;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,SAAS;AAAA,IACvD;AAAA,EACF;AAAA,EAEQ,mBAAyB;AAC/B,UAAM,cAAc,KAAK,SAAS,MAAM;AACxC,QAAI,CAAC,eAAe,YAAY,SAAS,cAAe;AAExD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,CAAC,WAAY;AAEjB,UAAM,OAAO,YAAY,WAAW,KAAK;AAIzC,QAAI,CAAC,WAAW,WAAW,WAAW,QAAQ,WAAW,GAAG;AAC1D,iBAAW,UAAU,CAAC,CAAC,CAAC;AAAA,IAC1B;AACA,UAAM,YAAY,WAAW,QAAQ,CAAC;AACtC,QAAI,WAAW;AACb,gBAAU,KAAK,IAAI;AAAA,IACrB;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,WAAW,KAAK,SAAS,KAAK;AACpC,QAAI,CAAC,SAAU;AAEf,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,MAAM,KAAK,CAAC,GAAG,WAAW,UAAU,CAAC;AAChD,iBAAW,aAAa,CAAC;AAAA,IAC3B;AAAA,EACF;AAAA,EAEQ,iBAAuB;AAC7B,UAAM,YAAY,KAAK,MAAM,IAAI;AACjC,QAAI,CAAC,aAAa,UAAU,SAAS,YAAa;AAElD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,WAAW,KAAK,UAAU,WAAW,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAAA;AAAA,EAIQ,aAAmB;AACzB,UAAM,QAAQ,KAAK,SAAS,gBAAgB;AAC5C,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,OAAO,MAAM,WAAW,KAAK;AAGnC,UAAM,QAAQ,yBAAyB,KAAK,IAAI;AAChD,QAAI,OAAO;AACT,WAAK,eAAe,iBAAiB,MAAM,CAAC;AAAA,IAC9C;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,MAAqB;AACzC,UAAM,WAAW,KAAK,mBAAmB;AACzC,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,S
AAS;AACpD,MAAC,SAAS,KAAmB,SAAS,KAAK,IAAI;AAAA,IACjD;AAAA,EACF;AAAA,EAEQ,qBAA6C;AACnD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,YAAY;AACtC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,UAAU,KAAK,MAAM,CAAC,GAAG,SAAS,aAAa;AACzE,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,SAAS;AACnC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU,MAAyC;AACzD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,MAAM;AAChC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,SAAS,aAA6C;AAC5D,QAAI,KAAK,MAAM,WAAW,EAAG,QAAO;AAGpC,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,gBAAgB,aAAa;AAC9C,eAAO,KAAK,MAAM,OAAO,GAAG,CAAC,EAAE,CAAC;AAAA,MAClC;AAAA,IACF;AAGA,YAAQ;AAAA,MACN,yDAAyD,WAAW,kBACnD,KAAK,MAAM,IAAI,CAAC,MAAM,EAAE,WAAW,EAAE,KAAK,IAAI,CAAC;AAAA,IAClE;AACA,WAAO;AAAA,EACT;AACF;;;AC7gCA,SAAS,sBAAsB,SAAyB;AACtD,QAAM,MAA8B;AAAA,IAClC,MAAM;AAAA,IACN,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,yBAAyB;AAAA,EAC3B;AACA,SAAO,IAAI,OAAO,KAAK,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG;AAClE;AASO,SAAS,mBACd,MACA,UACA,SACA,UACiB;AACjB,QAAM,iBAAiB,UAAU,mBAAmB,QAAQ,kBAAkB;AAC9E,QAAM,UAAU,UAAU,SAAS,QAAQ,WAAW,KAAK,WAAW;AACtE,QAAM,kBAAkB,UAAU,oBAAoB;AACtD,QAAM,eACJ,WAAW,sBAAsB,SAAS,IAAI,IAAI,QAAQ;AAG5D,MAAI;AACJ,MAAI,UAAU,YAAY,SAAS,SAAS,SAAS,GAAG;AACtD,eAAW,SAAS,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EAChD,WAAW,QAAQ,QAAQ;AACzB,eAAW,CAAC,QAAQ,MAAM;AAC1B,QAAI,QAAQ,WAAW;AACrB,eAAS,KAAK,QAAQ,SAAS;AAAA,IACjC;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,UAAU,kBAAkB,SAAS,eAAe,SAAS,GAAG;AAClE,oBAAgB,SAAS,eAAe,IAAI,CAAC,MAAM,GAAG,EAAE,KAAK,aAAa,EAAE,IAAI,EAAE;AAAA,EACpF,WAAW,QAAQ,aAAa;AAC9B,oBAAgB,CAAC,QAAQ,WAAW;AAAA,EACtC;AAGA,MAAI;AACJ,MAAI,UAAU,cAAc,SAAS,WAAW,SAAS,GAAG;AAC1D,gBAA
Y,SAAS;AAAA,EACvB;AAGA,QAAM,gBACJ,YAAY,SAAS,SAAS,IAAI,SAAS,CAAC,IAAI;AAGlD,QAAM,aAAa,UAAU;AAG7B,QAAM,MAAM,UAAU,wBAAwB,CAAC,KAAK,QAAQ;AAE5D,QAAM,KAAsB;AAAA,IAC1B,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY,KAAK,cAAc,UAAU,cAAc;AAAA,IACvD,OAAO;AAAA,IACP,cAAc;AAAA;AAAA,IACd,YAAY;AAAA,IACZ,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,cAAc;AAAA,IACd,UAAU;AAAA,IACV,cAAc;AAAA;AAAA,IAGd,QAAQ;AAAA;AAAA,IAGR,iBAAiB,kBAAkB;AAAA,IACnC,eAAe,gBAAgB;AAAA,IAC/B,aAAa;AAAA,IACb,WAAW,UAAU;AAAA,IACrB,kBAAkB,mBAAmB;AAAA,IACrC,UAAU,YAAY,SAAS,SAAS,IAAI,WAAW;AAAA,IACvD,gBAAgB,iBAAiB,cAAc,SAAS,IAAI,gBAAgB;AAAA,IAC5E,YAAY,aAAa,UAAU,SAAS,IAAI,YAAY;AAAA,IAC5D,KAAK,OAAO;AAAA,IACZ,gBAAgB,UAAU,gBAAgB;AAAA,IAC1C,qBAAqB,UAAU,qBAAqB;AAAA,IACpD,WAAW,UAAU,UAAU;AAAA,EACjC;AAEA,SAAO;AACT;;;ACnJA,SAAS,YAAY;AAUd,SAAS,kBACd,gBACA,iBACA,YACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,YAAY,MAAM,MAAM,OAAO,GAAG,cAAc,KAAK;AACnE;AAUO,SAAS,uBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,MAAM;AAChE;AAUO,SAAS,wBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,OAAO;AACjE;AAKO,SAAS,cAAc,MAAc,OAAe,YAA4B;AACrF,SAAO,KAAK,YAAY,MAAM,MAAM,KAAK;AAC3C;AAKO,SAAS,aAAa,MAAc,YAA4B;AACrE,SAAO,KAAK,YAAY,MAAM,IAAI;AACpC;AAKA,SAAS,oBAAoB,MAA+C;AAC1E,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,SAAO;AAAA,IACL,MAAM,MAAM,CAAC,KAAK;AAAA,IAClB,OAAO,MAAM,CAAC,KAAK;AAAA,EACrB;AACF;;;AC5EA,SAAS,kBAAkB,kBAAkB;AAC7C,SAAS,UAAU,SAAS,YAAY;AACxC,SAAS,QAAAC,OAAM,eAAe;AAC9B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAuDP,IAAM,kBAAkB,IAAI,IAAY,qBAAqB;AAU7D,eAAsB,mBAAmB,SAAqD;AAC5F,QAAM,WAAW,MAAM,iBAAiB,QAAQ,OAAO,QAAQ,MAAM,QAAQ,EAAE;AAE/E,QAAM,QAAkB,CAAC;AACzB,MAAI,qBAAqB;AACzB,MAAI,kBAAkB;AAEtB,QAAM,eAAe,mBAAmB;AAGxC,QAAM,cAAc,oBAAI,IAA4B;AACpD,aAAW,WAAW,UAAU;AAC9B,QAAI;AACF,YAAM,YAAY,MAAM,aAAa,OAAO;AAC5C,kBAAY,IAAI,SAAS,SAAS;AAAA,IACpC,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,4BAA4B,OAAO,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1F;AAAA,IACF;AAA
A,EACF;AAGA,aAAW,CAAC,EAAE,SAAS,KAAK,aAAa;AACvC,eAAW,OAAO,WAAW;AAC3B,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,YAAI,CAAC,gBAAgB,IAAI,IAAI,QAAQ,YAAY,KAAK,CAAC,QAAQ,MAAM,SAAS,IAAI,QAAQ,YAA8B,GAAG;AACzH;AAAA,QACF;AAAA,MACF;AAEA,UAAI,IAAI,KAAK,YAAY;AACvB,cAAM,aAAa;AAAA,UACjB,IAAI;AAAA,UACJ,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AACA,qBAAa,SAAS,IAAI,KAAK,YAAY,UAAU;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AAEA,MAAI,QAAQ,QAAQ;AAClB,QAAI,QAAQ;AACZ,eAAW,CAAC,EAAE,SAAS,KAAK,aAAa;AACvC,iBAAW,OAAO,WAAW;AAC3B,YAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,cAAI,CAAC,gBAAgB,IAAI,IAAI,QAAQ,YAAY,KAAK,CAAC,QAAQ,MAAM,SAAS,IAAI,QAAQ,YAA8B,GAAG;AACzH;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,MACL,oBAAoB;AAAA,MACpB,OAAO,CAAC;AAAA,MACR,oBAAoB;AAAA,MACpB,iBAAiB;AAAA,MACjB,QAAQ;AAAA,IACV;AAAA,EACF;AAGA,aAAW,CAAC,EAAE,SAAS,KAAK,aAAa;AACvC,eAAW,OAAO,WAAW;AAE3B,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,YAAI,CAAC,gBAAgB,IAAI,IAAI,QAAQ,YAAY,KAAK,CAAC,QAAQ,MAAM,SAAS,IAAI,QAAQ,YAA8B,GAAG;AACzH;AAAA,QACF;AAAA,MACF;AAEA,YAAM,aAAa;AAAA,QACjB,IAAI;AAAA,QACJ,IAAI;AAAA,QACJ,QAAQ;AAAA,MACV;AAEA,YAAM,cAAc,mBAAmB,IAAI,MAAM,IAAI,SAAS,IAAI,SAAS,IAAI,QAAQ;AAEvF,YAAM,WAAW,eAAe,IAAI,MAAM,aAAa;AAAA,QACrD,eAAe;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,aAAa,QAAQ,cAAc,aAC/B,CAAC,OAAO,aAAa,QAAQ,IAAI,UAAU,IAC3C;AAAA,MACN,CAAC;AAED,YAAM,MAAM,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AACpD,YAAM,UAAU,YAAY,UAAU,OAAO;AAE7C,YAAM,KAAK,UAAU;AAGrB,YAAM,gBAAgB,KAAK,MAAM,SAAS,SAAS,CAAC;AACpD,4BAAsB;AAGtB,YAAM,MAAM,QAAQ,YAAY,EAAE;AAClC,UAAI,MAAM,iBAAiB;AACzB,0BAAkB;AAAA,MACpB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,oBAAoB,MAAM;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,IACA,QAAQ;AAAA,EACV;AACF;AAOA,eAAe,aAAa,SAA0C;AACpE,QAAM,YAA4B,CAAC;AAEnC,QAAM,UAAU,IAAI,aAAa;AAAA,IAC/B,QAAQ,CAAC,MAAM,YAAY;AAEzB,YAAM,eAAe,QAAQ,iBAAiB;AAC9C,YAAM,OAAO,aAAa,aAAa,SAAS,CAAC;AACjD,UAAI,CAAC,MAAM;AACT,gBAAQ;AAAA,UACN,8DAA8D,OAAO;AAAA,QAEvE;AAAA,MACF;AACA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA;AAAA,QACA,SAAS,QAAQ,EAAE,cAAc,IAAI,wBAAwB,GAAG;AAAA,QAChE,iBAAiB;AAAA,QACjB,gBAAgB,MAAM,kBAAkB;AAAA,
MAC1C,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,QAAM,SAAS,IAAI,UAAU,EAAE,kBAAkB,GAAG,CAAC;AACrD,SAAO,GAAG,eAAe,CAAC,MAAM,UAAU,QAAQ,cAAc,MAAM,KAAK,CAAC;AAC5E,SAAO,GAAG,gBAAgB,CAAC,SAAS,QAAQ,eAAe,IAAI,CAAC;AAChE,SAAO,GAAG,QAAQ,CAAC,SAAS,QAAQ,OAAO,IAAI,CAAC;AAEhD,QAAM,SAAS,iBAAiB,SAAS,OAAO;AAChD,QAAM,OAAO,YAAY,MAAM;AAG/B,QAAM,WAAW,QAAQ,QAAQ,UAAU,OAAO;AAClD,MAAI;AACJ,MAAI,WAAW,QAAQ,GAAG;AACxB,QAAI;AACF,YAAM,MAAM,MAAM,SAAS,UAAU,OAAO;AAC5C,iBAAW,KAAK,MAAM,GAAG;AAAA,IAC3B,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,yCAAyC,QAAQ,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxG;AAAA,IACF;AAAA,EACF;AAGA,aAAW,OAAO,WAAW;AAC3B,QAAI,YAAY,SAAS,oBAAoB,IAAI,gBAAgB;AAC/D,UAAI,WAAW;AACf,UAAI,kBAAkB,SAAS;AAAA,IACjC,OAAO;AAEL,YAAM,eAAe,kBAAkB,OAAO;AAC9C,UAAI,CAAC,cAAc;AACjB,gBAAQ;AAAA,UACN,6CAA6C,IAAI,kBAAkB,WAAW,oCAChD,OAAO;AAAA,QACvC;AAAA,MACF;AACA,UAAI,kBAAkB;AAAA,IACxB;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,iBACb,OACA,MACA,IACmB;AACnB,MAAI;AACJ,MAAI;AACF,gBAAY,MAAM,KAAK,KAAK;AAAA,EAC9B,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,6BAA6B,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxF,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,UAAU,OAAO,GAAG;AACtB,WAAO,CAAC,KAAK;AAAA,EACf;AAEA,MAAI,CAAC,UAAU,YAAY,GAAG;AAC5B,UAAM,IAAI,MAAM,eAAe,KAAK,8BAA8B;AAAA,EACpE;AAGA,QAAM,WAAqB,CAAC;AAC5B,QAAM,QAAQ,OAAO,QAAQ;AAG7B,MAAI,WAAW;AACf,MAAI,QAAQ,IAAI;AACd,eAAW,SAAS,OAAO,CAAC,MAAM;AAChC,YAAM,OAAO,kBAAkB,CAAC;AAChC,UAAI,CAAC,KAAM,QAAO;AAClB,UAAI,QAAQ,OAAO,KAAM,QAAO;AAChC,UAAI,MAAM,OAAO,KAAK,MAAO,QAAO;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAEA,SAAO,SAAS,KAAK;AACvB;AAGA,eAAe,QAAQ,KAAa,SAAkC;AACpE,QAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAWC,MAAK,KAAK,MAAM,IAAI;AACrC,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,QAAQ,UAAU,OAAO;AAAA,IACjC,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACxD,cAAQ,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AACF;AAMA,SAAS,kBAAkB,UAA0B;AACnD,QAAM,QAAQ,gCAAgC,KAAK,QAAQ;AAC3D,MAAI,OAAO;AACT,WAAO,GAAG,MAAM,CAAC,CAAC,IAAI,MAAM,CAAC,CAAC;AAAA,EAChC;AACA,SAAO;AACT;;;ACzUA,SAAS,yBAAyB;AAClC,SAAS,SAAAC,Q
AAO,QAAAC,OAAM,aAAa,mBAAmB;AACtD,SAAS,WAAAC,gBAAe;AACxB,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AAMzB,IAAM,cAAc;AAGpB,IAAM,WAAW;AAGjB,IAAM,yBAAyB;AAG/B,IAAM,cAAc;AAGpB,IAAM,sBAAsB;AAG5B,IAAM,aAAa;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAsFO,SAAS,kBACd,MACA,IACA,MACA,OACQ;AACR,QAAM,SAAS,IAAI,gBAAgB;AACnC,SAAO,IAAI,qCAAqC,IAAI;AACpD,SAAO,IAAI,qCAAqC,EAAE;AAClD,SAAO,IAAI,YAAY,OAAO,QAAQ,CAAC;AACvC,SAAO,IAAI,QAAQ,OAAO,IAAI,CAAC;AAC/B,SAAO,IAAI,SAAS,QAAQ;AAE5B,aAAW,SAAS,YAAY;AAC9B,WAAO,OAAO,YAAY,KAAK;AAAA,EACjC;AAEA,MAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAW,KAAK,OAAO;AACrB,aAAO,OAAO,sBAAsB,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,SAAO,GAAG,WAAW,mBAAmB,OAAO,SAAS,CAAC;AAC3D;AAQA,eAAsB,oBAAoB,SAAuD;AAC/F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,aAAa,QAAQ,gBAAgB;AAE3C,QAAM,QAA4B,CAAC;AACnC,QAAM,SAA8B,CAAC;AACrC,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,sBAAsB;AAG1B,QAAM,SAAS,iBAAiB,QAAQ,MAAM,EAAE;AAEhD,aAAW,SAAS,QAAQ;AAE1B,QAAI,QAAQ,UAAU,UAAa,MAAM,UAAU,QAAQ,MAAO;AAGlE,QAAI,OAAO;AACX,QAAI,UAAU;AAEd,WAAO,SAAS;AACd,YAAM,UAAU,kBAAkB,MAAM,MAAM,MAAM,IAAI,MAAM,QAAQ,KAAK;AAC3E,YAAM,WAAW,MAAM,eAAe,OAAO;AAC7C,YAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,UAAI,OAAO,KAAK,UAAU,UAAU;AAClC,cAAM,IAAI;AAAA,UACR,+BAA+B,OAAO;AAAA,QAExC;AAAA,MACF;AAEA,UAAI,SAAS,KAAK,wBAAwB,GAAG;AAC3C,8BAAsB,KAAK;AAAA,MAC7B;AAEA,YAAM,UAAU,KAAK,WAAW,CAAC;AAEjC,iBAAW,OAAO,SAAS;AAEzB,YAAI,QAAQ,UAAU,UAAa,MAAM,UAAU,QAAQ,OAAO;AAChE,oBAAU;AACV;AAAA,QACF;AAGA,gBAAQ,aAAa;AAAA,UACnB,qBAAqB,MAAM;AAAA,UAC3B,gBAAgB;AAAA,UAChB,iBAAiB,IAAI;AAAA,UACrB,cAAc,GAAG,MAAM,KAAK,MAAM,GAAG,CAAC,CAAC;AAAA,QACzC,CAAC;AAGD,YAAI,CAAC,IAAI,mBAAmB;AAC1B;AACA;AAAA,QACF;AAEA,YAAI;AACF,gBAAM,SAAS,MAAM,uBAAuB,KAAK,QAAQ,QAAQ,UAAU;AAC3E,gBAAM,KAAK,MAAM;AACjB,wBAAc,OAAO;AAAA,QACvB,SAAS,KAAK;AACZ,iBAAO,KAAK;AAAA,YACV,gBAAgB,IAAI;AAAA,YACpB,OAAO,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,UACxD,CAAC;AAAA,QACH;AAAA,MACF;AAG
A,gBAAU,WAAW,QAAQ,KAAK,eAAe;AACjD;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,qBAAqB,MAAM;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAsB,yBACpB,gBACA,QAC2B;AAE3B,QAAM,UAAU,GAAG,WAAW,cAAc,cAAc,SAAS,IAAI,gBAAgB,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC;AACzI,QAAM,eAAe,MAAM,eAAe,OAAO;AACjD,QAAM,MAAO,MAAM,aAAa,KAAK;AAErC,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR,qCAAqC,cAAc;AAAA,IACrD;AAAA,EACF;AAEA,SAAO,uBAAuB,KAAK,QAAQ,CAAC;AAC9C;AAIA,eAAe,uBACb,KACA,WACA,YAC2B;AAC3B,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,IAAI,mBAAmB;AAC1B,UAAM,IAAI;AAAA,MACR,YAAY,IAAI,eAAe;AAAA,IACjC;AAAA,EACF;AAEA,QAAM,UAAU,uBAAuB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAC3F,QAAM,WAAW,wBAAwB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAG7F,QAAMC,OAAMC,SAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AAGjD,QAAM,cAAc,KAAK,UAAU,KAAK,MAAM,CAAC;AAC/C,QAAM,YAAY,UAAU,aAAa,OAAO;AAGhD,MAAI,aAAa,GAAG;AAClB,UAAM,MAAM,UAAU;AAAA,EACxB;AAEA,QAAM,cAAc,MAAM,eAAe,IAAI,iBAAiB;AAC9D,MAAI,CAAC,YAAY,MAAM;AACrB,UAAM,IAAI,MAAM,wBAAwB,IAAI,eAAe,MAAM;AAAA,EACnE;AAEA,QAAM,OAAO,kBAAkB,OAAO;AACtC,MAAI;AACF,UAAM,SAAS,SAAS,QAAQ,YAAY,IAAa,GAAG,IAAI;AAAA,EAClE,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,oCAAoC,IAAI,eAAe,SAAS,IAAI,iBAAiB,KAChF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACrD,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAGA,QAAM,UAAU,MAAMC,MAAK,OAAO;AAClC,QAAM,WAAW,OAAO,WAAW,aAAa,OAAO;AAEvD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,gBAAgB,IAAI;AAAA,IACpB,iBAAiB,IAAI;AAAA,IACrB,MAAM,OAAO,QAAQ,IAAI,IAAI;AAAA,EAC/B;AACF;AAMA,SAAS,iBAAiB,MAAc,IAAiD;AACvF,QAAM,SAA8C,CAAC;AAErD,MAAI,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC1C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,aAAa,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE;AAGpD,UAAM,WAAW,IAAI;AAAA,MACnB,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC;AAAA,IACjE;AACA,UAAM,WAAW,YAAY,MAAM,SAAS,YAAY,EAAE,MAAM,GAAG,EAAE,IAAI;AAEzE,WAAO,KAAK,EAAE,MAAM,YAAY,IAAI,SAAS,CAAC;AAG9C,cAAU,IAAI;AAAA,MACZ
,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,SAAO;AACT;AAGA,eAAe,eAAe,KAAa,UAAU,GAAsB;AACzE,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AAEZ,QAAI,UAAU,aAAa;AACzB,YAAM,QAAQ,sBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,cAAQ;AAAA,QACN,qBAAqB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,iBAC5D,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,MACjE;AACA,YAAM,MAAM,KAAK;AACjB,aAAO,eAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuB,cAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,GAAI,QAAO;AAGxB,OAAK,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAAQ,UAAU,aAAa;AAC5G,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QAAQ,CAAC,MAAM,WAAW,KAAK,cAAc,IAC/C,cAAc,MACd,sBAAsB,KAAK,IAAI,GAAG,OAAO;AAC7C,YAAQ;AAAA,MACN,QAAQ,SAAS,MAAM,QAAQ,GAAG,iBAAiB,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,IACnG;AACA,UAAM,MAAM,KAAK;AACjB,WAAO,eAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;","names":["contentNode","frame","join","join","mkdir","stat","dirname","mkdir","dirname","stat"]}
1
+ {"version":3,"sources":["../src/fr-elements.ts","../src/fr-builder.ts","../src/fr-frontmatter.ts","../src/fr-path.ts","../src/converter.ts","../src/downloader.ts","../src/govinfo-downloader.ts"],"sourcesContent":["/**\n * Federal Register XML element classification.\n *\n * The FR XML is GPO/SGML-derived with no namespace. It shares many\n * inline formatting elements with eCFR (E T=\"nn\", SU, FTNT) but uses\n * a flat document-centric structure rather than a hierarchical DIV system.\n *\n * Each FR document (RULE, PRORULE, NOTICE, PRESDOCU) contains a preamble\n * (PREAMB) with structured metadata, supplementary information (SUPLINF)\n * with the document body, and optional regulatory text (REGTEXT).\n */\n\nimport type { InlineType } from \"@lexbuild/core\";\n\n// ── Document type elements ──\n\n/** FR document type element names as a const tuple — single source of truth */\nexport const FR_DOCUMENT_TYPE_KEYS = [\"RULE\", \"PRORULE\", \"NOTICE\", \"PRESDOCU\"] as const;\n\n/** FR document types supported by the API and XML */\nexport type FrDocumentType = (typeof FR_DOCUMENT_TYPE_KEYS)[number];\n\n/** Top-level document elements — each becomes an emitted section-level node */\nexport const FR_DOCUMENT_ELEMENTS = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n/** Container elements that group documents within daily issues */\nexport const FR_SECTION_CONTAINERS = new Set([\n \"RULES\",\n \"PRORULES\",\n \"NOTICES\",\n \"PRESDOCS\",\n]);\n\n/** Map from document element name to normalized document type string */\nexport const FR_DOCUMENT_TYPE_MAP: Readonly<Record<string, string>> = {\n RULE: \"rule\",\n PRORULE: \"proposed_rule\",\n NOTICE: \"notice\",\n PRESDOCU: \"presidential_document\",\n};\n\n// ── Preamble elements ──\n\n/** Preamble section elements containing structured content */\nexport const FR_PREAMBLE_SECTIONS = new Set([\n \"AGY\", // Agency section (HD + P)\n \"ACT\", // Action section (HD + P)\n \"SUM\", // Summary section (HD + P)\n \"DATES\", // 
Dates section (HD + P)\n \"EFFDATE\", // Effective date section (HD + P)\n \"ADD\", // Addresses section (HD + P)\n \"FURINF\", // Further information section (HD + P)\n]);\n\n/** Preamble metadata elements — text extracted for frontmatter */\nexport const FR_PREAMBLE_META_ELEMENTS = new Set([\n \"AGENCY\", // Issuing agency name (attrs: TYPE)\n \"SUBAGY\", // Sub-agency name\n \"CFR\", // CFR citation affected (e.g., \"10 CFR Part 2\")\n \"SUBJECT\", // Document title/subject\n \"DEPDOC\", // Department document number\n \"RIN\", // Regulation Identifier Number\n]);\n\n// ── Content elements ──\n\n/** Elements that contain paragraph text */\nexport const FR_CONTENT_ELEMENTS = new Set([\n \"P\", // Paragraph\n \"FP\", // Flush paragraph (attrs: SOURCE for indent level)\n]);\n\n/** Heading element — level determined by SOURCE attribute */\nexport const FR_HEADING_ELEMENT = \"HD\";\n\n/**\n * Map from HD SOURCE attribute to heading depth.\n * HED = top-level (section-like), HD1 = subsection, etc.\n */\nexport const FR_HD_SOURCE_TO_DEPTH: Readonly<Record<string, number>> = {\n HED: 1,\n HD1: 2,\n HD2: 3,\n HD3: 4,\n HD4: 5,\n HD5: 6,\n HD6: 6,\n HD8: 6,\n};\n\n// ── Inline formatting ──\n\n/** Inline formatting elements */\nexport const FR_INLINE_ELEMENTS = new Set([\n \"I\", // Italic\n \"B\", // Bold\n \"E\", // Emphasis (type varies by T attribute)\n \"SU\", // Superscript / footnote marker\n \"FR\", // Fraction\n \"AC\", // Accent/diacritical\n]);\n\n/**\n * Map from E element T attribute to InlineType.\n * Duplicated from eCFR — source packages must not import each other.\n */\nexport const FR_EMPHASIS_MAP: Readonly<Record<string, InlineType>> = {\n \"01\": \"bold\",\n \"02\": \"italic\",\n \"03\": \"italic\", // bold italic in print — FR uses T=\"03\" for case names, citations, and publication titles which render as italic\n \"04\": \"italic\", // italic in headings\n \"05\": \"italic\", // small caps — render as italic\n \"51\": \"sub\", // subscript\n \"52\": 
\"sub\", // subscript\n \"54\": \"sub\", // subscript (math)\n \"7462\": \"italic\", // special terms (et seq., De minimis)\n};\n\n// ── Regulatory text elements ──\n\n/** Regulatory text amendment elements (within SUPLINF) */\nexport const FR_REGTEXT_ELEMENTS = new Set([\n \"REGTEXT\", // Regulatory text container (attrs: TITLE, PART)\n \"AMDPAR\", // Amendment instruction paragraph\n \"SECTION\", // Section container\n \"SECTNO\", // Section number designation\n \"PART\", // Part container within REGTEXT\n \"AUTH\", // Authority citation in REGTEXT\n]);\n\n/** LSTSUB — List of subjects (CFR parts affected) */\nexport const FR_LSTSUB_ELEMENT = \"LSTSUB\";\n\n// ── Signature block ──\n\n/** Signature block elements */\nexport const FR_SIGNATURE_ELEMENTS = new Set([\n \"SIG\", // Signature block container\n \"NAME\", // Signer name\n \"TITLE\", // Signer title\n \"DATED\", // Date of signature\n]);\n\n// ── Presidential document subtypes ──\n\n/** Presidential document subtype containers */\nexport const FR_PRESIDENTIAL_SUBTYPES = new Set([\n \"EXECORD\", // Executive Order\n \"PRMEMO\", // Presidential Memorandum\n \"PROCLA\", // Proclamation\n \"DETERM\", // Presidential Determination\n \"PRNOTICE\", // Presidential Notice\n \"PRORDER\", // Presidential Order\n]);\n\n/** Presidential document metadata elements */\nexport const FR_PRESIDENTIAL_META_ELEMENTS = new Set([\n \"PSIG\", // Presidential signature (initials)\n \"PLACE\", // Place of issuance\n \"TITLE3\", // CFR Title 3 marker\n \"PRES\", // President name\n]);\n\n// ── Note elements ──\n\n/** Footnote and editorial note elements */\nexport const FR_NOTE_ELEMENTS = new Set([\n \"FTNT\", // Footnote\n \"EDNOTE\", // Editorial note\n \"OLNOTE1\", // Overlay note\n]);\n\n/** Footnote reference marker */\nexport const FR_FTREF_ELEMENT = \"FTREF\";\n\n// ── Block elements ──\n\n/** Block-level content wrappers */\nexport const FR_BLOCK_ELEMENTS = new Set([\n \"EXTRACT\", // Extracted/quoted text\n \"EXAMPLE\", 
// Illustrative example\n]);\n\n// ── Table elements (GPOTABLE format) ──\n\n/** GPOTABLE elements */\nexport const FR_TABLE_ELEMENTS = new Set([\n \"GPOTABLE\", // Table root\n \"TTITLE\", // Table title\n \"BOXHD\", // Header box container\n \"CHED\", // Column header entry (attrs: H for level)\n \"ROW\", // Data row (attrs: RUL for horizontal rules)\n \"ENT\", // Cell entry (attrs: I for indent, A for alignment)\n]);\n\n// ── Elements to ignore (skip entire subtree) ──\n\n/** Elements whose entire subtree should be skipped */\nexport const FR_IGNORE_ELEMENTS = new Set([\n \"CNTNTS\", // Table of contents in daily issue\n \"GPH\", // Graphics (not available in XML)\n \"GID\", // Graphics ID\n]);\n\n// ── Elements to skip (self only, no subtree) ──\n\n/** Self-contained elements to skip — metadata extracted elsewhere or irrelevant */\nexport const FR_SKIP_ELEMENTS = new Set([\n \"PRTPAGE\", // Page number reference (attrs: P for page)\n \"STARS\", // Visual separator (****)\n \"FILED\", // Filing info\n \"UNITNAME\", // Section name in daily issue\n \"VOL\", // Volume number (daily issue metadata)\n \"NO\", // Issue number (daily issue metadata)\n \"DATE\", // Date (daily issue level — document dates from preamble)\n \"NEWPART\", // New part container in daily issue\n \"PTITLE\", // Part title in daily issue\n \"PARTNO\", // Part number in daily issue\n \"PNOTICE\", // Part notice text\n]);\n\n// ── Passthrough elements ──\n\n/** Transparent wrappers — pass through without creating frames */\nexport const FR_PASSTHROUGH_ELEMENTS = new Set([\n \"FEDREG\", // Daily issue root element\n \"PREAMB\", // Preamble — children are handled individually\n \"SUPLINF\", // Supplementary information — children are handled individually\n]);\n\n// ── Metadata extraction elements ──\n\n/** FRDOC — Federal Register document citation, e.g., \"[FR Doc. 
2026-06029 ...]\" */\nexport const FR_FRDOC_ELEMENT = \"FRDOC\";\n\n/** BILCOD — Billing code (skip) */\nexport const FR_BILCOD_ELEMENT = \"BILCOD\";\n","/**\n * Federal Register AST Builder — converts SAX events from FR XML into AST nodes.\n *\n * Follows the stack-based pattern from the eCFR builder but adapted for FR's\n * flat, document-centric structure. Each FR document (RULE, NOTICE, PRORULE,\n * PRESDOCU) becomes a single section-level LevelNode emitted via onEmit.\n *\n * FR XML is GPO/SGML-derived with no namespace. It shares inline formatting\n * (E T=\"nn\", SU, FTNT) with eCFR but uses a different document structure:\n * preamble (PREAMB) → supplementary info (SUPLINF) → signature (SIG).\n */\n\nimport type { Attributes } from \"@lexbuild/core\";\nimport type {\n LevelNode,\n ContentNode,\n InlineNode,\n InlineType,\n NoteNode,\n TableNode,\n ASTNode,\n AncestorInfo,\n EmitContext,\n} from \"@lexbuild/core\";\nimport {\n FR_DOCUMENT_ELEMENTS,\n FR_SECTION_CONTAINERS,\n FR_DOCUMENT_TYPE_MAP,\n FR_PREAMBLE_SECTIONS,\n FR_PREAMBLE_META_ELEMENTS,\n FR_CONTENT_ELEMENTS,\n FR_HEADING_ELEMENT,\n FR_HD_SOURCE_TO_DEPTH,\n FR_INLINE_ELEMENTS,\n FR_EMPHASIS_MAP,\n FR_REGTEXT_ELEMENTS,\n FR_LSTSUB_ELEMENT,\n FR_SIGNATURE_ELEMENTS,\n FR_PRESIDENTIAL_SUBTYPES,\n FR_PRESIDENTIAL_META_ELEMENTS,\n FR_NOTE_ELEMENTS,\n FR_FTREF_ELEMENT,\n FR_BLOCK_ELEMENTS,\n FR_TABLE_ELEMENTS,\n FR_IGNORE_ELEMENTS,\n FR_SKIP_ELEMENTS,\n FR_PASSTHROUGH_ELEMENTS,\n FR_FRDOC_ELEMENT,\n FR_BILCOD_ELEMENT,\n} from \"./fr-elements.js\";\n\n/** Options for configuring the FR AST builder */\nexport interface FrASTBuilderOptions {\n /** Callback when a completed document node is ready */\n onEmit: (node: LevelNode, context: EmitContext) => void | Promise<void>;\n}\n\n/** Metadata extracted from the FR document XML during parsing */\nexport interface FrDocumentXmlMeta {\n /** Document type element name (RULE, NOTICE, etc.) 
*/\n documentType: string;\n /** Normalized document type (rule, proposed_rule, etc.) */\n documentTypeNormalized: string;\n /** Agency name from AGENCY element */\n agency?: string | undefined;\n /** Sub-agency name from SUBAGY element */\n subAgency?: string | undefined;\n /** Subject/title from SUBJECT element */\n subject?: string | undefined;\n /** CFR citation from CFR element */\n cfrCitation?: string | undefined;\n /** Regulation Identifier Number from RIN element */\n rin?: string | undefined;\n /** FR document number extracted from FRDOC text */\n documentNumber?: string | undefined;\n /** Publication date inferred from FRDOC filing date (YYYY-MM-DD) */\n publicationDate?: string | undefined;\n}\n\n/** Frame kinds for the stack */\ntype FrameKind =\n | \"document\"\n | \"content\"\n | \"inline\"\n | \"heading\"\n | \"preambleSection\"\n | \"preambleMeta\"\n | \"note\"\n | \"signature\"\n | \"signatureField\"\n | \"table\"\n | \"tableHeader\"\n | \"tableRow\"\n | \"tableCell\"\n | \"block\"\n | \"regtext\"\n | \"frdoc\"\n | \"ignore\";\n\n/** A stack frame tracking an in-progress element */\ninterface StackFrame {\n kind: FrameKind;\n elementName: string;\n node?: ASTNode;\n textBuffer: string;\n /** For GPOTABLE collection */\n headers?: string[][];\n rows?: string[][];\n currentRow?: string[];\n headerLevel?: number;\n}\n\n/**\n * Federal Register AST Builder.\n *\n * Consumes SAX events and produces LexBuild AST nodes. 
Each FR document\n * (RULE, NOTICE, PRORULE, PRESDOCU) is emitted as a single section-level\n * LevelNode via the onEmit callback.\n */\nexport class FrASTBuilder {\n private readonly options: FrASTBuilderOptions;\n private readonly stack: StackFrame[] = [];\n /** Depth inside fully-ignored elements (CNTNTS, GPH) */\n private ignoredContainerDepth = 0;\n /** Metadata extracted from current document */\n private currentDocMeta: FrDocumentXmlMeta = {\n documentType: \"\",\n documentTypeNormalized: \"\",\n };\n /** All document metadata collected during parsing */\n private readonly documentMetas: FrDocumentXmlMeta[] = [];\n\n constructor(options: FrASTBuilderOptions) {\n this.options = options;\n }\n\n /** Get metadata for all documents parsed so far */\n getDocumentMetas(): readonly FrDocumentXmlMeta[] {\n return this.documentMetas;\n }\n\n /** Handle SAX open element */\n onOpenElement(name: string, attrs: Attributes): void {\n // Track ignored containers (skip entire subtree)\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth++;\n return;\n }\n\n // Full-subtree ignore elements (CNTNTS, GPH, GID)\n if (FR_IGNORE_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Self-contained skip elements\n if (FR_SKIP_ELEMENTS.has(name)) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Transparent pass-through wrappers (FEDREG, PREAMB, SUPLINF)\n if (FR_PASSTHROUGH_ELEMENTS.has(name)) {\n return;\n }\n\n // Section containers (RULES, PRORULES, NOTICES, PRESDOCS) — pass through\n if (FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Document elements (RULE, NOTICE, PRORULE, PRESDOCU) → open document-level node\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.openDocument(name);\n return;\n }\n\n // Presidential document subtypes (EXECORD, PRMEMO, etc.) 
— pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Presidential metadata (PSIG, PLACE, TITLE3, PRES)\n if (FR_PRESIDENTIAL_META_ELEMENTS.has(name)) {\n // PSIG and PLACE contain text we want to capture as content\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.openContent(name);\n return;\n }\n // TITLE3, PRES — skip\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble metadata elements (AGENCY, SUBAGY, CFR, SUBJECT, RIN, DEPDOC)\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"preambleMeta\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Preamble sections (AGY, ACT, SUM, DATES, EFFDATE, ADD, FURINF)\n if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.stack.push({ kind: \"preambleSection\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Heading element (HD) — level from SOURCE attribute\n if (name === FR_HEADING_ELEMENT) {\n this.openHeading(name, attrs);\n return;\n }\n\n // Content elements (P, FP)\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.openContent(name);\n return;\n }\n\n // Inline elements (I, B, E, SU, FR, AC)\n if (FR_INLINE_ELEMENTS.has(name)) {\n this.openInline(name, attrs);\n return;\n }\n\n // Footnote reference marker — FTREF is empty and follows <SU>N</SU>.\n // Convert the preceding SU (rendered as sup) to a footnoteRef.\n if (name === FR_FTREF_ELEMENT) {\n const parentFrame = this.stack[this.stack.length - 1];\n if (parentFrame?.kind === \"content\" && parentFrame.node?.type === \"content\") {\n const contentNode = parentFrame.node as ContentNode;\n // Find the last sup child and convert it to footnoteRef\n for (let i = contentNode.children.length - 1; i >= 0; i--) {\n const child = contentNode.children[i];\n if (child?.type === \"inline\" && (child as InlineNode).inlineType === \"sup\") {\n (child as InlineNode).inlineType = \"footnoteRef\";\n break;\n }\n }\n }\n // FTREF is self-closing, push+pop to 
maintain balance\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Note elements (FTNT, EDNOTE, OLNOTE1)\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.openNote(name);\n return;\n }\n\n // REGTEXT and related elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.openRegtext(name, attrs);\n return;\n }\n\n // LSTSUB — List of subjects\n if (name === FR_LSTSUB_ELEMENT) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.openSignature(name);\n return;\n }\n\n // Block elements (EXTRACT, EXAMPLE)\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.stack.push({ kind: \"block\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.openTableElement(name, attrs);\n return;\n }\n\n // FRDOC — extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.stack.push({ kind: \"frdoc\", elementName: name, textBuffer: \"\" });\n return;\n }\n\n // BILCOD — skip\n if (name === FR_BILCOD_ELEMENT) {\n this.ignoredContainerDepth = 1;\n return;\n }\n\n // Unknown elements — push as ignore to maintain stack balance\n this.stack.push({ kind: \"ignore\", elementName: name, textBuffer: \"\" });\n }\n\n /** Handle SAX close element */\n onCloseElement(name: string): void {\n // Track ignored containers\n if (this.ignoredContainerDepth > 0) {\n this.ignoredContainerDepth--;\n return;\n }\n\n // Pass-through elements — no frame to pop\n if (FR_PASSTHROUGH_ELEMENTS.has(name) || FR_SECTION_CONTAINERS.has(name)) {\n return;\n }\n\n // Presidential subtypes — pass through\n if (FR_PRESIDENTIAL_SUBTYPES.has(name)) {\n return;\n }\n\n // Document elements → emit\n if (FR_DOCUMENT_ELEMENTS.has(name)) {\n this.closeDocument(name);\n return;\n }\n\n // Preamble metadata → extract text\n if (FR_PREAMBLE_META_ELEMENTS.has(name)) {\n this.closePreambleMeta(name);\n return;\n }\n\n // Preamble sections → just pop the frame\n 
if (FR_PREAMBLE_SECTIONS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // Heading\n if (name === FR_HEADING_ELEMENT) {\n this.closeHeading(name);\n return;\n }\n\n // Content elements\n if (FR_CONTENT_ELEMENTS.has(name)) {\n this.closeContent(name);\n return;\n }\n\n // Presidential metadata content (PSIG, PLACE)\n if (name === \"PSIG\" || name === \"PLACE\") {\n this.closeContent(name);\n return;\n }\n\n // Inline elements\n if (FR_INLINE_ELEMENTS.has(name) || name === FR_FTREF_ELEMENT) {\n this.closeInline(name);\n return;\n }\n\n // Note elements\n if (FR_NOTE_ELEMENTS.has(name)) {\n this.closeNote(name);\n return;\n }\n\n // REGTEXT elements\n if (FR_REGTEXT_ELEMENTS.has(name)) {\n this.closeRegtext(name);\n return;\n }\n\n // LSTSUB\n if (name === FR_LSTSUB_ELEMENT) {\n this.popFrame(name);\n return;\n }\n\n // Signature block\n if (FR_SIGNATURE_ELEMENTS.has(name)) {\n this.closeSignature(name);\n return;\n }\n\n // Block elements\n if (FR_BLOCK_ELEMENTS.has(name)) {\n this.popFrame(name);\n return;\n }\n\n // GPOTABLE elements\n if (FR_TABLE_ELEMENTS.has(name)) {\n this.closeTableElement(name);\n return;\n }\n\n // FRDOC → extract document number\n if (name === FR_FRDOC_ELEMENT) {\n this.closeFrdoc();\n return;\n }\n\n // Pop any remaining frames (ignore, etc.)\n if (this.stack.length > 0 && this.stack[this.stack.length - 1]?.elementName === name) {\n this.stack.pop();\n }\n }\n\n /** Handle SAX text content */\n onText(text: string): void {\n if (this.ignoredContainerDepth > 0) return;\n\n const frame = this.stack[this.stack.length - 1];\n if (!frame) return;\n\n // Accumulate text in text-collecting frames\n if (\n frame.kind === \"heading\" ||\n frame.kind === \"preambleMeta\" ||\n frame.kind === \"signatureField\" ||\n frame.kind === \"tableCell\" ||\n frame.kind === \"tableHeader\" ||\n frame.kind === \"frdoc\"\n ) {\n frame.textBuffer += text;\n return;\n }\n\n // Content frames → create inline text node\n if (frame.kind === \"content\" && 
frame.node?.type === \"content\") {\n const contentNode = frame.node as ContentNode;\n // Normalize XML indentation whitespace: collapse runs of whitespace to single spaces\n const normalized = text.replace(/\\s+/g, \" \");\n if (normalized && normalized !== \" \") {\n contentNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text: normalized,\n });\n }\n return;\n }\n\n // Inline frames → set text or add child\n if (frame.kind === \"inline\" && frame.node?.type === \"inline\") {\n const inlineNode = frame.node as InlineNode;\n const normalized = text.replace(/\\s+/g, \" \");\n if (inlineNode.children) {\n if (normalized && normalized !== \" \") {\n inlineNode.children.push({\n type: \"inline\",\n inlineType: \"text\",\n text: normalized,\n });\n }\n } else {\n inlineNode.text = (inlineNode.text ?? \"\") + normalized;\n }\n return;\n }\n\n // Note frames with direct text\n if (frame.kind === \"note\" && frame.node?.type === \"note\") {\n frame.textBuffer += text;\n return;\n }\n\n // Document-level, preambleSection, block, regtext — ignore stray text\n }\n\n // ── Private helpers: Document ──\n\n private openDocument(elementName: string): void {\n this.currentDocMeta = {\n documentType: elementName,\n documentTypeNormalized: FR_DOCUMENT_TYPE_MAP[elementName] ?? 
elementName.toLowerCase(),\n };\n\n const node: LevelNode = {\n type: \"level\",\n levelType: \"section\",\n children: [],\n sourceElement: elementName,\n };\n\n this.stack.push({ kind: \"document\", elementName, node, textBuffer: \"\" });\n }\n\n private closeDocument(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"document\" || !frame.node) return;\n\n const levelNode = frame.node as LevelNode;\n\n // Set heading from subject\n if (this.currentDocMeta.subject) {\n levelNode.heading = this.currentDocMeta.subject;\n }\n\n // Set identifier from document number\n if (this.currentDocMeta.documentNumber) {\n levelNode.identifier = `/us/fr/${this.currentDocMeta.documentNumber}`;\n levelNode.numValue = this.currentDocMeta.documentNumber;\n }\n\n // Build emit context\n const ancestors: AncestorInfo[] = [];\n for (const f of this.stack) {\n if (f.kind === \"document\" && f.node?.type === \"level\") {\n const ln = f.node as LevelNode;\n ancestors.push({\n levelType: ln.levelType,\n numValue: ln.numValue,\n heading: ln.heading,\n identifier: ln.identifier,\n });\n }\n }\n\n const context: EmitContext = {\n ancestors,\n documentMeta: {\n dcTitle: this.currentDocMeta.subject,\n dcType: this.currentDocMeta.documentTypeNormalized,\n },\n };\n\n // Save metadata before emitting\n this.documentMetas.push({ ...this.currentDocMeta });\n\n this.options.onEmit(levelNode, context);\n }\n\n // ── Private helpers: Preamble ──\n\n private closePreambleMeta(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"preambleMeta\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n switch (elementName) {\n case \"AGENCY\":\n this.currentDocMeta.agency = text;\n break;\n case \"SUBAGY\":\n this.currentDocMeta.subAgency = text;\n break;\n case \"CFR\":\n this.currentDocMeta.cfrCitation = text;\n break;\n case \"SUBJECT\":\n this.currentDocMeta.subject = text;\n 
break;\n case \"RIN\":\n this.currentDocMeta.rin = text.replace(/^RIN\\s+/i, \"\").trim();\n break;\n case \"DEPDOC\":\n // Department document number — store for potential use\n break;\n }\n }\n\n // ── Private helpers: Heading ──\n\n private openHeading(_elementName: string, attrs: Attributes): void {\n const source = attrs[\"SOURCE\"] ?? \"HD1\";\n const depth = FR_HD_SOURCE_TO_DEPTH[source] ?? 3;\n\n this.stack.push({\n kind: \"heading\",\n elementName: FR_HEADING_ELEMENT,\n textBuffer: \"\",\n headerLevel: depth,\n });\n }\n\n private closeHeading(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"heading\") return;\n\n const headingText = frame.textBuffer.trim();\n if (!headingText) return;\n\n // In preamble sections (AGY, ACT, SUM, etc.), the HD contains the label\n // like \"AGENCY:\", \"ACTION:\", \"SUMMARY:\". We render these as bold labels.\n const parentFrame = this.stack[this.stack.length - 1];\n\n if (parentFrame?.kind === \"preambleSection\") {\n // Create a bold label content node\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n return;\n }\n\n // Outside preamble: render as a bold heading content node\n // The depth from SOURCE attribute determines visual weight\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: headingText,\n },\n ],\n };\n this.addToDocument(contentNode);\n }\n\n // ── Private helpers: Content ──\n\n private openContent(elementName: string): void {\n const node: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [],\n };\n this.stack.push({ kind: \"content\", elementName, node, textBuffer: \"\" });\n }\n\n private closeContent(elementName: string): void {\n const frame = 
this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const contentNode = frame.node as ContentNode;\n\n // Skip empty content nodes\n if (contentNode.children.length === 0) return;\n\n // Add to parent: document, note, or block\n const parent = this.findParentDocument() ?? this.findParentNote();\n if (parent?.node) {\n if (parent.node.type === \"level\") {\n (parent.node as LevelNode).children.push(contentNode);\n } else if (parent.node.type === \"note\") {\n (parent.node as NoteNode).children.push(contentNode);\n }\n }\n }\n\n // ── Private helpers: Inline ──\n\n private openInline(elementName: string, attrs: Attributes): void {\n let inlineType: InlineType = \"text\";\n\n if (elementName === \"I\") {\n inlineType = \"italic\";\n } else if (elementName === \"B\") {\n inlineType = \"bold\";\n } else if (elementName === \"SU\") {\n // SU inside a footnote (FTNT) is the footnote marker, not a generic superscript.\n // Check if we're inside a note frame to determine the correct type.\n const insideFootnote = this.findFrame(\"note\") !== undefined;\n inlineType = insideFootnote ? \"footnoteRef\" : \"sup\";\n } else if (elementName === \"FR\") {\n inlineType = \"text\"; // Fractions render as text\n } else if (elementName === \"E\") {\n const tValue = attrs[\"T\"] ?? \"\";\n inlineType = FR_EMPHASIS_MAP[tValue] ?? 
\"italic\";\n }\n\n const node: InlineNode = {\n type: \"inline\",\n inlineType,\n children: [],\n };\n\n this.stack.push({ kind: \"inline\", elementName, node, textBuffer: \"\" });\n }\n\n private closeInline(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const inlineNode = frame.node as InlineNode;\n\n // For footnoteRef, set text from buffer\n if (inlineNode.inlineType === \"footnoteRef\" && frame.textBuffer) {\n inlineNode.text = frame.textBuffer.trim();\n }\n\n // Find parent to attach to\n const parentFrame = this.stack[this.stack.length - 1];\n if (!parentFrame) return;\n\n if (parentFrame.kind === \"content\" && parentFrame.node?.type === \"content\") {\n (parentFrame.node as ContentNode).children.push(inlineNode);\n } else if (parentFrame.kind === \"inline\" && parentFrame.node?.type === \"inline\") {\n const parentInline = parentFrame.node as InlineNode;\n if (parentInline.children) {\n parentInline.children.push(inlineNode);\n }\n } else if (parentFrame.kind === \"heading\" || parentFrame.kind === \"preambleMeta\") {\n // Inline inside heading or preamble metadata — accumulate text\n if (inlineNode.text) {\n parentFrame.textBuffer += inlineNode.text;\n } else if (inlineNode.children) {\n for (const child of inlineNode.children) {\n if (child.text) parentFrame.textBuffer += child.text;\n }\n }\n }\n }\n\n // ── Private helpers: Notes ──\n\n private openNote(elementName: string): void {\n const noteTypeMap: Record<string, string> = {\n FTNT: \"footnote\",\n EDNOTE: \"editorial\",\n OLNOTE1: \"general\",\n };\n\n const noteType = noteTypeMap[elementName] ?? 
elementName.toLowerCase();\n const node: NoteNode = {\n type: \"note\",\n noteType,\n children: [],\n };\n\n this.stack.push({ kind: \"note\", elementName, node, textBuffer: \"\" });\n }\n\n private closeNote(elementName: string): void {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const noteNode = frame.node as NoteNode;\n\n // If text was collected directly (no child content nodes), create one\n if (frame.textBuffer.trim() && noteNode.children.length === 0) {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text: frame.textBuffer.trim(),\n },\n ],\n };\n noteNode.children.push(contentNode);\n }\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(noteNode);\n }\n }\n\n // ── Private helpers: Regulatory text ──\n\n private openRegtext(elementName: string, attrs: Attributes): void {\n if (elementName === \"REGTEXT\") {\n // REGTEXT container with TITLE and PART attributes\n const title = attrs[\"TITLE\"] ?? \"\";\n const part = attrs[\"PART\"] ?? \"\";\n const label = title && part ? 
`${title} CFR Part ${part}` : \"\";\n\n // Create a bold label if we have CFR reference info\n if (label) {\n const labelNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"bold\",\n text: label,\n },\n ],\n };\n this.addToDocument(labelNode);\n }\n\n this.stack.push({ kind: \"regtext\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AMDPAR\") {\n // Amendment instruction paragraph — render as italic content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\") {\n // Section container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"SECTNO\") {\n // Section number — collect as content\n this.openContent(elementName);\n return;\n }\n\n if (elementName === \"PART\") {\n // Part container within REGTEXT — pass through\n this.stack.push({ kind: \"block\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"AUTH\") {\n // Authority citation in REGTEXT\n this.openNote(elementName);\n return;\n }\n }\n\n private closeRegtext(elementName: string): void {\n if (elementName === \"REGTEXT\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AMDPAR\" || elementName === \"SECTNO\") {\n this.closeContent(elementName);\n return;\n }\n\n if (elementName === \"SECTION\" || elementName === \"PART\") {\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"AUTH\") {\n this.closeNote(elementName);\n return;\n }\n }\n\n // ── Private helpers: Signature block ──\n\n private openSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n // Signature container\n const node: NoteNode = {\n type: \"note\",\n noteType: \"signature\",\n children: [],\n };\n this.stack.push({ kind: \"signature\", elementName, node, textBuffer: \"\" });\n return;\n }\n\n // NAME, TITLE, DATED — collect text\n 
this.stack.push({ kind: \"signatureField\", elementName, textBuffer: \"\" });\n }\n\n private closeSignature(elementName: string): void {\n if (elementName === \"SIG\") {\n const frame = this.popFrame(elementName);\n if (!frame || !frame.node) return;\n\n const sigNode = frame.node as NoteNode;\n\n // Add signature to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(sigNode);\n }\n return;\n }\n\n // NAME, TITLE, DATED fields\n const frame = this.popFrame(elementName);\n if (!frame || frame.kind !== \"signatureField\") return;\n\n const text = frame.textBuffer.trim();\n if (!text) return;\n\n // Add as content to parent signature node\n const sigFrame = this.findFrame(\"signature\");\n if (sigFrame?.node && sigFrame.node.type === \"note\") {\n const contentNode: ContentNode = {\n type: \"content\",\n variant: \"content\",\n children: [\n {\n type: \"inline\",\n inlineType: \"text\",\n text,\n },\n ],\n };\n (sigFrame.node as NoteNode).children.push(contentNode);\n }\n }\n\n // ── Private helpers: GPOTABLE ──\n\n private openTableElement(elementName: string, _attrs: Attributes): void {\n if (elementName === \"GPOTABLE\") {\n this.stack.push({\n kind: \"table\",\n elementName,\n textBuffer: \"\",\n headers: [],\n rows: [],\n currentRow: [],\n });\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — collect text as heading\n this.stack.push({ kind: \"heading\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // Header container — no frame needed, children (CHED) handle themselves\n return;\n }\n\n if (elementName === \"CHED\") {\n // Column header entry\n this.stack.push({ kind: \"tableHeader\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ROW\") {\n const tableFrame = this.findTableFrame();\n if (tableFrame) {\n tableFrame.currentRow = [];\n }\n this.stack.push({ 
kind: \"tableRow\", elementName, textBuffer: \"\" });\n return;\n }\n\n if (elementName === \"ENT\") {\n // Cell entry\n this.stack.push({ kind: \"tableCell\", elementName, textBuffer: \"\" });\n return;\n }\n }\n\n private closeTableElement(elementName: string): void {\n if (elementName === \"GPOTABLE\") {\n this.closeGpoTable();\n return;\n }\n\n if (elementName === \"TTITLE\") {\n // Table title — drop the heading frame (title is informational)\n this.popFrame(elementName);\n return;\n }\n\n if (elementName === \"BOXHD\") {\n // No frame to pop\n return;\n }\n\n if (elementName === \"CHED\") {\n this.closeTableHeader();\n return;\n }\n\n if (elementName === \"ROW\") {\n this.closeTableRow();\n return;\n }\n\n if (elementName === \"ENT\") {\n this.closeTableCell();\n return;\n }\n }\n\n private closeGpoTable(): void {\n const frame = this.popFrame(\"GPOTABLE\");\n if (!frame || frame.kind !== \"table\") return;\n\n const tableNode: TableNode = {\n type: \"table\",\n variant: \"xhtml\", // Reuse the same variant for rendering\n headers: frame.headers ?? [],\n rows: frame.rows ?? 
[],\n };\n\n // Add to parent document\n const parentDoc = this.findParentDocument();\n if (parentDoc?.node && parentDoc.node.type === \"level\") {\n (parentDoc.node as LevelNode).children.push(tableNode);\n }\n }\n\n private closeTableHeader(): void {\n const headerFrame = this.popFrame(\"CHED\");\n if (!headerFrame || headerFrame.kind !== \"tableHeader\") return;\n\n const tableFrame = this.findTableFrame();\n if (!tableFrame) return;\n\n const text = headerFrame.textBuffer.trim();\n\n // GPOTABLE headers are flat — each CHED is one column header.\n // We build a single header row from all CHED elements.\n if (!tableFrame.headers || tableFrame.headers.length === 0) {\n tableFrame.headers = [[]];\n }\n const headerRow = tableFrame.headers[0];\n if (headerRow) {\n headerRow.push(text);\n }\n }\n\n private closeTableRow(): void {\n const rowFrame = this.popFrame(\"ROW\");\n if (!rowFrame) return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.rows?.push([...tableFrame.currentRow]);\n tableFrame.currentRow = [];\n }\n }\n\n private closeTableCell(): void {\n const cellFrame = this.stack.pop();\n if (!cellFrame || cellFrame.kind !== \"tableCell\") return;\n\n const tableFrame = this.findTableFrame();\n if (tableFrame?.currentRow) {\n tableFrame.currentRow.push(cellFrame.textBuffer.trim());\n }\n }\n\n // ── Private helpers: FRDOC ──\n\n private closeFrdoc(): void {\n const frame = this.popFrame(FR_FRDOC_ELEMENT);\n if (!frame || frame.kind !== \"frdoc\") return;\n\n const text = frame.textBuffer.trim();\n // Extract document number from FRDOC text. Formats vary by era:\n // Modern: \"[FR Doc. 2026-06029 Filed 3-27-26; 8:45 am]\"\n // Pre-2009: \"[FR Doc. E8-17594 Filed 7-31-08; 8:45 am]\"\n // Very old: \"[FR Doc. 
00-123 Filed 1-2-00; 8:45 am]\"\n const docMatch = /FR\\s+Doc\\.\\s+([\\w-]+)/i.exec(text);\n if (docMatch) {\n this.currentDocMeta.documentNumber = docMatch[1];\n }\n\n // Extract publication date from filing date (Filed M-D-YY).\n // Publication = filing date + 1 calendar day (FR publishes the morning after).\n const dateMatch = /Filed\\s+(\\d{1,2})-(\\d{1,2})-(\\d{2})\\b/.exec(text);\n if (dateMatch) {\n const [, mmStr, ddStr, yyStr] = dateMatch;\n const mm = parseInt(mmStr ?? \"0\", 10);\n const dd = parseInt(ddStr ?? \"0\", 10);\n const yy = parseInt(yyStr ?? \"0\", 10);\n // 2-digit year: 00-49 → 2000s, 50-99 → 1900s\n const fullYear = yy < 50 ? 2000 + yy : 1900 + yy;\n const filed = new Date(fullYear, mm - 1, dd);\n // Validate — Date constructor silently wraps invalid values (month 13 → next year)\n if (filed.getMonth() !== mm - 1 || filed.getDate() !== dd) {\n return; // Invalid filing date — skip rather than produce wrong date\n }\n // Publication date = next calendar day\n filed.setDate(filed.getDate() + 1);\n const pubYear = filed.getFullYear();\n const pubMonth = String(filed.getMonth() + 1).padStart(2, \"0\");\n const pubDay = String(filed.getDate()).padStart(2, \"0\");\n this.currentDocMeta.publicationDate = `${pubYear}-${pubMonth}-${pubDay}`;\n }\n }\n\n // ── Private helpers: Stack navigation ──\n\n private addToDocument(node: ASTNode): void {\n const docFrame = this.findParentDocument();\n if (docFrame?.node && docFrame.node.type === \"level\") {\n (docFrame.node as LevelNode).children.push(node);\n }\n }\n\n private findParentDocument(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"document\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findParentNote(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"note\" || this.stack[i]?.kind === \"signature\") {\n return this.stack[i];\n }\n }\n return 
undefined;\n }\n\n private findTableFrame(): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === \"table\") {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private findFrame(kind: FrameKind): StackFrame | undefined {\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.kind === kind) {\n return this.stack[i];\n }\n }\n return undefined;\n }\n\n private popFrame(elementName: string): StackFrame | undefined {\n if (this.stack.length === 0) return undefined;\n\n // Find the matching frame (may not be exactly on top)\n for (let i = this.stack.length - 1; i >= 0; i--) {\n if (this.stack[i]?.elementName === elementName) {\n return this.stack.splice(i, 1)[0];\n }\n }\n\n // No matching frame found — warn rather than popping an unrelated frame\n console.warn(\n `FrASTBuilder: no matching frame for closing element </${elementName}>, ` +\n `stack has: [${this.stack.map((f) => f.elementName).join(\", \")}]`,\n );\n return undefined;\n }\n}\n","/**\n * Federal Register frontmatter builder.\n *\n * Constructs FrontmatterData from an emitted FR AST node, its context,\n * and optional JSON metadata from the FederalRegister.gov API.\n */\n\nimport type { LevelNode, EmitContext, FrontmatterData } from \"@lexbuild/core\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\n\n/**\n * Metadata from the FederalRegister.gov API JSON response.\n * Stored as a sidecar `.json` file alongside each `.xml` download.\n */\nexport interface FrDocumentJsonMeta {\n /** FR document number (e.g., \"2026-06029\") */\n document_number: string;\n /** Document type (Rule, Proposed Rule, Notice, Presidential Document) */\n type: string;\n /** Document title */\n title: string;\n /** Publication date (YYYY-MM-DD) */\n publication_date: string;\n /** Full FR citation (e.g., \"91 FR 14523\") */\n citation: string;\n /** FR volume number */\n volume: number;\n /** Start page number */\n start_page: number;\n 
/** End page number */\n end_page: number;\n /** Agencies with hierarchy info */\n agencies: Array<{\n name: string;\n id: number;\n slug: string;\n parent_id?: number | null;\n raw_name?: string;\n }>;\n /** CFR title/part references */\n cfr_references: Array<{ title: number; part: number }>;\n /** Docket identifiers */\n docket_ids: string[];\n /** Regulation Identifier Numbers */\n regulation_id_numbers: string[];\n /** Effective date (YYYY-MM-DD) */\n effective_on?: string | null;\n /** Comment period end date (YYYY-MM-DD) */\n comments_close_on?: string | null;\n /** Action description (e.g., \"Final rule.\") */\n action?: string | null;\n /** Document abstract */\n abstract?: string | null;\n /** Whether the document is significant */\n significant?: boolean | null;\n /** Topics/keywords */\n topics: string[];\n /** URL to full text XML */\n full_text_xml_url: string;\n}\n\n/** Normalize API document type to lowercase snake_case */\nfunction normalizeDocumentType(apiType: string): string {\n const map: Record<string, string> = {\n Rule: \"rule\",\n \"Proposed Rule\": \"proposed_rule\",\n Notice: \"notice\",\n \"Presidential Document\": \"presidential_document\",\n };\n return map[apiType] ?? apiType.toLowerCase().replace(/\\s+/g, \"_\");\n}\n\n/**\n * Build FrontmatterData from an FR document node with optional JSON metadata.\n *\n * If JSON metadata is available (from the API sidecar file), it enriches\n * the frontmatter with structured agency, CFR reference, docket, and\n * date information that isn't available in the XML alone.\n */\nexport function buildFrFrontmatter(\n node: LevelNode,\n _context: EmitContext,\n xmlMeta: FrDocumentXmlMeta,\n jsonMeta?: FrDocumentJsonMeta,\n): FrontmatterData {\n const documentNumber = jsonMeta?.document_number ?? xmlMeta.documentNumber ?? \"\";\n const subject = jsonMeta?.title ?? xmlMeta.subject ?? node.heading ?? \"\";\n const publicationDate = jsonMeta?.publication_date ?? xmlMeta.publicationDate ?? 
\"\";\n const documentType =\n jsonMeta ? normalizeDocumentType(jsonMeta.type) : xmlMeta.documentTypeNormalized;\n\n // Build agencies list\n let agencies: string[] | undefined;\n if (jsonMeta?.agencies && jsonMeta.agencies.length > 0) {\n agencies = jsonMeta.agencies.map((a) => a.name);\n } else if (xmlMeta.agency) {\n agencies = [xmlMeta.agency];\n if (xmlMeta.subAgency) {\n agencies.push(xmlMeta.subAgency);\n }\n }\n\n // Build CFR references list\n let cfrReferences: string[] | undefined;\n if (jsonMeta?.cfr_references && jsonMeta.cfr_references.length > 0) {\n cfrReferences = jsonMeta.cfr_references.map((r) => `${r.title} CFR Part ${r.part}`);\n } else if (xmlMeta.cfrCitation) {\n cfrReferences = [xmlMeta.cfrCitation];\n }\n\n // Build docket IDs list\n let docketIds: string[] | undefined;\n if (jsonMeta?.docket_ids && jsonMeta.docket_ids.length > 0) {\n docketIds = jsonMeta.docket_ids;\n }\n\n // Primary agency for the existing `agency` field\n const primaryAgency =\n agencies && agencies.length > 0 ? agencies[0] : undefined;\n\n // FR citation\n const frCitation = jsonMeta?.citation;\n\n // RIN\n const rin = jsonMeta?.regulation_id_numbers?.[0] ?? xmlMeta.rin;\n\n const fm: FrontmatterData = {\n source: \"fr\",\n legal_status: \"authoritative_unofficial\",\n identifier: node.identifier ?? `/us/fr/${documentNumber}`,\n title: subject,\n title_number: 0, // FR documents don't belong to a USC/CFR title\n title_name: \"Federal Register\",\n section_number: documentNumber,\n section_name: subject,\n positive_law: false,\n currency: publicationDate,\n last_updated: publicationDate,\n\n // Shared optional fields\n agency: primaryAgency,\n\n // FR-specific fields\n document_number: documentNumber || undefined,\n document_type: documentType || undefined,\n fr_citation: frCitation,\n fr_volume: jsonMeta?.volume,\n publication_date: publicationDate || undefined,\n agencies: agencies && agencies.length > 0 ? 
agencies : undefined,\n cfr_references: cfrReferences && cfrReferences.length > 0 ? cfrReferences : undefined,\n docket_ids: docketIds && docketIds.length > 0 ? docketIds : undefined,\n rin: rin || undefined,\n effective_date: jsonMeta?.effective_on ?? undefined,\n comments_close_date: jsonMeta?.comments_close_on ?? undefined,\n fr_action: jsonMeta?.action ?? undefined,\n };\n\n return fm;\n}\n","/**\n * Output path builder for Federal Register directory structure.\n *\n * FR path structure:\n * output/fr/{YYYY}/{MM}/{document_number}.md\n *\n * Downloads path structure:\n * downloads/fr/{YYYY}/{MM}/{document_number}.xml\n * downloads/fr/{YYYY}/{MM}/{document_number}.json\n */\n\nimport { join } from \"node:path\";\n\n/**\n * Build the output file path for an FR document.\n *\n * @param documentNumber - FR document number (e.g., \"2026-06029\")\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param outputRoot - Output root directory (e.g., \"./output\")\n * @returns Full output file path (e.g., \"output/fr/2026/03/2026-06029.md\")\n */\nexport function buildFrOutputPath(\n documentNumber: string,\n publicationDate: string,\n outputRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(outputRoot, \"fr\", year, month, `${documentNumber}.md`);\n}\n\n/**\n * Build the download file path for an FR document XML.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory (e.g., \"./downloads/fr\")\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.xml\")\n */\nexport function buildFrDownloadXmlPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.xml`);\n}\n\n/**\n * Build the download file path for an 
FR document JSON metadata.\n *\n * @param documentNumber - FR document number\n * @param publicationDate - Publication date in YYYY-MM-DD format\n * @param downloadRoot - Download root directory\n * @returns Full download file path (e.g., \"downloads/fr/2026/03/2026-06029.json\")\n */\nexport function buildFrDownloadJsonPath(\n documentNumber: string,\n publicationDate: string,\n downloadRoot: string,\n): string {\n const { year, month } = parseDateComponents(publicationDate);\n return join(downloadRoot, year, month, `${documentNumber}.json`);\n}\n\n/**\n * Build the directory path for a year/month within the FR output structure.\n */\nexport function buildMonthDir(year: string, month: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year, month);\n}\n\n/**\n * Build the directory path for a year.\n */\nexport function buildYearDir(year: string, outputRoot: string): string {\n return join(outputRoot, \"fr\", year);\n}\n\n/**\n * Parse a YYYY-MM-DD date string into year and month components.\n */\nfunction parseDateComponents(date: string): { year: string; month: string } {\n const parts = date.split(\"-\");\n return {\n year: parts[0] || \"0000\",\n month: parts[1] || \"00\",\n };\n}\n","/**\n * Federal Register conversion orchestrator.\n *\n * Discovers downloaded FR XML files, parses them with FrASTBuilder,\n * enriches frontmatter with JSON sidecar metadata, renders via core's\n * renderDocument, and writes structured Markdown output.\n *\n * Processes FR documents in a single streaming pass: parse each XML file,\n * render Markdown, and write output immediately. 
No link pre-registration\n * since FR documents rarely cross-reference each other.\n */\n\nimport { createReadStream, existsSync } from \"node:fs\";\nimport { readFile, readdir, stat } from \"node:fs/promises\";\nimport { join, dirname } from \"node:path\";\nimport {\n XMLParser,\n renderDocument,\n createLinkResolver,\n writeFile,\n mkdir,\n} from \"@lexbuild/core\";\nimport type { LevelNode, EmitContext } from \"@lexbuild/core\";\nimport { FrASTBuilder } from \"./fr-builder.js\";\nimport type { FrDocumentXmlMeta } from \"./fr-builder.js\";\nimport { buildFrFrontmatter } from \"./fr-frontmatter.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport { buildFrOutputPath } from \"./fr-path.js\";\nimport { FR_DOCUMENT_TYPE_KEYS } from \"./fr-elements.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n// ── Public types ──\n\n/** Progress info for conversion callback */\nexport interface FrConvertProgress {\n /** Documents converted so far */\n documentsConverted: number;\n /** XML files processed so far */\n filesProcessed: number;\n /** Total XML files to process */\n totalFiles: number;\n /** Current XML file being processed */\n currentFile: string;\n}\n\n/** Options for converting FR documents */\nexport interface FrConvertOptions {\n /** Path to input file or directory containing .xml/.json files */\n input: string;\n /** Output root directory */\n output: string;\n /** Link style for cross-references */\n linkStyle: \"relative\" | \"canonical\" | \"plaintext\";\n /** Parse only, don't write files */\n dryRun: boolean;\n /** Filter: start date (YYYY-MM-DD) */\n from?: string | undefined;\n /** Filter: end date (YYYY-MM-DD) */\n to?: string | undefined;\n /** Filter: document types */\n types?: FrDocumentType[] | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrConvertProgress) => void) | undefined;\n}\n\n/** Result of a conversion operation */\nexport interface FrConvertResult {\n /** Number of documents 
converted */\n documentsConverted: number;\n /** Paths of written files */\n files: string[];\n /** Total estimated tokens */\n totalTokenEstimate: number;\n /** Peak RSS in bytes */\n peakMemoryBytes: number;\n /** Whether this was a dry run */\n dryRun: boolean;\n}\n\n/** Collected document info during parsing */\ninterface CollectedDoc {\n node: LevelNode;\n context: EmitContext;\n xmlMeta: FrDocumentXmlMeta;\n jsonMeta?: FrDocumentJsonMeta;\n publicationDate: string;\n documentNumber: string;\n}\n\n/** Set of valid FR document type element names for filtering */\nconst FR_DOC_TYPE_SET = new Set<string>(FR_DOCUMENT_TYPE_KEYS);\n\n// ── Public function ──\n\n/**\n * Convert FR XML documents to Markdown.\n *\n * Supports both single-file mode (input is a .xml path) and batch mode\n * (input is a directory containing year/month/doc.xml structure).\n */\nexport async function convertFrDocuments(options: FrConvertOptions): Promise<FrConvertResult> {\n const xmlFiles = await discoverXmlFiles(options.input, options.from, options.to);\n\n let documentsConverted = 0;\n let totalTokenEstimate = 0;\n let peakMemoryBytes = 0;\n\n const linkResolver = createLinkResolver();\n\n // Stream: parse each file, render, and write immediately.\n // FR documents rarely cross-reference each other, so we skip the two-pass\n // link registration that USC/eCFR use. This keeps memory bounded for\n // bulk XML processing (750k+ documents across 9,500+ files).\n let filesProcessed = 0;\n for (const xmlPath of xmlFiles) {\n let collected: CollectedDoc[];\n try {\n collected = await parseXmlFile(xmlPath);\n } catch (err) {\n console.warn(\n `Warning: Failed to parse ${xmlPath}: ${err instanceof Error ? err.message : String(err)}. 
Skipping.`,\n );\n continue;\n }\n\n for (const doc of collected) {\n // Apply type filter\n if (options.types && options.types.length > 0) {\n if (\n !FR_DOC_TYPE_SET.has(doc.xmlMeta.documentType) ||\n !options.types.includes(doc.xmlMeta.documentType as FrDocumentType)\n ) {\n continue;\n }\n }\n\n if (options.dryRun) {\n documentsConverted++;\n continue;\n }\n\n const outputPath = buildFrOutputPath(\n doc.documentNumber,\n doc.publicationDate,\n options.output,\n );\n\n const frontmatter = buildFrFrontmatter(doc.node, doc.context, doc.xmlMeta, doc.jsonMeta);\n\n const markdown = renderDocument(doc.node, frontmatter, {\n headingOffset: 0,\n linkStyle: options.linkStyle,\n resolveLink:\n options.linkStyle === \"relative\"\n ? (id) => linkResolver.resolve(id, outputPath)\n : undefined,\n });\n\n await mkdir(dirname(outputPath), { recursive: true });\n await writeFile(outputPath, markdown, \"utf-8\");\n\n documentsConverted++;\n totalTokenEstimate += Math.round(markdown.length / 4);\n\n // Track memory\n const mem = process.memoryUsage().rss;\n if (mem > peakMemoryBytes) {\n peakMemoryBytes = mem;\n }\n }\n\n filesProcessed++;\n\n options.onProgress?.({\n documentsConverted,\n filesProcessed,\n totalFiles: xmlFiles.length,\n currentFile: xmlPath,\n });\n }\n\n return {\n documentsConverted,\n files: [], // Don't accumulate 750k+ file paths in memory\n totalTokenEstimate,\n peakMemoryBytes,\n dryRun: options.dryRun,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Parse a single XML file and collect document nodes + metadata.\n */\nasync function parseXmlFile(xmlPath: string): Promise<CollectedDoc[]> {\n const collected: CollectedDoc[] = [];\n\n const builder = new FrASTBuilder({\n onEmit: (node, context) => {\n // Snapshot metas at emit time\n const currentMetas = builder.getDocumentMetas();\n const meta = currentMetas[currentMetas.length - 1];\n if (!meta) {\n console.warn(\n `Warning: No XML metadata extracted for emitted document in ${xmlPath}. 
` +\n `Frontmatter will have empty document_type and document_number.`,\n );\n }\n collected.push({\n node,\n context,\n xmlMeta: meta ?? { documentType: \"\", documentTypeNormalized: \"\" },\n publicationDate: \"\",\n documentNumber: meta?.documentNumber ?? \"\",\n });\n },\n });\n\n const parser = new XMLParser({ defaultNamespace: \"\" });\n parser.on(\"openElement\", (name, attrs) => builder.onOpenElement(name, attrs));\n parser.on(\"closeElement\", (name) => builder.onCloseElement(name));\n parser.on(\"text\", (text) => builder.onText(text));\n\n const stream = createReadStream(xmlPath, \"utf-8\");\n await parser.parseStream(stream);\n\n // Try to load JSON sidecar\n const jsonPath = xmlPath.replace(/\\.xml$/, \".json\");\n let jsonMeta: FrDocumentJsonMeta | undefined;\n if (existsSync(jsonPath)) {\n try {\n const raw = await readFile(jsonPath, \"utf-8\");\n jsonMeta = JSON.parse(raw) as FrDocumentJsonMeta;\n } catch (err) {\n console.warn(\n `Warning: Failed to parse JSON sidecar ${jsonPath}: ${err instanceof Error ? err.message : String(err)}. Continuing without enriched metadata.`,\n );\n }\n }\n\n // Enrich collected docs with JSON metadata and publication date\n for (const doc of collected) {\n if (jsonMeta && jsonMeta.document_number === doc.documentNumber) {\n doc.jsonMeta = jsonMeta;\n doc.publicationDate = jsonMeta.publication_date;\n } else {\n // Infer date from file path (downloads/fr/YYYY/MM/doc.xml)\n const inferredDate = inferDateFromPath(xmlPath);\n if (!inferredDate) {\n console.warn(\n `Warning: No publication date for document ${doc.documentNumber || \"(unknown)\"} — ` +\n `no JSON sidecar and path ${xmlPath} has no YYYY/MM/ pattern. 
/**
 * Discover XML files in a directory or return the single file path.
 *
 * - If `input` is a file, it is returned as the only entry (no filtering).
 * - If `input` is a directory, it is walked recursively for `.xml` files,
 *   which are then date-filtered (when `from`/`to` are given) and sorted.
 *
 * @param input - Path to a single .xml file or a directory to walk
 * @param from - Optional inclusive start date (YYYY-MM-DD)
 * @param to - Optional inclusive end date (YYYY-MM-DD)
 * @returns Sorted list of XML file paths
 * @throws If `input` cannot be stat'd or is neither a file nor a directory
 */
async function discoverXmlFiles(
  input: string,
  from?: string,
  to?: string,
): Promise<string[]> {
  let inputStat;
  try {
    inputStat = await stat(input);
  } catch (err) {
    // Re-throw with the path in the message; keep the original as `cause`.
    throw new Error(
      `Cannot access input path "${input}": ${err instanceof Error ? err.message : String(err)}`,
      { cause: err },
    );
  }

  if (inputStat.isFile()) {
    return [input];
  }

  if (!inputStat.isDirectory()) {
    throw new Error(`Input path "${input}" is not a file or directory`);
  }

  // Recursively find all .xml files
  const xmlFiles: string[] = [];
  await walkDir(input, xmlFiles);

  // Apply date range filter based on file path structure (YYYY/MM/)
  let filtered = xmlFiles;
  if (from || to) {
    filtered = xmlFiles.filter((f) => {
      const date = inferDateFromPath(f);
      if (!date) return true; // Can't filter if no date in path
      if (from && date < from) return false;
      // Appending "-32" makes the upper bound lexicographically permissive:
      // a month-granular `to` (e.g. "2026-03") still keeps every day in that
      // month, since no real "DD" compares greater than "32".
      // NOTE(review): per-document paths only encode the month, so
      // inferDateFromPath returns day 01 for them; the `from` comparison has
      // no matching slack, so a document stored under YYYY/MM/ but published
      // after a mid-month `from` date is excluded — confirm this asymmetry
      // is intended for per-document layouts.
      if (to && date > to + "-32") return false; // Month-level comparison
      return true;
    });
  }

  return filtered.sort();
}

/** Recursively walk a directory, collecting .xml file paths into `results` (depth-first). */
async function walkDir(dir: string, results: string[]): Promise<void> {
  const entries = await readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const fullPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      await walkDir(fullPath, results);
    } else if (entry.isFile() && entry.name.endsWith(".xml")) {
      results.push(fullPath);
    }
  }
}
*/\nexport function inferDateFromPath(filePath: string): string {\n // Govinfo bulk: FR-YYYY-MM-DD.xml\n const bulkMatch = /FR-(\\d{4})-(\\d{2})-(\\d{2})\\.xml$/.exec(filePath);\n if (bulkMatch) {\n return `${bulkMatch[1]}-${bulkMatch[2]}-${bulkMatch[3]}`;\n }\n\n // Per-document: YYYY/MM/doc.xml\n const perDocMatch = /(\\d{4})\\/(\\d{2})\\/[^/]+\\.xml$/.exec(filePath);\n if (perDocMatch) {\n return `${perDocMatch[1]}-${perDocMatch[2]}-01`;\n }\n\n return \"\";\n}\n","/**\n * Federal Register API downloader.\n *\n * Downloads FR documents (XML + JSON metadata) from the FederalRegister.gov API.\n * The API provides per-document endpoints, rich JSON metadata, and requires no\n * authentication. Results are paginated (max 200/page) with a 10,000 result cap\n * per query — the downloader auto-chunks by month for large date ranges.\n *\n * API base: https://www.federalregister.gov/api/v1/\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat, writeFile as fsWriteFile } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\nimport { buildFrDownloadXmlPath, buildFrDownloadJsonPath } from \"./fr-path.js\";\nimport type { FrDocumentJsonMeta } from \"./fr-frontmatter.js\";\nimport type { FrDocumentType } from \"./fr-elements.js\";\n\n/** Base URL for the FederalRegister.gov API */\nconst FR_API_BASE = \"https://www.federalregister.gov/api/v1\";\n\n/** Maximum results per page (API max) */\nconst PER_PAGE = 200;\n\n/** Default number of concurrent XML downloads */\nconst DEFAULT_CONCURRENCY = 10;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n/** Fields to request from the API documents endpoint */\nconst API_FIELDS = [\n \"document_number\",\n \"type\",\n \"title\",\n \"publication_date\",\n \"citation\",\n \"volume\",\n 
\"start_page\",\n \"end_page\",\n \"agencies\",\n \"cfr_references\",\n \"docket_ids\",\n \"regulation_id_numbers\",\n \"effective_on\",\n \"comments_close_on\",\n \"action\",\n \"abstract\",\n \"significant\",\n \"topics\",\n \"full_text_xml_url\",\n];\n\n// ── Public types ──\n\n/** Options for downloading FR documents */\nexport interface FrDownloadOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Document types to download. All types if omitted. */\n types?: FrDocumentType[] | undefined;\n /** Maximum number of documents to download (for testing) */\n limit?: number | undefined;\n /** Number of concurrent XML downloads (default 10) */\n concurrency?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrDownloadProgress) => void) | undefined;\n}\n\n/** Progress info for download callback */\nexport interface FrDownloadProgress {\n /** Documents downloaded so far */\n documentsDownloaded: number;\n /** Total documents found across all pages */\n totalDocuments: number;\n /** Current document number being downloaded */\n currentDocument: string;\n /** Current date chunk being processed (YYYY-MM) */\n currentChunk: string;\n}\n\n/** A successfully downloaded FR document */\nexport interface FrDownloadedFile {\n /** Absolute path to the XML file */\n xmlPath: string;\n /** Absolute path to the JSON metadata file */\n jsonPath: string;\n /** Document number */\n documentNumber: string;\n /** Publication date */\n publicationDate: string;\n /** Combined size in bytes (XML + JSON) */\n size: number;\n}\n\n/** A failed download */\nexport interface FrDownloadFailure {\n /** Document number */\n documentNumber: string;\n /** Error message */\n error: string;\n}\n\n/** Result of a download operation */\nexport interface FrDownloadResult {\n /** Number of documents 
downloaded */\n documentsDownloaded: number;\n /** Paths of downloaded files */\n files: FrDownloadedFile[];\n /** Total bytes downloaded */\n totalBytes: number;\n /** Date range covered */\n dateRange: { from: string; to: string };\n /** Documents without XML (pre-2000) */\n skipped: number;\n /** Documents that failed to download */\n failed: FrDownloadFailure[];\n}\n\n/** API listing response */\ninterface FrApiListResponse {\n count: number;\n total_pages: number;\n next_page_url?: string | null;\n /** Can be absent on weekends/holidays when count is 0 */\n results?: FrDocumentJsonMeta[];\n}\n\n// ── Public functions ──\n\n/**\n * Build the API documents listing URL for a date range.\n */\nexport function buildFrApiListUrl(\n from: string,\n to: string,\n page: number,\n types?: FrDocumentType[],\n): string {\n const params = new URLSearchParams();\n params.set(\"conditions[publication_date][gte]\", from);\n params.set(\"conditions[publication_date][lte]\", to);\n params.set(\"per_page\", String(PER_PAGE));\n params.set(\"page\", String(page));\n params.set(\"order\", \"oldest\");\n\n for (const field of API_FIELDS) {\n params.append(\"fields[]\", field);\n }\n\n if (types && types.length > 0) {\n for (const t of types) {\n params.append(\"conditions[type][]\", t);\n }\n }\n\n return `${FR_API_BASE}/documents.json?${params.toString()}`;\n}\n\n/**\n * Download FR documents for a date range.\n *\n * Automatically chunks large date ranges into month-sized windows to stay\n * under the API's 10,000 result cap per query. Within each chunk, document\n * XML files are downloaded concurrently (default 10 at a time).\n */\nexport async function downloadFrDocuments(options: FrDownloadOptions): Promise<FrDownloadResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const concurrency = options.concurrency ?? 
DEFAULT_CONCURRENCY;\n\n const files: FrDownloadedFile[] = [];\n const failed: FrDownloadFailure[] = [];\n let totalBytes = 0;\n let skipped = 0;\n let totalDocumentsFound = 0;\n\n // Break date range into month-sized chunks\n const chunks = buildMonthChunks(options.from, to);\n\n for (const chunk of chunks) {\n if (options.limit !== undefined && files.length >= options.limit) break;\n\n // Phase 1: Collect all document metadata for this chunk (pagination is fast, JSON only)\n const chunkDocs: FrDocumentJsonMeta[] = [];\n let page = 1;\n let hasMore = true;\n\n while (hasMore) {\n const listUrl = buildFrApiListUrl(chunk.from, chunk.to, page, options.types);\n const response = await fetchWithRetry(listUrl);\n const data = (await response.json()) as FrApiListResponse;\n\n if (typeof data.count !== \"number\") {\n throw new Error(\n `Unexpected API response for ${listUrl}: missing or invalid 'count' field. ` +\n `The FederalRegister.gov API may have changed its response format.`,\n );\n }\n\n // Each chunk has its own count — accumulate on the first page of each chunk\n if (page === 1) {\n totalDocumentsFound += data.count;\n }\n\n const results = data.results ?? [];\n\n for (const doc of results) {\n if (!doc.full_text_xml_url) {\n skipped++;\n continue;\n }\n chunkDocs.push(doc);\n }\n\n hasMore = page < (data.total_pages ?? 0);\n page++;\n }\n\n // Apply limit to this chunk\n const remaining = options.limit !== undefined ? 
options.limit - files.length : chunkDocs.length;\n const docsToDownload = chunkDocs.slice(0, remaining);\n const chunkLabel = chunk.from.slice(0, 7);\n\n // Phase 2: Download XML files concurrently\n await downloadPool(docsToDownload, concurrency, options.output, (doc, result, error) => {\n if (result) {\n files.push(result);\n totalBytes += result.size;\n } else if (error) {\n failed.push({ documentNumber: doc.document_number, error });\n }\n options.onProgress?.({\n documentsDownloaded: files.length,\n totalDocuments: totalDocumentsFound,\n currentDocument: doc.document_number,\n currentChunk: chunkLabel,\n });\n });\n }\n\n return {\n documentsDownloaded: files.length,\n files,\n totalBytes,\n dateRange: { from: options.from, to },\n skipped,\n failed,\n };\n}\n\n/**\n * Download a single FR document by document number.\n *\n * Fetches both the JSON metadata and XML full text.\n */\nexport async function downloadSingleFrDocument(\n documentNumber: string,\n output: string,\n): Promise<FrDownloadedFile> {\n // Fetch JSON metadata first to get publication date and XML URL\n const metaUrl = `${FR_API_BASE}/documents/${documentNumber}.json?${new URLSearchParams(API_FIELDS.map((f) => [\"fields[]\", f])).toString()}`;\n const metaResponse = await fetchWithRetry(metaUrl);\n const doc = (await metaResponse.json()) as FrDocumentJsonMeta;\n\n if (!doc.document_number || !doc.publication_date) {\n throw new Error(\n `Invalid API response for document ${documentNumber}: missing document_number or publication_date`,\n );\n }\n\n return downloadSingleDocument(doc, output);\n}\n\n// ── Private helpers ──\n\n/**\n * Download multiple documents concurrently using a worker pool.\n * Workers pull from a shared index, so concurrency is bounded without batching.\n */\nasync function downloadPool(\n docs: FrDocumentJsonMeta[],\n concurrency: number,\n outputDir: string,\n onComplete: (doc: FrDocumentJsonMeta, result: FrDownloadedFile | null, error: string | null) => void,\n): 
Promise<void> {\n let nextIndex = 0;\n\n async function worker(): Promise<void> {\n while (nextIndex < docs.length) {\n const i = nextIndex++;\n const doc = docs[i];\n if (!doc) break;\n try {\n const result = await downloadSingleDocument(doc, outputDir);\n onComplete(doc, result, null);\n } catch (err) {\n onComplete(doc, null, err instanceof Error ? err.message : String(err));\n }\n }\n }\n\n const workerCount = Math.min(concurrency, docs.length);\n await Promise.all(Array.from({ length: workerCount }, () => worker()));\n}\n\nasync function downloadSingleDocument(\n doc: FrDocumentJsonMeta,\n outputDir: string,\n): Promise<FrDownloadedFile> {\n if (!doc.document_number || !doc.publication_date) {\n throw new Error(\n `Invalid document in API response: missing document_number or publication_date`,\n );\n }\n if (!doc.full_text_xml_url) {\n throw new Error(\n `Document ${doc.document_number} has no full_text_xml_url — cannot download XML`,\n );\n }\n\n const xmlPath = buildFrDownloadXmlPath(doc.document_number, doc.publication_date, outputDir);\n const jsonPath = buildFrDownloadJsonPath(doc.document_number, doc.publication_date, outputDir);\n\n // Ensure directory exists\n await mkdir(dirname(xmlPath), { recursive: true });\n\n // Write JSON metadata\n const jsonContent = JSON.stringify(doc, null, 2);\n await fsWriteFile(jsonPath, jsonContent, \"utf-8\");\n\n // Fetch and write XML\n const xmlResponse = await fetchWithRetry(doc.full_text_xml_url);\n if (!xmlResponse.body) {\n throw new Error(`No response body for ${doc.document_number} XML`);\n }\n\n const dest = createWriteStream(xmlPath);\n try {\n await pipeline(Readable.fromWeb(xmlResponse.body as never), dest);\n } catch (err) {\n throw new Error(\n `Failed to write XML for document ${doc.document_number} from ${doc.full_text_xml_url}: ` +\n `${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n // Get file sizes\n const xmlStat = await stat(xmlPath);\n const jsonSize = Buffer.byteLength(jsonContent, \"utf-8\");\n\n return {\n xmlPath,\n jsonPath,\n documentNumber: doc.document_number,\n publicationDate: doc.publication_date,\n size: Number(xmlStat.size) + jsonSize,\n };\n}\n\n/**\n * Break a date range into month-sized chunks.\n * Each chunk covers one calendar month (or partial month at boundaries).\n */\nfunction buildMonthChunks(from: string, to: string): Array<{ from: string; to: string }> {\n const chunks: Array<{ from: string; to: string }> = [];\n\n let current = new Date(from + \"T00:00:00Z\");\n const end = new Date(to + \"T00:00:00Z\");\n\n while (current <= end) {\n const chunkStart = current.toISOString().slice(0, 10);\n\n // End of this month\n const monthEnd = new Date(\n Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 0),\n );\n const chunkEnd = monthEnd <= end ? monthEnd.toISOString().slice(0, 10) : to;\n\n chunks.push({ from: chunkStart, to: chunkEnd });\n\n // Move to first day of next month\n current = new Date(\n Date.UTC(current.getUTCFullYear(), current.getUTCMonth() + 1, 1),\n );\n }\n\n return chunks;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nasync function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n // Network-level error (DNS, TLS, connection reset) — retry\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `Network error for ${url}: ${err instanceof Error ? err.message : String(err)}. ` +\n `Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok) return response;\n\n // Retry on transient HTTP errors\n if ((response.status === 429 || response.status === 503 || response.status === 504) && attempt < MAX_RETRIES) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay = !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n console.warn(\n `HTTP ${response.status} for ${url}. Retrying in ${delay}ms (attempt ${attempt + 1}/${MAX_RETRIES})...`,\n );\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Federal Register govinfo bulk downloader.\n *\n * Downloads complete daily-issue XML files from govinfo.gov. Each file contains\n * all FR documents published on a single day (~150 documents, ~2.4 MB average).\n * This is dramatically faster than the per-document API for historical backfill.\n *\n * URL pattern: https://www.govinfo.gov/content/pkg/FR-{YYYY-MM-DD}/xml/FR-{YYYY-MM-DD}.xml\n *\n * The existing FrASTBuilder handles daily-issue XML natively: FEDREG root is a\n * passthrough, section containers (RULES, NOTICES, etc.) are passthroughs, and\n * individual document elements emit via onEmit. 
No splitter needed.\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport { mkdir, stat } from \"node:fs/promises\";\nimport { dirname, join } from \"node:path\";\nimport { pipeline } from \"node:stream/promises\";\nimport { Readable } from \"node:stream\";\n\n/** Base URL for govinfo FR bulk data */\nconst GOVINFO_BASE = \"https://www.govinfo.gov/content/pkg\";\n\n/** Default number of concurrent downloads */\nconst DEFAULT_CONCURRENCY = 10;\n\n/** Maximum retry attempts for transient errors */\nconst MAX_RETRIES = 2;\n\n/** Base delay between retries (ms) */\nconst RETRY_BASE_DELAY_MS = 2000;\n\n// ── Public types ──\n\n/** Options for downloading FR bulk XML from govinfo */\nexport interface FrGovinfoBulkOptions {\n /** Download directory (e.g., \"./downloads/fr\") */\n output: string;\n /** Start date (YYYY-MM-DD, inclusive) */\n from: string;\n /** End date (YYYY-MM-DD, inclusive). Defaults to today. */\n to?: string | undefined;\n /** Number of concurrent downloads (default 10) */\n concurrency?: number | undefined;\n /** Progress callback */\n onProgress?: ((progress: FrGovinfoProgress) => void) | undefined;\n}\n\n/** Progress info for govinfo download callback */\nexport interface FrGovinfoProgress {\n /** Files downloaded so far */\n downloaded: number;\n /** Total publishing days in date range */\n totalDays: number;\n /** Skipped days (weekends/holidays — 404) */\n skipped: number;\n /** Failed downloads */\n failed: number;\n /** Current date being downloaded */\n currentDate: string;\n}\n\n/** A successfully downloaded bulk file */\nexport interface FrGovinfoDownloadedFile {\n /** Absolute path to the downloaded XML file */\n path: string;\n /** Publication date (YYYY-MM-DD) */\n date: string;\n /** File size in bytes */\n size: number;\n}\n\n/** Result of a govinfo bulk download */\nexport interface FrGovinfoResult {\n /** Number of daily files downloaded */\n filesDownloaded: number;\n /** Downloaded files */\n files: 
FrGovinfoDownloadedFile[];\n /** Total bytes downloaded */\n totalBytes: number;\n /** Date range covered */\n dateRange: { from: string; to: string };\n /** Days skipped (no issue published — weekends/holidays) */\n skipped: number;\n /** Days that failed to download */\n failed: number;\n}\n\n// ── Public functions ──\n\n/**\n * Build the govinfo download URL for a single day's FR issue.\n */\nexport function buildGovinfoFrUrl(date: string): string {\n return `${GOVINFO_BASE}/FR-${date}/xml/FR-${date}.xml`;\n}\n\n/**\n * Build the local file path for a downloaded daily-issue XML.\n * Stored as: {output}/bulk/{YYYY}/FR-{YYYY-MM-DD}.xml\n */\nexport function buildGovinfoBulkPath(date: string, outputDir: string): string {\n const year = date.slice(0, 4);\n return join(outputDir, \"bulk\", year, `FR-${date}.xml`);\n}\n\n/**\n * Download FR daily-issue XML files from govinfo for a date range.\n * Skips weekends/holidays (404 responses) and retries transient errors.\n */\nexport async function downloadFrBulk(options: FrGovinfoBulkOptions): Promise<FrGovinfoResult> {\n const to = options.to ?? new Date().toISOString().slice(0, 10);\n const concurrency = options.concurrency ?? 
DEFAULT_CONCURRENCY;\n\n // Generate all dates in range\n const dates = generateDateRange(options.from, to);\n\n const files: FrGovinfoDownloadedFile[] = [];\n let totalBytes = 0;\n let skipped = 0;\n let failed = 0;\n\n // Download concurrently using a worker pool\n let nextIndex = 0;\n\n async function worker(): Promise<void> {\n while (nextIndex < dates.length) {\n const i = nextIndex++;\n const date = dates[i];\n if (!date) break;\n\n options.onProgress?.({\n downloaded: files.length,\n totalDays: dates.length,\n skipped,\n failed,\n currentDate: date,\n });\n\n const url = buildGovinfoFrUrl(date);\n const filePath = buildGovinfoBulkPath(date, options.output);\n\n try {\n const result = await downloadSingleDay(url, filePath, date);\n if (result) {\n files.push(result);\n totalBytes += result.size;\n } else {\n // null means 404 — no issue published on this date\n skipped++;\n }\n } catch (err) {\n console.warn(`Warning: Failed to download ${date}: ${err instanceof Error ? err.message : String(err)}`);\n failed++;\n }\n }\n }\n\n const workerCount = Math.min(concurrency, dates.length);\n await Promise.all(Array.from({ length: workerCount }, () => worker()));\n\n // Final progress update\n options.onProgress?.({\n downloaded: files.length,\n totalDays: dates.length,\n skipped,\n failed,\n currentDate: \"done\",\n });\n\n return {\n filesDownloaded: files.length,\n files,\n totalBytes,\n dateRange: { from: options.from, to },\n skipped,\n failed,\n };\n}\n\n// ── Private helpers ──\n\n/**\n * Download a single day's FR issue XML. 
Returns null if 404 (no issue).\n */\nasync function downloadSingleDay(\n url: string,\n filePath: string,\n date: string,\n): Promise<FrGovinfoDownloadedFile | null> {\n const response = await fetchWithRetry(url);\n\n if (response.status === 404) {\n return null; // No issue published on this date (weekend/holiday)\n }\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status} for ${url}`);\n }\n\n if (!response.body) {\n throw new Error(`No response body for ${url}`);\n }\n\n await mkdir(dirname(filePath), { recursive: true });\n\n const dest = createWriteStream(filePath);\n await pipeline(Readable.fromWeb(response.body as never), dest);\n\n const fileStat = await stat(filePath);\n\n return {\n path: filePath,\n date,\n size: Number(fileStat.size),\n };\n}\n\n/**\n * Generate all dates (YYYY-MM-DD) in a range, inclusive.\n */\nfunction generateDateRange(from: string, to: string): string[] {\n const dates: string[] = [];\n const current = new Date(from + \"T12:00:00Z\"); // Noon UTC to avoid DST issues\n const end = new Date(to + \"T12:00:00Z\");\n\n while (current <= end) {\n dates.push(current.toISOString().slice(0, 10));\n current.setUTCDate(current.getUTCDate() + 1);\n }\n\n return dates;\n}\n\n/** Fetch with retry on transient HTTP and network errors */\nasync function fetchWithRetry(url: string, attempt = 0): Promise<Response> {\n let response: Response;\n try {\n response = await fetch(url);\n } catch (err) {\n if (attempt < MAX_RETRIES) {\n const delay = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n throw new Error(\n `Network error after ${MAX_RETRIES + 1} attempts for ${url}: ${err instanceof Error ? 
err.message : String(err)}`,\n { cause: err },\n );\n }\n\n if (response.ok || response.status === 404) return response;\n\n if (\n (response.status === 429 || response.status === 503 || response.status === 504) &&\n attempt < MAX_RETRIES\n ) {\n const retryAfter = response.headers.get(\"Retry-After\");\n const parsedRetry = retryAfter ? parseInt(retryAfter, 10) : NaN;\n const delay =\n !isNaN(parsedRetry) && parsedRetry > 0\n ? parsedRetry * 1000\n : RETRY_BASE_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n return fetchWithRetry(url, attempt + 1);\n }\n\n throw new Error(`HTTP ${response.status}: ${response.statusText} for ${url}`);\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n"],"mappings":";AAiBO,IAAM,wBAAwB,CAAC,QAAQ,WAAW,UAAU,UAAU;AAMtE,IAAM,uBAAuB,IAAI,IAAY,qBAAqB;AAGlE,IAAM,wBAAwB,oBAAI,IAAI;AAAA,EAC3C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAGM,IAAM,uBAAyD;AAAA,EACpE,MAAM;AAAA,EACN,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,UAAU;AACZ;AAKO,IAAM,uBAAuB,oBAAI,IAAI;AAAA,EAC1C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,4BAA4B,oBAAI,IAAI;AAAA,EAC/C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,qBAAqB;AAM3B,IAAM,wBAA0D;AAAA,EACrE,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AAAA,EACL,KAAK;AACP;AAKO,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAMM,IAAM,kBAAwD;AAAA,EACnE,MAAM;AAAA,EACN,MAAM;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,MAAM;AAAA;AAAA,EACN,QAAQ;AAAA;AACV;AAKO,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EACzC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,oBAAoB;AAK1B,
IAAM,wBAAwB,oBAAI,IAAI;AAAA,EAC3C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,2BAA2B,oBAAI,IAAI;AAAA,EAC9C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,gCAAgC,oBAAI,IAAI;AAAA,EACnD;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAGM,IAAM,mBAAmB;AAKzB,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,oBAAoB,oBAAI,IAAI;AAAA,EACvC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,qBAAqB,oBAAI,IAAI;AAAA,EACxC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB,oBAAI,IAAI;AAAA,EACtC;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,0BAA0B,oBAAI,IAAI;AAAA,EAC7C;AAAA;AAAA,EACA;AAAA;AAAA,EACA;AAAA;AACF,CAAC;AAKM,IAAM,mBAAmB;AAGzB,IAAM,oBAAoB;;;ACrH1B,IAAM,eAAN,MAAmB;AAAA,EACP;AAAA,EACA,QAAsB,CAAC;AAAA;AAAA,EAEhC,wBAAwB;AAAA;AAAA,EAExB,iBAAoC;AAAA,IAC1C,cAAc;AAAA,IACd,wBAAwB;AAAA,EAC1B;AAAA;AAAA,EAEiB,gBAAqC,CAAC;AAAA,EAEvD,YAAY,SAA8B;AACxC,SAAK,UAAU;AAAA,EACjB;AAAA;AAAA,EAGA,mBAAiD;AAC/C,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,cAAc,MAAc,OAAyB;AAEnD,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,GAAG;AACrC;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,8BAA8B,IAAI,IAAI,GAAG;AAE3C,UAAI,SAAS,UAAU,SAAS,SAAS;AACvC,aAAK,YAAY,IAAI;AACrB;AAAA,MACF;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AACrE;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,MAAM,KAAK,EAAE,MAAM,gBAAgB,aAAa,MAAM,YAAY,GAAG,CAAC
;AAC3E;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,MAAM,KAAK,EAAE,MAAM,mBAAmB,aAAa,MAAM,YAAY,GAAG,CAAC;AAC9E;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,GAAG;AAChC,WAAK,WAAW,MAAM,KAAK;AAC3B;AAAA,IACF;AAIA,QAAI,SAAS,kBAAkB;AAC7B,YAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,UAAI,aAAa,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC3E,cAAM,cAAc,YAAY;AAEhC,iBAAS,IAAI,YAAY,SAAS,SAAS,GAAG,KAAK,GAAG,KAAK;AACzD,gBAAM,QAAQ,YAAY,SAAS,CAAC;AACpC,cAAI,OAAO,SAAS,YAAa,MAAqB,eAAe,OAAO;AAC1E,YAAC,MAAqB,aAAa;AACnC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,YAAY,MAAM,KAAK;AAC5B;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,iBAAiB,MAAM,KAAK;AACjC;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,MAAM,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,wBAAwB;AAC7B;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA;AAAA,EAGA,eAAe,MAAoB;AAEjC,QAAI,KAAK,wBAAwB,GAAG;AAClC,WAAK;AACL;AAAA,IACF;AAGA,QAAI,wBAAwB,IAAI,IAAI,KAAK,sBAAsB,IAAI,IAAI,GAAG;AACxE;AAAA,IACF;AAGA,QAAI,yBAAyB,IAAI,IAAI,GAAG;AACtC;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,cAAc,IAAI;AACvB;AAAA,IACF;AAGA,QAAI,0BAA0B,IAAI,IAAI,GAAG;AACvC,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,qBAAqB,IAAI,IAAI,GAAG;AAClC,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,SAAS,oBAAoB;AAC/B,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,UAAU,SAAS,SAAS;AACvC,WAAK,aAAa,IAAI;AACtB
;AAAA,IACF;AAGA,QAAI,mBAAmB,IAAI,IAAI,KAAK,SAAS,kBAAkB;AAC7D,WAAK,YAAY,IAAI;AACrB;AAAA,IACF;AAGA,QAAI,iBAAiB,IAAI,IAAI,GAAG;AAC9B,WAAK,UAAU,IAAI;AACnB;AAAA,IACF;AAGA,QAAI,oBAAoB,IAAI,IAAI,GAAG;AACjC,WAAK,aAAa,IAAI;AACtB;AAAA,IACF;AAGA,QAAI,SAAS,mBAAmB;AAC9B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,sBAAsB,IAAI,IAAI,GAAG;AACnC,WAAK,eAAe,IAAI;AACxB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,SAAS,IAAI;AAClB;AAAA,IACF;AAGA,QAAI,kBAAkB,IAAI,IAAI,GAAG;AAC/B,WAAK,kBAAkB,IAAI;AAC3B;AAAA,IACF;AAGA,QAAI,SAAS,kBAAkB;AAC7B,WAAK,WAAW;AAChB;AAAA,IACF;AAGA,QAAI,KAAK,MAAM,SAAS,KAAK,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC,GAAG,gBAAgB,MAAM;AACpF,WAAK,MAAM,IAAI;AAAA,IACjB;AAAA,EACF;AAAA;AAAA,EAGA,OAAO,MAAoB;AACzB,QAAI,KAAK,wBAAwB,EAAG;AAEpC,UAAM,QAAQ,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAC9C,QAAI,CAAC,MAAO;AAGZ,QACE,MAAM,SAAS,aACf,MAAM,SAAS,kBACf,MAAM,SAAS,oBACf,MAAM,SAAS,eACf,MAAM,SAAS,iBACf,MAAM,SAAS,SACf;AACA,YAAM,cAAc;AACpB;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,aAAa,MAAM,MAAM,SAAS,WAAW;AAC9D,YAAM,cAAc,MAAM;AAE1B,YAAM,aAAa,KAAK,QAAQ,QAAQ,GAAG;AAC3C,UAAI,cAAc,eAAe,KAAK;AACpC,oBAAY,SAAS,KAAK;AAAA,UACxB,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,YAAY,MAAM,MAAM,SAAS,UAAU;AAC5D,YAAM,aAAa,MAAM;AACzB,YAAM,aAAa,KAAK,QAAQ,QAAQ,GAAG;AAC3C,UAAI,WAAW,UAAU;AACvB,YAAI,cAAc,eAAe,KAAK;AACpC,qBAAW,SAAS,KAAK;AAAA,YACvB,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR,CAAC;AAAA,QACH;AAAA,MACF,OAAO;AACL,mBAAW,QAAQ,WAAW,QAAQ,MAAM;AAAA,MAC9C;AACA;AAAA,IACF;AAGA,QAAI,MAAM,SAAS,UAAU,MAAM,MAAM,SAAS,QAAQ;AACxD,YAAM,cAAc;AACpB;AAAA,IACF;AAAA,EAGF;AAAA;AAAA,EAIQ,aAAa,aAA2B;AAC9C,SAAK,iBAAiB;AAAA,MACpB,cAAc;AAAA,MACd,wBAAwB,qBAAqB,WAAW,KAAK,YAAY,YAAY;AAAA,IACvF;AAEA,UAAM,OAAkB;AAAA,MACtB,MAAM;AAAA,MACN,WAAW;AAAA,MACX,UAAU,CAAC;AAAA,MACX,eAAe;AAAA,IACjB;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,cAAc,aAA2B;AAC/C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,cAAc,CAAC,MAAM,KAAM;AAExD,UAAM,YAAY,MAAM;AAGxB,QAAI,KAAK,eAAe,SAAS;AAC/B,gB
AAU,UAAU,KAAK,eAAe;AAAA,IAC1C;AAGA,QAAI,KAAK,eAAe,gBAAgB;AACtC,gBAAU,aAAa,UAAU,KAAK,eAAe,cAAc;AACnE,gBAAU,WAAW,KAAK,eAAe;AAAA,IAC3C;AAGA,UAAM,YAA4B,CAAC;AACnC,eAAW,KAAK,KAAK,OAAO;AAC1B,UAAI,EAAE,SAAS,cAAc,EAAE,MAAM,SAAS,SAAS;AACrD,cAAM,KAAK,EAAE;AACb,kBAAU,KAAK;AAAA,UACb,WAAW,GAAG;AAAA,UACd,UAAU,GAAG;AAAA,UACb,SAAS,GAAG;AAAA,UACZ,YAAY,GAAG;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAEA,UAAM,UAAuB;AAAA,MAC3B;AAAA,MACA,cAAc;AAAA,QACZ,SAAS,KAAK,eAAe;AAAA,QAC7B,QAAQ,KAAK,eAAe;AAAA,MAC9B;AAAA,IACF;AAGA,SAAK,cAAc,KAAK,EAAE,GAAG,KAAK,eAAe,CAAC;AAElD,SAAK,QAAQ,OAAO,WAAW,OAAO;AAAA,EACxC;AAAA;AAAA,EAIQ,kBAAkB,aAA2B;AACnD,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,eAAgB;AAE7C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAEX,YAAQ,aAAa;AAAA,MACnB,KAAK;AACH,aAAK,eAAe,SAAS;AAC7B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,YAAY;AAChC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,cAAc;AAClC;AAAA,MACF,KAAK;AACH,aAAK,eAAe,UAAU;AAC9B;AAAA,MACF,KAAK;AACH,aAAK,eAAe,MAAM,KAAK,QAAQ,YAAY,EAAE,EAAE,KAAK;AAC5D;AAAA,MACF,KAAK;AAEH;AAAA,IACJ;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,cAAsB,OAAyB;AACjE,UAAM,SAAS,MAAM,QAAQ,KAAK;AAClC,UAAM,QAAQ,sBAAsB,MAAM,KAAK;AAE/C,SAAK,MAAM,KAAK;AAAA,MACd,MAAM;AAAA,MACN,aAAa;AAAA,MACb,YAAY;AAAA,MACZ,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAAA,EAEQ,aAAa,aAA2B;AAC9C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SAAS,UAAW;AAExC,UAAM,cAAc,MAAM,WAAW,KAAK;AAC1C,QAAI,CAAC,YAAa;AAIlB,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AAEpD,QAAI,aAAa,SAAS,mBAAmB;AAE3C,YAAMA,eAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM;AAAA,UACR;AAAA,QACF;AAAA,MACF;AACA,WAAK,cAAcA,YAAW;AAC9B;AAAA,IACF;AAIA,UAAM,cAA2B;AAAA,MAC/B,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU;AAAA,QACR;AAAA,UACE,MAAM;AAAA,UACN,YAAY;AAAA,UACZ,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,SAAK,cAAc,WAAW;AAAA,EAChC;AAAA;AAAA,EAIQ,YAAY,aAA2B;AAC7C,UAAM,OAAoB;AAAA,MACxB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,UAAU,CAAC;AAAA,IACb;AACA,SAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACxE;AAAA,EAEQ,aAAa,aAA2B;AAC9C,
UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,cAAc,MAAM;AAG1B,QAAI,YAAY,SAAS,WAAW,EAAG;AAGvC,UAAM,SAAS,KAAK,mBAAmB,KAAK,KAAK,eAAe;AAChE,QAAI,QAAQ,MAAM;AAChB,UAAI,OAAO,KAAK,SAAS,SAAS;AAChC,QAAC,OAAO,KAAmB,SAAS,KAAK,WAAW;AAAA,MACtD,WAAW,OAAO,KAAK,SAAS,QAAQ;AACtC,QAAC,OAAO,KAAkB,SAAS,KAAK,WAAW;AAAA,MACrD;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,WAAW,aAAqB,OAAyB;AAC/D,QAAI,aAAyB;AAE7B,QAAI,gBAAgB,KAAK;AACvB,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,mBAAa;AAAA,IACf,WAAW,gBAAgB,MAAM;AAG/B,YAAM,iBAAiB,KAAK,UAAU,MAAM,MAAM;AAClD,mBAAa,iBAAiB,gBAAgB;AAAA,IAChD,WAAW,gBAAgB,MAAM;AAC/B,mBAAa;AAAA,IACf,WAAW,gBAAgB,KAAK;AAC9B,YAAM,SAAS,MAAM,GAAG,KAAK;AAC7B,mBAAa,gBAAgB,MAAM,KAAK;AAAA,IAC1C;AAEA,UAAM,OAAmB;AAAA,MACvB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,UAAU,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACvE;AAAA,EAEQ,YAAY,aAA2B;AAC7C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,aAAa,MAAM;AAGzB,QAAI,WAAW,eAAe,iBAAiB,MAAM,YAAY;AAC/D,iBAAW,OAAO,MAAM,WAAW,KAAK;AAAA,IAC1C;AAGA,UAAM,cAAc,KAAK,MAAM,KAAK,MAAM,SAAS,CAAC;AACpD,QAAI,CAAC,YAAa;AAElB,QAAI,YAAY,SAAS,aAAa,YAAY,MAAM,SAAS,WAAW;AAC1E,MAAC,YAAY,KAAqB,SAAS,KAAK,UAAU;AAAA,IAC5D,WAAW,YAAY,SAAS,YAAY,YAAY,MAAM,SAAS,UAAU;AAC/E,YAAM,eAAe,YAAY;AACjC,UAAI,aAAa,UAAU;AACzB,qBAAa,SAAS,KAAK,UAAU;AAAA,MACvC;AAAA,IACF,WAAW,YAAY,SAAS,aAAa,YAAY,SAAS,gBAAgB;AAEhF,UAAI,WAAW,MAAM;AACnB,oBAAY,cAAc,WAAW;AAAA,MACvC,WAAW,WAAW,UAAU;AAC9B,mBAAW,SAAS,WAAW,UAAU;AACvC,cAAI,MAAM,KAAM,aAAY,cAAc,MAAM;AAAA,QAClD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,SAAS,aAA2B;AAC1C,UAAM,cAAsC;AAAA,MAC1C,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAEA,UAAM,WAAW,YAAY,WAAW,KAAK,YAAY,YAAY;AACrE,UAAM,OAAiB;AAAA,MACrB,MAAM;AAAA,MACN;AAAA,MACA,UAAU,CAAC;AAAA,IACb;AAEA,SAAK,MAAM,KAAK,EAAE,MAAM,QAAQ,aAAa,MAAM,YAAY,GAAG,CAAC;AAAA,EACrE;AAAA,EAEQ,UAAU,aAA2B;AAC3C,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,CAAC,MAAM,KAAM;AAE3B,UAAM,WAAW,MAAM;AAGvB,QAAI,MAAM,WAAW,KAAK,KAAK,SAAS,SAAS,WAAW,GAAG;AAC7D,YAAM,cAA2B;AAAA,Q
AC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ,MAAM,MAAM,WAAW,KAAK;AAAA,UAC9B;AAAA,QACF;AAAA,MACF;AACA,eAAS,SAAS,KAAK,WAAW;AAAA,IACpC;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,QAAQ;AAAA,IACtD;AAAA,EACF;AAAA;AAAA,EAIQ,YAAY,aAAqB,OAAyB;AAChE,QAAI,gBAAgB,WAAW;AAE7B,YAAM,QAAQ,MAAM,OAAO,KAAK;AAChC,YAAM,OAAO,MAAM,MAAM,KAAK;AAC9B,YAAM,QAAQ,SAAS,OAAO,GAAG,KAAK,aAAa,IAAI,KAAK;AAG5D,UAAI,OAAO;AACT,cAAM,YAAyB;AAAA,UAC7B,MAAM;AAAA,UACN,SAAS;AAAA,UACT,UAAU;AAAA,YACR;AAAA,cACE,MAAM;AAAA,cACN,YAAY;AAAA,cACZ,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AACA,aAAK,cAAc,SAAS;AAAA,MAC9B;AAEA,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,WAAW;AAE7B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,YAAY,WAAW;AAC5B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,SAAS,aAAa,YAAY,GAAG,CAAC;AAC9D;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,aAAa,aAA2B;AAC9C,QAAI,gBAAgB,WAAW;AAC7B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,YAAY,gBAAgB,UAAU;AACxD,WAAK,aAAa,WAAW;AAC7B;AAAA,IACF;AAEA,QAAI,gBAAgB,aAAa,gBAAgB,QAAQ;AACvD,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,UAAU,WAAW;AAC1B;AAAA,IACF;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,aAA2B;AAC/C,QAAI,gBAAgB,OAAO;AAEzB,YAAM,OAAiB;AAAA,QACrB,MAAM;AAAA,QACN,UAAU;AAAA,QACV,UAAU,CAAC;AAAA,MACb;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,MAAM,YAAY,GAAG,CAAC;AACxE;AAAA,IACF;AAGA,SAAK,MAAM,KAAK,EAAE,MAAM,kBAAkB,aAAa,YAAY,GAAG,CAAC;AAAA,EACzE;AAAA,EAEQ,eAAe,aAA2B;AAChD,QAAI,gBAAgB,OAAO;AACzB,YAAMC,SAAQ,KAAK,SAAS,WAAW;AACvC,UAAI,CAACA,UAAS,CAACA,OAAM,KAAM;AAE3B,YAAM,UAAUA,OAAM;AAGtB,YAAM,YAAY,KAAK,mBAAmB;AAC1C,UAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,QAAC,UAAU,KAAmB,SAAS,KAAK,OAAO;AAAA,MACrD;AACA;AAAA,IACF;AAGA,UAAM,QAAQ,KAAK,SAAS,WAAW;AACvC,QAAI,CAAC,SAAS,MAAM,SA
AS,iBAAkB;AAE/C,UAAM,OAAO,MAAM,WAAW,KAAK;AACnC,QAAI,CAAC,KAAM;AAGX,UAAM,WAAW,KAAK,UAAU,WAAW;AAC3C,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,QAAQ;AACnD,YAAM,cAA2B;AAAA,QAC/B,MAAM;AAAA,QACN,SAAS;AAAA,QACT,UAAU;AAAA,UACR;AAAA,YACE,MAAM;AAAA,YACN,YAAY;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,MAAC,SAAS,KAAkB,SAAS,KAAK,WAAW;AAAA,IACvD;AAAA,EACF;AAAA;AAAA,EAIQ,iBAAiB,aAAqB,QAA0B;AACtE,QAAI,gBAAgB,YAAY;AAC9B,WAAK,MAAM,KAAK;AAAA,QACd,MAAM;AAAA,QACN;AAAA,QACA,YAAY;AAAA,QACZ,SAAS,CAAC;AAAA,QACV,MAAM,CAAC;AAAA,QACP,YAAY,CAAC;AAAA,MACf,CAAC;AACD;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,MAAM,KAAK,EAAE,MAAM,WAAW,aAAa,YAAY,GAAG,CAAC;AAChE;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAE1B,WAAK,MAAM,KAAK,EAAE,MAAM,eAAe,aAAa,YAAY,GAAG,CAAC;AACpE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,YAAM,aAAa,KAAK,eAAe;AACvC,UAAI,YAAY;AACd,mBAAW,aAAa,CAAC;AAAA,MAC3B;AACA,WAAK,MAAM,KAAK,EAAE,MAAM,YAAY,aAAa,YAAY,GAAG,CAAC;AACjE;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AAEzB,WAAK,MAAM,KAAK,EAAE,MAAM,aAAa,aAAa,YAAY,GAAG,CAAC;AAClE;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkB,aAA2B;AACnD,QAAI,gBAAgB,YAAY;AAC9B,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,UAAU;AAE5B,WAAK,SAAS,WAAW;AACzB;AAAA,IACF;AAEA,QAAI,gBAAgB,SAAS;AAE3B;AAAA,IACF;AAEA,QAAI,gBAAgB,QAAQ;AAC1B,WAAK,iBAAiB;AACtB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,cAAc;AACnB;AAAA,IACF;AAEA,QAAI,gBAAgB,OAAO;AACzB,WAAK,eAAe;AACpB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,QAAQ,KAAK,SAAS,UAAU;AACtC,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,YAAuB;AAAA,MAC3B,MAAM;AAAA,MACN,SAAS;AAAA;AAAA,MACT,SAAS,MAAM,WAAW,CAAC;AAAA,MAC3B,MAAM,MAAM,QAAQ,CAAC;AAAA,IACvB;AAGA,UAAM,YAAY,KAAK,mBAAmB;AAC1C,QAAI,WAAW,QAAQ,UAAU,KAAK,SAAS,SAAS;AACtD,MAAC,UAAU,KAAmB,SAAS,KAAK,SAAS;AAAA,IACvD;AAAA,EACF;AAAA,EAEQ,mBAAyB;AAC/B,UAAM,cAAc,KAAK,SAAS,MAAM;AACxC,QAAI,CAAC,eAAe,YAAY,SAAS,cAAe;AAExD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,CAAC,WAAY;AAEjB,UAAM,OAAO,YAAY,WAAW,KAAK;AAIzC,QAAI,CAAC,WAAW,WAAW,WAAW,QAAQ,WAAW,GAAG;AAC1D,iBAAW,UAAU,CAAC,CAAC,CAAC;AAAA,IAC1B;AACA,UAAM,YAAY,WAAW,QAAQ,CA
AC;AACtC,QAAI,WAAW;AACb,gBAAU,KAAK,IAAI;AAAA,IACrB;AAAA,EACF;AAAA,EAEQ,gBAAsB;AAC5B,UAAM,WAAW,KAAK,SAAS,KAAK;AACpC,QAAI,CAAC,SAAU;AAEf,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,MAAM,KAAK,CAAC,GAAG,WAAW,UAAU,CAAC;AAChD,iBAAW,aAAa,CAAC;AAAA,IAC3B;AAAA,EACF;AAAA,EAEQ,iBAAuB;AAC7B,UAAM,YAAY,KAAK,MAAM,IAAI;AACjC,QAAI,CAAC,aAAa,UAAU,SAAS,YAAa;AAElD,UAAM,aAAa,KAAK,eAAe;AACvC,QAAI,YAAY,YAAY;AAC1B,iBAAW,WAAW,KAAK,UAAU,WAAW,KAAK,CAAC;AAAA,IACxD;AAAA,EACF;AAAA;AAAA,EAIQ,aAAmB;AACzB,UAAM,QAAQ,KAAK,SAAS,gBAAgB;AAC5C,QAAI,CAAC,SAAS,MAAM,SAAS,QAAS;AAEtC,UAAM,OAAO,MAAM,WAAW,KAAK;AAKnC,UAAM,WAAW,yBAAyB,KAAK,IAAI;AACnD,QAAI,UAAU;AACZ,WAAK,eAAe,iBAAiB,SAAS,CAAC;AAAA,IACjD;AAIA,UAAM,YAAY,wCAAwC,KAAK,IAAI;AACnE,QAAI,WAAW;AACb,YAAM,CAAC,EAAE,OAAO,OAAO,KAAK,IAAI;AAChC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AACpC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AACpC,YAAM,KAAK,SAAS,SAAS,KAAK,EAAE;AAEpC,YAAM,WAAW,KAAK,KAAK,MAAO,KAAK,OAAO;AAC9C,YAAM,QAAQ,IAAI,KAAK,UAAU,KAAK,GAAG,EAAE;AAE3C,UAAI,MAAM,SAAS,MAAM,KAAK,KAAK,MAAM,QAAQ,MAAM,IAAI;AACzD;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM,QAAQ,IAAI,CAAC;AACjC,YAAM,UAAU,MAAM,YAAY;AAClC,YAAM,WAAW,OAAO,MAAM,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAC7D,YAAM,SAAS,OAAO,MAAM,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AACtD,WAAK,eAAe,kBAAkB,GAAG,OAAO,IAAI,QAAQ,IAAI,MAAM;AAAA,IACxE;AAAA,EACF;AAAA;AAAA,EAIQ,cAAc,MAAqB;AACzC,UAAM,WAAW,KAAK,mBAAmB;AACzC,QAAI,UAAU,QAAQ,SAAS,KAAK,SAAS,SAAS;AACpD,MAAC,SAAS,KAAmB,SAAS,KAAK,IAAI;AAAA,IACjD;AAAA,EACF;AAAA,EAEQ,qBAA6C;AACnD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,YAAY;AACtC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,UAAU,KAAK,MAAM,CAAC,GAAG,SAAS,aAAa;AACzE,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,iBAAyC;AAC/C,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,SAAS;AACnC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ
,UAAU,MAAyC;AACzD,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,SAAS,MAAM;AAChC,eAAO,KAAK,MAAM,CAAC;AAAA,MACrB;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA,EAEQ,SAAS,aAA6C;AAC5D,QAAI,KAAK,MAAM,WAAW,EAAG,QAAO;AAGpC,aAAS,IAAI,KAAK,MAAM,SAAS,GAAG,KAAK,GAAG,KAAK;AAC/C,UAAI,KAAK,MAAM,CAAC,GAAG,gBAAgB,aAAa;AAC9C,eAAO,KAAK,MAAM,OAAO,GAAG,CAAC,EAAE,CAAC;AAAA,MAClC;AAAA,IACF;AAGA,YAAQ;AAAA,MACN,yDAAyD,WAAW,kBACnD,KAAK,MAAM,IAAI,CAAC,MAAM,EAAE,WAAW,EAAE,KAAK,IAAI,CAAC;AAAA,IAClE;AACA,WAAO;AAAA,EACT;AACF;;;ACzjCA,SAAS,sBAAsB,SAAyB;AACtD,QAAM,MAA8B;AAAA,IAClC,MAAM;AAAA,IACN,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,yBAAyB;AAAA,EAC3B;AACA,SAAO,IAAI,OAAO,KAAK,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG;AAClE;AASO,SAAS,mBACd,MACA,UACA,SACA,UACiB;AACjB,QAAM,iBAAiB,UAAU,mBAAmB,QAAQ,kBAAkB;AAC9E,QAAM,UAAU,UAAU,SAAS,QAAQ,WAAW,KAAK,WAAW;AACtE,QAAM,kBAAkB,UAAU,oBAAoB,QAAQ,mBAAmB;AACjF,QAAM,eACJ,WAAW,sBAAsB,SAAS,IAAI,IAAI,QAAQ;AAG5D,MAAI;AACJ,MAAI,UAAU,YAAY,SAAS,SAAS,SAAS,GAAG;AACtD,eAAW,SAAS,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EAChD,WAAW,QAAQ,QAAQ;AACzB,eAAW,CAAC,QAAQ,MAAM;AAC1B,QAAI,QAAQ,WAAW;AACrB,eAAS,KAAK,QAAQ,SAAS;AAAA,IACjC;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,UAAU,kBAAkB,SAAS,eAAe,SAAS,GAAG;AAClE,oBAAgB,SAAS,eAAe,IAAI,CAAC,MAAM,GAAG,EAAE,KAAK,aAAa,EAAE,IAAI,EAAE;AAAA,EACpF,WAAW,QAAQ,aAAa;AAC9B,oBAAgB,CAAC,QAAQ,WAAW;AAAA,EACtC;AAGA,MAAI;AACJ,MAAI,UAAU,cAAc,SAAS,WAAW,SAAS,GAAG;AAC1D,gBAAY,SAAS;AAAA,EACvB;AAGA,QAAM,gBACJ,YAAY,SAAS,SAAS,IAAI,SAAS,CAAC,IAAI;AAGlD,QAAM,aAAa,UAAU;AAG7B,QAAM,MAAM,UAAU,wBAAwB,CAAC,KAAK,QAAQ;AAE5D,QAAM,KAAsB;AAAA,IAC1B,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,YAAY,KAAK,cAAc,UAAU,cAAc;AAAA,IACvD,OAAO;AAAA,IACP,cAAc;AAAA;AAAA,IACd,YAAY;AAAA,IACZ,gBAAgB;AAAA,IAChB,cAAc;AAAA,IACd,cAAc;AAAA,IACd,UAAU;AAAA,IACV,cAAc;AAAA;AAAA,IAGd,QAAQ;AAAA;AAAA,IAGR,iBAAiB,kBAAkB;AAAA,IACnC,eAAe,gBAAgB;AAAA,IAC/B,aAAa;AAAA,IACb,WAAW,UAAU;AAAA,IACrB,kBAAkB,mBAAmB;AAAA,IACrC,UAAU,YAAY,SAAS,SAAS,IAAI,WAAW;AAAA,IACvD,gBAAgB,iBAAiB,cAAc,SAAS,IAAI,gBAAgB;AAAA,IAC5E,YAAY,aAAa,UAAU,SAAS,IAAI,YAAY;AAAA,IAC5D
,KAAK,OAAO;AAAA,IACZ,gBAAgB,UAAU,gBAAgB;AAAA,IAC1C,qBAAqB,UAAU,qBAAqB;AAAA,IACpD,WAAW,UAAU,UAAU;AAAA,EACjC;AAEA,SAAO;AACT;;;ACnJA,SAAS,YAAY;AAUd,SAAS,kBACd,gBACA,iBACA,YACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,YAAY,MAAM,MAAM,OAAO,GAAG,cAAc,KAAK;AACnE;AAUO,SAAS,uBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,MAAM;AAChE;AAUO,SAAS,wBACd,gBACA,iBACA,cACQ;AACR,QAAM,EAAE,MAAM,MAAM,IAAI,oBAAoB,eAAe;AAC3D,SAAO,KAAK,cAAc,MAAM,OAAO,GAAG,cAAc,OAAO;AACjE;AAKO,SAAS,cAAc,MAAc,OAAe,YAA4B;AACrF,SAAO,KAAK,YAAY,MAAM,MAAM,KAAK;AAC3C;AAKO,SAAS,aAAa,MAAc,YAA4B;AACrE,SAAO,KAAK,YAAY,MAAM,IAAI;AACpC;AAKA,SAAS,oBAAoB,MAA+C;AAC1E,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,SAAO;AAAA,IACL,MAAM,MAAM,CAAC,KAAK;AAAA,IAClB,OAAO,MAAM,CAAC,KAAK;AAAA,EACrB;AACF;;;AC3EA,SAAS,kBAAkB,kBAAkB;AAC7C,SAAS,UAAU,SAAS,YAAY;AACxC,SAAS,QAAAC,OAAM,eAAe;AAC9B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAqEP,IAAM,kBAAkB,IAAI,IAAY,qBAAqB;AAU7D,eAAsB,mBAAmB,SAAqD;AAC5F,QAAM,WAAW,MAAM,iBAAiB,QAAQ,OAAO,QAAQ,MAAM,QAAQ,EAAE;AAE/E,MAAI,qBAAqB;AACzB,MAAI,qBAAqB;AACzB,MAAI,kBAAkB;AAEtB,QAAM,eAAe,mBAAmB;AAMxC,MAAI,iBAAiB;AACrB,aAAW,WAAW,UAAU;AAC9B,QAAI;AACJ,QAAI;AACF,kBAAY,MAAM,aAAa,OAAO;AAAA,IACxC,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,4BAA4B,OAAO,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC1F;AACA;AAAA,IACF;AAEA,eAAW,OAAO,WAAW;AAE3B,UAAI,QAAQ,SAAS,QAAQ,MAAM,SAAS,GAAG;AAC7C,YACE,CAAC,gBAAgB,IAAI,IAAI,QAAQ,YAAY,KAC7C,CAAC,QAAQ,MAAM,SAAS,IAAI,QAAQ,YAA8B,GAClE;AACA;AAAA,QACF;AAAA,MACF;AAEA,UAAI,QAAQ,QAAQ;AAClB;AACA;AAAA,MACF;AAEA,YAAM,aAAa;AAAA,QACjB,IAAI;AAAA,QACJ,IAAI;AAAA,QACJ,QAAQ;AAAA,MACV;AAEA,YAAM,cAAc,mBAAmB,IAAI,MAAM,IAAI,SAAS,IAAI,SAAS,IAAI,QAAQ;AAEvF,YAAM,WAAW,eAAe,IAAI,MAAM,aAAa;AAAA,QACrD,eAAe;AAAA,QACf,WAAW,QAAQ;AAAA,QACnB,aACE,QAAQ,cAAc,aAClB,CAAC,OAAO,aAAa,QAAQ,IAAI,UAAU,IAC3C;AAAA,MACR,CAAC;AAED,YAAM,MAAM,QAAQ,UAAU,GAAG,EAAE,WAAW,KAAK,CAAC;AACpD,YAAM,UAAU,YAAY,UAAU,OAAO;AAE7C;AACA,4BAAsB,KAAK,MAAM,SAAS,SAAS,CAAC;AAGpD,YAAM,MAAM,QAAQ
,YAAY,EAAE;AAClC,UAAI,MAAM,iBAAiB;AACzB,0BAAkB;AAAA,MACpB;AAAA,IACF;AAEA;AAEA,YAAQ,aAAa;AAAA,MACnB;AAAA,MACA;AAAA,MACA,YAAY,SAAS;AAAA,MACrB,aAAa;AAAA,IACf,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL;AAAA,IACA,OAAO,CAAC;AAAA;AAAA,IACR;AAAA,IACA;AAAA,IACA,QAAQ,QAAQ;AAAA,EAClB;AACF;AAOA,eAAe,aAAa,SAA0C;AACpE,QAAM,YAA4B,CAAC;AAEnC,QAAM,UAAU,IAAI,aAAa;AAAA,IAC/B,QAAQ,CAAC,MAAM,YAAY;AAEzB,YAAM,eAAe,QAAQ,iBAAiB;AAC9C,YAAM,OAAO,aAAa,aAAa,SAAS,CAAC;AACjD,UAAI,CAAC,MAAM;AACT,gBAAQ;AAAA,UACN,8DAA8D,OAAO;AAAA,QAEvE;AAAA,MACF;AACA,gBAAU,KAAK;AAAA,QACb;AAAA,QACA;AAAA,QACA,SAAS,QAAQ,EAAE,cAAc,IAAI,wBAAwB,GAAG;AAAA,QAChE,iBAAiB;AAAA,QACjB,gBAAgB,MAAM,kBAAkB;AAAA,MAC1C,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,QAAM,SAAS,IAAI,UAAU,EAAE,kBAAkB,GAAG,CAAC;AACrD,SAAO,GAAG,eAAe,CAAC,MAAM,UAAU,QAAQ,cAAc,MAAM,KAAK,CAAC;AAC5E,SAAO,GAAG,gBAAgB,CAAC,SAAS,QAAQ,eAAe,IAAI,CAAC;AAChE,SAAO,GAAG,QAAQ,CAAC,SAAS,QAAQ,OAAO,IAAI,CAAC;AAEhD,QAAM,SAAS,iBAAiB,SAAS,OAAO;AAChD,QAAM,OAAO,YAAY,MAAM;AAG/B,QAAM,WAAW,QAAQ,QAAQ,UAAU,OAAO;AAClD,MAAI;AACJ,MAAI,WAAW,QAAQ,GAAG;AACxB,QAAI;AACF,YAAM,MAAM,MAAM,SAAS,UAAU,OAAO;AAC5C,iBAAW,KAAK,MAAM,GAAG;AAAA,IAC3B,SAAS,KAAK;AACZ,cAAQ;AAAA,QACN,yCAAyC,QAAQ,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxG;AAAA,IACF;AAAA,EACF;AAGA,aAAW,OAAO,WAAW;AAC3B,QAAI,YAAY,SAAS,oBAAoB,IAAI,gBAAgB;AAC/D,UAAI,WAAW;AACf,UAAI,kBAAkB,SAAS;AAAA,IACjC,OAAO;AAEL,YAAM,eAAe,kBAAkB,OAAO;AAC9C,UAAI,CAAC,cAAc;AACjB,gBAAQ;AAAA,UACN,6CAA6C,IAAI,kBAAkB,WAAW,oCAChD,OAAO;AAAA,QACvC;AAAA,MACF;AACA,UAAI,kBAAkB;AAAA,IACxB;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,iBACb,OACA,MACA,IACmB;AACnB,MAAI;AACJ,MAAI;AACF,gBAAY,MAAM,KAAK,KAAK;AAAA,EAC9B,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,6BAA6B,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxF,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,UAAU,OAAO,GAAG;AACtB,WAAO,CAAC,KAAK;AAAA,EACf;AAEA,MAAI,CAAC,UAAU,YAAY,GAAG;AAC5B,UAAM,IAAI,MAAM,eAAe,KAAK,8BAA8B;AAAA,EACpE;AAGA,QAAM,WAAqB,CAAC;AAC5B,QAAM,QAAQ,OAAO,QAAQ;AAG7B,MAAI,WAAW;AACf,MAAI,QAAQ,IAAI;AACd,eAAW,SAAS,OAAO,CAAC,MAAM;AAChC,YAAM,OA
AO,kBAAkB,CAAC;AAChC,UAAI,CAAC,KAAM,QAAO;AAClB,UAAI,QAAQ,OAAO,KAAM,QAAO;AAChC,UAAI,MAAM,OAAO,KAAK,MAAO,QAAO;AACpC,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAEA,SAAO,SAAS,KAAK;AACvB;AAGA,eAAe,QAAQ,KAAa,SAAkC;AACpE,QAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,aAAW,SAAS,SAAS;AAC3B,UAAM,WAAWC,MAAK,KAAK,MAAM,IAAI;AACrC,QAAI,MAAM,YAAY,GAAG;AACvB,YAAM,QAAQ,UAAU,OAAO;AAAA,IACjC,WAAW,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACxD,cAAQ,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AACF;AAUO,SAAS,kBAAkB,UAA0B;AAE1D,QAAM,YAAY,mCAAmC,KAAK,QAAQ;AAClE,MAAI,WAAW;AACb,WAAO,GAAG,UAAU,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC;AAAA,EACxD;AAGA,QAAM,cAAc,gCAAgC,KAAK,QAAQ;AACjE,MAAI,aAAa;AACf,WAAO,GAAG,YAAY,CAAC,CAAC,IAAI,YAAY,CAAC,CAAC;AAAA,EAC5C;AAEA,SAAO;AACT;;;AC3UA,SAAS,yBAAyB;AAClC,SAAS,SAAAC,QAAO,QAAAC,OAAM,aAAa,mBAAmB;AACtD,SAAS,WAAAC,gBAAe;AACxB,SAAS,gBAAgB;AACzB,SAAS,gBAAgB;AAMzB,IAAM,cAAc;AAGpB,IAAM,WAAW;AAGjB,IAAM,sBAAsB;AAG5B,IAAM,cAAc;AAGpB,IAAM,sBAAsB;AAG5B,IAAM,aAAa;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAsFO,SAAS,kBACd,MACA,IACA,MACA,OACQ;AACR,QAAM,SAAS,IAAI,gBAAgB;AACnC,SAAO,IAAI,qCAAqC,IAAI;AACpD,SAAO,IAAI,qCAAqC,EAAE;AAClD,SAAO,IAAI,YAAY,OAAO,QAAQ,CAAC;AACvC,SAAO,IAAI,QAAQ,OAAO,IAAI,CAAC;AAC/B,SAAO,IAAI,SAAS,QAAQ;AAE5B,aAAW,SAAS,YAAY;AAC9B,WAAO,OAAO,YAAY,KAAK;AAAA,EACjC;AAEA,MAAI,SAAS,MAAM,SAAS,GAAG;AAC7B,eAAW,KAAK,OAAO;AACrB,aAAO,OAAO,sBAAsB,CAAC;AAAA,IACvC;AAAA,EACF;AAEA,SAAO,GAAG,WAAW,mBAAmB,OAAO,SAAS,CAAC;AAC3D;AASA,eAAsB,oBAAoB,SAAuD;AAC/F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,cAAc,QAAQ,eAAe;AAE3C,QAAM,QAA4B,CAAC;AACnC,QAAM,SAA8B,CAAC;AACrC,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,sBAAsB;AAG1B,QAAM,SAAS,iBAAiB,QAAQ,MAAM,EAAE;AAEhD,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,UAAU,UAAa,MAAM,UAAU,QAAQ,MAAO;AAGlE,UAAM,YAAkC,CAAC;AACzC,QAAI,OAAO;AACX,QAAI,UAAU;AAEd,WAAO,SAAS;AACd,YAAM,UAAU,kBAAkB,MAAM,MAAM,MAAM
,IAAI,MAAM,QAAQ,KAAK;AAC3E,YAAM,WAAW,MAAM,eAAe,OAAO;AAC7C,YAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,UAAI,OAAO,KAAK,UAAU,UAAU;AAClC,cAAM,IAAI;AAAA,UACR,+BAA+B,OAAO;AAAA,QAExC;AAAA,MACF;AAGA,UAAI,SAAS,GAAG;AACd,+BAAuB,KAAK;AAAA,MAC9B;AAEA,YAAM,UAAU,KAAK,WAAW,CAAC;AAEjC,iBAAW,OAAO,SAAS;AACzB,YAAI,CAAC,IAAI,mBAAmB;AAC1B;AACA;AAAA,QACF;AACA,kBAAU,KAAK,GAAG;AAAA,MACpB;AAEA,gBAAU,QAAQ,KAAK,eAAe;AACtC;AAAA,IACF;AAGA,UAAM,YAAY,QAAQ,UAAU,SAAY,QAAQ,QAAQ,MAAM,SAAS,UAAU;AACzF,UAAM,iBAAiB,UAAU,MAAM,GAAG,SAAS;AACnD,UAAM,aAAa,MAAM,KAAK,MAAM,GAAG,CAAC;AAGxC,UAAM,aAAa,gBAAgB,aAAa,QAAQ,QAAQ,CAAC,KAAK,QAAQ,UAAU;AACtF,UAAI,QAAQ;AACV,cAAM,KAAK,MAAM;AACjB,sBAAc,OAAO;AAAA,MACvB,WAAW,OAAO;AAChB,eAAO,KAAK,EAAE,gBAAgB,IAAI,iBAAiB,MAAM,CAAC;AAAA,MAC5D;AACA,cAAQ,aAAa;AAAA,QACnB,qBAAqB,MAAM;AAAA,QAC3B,gBAAgB;AAAA,QAChB,iBAAiB,IAAI;AAAA,QACrB,cAAc;AAAA,MAChB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,qBAAqB,MAAM;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAsB,yBACpB,gBACA,QAC2B;AAE3B,QAAM,UAAU,GAAG,WAAW,cAAc,cAAc,SAAS,IAAI,gBAAgB,WAAW,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC;AACzI,QAAM,eAAe,MAAM,eAAe,OAAO;AACjD,QAAM,MAAO,MAAM,aAAa,KAAK;AAErC,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR,qCAAqC,cAAc;AAAA,IACrD;AAAA,EACF;AAEA,SAAO,uBAAuB,KAAK,MAAM;AAC3C;AAQA,eAAe,aACb,MACA,aACA,WACA,YACe;AACf,MAAI,YAAY;AAEhB,iBAAe,SAAwB;AACrC,WAAO,YAAY,KAAK,QAAQ;AAC9B,YAAM,IAAI;AACV,YAAM,MAAM,KAAK,CAAC;AAClB,UAAI,CAAC,IAAK;AACV,UAAI;AACF,cAAM,SAAS,MAAM,uBAAuB,KAAK,SAAS;AAC1D,mBAAW,KAAK,QAAQ,IAAI;AAAA,MAC9B,SAAS,KAAK;AACZ,mBAAW,KAAK,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACxE;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,KAAK,IAAI,aAAa,KAAK,MAAM;AACrD,QAAM,QAAQ,IAAI,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM,OAAO,CAAC,CAAC;AACvE;AAEA,eAAe,uBACb,KACA,WAC2B;AAC3B,MAAI,CAAC,IAAI,mBAAmB,CAAC,IAAI,kBAAkB;AACjD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,IAAI,mBAAmB;AAC1B,UAAM,IAAI;AAAA,MACR,YAAY,IAAI,eAAe;AAAA,IACjC;AAAA,EACF;AAEA,QAA
M,UAAU,uBAAuB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAC3F,QAAM,WAAW,wBAAwB,IAAI,iBAAiB,IAAI,kBAAkB,SAAS;AAG7F,QAAMC,OAAMC,SAAQ,OAAO,GAAG,EAAE,WAAW,KAAK,CAAC;AAGjD,QAAM,cAAc,KAAK,UAAU,KAAK,MAAM,CAAC;AAC/C,QAAM,YAAY,UAAU,aAAa,OAAO;AAGhD,QAAM,cAAc,MAAM,eAAe,IAAI,iBAAiB;AAC9D,MAAI,CAAC,YAAY,MAAM;AACrB,UAAM,IAAI,MAAM,wBAAwB,IAAI,eAAe,MAAM;AAAA,EACnE;AAEA,QAAM,OAAO,kBAAkB,OAAO;AACtC,MAAI;AACF,UAAM,SAAS,SAAS,QAAQ,YAAY,IAAa,GAAG,IAAI;AAAA,EAClE,SAAS,KAAK;AACZ,UAAM,IAAI;AAAA,MACR,oCAAoC,IAAI,eAAe,SAAS,IAAI,iBAAiB,KAChF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MACrD,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAGA,QAAM,UAAU,MAAMC,MAAK,OAAO;AAClC,QAAM,WAAW,OAAO,WAAW,aAAa,OAAO;AAEvD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,gBAAgB,IAAI;AAAA,IACpB,iBAAiB,IAAI;AAAA,IACrB,MAAM,OAAO,QAAQ,IAAI,IAAI;AAAA,EAC/B;AACF;AAMA,SAAS,iBAAiB,MAAc,IAAiD;AACvF,QAAM,SAA8C,CAAC;AAErD,MAAI,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC1C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,aAAa,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE;AAGpD,UAAM,WAAW,IAAI;AAAA,MACnB,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC;AAAA,IACjE;AACA,UAAM,WAAW,YAAY,MAAM,SAAS,YAAY,EAAE,MAAM,GAAG,EAAE,IAAI;AAEzE,WAAO,KAAK,EAAE,MAAM,YAAY,IAAI,SAAS,CAAC;AAG9C,cAAU,IAAI;AAAA,MACZ,KAAK,IAAI,QAAQ,eAAe,GAAG,QAAQ,YAAY,IAAI,GAAG,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,SAAO;AACT;AAGA,eAAe,eAAe,KAAa,UAAU,GAAsB;AACzE,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AAEZ,QAAI,UAAU,aAAa;AACzB,YAAM,QAAQ,sBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,cAAQ;AAAA,QACN,qBAAqB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,iBAC5D,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,MACjE;AACA,YAAM,MAAM,KAAK;AACjB,aAAO,eAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuB,cAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,GAAI,QAAO;AAGxB,OAAK,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAAQ,UAAU,aAAa;AAC5G,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QAAQ,CAAC,MAAM,WAAW,KAAK,cAAc,IAC/C,cAAc,MACd,sBAAsB
,KAAK,IAAI,GAAG,OAAO;AAC7C,YAAQ;AAAA,MACN,QAAQ,SAAS,MAAM,QAAQ,GAAG,iBAAiB,KAAK,eAAe,UAAU,CAAC,IAAI,WAAW;AAAA,IACnG;AACA,UAAM,MAAM,KAAK;AACjB,WAAO,eAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;;;AC5aA,SAAS,qBAAAC,0BAAyB;AAClC,SAAS,SAAAC,QAAO,QAAAC,aAAY;AAC5B,SAAS,WAAAC,UAAS,QAAAC,aAAY;AAC9B,SAAS,YAAAC,iBAAgB;AACzB,SAAS,YAAAC,iBAAgB;AAGzB,IAAM,eAAe;AAGrB,IAAMC,uBAAsB;AAG5B,IAAMC,eAAc;AAGpB,IAAMC,uBAAsB;AA+DrB,SAAS,kBAAkB,MAAsB;AACtD,SAAO,GAAG,YAAY,OAAO,IAAI,WAAW,IAAI;AAClD;AAMO,SAAS,qBAAqB,MAAc,WAA2B;AAC5E,QAAM,OAAO,KAAK,MAAM,GAAG,CAAC;AAC5B,SAAOL,MAAK,WAAW,QAAQ,MAAM,MAAM,IAAI,MAAM;AACvD;AAMA,eAAsB,eAAe,SAAyD;AAC5F,QAAM,KAAK,QAAQ,OAAM,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAC7D,QAAM,cAAc,QAAQ,eAAeG;AAG3C,QAAM,QAAQ,kBAAkB,QAAQ,MAAM,EAAE;AAEhD,QAAM,QAAmC,CAAC;AAC1C,MAAI,aAAa;AACjB,MAAI,UAAU;AACd,MAAI,SAAS;AAGb,MAAI,YAAY;AAEhB,iBAAe,SAAwB;AACrC,WAAO,YAAY,MAAM,QAAQ;AAC/B,YAAM,IAAI;AACV,YAAM,OAAO,MAAM,CAAC;AACpB,UAAI,CAAC,KAAM;AAEX,cAAQ,aAAa;AAAA,QACnB,YAAY,MAAM;AAAA,QAClB,WAAW,MAAM;AAAA,QACjB;AAAA,QACA;AAAA,QACA,aAAa;AAAA,MACf,CAAC;AAED,YAAM,MAAM,kBAAkB,IAAI;AAClC,YAAM,WAAW,qBAAqB,MAAM,QAAQ,MAAM;AAE1D,UAAI;AACF,cAAM,SAAS,MAAM,kBAAkB,KAAK,UAAU,IAAI;AAC1D,YAAI,QAAQ;AACV,gBAAM,KAAK,MAAM;AACjB,wBAAc,OAAO;AAAA,QACvB,OAAO;AAEL;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,gBAAQ,KAAK,+BAA+B,IAAI,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AACvG;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,cAAc,KAAK,IAAI,aAAa,MAAM,MAAM;AACtD,QAAM,QAAQ,IAAI,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,MAAM,OAAO,CAAC,CAAC;AAGrE,UAAQ,aAAa;AAAA,IACnB,YAAY,MAAM;AAAA,IAClB,WAAW,MAAM;AAAA,IACjB;AAAA,IACA;AAAA,IACA,aAAa;AAAA,EACf,CAAC;AAED,SAAO;AAAA,IACL,iBAAiB,MAAM;AAAA,IACvB;AAAA,IACA;AAAA,IACA,WAAW,EAAE,MAAM,QAAQ,MAAM,GAAG;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACF;AAOA,eAAe,kBACb,KACA,UACA,MACyC;AACzC,QAAM,WAAW,MAAMG,gBAAe,GAAG;AAEzC,MAAI,SAAS,WAAW,KAAK;AAC3B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,IAAI;AAC
hB,UAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,QAAQ,GAAG,EAAE;AAAA,EACtD;AAEA,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,MAAM,wBAAwB,GAAG,EAAE;AAAA,EAC/C;AAEA,QAAMT,OAAME,SAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAElD,QAAM,OAAOH,mBAAkB,QAAQ;AACvC,QAAMK,UAASC,UAAS,QAAQ,SAAS,IAAa,GAAG,IAAI;AAE7D,QAAM,WAAW,MAAMJ,MAAK,QAAQ;AAEpC,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA,MAAM,OAAO,SAAS,IAAI;AAAA,EAC5B;AACF;AAKA,SAAS,kBAAkB,MAAc,IAAsB;AAC7D,QAAM,QAAkB,CAAC;AACzB,QAAM,UAAU,oBAAI,KAAK,OAAO,YAAY;AAC5C,QAAM,MAAM,oBAAI,KAAK,KAAK,YAAY;AAEtC,SAAO,WAAW,KAAK;AACrB,UAAM,KAAK,QAAQ,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAC7C,YAAQ,WAAW,QAAQ,WAAW,IAAI,CAAC;AAAA,EAC7C;AAEA,SAAO;AACT;AAGA,eAAeQ,gBAAe,KAAa,UAAU,GAAsB;AACzE,MAAI;AACJ,MAAI;AACF,eAAW,MAAM,MAAM,GAAG;AAAA,EAC5B,SAAS,KAAK;AACZ,QAAI,UAAUF,cAAa;AACzB,YAAM,QAAQC,uBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,YAAME,OAAM,KAAK;AACjB,aAAOD,gBAAe,KAAK,UAAU,CAAC;AAAA,IACxC;AACA,UAAM,IAAI;AAAA,MACR,uBAAuBF,eAAc,CAAC,iBAAiB,GAAG,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC/G,EAAE,OAAO,IAAI;AAAA,IACf;AAAA,EACF;AAEA,MAAI,SAAS,MAAM,SAAS,WAAW,IAAK,QAAO;AAEnD,OACG,SAAS,WAAW,OAAO,SAAS,WAAW,OAAO,SAAS,WAAW,QAC3E,UAAUA,cACV;AACA,UAAM,aAAa,SAAS,QAAQ,IAAI,aAAa;AACrD,UAAM,cAAc,aAAa,SAAS,YAAY,EAAE,IAAI;AAC5D,UAAM,QACJ,CAAC,MAAM,WAAW,KAAK,cAAc,IACjC,cAAc,MACdC,uBAAsB,KAAK,IAAI,GAAG,OAAO;AAC/C,UAAME,OAAM,KAAK;AACjB,WAAOD,gBAAe,KAAK,UAAU,CAAC;AAAA,EACxC;AAEA,QAAM,IAAI,MAAM,QAAQ,SAAS,MAAM,KAAK,SAAS,UAAU,QAAQ,GAAG,EAAE;AAC9E;AAEA,SAASC,OAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;","names":["contentNode","frame","join","join","mkdir","stat","dirname","mkdir","dirname","stat","createWriteStream","mkdir","stat","dirname","join","pipeline","Readable","DEFAULT_CONCURRENCY","MAX_RETRIES","RETRY_BASE_DELAY_MS","fetchWithRetry","sleep"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lexbuild/fr",
3
- "version": "1.14.1",
3
+ "version": "1.15.1",
4
4
  "description": "Federal Register XML to Markdown converter for LexBuild",
5
5
  "author": "Chris Thomas",
6
6
  "license": "MIT",
@@ -41,7 +41,7 @@
41
41
  "dist"
42
42
  ],
43
43
  "dependencies": {
44
- "@lexbuild/core": "1.14.1"
44
+ "@lexbuild/core": "1.15.1"
45
45
  },
46
46
  "devDependencies": {
47
47
  "@types/node": "^25.3.2",