@ncukondo/reference-manager 0.5.1 → 0.5.3
- package/README.md +6 -1
- package/dist/chunks/{file-watcher-Dqkw6R7-.js → file-watcher-CBAbblss.js} +120 -19
- package/dist/chunks/file-watcher-CBAbblss.js.map +1 -0
- package/dist/chunks/index-Bl_mOQRe.js +1657 -0
- package/dist/chunks/index-Bl_mOQRe.js.map +1 -0
- package/dist/cli/commands/fulltext.d.ts +4 -3
- package/dist/cli/commands/fulltext.d.ts.map +1 -1
- package/dist/cli/commands/remove.d.ts +2 -1
- package/dist/cli/commands/remove.d.ts.map +1 -1
- package/dist/cli/commands/update.d.ts +2 -1
- package/dist/cli/commands/update.d.ts.map +1 -1
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/server-client.d.ts +5 -4
- package/dist/cli/server-client.d.ts.map +1 -1
- package/dist/cli.js +49 -42
- package/dist/cli.js.map +1 -1
- package/dist/core/library-interface.d.ts +22 -4
- package/dist/core/library-interface.d.ts.map +1 -1
- package/dist/core/library.d.ts +2 -10
- package/dist/core/library.d.ts.map +1 -1
- package/dist/core/reference.d.ts +1 -0
- package/dist/core/reference.d.ts.map +1 -1
- package/dist/features/duplicate/detector.d.ts.map +1 -1
- package/dist/features/duplicate/types.d.ts +2 -1
- package/dist/features/duplicate/types.d.ts.map +1 -1
- package/dist/features/import/cache.d.ts +8 -0
- package/dist/features/import/cache.d.ts.map +1 -1
- package/dist/features/import/detector.d.ts +11 -3
- package/dist/features/import/detector.d.ts.map +1 -1
- package/dist/features/import/fetcher.d.ts +8 -0
- package/dist/features/import/fetcher.d.ts.map +1 -1
- package/dist/features/import/importer.d.ts.map +1 -1
- package/dist/features/import/normalizer.d.ts +26 -0
- package/dist/features/import/normalizer.d.ts.map +1 -1
- package/dist/features/import/rate-limiter.d.ts +1 -1
- package/dist/features/import/rate-limiter.d.ts.map +1 -1
- package/dist/features/operations/cite.d.ts +3 -3
- package/dist/features/operations/cite.d.ts.map +1 -1
- package/dist/features/operations/fulltext/attach.d.ts +3 -3
- package/dist/features/operations/fulltext/attach.d.ts.map +1 -1
- package/dist/features/operations/fulltext/detach.d.ts +3 -3
- package/dist/features/operations/fulltext/detach.d.ts.map +1 -1
- package/dist/features/operations/fulltext/get.d.ts +3 -3
- package/dist/features/operations/fulltext/get.d.ts.map +1 -1
- package/dist/features/operations/remove.d.ts +3 -3
- package/dist/features/operations/remove.d.ts.map +1 -1
- package/dist/features/operations/update.d.ts +3 -3
- package/dist/features/operations/update.d.ts.map +1 -1
- package/dist/index.js +2 -2
- package/dist/server/routes/references.d.ts.map +1 -1
- package/dist/server.js +2 -2
- package/package.json +2 -1
- package/dist/chunks/file-watcher-Dqkw6R7-.js.map +0 -1
- package/dist/chunks/index-9dyK2f9_.js +0 -29851
- package/dist/chunks/index-9dyK2f9_.js.map +0 -1
package/README.md
CHANGED
@@ -27,6 +27,7 @@ Automate the tedious parts of literature reviews:
 # Import references from multiple sources
 ref add pmid:12345678 pmid:23456789
 ref add "10.1234/example.doi"
+ref add "ISBN:978-4-00-000000-0"
 ref add exported-from-pubmed.nbib
 
 # AI-assisted screening (with Claude Code)
@@ -102,6 +103,9 @@ ref add "10.1038/nature12373"
 # Add from PubMed
 ref add pmid:25056061
 
+# Add a book by ISBN
+ref add "ISBN:978-4-00-000000-0"
+
 # Search your library
 ref search "author:smith machine learning"
 
@@ -184,7 +188,7 @@ With a custom library:
 |------|-------------|------------|
 | `search` | Search references by query | `query`: Search string (e.g., `"author:smith 2024"`) |
 | `list` | List all references | `format?`: `"json"` \| `"bibtex"` \| `"pretty"` |
-| `add` | Add new reference(s) | `input`: DOI, PMID, BibTeX, RIS, or CSL-JSON |
+| `add` | Add new reference(s) | `input`: DOI, PMID, ISBN, BibTeX, RIS, or CSL-JSON |
 | `remove` | Remove a reference | `id`: Reference ID, `force`: must be `true` |
 | `cite` | Generate formatted citation | `ids`: Array of reference IDs, `style?`: Citation style, `format?`: `"text"` \| `"html"` |
 | `fulltext_attach` | Attach PDF/Markdown to reference | `id`: Reference ID, `path`: File path |
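For MCP clients the visible change here is that the `add` tool now accepts ISBN input alongside DOI, PMID, BibTeX, RIS, and CSL-JSON. A hypothetical arguments payload for that tool, using only the parameter name shown in the table (the ISBN itself is the placeholder value used throughout this README):

```ts
// Hypothetical arguments object for the MCP `add` tool; `input` is the parameter
// name from the table above and the ISBN is a placeholder, not a real book.
const addBookArgs: { input: string } = {
  input: "ISBN:978-4-00-000000-0",
};
console.log(JSON.stringify(addBookArgs));
```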
@@ -221,6 +225,7 @@ ref add references.bib # From BibTeX
 ref add export.ris # From RIS
 ref add "10.1038/nature12373" # From DOI
 ref add pmid:25056061 # From PubMed ID
+ref add "ISBN:978-4-00-000000-0" # From ISBN
 cat references.json | ref add # From stdin
 
 # Remove a reference
package/dist/chunks/{file-watcher-Dqkw6R7-.js → file-watcher-CBAbblss.js}
CHANGED

@@ -226,6 +226,9 @@ class Reference {
   getPmid() {
     return this.item.PMID;
   }
+  getIsbn() {
+    return this.item.ISBN;
+  }
   /**
    * Get the PMCID
    */
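`getIsbn()` reads the standard CSL-JSON `ISBN` field, mirroring the existing `getDoi()`/`getPmid()` accessors. A minimal sketch of the item shape involved; the field names follow the package's CSL-JSON schema, while the concrete values are placeholders:

```ts
// Sketch only: the CSL-JSON fields the new getter touches. Values are illustrative.
interface CslBookSketch {
  id: string;
  type: string;
  title?: string;
  ISBN?: string;
}

const bookItem: CslBookSketch = {
  id: "example-2024-sample_book",
  type: "book",
  title: "Sample Book",
  ISBN: "978-4-00-000000-0",
};

// What getIsbn() exposes: the raw ISBN string, or undefined when the field is absent.
const isbn: string | undefined = bookItem.ISBN;
console.log(isbn);
```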
@@ -432,6 +435,7 @@ class Library {
   idIndex = /* @__PURE__ */ new Map();
   doiIndex = /* @__PURE__ */ new Map();
   pmidIndex = /* @__PURE__ */ new Map();
+  isbnIndex = /* @__PURE__ */ new Map();
   constructor(filePath, items) {
     this.filePath = filePath;
     for (const item of items) {
@@ -506,8 +510,25 @@ class Library {
    * @returns Remove result with removed status and the removed item
    */
   async remove(identifier, options = {}) {
-    const {
-
+    const { idType = "id" } = options;
+    let ref;
+    switch (idType) {
+      case "uuid":
+        ref = this.uuidIndex.get(identifier);
+        break;
+      case "doi":
+        ref = this.doiIndex.get(identifier);
+        break;
+      case "pmid":
+        ref = this.pmidIndex.get(identifier);
+        break;
+      case "isbn":
+        ref = this.isbnIndex.get(identifier);
+        break;
+      default:
+        ref = this.idIndex.get(identifier);
+        break;
+    }
     if (!ref) {
       return { removed: false };
     }
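`remove` (and `update`/`find` in the hunks below) now resolves the reference through an `idType` option instead of a single-purpose lookup: `"id"` is the default, and `"uuid"`, `"doi"`, `"pmid"`, and `"isbn"` select the matching index. A standalone sketch of that dispatch, with a hypothetical `Indexes` shape standing in for the Library's private Map fields:

```ts
// Sketch of the idType-based index dispatch introduced in this hunk.
// `Indexes` is a hypothetical grouping of the Map fields the Library class keeps.
type IdType = "id" | "uuid" | "doi" | "pmid" | "isbn";

interface Indexes<Ref> {
  idIndex: Map<string, Ref>;
  uuidIndex: Map<string, Ref>;
  doiIndex: Map<string, Ref>;
  pmidIndex: Map<string, Ref>;
  isbnIndex: Map<string, Ref>;
}

function lookupByIdType<Ref>(
  indexes: Indexes<Ref>,
  identifier: string,
  idType: IdType = "id"
): Ref | undefined {
  switch (idType) {
    case "uuid":
      return indexes.uuidIndex.get(identifier);
    case "doi":
      return indexes.doiIndex.get(identifier);
    case "pmid":
      return indexes.pmidIndex.get(identifier);
    case "isbn":
      return indexes.isbnIndex.get(identifier);
    default:
      // "id" and any unknown value fall back to the citation-key index.
      return indexes.idIndex.get(identifier);
  }
}
```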
@@ -523,8 +544,25 @@ class Library {
    * @returns Update result with updated item, success status, and any ID changes
    */
   async update(identifier, updates, options = {}) {
-    const {
-
+    const { idType = "id", ...updateOptions } = options;
+    let ref;
+    switch (idType) {
+      case "uuid":
+        ref = this.uuidIndex.get(identifier);
+        break;
+      case "doi":
+        ref = this.doiIndex.get(identifier);
+        break;
+      case "pmid":
+        ref = this.pmidIndex.get(identifier);
+        break;
+      case "isbn":
+        ref = this.isbnIndex.get(identifier);
+        break;
+      default:
+        ref = this.idIndex.get(identifier);
+        break;
+    }
     if (!ref) {
       return { updated: false };
     }
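Per the destructuring above, `update()` consumes `idType` itself and forwards the remaining options (such as `onIdCollision`) to the internal update step. A usage sketch under that reading, with a deliberately simplified signature and placeholder values:

```ts
// Usage sketch only: `library` stands in for a loaded Library instance, the
// declared signature is simplified, and the ISBN/title values are placeholders.
declare const library: {
  update(
    identifier: string,
    updates: { title?: string },
    options?: {
      idType?: "id" | "uuid" | "doi" | "pmid" | "isbn";
      onIdCollision?: "fail" | "suffix";
    }
  ): Promise<{ updated: boolean; idCollision?: boolean }>;
};

async function retitleByIsbn(): Promise<void> {
  const result = await library.update(
    "978-4-00-000000-0",
    { title: "Revised Example Title" },
    { idType: "isbn" } // remaining options are forwarded past idType
  );
  if (!result.updated) {
    console.error("No reference matched that ISBN, or the update was rejected.");
  }
}
```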
@@ -537,22 +575,27 @@ class Library {
    * @returns The CSL item if found, undefined otherwise
    */
   async find(identifier, options = {}) {
-    const {
-
+    const { idType = "id" } = options;
+    let ref;
+    switch (idType) {
+      case "uuid":
+        ref = this.uuidIndex.get(identifier);
+        break;
+      case "doi":
+        ref = this.doiIndex.get(identifier);
+        break;
+      case "pmid":
+        ref = this.pmidIndex.get(identifier);
+        break;
+      case "isbn":
+        ref = this.isbnIndex.get(identifier);
+        break;
+      default:
+        ref = this.idIndex.get(identifier);
+        break;
+    }
     return ref?.getItem();
   }
-  /**
-   * Find a reference by DOI
-   */
-  findByDoi(doi) {
-    return this.doiIndex.get(doi);
-  }
-  /**
-   * Find a reference by PMID
-   */
-  findByPmid(pmid) {
-    return this.pmidIndex.get(pmid);
-  }
   /**
    * Get all references
    */
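This hunk also drops the dedicated `findByDoi`/`findByPmid` helpers; the generic `find()` with `idType` covers those lookups. A migration sketch under that assumption (simplified signature; the DOI and PMID placeholders are the ones used in the README examples):

```ts
// Sketch: the generic find() now covers the lookups the removed helpers performed.
// `library` stands in for a loaded Library instance; identifiers are placeholders.
declare const library: {
  find(
    identifier: string,
    options?: { idType?: "id" | "uuid" | "doi" | "pmid" | "isbn" }
  ): Promise<unknown>;
};

async function lookupExamples(): Promise<void> {
  // Removed in this release: findByDoi(doi) and findByPmid(pmid).
  const byDoi = await library.find("10.1038/nature12373", { idType: "doi" });
  const byPmid = await library.find("25056061", { idType: "pmid" });
  const byIsbn = await library.find("978-4-00-000000-0", { idType: "isbn" });
  console.log(byDoi, byPmid, byIsbn);
}
```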
@@ -586,6 +629,10 @@ class Library {
     if (pmid) {
       this.pmidIndex.set(pmid, ref);
     }
+    const isbn = ref.getIsbn();
+    if (isbn) {
+      this.isbnIndex.set(isbn, ref);
+    }
   }
   /**
    * Remove reference from all indices and array
@@ -682,6 +729,10 @@ class Library {
     if (pmid) {
       this.pmidIndex.delete(pmid);
     }
+    const isbn = ref.getIsbn();
+    if (isbn) {
+      this.isbnIndex.delete(isbn);
+    }
   }
   /**
    * Generate an alphabetic suffix for ID collision resolution.
@@ -1322,6 +1373,52 @@ function checkPmidMatch(item, existing) {
   }
   return null;
 }
+function normalizeIsbn(isbn) {
+  return isbn.replace(/[-\s]/g, "").toUpperCase();
+}
+const BOOK_TYPES = ["book"];
+const BOOK_SECTION_TYPES = ["chapter"];
+function checkIsbnMatch(item, existing) {
+  if (!item.ISBN || !existing.ISBN) {
+    return null;
+  }
+  const normalizedItemIsbn = normalizeIsbn(item.ISBN);
+  const normalizedExistingIsbn = normalizeIsbn(existing.ISBN);
+  if (normalizedItemIsbn !== normalizedExistingIsbn) {
+    return null;
+  }
+  if (BOOK_TYPES.includes(item.type) || BOOK_TYPES.includes(existing.type)) {
+    return {
+      type: "isbn",
+      existing,
+      details: {
+        isbn: normalizedExistingIsbn
+      }
+    };
+  }
+  if (BOOK_SECTION_TYPES.includes(item.type) || BOOK_SECTION_TYPES.includes(existing.type)) {
+    const itemTitle = item.title ? normalize(item.title) : null;
+    const existingTitle = existing.title ? normalize(existing.title) : null;
+    if (itemTitle && existingTitle && itemTitle === existingTitle) {
+      return {
+        type: "isbn",
+        existing,
+        details: {
+          isbn: normalizedExistingIsbn,
+          normalizedTitle: existingTitle
+        }
+      };
+    }
+    return null;
+  }
+  return {
+    type: "isbn",
+    existing,
+    details: {
+      isbn: normalizedExistingIsbn
+    }
+  };
+}
 function checkTitleAuthorYearMatch(item, existing) {
   const itemTitle = item.title ? normalize(item.title) : null;
   const existingTitle = existing.title ? normalize(existing.title) : null;
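The added `normalizeIsbn` only strips hyphens and whitespace and uppercases the result (relevant for the `X` check digit of ISBN-10); it does not convert between ISBN-10 and ISBN-13, so the two forms of the same book do not normalize to the same key. A behaviour sketch with placeholder inputs:

```ts
// Behaviour sketch of the normalizeIsbn added in this hunk (inputs are placeholders).
function normalizeIsbn(isbn: string): string {
  return isbn.replace(/[-\s]/g, "").toUpperCase();
}

console.log(normalizeIsbn("978-4-00-000000-0")); // "9784000000000"
console.log(normalizeIsbn("4 00 000000 x"));     // "400000000X" (ISBN-10 check digit)
// Note: an ISBN-10 and its ISBN-13 equivalent still normalize to different strings,
// so this check alone would not flag them as duplicates of each other.
```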
@@ -1354,6 +1451,10 @@ function checkSingleDuplicate(item, existing) {
   if (pmidMatch) {
     return pmidMatch;
   }
+  const isbnMatch = checkIsbnMatch(item, existing);
+  if (isbnMatch) {
+    return isbnMatch;
+  }
   return checkTitleAuthorYearMatch(item, existing);
 }
 function detectDuplicate(item, existingReferences) {
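This hunk slots the ISBN check between the PMID check and the title/author/year fallback; the first checker that returns a match wins. A condensed sketch of that first-match chain (checker types are simplified placeholders for the real functions in this chunk):

```ts
// Condensed sketch of the first-match ordering used by checkSingleDuplicate.
type Match = { type: string } | null;
type Checker<Item> = (item: Item, existing: Item) => Match;

function firstMatch<Item>(item: Item, existing: Item, checkers: Checker<Item>[]): Match {
  for (const check of checkers) {
    const match = check(item, existing);
    if (match) {
      return match; // e.g. a PMID match short-circuits before the ISBN check runs
    }
  }
  return null; // nothing matched; in the sketch this stands for "not a duplicate"
}
```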
@@ -1560,4 +1661,4 @@ export {
   tokenize as t,
   writeCslJson as w
 };
-//# sourceMappingURL=file-watcher-
+//# sourceMappingURL=file-watcher-CBAbblss.js.map
package/dist/chunks/file-watcher-CBAbblss.js.map
ADDED

@@ -0,0 +1 @@
+
{"version":3,"file":"file-watcher-CBAbblss.js","sources":["../../src/core/identifier/normalize.ts","../../src/core/identifier/generator.ts","../../src/core/identifier/uuid.ts","../../src/core/reference.ts","../../src/utils/hash.ts","../../src/core/csl-json/types.ts","../../src/core/csl-json/parser.ts","../../src/core/csl-json/serializer.ts","../../src/core/library.ts","../../src/features/search/tokenizer.ts","../../src/features/search/normalizer.ts","../../src/features/search/uppercase.ts","../../src/features/search/matcher.ts","../../src/features/search/sorter.ts","../../src/features/duplicate/detector.ts","../../src/features/file-watcher/file-watcher.ts"],"sourcesContent":["/**\n * Normalize text for identifier generation\n * Converts to lowercase, converts spaces to underscores, keeps only alphanumeric and underscores\n * @param text - Text to normalize\n * @returns Normalized text (ASCII alphanumeric and underscores only)\n */\nexport function normalizeText(text: string): string {\n return text\n .toLowerCase()\n .replace(/\\s+/g, \"_\") // Convert spaces to underscores\n .replace(/[^a-z0-9_]/g, \"\") // Keep only alphanumeric and underscores\n .replace(/_+/g, \"_\") // Collapse multiple underscores\n .replace(/^_|_$/g, \"\"); // Remove leading/trailing underscores\n}\n\n/**\n * Normalize author name for identifier\n * @param name - Author name to normalize\n * @returns Normalized name (max 32 chars)\n */\nexport function normalizeAuthorName(name: string): string {\n const normalized = normalizeText(name);\n return normalized.slice(0, 32);\n}\n\n/**\n * Create a title slug for identifier\n * @param title - Title to create slug from\n * @returns Title slug (max 32 chars)\n */\nexport function normalizeTitleSlug(title: string): string {\n const normalized = normalizeText(title);\n return normalized.slice(0, 32);\n}\n","import type { CslItem } from \"../csl-json/types\";\nimport { normalizeAuthorName, normalizeTitleSlug } from \"./normalize\";\n\n/**\n * Extract author name from CSL-JSON item\n * Returns family name, literal name, or empty string\n */\nfunction extractAuthorName(item: CslItem): string {\n if (!item.author || item.author.length === 0) {\n return \"\";\n }\n\n const firstAuthor = item.author[0];\n if (!firstAuthor) {\n return \"\";\n }\n\n // Try family name first\n if (firstAuthor.family) {\n return normalizeAuthorName(firstAuthor.family);\n }\n\n // Try literal name (e.g., institutional authors)\n if (firstAuthor.literal) {\n return normalizeAuthorName(firstAuthor.literal);\n }\n\n return \"\";\n}\n\n/**\n * Extract year from CSL-JSON item\n * Returns year string or empty string\n */\nfunction extractYear(item: CslItem): string {\n if (!item.issued || !item.issued[\"date-parts\"] || item.issued[\"date-parts\"].length === 0) {\n return \"\";\n }\n\n const dateParts = item.issued[\"date-parts\"][0];\n if (!dateParts || dateParts.length === 0) {\n return \"\";\n }\n\n const year = dateParts[0];\n return year ? 
year.toString() : \"\";\n}\n\n/**\n * Determine the title part of the ID based on author, year, and title availability\n * @param hasAuthor - Whether author is available\n * @param hasYear - Whether year is available\n * @param title - Title slug\n * @returns Title part of the ID\n */\nfunction determineTitlePart(hasAuthor: boolean, hasYear: boolean, title: string): string {\n // No title part needed if both author and year are present\n if (hasAuthor && hasYear) {\n return \"\";\n }\n\n // Add title if available\n if (title) {\n return `-${title}`;\n }\n\n // Add \"untitled\" only if both author and year are missing\n if (!hasAuthor && !hasYear) {\n return \"-untitled\";\n }\n\n return \"\";\n}\n\n/**\n * Generate a BibTeX-style ID for a CSL-JSON item\n * Format: <FirstAuthorFamily>-<Year>[<TitleSlug>][a-z suffix]\n * @param item - CSL-JSON item\n * @returns Generated ID\n */\nexport function generateId(item: CslItem): string {\n const author = extractAuthorName(item);\n const year = extractYear(item);\n const title = item.title ? normalizeTitleSlug(item.title) : \"\";\n\n // Build base ID with fallbacks\n const authorPart = author || \"anon\";\n const yearPart = year || \"nd\"; // no date\n const titlePart = determineTitlePart(Boolean(author), Boolean(year), title);\n\n return `${authorPart}-${yearPart}${titlePart}`;\n}\n\n/**\n * Generate suffix for collision handling\n * a, b, c, ..., z, aa, ab, ...\n * @param index - Collision index (0 = no suffix, 1 = 'a', 2 = 'b', ...)\n * @returns Suffix string\n */\nfunction generateSuffix(index: number): string {\n if (index === 0) {\n return \"\";\n }\n\n let suffix = \"\";\n let num = index;\n\n while (num > 0) {\n num--; // Adjust for 0-based indexing\n suffix = String.fromCharCode(97 + (num % 26)) + suffix;\n num = Math.floor(num / 26);\n }\n\n return suffix;\n}\n\n/**\n * Generate ID with collision check\n * Appends a, b, c, ... suffix if the base ID already exists\n * @param item - CSL-JSON item\n * @param existingIds - Array of existing IDs\n * @returns Generated ID with collision handling\n */\nexport function generateIdWithCollisionCheck(item: CslItem, existingIds: string[]): string {\n const baseId = generateId(item);\n\n // Normalize existing IDs to lowercase for case-insensitive comparison\n const normalizedExistingIds = existingIds.map((id) => id.toLowerCase());\n\n // Check for collisions\n let candidate = baseId;\n let suffixIndex = 0;\n\n while (normalizedExistingIds.includes(candidate.toLowerCase())) {\n suffixIndex++;\n const suffix = generateSuffix(suffixIndex);\n candidate = `${baseId}${suffix}`;\n }\n\n return candidate;\n}\n","import { randomUUID } from \"node:crypto\";\nimport type { CslCustom } from \"../csl-json/types\";\n\nconst UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;\n\n/**\n * Validate if a string is a valid UUID v4\n */\nexport function isValidUuid(uuid: string): boolean {\n return UUID_REGEX.test(uuid);\n}\n\n/**\n * Generate a new UUID v4\n */\nexport function generateUuid(): string {\n return randomUUID();\n}\n\n/**\n * Generate a new ISO 8601 timestamp\n */\nexport function generateTimestamp(): string {\n return new Date().toISOString();\n}\n\n/**\n * Extract UUID from custom field\n */\nexport function extractUuidFromCustom(custom: CslCustom | undefined): string | null {\n if (!custom || !custom.uuid) {\n return null;\n }\n\n return isValidUuid(custom.uuid) ? 
custom.uuid : null;\n}\n\n/**\n * Ensure custom metadata has a valid UUID\n * Generates a new UUID if missing or invalid\n */\nfunction ensureUuid(custom: CslCustom | undefined): Partial<CslCustom> & { uuid: string } {\n const existingUuid = extractUuidFromCustom(custom);\n\n if (existingUuid && custom) {\n return custom as Partial<CslCustom> & { uuid: string };\n }\n\n const newUuid = generateUuid();\n return {\n ...custom,\n uuid: newUuid,\n };\n}\n\n/**\n * Ensure custom metadata has a valid created_at timestamp\n * Migrates from legacy timestamp field if needed\n * Generates a new timestamp if missing\n */\nfunction ensureCreatedAt(\n custom: Partial<CslCustom> & { uuid: string }\n): Partial<CslCustom> & { uuid: string; created_at: string } {\n // Already has created_at\n if (custom.created_at) {\n return custom as Partial<CslCustom> & { uuid: string; created_at: string };\n }\n\n // Legacy migration: move timestamp to created_at if timestamp exists but created_at doesn't\n if (custom.timestamp) {\n return {\n ...custom,\n created_at: custom.timestamp,\n };\n }\n\n // Generate new created_at\n const newTimestamp = generateTimestamp();\n return {\n ...custom,\n created_at: newTimestamp,\n };\n}\n\n/**\n * Ensure custom metadata has a valid timestamp (last modification time)\n * Defaults to created_at if missing\n */\nfunction ensureTimestamp(\n custom: Partial<CslCustom> & { uuid: string; created_at: string }\n): CslCustom {\n // Already has timestamp\n if (custom.timestamp) {\n return custom as CslCustom;\n }\n\n // Default to created_at (new item, not yet modified)\n return {\n ...custom,\n timestamp: custom.created_at,\n };\n}\n\n/**\n * Ensure custom metadata has valid UUID, created_at, and timestamp\n * Handles legacy migration from old timestamp-only format\n * Generates new values if missing or invalid\n */\nexport function ensureCustomMetadata(custom: CslCustom | undefined): CslCustom {\n const withUuid = ensureUuid(custom);\n const withCreatedAt = ensureCreatedAt(withUuid);\n const withTimestamp = ensureTimestamp(withCreatedAt);\n return withTimestamp;\n}\n","import type { CslItem } from \"./csl-json/types\";\nimport { generateIdWithCollisionCheck } from \"./identifier/generator\";\nimport { ensureCustomMetadata, extractUuidFromCustom } from \"./identifier/uuid\";\n\n/**\n * Options for creating a Reference\n */\nexport interface ReferenceCreateOptions {\n /** Existing IDs to check for collision */\n existingIds?: Set<string>;\n}\n\n/**\n * Reference entity wrapping a CSL-JSON item\n */\nexport class Reference {\n private item: CslItem;\n private uuid: string;\n\n constructor(item: CslItem) {\n // Ensure UUID and timestamp are present and valid in custom field\n const customMetadata = ensureCustomMetadata(item.custom);\n this.item = { ...item, custom: customMetadata };\n\n // Extract UUID from the custom field\n const extractedUuid = extractUuidFromCustom(customMetadata);\n if (!extractedUuid) {\n throw new Error(\"Failed to extract UUID after ensureCustomMetadata\");\n }\n this.uuid = extractedUuid;\n }\n\n /**\n * Factory method to create a Reference with UUID and ID generation\n */\n static create(item: CslItem, options?: ReferenceCreateOptions): Reference {\n const existingIds = options?.existingIds || new Set<string>();\n\n // Generate ID if not provided or empty\n let updatedItem = item;\n if (!item.id || item.id.trim() === \"\") {\n const generatedId = generateIdWithCollisionCheck(item, Array.from(existingIds));\n updatedItem = { ...item, id: generatedId };\n }\n\n 
return new Reference(updatedItem);\n }\n\n /**\n * Get the underlying CSL-JSON item\n */\n getItem(): CslItem {\n return this.item;\n }\n\n /**\n * Get the UUID (internal stable identifier)\n */\n getUuid(): string {\n return this.uuid;\n }\n\n /**\n * Get the ID (Pandoc citation key / BibTeX-key)\n */\n getId(): string {\n return this.item.id;\n }\n\n /**\n * Get the title\n */\n getTitle(): string | undefined {\n return this.item.title;\n }\n\n /**\n * Get the authors\n */\n getAuthors(): CslItem[\"author\"] {\n return this.item.author;\n }\n\n /**\n * Get the year from issued date\n */\n getYear(): number | undefined {\n const issued = this.item.issued;\n if (!issued || !issued[\"date-parts\"] || issued[\"date-parts\"].length === 0) {\n return undefined;\n }\n const firstDate = issued[\"date-parts\"][0];\n return firstDate && firstDate.length > 0 ? firstDate[0] : undefined;\n }\n\n /**\n * Get the DOI\n */\n getDoi(): string | undefined {\n return this.item.DOI;\n }\n\n /**\n * Get the PMID\n */\n getPmid(): string | undefined {\n return this.item.PMID;\n }\n\n getIsbn(): string | undefined {\n return this.item.ISBN;\n }\n\n /**\n * Get the PMCID\n */\n getPmcid(): string | undefined {\n return this.item.PMCID;\n }\n\n /**\n * Get the URL\n */\n getUrl(): string | undefined {\n return this.item.URL;\n }\n\n /**\n * Get the keyword\n */\n getKeyword(): string[] | undefined {\n return this.item.keyword;\n }\n\n /**\n * Get additional URLs from custom metadata\n */\n getAdditionalUrls(): string[] | undefined {\n return this.item.custom?.additional_urls;\n }\n\n /**\n * Get the creation timestamp from custom metadata (immutable)\n */\n getCreatedAt(): string {\n if (!this.item.custom?.created_at) {\n throw new Error(\"created_at is missing from custom metadata\");\n }\n return this.item.custom.created_at;\n }\n\n /**\n * Get the last modification timestamp from custom metadata\n */\n getTimestamp(): string {\n if (!this.item.custom?.timestamp) {\n throw new Error(\"timestamp is missing from custom metadata\");\n }\n return this.item.custom.timestamp;\n }\n\n /**\n * Update the timestamp to current time\n * Call this whenever the reference is modified\n */\n touch(): void {\n if (!this.item.custom) {\n throw new Error(\"custom metadata is missing\");\n }\n this.item.custom.timestamp = new Date().toISOString();\n }\n\n /**\n * Get the type\n */\n getType(): string {\n return this.item.type;\n }\n}\n","import { createHash } from \"node:crypto\";\nimport { createReadStream } from \"node:fs\";\n\n/**\n * Compute SHA-256 hash of a string\n */\nexport function computeHash(input: string): string {\n return createHash(\"sha256\").update(input, \"utf-8\").digest(\"hex\");\n}\n\n/**\n * Compute SHA-256 hash of a file\n */\nexport async function computeFileHash(filePath: string): Promise<string> {\n return new Promise((resolve, reject) => {\n const hash = createHash(\"sha256\");\n const stream = createReadStream(filePath);\n\n stream.on(\"data\", (chunk) => hash.update(chunk));\n stream.on(\"end\", () => resolve(hash.digest(\"hex\")));\n stream.on(\"error\", (error) => reject(error));\n });\n}\n","import { z } from \"zod\";\n\n// CSL-JSON Name (Person)\nconst CslNameSchema = z.object({\n family: z.string().optional(),\n given: z.string().optional(),\n literal: z.string().optional(),\n \"dropping-particle\": z.string().optional(),\n \"non-dropping-particle\": z.string().optional(),\n suffix: z.string().optional(),\n});\n\n// CSL-JSON Date\nconst CslDateSchema = z.object({\n \"date-parts\": 
z.array(z.array(z.number())).optional(),\n raw: z.string().optional(),\n season: z.string().optional(),\n circa: z.boolean().optional(),\n literal: z.string().optional(),\n});\n\n// CSL-JSON Fulltext Metadata\nconst CslFulltextSchema = z.object({\n pdf: z.string().optional(),\n markdown: z.string().optional(),\n});\n\n// CSL-JSON Custom Metadata\nconst CslCustomSchema = z\n .object({\n uuid: z.string(),\n created_at: z.string(),\n timestamp: z.string(),\n additional_urls: z.array(z.string()).optional(),\n fulltext: CslFulltextSchema.optional(),\n tags: z.array(z.string()).optional(),\n })\n .passthrough();\n\n// CSL-JSON Item\nexport const CslItemSchema = z\n .object({\n id: z.string(),\n type: z.string(),\n title: z.string().optional(),\n author: z.array(CslNameSchema).optional(),\n editor: z.array(CslNameSchema).optional(),\n issued: CslDateSchema.optional(),\n accessed: CslDateSchema.optional(),\n \"container-title\": z.string().optional(),\n volume: z.string().optional(),\n issue: z.string().optional(),\n page: z.string().optional(),\n DOI: z.string().optional(),\n PMID: z.string().optional(),\n PMCID: z.string().optional(),\n ISBN: z.string().optional(),\n ISSN: z.string().optional(),\n URL: z.string().optional(),\n abstract: z.string().optional(),\n publisher: z.string().optional(),\n \"publisher-place\": z.string().optional(),\n note: z.string().optional(),\n keyword: z.array(z.string()).optional(),\n custom: CslCustomSchema.optional(),\n // Allow additional fields\n })\n .passthrough();\n\n// CSL-JSON Library (array of items)\nexport const CslLibrarySchema = z.array(CslItemSchema);\n\nexport type CslCustom = z.infer<typeof CslCustomSchema>;\nexport type CslItem = z.infer<typeof CslItemSchema>;\nexport type CslLibrary = z.infer<typeof CslLibrarySchema>;\n","import { readFile } from \"node:fs/promises\";\nimport { ensureCustomMetadata } from \"../identifier/uuid\";\nimport { type CslLibrary, CslLibrarySchema } from \"./types\";\n\n/**\n * Convert keyword field from semicolon-separated string to array\n * @param keyword - Semicolon-separated string or undefined\n * @returns Array of keywords or undefined\n */\nfunction parseKeyword(keyword: unknown): string[] | undefined {\n if (typeof keyword !== \"string\") {\n return undefined;\n }\n\n if (keyword.trim() === \"\") {\n return undefined;\n }\n\n const keywords = keyword\n .split(\";\")\n .map((k) => k.trim())\n .filter((k) => k !== \"\");\n\n return keywords.length > 0 ? keywords : undefined;\n}\n\n/**\n * Parse a CSL-JSON file and ensure all entries have valid UUIDs and timestamps\n * @param filePath - Path to the CSL-JSON file\n * @returns Array of CSL-JSON items with guaranteed UUIDs and timestamps\n * @throws Error if file cannot be read or JSON is invalid\n */\nexport async function parseCslJson(filePath: string): Promise<CslLibrary> {\n // Read file\n const content = await readFile(filePath, \"utf-8\");\n\n // Parse JSON\n let rawData: unknown;\n try {\n rawData = JSON.parse(content);\n } catch (error) {\n throw new Error(\n `Failed to parse JSON: ${error instanceof Error ? 
error.message : String(error)}`\n );\n }\n\n // Convert keyword fields from string to array before validation\n if (Array.isArray(rawData)) {\n rawData = rawData.map((item: unknown) => {\n if (item && typeof item === \"object\" && \"keyword\" in item) {\n const itemWithKeyword = item as Record<string, unknown>;\n return {\n ...itemWithKeyword,\n keyword: parseKeyword(itemWithKeyword.keyword),\n };\n }\n return item;\n });\n }\n\n // Validate with zod\n const parseResult = CslLibrarySchema.safeParse(rawData);\n\n if (!parseResult.success) {\n throw new Error(`Invalid CSL-JSON structure: ${parseResult.error.message}`);\n }\n\n const library = parseResult.data;\n\n // Ensure all entries have valid UUIDs and timestamps\n const processedLibrary: CslLibrary = library.map((item) => {\n const updatedCustom = ensureCustomMetadata(item.custom);\n\n return {\n ...item,\n custom: updatedCustom,\n };\n });\n\n return processedLibrary;\n}\n","import { mkdir, writeFile } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport type { CslLibrary } from \"./types\";\n\n/**\n * Convert keyword array to semicolon-separated string\n * @param keywords - Array of keywords or undefined\n * @returns Semicolon-separated string or undefined\n */\nfunction serializeKeyword(keywords: string[] | undefined): string | undefined {\n if (!keywords || keywords.length === 0) {\n return undefined;\n }\n\n return keywords.join(\"; \");\n}\n\n/**\n * Serialize a CSL-JSON library to a formatted JSON string\n * @param library - CSL-JSON library (array of items)\n * @returns Formatted JSON string with 2-space indentation\n */\nexport function serializeCslJson(library: CslLibrary): string {\n // Convert keyword arrays to semicolon-separated strings\n const libraryForJson = library.map((item) => {\n const { keyword, ...rest } = item;\n const serializedKeyword = serializeKeyword(keyword);\n\n if (serializedKeyword === undefined) {\n return rest;\n }\n\n return {\n ...rest,\n keyword: serializedKeyword,\n };\n });\n\n return JSON.stringify(libraryForJson, null, 2);\n}\n\n/**\n * Write a CSL-JSON library to a file\n * @param filePath - Path to write the CSL-JSON file\n * @param library - CSL-JSON library to write\n * @throws Error if file cannot be written\n */\nexport async function writeCslJson(filePath: string, library: CslLibrary): Promise<void> {\n // Ensure parent directory exists\n const dir = dirname(filePath);\n await mkdir(dir, { recursive: true });\n\n // Serialize and write\n const content = serializeCslJson(library);\n await writeFile(filePath, content, \"utf-8\");\n}\n","import { existsSync } from \"node:fs\";\nimport { mkdir } from \"node:fs/promises\";\nimport { dirname } from \"node:path\";\nimport { computeFileHash } from \"../utils/hash\";\nimport { parseCslJson } from \"./csl-json/parser\";\nimport { writeCslJson } from \"./csl-json/serializer\";\nimport type { CslItem } from \"./csl-json/types\";\nimport type {\n FindOptions,\n ILibrary,\n RemoveOptions,\n RemoveResult,\n UpdateOptions,\n UpdateResult,\n} from \"./library-interface.js\";\nimport { Reference } from \"./reference\";\n\n// Re-export types from library-interface for backward compatibility\nexport type {\n FindOptions,\n IdentifierType,\n ILibrary,\n UpdateOptions,\n UpdateResult,\n} from \"./library-interface.js\";\n\n/**\n * Library manager for CSL-JSON references.\n * Implements ILibrary interface for use with operations layer.\n */\nexport class Library implements ILibrary {\n private filePath: string;\n private references: 
Reference[] = [];\n private currentHash: string | null = null;\n\n // Indices for fast lookup\n private uuidIndex: Map<string, Reference> = new Map();\n private idIndex: Map<string, Reference> = new Map();\n private doiIndex: Map<string, Reference> = new Map();\n private pmidIndex: Map<string, Reference> = new Map();\n private isbnIndex: Map<string, Reference> = new Map();\n\n private constructor(filePath: string, items: CslItem[]) {\n this.filePath = filePath;\n\n // Create references and build indices\n for (const item of items) {\n const ref = new Reference(item);\n this.references.push(ref);\n this.addToIndices(ref);\n }\n }\n\n /**\n * Load library from file.\n * If the file does not exist, creates an empty library file.\n */\n static async load(filePath: string): Promise<Library> {\n // Check if file exists, create empty library if not\n if (!existsSync(filePath)) {\n // Create parent directories if needed\n const dir = dirname(filePath);\n await mkdir(dir, { recursive: true });\n // Create empty library file\n await writeCslJson(filePath, []);\n }\n\n const items = await parseCslJson(filePath);\n const library = new Library(filePath, items);\n // Compute and store file hash after loading\n library.currentHash = await computeFileHash(filePath);\n return library;\n }\n\n /**\n * Save library to file\n */\n async save(): Promise<void> {\n const items = this.references.map((ref) => ref.getItem());\n await writeCslJson(this.filePath, items);\n // Update file hash after saving\n this.currentHash = await computeFileHash(this.filePath);\n }\n\n /**\n * Reloads the library from file if it was modified externally.\n * Self-writes (detected via hash comparison) are skipped.\n * @returns true if reload occurred, false if skipped (self-write detected)\n */\n async reload(): Promise<boolean> {\n const newHash = await computeFileHash(this.filePath);\n\n if (newHash === this.currentHash) {\n // Self-write detected, skip reload\n return false;\n }\n\n // External change detected, reload\n const items = await parseCslJson(this.filePath);\n\n // Clear and rebuild indices\n this.references = [];\n this.uuidIndex.clear();\n this.idIndex.clear();\n this.doiIndex.clear();\n this.pmidIndex.clear();\n\n for (const item of items) {\n const ref = new Reference(item);\n this.references.push(ref);\n this.addToIndices(ref);\n }\n\n // Update hash\n this.currentHash = newHash;\n\n return true;\n }\n\n /**\n * Add a reference to the library\n * @param item - The CSL item to add\n * @returns The added CSL item (with generated ID and UUID)\n */\n async add(item: CslItem): Promise<CslItem> {\n // Collect existing IDs for collision check\n const existingIds = new Set(this.references.map((ref) => ref.getId()));\n\n // Create reference with collision check\n const ref = Reference.create(item, { existingIds });\n\n // Add to library\n this.references.push(ref);\n this.addToIndices(ref);\n\n // Return the added item\n return ref.getItem();\n }\n\n /**\n * Remove a reference by citation ID or UUID.\n * @param identifier - The citation ID or UUID of the reference to remove\n * @param options - Remove options (byUuid to use UUID lookup)\n * @returns Remove result with removed status and the removed item\n */\n async remove(identifier: string, options: RemoveOptions = {}): Promise<RemoveResult> {\n const { idType = \"id\" } = options;\n let ref: Reference | undefined;\n switch (idType) {\n case \"uuid\":\n ref = this.uuidIndex.get(identifier);\n break;\n case \"doi\":\n ref = this.doiIndex.get(identifier);\n break;\n case 
\"pmid\":\n ref = this.pmidIndex.get(identifier);\n break;\n case \"isbn\":\n ref = this.isbnIndex.get(identifier);\n break;\n default: // \"id\" or unknown\n ref = this.idIndex.get(identifier);\n break;\n }\n if (!ref) {\n return { removed: false };\n }\n const removedItem = ref.getItem();\n const removed = this.removeReference(ref);\n return { removed, removedItem };\n }\n\n /**\n * Update a reference by citation ID or UUID.\n * @param identifier - The citation ID or UUID of the reference to update\n * @param updates - Partial updates to apply to the reference\n * @param options - Update options (byUuid to use UUID lookup, onIdCollision for collision handling)\n * @returns Update result with updated item, success status, and any ID changes\n */\n async update(\n identifier: string,\n updates: Partial<CslItem>,\n options: UpdateOptions = {}\n ): Promise<UpdateResult> {\n const { idType = \"id\", ...updateOptions } = options;\n let ref: Reference | undefined;\n switch (idType) {\n case \"uuid\":\n ref = this.uuidIndex.get(identifier);\n break;\n case \"doi\":\n ref = this.doiIndex.get(identifier);\n break;\n case \"pmid\":\n ref = this.pmidIndex.get(identifier);\n break;\n case \"isbn\":\n ref = this.isbnIndex.get(identifier);\n break;\n default: // \"id\" or unknown\n ref = this.idIndex.get(identifier);\n break;\n }\n\n if (!ref) {\n return { updated: false };\n }\n\n return this.updateReference(ref, updates, updateOptions);\n }\n\n /**\n * Find a reference by citation ID or UUID.\n * @param identifier - The citation ID or UUID of the reference to find\n * @param options - Find options (byUuid to use UUID lookup)\n * @returns The CSL item if found, undefined otherwise\n */\n async find(identifier: string, options: FindOptions = {}): Promise<CslItem | undefined> {\n const { idType = \"id\" } = options;\n\n let ref: Reference | undefined;\n switch (idType) {\n case \"uuid\":\n ref = this.uuidIndex.get(identifier);\n break;\n case \"doi\":\n ref = this.doiIndex.get(identifier);\n break;\n case \"pmid\":\n ref = this.pmidIndex.get(identifier);\n break;\n case \"isbn\":\n ref = this.isbnIndex.get(identifier);\n break;\n default: // \"id\" or unknown\n ref = this.idIndex.get(identifier);\n break;\n }\n\n return ref?.getItem();\n }\n\n /**\n * Get all references\n */\n async getAll(): Promise<CslItem[]> {\n return this.references.map((ref) => ref.getItem());\n }\n\n /**\n * Get the file path\n */\n getFilePath(): string {\n return this.filePath;\n }\n\n /**\n * Get the current file hash\n * Returns null if the library has not been loaded or saved yet\n */\n getCurrentHash(): string | null {\n return this.currentHash;\n }\n\n /**\n * Add reference to all indices\n */\n private addToIndices(ref: Reference): void {\n // UUID index\n this.uuidIndex.set(ref.getUuid(), ref);\n\n // ID index\n this.idIndex.set(ref.getId(), ref);\n\n // DOI index\n const doi = ref.getDoi();\n if (doi) {\n this.doiIndex.set(doi, ref);\n }\n\n // PMID index\n const pmid = ref.getPmid();\n if (pmid) {\n this.pmidIndex.set(pmid, ref);\n }\n\n // ISBN index\n const isbn = ref.getIsbn();\n if (isbn) {\n this.isbnIndex.set(isbn, ref);\n }\n }\n\n /**\n * Remove reference from all indices and array\n */\n private removeReference(ref: Reference): boolean {\n const index = this.references.indexOf(ref);\n if (index === -1) {\n return false;\n }\n this.references.splice(index, 1);\n this.removeFromIndices(ref);\n return true;\n }\n\n /**\n * Update a reference with partial updates.\n * Preserves uuid and created_at, updates 
timestamp.\n */\n private updateReference(\n ref: Reference,\n updates: Partial<CslItem>,\n options: UpdateOptions = {}\n ): UpdateResult {\n const index = this.references.indexOf(ref);\n if (index === -1) {\n return { updated: false };\n }\n\n const existingItem = ref.getItem();\n const currentId = ref.getId();\n const { newId, idChanged, collision } = this.resolveNewId(\n updates.id ?? existingItem.id,\n currentId,\n options\n );\n\n if (collision) {\n return { updated: false, idCollision: true };\n }\n\n const updatedItem = this.buildUpdatedItem(existingItem, updates, newId);\n\n // Remove old reference from indices\n this.removeFromIndices(ref);\n\n // Create new reference and replace in array\n const newRef = new Reference(updatedItem);\n this.references[index] = newRef;\n this.addToIndices(newRef);\n\n const result: UpdateResult = { updated: true, item: newRef.getItem() };\n if (idChanged) {\n result.idChanged = true;\n result.newId = newId;\n }\n return result;\n }\n\n /**\n * Resolve the new ID, handling collisions based on options.\n */\n private resolveNewId(\n requestedId: string,\n currentId: string,\n options: UpdateOptions\n ): { newId: string; idChanged: boolean; collision: boolean } {\n if (requestedId === currentId) {\n return { newId: requestedId, idChanged: false, collision: false };\n }\n\n const conflictingRef = this.idIndex.get(requestedId);\n if (!conflictingRef) {\n return { newId: requestedId, idChanged: false, collision: false };\n }\n\n const onIdCollision = options.onIdCollision ?? \"fail\";\n if (onIdCollision === \"fail\") {\n return { newId: requestedId, idChanged: false, collision: true };\n }\n\n // onIdCollision === \"suffix\": resolve by adding suffix\n const existingIds = new Set(this.references.map((r) => r.getId()));\n existingIds.delete(currentId);\n const resolvedId = this.resolveIdCollision(requestedId, existingIds);\n return { newId: resolvedId, idChanged: true, collision: false };\n }\n\n /**\n * Build the updated CslItem, preserving uuid and created_at.\n */\n private buildUpdatedItem(\n existingItem: CslItem,\n updates: Partial<CslItem>,\n newId: string\n ): CslItem {\n return {\n ...existingItem,\n ...updates,\n id: newId,\n type: updates.type ?? 
existingItem.type,\n custom: {\n ...(existingItem.custom || {}),\n ...(updates.custom || {}),\n uuid: existingItem.custom?.uuid || \"\",\n created_at: existingItem.custom?.created_at || new Date().toISOString(),\n timestamp: new Date().toISOString(),\n },\n };\n }\n\n /**\n * Remove a reference from all indices.\n */\n private removeFromIndices(ref: Reference): void {\n this.uuidIndex.delete(ref.getUuid());\n this.idIndex.delete(ref.getId());\n\n const doi = ref.getDoi();\n if (doi) {\n this.doiIndex.delete(doi);\n }\n\n const pmid = ref.getPmid();\n if (pmid) {\n this.pmidIndex.delete(pmid);\n }\n\n const isbn = ref.getIsbn();\n if (isbn) {\n this.isbnIndex.delete(isbn);\n }\n }\n\n /**\n * Generate an alphabetic suffix for ID collision resolution.\n * 0 -> 'a', 1 -> 'b', ..., 25 -> 'z', 26 -> 'aa', etc.\n */\n private generateSuffix(index: number): string {\n const alphabet = \"abcdefghijklmnopqrstuvwxyz\";\n let suffix = \"\";\n let n = index;\n\n do {\n suffix = alphabet[n % 26] + suffix;\n n = Math.floor(n / 26) - 1;\n } while (n >= 0);\n\n return suffix;\n }\n\n /**\n * Resolve ID collision by appending alphabetic suffix.\n */\n private resolveIdCollision(baseId: string, existingIds: Set<string>): string {\n if (!existingIds.has(baseId)) {\n return baseId;\n }\n\n let index = 0;\n let newId: string;\n\n do {\n const suffix = this.generateSuffix(index);\n newId = `${baseId}${suffix}`;\n index++;\n } while (existingIds.has(newId));\n\n return newId;\n }\n}\n","import type { FieldSpecifier, SearchQuery, SearchToken } from \"./types.js\";\n\nconst VALID_FIELDS: Set<FieldSpecifier> = new Set([\n \"author\",\n \"title\",\n \"year\",\n \"doi\",\n \"pmid\",\n \"pmcid\",\n \"url\",\n \"keyword\",\n \"tag\",\n]);\n\n/**\n * Check if character at index is whitespace\n */\nfunction isWhitespace(query: string, index: number): boolean {\n return /\\s/.test(query.charAt(index));\n}\n\n/**\n * Check if character at index is a quote\n */\nfunction isQuote(query: string, index: number): boolean {\n return query.charAt(index) === '\"';\n}\n\n/**\n * Tokenize a search query string\n */\nexport function tokenize(query: string): SearchQuery {\n const tokens: SearchToken[] = [];\n let i = 0;\n\n while (i < query.length) {\n // Skip whitespace\n if (isWhitespace(query, i)) {\n i++;\n continue;\n }\n\n // Parse next token\n const result = parseNextToken(query, i);\n if (result.token) {\n tokens.push(result.token);\n }\n i = result.nextIndex;\n }\n\n return {\n original: query,\n tokens,\n };\n}\n\ntype TokenResult = { token: SearchToken | null; nextIndex: number };\n\n/**\n * Check if there's whitespace between two indices\n */\nfunction hasWhitespaceBetween(query: string, start: number, end: number): boolean {\n for (let j = start; j < end; j++) {\n if (isWhitespace(query, j)) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Try to parse a field:value pattern starting at the given index\n * Returns null if not a valid field:value pattern\n */\nfunction tryParseFieldValue(query: string, startIndex: number): TokenResult | null {\n const colonIndex = query.indexOf(\":\", startIndex);\n if (colonIndex === -1) {\n return null;\n }\n\n // Check if there's whitespace before colon (invalid field pattern)\n if (hasWhitespaceBetween(query, startIndex, colonIndex)) {\n return null;\n }\n\n const fieldName = query.substring(startIndex, colonIndex);\n if (!VALID_FIELDS.has(fieldName as FieldSpecifier)) {\n return null;\n }\n\n // Valid field specifier found\n const afterColon = colonIndex + 1;\n\n // Check if 
value is empty\n if (afterColon >= query.length || isWhitespace(query, afterColon)) {\n return { token: null, nextIndex: afterColon };\n }\n\n // Check if value is a quoted phrase\n if (isQuote(query, afterColon)) {\n const quoteResult = parseQuotedValue(query, afterColon);\n if (quoteResult.value !== null) {\n return {\n token: {\n raw: query.substring(startIndex, quoteResult.nextIndex),\n value: quoteResult.value,\n field: fieldName as FieldSpecifier,\n isPhrase: true,\n },\n nextIndex: quoteResult.nextIndex,\n };\n }\n // If quote parsing failed, return null to try other parsing\n return null;\n }\n\n // Regular unquoted value\n const valueResult = parseUnquotedValue(query, afterColon);\n return {\n token: {\n raw: query.substring(startIndex, valueResult.nextIndex),\n value: valueResult.value,\n field: fieldName as FieldSpecifier,\n isPhrase: false,\n },\n nextIndex: valueResult.nextIndex,\n };\n}\n\n/**\n * Parse a quoted token (phrase without field specifier)\n */\nfunction parseQuotedToken(query: string, startIndex: number): TokenResult {\n const quoteResult = parseQuotedValue(query, startIndex);\n if (quoteResult.value !== null) {\n return {\n token: {\n raw: query.substring(startIndex, quoteResult.nextIndex),\n value: quoteResult.value,\n isPhrase: true,\n },\n nextIndex: quoteResult.nextIndex,\n };\n }\n\n // If quote parsing failed (empty or unclosed), skip it\n if (quoteResult.nextIndex > startIndex) {\n // Empty quote - skip it\n return { token: null, nextIndex: quoteResult.nextIndex };\n }\n\n // Unclosed quote - treat as regular text including the quote character\n const valueResult = parseUnquotedValue(query, startIndex, true);\n return {\n token: {\n raw: valueResult.value,\n value: valueResult.value,\n isPhrase: false,\n },\n nextIndex: valueResult.nextIndex,\n };\n}\n\n/**\n * Parse a regular unquoted token\n */\nfunction parseRegularToken(query: string, startIndex: number): TokenResult {\n const valueResult = parseUnquotedValue(query, startIndex);\n return {\n token: {\n raw: valueResult.value,\n value: valueResult.value,\n isPhrase: false,\n },\n nextIndex: valueResult.nextIndex,\n };\n}\n\n/**\n * Parse the next token starting at the given index\n */\nfunction parseNextToken(query: string, startIndex: number): TokenResult {\n // Try to parse field:value pattern first\n const fieldResult = tryParseFieldValue(query, startIndex);\n if (fieldResult !== null) {\n return fieldResult;\n }\n\n // Check if it's a quoted phrase\n if (isQuote(query, startIndex)) {\n return parseQuotedToken(query, startIndex);\n }\n\n // Regular unquoted token\n return parseRegularToken(query, startIndex);\n}\n\n/**\n * Parse a quoted value starting at a quote character\n */\nfunction parseQuotedValue(\n query: string,\n startIndex: number\n): { value: string | null; nextIndex: number } {\n if (!isQuote(query, startIndex)) {\n return { value: null, nextIndex: startIndex };\n }\n\n let i = startIndex + 1; // Skip opening quote\n const valueStart = i;\n\n // Find closing quote\n while (i < query.length && !isQuote(query, i)) {\n i++;\n }\n\n // No closing quote found\n if (i >= query.length) {\n return { value: null, nextIndex: startIndex };\n }\n\n const value = query.substring(valueStart, i);\n i++; // Skip closing quote\n\n // Return null for empty quotes\n if (value.trim() === \"\") {\n return { value: null, nextIndex: i };\n }\n\n return { value, nextIndex: i };\n}\n\n/**\n * Parse an unquoted value\n * @param includeQuotes - If true, don't stop at quote characters (for unclosed quotes)\n 
*/\nfunction parseUnquotedValue(\n query: string,\n startIndex: number,\n includeQuotes = false\n): { value: string; nextIndex: number } {\n let i = startIndex;\n\n // Read until whitespace (and optionally until quote)\n while (i < query.length && !isWhitespace(query, i)) {\n if (!includeQuotes && isQuote(query, i)) {\n break;\n }\n i++;\n }\n\n return {\n value: query.substring(startIndex, i),\n nextIndex: i,\n };\n}\n","/**\n * Normalize text for search matching\n *\n * Applies the following transformations:\n * 1. Unicode NFKC normalization\n * 2. Lowercase conversion\n * 3. Remove diacritics (accents)\n * 4. Punctuation removal\n * 5. Whitespace normalization\n */\nexport function normalize(text: string): string {\n // Step 1: Unicode NFKC normalization (compatibility normalization)\n let normalized = text.normalize(\"NFKC\");\n\n // Step 2: Lowercase\n normalized = normalized.toLowerCase();\n\n // Step 3: Remove diacritics\n // Use NFD to decompose, then remove combining diacritical marks\n normalized = normalized.normalize(\"NFD\").replace(/\\p{M}/gu, \"\");\n\n // Step 4: Remove punctuation\n // Replace all punctuation and special characters with spaces\n // Keep: letters (including Unicode), numbers, slashes, and whitespace\n normalized = normalized.replace(/[^\\p{L}\\p{N}/\\s]/gu, \" \");\n\n // Step 5: Normalize whitespace\n // - Replace all whitespace sequences (spaces, tabs, newlines) with a single space\n // - Trim leading and trailing whitespace\n normalized = normalized.replace(/\\s+/g, \" \").trim();\n\n return normalized;\n}\n\n/**\n * Normalize text for matching while preserving case\n *\n * Applies the following transformations:\n * 1. Unicode NFKC normalization\n * 2. Remove diacritics (accents)\n * 3. Normalize whitespace\n *\n * Unlike `normalize`, this function preserves letter case\n * for use with uppercase-sensitive matching.\n */\nexport function normalizePreservingCase(text: string): string {\n // Step 1: Unicode NFKC normalization (compatibility normalization)\n let normalized = text.normalize(\"NFKC\");\n\n // Step 2: Remove diacritics\n // Use NFD to decompose, then remove combining diacritical marks\n normalized = normalized.normalize(\"NFD\").replace(/\\p{M}/gu, \"\");\n\n // Step 3: Normalize whitespace\n // - Replace all whitespace sequences (spaces, tabs, newlines) with a single space\n // - Trim leading and trailing whitespace\n normalized = normalized.replace(/\\s+/g, \" \").trim();\n\n return normalized;\n}\n","/**\n * Represents a segment of consecutive uppercase letters in a string.\n */\nexport interface UppercaseSegment {\n /** The uppercase segment text */\n segment: string;\n /** Start index (inclusive) */\n start: number;\n /** End index (exclusive) */\n end: number;\n}\n\n/**\n * Checks if the text contains 2 or more consecutive uppercase letters.\n * Pattern: /[A-Z]{2,}/\n *\n * @param text - The text to check\n * @returns true if text contains consecutive uppercase letters\n */\nexport function hasConsecutiveUppercase(text: string): boolean {\n const pattern = /[A-Z]{2,}/;\n return pattern.test(text);\n}\n\n/**\n * Extracts all segments of 2 or more consecutive uppercase letters from text.\n * Pattern: /[A-Z]{2,}/g\n *\n * @param text - The text to extract segments from\n * @returns Array of uppercase segments with their positions\n */\nexport function extractUppercaseSegments(text: string): UppercaseSegment[] {\n const pattern = /[A-Z]{2,}/g;\n const segments: UppercaseSegment[] = [];\n\n for (const match of text.matchAll(pattern)) {\n 
segments.push({\n segment: match[0],\n start: match.index,\n end: match.index + match[0].length,\n });\n }\n\n return segments;\n}\n\n/**\n * Escapes special regex characters in a string.\n */\nfunction escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\");\n}\n\n/**\n * Normalizes whitespace in a string (collapses multiple spaces, trims).\n */\nfunction normalizeWhitespace(text: string): string {\n return text.replace(/\\s+/g, \" \").trim();\n}\n\n/**\n * Checks if all uppercase segments from the query exist in the target (case-sensitive).\n */\nfunction allUppercaseSegmentsExist(segments: UppercaseSegment[], target: string): boolean {\n return segments.every((seg) => target.includes(seg.segment));\n}\n\n/**\n * Builds a regex pattern from query and its uppercase segments.\n * Uppercase segments are matched literally, other parts flexibly.\n */\nfunction buildMatchPattern(query: string, segments: UppercaseSegment[]): string {\n const patternParts: string[] = [];\n let lastEnd = 0;\n\n for (const seg of segments) {\n if (seg.start > lastEnd) {\n const beforePart = query.slice(lastEnd, seg.start);\n if (beforePart.trim()) {\n patternParts.push(escapeRegex(beforePart));\n }\n }\n patternParts.push(`(?:${escapeRegex(seg.segment)})`);\n lastEnd = seg.end;\n }\n\n if (lastEnd < query.length) {\n const afterPart = query.slice(lastEnd);\n if (afterPart.trim()) {\n patternParts.push(escapeRegex(afterPart));\n }\n }\n\n return patternParts.join(\".*?\");\n}\n\n/**\n * Matches a query string against a target string with case sensitivity\n * for consecutive uppercase segments in the query.\n *\n * - If query contains 2+ consecutive uppercase letters (e.g., AI, RNA),\n * those portions must match exactly in the target.\n * - Other portions are matched case-insensitively.\n *\n * @param query - The search query\n * @param target - The target string to match against\n * @returns true if query matches target according to the rules\n */\nexport function matchWithUppercaseSensitivity(query: string, target: string): boolean {\n if (query === \"\") {\n return true;\n }\n if (target === \"\") {\n return false;\n }\n\n const normalizedQuery = normalizeWhitespace(query);\n const normalizedTarget = normalizeWhitespace(target);\n\n if (!hasConsecutiveUppercase(normalizedQuery)) {\n return normalizedTarget.toLowerCase().includes(normalizedQuery.toLowerCase());\n }\n\n const segments = extractUppercaseSegments(normalizedQuery);\n\n if (!allUppercaseSegmentsExist(segments, target)) {\n return false;\n }\n\n const pattern = buildMatchPattern(normalizedQuery, segments);\n\n try {\n const regex = new RegExp(pattern, \"i\");\n return regex.test(normalizedTarget);\n } catch {\n return normalizedTarget.toLowerCase().includes(normalizedQuery.toLowerCase());\n }\n}\n","import type { CslItem } from \"../../core/csl-json/types.js\";\nimport { normalizePreservingCase } from \"./normalizer.js\";\nimport type { FieldMatch, MatchStrength, SearchResult, SearchToken } from \"./types.js\";\nimport { matchWithUppercaseSensitivity } from \"./uppercase.js\";\n\n/**\n * ID fields require exact match (case-sensitive)\n */\nconst ID_FIELDS = new Set([\"DOI\", \"PMID\", \"PMCID\", \"URL\"]);\n\n/**\n * Extract year from CSL-JSON issued field\n */\nfunction extractYear(reference: CslItem): string {\n if (reference.issued?.[\"date-parts\"]?.[0]?.[0]) {\n return String(reference.issued[\"date-parts\"][0][0]);\n }\n return \"0000\";\n}\n\n/**\n * Extract and format author names\n * Returns \"family 
given\" format for all authors\n */\nfunction extractAuthors(reference: CslItem): string {\n if (!reference.author || reference.author.length === 0) {\n return \"\";\n }\n\n return reference.author\n .map((author) => {\n const family = author.family || \"\";\n const given = author.given || \"\";\n return given ? `${family} ${given}` : family;\n })\n .join(\" \");\n}\n\n/**\n * Get field value from reference\n */\nfunction getFieldValue(reference: CslItem, field: string): string | null {\n // Handle special fields\n if (field === \"year\") {\n return extractYear(reference);\n }\n\n if (field === \"author\") {\n return extractAuthors(reference);\n }\n\n // Handle direct field access\n const value = reference[field as keyof CslItem];\n if (typeof value === \"string\") {\n return value;\n }\n\n // Handle nested custom fields\n if (field.startsWith(\"custom.\")) {\n const customField = field.substring(7); // Remove \"custom.\" prefix\n const customValue = (reference.custom as Record<string, unknown>)?.[customField];\n if (typeof customValue === \"string\") {\n return customValue;\n }\n }\n\n return null;\n}\n\n/**\n * Check if URL matches in primary URL or additional_urls array\n */\nfunction matchUrl(queryValue: string, reference: CslItem): FieldMatch | null {\n // Check primary URL field\n if (reference.URL === queryValue) {\n return {\n field: \"URL\",\n strength: \"exact\",\n value: reference.URL,\n };\n }\n\n // Check additional_urls in custom field\n const additionalUrls = (reference.custom as Record<string, unknown>)?.additional_urls;\n if (Array.isArray(additionalUrls)) {\n for (const url of additionalUrls) {\n if (typeof url === \"string\" && url === queryValue) {\n return {\n field: \"custom.additional_urls\",\n strength: \"exact\",\n value: url,\n };\n }\n }\n }\n\n return null;\n}\n\n/**\n * Check if query matches any keyword in the keyword array\n * Performs partial match with normalization on each keyword element\n */\nfunction matchKeyword(queryValue: string, reference: CslItem): FieldMatch | null {\n // Check if keyword field exists and is an array\n if (!reference.keyword || !Array.isArray(reference.keyword)) {\n return null;\n }\n\n // Normalize query value (preserving case for uppercase-sensitive matching)\n const normalizedQuery = normalizePreservingCase(queryValue);\n\n // Search through each keyword element\n for (const keyword of reference.keyword) {\n if (typeof keyword === \"string\") {\n const normalizedKeyword = normalizePreservingCase(keyword);\n // Use uppercase-sensitive matching\n if (matchWithUppercaseSensitivity(normalizedQuery, normalizedKeyword)) {\n return {\n field: \"keyword\",\n strength: \"partial\",\n value: keyword,\n };\n }\n }\n }\n\n return null;\n}\n\n/**\n * Match a tag field against custom.tags array\n * Similar to matchKeyword but accesses custom.tags\n */\nfunction matchTag(queryValue: string, reference: CslItem): FieldMatch | null {\n // Check if custom.tags field exists and is an array\n if (!reference.custom?.tags || !Array.isArray(reference.custom.tags)) {\n return null;\n }\n\n // Normalize query value (preserving case for uppercase-sensitive matching)\n const normalizedQuery = normalizePreservingCase(queryValue);\n\n // Search through each tag element\n for (const tag of reference.custom.tags) {\n if (typeof tag === \"string\") {\n const normalizedTag = normalizePreservingCase(tag);\n // Use uppercase-sensitive matching\n if (matchWithUppercaseSensitivity(normalizedQuery, normalizedTag)) {\n return {\n field: \"tag\",\n strength: 
\"partial\",\n value: tag,\n };\n }\n }\n }\n\n return null;\n}\n\n/**\n * Map field specifier to actual CSL-JSON field name\n */\nconst FIELD_MAP: Record<string, string> = {\n author: \"author\",\n title: \"title\",\n doi: \"DOI\",\n pmid: \"PMID\",\n pmcid: \"PMCID\",\n};\n\n/**\n * Match a year field against a reference\n */\nfunction matchYearField(tokenValue: string, reference: CslItem): FieldMatch | null {\n const year = extractYear(reference);\n if (year === tokenValue) {\n return {\n field: \"year\",\n strength: \"exact\",\n value: year,\n };\n }\n return null;\n}\n\n/**\n * Match a content or ID field against a reference\n */\nfunction matchFieldValue(field: string, tokenValue: string, reference: CslItem): FieldMatch | null {\n const fieldValue = getFieldValue(reference, field);\n if (fieldValue === null) {\n return null;\n }\n\n // Check if this is an ID field (exact match, case-sensitive)\n if (ID_FIELDS.has(field)) {\n if (fieldValue === tokenValue) {\n return {\n field,\n strength: \"exact\",\n value: fieldValue,\n };\n }\n return null;\n }\n\n // Content field: use uppercase-sensitive matching\n // Normalize both values (remove diacritics, normalize whitespace) while preserving case\n const normalizedFieldValue = normalizePreservingCase(fieldValue);\n const normalizedQuery = normalizePreservingCase(tokenValue);\n\n // If query contains consecutive uppercase (e.g., \"AI\", \"RNA\"), match case-sensitively\n // Otherwise, match case-insensitively\n if (matchWithUppercaseSensitivity(normalizedQuery, normalizedFieldValue)) {\n return {\n field,\n strength: \"partial\",\n value: fieldValue,\n };\n }\n return null;\n}\n\n/**\n * Match token against a specific field\n */\nfunction matchSpecificField(token: SearchToken, reference: CslItem): FieldMatch[] {\n const matches: FieldMatch[] = [];\n const fieldToSearch = token.field as string;\n\n // Handle URL field specially (search both URL and additional_urls)\n if (fieldToSearch === \"url\") {\n const urlMatch = matchUrl(token.value, reference);\n if (urlMatch) matches.push(urlMatch);\n return matches;\n }\n\n // Handle year field\n if (fieldToSearch === \"year\") {\n const yearMatch = matchYearField(token.value, reference);\n if (yearMatch) matches.push(yearMatch);\n return matches;\n }\n\n // Handle keyword field specially (search array elements)\n if (fieldToSearch === \"keyword\") {\n const keywordMatch = matchKeyword(token.value, reference);\n if (keywordMatch) matches.push(keywordMatch);\n return matches;\n }\n\n // Handle tag field specially (search custom.tags array)\n if (fieldToSearch === \"tag\") {\n const tagMatch = matchTag(token.value, reference);\n if (tagMatch) matches.push(tagMatch);\n return matches;\n }\n\n // Standard field matching\n const actualField = FIELD_MAP[fieldToSearch] || fieldToSearch;\n const match = matchFieldValue(actualField, token.value, reference);\n if (match) matches.push(match);\n\n return matches;\n}\n\n/**\n * Standard fields to search (not special-cased)\n */\nconst STANDARD_SEARCH_FIELDS = [\n \"title\",\n \"author\",\n \"container-title\",\n \"publisher\",\n \"DOI\",\n \"PMID\",\n \"PMCID\",\n \"abstract\",\n];\n\n/**\n * Match token against a single field (used for all-fields search)\n */\nfunction matchSingleField(\n field: string,\n tokenValue: string,\n reference: CslItem\n): FieldMatch | null {\n if (field === \"year\") {\n return matchYearField(tokenValue, reference);\n }\n if (field === \"URL\") {\n return matchUrl(tokenValue, reference);\n }\n if (field === \"keyword\") {\n return 
matchKeyword(tokenValue, reference);\n }\n if (field === \"tag\") {\n return matchTag(tokenValue, reference);\n }\n return matchFieldValue(field, tokenValue, reference);\n}\n\n/**\n * Match token against all searchable fields\n */\nfunction matchAllFields(token: SearchToken, reference: CslItem): FieldMatch[] {\n const matches: FieldMatch[] = [];\n\n // Match special fields\n const specialFields = [\"year\", \"URL\", \"keyword\", \"tag\"];\n for (const field of specialFields) {\n const match = matchSingleField(field, token.value, reference);\n if (match) matches.push(match);\n }\n\n // Match standard fields\n for (const field of STANDARD_SEARCH_FIELDS) {\n const match = matchFieldValue(field, token.value, reference);\n if (match) matches.push(match);\n }\n\n return matches;\n}\n\n/**\n * Match a single token against a reference\n * Returns an array of field matches\n */\nexport function matchToken(token: SearchToken, reference: CslItem): FieldMatch[] {\n // If field is specified, only search that field\n if (token.field) {\n return matchSpecificField(token, reference);\n }\n\n // No field specified: search all fields\n return matchAllFields(token, reference);\n}\n\n/**\n * Match a reference against all search tokens\n * Returns a SearchResult if all tokens match (AND logic), null otherwise\n */\nexport function matchReference(reference: CslItem, tokens: SearchToken[]): SearchResult | null {\n // Empty token array means no match\n if (tokens.length === 0) {\n return null;\n }\n\n const tokenMatches: SearchResult[\"tokenMatches\"] = [];\n let overallStrength: MatchStrength = \"none\";\n\n // Check if all tokens match (AND logic)\n for (const token of tokens) {\n const matches = matchToken(token, reference);\n\n // If any token doesn't match at least one field, no match\n if (matches.length === 0) {\n return null;\n }\n\n // Determine highest match strength for this token\n const tokenStrength = matches.some((m) => m.strength === \"exact\") ? \"exact\" : \"partial\";\n\n // Update overall strength (exact > partial > none)\n if (tokenStrength === \"exact\") {\n overallStrength = \"exact\";\n } else if (tokenStrength === \"partial\" && overallStrength === \"none\") {\n overallStrength = \"partial\";\n }\n\n tokenMatches.push({\n token,\n matches,\n });\n }\n\n // Calculate score (higher is better)\n // Exact matches get higher score than partial matches\n const score = overallStrength === \"exact\" ? 
100 + tokenMatches.length : 50 + tokenMatches.length;\n\n return {\n reference,\n tokenMatches,\n overallStrength,\n score,\n };\n}\n\n/**\n * Search references against search tokens\n * Returns array of SearchResult for all matching references\n */\nexport function search(references: CslItem[], tokens: SearchToken[]): SearchResult[] {\n const results: SearchResult[] = [];\n\n for (const reference of references) {\n const match = matchReference(reference, tokens);\n if (match) {\n results.push(match);\n }\n }\n\n return results;\n}\n","import type { CslItem } from \"../../core/csl-json/types.js\";\nimport type { MatchStrength, SearchResult } from \"./types.js\";\n\n/**\n * Extract year from CSL-JSON reference\n * Returns \"0000\" for missing year (sorted last)\n */\nfunction extractYear(reference: CslItem): string {\n if (reference.issued?.[\"date-parts\"]?.[0]?.[0]) {\n return String(reference.issued[\"date-parts\"][0][0]);\n }\n return \"0000\";\n}\n\n/**\n * Extract first author's family name for sorting\n * Returns empty string for missing author (sorted last)\n */\nfunction extractFirstAuthorFamily(reference: CslItem): string {\n if (!reference.author || reference.author.length === 0) {\n return \"\";\n }\n return reference.author[0]?.family || \"\";\n}\n\n/**\n * Extract title for sorting\n * Returns empty string for missing title (sorted last)\n */\nfunction extractTitle(reference: CslItem): string {\n return reference.title || \"\";\n}\n\n/**\n * Compare match strength (exact > partial > none)\n * Returns negative if a < b, positive if a > b, 0 if equal\n */\nfunction compareStrength(a: MatchStrength, b: MatchStrength): number {\n const strengthOrder = { exact: 2, partial: 1, none: 0 };\n return strengthOrder[b] - strengthOrder[a];\n}\n\n/**\n * Compare years (descending - newer first)\n * Returns negative if a < b, positive if a > b, 0 if equal\n */\nfunction compareYear(a: CslItem, b: CslItem): number {\n const yearA = extractYear(a);\n const yearB = extractYear(b);\n return Number(yearB) - Number(yearA);\n}\n\n/**\n * Compare authors alphabetically (empty comes last)\n * Returns negative if a < b, positive if a > b, 0 if equal\n */\nfunction compareAuthor(a: CslItem, b: CslItem): number {\n const authorA = extractFirstAuthorFamily(a).toLowerCase();\n const authorB = extractFirstAuthorFamily(b).toLowerCase();\n // Empty string (no author) should come after authors\n if (authorA === \"\" && authorB !== \"\") return 1;\n if (authorA !== \"\" && authorB === \"\") return -1;\n return authorA.localeCompare(authorB);\n}\n\n/**\n * Compare titles alphabetically (empty comes last)\n * Returns negative if a < b, positive if a > b, 0 if equal\n */\nfunction compareTitle(a: CslItem, b: CslItem): number {\n const titleA = extractTitle(a).toLowerCase();\n const titleB = extractTitle(b).toLowerCase();\n // Empty string (no title) should come after titles\n if (titleA === \"\" && titleB !== \"\") return 1;\n if (titleA !== \"\" && titleB === \"\") return -1;\n return titleA.localeCompare(titleB);\n}\n\n/**\n * Sort search results according to the specification:\n * 1. Match strength (exact > partial)\n * 2. Year (descending)\n * 3. Author (alphabetical)\n * 4. Title (alphabetical)\n * 5. 
Registration order (original array order)\n */\nexport function sortResults(results: SearchResult[]): SearchResult[] {\n // Create a copy with original indices for stable sort\n const indexed = results.map((result, index) => ({ result, index }));\n\n // Sort according to the criteria\n const sorted = indexed.sort((a, b) => {\n // 1. Match strength (exact > partial)\n const strengthDiff = compareStrength(a.result.overallStrength, b.result.overallStrength);\n if (strengthDiff !== 0) return strengthDiff;\n\n // 2. Year (descending - newer first)\n const yearDiff = compareYear(a.result.reference, b.result.reference);\n if (yearDiff !== 0) return yearDiff;\n\n // 3. Author (alphabetical)\n const authorDiff = compareAuthor(a.result.reference, b.result.reference);\n if (authorDiff !== 0) return authorDiff;\n\n // 4. Title (alphabetical, case-insensitive)\n const titleDiff = compareTitle(a.result.reference, b.result.reference);\n if (titleDiff !== 0) return titleDiff;\n\n // 5. Registration order (original array order)\n return a.index - b.index;\n });\n\n // Return only the results (without indices)\n return sorted.map((item) => item.result);\n}\n","/**\n * Duplicate detection logic\n */\n\nimport type { CslItem } from \"../../core/csl-json/types.js\";\nimport { normalize } from \"../search/normalizer.js\";\nimport type { DuplicateMatch, DuplicateResult } from \"./types.js\";\n\n/**\n * Normalize DOI by removing common URL prefixes\n * Returns the DOI in format: 10.xxxx/...\n */\nfunction normalizeDoi(doi: string): string {\n // Remove common DOI URL prefixes\n const normalized = doi\n .replace(/^https?:\\/\\/doi\\.org\\//i, \"\")\n .replace(/^https?:\\/\\/dx\\.doi\\.org\\//i, \"\")\n .replace(/^doi:/i, \"\");\n\n return normalized;\n}\n\n/**\n * Extract year from CSL-JSON issued field\n */\nfunction extractYear(item: CslItem): string | null {\n const dateParts = item.issued?.[\"date-parts\"]?.[0];\n if (!dateParts || dateParts.length === 0) {\n return null;\n }\n return String(dateParts[0]);\n}\n\n/**\n * Normalize author names to \"family given-initial\" format\n */\nfunction normalizeAuthors(item: CslItem): string | null {\n if (!item.author || item.author.length === 0) {\n return null;\n }\n\n // Combine all authors: \"family given-initial\"\n const authorStrings = item.author.map((author) => {\n const family = author.family || \"\";\n const givenInitial = author.given ? 
author.given.charAt(0) : \"\";\n return `${family} ${givenInitial}`.trim();\n });\n\n // Join and normalize\n return normalize(authorStrings.join(\" \"));\n}\n\n/**\n * Check if two items match by DOI\n */\nfunction checkDoiMatch(item: CslItem, existing: CslItem): DuplicateMatch | null {\n if (!item.DOI || !existing.DOI) {\n return null;\n }\n\n const normalizedItemDoi = normalizeDoi(item.DOI);\n const normalizedExistingDoi = normalizeDoi(existing.DOI);\n\n // DOI comparison is case-sensitive\n if (normalizedItemDoi === normalizedExistingDoi) {\n return {\n type: \"doi\",\n existing,\n details: {\n doi: normalizedExistingDoi,\n },\n };\n }\n\n return null;\n}\n\n/**\n * Check if two items match by PMID\n */\nfunction checkPmidMatch(item: CslItem, existing: CslItem): DuplicateMatch | null {\n if (!item.PMID || !existing.PMID) {\n return null;\n }\n\n // PMID comparison is exact string match\n if (item.PMID === existing.PMID) {\n return {\n type: \"pmid\",\n existing,\n details: {\n pmid: existing.PMID,\n },\n };\n }\n\n return null;\n}\n\n/**\n * Normalize ISBN by removing hyphens, spaces, and uppercasing X\n */\nfunction normalizeIsbn(isbn: string): string {\n return isbn.replace(/[-\\s]/g, \"\").toUpperCase();\n}\n\n/**\n * Types that should match by ISBN only (not title)\n */\nconst BOOK_TYPES = [\"book\"];\n\n/**\n * Types that should match by ISBN + title (chapters in the same book can differ)\n */\nconst BOOK_SECTION_TYPES = [\"chapter\"];\n\n/**\n * Check if two items match by ISBN\n * - For book type: ISBN only\n * - For chapter type: ISBN + title\n */\nfunction checkIsbnMatch(item: CslItem, existing: CslItem): DuplicateMatch | null {\n if (!item.ISBN || !existing.ISBN) {\n return null;\n }\n\n const normalizedItemIsbn = normalizeIsbn(item.ISBN);\n const normalizedExistingIsbn = normalizeIsbn(existing.ISBN);\n\n if (normalizedItemIsbn !== normalizedExistingIsbn) {\n return null;\n }\n\n // For book type: ISBN only is enough\n if (BOOK_TYPES.includes(item.type) || BOOK_TYPES.includes(existing.type)) {\n return {\n type: \"isbn\",\n existing,\n details: {\n isbn: normalizedExistingIsbn,\n },\n };\n }\n\n // For chapter type: ISBN + title must match\n if (BOOK_SECTION_TYPES.includes(item.type) || BOOK_SECTION_TYPES.includes(existing.type)) {\n const itemTitle = item.title ? normalize(item.title) : null;\n const existingTitle = existing.title ? normalize(existing.title) : null;\n\n if (itemTitle && existingTitle && itemTitle === existingTitle) {\n return {\n type: \"isbn\",\n existing,\n details: {\n isbn: normalizedExistingIsbn,\n normalizedTitle: existingTitle,\n },\n };\n }\n return null;\n }\n\n // For other types with ISBN: treat as book\n return {\n type: \"isbn\",\n existing,\n details: {\n isbn: normalizedExistingIsbn,\n },\n };\n}\n\n/**\n * Check if two items match by Title + Author + Year\n */\nfunction checkTitleAuthorYearMatch(item: CslItem, existing: CslItem): DuplicateMatch | null {\n const itemTitle = item.title ? normalize(item.title) : null;\n const existingTitle = existing.title ? 
normalize(existing.title) : null;\n const itemAuthors = normalizeAuthors(item);\n const existingAuthors = normalizeAuthors(existing);\n const itemYear = extractYear(item);\n const existingYear = extractYear(existing);\n\n // All three must be present and match\n if (\n !itemTitle ||\n !existingTitle ||\n !itemAuthors ||\n !existingAuthors ||\n !itemYear ||\n !existingYear\n ) {\n return null;\n }\n\n if (itemTitle === existingTitle && itemAuthors === existingAuthors && itemYear === existingYear) {\n return {\n type: \"title-author-year\",\n existing,\n details: {\n normalizedTitle: existingTitle,\n normalizedAuthors: existingAuthors,\n year: existingYear,\n },\n };\n }\n\n return null;\n}\n\n/**\n * Check if an item is a duplicate of an existing item\n * Returns the first match found (highest priority)\n */\nfunction checkSingleDuplicate(item: CslItem, existing: CslItem): DuplicateMatch | null {\n // Priority 1: DOI matching (highest priority)\n const doiMatch = checkDoiMatch(item, existing);\n if (doiMatch) {\n return doiMatch;\n }\n\n // Priority 2: PMID matching\n const pmidMatch = checkPmidMatch(item, existing);\n if (pmidMatch) {\n return pmidMatch;\n }\n\n // Priority 3: ISBN matching\n const isbnMatch = checkIsbnMatch(item, existing);\n if (isbnMatch) {\n return isbnMatch;\n }\n\n // Priority 4: Title + Author + Year matching (lowest priority)\n return checkTitleAuthorYearMatch(item, existing);\n}\n\n/**\n * Detects if a reference is a duplicate of any existing references\n *\n * Priority order:\n * 1. DOI (highest priority)\n * 2. PMID\n * 3. Title + Author + Year (lowest priority)\n *\n * @param item - The reference to check for duplicates\n * @param existingReferences - Array of existing references to check against\n * @returns DuplicateResult indicating if duplicate found and match details\n */\nexport function detectDuplicate(item: CslItem, existingReferences: CslItem[]): DuplicateResult {\n const matches: DuplicateMatch[] = [];\n const itemUuid = item.custom?.uuid;\n\n for (const existing of existingReferences) {\n // Skip if same UUID (same item)\n if (itemUuid && existing.custom?.uuid === itemUuid) {\n continue;\n }\n\n const match = checkSingleDuplicate(item, existing);\n if (match) {\n matches.push(match);\n }\n }\n\n return {\n isDuplicate: matches.length > 0,\n matches,\n };\n}\n","import { EventEmitter } from \"node:events\";\nimport * as fs from \"node:fs/promises\";\nimport * as path from \"node:path\";\nimport chokidar, { type FSWatcher } from \"chokidar\";\n\n/**\n * Options for FileWatcher\n */\nexport interface FileWatcherOptions {\n /** Debounce time in milliseconds (default: 500) */\n debounceMs?: number;\n /** Poll interval in milliseconds for polling mode (default: 5000) */\n pollIntervalMs?: number;\n /** Use polling instead of native file system events */\n usePolling?: boolean;\n /** Retry delay in milliseconds for JSON parse (default: 200) */\n retryDelayMs?: number;\n /** Maximum number of retries for JSON parse (default: 10) */\n maxRetries?: number;\n}\n\n// Default values from spec\nconst DEFAULT_DEBOUNCE_MS = 500;\nconst DEFAULT_POLL_INTERVAL_MS = 5000;\nconst DEFAULT_RETRY_DELAY_MS = 200;\nconst DEFAULT_MAX_RETRIES = 10;\n\n/**\n * Check if a file should be ignored based on spec patterns\n * Ignored patterns:\n * - *.tmp\n * - *.bak\n * - *.conflict.*\n * - *.lock\n * - editor swap files (.swp, ~)\n */\nfunction shouldIgnore(filePath: string): boolean {\n const basename = path.basename(filePath);\n\n // *.tmp files\n if (basename.endsWith(\".tmp\")) 
return true;\n\n // *.bak files\n if (basename.endsWith(\".bak\")) return true;\n\n // *.conflict.* files (contains .conflict. in name)\n if (basename.includes(\".conflict.\")) return true;\n\n // *.lock files\n if (basename.endsWith(\".lock\")) return true;\n\n // Vim swap files (.*.swp)\n if (basename.startsWith(\".\") && basename.endsWith(\".swp\")) return true;\n\n // Editor backup files (*~)\n if (basename.endsWith(\"~\")) return true;\n\n return false;\n}\n\n/**\n * FileWatcher watches a file or directory for changes and emits events.\n *\n * Events:\n * - 'change': Emitted when a watched file changes (after debounce)\n * - 'error': Emitted when a watch error occurs\n * - 'ready': Emitted when watching has started\n * - 'parsed': Emitted when JSON file is successfully parsed\n * - 'parseError': Emitted when JSON parse fails after all retries\n */\nexport class FileWatcher extends EventEmitter {\n private readonly watchPath: string;\n private readonly debounceMs: number;\n private readonly pollIntervalMs: number;\n private readonly usePolling: boolean;\n private readonly retryDelayMs: number;\n private readonly maxRetries: number;\n\n private watcher: FSWatcher | null = null;\n private watching = false;\n private debounceTimers: Map<string, NodeJS.Timeout> = new Map();\n\n constructor(watchPath: string, options?: FileWatcherOptions) {\n super();\n this.watchPath = watchPath;\n this.debounceMs = options?.debounceMs ?? DEFAULT_DEBOUNCE_MS;\n this.pollIntervalMs = options?.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;\n this.usePolling = options?.usePolling ?? false;\n this.retryDelayMs = options?.retryDelayMs ?? DEFAULT_RETRY_DELAY_MS;\n this.maxRetries = options?.maxRetries ?? DEFAULT_MAX_RETRIES;\n }\n\n /**\n * Start watching for file changes\n */\n async start(): Promise<void> {\n if (this.watching) {\n return;\n }\n\n return new Promise((resolve, reject) => {\n this.watcher = chokidar.watch(this.watchPath, {\n ignored: shouldIgnore,\n persistent: true,\n usePolling: this.usePolling,\n interval: this.pollIntervalMs,\n ignoreInitial: true,\n awaitWriteFinish: false,\n });\n\n this.watcher.on(\"ready\", () => {\n this.watching = true;\n this.emit(\"ready\");\n resolve();\n });\n\n this.watcher.on(\"error\", (error: unknown) => {\n this.emit(\"error\", error);\n if (!this.watching) {\n reject(error);\n }\n });\n\n this.watcher.on(\"change\", (filePath: string) => {\n this.handleFileChange(filePath);\n });\n\n this.watcher.on(\"add\", (filePath: string) => {\n this.handleFileChange(filePath);\n });\n });\n }\n\n /**\n * Handle file change with debouncing\n */\n private handleFileChange(filePath: string): void {\n // Clear existing timer for this file\n const existingTimer = this.debounceTimers.get(filePath);\n if (existingTimer) {\n clearTimeout(existingTimer);\n }\n\n // Set new debounced timer\n const timer = setTimeout(() => {\n this.debounceTimers.delete(filePath);\n this.emit(\"change\", filePath);\n this.tryParseJsonFile(filePath);\n }, this.debounceMs);\n\n this.debounceTimers.set(filePath, timer);\n }\n\n /**\n * Try to parse JSON file with retries\n */\n private async tryParseJsonFile(filePath: string): Promise<void> {\n // Only parse .json files\n if (path.extname(filePath).toLowerCase() !== \".json\") {\n return;\n }\n\n let lastError: Error | null = null;\n\n for (let attempt = 0; attempt <= this.maxRetries; attempt++) {\n try {\n const content = await fs.readFile(filePath, \"utf-8\");\n const parsed = JSON.parse(content);\n this.emit(\"parsed\", filePath, parsed);\n return;\n 
} catch (error) {\n lastError = error as Error;\n if (attempt < this.maxRetries) {\n await this.delay(this.retryDelayMs);\n }\n }\n }\n\n this.emit(\"parseError\", filePath, lastError);\n }\n\n /**\n * Delay helper\n */\n private delay(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n\n /**\n * Stop watching for file changes\n */\n close(): void {\n if (this.watcher) {\n this.watcher.close();\n this.watcher = null;\n }\n\n // Clear all debounce timers\n for (const timer of this.debounceTimers.values()) {\n clearTimeout(timer);\n }\n this.debounceTimers.clear();\n\n this.watching = false;\n }\n\n /**\n * Get the watched path\n */\n getPath(): string {\n return this.watchPath;\n }\n\n /**\n * Check if the watcher is currently active\n */\n isWatching(): boolean {\n return this.watching;\n }\n\n /**\n * Get the debounce time in milliseconds\n */\n getDebounceMs(): number {\n return this.debounceMs;\n }\n\n /**\n * Get the poll interval in milliseconds\n */\n getPollIntervalMs(): number {\n return this.pollIntervalMs;\n }\n\n /**\n * Get the retry delay in milliseconds\n */\n getRetryDelayMs(): number {\n return this.retryDelayMs;\n }\n\n /**\n * Get the maximum number of retries\n */\n getMaxRetries(): number {\n return this.maxRetries;\n }\n}\n"],"names":["extractYear","ref"],"mappings":";;;;;;;;;AAMO,SAAS,cAAc,MAAsB;AAClD,SAAO,KACJ,cACA,QAAQ,QAAQ,GAAG,EACnB,QAAQ,eAAe,EAAE,EACzB,QAAQ,OAAO,GAAG,EAClB,QAAQ,UAAU,EAAE;AACzB;AAOO,SAAS,oBAAoB,MAAsB;AACxD,QAAM,aAAa,cAAc,IAAI;AACrC,SAAO,WAAW,MAAM,GAAG,EAAE;AAC/B;AAOO,SAAS,mBAAmB,OAAuB;AACxD,QAAM,aAAa,cAAc,KAAK;AACtC,SAAO,WAAW,MAAM,GAAG,EAAE;AAC/B;AC1BA,SAAS,kBAAkB,MAAuB;AAChD,MAAI,CAAC,KAAK,UAAU,KAAK,OAAO,WAAW,GAAG;AAC5C,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,KAAK,OAAO,CAAC;AACjC,MAAI,CAAC,aAAa;AAChB,WAAO;AAAA,EACT;AAGA,MAAI,YAAY,QAAQ;AACtB,WAAO,oBAAoB,YAAY,MAAM;AAAA,EAC/C;AAGA,MAAI,YAAY,SAAS;AACvB,WAAO,oBAAoB,YAAY,OAAO;AAAA,EAChD;AAEA,SAAO;AACT;AAMA,SAASA,cAAY,MAAuB;AAC1C,MAAI,CAAC,KAAK,UAAU,CAAC,KAAK,OAAO,YAAY,KAAK,KAAK,OAAO,YAAY,EAAE,WAAW,GAAG;AACxF,WAAO;AAAA,EACT;AAEA,QAAM,YAAY,KAAK,OAAO,YAAY,EAAE,CAAC;AAC7C,MAAI,CAAC,aAAa,UAAU,WAAW,GAAG;AACxC,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,UAAU,CAAC;AACxB,SAAO,OAAO,KAAK,SAAA,IAAa;AAClC;AASA,SAAS,mBAAmB,WAAoB,SAAkB,OAAuB;AAEvF,MAAI,aAAa,SAAS;AACxB,WAAO;AAAA,EACT;AAGA,MAAI,OAAO;AACT,WAAO,IAAI,KAAK;AAAA,EAClB;AAGA,MAAI,CAAC,aAAa,CAAC,SAAS;AAC1B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAQO,SAAS,WAAW,MAAuB;AAChD,QAAM,SAAS,kBAAkB,IAAI;AACrC,QAAM,OAAOA,cAAY,IAAI;AAC7B,QAAM,QAAQ,KAAK,QAAQ,mBAAmB,KAAK,KAAK,IAAI;AAG5D,QAAM,aAAa,UAAU;AAC7B,QAAM,WAAW,QAAQ;AACzB,QAAM,YAAY,mBAAmB,QAAQ,MAAM,GAAG,QAAQ,IAAI,GAAG,KAAK;AAE1E,SAAO,GAAG,UAAU,IAAI,QAAQ,GAAG,SAAS;AAC9C;AAQA,SAAS,eAAe,OAAuB;AAC7C,MAAI,UAAU,GAAG;AACf,WAAO;AAAA,EACT;AAEA,MAAI,SAAS;AACb,MAAI,MAAM;AAEV,SAAO,MAAM,GAAG;AACd;AACA,aAAS,OAAO,aAAa,KAAM,MAAM,EAAG,IAAI;AAChD,UAAM,KAAK,MAAM,MAAM,EAAE;AAAA,EAC3B;AAEA,SAAO;AACT;AASO,SAAS,6BAA6B,MAAe,aAA+B;AACzF,QAAM,SAAS,WAAW,IAAI;AAG9B,QAAM,wBAAwB,YAAY,IAAI,CAAC,OAAO,GAAG,aAAa;AAGtE,MAAI,YAAY;AAChB,MAAI,cAAc;AAElB,SAAO,sBAAsB,SAAS,UAAU,YAAA,CAAa,GAAG;AAC9D;AACA,UAAM,SAAS,eAAe,WAAW;AACzC,gBAAY,GAAG,MAAM,GAAG,MAAM;AAAA,EAChC;AAEA,SAAO;AACT;ACzIA,MAAM,aAAa;AAKZ,SAAS,YAAY,MAAuB;AACjD,SAAO,WAAW,KAAK,IAAI;AAC7B;AAKO,SAAS,eAAuB;AACrC,SAAO,WAAA;AACT;AAKO,SAAS,oBAA4B;AAC1C,UAAO,oBAAI,KAAA,GAAO,YAAA;AACpB;AAKO,SAAS,sBAAsB,QAA8C;AAClF,MAAI,CAAC,UAAU,CAAC,OAAO,MAAM;AAC3B,WAAO;AAAA,EACT;AAEA,SAAO,YAAY,OAAO,IAAI,IAAI,OAAO,OAAO;AAClD;AAMA,SAAS,WAAW,QAAsE;AACxF,QAAM,eAAe,sBAAsB,MAAM;AAEjD,MAAI,gBAAgB,QAAQ;AAC1B,WAAO;AAAA,EA
CT;AAEA,QAAM,UAAU,aAAA;AAChB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM;AAAA,EAAA;AAEV;AAOA,SAAS,gBACP,QAC2D;AAE3D,MAAI,OAAO,YAAY;AACrB,WAAO;AAAA,EACT;AAGA,MAAI,OAAO,WAAW;AACpB,WAAO;AAAA,MACL,GAAG;AAAA,MACH,YAAY,OAAO;AAAA,IAAA;AAAA,EAEvB;AAGA,QAAM,eAAe,kBAAA;AACrB,SAAO;AAAA,IACL,GAAG;AAAA,IACH,YAAY;AAAA,EAAA;AAEhB;AAMA,SAAS,gBACP,QACW;AAEX,MAAI,OAAO,WAAW;AACpB,WAAO;AAAA,EACT;AAGA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,WAAW,OAAO;AAAA,EAAA;AAEtB;AAOO,SAAS,qBAAqB,QAA0C;AAC7E,QAAM,WAAW,WAAW,MAAM;AAClC,QAAM,gBAAgB,gBAAgB,QAAQ;AAC9C,QAAM,gBAAgB,gBAAgB,aAAa;AACnD,SAAO;AACT;AClGO,MAAM,UAAU;AAAA,EACb;AAAA,EACA;AAAA,EAER,YAAY,MAAe;AAEzB,UAAM,iBAAiB,qBAAqB,KAAK,MAAM;AACvD,SAAK,OAAO,EAAE,GAAG,MAAM,QAAQ,eAAA;AAG/B,UAAM,gBAAgB,sBAAsB,cAAc;AAC1D,QAAI,CAAC,eAAe;AAClB,YAAM,IAAI,MAAM,mDAAmD;AAAA,IACrE;AACA,SAAK,OAAO;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAO,MAAe,SAA6C;AACxE,UAAM,cAAc,SAAS,eAAe,oBAAI,IAAA;AAGhD,QAAI,cAAc;AAClB,QAAI,CAAC,KAAK,MAAM,KAAK,GAAG,KAAA,MAAW,IAAI;AACrC,YAAM,cAAc,6BAA6B,MAAM,MAAM,KAAK,WAAW,CAAC;AAC9E,oBAAc,EAAE,GAAG,MAAM,IAAI,YAAA;AAAA,IAC/B;AAEA,WAAO,IAAI,UAAU,WAAW;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,UAAmB;AACjB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,UAAkB;AAChB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,QAAgB;AACd,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,WAA+B;AAC7B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,aAAgC;AAC9B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,UAA8B;AAC5B,UAAM,SAAS,KAAK,KAAK;AACzB,QAAI,CAAC,UAAU,CAAC,OAAO,YAAY,KAAK,OAAO,YAAY,EAAE,WAAW,GAAG;AACzE,aAAO;AAAA,IACT;AACA,UAAM,YAAY,OAAO,YAAY,EAAE,CAAC;AACxC,WAAO,aAAa,UAAU,SAAS,IAAI,UAAU,CAAC,IAAI;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,SAA6B;AAC3B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,UAA8B;AAC5B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA,EAEA,UAA8B;AAC5B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,WAA+B;AAC7B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,SAA6B;AAC3B,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,aAAmC;AACjC,WAAO,KAAK,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA0C;AACxC,WAAO,KAAK,KAAK,QAAQ;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,QAAI,CAAC,KAAK,KAAK,QAAQ,YAAY;AACjC,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AACA,WAAO,KAAK,KAAK,OAAO;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,eAAuB;AACrB,QAAI,CAAC,KAAK,KAAK,QAAQ,WAAW;AAChC,YAAM,IAAI,MAAM,2CAA2C;AAAA,IAC7D;AACA,WAAO,KAAK,KAAK,OAAO;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,QAAc;AACZ,QAAI,CAAC,KAAK,KAAK,QAAQ;AACrB,YAAM,IAAI,MAAM,4BAA4B;AAAA,IAC9C;AACA,SAAK,KAAK,OAAO,aAAY,oBAAI,KAAA,GAAO,YAAA;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,UAAkB;AAChB,WAAO,KAAK,KAAK;AAAA,EACnB;AACF;AC5KO,SAAS,YAAY,OAAuB;AACjD,SAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,OAAO,EAAE,OAAO,KAAK;AACjE;AAKA,eAAsB,gBAAgB,UAAmC;AACvE,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,UAAM,OAAO,WAAW,QAAQ;AAChC,UAAM,SAAS,iBAAiB,QAAQ;AAExC,WAAO,GAAG,QAAQ,CAAC,UAAU,KAAK,OAAO,KAAK,CAAC;AAC/C,WAAO,GAAG,OAAO,MAAM,QAAQ,KAAK,OAAO,KAAK,CAAC,CAAC;AAClD,WAAO,GAAG,SAAS,CAAC,UAAU,OAAO,KAAK,CAAC;AAAA,EAC7C,CAAC;AACH;ACnBA,MAAM,gBAAgB,EAAE,OAAO;AAAA,EAC7B,QAAQ,EAAE,OAAA,EAAS,SAAA;AAAA,EACnB,OAAO,EAAE,OAAA,EAAS,SAAA;AAAA,EAClB,SAAS,EAAE,OAAA,EAAS,SAAA;AAAA,EACpB,qBAAqB,EAAE,OAAA,EAAS,SAAA;AAAA,EAChC,yBAAyB,EAAE,OAAA,EAAS,SAAA;AAAA,EACpC,QAAQ,EAAE,OAAA,EAAS,SAAA;AACrB,CAAC;AAGD,MAAM,gBAAgB,EAAE,OAAO;AAAA,EAC7B,cAAc,EAAE,MAAM,EAAE,MAAM,EAAE,OAAA,CAAQ,CAAC,EAAE,SAAA;AAAA,EAC3C,KAAK,EAAE,OAAA,EAAS,SAAA;AAAA,EAChB,QAAQ,EAAE,OAAA,EAAS,SAAA;AAAA,EACnB,OAAO,EAAE,QAAA,EAAU,SAAA;AAAA,EACnB,SAAS,EAAE,OAAA,EAAS,SAAA;AACtB,CAAC;AAGD,MAAM,oBAAoB,EAAE,OAAO;AAAA,EACjC,KAAK,EAAE,OAAA,EAAS,SAAA;AAAA,EAChB,UAAU,EAAE,OAAA,EAAS,SAAA;AACvB,CAA
C;AAGD,MAAM,kBAAkB,EACrB,OAAO;AAAA,EACN,MAAM,EAAE,OAAA;AAAA,EACR,YAAY,EAAE,OAAA;AAAA,EACd,WAAW,EAAE,OAAA;AAAA,EACb,iBAAiB,EAAE,MAAM,EAAE,OAAA,CAAQ,EAAE,SAAA;AAAA,EACrC,UAAU,kBAAkB,SAAA;AAAA,EAC5B,MAAM,EAAE,MAAM,EAAE,OAAA,CAAQ,EAAE,SAAA;AAC5B,CAAC,EACA,YAAA;AAGI,MAAM,gBAAgB,EAC1B,OAAO;AAAA,EACN,IAAI,EAAE,OAAA;AAAA,EACN,MAAM,EAAE,OAAA;AAAA,EACR,OAAO,EAAE,OAAA,EAAS,SAAA;AAAA,EAClB,QAAQ,EAAE,MAAM,aAAa,EAAE,SAAA;AAAA,EAC/B,QAAQ,EAAE,MAAM,aAAa,EAAE,SAAA;AAAA,EAC/B,QAAQ,cAAc,SAAA;AAAA,EACtB,UAAU,cAAc,SAAA;AAAA,EACxB,mBAAmB,EAAE,OAAA,EAAS,SAAA;AAAA,EAC9B,QAAQ,EAAE,OAAA,EAAS,SAAA;AAAA,EACnB,OAAO,EAAE,OAAA,EAAS,SAAA;AAAA,EAClB,MAAM,EAAE,OAAA,EAAS,SAAA;AAAA,EACjB,KAAK,EAAE,OAAA,EAAS,SAAA;AAAA,EAChB,MAAM,EAAE,OAAA,EAAS,SAAA;AAAA,EACjB,OAAO,EAAE,OAAA,EAAS,SAAA;AAAA,EAClB,MAAM,EAAE,OAAA,EAAS,SAAA;AAAA,EACjB,MAAM,EAAE,OAAA,EAAS,SAAA;AAAA,EACjB,KAAK,EAAE,OAAA,EAAS,SAAA;AAAA,EAChB,UAAU,EAAE,OAAA,EAAS,SAAA;AAAA,EACrB,WAAW,EAAE,OAAA,EAAS,SAAA;AAAA,EACtB,mBAAmB,EAAE,OAAA,EAAS,SAAA;AAAA,EAC9B,MAAM,EAAE,OAAA,EAAS,SAAA;AAAA,EACjB,SAAS,EAAE,MAAM,EAAE,OAAA,CAAQ,EAAE,SAAA;AAAA,EAC7B,QAAQ,gBAAgB,SAAA;AAAA;AAE1B,CAAC,EACA,YAAA;AAGI,MAAM,mBAAmB,EAAE,MAAM,aAAa;AC7DrD,SAAS,aAAa,SAAwC;AAC5D,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ,KAAA,MAAW,IAAI;AACzB,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,QACd,MAAM,GAAG,EACT,IAAI,CAAC,MAAM,EAAE,KAAA,CAAM,EACnB,OAAO,CAAC,MAAM,MAAM,EAAE;AAEzB,SAAO,SAAS,SAAS,IAAI,WAAW;AAC1C;AAQA,eAAsB,aAAa,UAAuC;AAExE,QAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAGhD,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,OAAO;AAAA,EAC9B,SAAS,OAAO;AACd,UAAM,IAAI;AAAA,MACR,yBAAyB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,IAAA;AAAA,EAEnF;AAGA,MAAI,MAAM,QAAQ,OAAO,GAAG;AAC1B,cAAU,QAAQ,IAAI,CAAC,SAAkB;AACvC,UAAI,QAAQ,OAAO,SAAS,YAAY,aAAa,MAAM;AACzD,cAAM,kBAAkB;AACxB,eAAO;AAAA,UACL,GAAG;AAAA,UACH,SAAS,aAAa,gBAAgB,OAAO;AAAA,QAAA;AAAA,MAEjD;AACA,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAGA,QAAM,cAAc,iBAAiB,UAAU,OAAO;AAEtD,MAAI,CAAC,YAAY,SAAS;AACxB,UAAM,IAAI,MAAM,+BAA+B,YAAY,MAAM,OAAO,EAAE;AAAA,EAC5E;AAEA,QAAM,UAAU,YAAY;AAG5B,QAAM,mBAA+B,QAAQ,IAAI,CAAC,SAAS;AACzD,UAAM,gBAAgB,qBAAqB,KAAK,MAAM;AAEtD,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ;AAAA,IAAA;AAAA,EAEZ,CAAC;AAED,SAAO;AACT;ACvEA,SAAS,iBAAiB,UAAoD;AAC5E,MAAI,CAAC,YAAY,SAAS,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,SAAO,SAAS,KAAK,IAAI;AAC3B;AAOO,SAAS,iBAAiB,SAA6B;AAE5D,QAAM,iBAAiB,QAAQ,IAAI,CAAC,SAAS;AAC3C,UAAM,EAAE,SAAS,GAAG,KAAA,IAAS;AAC7B,UAAM,oBAAoB,iBAAiB,OAAO;AAElD,QAAI,sBAAsB,QAAW;AACnC,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH,SAAS;AAAA,IAAA;AAAA,EAEb,CAAC;AAED,SAAO,KAAK,UAAU,gBAAgB,MAAM,CAAC;AAC/C;AAQA,eAAsB,aAAa,UAAkB,SAAoC;AAEvF,QAAM,MAAM,QAAQ,QAAQ;AAC5B,QAAM,MAAM,KAAK,EAAE,WAAW,MAAM;AAGpC,QAAM,UAAU,iBAAiB,OAAO;AACxC,QAAM,UAAU,UAAU,SAAS,OAAO;AAC5C;ACzBO,MAAM,QAA4B;AAAA,EAC/B;AAAA,EACA,aAA0B,CAAA;AAAA,EAC1B,cAA6B;AAAA;AAAA,EAG7B,gCAAwC,IAAA;AAAA,EACxC,8BAAsC,IAAA;AAAA,EACtC,+BAAuC,IAAA;AAAA,EACvC,gCAAwC,IAAA;AAAA,EACxC,gCAAwC,IAAA;AAAA,EAExC,YAAY,UAAkB,OAAkB;AACtD,SAAK,WAAW;AAGhB,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,IAAI,UAAU,IAAI;AAC9B,WAAK,WAAW,KAAK,GAAG;AACxB,WAAK,aAAa,GAAG;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,aAAa,KAAK,UAAoC;AAEpD,QAAI,CAAC,WAAW,QAAQ,GAAG;AAEzB,YAAM,MAAM,QAAQ,QAAQ;AAC5B,YAAM,MAAM,KAAK,EAAE,WAAW,MAAM;AAEpC,YAAM,aAAa,UAAU,EAAE;AAAA,IACjC;AAEA,UAAM,QAAQ,MAAM,aAAa,QAAQ;AACzC,UAAM,UAAU,IAAI,QAAQ,UAAU,KAAK;AAE3C,YAAQ,cAAc,MAAM,gBAAgB,QAAQ;AACpD,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAsB;AAC1B,UAAM,QAAQ,KAAK,WAAW,IAAI,CAAC,QAAQ,IAAI,SAAS;AACxD,UAAM,aAAa,KAAK,UAAU,KAAK;AAEvC,SAAK,cAAc,MAAM,gBAAgB,KAAK,QAAQ;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,SAA2B;AAC/B,
UAAM,UAAU,MAAM,gBAAgB,KAAK,QAAQ;AAEnD,QAAI,YAAY,KAAK,aAAa;AAEhC,aAAO;AAAA,IACT;AAGA,UAAM,QAAQ,MAAM,aAAa,KAAK,QAAQ;AAG9C,SAAK,aAAa,CAAA;AAClB,SAAK,UAAU,MAAA;AACf,SAAK,QAAQ,MAAA;AACb,SAAK,SAAS,MAAA;AACd,SAAK,UAAU,MAAA;AAEf,eAAW,QAAQ,OAAO;AACxB,YAAM,MAAM,IAAI,UAAU,IAAI;AAC9B,WAAK,WAAW,KAAK,GAAG;AACxB,WAAK,aAAa,GAAG;AAAA,IACvB;AAGA,SAAK,cAAc;AAEnB,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,IAAI,MAAiC;AAEzC,UAAM,cAAc,IAAI,IAAI,KAAK,WAAW,IAAI,CAACC,SAAQA,KAAI,MAAA,CAAO,CAAC;AAGrE,UAAM,MAAM,UAAU,OAAO,MAAM,EAAE,aAAa;AAGlD,SAAK,WAAW,KAAK,GAAG;AACxB,SAAK,aAAa,GAAG;AAGrB,WAAO,IAAI,QAAA;AAAA,EACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,OAAO,YAAoB,UAAyB,IAA2B;AACnF,UAAM,EAAE,SAAS,KAAA,IAAS;AAC1B,QAAI;AACJ,YAAQ,QAAA;AAAA,MACN,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,SAAS,IAAI,UAAU;AAClC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF;AACE,cAAM,KAAK,QAAQ,IAAI,UAAU;AACjC;AAAA,IAAA;AAEJ,QAAI,CAAC,KAAK;AACR,aAAO,EAAE,SAAS,MAAA;AAAA,IACpB;AACA,UAAM,cAAc,IAAI,QAAA;AACxB,UAAM,UAAU,KAAK,gBAAgB,GAAG;AACxC,WAAO,EAAE,SAAS,YAAA;AAAA,EACpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,OACJ,YACA,SACA,UAAyB,CAAA,GACF;AACvB,UAAM,EAAE,SAAS,MAAM,GAAG,kBAAkB;AAC5C,QAAI;AACJ,YAAQ,QAAA;AAAA,MACN,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,SAAS,IAAI,UAAU;AAClC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF;AACE,cAAM,KAAK,QAAQ,IAAI,UAAU;AACjC;AAAA,IAAA;AAGJ,QAAI,CAAC,KAAK;AACR,aAAO,EAAE,SAAS,MAAA;AAAA,IACpB;AAEA,WAAO,KAAK,gBAAgB,KAAK,SAAS,aAAa;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAK,YAAoB,UAAuB,IAAkC;AACtF,UAAM,EAAE,SAAS,KAAA,IAAS;AAE1B,QAAI;AACJ,YAAQ,QAAA;AAAA,MACN,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,SAAS,IAAI,UAAU;AAClC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF,KAAK;AACH,cAAM,KAAK,UAAU,IAAI,UAAU;AACnC;AAAA,MACF;AACE,cAAM,KAAK,QAAQ,IAAI,UAAU;AACjC;AAAA,IAAA;AAGJ,WAAO,KAAK,QAAA;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,SAA6B;AACjC,WAAO,KAAK,WAAW,IAAI,CAAC,QAAQ,IAAI,SAAS;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA,EAKA,cAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,iBAAgC;AAC9B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAa,KAAsB;AAEzC,SAAK,UAAU,IAAI,IAAI,QAAA,GAAW,GAAG;AAGrC,SAAK,QAAQ,IAAI,IAAI,MAAA,GAAS,GAAG;AAGjC,UAAM,MAAM,IAAI,OAAA;AAChB,QAAI,KAAK;AACP,WAAK,SAAS,IAAI,KAAK,GAAG;AAAA,IAC5B;AAGA,UAAM,OAAO,IAAI,QAAA;AACjB,QAAI,MAAM;AACR,WAAK,UAAU,IAAI,MAAM,GAAG;AAAA,IAC9B;AAGA,UAAM,OAAO,IAAI,QAAA;AACjB,QAAI,MAAM;AACR,WAAK,UAAU,IAAI,MAAM,GAAG;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,KAAyB;AAC/C,UAAM,QAAQ,KAAK,WAAW,QAAQ,GAAG;AACzC,QAAI,UAAU,IAAI;AAChB,aAAO;AAAA,IACT;AACA,SAAK,WAAW,OAAO,OAAO,CAAC;AAC/B,SAAK,kBAAkB,GAAG;AAC1B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,gBACN,KACA,SACA,UAAyB,CAAA,GACX;AACd,UAAM,QAAQ,KAAK,WAAW,QAAQ,GAAG;AACzC,QAAI,UAAU,IAAI;AAChB,aAAO,EAAE,SAAS,MAAA;AAAA,IACpB;AAEA,UAAM,eAAe,IAAI,QAAA;AACzB,UAAM,YAAY,IAAI,MAAA;AACtB,UAAM,EAAE,OAAO,WAAW,UAAA,IAAc,KAAK;AAAA,MAC3C,QAAQ,MAAM,aAAa;AAAA,MAC3B;AAAA,MACA;AAAA,IAAA;AAGF,QAAI,WAAW;AACb,aAAO,EAAE,SAAS,OAAO,aAAa,KAAA;AAAA,IACxC;AAEA,UAAM,cAAc,KAAK,iBAAiB,cAAc,SAAS,KAAK;AAGtE,SAAK,kBAAkB,GAAG;AAG1B,UAAM,SAAS,IAAI,UAAU,WAAW;AACxC,SAAK,WAAW,KAAK,IAAI;AACzB,SAAK,aAAa,MAAM;AAExB,UAAM,SAAuB,EAAE,SAAS,MAAM,MAAM,OAAO,UAAQ;AACnE,QAAI,WAAW;AACb,aAAO,YAAY;AACnB,aAAO,QAAQ;AAAA,IACjB;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,aACA,WACA,SAC2D;AAC3D,QAAI,gBAAgB,WAAW;AAC7B,aAAO,
EAAE,OAAO,aAAa,WAAW,OAAO,WAAW,MAAA;AAAA,IAC5D;AAEA,UAAM,iBAAiB,KAAK,QAAQ,IAAI,WAAW;AACnD,QAAI,CAAC,gBAAgB;AACnB,aAAO,EAAE,OAAO,aAAa,WAAW,OAAO,WAAW,MAAA;AAAA,IAC5D;AAEA,UAAM,gBAAgB,QAAQ,iBAAiB;AAC/C,QAAI,kBAAkB,QAAQ;AAC5B,aAAO,EAAE,OAAO,aAAa,WAAW,OAAO,WAAW,KAAA;AAAA,IAC5D;AAGA,UAAM,cAAc,IAAI,IAAI,KAAK,WAAW,IAAI,CAAC,MAAM,EAAE,MAAA,CAAO,CAAC;AACjE,gBAAY,OAAO,SAAS;AAC5B,UAAM,aAAa,KAAK,mBAAmB,aAAa,WAAW;AACnE,WAAO,EAAE,OAAO,YAAY,WAAW,MAAM,WAAW,MAAA;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA,EAKQ,iBACN,cACA,SACA,OACS;AACT,WAAO;AAAA,MACL,GAAG;AAAA,MACH,GAAG;AAAA,MACH,IAAI;AAAA,MACJ,MAAM,QAAQ,QAAQ,aAAa;AAAA,MACnC,QAAQ;AAAA,QACN,GAAI,aAAa,UAAU,CAAA;AAAA,QAC3B,GAAI,QAAQ,UAAU,CAAA;AAAA,QACtB,MAAM,aAAa,QAAQ,QAAQ;AAAA,QACnC,YAAY,aAAa,QAAQ,eAAc,oBAAI,KAAA,GAAO,YAAA;AAAA,QAC1D,YAAW,oBAAI,KAAA,GAAO,YAAA;AAAA,MAAY;AAAA,IACpC;AAAA,EAEJ;AAAA;AAAA;AAAA;AAAA,EAKQ,kBAAkB,KAAsB;AAC9C,SAAK,UAAU,OAAO,IAAI,QAAA,CAAS;AACnC,SAAK,QAAQ,OAAO,IAAI,MAAA,CAAO;AAE/B,UAAM,MAAM,IAAI,OAAA;AAChB,QAAI,KAAK;AACP,WAAK,SAAS,OAAO,GAAG;AAAA,IAC1B;AAEA,UAAM,OAAO,IAAI,QAAA;AACjB,QAAI,MAAM;AACR,WAAK,UAAU,OAAO,IAAI;AAAA,IAC5B;AAEA,UAAM,OAAO,IAAI,QAAA;AACjB,QAAI,MAAM;AACR,WAAK,UAAU,OAAO,IAAI;AAAA,IAC5B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,eAAe,OAAuB;AAC5C,UAAM,WAAW;AACjB,QAAI,SAAS;AACb,QAAI,IAAI;AAER,OAAG;AACD,eAAS,SAAS,IAAI,EAAE,IAAI;AAC5B,UAAI,KAAK,MAAM,IAAI,EAAE,IAAI;AAAA,IAC3B,SAAS,KAAK;AAEd,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,mBAAmB,QAAgB,aAAkC;AAC3E,QAAI,CAAC,YAAY,IAAI,MAAM,GAAG;AAC5B,aAAO;AAAA,IACT;AAEA,QAAI,QAAQ;AACZ,QAAI;AAEJ,OAAG;AACD,YAAM,SAAS,KAAK,eAAe,KAAK;AACxC,cAAQ,GAAG,MAAM,GAAG,MAAM;AAC1B;AAAA,IACF,SAAS,YAAY,IAAI,KAAK;AAE9B,WAAO;AAAA,EACT;AACF;AC5cA,MAAM,mCAAwC,IAAI;AAAA,EAChD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAKD,SAAS,aAAa,OAAe,OAAwB;AAC3D,SAAO,KAAK,KAAK,MAAM,OAAO,KAAK,CAAC;AACtC;AAKA,SAAS,QAAQ,OAAe,OAAwB;AACtD,SAAO,MAAM,OAAO,KAAK,MAAM;AACjC;AAKO,SAAS,SAAS,OAA4B;AACnD,QAAM,SAAwB,CAAA;AAC9B,MAAI,IAAI;AAER,SAAO,IAAI,MAAM,QAAQ;AAEvB,QAAI,aAAa,OAAO,CAAC,GAAG;AAC1B;AACA;AAAA,IACF;AAGA,UAAM,SAAS,eAAe,OAAO,CAAC;AACtC,QAAI,OAAO,OAAO;AAChB,aAAO,KAAK,OAAO,KAAK;AAAA,IAC1B;AACA,QAAI,OAAO;AAAA,EACb;AAEA,SAAO;AAAA,IACL,UAAU;AAAA,IACV;AAAA,EAAA;AAEJ;AAOA,SAAS,qBAAqB,OAAe,OAAe,KAAsB;AAChF,WAAS,IAAI,OAAO,IAAI,KAAK,KAAK;AAChC,QAAI,aAAa,OAAO,CAAC,GAAG;AAC1B,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAMA,SAAS,mBAAmB,OAAe,YAAwC;AACjF,QAAM,aAAa,MAAM,QAAQ,KAAK,UAAU;AAChD,MAAI,eAAe,IAAI;AACrB,WAAO;AAAA,EACT;AAGA,MAAI,qBAAqB,OAAO,YAAY,UAAU,GAAG;AACvD,WAAO;AAAA,EACT;AAEA,QAAM,YAAY,MAAM,UAAU,YAAY,UAAU;AACxD,MAAI,CAAC,aAAa,IAAI,SAA2B,GAAG;AAClD,WAAO;AAAA,EACT;AAGA,QAAM,aAAa,aAAa;AAGhC,MAAI,cAAc,MAAM,UAAU,aAAa,OAAO,UAAU,GAAG;AACjE,WAAO,EAAE,OAAO,MAAM,WAAW,WAAA;AAAA,EACnC;AAGA,MAAI,QAAQ,OAAO,UAAU,GAAG;AAC9B,UAAM,cAAc,iBAAiB,OAAO,UAAU;AACtD,QAAI,YAAY,UAAU,MAAM;AAC9B,aAAO;AAAA,QACL,OAAO;AAAA,UACL,KAAK,MAAM,UAAU,YAAY,YAAY,SAAS;AAAA,UACtD,OAAO,YAAY;AAAA,UACnB,OAAO;AAAA,UACP,UAAU;AAAA,QAAA;AAAA,QAEZ,WAAW,YAAY;AAAA,MAAA;AAAA,IAE3B;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,mBAAmB,OAAO,UAAU;AACxD,SAAO;AAAA,IACL,OAAO;AAAA,MACL,KAAK,MAAM,UAAU,YAAY,YAAY,SAAS;AAAA,MACtD,OAAO,YAAY;AAAA,MACnB,OAAO;AAAA,MACP,UAAU;AAAA,IAAA;AAAA,IAEZ,WAAW,YAAY;AAAA,EAAA;AAE3B;AAKA,SAAS,iBAAiB,OAAe,YAAiC;AACxE,QAAM,cAAc,iBAAiB,OAAO,UAAU;AACtD,MAAI,YAAY,UAAU,MAAM;AAC9B,WAAO;AAAA,MACL,OAAO;AAAA,QACL,KAAK,MAAM,UAAU,YAAY,YAAY,SAAS;AAAA,QACtD,OAAO,YAAY;AAAA,QACnB,UAAU;AAAA,MAAA;AAAA,MAEZ,WAAW,YAAY;AAAA,IAAA;AAAA,EAE3B;AAGA,MAAI,YAAY,YAAY,YAAY;AAEtC,WAAO,EAAE,OAAO,MAAM,WAAW,YAAY,UAAA;AAAA,EAC/C;AAGA,QAAM,cAAc,mBAAmB,OAAO,YAAY,IAAI;AAC9D,SAAO;AAAA,I
ACL,OAAO;AAAA,MACL,KAAK,YAAY;AAAA,MACjB,OAAO,YAAY;AAAA,MACnB,UAAU;AAAA,IAAA;AAAA,IAEZ,WAAW,YAAY;AAAA,EAAA;AAE3B;AAKA,SAAS,kBAAkB,OAAe,YAAiC;AACzE,QAAM,cAAc,mBAAmB,OAAO,UAAU;AACxD,SAAO;AAAA,IACL,OAAO;AAAA,MACL,KAAK,YAAY;AAAA,MACjB,OAAO,YAAY;AAAA,MACnB,UAAU;AAAA,IAAA;AAAA,IAEZ,WAAW,YAAY;AAAA,EAAA;AAE3B;AAKA,SAAS,eAAe,OAAe,YAAiC;AAEtE,QAAM,cAAc,mBAAmB,OAAO,UAAU;AACxD,MAAI,gBAAgB,MAAM;AACxB,WAAO;AAAA,EACT;AAGA,MAAI,QAAQ,OAAO,UAAU,GAAG;AAC9B,WAAO,iBAAiB,OAAO,UAAU;AAAA,EAC3C;AAGA,SAAO,kBAAkB,OAAO,UAAU;AAC5C;AAKA,SAAS,iBACP,OACA,YAC6C;AAC7C,MAAI,CAAC,QAAQ,OAAO,UAAU,GAAG;AAC/B,WAAO,EAAE,OAAO,MAAM,WAAW,WAAA;AAAA,EACnC;AAEA,MAAI,IAAI,aAAa;AACrB,QAAM,aAAa;AAGnB,SAAO,IAAI,MAAM,UAAU,CAAC,QAAQ,OAAO,CAAC,GAAG;AAC7C;AAAA,EACF;AAGA,MAAI,KAAK,MAAM,QAAQ;AACrB,WAAO,EAAE,OAAO,MAAM,WAAW,WAAA;AAAA,EACnC;AAEA,QAAM,QAAQ,MAAM,UAAU,YAAY,CAAC;AAC3C;AAGA,MAAI,MAAM,KAAA,MAAW,IAAI;AACvB,WAAO,EAAE,OAAO,MAAM,WAAW,EAAA;AAAA,EACnC;AAEA,SAAO,EAAE,OAAO,WAAW,EAAA;AAC7B;AAMA,SAAS,mBACP,OACA,YACA,gBAAgB,OACsB;AACtC,MAAI,IAAI;AAGR,SAAO,IAAI,MAAM,UAAU,CAAC,aAAa,OAAO,CAAC,GAAG;AAClD,QAAI,CAAC,iBAAiB,QAAQ,OAAO,CAAC,GAAG;AACvC;AAAA,IACF;AACA;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAO,MAAM,UAAU,YAAY,CAAC;AAAA,IACpC,WAAW;AAAA,EAAA;AAEf;ACrPO,SAAS,UAAU,MAAsB;AAE9C,MAAI,aAAa,KAAK,UAAU,MAAM;AAGtC,eAAa,WAAW,YAAA;AAIxB,eAAa,WAAW,UAAU,KAAK,EAAE,QAAQ,WAAA,UAAA,OAAW,EAAE;AAK9D,eAAa,WAAW,QAAQ,sBAAsB,GAAG;AAKzD,eAAa,WAAW,QAAQ,QAAQ,GAAG,EAAE,KAAA;AAE7C,SAAO;AACT;AAaO,SAAS,wBAAwB,MAAsB;AAE5D,MAAI,aAAa,KAAK,UAAU,MAAM;AAItC,eAAa,WAAW,UAAU,KAAK,EAAE,QAAQ,WAAA,UAAA,OAAW,EAAE;AAK9D,eAAa,WAAW,QAAQ,QAAQ,GAAG,EAAE,KAAA;AAE7C,SAAO;AACT;ACxCO,SAAS,wBAAwB,MAAuB;AAC7D,QAAM,UAAU;AAChB,SAAO,QAAQ,KAAK,IAAI;AAC1B;AASO,SAAS,yBAAyB,MAAkC;AACzE,QAAM,UAAU;AAChB,QAAM,WAA+B,CAAA;AAErC,aAAW,SAAS,KAAK,SAAS,OAAO,GAAG;AAC1C,aAAS,KAAK;AAAA,MACZ,SAAS,MAAM,CAAC;AAAA,MAChB,OAAO,MAAM;AAAA,MACb,KAAK,MAAM,QAAQ,MAAM,CAAC,EAAE;AAAA,IAAA,CAC7B;AAAA,EACH;AAEA,SAAO;AACT;AAKA,SAAS,YAAY,KAAqB;AACxC,SAAO,IAAI,QAAQ,uBAAuB,MAAM;AAClD;AAKA,SAAS,oBAAoB,MAAsB;AACjD,SAAO,KAAK,QAAQ,QAAQ,GAAG,EAAE,KAAA;AACnC;AAKA,SAAS,0BAA0B,UAA8B,QAAyB;AACxF,SAAO,SAAS,MAAM,CAAC,QAAQ,OAAO,SAAS,IAAI,OAAO,CAAC;AAC7D;AAMA,SAAS,kBAAkB,OAAe,UAAsC;AAC9E,QAAM,eAAyB,CAAA;AAC/B,MAAI,UAAU;AAEd,aAAW,OAAO,UAAU;AAC1B,QAAI,IAAI,QAAQ,SAAS;AACvB,YAAM,aAAa,MAAM,MAAM,SAAS,IAAI,KAAK;AACjD,UAAI,WAAW,QAAQ;AACrB,qBAAa,KAAK,YAAY,UAAU,CAAC;AAAA,MAC3C;AAAA,IACF;AACA,iBAAa,KAAK,MAAM,YAAY,IAAI,OAAO,CAAC,GAAG;AACnD,cAAU,IAAI;AAAA,EAChB;AAEA,MAAI,UAAU,MAAM,QAAQ;AAC1B,UAAM,YAAY,MAAM,MAAM,OAAO;AACrC,QAAI,UAAU,QAAQ;AACpB,mBAAa,KAAK,YAAY,SAAS,CAAC;AAAA,IAC1C;AAAA,EACF;AAEA,SAAO,aAAa,KAAK,KAAK;AAChC;AAcO,SAAS,8BAA8B,OAAe,QAAyB;AACpF,MAAI,UAAU,IAAI;AAChB,WAAO;AAAA,EACT;AACA,MAAI,WAAW,IAAI;AACjB,WAAO;AAAA,EACT;AAEA,QAAM,kBAAkB,oBAAoB,KAAK;AACjD,QAAM,mBAAmB,oBAAoB,MAAM;AAEnD,MAAI,CAAC,wBAAwB,eAAe,GAAG;AAC7C,WAAO,iBAAiB,YAAA,EAAc,SAAS,gBAAgB,aAAa;AAAA,EAC9E;AAEA,QAAM,WAAW,yBAAyB,eAAe;AAEzD,MAAI,CAAC,0BAA0B,UAAU,MAAM,GAAG;AAChD,WAAO;AAAA,EACT;AAEA,QAAM,UAAU,kBAAkB,iBAAiB,QAAQ;AAE3D,MAAI;AACF,UAAM,QAAQ,IAAI,OAAO,SAAS,GAAG;AACrC,WAAO,MAAM,KAAK,gBAAgB;AAAA,EACpC,QAAQ;AACN,WAAO,iBAAiB,YAAA,EAAc,SAAS,gBAAgB,aAAa;AAAA,EAC9E;AACF;ACjIA,MAAM,gCAAgB,IAAI,CAAC,OAAO,QAAQ,SAAS,KAAK,CAAC;AAKzD,SAASD,cAAY,WAA4B;AAC/C,MAAI,UAAU,SAAS,YAAY,IAAI,CAAC,IAAI,CAAC,GAAG;AAC9C,WAAO,OAAO,UAAU,OAAO,YAAY,EAAE,CAAC,EAAE,CAAC,CAAC;AAAA,EACpD;AACA,SAAO;AACT;AAMA,SAAS,eAAe,WAA4B;AAClD,MAAI,CAAC,UAAU,UAAU,UAAU,OAAO,WAAW,GAAG;AACtD,WAAO;AAAA,EACT;AAEA,SAAO,UAAU,OACd,IAAI,CAAC,WAAW;AACf,UAAM,SAAS,OAAO,UAAU;AAChC,UAAM,QAAQ,OAAO,SAAS;AAC9B,WAAO,QAAQ,GAAG,MAAM,IAAI,KAAK,KAAK;AAAA,EACxC,CAAC,EACA
,KAAK,GAAG;AACb;AAKA,SAAS,cAAc,WAAoB,OAA8B;AAEvE,MAAI,UAAU,QAAQ;AACpB,WAAOA,cAAY,SAAS;AAAA,EAC9B;AAEA,MAAI,UAAU,UAAU;AACtB,WAAO,eAAe,SAAS;AAAA,EACjC;AAGA,QAAM,QAAQ,UAAU,KAAsB;AAC9C,MAAI,OAAO,UAAU,UAAU;AAC7B,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,WAAW,SAAS,GAAG;AAC/B,UAAM,cAAc,MAAM,UAAU,CAAC;AACrC,UAAM,cAAe,UAAU,SAAqC,WAAW;AAC/E,QAAI,OAAO,gBAAgB,UAAU;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,SAAS,YAAoB,WAAuC;AAE3E,MAAI,UAAU,QAAQ,YAAY;AAChC,WAAO;AAAA,MACL,OAAO;AAAA,MACP,UAAU;AAAA,MACV,OAAO,UAAU;AAAA,IAAA;AAAA,EAErB;AAGA,QAAM,iBAAkB,UAAU,QAAoC;AACtE,MAAI,MAAM,QAAQ,cAAc,GAAG;AACjC,eAAW,OAAO,gBAAgB;AAChC,UAAI,OAAO,QAAQ,YAAY,QAAQ,YAAY;AACjD,eAAO;AAAA,UACL,OAAO;AAAA,UACP,UAAU;AAAA,UACV,OAAO;AAAA,QAAA;AAAA,MAEX;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,aAAa,YAAoB,WAAuC;AAE/E,MAAI,CAAC,UAAU,WAAW,CAAC,MAAM,QAAQ,UAAU,OAAO,GAAG;AAC3D,WAAO;AAAA,EACT;AAGA,QAAM,kBAAkB,wBAAwB,UAAU;AAG1D,aAAW,WAAW,UAAU,SAAS;AACvC,QAAI,OAAO,YAAY,UAAU;AAC/B,YAAM,oBAAoB,wBAAwB,OAAO;AAEzD,UAAI,8BAA8B,iBAAiB,iBAAiB,GAAG;AACrE,eAAO;AAAA,UACL,OAAO;AAAA,UACP,UAAU;AAAA,UACV,OAAO;AAAA,QAAA;AAAA,MAEX;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,SAAS,YAAoB,WAAuC;AAE3E,MAAI,CAAC,UAAU,QAAQ,QAAQ,CAAC,MAAM,QAAQ,UAAU,OAAO,IAAI,GAAG;AACpE,WAAO;AAAA,EACT;AAGA,QAAM,kBAAkB,wBAAwB,UAAU;AAG1D,aAAW,OAAO,UAAU,OAAO,MAAM;AACvC,QAAI,OAAO,QAAQ,UAAU;AAC3B,YAAM,gBAAgB,wBAAwB,GAAG;AAEjD,UAAI,8BAA8B,iBAAiB,aAAa,GAAG;AACjE,eAAO;AAAA,UACL,OAAO;AAAA,UACP,UAAU;AAAA,UACV,OAAO;AAAA,QAAA;AAAA,MAEX;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,MAAM,YAAoC;AAAA,EACxC,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,KAAK;AAAA,EACL,MAAM;AAAA,EACN,OAAO;AACT;AAKA,SAAS,eAAe,YAAoB,WAAuC;AACjF,QAAM,OAAOA,cAAY,SAAS;AAClC,MAAI,SAAS,YAAY;AACvB,WAAO;AAAA,MACL,OAAO;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,IAAA;AAAA,EAEX;AACA,SAAO;AACT;AAKA,SAAS,gBAAgB,OAAe,YAAoB,WAAuC;AACjG,QAAM,aAAa,cAAc,WAAW,KAAK;AACjD,MAAI,eAAe,MAAM;AACvB,WAAO;AAAA,EACT;AAGA,MAAI,UAAU,IAAI,KAAK,GAAG;AACxB,QAAI,eAAe,YAAY;AAC7B,aAAO;AAAA,QACL;AAAA,QACA,UAAU;AAAA,QACV,OAAO;AAAA,MAAA;AAAA,IAEX;AACA,WAAO;AAAA,EACT;AAIA,QAAM,uBAAuB,wBAAwB,UAAU;AAC/D,QAAM,kBAAkB,wBAAwB,UAAU;AAI1D,MAAI,8BAA8B,iBAAiB,oBAAoB,GAAG;AACxE,WAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV,OAAO;AAAA,IAAA;AAAA,EAEX;AACA,SAAO;AACT;AAKA,SAAS,mBAAmB,OAAoB,WAAkC;AAChF,QAAM,UAAwB,CAAA;AAC9B,QAAM,gBAAgB,MAAM;AAG5B,MAAI,kBAAkB,OAAO;AAC3B,UAAM,WAAW,SAAS,MAAM,OAAO,SAAS;AAChD,QAAI,SAAU,SAAQ,KAAK,QAAQ;AACnC,WAAO;AAAA,EACT;AAGA,MAAI,kBAAkB,QAAQ;AAC5B,UAAM,YAAY,eAAe,MAAM,OAAO,SAAS;AACvD,QAAI,UAAW,SAAQ,KAAK,SAAS;AACrC,WAAO;AAAA,EACT;AAGA,MAAI,kBAAkB,WAAW;AAC/B,UAAM,eAAe,aAAa,MAAM,OAAO,SAAS;AACxD,QAAI,aAAc,SAAQ,KAAK,YAAY;AAC3C,WAAO;AAAA,EACT;AAGA,MAAI,kBAAkB,OAAO;AAC3B,UAAM,WAAW,SAAS,MAAM,OAAO,SAAS;AAChD,QAAI,SAAU,SAAQ,KAAK,QAAQ;AACnC,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,UAAU,aAAa,KAAK;AAChD,QAAM,QAAQ,gBAAgB,aAAa,MAAM,OAAO,SAAS;AACjE,MAAI,MAAO,SAAQ,KAAK,KAAK;AAE7B,SAAO;AACT;AAKA,MAAM,yBAAyB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAKA,SAAS,iBACP,OACA,YACA,WACmB;AACnB,MAAI,UAAU,QAAQ;AACpB,WAAO,eAAe,YAAY,SAAS;AAAA,EAC7C;AACA,MAAI,UAAU,OAAO;AACnB,WAAO,SAAS,YAAY,SAAS;AAAA,EACvC;AACA,MAAI,UAAU,WAAW;AACvB,WAAO,aAAa,YAAY,SAAS;AAAA,EAC3C;AACA,MAAI,UAAU,OAAO;AACnB,WAAO,SAAS,YAAY,SAAS;AAAA,EACvC;AACA,SAAO,gBAAgB,OAAO,YAAY,SAAS;AACrD;AAKA,SAAS,eAAe,OAAoB,WAAkC;AAC5E,QAAM,UAAwB,CAAA;AAG9B,QAAM,gBAAgB,CAAC,QAAQ,OAAO,WAAW,KAAK;AACtD,aAAW,SAAS,eAAe;AACjC,UAAM,QAAQ,iBAAiB,OAAO,MAAM,OAAO,SAAS;AAC5D,QAAI,MAAO,SAAQ,KAAK,KAAK;AAAA,EAC/B;AAGA,aAAW,SAAS,wBAAwB;AAC1C,UAAM,QAAQ,gBAAgB,OAAO,MAAM,OAAO,SAAS;AAC3D,QAAI,MAAO,SAAQ,KAAK,KAAK;AAAA,EAC/B;AAEA,SAAO;AACT;AAMO,SAAS,WA
AW,OAAoB,WAAkC;AAE/E,MAAI,MAAM,OAAO;AACf,WAAO,mBAAmB,OAAO,SAAS;AAAA,EAC5C;AAGA,SAAO,eAAe,OAAO,SAAS;AACxC;AAMO,SAAS,eAAe,WAAoB,QAA4C;AAE7F,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,EACT;AAEA,QAAM,eAA6C,CAAA;AACnD,MAAI,kBAAiC;AAGrC,aAAW,SAAS,QAAQ;AAC1B,UAAM,UAAU,WAAW,OAAO,SAAS;AAG3C,QAAI,QAAQ,WAAW,GAAG;AACxB,aAAO;AAAA,IACT;AAGA,UAAM,gBAAgB,QAAQ,KAAK,CAAC,MAAM,EAAE,aAAa,OAAO,IAAI,UAAU;AAG9E,QAAI,kBAAkB,SAAS;AAC7B,wBAAkB;AAAA,IACpB,WAAW,kBAAkB,aAAa,oBAAoB,QAAQ;AACpE,wBAAkB;AAAA,IACpB;AAEA,iBAAa,KAAK;AAAA,MAChB;AAAA,MACA;AAAA,IAAA,CACD;AAAA,EACH;AAIA,QAAM,QAAQ,oBAAoB,UAAU,MAAM,aAAa,SAAS,KAAK,aAAa;AAE1F,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;AAMO,SAAS,OAAO,YAAuB,QAAuC;AACnF,QAAM,UAA0B,CAAA;AAEhC,aAAW,aAAa,YAAY;AAClC,UAAM,QAAQ,eAAe,WAAW,MAAM;AAC9C,QAAI,OAAO;AACT,cAAQ,KAAK,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,SAAO;AACT;AC/YA,SAASA,cAAY,WAA4B;AAC/C,MAAI,UAAU,SAAS,YAAY,IAAI,CAAC,IAAI,CAAC,GAAG;AAC9C,WAAO,OAAO,UAAU,OAAO,YAAY,EAAE,CAAC,EAAE,CAAC,CAAC;AAAA,EACpD;AACA,SAAO;AACT;AAMA,SAAS,yBAAyB,WAA4B;AAC5D,MAAI,CAAC,UAAU,UAAU,UAAU,OAAO,WAAW,GAAG;AACtD,WAAO;AAAA,EACT;AACA,SAAO,UAAU,OAAO,CAAC,GAAG,UAAU;AACxC;AAMA,SAAS,aAAa,WAA4B;AAChD,SAAO,UAAU,SAAS;AAC5B;AAMA,SAAS,gBAAgB,GAAkB,GAA0B;AACnE,QAAM,gBAAgB,EAAE,OAAO,GAAG,SAAS,GAAG,MAAM,EAAA;AACpD,SAAO,cAAc,CAAC,IAAI,cAAc,CAAC;AAC3C;AAMA,SAAS,YAAY,GAAY,GAAoB;AACnD,QAAM,QAAQA,cAAY,CAAC;AAC3B,QAAM,QAAQA,cAAY,CAAC;AAC3B,SAAO,OAAO,KAAK,IAAI,OAAO,KAAK;AACrC;AAMA,SAAS,cAAc,GAAY,GAAoB;AACrD,QAAM,UAAU,yBAAyB,CAAC,EAAE,YAAA;AAC5C,QAAM,UAAU,yBAAyB,CAAC,EAAE,YAAA;AAE5C,MAAI,YAAY,MAAM,YAAY,GAAI,QAAO;AAC7C,MAAI,YAAY,MAAM,YAAY,GAAI,QAAO;AAC7C,SAAO,QAAQ,cAAc,OAAO;AACtC;AAMA,SAAS,aAAa,GAAY,GAAoB;AACpD,QAAM,SAAS,aAAa,CAAC,EAAE,YAAA;AAC/B,QAAM,SAAS,aAAa,CAAC,EAAE,YAAA;AAE/B,MAAI,WAAW,MAAM,WAAW,GAAI,QAAO;AAC3C,MAAI,WAAW,MAAM,WAAW,GAAI,QAAO;AAC3C,SAAO,OAAO,cAAc,MAAM;AACpC;AAUO,SAAS,YAAY,SAAyC;AAEnE,QAAM,UAAU,QAAQ,IAAI,CAAC,QAAQ,WAAW,EAAE,QAAQ,MAAA,EAAQ;AAGlE,QAAM,SAAS,QAAQ,KAAK,CAAC,GAAG,MAAM;AAEpC,UAAM,eAAe,gBAAgB,EAAE,OAAO,iBAAiB,EAAE,OAAO,eAAe;AACvF,QAAI,iBAAiB,EAAG,QAAO;AAG/B,UAAM,WAAW,YAAY,EAAE,OAAO,WAAW,EAAE,OAAO,SAAS;AACnE,QAAI,aAAa,EAAG,QAAO;AAG3B,UAAM,aAAa,cAAc,EAAE,OAAO,WAAW,EAAE,OAAO,SAAS;AACvE,QAAI,eAAe,EAAG,QAAO;AAG7B,UAAM,YAAY,aAAa,EAAE,OAAO,WAAW,EAAE,OAAO,SAAS;AACrE,QAAI,cAAc,EAAG,QAAO;AAG5B,WAAO,EAAE,QAAQ,EAAE;AAAA,EACrB,CAAC;AAGD,SAAO,OAAO,IAAI,CAAC,SAAS,KAAK,MAAM;AACzC;ACtGA,SAAS,aAAa,KAAqB;AAEzC,QAAM,aAAa,IAChB,QAAQ,2BAA2B,EAAE,EACrC,QAAQ,+BAA+B,EAAE,EACzC,QAAQ,UAAU,EAAE;AAEvB,SAAO;AACT;AAKA,SAAS,YAAY,MAA8B;AACjD,QAAM,YAAY,KAAK,SAAS,YAAY,IAAI,CAAC;AACjD,MAAI,CAAC,aAAa,UAAU,WAAW,GAAG;AACxC,WAAO;AAAA,EACT;AACA,SAAO,OAAO,UAAU,CAAC,CAAC;AAC5B;AAKA,SAAS,iBAAiB,MAA8B;AACtD,MAAI,CAAC,KAAK,UAAU,KAAK,OAAO,WAAW,GAAG;AAC5C,WAAO;AAAA,EACT;AAGA,QAAM,gBAAgB,KAAK,OAAO,IAAI,CAAC,WAAW;AAChD,UAAM,SAAS,OAAO,UAAU;AAChC,UAAM,eAAe,OAAO,QAAQ,OAAO,MAAM,OAAO,CAAC,IAAI;AAC7D,WAAO,GAAG,MAAM,IAAI,YAAY,GAAG,KAAA;AAAA,EACrC,CAAC;AAGD,SAAO,UAAU,cAAc,KAAK,GAAG,CAAC;AAC1C;AAKA,SAAS,cAAc,MAAe,UAA0C;AAC9E,MAAI,CAAC,KAAK,OAAO,CAAC,SAAS,KAAK;AAC9B,WAAO;AAAA,EACT;AAEA,QAAM,oBAAoB,aAAa,KAAK,GAAG;AAC/C,QAAM,wBAAwB,aAAa,SAAS,GAAG;AAGvD,MAAI,sBAAsB,uBAAuB;AAC/C,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,SAAS;AAAA,QACP,KAAK;AAAA,MAAA;AAAA,IACP;AAAA,EAEJ;AAEA,SAAO;AACT;AAKA,SAAS,eAAe,MAAe,UAA0C;AAC/E,MAAI,CAAC,KAAK,QAAQ,CAAC,SAAS,MAAM;AAChC,WAAO;AAAA,EACT;AAGA,MAAI,KAAK,SAAS,SAAS,MAAM;AAC/B,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,SAAS;AAAA,QACP,MAAM,SAAS;AAAA,MAAA;AAAA,IACjB;AAAA,EAEJ;AAEA,SAAO;AACT;AAKA,SAAS,cAAc,MAAsB;AAC3C,SAAO,KAAK,QAAQ,UAAU,EAAE,EAAE,YAAA;AACpC;AAKA,MAAM,aAAa,CAAC,MAAM;AAK1B,MAA
M,qBAAqB,CAAC,SAAS;AAOrC,SAAS,eAAe,MAAe,UAA0C;AAC/E,MAAI,CAAC,KAAK,QAAQ,CAAC,SAAS,MAAM;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,qBAAqB,cAAc,KAAK,IAAI;AAClD,QAAM,yBAAyB,cAAc,SAAS,IAAI;AAE1D,MAAI,uBAAuB,wBAAwB;AACjD,WAAO;AAAA,EACT;AAGA,MAAI,WAAW,SAAS,KAAK,IAAI,KAAK,WAAW,SAAS,SAAS,IAAI,GAAG;AACxE,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,SAAS;AAAA,QACP,MAAM;AAAA,MAAA;AAAA,IACR;AAAA,EAEJ;AAGA,MAAI,mBAAmB,SAAS,KAAK,IAAI,KAAK,mBAAmB,SAAS,SAAS,IAAI,GAAG;AACxF,UAAM,YAAY,KAAK,QAAQ,UAAU,KAAK,KAAK,IAAI;AACvD,UAAM,gBAAgB,SAAS,QAAQ,UAAU,SAAS,KAAK,IAAI;AAEnE,QAAI,aAAa,iBAAiB,cAAc,eAAe;AAC7D,aAAO;AAAA,QACL,MAAM;AAAA,QACN;AAAA,QACA,SAAS;AAAA,UACP,MAAM;AAAA,UACN,iBAAiB;AAAA,QAAA;AAAA,MACnB;AAAA,IAEJ;AACA,WAAO;AAAA,EACT;AAGA,SAAO;AAAA,IACL,MAAM;AAAA,IACN;AAAA,IACA,SAAS;AAAA,MACP,MAAM;AAAA,IAAA;AAAA,EACR;AAEJ;AAKA,SAAS,0BAA0B,MAAe,UAA0C;AAC1F,QAAM,YAAY,KAAK,QAAQ,UAAU,KAAK,KAAK,IAAI;AACvD,QAAM,gBAAgB,SAAS,QAAQ,UAAU,SAAS,KAAK,IAAI;AACnE,QAAM,cAAc,iBAAiB,IAAI;AACzC,QAAM,kBAAkB,iBAAiB,QAAQ;AACjD,QAAM,WAAW,YAAY,IAAI;AACjC,QAAM,eAAe,YAAY,QAAQ;AAGzC,MACE,CAAC,aACD,CAAC,iBACD,CAAC,eACD,CAAC,mBACD,CAAC,YACD,CAAC,cACD;AACA,WAAO;AAAA,EACT;AAEA,MAAI,cAAc,iBAAiB,gBAAgB,mBAAmB,aAAa,cAAc;AAC/F,WAAO;AAAA,MACL,MAAM;AAAA,MACN;AAAA,MACA,SAAS;AAAA,QACP,iBAAiB;AAAA,QACjB,mBAAmB;AAAA,QACnB,MAAM;AAAA,MAAA;AAAA,IACR;AAAA,EAEJ;AAEA,SAAO;AACT;AAMA,SAAS,qBAAqB,MAAe,UAA0C;AAErF,QAAM,WAAW,cAAc,MAAM,QAAQ;AAC7C,MAAI,UAAU;AACZ,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,eAAe,MAAM,QAAQ;AAC/C,MAAI,WAAW;AACb,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,eAAe,MAAM,QAAQ;AAC/C,MAAI,WAAW;AACb,WAAO;AAAA,EACT;AAGA,SAAO,0BAA0B,MAAM,QAAQ;AACjD;AAcO,SAAS,gBAAgB,MAAe,oBAAgD;AAC7F,QAAM,UAA4B,CAAA;AAClC,QAAM,WAAW,KAAK,QAAQ;AAE9B,aAAW,YAAY,oBAAoB;AAEzC,QAAI,YAAY,SAAS,QAAQ,SAAS,UAAU;AAClD;AAAA,IACF;AAEA,UAAM,QAAQ,qBAAqB,MAAM,QAAQ;AACjD,QAAI,OAAO;AACT,cAAQ,KAAK,KAAK;AAAA,IACpB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,aAAa,QAAQ,SAAS;AAAA,IAC9B;AAAA,EAAA;AAEJ;ACvPA,MAAM,sBAAsB;AAC5B,MAAM,2BAA2B;AACjC,MAAM,yBAAyB;AAC/B,MAAM,sBAAsB;AAW5B,SAAS,aAAa,UAA2B;AAC/C,QAAM,WAAW,KAAK,SAAS,QAAQ;AAGvC,MAAI,SAAS,SAAS,MAAM,EAAG,QAAO;AAGtC,MAAI,SAAS,SAAS,MAAM,EAAG,QAAO;AAGtC,MAAI,SAAS,SAAS,YAAY,EAAG,QAAO;AAG5C,MAAI,SAAS,SAAS,OAAO,EAAG,QAAO;AAGvC,MAAI,SAAS,WAAW,GAAG,KAAK,SAAS,SAAS,MAAM,EAAG,QAAO;AAGlE,MAAI,SAAS,SAAS,GAAG,EAAG,QAAO;AAEnC,SAAO;AACT;AAYO,MAAM,oBAAoB,aAAa;AAAA,EAC3B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAET,UAA4B;AAAA,EAC5B,WAAW;AAAA,EACX,qCAAkD,IAAA;AAAA,EAE1D,YAAY,WAAmB,SAA8B;AAC3D,UAAA;AACA,SAAK,YAAY;AACjB,SAAK,aAAa,SAAS,cAAc;AACzC,SAAK,iBAAiB,SAAS,kBAAkB;AACjD,SAAK,aAAa,SAAS,cAAc;AACzC,SAAK,eAAe,SAAS,gBAAgB;AAC7C,SAAK,aAAa,SAAS,cAAc;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAuB;AAC3B,QAAI,KAAK,UAAU;AACjB;AAAA,IACF;AAEA,WAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,WAAK,UAAU,SAAS,MAAM,KAAK,WAAW;AAAA,QAC5C,SAAS;AAAA,QACT,YAAY;AAAA,QACZ,YAAY,KAAK;AAAA,QACjB,UAAU,KAAK;AAAA,QACf,eAAe;AAAA,QACf,kBAAkB;AAAA,MAAA,CACnB;AAED,WAAK,QAAQ,GAAG,SAAS,MAAM;AAC7B,aAAK,WAAW;AAChB,aAAK,KAAK,OAAO;AACjB,gBAAA;AAAA,MACF,CAAC;AAED,WAAK,QAAQ,GAAG,SAAS,CAAC,UAAmB;AAC3C,aAAK,KAAK,SAAS,KAAK;AACxB,YAAI,CAAC,KAAK,UAAU;AAClB,iBAAO,KAAK;AAAA,QACd;AAAA,MACF,CAAC;AAED,WAAK,QAAQ,GAAG,UAAU,CAAC,aAAqB;AAC9C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,CAAC;AAED,WAAK,QAAQ,GAAG,OAAO,CAAC,aAAqB;AAC3C,aAAK,iBAAiB,QAAQ;AAAA,MAChC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,UAAwB;AAE/C,UAAM,gBAAgB,KAAK,eAAe,IAAI,QAAQ;AACtD,QAAI,eAAe;AACjB,mBAAa,aAAa;AAAA,IAC5B;AAGA,UAAM,QAAQ,WAAW,MAAM;AAC7B,WAAK,eAAe,OAAO,QAAQ;AACnC,WAAK,KAAK,UAAU,QAAQ;AAC5B,WAAK,iBAAiB,QAAQ;AAAA,IAChC,GAAG,KAAK,UAAU;AAElB,SAAK,eAAe,IAAI,UAAU,KAAK;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA
,EAKA,MAAc,iBAAiB,UAAiC;AAE9D,QAAI,KAAK,QAAQ,QAAQ,EAAE,YAAA,MAAkB,SAAS;AACpD;AAAA,IACF;AAEA,QAAI,YAA0B;AAE9B,aAAS,UAAU,GAAG,WAAW,KAAK,YAAY,WAAW;AAC3D,UAAI;AACF,cAAM,UAAU,MAAM,GAAG,SAAS,UAAU,OAAO;AACnD,cAAM,SAAS,KAAK,MAAM,OAAO;AACjC,aAAK,KAAK,UAAU,UAAU,MAAM;AACpC;AAAA,MACF,SAAS,OAAO;AACd,oBAAY;AACZ,YAAI,UAAU,KAAK,YAAY;AAC7B,gBAAM,KAAK,MAAM,KAAK,YAAY;AAAA,QACpC;AAAA,MACF;AAAA,IACF;AAEA,SAAK,KAAK,cAAc,UAAU,SAAS;AAAA,EAC7C;AAAA;AAAA;AAAA;AAAA,EAKQ,MAAM,IAA2B;AACvC,WAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,QAAI,KAAK,SAAS;AAChB,WAAK,QAAQ,MAAA;AACb,WAAK,UAAU;AAAA,IACjB;AAGA,eAAW,SAAS,KAAK,eAAe,OAAA,GAAU;AAChD,mBAAa,KAAK;AAAA,IACpB;AACA,SAAK,eAAe,MAAA;AAEpB,SAAK,WAAW;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAKA,UAAkB;AAChB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,aAAsB;AACpB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,oBAA4B;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,kBAA0B;AACxB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,gBAAwB;AACtB,WAAO,KAAK;AAAA,EACd;AACF;"}
|