synset 0.9.3 → 0.9.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -10
- package/dist/cli.cjs +2 -2
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +2 -2
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +2 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/schema.sql +3 -3
- package/package.json +3 -2
package/README.md
CHANGED
@@ -85,7 +85,7 @@ findSynsets(index, 'bank')
 ```ts
 import { exportToSQLite } from 'synset'
 
-// Export to SQLite
+// Export to SQLite
 exportToSQLite(lexicon, 'dictionary.db', {
   onProgress: ({ phase, current, total }) => {
     console.log(`${phase}: ${current}/${total}`)
@@ -93,20 +93,15 @@ exportToSQLite(lexicon, 'dictionary.db', {
 })
 ```
 
-Schema:
-
-
-synsets(id, pos, definition)
-word_synsets(word_id, synset_id)
-```
+Schema (`words`, `synsets`, `word_synsets` tables) is available as:
+- `import { SCHEMA } from 'synset'` - SQL string constant
+- `synset/schema.sql` - standalone file via package exports
 
 ## Runtime
 
-- **Bun**: Full support
+- **Bun**: Full support
 - **Node.js 18+**: Full support
 
-> **Note:** SQLite export (`exportToSQLite`, `export-sqlite`) requires Bun (uses `bun:sqlite`).
-
 ## Development
 
 ```bash
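For context on the README change above, here is a minimal sketch (not part of the package or this diff) of consuming the schema in both of the new ways. It assumes the `libsql` driver, which the bundled exporter itself imports, and Node's `createRequire` to resolve the `synset/schema.sql` subpath declared in the package `exports`; the `dictionary.db` filename is just the one used in the README example.

```ts
// Sketch only: applying the exported schema to a fresh database.
import { readFileSync } from "node:fs";
import { createRequire } from "node:module";
import Database from "libsql";
import { SCHEMA } from "synset";

const db = new Database("dictionary.db");

// 1. SQL string constant re-exported from the package entry point.
db.exec(SCHEMA); // words, synsets, word_synsets (all CREATE TABLE IF NOT EXISTS)

// 2. Standalone schema.sql resolved through the package "exports" map.
const require = createRequire(import.meta.url);
const schemaSql = readFileSync(require.resolve("synset/schema.sql"), "utf8");
db.exec(schemaSql); // same DDL, so running both is harmless
```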
package/dist/cli.cjs
CHANGED
@@ -361,8 +361,8 @@ var children = (node, type, fn) => {
 var SCHEMA = `
 CREATE TABLE IF NOT EXISTS words (
   id INTEGER PRIMARY KEY,
-  word TEXT NOT NULL,
-  word_display TEXT NOT NULL
+  word TEXT NOT NULL, -- lowercase for search
+  word_display TEXT NOT NULL -- original casing
 );
 CREATE INDEX IF NOT EXISTS idx_words_word ON words(word);
 
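The new SQL comments document the lookup convention the exporter relies on: `words.word` is lowercased for search, while `words.word_display` keeps the original casing. Below is a hedged sketch (not from the package) of a lookup against an exported database; it assumes the `libsql` driver used by the exporter and its better-sqlite3-style `prepare().all()` API.

```ts
// Sketch only: looking up definitions in a database produced by export-sqlite.
import Database from "libsql";

const db = new Database("dictionary.db");
const lookup = db.prepare(
  `SELECT w.word_display, s.pos, s.definition
     FROM words w
     JOIN word_synsets ws ON ws.word_id = w.id
     JOIN synsets s ON s.id = ws.synset_id
    WHERE w.word = ?`,
);

// words.word is lowercased at export time, so normalize the input the same way;
// word_display preserves the original casing for display.
const rows = lookup.all("Bank".toLowerCase());
console.log(rows); // e.g. [{ word_display: 'bank', pos: 'n', definition: '...' }, ...]
```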
package/dist/cli.cjs.map
CHANGED
@@ -1 +1 @@
(single-line minified source map regenerated; embedded sources: ../src/export-sqlite.ts, ../src/types.ts, ../src/helpers.ts, ../src/literals.ts, ../src/loader.ts, ../node_modules/@dbushell/xml-streamify/src/node.ts, ../node_modules/@dbushell/xml-streamify/src/stream.ts, ../node_modules/@dbushell/xml-streamify/src/parse.ts, ../src/query.ts, ../src/cli.ts)
CF;AAKO,SAAS,YAAY,MAAqB;AAC/C,QAAM,MAAe;AAAA,IACnB,IAAI,KAAK,MAAM,IAAI;AAAA,IACnB,OAAO,KAAK,MAAM,OAAO;AAAA,IACzB,UAAU,KAAK,MAAM,UAAU;AAAA,IAC/B,OAAO,KAAK,MAAM,OAAO;AAAA,IACzB,SAAS,KAAK,MAAM,SAAS;AAAA,IAC7B,SAAS,KAAK,MAAM,SAAS;AAAA,IAC7B,UAAU,QAAQ,MAAM,UAAU;AAAA,IAClC,KAAK,KAAK,MAAM,KAAK;AAAA,IACrB,gBAAgB,SAAS,MAAM,gBAAgB,gBAAgB;AAAA,IAC/D,SAAS,SAAS,MAAM,UAAU,UAAU;AAAA,IAC5C;AAAA;AAAA,MACE,SAAS,MAAM,sBAAsB,qBAAqB;AAAA;AAAA,EAC9D;AACA,SAAO,QAAQ,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AAC9D;AAEO,IAAM,oBAAoB,CAC/B,MACuB;AACvB,MAAI,MAAM,OAAW,QAAO;AAC5B,SAAO,EACJ,QAAQ,UAAU,GAAG,EACrB,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,WAAW,GAAG,EACtB,QAAQ,WAAW,GAAG;AAC3B;AAEA,IAAM,OAAO,CAAC,MAAY,aAA6B;AACrD,QAAM,QAAQ,kBAAkB,KAAK,WAAW,QAAQ,CAAC;AACzD,MAAI,UAAU,QAAW;AACvB,UAAM,IAAI;AAAA,MACR,+BAA+B,QAAQ,cAAc,KAAK,IAAI;AAAA,IAChE;AAAA,EACF;AACA,SAAO;AACT;AAEA,IAAM,UAAU,CAAC,MAAY,aAAyC;AACpE,SAAO,kBAAkB,KAAK,WAAW,QAAQ,CAAC;AACpD;AAKA,IAAM,YAAY,CAChB,MACA,KACA,UAC2B;AAC3B,QAAM,SAAiC,CAAC;AACxC,SAAO,KAAK,KAAK,UAAU,EACxB,OAAO,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,IAAI,EAChC,QAAQ,CAAC,MAAM;AACd,WAAO,CAAC,IAAI,kBAAkB,KAAK,WAAW,CAAC,CAAC,KAAK,KAAK,WAAW,CAAC;AAAA,EACxE,CAAC;AACH,SAAO;AACT;AAEA,IAAM,qBAAqB,CACzB,MACA,KACA,UACG;AACH,SAAO,OAAO,OAAO,KAAK,UAAU,MAAM,KAAK,KAAK,CAAC;AACvD;AAEA,IAAM,WAAW,CACf,MACA,MACA,OACG;AACH,SAAO,KAAK,SACT,OAAO,CAAC,MAAY,EAAE,SAAS,IAAI,EACnC,IAAI,CAAC,MAAY,GAAG,CAAC,CAAC;AAC3B;;;AFtNO,IAAM,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoCf,SAAS,eACd,SACA,YACA,UAAyB,CAAC,GACpB;AACN,QAAM,EAAE,WAAW,IAAI;AAGvB,QAAM,KAAK,IAAI,cAAAA,QAAS,UAAU;AAClC,KAAG,KAAK,2BAA2B;AACnC,KAAG,KAAK,0BAA0B;AAClC,KAAG,KAAK,MAAM;AAGd,QAAM,gBAAgB,oBAAI,IAA4B;AACtD,aAAW,SAAS,QAAQ,gBAAgB;AAC1C,UAAM,OAAO,MAAM,OAAO,CAAC,GAAG;AAC9B,QAAI,MAAM;AACR,YAAM,QAAQ,KAAK,YAAY;AAC/B,YAAM,WAAW,cAAc,IAAI,KAAK,KAAK,CAAC;AAC9C,eAAS,KAAK,KAAK;AACnB,oBAAc,IAAI,OAAO,QAAQ;AAAA,IACnC;AAAA,EACF;AAGA,QAAM,YAAY,oBAAI,IAAoB;AAC1C,aAAW,UAAU,QAAQ,SAAS;AACpC,cAAU,IAAI,OAAO,IAAI,MAAM;AAAA,EACjC;AAGA,QAAM,aAAa,GAAG;AAAA,IACpB;AAAA,EACF;AACA,QAAM,UAAU,oBAAI,IAAoB;AACxC,QAAM,QAAQ,MAAM,KAAK,cAAc,KAAK,CAAC,EAAE,KAAK;AACpD,QAAM,aAAa,MAAM;AAEzB,KAAG,KAAK,mBAAmB;AAC3B,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,cAAc,IAAI,IAAI;AACtC,QAAI,CAAC,QAAS;AAEd,UAAM,UAAU,QAAQ,CAAC,EAAE,OAAO,CAAC,GAAG,eAAe;AACrD,eAAW,IAAI,MAAM,OAAO;AAC5B;AACA,YAAQ,IAAI,MAAM,MAAM;AACxB,QAAI,cAAc,IAAI,QAAU,GAAG;AACjC,iBAAW,EAAE,OAAO,SAAS,SAAS,GAAG,OAAO,WAAW,CAAC;AAAA,IAC9D;AAAA,EACF;AACA,KAAG,KAAK,QAAQ;AAGhB,QAAM,gBAAgB,oBAAI,IAAY;AACtC,aAAW,WAAW,cAAc,OAAO,GAAG;AAC5C,eAAW,SAAS,SAAS;AAC3B,iBAAW,SAAS,MAAM,QAAQ;AAChC,sBAAc,IAAI,MAAM,MAAM;AAAA,MAChC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,eAAe,GAAG;AAAA,IACtB;AAAA,EACF;AACA,QAAM,aAAa,MAAM,KAAK,aAAa;AAC3C,QAAM,eAAe,WAAW;AAEhC,KAAG,KAAK,mBAAmB;AAC3B,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,UAAM,WAAW,WAAW,CAAC;AAC7B,UAAM,SAAS,UAAU,IAAI,QAAQ;AACrC,QAAI,QAAQ;AACV,YAAM,MAAM,kBAAkB,OAAO,YAAY,CAAC,GAAG,KAAK,KAAK;AAC/D,mBAAa,IAAI,UAAU,OAAO,cAAc,GAAG;AAAA,IACrD;AACA,QAAI,cAAc,IAAI,QAAU,GAAG;AACjC,iBAAW,EAAE,OAAO,WAAW,SAAS,GAAG,OAAO,aAAa,CAAC;AAAA,IAClE;AAAA,EACF;AACA,KAAG,KAAK,QAAQ;AAGhB,QAAM,iBAAiB,GAAG;AAAA,IACxB;AAAA,EACF;AACA,MAAI,gBAAgB;AACpB,QAAM,iBAAiB,MAAM,KAAK,cAAc,OAAO,CAAC,EAAE;AAAA,IACxD,CAAC,KAAK,YAAY,MAAM,QAAQ,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,OAAO,QAAQ,CAAC;AAAA,IACvE;AAAA,EACF;AAEA,KAAG,KAAK,mBAAmB;AAC3B,aAAW,CAAC,MAAM,OAAO,KAAK,eAAe;AAC3C,UAAMC,UAAS,QAAQ,IAAI,IAAI;AAC/B,QAAI,CAACA,QAAQ;AAEb,eAAW,SAAS,SAAS;AAC3B,iBAAW,SAAS,MAAM,QAAQ;AAChC,uBA
Ae,IAAIA,SAAQ,MAAM,MAAM;AACvC;AACA,YAAI,cAAc,gBAAgB,QAAU,GAAG;AAC7C,qBAAW;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,YACT,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,KAAG,KAAK,QAAQ;AAEhB,KAAG,MAAM;AACX;;;AG9JO,IAAMC,iBAAwC;AAAA,EACnD,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AACL;AAqCO,IAAMC,yBAAgD;AAAA,EAC3D,OAAO;AAAA,EACP,MAAM;AAAA,EACN,eAAe;AAAA,EACf,eAAe;AAAA,EACf,aAAa;AAAA,EACb,SAAS;AAAA,EACT,WAAW;AAAA,EACX,cAAc;AAAA,EACd,aAAa;AAAA,EACb,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,qBAAqB;AAAA,EACrB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,uBAAuB;AAAA,EACvB,sBAAsB;AAAA,EACtB,kBAAkB;AAAA,EAClB,uBAAuB;AAAA,EACvB,iBAAiB;AAAA,EACjB,sBAAsB;AAAA,EACtB,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,sBAAsB;AAAA,EACtB,qBAAqB;AAAA,EACrB,eAAe;AAAA,EACf,cAAc;AAAA,EACd,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,UAAU;AAAA,EACV,kBAAkB;AAAA,EAClB,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,cAAc;AAAA,EACd,eAAe;AAAA,EACf,WAAW;AAAA,EACX,eAAe;AAAA,EACf,aAAa;AAAA,EACb,WAAW;AAAA,EACX,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,WAAW;AAAA,EACX,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,oBAAoB;AAAA,EACpB,qBAAqB;AAAA,EACrB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,2BAA2B;AAAA,EAC3B,2BAA2B;AAAA,EAC3B,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,gBAAgB;AAAA,EAChB,UAAU;AAAA,EACV,WAAW;AAAA,EACX,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,eAAe;AAAA,EACf,aAAa;AAAA,EACb,WAAW;AAAA,EACX,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,SAAS;AAAA,EACT,OAAO;AAAA,EACP,cAAc;AAAA,EACd,cAAc;AAAA,EACd,SAAS;AAAA,EACT,eAAe;AAAA,EACf,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,kBAAkB;AAAA,EAClB,UAAU;AAAA,EACV,UAAU;AAAA,EACV,mBAAmB;AAAA,EACnB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,OAAO;AACT;;;ACpJA,qBAOO;AACP,uBAAiB;AACjB,yBAAyB;;;ACHlB,IAAM,OAAN,MAAW;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,MAAc,QAAe,KAAc;AACrD,SAAK,QAAQ;AACb,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,YAAY,CAAC;AAAA,EACpB;AAAA,EAEA,IAAI,OAAe;AACjB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,MAAc;AAChB,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA,EAEA,IAAI,SAA2B;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,WAAwB;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,aAAqC;AACvC,QAAI,KAAK,OAAO;AACd,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,QAAQ,CAAC;AACd,QAAI,KAAK,KAAK;AACZ,YAAM,QAAQ;AACd,UAAI;AACJ,cAAQ,QAAQ,MAAM,KAAK,KAAK,GAAG,OAAO,MAAM;AAC9C,aAAK,MAAM,MAAM,CAAC,CAAC,IAAI,MAAM,CAAC;AAAA,MAChC;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,YAAoB;AACtB,QAAI,KAAK,SAAS,QAAQ;AACxB,UAAI,OAAO;AACX,iBAAW,SAAS,KAAK,UAAU;AACjC,gBAAQ,MAAM;AAAA,MAChB;AACA,aAAO;AAAA,IACT;AACA,YAAQ,KAAK,IAAI,MAAM,sBAAsB,KAAK,CAAC,EAAE,KAAK,GAAG,GAAG,CAAC;AAAA,EACnE;AAAA,EAEA,SAAS,OAAmB;AAC1B,SAAK,UAAU,KAAK,KAAK;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAA8B;AAClC,QAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,QAAI;AACJ,eAAW,OAAO,KAAK,WAAW,GAAG;AACnC,eAAS,SAAS,OAAO,SAAS;AAClC,UAAI,QAAQ,SAAS,KAAK;AACxB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAA+B;AACnC,WAAO,KAAK,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,GAAG;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,MAAkC;AACvC,QAAI,QAAiC,KAAK;AAC1C,QAAI,QAAqB,CAAC;AAC1B,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,IAAI,GAAG;AACzC,UAAI,OAAO,SAAS,CAAC,MAAM,KAAK,SAAS,GAAG;AAC1C,gBAAQ,MAAM,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AACxC;AAAA,MACF;AACA,cAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,CAAC,GAAG;AAC1C,UAAI,CAAC,MAAO,QAAO,CAAC;AAA
A,IACtB;AACA,WAAO;AAAA,EACT;AACF;;;ACnGA,IAAM,WAAW;AAAA,EACf,OAAO;AAAA,IACL,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,SAAS;AAAA,IACP,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,aAAa;AAAA,IACX,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,SAAS;AAAA,IACP,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,SAAS;AAAA,IACP,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AACF;AAGO,IAAM,cAIT;AAAA,EACF,KAAK;AAAA,EACL;AAAA,EACA,UAAU,oBAAiB,EAAE;AAAA,EAC7B,MAAM,YAAY;AAEhB,QAAI,KAAK,IAAI,SAAS,GAAG;AACvB,iBAAW,QAAQ,oBAAgB,KAAK,GAAG,CAAC;AAAA,IAC9C;AAAA,EACF;AAAA,EACA,UAAU,OAAO,YAAY;AAC3B,SAAK,OAAO;AACZ,WAAO,KAAK,IAAI,QAAQ;AAEtB,UACE,KAAK,UAAU,KAAK,SAAS,CAAC,KAC9B,KAAK,IAAI,WAAW,KAAK,SAAS,CAAC,GACnC;AACA;AAAA,MACF;AACA,WAAK,WAAW,CAAC,KAAK,OAAO,KAAK,IAAI,MAAM;AAE5C,UAAI,KAAK,6BAA0B;AACjC,cAAM,QAAQ,KAAK,IAAI,QAAQ,GAAG;AAClC,YAAI,QAAQ,EAAG;AAEf,mBAAW,QAAQ,oBAAgB,KAAK,IAAI,UAAU,GAAG,KAAK,CAAC,CAAC;AAChE,aAAK,MAAM,KAAK,IAAI,UAAU,KAAK;AACnC,aAAK;AAAA,MACP;AAEA,UAAI,KAAK,iCAA4B;AACnC,YAAI,KAAK,IAAI,SAAS,EAAG;AACzB,mBAAW,CAAC,OAAO,MAAM,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACtD,cAAI,KAAK,IAAI,MAAM,OAAO,KAAK,GAAG;AAChC,iBAAK,QAAQ;AACb;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAEA,UAAI,OAAO,OAAO,UAAU,KAAK,KAAK,GAAG;AACvC,cAAM,EAAC,IAAG,IAAI,SAAS,KAAK,KAA8B;AAC1D,cAAM,QAAQ,KAAK,IAAI,QAAQ,GAAG;AAClC,YAAI,QAAQ,EAAG;AACf,mBAAW,QAAQ;AAAA,UACjB,KAAK;AAAA,UACL,KAAK,IAAI,UAAU,GAAG,QAAQ,IAAI,MAAM;AAAA,QAC1C,CAAC;AACD,aAAK,MAAM,KAAK,IAAI,UAAU,QAAQ,IAAI,MAAM;AAChD,aAAK;AACL;AAAA,MACF;AAEA,YAAM,IAAI,MAAM;AAAA,IAClB;AAAA,EACF;AACF;AAGO,IAAM,YAAN,cAAwB,gBAA4C;AAAA,EACzE,cAAc;AACZ,UAAM,EAAC,GAAG,YAAW,CAAC;AAAA,EACxB;AACF;;;AC5FA,IAAM,cAA6D;AAAA,EACjE,wBAAiB,GAAG;AAAA,EACpB,gCAAqB,GAAG;AAAA,EACxB,wBAAiB,GAAG;AACtB;AAQA,gBAAuB,MACrB,OACA,SACyC;AACzC,QAAM,WAAW,IAAI,KAAK,WAAW;AACrC,MAAI;AACF,UAAM,OAAO,EAAC,GAAG,SAAS,aAAY;AACtC,QAAI,SAAS,QAAQ;AACnB,WAAK,SAAS,QAAQ;AAAA,IACxB;AAEA,QAAI;AAGJ,QAAI,OAAO,UAAU,YAAY,iBAAiB,KAAK;AACrD,cAAQ,IAAI,IAAI,KAAK;AACrB,YAAM,WAAW,MAAM,MAAM,OAAO,IAAI;AACxC,UAAI,CAAC,SAAS,MAAM,CAAC,SAAS,MAAM;AAClC,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,eAAS,SAAS;AAAA,IACpB,OAAO;AACL,eAAS;AAAA,IACX;AAEA,UAAM,SAAS,OACZ,YAAY,IAAI,kBAAkB,CAAC,EACnC,YAAY,IAAI,UAAU,GAAG;AAAA,MAC5B,QAAQ,SAAS;AAAA,IACnB,CAAC;AAGH,QAAI,OAAO;AAEX,qBAAiB,CAAC,MAAM,KAAK,KAAK,QAAQ;AACxC,UAAI,SAAS,QAAQ,SAAS;AAC5B;AAAA,MACF;AAEA,UAAI,4BAAwB;AAC1B,YAAI,SAAS,qBAAqB,SAAS,MAAM,KAAK,EAAE,WAAW,GAAG;AACpE;AAAA,QACF;AAAA,MACF;AAEA,UAAI,QAAQ,eAAe,UAAU,YAAY,IAAI,CAAE,MAAM,OAAO;AAClE,cAAM,UAAU,IAAI,KAAK,MAAM,MAAM,KAAK;AAC1C,aAAK,SAAS,OAAO;AACrB,cAAM;AACN;AAAA,MACF;AAEA,UAAI,kCAA2B;AAC7B,cAAM,OAAO,MAAM,MAAM,gBAAgB,EAAG,CAAC;AAE7C,YAAI,MAAM,SAAS,IAAI,GAAG;AACxB,gBAAMC,WAAU,IAAI,KAAK,MAAM,MAAM,KAAK;AAC1C,eAAK,SAASA,QAAO;AACrB,gBAAMA;AACN;AAAA,QACF;AAEA,YAAI,MAAM,WAAW,IAAI,GAAG;AAC1B,gBAAM;AACN,iBAAO,KAAK;AACZ;AAAA,QACF;AAEA,cAAM,UAAU,IAAI,KAAK,MAAM,MAAM,KAAK;AAC1C,aAAK,SAAS,OAAO;AACrB,eAAO;AACP;AAAA,MACF;AAEA,WAAK,SAAS,IAAI,KAAK,MAAM,MAAM,KAAK,CAAC;AAAA,IAC3C;AAAA,EACF,SAAS,KAAK;AACZ,QAAI,SAAS,WAAW,OAAO;AAC7B,YAAM;AAAA,IACR;AAAA,EACF;AACA,SAAO;AACT;;;AHzFO,IAAM,eAAe;AAGrB,SAAS,YAAY,SAAyB;AACnD,SAAO,mBAAmB,OAAO;AACnC;AAGO,SAAS,eAAe,SAAyB;AACtD,SAAO,8BAA8B,YAAY,OAAO,CAAC;AAC3D;AAGO,SAAS,qBAA6B;AAC3C,QAAM,UAAU,QAAQ,IAAI,QAAQ,QAAQ,IAAI,eAAe;AAC/D,SAAO,iBAAAC,QAAK,KAAK,SAAS,UAAU,QAAQ;AAC9C;AAGA,SAAS,WAAW,UAA2B;AAC7C,UAAI,2BAAW,QAAQ,GAAG;AACxB,UAAM,WAAO,yBAAS,QAAQ;AAC9B,WAAO,KAAK,OAAO;AAAA,EACrB;AACA,SAAO;AACT;AAGA,eAAe,UAAU,KAA+B;AACtD,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,KAAK,EAAE,QAAQ,OAAO,CAAC;AACpD,WAAO,SAAS;AAAA,EAClB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGA,SAAS,2BAA2B,UAAiC;AACnE,QAAM,QAAQ,SAAS,MAAM,8BAA8B;AAC3D,SAAO,QAAQ,SAAS,MAAM,CAAC,
GAAG,EAAE,IAAI;AAC1C;AAGA,SAAS,kBAAkB,UAAiC;AAC1D,MAAI,KAAC,2BAAW,QAAQ,EAAG,QAAO;AAElC,QAAM,YAAQ,4BAAY,QAAQ;AAClC,QAAM,eAAe,MAClB,IAAI,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,2BAA2B,CAAC,EAAE,EAAE,EAC7D,OAAO,CAAC,MAA2C,EAAE,SAAS,IAAI,EAClE,KAAK,CAAC,GAAG,MAAM,EAAE,OAAO,EAAE,IAAI;AAEjC,SAAO,aAAa,SAAS,IAAI,aAAa,CAAC,EAAE,KAAK,SAAS,IAAI;AACrE;AASA,eAAsB,kBACpB,YACA,UACiB;AACjB,QAAM,MAAM,eAAe,MAAM;AAAA,EAAC;AAClC,QAAM,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC3C,QAAM,qBAAqB,cAAc;AACzC,QAAM,WAAW,SAAS,cAAc,EAAE;AAC1C,QAAM,MAAM,YAAY,mBAAmB;AAG3C,QAAM,gBAAgB,kBAAkB,GAAG;AAC3C,MAAI,eAAe;AACjB,UAAM,aAAa,SAAS,eAAe,EAAE;AAG7C,QAAI,cAAc,oBAAoB;AACpC,aAAO;AAAA,IACT;AAGA,QAAI,+BAA+B;AACnC,aAAS,OAAO,aAAa,GAAG,QAAQ,oBAAoB,QAAQ;AAClE,YAAM,UAAU,KAAK,SAAS;AAC9B,UAAI,MAAM,UAAU,eAAe,OAAO,CAAC,GAAG;AAC5C,YAAI,SAAS,OAAO,EAAE;AACtB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAGA,MAAI,gCAAgC;AACpC,MAAI,MAAM,UAAU,eAAe,YAAY,CAAC,GAAG;AAEjD,aAAS,OAAO,WAAW,GAAG,QAAQ,oBAAoB,QAAQ;AAChE,YAAM,UAAU,KAAK,SAAS;AAC9B,UAAI,MAAM,UAAU,eAAe,OAAO,CAAC,GAAG;AAAA,MAC9C,OAAO;AACL,gBAAQ,OAAO,GAAG,SAAS;AAAA,MAC7B;AAAA,IACF;AAEA,WAAO,mBAAmB,SAAS;AAAA,EACrC;AAGA,WAAS,OAAO,WAAW,GAAG,QAAQ,oBAAoB,QAAQ;AAChE,UAAM,UAAU,KAAK,SAAS;AAC9B,QAAI,MAAM,UAAU,eAAe,OAAO,CAAC,GAAG;AAC5C,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR,oCAAoC,YAAY,QAAQ,kBAAkB;AAAA,EAC5E;AACF;AAGA,eAAe,gBACb,SACA,UACe;AACf,QAAM,MAAM,eAAe,OAAO;AAClC,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,CAAC,SAAS,MAAM,CAAC,SAAS,MAAM;AAClC,UAAM,IAAI;AAAA,MACR,8BAA8B,OAAO,KAAK,SAAS,UAAU;AAAA,IAC/D;AAAA,EACF;AAEA,QAAM,eAAe,SAAS,KAAK;AAAA,IACjC,IAAI,oBAAoB,MAAM;AAAA,EAChC;AACA,QAAM,cAAc,MAAM,IAAI,SAAS,YAAY,EAAE,YAAY;AAGjE,QAAM,MAAM,iBAAAA,QAAK,QAAQ,QAAQ;AACjC,MAAI,KAAC,2BAAW,GAAG,GAAG;AACpB,kCAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AAEA,oCAAc,UAAU,OAAO,KAAK,WAAW,CAAC;AAClD;AAGO,SAAS,aAAa,UAAkB;AAC7C,QAAM,eAAe,iBAAAA,QAAK,QAAQ,QAAQ;AAC1C,QAAM,iBAAa,iCAAiB,YAAY;AAChD,QAAM,YAAY,4BAAS,MAAM,UAAU;AAC3C,SAAO,MAAM,WAAW;AAAA,IACtB,mBAAmB;AAAA,IACnB,QAAQ;AAAA,EACV,CAAC;AACH;AAGA,eAAsB,aAEpB,QAC8B;AAC9B,mBAAiB,QAAQ,QAAQ;AAC/B,QAAI,KAAK,SAAS,WAAW;AAC3B,aAAO,YAAY,IAAI;AAAA,IACzB;AAAA,EACF;AACA,SAAO;AACT;AAuBA,eAAsB,YAAY,UAAoC;AACpE,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,UAAM,IAAI,MAAM,2BAA2B,QAAQ,EAAE;AAAA,EACvD;AAEA,QAAM,SAAS,aAAa,QAAQ;AACpC,QAAM,UAAU,MAAM,aAAa,MAAM;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,SAAO;AACT;AAQA,eAAsB,aACpB,UAAuB,CAAC,GACH;AACrB,QAAM,WAAW,QAAQ,YAAY,mBAAmB;AACxD,QAAM,MAAM,QAAQ,eAAe,MAAM;AAAA,EAAC;AAG1C,QAAM,UAAU,QAAQ,WAAY,MAAM,kBAAkB,KAAK,QAAQ;AACzE,QAAM,WAAW,YAAY,OAAO;AACpC,QAAM,aAAa,iBAAAA,QAAK,KAAK,UAAU,QAAQ;AAE/C,MAAI,CAAC,WAAW,UAAU,KAAK,QAAQ,eAAe;AACpD,UAAM,MAAM,eAAe,OAAO;AAClC,QAAI,uBAAuB,OAAO,SAAS,GAAG,EAAE;AAChD,UAAM,gBAAgB,SAAS,UAAU;AACzC,QAAI,YAAY,UAAU,EAAE;AAAA,EAC9B,OAAO;AACL,QAAI,gBAAgB,UAAU,EAAE;AAAA,EAClC;AAEA,QAAM,UAAU,MAAM,YAAY,UAAU;AAC5C,SAAO,EAAE,SAAS,SAAS,UAAU,WAAW;AAClD;AAOA,eAAsB,oBACpB,UAAuB,CAAC,GACwB;AAChD,QAAM,WAAW,QAAQ,YAAY,mBAAmB;AACxD,QAAM,MAAM,QAAQ,eAAe,MAAM;AAAA,EAAC;AAG1C,QAAM,UAAU,QAAQ,WAAY,MAAM,kBAAkB,KAAK,QAAQ;AACzE,QAAM,WAAW,YAAY,OAAO;AACpC,QAAM,aAAa,iBAAAA,QAAK,KAAK,UAAU,QAAQ;AAE/C,MAAI,CAAC,WAAW,UAAU,KAAK,QAAQ,eAAe;AACpD,UAAM,MAAM,eAAe,OAAO;AAClC,QAAI,uBAAuB,OAAO,SAAS,GAAG,EAAE;AAChD,UAAM,gBAAgB,SAAS,UAAU;AACzC,QAAI,YAAY,UAAU,EAAE;AAAA,EAC9B,OAAO;AACL,QAAI,gBAAgB,UAAU,EAAE;AAAA,EAClC;AAEA,SAAO,EAAE,UAAU,YAAY,QAAQ;AACzC;AAIO,IAAM,mBAAmB,YAAY,YAAY;AACjD,IAAM,cAAc,eAAe,YAAY;;;AI/O/C,SAAS,WAAW,SAAgC;AACzD,QAAM,UAAU,oBAAI,IAAoB;AACxC,QAAM,SAAS,oBAAI,IAAmB;AACtC,QAAM,UAAU,oBAAI,IAA0B;AAC9C,QAAM,SAAS,oBAAI,IAA4B;AAC/C,QAAM,eAAe,oBAAI,IAAqB;AAC9C,QAAM,gBAAgB,oB
AAI,IAAsB;AAGhD,aAAW,UAAU,QAAQ,SAAS;AACpC,YAAQ,IAAI,OAAO,IAAI,MAAM;AAAA,EAC/B;AAGA,aAAW,SAAS,QAAQ,gBAAgB;AAC1C,YAAQ,IAAI,MAAM,IAAI,KAAK;AAE3B,UAAM,OAAO,MAAM,OAAO,CAAC,GAAG,YAAY,YAAY;AACtD,QAAI,MAAM;AAER,YAAM,WAAW,OAAO,IAAI,IAAI,KAAK,CAAC;AACtC,eAAS,KAAK,KAAK;AACnB,aAAO,IAAI,MAAM,QAAQ;AAGzB,iBAAW,SAAS,MAAM,QAAQ;AAChC,eAAO,IAAI,MAAM,IAAI,KAAK;AAE1B,cAAM,iBAAiB,aAAa,IAAI,IAAI,KAAK,CAAC;AAClD,uBAAe,KAAK,KAAK;AACzB,qBAAa,IAAI,MAAM,cAAc;AAGrC,cAAM,SAAS,QAAQ,IAAI,MAAM,MAAM;AACvC,YAAI,QAAQ;AACV,gBAAM,kBAAkB,cAAc,IAAI,IAAI,KAAK,CAAC;AACpD,cAAI,CAAC,gBAAgB,SAAS,MAAM,GAAG;AACrC,4BAAgB,KAAK,MAAM;AAC3B,0BAAc,IAAI,MAAM,eAAe;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AA+BO,SAAS,YAAY,OAAqB,MAAwB;AACvE,SAAO,MAAM,cAAc,IAAI,KAAK,YAAY,CAAC,KAAK,CAAC;AACzD;AAGO,SAAS,eACd,OACA,MACoB;AACpB,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,SAAO,QAAQ;AAAA,IAAQ,CAAC,WACtB,OAAO,YAAY,IAAI,CAAC,OAAO;AAAA,MAC7B,MAAM,EAAE;AAAA,MACR;AAAA,MACA,cAAc,OAAO;AAAA,IACvB,EAAE;AAAA,EACJ;AACF;AAGO,SAAS,WACd,OACA,QACA,SACU;AACV,SAAO,OAAO,gBACX,OAAO,CAAC,MAAsB,EAAE,YAAY,OAAO,EACnD,IAAI,CAAC,MAAsB,MAAM,QAAQ,IAAI,EAAE,MAAM,CAAC,EACtD,OAAO,CAAC,MAAmB,MAAM,MAAS;AAC/C;AAGO,SAAS,aAAa,OAAqB,MAAwB;AACxE,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,SAAO,QAAQ,QAAQ,CAAC,MAAM,WAAW,OAAO,GAAG,UAAU,CAAC;AAChE;AAGO,SAAS,YAAY,OAAqB,MAAwB;AACvE,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,SAAO,QAAQ,QAAQ,CAAC,MAAM,WAAW,OAAO,GAAG,SAAS,CAAC;AAC/D;AAGO,SAAS,YACd,OACA,MACiB;AACjB,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,QAAM,YAAY,KAAK,YAAY;AACnC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA2B,CAAC;AAElC,aAAW,UAAU,SAAS;AAC5B,eAAW,YAAY,OAAO,SAAS;AACrC,YAAM,QAAQ,MAAM,QAAQ,IAAI,QAAQ;AACxC,UAAI,OAAO;AACT,cAAM,QAAQ,MAAM,OAAO,CAAC,GAAG;AAC/B,YAAI,SAAS,MAAM,YAAY,MAAM,aAAa,CAAC,KAAK,IAAI,KAAK,GAAG;AAClE,eAAK,IAAI,KAAK;AACd,kBAAQ,KAAK,EAAE,MAAM,OAAO,OAAO,OAAO,CAAC;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGO,SAAS,eAAe,OAAqB,QAA0B;AAC5E,SAAO,OAAO,QACX,IAAI,CAAC,OAAO,MAAM,QAAQ,IAAI,EAAE,CAAC,EACjC,OAAO,CAAC,MAAyB,MAAM,MAAS,EAChD,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,WAAW,EACnC,OAAO,CAAC,MAAmB,MAAM,MAAS;AAC/C;;;ACjMA,IAAM,SAAS,CAAC,MAAkC,kBAAkB,CAAC,KAAK;AAE1E,IAAM,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4Bb,eAAe,OAAO;AACpB,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,MAAI,KAAK,WAAW,KAAK,KAAK,SAAS,QAAQ,KAAK,KAAK,SAAS,IAAI,GAAG;AACvE,YAAQ,IAAI,IAAI;AAChB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,KAAK,CAAC;AACtB,QAAM,YAAY,KAAK,QAAQ,QAAQ;AACvC,QAAM,WAAW,cAAc,KAAK,KAAK,YAAY,CAAC,IAAI;AAG1D,QAAM,YACJ,cAAc,KACV,OACA,KAAK,OAAO,CAAC,GAAG,MAAM,MAAM,aAAa,MAAM,YAAY,CAAC;AAClE,QAAM,OAAO,UAAU,CAAC;AAExB,MAAI,YAAY,SAAS;AACvB,YAAQ,IAAI,6BAA6B;AACzC,UAAM,EAAE,UAAU,YAAY,QAAQ,IAAI,MAAM,oBAAoB;AAAA,MAClE,eAAe,KAAK,SAAS,SAAS;AAAA,MACtC,YAAY,QAAQ;AAAA,IACtB,CAAC;AACD,YAAQ,IAAI,WAAW,OAAO,eAAe,UAAU,EAAE;AACzD;AAAA,EACF;AAEA,MAAI,YAAY,iBAAiB;AAC/B,UAAM,aAAa,UAAU,CAAC;AAC9B,QAAI,CAAC,YAAY;AACf,cAAQ,MAAM,8CAA8C;AAC5D,cAAQ,KAAK,CAAC;AAAA,IAChB;AACA,YAAQ,IAAI,yBAAyB;AACrC,UAAMC,WAAU,WACZ,MAAM,YAAY,QAAQ,KACzB,MAAM,aAAa,EAAE,YAAY,QAAQ,IAAI,CAAC,GAAG;AACtD,YAAQ,IAAI,gBAAgB,UAAU,KAAK;AAC3C,mBAAeA,UAAS,YAAY;AAAA,MAClC,YAAY,CAAC,EAAE,OAAO,SAAS,MAAM,MAAM;AACzC,gBAAQ,OAAO,MAAM,KAAK,KAAK,KAAK,OAAO,IAAI,KAAK,EAAE;AAAA,MACxD;AAAA,IACF,CAAC;AACD,YAAQ,IAAI;AAAA,cAAiB,UAAU,EAAE;AACzC;AAAA,EACF;AAEA,MAAI,CAAC,QAAQ,YAAY,SAAS;AAChC,YAAQ,MAAM,6CAA6C,OAAO,GAAG;AACrE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,WACZ,MAAM,YAAY,QAAQ,KACzB,MAAM,aAAa,EAAE,YAAY,QAAQ,IAAI,CAAC,GAAG;AAEtD,QAAM
,QAAQ,WAAW,OAAO;AAEhC,UAAQ,SAAS;AAAA,IACf,KAAK,UAAU;AACb,YAAM,cAAc,eAAe,OAAO,IAAI;AAC9C,UAAI,YAAY,WAAW,GAAG;AAC5B,gBAAQ,IAAI,6BAA6B,IAAI,GAAG;AAAA,MAClD,OAAO;AACL,gBAAQ,IAAI,oBAAoB,IAAI,IAAI;AACxC,oBAAY,QAAQ,CAAC,KAAK,MAAM;AAC9B,gBAAM,MAAMC,eAAc,IAAI,YAAY,KAAK,IAAI;AACnD,kBAAQ,IAAI,KAAK,IAAI,CAAC,MAAM,GAAG,KAAK,OAAO,IAAI,IAAI,CAAC,EAAE;AAAA,QACxD,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAAA,IAEA,KAAK,YAAY;AACf,YAAM,WAAW,YAAY,OAAO,IAAI;AACxC,UAAI,SAAS,WAAW,GAAG;AACzB,gBAAQ,IAAI,0BAA0B,IAAI,GAAG;AAAA,MAC/C,OAAO;AACL,gBAAQ,IAAI,iBAAiB,IAAI,IAAI;AACrC,gBAAQ,IAAI,KAAK,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,EAAE;AAAA,MAC3D;AACA;AAAA,IACF;AAAA,IAEA,KAAK,aAAa;AAChB,YAAM,YAAY,aAAa,OAAO,IAAI;AAC1C,UAAI,UAAU,WAAW,GAAG;AAC1B,gBAAQ,IAAI,2BAA2B,IAAI,GAAG;AAAA,MAChD,OAAO;AACL,gBAAQ,IAAI,kBAAkB,IAAI,mBAAmB;AACrD,kBAAU,QAAQ,CAAC,MAAM;AACvB,gBAAM,QAAQ,eAAe,OAAO,CAAC;AACrC,gBAAM,MAAM,OAAO,EAAE,YAAY,CAAC,GAAG,KAAK;AAC1C,kBAAQ,IAAI,OAAO,MAAM,KAAK,IAAI,CAAC,KAAK,GAAG,EAAE;AAAA,QAC/C,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAAA,IAEA,KAAK,YAAY;AACf,YAAM,WAAW,YAAY,OAAO,IAAI;AACxC,UAAI,SAAS,WAAW,GAAG;AACzB,gBAAQ,IAAI,0BAA0B,IAAI,GAAG;AAAA,MAC/C,OAAO;AACL,gBAAQ,IAAI,iBAAiB,IAAI,oBAAoB;AACrD,iBAAS,QAAQ,CAAC,MAAM;AACtB,gBAAM,QAAQ,eAAe,OAAO,CAAC;AACrC,gBAAM,MAAM,OAAO,EAAE,YAAY,CAAC,GAAG,KAAK;AAC1C,kBAAQ,IAAI,OAAO,MAAM,KAAK,IAAI,CAAC,KAAK,GAAG,EAAE;AAAA,QAC/C,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAAA,IAEA,KAAK,WAAW;AACd,YAAM,UAAU,YAAY,OAAO,IAAI;AACvC,UAAI,QAAQ,WAAW,GAAG;AACxB,gBAAQ,IAAI,yBAAyB,IAAI,GAAG;AAC5C;AAAA,MACF;AAEA,cAAQ,IAAI,kBAAkB,IAAI,IAAI;AACtC,iBAAW,UAAU,SAAS;AAC5B,cAAM,MAAMA,eAAc,OAAO,YAAY,KAAK,OAAO;AACzD,cAAM,MAAM,OAAO,OAAO,YAAY,CAAC,GAAG,KAAK;AAC/C,gBAAQ,IAAI;AAAA,GAAM,GAAG,KAAK,GAAG,EAAE;AAG/B,cAAM,aAAa,oBAAI,IAAsB;AAC7C,mBAAW,OAAO,OAAO,iBAAiB;AACxC,gBAAM,gBAAgB,MAAM,QAAQ,IAAI,IAAI,MAAM;AAClD,cAAI,eAAe;AACjB,kBAAM,QAAQ,eAAe,OAAO,aAAa;AACjD,kBAAM,WAAW,WAAW,IAAI,IAAI,OAAO,KAAK,CAAC;AACjD,qBAAS,KAAK,MAAM,KAAK,IAAI,CAAC;AAC9B,uBAAW,IAAI,IAAI,SAAS,QAAQ;AAAA,UACtC;AAAA,QACF;AAEA,mBAAW,CAAC,SAAS,KAAK,KAAK,YAAY;AACzC,gBAAM,QAAQC,uBAAsB,OAAO,KAAK;AAChD,kBAAQ,IAAI,KAAK,KAAK,GAAG;AACzB,qBAAW,KAAK,MAAO,SAAQ,IAAI,SAAS,CAAC,EAAE;AAAA,QACjD;AAAA,MACF;AACA;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM,SAAS,MAAM,QAAQ,IAAI,IAAI;AACrC,UAAI,CAAC,QAAQ;AACX,gBAAQ,IAAI,qBAAqB,IAAI,EAAE;AACvC;AAAA,MACF;AAEA,YAAM,MAAMD,eAAc,OAAO,YAAY,KAAK,OAAO;AACzD,YAAM,QAAQ,eAAe,OAAO,MAAM;AAE1C,cAAQ,IAAI,WAAW,OAAO,EAAE,EAAE;AAClC,cAAQ,IAAI,UAAU,MAAM,KAAK,IAAI,CAAC,EAAE;AACxC,cAAQ,IAAI,mBAAmB,GAAG,EAAE;AACpC,cAAQ,IAAI,QAAQ,OAAO,GAAG,EAAE;AAChC,cAAQ,IAAI;AAAA,aAAgB;AAC5B,iBAAW,KAAK,OAAO,YAAa,SAAQ,IAAI,OAAO,OAAO,EAAE,KAAK,CAAC,EAAE;AAExE,UAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,gBAAQ,IAAI;AAAA,UAAa;AACzB,mBAAW,KAAK,OAAO;AACrB,kBAAQ,IAAI,QAAQ,OAAO,EAAE,KAAK,CAAC,GAAG;AAAA,MAC1C;AAEA,UAAI,OAAO,gBAAgB,SAAS,GAAG;AACrC,gBAAQ,IAAI;AAAA,WAAc;AAC1B,mBAAW,OAAO,OAAO,iBAAiB;AACxC,gBAAM,QAAQC,uBAAsB,IAAI,OAAO,KAAK,IAAI;AACxD,gBAAM,gBAAgB,MAAM,QAAQ,IAAI,IAAI,MAAM;AAClD,gBAAM,eAAe,gBACjB,eAAe,OAAO,aAAa,EAAE,KAAK,IAAI,IAC9C,IAAI;AACR,kBAAQ,IAAI,KAAK,KAAK,KAAK,YAAY,EAAE;AAAA,QAC3C;AAAA,MACF;AACA;AAAA,IACF;AAAA,IAEA;AACE,cAAQ,MAAM,oBAAoB,OAAO,EAAE;AAC3C,cAAQ,IAAI,IAAI;AAChB,cAAQ,KAAK,CAAC;AAAA,EAClB;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,MAAM,UAAU,IAAI,OAAO;AACnC,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":["Database","wordId","PartsOfSpeech","SynsetRelationRelType","newNode","path","lexicon","PartsOfSpeech","SynsetRelationRelType"]}
1
+
{"version":3,"sources":["../src/export-sqlite.ts","../src/types.ts","../src/helpers.ts","../src/literals.ts","../src/loader.ts","../node_modules/@dbushell/xml-streamify/src/node.ts","../node_modules/@dbushell/xml-streamify/src/stream.ts","../node_modules/@dbushell/xml-streamify/src/parse.ts","../src/query.ts","../src/cli.ts"],"sourcesContent":["import Database from \"libsql\";\nimport { decodeXmlEntities } from \"./helpers\";\nimport type { LexicalEntry, Lexicon, Synset } from \"./types\";\n\n/**\n * SQLite schema for WordNet export.\n * Also available as dist/schema.sql in the package.\n */\nexport const SCHEMA = `\nCREATE TABLE IF NOT EXISTS words (\n id INTEGER PRIMARY KEY,\n word TEXT NOT NULL, -- lowercase for search\n word_display TEXT NOT NULL -- original casing\n);\nCREATE INDEX IF NOT EXISTS idx_words_word ON words(word);\n\nCREATE TABLE IF NOT EXISTS synsets (\n id TEXT PRIMARY KEY,\n pos TEXT NOT NULL,\n definition TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS word_synsets (\n word_id INTEGER NOT NULL,\n synset_id TEXT NOT NULL,\n PRIMARY KEY (word_id, synset_id)\n);\nCREATE INDEX IF NOT EXISTS idx_ws_word ON word_synsets(word_id);\n`;\n\nexport interface ExportProgress {\n phase: \"words\" | \"synsets\" | \"relations\";\n current: number;\n total: number;\n}\n\nexport interface ExportOptions {\n onProgress?: (progress: ExportProgress) => void;\n}\n\n/**\n * Export WordNet lexicon to SQLite database.\n * Creates a compact database optimized for word lookup and definition retrieval.\n */\nexport function exportToSQLite(\n lexicon: Lexicon,\n outputPath: string,\n options: ExportOptions = {},\n): void {\n const { onProgress } = options;\n\n // Create database\n const db = new Database(outputPath);\n db.exec(\"PRAGMA journal_mode = OFF\");\n db.exec(\"PRAGMA synchronous = OFF\");\n db.exec(SCHEMA);\n\n // Build word -> entries mapping and collect unique words\n const wordToEntries = new Map<string, LexicalEntry[]>();\n for (const entry of lexicon.lexicalEntries) {\n const word = entry.lemmas[0]?.writtenForm;\n if (word) {\n const lower = word.toLowerCase();\n const existing = wordToEntries.get(lower) || [];\n existing.push(entry);\n wordToEntries.set(lower, existing);\n }\n }\n\n // Build synset lookup\n const synsetMap = new Map<string, Synset>();\n for (const synset of lexicon.synsets) {\n synsetMap.set(synset.id, synset);\n }\n\n // Insert words\n const insertWord = db.prepare(\n \"INSERT INTO words (word, word_display) VALUES (?, ?)\",\n );\n const wordIds = new Map<string, number>();\n const words = Array.from(wordToEntries.keys()).sort();\n const totalWords = words.length;\n\n db.exec(\"BEGIN TRANSACTION\");\n let wordId = 0;\n for (let i = 0; i < words.length; i++) {\n const word = words[i];\n const entries = wordToEntries.get(word);\n if (!entries) continue;\n // Use first entry's original casing as display form\n const display = entries[0].lemmas[0]?.writtenForm || word;\n insertWord.run(word, display);\n wordId++;\n wordIds.set(word, wordId);\n if (onProgress && i % 10000 === 0) {\n onProgress({ phase: \"words\", current: i, total: totalWords });\n }\n }\n db.exec(\"COMMIT\");\n\n // Insert synsets (only those that have word associations)\n const usedSynsetIds = new Set<string>();\n for (const entries of wordToEntries.values()) {\n for (const entry of entries) {\n for (const sense of entry.senses) {\n usedSynsetIds.add(sense.synset);\n }\n }\n }\n\n const insertSynset = db.prepare(\n \"INSERT OR IGNORE INTO synsets (id, pos, definition) VALUES (?, ?, ?)\",\n );\n const 
synsetList = Array.from(usedSynsetIds);\n const totalSynsets = synsetList.length;\n\n db.exec(\"BEGIN TRANSACTION\");\n for (let i = 0; i < synsetList.length; i++) {\n const synsetId = synsetList[i];\n const synset = synsetMap.get(synsetId);\n if (synset) {\n const def = decodeXmlEntities(synset.definitions[0]?.inner) || \"\";\n insertSynset.run(synsetId, synset.partOfSpeech, def);\n }\n if (onProgress && i % 10000 === 0) {\n onProgress({ phase: \"synsets\", current: i, total: totalSynsets });\n }\n }\n db.exec(\"COMMIT\");\n\n // Insert word-synset relations\n const insertRelation = db.prepare(\n \"INSERT OR IGNORE INTO word_synsets (word_id, synset_id) VALUES (?, ?)\",\n );\n let relationCount = 0;\n const totalRelations = Array.from(wordToEntries.values()).reduce(\n (sum, entries) => sum + entries.reduce((s, e) => s + e.senses.length, 0),\n 0,\n );\n\n db.exec(\"BEGIN TRANSACTION\");\n for (const [word, entries] of wordToEntries) {\n const wordId = wordIds.get(word);\n if (!wordId) continue;\n\n for (const entry of entries) {\n for (const sense of entry.senses) {\n insertRelation.run(wordId, sense.synset);\n relationCount++;\n if (onProgress && relationCount % 10000 === 0) {\n onProgress({\n phase: \"relations\",\n current: relationCount,\n total: totalRelations,\n });\n }\n }\n }\n }\n db.exec(\"COMMIT\");\n\n db.close();\n}\n","import { z } from \"zod\";\n\nexport const LexiconId = z.string();\nexport const LexicalEntryId = z.string();\nexport const SynsetId = z.string();\nexport const SenseId = z.string();\nexport const SyntacticBehaviorId = z.string();\n\n/** Note: only the literals that are found in the test wordnet xml file are listed */\nexport const PartsOfSpeech = z.union([\n z.literal(\"a\"),\n z.literal(\"c\"),\n z.literal(\"n\"),\n z.literal(\"p\"),\n z.literal(\"r\"),\n z.literal(\"s\"),\n z.literal(\"u\"),\n z.literal(\"v\"),\n z.literal(\"x\"),\n]);\n\n/** Note: only the literals that are found in the test wordnet xml file are listed */\nexport const SenseRelationRelType = z.union([\n z.literal(\"also\"),\n z.literal(\"antonym\"),\n z.literal(\"derivation\"),\n z.literal(\"domain_member_region\"),\n z.literal(\"domain_member_topic\"),\n z.literal(\"domain_region\"),\n z.literal(\"domain_topic\"),\n z.literal(\"exemplifies\"),\n z.literal(\"is_exemplified_by\"),\n z.literal(\"other\"), // TODO: Then \"dc:type\" attribute should define what relation\n z.literal(\"participle\"),\n z.literal(\"pertainym\"),\n z.literal(\"similar\"),\n]);\n\n/** Note: only the literals that are found in the test wordnet xml file are listed */\nexport const SynsetRelationRelType = z.union([\n z.literal(\"also\"),\n z.literal(\"attribute\"),\n z.literal(\"cause\"),\n z.literal(\"causes\"),\n z.literal(\"domain_member_region\"),\n z.literal(\"domain_member_topic\"),\n z.literal(\"domain_region\"),\n z.literal(\"domain_topic\"),\n z.literal(\"entail\"),\n z.literal(\"entails\"),\n z.literal(\"exemplifies\"),\n z.literal(\"has_domain_region\"),\n z.literal(\"has_domain_topic\"),\n z.literal(\"holo_member\"),\n z.literal(\"holo_part\"),\n z.literal(\"holo_substance\"),\n z.literal(\"hypernym\"),\n z.literal(\"hyponym\"),\n z.literal(\"instance_hypernym\"),\n z.literal(\"instance_hyponym\"),\n z.literal(\"is_caused_by\"),\n z.literal(\"is_entailed_by\"),\n z.literal(\"is_exemplified_by\"),\n z.literal(\"member_holonym\"),\n z.literal(\"member_meronym\"),\n z.literal(\"mero_member\"),\n z.literal(\"mero_part\"),\n z.literal(\"mero_substance\"),\n z.literal(\"part_holonym\"),\n 
z.literal(\"part_meronym\"),\n z.literal(\"similar\"),\n z.literal(\"substance_holonym\"),\n z.literal(\"substance_meronym\"),\n]);\n\nexport const AdjPosition = z.union([\n z.literal(\"a\"),\n z.literal(\"ip\"),\n z.literal(\"p\"),\n]);\n\nexport const Pronunciation = z.object({\n variety: z.string().optional(), // TODO: \"GB\", \"US\", ...\n inner: z.string(), // Actual value\n});\n\nexport const Lemma = z.object({\n writtenForm: z.string(), // Actual value\n partOfSpeech: PartsOfSpeech,\n pronunciations: z.array(Pronunciation).min(0),\n});\n\nexport const SenseRelation = z.object({\n relType: SenseRelationRelType,\n dcType: z.string().optional(), // TODO: This is only when relType is \"other\"\n target: SenseId,\n});\n\nexport const Sense = z.object({\n id: SenseId,\n synset: SynsetId,\n subCat: SyntacticBehaviorId.optional(),\n adjPosition: AdjPosition.optional(),\n senseRelations: z.array(SenseRelation).min(0),\n});\n\nexport const Form = z.object({\n writtenForm: z.string(), // This is where huge variety lives\n});\n\nexport const LexicalEntry = z.object({\n id: LexicalEntryId,\n lemmas: z.array(Lemma).length(1),\n senses: z.array(Sense).min(1),\n forms: z.array(Form).min(0),\n});\n\nexport const Definition = z.object({\n inner: z.string(), // Actual value\n});\n\nexport const Example = z.object({\n inner: z.string(), // Actual value\n dcSource: z.string().optional(),\n});\n\nexport const ILIDefinition = z.object({\n inner: z.string(), // Actual value\n});\n\nexport const SynsetRelation = z.object({\n relType: SynsetRelationRelType,\n target: SynsetId,\n});\n\nexport const Synset = z.object({\n id: SynsetId,\n ili: z.string(),\n members: z.array(LexicalEntryId).min(1), // space-separated list of refs that we unwrap to array\n partOfSpeech: PartsOfSpeech,\n lexfile: z.string(),\n dcSource: z.string().optional(),\n definitions: z.array(Definition).min(1),\n examples: z.array(Example).min(0),\n iliDefinitions: z.array(ILIDefinition).min(0),\n synsetRelations: z.array(SynsetRelation).min(0),\n});\n\nexport const SyntacticBehavior = z.object({\n id: SyntacticBehaviorId,\n subcategorizationFrame: z.string(), // Sentence structure. This is where (not very huge) variety lives\n});\n\nexport const Lexicon = z.object({\n id: LexiconId, // \"oewn\"\n label: z.string(), // \"Open English WordNet\"\n language: z.string(), // \"en\"\n email: z.string(), // \"english-wordnet@googlegroups.com\"\n license: z.string(), // \"https://creativecommons.org/licenses/by/4.0/\"\n version: z.string(), // \"2023\"\n url: z.string(), // \"https://github.com/globalwordnet/english-wordnet\">\n citation: z.string().optional(), // \"John P. 
McCrae, Alexandre Rademaker, Francis Bond, Ewa Rudnicka and Christiane Fellbaum (2019) English WordNet 2019 – An Open-Source WordNet for English, *Proceedings of the 10th Global WordNet Conference* – GWC 2019\"\n lexicalEntries: z.array(LexicalEntry).min(0),\n synsets: z.array(Synset).min(0),\n syntacticBehaviors: z.array(SyntacticBehavior).min(0),\n});\n\nexport type Lemma = z.infer<typeof Lemma>;\nexport type LexicalEntry = z.infer<typeof LexicalEntry>;\nexport type Sense = z.infer<typeof Sense>;\nexport type SenseRelation = z.infer<typeof SenseRelation>;\nexport type Pronunciation = z.infer<typeof Pronunciation>;\nexport type Form = z.infer<typeof Form>;\nexport type Synset = z.infer<typeof Synset>;\nexport type Definition = z.infer<typeof Definition>;\nexport type Example = z.infer<typeof Example>;\nexport type ILIDefinition = z.infer<typeof ILIDefinition>;\nexport type SynsetRelation = z.infer<typeof SynsetRelation>;\nexport type SyntacticBehavior = z.infer<typeof SyntacticBehavior>;\nexport type Lexicon = z.infer<typeof Lexicon>;\n\nexport const partsOfSpeechList: string[] = PartsOfSpeech.options.map(\n (v) => v.value,\n);\n","import type { Node } from \"@dbushell/xml-streamify\";\nimport {\n AdjPosition,\n Definition,\n Example,\n Form,\n ILIDefinition,\n Lemma,\n LexicalEntry,\n Lexicon,\n PartsOfSpeech,\n Pronunciation,\n Sense,\n SenseRelation,\n SenseRelationRelType,\n Synset,\n SynsetId,\n SynsetRelation,\n SynsetRelationRelType,\n SyntacticBehavior,\n} from \"./types\";\n\nexport function PronunciationNode(node: Node): Pronunciation {\n const obj: Pronunciation = {\n variety: optAttr(node, \"variety\"),\n inner: node.innerText,\n };\n return Pronunciation.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function LemmaNode(node: Node): Lemma {\n const obj: Lemma = {\n writtenForm: attr(node, \"writtenForm\"),\n partOfSpeech: PartsOfSpeech.parse(attr(node, \"partOfSpeech\")),\n pronunciations: //\n children(node, \"Pronunciation\", (v) => PronunciationNode(v)),\n };\n return Lemma.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function SenseRelationNode(node: Node): SenseRelation {\n const obj: SenseRelation = {\n relType: SenseRelationRelType.parse(attr(node, \"relType\")),\n target: attr(node, \"target\"),\n dcType: optAttr(node, \"dc:type\"),\n };\n return SenseRelation.parse(\n extendWithRestAttr(node, obj, (s) => (s === \"dc:type\" ? \"dcType\" : s)),\n );\n}\n\nexport function SenseNode(node: Node): Sense {\n const adjPos = optAttr(node, \"adjposition\");\n const obj: Sense = {\n id: attr(node, \"id\"),\n synset: SynsetId.parse(attr(node, \"synset\")),\n senseRelations: children(node, \"SenseRelation\", SenseRelationNode),\n subCat: optAttr(node, \"subcat\"),\n adjPosition: adjPos ? AdjPosition.parse(adjPos) : undefined,\n };\n return Sense.parse(\n extendWithRestAttr(node, obj, (s) =>\n s === \"subcat\" ? \"subCat\" : s === \"adjposition\" ? 
\"adjPosition\" : s,\n ),\n );\n}\n\nexport function FormNode(node: Node): Form {\n const obj: Form = {\n writtenForm: attr(node, \"writtenForm\"),\n };\n return Form.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function LexicalEntryNode(node: Node): LexicalEntry {\n const obj: LexicalEntry = {\n id: attr(node, \"id\"),\n lemmas: children(node, \"Lemma\", LemmaNode),\n senses: children(node, \"Sense\", SenseNode),\n forms: children(node, \"Form\", FormNode),\n };\n return LexicalEntry.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function DefinitionNode(node: Node): Definition {\n const obj: Definition = {\n inner: node.innerText,\n };\n return Definition.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function ExampleNode(node: Node): Example {\n const obj: Example = {\n inner: node.innerText,\n dcSource: optAttr(node, \"dc:source\"),\n };\n return Example.parse(\n extendWithRestAttr(node, obj, (s) => (s === \"dc:source\" ? \"dcSource\" : s)),\n );\n}\n\nexport function ILIDefinitionNode(node: Node): ILIDefinition {\n const obj: ILIDefinition = {\n inner: node.innerText,\n };\n return ILIDefinition.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function SynsetRelationNode(node: Node): SynsetRelation {\n const obj: SynsetRelation = {\n relType: SynsetRelationRelType.parse(attr(node, \"relType\")),\n target: attr(node, \"target\"),\n };\n return SynsetRelation.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function SyntacticBehaviorNode(node: Node): SyntacticBehavior {\n const obj: SyntacticBehavior = {\n id: attr(node, \"id\"),\n subcategorizationFrame: attr(node, \"subcategorizationFrame\"),\n };\n return SyntacticBehavior.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport function SynsetNode(node: Node): Synset {\n const obj: Synset = {\n id: attr(node, \"id\"),\n ili: attr(node, \"ili\"),\n lexfile: attr(node, \"lexfile\"),\n members: attr(node, \"members\").split(\" \"),\n dcSource: optAttr(node, \"dc:source\"),\n partOfSpeech: PartsOfSpeech.parse(attr(node, \"partOfSpeech\")),\n definitions: children(node, \"Definition\", (v) => DefinitionNode(v)),\n examples: children(node, \"Example\", (v) => ExampleNode(v)),\n iliDefinitions: children(node, \"ILIDefinition\", ILIDefinitionNode),\n synsetRelations: children(node, \"SynsetRelation\", SynsetRelationNode),\n };\n return Synset.parse(\n extendWithRestAttr(node, obj, (s) => (s === \"dc:source\" ? \"dcSource\" : s)),\n );\n}\n\n/** LexiconNode is used as a root node for the whole WordNet document structure,\n * omitting the `LexicalResource` parent (and its virtual grandparent representing the\n * whole document). 
*/\nexport function LexiconNode(node: Node): Lexicon {\n const obj: Lexicon = {\n id: attr(node, \"id\"),\n label: attr(node, \"label\"),\n language: attr(node, \"language\"),\n email: attr(node, \"email\"),\n license: attr(node, \"license\"),\n version: attr(node, \"version\"),\n citation: optAttr(node, \"citation\"),\n url: attr(node, \"url\"),\n lexicalEntries: children(node, \"LexicalEntry\", LexicalEntryNode),\n synsets: children(node, \"Synset\", SynsetNode),\n syntacticBehaviors: //\n children(node, \"SyntacticBehaviour\", SyntacticBehaviorNode),\n };\n return Lexicon.parse(extendWithRestAttr(node, obj, (s) => s));\n}\n\nexport const decodeXmlEntities = (\n s: string | undefined,\n): string | undefined => {\n if (s === undefined) return undefined;\n return s\n .replace(/&/g, \"&\")\n .replace(/</g, \"<\")\n .replace(/>/g, \">\")\n .replace(/'/g, \"'\")\n .replace(/"/g, '\"');\n};\n\nconst attr = (node: Node, attrName: string): string => {\n const value = decodeXmlEntities(node.attributes[attrName]);\n if (value === undefined) {\n throw new Error(\n `Missing required attribute \"${attrName}\" on node \"${node.type}\"`,\n );\n }\n return value;\n};\n\nconst optAttr = (node: Node, attrName: string): string | undefined => {\n return decodeXmlEntities(node.attributes[attrName]);\n};\n\n/** restAttrs appends the rest of the attributes, taking into account that some has been renamed.\n * The proxy function provided is expected to return the renamed result for an original xml key.\n */\nconst restAttrs = (\n node: Node,\n obj: object,\n proxy: (from: string) => string,\n): Record<string, string> => {\n const result: Record<string, string> = {};\n Object.keys(node.attributes) // These keys are still unmodified\n .filter((a) => !(proxy(a) in obj)) // Here we can't trust the 'in' because obj already has modified keys.\n .forEach((k) => {\n result[k] = decodeXmlEntities(node.attributes[k]) ?? node.attributes[k];\n });\n return result;\n};\n\nconst extendWithRestAttr = (\n node: Node,\n obj: object,\n proxy: (from: string) => string,\n) => {\n return Object.assign(obj, restAttrs(node, obj, proxy));\n};\n\nconst children = <T, Fn extends (node: Node) => T>(\n node: Node,\n type: string,\n fn: Fn,\n) => {\n return node.children\n .filter((v: Node) => v.type === type)\n .map((v: Node) => fn(v));\n};\n","// Literals are as discovered in WN-LMF-1.3.dtd.\n//\n// Note: types.ts is not necessarily lists all of these\n\nexport const PartsOfSpeech: Record<string, string> = {\n n: \"Noun\",\n v: \"Verb\",\n a: \"Adjective\",\n r: \"Adverb\",\n s: \"Adjective Satellite\",\n t: \"?\",\n c: \"Conjunction\",\n p: \"Adposition (Preposition, postposition, etc.)\",\n x: \"Other (inc. 
particle, classifier, bound morphemes, determiners)\",\n u: \"Unknown\",\n};\n\nexport const SenseRelationRelType: Record<string, string> = {\n also: \"See also\",\n anto_converse: \"Converse antonym\",\n anto_gradable: \"Gradable antonym\",\n anto_simple: \"Simple antonym\",\n antonym: \"Antonym\",\n augmentative: \"Augmentative\",\n derivation: \"Derivation\",\n diminutive: \"Diminutive\",\n domain_region: \"Domain region\",\n domain_topic: \"Domain topic\",\n exemplifies: \"Exemplifies\",\n feminine: \"Feminine\",\n has_augmentative: \"Has augmentative\",\n has_diminutive: \"Has diminutive\",\n has_domain_region: \"Has domain region\",\n has_domain_topic: \"Has domain topic\",\n has_feminine: \"Has feminine\",\n has_masculine: \"Has masculine\",\n has_young: \"Has young\",\n is_exemplified_by: \"Is exemplified by\",\n masculine: \"Masculine\",\n other: \"Other\",\n participle: \"Participle\",\n pertainym: \"Pertainym\",\n secondary_aspect_ip: \"Secondary aspect IP\",\n secondary_aspect_pi: \"Secondary aspect PI\",\n similar: \"Similar\",\n simple_aspect_ip: \"Simple aspect IP\",\n simple_aspect_pi: \"Simple aspect PI\",\n young: \"Young\",\n domain_member_region: \"Domain member region\",\n domain_member_topic: \"Domain member topic\",\n};\n\nexport const SynsetRelationRelType: Record<string, string> = {\n agent: \"Agent\",\n also: \"See also\",\n anto_converse: \"Converse antonym\",\n anto_gradable: \"Gradable antonym\",\n anto_simple: \"Simple antonym\",\n antonym: \"Antonym\",\n attribute: \"Attribute\",\n augmentative: \"Augmentative\",\n be_in_state: \"Be in state\",\n cause: \"Cause\",\n causes: \"Causes\",\n classified_by: \"Classified by\",\n classifies: \"Classifies\",\n co_agent_instrument: \"Co-agent instrument\",\n co_agent_patient: \"Co-agent patient\",\n co_agent_result: \"Co-agent result\",\n co_instrument_agent: \"Co-instrument agent\",\n co_instrument_patient: \"Co-instrument patient\",\n co_instrument_result: \"Co-instrument result\",\n co_patient_agent: \"Co-patient agent\",\n co_patient_instrument: \"Co-patient instrument\",\n co_result_agent: \"Co-result agent\",\n co_result_instrument: \"Co-result instrument\",\n co_role: \"Co-role\",\n diminutive: \"Diminutive\",\n direction: \"Direction\",\n domain_member_region: \"Domain member region\",\n domain_member_topic: \"Domain member topic\",\n domain_region: \"Domain region\",\n domain_topic: \"Domain topic\",\n entail: \"Entail\",\n entails: \"Entails\",\n eq_synonym: \"Equivalent synonym\",\n exemplifies: \"Exemplifies\",\n feminine: \"Feminine\",\n has_augmentative: \"Has augmentative\",\n has_diminutive: \"Has diminutive\",\n has_domain_region: \"Has domain region\",\n has_domain_topic: \"Has domain topic\",\n has_feminine: \"Has feminine\",\n has_masculine: \"Has masculine\",\n has_young: \"Has young\",\n holo_location: \"Holonym location\",\n holo_member: \"Member holonym\",\n holo_part: \"Part holonym\",\n holo_portion: \"Portion holonym\",\n holo_substance: \"Substance holonym\",\n holonym: \"Holonym\",\n hypernym: \"Hypernym\",\n hyponym: \"Hyponym\",\n in_manner: \"In manner\",\n instance_hypernym: \"Instance hypernym\",\n instance_hyponym: \"Instance hyponym\",\n instrument: \"Instrument\",\n involved: \"Involved\",\n involved_agent: \"Involved agent\",\n involved_direction: \"Involved direction\",\n involved_instrument: \"Involved instrument\",\n involved_location: \"Involved location\",\n involved_patient: \"Involved patient\",\n involved_result: \"Involved result\",\n involved_source_direction: \"Involved 
source direction\",\n involved_target_direction: \"Involved target direction\",\n ir_synonym: \"IR synonym\",\n is_caused_by: \"Is caused by\",\n is_entailed_by: \"Is entailed by\",\n is_exemplified_by: \"Is exemplified by\",\n is_subevent_of: \"Is subevent of\",\n location: \"Location\",\n manner_of: \"Manner of\",\n masculine: \"Masculine\",\n member_holonym: \"Member holonym\",\n member_meronym: \"Member meronym\",\n mero_location: \"Meronym location\",\n mero_member: \"Member meronym\",\n mero_part: \"Part meronym\",\n mero_portion: \"Portion meronym\",\n mero_substance: \"Substance meronym\",\n meronym: \"Meronym\",\n other: \"Other\",\n part_holonym: \"Part holonym\",\n part_meronym: \"Part meronym\",\n patient: \"Patient\",\n restricted_by: \"Restricted by\",\n restricts: \"Restricts\",\n result: \"Result\",\n role: \"Role\",\n similar: \"Similar\",\n source_direction: \"Source direction\",\n state_of: \"State of\",\n subevent: \"Subevent\",\n substance_holonym: \"Substance holonym\",\n substance_meronym: \"Substance meronym\",\n target_direction: \"Target direction\",\n young: \"Young\",\n};\n\nexport const AdjPosition: Record<string, string> = {\n a: \"Attributive\",\n ip: \"Immediate postnominal\",\n p: \"Predicative\",\n};\n","import {\n createReadStream,\n existsSync,\n mkdirSync,\n readdirSync,\n statSync,\n writeFileSync,\n} from \"node:fs\";\nimport path from \"node:path\";\nimport { Readable } from \"node:stream\";\nimport { type Node, parse } from \"@dbushell/xml-streamify\";\nimport { LexiconNode } from \"./helpers\";\nimport type { Lexicon } from \"./types\";\n\n/** Base version to start searching from */\nexport const BASE_VERSION = \"2024\";\n\n/** Generate filename for a given version */\nexport function getFilename(version: string): string {\n return `english-wordnet-${version}.xml`;\n}\n\n/** Generate download URL for a given version */\nexport function getDownloadUrl(version: string): string {\n return `https://en-word.net/static/${getFilename(version)}.gz`;\n}\n\n/** Default cache directory for downloaded WordNet data */\nexport function getDefaultCacheDir(): string {\n const homeDir = process.env.HOME || process.env.USERPROFILE || \".\";\n return path.join(homeDir, \".cache\", \"synset\");\n}\n\n/** Check if file exists and is a file */\nfunction fileExists(filePath: string): boolean {\n if (existsSync(filePath)) {\n const stat = statSync(filePath);\n return stat.isFile();\n }\n return false;\n}\n\n/** Check if a remote URL exists (HEAD request) */\nasync function urlExists(url: string): Promise<boolean> {\n try {\n const response = await fetch(url, { method: \"HEAD\" });\n return response.ok;\n } catch {\n return false;\n }\n}\n\n/** Extract version year from filename (e.g., \"english-wordnet-2024.xml\" -> 2024) */\nfunction extractVersionFromFilename(filename: string): number | null {\n const match = filename.match(/english-wordnet-(\\d{4})\\.xml/);\n return match ? parseInt(match[1], 10) : null;\n}\n\n/** Find any cached WordNet file and return its version */\nfunction findCachedVersion(cacheDir: string): string | null {\n if (!existsSync(cacheDir)) return null;\n\n const files = readdirSync(cacheDir);\n const wordnetFiles = files\n .map((f) => ({ file: f, year: extractVersionFromFilename(f) }))\n .filter((x): x is { file: string; year: number } => x.year !== null)\n .sort((a, b) => b.year - a.year); // newest first\n\n return wordnetFiles.length > 0 ? 
wordnetFiles[0].year.toString() : null;\n}\n\n/**\n * Find the best available WordNet version.\n * - WordNet releases come out at END of year, so we only check up to (currentYear - 1)\n * - If cached version exists and no newer year has passed, use cache (no network)\n * - If a new year has passed since cache, check for that year's release\n * - If no cache, discover latest available\n */\nexport async function findLatestVersion(\n onProgress?: (message: string) => void,\n cacheDir?: string,\n): Promise<string> {\n const log = onProgress || (() => {});\n const currentYear = new Date().getFullYear();\n const lastReleasableYear = currentYear - 1; // Can't have 2026 release until end of 2026\n const baseYear = parseInt(BASE_VERSION, 10);\n const dir = cacheDir || getDefaultCacheDir();\n\n // Check for existing cache\n const cachedVersion = findCachedVersion(dir);\n if (cachedVersion) {\n const cachedYear = parseInt(cachedVersion, 10);\n\n // If cached version is already at or beyond last releasable year, use it\n if (cachedYear >= lastReleasableYear) {\n return cachedVersion;\n }\n\n // Check for versions between cache and last releasable year\n log(`Checking for newer version...`);\n for (let year = cachedYear + 1; year <= lastReleasableYear; year++) {\n const version = year.toString();\n if (await urlExists(getDownloadUrl(version))) {\n log(`Found ${version}`);\n return version;\n }\n }\n // No newer version found, use cache\n return cachedVersion;\n }\n\n // No cache - discover from BASE_VERSION\n log(`Checking available versions...`);\n if (await urlExists(getDownloadUrl(BASE_VERSION))) {\n // Check if there's a newer version (up to last releasable year)\n for (let year = baseYear + 1; year <= lastReleasableYear; year++) {\n const version = year.toString();\n if (await urlExists(getDownloadUrl(version))) {\n } else {\n return (year - 1).toString();\n }\n }\n // All years up to lastReleasableYear exist, return that\n return lastReleasableYear.toString();\n }\n\n // Base version doesn't exist, try incrementing\n for (let year = baseYear + 1; year <= lastReleasableYear; year++) {\n const version = year.toString();\n if (await urlExists(getDownloadUrl(version))) {\n return version;\n }\n }\n\n throw new Error(\n `No WordNet version found between ${BASE_VERSION} and ${lastReleasableYear}`,\n );\n}\n\n/** Download and decompress WordNet XML from remote URL */\nasync function downloadWordNet(\n version: string,\n destPath: string,\n): Promise<void> {\n const url = getDownloadUrl(version);\n const response = await fetch(url);\n if (!response.ok || !response.body) {\n throw new Error(\n `Failed to download WordNet ${version}: ${response.statusText}`,\n );\n }\n\n const decompressed = response.body.pipeThrough(\n new DecompressionStream(\"gzip\"),\n );\n const arrayBuffer = await new Response(decompressed).arrayBuffer();\n\n // Ensure directory exists\n const dir = path.dirname(destPath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n\n writeFileSync(destPath, Buffer.from(arrayBuffer));\n}\n\n/** Create XML streaming parser for WordNet file */\nexport function createParser(filePath: string) {\n const resolvedPath = path.resolve(filePath);\n const nodeStream = createReadStream(resolvedPath);\n const webStream = Readable.toWeb(nodeStream) as unknown as ReadableStream;\n return parse(webStream, {\n ignoreDeclaration: false,\n silent: false,\n });\n}\n\n/** Parse Lexicon from XML stream */\nexport async function parseLexicon(\n // biome-ignore 
lint/suspicious/noConfusingVoidType: matches xml-streamify's return type\n parser: AsyncGenerator<Node, void | Node, void>,\n): Promise<Lexicon | undefined> {\n for await (const node of parser) {\n if (node.type === \"Lexicon\") {\n return LexiconNode(node);\n }\n }\n return undefined;\n}\n\nexport interface LoadOptions {\n /** Specific version to download (e.g., \"2024\"). If not set, finds latest. */\n version?: string;\n /** Custom cache directory for downloaded data */\n cacheDir?: string;\n /** Force re-download even if cached */\n forceDownload?: boolean;\n /** Callback for progress updates */\n onProgress?: (message: string) => void;\n}\n\nexport interface LoadResult {\n lexicon: Lexicon;\n version: string;\n filePath: string;\n}\n\n/**\n * Load WordNet from a local file path.\n * @param filePath Path to the WordNet XML file\n */\nexport async function loadWordNet(filePath: string): Promise<Lexicon> {\n if (!fileExists(filePath)) {\n throw new Error(`WordNet file not found: ${filePath}`);\n }\n\n const parser = createParser(filePath);\n const lexicon = await parseLexicon(parser);\n\n if (!lexicon) {\n throw new Error(\"Failed to parse WordNet: no Lexicon node found\");\n }\n\n return lexicon;\n}\n\n/**\n * Fetch WordNet from remote URL, cache locally, and parse.\n * If no version specified, finds the latest available version.\n * @param options Loading options\n * @returns LoadResult with lexicon, version, and file path\n */\nexport async function fetchWordNet(\n options: LoadOptions = {},\n): Promise<LoadResult> {\n const cacheDir = options.cacheDir || getDefaultCacheDir();\n const log = options.onProgress || (() => {});\n\n // Determine version to use\n const version = options.version || (await findLatestVersion(log, cacheDir));\n const filename = getFilename(version);\n const cachedPath = path.join(cacheDir, filename);\n\n if (!fileExists(cachedPath) || options.forceDownload) {\n const url = getDownloadUrl(version);\n log(`Downloading WordNet ${version} from ${url}`);\n await downloadWordNet(version, cachedPath);\n log(`Saved to ${cachedPath}`);\n } else {\n log(`Using cached ${cachedPath}`);\n }\n\n const lexicon = await loadWordNet(cachedPath);\n return { lexicon, version, filePath: cachedPath };\n}\n\n/**\n * Get path to cached WordNet file (downloads if not present).\n * Useful when you want to work with the file directly.\n * @returns Object with file path and version\n */\nexport async function ensureWordNetCached(\n options: LoadOptions = {},\n): Promise<{ filePath: string; version: string }> {\n const cacheDir = options.cacheDir || getDefaultCacheDir();\n const log = options.onProgress || (() => {});\n\n // Determine version to use\n const version = options.version || (await findLatestVersion(log, cacheDir));\n const filename = getFilename(version);\n const cachedPath = path.join(cacheDir, filename);\n\n if (!fileExists(cachedPath) || options.forceDownload) {\n const url = getDownloadUrl(version);\n log(`Downloading WordNet ${version} from ${url}`);\n await downloadWordNet(version, cachedPath);\n log(`Saved to ${cachedPath}`);\n } else {\n log(`Using cached ${cachedPath}`);\n }\n\n return { filePath: cachedPath, version };\n}\n\n// Legacy exports for backwards compatibility\nexport const WORDNET_VERSION = BASE_VERSION;\nexport const WORDNET_FILENAME = getFilename(BASE_VERSION);\nexport const WORDNET_URL = getDownloadUrl(BASE_VERSION);\n","/**\n * Module exports an XML Node class.\n *\n * @module\n */\n/** XML node with helper methods to read data and traverse the tree 
*/\nexport class Node {\n #type: string;\n #children: Array<Node>;\n #parent?: Node;\n #attr?: Record<string, string>;\n #raw?: string;\n\n constructor(type: string, parent?: Node, raw?: string) {\n this.#type = type;\n this.#parent = parent;\n this.#raw = raw;\n this.#children = [];\n }\n\n get type(): string {\n return this.#type;\n }\n\n get raw(): string {\n return this.#raw ?? '';\n }\n\n get parent(): Node | undefined {\n return this.#parent;\n }\n\n get children(): Array<Node> {\n return this.#children;\n }\n\n get attributes(): Record<string, string> {\n if (this.#attr) {\n return this.#attr;\n }\n // Setup and parse attributes on first access\n this.#attr = {};\n if (this.raw) {\n const regex = /([\\w:.-]+)\\s*=\\s*([\"'])(.*?)\\2/g;\n let match: RegExpExecArray | null;\n while ((match = regex.exec(this.raw)) !== null) {\n this.#attr[match[1]] = match[3];\n }\n }\n return this.#attr;\n }\n\n get innerText(): string {\n if (this.children.length) {\n let text = '';\n for (const child of this.children) {\n text += child.innerText;\n }\n return text;\n }\n return (this.raw.match(/<!\\[CDATA\\[(.*?)]]>/s) ?? [, this.raw])[1];\n }\n\n addChild(child: Node): void {\n this.#children.push(child);\n }\n\n /**\n * Returns true if node and parents match the key hierarchy\n * @param keys - XML tag names\n */\n is(...keys: Array<string>): boolean {\n if (!keys.length) return false;\n let parent: Node | undefined;\n for (const key of keys.toReversed()) {\n parent = parent ? parent.parent : this;\n if (parent?.type !== key) {\n return false;\n }\n }\n return true;\n }\n\n /**\n * Return the first child matching the key\n * @param key - XML tag name\n */\n first(key: string): Node | undefined {\n return this.children.find((n) => n.type === key);\n }\n\n /**\n * Return all children matching the key hierarchy\n * @param keys - XML tag names\n */\n all(...keys: Array<string>): Array<Node> {\n let nodes: Array<Node> | undefined = this.children;\n let found: Array<Node> = [];\n for (const [i, k] of Object.entries(keys)) {\n if (Number.parseInt(i) === keys.length - 1) {\n found = nodes.filter((n) => n.type === k);\n break;\n }\n nodes = nodes?.find((n) => n.type === k)?.children;\n if (!nodes) return [];\n }\n return found;\n }\n}\n","/**\n * Module exports a `TransformStream` class for decoding binary XML streams into structured data.\n *\n * @module\n */\nimport {NodeType, StateType} from './types.ts';\n\ntype State = NodeType | StateType;\n\nconst ENTITIES = {\n cdata: {\n end: ']]>',\n start: /^<!\\[CDATA\\[/\n },\n comment: {\n end: '-->',\n start: /^<!--/\n },\n declaration: {\n end: '?>',\n start: /^<\\?/\n },\n doctype: {\n end: '>',\n start: /^<!DOCTYPE/i\n },\n element: {\n end: '>',\n start: /^<[\\w:.-/]/\n }\n} as const;\n\n/** Transformer object for `TransformStream` constructed by `XMLStream` */\nexport const transformer: Transformer<string, [NodeType, string]> & {\n buf: string;\n state: State;\n previous: [State, number];\n} = {\n buf: '',\n state: StateType.SKIP,\n previous: [StateType.SKIP, -1],\n flush(controller) {\n // Buffer should be empty if document is well-formed\n if (this.buf.length > 0) {\n controller.enqueue([NodeType.TEXT, this.buf]);\n }\n },\n transform(chunk, controller) {\n this.buf += chunk;\n while (this.buf.length) {\n // Break if no progress is made (entity may straddle chunk boundary)\n if (\n this.state === this.previous[0] &&\n this.buf.length === this.previous[1]\n ) {\n break;\n }\n this.previous = [this.state, this.buf.length];\n // Skip to next entity\n if 
(this.state === StateType.SKIP) {\n const index = this.buf.indexOf('<');\n if (index < 0) break;\n // Clear buffer up to index of next entity\n controller.enqueue([NodeType.TEXT, this.buf.substring(0, index)]);\n this.buf = this.buf.substring(index);\n this.state = StateType.SEARCH;\n }\n // Search for start of entity\n if (this.state === StateType.SEARCH) {\n if (this.buf.length < 3) break;\n for (const [state, entity] of Object.entries(ENTITIES)) {\n if (this.buf.match(entity.start)) {\n this.state = state as State;\n break;\n }\n }\n continue;\n }\n // Search for end of entity\n if (Object.hasOwn(ENTITIES, this.state)) {\n const {end} = ENTITIES[this.state as keyof typeof ENTITIES];\n const index = this.buf.indexOf(end);\n if (index < 0) break;\n controller.enqueue([\n this.state,\n this.buf.substring(0, index + end.length)\n ]);\n this.buf = this.buf.substring(index + end.length);\n this.state = StateType.SKIP;\n continue;\n }\n // We should never be here something is very wrong!\n throw new Error();\n }\n }\n};\n\n/** Transform a binary XML stream into a stream of structured XML data */\nexport class XMLStream extends TransformStream<string, [NodeType, string]> {\n constructor() {\n super({...transformer});\n }\n}\n","/**\n * Module export an async generator function for parsing a streamed XML document.\n *\n * @module\n */\nimport type {ParseOptions} from './types.ts';\nimport {NodeType} from './types.ts';\nimport {Node} from './node.ts';\nimport {XMLStream} from './stream.ts';\n\nconst ignoreTypes: Partial<Record<NodeType, keyof ParseOptions>> = {\n [NodeType.COMMENT]: 'ignoreComments',\n [NodeType.DECLARATION]: 'ignoreDeclaration',\n [NodeType.DOCTYPE]: 'ignoreDoctype'\n} as const;\n\n/**\n * Async generator function for parsing a streamed XML document\n * @param input URL to fetch and parse (or a ReadableStream)\n * @param options Parsing options {@link ParseOptions}\n * @returns Yields parsed XML nodes {@link Node}\n */\nexport async function* parse(\n input: string | URL | ReadableStream,\n options?: ParseOptions\n): AsyncGenerator<Node, Node | void, void> {\n const document = new Node('@document');\n try {\n const init = {...options?.fetchOptions};\n if (options?.signal) {\n init.signal = options.signal;\n }\n\n let source: ReadableStream;\n\n // Fetch stream if URL is provided as input\n if (typeof input === 'string' || input instanceof URL) {\n input = new URL(input);\n const response = await fetch(input, init);\n if (!response.ok || !response.body) {\n throw new Error(`Bad response`);\n }\n source = response.body;\n } else {\n source = input;\n }\n\n const stream = source\n .pipeThrough(new TextDecoderStream())\n .pipeThrough(new XMLStream(), {\n signal: options?.signal\n });\n\n // Set root document as current node\n let node = document;\n\n for await (const [type, value] of stream) {\n if (options?.signal?.aborted) {\n break;\n }\n // Skip whitespace\n if (type === NodeType.TEXT) {\n if (options?.ignoreWhitespace !== false && value.trim().length === 0) {\n continue;\n }\n }\n // Handle other ignored types\n if (type in ignoreTypes && options?.[ignoreTypes[type]!] 
=== false) {\n const newNode = new Node(type, node, value);\n node.addChild(newNode);\n yield newNode;\n continue;\n }\n // Handle elements\n if (type === NodeType.ELEMENT) {\n const name = value.match(/<\\/?([\\w:.-]+)/)![1];\n // Handle self-closing element\n if (value.endsWith('/>')) {\n const newNode = new Node(name, node, value);\n node.addChild(newNode);\n yield newNode;\n continue;\n }\n // Handle closing element\n if (value.startsWith('</')) {\n yield node;\n node = node.parent!;\n continue;\n }\n // Handle opening element\n const newNode = new Node(name, node, value);\n node.addChild(newNode);\n node = newNode;\n continue;\n }\n // Handle other types\n node.addChild(new Node(type, node, value));\n }\n } catch (err) {\n if (options?.silent === false) {\n throw err;\n }\n }\n return document;\n}\n","import type { z } from \"zod\";\nimport type {\n LexicalEntry,\n Lexicon,\n Sense,\n Synset,\n SynsetRelation,\n SynsetRelationRelType as SynsetRelationRelTypeSchema,\n} from \"./types\";\n\nexport type SynsetRelationRelType = z.infer<typeof SynsetRelationRelTypeSchema>;\n\n/** Definition with its source synset */\nexport interface DefinitionResult {\n text: string;\n synset: Synset;\n partOfSpeech: string;\n}\n\n/** Synonym with its source context */\nexport interface SynonymResult {\n word: string;\n entry: LexicalEntry;\n synset: Synset;\n}\n\n/** Indexed WordNet data for fast lookups */\nexport interface WordNetIndex {\n /** All synsets indexed by ID */\n synsets: Map<string, Synset>;\n /** All senses indexed by ID */\n senses: Map<string, Sense>;\n /** All lexical entries indexed by ID */\n entries: Map<string, LexicalEntry>;\n /** Lexical entries indexed by word (lowercase) */\n byWord: Map<string, LexicalEntry[]>;\n /** Senses indexed by word (lowercase) */\n sensesByWord: Map<string, Sense[]>;\n /** Synsets indexed by word (lowercase) */\n synsetsByWord: Map<string, Synset[]>;\n /** Original lexicon */\n lexicon: Lexicon;\n}\n\n/**\n * Build an indexed structure for fast lookups.\n * Call this once after loading the lexicon.\n */\nexport function buildIndex(lexicon: Lexicon): WordNetIndex {\n const synsets = new Map<string, Synset>();\n const senses = new Map<string, Sense>();\n const entries = new Map<string, LexicalEntry>();\n const byWord = new Map<string, LexicalEntry[]>();\n const sensesByWord = new Map<string, Sense[]>();\n const synsetsByWord = new Map<string, Synset[]>();\n\n // Index synsets\n for (const synset of lexicon.synsets) {\n synsets.set(synset.id, synset);\n }\n\n // Index lexical entries and senses\n for (const entry of lexicon.lexicalEntries) {\n entries.set(entry.id, entry);\n\n const word = entry.lemmas[0]?.writtenForm.toLowerCase();\n if (word) {\n // Index by word\n const existing = byWord.get(word) || [];\n existing.push(entry);\n byWord.set(word, existing);\n\n // Index senses by word\n for (const sense of entry.senses) {\n senses.set(sense.id, sense);\n\n const existingSenses = sensesByWord.get(word) || [];\n existingSenses.push(sense);\n sensesByWord.set(word, existingSenses);\n\n // Index synsets by word\n const synset = synsets.get(sense.synset);\n if (synset) {\n const existingSynsets = synsetsByWord.get(word) || [];\n if (!existingSynsets.includes(synset)) {\n existingSynsets.push(synset);\n synsetsByWord.set(word, existingSynsets);\n }\n }\n }\n }\n }\n\n return {\n synsets,\n senses,\n entries,\n byWord,\n sensesByWord,\n synsetsByWord,\n lexicon,\n };\n}\n\n/** Get a synset by ID */\nexport function getSynset(index: WordNetIndex, id: string): 
Synset | undefined {\n return index.synsets.get(id);\n}\n\n/** Get a sense by ID */\nexport function getSense(index: WordNetIndex, id: string): Sense | undefined {\n return index.senses.get(id);\n}\n\n/** Get a lexical entry by ID */\nexport function getLexicalEntry(\n index: WordNetIndex,\n id: string,\n): LexicalEntry | undefined {\n return index.entries.get(id);\n}\n\n/** Find all lexical entries for a word */\nexport function findWord(index: WordNetIndex, word: string): LexicalEntry[] {\n return index.byWord.get(word.toLowerCase()) || [];\n}\n\n/** Find all senses for a word */\nexport function findSenses(index: WordNetIndex, word: string): Sense[] {\n return index.sensesByWord.get(word.toLowerCase()) || [];\n}\n\n/** Find all synsets for a word */\nexport function findSynsets(index: WordNetIndex, word: string): Synset[] {\n return index.synsetsByWord.get(word.toLowerCase()) || [];\n}\n\n/** Get all definitions for a word */\nexport function getDefinitions(\n index: WordNetIndex,\n word: string,\n): DefinitionResult[] {\n const synsets = findSynsets(index, word);\n return synsets.flatMap((synset) =>\n synset.definitions.map((d) => ({\n text: d.inner,\n synset,\n partOfSpeech: synset.partOfSpeech,\n })),\n );\n}\n\n/** Get related synsets by relation type */\nexport function getRelated(\n index: WordNetIndex,\n synset: Synset,\n relType: SynsetRelationRelType,\n): Synset[] {\n return synset.synsetRelations\n .filter((r: SynsetRelation) => r.relType === relType)\n .map((r: SynsetRelation) => index.synsets.get(r.target))\n .filter((s): s is Synset => s !== undefined);\n}\n\n/** Get hypernyms (more general terms) for a word */\nexport function getHypernyms(index: WordNetIndex, word: string): Synset[] {\n const synsets = findSynsets(index, word);\n return synsets.flatMap((s) => getRelated(index, s, \"hypernym\"));\n}\n\n/** Get hyponyms (more specific terms) for a word */\nexport function getHyponyms(index: WordNetIndex, word: string): Synset[] {\n const synsets = findSynsets(index, word);\n return synsets.flatMap((s) => getRelated(index, s, \"hyponym\"));\n}\n\n/** Get synonyms for a word (words in the same synsets) */\nexport function getSynonyms(\n index: WordNetIndex,\n word: string,\n): SynonymResult[] {\n const synsets = findSynsets(index, word);\n const lowerWord = word.toLowerCase();\n const seen = new Set<string>();\n const results: SynonymResult[] = [];\n\n for (const synset of synsets) {\n for (const memberId of synset.members) {\n const entry = index.entries.get(memberId);\n if (entry) {\n const lemma = entry.lemmas[0]?.writtenForm;\n if (lemma && lemma.toLowerCase() !== lowerWord && !seen.has(lemma)) {\n seen.add(lemma);\n results.push({ word: lemma, entry, synset });\n }\n }\n }\n }\n\n return results;\n}\n\n/** Get the written form of the first lemma for a synset */\nexport function getSynsetWords(index: WordNetIndex, synset: Synset): string[] {\n return synset.members\n .map((id) => index.entries.get(id))\n .filter((e): e is LexicalEntry => e !== undefined)\n .map((e) => e.lemmas[0]?.writtenForm)\n .filter((w): w is string => w !== undefined);\n}\n","#!/usr/bin/env node\nimport { exportToSQLite } from \"./export-sqlite\";\nimport { decodeXmlEntities } from \"./helpers\";\nimport { PartsOfSpeech, SynsetRelationRelType } from \"./literals\";\nimport { ensureWordNetCached, fetchWordNet, loadWordNet } from \"./loader\";\nimport {\n buildIndex,\n findSynsets,\n getDefinitions,\n getHypernyms,\n getHyponyms,\n getSynonyms,\n getSynsetWords,\n} from \"./query\";\n\n/** Decode XML 
entities, return empty string if undefined */\nconst decode = (s: string | undefined): string => decodeXmlEntities(s) ?? \"\";\n\nconst HELP = `\nsynset - WordNet dictionary explorer\n\nUsage:\n synset <command> [options]\n\nCommands:\n define <word> Show definitions for a word\n synonyms <word> List synonyms for a word\n hypernyms <word> Show hypernyms (more general terms)\n hyponyms <word> Show hyponyms (more specific terms)\n related <word> Show all relations for a word\n info <synset-id> Show details for a synset ID\n fetch Download WordNet data to cache\n export-sqlite <out> Export dictionary to SQLite database\n\nOptions:\n --file <path> Use a local WordNet XML file instead of cache\n --help, -h Show this help message\n\nExamples:\n synset define dog\n synset synonyms happy\n synset related computer --file ./wordnet.xml\n synset fetch\n synset export-sqlite dictionary.db\n`;\n\nasync function main() {\n const args = process.argv.slice(2);\n\n if (args.length === 0 || args.includes(\"--help\") || args.includes(\"-h\")) {\n console.log(HELP);\n process.exit(0);\n }\n\n const command = args[0];\n const fileIndex = args.indexOf(\"--file\");\n const filePath = fileIndex !== -1 ? args[fileIndex + 1] : undefined;\n\n // Remove --file and its argument from args for word extraction\n const cleanArgs =\n fileIndex === -1\n ? args\n : args.filter((_, i) => i !== fileIndex && i !== fileIndex + 1);\n const word = cleanArgs[1];\n\n if (command === \"fetch\") {\n console.log(\"Downloading WordNet data...\");\n const { filePath: cachedPath, version } = await ensureWordNetCached({\n forceDownload: args.includes(\"--force\"),\n onProgress: console.log,\n });\n console.log(`WordNet ${version} cached at: ${cachedPath}`);\n return;\n }\n\n if (command === \"export-sqlite\") {\n const outputPath = cleanArgs[1];\n if (!outputPath) {\n console.error(\"Error: Missing output path for export-sqlite\");\n process.exit(1);\n }\n console.log(\"Loading WordNet data...\");\n const lexicon = filePath\n ? await loadWordNet(filePath)\n : (await fetchWordNet({ onProgress: console.log })).lexicon;\n console.log(`Exporting to ${outputPath}...`);\n exportToSQLite(lexicon, outputPath, {\n onProgress: ({ phase, current, total }) => {\n process.stdout.write(`\\r${phase}: ${current}/${total}`);\n },\n });\n console.log(`\\nExported to ${outputPath}`);\n return;\n }\n\n if (!word && command !== \"fetch\") {\n console.error(`Error: Missing word argument for command '${command}'`);\n process.exit(1);\n }\n\n const lexicon = filePath\n ? await loadWordNet(filePath)\n : (await fetchWordNet({ onProgress: console.log })).lexicon;\n\n const index = buildIndex(lexicon);\n\n switch (command) {\n case \"define\": {\n const definitions = getDefinitions(index, word);\n if (definitions.length === 0) {\n console.log(`No definitions found for \"${word}\"`);\n } else {\n console.log(`Definitions for \"${word}\":`);\n definitions.forEach((def, i) => {\n const pos = PartsOfSpeech[def.partOfSpeech] || def.partOfSpeech;\n console.log(` ${i + 1}. 
[${pos}] ${decode(def.text)}`);\n });\n }\n break;\n }\n\n case \"synonyms\": {\n const synonyms = getSynonyms(index, word);\n if (synonyms.length === 0) {\n console.log(`No synonyms found for \"${word}\"`);\n } else {\n console.log(`Synonyms for \"${word}\":`);\n console.log(` ${synonyms.map((s) => s.word).join(\", \")}`);\n }\n break;\n }\n\n case \"hypernyms\": {\n const hypernyms = getHypernyms(index, word);\n if (hypernyms.length === 0) {\n console.log(`No hypernyms found for \"${word}\"`);\n } else {\n console.log(`Hypernyms for \"${word}\" (more general):`);\n hypernyms.forEach((s) => {\n const words = getSynsetWords(index, s);\n const def = decode(s.definitions[0]?.inner);\n console.log(` - ${words.join(\", \")}: ${def}`);\n });\n }\n break;\n }\n\n case \"hyponyms\": {\n const hyponyms = getHyponyms(index, word);\n if (hyponyms.length === 0) {\n console.log(`No hyponyms found for \"${word}\"`);\n } else {\n console.log(`Hyponyms for \"${word}\" (more specific):`);\n hyponyms.forEach((s) => {\n const words = getSynsetWords(index, s);\n const def = decode(s.definitions[0]?.inner);\n console.log(` - ${words.join(\", \")}: ${def}`);\n });\n }\n break;\n }\n\n case \"related\": {\n const synsets = findSynsets(index, word);\n if (synsets.length === 0) {\n console.log(`No synsets found for \"${word}\"`);\n break;\n }\n\n console.log(`Relations for \"${word}\":`);\n for (const synset of synsets) {\n const pos = PartsOfSpeech[synset.partOfSpeech] || synset.partOfSpeech;\n const def = decode(synset.definitions[0]?.inner);\n console.log(`\\n[${pos}] ${def}`);\n\n // Group relations by type\n const relsByType = new Map<string, string[]>();\n for (const rel of synset.synsetRelations) {\n const relatedSynset = index.synsets.get(rel.target);\n if (relatedSynset) {\n const words = getSynsetWords(index, relatedSynset);\n const existing = relsByType.get(rel.relType) || [];\n existing.push(words.join(\", \"));\n relsByType.set(rel.relType, existing);\n }\n }\n\n for (const [relType, words] of relsByType) {\n const label = SynsetRelationRelType[relType] || relType;\n console.log(` ${label}:`);\n for (const w of words) console.log(` - ${w}`);\n }\n }\n break;\n }\n\n case \"info\": {\n // word here is actually the synset ID\n const synset = index.synsets.get(word);\n if (!synset) {\n console.log(`Synset not found: ${word}`);\n break;\n }\n\n const pos = PartsOfSpeech[synset.partOfSpeech] || synset.partOfSpeech;\n const words = getSynsetWords(index, synset);\n\n console.log(`Synset: ${synset.id}`);\n console.log(`Words: ${words.join(\", \")}`);\n console.log(`Part of Speech: ${pos}`);\n console.log(`ILI: ${synset.ili}`);\n console.log(`\\nDefinitions:`);\n for (const d of synset.definitions) console.log(` - ${decode(d.inner)}`);\n\n if (synset.examples.length > 0) {\n console.log(`\\nExamples:`);\n for (const e of synset.examples)\n console.log(` - \"${decode(e.inner)}\"`);\n }\n\n if (synset.synsetRelations.length > 0) {\n console.log(`\\nRelations:`);\n for (const rel of synset.synsetRelations) {\n const label = SynsetRelationRelType[rel.relType] || rel.relType;\n const relatedSynset = index.synsets.get(rel.target);\n const relatedWords = relatedSynset\n ? 
getSynsetWords(index, relatedSynset).join(\", \")\n : rel.target;\n console.log(` ${label}: ${relatedWords}`);\n }\n }\n break;\n }\n\n default:\n console.error(`Unknown command: ${command}`);\n console.log(HELP);\n process.exit(1);\n }\n}\n\nmain().catch((err) => {\n console.error(\"Error:\", err.message);\n process.exit(1);\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oBAAqB;;;ACArB,iBAAkB;AAEX,IAAM,YAAY,aAAE,OAAO;AAC3B,IAAM,iBAAiB,aAAE,OAAO;AAChC,IAAM,WAAW,aAAE,OAAO;AAC1B,IAAM,UAAU,aAAE,OAAO;AACzB,IAAM,sBAAsB,aAAE,OAAO;AAGrC,IAAM,gBAAgB,aAAE,MAAM;AAAA,EACnC,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,GAAG;AACf,CAAC;AAGM,IAAM,uBAAuB,aAAE,MAAM;AAAA,EAC1C,aAAE,QAAQ,MAAM;AAAA,EAChB,aAAE,QAAQ,SAAS;AAAA,EACnB,aAAE,QAAQ,YAAY;AAAA,EACtB,aAAE,QAAQ,sBAAsB;AAAA,EAChC,aAAE,QAAQ,qBAAqB;AAAA,EAC/B,aAAE,QAAQ,eAAe;AAAA,EACzB,aAAE,QAAQ,cAAc;AAAA,EACxB,aAAE,QAAQ,aAAa;AAAA,EACvB,aAAE,QAAQ,mBAAmB;AAAA,EAC7B,aAAE,QAAQ,OAAO;AAAA;AAAA,EACjB,aAAE,QAAQ,YAAY;AAAA,EACtB,aAAE,QAAQ,WAAW;AAAA,EACrB,aAAE,QAAQ,SAAS;AACrB,CAAC;AAGM,IAAM,wBAAwB,aAAE,MAAM;AAAA,EAC3C,aAAE,QAAQ,MAAM;AAAA,EAChB,aAAE,QAAQ,WAAW;AAAA,EACrB,aAAE,QAAQ,OAAO;AAAA,EACjB,aAAE,QAAQ,QAAQ;AAAA,EAClB,aAAE,QAAQ,sBAAsB;AAAA,EAChC,aAAE,QAAQ,qBAAqB;AAAA,EAC/B,aAAE,QAAQ,eAAe;AAAA,EACzB,aAAE,QAAQ,cAAc;AAAA,EACxB,aAAE,QAAQ,QAAQ;AAAA,EAClB,aAAE,QAAQ,SAAS;AAAA,EACnB,aAAE,QAAQ,aAAa;AAAA,EACvB,aAAE,QAAQ,mBAAmB;AAAA,EAC7B,aAAE,QAAQ,kBAAkB;AAAA,EAC5B,aAAE,QAAQ,aAAa;AAAA,EACvB,aAAE,QAAQ,WAAW;AAAA,EACrB,aAAE,QAAQ,gBAAgB;AAAA,EAC1B,aAAE,QAAQ,UAAU;AAAA,EACpB,aAAE,QAAQ,SAAS;AAAA,EACnB,aAAE,QAAQ,mBAAmB;AAAA,EAC7B,aAAE,QAAQ,kBAAkB;AAAA,EAC5B,aAAE,QAAQ,cAAc;AAAA,EACxB,aAAE,QAAQ,gBAAgB;AAAA,EAC1B,aAAE,QAAQ,mBAAmB;AAAA,EAC7B,aAAE,QAAQ,gBAAgB;AAAA,EAC1B,aAAE,QAAQ,gBAAgB;AAAA,EAC1B,aAAE,QAAQ,aAAa;AAAA,EACvB,aAAE,QAAQ,WAAW;AAAA,EACrB,aAAE,QAAQ,gBAAgB;AAAA,EAC1B,aAAE,QAAQ,cAAc;AAAA,EACxB,aAAE,QAAQ,cAAc;AAAA,EACxB,aAAE,QAAQ,SAAS;AAAA,EACnB,aAAE,QAAQ,mBAAmB;AAAA,EAC7B,aAAE,QAAQ,mBAAmB;AAC/B,CAAC;AAEM,IAAM,cAAc,aAAE,MAAM;AAAA,EACjC,aAAE,QAAQ,GAAG;AAAA,EACb,aAAE,QAAQ,IAAI;AAAA,EACd,aAAE,QAAQ,GAAG;AACf,CAAC;AAEM,IAAM,gBAAgB,aAAE,OAAO;AAAA,EACpC,SAAS,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAC7B,OAAO,aAAE,OAAO;AAAA;AAClB,CAAC;AAEM,IAAM,QAAQ,aAAE,OAAO;AAAA,EAC5B,aAAa,aAAE,OAAO;AAAA;AAAA,EACtB,cAAc;AAAA,EACd,gBAAgB,aAAE,MAAM,aAAa,EAAE,IAAI,CAAC;AAC9C,CAAC;AAEM,IAAM,gBAAgB,aAAE,OAAO;AAAA,EACpC,SAAS;AAAA,EACT,QAAQ,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAC5B,QAAQ;AACV,CAAC;AAEM,IAAM,QAAQ,aAAE,OAAO;AAAA,EAC5B,IAAI;AAAA,EACJ,QAAQ;AAAA,EACR,QAAQ,oBAAoB,SAAS;AAAA,EACrC,aAAa,YAAY,SAAS;AAAA,EAClC,gBAAgB,aAAE,MAAM,aAAa,EAAE,IAAI,CAAC;AAC9C,CAAC;AAEM,IAAM,OAAO,aAAE,OAAO;AAAA,EAC3B,aAAa,aAAE,OAAO;AAAA;AACxB,CAAC;AAEM,IAAM,eAAe,aAAE,OAAO;AAAA,EACnC,IAAI;AAAA,EACJ,QAAQ,aAAE,MAAM,KAAK,EAAE,OAAO,CAAC;AAAA,EAC/B,QAAQ,aAAE,MAAM,KAAK,EAAE,IAAI,CAAC;AAAA,EAC5B,OAAO,aAAE,MAAM,IAAI,EAAE,IAAI,CAAC;AAC5B,CAAC;AAEM,IAAM,aAAa,aAAE,OAAO;AAAA,EACjC,OAAO,aAAE,OAAO;AAAA;AAClB,CAAC;AAEM,IAAM,UAAU,aAAE,OAAO;AAAA,EAC9B,OAAO,aAAE,OAAO;AAAA;AAAA,EAChB,UAAU,aAAE,OAAO,EAAE,SAAS;AAChC,CAAC;AAEM,IAAM,gBAAgB,aAAE,OAAO;AAAA,EACpC,OAAO,aAAE,OAAO;AAAA;AAClB,CAAC;AAEM,IAAM,iBAAiB,aAAE,OAAO;AAAA,EACrC,SAAS;AAAA,EACT,QAAQ;AACV,CAAC;AAEM,IAAM,SAAS,aAAE,OAAO;AAAA,EAC7B,IAAI;AAAA,EACJ,KAAK,aAAE,OAAO;AAAA,EACd,SAAS,aAAE,MAAM,cAAc,EAAE,IAAI,CAAC;AAAA;AAAA,EACtC,cAAc;AAAA,EACd,SAAS,aAAE,OAAO;AAAA,EAClB,UAAU,aAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,aAAa,aAAE,MAAM,UAAU,EAAE,IAAI,CAAC;AAAA,EACtC,UAAU
,aAAE,MAAM,OAAO,EAAE,IAAI,CAAC;AAAA,EAChC,gBAAgB,aAAE,MAAM,aAAa,EAAE,IAAI,CAAC;AAAA,EAC5C,iBAAiB,aAAE,MAAM,cAAc,EAAE,IAAI,CAAC;AAChD,CAAC;AAEM,IAAM,oBAAoB,aAAE,OAAO;AAAA,EACxC,IAAI;AAAA,EACJ,wBAAwB,aAAE,OAAO;AAAA;AACnC,CAAC;AAEM,IAAM,UAAU,aAAE,OAAO;AAAA,EAC9B,IAAI;AAAA;AAAA,EACJ,OAAO,aAAE,OAAO;AAAA;AAAA,EAChB,UAAU,aAAE,OAAO;AAAA;AAAA,EACnB,OAAO,aAAE,OAAO;AAAA;AAAA,EAChB,SAAS,aAAE,OAAO;AAAA;AAAA,EAClB,SAAS,aAAE,OAAO;AAAA;AAAA,EAClB,KAAK,aAAE,OAAO;AAAA;AAAA,EACd,UAAU,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAC9B,gBAAgB,aAAE,MAAM,YAAY,EAAE,IAAI,CAAC;AAAA,EAC3C,SAAS,aAAE,MAAM,MAAM,EAAE,IAAI,CAAC;AAAA,EAC9B,oBAAoB,aAAE,MAAM,iBAAiB,EAAE,IAAI,CAAC;AACtD,CAAC;AAgBM,IAAM,oBAA8B,cAAc,QAAQ;AAAA,EAC/D,CAAC,MAAM,EAAE;AACX;;;ACjKO,SAAS,kBAAkB,MAA2B;AAC3D,QAAM,MAAqB;AAAA,IACzB,SAAS,QAAQ,MAAM,SAAS;AAAA,IAChC,OAAO,KAAK;AAAA,EACd;AACA,SAAO,cAAc,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AACpE;AAEO,SAAS,UAAU,MAAmB;AAC3C,QAAM,MAAa;AAAA,IACjB,aAAa,KAAK,MAAM,aAAa;AAAA,IACrC,cAAc,cAAc,MAAM,KAAK,MAAM,cAAc,CAAC;AAAA,IAC5D;AAAA;AAAA,MACE,SAAS,MAAM,iBAAiB,CAAC,MAAM,kBAAkB,CAAC,CAAC;AAAA;AAAA,EAC/D;AACA,SAAO,MAAM,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AAC5D;AAEO,SAAS,kBAAkB,MAA2B;AAC3D,QAAM,MAAqB;AAAA,IACzB,SAAS,qBAAqB,MAAM,KAAK,MAAM,SAAS,CAAC;AAAA,IACzD,QAAQ,KAAK,MAAM,QAAQ;AAAA,IAC3B,QAAQ,QAAQ,MAAM,SAAS;AAAA,EACjC;AACA,SAAO,cAAc;AAAA,IACnB,mBAAmB,MAAM,KAAK,CAAC,MAAO,MAAM,YAAY,WAAW,CAAE;AAAA,EACvE;AACF;AAEO,SAAS,UAAU,MAAmB;AAC3C,QAAM,SAAS,QAAQ,MAAM,aAAa;AAC1C,QAAM,MAAa;AAAA,IACjB,IAAI,KAAK,MAAM,IAAI;AAAA,IACnB,QAAQ,SAAS,MAAM,KAAK,MAAM,QAAQ,CAAC;AAAA,IAC3C,gBAAgB,SAAS,MAAM,iBAAiB,iBAAiB;AAAA,IACjE,QAAQ,QAAQ,MAAM,QAAQ;AAAA,IAC9B,aAAa,SAAS,YAAY,MAAM,MAAM,IAAI;AAAA,EACpD;AACA,SAAO,MAAM;AAAA,IACX;AAAA,MAAmB;AAAA,MAAM;AAAA,MAAK,CAAC,MAC7B,MAAM,WAAW,WAAW,MAAM,gBAAgB,gBAAgB;AAAA,IACpE;AAAA,EACF;AACF;AAEO,SAAS,SAAS,MAAkB;AACzC,QAAM,MAAY;AAAA,IAChB,aAAa,KAAK,MAAM,aAAa;AAAA,EACvC;AACA,SAAO,KAAK,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AAC3D;AAEO,SAAS,iBAAiB,MAA0B;AACzD,QAAM,MAAoB;AAAA,IACxB,IAAI,KAAK,MAAM,IAAI;AAAA,IACnB,QAAQ,SAAS,MAAM,SAAS,SAAS;AAAA,IACzC,QAAQ,SAAS,MAAM,SAAS,SAAS;AAAA,IACzC,OAAO,SAAS,MAAM,QAAQ,QAAQ;AAAA,EACxC;AACA,SAAO,aAAa,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AACnE;AAEO,SAAS,eAAe,MAAwB;AACrD,QAAM,MAAkB;AAAA,IACtB,OAAO,KAAK;AAAA,EACd;AACA,SAAO,WAAW,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AACjE;AAEO,SAAS,YAAY,MAAqB;AAC/C,QAAM,MAAe;AAAA,IACnB,OAAO,KAAK;AAAA,IACZ,UAAU,QAAQ,MAAM,WAAW;AAAA,EACrC;AACA,SAAO,QAAQ;AAAA,IACb,mBAAmB,MAAM,KAAK,CAAC,MAAO,MAAM,cAAc,aAAa,CAAE;AAAA,EAC3E;AACF;AAEO,SAAS,kBAAkB,MAA2B;AAC3D,QAAM,MAAqB;AAAA,IACzB,OAAO,KAAK;AAAA,EACd;AACA,SAAO,cAAc,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AACpE;AAEO,SAAS,mBAAmB,MAA4B;AAC7D,QAAM,MAAsB;AAAA,IAC1B,SAAS,sBAAsB,MAAM,KAAK,MAAM,SAAS,CAAC;AAAA,IAC1D,QAAQ,KAAK,MAAM,QAAQ;AAAA,EAC7B;AACA,SAAO,eAAe,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AACrE;AAEO,SAAS,sBAAsB,MAA+B;AACnE,QAAM,MAAyB;AAAA,IAC7B,IAAI,KAAK,MAAM,IAAI;AAAA,IACnB,wBAAwB,KAAK,MAAM,wBAAwB;AAAA,EAC7D;AACA,SAAO,kBAAkB,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AACxE;AAEO,SAAS,WAAW,MAAoB;AAC7C,QAAM,MAAc;AAAA,IAClB,IAAI,KAAK,MAAM,IAAI;AAAA,IACnB,KAAK,KAAK,MAAM,KAAK;AAAA,IACrB,SAAS,KAAK,MAAM,SAAS;AAAA,IAC7B,SAAS,KAAK,MAAM,SAAS,EAAE,MAAM,GAAG;AAAA,IACxC,UAAU,QAAQ,MAAM,WAAW;AAAA,IACnC,cAAc,cAAc,MAAM,KAAK,MAAM,cAAc,CAAC;AAAA,IAC5D,aAAa,SAAS,MAAM,cAAc,CAAC,MAAM,eAAe,CAAC,CAAC;AAAA,IAClE,UAAU,SAAS,MAAM,WAAW,CAAC,MAAM,YAAY,CAAC,CAAC;AAAA,IACzD,gBAAgB,SAAS,MAAM,iBAAiB,iBAAiB;AAAA,IACjE,iBAAiB,SAAS,MAAM,kBAAkB,kBAAkB;AAAA,EACtE;AACA,SAAO,OAAO;AAAA,IACZ,mBAAmB,MAAM,KAAK,CAAC,MAAO,MAAM,cAAc,aAAa,CAAE;AAAA,EAC3E;AA
CF;AAKO,SAAS,YAAY,MAAqB;AAC/C,QAAM,MAAe;AAAA,IACnB,IAAI,KAAK,MAAM,IAAI;AAAA,IACnB,OAAO,KAAK,MAAM,OAAO;AAAA,IACzB,UAAU,KAAK,MAAM,UAAU;AAAA,IAC/B,OAAO,KAAK,MAAM,OAAO;AAAA,IACzB,SAAS,KAAK,MAAM,SAAS;AAAA,IAC7B,SAAS,KAAK,MAAM,SAAS;AAAA,IAC7B,UAAU,QAAQ,MAAM,UAAU;AAAA,IAClC,KAAK,KAAK,MAAM,KAAK;AAAA,IACrB,gBAAgB,SAAS,MAAM,gBAAgB,gBAAgB;AAAA,IAC/D,SAAS,SAAS,MAAM,UAAU,UAAU;AAAA,IAC5C;AAAA;AAAA,MACE,SAAS,MAAM,sBAAsB,qBAAqB;AAAA;AAAA,EAC9D;AACA,SAAO,QAAQ,MAAM,mBAAmB,MAAM,KAAK,CAAC,MAAM,CAAC,CAAC;AAC9D;AAEO,IAAM,oBAAoB,CAC/B,MACuB;AACvB,MAAI,MAAM,OAAW,QAAO;AAC5B,SAAO,EACJ,QAAQ,UAAU,GAAG,EACrB,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,WAAW,GAAG,EACtB,QAAQ,WAAW,GAAG;AAC3B;AAEA,IAAM,OAAO,CAAC,MAAY,aAA6B;AACrD,QAAM,QAAQ,kBAAkB,KAAK,WAAW,QAAQ,CAAC;AACzD,MAAI,UAAU,QAAW;AACvB,UAAM,IAAI;AAAA,MACR,+BAA+B,QAAQ,cAAc,KAAK,IAAI;AAAA,IAChE;AAAA,EACF;AACA,SAAO;AACT;AAEA,IAAM,UAAU,CAAC,MAAY,aAAyC;AACpE,SAAO,kBAAkB,KAAK,WAAW,QAAQ,CAAC;AACpD;AAKA,IAAM,YAAY,CAChB,MACA,KACA,UAC2B;AAC3B,QAAM,SAAiC,CAAC;AACxC,SAAO,KAAK,KAAK,UAAU,EACxB,OAAO,CAAC,MAAM,EAAE,MAAM,CAAC,KAAK,IAAI,EAChC,QAAQ,CAAC,MAAM;AACd,WAAO,CAAC,IAAI,kBAAkB,KAAK,WAAW,CAAC,CAAC,KAAK,KAAK,WAAW,CAAC;AAAA,EACxE,CAAC;AACH,SAAO;AACT;AAEA,IAAM,qBAAqB,CACzB,MACA,KACA,UACG;AACH,SAAO,OAAO,OAAO,KAAK,UAAU,MAAM,KAAK,KAAK,CAAC;AACvD;AAEA,IAAM,WAAW,CACf,MACA,MACA,OACG;AACH,SAAO,KAAK,SACT,OAAO,CAAC,MAAY,EAAE,SAAS,IAAI,EACnC,IAAI,CAAC,MAAY,GAAG,CAAC,CAAC;AAC3B;;;AFtNO,IAAM,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoCf,SAAS,eACd,SACA,YACA,UAAyB,CAAC,GACpB;AACN,QAAM,EAAE,WAAW,IAAI;AAGvB,QAAM,KAAK,IAAI,cAAAA,QAAS,UAAU;AAClC,KAAG,KAAK,2BAA2B;AACnC,KAAG,KAAK,0BAA0B;AAClC,KAAG,KAAK,MAAM;AAGd,QAAM,gBAAgB,oBAAI,IAA4B;AACtD,aAAW,SAAS,QAAQ,gBAAgB;AAC1C,UAAM,OAAO,MAAM,OAAO,CAAC,GAAG;AAC9B,QAAI,MAAM;AACR,YAAM,QAAQ,KAAK,YAAY;AAC/B,YAAM,WAAW,cAAc,IAAI,KAAK,KAAK,CAAC;AAC9C,eAAS,KAAK,KAAK;AACnB,oBAAc,IAAI,OAAO,QAAQ;AAAA,IACnC;AAAA,EACF;AAGA,QAAM,YAAY,oBAAI,IAAoB;AAC1C,aAAW,UAAU,QAAQ,SAAS;AACpC,cAAU,IAAI,OAAO,IAAI,MAAM;AAAA,EACjC;AAGA,QAAM,aAAa,GAAG;AAAA,IACpB;AAAA,EACF;AACA,QAAM,UAAU,oBAAI,IAAoB;AACxC,QAAM,QAAQ,MAAM,KAAK,cAAc,KAAK,CAAC,EAAE,KAAK;AACpD,QAAM,aAAa,MAAM;AAEzB,KAAG,KAAK,mBAAmB;AAC3B,MAAI,SAAS;AACb,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC;AACpB,UAAM,UAAU,cAAc,IAAI,IAAI;AACtC,QAAI,CAAC,QAAS;AAEd,UAAM,UAAU,QAAQ,CAAC,EAAE,OAAO,CAAC,GAAG,eAAe;AACrD,eAAW,IAAI,MAAM,OAAO;AAC5B;AACA,YAAQ,IAAI,MAAM,MAAM;AACxB,QAAI,cAAc,IAAI,QAAU,GAAG;AACjC,iBAAW,EAAE,OAAO,SAAS,SAAS,GAAG,OAAO,WAAW,CAAC;AAAA,IAC9D;AAAA,EACF;AACA,KAAG,KAAK,QAAQ;AAGhB,QAAM,gBAAgB,oBAAI,IAAY;AACtC,aAAW,WAAW,cAAc,OAAO,GAAG;AAC5C,eAAW,SAAS,SAAS;AAC3B,iBAAW,SAAS,MAAM,QAAQ;AAChC,sBAAc,IAAI,MAAM,MAAM;AAAA,MAChC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,eAAe,GAAG;AAAA,IACtB;AAAA,EACF;AACA,QAAM,aAAa,MAAM,KAAK,aAAa;AAC3C,QAAM,eAAe,WAAW;AAEhC,KAAG,KAAK,mBAAmB;AAC3B,WAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,UAAM,WAAW,WAAW,CAAC;AAC7B,UAAM,SAAS,UAAU,IAAI,QAAQ;AACrC,QAAI,QAAQ;AACV,YAAM,MAAM,kBAAkB,OAAO,YAAY,CAAC,GAAG,KAAK,KAAK;AAC/D,mBAAa,IAAI,UAAU,OAAO,cAAc,GAAG;AAAA,IACrD;AACA,QAAI,cAAc,IAAI,QAAU,GAAG;AACjC,iBAAW,EAAE,OAAO,WAAW,SAAS,GAAG,OAAO,aAAa,CAAC;AAAA,IAClE;AAAA,EACF;AACA,KAAG,KAAK,QAAQ;AAGhB,QAAM,iBAAiB,GAAG;AAAA,IACxB;AAAA,EACF;AACA,MAAI,gBAAgB;AACpB,QAAM,iBAAiB,MAAM,KAAK,cAAc,OAAO,CAAC,EAAE;AAAA,IACxD,CAAC,KAAK,YAAY,MAAM,QAAQ,OAAO,CAAC,GAAG,MAAM,IAAI,EAAE,OAAO,QAAQ,CAAC;AAAA,IACvE;AAAA,EACF;AAEA,KAAG,KAAK,mBAAmB;AAC3B,aAAW,CAAC,MAAM,OAAO,KAAK,eAAe;AAC3C,UAAMC,UAAS,QAAQ,IAAI,IAAI;AAC/B,QAAI,CAACA,QAAQ;AAEb,eAAW,SAAS,SAAS;AAC3B,iBAAW,SAAS,MAAM,QAAQ;AAChC,uBA
Ae,IAAIA,SAAQ,MAAM,MAAM;AACvC;AACA,YAAI,cAAc,gBAAgB,QAAU,GAAG;AAC7C,qBAAW;AAAA,YACT,OAAO;AAAA,YACP,SAAS;AAAA,YACT,OAAO;AAAA,UACT,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,KAAG,KAAK,QAAQ;AAEhB,KAAG,MAAM;AACX;;;AG9JO,IAAMC,iBAAwC;AAAA,EACnD,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AAAA,EACH,GAAG;AACL;AAqCO,IAAMC,yBAAgD;AAAA,EAC3D,OAAO;AAAA,EACP,MAAM;AAAA,EACN,eAAe;AAAA,EACf,eAAe;AAAA,EACf,aAAa;AAAA,EACb,SAAS;AAAA,EACT,WAAW;AAAA,EACX,cAAc;AAAA,EACd,aAAa;AAAA,EACb,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,eAAe;AAAA,EACf,YAAY;AAAA,EACZ,qBAAqB;AAAA,EACrB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,uBAAuB;AAAA,EACvB,sBAAsB;AAAA,EACtB,kBAAkB;AAAA,EAClB,uBAAuB;AAAA,EACvB,iBAAiB;AAAA,EACjB,sBAAsB;AAAA,EACtB,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,sBAAsB;AAAA,EACtB,qBAAqB;AAAA,EACrB,eAAe;AAAA,EACf,cAAc;AAAA,EACd,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,YAAY;AAAA,EACZ,aAAa;AAAA,EACb,UAAU;AAAA,EACV,kBAAkB;AAAA,EAClB,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,cAAc;AAAA,EACd,eAAe;AAAA,EACf,WAAW;AAAA,EACX,eAAe;AAAA,EACf,aAAa;AAAA,EACb,WAAW;AAAA,EACX,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,WAAW;AAAA,EACX,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,gBAAgB;AAAA,EAChB,oBAAoB;AAAA,EACpB,qBAAqB;AAAA,EACrB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,2BAA2B;AAAA,EAC3B,2BAA2B;AAAA,EAC3B,YAAY;AAAA,EACZ,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,mBAAmB;AAAA,EACnB,gBAAgB;AAAA,EAChB,UAAU;AAAA,EACV,WAAW;AAAA,EACX,WAAW;AAAA,EACX,gBAAgB;AAAA,EAChB,gBAAgB;AAAA,EAChB,eAAe;AAAA,EACf,aAAa;AAAA,EACb,WAAW;AAAA,EACX,cAAc;AAAA,EACd,gBAAgB;AAAA,EAChB,SAAS;AAAA,EACT,OAAO;AAAA,EACP,cAAc;AAAA,EACd,cAAc;AAAA,EACd,SAAS;AAAA,EACT,eAAe;AAAA,EACf,WAAW;AAAA,EACX,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,SAAS;AAAA,EACT,kBAAkB;AAAA,EAClB,UAAU;AAAA,EACV,UAAU;AAAA,EACV,mBAAmB;AAAA,EACnB,mBAAmB;AAAA,EACnB,kBAAkB;AAAA,EAClB,OAAO;AACT;;;ACpJA,qBAOO;AACP,uBAAiB;AACjB,yBAAyB;;;ACHlB,IAAM,OAAN,MAAW;AAAA,EAChB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEA,YAAY,MAAc,QAAe,KAAc;AACrD,SAAK,QAAQ;AACb,SAAK,UAAU;AACf,SAAK,OAAO;AACZ,SAAK,YAAY,CAAC;AAAA,EACpB;AAAA,EAEA,IAAI,OAAe;AACjB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,MAAc;AAChB,WAAO,KAAK,QAAQ;AAAA,EACtB;AAAA,EAEA,IAAI,SAA2B;AAC7B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,WAAwB;AAC1B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,aAAqC;AACvC,QAAI,KAAK,OAAO;AACd,aAAO,KAAK;AAAA,IACd;AAEA,SAAK,QAAQ,CAAC;AACd,QAAI,KAAK,KAAK;AACZ,YAAM,QAAQ;AACd,UAAI;AACJ,cAAQ,QAAQ,MAAM,KAAK,KAAK,GAAG,OAAO,MAAM;AAC9C,aAAK,MAAM,MAAM,CAAC,CAAC,IAAI,MAAM,CAAC;AAAA,MAChC;AAAA,IACF;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,IAAI,YAAoB;AACtB,QAAI,KAAK,SAAS,QAAQ;AACxB,UAAI,OAAO;AACX,iBAAW,SAAS,KAAK,UAAU;AACjC,gBAAQ,MAAM;AAAA,MAChB;AACA,aAAO;AAAA,IACT;AACA,YAAQ,KAAK,IAAI,MAAM,sBAAsB,KAAK,CAAC,EAAE,KAAK,GAAG,GAAG,CAAC;AAAA,EACnE;AAAA,EAEA,SAAS,OAAmB;AAC1B,SAAK,UAAU,KAAK,KAAK;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,MAA8B;AAClC,QAAI,CAAC,KAAK,OAAQ,QAAO;AACzB,QAAI;AACJ,eAAW,OAAO,KAAK,WAAW,GAAG;AACnC,eAAS,SAAS,OAAO,SAAS;AAClC,UAAI,QAAQ,SAAS,KAAK;AACxB,eAAO;AAAA,MACT;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,KAA+B;AACnC,WAAO,KAAK,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,GAAG;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,MAAkC;AACvC,QAAI,QAAiC,KAAK;AAC1C,QAAI,QAAqB,CAAC;AAC1B,eAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,IAAI,GAAG;AACzC,UAAI,OAAO,SAAS,CAAC,MAAM,KAAK,SAAS,GAAG;AAC1C,gBAAQ,MAAM,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AACxC;AAAA,MACF;AACA,cAAQ,OAAO,KAAK,CAAC,MAAM,EAAE,SAAS,CAAC,GAAG;AAC1C,UAAI,CAAC,MAAO,QAAO,CAAC;AAA
A,IACtB;AACA,WAAO;AAAA,EACT;AACF;;;ACnGA,IAAM,WAAW;AAAA,EACf,OAAO;AAAA,IACL,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,SAAS;AAAA,IACP,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,aAAa;AAAA,IACX,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,SAAS;AAAA,IACP,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AAAA,EACA,SAAS;AAAA,IACP,KAAK;AAAA,IACL,OAAO;AAAA,EACT;AACF;AAGO,IAAM,cAIT;AAAA,EACF,KAAK;AAAA,EACL;AAAA,EACA,UAAU,oBAAiB,EAAE;AAAA,EAC7B,MAAM,YAAY;AAEhB,QAAI,KAAK,IAAI,SAAS,GAAG;AACvB,iBAAW,QAAQ,oBAAgB,KAAK,GAAG,CAAC;AAAA,IAC9C;AAAA,EACF;AAAA,EACA,UAAU,OAAO,YAAY;AAC3B,SAAK,OAAO;AACZ,WAAO,KAAK,IAAI,QAAQ;AAEtB,UACE,KAAK,UAAU,KAAK,SAAS,CAAC,KAC9B,KAAK,IAAI,WAAW,KAAK,SAAS,CAAC,GACnC;AACA;AAAA,MACF;AACA,WAAK,WAAW,CAAC,KAAK,OAAO,KAAK,IAAI,MAAM;AAE5C,UAAI,KAAK,6BAA0B;AACjC,cAAM,QAAQ,KAAK,IAAI,QAAQ,GAAG;AAClC,YAAI,QAAQ,EAAG;AAEf,mBAAW,QAAQ,oBAAgB,KAAK,IAAI,UAAU,GAAG,KAAK,CAAC,CAAC;AAChE,aAAK,MAAM,KAAK,IAAI,UAAU,KAAK;AACnC,aAAK;AAAA,MACP;AAEA,UAAI,KAAK,iCAA4B;AACnC,YAAI,KAAK,IAAI,SAAS,EAAG;AACzB,mBAAW,CAAC,OAAO,MAAM,KAAK,OAAO,QAAQ,QAAQ,GAAG;AACtD,cAAI,KAAK,IAAI,MAAM,OAAO,KAAK,GAAG;AAChC,iBAAK,QAAQ;AACb;AAAA,UACF;AAAA,QACF;AACA;AAAA,MACF;AAEA,UAAI,OAAO,OAAO,UAAU,KAAK,KAAK,GAAG;AACvC,cAAM,EAAC,IAAG,IAAI,SAAS,KAAK,KAA8B;AAC1D,cAAM,QAAQ,KAAK,IAAI,QAAQ,GAAG;AAClC,YAAI,QAAQ,EAAG;AACf,mBAAW,QAAQ;AAAA,UACjB,KAAK;AAAA,UACL,KAAK,IAAI,UAAU,GAAG,QAAQ,IAAI,MAAM;AAAA,QAC1C,CAAC;AACD,aAAK,MAAM,KAAK,IAAI,UAAU,QAAQ,IAAI,MAAM;AAChD,aAAK;AACL;AAAA,MACF;AAEA,YAAM,IAAI,MAAM;AAAA,IAClB;AAAA,EACF;AACF;AAGO,IAAM,YAAN,cAAwB,gBAA4C;AAAA,EACzE,cAAc;AACZ,UAAM,EAAC,GAAG,YAAW,CAAC;AAAA,EACxB;AACF;;;AC5FA,IAAM,cAA6D;AAAA,EACjE,wBAAiB,GAAG;AAAA,EACpB,gCAAqB,GAAG;AAAA,EACxB,wBAAiB,GAAG;AACtB;AAQA,gBAAuB,MACrB,OACA,SACyC;AACzC,QAAM,WAAW,IAAI,KAAK,WAAW;AACrC,MAAI;AACF,UAAM,OAAO,EAAC,GAAG,SAAS,aAAY;AACtC,QAAI,SAAS,QAAQ;AACnB,WAAK,SAAS,QAAQ;AAAA,IACxB;AAEA,QAAI;AAGJ,QAAI,OAAO,UAAU,YAAY,iBAAiB,KAAK;AACrD,cAAQ,IAAI,IAAI,KAAK;AACrB,YAAM,WAAW,MAAM,MAAM,OAAO,IAAI;AACxC,UAAI,CAAC,SAAS,MAAM,CAAC,SAAS,MAAM;AAClC,cAAM,IAAI,MAAM,cAAc;AAAA,MAChC;AACA,eAAS,SAAS;AAAA,IACpB,OAAO;AACL,eAAS;AAAA,IACX;AAEA,UAAM,SAAS,OACZ,YAAY,IAAI,kBAAkB,CAAC,EACnC,YAAY,IAAI,UAAU,GAAG;AAAA,MAC5B,QAAQ,SAAS;AAAA,IACnB,CAAC;AAGH,QAAI,OAAO;AAEX,qBAAiB,CAAC,MAAM,KAAK,KAAK,QAAQ;AACxC,UAAI,SAAS,QAAQ,SAAS;AAC5B;AAAA,MACF;AAEA,UAAI,4BAAwB;AAC1B,YAAI,SAAS,qBAAqB,SAAS,MAAM,KAAK,EAAE,WAAW,GAAG;AACpE;AAAA,QACF;AAAA,MACF;AAEA,UAAI,QAAQ,eAAe,UAAU,YAAY,IAAI,CAAE,MAAM,OAAO;AAClE,cAAM,UAAU,IAAI,KAAK,MAAM,MAAM,KAAK;AAC1C,aAAK,SAAS,OAAO;AACrB,cAAM;AACN;AAAA,MACF;AAEA,UAAI,kCAA2B;AAC7B,cAAM,OAAO,MAAM,MAAM,gBAAgB,EAAG,CAAC;AAE7C,YAAI,MAAM,SAAS,IAAI,GAAG;AACxB,gBAAMC,WAAU,IAAI,KAAK,MAAM,MAAM,KAAK;AAC1C,eAAK,SAASA,QAAO;AACrB,gBAAMA;AACN;AAAA,QACF;AAEA,YAAI,MAAM,WAAW,IAAI,GAAG;AAC1B,gBAAM;AACN,iBAAO,KAAK;AACZ;AAAA,QACF;AAEA,cAAM,UAAU,IAAI,KAAK,MAAM,MAAM,KAAK;AAC1C,aAAK,SAAS,OAAO;AACrB,eAAO;AACP;AAAA,MACF;AAEA,WAAK,SAAS,IAAI,KAAK,MAAM,MAAM,KAAK,CAAC;AAAA,IAC3C;AAAA,EACF,SAAS,KAAK;AACZ,QAAI,SAAS,WAAW,OAAO;AAC7B,YAAM;AAAA,IACR;AAAA,EACF;AACA,SAAO;AACT;;;AHzFO,IAAM,eAAe;AAGrB,SAAS,YAAY,SAAyB;AACnD,SAAO,mBAAmB,OAAO;AACnC;AAGO,SAAS,eAAe,SAAyB;AACtD,SAAO,8BAA8B,YAAY,OAAO,CAAC;AAC3D;AAGO,SAAS,qBAA6B;AAC3C,QAAM,UAAU,QAAQ,IAAI,QAAQ,QAAQ,IAAI,eAAe;AAC/D,SAAO,iBAAAC,QAAK,KAAK,SAAS,UAAU,QAAQ;AAC9C;AAGA,SAAS,WAAW,UAA2B;AAC7C,UAAI,2BAAW,QAAQ,GAAG;AACxB,UAAM,WAAO,yBAAS,QAAQ;AAC9B,WAAO,KAAK,OAAO;AAAA,EACrB;AACA,SAAO;AACT;AAGA,eAAe,UAAU,KAA+B;AACtD,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,KAAK,EAAE,QAAQ,OAAO,CAAC;AACpD,WAAO,SAAS;AAAA,EAClB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGA,SAAS,2BAA2B,UAAiC;AACnE,QAAM,QAAQ,SAAS,MAAM,8BAA8B;AAC3D,SAAO,QAAQ,SAAS,MAAM,CAAC,
GAAG,EAAE,IAAI;AAC1C;AAGA,SAAS,kBAAkB,UAAiC;AAC1D,MAAI,KAAC,2BAAW,QAAQ,EAAG,QAAO;AAElC,QAAM,YAAQ,4BAAY,QAAQ;AAClC,QAAM,eAAe,MAClB,IAAI,CAAC,OAAO,EAAE,MAAM,GAAG,MAAM,2BAA2B,CAAC,EAAE,EAAE,EAC7D,OAAO,CAAC,MAA2C,EAAE,SAAS,IAAI,EAClE,KAAK,CAAC,GAAG,MAAM,EAAE,OAAO,EAAE,IAAI;AAEjC,SAAO,aAAa,SAAS,IAAI,aAAa,CAAC,EAAE,KAAK,SAAS,IAAI;AACrE;AASA,eAAsB,kBACpB,YACA,UACiB;AACjB,QAAM,MAAM,eAAe,MAAM;AAAA,EAAC;AAClC,QAAM,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC3C,QAAM,qBAAqB,cAAc;AACzC,QAAM,WAAW,SAAS,cAAc,EAAE;AAC1C,QAAM,MAAM,YAAY,mBAAmB;AAG3C,QAAM,gBAAgB,kBAAkB,GAAG;AAC3C,MAAI,eAAe;AACjB,UAAM,aAAa,SAAS,eAAe,EAAE;AAG7C,QAAI,cAAc,oBAAoB;AACpC,aAAO;AAAA,IACT;AAGA,QAAI,+BAA+B;AACnC,aAAS,OAAO,aAAa,GAAG,QAAQ,oBAAoB,QAAQ;AAClE,YAAM,UAAU,KAAK,SAAS;AAC9B,UAAI,MAAM,UAAU,eAAe,OAAO,CAAC,GAAG;AAC5C,YAAI,SAAS,OAAO,EAAE;AACtB,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAGA,MAAI,gCAAgC;AACpC,MAAI,MAAM,UAAU,eAAe,YAAY,CAAC,GAAG;AAEjD,aAAS,OAAO,WAAW,GAAG,QAAQ,oBAAoB,QAAQ;AAChE,YAAM,UAAU,KAAK,SAAS;AAC9B,UAAI,MAAM,UAAU,eAAe,OAAO,CAAC,GAAG;AAAA,MAC9C,OAAO;AACL,gBAAQ,OAAO,GAAG,SAAS;AAAA,MAC7B;AAAA,IACF;AAEA,WAAO,mBAAmB,SAAS;AAAA,EACrC;AAGA,WAAS,OAAO,WAAW,GAAG,QAAQ,oBAAoB,QAAQ;AAChE,UAAM,UAAU,KAAK,SAAS;AAC9B,QAAI,MAAM,UAAU,eAAe,OAAO,CAAC,GAAG;AAC5C,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR,oCAAoC,YAAY,QAAQ,kBAAkB;AAAA,EAC5E;AACF;AAGA,eAAe,gBACb,SACA,UACe;AACf,QAAM,MAAM,eAAe,OAAO;AAClC,QAAM,WAAW,MAAM,MAAM,GAAG;AAChC,MAAI,CAAC,SAAS,MAAM,CAAC,SAAS,MAAM;AAClC,UAAM,IAAI;AAAA,MACR,8BAA8B,OAAO,KAAK,SAAS,UAAU;AAAA,IAC/D;AAAA,EACF;AAEA,QAAM,eAAe,SAAS,KAAK;AAAA,IACjC,IAAI,oBAAoB,MAAM;AAAA,EAChC;AACA,QAAM,cAAc,MAAM,IAAI,SAAS,YAAY,EAAE,YAAY;AAGjE,QAAM,MAAM,iBAAAA,QAAK,QAAQ,QAAQ;AACjC,MAAI,KAAC,2BAAW,GAAG,GAAG;AACpB,kCAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACpC;AAEA,oCAAc,UAAU,OAAO,KAAK,WAAW,CAAC;AAClD;AAGO,SAAS,aAAa,UAAkB;AAC7C,QAAM,eAAe,iBAAAA,QAAK,QAAQ,QAAQ;AAC1C,QAAM,iBAAa,iCAAiB,YAAY;AAChD,QAAM,YAAY,4BAAS,MAAM,UAAU;AAC3C,SAAO,MAAM,WAAW;AAAA,IACtB,mBAAmB;AAAA,IACnB,QAAQ;AAAA,EACV,CAAC;AACH;AAGA,eAAsB,aAEpB,QAC8B;AAC9B,mBAAiB,QAAQ,QAAQ;AAC/B,QAAI,KAAK,SAAS,WAAW;AAC3B,aAAO,YAAY,IAAI;AAAA,IACzB;AAAA,EACF;AACA,SAAO;AACT;AAuBA,eAAsB,YAAY,UAAoC;AACpE,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,UAAM,IAAI,MAAM,2BAA2B,QAAQ,EAAE;AAAA,EACvD;AAEA,QAAM,SAAS,aAAa,QAAQ;AACpC,QAAM,UAAU,MAAM,aAAa,MAAM;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,SAAO;AACT;AAQA,eAAsB,aACpB,UAAuB,CAAC,GACH;AACrB,QAAM,WAAW,QAAQ,YAAY,mBAAmB;AACxD,QAAM,MAAM,QAAQ,eAAe,MAAM;AAAA,EAAC;AAG1C,QAAM,UAAU,QAAQ,WAAY,MAAM,kBAAkB,KAAK,QAAQ;AACzE,QAAM,WAAW,YAAY,OAAO;AACpC,QAAM,aAAa,iBAAAA,QAAK,KAAK,UAAU,QAAQ;AAE/C,MAAI,CAAC,WAAW,UAAU,KAAK,QAAQ,eAAe;AACpD,UAAM,MAAM,eAAe,OAAO;AAClC,QAAI,uBAAuB,OAAO,SAAS,GAAG,EAAE;AAChD,UAAM,gBAAgB,SAAS,UAAU;AACzC,QAAI,YAAY,UAAU,EAAE;AAAA,EAC9B,OAAO;AACL,QAAI,gBAAgB,UAAU,EAAE;AAAA,EAClC;AAEA,QAAM,UAAU,MAAM,YAAY,UAAU;AAC5C,SAAO,EAAE,SAAS,SAAS,UAAU,WAAW;AAClD;AAOA,eAAsB,oBACpB,UAAuB,CAAC,GACwB;AAChD,QAAM,WAAW,QAAQ,YAAY,mBAAmB;AACxD,QAAM,MAAM,QAAQ,eAAe,MAAM;AAAA,EAAC;AAG1C,QAAM,UAAU,QAAQ,WAAY,MAAM,kBAAkB,KAAK,QAAQ;AACzE,QAAM,WAAW,YAAY,OAAO;AACpC,QAAM,aAAa,iBAAAA,QAAK,KAAK,UAAU,QAAQ;AAE/C,MAAI,CAAC,WAAW,UAAU,KAAK,QAAQ,eAAe;AACpD,UAAM,MAAM,eAAe,OAAO;AAClC,QAAI,uBAAuB,OAAO,SAAS,GAAG,EAAE;AAChD,UAAM,gBAAgB,SAAS,UAAU;AACzC,QAAI,YAAY,UAAU,EAAE;AAAA,EAC9B,OAAO;AACL,QAAI,gBAAgB,UAAU,EAAE;AAAA,EAClC;AAEA,SAAO,EAAE,UAAU,YAAY,QAAQ;AACzC;AAIO,IAAM,mBAAmB,YAAY,YAAY;AACjD,IAAM,cAAc,eAAe,YAAY;;;AI/O/C,SAAS,WAAW,SAAgC;AACzD,QAAM,UAAU,oBAAI,IAAoB;AACxC,QAAM,SAAS,oBAAI,IAAmB;AACtC,QAAM,UAAU,oBAAI,IAA0B;AAC9C,QAAM,SAAS,oBAAI,IAA4B;AAC/C,QAAM,eAAe,oBAAI,IAAqB;AAC9C,QAAM,gBAAgB,oB
AAI,IAAsB;AAGhD,aAAW,UAAU,QAAQ,SAAS;AACpC,YAAQ,IAAI,OAAO,IAAI,MAAM;AAAA,EAC/B;AAGA,aAAW,SAAS,QAAQ,gBAAgB;AAC1C,YAAQ,IAAI,MAAM,IAAI,KAAK;AAE3B,UAAM,OAAO,MAAM,OAAO,CAAC,GAAG,YAAY,YAAY;AACtD,QAAI,MAAM;AAER,YAAM,WAAW,OAAO,IAAI,IAAI,KAAK,CAAC;AACtC,eAAS,KAAK,KAAK;AACnB,aAAO,IAAI,MAAM,QAAQ;AAGzB,iBAAW,SAAS,MAAM,QAAQ;AAChC,eAAO,IAAI,MAAM,IAAI,KAAK;AAE1B,cAAM,iBAAiB,aAAa,IAAI,IAAI,KAAK,CAAC;AAClD,uBAAe,KAAK,KAAK;AACzB,qBAAa,IAAI,MAAM,cAAc;AAGrC,cAAM,SAAS,QAAQ,IAAI,MAAM,MAAM;AACvC,YAAI,QAAQ;AACV,gBAAM,kBAAkB,cAAc,IAAI,IAAI,KAAK,CAAC;AACpD,cAAI,CAAC,gBAAgB,SAAS,MAAM,GAAG;AACrC,4BAAgB,KAAK,MAAM;AAC3B,0BAAc,IAAI,MAAM,eAAe;AAAA,UACzC;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AA+BO,SAAS,YAAY,OAAqB,MAAwB;AACvE,SAAO,MAAM,cAAc,IAAI,KAAK,YAAY,CAAC,KAAK,CAAC;AACzD;AAGO,SAAS,eACd,OACA,MACoB;AACpB,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,SAAO,QAAQ;AAAA,IAAQ,CAAC,WACtB,OAAO,YAAY,IAAI,CAAC,OAAO;AAAA,MAC7B,MAAM,EAAE;AAAA,MACR;AAAA,MACA,cAAc,OAAO;AAAA,IACvB,EAAE;AAAA,EACJ;AACF;AAGO,SAAS,WACd,OACA,QACA,SACU;AACV,SAAO,OAAO,gBACX,OAAO,CAAC,MAAsB,EAAE,YAAY,OAAO,EACnD,IAAI,CAAC,MAAsB,MAAM,QAAQ,IAAI,EAAE,MAAM,CAAC,EACtD,OAAO,CAAC,MAAmB,MAAM,MAAS;AAC/C;AAGO,SAAS,aAAa,OAAqB,MAAwB;AACxE,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,SAAO,QAAQ,QAAQ,CAAC,MAAM,WAAW,OAAO,GAAG,UAAU,CAAC;AAChE;AAGO,SAAS,YAAY,OAAqB,MAAwB;AACvE,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,SAAO,QAAQ,QAAQ,CAAC,MAAM,WAAW,OAAO,GAAG,SAAS,CAAC;AAC/D;AAGO,SAAS,YACd,OACA,MACiB;AACjB,QAAM,UAAU,YAAY,OAAO,IAAI;AACvC,QAAM,YAAY,KAAK,YAAY;AACnC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,UAA2B,CAAC;AAElC,aAAW,UAAU,SAAS;AAC5B,eAAW,YAAY,OAAO,SAAS;AACrC,YAAM,QAAQ,MAAM,QAAQ,IAAI,QAAQ;AACxC,UAAI,OAAO;AACT,cAAM,QAAQ,MAAM,OAAO,CAAC,GAAG;AAC/B,YAAI,SAAS,MAAM,YAAY,MAAM,aAAa,CAAC,KAAK,IAAI,KAAK,GAAG;AAClE,eAAK,IAAI,KAAK;AACd,kBAAQ,KAAK,EAAE,MAAM,OAAO,OAAO,OAAO,CAAC;AAAA,QAC7C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAGO,SAAS,eAAe,OAAqB,QAA0B;AAC5E,SAAO,OAAO,QACX,IAAI,CAAC,OAAO,MAAM,QAAQ,IAAI,EAAE,CAAC,EACjC,OAAO,CAAC,MAAyB,MAAM,MAAS,EAChD,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,WAAW,EACnC,OAAO,CAAC,MAAmB,MAAM,MAAS;AAC/C;;;ACjMA,IAAM,SAAS,CAAC,MAAkC,kBAAkB,CAAC,KAAK;AAE1E,IAAM,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4Bb,eAAe,OAAO;AACpB,QAAM,OAAO,QAAQ,KAAK,MAAM,CAAC;AAEjC,MAAI,KAAK,WAAW,KAAK,KAAK,SAAS,QAAQ,KAAK,KAAK,SAAS,IAAI,GAAG;AACvE,YAAQ,IAAI,IAAI;AAChB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,KAAK,CAAC;AACtB,QAAM,YAAY,KAAK,QAAQ,QAAQ;AACvC,QAAM,WAAW,cAAc,KAAK,KAAK,YAAY,CAAC,IAAI;AAG1D,QAAM,YACJ,cAAc,KACV,OACA,KAAK,OAAO,CAAC,GAAG,MAAM,MAAM,aAAa,MAAM,YAAY,CAAC;AAClE,QAAM,OAAO,UAAU,CAAC;AAExB,MAAI,YAAY,SAAS;AACvB,YAAQ,IAAI,6BAA6B;AACzC,UAAM,EAAE,UAAU,YAAY,QAAQ,IAAI,MAAM,oBAAoB;AAAA,MAClE,eAAe,KAAK,SAAS,SAAS;AAAA,MACtC,YAAY,QAAQ;AAAA,IACtB,CAAC;AACD,YAAQ,IAAI,WAAW,OAAO,eAAe,UAAU,EAAE;AACzD;AAAA,EACF;AAEA,MAAI,YAAY,iBAAiB;AAC/B,UAAM,aAAa,UAAU,CAAC;AAC9B,QAAI,CAAC,YAAY;AACf,cAAQ,MAAM,8CAA8C;AAC5D,cAAQ,KAAK,CAAC;AAAA,IAChB;AACA,YAAQ,IAAI,yBAAyB;AACrC,UAAMC,WAAU,WACZ,MAAM,YAAY,QAAQ,KACzB,MAAM,aAAa,EAAE,YAAY,QAAQ,IAAI,CAAC,GAAG;AACtD,YAAQ,IAAI,gBAAgB,UAAU,KAAK;AAC3C,mBAAeA,UAAS,YAAY;AAAA,MAClC,YAAY,CAAC,EAAE,OAAO,SAAS,MAAM,MAAM;AACzC,gBAAQ,OAAO,MAAM,KAAK,KAAK,KAAK,OAAO,IAAI,KAAK,EAAE;AAAA,MACxD;AAAA,IACF,CAAC;AACD,YAAQ,IAAI;AAAA,cAAiB,UAAU,EAAE;AACzC;AAAA,EACF;AAEA,MAAI,CAAC,QAAQ,YAAY,SAAS;AAChC,YAAQ,MAAM,6CAA6C,OAAO,GAAG;AACrE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,UAAU,WACZ,MAAM,YAAY,QAAQ,KACzB,MAAM,aAAa,EAAE,YAAY,QAAQ,IAAI,CAAC,GAAG;AAEtD,QAAM
,QAAQ,WAAW,OAAO;AAEhC,UAAQ,SAAS;AAAA,IACf,KAAK,UAAU;AACb,YAAM,cAAc,eAAe,OAAO,IAAI;AAC9C,UAAI,YAAY,WAAW,GAAG;AAC5B,gBAAQ,IAAI,6BAA6B,IAAI,GAAG;AAAA,MAClD,OAAO;AACL,gBAAQ,IAAI,oBAAoB,IAAI,IAAI;AACxC,oBAAY,QAAQ,CAAC,KAAK,MAAM;AAC9B,gBAAM,MAAMC,eAAc,IAAI,YAAY,KAAK,IAAI;AACnD,kBAAQ,IAAI,KAAK,IAAI,CAAC,MAAM,GAAG,KAAK,OAAO,IAAI,IAAI,CAAC,EAAE;AAAA,QACxD,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAAA,IAEA,KAAK,YAAY;AACf,YAAM,WAAW,YAAY,OAAO,IAAI;AACxC,UAAI,SAAS,WAAW,GAAG;AACzB,gBAAQ,IAAI,0BAA0B,IAAI,GAAG;AAAA,MAC/C,OAAO;AACL,gBAAQ,IAAI,iBAAiB,IAAI,IAAI;AACrC,gBAAQ,IAAI,KAAK,SAAS,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,EAAE;AAAA,MAC3D;AACA;AAAA,IACF;AAAA,IAEA,KAAK,aAAa;AAChB,YAAM,YAAY,aAAa,OAAO,IAAI;AAC1C,UAAI,UAAU,WAAW,GAAG;AAC1B,gBAAQ,IAAI,2BAA2B,IAAI,GAAG;AAAA,MAChD,OAAO;AACL,gBAAQ,IAAI,kBAAkB,IAAI,mBAAmB;AACrD,kBAAU,QAAQ,CAAC,MAAM;AACvB,gBAAM,QAAQ,eAAe,OAAO,CAAC;AACrC,gBAAM,MAAM,OAAO,EAAE,YAAY,CAAC,GAAG,KAAK;AAC1C,kBAAQ,IAAI,OAAO,MAAM,KAAK,IAAI,CAAC,KAAK,GAAG,EAAE;AAAA,QAC/C,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAAA,IAEA,KAAK,YAAY;AACf,YAAM,WAAW,YAAY,OAAO,IAAI;AACxC,UAAI,SAAS,WAAW,GAAG;AACzB,gBAAQ,IAAI,0BAA0B,IAAI,GAAG;AAAA,MAC/C,OAAO;AACL,gBAAQ,IAAI,iBAAiB,IAAI,oBAAoB;AACrD,iBAAS,QAAQ,CAAC,MAAM;AACtB,gBAAM,QAAQ,eAAe,OAAO,CAAC;AACrC,gBAAM,MAAM,OAAO,EAAE,YAAY,CAAC,GAAG,KAAK;AAC1C,kBAAQ,IAAI,OAAO,MAAM,KAAK,IAAI,CAAC,KAAK,GAAG,EAAE;AAAA,QAC/C,CAAC;AAAA,MACH;AACA;AAAA,IACF;AAAA,IAEA,KAAK,WAAW;AACd,YAAM,UAAU,YAAY,OAAO,IAAI;AACvC,UAAI,QAAQ,WAAW,GAAG;AACxB,gBAAQ,IAAI,yBAAyB,IAAI,GAAG;AAC5C;AAAA,MACF;AAEA,cAAQ,IAAI,kBAAkB,IAAI,IAAI;AACtC,iBAAW,UAAU,SAAS;AAC5B,cAAM,MAAMA,eAAc,OAAO,YAAY,KAAK,OAAO;AACzD,cAAM,MAAM,OAAO,OAAO,YAAY,CAAC,GAAG,KAAK;AAC/C,gBAAQ,IAAI;AAAA,GAAM,GAAG,KAAK,GAAG,EAAE;AAG/B,cAAM,aAAa,oBAAI,IAAsB;AAC7C,mBAAW,OAAO,OAAO,iBAAiB;AACxC,gBAAM,gBAAgB,MAAM,QAAQ,IAAI,IAAI,MAAM;AAClD,cAAI,eAAe;AACjB,kBAAM,QAAQ,eAAe,OAAO,aAAa;AACjD,kBAAM,WAAW,WAAW,IAAI,IAAI,OAAO,KAAK,CAAC;AACjD,qBAAS,KAAK,MAAM,KAAK,IAAI,CAAC;AAC9B,uBAAW,IAAI,IAAI,SAAS,QAAQ;AAAA,UACtC;AAAA,QACF;AAEA,mBAAW,CAAC,SAAS,KAAK,KAAK,YAAY;AACzC,gBAAM,QAAQC,uBAAsB,OAAO,KAAK;AAChD,kBAAQ,IAAI,KAAK,KAAK,GAAG;AACzB,qBAAW,KAAK,MAAO,SAAQ,IAAI,SAAS,CAAC,EAAE;AAAA,QACjD;AAAA,MACF;AACA;AAAA,IACF;AAAA,IAEA,KAAK,QAAQ;AAEX,YAAM,SAAS,MAAM,QAAQ,IAAI,IAAI;AACrC,UAAI,CAAC,QAAQ;AACX,gBAAQ,IAAI,qBAAqB,IAAI,EAAE;AACvC;AAAA,MACF;AAEA,YAAM,MAAMD,eAAc,OAAO,YAAY,KAAK,OAAO;AACzD,YAAM,QAAQ,eAAe,OAAO,MAAM;AAE1C,cAAQ,IAAI,WAAW,OAAO,EAAE,EAAE;AAClC,cAAQ,IAAI,UAAU,MAAM,KAAK,IAAI,CAAC,EAAE;AACxC,cAAQ,IAAI,mBAAmB,GAAG,EAAE;AACpC,cAAQ,IAAI,QAAQ,OAAO,GAAG,EAAE;AAChC,cAAQ,IAAI;AAAA,aAAgB;AAC5B,iBAAW,KAAK,OAAO,YAAa,SAAQ,IAAI,OAAO,OAAO,EAAE,KAAK,CAAC,EAAE;AAExE,UAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,gBAAQ,IAAI;AAAA,UAAa;AACzB,mBAAW,KAAK,OAAO;AACrB,kBAAQ,IAAI,QAAQ,OAAO,EAAE,KAAK,CAAC,GAAG;AAAA,MAC1C;AAEA,UAAI,OAAO,gBAAgB,SAAS,GAAG;AACrC,gBAAQ,IAAI;AAAA,WAAc;AAC1B,mBAAW,OAAO,OAAO,iBAAiB;AACxC,gBAAM,QAAQC,uBAAsB,IAAI,OAAO,KAAK,IAAI;AACxD,gBAAM,gBAAgB,MAAM,QAAQ,IAAI,IAAI,MAAM;AAClD,gBAAM,eAAe,gBACjB,eAAe,OAAO,aAAa,EAAE,KAAK,IAAI,IAC9C,IAAI;AACR,kBAAQ,IAAI,KAAK,KAAK,KAAK,YAAY,EAAE;AAAA,QAC3C;AAAA,MACF;AACA;AAAA,IACF;AAAA,IAEA;AACE,cAAQ,MAAM,oBAAoB,OAAO,EAAE;AAC3C,cAAQ,IAAI,IAAI;AAChB,cAAQ,KAAK,CAAC;AAAA,EAClB;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,MAAM,UAAU,IAAI,OAAO;AACnC,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":["Database","wordId","PartsOfSpeech","SynsetRelationRelType","newNode","path","lexicon","PartsOfSpeech","SynsetRelationRelType"]}
|
package/dist/cli.js
CHANGED
|
@@ -338,8 +338,8 @@ var children = (node, type, fn) => {
|
|
|
338
338
|
var SCHEMA = `
|
|
339
339
|
CREATE TABLE IF NOT EXISTS words (
|
|
340
340
|
id INTEGER PRIMARY KEY,
|
|
341
|
-
word TEXT NOT NULL,
|
|
342
|
-
word_display TEXT NOT NULL
|
|
341
|
+
word TEXT NOT NULL, -- lowercase for search
|
|
342
|
+
word_display TEXT NOT NULL -- original casing
|
|
343
343
|
);
|
|
344
344
|
CREATE INDEX IF NOT EXISTS idx_words_word ON words(word);
|
|
345
345
|
|