@dev-pi2pie/word-counter 0.0.9-canary.1 β†’ 0.0.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -106,9 +106,37 @@ import wordCounter, {
106
106
  wordCounter("Hello world", { latinLocaleHint: "en" });
107
107
  wordCounter("Hi πŸ‘‹, world!", { nonWords: true });
108
108
  wordCounter("Hi πŸ‘‹, world!", { mode: "char", nonWords: true });
109
+ wordCounter("Hi\tthere\n", { nonWords: true, includeWhitespace: true });
109
110
  countCharsForLocale("πŸ‘‹", "en");
110
111
  ```
111
112
 
113
+ Note: `includeWhitespace` only affects results when `nonWords: true` is enabled.
114
+
115
+ Sample output (with `nonWords: true` and `includeWhitespace: true`):
116
+
117
+ ```json
118
+ {
119
+ "total": 4,
120
+ "counts": { "words": 2, "nonWords": 2, "total": 4 },
121
+ "breakdown": {
122
+ "mode": "chunk",
123
+ "items": [
124
+ {
125
+ // ...
126
+ "words": 2,
127
+ "nonWords": {
128
+ "emoji": [],
129
+ "symbols": [],
130
+ "punctuation": [],
131
+ "counts": { "emoji": 0, "symbols": 0, "punctuation": 0, "whitespace": 2 },
132
+ "whitespace": { "spaces": 0, "tabs": 1, "newlines": 1, "other": 0 }
133
+ }
134
+ }
135
+ ]
136
+ }
137
+ }
138
+ ```
139
+
112
140
  ### CJS
113
141
 
114
142
  ```js
@@ -125,9 +153,37 @@ const {
125
153
  wordCounter("Hello world", { latinLocaleHint: "en" });
126
154
  wordCounter("Hi πŸ‘‹, world!", { nonWords: true });
127
155
  wordCounter("Hi πŸ‘‹, world!", { mode: "char", nonWords: true });
156
+ wordCounter("Hi\tthere\n", { nonWords: true, includeWhitespace: true });
128
157
  countCharsForLocale("πŸ‘‹", "en");
129
158
  ```
130
159
 
160
+ Note: `includeWhitespace` only affects results when `nonWords: true` is enabled.
161
+
162
+ Sample output (with `nonWords: true` and `includeWhitespace: true`):
163
+
164
+ ```json
165
+ {
166
+ "total": 4,
167
+ "counts": { "words": 2, "nonWords": 2, "total": 4 },
168
+ "breakdown": {
169
+ "mode": "chunk",
170
+ "items": [
171
+ {
172
+ // ...
173
+ "words": 2,
174
+ "nonWords": {
175
+ "emoji": [],
176
+ "symbols": [],
177
+ "punctuation": [],
178
+ "counts": { "emoji": 0, "symbols": 0, "punctuation": 0, "whitespace": 2 },
179
+ "whitespace": { "spaces": 0, "tabs": 1, "newlines": 1, "other": 0 }
180
+ }
181
+ }
182
+ ]
183
+ }
184
+ }
185
+ ```
186
+
131
187
  ### Export Summary
132
188
 
133
189
  #### Core API
@@ -155,13 +211,13 @@ countCharsForLocale("πŸ‘‹", "en");
155
211
 
156
212
  #### Types
157
213
 
158
- | Export | Kind | Notes |
159
- | ---------------------- | ---- | ----------------------------------------- |
160
- | `WordCounterOptions` | type | Options for the `wordCounter` function. |
161
- | `WordCounterResult` | type | Returned by `wordCounter`. |
162
- | `WordCounterBreakdown` | type | Breakdown payload in `WordCounterResult`. |
163
- | `WordCounterMode` | type | `"chunk" \| "segments" \| "collector" \| "char"`. |
164
- | `NonWordCollection` | type | Non-word segments + counts payload. |
214
+ | Export | Kind | Notes |
215
+ | ---------------------- | ---- | ------------------------------------------------- |
216
+ | `WordCounterOptions` | type | Options for the `wordCounter` function. |
217
+ | `WordCounterResult` | type | Returned by `wordCounter`. |
218
+ | `WordCounterBreakdown` | type | Breakdown payload in `WordCounterResult`. |
219
+ | `WordCounterMode` | type | `"chunk" \| "segments" \| "collector" \| "char"`. |
220
+ | `NonWordCollection` | type | Non-word segments + counts payload. |
165
221
 
166
222
  ### Display Modes
167
223
 
@@ -266,6 +322,37 @@ word-counter --non-words "Hi πŸ‘‹, world!"
266
322
  Example: `total = words + emoji + symbols + punctuation` when enabled.
267
323
  Standard output labels this as `Total count` to reflect the combined total; `--format raw` still prints a single number.
268
324
 
325
+ Include whitespace-like characters in the non-words bucket (API: `includeWhitespace: true`):
326
+
327
+ ```bash
328
+ word-counter --include-whitespace "Hi\tthere\n"
329
+ word-counter --misc "Hi\tthere\n"
330
+ ```
331
+
332
+ In the CLI, `--include-whitespace` implies `--non-words` (same behavior as `--misc`). `--non-words` alone does not include whitespace. When enabled, whitespace counts appear under `nonWords.whitespace`, and `total = words + nonWords` (emoji + symbols + punctuation + whitespace). JSON output also includes top-level `counts` when `nonWords` is enabled. See `docs/schemas/whitespace-categories.md` for how whitespace is categorized.
333
+
334
+ Example JSON (trimmed):
335
+
336
+ ```json
337
+ {
338
+ "total": 5,
339
+ "counts": { "words": 2, "nonWords": 3, "total": 5 },
340
+ "breakdown": {
341
+ "mode": "chunk",
342
+ "items": [
343
+ {
344
+ "locale": "und-Latn",
345
+ "words": 2,
346
+ "nonWords": {
347
+ "counts": { "emoji": 0, "symbols": 0, "punctuation": 0, "whitespace": 3 },
348
+ "whitespace": { "spaces": 1, "tabs": 1, "newlines": 1, "other": 0 }
349
+ }
350
+ }
351
+ ]
352
+ }
353
+ }
354
+ ```
355
+
269
356
  > [!Note]
270
357
  > Text-default symbols (e.g. Β©) count as `symbols` unless explicitly emoji-presented (e.g. ©️ with VS16).
271
358
 
@@ -41,6 +41,13 @@ const emojiPresentationRegex = /\p{Emoji_Presentation}/u;
41
41
  const keycapEmojiRegex = /[0-9#*]\uFE0F?\u20E3/u;
42
42
  const symbolRegex = /\p{S}/u;
43
43
  const punctuationRegex = /\p{P}/u;
44
+ const whitespaceRegex = /\s/u;
45
+ const newlineChars = new Set([
46
+ "\n",
47
+ "\r",
48
+ "\u2028",
49
+ "\u2029"
50
+ ]);
44
51
  function createNonWordCollection() {
45
52
  return {
46
53
  emoji: [],
@@ -67,6 +74,40 @@ function addNonWord(collection, category, segment) {
67
74
  collection.punctuation.push(segment);
68
75
  collection.counts.punctuation += 1;
69
76
  }
77
+ function addWhitespace(collection, segment) {
78
+ let whitespace = collection.whitespace;
79
+ let count = 0;
80
+ for (const char of segment) {
81
+ if (char === " ") {
82
+ whitespace = whitespace ?? createWhitespaceCounts();
83
+ whitespace.spaces += 1;
84
+ count += 1;
85
+ continue;
86
+ }
87
+ if (char === " ") {
88
+ whitespace = whitespace ?? createWhitespaceCounts();
89
+ whitespace.tabs += 1;
90
+ count += 1;
91
+ continue;
92
+ }
93
+ if (newlineChars.has(char)) {
94
+ whitespace = whitespace ?? createWhitespaceCounts();
95
+ whitespace.newlines += 1;
96
+ count += 1;
97
+ continue;
98
+ }
99
+ if (whitespaceRegex.test(char)) {
100
+ whitespace = whitespace ?? createWhitespaceCounts();
101
+ whitespace.other += 1;
102
+ count += 1;
103
+ }
104
+ }
105
+ if (count > 0) {
106
+ collection.whitespace = whitespace ?? createWhitespaceCounts();
107
+ collection.counts.whitespace = (collection.counts.whitespace ?? 0) + count;
108
+ }
109
+ return count;
110
+ }
70
111
  function classifyNonWordSegment(segment) {
71
112
  const hasEmojiVariationSelector = segment.includes("️");
72
113
  if (keycapEmojiRegex.test(segment) || emojiPresentationRegex.test(segment) || hasEmojiVariationSelector && emojiRegex.test(segment)) return "emoji";
@@ -87,17 +128,35 @@ function mergeNonWordCollections(target, source) {
87
128
  target.punctuation.push(...source.punctuation);
88
129
  target.counts.punctuation += source.counts.punctuation;
89
130
  }
131
+ if (source.counts.whitespace && source.counts.whitespace > 0 && source.whitespace) {
132
+ const whitespace = target.whitespace ?? createWhitespaceCounts();
133
+ whitespace.spaces += source.whitespace.spaces;
134
+ whitespace.tabs += source.whitespace.tabs;
135
+ whitespace.newlines += source.whitespace.newlines;
136
+ whitespace.other += source.whitespace.other;
137
+ target.whitespace = whitespace;
138
+ target.counts.whitespace = (target.counts.whitespace ?? 0) + source.counts.whitespace;
139
+ }
90
140
  return target;
91
141
  }
142
+ function createWhitespaceCounts() {
143
+ return {
144
+ spaces: 0,
145
+ tabs: 0,
146
+ newlines: 0,
147
+ other: 0
148
+ };
149
+ }
92
150
 
93
151
  //#endregion
94
152
  //#region src/wc/analyze.ts
95
- function analyzeChunk(chunk, collectNonWords) {
153
+ function analyzeChunk(chunk, collectNonWords, includeWhitespace) {
96
154
  const segmenter = getSegmenter(chunk.locale);
97
155
  const segments = [];
98
156
  const nonWords = collectNonWords ? createNonWordCollection() : null;
99
157
  for (const part of segmenter.segment(chunk.text)) if (part.isWordLike) segments.push(part.segment);
100
158
  else if (collectNonWords && nonWords) {
159
+ if (includeWhitespace) addWhitespace(nonWords, part.segment);
101
160
  const category = classifyNonWordSegment(part.segment);
102
161
  if (category) addNonWord(nonWords, category, part.segment);
103
162
  }
@@ -109,20 +168,28 @@ function analyzeChunk(chunk, collectNonWords) {
109
168
  nonWords: nonWords ?? void 0
110
169
  };
111
170
  }
112
- function analyzeCharChunk(chunk, collectNonWords) {
171
+ function analyzeCharChunk(chunk, collectNonWords, includeWhitespace) {
113
172
  const segmenter = getSegmenter(chunk.locale);
114
173
  const nonWords = collectNonWords ? createNonWordCollection() : null;
115
174
  let chars = 0;
175
+ let wordChars = 0;
176
+ let nonWordChars = 0;
116
177
  for (const part of segmenter.segment(chunk.text)) {
117
178
  if (part.isWordLike) {
118
- chars += countCharsForLocale(part.segment, chunk.locale);
179
+ const count = countCharsForLocale(part.segment, chunk.locale);
180
+ chars += count;
181
+ wordChars += count;
119
182
  continue;
120
183
  }
121
184
  if (collectNonWords && nonWords) {
185
+ let whitespaceCount = 0;
186
+ if (includeWhitespace) whitespaceCount = addWhitespace(nonWords, part.segment);
122
187
  const category = classifyNonWordSegment(part.segment);
123
- if (category) {
124
- addNonWord(nonWords, category, part.segment);
125
- chars += countCharsForLocale(part.segment, chunk.locale);
188
+ if (category) addNonWord(nonWords, category, part.segment);
189
+ if (category || whitespaceCount > 0) {
190
+ const count = countCharsForLocale(part.segment, chunk.locale);
191
+ chars += count;
192
+ nonWordChars += count;
126
193
  }
127
194
  }
128
195
  }
@@ -130,6 +197,8 @@ function analyzeCharChunk(chunk, collectNonWords) {
130
197
  locale: chunk.locale,
131
198
  text: chunk.text,
132
199
  chars,
200
+ wordChars,
201
+ nonWordChars,
133
202
  nonWords: nonWords ?? void 0
134
203
  };
135
204
  }
@@ -309,30 +378,49 @@ function mergeAdjacentChunks(chunks) {
309
378
  function wordCounter(text, options = {}) {
310
379
  const mode = resolveMode(options.mode, "chunk");
311
380
  const collectNonWords = Boolean(options.nonWords);
381
+ const includeWhitespace = Boolean(options.includeWhitespace);
312
382
  const chunks = segmentTextByLocale(text, { latinLocaleHint: options.latinLocaleHint });
313
383
  if (mode === "char") {
314
- const analyzed$1 = chunks.map((chunk) => analyzeCharChunk(chunk, collectNonWords));
384
+ const analyzed$1 = chunks.map((chunk) => analyzeCharChunk(chunk, collectNonWords, includeWhitespace));
385
+ const total$1 = analyzed$1.reduce((sum, chunk) => sum + chunk.chars, 0);
386
+ const items = analyzed$1.map((chunk) => ({
387
+ locale: chunk.locale,
388
+ text: chunk.text,
389
+ chars: chunk.chars,
390
+ nonWords: chunk.nonWords
391
+ }));
315
392
  return {
316
- total: analyzed$1.reduce((sum, chunk) => sum + chunk.chars, 0),
393
+ total: total$1,
394
+ counts: collectNonWords ? {
395
+ words: analyzed$1.reduce((sum, chunk) => sum + chunk.wordChars, 0),
396
+ nonWords: analyzed$1.reduce((sum, chunk) => sum + chunk.nonWordChars, 0),
397
+ total: total$1
398
+ } : void 0,
317
399
  breakdown: {
318
400
  mode,
319
- items: analyzed$1.map((chunk) => ({
320
- locale: chunk.locale,
321
- text: chunk.text,
322
- chars: chunk.chars,
323
- nonWords: chunk.nonWords
324
- }))
401
+ items
325
402
  }
326
403
  };
327
404
  }
328
- const analyzed = chunks.map((chunk) => analyzeChunk(chunk, collectNonWords));
405
+ const analyzed = chunks.map((chunk) => analyzeChunk(chunk, collectNonWords, includeWhitespace));
406
+ const wordsTotal = analyzed.reduce((sum, chunk) => sum + chunk.words, 0);
407
+ const nonWordsTotal = collectNonWords ? analyzed.reduce((sum, chunk) => {
408
+ if (!chunk.nonWords) return sum;
409
+ return sum + getNonWordTotal(chunk.nonWords);
410
+ }, 0) : 0;
329
411
  const total = analyzed.reduce((sum, chunk) => {
330
412
  let chunkTotal = chunk.words;
331
- if (collectNonWords && chunk.nonWords) chunkTotal += chunk.nonWords.counts.emoji + chunk.nonWords.counts.symbols + chunk.nonWords.counts.punctuation;
413
+ if (collectNonWords && chunk.nonWords) chunkTotal += getNonWordTotal(chunk.nonWords);
332
414
  return sum + chunkTotal;
333
415
  }, 0);
416
+ const counts = collectNonWords ? {
417
+ words: wordsTotal,
418
+ nonWords: nonWordsTotal,
419
+ total
420
+ } : void 0;
334
421
  if (mode === "segments") return {
335
422
  total,
423
+ counts,
336
424
  breakdown: {
337
425
  mode,
338
426
  items: analyzed.map((chunk) => ({
@@ -346,6 +434,7 @@ function wordCounter(text, options = {}) {
346
434
  };
347
435
  if (mode === "collector") return {
348
436
  total,
437
+ counts,
349
438
  breakdown: {
350
439
  mode,
351
440
  items: aggregateByLocale(analyzed),
@@ -354,6 +443,7 @@ function wordCounter(text, options = {}) {
354
443
  };
355
444
  return {
356
445
  total,
446
+ counts,
357
447
  breakdown: {
358
448
  mode,
359
449
  items: analyzed.map((chunk) => ({
@@ -365,6 +455,9 @@ function wordCounter(text, options = {}) {
365
455
  }
366
456
  };
367
457
  }
458
+ function getNonWordTotal(nonWords) {
459
+ return nonWords.counts.emoji + nonWords.counts.symbols + nonWords.counts.punctuation + (nonWords.counts.whitespace ?? 0);
460
+ }
368
461
  function collectNonWordsAggregate(analyzed, enabled) {
369
462
  if (!enabled) return;
370
463
  const collection = createNonWordCollection();
@@ -1 +1 @@
1
- {"version":3,"file":"index.cjs","names":["analyzed","frontmatter","wordCounter","wordCounter"],"sources":["../../src/wc/segmenter.ts","../../src/wc/non-words.ts","../../src/wc/analyze.ts","../../src/wc/mode.ts","../../src/wc/locale-detect.ts","../../src/wc/segment.ts","../../src/wc/wc.ts","../../src/wc/index.ts","../../src/markdown/toml/arrays.ts","../../src/markdown/toml/keys.ts","../../src/markdown/toml/strings.ts","../../src/markdown/toml/values.ts","../../src/markdown/toml/parse-frontmatter.ts","../../src/markdown/parse-markdown.ts","../../src/markdown/section-count.ts","../../src/utils/show-singular-or-plural-word.ts","../../src/index.cjs.ts"],"sourcesContent":["const segmenterCache = new Map<string, Intl.Segmenter>();\nconst graphemeSegmenterCache = new Map<string, Intl.Segmenter>();\n\nexport function getSegmenter(locale: string): Intl.Segmenter {\n const cached = segmenterCache.get(locale);\n if (cached) {\n return cached;\n }\n const segmenter = new Intl.Segmenter(locale, { granularity: \"word\" });\n segmenterCache.set(locale, segmenter);\n return segmenter;\n}\n\nexport function getGraphemeSegmenter(locale: string): Intl.Segmenter {\n const cached = graphemeSegmenterCache.get(locale);\n if (cached) {\n return cached;\n }\n const segmenter = new Intl.Segmenter(locale, { granularity: \"grapheme\" });\n graphemeSegmenterCache.set(locale, segmenter);\n return segmenter;\n}\n\nfunction supportsSegmenter(): boolean {\n return typeof Intl !== \"undefined\" && typeof Intl.Segmenter === \"function\";\n}\n\nexport function countWordsForLocale(text: string, locale: string): number {\n const segmenter = getSegmenter(locale);\n let count = 0;\n for (const segment of segmenter.segment(text)) {\n if (segment.isWordLike) {\n count++;\n }\n }\n return count;\n}\n\nexport function countCharsForLocale(text: string, locale: string): number {\n if (!supportsSegmenter()) {\n return Array.from(text).length;\n }\n const segmenter = getGraphemeSegmenter(locale);\n let count = 
0;\n for (const _segment of segmenter.segment(text)) {\n count++;\n }\n return count;\n}\n","import type { NonWordCollection } from \"./types\";\n\nconst emojiRegex = /(?:\\p{Extended_Pictographic}|\\p{Emoji_Presentation})/u;\nconst emojiPresentationRegex = /\\p{Emoji_Presentation}/u;\nconst keycapEmojiRegex = /[0-9#*]\\uFE0F?\\u20E3/u;\nconst symbolRegex = /\\p{S}/u;\nconst punctuationRegex = /\\p{P}/u;\n\nexport function createNonWordCollection(): NonWordCollection {\n return {\n emoji: [],\n symbols: [],\n punctuation: [],\n counts: {\n emoji: 0,\n symbols: 0,\n punctuation: 0,\n },\n };\n}\n\nexport function addNonWord(\n collection: NonWordCollection,\n category: \"emoji\" | \"symbol\" | \"punctuation\",\n segment: string,\n): void {\n if (category === \"emoji\") {\n collection.emoji.push(segment);\n collection.counts.emoji += 1;\n return;\n }\n if (category === \"symbol\") {\n collection.symbols.push(segment);\n collection.counts.symbols += 1;\n return;\n }\n collection.punctuation.push(segment);\n collection.counts.punctuation += 1;\n}\n\nexport function classifyNonWordSegment(\n segment: string,\n): \"emoji\" | \"symbol\" | \"punctuation\" | null {\n const hasEmojiVariationSelector = segment.includes(\"\\uFE0F\");\n if (\n keycapEmojiRegex.test(segment) ||\n emojiPresentationRegex.test(segment) ||\n (hasEmojiVariationSelector && emojiRegex.test(segment))\n ) {\n return \"emoji\";\n }\n if (symbolRegex.test(segment)) {\n return \"symbol\";\n }\n if (punctuationRegex.test(segment)) {\n return \"punctuation\";\n }\n return null;\n}\n\nexport function mergeNonWordCollections(\n target: NonWordCollection,\n source: NonWordCollection,\n): NonWordCollection {\n if (source.counts.emoji > 0) {\n target.emoji.push(...source.emoji);\n target.counts.emoji += source.counts.emoji;\n }\n if (source.counts.symbols > 0) {\n target.symbols.push(...source.symbols);\n target.counts.symbols += source.counts.symbols;\n }\n if (source.counts.punctuation > 0) {\n 
target.punctuation.push(...source.punctuation);\n target.counts.punctuation += source.counts.punctuation;\n }\n return target;\n}\n","import { countCharsForLocale, getSegmenter } from \"./segmenter\";\nimport {\n addNonWord,\n classifyNonWordSegment,\n createNonWordCollection,\n} from \"./non-words\";\nimport type {\n ChunkAnalysis,\n CollectorBreakdown,\n LocaleChunk,\n NonWordCollection,\n} from \"./types\";\n\ntype CharAnalysis = LocaleChunk & { chars: number; nonWords?: NonWordCollection };\n\nexport function analyzeChunk(\n chunk: LocaleChunk,\n collectNonWords?: boolean,\n): ChunkAnalysis {\n const segmenter = getSegmenter(chunk.locale);\n const segments: string[] = [];\n const nonWords: NonWordCollection | null = collectNonWords\n ? createNonWordCollection()\n : null;\n for (const part of segmenter.segment(chunk.text)) {\n if (part.isWordLike) {\n segments.push(part.segment);\n } else if (collectNonWords && nonWords) {\n const category = classifyNonWordSegment(part.segment);\n if (category) {\n addNonWord(nonWords, category, part.segment);\n }\n }\n }\n return {\n locale: chunk.locale,\n text: chunk.text,\n segments,\n words: segments.length,\n nonWords: nonWords ?? undefined,\n };\n}\n\nexport function analyzeCharChunk(\n chunk: LocaleChunk,\n collectNonWords?: boolean,\n): CharAnalysis {\n const segmenter = getSegmenter(chunk.locale);\n const nonWords: NonWordCollection | null = collectNonWords\n ? createNonWordCollection()\n : null;\n let chars = 0;\n\n for (const part of segmenter.segment(chunk.text)) {\n if (part.isWordLike) {\n chars += countCharsForLocale(part.segment, chunk.locale);\n continue;\n }\n\n if (collectNonWords && nonWords) {\n const category = classifyNonWordSegment(part.segment);\n if (category) {\n addNonWord(nonWords, category, part.segment);\n chars += countCharsForLocale(part.segment, chunk.locale);\n }\n }\n }\n\n return {\n locale: chunk.locale,\n text: chunk.text,\n chars,\n nonWords: nonWords ?? 
undefined,\n };\n}\n\nexport function aggregateByLocale(\n chunks: ChunkAnalysis[]\n): CollectorBreakdown[] {\n const order: string[] = [];\n const map = new Map<string, CollectorBreakdown>();\n\n for (const chunk of chunks) {\n const existing = map.get(chunk.locale);\n if (existing) {\n existing.words += chunk.words;\n existing.segments.push(...chunk.segments);\n continue;\n }\n\n order.push(chunk.locale);\n map.set(chunk.locale, {\n locale: chunk.locale,\n words: chunk.words,\n segments: [...chunk.segments],\n });\n }\n\n return order.map((locale) => map.get(locale)!);\n}\n","import type { WordCounterMode } from \"./types\";\n\nconst MODE_ALIASES: Record<string, WordCounterMode> = {\n chunk: \"chunk\",\n chunks: \"chunk\",\n segments: \"segments\",\n segment: \"segments\",\n seg: \"segments\",\n collector: \"collector\",\n collect: \"collector\",\n colle: \"collector\",\n char: \"char\",\n chars: \"char\",\n character: \"char\",\n characters: \"char\",\n};\n\nexport function normalizeMode(input?: string | null): WordCounterMode | null {\n if (!input) {\n return null;\n }\n const normalized = input.trim().toLowerCase();\n return MODE_ALIASES[normalized] ?? null;\n}\n\nexport function resolveMode(\n input?: string | null,\n fallback: WordCounterMode = \"chunk\",\n): WordCounterMode {\n return normalizeMode(input) ?? 
fallback;\n}\n","export const DEFAULT_LOCALE = \"und-Latn\";\n\nexport interface LocaleDetectOptions {\n latinLocaleHint?: string;\n}\n\nconst regex = {\n hiragana: /\\p{Script=Hiragana}/u,\n katakana: /\\p{Script=Katakana}/u,\n hangul: /\\p{Script=Hangul}/u,\n han: /\\p{Script=Han}/u,\n latin: /\\p{Script=Latin}/u,\n arabic: /\\p{Script=Arabic}/u,\n cyrillic: /\\p{Script=Cyrillic}/u,\n devanagari: /\\p{Script=Devanagari}/u,\n thai: /\\p{Script=Thai}/u,\n};\n\nconst latinLocaleHints: Array<{ locale: string; regex: RegExp }> = [\n { locale: \"de\", regex: /[Γ€ΓΆΓΌΓ„Γ–ΓœΓŸ]/ },\n { locale: \"es\", regex: /[ñÑ¿‘]/ },\n { locale: \"pt\", regex: /[ãáÃÕ]/ },\n { locale: \"fr\", regex: /[œŒæÆ]/ },\n];\n\nconst latinLocales = new Set<string>([\n DEFAULT_LOCALE,\n ...latinLocaleHints.map((hint) => hint.locale),\n]);\n\nexport function isLatinLocale(locale: string): boolean {\n return latinLocales.has(locale);\n}\n\nfunction detectLatinLocale(char: string): string {\n for (const hint of latinLocaleHints) {\n if (hint.regex.test(char)) {\n return hint.locale;\n }\n }\n return DEFAULT_LOCALE;\n}\n\nexport function detectLocaleForChar(\n char: string,\n previousLocale?: string | null,\n options: LocaleDetectOptions = {}\n): string | null {\n if (regex.hiragana.test(char) || regex.katakana.test(char)) {\n return \"ja\";\n }\n if (regex.hangul.test(char)) {\n return \"ko\";\n }\n if (regex.arabic.test(char)) {\n return \"ar\";\n }\n if (regex.cyrillic.test(char)) {\n return \"ru\";\n }\n if (regex.devanagari.test(char)) {\n return \"hi\";\n }\n if (regex.thai.test(char)) {\n return \"th\";\n }\n\n if (regex.han.test(char)) {\n if (previousLocale && previousLocale.startsWith(\"ja\")) {\n return previousLocale;\n }\n return \"zh-Hans\";\n }\n\n if (regex.latin.test(char)) {\n const hintedLocale = detectLatinLocale(char);\n if (hintedLocale !== DEFAULT_LOCALE) {\n return hintedLocale;\n }\n if (previousLocale && isLatinLocale(previousLocale) && previousLocale !== DEFAULT_LOCALE) {\n 
return previousLocale;\n }\n if (options.latinLocaleHint) {\n return options.latinLocaleHint;\n }\n return DEFAULT_LOCALE;\n }\n\n return null;\n}\n","import {\n DEFAULT_LOCALE,\n detectLocaleForChar,\n isLatinLocale,\n type LocaleDetectOptions,\n} from \"./locale-detect\";\nimport type { LocaleChunk } from \"./types\";\n\nexport function segmentTextByLocale(\n text: string,\n options: LocaleDetectOptions = {}\n): LocaleChunk[] {\n const chunks: LocaleChunk[] = [];\n // Keep currentLocale as a non-null string to simplify type-narrowing.\n let currentLocale: string = DEFAULT_LOCALE;\n let buffer = \"\";\n let bufferHasScript = false;\n\n for (const char of text) {\n const detected = detectLocaleForChar(char, currentLocale, options);\n const targetLocale = detected ?? currentLocale;\n\n // If buffer is empty, this is the first character for a new chunk.\n if (buffer === \"\") {\n currentLocale = targetLocale;\n buffer = char;\n bufferHasScript = detected !== null;\n continue;\n }\n\n if (detected !== null && !bufferHasScript) {\n currentLocale = targetLocale;\n buffer += char;\n bufferHasScript = true;\n continue;\n }\n\n if (targetLocale !== currentLocale && detected !== null) {\n if (currentLocale === DEFAULT_LOCALE && isLatinLocale(targetLocale)) {\n currentLocale = targetLocale;\n buffer += char;\n bufferHasScript = true;\n continue;\n }\n // currentLocale is guaranteed to be a string here.\n chunks.push({ locale: currentLocale, text: buffer });\n currentLocale = targetLocale;\n buffer = char;\n bufferHasScript = true;\n continue;\n }\n\n buffer += char;\n if (detected !== null) {\n bufferHasScript = true;\n }\n }\n\n if (buffer.length > 0) {\n chunks.push({ locale: currentLocale, text: buffer });\n }\n\n return mergeAdjacentChunks(chunks);\n}\n\nfunction mergeAdjacentChunks(chunks: LocaleChunk[]): LocaleChunk[] {\n if (chunks.length === 0) {\n return chunks;\n }\n\n const merged: LocaleChunk[] = [];\n // We already returned for empty arrays above, so the first 
element is present.\n let last = chunks[0]!;\n\n for (let i = 1; i < chunks.length; i++) {\n const chunk = chunks[i]!;\n if (chunk.locale === last.locale) {\n last = { locale: last.locale, text: last.text + chunk.text };\n } else {\n merged.push(last);\n last = chunk;\n }\n }\n\n merged.push(last);\n return merged;\n}\n","import { analyzeCharChunk, analyzeChunk, aggregateByLocale } from \"./analyze\";\nimport { resolveMode } from \"./mode\";\nimport { segmentTextByLocale } from \"./segment\";\nimport { countCharsForLocale, countWordsForLocale } from \"./segmenter\";\nimport { createNonWordCollection, mergeNonWordCollections } from \"./non-words\";\nimport type {\n CharBreakdown,\n ChunkBreakdown,\n ChunkWithSegments,\n NonWordCollection,\n WordCounterMode,\n WordCounterOptions,\n WordCounterResult,\n} from \"./types\";\n\nexport type {\n NonWordCollection,\n WordCounterMode,\n WordCounterOptions,\n WordCounterResult,\n WordCounterBreakdown,\n} from \"./types\";\n\nexport { countCharsForLocale, countWordsForLocale, segmentTextByLocale };\n\nexport function wordCounter(\n text: string,\n options: WordCounterOptions = {}\n): WordCounterResult {\n const mode: WordCounterMode = resolveMode(options.mode, \"chunk\");\n const collectNonWords = Boolean(options.nonWords);\n const chunks = segmentTextByLocale(text, { latinLocaleHint: options.latinLocaleHint });\n\n if (mode === \"char\") {\n const analyzed = chunks.map((chunk) => analyzeCharChunk(chunk, collectNonWords));\n const total = analyzed.reduce((sum, chunk) => sum + chunk.chars, 0);\n const items: CharBreakdown[] = analyzed.map((chunk) => ({\n locale: chunk.locale,\n text: chunk.text,\n chars: chunk.chars,\n nonWords: chunk.nonWords,\n }));\n return {\n total,\n breakdown: {\n mode,\n items,\n },\n };\n }\n\n const analyzed = chunks.map((chunk) => analyzeChunk(chunk, collectNonWords));\n const total = analyzed.reduce((sum, chunk) => {\n let chunkTotal = chunk.words;\n if (collectNonWords && chunk.nonWords) {\n 
chunkTotal +=\n chunk.nonWords.counts.emoji +\n chunk.nonWords.counts.symbols +\n chunk.nonWords.counts.punctuation;\n }\n return sum + chunkTotal;\n }, 0);\n\n if (mode === \"segments\") {\n const items: ChunkWithSegments[] = analyzed.map((chunk) => ({\n locale: chunk.locale,\n text: chunk.text,\n words: chunk.words,\n segments: chunk.segments,\n nonWords: chunk.nonWords,\n }));\n return {\n total,\n breakdown: {\n mode,\n items,\n },\n };\n }\n\n if (mode === \"collector\") {\n const items = aggregateByLocale(analyzed);\n const nonWords = collectNonWordsAggregate(analyzed, collectNonWords);\n return {\n total,\n breakdown: {\n mode,\n items,\n nonWords,\n },\n };\n }\n\n const items: ChunkBreakdown[] = analyzed.map((chunk) => ({\n locale: chunk.locale,\n text: chunk.text,\n words: chunk.words,\n nonWords: chunk.nonWords,\n }));\n\n return {\n total,\n breakdown: {\n mode,\n items,\n },\n };\n}\n\nfunction collectNonWordsAggregate(\n analyzed: Array<{ nonWords?: NonWordCollection }>,\n enabled: boolean,\n): NonWordCollection | undefined {\n if (!enabled) {\n return undefined;\n }\n const collection = createNonWordCollection();\n for (const chunk of analyzed) {\n if (!chunk.nonWords) {\n continue;\n }\n mergeNonWordCollections(collection, chunk.nonWords);\n }\n return collection;\n}\n","import { wordCounter } from \"./wc\";\n\nexport default wordCounter;\nexport { countCharsForLocale, countWordsForLocale, segmentTextByLocale } from \"./wc\";\nexport type {\n NonWordCollection,\n WordCounterMode,\n WordCounterOptions,\n WordCounterResult,\n WordCounterBreakdown,\n} from \"./wc\";\n","export function ensureArrayContainer(\n result: Record<string, unknown>,\n key: string,\n): Record<string, unknown>[] {\n const existing = result[key];\n if (Array.isArray(existing)) {\n return existing as Record<string, unknown>[];\n }\n const list: Record<string, unknown>[] = [];\n result[key] = list;\n return list;\n}\n\nexport function flattenArrayTables(result: Record<string, 
unknown>): void {\n for (const [key, value] of Object.entries(result)) {\n if (!Array.isArray(value)) {\n continue;\n }\n const flattened = value\n .map((entry) =>\n Object.entries(entry)\n .map(([entryKey, entryValue]) => `${entryKey}=${entryValue}`)\n .join(\", \"),\n )\n .join(\" | \");\n result[key] = flattened;\n }\n}\n","function stripKeyQuotes(key: string): string {\n const trimmed = key.trim();\n if (\n (trimmed.startsWith(\"\\\"\") && trimmed.endsWith(\"\\\"\")) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))\n ) {\n return trimmed.slice(1, -1);\n }\n return trimmed;\n}\n\nexport function normalizeKeyPath(key: string): string | null {\n const trimmed = key.trim();\n if (!trimmed) {\n return null;\n }\n\n if (\n (trimmed.startsWith(\"\\\"\") && trimmed.endsWith(\"\\\"\")) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))\n ) {\n const unquoted = stripKeyQuotes(trimmed);\n return unquoted ? unquoted : null;\n }\n\n const segments = trimmed.split(\".\").map((segment) => segment.trim());\n if (segments.some((segment) => !segment)) {\n return null;\n }\n return segments.join(\".\");\n}\n","export function stripInlineComment(line: string): string {\n let inString: \"single\" | \"double\" | null = null;\n let escaped = false;\n\n for (let i = 0; i < line.length; i += 1) {\n const char = line[i] ?? 
\"\";\n\n if (inString) {\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\" && inString === \"double\") {\n escaped = true;\n continue;\n }\n\n if (inString === \"double\" && char === \"\\\"\") {\n inString = null;\n continue;\n }\n\n if (inString === \"single\" && char === \"'\") {\n inString = null;\n continue;\n }\n\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = \"double\";\n continue;\n }\n\n if (char === \"'\") {\n inString = \"single\";\n continue;\n }\n\n if (char === \"#\") {\n return line.slice(0, i).trimEnd();\n }\n }\n\n return line;\n}\n\nfunction unescapeBasic(input: string): string {\n return input\n .replace(/\\\\\\\\/g, \"\\\\\")\n .replace(/\\\\\"/g, \"\\\"\")\n .replace(/\\\\n/g, \"\\n\")\n .replace(/\\\\t/g, \"\\t\")\n .replace(/\\\\r/g, \"\\r\");\n}\n\nexport function parseStringLiteral(value: string): string | null {\n if (value.startsWith('\"\"\"') && value.endsWith('\"\"\"')) {\n const inner = value.slice(3, -3);\n return unescapeBasic(inner);\n }\n\n if (value.startsWith(\"'''\") && value.endsWith(\"'''\")) {\n return value.slice(3, -3);\n }\n\n if (value.startsWith(\"\\\"\") && value.endsWith(\"\\\"\")) {\n return unescapeBasic(value.slice(1, -1));\n }\n\n if (value.startsWith(\"'\") && value.endsWith(\"'\")) {\n return value.slice(1, -1);\n }\n\n return null;\n}\n","import { normalizeKeyPath } from \"./keys\";\nimport { parseStringLiteral } from \"./strings\";\nimport type { TomlValue } from \"./types\";\n\nfunction parsePrimitive(raw: string): string | number | boolean | null {\n const value = raw.trim();\n if (!value) {\n return null;\n }\n\n const stringLiteral = parseStringLiteral(value);\n if (stringLiteral !== null) {\n return stringLiteral;\n }\n\n if (value === \"true\") {\n return true;\n }\n\n if (value === \"false\") {\n return false;\n }\n\n if (/^[+-]?\\d+(?:\\.\\d+)?(?:[eE][+-]?\\d+)?$/.test(value)) {\n return Number(value);\n }\n\n if (/^\\d{4}-\\d{2}-\\d{2}/.test(value)) {\n return 
value;\n }\n\n return value;\n}\n\nfunction parseArray(raw: string): Array<string | number | boolean> | null {\n const value = raw.trim();\n if (!value.startsWith(\"[\") || !value.endsWith(\"]\")) {\n return null;\n }\n\n const inner = value.slice(1, -1).trim();\n if (!inner) {\n return [];\n }\n\n const items: Array<string | number | boolean> = [];\n let current = \"\";\n let inString: \"single\" | \"double\" | null = null;\n let escaped = false;\n\n for (let i = 0; i < inner.length; i += 1) {\n const char = inner[i] ?? \"\";\n\n if (inString) {\n current += char;\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\" && inString === \"double\") {\n escaped = true;\n continue;\n }\n\n if (inString === \"double\" && char === \"\\\"\") {\n inString = null;\n } else if (inString === \"single\" && char === \"'\") {\n inString = null;\n }\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = \"double\";\n current += char;\n continue;\n }\n\n if (char === \"'\") {\n inString = \"single\";\n current += char;\n continue;\n }\n\n if (char === \",\") {\n const item = parsePrimitive(current);\n if (item === null) {\n return null;\n }\n items.push(item);\n current = \"\";\n continue;\n }\n\n current += char;\n }\n\n const finalItem = parsePrimitive(current);\n if (finalItem === null) {\n return null;\n }\n items.push(finalItem);\n\n return items;\n}\n\nfunction parseInlineTable(raw: string): Record<string, TomlValue> | null {\n const trimmed = raw.trim();\n if (!trimmed.startsWith(\"{\") || !trimmed.endsWith(\"}\")) {\n return null;\n }\n\n const inner = trimmed.slice(1, -1).trim();\n if (!inner) {\n return {};\n }\n\n const pairs: string[] = [];\n let current = \"\";\n let inString: \"single\" | \"double\" | null = null;\n let escaped = false;\n let bracketDepth = 0;\n let braceDepth = 0;\n\n for (let i = 0; i < inner.length; i += 1) {\n const char = inner[i] ?? 
\"\";\n\n if (inString) {\n current += char;\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\" && inString === \"double\") {\n escaped = true;\n continue;\n }\n\n if (inString === \"double\" && char === \"\\\"\") {\n inString = null;\n } else if (inString === \"single\" && char === \"'\") {\n inString = null;\n }\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = \"double\";\n current += char;\n continue;\n }\n\n if (char === \"'\") {\n inString = \"single\";\n current += char;\n continue;\n }\n\n if (char === \"[\") {\n bracketDepth += 1;\n current += char;\n continue;\n }\n\n if (char === \"]\") {\n if (bracketDepth > 0) {\n bracketDepth -= 1;\n }\n current += char;\n continue;\n }\n\n if (char === \"{\") {\n braceDepth += 1;\n current += char;\n continue;\n }\n\n if (char === \"}\") {\n if (braceDepth > 0) {\n braceDepth -= 1;\n }\n current += char;\n continue;\n }\n\n if (char === \",\" && bracketDepth === 0 && braceDepth === 0) {\n pairs.push(current);\n current = \"\";\n continue;\n }\n\n current += char;\n }\n\n if (current.trim()) {\n pairs.push(current);\n }\n\n const output: Record<string, TomlValue> = {};\n for (const pair of pairs) {\n const separatorIndex = pair.indexOf(\"=\");\n if (separatorIndex === -1) {\n return null;\n }\n const key = normalizeKeyPath(pair.slice(0, separatorIndex));\n if (!key) {\n return null;\n }\n const valueRaw = pair.slice(separatorIndex + 1).trim();\n if (!valueRaw) {\n return null;\n }\n if (valueRaw.startsWith(\"{\")) {\n return null;\n }\n const normalized = normalizeValue(valueRaw);\n if (normalized === null) {\n return null;\n }\n if (typeof normalized === \"object\" && !Array.isArray(normalized)) {\n return null;\n }\n output[key] = normalized;\n }\n\n return output;\n}\n\nexport function normalizeValue(value: string): TomlValue | null {\n if (!value) {\n return null;\n }\n\n const trimmed = value.trim();\n if (trimmed.startsWith(\"{\") && trimmed.endsWith(\"}\")) {\n return 
parseInlineTable(trimmed);\n }\n\n const array = parseArray(trimmed);\n if (array) {\n return array;\n }\n\n if (trimmed.startsWith(\"[\") && trimmed.endsWith(\"]\")) {\n return null;\n }\n\n return parsePrimitive(trimmed);\n}\n\nexport function toPlainText(value: unknown): string {\n if (value == null) {\n return \"\";\n }\n if (Array.isArray(value)) {\n return value.map((item) => String(item)).join(\", \");\n }\n return String(value);\n}\n","import { ensureArrayContainer, flattenArrayTables } from \"./arrays\";\nimport { normalizeKeyPath } from \"./keys\";\nimport { stripInlineComment } from \"./strings\";\nimport { normalizeValue, toPlainText } from \"./values\";\n\nexport function parseTomlFrontmatter(frontmatter: string): Record<string, unknown> | null {\n const result: Record<string, unknown> = {};\n const lines = frontmatter.split(\"\\n\");\n let tablePrefix = \"\";\n let tableTarget: Record<string, unknown> | null = null;\n let tablePrefixInList = false;\n\n for (let index = 0; index < lines.length; index += 1) {\n const rawLine = lines[index] ?? \"\";\n const trimmedLine = rawLine.trim();\n if (!trimmedLine || trimmedLine.startsWith(\"#\")) {\n continue;\n }\n\n if (trimmedLine.startsWith(\"[[\")) {\n const match = trimmedLine.match(/^\\[\\[([^\\]]+)]]$/);\n if (!match) {\n return null;\n }\n const normalizedTable = normalizeKeyPath(match[1] ?? \"\");\n if (!normalizedTable) {\n return null;\n }\n const list = ensureArrayContainer(result, normalizedTable);\n const newEntry: Record<string, unknown> = {};\n list.push(newEntry);\n tableTarget = newEntry;\n tablePrefix = normalizedTable;\n tablePrefixInList = true;\n continue;\n }\n\n const tableMatch = trimmedLine.match(/^\\[([^\\]]+)]$/);\n if (tableMatch) {\n const normalizedTable = normalizeKeyPath(tableMatch[1] ?? 
\"\");\n if (!normalizedTable) {\n return null;\n }\n tablePrefix = normalizedTable;\n tablePrefixInList = false;\n tableTarget = null;\n continue;\n }\n\n const lineForParsing = /(\"\"\"|''')/.test(rawLine) ? rawLine : stripInlineComment(rawLine);\n const separatorIndex = lineForParsing.indexOf(\"=\");\n if (separatorIndex === -1) {\n return null;\n }\n\n const keyRaw = lineForParsing.slice(0, separatorIndex);\n const key = normalizeKeyPath(keyRaw);\n let valueRaw = lineForParsing.slice(separatorIndex + 1).trim();\n if (!key) {\n return null;\n }\n\n const tripleDelimiter = valueRaw.startsWith('\"\"\"') ? '\"\"\"' : valueRaw.startsWith(\"'''\") ? \"'''\" : null;\n if (tripleDelimiter) {\n const closingIndex = valueRaw.indexOf(tripleDelimiter, tripleDelimiter.length);\n if (closingIndex !== -1) {\n const after = valueRaw.slice(closingIndex + tripleDelimiter.length);\n const strippedAfter = stripInlineComment(after);\n valueRaw = `${valueRaw.slice(0, closingIndex + tripleDelimiter.length)}${strippedAfter}`;\n } else {\n const delimiter = tripleDelimiter;\n let combined = valueRaw;\n let closed = false;\n while (index + 1 < lines.length) {\n index += 1;\n const nextLine = lines[index] ?? \"\";\n combined += `\\n${nextLine}`;\n if (new RegExp(`${delimiter}\\\\s*$`).test(nextLine)) {\n closed = true;\n break;\n }\n }\n if (!closed) {\n return null;\n }\n valueRaw = combined;\n }\n }\n\n const normalized = normalizeValue(valueRaw);\n if (normalized === null) {\n return null;\n }\n\n const fullKey = tablePrefix ? `${tablePrefix}.${key}` : key;\n if (typeof normalized === \"object\" && !Array.isArray(normalized)) {\n for (const [inlineKey, inlineValue] of Object.entries(normalized)) {\n const entryKey = tablePrefixInList ? 
`${key}.${inlineKey}` : `${fullKey}.${inlineKey}`;\n if (tablePrefixInList && tableTarget) {\n tableTarget[entryKey] = toPlainText(inlineValue);\n } else {\n result[entryKey] = toPlainText(inlineValue);\n }\n }\n continue;\n }\n\n if (tablePrefixInList && tableTarget) {\n tableTarget[key] = toPlainText(normalized);\n continue;\n }\n\n result[fullKey] = toPlainText(normalized);\n }\n\n flattenArrayTables(result);\n\n return result;\n}\n","import { parseDocument } from \"yaml\";\nimport { parseTomlFrontmatter } from \"./toml-simple\";\nimport type { FrontmatterType, ParsedMarkdown } from \"./types\";\n\nconst FENCE_TO_TYPE: Record<string, FrontmatterType> = {\n \"---\": \"yaml\",\n \"+++\": \"toml\",\n \";;;\": \"json\",\n};\n\nfunction normalizeNewlines(input: string): string {\n return input.replace(/\\r\\n/g, \"\\n\");\n}\n\nfunction stripBom(line: string): string {\n return line.startsWith(\"\\uFEFF\") ? line.slice(1) : line;\n}\n\nfunction getFenceType(line: string): FrontmatterType | null {\n const match = line.match(/^[\\t ]*(---|\\+\\+\\+|;;;)[\\t ]*$/);\n if (!match) {\n return null;\n }\n return FENCE_TO_TYPE[match[1] ?? \"\"] ?? 
null;\n}\n\nfunction parseFrontmatter(frontmatter: string, type: FrontmatterType | null): Record<string, unknown> | null {\n if (!type) {\n return null;\n }\n\n if (type === \"json\") {\n try {\n return JSON.parse(frontmatter) as Record<string, unknown>;\n } catch {\n return null;\n }\n }\n\n if (type === \"yaml\") {\n const doc = parseDocument(frontmatter, { prettyErrors: false });\n if (doc.errors.length > 0) {\n return null;\n }\n const data = doc.toJSON();\n if (!data || typeof data !== \"object\" || Array.isArray(data)) {\n return null;\n }\n return data as Record<string, unknown>;\n }\n\n if (type === \"toml\") {\n return parseTomlFrontmatter(frontmatter);\n }\n\n return null;\n}\n\nfunction extractJsonBlock(text: string, startIndex: number): { jsonText: string; endIndex: number } | null {\n let depth = 0;\n let inString = false;\n let escaped = false;\n\n for (let i = startIndex; i < text.length; i += 1) {\n const char = text[i] ?? \"\";\n\n if (inString) {\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\") {\n escaped = true;\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = false;\n }\n\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = true;\n continue;\n }\n\n if (char === \"{\") {\n depth += 1;\n continue;\n }\n\n if (char === \"}\") {\n depth -= 1;\n if (depth === 0) {\n const jsonText = text.slice(startIndex, i + 1);\n return { jsonText, endIndex: i };\n }\n }\n }\n\n return null;\n}\n\nexport function parseMarkdown(input: string): ParsedMarkdown {\n const normalized = normalizeNewlines(input);\n const lines = normalized.split(\"\\n\");\n if (lines.length === 0) {\n return { frontmatter: null, content: normalized, data: null, frontmatterType: null };\n }\n\n lines[0] = stripBom(lines[0] ?? \"\");\n const normalizedWithoutBom = lines.join(\"\\n\");\n\n const openingType = getFenceType(lines[0] ?? \"\");\n if (!openingType) {\n const leadingWhitespace = normalizedWithoutBom.match(/^[\\t \\n]*/)?.[0] ?? 
\"\";\n const jsonStart = leadingWhitespace.length;\n if (normalizedWithoutBom[jsonStart] !== \"{\") {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n const jsonBlock = extractJsonBlock(normalizedWithoutBom, jsonStart);\n if (!jsonBlock) {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n const frontmatter = jsonBlock.jsonText;\n let content = normalizedWithoutBom.slice(jsonBlock.endIndex + 1);\n if (content.startsWith(\"\\n\")) {\n content = content.slice(1);\n }\n const data = parseFrontmatter(frontmatter, \"json\");\n if (!data) {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n return {\n frontmatter,\n content,\n data,\n frontmatterType: \"json\",\n };\n }\n\n let closingIndex = -1;\n for (let i = 1; i < lines.length; i += 1) {\n if (getFenceType(lines[i] ?? \"\") === openingType) {\n closingIndex = i;\n break;\n }\n }\n\n if (closingIndex === -1) {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n const frontmatter = lines.slice(1, closingIndex).join(\"\\n\");\n const content = lines.slice(closingIndex + 1).join(\"\\n\");\n const data = parseFrontmatter(frontmatter, openingType);\n\n return {\n frontmatter,\n content,\n data,\n frontmatterType: openingType,\n };\n}\n","import type { WordCounterMode, WordCounterOptions, WordCounterResult } from \"../wc/types\";\nimport wordCounter from \"../wc\";\nimport { parseMarkdown } from \"./parse-markdown\";\nimport type { SectionMode, SectionedResult } from \"./types\";\n\nfunction normalizeText(value: unknown): string {\n if (value == null) {\n return \"\";\n }\n if (typeof value === \"string\") {\n return value;\n }\n if (typeof value === \"number\" || typeof value === \"boolean\") {\n return String(value);\n }\n try {\n return JSON.stringify(value);\n } catch {\n return String(value);\n 
}\n}\n\nfunction buildPerKeyItems(\n data: Record<string, unknown> | null,\n mode: WordCounterMode,\n options: WordCounterOptions,\n): Array<{ name: string; source: \"frontmatter\"; result: WordCounterResult }> {\n if (!data || typeof data !== \"object\" || Array.isArray(data)) {\n return [];\n }\n\n return Object.entries(data).map(([key, value]) => {\n const valueText = normalizeText(value);\n const text = valueText ? `${key}: ${valueText}` : key;\n return {\n name: key,\n source: \"frontmatter\",\n result: wordCounter(text, options),\n };\n });\n}\n\nfunction buildSingleItem(\n name: string,\n text: string,\n mode: WordCounterMode,\n options: WordCounterOptions,\n source: \"frontmatter\" | \"content\",\n) {\n return [{ name, source, result: wordCounter(text, options) }];\n}\n\nfunction sumTotals(items: Array<{ result: WordCounterResult }>): number {\n return items.reduce((sum, item) => sum + item.result.total, 0);\n}\n\nexport function countSections(\n input: string,\n section: SectionMode,\n options: WordCounterOptions = {},\n): SectionedResult {\n const mode: WordCounterMode = options.mode ?? \"chunk\";\n if (section === \"all\") {\n const result = wordCounter(input, options);\n return {\n section,\n total: result.total,\n frontmatterType: null,\n items: [{ name: \"all\", source: \"content\", result }],\n };\n }\n\n const parsed = parseMarkdown(input);\n const frontmatterText = parsed.frontmatter ?? \"\";\n const contentText = parsed.content ?? 
\"\";\n\n let items: Array<{ name: string; source: \"frontmatter\" | \"content\"; result: WordCounterResult }> = [];\n\n if (section === \"frontmatter\") {\n items = buildSingleItem(\"frontmatter\", frontmatterText, mode, options, \"frontmatter\");\n } else if (section === \"content\") {\n items = buildSingleItem(\"content\", contentText, mode, options, \"content\");\n } else if (section === \"split\") {\n items = [\n ...buildSingleItem(\"frontmatter\", frontmatterText, mode, options, \"frontmatter\"),\n ...buildSingleItem(\"content\", contentText, mode, options, \"content\"),\n ];\n } else if (section === \"per-key\") {\n items = buildPerKeyItems(parsed.data, mode, options);\n } else if (section === \"split-per-key\") {\n items = [\n ...buildPerKeyItems(parsed.data, mode, options),\n ...buildSingleItem(\"content\", contentText, mode, options, \"content\"),\n ];\n }\n\n return {\n section,\n total: sumTotals(items),\n frontmatterType: parsed.frontmatterType,\n items,\n };\n}\n","export function showSingularOrPluralWord(count: number, word: string): string {\n return `${count} ${word}${count === 1 ? 
\"\" : \"s\"}`;\n}\n","import wordCounter, {\n countCharsForLocale,\n countWordsForLocale,\n segmentTextByLocale,\n} from \"./wc\";\nimport { parseMarkdown, countSections } from \"./markdown\";\nimport { showSingularOrPluralWord } from \"./utils\";\n\nconst cjsExports = Object.assign(wordCounter, {\n default: wordCounter,\n wordCounter,\n countCharsForLocale,\n countWordsForLocale,\n segmentTextByLocale,\n parseMarkdown,\n countSections,\n showSingularOrPluralWord,\n});\n\nexport = cjsExports;\n"],"mappings":";;;AAAA,MAAM,iCAAiB,IAAI,KAA6B;AACxD,MAAM,yCAAyB,IAAI,KAA6B;AAEhE,SAAgB,aAAa,QAAgC;CAC3D,MAAM,SAAS,eAAe,IAAI,OAAO;AACzC,KAAI,OACF,QAAO;CAET,MAAM,YAAY,IAAI,KAAK,UAAU,QAAQ,EAAE,aAAa,QAAQ,CAAC;AACrE,gBAAe,IAAI,QAAQ,UAAU;AACrC,QAAO;;AAGT,SAAgB,qBAAqB,QAAgC;CACnE,MAAM,SAAS,uBAAuB,IAAI,OAAO;AACjD,KAAI,OACF,QAAO;CAET,MAAM,YAAY,IAAI,KAAK,UAAU,QAAQ,EAAE,aAAa,YAAY,CAAC;AACzE,wBAAuB,IAAI,QAAQ,UAAU;AAC7C,QAAO;;AAGT,SAAS,oBAA6B;AACpC,QAAO,OAAO,SAAS,eAAe,OAAO,KAAK,cAAc;;AAGlE,SAAgB,oBAAoB,MAAc,QAAwB;CACxE,MAAM,YAAY,aAAa,OAAO;CACtC,IAAI,QAAQ;AACZ,MAAK,MAAM,WAAW,UAAU,QAAQ,KAAK,CAC3C,KAAI,QAAQ,WACV;AAGJ,QAAO;;AAGT,SAAgB,oBAAoB,MAAc,QAAwB;AACxE,KAAI,CAAC,mBAAmB,CACtB,QAAO,MAAM,KAAK,KAAK,CAAC;CAE1B,MAAM,YAAY,qBAAqB,OAAO;CAC9C,IAAI,QAAQ;AACZ,MAAK,MAAM,YAAY,UAAU,QAAQ,KAAK,CAC5C;AAEF,QAAO;;;;;AC7CT,MAAM,aAAa;AACnB,MAAM,yBAAyB;AAC/B,MAAM,mBAAmB;AACzB,MAAM,cAAc;AACpB,MAAM,mBAAmB;AAEzB,SAAgB,0BAA6C;AAC3D,QAAO;EACL,OAAO,EAAE;EACT,SAAS,EAAE;EACX,aAAa,EAAE;EACf,QAAQ;GACN,OAAO;GACP,SAAS;GACT,aAAa;GACd;EACF;;AAGH,SAAgB,WACd,YACA,UACA,SACM;AACN,KAAI,aAAa,SAAS;AACxB,aAAW,MAAM,KAAK,QAAQ;AAC9B,aAAW,OAAO,SAAS;AAC3B;;AAEF,KAAI,aAAa,UAAU;AACzB,aAAW,QAAQ,KAAK,QAAQ;AAChC,aAAW,OAAO,WAAW;AAC7B;;AAEF,YAAW,YAAY,KAAK,QAAQ;AACpC,YAAW,OAAO,eAAe;;AAGnC,SAAgB,uBACd,SAC2C;CAC3C,MAAM,4BAA4B,QAAQ,SAAS,IAAS;AAC5D,KACE,iBAAiB,KAAK,QAAQ,IAC9B,uBAAuB,KAAK,QAAQ,IACnC,6BAA6B,WAAW,KAAK,QAAQ,CAEtD,QAAO;AAET,KAAI,YAAY,KAAK,QAAQ,CAC3B,QAAO;AAET,KAAI,iBAAiB,KAAK,QAAQ,CAChC,QAAO;AAET,QAAO;;AAGT,SAAgB,wBACd,QACA,QACmB;AACnB
,KAAI,OAAO,OAAO,QAAQ,GAAG;AAC3B,SAAO,MAAM,KAAK,GAAG,OAAO,MAAM;AAClC,SAAO,OAAO,SAAS,OAAO,OAAO;;AAEvC,KAAI,OAAO,OAAO,UAAU,GAAG;AAC7B,SAAO,QAAQ,KAAK,GAAG,OAAO,QAAQ;AACtC,SAAO,OAAO,WAAW,OAAO,OAAO;;AAEzC,KAAI,OAAO,OAAO,cAAc,GAAG;AACjC,SAAO,YAAY,KAAK,GAAG,OAAO,YAAY;AAC9C,SAAO,OAAO,eAAe,OAAO,OAAO;;AAE7C,QAAO;;;;;AC7DT,SAAgB,aACd,OACA,iBACe;CACf,MAAM,YAAY,aAAa,MAAM,OAAO;CAC5C,MAAM,WAAqB,EAAE;CAC7B,MAAM,WAAqC,kBACvC,yBAAyB,GACzB;AACJ,MAAK,MAAM,QAAQ,UAAU,QAAQ,MAAM,KAAK,CAC9C,KAAI,KAAK,WACP,UAAS,KAAK,KAAK,QAAQ;UAClB,mBAAmB,UAAU;EACtC,MAAM,WAAW,uBAAuB,KAAK,QAAQ;AACrD,MAAI,SACF,YAAW,UAAU,UAAU,KAAK,QAAQ;;AAIlD,QAAO;EACL,QAAQ,MAAM;EACd,MAAM,MAAM;EACZ;EACA,OAAO,SAAS;EAChB,UAAU,YAAY;EACvB;;AAGH,SAAgB,iBACd,OACA,iBACc;CACd,MAAM,YAAY,aAAa,MAAM,OAAO;CAC5C,MAAM,WAAqC,kBACvC,yBAAyB,GACzB;CACJ,IAAI,QAAQ;AAEZ,MAAK,MAAM,QAAQ,UAAU,QAAQ,MAAM,KAAK,EAAE;AAChD,MAAI,KAAK,YAAY;AACnB,YAAS,oBAAoB,KAAK,SAAS,MAAM,OAAO;AACxD;;AAGF,MAAI,mBAAmB,UAAU;GAC/B,MAAM,WAAW,uBAAuB,KAAK,QAAQ;AACrD,OAAI,UAAU;AACZ,eAAW,UAAU,UAAU,KAAK,QAAQ;AAC5C,aAAS,oBAAoB,KAAK,SAAS,MAAM,OAAO;;;;AAK9D,QAAO;EACL,QAAQ,MAAM;EACd,MAAM,MAAM;EACZ;EACA,UAAU,YAAY;EACvB;;AAGH,SAAgB,kBACd,QACsB;CACtB,MAAM,QAAkB,EAAE;CAC1B,MAAM,sBAAM,IAAI,KAAiC;AAEjD,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,WAAW,IAAI,IAAI,MAAM,OAAO;AACtC,MAAI,UAAU;AACZ,YAAS,SAAS,MAAM;AACxB,YAAS,SAAS,KAAK,GAAG,MAAM,SAAS;AACzC;;AAGF,QAAM,KAAK,MAAM,OAAO;AACxB,MAAI,IAAI,MAAM,QAAQ;GACpB,QAAQ,MAAM;GACd,OAAO,MAAM;GACb,UAAU,CAAC,GAAG,MAAM,SAAS;GAC9B,CAAC;;AAGJ,QAAO,MAAM,KAAK,WAAW,IAAI,IAAI,OAAO,CAAE;;;;;AChGhD,MAAM,eAAgD;CACpD,OAAO;CACP,QAAQ;CACR,UAAU;CACV,SAAS;CACT,KAAK;CACL,WAAW;CACX,SAAS;CACT,OAAO;CACP,MAAM;CACN,OAAO;CACP,WAAW;CACX,YAAY;CACb;AAED,SAAgB,cAAc,OAA+C;AAC3E,KAAI,CAAC,MACH,QAAO;AAGT,QAAO,aADY,MAAM,MAAM,CAAC,aAAa,KACV;;AAGrC,SAAgB,YACd,OACA,WAA4B,SACX;AACjB,QAAO,cAAc,MAAM,IAAI;;;;;AC7BjC,MAAa,iBAAiB;AAM9B,MAAM,QAAQ;CACZ,UAAU;CACV,UAAU;CACV,QAAQ;CACR,KAAK;CACL,OAAO;CACP,QAAQ;CACR,UAAU;CACV,YAAY;CACZ,MAAM;CACP;AAED,MAAM,mBAA6D;CACjE;EAAE,QAAQ;EAAM,OAAO;EAAa;CACpC;EAAE,QAAQ;EAAM,OAAO;
EAAU;CACjC;EAAE,QAAQ;EAAM,OAAO;EAAU;CACjC;EAAE,QAAQ;EAAM,OAAO;EAAU;CAClC;AAED,MAAM,eAAe,IAAI,IAAY,CACnC,gBACA,GAAG,iBAAiB,KAAK,SAAS,KAAK,OAAO,CAC/C,CAAC;AAEF,SAAgB,cAAc,QAAyB;AACrD,QAAO,aAAa,IAAI,OAAO;;AAGjC,SAAS,kBAAkB,MAAsB;AAC/C,MAAK,MAAM,QAAQ,iBACjB,KAAI,KAAK,MAAM,KAAK,KAAK,CACvB,QAAO,KAAK;AAGhB,QAAO;;AAGT,SAAgB,oBACd,MACA,gBACA,UAA+B,EAAE,EAClB;AACf,KAAI,MAAM,SAAS,KAAK,KAAK,IAAI,MAAM,SAAS,KAAK,KAAK,CACxD,QAAO;AAET,KAAI,MAAM,OAAO,KAAK,KAAK,CACzB,QAAO;AAET,KAAI,MAAM,OAAO,KAAK,KAAK,CACzB,QAAO;AAET,KAAI,MAAM,SAAS,KAAK,KAAK,CAC3B,QAAO;AAET,KAAI,MAAM,WAAW,KAAK,KAAK,CAC7B,QAAO;AAET,KAAI,MAAM,KAAK,KAAK,KAAK,CACvB,QAAO;AAGT,KAAI,MAAM,IAAI,KAAK,KAAK,EAAE;AACxB,MAAI,kBAAkB,eAAe,WAAW,KAAK,CACnD,QAAO;AAET,SAAO;;AAGT,KAAI,MAAM,MAAM,KAAK,KAAK,EAAE;EAC1B,MAAM,eAAe,kBAAkB,KAAK;AAC5C,MAAI,iBAAiB,eACnB,QAAO;AAET,MAAI,kBAAkB,cAAc,eAAe,IAAI,mBAAmB,eACxE,QAAO;AAET,MAAI,QAAQ,gBACV,QAAO,QAAQ;AAEjB,SAAO;;AAGT,QAAO;;;;;AChFT,SAAgB,oBACd,MACA,UAA+B,EAAE,EAClB;CACf,MAAM,SAAwB,EAAE;CAEhC,IAAI,gBAAwB;CAC5B,IAAI,SAAS;CACb,IAAI,kBAAkB;AAEtB,MAAK,MAAM,QAAQ,MAAM;EACvB,MAAM,WAAW,oBAAoB,MAAM,eAAe,QAAQ;EAClE,MAAM,eAAe,YAAY;AAGjC,MAAI,WAAW,IAAI;AACjB,mBAAgB;AAChB,YAAS;AACT,qBAAkB,aAAa;AAC/B;;AAGF,MAAI,aAAa,QAAQ,CAAC,iBAAiB;AACzC,mBAAgB;AAChB,aAAU;AACV,qBAAkB;AAClB;;AAGF,MAAI,iBAAiB,iBAAiB,aAAa,MAAM;AACvD,OAAI,kBAAkB,kBAAkB,cAAc,aAAa,EAAE;AACnE,oBAAgB;AAChB,cAAU;AACV,sBAAkB;AAClB;;AAGF,UAAO,KAAK;IAAE,QAAQ;IAAe,MAAM;IAAQ,CAAC;AACpD,mBAAgB;AAChB,YAAS;AACT,qBAAkB;AAClB;;AAGF,YAAU;AACV,MAAI,aAAa,KACf,mBAAkB;;AAItB,KAAI,OAAO,SAAS,EAClB,QAAO,KAAK;EAAE,QAAQ;EAAe,MAAM;EAAQ,CAAC;AAGtD,QAAO,oBAAoB,OAAO;;AAGpC,SAAS,oBAAoB,QAAsC;AACjE,KAAI,OAAO,WAAW,EACpB,QAAO;CAGT,MAAM,SAAwB,EAAE;CAEhC,IAAI,OAAO,OAAO;AAElB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AACrB,MAAI,MAAM,WAAW,KAAK,OACxB,QAAO;GAAE,QAAQ,KAAK;GAAQ,MAAM,KAAK,OAAO,MAAM;GAAM;OACvD;AACL,UAAO,KAAK,KAAK;AACjB,UAAO;;;AAIX,QAAO,KAAK,KAAK;AACjB,QAAO;;;;;AC5DT,SAAgB,YACd,MACA,UAA8B,EAAE,EACb;CACnB,MAAM,OAAwB,YAAY,QAAQ,MAAM,QAAQ;CAChE,MAAM,kBAAkB
,QAAQ,QAAQ,SAAS;CACjD,MAAM,SAAS,oBAAoB,MAAM,EAAE,iBAAiB,QAAQ,iBAAiB,CAAC;AAEtF,KAAI,SAAS,QAAQ;EACnB,MAAMA,aAAW,OAAO,KAAK,UAAU,iBAAiB,OAAO,gBAAgB,CAAC;AAQhF,SAAO;GACL,OARYA,WAAS,QAAQ,KAAK,UAAU,MAAM,MAAM,OAAO,EAAE;GASjE,WAAW;IACT;IACA,OAV2BA,WAAS,KAAK,WAAW;KACtD,QAAQ,MAAM;KACd,MAAM,MAAM;KACZ,OAAO,MAAM;KACb,UAAU,MAAM;KACjB,EAAE;IAMA;GACF;;CAGH,MAAM,WAAW,OAAO,KAAK,UAAU,aAAa,OAAO,gBAAgB,CAAC;CAC5E,MAAM,QAAQ,SAAS,QAAQ,KAAK,UAAU;EAC5C,IAAI,aAAa,MAAM;AACvB,MAAI,mBAAmB,MAAM,SAC3B,eACE,MAAM,SAAS,OAAO,QACtB,MAAM,SAAS,OAAO,UACtB,MAAM,SAAS,OAAO;AAE1B,SAAO,MAAM;IACZ,EAAE;AAEL,KAAI,SAAS,WAQX,QAAO;EACL;EACA,WAAW;GACT;GACA,OAX+B,SAAS,KAAK,WAAW;IAC1D,QAAQ,MAAM;IACd,MAAM,MAAM;IACZ,OAAO,MAAM;IACb,UAAU,MAAM;IAChB,UAAU,MAAM;IACjB,EAAE;GAMA;EACF;AAGH,KAAI,SAAS,YAGX,QAAO;EACL;EACA,WAAW;GACT;GACA,OANU,kBAAkB,SAAS;GAOrC,UANa,yBAAyB,UAAU,gBAAgB;GAOjE;EACF;AAUH,QAAO;EACL;EACA,WAAW;GACT;GACA,OAX4B,SAAS,KAAK,WAAW;IACvD,QAAQ,MAAM;IACd,MAAM,MAAM;IACZ,OAAO,MAAM;IACb,UAAU,MAAM;IACjB,EAAE;GAOA;EACF;;AAGH,SAAS,yBACP,UACA,SAC+B;AAC/B,KAAI,CAAC,QACH;CAEF,MAAM,aAAa,yBAAyB;AAC5C,MAAK,MAAM,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,SACT;AAEF,0BAAwB,YAAY,MAAM,SAAS;;AAErD,QAAO;;;;;ACzHT,iBAAe;;;;ACFf,SAAgB,qBACd,QACA,KAC2B;CAC3B,MAAM,WAAW,OAAO;AACxB,KAAI,MAAM,QAAQ,SAAS,CACzB,QAAO;CAET,MAAM,OAAkC,EAAE;AAC1C,QAAO,OAAO;AACd,QAAO;;AAGT,SAAgB,mBAAmB,QAAuC;AACxE,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,EAAE;AACjD,MAAI,CAAC,MAAM,QAAQ,MAAM,CACvB;AASF,SAAO,OAPW,MACf,KAAK,UACJ,OAAO,QAAQ,MAAM,CAClB,KAAK,CAAC,UAAU,gBAAgB,GAAG,SAAS,GAAG,aAAa,CAC5D,KAAK,KAAK,CACd,CACA,KAAK,MAAM;;;;;;ACxBlB,SAAS,eAAe,KAAqB;CAC3C,MAAM,UAAU,IAAI,MAAM;AAC1B,KACG,QAAQ,WAAW,KAAK,IAAI,QAAQ,SAAS,KAAK,IAClD,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,CAEjD,QAAO,QAAQ,MAAM,GAAG,GAAG;AAE7B,QAAO;;AAGT,SAAgB,iBAAiB,KAA4B;CAC3D,MAAM,UAAU,IAAI,MAAM;AAC1B,KAAI,CAAC,QACH,QAAO;AAGT,KACG,QAAQ,WAAW,KAAK,IAAI,QAAQ,SAAS,KAAK,IAClD,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,EACjD;EACA,MAAM,WAAW,eAAe,QAAQ;AACxC,SAAO,WAAW,WAAW;;CAG/B,MAAM,WAAW,QAAQ,MAAM,IAAI,CAAC,KAAK,YAAY,QAAQ,MAAM,CAAC;AACpE
,KAAI,SAAS,MAAM,YAAY,CAAC,QAAQ,CACtC,QAAO;AAET,QAAO,SAAS,KAAK,IAAI;;;;;AC7B3B,SAAgB,mBAAmB,MAAsB;CACvD,IAAI,WAAuC;CAC3C,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,GAAG;EACvC,MAAM,OAAO,KAAK,MAAM;AAExB,MAAI,UAAU;AACZ,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,QAAQ,aAAa,UAAU;AAC1C,cAAU;AACV;;AAGF,OAAI,aAAa,YAAY,SAAS,MAAM;AAC1C,eAAW;AACX;;AAGF,OAAI,aAAa,YAAY,SAAS,KAAK;AACzC,eAAW;AACX;;AAGF;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,cAAW;AACX;;AAGF,MAAI,SAAS,IACX,QAAO,KAAK,MAAM,GAAG,EAAE,CAAC,SAAS;;AAIrC,QAAO;;AAGT,SAAS,cAAc,OAAuB;AAC5C,QAAO,MACJ,QAAQ,SAAS,KAAK,CACtB,QAAQ,QAAQ,KAAK,CACrB,QAAQ,QAAQ,KAAK,CACrB,QAAQ,QAAQ,IAAK,CACrB,QAAQ,QAAQ,KAAK;;AAG1B,SAAgB,mBAAmB,OAA8B;AAC/D,KAAI,MAAM,WAAW,SAAM,IAAI,MAAM,SAAS,SAAM,CAElD,QAAO,cADO,MAAM,MAAM,GAAG,GAAG,CACL;AAG7B,KAAI,MAAM,WAAW,MAAM,IAAI,MAAM,SAAS,MAAM,CAClD,QAAO,MAAM,MAAM,GAAG,GAAG;AAG3B,KAAI,MAAM,WAAW,KAAK,IAAI,MAAM,SAAS,KAAK,CAChD,QAAO,cAAc,MAAM,MAAM,GAAG,GAAG,CAAC;AAG1C,KAAI,MAAM,WAAW,IAAI,IAAI,MAAM,SAAS,IAAI,CAC9C,QAAO,MAAM,MAAM,GAAG,GAAG;AAG3B,QAAO;;;;;ACxET,SAAS,eAAe,KAA+C;CACrE,MAAM,QAAQ,IAAI,MAAM;AACxB,KAAI,CAAC,MACH,QAAO;CAGT,MAAM,gBAAgB,mBAAmB,MAAM;AAC/C,KAAI,kBAAkB,KACpB,QAAO;AAGT,KAAI,UAAU,OACZ,QAAO;AAGT,KAAI,UAAU,QACZ,QAAO;AAGT,KAAI,wCAAwC,KAAK,MAAM,CACrD,QAAO,OAAO,MAAM;AAGtB,KAAI,qBAAqB,KAAK,MAAM,CAClC,QAAO;AAGT,QAAO;;AAGT,SAAS,WAAW,KAAsD;CACxE,MAAM,QAAQ,IAAI,MAAM;AACxB,KAAI,CAAC,MAAM,WAAW,IAAI,IAAI,CAAC,MAAM,SAAS,IAAI,CAChD,QAAO;CAGT,MAAM,QAAQ,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM;AACvC,KAAI,CAAC,MACH,QAAO,EAAE;CAGX,MAAM,QAA0C,EAAE;CAClD,IAAI,UAAU;CACd,IAAI,WAAuC;CAC3C,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;EACxC,MAAM,OAAO,MAAM,MAAM;AAEzB,MAAI,UAAU;AACZ,cAAW;AACX,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,QAAQ,aAAa,UAAU;AAC1C,cAAU;AACV;;AAGF,OAAI,aAAa,YAAY,SAAS,KACpC,YAAW;YACF,aAAa,YAAY,SAAS,IAC3C,YAAW;AAEb;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;GAChB,MAAM,OAAO,eAAe,QAAQ;AACpC,OAAI,SAAS,KACX,
QAAO;AAET,SAAM,KAAK,KAAK;AAChB,aAAU;AACV;;AAGF,aAAW;;CAGb,MAAM,YAAY,eAAe,QAAQ;AACzC,KAAI,cAAc,KAChB,QAAO;AAET,OAAM,KAAK,UAAU;AAErB,QAAO;;AAGT,SAAS,iBAAiB,KAA+C;CACvE,MAAM,UAAU,IAAI,MAAM;AAC1B,KAAI,CAAC,QAAQ,WAAW,IAAI,IAAI,CAAC,QAAQ,SAAS,IAAI,CACpD,QAAO;CAGT,MAAM,QAAQ,QAAQ,MAAM,GAAG,GAAG,CAAC,MAAM;AACzC,KAAI,CAAC,MACH,QAAO,EAAE;CAGX,MAAM,QAAkB,EAAE;CAC1B,IAAI,UAAU;CACd,IAAI,WAAuC;CAC3C,IAAI,UAAU;CACd,IAAI,eAAe;CACnB,IAAI,aAAa;AAEjB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;EACxC,MAAM,OAAO,MAAM,MAAM;AAEzB,MAAI,UAAU;AACZ,cAAW;AACX,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,QAAQ,aAAa,UAAU;AAC1C,cAAU;AACV;;AAGF,OAAI,aAAa,YAAY,SAAS,KACpC,YAAW;YACF,aAAa,YAAY,SAAS,IAC3C,YAAW;AAEb;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,mBAAgB;AAChB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,OAAI,eAAe,EACjB,iBAAgB;AAElB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,iBAAc;AACd,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,OAAI,aAAa,EACf,eAAc;AAEhB,cAAW;AACX;;AAGF,MAAI,SAAS,OAAO,iBAAiB,KAAK,eAAe,GAAG;AAC1D,SAAM,KAAK,QAAQ;AACnB,aAAU;AACV;;AAGF,aAAW;;AAGb,KAAI,QAAQ,MAAM,CAChB,OAAM,KAAK,QAAQ;CAGrB,MAAM,SAAoC,EAAE;AAC5C,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,iBAAiB,KAAK,QAAQ,IAAI;AACxC,MAAI,mBAAmB,GACrB,QAAO;EAET,MAAM,MAAM,iBAAiB,KAAK,MAAM,GAAG,eAAe,CAAC;AAC3D,MAAI,CAAC,IACH,QAAO;EAET,MAAM,WAAW,KAAK,MAAM,iBAAiB,EAAE,CAAC,MAAM;AACtD,MAAI,CAAC,SACH,QAAO;AAET,MAAI,SAAS,WAAW,IAAI,CAC1B,QAAO;EAET,MAAM,aAAa,eAAe,SAAS;AAC3C,MAAI,eAAe,KACjB,QAAO;AAET,MAAI,OAAO,eAAe,YAAY,CAAC,MAAM,QAAQ,WAAW,CAC9D,QAAO;AAET,SAAO,OAAO;;AAGhB,QAAO;;AAGT,SAAgB,eAAe,OAAiC;AAC9D,KAAI,CAAC,MACH,QAAO;CAGT,MAAM,UAAU,MAAM,MAAM;AAC5B,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,CAClD,QAAO,iBAAiB,QAAQ;CAGlC,MAAM,QAAQ,WAAW,QAAQ;AACjC,KAAI,MACF,QAAO;AAGT,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,CAClD,QAAO;AAGT,QAAO,eAAe,QAAQ;;AAGhC,SAAgB,YAAY,OAAwB;AAClD,KAAI,SAAS,KACX,QAAO;AAET,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,MAAM,KAAK,SAAS,OAAO,KAAK,CAAC,CAAC,KAAK,KAAK;AAErD,QAAO,OAAO,MAAM;;;;;AC/PtB,SAAgB,qB
AAqB,aAAqD;CACxF,MAAM,SAAkC,EAAE;CAC1C,MAAM,QAAQ,YAAY,MAAM,KAAK;CACrC,IAAI,cAAc;CAClB,IAAI,cAA8C;CAClD,IAAI,oBAAoB;AAExB,MAAK,IAAI,QAAQ,GAAG,QAAQ,MAAM,QAAQ,SAAS,GAAG;EACpD,MAAM,UAAU,MAAM,UAAU;EAChC,MAAM,cAAc,QAAQ,MAAM;AAClC,MAAI,CAAC,eAAe,YAAY,WAAW,IAAI,CAC7C;AAGF,MAAI,YAAY,WAAW,KAAK,EAAE;GAChC,MAAM,QAAQ,YAAY,MAAM,mBAAmB;AACnD,OAAI,CAAC,MACH,QAAO;GAET,MAAM,kBAAkB,iBAAiB,MAAM,MAAM,GAAG;AACxD,OAAI,CAAC,gBACH,QAAO;GAET,MAAM,OAAO,qBAAqB,QAAQ,gBAAgB;GAC1D,MAAM,WAAoC,EAAE;AAC5C,QAAK,KAAK,SAAS;AACnB,iBAAc;AACd,iBAAc;AACd,uBAAoB;AACpB;;EAGF,MAAM,aAAa,YAAY,MAAM,gBAAgB;AACrD,MAAI,YAAY;GACd,MAAM,kBAAkB,iBAAiB,WAAW,MAAM,GAAG;AAC7D,OAAI,CAAC,gBACH,QAAO;AAET,iBAAc;AACd,uBAAoB;AACpB,iBAAc;AACd;;EAGF,MAAM,iBAAiB,YAAY,KAAK,QAAQ,GAAG,UAAU,mBAAmB,QAAQ;EACxF,MAAM,iBAAiB,eAAe,QAAQ,IAAI;AAClD,MAAI,mBAAmB,GACrB,QAAO;EAIT,MAAM,MAAM,iBADG,eAAe,MAAM,GAAG,eAAe,CAClB;EACpC,IAAI,WAAW,eAAe,MAAM,iBAAiB,EAAE,CAAC,MAAM;AAC9D,MAAI,CAAC,IACH,QAAO;EAGT,MAAM,kBAAkB,SAAS,WAAW,SAAM,GAAG,WAAQ,SAAS,WAAW,MAAM,GAAG,QAAQ;AAClG,MAAI,iBAAiB;GACnB,MAAM,eAAe,SAAS,QAAQ,iBAAiB,gBAAgB,OAAO;AAC9E,OAAI,iBAAiB,IAAI;IAEvB,MAAM,gBAAgB,mBADR,SAAS,MAAM,eAAe,gBAAgB,OAAO,CACpB;AAC/C,eAAW,GAAG,SAAS,MAAM,GAAG,eAAe,gBAAgB,OAAO,GAAG;UACpE;IACL,MAAM,YAAY;IACpB,IAAI,WAAW;IACf,IAAI,SAAS;AACb,WAAO,QAAQ,IAAI,MAAM,QAAQ;AAC/B,cAAS;KACT,MAAM,WAAW,MAAM,UAAU;AACjC,iBAAY,KAAK;AACjB,0BAAI,IAAI,OAAO,GAAG,UAAU,OAAO,EAAC,KAAK,SAAS,EAAE;AAClD,eAAS;AACT;;;AAGJ,QAAI,CAAC,OACH,QAAO;AAET,eAAW;;;EAIb,MAAM,aAAa,eAAe,SAAS;AAC3C,MAAI,eAAe,KACjB,QAAO;EAGT,MAAM,UAAU,cAAc,GAAG,YAAY,GAAG,QAAQ;AACxD,MAAI,OAAO,eAAe,YAAY,CAAC,MAAM,QAAQ,WAAW,EAAE;AAChE,QAAK,MAAM,CAAC,WAAW,gBAAgB,OAAO,QAAQ,WAAW,EAAE;IACjE,MAAM,WAAW,oBAAoB,GAAG,IAAI,GAAG,cAAc,GAAG,QAAQ,GAAG;AAC3E,QAAI,qBAAqB,YACvB,aAAY,YAAY,YAAY,YAAY;QAEhD,QAAO,YAAY,YAAY,YAAY;;AAG/C;;AAGF,MAAI,qBAAqB,aAAa;AACpC,eAAY,OAAO,YAAY,WAAW;AAC1C;;AAGF,SAAO,WAAW,YAAY,WAAW;;AAG3C,oBAAmB,OAAO;AAE1B,QAAO;;;;;ACjHT,MAAM,gBAAiD;CACrD,OAAO;CACP,OAAO;CACP,OAAO;CACR;AAED,SAAS,kBAAkB,OAAuB;AAChD,QAAO,MAAM,QAAQ,SAAS,KAAK;;AAGrC,SAA
S,SAAS,MAAsB;AACtC,QAAO,KAAK,WAAW,IAAS,GAAG,KAAK,MAAM,EAAE,GAAG;;AAGrD,SAAS,aAAa,MAAsC;CAC1D,MAAM,QAAQ,KAAK,MAAM,iCAAiC;AAC1D,KAAI,CAAC,MACH,QAAO;AAET,QAAO,cAAc,MAAM,MAAM,OAAO;;AAG1C,SAAS,iBAAiB,aAAqB,MAA8D;AAC3G,KAAI,CAAC,KACH,QAAO;AAGT,KAAI,SAAS,OACX,KAAI;AACF,SAAO,KAAK,MAAM,YAAY;SACxB;AACN,SAAO;;AAIX,KAAI,SAAS,QAAQ;EACnB,MAAM,8BAAoB,aAAa,EAAE,cAAc,OAAO,CAAC;AAC/D,MAAI,IAAI,OAAO,SAAS,EACtB,QAAO;EAET,MAAM,OAAO,IAAI,QAAQ;AACzB,MAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,MAAM,QAAQ,KAAK,CAC1D,QAAO;AAET,SAAO;;AAGT,KAAI,SAAS,OACX,QAAO,qBAAqB,YAAY;AAG1C,QAAO;;AAGT,SAAS,iBAAiB,MAAc,YAAmE;CACzG,IAAI,QAAQ;CACZ,IAAI,WAAW;CACf,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,YAAY,IAAI,KAAK,QAAQ,KAAK,GAAG;EAChD,MAAM,OAAO,KAAK,MAAM;AAExB,MAAI,UAAU;AACZ,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,MAAM;AACjB,cAAU;AACV;;AAGF,OAAI,SAAS,KACX,YAAW;AAGb;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,YAAS;AACT;;AAGF,MAAI,SAAS,KAAK;AAChB,YAAS;AACT,OAAI,UAAU,EAEZ,QAAO;IAAE,UADQ,KAAK,MAAM,YAAY,IAAI,EAAE;IAC3B,UAAU;IAAG;;;AAKtC,QAAO;;AAGT,SAAgB,cAAc,OAA+B;CAC3D,MAAM,aAAa,kBAAkB,MAAM;CAC3C,MAAM,QAAQ,WAAW,MAAM,KAAK;AACpC,KAAI,MAAM,WAAW,EACnB,QAAO;EAAE,aAAa;EAAM,SAAS;EAAY,MAAM;EAAM,iBAAiB;EAAM;AAGtF,OAAM,KAAK,SAAS,MAAM,MAAM,GAAG;CACnC,MAAM,uBAAuB,MAAM,KAAK,KAAK;CAE7C,MAAM,cAAc,aAAa,MAAM,MAAM,GAAG;AAChD,KAAI,CAAC,aAAa;EAEhB,MAAM,aADoB,qBAAqB,MAAM,YAAY,GAAG,MAAM,IACtC;AACpC,MAAI,qBAAqB,eAAe,IACtC,QAAO;GAAE,aAAa;GAAM,SAAS;GAAsB,MAAM;GAAM,iBAAiB;GAAM;EAGhG,MAAM,YAAY,iBAAiB,sBAAsB,UAAU;AACnE,MAAI,CAAC,UACH,QAAO;GAAE,aAAa;GAAM,SAAS;GAAsB,MAAM;GAAM,iBAAiB;GAAM;EAGhG,MAAMC,gBAAc,UAAU;EAC9B,IAAI,UAAU,qBAAqB,MAAM,UAAU,WAAW,EAAE;AAChE,MAAI,QAAQ,WAAW,KAAK,CAC1B,WAAU,QAAQ,MAAM,EAAE;EAE5B,MAAM,OAAO,iBAAiBA,eAAa,OAAO;AAClD,MAAI,CAAC,KACH,QAAO;GAAE,aAAa;GAAM,SAAS;GAAsB,MAAM;GAAM,iBAAiB;GAAM;AAGhG,SAAO;GACL;GACA;GACA;GACA,iBAAiB;GAClB;;CAGH,IAAI,eAAe;AACnB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,EACrC,KAAI,aAAa,MAAM,MAAM,GAAG,KAAK,aAAa;AAChD,iBAAe;AACf;;AAIJ,KAAI,iBAAiB,GACnB,QAAO;EAAE,aAAa;EAAM,SAAS;EAAsB,MAAM;EAAM,iBAAiB;EAAM
;CAGhG,MAAM,cAAc,MAAM,MAAM,GAAG,aAAa,CAAC,KAAK,KAAK;AAI3D,QAAO;EACL;EACA,SALc,MAAM,MAAM,eAAe,EAAE,CAAC,KAAK,KAAK;EAMtD,MALW,iBAAiB,aAAa,YAAY;EAMrD,iBAAiB;EAClB;;;;;ACnKH,SAAS,cAAc,OAAwB;AAC7C,KAAI,SAAS,KACX,QAAO;AAET,KAAI,OAAO,UAAU,SACnB,QAAO;AAET,KAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAChD,QAAO,OAAO,MAAM;AAEtB,KAAI;AACF,SAAO,KAAK,UAAU,MAAM;SACtB;AACN,SAAO,OAAO,MAAM;;;AAIxB,SAAS,iBACP,MACA,MACA,SAC2E;AAC3E,KAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,MAAM,QAAQ,KAAK,CAC1D,QAAO,EAAE;AAGX,QAAO,OAAO,QAAQ,KAAK,CAAC,KAAK,CAAC,KAAK,WAAW;EAChD,MAAM,YAAY,cAAc,MAAM;AAEtC,SAAO;GACL,MAAM;GACN,QAAQ;GACR,QAAQC,WAJG,YAAY,GAAG,IAAI,IAAI,cAAc,KAItB,QAAQ;GACnC;GACD;;AAGJ,SAAS,gBACP,MACA,MACA,MACA,SACA,QACA;AACA,QAAO,CAAC;EAAE;EAAM;EAAQ,QAAQA,WAAY,MAAM,QAAQ;EAAE,CAAC;;AAG/D,SAAS,UAAU,OAAqD;AACtE,QAAO,MAAM,QAAQ,KAAK,SAAS,MAAM,KAAK,OAAO,OAAO,EAAE;;AAGhE,SAAgB,cACd,OACA,SACA,UAA8B,EAAE,EACf;CACjB,MAAM,OAAwB,QAAQ,QAAQ;AAC9C,KAAI,YAAY,OAAO;EACrB,MAAM,SAASA,WAAY,OAAO,QAAQ;AAC1C,SAAO;GACL;GACA,OAAO,OAAO;GACd,iBAAiB;GACjB,OAAO,CAAC;IAAE,MAAM;IAAO,QAAQ;IAAW;IAAQ,CAAC;GACpD;;CAGH,MAAM,SAAS,cAAc,MAAM;CACnC,MAAM,kBAAkB,OAAO,eAAe;CAC9C,MAAM,cAAc,OAAO,WAAW;CAEtC,IAAI,QAA+F,EAAE;AAErG,KAAI,YAAY,cACd,SAAQ,gBAAgB,eAAe,iBAAiB,MAAM,SAAS,cAAc;UAC5E,YAAY,UACrB,SAAQ,gBAAgB,WAAW,aAAa,MAAM,SAAS,UAAU;UAChE,YAAY,QACrB,SAAQ,CACN,GAAG,gBAAgB,eAAe,iBAAiB,MAAM,SAAS,cAAc,EAChF,GAAG,gBAAgB,WAAW,aAAa,MAAM,SAAS,UAAU,CACrE;UACQ,YAAY,UACrB,SAAQ,iBAAiB,OAAO,MAAM,MAAM,QAAQ;UAC3C,YAAY,gBACrB,SAAQ,CACN,GAAG,iBAAiB,OAAO,MAAM,MAAM,QAAQ,EAC/C,GAAG,gBAAgB,WAAW,aAAa,MAAM,SAAS,UAAU,CACrE;AAGH,QAAO;EACL;EACA,OAAO,UAAU,MAAM;EACvB,iBAAiB,OAAO;EACxB;EACD;;;;;ACrGH,SAAgB,yBAAyB,OAAe,MAAsB;AAC5E,QAAO,GAAG,MAAM,GAAG,OAAO,UAAU,IAAI,KAAK;;;;;ACO/C,MAAM,aAAa,OAAO,OAAOC,YAAa;CAC5C,SAASA;CACT;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;iBAEO"}
1
+ {"version":3,"file":"index.cjs","names":["analyzed","total","frontmatter","wordCounter","wordCounter"],"sources":["../../src/wc/segmenter.ts","../../src/wc/non-words.ts","../../src/wc/analyze.ts","../../src/wc/mode.ts","../../src/wc/locale-detect.ts","../../src/wc/segment.ts","../../src/wc/wc.ts","../../src/wc/index.ts","../../src/markdown/toml/arrays.ts","../../src/markdown/toml/keys.ts","../../src/markdown/toml/strings.ts","../../src/markdown/toml/values.ts","../../src/markdown/toml/parse-frontmatter.ts","../../src/markdown/parse-markdown.ts","../../src/markdown/section-count.ts","../../src/utils/show-singular-or-plural-word.ts","../../src/index.cjs.ts"],"sourcesContent":["const segmenterCache = new Map<string, Intl.Segmenter>();\nconst graphemeSegmenterCache = new Map<string, Intl.Segmenter>();\n\nexport function getSegmenter(locale: string): Intl.Segmenter {\n const cached = segmenterCache.get(locale);\n if (cached) {\n return cached;\n }\n const segmenter = new Intl.Segmenter(locale, { granularity: \"word\" });\n segmenterCache.set(locale, segmenter);\n return segmenter;\n}\n\nexport function getGraphemeSegmenter(locale: string): Intl.Segmenter {\n const cached = graphemeSegmenterCache.get(locale);\n if (cached) {\n return cached;\n }\n const segmenter = new Intl.Segmenter(locale, { granularity: \"grapheme\" });\n graphemeSegmenterCache.set(locale, segmenter);\n return segmenter;\n}\n\nfunction supportsSegmenter(): boolean {\n return typeof Intl !== \"undefined\" && typeof Intl.Segmenter === \"function\";\n}\n\nexport function countWordsForLocale(text: string, locale: string): number {\n const segmenter = getSegmenter(locale);\n let count = 0;\n for (const segment of segmenter.segment(text)) {\n if (segment.isWordLike) {\n count++;\n }\n }\n return count;\n}\n\nexport function countCharsForLocale(text: string, locale: string): number {\n if (!supportsSegmenter()) {\n return Array.from(text).length;\n }\n const segmenter = getGraphemeSegmenter(locale);\n let 
count = 0;\n for (const _segment of segmenter.segment(text)) {\n count++;\n }\n return count;\n}\n","import type { NonWordCollection, WhitespaceCounts } from \"./types\";\n\nconst emojiRegex = /(?:\\p{Extended_Pictographic}|\\p{Emoji_Presentation})/u;\nconst emojiPresentationRegex = /\\p{Emoji_Presentation}/u;\nconst keycapEmojiRegex = /[0-9#*]\\uFE0F?\\u20E3/u;\nconst symbolRegex = /\\p{S}/u;\nconst punctuationRegex = /\\p{P}/u;\nconst whitespaceRegex = /\\s/u;\nconst newlineChars = new Set([\"\\n\", \"\\r\", \"\\u2028\", \"\\u2029\"]);\n\nexport function createNonWordCollection(): NonWordCollection {\n return {\n emoji: [],\n symbols: [],\n punctuation: [],\n counts: {\n emoji: 0,\n symbols: 0,\n punctuation: 0,\n },\n };\n}\n\nexport function addNonWord(\n collection: NonWordCollection,\n category: \"emoji\" | \"symbol\" | \"punctuation\",\n segment: string,\n): void {\n if (category === \"emoji\") {\n collection.emoji.push(segment);\n collection.counts.emoji += 1;\n return;\n }\n if (category === \"symbol\") {\n collection.symbols.push(segment);\n collection.counts.symbols += 1;\n return;\n }\n collection.punctuation.push(segment);\n collection.counts.punctuation += 1;\n}\n\nexport function addWhitespace(\n collection: NonWordCollection,\n segment: string,\n): number {\n let whitespace = collection.whitespace;\n let count = 0;\n for (const char of segment) {\n if (char === \" \") {\n whitespace = whitespace ?? createWhitespaceCounts();\n whitespace.spaces += 1;\n count += 1;\n continue;\n }\n if (char === \"\\t\") {\n whitespace = whitespace ?? createWhitespaceCounts();\n whitespace.tabs += 1;\n count += 1;\n continue;\n }\n if (newlineChars.has(char)) {\n whitespace = whitespace ?? createWhitespaceCounts();\n whitespace.newlines += 1;\n count += 1;\n continue;\n }\n if (whitespaceRegex.test(char)) {\n whitespace = whitespace ?? createWhitespaceCounts();\n whitespace.other += 1;\n count += 1;\n }\n }\n\n if (count > 0) {\n collection.whitespace = whitespace ?? 
createWhitespaceCounts();\n collection.counts.whitespace = (collection.counts.whitespace ?? 0) + count;\n }\n\n return count;\n}\n\nexport function classifyNonWordSegment(\n segment: string,\n): \"emoji\" | \"symbol\" | \"punctuation\" | null {\n const hasEmojiVariationSelector = segment.includes(\"\\uFE0F\");\n if (\n keycapEmojiRegex.test(segment) ||\n emojiPresentationRegex.test(segment) ||\n (hasEmojiVariationSelector && emojiRegex.test(segment))\n ) {\n return \"emoji\";\n }\n if (symbolRegex.test(segment)) {\n return \"symbol\";\n }\n if (punctuationRegex.test(segment)) {\n return \"punctuation\";\n }\n return null;\n}\n\nexport function mergeNonWordCollections(\n target: NonWordCollection,\n source: NonWordCollection,\n): NonWordCollection {\n if (source.counts.emoji > 0) {\n target.emoji.push(...source.emoji);\n target.counts.emoji += source.counts.emoji;\n }\n if (source.counts.symbols > 0) {\n target.symbols.push(...source.symbols);\n target.counts.symbols += source.counts.symbols;\n }\n if (source.counts.punctuation > 0) {\n target.punctuation.push(...source.punctuation);\n target.counts.punctuation += source.counts.punctuation;\n }\n if (source.counts.whitespace && source.counts.whitespace > 0 && source.whitespace) {\n const whitespace = target.whitespace ?? createWhitespaceCounts();\n whitespace.spaces += source.whitespace.spaces;\n whitespace.tabs += source.whitespace.tabs;\n whitespace.newlines += source.whitespace.newlines;\n whitespace.other += source.whitespace.other;\n target.whitespace = whitespace;\n target.counts.whitespace = (target.counts.whitespace ?? 
0) + source.counts.whitespace;\n }\n return target;\n}\n\nfunction createWhitespaceCounts(): WhitespaceCounts {\n return { spaces: 0, tabs: 0, newlines: 0, other: 0 };\n}\n","import { countCharsForLocale, getSegmenter } from \"./segmenter\";\nimport {\n addNonWord,\n addWhitespace,\n classifyNonWordSegment,\n createNonWordCollection,\n} from \"./non-words\";\nimport type {\n ChunkAnalysis,\n CollectorBreakdown,\n LocaleChunk,\n NonWordCollection,\n} from \"./types\";\n\ntype CharAnalysis = LocaleChunk & { chars: number; nonWords?: NonWordCollection };\n\nexport function analyzeChunk(\n chunk: LocaleChunk,\n collectNonWords?: boolean,\n includeWhitespace?: boolean,\n): ChunkAnalysis {\n const segmenter = getSegmenter(chunk.locale);\n const segments: string[] = [];\n const nonWords: NonWordCollection | null = collectNonWords\n ? createNonWordCollection()\n : null;\n for (const part of segmenter.segment(chunk.text)) {\n if (part.isWordLike) {\n segments.push(part.segment);\n } else if (collectNonWords && nonWords) {\n if (includeWhitespace) {\n addWhitespace(nonWords, part.segment);\n }\n const category = classifyNonWordSegment(part.segment);\n if (category) {\n addNonWord(nonWords, category, part.segment);\n }\n }\n }\n return {\n locale: chunk.locale,\n text: chunk.text,\n segments,\n words: segments.length,\n nonWords: nonWords ?? undefined,\n };\n}\n\nexport function analyzeCharChunk(\n chunk: LocaleChunk,\n collectNonWords?: boolean,\n includeWhitespace?: boolean,\n): CharAnalysis & { wordChars: number; nonWordChars: number } {\n const segmenter = getSegmenter(chunk.locale);\n const nonWords: NonWordCollection | null = collectNonWords\n ? 
createNonWordCollection()\n : null;\n let chars = 0;\n let wordChars = 0;\n let nonWordChars = 0;\n\n for (const part of segmenter.segment(chunk.text)) {\n if (part.isWordLike) {\n const count = countCharsForLocale(part.segment, chunk.locale);\n chars += count;\n wordChars += count;\n continue;\n }\n\n if (collectNonWords && nonWords) {\n let whitespaceCount = 0;\n if (includeWhitespace) {\n whitespaceCount = addWhitespace(nonWords, part.segment);\n }\n const category = classifyNonWordSegment(part.segment);\n if (category) {\n addNonWord(nonWords, category, part.segment);\n }\n if (category || whitespaceCount > 0) {\n const count = countCharsForLocale(part.segment, chunk.locale);\n chars += count;\n nonWordChars += count;\n }\n }\n }\n\n return {\n locale: chunk.locale,\n text: chunk.text,\n chars,\n wordChars,\n nonWordChars,\n nonWords: nonWords ?? undefined,\n };\n}\n\nexport function aggregateByLocale(\n chunks: ChunkAnalysis[]\n): CollectorBreakdown[] {\n const order: string[] = [];\n const map = new Map<string, CollectorBreakdown>();\n\n for (const chunk of chunks) {\n const existing = map.get(chunk.locale);\n if (existing) {\n existing.words += chunk.words;\n existing.segments.push(...chunk.segments);\n continue;\n }\n\n order.push(chunk.locale);\n map.set(chunk.locale, {\n locale: chunk.locale,\n words: chunk.words,\n segments: [...chunk.segments],\n });\n }\n\n return order.map((locale) => map.get(locale)!);\n}\n","import type { WordCounterMode } from \"./types\";\n\nconst MODE_ALIASES: Record<string, WordCounterMode> = {\n chunk: \"chunk\",\n chunks: \"chunk\",\n segments: \"segments\",\n segment: \"segments\",\n seg: \"segments\",\n collector: \"collector\",\n collect: \"collector\",\n colle: \"collector\",\n char: \"char\",\n chars: \"char\",\n character: \"char\",\n characters: \"char\",\n};\n\nexport function normalizeMode(input?: string | null): WordCounterMode | null {\n if (!input) {\n return null;\n }\n const normalized = 
input.trim().toLowerCase();\n return MODE_ALIASES[normalized] ?? null;\n}\n\nexport function resolveMode(\n input?: string | null,\n fallback: WordCounterMode = \"chunk\",\n): WordCounterMode {\n return normalizeMode(input) ?? fallback;\n}\n","export const DEFAULT_LOCALE = \"und-Latn\";\n\nexport interface LocaleDetectOptions {\n latinLocaleHint?: string;\n}\n\nconst regex = {\n hiragana: /\\p{Script=Hiragana}/u,\n katakana: /\\p{Script=Katakana}/u,\n hangul: /\\p{Script=Hangul}/u,\n han: /\\p{Script=Han}/u,\n latin: /\\p{Script=Latin}/u,\n arabic: /\\p{Script=Arabic}/u,\n cyrillic: /\\p{Script=Cyrillic}/u,\n devanagari: /\\p{Script=Devanagari}/u,\n thai: /\\p{Script=Thai}/u,\n};\n\nconst latinLocaleHints: Array<{ locale: string; regex: RegExp }> = [\n { locale: \"de\", regex: /[Γ€ΓΆΓΌΓ„Γ–ΓœΓŸ]/ },\n { locale: \"es\", regex: /[ñÑ¿‘]/ },\n { locale: \"pt\", regex: /[ãáÃÕ]/ },\n { locale: \"fr\", regex: /[œŒæÆ]/ },\n];\n\nconst latinLocales = new Set<string>([\n DEFAULT_LOCALE,\n ...latinLocaleHints.map((hint) => hint.locale),\n]);\n\nexport function isLatinLocale(locale: string): boolean {\n return latinLocales.has(locale);\n}\n\nfunction detectLatinLocale(char: string): string {\n for (const hint of latinLocaleHints) {\n if (hint.regex.test(char)) {\n return hint.locale;\n }\n }\n return DEFAULT_LOCALE;\n}\n\nexport function detectLocaleForChar(\n char: string,\n previousLocale?: string | null,\n options: LocaleDetectOptions = {}\n): string | null {\n if (regex.hiragana.test(char) || regex.katakana.test(char)) {\n return \"ja\";\n }\n if (regex.hangul.test(char)) {\n return \"ko\";\n }\n if (regex.arabic.test(char)) {\n return \"ar\";\n }\n if (regex.cyrillic.test(char)) {\n return \"ru\";\n }\n if (regex.devanagari.test(char)) {\n return \"hi\";\n }\n if (regex.thai.test(char)) {\n return \"th\";\n }\n\n if (regex.han.test(char)) {\n if (previousLocale && previousLocale.startsWith(\"ja\")) {\n return previousLocale;\n }\n return \"zh-Hans\";\n }\n\n if 
(regex.latin.test(char)) {\n const hintedLocale = detectLatinLocale(char);\n if (hintedLocale !== DEFAULT_LOCALE) {\n return hintedLocale;\n }\n if (previousLocale && isLatinLocale(previousLocale) && previousLocale !== DEFAULT_LOCALE) {\n return previousLocale;\n }\n if (options.latinLocaleHint) {\n return options.latinLocaleHint;\n }\n return DEFAULT_LOCALE;\n }\n\n return null;\n}\n","import {\n DEFAULT_LOCALE,\n detectLocaleForChar,\n isLatinLocale,\n type LocaleDetectOptions,\n} from \"./locale-detect\";\nimport type { LocaleChunk } from \"./types\";\n\nexport function segmentTextByLocale(\n text: string,\n options: LocaleDetectOptions = {}\n): LocaleChunk[] {\n const chunks: LocaleChunk[] = [];\n // Keep currentLocale as a non-null string to simplify type-narrowing.\n let currentLocale: string = DEFAULT_LOCALE;\n let buffer = \"\";\n let bufferHasScript = false;\n\n for (const char of text) {\n const detected = detectLocaleForChar(char, currentLocale, options);\n const targetLocale = detected ?? 
currentLocale;\n\n // If buffer is empty, this is the first character for a new chunk.\n if (buffer === \"\") {\n currentLocale = targetLocale;\n buffer = char;\n bufferHasScript = detected !== null;\n continue;\n }\n\n if (detected !== null && !bufferHasScript) {\n currentLocale = targetLocale;\n buffer += char;\n bufferHasScript = true;\n continue;\n }\n\n if (targetLocale !== currentLocale && detected !== null) {\n if (currentLocale === DEFAULT_LOCALE && isLatinLocale(targetLocale)) {\n currentLocale = targetLocale;\n buffer += char;\n bufferHasScript = true;\n continue;\n }\n // currentLocale is guaranteed to be a string here.\n chunks.push({ locale: currentLocale, text: buffer });\n currentLocale = targetLocale;\n buffer = char;\n bufferHasScript = true;\n continue;\n }\n\n buffer += char;\n if (detected !== null) {\n bufferHasScript = true;\n }\n }\n\n if (buffer.length > 0) {\n chunks.push({ locale: currentLocale, text: buffer });\n }\n\n return mergeAdjacentChunks(chunks);\n}\n\nfunction mergeAdjacentChunks(chunks: LocaleChunk[]): LocaleChunk[] {\n if (chunks.length === 0) {\n return chunks;\n }\n\n const merged: LocaleChunk[] = [];\n // We already returned for empty arrays above, so the first element is present.\n let last = chunks[0]!;\n\n for (let i = 1; i < chunks.length; i++) {\n const chunk = chunks[i]!;\n if (chunk.locale === last.locale) {\n last = { locale: last.locale, text: last.text + chunk.text };\n } else {\n merged.push(last);\n last = chunk;\n }\n }\n\n merged.push(last);\n return merged;\n}\n","import { analyzeCharChunk, analyzeChunk, aggregateByLocale } from \"./analyze\";\nimport { resolveMode } from \"./mode\";\nimport { segmentTextByLocale } from \"./segment\";\nimport { countCharsForLocale, countWordsForLocale } from \"./segmenter\";\nimport { createNonWordCollection, mergeNonWordCollections } from \"./non-words\";\nimport type {\n CharBreakdown,\n ChunkBreakdown,\n ChunkWithSegments,\n NonWordCollection,\n WordCounterMode,\n 
WordCounterOptions,\n WordCounterResult,\n} from \"./types\";\n\nexport type {\n NonWordCollection,\n WordCounterMode,\n WordCounterOptions,\n WordCounterResult,\n WordCounterBreakdown,\n} from \"./types\";\n\nexport { countCharsForLocale, countWordsForLocale, segmentTextByLocale };\n\nexport function wordCounter(\n text: string,\n options: WordCounterOptions = {}\n): WordCounterResult {\n const mode: WordCounterMode = resolveMode(options.mode, \"chunk\");\n const collectNonWords = Boolean(options.nonWords);\n const includeWhitespace = Boolean(options.includeWhitespace);\n const chunks = segmentTextByLocale(text, { latinLocaleHint: options.latinLocaleHint });\n\n if (mode === \"char\") {\n const analyzed = chunks.map((chunk) =>\n analyzeCharChunk(chunk, collectNonWords, includeWhitespace),\n );\n const total = analyzed.reduce((sum, chunk) => sum + chunk.chars, 0);\n const items: CharBreakdown[] = analyzed.map((chunk) => ({\n locale: chunk.locale,\n text: chunk.text,\n chars: chunk.chars,\n nonWords: chunk.nonWords,\n }));\n const counts = collectNonWords\n ? {\n words: analyzed.reduce((sum, chunk) => sum + chunk.wordChars, 0),\n nonWords: analyzed.reduce((sum, chunk) => sum + chunk.nonWordChars, 0),\n total,\n }\n : undefined;\n return {\n total,\n counts,\n breakdown: {\n mode,\n items,\n },\n };\n }\n\n const analyzed = chunks.map((chunk) =>\n analyzeChunk(chunk, collectNonWords, includeWhitespace),\n );\n const wordsTotal = analyzed.reduce((sum, chunk) => sum + chunk.words, 0);\n const nonWordsTotal = collectNonWords\n ? analyzed.reduce((sum, chunk) => {\n if (!chunk.nonWords) {\n return sum;\n }\n return sum + getNonWordTotal(chunk.nonWords);\n }, 0)\n : 0;\n const total = analyzed.reduce((sum, chunk) => {\n let chunkTotal = chunk.words;\n if (collectNonWords && chunk.nonWords) {\n chunkTotal += getNonWordTotal(chunk.nonWords);\n }\n return sum + chunkTotal;\n }, 0);\n\n const counts = collectNonWords ? 
{ words: wordsTotal, nonWords: nonWordsTotal, total } : undefined;\n\n if (mode === \"segments\") {\n const items: ChunkWithSegments[] = analyzed.map((chunk) => ({\n locale: chunk.locale,\n text: chunk.text,\n words: chunk.words,\n segments: chunk.segments,\n nonWords: chunk.nonWords,\n }));\n return {\n total,\n counts,\n breakdown: {\n mode,\n items,\n },\n };\n }\n\n if (mode === \"collector\") {\n const items = aggregateByLocale(analyzed);\n const nonWords = collectNonWordsAggregate(analyzed, collectNonWords);\n return {\n total,\n counts,\n breakdown: {\n mode,\n items,\n nonWords,\n },\n };\n }\n\n const items: ChunkBreakdown[] = analyzed.map((chunk) => ({\n locale: chunk.locale,\n text: chunk.text,\n words: chunk.words,\n nonWords: chunk.nonWords,\n }));\n\n return {\n total,\n counts,\n breakdown: {\n mode,\n items,\n },\n };\n}\n\nfunction getNonWordTotal(nonWords: NonWordCollection): number {\n return (\n nonWords.counts.emoji +\n nonWords.counts.symbols +\n nonWords.counts.punctuation +\n (nonWords.counts.whitespace ?? 
0)\n );\n}\n\n\nfunction collectNonWordsAggregate(\n analyzed: Array<{ nonWords?: NonWordCollection }>,\n enabled: boolean,\n): NonWordCollection | undefined {\n if (!enabled) {\n return undefined;\n }\n const collection = createNonWordCollection();\n for (const chunk of analyzed) {\n if (!chunk.nonWords) {\n continue;\n }\n mergeNonWordCollections(collection, chunk.nonWords);\n }\n return collection;\n}\n","import { wordCounter } from \"./wc\";\n\nexport default wordCounter;\nexport { countCharsForLocale, countWordsForLocale, segmentTextByLocale } from \"./wc\";\nexport type {\n NonWordCollection,\n WordCounterMode,\n WordCounterOptions,\n WordCounterResult,\n WordCounterBreakdown,\n} from \"./wc\";\n","export function ensureArrayContainer(\n result: Record<string, unknown>,\n key: string,\n): Record<string, unknown>[] {\n const existing = result[key];\n if (Array.isArray(existing)) {\n return existing as Record<string, unknown>[];\n }\n const list: Record<string, unknown>[] = [];\n result[key] = list;\n return list;\n}\n\nexport function flattenArrayTables(result: Record<string, unknown>): void {\n for (const [key, value] of Object.entries(result)) {\n if (!Array.isArray(value)) {\n continue;\n }\n const flattened = value\n .map((entry) =>\n Object.entries(entry)\n .map(([entryKey, entryValue]) => `${entryKey}=${entryValue}`)\n .join(\", \"),\n )\n .join(\" | \");\n result[key] = flattened;\n }\n}\n","function stripKeyQuotes(key: string): string {\n const trimmed = key.trim();\n if (\n (trimmed.startsWith(\"\\\"\") && trimmed.endsWith(\"\\\"\")) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))\n ) {\n return trimmed.slice(1, -1);\n }\n return trimmed;\n}\n\nexport function normalizeKeyPath(key: string): string | null {\n const trimmed = key.trim();\n if (!trimmed) {\n return null;\n }\n\n if (\n (trimmed.startsWith(\"\\\"\") && trimmed.endsWith(\"\\\"\")) ||\n (trimmed.startsWith(\"'\") && trimmed.endsWith(\"'\"))\n ) {\n const unquoted = 
stripKeyQuotes(trimmed);\n return unquoted ? unquoted : null;\n }\n\n const segments = trimmed.split(\".\").map((segment) => segment.trim());\n if (segments.some((segment) => !segment)) {\n return null;\n }\n return segments.join(\".\");\n}\n","export function stripInlineComment(line: string): string {\n let inString: \"single\" | \"double\" | null = null;\n let escaped = false;\n\n for (let i = 0; i < line.length; i += 1) {\n const char = line[i] ?? \"\";\n\n if (inString) {\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\" && inString === \"double\") {\n escaped = true;\n continue;\n }\n\n if (inString === \"double\" && char === \"\\\"\") {\n inString = null;\n continue;\n }\n\n if (inString === \"single\" && char === \"'\") {\n inString = null;\n continue;\n }\n\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = \"double\";\n continue;\n }\n\n if (char === \"'\") {\n inString = \"single\";\n continue;\n }\n\n if (char === \"#\") {\n return line.slice(0, i).trimEnd();\n }\n }\n\n return line;\n}\n\nfunction unescapeBasic(input: string): string {\n return input\n .replace(/\\\\\\\\/g, \"\\\\\")\n .replace(/\\\\\"/g, \"\\\"\")\n .replace(/\\\\n/g, \"\\n\")\n .replace(/\\\\t/g, \"\\t\")\n .replace(/\\\\r/g, \"\\r\");\n}\n\nexport function parseStringLiteral(value: string): string | null {\n if (value.startsWith('\"\"\"') && value.endsWith('\"\"\"')) {\n const inner = value.slice(3, -3);\n return unescapeBasic(inner);\n }\n\n if (value.startsWith(\"'''\") && value.endsWith(\"'''\")) {\n return value.slice(3, -3);\n }\n\n if (value.startsWith(\"\\\"\") && value.endsWith(\"\\\"\")) {\n return unescapeBasic(value.slice(1, -1));\n }\n\n if (value.startsWith(\"'\") && value.endsWith(\"'\")) {\n return value.slice(1, -1);\n }\n\n return null;\n}\n","import { normalizeKeyPath } from \"./keys\";\nimport { parseStringLiteral } from \"./strings\";\nimport type { TomlValue } from \"./types\";\n\nfunction parsePrimitive(raw: string): string | 
number | boolean | null {\n const value = raw.trim();\n if (!value) {\n return null;\n }\n\n const stringLiteral = parseStringLiteral(value);\n if (stringLiteral !== null) {\n return stringLiteral;\n }\n\n if (value === \"true\") {\n return true;\n }\n\n if (value === \"false\") {\n return false;\n }\n\n if (/^[+-]?\\d+(?:\\.\\d+)?(?:[eE][+-]?\\d+)?$/.test(value)) {\n return Number(value);\n }\n\n if (/^\\d{4}-\\d{2}-\\d{2}/.test(value)) {\n return value;\n }\n\n return value;\n}\n\nfunction parseArray(raw: string): Array<string | number | boolean> | null {\n const value = raw.trim();\n if (!value.startsWith(\"[\") || !value.endsWith(\"]\")) {\n return null;\n }\n\n const inner = value.slice(1, -1).trim();\n if (!inner) {\n return [];\n }\n\n const items: Array<string | number | boolean> = [];\n let current = \"\";\n let inString: \"single\" | \"double\" | null = null;\n let escaped = false;\n\n for (let i = 0; i < inner.length; i += 1) {\n const char = inner[i] ?? \"\";\n\n if (inString) {\n current += char;\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\" && inString === \"double\") {\n escaped = true;\n continue;\n }\n\n if (inString === \"double\" && char === \"\\\"\") {\n inString = null;\n } else if (inString === \"single\" && char === \"'\") {\n inString = null;\n }\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = \"double\";\n current += char;\n continue;\n }\n\n if (char === \"'\") {\n inString = \"single\";\n current += char;\n continue;\n }\n\n if (char === \",\") {\n const item = parsePrimitive(current);\n if (item === null) {\n return null;\n }\n items.push(item);\n current = \"\";\n continue;\n }\n\n current += char;\n }\n\n const finalItem = parsePrimitive(current);\n if (finalItem === null) {\n return null;\n }\n items.push(finalItem);\n\n return items;\n}\n\nfunction parseInlineTable(raw: string): Record<string, TomlValue> | null {\n const trimmed = raw.trim();\n if (!trimmed.startsWith(\"{\") || 
!trimmed.endsWith(\"}\")) {\n return null;\n }\n\n const inner = trimmed.slice(1, -1).trim();\n if (!inner) {\n return {};\n }\n\n const pairs: string[] = [];\n let current = \"\";\n let inString: \"single\" | \"double\" | null = null;\n let escaped = false;\n let bracketDepth = 0;\n let braceDepth = 0;\n\n for (let i = 0; i < inner.length; i += 1) {\n const char = inner[i] ?? \"\";\n\n if (inString) {\n current += char;\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\" && inString === \"double\") {\n escaped = true;\n continue;\n }\n\n if (inString === \"double\" && char === \"\\\"\") {\n inString = null;\n } else if (inString === \"single\" && char === \"'\") {\n inString = null;\n }\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = \"double\";\n current += char;\n continue;\n }\n\n if (char === \"'\") {\n inString = \"single\";\n current += char;\n continue;\n }\n\n if (char === \"[\") {\n bracketDepth += 1;\n current += char;\n continue;\n }\n\n if (char === \"]\") {\n if (bracketDepth > 0) {\n bracketDepth -= 1;\n }\n current += char;\n continue;\n }\n\n if (char === \"{\") {\n braceDepth += 1;\n current += char;\n continue;\n }\n\n if (char === \"}\") {\n if (braceDepth > 0) {\n braceDepth -= 1;\n }\n current += char;\n continue;\n }\n\n if (char === \",\" && bracketDepth === 0 && braceDepth === 0) {\n pairs.push(current);\n current = \"\";\n continue;\n }\n\n current += char;\n }\n\n if (current.trim()) {\n pairs.push(current);\n }\n\n const output: Record<string, TomlValue> = {};\n for (const pair of pairs) {\n const separatorIndex = pair.indexOf(\"=\");\n if (separatorIndex === -1) {\n return null;\n }\n const key = normalizeKeyPath(pair.slice(0, separatorIndex));\n if (!key) {\n return null;\n }\n const valueRaw = pair.slice(separatorIndex + 1).trim();\n if (!valueRaw) {\n return null;\n }\n if (valueRaw.startsWith(\"{\")) {\n return null;\n }\n const normalized = normalizeValue(valueRaw);\n if (normalized === null) {\n 
return null;\n }\n if (typeof normalized === \"object\" && !Array.isArray(normalized)) {\n return null;\n }\n output[key] = normalized;\n }\n\n return output;\n}\n\nexport function normalizeValue(value: string): TomlValue | null {\n if (!value) {\n return null;\n }\n\n const trimmed = value.trim();\n if (trimmed.startsWith(\"{\") && trimmed.endsWith(\"}\")) {\n return parseInlineTable(trimmed);\n }\n\n const array = parseArray(trimmed);\n if (array) {\n return array;\n }\n\n if (trimmed.startsWith(\"[\") && trimmed.endsWith(\"]\")) {\n return null;\n }\n\n return parsePrimitive(trimmed);\n}\n\nexport function toPlainText(value: unknown): string {\n if (value == null) {\n return \"\";\n }\n if (Array.isArray(value)) {\n return value.map((item) => String(item)).join(\", \");\n }\n return String(value);\n}\n","import { ensureArrayContainer, flattenArrayTables } from \"./arrays\";\nimport { normalizeKeyPath } from \"./keys\";\nimport { stripInlineComment } from \"./strings\";\nimport { normalizeValue, toPlainText } from \"./values\";\n\nexport function parseTomlFrontmatter(frontmatter: string): Record<string, unknown> | null {\n const result: Record<string, unknown> = {};\n const lines = frontmatter.split(\"\\n\");\n let tablePrefix = \"\";\n let tableTarget: Record<string, unknown> | null = null;\n let tablePrefixInList = false;\n\n for (let index = 0; index < lines.length; index += 1) {\n const rawLine = lines[index] ?? \"\";\n const trimmedLine = rawLine.trim();\n if (!trimmedLine || trimmedLine.startsWith(\"#\")) {\n continue;\n }\n\n if (trimmedLine.startsWith(\"[[\")) {\n const match = trimmedLine.match(/^\\[\\[([^\\]]+)]]$/);\n if (!match) {\n return null;\n }\n const normalizedTable = normalizeKeyPath(match[1] ?? 
\"\");\n if (!normalizedTable) {\n return null;\n }\n const list = ensureArrayContainer(result, normalizedTable);\n const newEntry: Record<string, unknown> = {};\n list.push(newEntry);\n tableTarget = newEntry;\n tablePrefix = normalizedTable;\n tablePrefixInList = true;\n continue;\n }\n\n const tableMatch = trimmedLine.match(/^\\[([^\\]]+)]$/);\n if (tableMatch) {\n const normalizedTable = normalizeKeyPath(tableMatch[1] ?? \"\");\n if (!normalizedTable) {\n return null;\n }\n tablePrefix = normalizedTable;\n tablePrefixInList = false;\n tableTarget = null;\n continue;\n }\n\n const lineForParsing = /(\"\"\"|''')/.test(rawLine) ? rawLine : stripInlineComment(rawLine);\n const separatorIndex = lineForParsing.indexOf(\"=\");\n if (separatorIndex === -1) {\n return null;\n }\n\n const keyRaw = lineForParsing.slice(0, separatorIndex);\n const key = normalizeKeyPath(keyRaw);\n let valueRaw = lineForParsing.slice(separatorIndex + 1).trim();\n if (!key) {\n return null;\n }\n\n const tripleDelimiter = valueRaw.startsWith('\"\"\"') ? '\"\"\"' : valueRaw.startsWith(\"'''\") ? \"'''\" : null;\n if (tripleDelimiter) {\n const closingIndex = valueRaw.indexOf(tripleDelimiter, tripleDelimiter.length);\n if (closingIndex !== -1) {\n const after = valueRaw.slice(closingIndex + tripleDelimiter.length);\n const strippedAfter = stripInlineComment(after);\n valueRaw = `${valueRaw.slice(0, closingIndex + tripleDelimiter.length)}${strippedAfter}`;\n } else {\n const delimiter = tripleDelimiter;\n let combined = valueRaw;\n let closed = false;\n while (index + 1 < lines.length) {\n index += 1;\n const nextLine = lines[index] ?? \"\";\n combined += `\\n${nextLine}`;\n if (new RegExp(`${delimiter}\\\\s*$`).test(nextLine)) {\n closed = true;\n break;\n }\n }\n if (!closed) {\n return null;\n }\n valueRaw = combined;\n }\n }\n\n const normalized = normalizeValue(valueRaw);\n if (normalized === null) {\n return null;\n }\n\n const fullKey = tablePrefix ? 
`${tablePrefix}.${key}` : key;\n if (typeof normalized === \"object\" && !Array.isArray(normalized)) {\n for (const [inlineKey, inlineValue] of Object.entries(normalized)) {\n const entryKey = tablePrefixInList ? `${key}.${inlineKey}` : `${fullKey}.${inlineKey}`;\n if (tablePrefixInList && tableTarget) {\n tableTarget[entryKey] = toPlainText(inlineValue);\n } else {\n result[entryKey] = toPlainText(inlineValue);\n }\n }\n continue;\n }\n\n if (tablePrefixInList && tableTarget) {\n tableTarget[key] = toPlainText(normalized);\n continue;\n }\n\n result[fullKey] = toPlainText(normalized);\n }\n\n flattenArrayTables(result);\n\n return result;\n}\n","import { parseDocument } from \"yaml\";\nimport { parseTomlFrontmatter } from \"./toml-simple\";\nimport type { FrontmatterType, ParsedMarkdown } from \"./types\";\n\nconst FENCE_TO_TYPE: Record<string, FrontmatterType> = {\n \"---\": \"yaml\",\n \"+++\": \"toml\",\n \";;;\": \"json\",\n};\n\nfunction normalizeNewlines(input: string): string {\n return input.replace(/\\r\\n/g, \"\\n\");\n}\n\nfunction stripBom(line: string): string {\n return line.startsWith(\"\\uFEFF\") ? line.slice(1) : line;\n}\n\nfunction getFenceType(line: string): FrontmatterType | null {\n const match = line.match(/^[\\t ]*(---|\\+\\+\\+|;;;)[\\t ]*$/);\n if (!match) {\n return null;\n }\n return FENCE_TO_TYPE[match[1] ?? \"\"] ?? 
null;\n}\n\nfunction parseFrontmatter(frontmatter: string, type: FrontmatterType | null): Record<string, unknown> | null {\n if (!type) {\n return null;\n }\n\n if (type === \"json\") {\n try {\n return JSON.parse(frontmatter) as Record<string, unknown>;\n } catch {\n return null;\n }\n }\n\n if (type === \"yaml\") {\n const doc = parseDocument(frontmatter, { prettyErrors: false });\n if (doc.errors.length > 0) {\n return null;\n }\n const data = doc.toJSON();\n if (!data || typeof data !== \"object\" || Array.isArray(data)) {\n return null;\n }\n return data as Record<string, unknown>;\n }\n\n if (type === \"toml\") {\n return parseTomlFrontmatter(frontmatter);\n }\n\n return null;\n}\n\nfunction extractJsonBlock(text: string, startIndex: number): { jsonText: string; endIndex: number } | null {\n let depth = 0;\n let inString = false;\n let escaped = false;\n\n for (let i = startIndex; i < text.length; i += 1) {\n const char = text[i] ?? \"\";\n\n if (inString) {\n if (escaped) {\n escaped = false;\n continue;\n }\n\n if (char === \"\\\\\") {\n escaped = true;\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = false;\n }\n\n continue;\n }\n\n if (char === \"\\\"\") {\n inString = true;\n continue;\n }\n\n if (char === \"{\") {\n depth += 1;\n continue;\n }\n\n if (char === \"}\") {\n depth -= 1;\n if (depth === 0) {\n const jsonText = text.slice(startIndex, i + 1);\n return { jsonText, endIndex: i };\n }\n }\n }\n\n return null;\n}\n\nexport function parseMarkdown(input: string): ParsedMarkdown {\n const normalized = normalizeNewlines(input);\n const lines = normalized.split(\"\\n\");\n if (lines.length === 0) {\n return { frontmatter: null, content: normalized, data: null, frontmatterType: null };\n }\n\n lines[0] = stripBom(lines[0] ?? \"\");\n const normalizedWithoutBom = lines.join(\"\\n\");\n\n const openingType = getFenceType(lines[0] ?? \"\");\n if (!openingType) {\n const leadingWhitespace = normalizedWithoutBom.match(/^[\\t \\n]*/)?.[0] ?? 
\"\";\n const jsonStart = leadingWhitespace.length;\n if (normalizedWithoutBom[jsonStart] !== \"{\") {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n const jsonBlock = extractJsonBlock(normalizedWithoutBom, jsonStart);\n if (!jsonBlock) {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n const frontmatter = jsonBlock.jsonText;\n let content = normalizedWithoutBom.slice(jsonBlock.endIndex + 1);\n if (content.startsWith(\"\\n\")) {\n content = content.slice(1);\n }\n const data = parseFrontmatter(frontmatter, \"json\");\n if (!data) {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n return {\n frontmatter,\n content,\n data,\n frontmatterType: \"json\",\n };\n }\n\n let closingIndex = -1;\n for (let i = 1; i < lines.length; i += 1) {\n if (getFenceType(lines[i] ?? \"\") === openingType) {\n closingIndex = i;\n break;\n }\n }\n\n if (closingIndex === -1) {\n return { frontmatter: null, content: normalizedWithoutBom, data: null, frontmatterType: null };\n }\n\n const frontmatter = lines.slice(1, closingIndex).join(\"\\n\");\n const content = lines.slice(closingIndex + 1).join(\"\\n\");\n const data = parseFrontmatter(frontmatter, openingType);\n\n return {\n frontmatter,\n content,\n data,\n frontmatterType: openingType,\n };\n}\n","import type { WordCounterMode, WordCounterOptions, WordCounterResult } from \"../wc/types\";\nimport wordCounter from \"../wc\";\nimport { parseMarkdown } from \"./parse-markdown\";\nimport type { SectionMode, SectionedResult } from \"./types\";\n\nfunction normalizeText(value: unknown): string {\n if (value == null) {\n return \"\";\n }\n if (typeof value === \"string\") {\n return value;\n }\n if (typeof value === \"number\" || typeof value === \"boolean\") {\n return String(value);\n }\n try {\n return JSON.stringify(value);\n } catch {\n return String(value);\n 
}\n}\n\nfunction buildPerKeyItems(\n data: Record<string, unknown> | null,\n mode: WordCounterMode,\n options: WordCounterOptions,\n): Array<{ name: string; source: \"frontmatter\"; result: WordCounterResult }> {\n if (!data || typeof data !== \"object\" || Array.isArray(data)) {\n return [];\n }\n\n return Object.entries(data).map(([key, value]) => {\n const valueText = normalizeText(value);\n const text = valueText ? `${key}: ${valueText}` : key;\n return {\n name: key,\n source: \"frontmatter\",\n result: wordCounter(text, options),\n };\n });\n}\n\nfunction buildSingleItem(\n name: string,\n text: string,\n mode: WordCounterMode,\n options: WordCounterOptions,\n source: \"frontmatter\" | \"content\",\n) {\n return [{ name, source, result: wordCounter(text, options) }];\n}\n\nfunction sumTotals(items: Array<{ result: WordCounterResult }>): number {\n return items.reduce((sum, item) => sum + item.result.total, 0);\n}\n\nexport function countSections(\n input: string,\n section: SectionMode,\n options: WordCounterOptions = {},\n): SectionedResult {\n const mode: WordCounterMode = options.mode ?? \"chunk\";\n if (section === \"all\") {\n const result = wordCounter(input, options);\n return {\n section,\n total: result.total,\n frontmatterType: null,\n items: [{ name: \"all\", source: \"content\", result }],\n };\n }\n\n const parsed = parseMarkdown(input);\n const frontmatterText = parsed.frontmatter ?? \"\";\n const contentText = parsed.content ?? 
\"\";\n\n let items: Array<{ name: string; source: \"frontmatter\" | \"content\"; result: WordCounterResult }> = [];\n\n if (section === \"frontmatter\") {\n items = buildSingleItem(\"frontmatter\", frontmatterText, mode, options, \"frontmatter\");\n } else if (section === \"content\") {\n items = buildSingleItem(\"content\", contentText, mode, options, \"content\");\n } else if (section === \"split\") {\n items = [\n ...buildSingleItem(\"frontmatter\", frontmatterText, mode, options, \"frontmatter\"),\n ...buildSingleItem(\"content\", contentText, mode, options, \"content\"),\n ];\n } else if (section === \"per-key\") {\n items = buildPerKeyItems(parsed.data, mode, options);\n } else if (section === \"split-per-key\") {\n items = [\n ...buildPerKeyItems(parsed.data, mode, options),\n ...buildSingleItem(\"content\", contentText, mode, options, \"content\"),\n ];\n }\n\n return {\n section,\n total: sumTotals(items),\n frontmatterType: parsed.frontmatterType,\n items,\n };\n}\n","export function showSingularOrPluralWord(count: number, word: string): string {\n return `${count} ${word}${count === 1 ? 
\"\" : \"s\"}`;\n}\n","import wordCounter, {\n countCharsForLocale,\n countWordsForLocale,\n segmentTextByLocale,\n} from \"./wc\";\nimport { parseMarkdown, countSections } from \"./markdown\";\nimport { showSingularOrPluralWord } from \"./utils\";\n\nconst cjsExports = Object.assign(wordCounter, {\n default: wordCounter,\n wordCounter,\n countCharsForLocale,\n countWordsForLocale,\n segmentTextByLocale,\n parseMarkdown,\n countSections,\n showSingularOrPluralWord,\n});\n\nexport = cjsExports;\n"],"mappings":";;;AAAA,MAAM,iCAAiB,IAAI,KAA6B;AACxD,MAAM,yCAAyB,IAAI,KAA6B;AAEhE,SAAgB,aAAa,QAAgC;CAC3D,MAAM,SAAS,eAAe,IAAI,OAAO;AACzC,KAAI,OACF,QAAO;CAET,MAAM,YAAY,IAAI,KAAK,UAAU,QAAQ,EAAE,aAAa,QAAQ,CAAC;AACrE,gBAAe,IAAI,QAAQ,UAAU;AACrC,QAAO;;AAGT,SAAgB,qBAAqB,QAAgC;CACnE,MAAM,SAAS,uBAAuB,IAAI,OAAO;AACjD,KAAI,OACF,QAAO;CAET,MAAM,YAAY,IAAI,KAAK,UAAU,QAAQ,EAAE,aAAa,YAAY,CAAC;AACzE,wBAAuB,IAAI,QAAQ,UAAU;AAC7C,QAAO;;AAGT,SAAS,oBAA6B;AACpC,QAAO,OAAO,SAAS,eAAe,OAAO,KAAK,cAAc;;AAGlE,SAAgB,oBAAoB,MAAc,QAAwB;CACxE,MAAM,YAAY,aAAa,OAAO;CACtC,IAAI,QAAQ;AACZ,MAAK,MAAM,WAAW,UAAU,QAAQ,KAAK,CAC3C,KAAI,QAAQ,WACV;AAGJ,QAAO;;AAGT,SAAgB,oBAAoB,MAAc,QAAwB;AACxE,KAAI,CAAC,mBAAmB,CACtB,QAAO,MAAM,KAAK,KAAK,CAAC;CAE1B,MAAM,YAAY,qBAAqB,OAAO;CAC9C,IAAI,QAAQ;AACZ,MAAK,MAAM,YAAY,UAAU,QAAQ,KAAK,CAC5C;AAEF,QAAO;;;;;AC7CT,MAAM,aAAa;AACnB,MAAM,yBAAyB;AAC/B,MAAM,mBAAmB;AACzB,MAAM,cAAc;AACpB,MAAM,mBAAmB;AACzB,MAAM,kBAAkB;AACxB,MAAM,eAAe,IAAI,IAAI;CAAC;CAAM;CAAM;CAAU;CAAS,CAAC;AAE9D,SAAgB,0BAA6C;AAC3D,QAAO;EACL,OAAO,EAAE;EACT,SAAS,EAAE;EACX,aAAa,EAAE;EACf,QAAQ;GACN,OAAO;GACP,SAAS;GACT,aAAa;GACd;EACF;;AAGH,SAAgB,WACd,YACA,UACA,SACM;AACN,KAAI,aAAa,SAAS;AACxB,aAAW,MAAM,KAAK,QAAQ;AAC9B,aAAW,OAAO,SAAS;AAC3B;;AAEF,KAAI,aAAa,UAAU;AACzB,aAAW,QAAQ,KAAK,QAAQ;AAChC,aAAW,OAAO,WAAW;AAC7B;;AAEF,YAAW,YAAY,KAAK,QAAQ;AACpC,YAAW,OAAO,eAAe;;AAGnC,SAAgB,cACd,YACA,SACQ;CACR,IAAI,aAAa,WAAW;CAC5B,IAAI,QAAQ;AACZ,MAAK,MAAM,QAAQ,SAAS;AAC1B,MAAI,SAAS,KAAK;AAChB,gBAAa,cAAc,wBAAwB;AACnD,cAAW,UAAU;AACrB,YAAS;AACT;;AAEF,MAAI,SAAS,KAAM;AACjB
,gBAAa,cAAc,wBAAwB;AACnD,cAAW,QAAQ;AACnB,YAAS;AACT;;AAEF,MAAI,aAAa,IAAI,KAAK,EAAE;AAC1B,gBAAa,cAAc,wBAAwB;AACnD,cAAW,YAAY;AACvB,YAAS;AACT;;AAEF,MAAI,gBAAgB,KAAK,KAAK,EAAE;AAC9B,gBAAa,cAAc,wBAAwB;AACnD,cAAW,SAAS;AACpB,YAAS;;;AAIb,KAAI,QAAQ,GAAG;AACb,aAAW,aAAa,cAAc,wBAAwB;AAC9D,aAAW,OAAO,cAAc,WAAW,OAAO,cAAc,KAAK;;AAGvE,QAAO;;AAGT,SAAgB,uBACd,SAC2C;CAC3C,MAAM,4BAA4B,QAAQ,SAAS,IAAS;AAC5D,KACE,iBAAiB,KAAK,QAAQ,IAC9B,uBAAuB,KAAK,QAAQ,IACnC,6BAA6B,WAAW,KAAK,QAAQ,CAEtD,QAAO;AAET,KAAI,YAAY,KAAK,QAAQ,CAC3B,QAAO;AAET,KAAI,iBAAiB,KAAK,QAAQ,CAChC,QAAO;AAET,QAAO;;AAGT,SAAgB,wBACd,QACA,QACmB;AACnB,KAAI,OAAO,OAAO,QAAQ,GAAG;AAC3B,SAAO,MAAM,KAAK,GAAG,OAAO,MAAM;AAClC,SAAO,OAAO,SAAS,OAAO,OAAO;;AAEvC,KAAI,OAAO,OAAO,UAAU,GAAG;AAC7B,SAAO,QAAQ,KAAK,GAAG,OAAO,QAAQ;AACtC,SAAO,OAAO,WAAW,OAAO,OAAO;;AAEzC,KAAI,OAAO,OAAO,cAAc,GAAG;AACjC,SAAO,YAAY,KAAK,GAAG,OAAO,YAAY;AAC9C,SAAO,OAAO,eAAe,OAAO,OAAO;;AAE7C,KAAI,OAAO,OAAO,cAAc,OAAO,OAAO,aAAa,KAAK,OAAO,YAAY;EACjF,MAAM,aAAa,OAAO,cAAc,wBAAwB;AAChE,aAAW,UAAU,OAAO,WAAW;AACvC,aAAW,QAAQ,OAAO,WAAW;AACrC,aAAW,YAAY,OAAO,WAAW;AACzC,aAAW,SAAS,OAAO,WAAW;AACtC,SAAO,aAAa;AACpB,SAAO,OAAO,cAAc,OAAO,OAAO,cAAc,KAAK,OAAO,OAAO;;AAE7E,QAAO;;AAGT,SAAS,yBAA2C;AAClD,QAAO;EAAE,QAAQ;EAAG,MAAM;EAAG,UAAU;EAAG,OAAO;EAAG;;;;;ACnHtD,SAAgB,aACd,OACA,iBACA,mBACe;CACf,MAAM,YAAY,aAAa,MAAM,OAAO;CAC5C,MAAM,WAAqB,EAAE;CAC7B,MAAM,WAAqC,kBACvC,yBAAyB,GACzB;AACJ,MAAK,MAAM,QAAQ,UAAU,QAAQ,MAAM,KAAK,CAC9C,KAAI,KAAK,WACP,UAAS,KAAK,KAAK,QAAQ;UAClB,mBAAmB,UAAU;AACtC,MAAI,kBACF,eAAc,UAAU,KAAK,QAAQ;EAEvC,MAAM,WAAW,uBAAuB,KAAK,QAAQ;AACrD,MAAI,SACF,YAAW,UAAU,UAAU,KAAK,QAAQ;;AAIlD,QAAO;EACL,QAAQ,MAAM;EACd,MAAM,MAAM;EACZ;EACA,OAAO,SAAS;EAChB,UAAU,YAAY;EACvB;;AAGH,SAAgB,iBACd,OACA,iBACA,mBAC4D;CAC5D,MAAM,YAAY,aAAa,MAAM,OAAO;CAC5C,MAAM,WAAqC,kBACvC,yBAAyB,GACzB;CACJ,IAAI,QAAQ;CACZ,IAAI,YAAY;CAChB,IAAI,eAAe;AAEnB,MAAK,MAAM,QAAQ,UAAU,QAAQ,MAAM,KAAK,EAAE;AAChD,MAAI,KAAK,YAAY;GACnB,MAAM,QAAQ,oBAAoB,KAAK,SAAS,MAAM,OAAO;AAC7D,YAAS;AACT,gBAAa;AACb;;AAGF,MAAI,mBAAmB,UAAU;GAC/B,IAAI,kBAAkB;AACtB,OAAI,kBACF,mBAAkB
,cAAc,UAAU,KAAK,QAAQ;GAEzD,MAAM,WAAW,uBAAuB,KAAK,QAAQ;AACrD,OAAI,SACF,YAAW,UAAU,UAAU,KAAK,QAAQ;AAE9C,OAAI,YAAY,kBAAkB,GAAG;IACnC,MAAM,QAAQ,oBAAoB,KAAK,SAAS,MAAM,OAAO;AAC7D,aAAS;AACT,oBAAgB;;;;AAKtB,QAAO;EACL,QAAQ,MAAM;EACd,MAAM,MAAM;EACZ;EACA;EACA;EACA,UAAU,YAAY;EACvB;;AAGH,SAAgB,kBACd,QACsB;CACtB,MAAM,QAAkB,EAAE;CAC1B,MAAM,sBAAM,IAAI,KAAiC;AAEjD,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,WAAW,IAAI,IAAI,MAAM,OAAO;AACtC,MAAI,UAAU;AACZ,YAAS,SAAS,MAAM;AACxB,YAAS,SAAS,KAAK,GAAG,MAAM,SAAS;AACzC;;AAGF,QAAM,KAAK,MAAM,OAAO;AACxB,MAAI,IAAI,MAAM,QAAQ;GACpB,QAAQ,MAAM;GACd,OAAO,MAAM;GACb,UAAU,CAAC,GAAG,MAAM,SAAS;GAC9B,CAAC;;AAGJ,QAAO,MAAM,KAAK,WAAW,IAAI,IAAI,OAAO,CAAE;;;;;ACpHhD,MAAM,eAAgD;CACpD,OAAO;CACP,QAAQ;CACR,UAAU;CACV,SAAS;CACT,KAAK;CACL,WAAW;CACX,SAAS;CACT,OAAO;CACP,MAAM;CACN,OAAO;CACP,WAAW;CACX,YAAY;CACb;AAED,SAAgB,cAAc,OAA+C;AAC3E,KAAI,CAAC,MACH,QAAO;AAGT,QAAO,aADY,MAAM,MAAM,CAAC,aAAa,KACV;;AAGrC,SAAgB,YACd,OACA,WAA4B,SACX;AACjB,QAAO,cAAc,MAAM,IAAI;;;;;AC7BjC,MAAa,iBAAiB;AAM9B,MAAM,QAAQ;CACZ,UAAU;CACV,UAAU;CACV,QAAQ;CACR,KAAK;CACL,OAAO;CACP,QAAQ;CACR,UAAU;CACV,YAAY;CACZ,MAAM;CACP;AAED,MAAM,mBAA6D;CACjE;EAAE,QAAQ;EAAM,OAAO;EAAa;CACpC;EAAE,QAAQ;EAAM,OAAO;EAAU;CACjC;EAAE,QAAQ;EAAM,OAAO;EAAU;CACjC;EAAE,QAAQ;EAAM,OAAO;EAAU;CAClC;AAED,MAAM,eAAe,IAAI,IAAY,CACnC,gBACA,GAAG,iBAAiB,KAAK,SAAS,KAAK,OAAO,CAC/C,CAAC;AAEF,SAAgB,cAAc,QAAyB;AACrD,QAAO,aAAa,IAAI,OAAO;;AAGjC,SAAS,kBAAkB,MAAsB;AAC/C,MAAK,MAAM,QAAQ,iBACjB,KAAI,KAAK,MAAM,KAAK,KAAK,CACvB,QAAO,KAAK;AAGhB,QAAO;;AAGT,SAAgB,oBACd,MACA,gBACA,UAA+B,EAAE,EAClB;AACf,KAAI,MAAM,SAAS,KAAK,KAAK,IAAI,MAAM,SAAS,KAAK,KAAK,CACxD,QAAO;AAET,KAAI,MAAM,OAAO,KAAK,KAAK,CACzB,QAAO;AAET,KAAI,MAAM,OAAO,KAAK,KAAK,CACzB,QAAO;AAET,KAAI,MAAM,SAAS,KAAK,KAAK,CAC3B,QAAO;AAET,KAAI,MAAM,WAAW,KAAK,KAAK,CAC7B,QAAO;AAET,KAAI,MAAM,KAAK,KAAK,KAAK,CACvB,QAAO;AAGT,KAAI,MAAM,IAAI,KAAK,KAAK,EAAE;AACxB,MAAI,kBAAkB,eAAe,WAAW,KAAK,CACnD,QAAO;AAET,SAAO;;AAGT,KAAI,MAAM,MAAM,KAAK,KAAK,EAAE;EAC1B,MAAM,eAAe,kBAAkB,KAAK;AAC5C,MAAI,iBAAiB,eACnB,QAAO;AAET,MAAI,kBAAkB,cAAc,eAAe,IAAI,mBAAmB
,eACxE,QAAO;AAET,MAAI,QAAQ,gBACV,QAAO,QAAQ;AAEjB,SAAO;;AAGT,QAAO;;;;;AChFT,SAAgB,oBACd,MACA,UAA+B,EAAE,EAClB;CACf,MAAM,SAAwB,EAAE;CAEhC,IAAI,gBAAwB;CAC5B,IAAI,SAAS;CACb,IAAI,kBAAkB;AAEtB,MAAK,MAAM,QAAQ,MAAM;EACvB,MAAM,WAAW,oBAAoB,MAAM,eAAe,QAAQ;EAClE,MAAM,eAAe,YAAY;AAGjC,MAAI,WAAW,IAAI;AACjB,mBAAgB;AAChB,YAAS;AACT,qBAAkB,aAAa;AAC/B;;AAGF,MAAI,aAAa,QAAQ,CAAC,iBAAiB;AACzC,mBAAgB;AAChB,aAAU;AACV,qBAAkB;AAClB;;AAGF,MAAI,iBAAiB,iBAAiB,aAAa,MAAM;AACvD,OAAI,kBAAkB,kBAAkB,cAAc,aAAa,EAAE;AACnE,oBAAgB;AAChB,cAAU;AACV,sBAAkB;AAClB;;AAGF,UAAO,KAAK;IAAE,QAAQ;IAAe,MAAM;IAAQ,CAAC;AACpD,mBAAgB;AAChB,YAAS;AACT,qBAAkB;AAClB;;AAGF,YAAU;AACV,MAAI,aAAa,KACf,mBAAkB;;AAItB,KAAI,OAAO,SAAS,EAClB,QAAO,KAAK;EAAE,QAAQ;EAAe,MAAM;EAAQ,CAAC;AAGtD,QAAO,oBAAoB,OAAO;;AAGpC,SAAS,oBAAoB,QAAsC;AACjE,KAAI,OAAO,WAAW,EACpB,QAAO;CAGT,MAAM,SAAwB,EAAE;CAEhC,IAAI,OAAO,OAAO;AAElB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AACrB,MAAI,MAAM,WAAW,KAAK,OACxB,QAAO;GAAE,QAAQ,KAAK;GAAQ,MAAM,KAAK,OAAO,MAAM;GAAM;OACvD;AACL,UAAO,KAAK,KAAK;AACjB,UAAO;;;AAIX,QAAO,KAAK,KAAK;AACjB,QAAO;;;;;AC5DT,SAAgB,YACd,MACA,UAA8B,EAAE,EACb;CACnB,MAAM,OAAwB,YAAY,QAAQ,MAAM,QAAQ;CAChE,MAAM,kBAAkB,QAAQ,QAAQ,SAAS;CACjD,MAAM,oBAAoB,QAAQ,QAAQ,kBAAkB;CAC5D,MAAM,SAAS,oBAAoB,MAAM,EAAE,iBAAiB,QAAQ,iBAAiB,CAAC;AAEtF,KAAI,SAAS,QAAQ;EACnB,MAAMA,aAAW,OAAO,KAAK,UAC3B,iBAAiB,OAAO,iBAAiB,kBAAkB,CAC5D;EACD,MAAMC,UAAQD,WAAS,QAAQ,KAAK,UAAU,MAAM,MAAM,OAAO,EAAE;EACnE,MAAM,QAAyBA,WAAS,KAAK,WAAW;GACtD,QAAQ,MAAM;GACd,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,MAAM;GACjB,EAAE;AAQH,SAAO;GACL;GACA,QATa,kBACX;IACE,OAAOA,WAAS,QAAQ,KAAK,UAAU,MAAM,MAAM,WAAW,EAAE;IAChE,UAAUA,WAAS,QAAQ,KAAK,UAAU,MAAM,MAAM,cAAc,EAAE;IACtE;IACD,GACD;GAIF,WAAW;IACT;IACA;IACD;GACF;;CAGH,MAAM,WAAW,OAAO,KAAK,UAC3B,aAAa,OAAO,iBAAiB,kBAAkB,CACxD;CACD,MAAM,aAAa,SAAS,QAAQ,KAAK,UAAU,MAAM,MAAM,OAAO,EAAE;CACxE,MAAM,gBAAgB,kBAClB,SAAS,QAAQ,KAAK,UAAU;AAC9B,MAAI,CAAC,MAAM,SACT,QAAO;AAET,SAAO,MAAM,gBAAgB,MAAM,SAAS;IAC3C,EAAE,GACL;CACJ,MAAM,QAAQ,SAAS,QAAQ,KAAK,UAAU;EAC5C,IAAI,aAAa,MAAM;AACvB,MAAI
,mBAAmB,MAAM,SAC3B,eAAc,gBAAgB,MAAM,SAAS;AAE/C,SAAO,MAAM;IACZ,EAAE;CAEL,MAAM,SAAS,kBAAkB;EAAE,OAAO;EAAY,UAAU;EAAe;EAAO,GAAG;AAEzF,KAAI,SAAS,WAQX,QAAO;EACL;EACA;EACA,WAAW;GACT;GACA,OAZ+B,SAAS,KAAK,WAAW;IAC1D,QAAQ,MAAM;IACd,MAAM,MAAM;IACZ,OAAO,MAAM;IACb,UAAU,MAAM;IAChB,UAAU,MAAM;IACjB,EAAE;GAOA;EACF;AAGH,KAAI,SAAS,YAGX,QAAO;EACL;EACA;EACA,WAAW;GACT;GACA,OAPU,kBAAkB,SAAS;GAQrC,UAPa,yBAAyB,UAAU,gBAAgB;GAQjE;EACF;AAUH,QAAO;EACL;EACA;EACA,WAAW;GACT;GACA,OAZ4B,SAAS,KAAK,WAAW;IACvD,QAAQ,MAAM;IACd,MAAM,MAAM;IACZ,OAAO,MAAM;IACb,UAAU,MAAM;IACjB,EAAE;GAQA;EACF;;AAGH,SAAS,gBAAgB,UAAqC;AAC5D,QACE,SAAS,OAAO,QAChB,SAAS,OAAO,UAChB,SAAS,OAAO,eACf,SAAS,OAAO,cAAc;;AAKnC,SAAS,yBACP,UACA,SAC+B;AAC/B,KAAI,CAAC,QACH;CAEF,MAAM,aAAa,yBAAyB;AAC5C,MAAK,MAAM,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,SACT;AAEF,0BAAwB,YAAY,MAAM,SAAS;;AAErD,QAAO;;;;;AC3JT,iBAAe;;;;ACFf,SAAgB,qBACd,QACA,KAC2B;CAC3B,MAAM,WAAW,OAAO;AACxB,KAAI,MAAM,QAAQ,SAAS,CACzB,QAAO;CAET,MAAM,OAAkC,EAAE;AAC1C,QAAO,OAAO;AACd,QAAO;;AAGT,SAAgB,mBAAmB,QAAuC;AACxE,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO,EAAE;AACjD,MAAI,CAAC,MAAM,QAAQ,MAAM,CACvB;AASF,SAAO,OAPW,MACf,KAAK,UACJ,OAAO,QAAQ,MAAM,CAClB,KAAK,CAAC,UAAU,gBAAgB,GAAG,SAAS,GAAG,aAAa,CAC5D,KAAK,KAAK,CACd,CACA,KAAK,MAAM;;;;;;ACxBlB,SAAS,eAAe,KAAqB;CAC3C,MAAM,UAAU,IAAI,MAAM;AAC1B,KACG,QAAQ,WAAW,KAAK,IAAI,QAAQ,SAAS,KAAK,IAClD,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,CAEjD,QAAO,QAAQ,MAAM,GAAG,GAAG;AAE7B,QAAO;;AAGT,SAAgB,iBAAiB,KAA4B;CAC3D,MAAM,UAAU,IAAI,MAAM;AAC1B,KAAI,CAAC,QACH,QAAO;AAGT,KACG,QAAQ,WAAW,KAAK,IAAI,QAAQ,SAAS,KAAK,IAClD,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,EACjD;EACA,MAAM,WAAW,eAAe,QAAQ;AACxC,SAAO,WAAW,WAAW;;CAG/B,MAAM,WAAW,QAAQ,MAAM,IAAI,CAAC,KAAK,YAAY,QAAQ,MAAM,CAAC;AACpE,KAAI,SAAS,MAAM,YAAY,CAAC,QAAQ,CACtC,QAAO;AAET,QAAO,SAAS,KAAK,IAAI;;;;;AC7B3B,SAAgB,mBAAmB,MAAsB;CACvD,IAAI,WAAuC;CAC3C,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK,GAAG;EACvC,MAAM,OAAO,KAAK,MAAM;AAExB,MAAI,UAAU;AACZ,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,QAAQ,aAAa,UAAU;AAC1C,cAAU;AACV;;AAGF,OAAI,aAAa,YAAY,SAAS,M
AAM;AAC1C,eAAW;AACX;;AAGF,OAAI,aAAa,YAAY,SAAS,KAAK;AACzC,eAAW;AACX;;AAGF;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,cAAW;AACX;;AAGF,MAAI,SAAS,IACX,QAAO,KAAK,MAAM,GAAG,EAAE,CAAC,SAAS;;AAIrC,QAAO;;AAGT,SAAS,cAAc,OAAuB;AAC5C,QAAO,MACJ,QAAQ,SAAS,KAAK,CACtB,QAAQ,QAAQ,KAAK,CACrB,QAAQ,QAAQ,KAAK,CACrB,QAAQ,QAAQ,IAAK,CACrB,QAAQ,QAAQ,KAAK;;AAG1B,SAAgB,mBAAmB,OAA8B;AAC/D,KAAI,MAAM,WAAW,SAAM,IAAI,MAAM,SAAS,SAAM,CAElD,QAAO,cADO,MAAM,MAAM,GAAG,GAAG,CACL;AAG7B,KAAI,MAAM,WAAW,MAAM,IAAI,MAAM,SAAS,MAAM,CAClD,QAAO,MAAM,MAAM,GAAG,GAAG;AAG3B,KAAI,MAAM,WAAW,KAAK,IAAI,MAAM,SAAS,KAAK,CAChD,QAAO,cAAc,MAAM,MAAM,GAAG,GAAG,CAAC;AAG1C,KAAI,MAAM,WAAW,IAAI,IAAI,MAAM,SAAS,IAAI,CAC9C,QAAO,MAAM,MAAM,GAAG,GAAG;AAG3B,QAAO;;;;;ACxET,SAAS,eAAe,KAA+C;CACrE,MAAM,QAAQ,IAAI,MAAM;AACxB,KAAI,CAAC,MACH,QAAO;CAGT,MAAM,gBAAgB,mBAAmB,MAAM;AAC/C,KAAI,kBAAkB,KACpB,QAAO;AAGT,KAAI,UAAU,OACZ,QAAO;AAGT,KAAI,UAAU,QACZ,QAAO;AAGT,KAAI,wCAAwC,KAAK,MAAM,CACrD,QAAO,OAAO,MAAM;AAGtB,KAAI,qBAAqB,KAAK,MAAM,CAClC,QAAO;AAGT,QAAO;;AAGT,SAAS,WAAW,KAAsD;CACxE,MAAM,QAAQ,IAAI,MAAM;AACxB,KAAI,CAAC,MAAM,WAAW,IAAI,IAAI,CAAC,MAAM,SAAS,IAAI,CAChD,QAAO;CAGT,MAAM,QAAQ,MAAM,MAAM,GAAG,GAAG,CAAC,MAAM;AACvC,KAAI,CAAC,MACH,QAAO,EAAE;CAGX,MAAM,QAA0C,EAAE;CAClD,IAAI,UAAU;CACd,IAAI,WAAuC;CAC3C,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;EACxC,MAAM,OAAO,MAAM,MAAM;AAEzB,MAAI,UAAU;AACZ,cAAW;AACX,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,QAAQ,aAAa,UAAU;AAC1C,cAAU;AACV;;AAGF,OAAI,aAAa,YAAY,SAAS,KACpC,YAAW;YACF,aAAa,YAAY,SAAS,IAC3C,YAAW;AAEb;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;GAChB,MAAM,OAAO,eAAe,QAAQ;AACpC,OAAI,SAAS,KACX,QAAO;AAET,SAAM,KAAK,KAAK;AAChB,aAAU;AACV;;AAGF,aAAW;;CAGb,MAAM,YAAY,eAAe,QAAQ;AACzC,KAAI,cAAc,KAChB,QAAO;AAET,OAAM,KAAK,UAAU;AAErB,QAAO;;AAGT,SAAS,iBAAiB,KAA+C;CACvE,MAAM,UAAU,IAAI,MAAM;AAC1B,KAAI,CAAC,QAAQ,WAAW,IAAI,IAAI,CAAC,QAAQ,SAAS,IAAI,CACpD,QAAO;CAGT,MAAM,QAAQ,QAAQ,MAAM,GAAG,GAAG,CAAC,MAAM;AACzC,KAAI,CAAC,MACH,QAAO,EA
AE;CAGX,MAAM,QAAkB,EAAE;CAC1B,IAAI,UAAU;CACd,IAAI,WAAuC;CAC3C,IAAI,UAAU;CACd,IAAI,eAAe;CACnB,IAAI,aAAa;AAEjB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;EACxC,MAAM,OAAO,MAAM,MAAM;AAEzB,MAAI,UAAU;AACZ,cAAW;AACX,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,QAAQ,aAAa,UAAU;AAC1C,cAAU;AACV;;AAGF,OAAI,aAAa,YAAY,SAAS,KACpC,YAAW;YACF,aAAa,YAAY,SAAS,IAC3C,YAAW;AAEb;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,cAAW;AACX,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,mBAAgB;AAChB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,OAAI,eAAe,EACjB,iBAAgB;AAElB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,iBAAc;AACd,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,OAAI,aAAa,EACf,eAAc;AAEhB,cAAW;AACX;;AAGF,MAAI,SAAS,OAAO,iBAAiB,KAAK,eAAe,GAAG;AAC1D,SAAM,KAAK,QAAQ;AACnB,aAAU;AACV;;AAGF,aAAW;;AAGb,KAAI,QAAQ,MAAM,CAChB,OAAM,KAAK,QAAQ;CAGrB,MAAM,SAAoC,EAAE;AAC5C,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,iBAAiB,KAAK,QAAQ,IAAI;AACxC,MAAI,mBAAmB,GACrB,QAAO;EAET,MAAM,MAAM,iBAAiB,KAAK,MAAM,GAAG,eAAe,CAAC;AAC3D,MAAI,CAAC,IACH,QAAO;EAET,MAAM,WAAW,KAAK,MAAM,iBAAiB,EAAE,CAAC,MAAM;AACtD,MAAI,CAAC,SACH,QAAO;AAET,MAAI,SAAS,WAAW,IAAI,CAC1B,QAAO;EAET,MAAM,aAAa,eAAe,SAAS;AAC3C,MAAI,eAAe,KACjB,QAAO;AAET,MAAI,OAAO,eAAe,YAAY,CAAC,MAAM,QAAQ,WAAW,CAC9D,QAAO;AAET,SAAO,OAAO;;AAGhB,QAAO;;AAGT,SAAgB,eAAe,OAAiC;AAC9D,KAAI,CAAC,MACH,QAAO;CAGT,MAAM,UAAU,MAAM,MAAM;AAC5B,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,CAClD,QAAO,iBAAiB,QAAQ;CAGlC,MAAM,QAAQ,WAAW,QAAQ;AACjC,KAAI,MACF,QAAO;AAGT,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,SAAS,IAAI,CAClD,QAAO;AAGT,QAAO,eAAe,QAAQ;;AAGhC,SAAgB,YAAY,OAAwB;AAClD,KAAI,SAAS,KACX,QAAO;AAET,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,MAAM,KAAK,SAAS,OAAO,KAAK,CAAC,CAAC,KAAK,KAAK;AAErD,QAAO,OAAO,MAAM;;;;;AC/PtB,SAAgB,qBAAqB,aAAqD;CACxF,MAAM,SAAkC,EAAE;CAC1C,MAAM,QAAQ,YAAY,MAAM,KAAK;CACrC,IAAI,cAAc;CAClB,IAAI,cAA8C;CAClD,IAAI,oBAAoB;AAExB,MAAK,IAAI,QAAQ,GAAG,QAAQ,MAAM,QAAQ,SAAS,GAAG;EACpD,MAAM,UAAU,MAAM,UAAU;EAChC,MAAM,cAAc,QAAQ,MAAM;AAClC,MAAI,CAAC,eAAe,YAAY,WAAW,IAAI,CAC7C;AAGF,MAAI,YAAY,WAAW,KAAK,EAAE;GAChC,MAAM,QAAQ,YAAY,MAAM,mBAAmB;AAC
nD,OAAI,CAAC,MACH,QAAO;GAET,MAAM,kBAAkB,iBAAiB,MAAM,MAAM,GAAG;AACxD,OAAI,CAAC,gBACH,QAAO;GAET,MAAM,OAAO,qBAAqB,QAAQ,gBAAgB;GAC1D,MAAM,WAAoC,EAAE;AAC5C,QAAK,KAAK,SAAS;AACnB,iBAAc;AACd,iBAAc;AACd,uBAAoB;AACpB;;EAGF,MAAM,aAAa,YAAY,MAAM,gBAAgB;AACrD,MAAI,YAAY;GACd,MAAM,kBAAkB,iBAAiB,WAAW,MAAM,GAAG;AAC7D,OAAI,CAAC,gBACH,QAAO;AAET,iBAAc;AACd,uBAAoB;AACpB,iBAAc;AACd;;EAGF,MAAM,iBAAiB,YAAY,KAAK,QAAQ,GAAG,UAAU,mBAAmB,QAAQ;EACxF,MAAM,iBAAiB,eAAe,QAAQ,IAAI;AAClD,MAAI,mBAAmB,GACrB,QAAO;EAIT,MAAM,MAAM,iBADG,eAAe,MAAM,GAAG,eAAe,CAClB;EACpC,IAAI,WAAW,eAAe,MAAM,iBAAiB,EAAE,CAAC,MAAM;AAC9D,MAAI,CAAC,IACH,QAAO;EAGT,MAAM,kBAAkB,SAAS,WAAW,SAAM,GAAG,WAAQ,SAAS,WAAW,MAAM,GAAG,QAAQ;AAClG,MAAI,iBAAiB;GACnB,MAAM,eAAe,SAAS,QAAQ,iBAAiB,gBAAgB,OAAO;AAC9E,OAAI,iBAAiB,IAAI;IAEvB,MAAM,gBAAgB,mBADR,SAAS,MAAM,eAAe,gBAAgB,OAAO,CACpB;AAC/C,eAAW,GAAG,SAAS,MAAM,GAAG,eAAe,gBAAgB,OAAO,GAAG;UACpE;IACL,MAAM,YAAY;IACpB,IAAI,WAAW;IACf,IAAI,SAAS;AACb,WAAO,QAAQ,IAAI,MAAM,QAAQ;AAC/B,cAAS;KACT,MAAM,WAAW,MAAM,UAAU;AACjC,iBAAY,KAAK;AACjB,0BAAI,IAAI,OAAO,GAAG,UAAU,OAAO,EAAC,KAAK,SAAS,EAAE;AAClD,eAAS;AACT;;;AAGJ,QAAI,CAAC,OACH,QAAO;AAET,eAAW;;;EAIb,MAAM,aAAa,eAAe,SAAS;AAC3C,MAAI,eAAe,KACjB,QAAO;EAGT,MAAM,UAAU,cAAc,GAAG,YAAY,GAAG,QAAQ;AACxD,MAAI,OAAO,eAAe,YAAY,CAAC,MAAM,QAAQ,WAAW,EAAE;AAChE,QAAK,MAAM,CAAC,WAAW,gBAAgB,OAAO,QAAQ,WAAW,EAAE;IACjE,MAAM,WAAW,oBAAoB,GAAG,IAAI,GAAG,cAAc,GAAG,QAAQ,GAAG;AAC3E,QAAI,qBAAqB,YACvB,aAAY,YAAY,YAAY,YAAY;QAEhD,QAAO,YAAY,YAAY,YAAY;;AAG/C;;AAGF,MAAI,qBAAqB,aAAa;AACpC,eAAY,OAAO,YAAY,WAAW;AAC1C;;AAGF,SAAO,WAAW,YAAY,WAAW;;AAG3C,oBAAmB,OAAO;AAE1B,QAAO;;;;;ACjHT,MAAM,gBAAiD;CACrD,OAAO;CACP,OAAO;CACP,OAAO;CACR;AAED,SAAS,kBAAkB,OAAuB;AAChD,QAAO,MAAM,QAAQ,SAAS,KAAK;;AAGrC,SAAS,SAAS,MAAsB;AACtC,QAAO,KAAK,WAAW,IAAS,GAAG,KAAK,MAAM,EAAE,GAAG;;AAGrD,SAAS,aAAa,MAAsC;CAC1D,MAAM,QAAQ,KAAK,MAAM,iCAAiC;AAC1D,KAAI,CAAC,MACH,QAAO;AAET,QAAO,cAAc,MAAM,MAAM,OAAO;;AAG1C,SAAS,iBAAiB,aAAqB,MAA8D;AAC3G,KAAI,CAAC,KACH,QAAO;AAGT,KAAI,SAAS,OACX,KAAI;AACF,SAAO,KAAK,MAAM,YAAY;SACxB;AACN,SAAO;;AAIX,KAAI,SAAS,QAAQ;EACnB,
MAAM,8BAAoB,aAAa,EAAE,cAAc,OAAO,CAAC;AAC/D,MAAI,IAAI,OAAO,SAAS,EACtB,QAAO;EAET,MAAM,OAAO,IAAI,QAAQ;AACzB,MAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,MAAM,QAAQ,KAAK,CAC1D,QAAO;AAET,SAAO;;AAGT,KAAI,SAAS,OACX,QAAO,qBAAqB,YAAY;AAG1C,QAAO;;AAGT,SAAS,iBAAiB,MAAc,YAAmE;CACzG,IAAI,QAAQ;CACZ,IAAI,WAAW;CACf,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,YAAY,IAAI,KAAK,QAAQ,KAAK,GAAG;EAChD,MAAM,OAAO,KAAK,MAAM;AAExB,MAAI,UAAU;AACZ,OAAI,SAAS;AACX,cAAU;AACV;;AAGF,OAAI,SAAS,MAAM;AACjB,cAAU;AACV;;AAGF,OAAI,SAAS,KACX,YAAW;AAGb;;AAGF,MAAI,SAAS,MAAM;AACjB,cAAW;AACX;;AAGF,MAAI,SAAS,KAAK;AAChB,YAAS;AACT;;AAGF,MAAI,SAAS,KAAK;AAChB,YAAS;AACT,OAAI,UAAU,EAEZ,QAAO;IAAE,UADQ,KAAK,MAAM,YAAY,IAAI,EAAE;IAC3B,UAAU;IAAG;;;AAKtC,QAAO;;AAGT,SAAgB,cAAc,OAA+B;CAC3D,MAAM,aAAa,kBAAkB,MAAM;CAC3C,MAAM,QAAQ,WAAW,MAAM,KAAK;AACpC,KAAI,MAAM,WAAW,EACnB,QAAO;EAAE,aAAa;EAAM,SAAS;EAAY,MAAM;EAAM,iBAAiB;EAAM;AAGtF,OAAM,KAAK,SAAS,MAAM,MAAM,GAAG;CACnC,MAAM,uBAAuB,MAAM,KAAK,KAAK;CAE7C,MAAM,cAAc,aAAa,MAAM,MAAM,GAAG;AAChD,KAAI,CAAC,aAAa;EAEhB,MAAM,aADoB,qBAAqB,MAAM,YAAY,GAAG,MAAM,IACtC;AACpC,MAAI,qBAAqB,eAAe,IACtC,QAAO;GAAE,aAAa;GAAM,SAAS;GAAsB,MAAM;GAAM,iBAAiB;GAAM;EAGhG,MAAM,YAAY,iBAAiB,sBAAsB,UAAU;AACnE,MAAI,CAAC,UACH,QAAO;GAAE,aAAa;GAAM,SAAS;GAAsB,MAAM;GAAM,iBAAiB;GAAM;EAGhG,MAAME,gBAAc,UAAU;EAC9B,IAAI,UAAU,qBAAqB,MAAM,UAAU,WAAW,EAAE;AAChE,MAAI,QAAQ,WAAW,KAAK,CAC1B,WAAU,QAAQ,MAAM,EAAE;EAE5B,MAAM,OAAO,iBAAiBA,eAAa,OAAO;AAClD,MAAI,CAAC,KACH,QAAO;GAAE,aAAa;GAAM,SAAS;GAAsB,MAAM;GAAM,iBAAiB;GAAM;AAGhG,SAAO;GACL;GACA;GACA;GACA,iBAAiB;GAClB;;CAGH,IAAI,eAAe;AACnB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,EACrC,KAAI,aAAa,MAAM,MAAM,GAAG,KAAK,aAAa;AAChD,iBAAe;AACf;;AAIJ,KAAI,iBAAiB,GACnB,QAAO;EAAE,aAAa;EAAM,SAAS;EAAsB,MAAM;EAAM,iBAAiB;EAAM;CAGhG,MAAM,cAAc,MAAM,MAAM,GAAG,aAAa,CAAC,KAAK,KAAK;AAI3D,QAAO;EACL;EACA,SALc,MAAM,MAAM,eAAe,EAAE,CAAC,KAAK,KAAK;EAMtD,MALW,iBAAiB,aAAa,YAAY;EAMrD,iBAAiB;EAClB;;;;;ACnKH,SAAS,cAAc,OAAwB;AAC7C,KAAI,SAAS,KACX,QAAO;AAET,KAAI,OAAO,UAAU,SACnB,QAAO;AAET,KAAI,OAAO,UAAU,YAAY,OAAO,UAAU,UAChD,QAAO,OAAO,MAAM;AAEtB,KAAI;AACF,SAAO,KAAK,U
AAU,MAAM;SACtB;AACN,SAAO,OAAO,MAAM;;;AAIxB,SAAS,iBACP,MACA,MACA,SAC2E;AAC3E,KAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,MAAM,QAAQ,KAAK,CAC1D,QAAO,EAAE;AAGX,QAAO,OAAO,QAAQ,KAAK,CAAC,KAAK,CAAC,KAAK,WAAW;EAChD,MAAM,YAAY,cAAc,MAAM;AAEtC,SAAO;GACL,MAAM;GACN,QAAQ;GACR,QAAQC,WAJG,YAAY,GAAG,IAAI,IAAI,cAAc,KAItB,QAAQ;GACnC;GACD;;AAGJ,SAAS,gBACP,MACA,MACA,MACA,SACA,QACA;AACA,QAAO,CAAC;EAAE;EAAM;EAAQ,QAAQA,WAAY,MAAM,QAAQ;EAAE,CAAC;;AAG/D,SAAS,UAAU,OAAqD;AACtE,QAAO,MAAM,QAAQ,KAAK,SAAS,MAAM,KAAK,OAAO,OAAO,EAAE;;AAGhE,SAAgB,cACd,OACA,SACA,UAA8B,EAAE,EACf;CACjB,MAAM,OAAwB,QAAQ,QAAQ;AAC9C,KAAI,YAAY,OAAO;EACrB,MAAM,SAASA,WAAY,OAAO,QAAQ;AAC1C,SAAO;GACL;GACA,OAAO,OAAO;GACd,iBAAiB;GACjB,OAAO,CAAC;IAAE,MAAM;IAAO,QAAQ;IAAW;IAAQ,CAAC;GACpD;;CAGH,MAAM,SAAS,cAAc,MAAM;CACnC,MAAM,kBAAkB,OAAO,eAAe;CAC9C,MAAM,cAAc,OAAO,WAAW;CAEtC,IAAI,QAA+F,EAAE;AAErG,KAAI,YAAY,cACd,SAAQ,gBAAgB,eAAe,iBAAiB,MAAM,SAAS,cAAc;UAC5E,YAAY,UACrB,SAAQ,gBAAgB,WAAW,aAAa,MAAM,SAAS,UAAU;UAChE,YAAY,QACrB,SAAQ,CACN,GAAG,gBAAgB,eAAe,iBAAiB,MAAM,SAAS,cAAc,EAChF,GAAG,gBAAgB,WAAW,aAAa,MAAM,SAAS,UAAU,CACrE;UACQ,YAAY,UACrB,SAAQ,iBAAiB,OAAO,MAAM,MAAM,QAAQ;UAC3C,YAAY,gBACrB,SAAQ,CACN,GAAG,iBAAiB,OAAO,MAAM,MAAM,QAAQ,EAC/C,GAAG,gBAAgB,WAAW,aAAa,MAAM,SAAS,UAAU,CACrE;AAGH,QAAO;EACL;EACA,OAAO,UAAU,MAAM;EACvB,iBAAiB,OAAO;EACxB;EACD;;;;;ACrGH,SAAgB,yBAAyB,OAAe,MAAsB;AAC5E,QAAO,GAAG,MAAM,GAAG,OAAO,UAAU,IAAI,KAAK;;;;;ACO/C,MAAM,aAAa,OAAO,OAAOC,YAAa;CAC5C,SAASA;CACT;CACA;CACA;CACA;CACA;CACA;CACA;CACD,CAAC;iBAEO"}