@cspell/cspell-tools 9.6.1 → 9.6.3

package/dist/app.mjs CHANGED
@@ -1,4 +1,4 @@
- import { a as reportCheckChecksumFile, c as toError, d as compressFile, i as logWithTimestamp, l as generateBTrie, n as compile, o as reportChecksumForFiles, r as setLogger, s as updateChecksumForFiles, t as build, u as OSFlags } from "./build-OgMPaXPZ.mjs";
+ import { a as reportCheckChecksumFile, c as toError, d as compressFile, i as logWithTimestamp, l as generateBTrie, n as compile, o as reportChecksumForFiles, r as setLogger, s as updateChecksumForFiles, t as build, u as OSFlags } from "./build-BGL2P0c2.mjs";
  import { readFileSync } from "node:fs";
  import { CommanderError, Option } from "commander";
  import { writeFile } from "node:fs/promises";
@@ -9,7 +9,7 @@ import YAML from "yaml";
 
  //#region src/util/globP.ts
  function globP(pattern, options) {
- return glob((Array.isArray(pattern) ? pattern : [pattern]).map((pattern$1) => pattern$1.replaceAll("\\", "/")), options);
+ return glob((Array.isArray(pattern) ? pattern : [pattern]).map((pattern) => pattern.replaceAll("\\", "/")), options);
  }
 
  //#endregion
package/dist/build-OgMPaXPZ.mjs → package/dist/build-BGL2P0c2.mjs RENAMED
@@ -1,7 +1,7 @@
  import { promises } from "node:fs";
  import fs, { mkdir, readFile, writeFile } from "node:fs/promises";
  import * as path from "node:path";
- import fsPath, { resolve, sep } from "node:path";
+ import fsPath, { dirname, relative, resolve, sep } from "node:path";
  import * as Trie from "cspell-trie-lib";
  import { COMPOUND_FIX, FORBID_PREFIX, createDictionaryLineParser, decodeTrie, encodeTrieDataToBTrie, parseDictionary } from "cspell-trie-lib";
  import { promisify } from "node:util";
@@ -18,14 +18,14 @@ import { Buffer } from "node:buffer";
  //#region src/gzip/compressFiles.ts
  const gzip$1 = promisify(gzip);
  const gunzip$1 = promisify(gunzip);
- let OSFlags = /* @__PURE__ */ function(OSFlags$1) {
- OSFlags$1[OSFlags$1["auto"] = -1] = "auto";
- OSFlags$1[OSFlags$1["FAT"] = 0] = "FAT";
- OSFlags$1[OSFlags$1["Unix"] = 3] = "Unix";
- OSFlags$1[OSFlags$1["HPFS"] = 6] = "HPFS";
- OSFlags$1[OSFlags$1["MACOS"] = 7] = "MACOS";
- OSFlags$1[OSFlags$1["NTFS"] = 11] = "NTFS";
- return OSFlags$1;
+ let OSFlags = /* @__PURE__ */ function(OSFlags) {
+ OSFlags[OSFlags["auto"] = -1] = "auto";
+ OSFlags[OSFlags["FAT"] = 0] = "FAT";
+ OSFlags[OSFlags["Unix"] = 3] = "Unix";
+ OSFlags[OSFlags["HPFS"] = 6] = "HPFS";
+ OSFlags[OSFlags["MACOS"] = 7] = "MACOS";
+ OSFlags[OSFlags["NTFS"] = 11] = "NTFS";
+ return OSFlags;
  }({});
  const OSSystemIDOffset = 9;
  async function compressFile(file, os) {
@@ -238,15 +238,17 @@ async function createReader(filename, options) {
  //#endregion
  //#region src/compiler/bTrie.ts
  async function generateBTrieFromFile(file, options) {
- console.log(`Processing file: ${file}`);
+ const log = options.logger || console.log.bind(console);
+ log(`Processing file: ${file}`);
  const btrie = await createBTrieFromFile(file, options);
  const outFile = bTrieFileName(file, options);
  await mkdir(fsPath.dirname(outFile), { recursive: true });
  await writeFile$1(outFile, btrie);
- console.log(`Written BTrie to: ${outFile}`);
+ log(`Written BTrie to: ${outFile}`);
+ return outFile;
  }
  async function generateBTrieFromFiles(files, options) {
- console.log(`Generating BTrie for ${files.length} file(s).`);
+ (options.logger || console.log.bind(console))(`Generating BTrie for ${files.length} file(s).`);
  for (const file of files) await generateBTrieFromFile(file, options);
  }
  function bTrieFileName(inputFilename, options) {
@@ -343,17 +345,18 @@ async function shasumFile(filename, root) {
  /**
  *
  * @param filename - name of checksum file
- * @param files - optional list of files to check
+ * @param files - optional list of files to check - they will be resolved relative to the checksum file.
  * @param root - optional root, default cwd.
  */
  async function checkShasumFile(filename, files, root) {
  files = !files ? files : files.length ? files : void 0;
+ const resolvedRoot = resolve(root || ".");
+ const fileDir = dirname(resolve(resolvedRoot, filename));
  const shaFiles = await readAndParseShasumFile(filename);
- const filesToCheck = !files ? shaFiles.map(({ filename: filename$1 }) => filename$1) : files;
+ const relFilesToCheck = (!files ? shaFiles.map(({ filename }) => filename) : files).map((f) => relative(fileDir, resolve(fileDir, f)));
  const mapNameToChecksum = new Map(shaFiles.map((r) => [normalizeFilename(r.filename), r.checksum]));
- const resolvedRoot = resolve(root || ".");
- const results = await Promise.all(filesToCheck.map(normalizeFilename).map((filename$1) => {
- return tryToCheckFile(filename$1, resolvedRoot, mapNameToChecksum.get(filename$1));
+ const results = await Promise.all(relFilesToCheck.map(normalizeFilename).map((filename) => {
+ return tryToCheckFile(filename, resolvedRoot, mapNameToChecksum.get(filename));
  }));
  return {
  passed: !results.some((v) => !v.passed),
@@ -416,7 +419,7 @@ async function reportChecksumForFiles(files, options) {
  async function reportCheckChecksumFile(filename, files, options) {
  const root = options.root;
  const results = (await checkShasumFile(filename, await resolveFileList(files, options.listFile), root)).results;
- const lines = results.map(({ filename: filename$1, passed: passed$1, error }) => `${filename$1}: ${passed$1 ? "OK" : "FAILED"} ${error ? "- " + error.message : ""}`.trim());
+ const lines = results.map(({ filename, passed, error }) => `${filename}: ${passed ? "OK" : "FAILED"} ${error ? "- " + error.message : ""}`.trim());
  const withErrors = results.filter((a) => !a.passed);
  const passed = !withErrors.length;
  if (!passed) lines.push(`shasum: WARNING: ${withErrors.length} computed checksum${withErrors.length > 1 ? "s" : ""} did NOT match`);
@@ -446,7 +449,7 @@ async function calcUpdateChecksumForFiles(filename, files, options) {
  }));
  const entriesToUpdate = new Set([...filesToCheck, ...currentEntries.map((e) => e.filename)]);
  const mustExist = new Set(filesToCheck);
- const checksumMap = new Map(currentEntries.map(({ filename: filename$1, checksum }) => [filename$1, checksum]));
+ const checksumMap = new Map(currentEntries.map(({ filename, checksum }) => [filename, checksum]));
  for (const file of entriesToUpdate) try {
  const checksum = await calcFileChecksum(resolve(root, file));
  checksumMap.set(file, checksum);
@@ -454,8 +457,8 @@ async function calcUpdateChecksumForFiles(filename, files, options) {
  if (mustExist.has(file) || toError(e).code !== "ENOENT") throw e;
  checksumMap.delete(file);
  }
- return [...checksumMap].map(([filename$1, checksum]) => ({
- filename: filename$1,
+ return [...checksumMap].map(([filename, checksum]) => ({
+ filename,
  checksum
  })).sort((a, b) => a.filename < b.filename ? -1 : 1).map((e) => `${e.checksum} ${e.filename}`).join("\n") + "\n";
  }
@@ -530,11 +533,11 @@ function removeVerboseFromRegExp(pattern) {
  acc.idx++;
  let escCount = 0;
  while (acc.idx < pattern.length) {
- const char$1 = pattern[acc.idx];
- acc.result += char$1;
+ const char = pattern[acc.idx];
+ acc.result += char;
  acc.idx++;
- if (char$1 === "]" && !(escCount & 1)) break;
- escCount = char$1 === "\\" ? escCount + 1 : 0;
+ if (char === "]" && !(escCount & 1)) break;
+ escCount = char === "\\" ? escCount + 1 : 0;
  }
  return acc;
  }
@@ -609,9 +612,9 @@ function buildHasFn(dict) {
  async function readFile$1(filename) {
  return await createReader(filename, {});
  }
- function readersToCollection(readers$1) {
- const dictReaders = readers$1.filter(isDictionaryReader).map(dictReaderToCollection);
- const nonDictCollection = lineReadersToCollection(readers$1.filter((a) => !isDictionaryReader(a)));
+ function readersToCollection(readers) {
+ const dictReaders = readers.filter(isDictionaryReader).map(dictReaderToCollection);
+ const nonDictCollection = lineReadersToCollection(readers.filter((a) => !isDictionaryReader(a)));
  const collections = [...dictReaders, nonDictCollection];
  return {
  size: collections.reduce((s, a) => s + a.size, 0),
@@ -636,9 +639,9 @@ function dictReaderToCollection(reader) {
  has: buildHasFn(reader)
  };
  }
- function lineReadersToCollection(readers$1) {
+ function lineReadersToCollection(readers) {
  function* words() {
- for (const reader of readers$1) yield* reader.lines;
+ for (const reader of readers) yield* reader.lines;
  }
  const dict = parseDictionary(words(), { stripCaseAndAccents: false });
  return {
@@ -686,7 +689,7 @@ function isSingleLetter(c) {
  //#region src/compiler/splitCamelCaseIfAllowed.ts
  const regExpSpaceOrDash = /[- ]+/g;
  const regExpIsNumber = /^\d+$/;
- function splitCamelCaseIfAllowed(word, allowedWords, keepCase, compoundPrefix, minCompoundLength$1) {
+ function splitCamelCaseIfAllowed(word, allowedWords, keepCase, compoundPrefix, minCompoundLength) {
  const split = [...splitCamelCase(word)];
  if (split.length === 1) return adjustCases(split, allowedWords, keepCase);
  if (split.some((w) => isUnknown(w, allowedWords))) return [word];
@@ -694,7 +697,7 @@ function splitCamelCaseIfAllowed(word, allowedWords, keepCase, compoundPrefix, m
  const adjusted = adjustCases(split, allowedWords, keepCase);
  return !compoundPrefix ? adjusted : adjusted.map((w, i) => {
  const { px, sx } = wordIndexes[i];
- const canCompound = w.length >= minCompoundLength$1;
+ const canCompound = w.length >= minCompoundLength;
  const lc = w.toLowerCase();
  const p = canCompound && isSingleLetter(px) ? compoundPrefix : "";
  const s = canCompound && isSingleLetter(sx) ? compoundPrefix : "";
@@ -718,7 +721,7 @@ function isUnknown(word, allowedWords) {
  return !allowedWords.has(word, false);
  }
  function splitCamelCase(word) {
- const splitWords = splitCamelCaseWord(word).filter((word$1) => !regExpIsNumber.test(word$1));
+ const splitWords = splitCamelCaseWord(word).filter((word) => !regExpIsNumber.test(word));
  if (splitWords.length > 1 && regExpSpaceOrDash.test(word)) return splitWords.flatMap((w) => w.split(regExpSpaceOrDash));
  return splitWords;
  }
@@ -807,7 +810,7 @@ const cSpellToolDirective = "cspell-tools:";
  */
  function createParseFileLineMapper(options) {
  const _options = options || _defaultOptions;
- const { splitKeepBoth = _defaultOptions.splitKeepBoth, allowedSplitWords = _defaultOptions.allowedSplitWords, storeSplitWordsAsCompounds, minCompoundLength: minCompoundLength$1 = _defaultOptions.minCompoundLength } = _options;
+ const { splitKeepBoth = _defaultOptions.splitKeepBoth, allowedSplitWords = _defaultOptions.allowedSplitWords, storeSplitWordsAsCompounds, minCompoundLength = _defaultOptions.minCompoundLength } = _options;
  let { legacy = _defaultOptions.legacy } = _options;
  let { split = _defaultOptions.split, keepCase = legacy ? false : _defaultOptions.keepCase } = _options;
  const compoundFix = storeSplitWordsAsCompounds ? "+" : "";
@@ -866,7 +869,7 @@ function createParseFileLineMapper(options) {
  return line.split("|").map((a) => a.trim()).filter((a) => !!a).filter((a) => !/^[0-9_-]+$/.test(a)).filter((a) => !/^0[xo][0-9A-F]+$/i.test(a));
  }
  function splitWordIntoWords(word) {
- return splitCamelCaseIfAllowed(word, allowedSplitWords, keepCase, compoundFix, minCompoundLength$1);
+ return splitCamelCaseIfAllowed(word, allowedSplitWords, keepCase, compoundFix, minCompoundLength);
  }
  function* splitWords(lines) {
  for (const line of lines) {
@@ -890,10 +893,10 @@ function createParseFileLineMapper(options) {
  yield line;
  }
  }
- function* splitLines$1(paragraphs) {
+ function* splitLines(paragraphs) {
  for (const paragraph of paragraphs) yield* paragraph.split("\n");
  }
- return opCombine(opFilter(isString), splitLines$1, opMap(removeComments), splitWords, opMap(trim), opFilter(filterEmptyLines), unique);
+ return opCombine(opFilter(isString), splitLines, opMap(removeComments), splitWords, opMap(trim), opFilter(filterEmptyLines), unique);
  }
  /**
  * Normalizes a dictionary words based upon prefix / suffixes.
@@ -927,7 +930,7 @@ function splitLines(lines, options) {
  return split();
  }
  async function textFileReader(reader, options) {
- const { legacy, splitWords: split, allowedSplitWords, storeSplitWordsAsCompounds, minCompoundLength: minCompoundLength$1 } = options;
+ const { legacy, splitWords: split, allowedSplitWords, storeSplitWordsAsCompounds, minCompoundLength } = options;
  const parseOptions = {
  legacy,
  split,
@@ -935,7 +938,7 @@ async function textFileReader(reader, options) {
  keepCase: void 0,
  allowedSplitWords,
  storeSplitWordsAsCompounds,
- minCompoundLength: minCompoundLength$1
+ minCompoundLength
  };
  const words = [...parseFileLines(reader.lines, parseOptions)];
  return {
@@ -1031,7 +1034,7 @@ function* removeDuplicates(words) {
  const sLcForms = new Set(lcForm);
  yield* lcForm;
  if (sLcForms.has("*" + lc + "*")) continue;
- for (const forms$1 of mForms.values()) for (const form of forms$1) {
+ for (const forms of mForms.values()) for (const form of forms) {
  if (sLcForms.has(form.toLowerCase())) continue;
  yield form;
  }
@@ -1044,14 +1047,14 @@ function* removeDuplicates(words) {
  * required_prefix+
  * required_suffix+
  */
- var Flags = /* @__PURE__ */ function(Flags$1) {
- Flags$1[Flags$1["base"] = 0] = "base";
- Flags$1[Flags$1["none"] = 1] = "none";
- Flags$1[Flags$1["both"] = 2] = "both";
- Flags$1[Flags$1["pfx"] = 4] = "pfx";
- Flags$1[Flags$1["sfx"] = 8] = "sfx";
- Flags$1[Flags$1["all"] = 15] = "all";
- return Flags$1;
+ var Flags = /* @__PURE__ */ function(Flags) {
+ Flags[Flags["base"] = 0] = "base";
+ Flags[Flags["none"] = 1] = "none";
+ Flags[Flags["both"] = 2] = "both";
+ Flags[Flags["pfx"] = 4] = "pfx";
+ Flags[Flags["sfx"] = 8] = "sfx";
+ Flags[Flags["all"] = 15] = "all";
+ return Flags;
  }(Flags || {});
  function applyFlags(word, flags) {
  if (flags === Flags.none) return [word];
@@ -1096,22 +1099,22 @@ function removeDuplicateForms(forms) {
  return [form, applyFlags(form, flag)];
  }));
  }
- async function createTargetFile(destFilename, seq, compress$1) {
- const rel$1 = path.relative(process.cwd(), destFilename).replaceAll(path.sep, "/");
- getLogger()(`Writing to file ${rel$1}${compress$1 ? ".gz" : ""}`);
+ async function createTargetFile(destFilename, seq, compress) {
+ const rel = path.relative(process.cwd(), destFilename).replaceAll(path.sep, "/");
+ getLogger()(`Writing to file ${rel}${compress ? ".gz" : ""}`);
  await mkdirp(path.dirname(destFilename));
- await writeTextToFile(destFilename, seq, compress$1);
+ await writeTextToFile(destFilename, seq, compress);
  }
  function createTrieCompiler(options) {
  return (words) => {
- const log$1 = getLogger();
- log$1("Reading Words into Trie");
+ const log = getLogger();
+ log("Reading Words into Trie");
  const base = options.base ?? 32;
  const version = options.trie4 ? 4 : options.trie3 ? 3 : 1;
  const root = Trie.buildTrie(words).root;
- log$1("Reduce duplicate word endings");
+ log("Reduce duplicate word endings");
  const trie = Trie.consolidate(root);
- log$1("Trie compilation complete");
+ log("Trie compilation complete");
  return Trie.serializeTrie(trie, {
  base,
  comment: "Built by cspell-tools.",
@@ -1160,27 +1163,45 @@ function resolveChecksumFile(checksumFile, root) {
  }
  async function compileTarget(target, options, compileOptions) {
  logWithTimestamp(`Start compile: ${target.name}`);
- const { rootDir, cwd, checksumFile, conditional } = compileOptions;
- const { format, sources, trieBase, sort = true, generateNonStrict = false, excludeWordsFrom = [], excludeWordsNotFoundIn = [], excludeWordsMatchingRegex } = target;
+ const buildOptions = genBuildTargetDictionaryOptions(target, compileOptions);
+ const deps = /* @__PURE__ */ new Set();
+ addToSet(deps, await buildTargetDictionary(target, options, compileOptions, buildOptions), await compressDictionaryFileIfNeeded(buildOptions), await genBTrieForTarget(target, buildOptions));
+ logWithTimestamp(`Done compile: ${target.name}`);
+ const checksumRoot = buildOptions.checksumRoot;
+ return [...deps].map((d) => path.relative(checksumRoot, d));
+ }
+ function genBuildTargetDictionaryOptions(target, compileOptions) {
+ const { rootDir, checksumFile, cwd } = compileOptions;
  let targetDirectory = target.targetDirectory ?? cwd ?? process.cwd();
  targetDirectory = targetDirectory.replace("${cwd}", cwd ?? process.cwd());
  targetDirectory = path.resolve(rootDir, targetDirectory);
+ const name = normalizeTargetName(target.name);
+ const useTrie = target.format.startsWith("trie");
+ const generateCompressed = target.compress ?? false;
+ const generateUncompressed = target.keepUncompressed ?? false;
+ const generateOnlyCompressedDictionary = generateCompressed && !generateUncompressed;
+ return {
+ name,
+ filename: resolveTarget(name, targetDirectory, useTrie),
+ useTrie,
+ generateOnlyCompressedDictionary,
+ generateCompressed,
+ checksumRoot: checksumFile && path.dirname(checksumFile) || rootDir
+ };
+ }
+ async function buildTargetDictionary(target, options, compileOptions, buildOptions) {
+ const { rootDir, checksumFile, conditional } = compileOptions;
+ const { format, sources, trieBase, sort = true, generateNonStrict = false, excludeWordsFrom = [], excludeWordsNotFoundIn = [], excludeWordsMatchingRegex } = target;
+ const { filename, useTrie, generateOnlyCompressedDictionary, checksumRoot } = buildOptions;
  const dictionaryDirectives = target.dictionaryDirectives ?? compileOptions.dictionaryDirectives;
- const removeDuplicates$1 = target.removeDuplicates ?? false;
+ const removeDuplicates = target.removeDuplicates ?? false;
  const excludeFromFilter = await createExcludeFilter(excludeWordsFrom);
  const includeFromFilter = await createIncludeFilter(excludeWordsNotFoundIn);
  const excludeRegexFilter = createExcludeRegexFilter(excludeWordsMatchingRegex);
  const excludeFilter = (word) => {
  return excludeFromFilter(word) && includeFromFilter(word) && excludeRegexFilter(word);
  };
- const name = normalizeTargetName(target.name);
- const useTrie = format.startsWith("trie");
- const generateCompressed = target.compress ?? false;
- const generateUncompressed = target.keepUncompressed ?? false;
- const genSet = /* @__PURE__ */ new Set();
- genSet.add(generateCompressed);
- if (generateUncompressed) genSet.add(false);
- const filename = resolveTarget(name, targetDirectory, useTrie);
+ const compress = generateOnlyCompressedDictionary;
  const filesToProcess = await toArray(pipeAsync(readSourceList(sources, rootDir), opMapAsync((src) => readFileSource(src, options)), opAwaitAsync()));
  const normalizer = normalizeTargetWords({
  sort: useTrie || sort,
@@ -1188,8 +1209,7 @@ async function compileTarget(target, options, compileOptions) {
  filter: excludeFilter,
  dictionaryDirectives
  });
- const checksumRoot = checksumFile && path.dirname(checksumFile) || rootDir;
- const deps = [...calculateDependencies(filename + (generateCompressed ? ".gz" : ""), filesToProcess, [...excludeWordsFrom, ...excludeWordsNotFoundIn], checksumRoot)];
+ const deps = [...calculateDependencies(filename + (compress ? ".gz" : ""), filesToProcess, [...excludeWordsFrom, ...excludeWordsNotFoundIn], checksumRoot)];
  if (conditional && checksumFile) {
  if ((await checkShasumFile(checksumFile, deps, checksumRoot).catch(() => void 0))?.passed) {
  logWithTimestamp(`Skip ${target.name}, nothing changed.`);
@@ -1197,7 +1217,7 @@ async function compileTarget(target, options, compileOptions) {
  }
  }
  async function action(words, dst) {
- const data = iterableToString(pipe(words, normalizer, useTrie ? createTrieCompiler({
+ await createTargetFile(dst, iterableToString(pipe(words, normalizer, useTrie ? createTrieCompiler({
  base: trieBase,
  trie3: format === "trie3",
  trie4: format === "trie4"
@@ -1205,35 +1225,46 @@ async function compileTarget(target, options, compileOptions) {
  sort,
  generateNonStrict,
  dictionaryDirectives,
- removeDuplicates: removeDuplicates$1
- })));
- for (const compress$1 of genSet) await createTargetFile(dst, data, compress$1);
+ removeDuplicates
+ }))), compress);
  }
  await processFiles({
  action,
  filesToProcess,
  mergeTarget: filename
  });
- if (target.bTrie) await generateBTrieFromFile(filename, {
- compress: true,
- optimize: true,
- useStringTable: true,
- ...typeof target.bTrie === "object" ? target.bTrie : {}
- });
- logWithTimestamp(`Done compile: ${target.name}`);
  return deps;
  }
- function calculateDependencies(targetFile, filesToProcess, extraDependencyFiles, rootDir) {
+ async function compressDictionaryFileIfNeeded(buildOptions) {
+ const { filename, generateCompressed, generateOnlyCompressedDictionary } = buildOptions;
+ if (generateOnlyCompressedDictionary || !generateCompressed) return [];
+ logWithTimestamp(`Compress: ${filename}`);
+ return [filename, await compressFile(filename)];
+ }
+ async function genBTrieForTarget(target, buildOptions) {
+ if (!target.bTrie) return [];
+ const cfg = typeof target.bTrie === "object" ? target.bTrie : {};
+ const { filename, generateOnlyCompressedDictionary } = buildOptions;
+ const srcFilename = filename + (generateOnlyCompressedDictionary ? ".gz" : "");
+ logWithTimestamp(`Generate BTrie from: ${srcFilename}`);
+ return [srcFilename, await generateBTrieFromFile(srcFilename, {
+ compress: cfg.compress ?? true,
+ optimize: cfg.optimize ?? true,
+ useStringTable: cfg.useStringTable ?? true,
+ logger: logWithTimestamp
+ })];
+ }
+ function calculateDependencies(targetFile, filesToProcess, extraDependencyFiles, checksumRoot) {
  const dependencies = /* @__PURE__ */ new Set();
  addDependency(targetFile);
  extraDependencyFiles?.forEach((f) => addDependency(f));
  filesToProcess.forEach((f) => addDependency(f.src));
  return dependencies;
  function addDependency(filename) {
- const rel$1 = path.relative(rootDir, filename);
- dependencies.add(rel$1);
- dependencies.add(rel$1.replace(/\.aff$/, ".dic"));
- dependencies.add(rel$1.replace(/\.dic$/, ".aff"));
+ const abs = path.resolve(checksumRoot, filename);
+ dependencies.add(abs);
+ dependencies.add(abs.replace(/\.aff$/, ".dic"));
+ dependencies.add(abs.replace(/\.dic$/, ".aff"));
  }
  }
  function rel(filePath) {
@@ -1288,7 +1319,7 @@ async function readFileList(fileList) {
  return (await readTextFile(fileList)).split("\n").map((a) => a.trim()).filter((a) => !!a);
  }
  async function readFileSource(fileSource, sourceOptions) {
- const { filename, keepRawCase = sourceOptions.keepRawCase || false, split = sourceOptions.split || false, maxDepth, storeSplitWordsAsCompounds, minCompoundLength: minCompoundLength$1 } = fileSource;
+ const { filename, keepRawCase = sourceOptions.keepRawCase || false, split = sourceOptions.split || false, maxDepth, storeSplitWordsAsCompounds, minCompoundLength } = fileSource;
  const legacy = split === "legacy";
  const readerOptions = {
  maxDepth,
@@ -1297,7 +1328,7 @@ async function readFileSource(fileSource, sourceOptions) {
  keepCase: keepRawCase,
  allowedSplitWords: await createAllowedSplitWordsFromFiles(fileSource.allowedSplitWords || sourceOptions.allowedSplitWords),
  storeSplitWordsAsCompounds,
- minCompoundLength: minCompoundLength$1
+ minCompoundLength
  };
  logWithTimestamp(`Reading ${path.basename(filename)}`);
  const stream = await streamSourceWordsFromFile(filename, readerOptions);
@@ -1311,7 +1342,7 @@ function normalizeTargetName(name) {
  return name.replace(/((\.txt|\.dic|\.aff|\.trie)(\.gz)?)?$/, "").replaceAll(/[^\p{L}\p{M}.\w\\/-]/gu, "_");
  }
  function logProgress(freq = 1e5) {
- function* logProgress$1(iter) {
+ function* logProgress(iter) {
  const _freq = freq;
  let count = 0;
  for (const v of iter) {
@@ -1320,7 +1351,7 @@ function logProgress(freq = 1e5) {
  yield v;
  }
  }
- return logProgress$1;
+ return logProgress;
  }
  /**
  * @param excludeWordsFrom - List of files to read words from.
@@ -1356,6 +1387,9 @@ function createExcludeRegexFilter(excludeWordsMatchingRegex) {
  function iterableToString(iter) {
  return Array.isArray(iter) ? iter.join("") : [...iter].join("");
  }
+ function addToSet(set, ...sources) {
+ for (const items of sources) for (const item of items) set.add(item);
+ }
 
  //#endregion
  //#region src/build.ts
@@ -1400,4 +1434,4 @@ function normalizeRequest(buildInfo, root) {
 
  //#endregion
  export { reportCheckChecksumFile as a, toError as c, compressFile as d, logWithTimestamp as i, generateBTrie as l, compile as n, reportChecksumForFiles as o, setLogger as r, updateChecksumForFiles as s, build as t, OSFlags as u };
- //# sourceMappingURL=build-OgMPaXPZ.mjs.map
+ //# sourceMappingURL=build-BGL2P0c2.mjs.map
package/dist/index.d.mts CHANGED
@@ -1,5 +1,4 @@
  //#region src/config/config.d.ts
-
  interface BTrieOptions {
  /** compress the resulting file */
  compress?: boolean | undefined;
@@ -9,10 +8,14 @@ interface BTrieOptions {
  useStringTable?: boolean | undefined;
  }
  //#endregion
+ //#region src/compiler/logger.d.ts
+ type Logger = (message?: any, ...optionalParams: any[]) => void;
+ //#endregion
  //#region src/compiler/bTrie.d.ts
  interface GenerateBTrieOptions extends BTrieOptions {
  /** output directory */
  output?: string;
+ logger?: Logger;
  }
  //#endregion
  //#region src/bTrie.d.ts
package/dist/index.mjs CHANGED
@@ -1,3 +1,3 @@
- import { l as generateBTrie, t as build } from "./build-OgMPaXPZ.mjs";
+ import { l as generateBTrie, t as build } from "./build-BGL2P0c2.mjs";
 
  export { build, generateBTrie };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@cspell/cspell-tools",
- "version": "9.6.1",
+ "version": "9.6.3",
  "description": "Tools to assist with the development of cSpell",
  "publishConfig": {
  "access": "public",
@@ -64,12 +64,12 @@
  },
  "homepage": "https://github.com/streetsidesoftware/cspell/tree/main/packages/cspell-tools#readme",
  "dependencies": {
- "@cspell/cspell-pipe": "9.6.1",
- "commander": "^14.0.2",
+ "@cspell/cspell-pipe": "9.6.3",
+ "commander": "^14.0.3",
  "cosmiconfig": "9.0.0",
- "cspell-trie-lib": "9.6.1",
+ "cspell-trie-lib": "9.6.3",
  "glob": "^13.0.0",
- "hunspell-reader": "9.6.1",
+ "hunspell-reader": "9.6.3",
  "yaml": "^2.8.2"
  },
  "engines": {
@@ -80,5 +80,5 @@
  "ts-json-schema-generator": "^2.4.0"
  },
  "module": "bin.mjs",
- "gitHead": "666fb79096d25c53af9519cad07030e7aca597e1"
+ "gitHead": "500b996b6c0a6ff025c42ef98db44776f43a9e72"
  }