@intlayer/chokidar 7.1.1-canary.0 → 7.1.1

This diff compares publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
@@ -18,22 +18,23 @@ __intlayer_config_package_json = require_rolldown_runtime.__toESM(__intlayer_con
  const DEFAULT_PREPARE_INTLAYER_OPTIONS = {
  clean: false,
  format: ["cjs", "esm"],
- forceRun: false
+ cacheTimeoutMs: 1e3 * 60 * 60
  };
  const prepareIntlayer = async (configuration, options) => {
- const { clean, format, forceRun, onIsCached } = {
- ...DEFAULT_PREPARE_INTLAYER_OPTIONS,
- ...options ?? {}
- };
  (0, __intlayer_config.checkVersionsConsistency)(configuration);
  const appLogger = (0, __intlayer_config.getAppLogger)(configuration);
  const sentinelPath = (0, node_path.join)(configuration.content.cacheDir, "intlayer-prepared.lock");
  const versionCache = (0, __intlayer_config.cacheDisk)(configuration, ["intlayer-version"]);
  const intlayerCacheVersion = await versionCache.get();
- if (clean || intlayerCacheVersion && intlayerCacheVersion !== __intlayer_config_package_json.default.version) await require_cleanOutputDir.cleanOutputDir(configuration);
- await versionCache.set(__intlayer_config_package_json.default.version);
+ const isCorrectVersion = Boolean(intlayerCacheVersion && intlayerCacheVersion === __intlayer_config_package_json.default.version);
+ const { clean, format, forceRun, onIsCached, cacheTimeoutMs } = {
+ ...DEFAULT_PREPARE_INTLAYER_OPTIONS,
+ forceRun: !isCorrectVersion,
+ ...options ?? {}
+ };
  await require_utils_runOnce.runOnce(sentinelPath, async () => {
- const { plugins } = configuration;
+ if (clean || !isCorrectVersion) await require_cleanOutputDir.cleanOutputDir(configuration);
+ await versionCache.set(__intlayer_config_package_json.default.version);
  const preparationStartMs = Date.now();
  appLogger(["Preparing Intlayer", (0, __intlayer_config.colorize)(`(v${__intlayer_config_package_json.default.version})`, __intlayer_config.ANSIColors.GREY_DARK)]);
  await require_writeConfiguration_index.writeConfiguration(configuration);
@@ -60,7 +61,7 @@ const prepareIntlayer = async (configuration, options) => {
  appLogger(["Dictionaries built", (0, __intlayer_config.colorize)(`(${dictionariesBuiltTime - preparationStartMs}ms)`, __intlayer_config.ANSIColors.GREY_DARK)]);
  await require_createType_createModuleAugmentation.createModuleAugmentation(configuration);
  appLogger(["Module augmentation built", (0, __intlayer_config.colorize)(`(${Date.now() - dictionariesBuiltTime}ms)`, __intlayer_config.ANSIColors.GREY_DARK)], { isVerbose: true });
- for await (const plugin of plugins ?? []) {
+ for await (const plugin of configuration.plugins ?? []) {
  const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;
  await plugin.afterBuild?.({
  dictionaries: {
@@ -77,7 +78,7 @@ const prepareIntlayer = async (configuration, options) => {
  }, {
  forceRun,
  onIsCached,
- cacheTimeoutMs: 1e3 * 60 * 60
+ cacheTimeoutMs
  });
  };
 
@@ -1 +1 @@
- {"version":3,"file":"prepareIntlayer.cjs","names":["packageJson","cleanOutputDir","runOnce","ANSIColors","writeConfiguration","loadDictionaries","listDictionaries","buildDictionary","writeRemoteDictionary","createTypes","createDictionaryEntryPoint","createModuleAugmentation"],"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":["import { join } from 'node:path';\nimport {\n ANSIColors,\n cacheDisk,\n checkVersionsConsistency,\n colorize,\n getAppLogger,\n} from '@intlayer/config';\nimport packageJson from '@intlayer/config/package.json' with { type: 'json' };\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { buildDictionary } from './buildIntlayerDictionary/buildIntlayerDictionary';\nimport { writeRemoteDictionary } from './buildIntlayerDictionary/writeRemoteDictionary';\nimport { cleanOutputDir } from './cleanOutputDir';\nimport { createDictionaryEntryPoint } from './createDictionaryEntryPoint/createDictionaryEntryPoint';\nimport { createModuleAugmentation, createTypes } from './createType/index';\nimport { listDictionaries } from './listDictionariesPath';\nimport { loadDictionaries } from './loadDictionaries/loadDictionaries';\nimport { runOnce } from './utils/runOnce';\nimport { writeConfiguration } from './writeConfiguration';\n\ntype PrepareIntlayerOptions = {\n clean?: boolean;\n format?: ('cjs' | 'esm')[];\n forceRun?: boolean;\n onIsCached?: () => void | Promise<void>;\n};\n\nconst DEFAULT_PREPARE_INTLAYER_OPTIONS = {\n clean: false,\n format: ['cjs', 'esm'],\n forceRun: false,\n} satisfies PrepareIntlayerOptions;\n\nexport const prepareIntlayer = async (\n configuration: IntlayerConfig,\n options?: PrepareIntlayerOptions\n) => {\n const { clean, format, forceRun, onIsCached } = {\n ...DEFAULT_PREPARE_INTLAYER_OPTIONS,\n ...(options ?? {}),\n };\n\n checkVersionsConsistency(configuration);\n const appLogger = getAppLogger(configuration);\n\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n\n // Clean output dir if the intlayer version has changed\n const versionCache = cacheDisk(configuration, ['intlayer-version']);\n const intlayerCacheVersion = await versionCache.get();\n if (\n clean ||\n (intlayerCacheVersion && intlayerCacheVersion !== packageJson.version)\n ) {\n await cleanOutputDir(configuration);\n }\n await versionCache.set(packageJson.version);\n\n // Skip preparation if it has already been done recently\n await runOnce(\n sentinelPath,\n async () => {\n const { plugins } = configuration;\n\n const preparationStartMs = Date.now();\n\n appLogger([\n 'Preparing Intlayer',\n colorize(`(v${packageJson.version})`, ANSIColors.GREY_DARK),\n ]);\n\n await writeConfiguration(configuration);\n\n const configurationWrittenTime = Date.now();\n\n appLogger(\n [\n 'Configuration written',\n colorize(\n `(${configurationWrittenTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n const files: string[] = await listDictionaries(configuration);\n\n const dictionaries = await loadDictionaries(files, configuration);\n\n const dictionariesLoadedTime = Date.now();\n\n appLogger(\n [\n 'Content loaded',\n colorize(\n [\n dictionaries.remoteDictionaries.length +\n dictionaries.pluginDictionaries.length >\n 0\n ? [\n `(Total: ${dictionariesLoadedTime - configurationWrittenTime}ms`,\n dictionaries.localDictionaries.length > 0\n ? `- Local: ${dictionaries.time.localDictionaries}ms`\n : '',\n dictionaries.remoteDictionaries.length > 0\n ? 
`- Remote: ${dictionaries.time.remoteDictionaries}ms`\n : '',\n dictionaries.pluginDictionaries.length > 0\n ? `- Plugin: ${dictionaries.time.pluginDictionaries}ms`\n : '',\n `)`,\n ].join('')\n : `(${dictionariesLoadedTime - configurationWrittenTime}ms)`,\n ].join(''),\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Build local dictionaries\n const dictionariesOutput = await buildDictionary(\n [\n ...dictionaries.localDictionaries,\n ...dictionaries.remoteDictionaries,\n ...dictionaries.pluginDictionaries,\n ],\n configuration,\n format,\n false\n );\n\n // Write remote dictionaries\n // Used as cache for next fetch\n await writeRemoteDictionary(\n dictionaries.remoteDictionaries,\n configuration\n );\n\n const dictionariesPaths = Object.values(\n dictionariesOutput?.mergedDictionaries ?? {}\n ).map((dictionary) => dictionary.dictionaryPath);\n\n await createTypes(dictionariesPaths, configuration);\n\n await createDictionaryEntryPoint(configuration);\n\n const dictionariesBuiltTime = Date.now();\n\n appLogger([\n 'Dictionaries built',\n colorize(\n `(${dictionariesBuiltTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ]);\n\n await createModuleAugmentation(configuration);\n\n const moduleAugmentationBuiltTime = Date.now();\n\n appLogger(\n [\n 'Module augmentation built',\n colorize(\n `(${moduleAugmentationBuiltTime - dictionariesBuiltTime}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Plugin transformation\n // Allow plugins to post-process the final build output (e.g., write back ICU JSON)\n for await (const plugin of plugins ?? []) {\n const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;\n\n await plugin.afterBuild?.({\n dictionaries: {\n unmergedDictionaries,\n mergedDictionaries,\n },\n configuration,\n });\n }\n\n const preparationElapsedMs = Date.now() - preparationStartMs;\n appLogger(\n [`Done`, colorize(`${preparationElapsedMs}ms`, ANSIColors.GREEN)],\n {\n level: 'info',\n isVerbose: true,\n }\n );\n },\n {\n forceRun,\n onIsCached,\n cacheTimeoutMs: 1000 * 60 * 60, // 1 hour\n }\n 
);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;AA2BA,MAAM,mCAAmC;CACvC,OAAO;CACP,QAAQ,CAAC,OAAO,MAAM;CACtB,UAAU;CACX;AAED,MAAa,kBAAkB,OAC7B,eACA,YACG;CACH,MAAM,EAAE,OAAO,QAAQ,UAAU,eAAe;EAC9C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;AAED,iDAAyB,cAAc;CACvC,MAAM,gDAAyB,cAAc;CAE7C,MAAM,mCACJ,cAAc,QAAQ,UACtB,yBACD;CAGD,MAAM,gDAAyB,eAAe,CAAC,mBAAmB,CAAC;CACnE,MAAM,uBAAuB,MAAM,aAAa,KAAK;AACrD,KACE,SACC,wBAAwB,yBAAyBA,uCAAY,QAE9D,OAAMC,sCAAe,cAAc;AAErC,OAAM,aAAa,IAAID,uCAAY,QAAQ;AAG3C,OAAME,8BACJ,cACA,YAAY;EACV,MAAM,EAAE,YAAY;EAEpB,MAAM,qBAAqB,KAAK,KAAK;AAErC,YAAU,CACR,sDACS,KAAKF,uCAAY,QAAQ,IAAIG,6BAAW,UAAU,CAC5D,CAAC;AAEF,QAAMC,oDAAmB,cAAc;EAEvC,MAAM,2BAA2B,KAAK,KAAK;AAE3C,YACE,CACE,yDAEE,IAAI,2BAA2B,mBAAmB,MAClDD,6BAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAID,MAAM,eAAe,MAAME,2DAFH,MAAMC,8CAAiB,cAAc,EAEV,cAAc;EAEjE,MAAM,yBAAyB,KAAK,KAAK;AAEzC,YACE,CACE,kDAEE,CACE,aAAa,mBAAmB,SAC9B,aAAa,mBAAmB,SAClC,IACI;GACE,WAAW,yBAAyB,yBAAyB;GAC7D,aAAa,kBAAkB,SAAS,IACpC,YAAY,aAAa,KAAK,kBAAkB,MAChD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ;GACD,CAAC,KAAK,GAAG,GACV,IAAI,yBAAyB,yBAAyB,KAC3D,CAAC,KAAK,GAAG,EACVH,6BAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAGD,MAAM,qBAAqB,MAAMI,wEAC/B;GACE,GAAG,aAAa;GAChB,GAAG,aAAa;GAChB,GAAG,aAAa;GACjB,EACD,eACA,QACA,MACD;AAID,QAAMC,4EACJ,aAAa,oBACb,cACD;AAMD,QAAMC,0CAJoB,OAAO,OAC/B,oBAAoB,sBAAsB,EAAE,CAC7C,CAAC,KAAK,eAAe,WAAW,eAAe,EAEX,cAAc;AAEnD,QAAMC,yFAA2B,cAAc;EAE/C,MAAM,wBAAwB,KAAK,KAAK;AAExC,YAAU,CACR,sDAEE,IAAI,wBAAwB,mBAAmB,MAC/CP,6BAAW,UACZ,CACF,CAAC;AAEF,QAAMQ,qEAAyB,cAAc;AAI7C,YACE,CACE,6DAEE,IAN8B,KAAK,KAAK,GAMN,sBAAsB,MACxDR,6BAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;AAID,aAAW,MAAM,UAAU,WAAW,EAAE,EAAE;GACxC,MAAM,EAAE,sBAAsB,uBAAuB;AAErD,SAAM,OAAO,aAAa;IACxB,cAAc;KACZ;KACA;KACD;IACD;IACD,CAAC;;AAIJ,YACE,CAAC,wCAAiB,GAFS,KAAK,KAAK,GAAG,mBAEE,KAAKA,6BAAW,MAAM,CAAC,EACjE;GACE,OAAO;GACP,WAAW;GACZ,CACF;IAEH;EACE;EACA;EACA,gBAAgB,MAAO,KAAK;EAC7B,CACF"}
+ {"version":3,"file":"prepareIntlayer.cjs","names":["packageJson","runOnce","cleanOutputDir","ANSIColors","writeConfiguration","loadDictionaries","listDictionaries","buildDictionary","writeRemoteDictionary","createTypes","createDictionaryEntryPoint","createModuleAugmentation"],"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":["import { join } from 'node:path';\nimport {\n ANSIColors,\n cacheDisk,\n checkVersionsConsistency,\n colorize,\n getAppLogger,\n} from '@intlayer/config';\nimport packageJson from '@intlayer/config/package.json' with { type: 'json' };\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { buildDictionary } from './buildIntlayerDictionary/buildIntlayerDictionary';\nimport { writeRemoteDictionary } from './buildIntlayerDictionary/writeRemoteDictionary';\nimport { cleanOutputDir } from './cleanOutputDir';\nimport { createDictionaryEntryPoint } from './createDictionaryEntryPoint/createDictionaryEntryPoint';\nimport { createModuleAugmentation, createTypes } from './createType/index';\nimport { listDictionaries } from './listDictionariesPath';\nimport { loadDictionaries } from './loadDictionaries/loadDictionaries';\nimport { runOnce } from './utils/runOnce';\nimport { writeConfiguration } from './writeConfiguration';\n\ntype PrepareIntlayerOptions = {\n clean?: boolean;\n format?: ('cjs' | 'esm')[];\n forceRun?: boolean;\n cacheTimeoutMs?: number;\n onIsCached?: () => void | Promise<void>;\n};\n\nconst DEFAULT_PREPARE_INTLAYER_OPTIONS = {\n clean: false,\n format: ['cjs', 'esm'],\n cacheTimeoutMs: 1000 * 60 * 60, // 1 hour\n} satisfies PrepareIntlayerOptions;\n\nexport const prepareIntlayer = async (\n configuration: IntlayerConfig,\n options?: PrepareIntlayerOptions\n) => {\n checkVersionsConsistency(configuration);\n const appLogger = getAppLogger(configuration);\n\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n // Clean output dir if the intlayer version has changed\n const versionCache = cacheDisk(configuration, ['intlayer-version']);\n const intlayerCacheVersion = await versionCache.get();\n const isCorrectVersion = Boolean(\n intlayerCacheVersion && intlayerCacheVersion === packageJson.version\n );\n\n const { clean, format, forceRun, onIsCached, cacheTimeoutMs } = {\n ...DEFAULT_PREPARE_INTLAYER_OPTIONS,\n forceRun: !isCorrectVersion,\n ...(options ?? {}),\n };\n\n // Skip preparation if it has already been done recently\n await runOnce(\n sentinelPath,\n async () => {\n if (clean || !isCorrectVersion) {\n await cleanOutputDir(configuration);\n }\n\n await versionCache.set(packageJson.version);\n\n const preparationStartMs = Date.now();\n\n appLogger([\n 'Preparing Intlayer',\n colorize(`(v${packageJson.version})`, ANSIColors.GREY_DARK),\n ]);\n\n await writeConfiguration(configuration);\n\n const configurationWrittenTime = Date.now();\n\n appLogger(\n [\n 'Configuration written',\n colorize(\n `(${configurationWrittenTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n const files: string[] = await listDictionaries(configuration);\n\n const dictionaries = await loadDictionaries(files, configuration);\n\n const dictionariesLoadedTime = Date.now();\n\n appLogger(\n [\n 'Content loaded',\n colorize(\n [\n dictionaries.remoteDictionaries.length +\n dictionaries.pluginDictionaries.length >\n 0\n ? [\n `(Total: ${dictionariesLoadedTime - configurationWrittenTime}ms`,\n dictionaries.localDictionaries.length > 0\n ? 
`- Local: ${dictionaries.time.localDictionaries}ms`\n : '',\n dictionaries.remoteDictionaries.length > 0\n ? `- Remote: ${dictionaries.time.remoteDictionaries}ms`\n : '',\n dictionaries.pluginDictionaries.length > 0\n ? `- Plugin: ${dictionaries.time.pluginDictionaries}ms`\n : '',\n `)`,\n ].join('')\n : `(${dictionariesLoadedTime - configurationWrittenTime}ms)`,\n ].join(''),\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Build local dictionaries\n const dictionariesOutput = await buildDictionary(\n [\n ...dictionaries.localDictionaries,\n ...dictionaries.remoteDictionaries,\n ...dictionaries.pluginDictionaries,\n ],\n configuration,\n format,\n false\n );\n\n // Write remote dictionaries\n // Used as cache for next fetch\n await writeRemoteDictionary(\n dictionaries.remoteDictionaries,\n configuration\n );\n\n const dictionariesPaths = Object.values(\n dictionariesOutput?.mergedDictionaries ?? {}\n ).map((dictionary) => dictionary.dictionaryPath);\n\n await createTypes(dictionariesPaths, configuration);\n\n await createDictionaryEntryPoint(configuration);\n\n const dictionariesBuiltTime = Date.now();\n\n appLogger([\n 'Dictionaries built',\n colorize(\n `(${dictionariesBuiltTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ]);\n\n await createModuleAugmentation(configuration);\n\n const moduleAugmentationBuiltTime = Date.now();\n\n appLogger(\n [\n 'Module augmentation built',\n colorize(\n `(${moduleAugmentationBuiltTime - dictionariesBuiltTime}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Plugin transformation\n // Allow plugins to post-process the final build output (e.g., write back ICU JSON)\n for await (const plugin of configuration.plugins ?? []) {\n const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;\n\n await plugin.afterBuild?.({\n dictionaries: {\n unmergedDictionaries,\n mergedDictionaries,\n },\n configuration,\n });\n }\n\n const preparationElapsedMs = Date.now() - preparationStartMs;\n appLogger(\n [`Done`, colorize(`${preparationElapsedMs}ms`, ANSIColors.GREEN)],\n {\n level: 'info',\n isVerbose: true,\n }\n );\n },\n {\n forceRun,\n onIsCached,\n cacheTimeoutMs,\n }\n 
);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,MAAM,mCAAmC;CACvC,OAAO;CACP,QAAQ,CAAC,OAAO,MAAM;CACtB,gBAAgB,MAAO,KAAK;CAC7B;AAED,MAAa,kBAAkB,OAC7B,eACA,YACG;AACH,iDAAyB,cAAc;CACvC,MAAM,gDAAyB,cAAc;CAE7C,MAAM,mCACJ,cAAc,QAAQ,UACtB,yBACD;CAED,MAAM,gDAAyB,eAAe,CAAC,mBAAmB,CAAC;CACnE,MAAM,uBAAuB,MAAM,aAAa,KAAK;CACrD,MAAM,mBAAmB,QACvB,wBAAwB,yBAAyBA,uCAAY,QAC9D;CAED,MAAM,EAAE,OAAO,QAAQ,UAAU,YAAY,mBAAmB;EAC9D,GAAG;EACH,UAAU,CAAC;EACX,GAAI,WAAW,EAAE;EAClB;AAGD,OAAMC,8BACJ,cACA,YAAY;AACV,MAAI,SAAS,CAAC,iBACZ,OAAMC,sCAAe,cAAc;AAGrC,QAAM,aAAa,IAAIF,uCAAY,QAAQ;EAE3C,MAAM,qBAAqB,KAAK,KAAK;AAErC,YAAU,CACR,sDACS,KAAKA,uCAAY,QAAQ,IAAIG,6BAAW,UAAU,CAC5D,CAAC;AAEF,QAAMC,oDAAmB,cAAc;EAEvC,MAAM,2BAA2B,KAAK,KAAK;AAE3C,YACE,CACE,yDAEE,IAAI,2BAA2B,mBAAmB,MAClDD,6BAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAID,MAAM,eAAe,MAAME,2DAFH,MAAMC,8CAAiB,cAAc,EAEV,cAAc;EAEjE,MAAM,yBAAyB,KAAK,KAAK;AAEzC,YACE,CACE,kDAEE,CACE,aAAa,mBAAmB,SAC9B,aAAa,mBAAmB,SAClC,IACI;GACE,WAAW,yBAAyB,yBAAyB;GAC7D,aAAa,kBAAkB,SAAS,IACpC,YAAY,aAAa,KAAK,kBAAkB,MAChD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ;GACD,CAAC,KAAK,GAAG,GACV,IAAI,yBAAyB,yBAAyB,KAC3D,CAAC,KAAK,GAAG,EACVH,6BAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAGD,MAAM,qBAAqB,MAAMI,wEAC/B;GACE,GAAG,aAAa;GAChB,GAAG,aAAa;GAChB,GAAG,aAAa;GACjB,EACD,eACA,QACA,MACD;AAID,QAAMC,4EACJ,aAAa,oBACb,cACD;AAMD,QAAMC,0CAJoB,OAAO,OAC/B,oBAAoB,sBAAsB,EAAE,CAC7C,CAAC,KAAK,eAAe,WAAW,eAAe,EAEX,cAAc;AAEnD,QAAMC,yFAA2B,cAAc;EAE/C,MAAM,wBAAwB,KAAK,KAAK;AAExC,YAAU,CACR,sDAEE,IAAI,wBAAwB,mBAAmB,MAC/CP,6BAAW,UACZ,CACF,CAAC;AAEF,QAAMQ,qEAAyB,cAAc;AAI7C,YACE,CACE,6DAEE,IAN8B,KAAK,KAAK,GAMN,sBAAsB,MACxDR,6BAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;AAID,aAAW,MAAM,UAAU,cAAc,WAAW,EAAE,EAAE;GACtD,MAAM,EAAE,sBAAsB,uBAAuB;AAErD,SAAM,OAAO,aAAa;IACxB,cAAc;KACZ;KACA;KACD;IACD;IACD,CAAC;;AAIJ,YACE,CAAC,wCAAiB,GAFS,KAAK,KAAK,GAAG,mBAEE,KAAKA,6BAAW,MAAM,CAAC,EACjE;GACE,OAAO;GACP,WAAW;GACZ,CACF;IAEH;EACE;EACA;EACA;EACD,CACF"}
@@ -6,6 +6,19 @@ __intlayer_core_package_json = require_rolldown_runtime.__toESM(__intlayer_core_
 
  //#region src/utils/runOnce.ts
  const DEFAULT_RUN_ONCE_OPTIONS = { cacheTimeoutMs: 60 * 1e3 };
+ const writeSentinelFile = async (sentinelFilePath, currentTimestamp) => {
+ try {
+ await (0, node_fs_promises.mkdir)((0, node_path.dirname)(sentinelFilePath), { recursive: true });
+ const data = {
+ version: __intlayer_core_package_json.default.version,
+ timestamp: currentTimestamp
+ };
+ await (0, node_fs_promises.writeFile)(sentinelFilePath, JSON.stringify(data), { flag: "wx" });
+ } catch (err) {
+ if (err.code === "EEXIST") return;
+ throw err;
+ }
+ };
  /**
  * Ensures a callback function runs only once within a specified time window across multiple processes.
  * Uses a sentinel file to coordinate execution and prevent duplicate work.
@@ -61,18 +74,13 @@ const runOnce = async (sentinelFilePath, callback, options) => {
  } catch (err) {
  if (err.code === "ENOENT") {} else throw err;
  }
+ writeSentinelFile(sentinelFilePath, currentTimestamp);
  try {
- await (0, node_fs_promises.mkdir)((0, node_path.dirname)(sentinelFilePath), { recursive: true });
- const data = {
- version: __intlayer_core_package_json.default.version,
- timestamp: currentTimestamp
- };
- await (0, node_fs_promises.writeFile)(sentinelFilePath, JSON.stringify(data), { flag: "wx" });
- } catch (err) {
- if (err.code === "EEXIST") return;
- throw err;
+ await callback();
+ writeSentinelFile(sentinelFilePath, currentTimestamp);
+ } catch {
+ await (0, node_fs_promises.unlink)(sentinelFilePath);
  }
- await callback();
  };
 
  //#endregion
@@ -1 +1 @@
- {"version":3,"file":"runOnce.cjs","names":["cachedVersion: string | undefined","packageJson","err: any","data: SentinelData"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch (err: any) {\n if (err.code !== 'ENOENT') throw err;\n }\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":";;;;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;;;;;;;;;;;;;;;;;;;;;;;AA6BD,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,iCAAW,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,qCAAe,kBAAkB,OAAO;GACpD,IAAIA;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkBC,qCAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,sCAAa,iBAAiB;WACvBC,KAAU;AACjB,OAAI,IAAI,SAAS,SAAU,OAAM;;OAG9B;AACL,SAAM,cAAc;AAEpB;;UAEKA,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAIV,KAAI;AAEF,2DAAoB,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMC,OAAqB;GACzB,SAASF,qCAAY;GACrB,WAAW;GACZ;AACD,wCAAgB,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChEC,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;AAGR,OAAM,UAAU"}
+ {"version":3,"file":"runOnce.cjs","names":["data: SentinelData","packageJson","err: any","cachedVersion: string | undefined"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\nconst writeSentinelFile = async (\n sentinelFilePath: string,\n currentTimestamp: number\n) => {\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch (err: any) {\n if (err.code !== 'ENOENT') throw err;\n }\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n // Write sentinel file before to block parallel processes\n writeSentinelFile(sentinelFilePath, currentTimestamp);\n\n try {\n await callback();\n\n // Write sentinel file after to ensure the first one has not been removed with cleanOutputDir\n writeSentinelFile(sentinelFilePath, currentTimestamp);\n } catch {\n await unlink(sentinelFilePath); // Remove sentinel file if an error occurs\n }\n};\n"],"mappings":";;;;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;AAOD,MAAM,oBAAoB,OACxB,kBACA,qBACG;AACH,KAAI;AAEF,2DAAoB,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMA,OAAqB;GACzB,SAASC,qCAAY;GACrB,WAAW;GACZ;AACD,wCAAgB,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChEC,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;;;;;;;;;;;;;;;;;;;;;;;;AA0BV,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,iCAAW,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,qCAAe,kBAAkB,OAAO;GACpD,IAAIC;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkBF,qCAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,sCAAa,iBAAiB;WACvBC,KAAU;AACjB,OAAI,IAAI,SAAS,SAAU,OAAM;;OAG9B;AACL,SAAM,cAAc;AAEpB;;UAEKA,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAKV,mBAAkB,kBAAkB,iBAAiB;AAErD,KAAI;AACF,QAAM,UAAU;AAGhB,oBAAkB,kBAAkB,iBAAiB;SAC/C;AACN,qCAAa,iBAAiB"}
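
The `runOnce` change is easier to follow outside the minified output. Below is a simplified sketch of the new sentinel lifecycle, with the pre-existing staleness and version checks omitted; note that the shipped code calls `writeSentinelFile` without awaiting it, while this sketch awaits for clarity.

```typescript
// Simplified sketch of the new sentinel lifecycle in runOnce.
import { mkdir, unlink, writeFile } from 'node:fs/promises';
import { dirname } from 'node:path';

const writeSentinel = async (sentinelPath: string, timestamp: number) => {
  await mkdir(dirname(sentinelPath), { recursive: true });
  try {
    // "wx" = exclusive create: only the first process wins the race.
    await writeFile(
      sentinelPath,
      JSON.stringify({ version: '7.1.1', timestamp }),
      { flag: 'wx' }
    );
  } catch (err: any) {
    if (err.code !== 'EEXIST') throw err; // another process already wrote it
  }
};

export const runOnceSketch = async (
  sentinelPath: string,
  callback: () => Promise<void>
) => {
  const now = Date.now();
  // 1. Write the sentinel *before* the work so parallel processes back off.
  await writeSentinel(sentinelPath, now);
  try {
    await callback();
    // 2. Write it again afterwards: the callback (e.g. cleanOutputDir) may
    //    have wiped the cache directory, including the first sentinel.
    await writeSentinel(sentinelPath, now);
  } catch {
    // 3. On failure, remove the sentinel so the next run retries.
    await unlink(sentinelPath);
  }
};
```

Previously the sentinel was created only before the callback; writing it again afterwards covers the case where `cleanOutputDir` (now invoked inside the `runOnce` callback) deletes the lock, and unlinking on error avoids caching a failed preparation.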
@@ -16,22 +16,23 @@ import packageJson from "@intlayer/config/package.json" with { type: "json" };
  const DEFAULT_PREPARE_INTLAYER_OPTIONS = {
  clean: false,
  format: ["cjs", "esm"],
- forceRun: false
+ cacheTimeoutMs: 1e3 * 60 * 60
  };
  const prepareIntlayer = async (configuration, options) => {
- const { clean, format, forceRun, onIsCached } = {
- ...DEFAULT_PREPARE_INTLAYER_OPTIONS,
- ...options ?? {}
- };
  checkVersionsConsistency(configuration);
  const appLogger = getAppLogger(configuration);
  const sentinelPath = join(configuration.content.cacheDir, "intlayer-prepared.lock");
  const versionCache = cacheDisk(configuration, ["intlayer-version"]);
  const intlayerCacheVersion = await versionCache.get();
- if (clean || intlayerCacheVersion && intlayerCacheVersion !== packageJson.version) await cleanOutputDir(configuration);
- await versionCache.set(packageJson.version);
+ const isCorrectVersion = Boolean(intlayerCacheVersion && intlayerCacheVersion === packageJson.version);
+ const { clean, format, forceRun, onIsCached, cacheTimeoutMs } = {
+ ...DEFAULT_PREPARE_INTLAYER_OPTIONS,
+ forceRun: !isCorrectVersion,
+ ...options ?? {}
+ };
  await runOnce(sentinelPath, async () => {
- const { plugins } = configuration;
+ if (clean || !isCorrectVersion) await cleanOutputDir(configuration);
+ await versionCache.set(packageJson.version);
  const preparationStartMs = Date.now();
  appLogger(["Preparing Intlayer", colorize(`(v${packageJson.version})`, ANSIColors.GREY_DARK)]);
  await writeConfiguration(configuration);
@@ -58,7 +59,7 @@ const prepareIntlayer = async (configuration, options) => {
  appLogger(["Dictionaries built", colorize(`(${dictionariesBuiltTime - preparationStartMs}ms)`, ANSIColors.GREY_DARK)]);
  await createModuleAugmentation(configuration);
  appLogger(["Module augmentation built", colorize(`(${Date.now() - dictionariesBuiltTime}ms)`, ANSIColors.GREY_DARK)], { isVerbose: true });
- for await (const plugin of plugins ?? []) {
+ for await (const plugin of configuration.plugins ?? []) {
  const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;
  await plugin.afterBuild?.({
  dictionaries: {
@@ -75,7 +76,7 @@ const prepareIntlayer = async (configuration, options) => {
  }, {
  forceRun,
  onIsCached,
- cacheTimeoutMs: 1e3 * 60 * 60
+ cacheTimeoutMs
  });
  };
 
@@ -1 +1 @@
- {"version":3,"file":"prepareIntlayer.mjs","names":[],"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":["import { join } from 'node:path';\nimport {\n ANSIColors,\n cacheDisk,\n checkVersionsConsistency,\n colorize,\n getAppLogger,\n} from '@intlayer/config';\nimport packageJson from '@intlayer/config/package.json' with { type: 'json' };\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { buildDictionary } from './buildIntlayerDictionary/buildIntlayerDictionary';\nimport { writeRemoteDictionary } from './buildIntlayerDictionary/writeRemoteDictionary';\nimport { cleanOutputDir } from './cleanOutputDir';\nimport { createDictionaryEntryPoint } from './createDictionaryEntryPoint/createDictionaryEntryPoint';\nimport { createModuleAugmentation, createTypes } from './createType/index';\nimport { listDictionaries } from './listDictionariesPath';\nimport { loadDictionaries } from './loadDictionaries/loadDictionaries';\nimport { runOnce } from './utils/runOnce';\nimport { writeConfiguration } from './writeConfiguration';\n\ntype PrepareIntlayerOptions = {\n clean?: boolean;\n format?: ('cjs' | 'esm')[];\n forceRun?: boolean;\n onIsCached?: () => void | Promise<void>;\n};\n\nconst DEFAULT_PREPARE_INTLAYER_OPTIONS = {\n clean: false,\n format: ['cjs', 'esm'],\n forceRun: false,\n} satisfies PrepareIntlayerOptions;\n\nexport const prepareIntlayer = async (\n configuration: IntlayerConfig,\n options?: PrepareIntlayerOptions\n) => {\n const { clean, format, forceRun, onIsCached } = {\n ...DEFAULT_PREPARE_INTLAYER_OPTIONS,\n ...(options ?? {}),\n };\n\n checkVersionsConsistency(configuration);\n const appLogger = getAppLogger(configuration);\n\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n\n // Clean output dir if the intlayer version has changed\n const versionCache = cacheDisk(configuration, ['intlayer-version']);\n const intlayerCacheVersion = await versionCache.get();\n if (\n clean ||\n (intlayerCacheVersion && intlayerCacheVersion !== packageJson.version)\n ) {\n await cleanOutputDir(configuration);\n }\n await versionCache.set(packageJson.version);\n\n // Skip preparation if it has already been done recently\n await runOnce(\n sentinelPath,\n async () => {\n const { plugins } = configuration;\n\n const preparationStartMs = Date.now();\n\n appLogger([\n 'Preparing Intlayer',\n colorize(`(v${packageJson.version})`, ANSIColors.GREY_DARK),\n ]);\n\n await writeConfiguration(configuration);\n\n const configurationWrittenTime = Date.now();\n\n appLogger(\n [\n 'Configuration written',\n colorize(\n `(${configurationWrittenTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n const files: string[] = await listDictionaries(configuration);\n\n const dictionaries = await loadDictionaries(files, configuration);\n\n const dictionariesLoadedTime = Date.now();\n\n appLogger(\n [\n 'Content loaded',\n colorize(\n [\n dictionaries.remoteDictionaries.length +\n dictionaries.pluginDictionaries.length >\n 0\n ? [\n `(Total: ${dictionariesLoadedTime - configurationWrittenTime}ms`,\n dictionaries.localDictionaries.length > 0\n ? `- Local: ${dictionaries.time.localDictionaries}ms`\n : '',\n dictionaries.remoteDictionaries.length > 0\n ? `- Remote: ${dictionaries.time.remoteDictionaries}ms`\n : '',\n dictionaries.pluginDictionaries.length > 0\n ? 
`- Plugin: ${dictionaries.time.pluginDictionaries}ms`\n : '',\n `)`,\n ].join('')\n : `(${dictionariesLoadedTime - configurationWrittenTime}ms)`,\n ].join(''),\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Build local dictionaries\n const dictionariesOutput = await buildDictionary(\n [\n ...dictionaries.localDictionaries,\n ...dictionaries.remoteDictionaries,\n ...dictionaries.pluginDictionaries,\n ],\n configuration,\n format,\n false\n );\n\n // Write remote dictionaries\n // Used as cache for next fetch\n await writeRemoteDictionary(\n dictionaries.remoteDictionaries,\n configuration\n );\n\n const dictionariesPaths = Object.values(\n dictionariesOutput?.mergedDictionaries ?? {}\n ).map((dictionary) => dictionary.dictionaryPath);\n\n await createTypes(dictionariesPaths, configuration);\n\n await createDictionaryEntryPoint(configuration);\n\n const dictionariesBuiltTime = Date.now();\n\n appLogger([\n 'Dictionaries built',\n colorize(\n `(${dictionariesBuiltTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ]);\n\n await createModuleAugmentation(configuration);\n\n const moduleAugmentationBuiltTime = Date.now();\n\n appLogger(\n [\n 'Module augmentation built',\n colorize(\n `(${moduleAugmentationBuiltTime - dictionariesBuiltTime}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Plugin transformation\n // Allow plugins to post-process the final build output (e.g., write back ICU JSON)\n for await (const plugin of plugins ?? []) {\n const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;\n\n await plugin.afterBuild?.({\n dictionaries: {\n unmergedDictionaries,\n mergedDictionaries,\n },\n configuration,\n });\n }\n\n const preparationElapsedMs = Date.now() - preparationStartMs;\n appLogger(\n [`Done`, colorize(`${preparationElapsedMs}ms`, ANSIColors.GREEN)],\n {\n level: 'info',\n isVerbose: true,\n }\n );\n },\n {\n forceRun,\n onIsCached,\n cacheTimeoutMs: 1000 * 60 * 60, // 1 hour\n }\n 
);\n};\n"],"mappings":";;;;;;;;;;;;;;;AA2BA,MAAM,mCAAmC;CACvC,OAAO;CACP,QAAQ,CAAC,OAAO,MAAM;CACtB,UAAU;CACX;AAED,MAAa,kBAAkB,OAC7B,eACA,YACG;CACH,MAAM,EAAE,OAAO,QAAQ,UAAU,eAAe;EAC9C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;AAED,0BAAyB,cAAc;CACvC,MAAM,YAAY,aAAa,cAAc;CAE7C,MAAM,eAAe,KACnB,cAAc,QAAQ,UACtB,yBACD;CAGD,MAAM,eAAe,UAAU,eAAe,CAAC,mBAAmB,CAAC;CACnE,MAAM,uBAAuB,MAAM,aAAa,KAAK;AACrD,KACE,SACC,wBAAwB,yBAAyB,YAAY,QAE9D,OAAM,eAAe,cAAc;AAErC,OAAM,aAAa,IAAI,YAAY,QAAQ;AAG3C,OAAM,QACJ,cACA,YAAY;EACV,MAAM,EAAE,YAAY;EAEpB,MAAM,qBAAqB,KAAK,KAAK;AAErC,YAAU,CACR,sBACA,SAAS,KAAK,YAAY,QAAQ,IAAI,WAAW,UAAU,CAC5D,CAAC;AAEF,QAAM,mBAAmB,cAAc;EAEvC,MAAM,2BAA2B,KAAK,KAAK;AAE3C,YACE,CACE,yBACA,SACE,IAAI,2BAA2B,mBAAmB,MAClD,WAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAID,MAAM,eAAe,MAAM,iBAFH,MAAM,iBAAiB,cAAc,EAEV,cAAc;EAEjE,MAAM,yBAAyB,KAAK,KAAK;AAEzC,YACE,CACE,kBACA,SACE,CACE,aAAa,mBAAmB,SAC9B,aAAa,mBAAmB,SAClC,IACI;GACE,WAAW,yBAAyB,yBAAyB;GAC7D,aAAa,kBAAkB,SAAS,IACpC,YAAY,aAAa,KAAK,kBAAkB,MAChD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ;GACD,CAAC,KAAK,GAAG,GACV,IAAI,yBAAyB,yBAAyB,KAC3D,CAAC,KAAK,GAAG,EACV,WAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAGD,MAAM,qBAAqB,MAAM,gBAC/B;GACE,GAAG,aAAa;GAChB,GAAG,aAAa;GAChB,GAAG,aAAa;GACjB,EACD,eACA,QACA,MACD;AAID,QAAM,sBACJ,aAAa,oBACb,cACD;AAMD,QAAM,YAJoB,OAAO,OAC/B,oBAAoB,sBAAsB,EAAE,CAC7C,CAAC,KAAK,eAAe,WAAW,eAAe,EAEX,cAAc;AAEnD,QAAM,2BAA2B,cAAc;EAE/C,MAAM,wBAAwB,KAAK,KAAK;AAExC,YAAU,CACR,sBACA,SACE,IAAI,wBAAwB,mBAAmB,MAC/C,WAAW,UACZ,CACF,CAAC;AAEF,QAAM,yBAAyB,cAAc;AAI7C,YACE,CACE,6BACA,SACE,IAN8B,KAAK,KAAK,GAMN,sBAAsB,MACxD,WAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;AAID,aAAW,MAAM,UAAU,WAAW,EAAE,EAAE;GACxC,MAAM,EAAE,sBAAsB,uBAAuB;AAErD,SAAM,OAAO,aAAa;IACxB,cAAc;KACZ;KACA;KACD;IACD;IACD,CAAC;;AAIJ,YACE,CAAC,QAAQ,SAAS,GAFS,KAAK,KAAK,GAAG,mBAEE,KAAK,WAAW,MAAM,CAAC,EACjE;GACE,OAAO;GACP,WAAW;GACZ,CACF;IAEH;EACE;EACA;EACA,gBAAgB,MAAO,KAAK;EAC7B,CACF"}
+ {"version":3,"file":"prepareIntlayer.mjs","names":[],"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":["import { join } from 'node:path';\nimport {\n ANSIColors,\n cacheDisk,\n checkVersionsConsistency,\n colorize,\n getAppLogger,\n} from '@intlayer/config';\nimport packageJson from '@intlayer/config/package.json' with { type: 'json' };\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { buildDictionary } from './buildIntlayerDictionary/buildIntlayerDictionary';\nimport { writeRemoteDictionary } from './buildIntlayerDictionary/writeRemoteDictionary';\nimport { cleanOutputDir } from './cleanOutputDir';\nimport { createDictionaryEntryPoint } from './createDictionaryEntryPoint/createDictionaryEntryPoint';\nimport { createModuleAugmentation, createTypes } from './createType/index';\nimport { listDictionaries } from './listDictionariesPath';\nimport { loadDictionaries } from './loadDictionaries/loadDictionaries';\nimport { runOnce } from './utils/runOnce';\nimport { writeConfiguration } from './writeConfiguration';\n\ntype PrepareIntlayerOptions = {\n clean?: boolean;\n format?: ('cjs' | 'esm')[];\n forceRun?: boolean;\n cacheTimeoutMs?: number;\n onIsCached?: () => void | Promise<void>;\n};\n\nconst DEFAULT_PREPARE_INTLAYER_OPTIONS = {\n clean: false,\n format: ['cjs', 'esm'],\n cacheTimeoutMs: 1000 * 60 * 60, // 1 hour\n} satisfies PrepareIntlayerOptions;\n\nexport const prepareIntlayer = async (\n configuration: IntlayerConfig,\n options?: PrepareIntlayerOptions\n) => {\n checkVersionsConsistency(configuration);\n const appLogger = getAppLogger(configuration);\n\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n // Clean output dir if the intlayer version has changed\n const versionCache = cacheDisk(configuration, ['intlayer-version']);\n const intlayerCacheVersion = await versionCache.get();\n const isCorrectVersion = Boolean(\n intlayerCacheVersion && intlayerCacheVersion === packageJson.version\n );\n\n const { clean, format, forceRun, onIsCached, cacheTimeoutMs } = {\n ...DEFAULT_PREPARE_INTLAYER_OPTIONS,\n forceRun: !isCorrectVersion,\n ...(options ?? {}),\n };\n\n // Skip preparation if it has already been done recently\n await runOnce(\n sentinelPath,\n async () => {\n if (clean || !isCorrectVersion) {\n await cleanOutputDir(configuration);\n }\n\n await versionCache.set(packageJson.version);\n\n const preparationStartMs = Date.now();\n\n appLogger([\n 'Preparing Intlayer',\n colorize(`(v${packageJson.version})`, ANSIColors.GREY_DARK),\n ]);\n\n await writeConfiguration(configuration);\n\n const configurationWrittenTime = Date.now();\n\n appLogger(\n [\n 'Configuration written',\n colorize(\n `(${configurationWrittenTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n const files: string[] = await listDictionaries(configuration);\n\n const dictionaries = await loadDictionaries(files, configuration);\n\n const dictionariesLoadedTime = Date.now();\n\n appLogger(\n [\n 'Content loaded',\n colorize(\n [\n dictionaries.remoteDictionaries.length +\n dictionaries.pluginDictionaries.length >\n 0\n ? [\n `(Total: ${dictionariesLoadedTime - configurationWrittenTime}ms`,\n dictionaries.localDictionaries.length > 0\n ? `- Local: ${dictionaries.time.localDictionaries}ms`\n : '',\n dictionaries.remoteDictionaries.length > 0\n ? `- Remote: ${dictionaries.time.remoteDictionaries}ms`\n : '',\n dictionaries.pluginDictionaries.length > 0\n ? 
`- Plugin: ${dictionaries.time.pluginDictionaries}ms`\n : '',\n `)`,\n ].join('')\n : `(${dictionariesLoadedTime - configurationWrittenTime}ms)`,\n ].join(''),\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Build local dictionaries\n const dictionariesOutput = await buildDictionary(\n [\n ...dictionaries.localDictionaries,\n ...dictionaries.remoteDictionaries,\n ...dictionaries.pluginDictionaries,\n ],\n configuration,\n format,\n false\n );\n\n // Write remote dictionaries\n // Used as cache for next fetch\n await writeRemoteDictionary(\n dictionaries.remoteDictionaries,\n configuration\n );\n\n const dictionariesPaths = Object.values(\n dictionariesOutput?.mergedDictionaries ?? {}\n ).map((dictionary) => dictionary.dictionaryPath);\n\n await createTypes(dictionariesPaths, configuration);\n\n await createDictionaryEntryPoint(configuration);\n\n const dictionariesBuiltTime = Date.now();\n\n appLogger([\n 'Dictionaries built',\n colorize(\n `(${dictionariesBuiltTime - preparationStartMs}ms)`,\n ANSIColors.GREY_DARK\n ),\n ]);\n\n await createModuleAugmentation(configuration);\n\n const moduleAugmentationBuiltTime = Date.now();\n\n appLogger(\n [\n 'Module augmentation built',\n colorize(\n `(${moduleAugmentationBuiltTime - dictionariesBuiltTime}ms)`,\n ANSIColors.GREY_DARK\n ),\n ],\n {\n isVerbose: true,\n }\n );\n\n // Plugin transformation\n // Allow plugins to post-process the final build output (e.g., write back ICU JSON)\n for await (const plugin of configuration.plugins ?? []) {\n const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;\n\n await plugin.afterBuild?.({\n dictionaries: {\n unmergedDictionaries,\n mergedDictionaries,\n },\n configuration,\n });\n }\n\n const preparationElapsedMs = Date.now() - preparationStartMs;\n appLogger(\n [`Done`, colorize(`${preparationElapsedMs}ms`, ANSIColors.GREEN)],\n {\n level: 'info',\n isVerbose: true,\n }\n );\n },\n {\n forceRun,\n onIsCached,\n cacheTimeoutMs,\n }\n 
);\n};\n"],"mappings":";;;;;;;;;;;;;;;AA4BA,MAAM,mCAAmC;CACvC,OAAO;CACP,QAAQ,CAAC,OAAO,MAAM;CACtB,gBAAgB,MAAO,KAAK;CAC7B;AAED,MAAa,kBAAkB,OAC7B,eACA,YACG;AACH,0BAAyB,cAAc;CACvC,MAAM,YAAY,aAAa,cAAc;CAE7C,MAAM,eAAe,KACnB,cAAc,QAAQ,UACtB,yBACD;CAED,MAAM,eAAe,UAAU,eAAe,CAAC,mBAAmB,CAAC;CACnE,MAAM,uBAAuB,MAAM,aAAa,KAAK;CACrD,MAAM,mBAAmB,QACvB,wBAAwB,yBAAyB,YAAY,QAC9D;CAED,MAAM,EAAE,OAAO,QAAQ,UAAU,YAAY,mBAAmB;EAC9D,GAAG;EACH,UAAU,CAAC;EACX,GAAI,WAAW,EAAE;EAClB;AAGD,OAAM,QACJ,cACA,YAAY;AACV,MAAI,SAAS,CAAC,iBACZ,OAAM,eAAe,cAAc;AAGrC,QAAM,aAAa,IAAI,YAAY,QAAQ;EAE3C,MAAM,qBAAqB,KAAK,KAAK;AAErC,YAAU,CACR,sBACA,SAAS,KAAK,YAAY,QAAQ,IAAI,WAAW,UAAU,CAC5D,CAAC;AAEF,QAAM,mBAAmB,cAAc;EAEvC,MAAM,2BAA2B,KAAK,KAAK;AAE3C,YACE,CACE,yBACA,SACE,IAAI,2BAA2B,mBAAmB,MAClD,WAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAID,MAAM,eAAe,MAAM,iBAFH,MAAM,iBAAiB,cAAc,EAEV,cAAc;EAEjE,MAAM,yBAAyB,KAAK,KAAK;AAEzC,YACE,CACE,kBACA,SACE,CACE,aAAa,mBAAmB,SAC9B,aAAa,mBAAmB,SAClC,IACI;GACE,WAAW,yBAAyB,yBAAyB;GAC7D,aAAa,kBAAkB,SAAS,IACpC,YAAY,aAAa,KAAK,kBAAkB,MAChD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ,aAAa,mBAAmB,SAAS,IACrC,aAAa,aAAa,KAAK,mBAAmB,MAClD;GACJ;GACD,CAAC,KAAK,GAAG,GACV,IAAI,yBAAyB,yBAAyB,KAC3D,CAAC,KAAK,GAAG,EACV,WAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;EAGD,MAAM,qBAAqB,MAAM,gBAC/B;GACE,GAAG,aAAa;GAChB,GAAG,aAAa;GAChB,GAAG,aAAa;GACjB,EACD,eACA,QACA,MACD;AAID,QAAM,sBACJ,aAAa,oBACb,cACD;AAMD,QAAM,YAJoB,OAAO,OAC/B,oBAAoB,sBAAsB,EAAE,CAC7C,CAAC,KAAK,eAAe,WAAW,eAAe,EAEX,cAAc;AAEnD,QAAM,2BAA2B,cAAc;EAE/C,MAAM,wBAAwB,KAAK,KAAK;AAExC,YAAU,CACR,sBACA,SACE,IAAI,wBAAwB,mBAAmB,MAC/C,WAAW,UACZ,CACF,CAAC;AAEF,QAAM,yBAAyB,cAAc;AAI7C,YACE,CACE,6BACA,SACE,IAN8B,KAAK,KAAK,GAMN,sBAAsB,MACxD,WAAW,UACZ,CACF,EACD,EACE,WAAW,MACZ,CACF;AAID,aAAW,MAAM,UAAU,cAAc,WAAW,EAAE,EAAE;GACtD,MAAM,EAAE,sBAAsB,uBAAuB;AAErD,SAAM,OAAO,aAAa;IACxB,cAAc;KACZ;KACA;KACD;IACD;IACD,CAAC;;AAIJ,YACE,CAAC,QAAQ,SAAS,GAFS,KAAK,KAAK,GAAG,mBAEE,KAAK,WAAW,MAAM,CAAC,EACjE;GACE,OAAO;GACP,WAAW;GACZ,CACF;IAEH;EACE;EACA;EACA;EACD,CACF"}
@@ -4,6 +4,19 @@ import packageJson from "@intlayer/core/package.json" with { type: "json" };
 
  //#region src/utils/runOnce.ts
  const DEFAULT_RUN_ONCE_OPTIONS = { cacheTimeoutMs: 60 * 1e3 };
+ const writeSentinelFile = async (sentinelFilePath, currentTimestamp) => {
+ try {
+ await mkdir(dirname(sentinelFilePath), { recursive: true });
+ const data = {
+ version: packageJson.version,
+ timestamp: currentTimestamp
+ };
+ await writeFile(sentinelFilePath, JSON.stringify(data), { flag: "wx" });
+ } catch (err) {
+ if (err.code === "EEXIST") return;
+ throw err;
+ }
+ };
  /**
  * Ensures a callback function runs only once within a specified time window across multiple processes.
  * Uses a sentinel file to coordinate execution and prevent duplicate work.
@@ -59,18 +72,13 @@ const runOnce = async (sentinelFilePath, callback, options) => {
  } catch (err) {
  if (err.code === "ENOENT") {} else throw err;
  }
+ writeSentinelFile(sentinelFilePath, currentTimestamp);
  try {
- await mkdir(dirname(sentinelFilePath), { recursive: true });
- const data = {
- version: packageJson.version,
- timestamp: currentTimestamp
- };
- await writeFile(sentinelFilePath, JSON.stringify(data), { flag: "wx" });
- } catch (err) {
- if (err.code === "EEXIST") return;
- throw err;
+ await callback();
+ writeSentinelFile(sentinelFilePath, currentTimestamp);
+ } catch {
+ await unlink(sentinelFilePath);
  }
- await callback();
  };
 
  //#endregion
@@ -1 +1 @@
- {"version":3,"file":"runOnce.mjs","names":["cachedVersion: string | undefined","err: any","data: SentinelData"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch (err: any) {\n if (err.code !== 'ENOENT') throw err;\n }\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":";;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;;;;;;;;;;;;;;;;;;;;;;;AA6BD,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,MAAM,KAAK,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,MAAM,SAAS,kBAAkB,OAAO;GACpD,IAAIA;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkB,YAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,SAAM,OAAO,iBAAiB;WACvBC,KAAU;AACjB,OAAI,IAAI,SAAS,SAAU,OAAM;;OAG9B;AACL,SAAM,cAAc;AAEpB;;UAEKA,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAIV,KAAI;AAEF,QAAM,MAAM,QAAQ,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMC,OAAqB;GACzB,SAAS,YAAY;GACrB,WAAW;GACZ;AACD,QAAM,UAAU,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChED,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;AAGR,OAAM,UAAU"}
+ {"version":3,"file":"runOnce.mjs","names":["data: SentinelData","err: any","cachedVersion: string | undefined"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\nconst writeSentinelFile = async (\n sentinelFilePath: string,\n currentTimestamp: number\n) => {\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch (err: any) {\n if (err.code !== 'ENOENT') throw err;\n }\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n // Write sentinel file before to block parallel processes\n writeSentinelFile(sentinelFilePath, currentTimestamp);\n\n try {\n await callback();\n\n // Write sentinel file after to ensure the first one has not been removed with cleanOutputDir\n writeSentinelFile(sentinelFilePath, currentTimestamp);\n } catch {\n await unlink(sentinelFilePath); // Remove sentinel file if an error occurs\n }\n};\n"],"mappings":";;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;AAOD,MAAM,oBAAoB,OACxB,kBACA,qBACG;AACH,KAAI;AAEF,QAAM,MAAM,QAAQ,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMA,OAAqB;GACzB,SAAS,YAAY;GACrB,WAAW;GACZ;AACD,QAAM,UAAU,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChEC,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;;;;;;;;;;;;;;;;;;;;;;;;AA0BV,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,MAAM,KAAK,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,MAAM,SAAS,kBAAkB,OAAO;GACpD,IAAIC;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkB,YAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,SAAM,OAAO,iBAAiB;WACvBD,KAAU;AACjB,OAAI,IAAI,SAAS,SAAU,OAAM;;OAG9B;AACL,SAAM,cAAc;AAEpB;;UAEKA,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAKV,mBAAkB,kBAAkB,iBAAiB;AAErD,KAAI;AACF,QAAM,UAAU;AAGhB,oBAAkB,kBAAkB,iBAAiB;SAC/C;AACN,QAAM,OAAO,iBAAiB"}
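The new `runOnce.ts` source captured in the map above writes a versioned sentinel before running the callback, rewrites it afterwards (since `cleanOutputDir` may have removed the first one), and unlinks it if the callback throws; its exported signature now takes an options object, even though the JSDoc example embedded in that source still passes the cache timeout as a bare third argument. A minimal usage sketch against the new signature — the import path is an assumption, since `runOnce` ships as an internal utility of `@intlayer/chokidar`:

```typescript
import { join } from 'node:path';

// Assumption: runOnce is reachable from the package root; in the published
// build it lives among the internal utils of @intlayer/chokidar.
import { runOnce } from '@intlayer/chokidar';

const sentinelPath = join('.intlayer', 'cache', 'my-task.lock');

await runOnce(
  sentinelPath,
  async () => {
    // Work that should execute at most once per cache window,
    // even when several processes start concurrently.
    console.log('preparing…');
  },
  {
    cacheTimeoutMs: 30 * 1000, // treat a sentinel younger than 30 s as fresh
    forceRun: false,           // true would bypass the sentinel and always run
    onIsCached: () => console.log('skipped: a recent, version-matching sentinel exists'),
  }
);
```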
@@ -1,14 +1,14 @@
1
1
  import { UnmergedDictionaryOutput } from "./writeUnmergedDictionary.js";
2
2
  import { MergedDictionaryOutput } from "./writeMergedDictionary.js";
3
3
  import { LocalizedDictionaryOutput } from "./writeDynamicDictionary.js";
4
- import * as _intlayer_types0 from "@intlayer/types";
4
+ import * as _intlayer_types4 from "@intlayer/types";
5
5
  import { Dictionary } from "@intlayer/types";
6
6
 
7
7
  //#region src/buildIntlayerDictionary/buildIntlayerDictionary.d.ts
8
8
  /**
9
9
  * This function transpiles the bundled code to make dictionaries as JSON files
10
10
  */
11
- declare const buildDictionary: (localDictionariesEntries: Dictionary[], configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[], importOtherDictionaries?: boolean) => Promise<{
11
+ declare const buildDictionary: (localDictionariesEntries: Dictionary[], configuration?: _intlayer_types4.IntlayerConfig, formats?: ("cjs" | "esm")[], importOtherDictionaries?: boolean) => Promise<{
12
12
  unmergedDictionaries: UnmergedDictionaryOutput;
13
13
  mergedDictionaries: MergedDictionaryOutput;
14
14
  dynamicDictionaries: LocalizedDictionaryOutput;
@@ -1,5 +1,5 @@
1
1
  import { MergedDictionaryOutput } from "./writeMergedDictionary.js";
2
- import * as _intlayer_types2 from "@intlayer/types";
2
+ import * as _intlayer_types0 from "@intlayer/types";
3
3
  import { Dictionary, Locale } from "@intlayer/types";
4
4
 
5
5
  //#region src/buildIntlayerDictionary/writeDynamicDictionary.d.ts
@@ -12,7 +12,7 @@ type LocalizedDictionaryOutput = Record<string, LocalizedDictionaryResult>;
12
12
  /**
13
13
  * This function generates the content of the dictionary list file
14
14
  */
15
- declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types2.IntlayerConfig) => string;
15
+ declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types0.IntlayerConfig) => string;
16
16
  /**
17
17
  * Write the localized dictionaries to the dictionariesDir
18
18
  * @param mergedDictionaries - The merged dictionaries
@@ -29,7 +29,7 @@ declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: L
29
29
  * // { key: 'home', content: { ... } },
30
30
  * ```
31
31
  */
32
- declare const writeDynamicDictionary: (mergedDictionaries: MergedDictionaryOutput, configuration?: _intlayer_types2.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
32
+ declare const writeDynamicDictionary: (mergedDictionaries: MergedDictionaryOutput, configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
33
33
  //#endregion
34
34
  export { DictionaryResult, LocalizedDictionaryOutput, LocalizedDictionaryResult, generateDictionaryEntryPoint, writeDynamicDictionary };
35
35
  //# sourceMappingURL=writeDynamicDictionary.d.ts.map
@@ -1,11 +1,11 @@
1
1
  import { LocalizedDictionaryOutput, LocalizedDictionaryResult } from "./writeDynamicDictionary.js";
2
- import * as _intlayer_types0 from "@intlayer/types";
2
+ import * as _intlayer_types2 from "@intlayer/types";
3
3
 
4
4
  //#region src/buildIntlayerDictionary/writeFetchDictionary.d.ts
5
5
  /**
6
6
  * This function generates the content of the dictionary list file
7
7
  */
8
- declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types0.IntlayerConfig) => string;
8
+ declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types2.IntlayerConfig) => string;
9
9
  /**
10
10
  * Write the localized dictionaries to the dictionariesDir
11
11
  * @param mergedDictionaries - The merged dictionaries
@@ -22,7 +22,7 @@ declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: Loc
22
22
  * // { key: 'home', content: { ... } },
23
23
  * ```
24
24
  */
25
- declare const writeFetchDictionary: (dynamicDictionaries: LocalizedDictionaryOutput, configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
25
+ declare const writeFetchDictionary: (dynamicDictionaries: LocalizedDictionaryOutput, configuration?: _intlayer_types2.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
26
26
  //#endregion
27
27
  export { generateDictionaryEntryPoint, writeFetchDictionary };
28
28
  //# sourceMappingURL=writeFetchDictionary.d.ts.map
@@ -1,5 +1,5 @@
1
1
  import { UnmergedDictionaryOutput } from "./writeUnmergedDictionary.js";
2
- import * as _intlayer_types6 from "@intlayer/types";
2
+ import * as _intlayer_types5 from "@intlayer/types";
3
3
  import { Dictionary } from "@intlayer/types";
4
4
 
5
5
  //#region src/buildIntlayerDictionary/writeMergedDictionary.d.ts
@@ -24,7 +24,7 @@ type MergedDictionaryOutput = Record<string, MergedDictionaryResult>;
24
24
  * // { key: 'home', content: { ... } },
25
25
  * ```
26
26
  */
27
- declare const writeMergedDictionaries: (groupedDictionaries: UnmergedDictionaryOutput, configuration?: _intlayer_types6.IntlayerConfig) => Promise<MergedDictionaryOutput>;
27
+ declare const writeMergedDictionaries: (groupedDictionaries: UnmergedDictionaryOutput, configuration?: _intlayer_types5.IntlayerConfig) => Promise<MergedDictionaryOutput>;
28
28
  //#endregion
29
29
  export { MergedDictionaryOutput, MergedDictionaryResult, writeMergedDictionaries };
30
30
  //# sourceMappingURL=writeMergedDictionary.d.ts.map
@@ -1,4 +1,4 @@
1
- import * as _intlayer_types4 from "@intlayer/types";
1
+ import * as _intlayer_types6 from "@intlayer/types";
2
2
  import { Dictionary } from "@intlayer/types";
3
3
 
4
4
  //#region src/buildIntlayerDictionary/writeRemoteDictionary.d.ts
@@ -23,7 +23,7 @@ type RemoteDictionaryOutput = Record<string, RemoteDictionaryResult>;
23
23
  * // { key: 'home', content: { ... } },
24
24
  * ```
25
25
  */
26
- declare const writeRemoteDictionary: (remoteDictionaries: Dictionary[], configuration?: _intlayer_types4.IntlayerConfig) => Promise<RemoteDictionaryOutput>;
26
+ declare const writeRemoteDictionary: (remoteDictionaries: Dictionary[], configuration?: _intlayer_types6.IntlayerConfig) => Promise<RemoteDictionaryOutput>;
27
27
  //#endregion
28
28
  export { RemoteDictionaryOutput, RemoteDictionaryResult, writeRemoteDictionary };
29
29
  //# sourceMappingURL=writeRemoteDictionary.d.ts.map
@@ -1,10 +1,10 @@
1
- import * as _intlayer_types7 from "@intlayer/types";
1
+ import * as _intlayer_types8 from "@intlayer/types";
2
2
 
3
3
  //#region src/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts
4
4
  /**
5
5
  * This function generates a list of dictionaries in the main directory
6
6
  */
7
- declare const createDictionaryEntryPoint: (configuration?: _intlayer_types7.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<void>;
7
+ declare const createDictionaryEntryPoint: (configuration?: _intlayer_types8.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<void>;
8
8
  //#endregion
9
9
  export { createDictionaryEntryPoint };
10
10
  //# sourceMappingURL=createDictionaryEntryPoint.d.ts.map
@@ -1,10 +1,10 @@
1
- import * as _intlayer_types5 from "@intlayer/types";
1
+ import * as _intlayer_types0 from "@intlayer/types";
2
2
 
3
3
  //#region src/createDictionaryEntryPoint/generateDictionaryListContent.d.ts
4
4
  /**
5
5
  * This function generates the content of the dictionary list file
6
6
  */
7
- declare const generateDictionaryListContent: (dictionaries: string[], functionName: string, format?: "cjs" | "esm", configuration?: _intlayer_types5.IntlayerConfig) => string;
7
+ declare const generateDictionaryListContent: (dictionaries: string[], functionName: string, format?: "cjs" | "esm", configuration?: _intlayer_types0.IntlayerConfig) => string;
8
8
  //#endregion
9
9
  export { generateDictionaryListContent };
10
10
  //# sourceMappingURL=generateDictionaryListContent.d.ts.map
@@ -1,11 +1,11 @@
1
1
  import { DictionariesStatus } from "./loadDictionaries.js";
2
- import * as _intlayer_types8 from "@intlayer/types";
2
+ import * as _intlayer_types7 from "@intlayer/types";
3
3
  import { Dictionary } from "@intlayer/types";
4
4
  import { DictionaryAPI } from "@intlayer/backend";
5
5
 
6
6
  //#region src/loadDictionaries/loadRemoteDictionaries.d.ts
7
7
  declare const formatDistantDictionaries: (dictionaries: (DictionaryAPI | Dictionary)[]) => Dictionary[];
8
- declare const loadRemoteDictionaries: (configuration?: _intlayer_types8.IntlayerConfig, onStatusUpdate?: (status: DictionariesStatus[]) => void, options?: {
8
+ declare const loadRemoteDictionaries: (configuration?: _intlayer_types7.IntlayerConfig, onStatusUpdate?: (status: DictionariesStatus[]) => void, options?: {
9
9
  onStartRemoteCheck?: () => void;
10
10
  onStopRemoteCheck?: () => void;
11
11
  onError?: (error: Error) => void;
@@ -5,6 +5,7 @@ type PrepareIntlayerOptions = {
5
5
  clean?: boolean;
6
6
  format?: ('cjs' | 'esm')[];
7
7
  forceRun?: boolean;
8
+ cacheTimeoutMs?: number;
8
9
  onIsCached?: () => void | Promise<void>;
9
10
  };
10
11
  declare const prepareIntlayer: (configuration: IntlayerConfig, options?: PrepareIntlayerOptions) => Promise<void>;
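The hunk above adds `cacheTimeoutMs` to `PrepareIntlayerOptions` in 7.1.1. A hypothetical call site, assuming the usual `getConfiguration()` helper from `@intlayer/config` and that `prepareIntlayer` is importable from the package root (neither is stated in this diff):

```typescript
import { prepareIntlayer } from '@intlayer/chokidar';
import { getConfiguration } from '@intlayer/config';

const configuration = getConfiguration();

await prepareIntlayer(configuration, {
  clean: false,
  format: ['cjs', 'esm'],
  // New option: the time window (in ms) during which a previous preparation
  // run is still considered fresh by the sentinel check.
  cacheTimeoutMs: 5 * 60 * 1000,
  onIsCached: () => console.log('Intlayer preparation skipped (recent run found)'),
});
```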
@@ -1 +1 @@
1
- {"version":3,"file":"prepareIntlayer.d.ts","names":[],"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":[],"mappings":";;;KAoBK,sBAAA;;EAAA,MAAA,CAAA,EAAA,CAAA,KAAA,GAAA,KAAA,CAAA,EAAsB;EAad,QAAA,CAAA,EAAA,OAkLZ;EAjLgB,UAAA,CAAA,EAAA,GAAA,GAAA,IAAA,GAVW,OAUX,CAAA,IAAA,CAAA;CACL;AAAsB,cAFrB,eAEqB,EAAA,CAAA,aAAA,EADjB,cACiB,EAAA,OAAA,CAAA,EAAtB,sBAAsB,EAAA,GAAA,OAAA,CAAA,IAAA,CAAA"}
1
+ {"version":3,"file":"prepareIntlayer.d.ts","names":[],"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":[],"mappings":";;;KAoBK,sBAAA;;EAAA,MAAA,CAAA,EAAA,CAAA,KAAA,GAAA,KAAA,CAAA,EAAsB;EAcd,QAAA,CAAA,EAAA,OAkLZ;EAjLgB,cAAA,CAAA,EAAA,MAAA;EACL,UAAA,CAAA,EAAA,GAAA,GAAA,IAAA,GAXgB,OAWhB,CAAA,IAAA,CAAA;CAAsB;AAAA,cAFrB,eAEqB,EAAA,CAAA,aAAA,EADjB,cACiB,EAAA,OAAA,CAAA,EAAtB,sBAAsB,EAAA,GAAA,OAAA,CAAA,IAAA,CAAA"}
@@ -1 +1 @@
1
- {"version":3,"file":"getFormatFromExtension.d.ts","names":[],"sources":["../../../src/utils/getFormatFromExtension.ts"],"sourcesContent":[],"mappings":";KAAY,MAAA;AAAA,KACA,SAAA,GADM,KAAA,GAAA,MAAA,GAAA,KAAA,GAAA,MAAA,GAAA,MAAA,GAAA,OAAA,GAAA,MAAA,GAAA,OAAA,GAAA,OAAA,GAAA,QAAA;AACN,cAYC,sBAZQ,EAAA,CAAA,SAAA,EAY6B,SAZ7B,EAAA,GAYyC,MAZzC;AAYR,cAkBA,sBAlBqC,EAAA,CAAA,MAAY,EAkBf,MAF9C,EAAA,GAEuD,SAFvD"}
1
+ {"version":3,"file":"getFormatFromExtension.d.ts","names":[],"sources":["../../../src/utils/getFormatFromExtension.ts"],"sourcesContent":[],"mappings":";KAAY,MAAA;AAAA,KACA,SAAA,GADM,KAAA,GAAA,MAAA,GAAA,KAAA,GAAA,MAAA,GAAA,MAAA,GAAA,OAAA,GAAA,MAAA,GAAA,OAAA,GAAA,OAAA,GAAA,QAAA;AACN,cAYC,sBAZQ,EAAA,CAAA,SAAA,EAY6B,SAZ7B,EAAA,GAYyC,MAZzC;AAYR,cAkBA,sBAlBqC,EAAA,CAAA,MAAA,EAkBH,MAF9C,EAAA,GAEuD,SAFvD"}
@@ -1 +1 @@
1
- {"version":3,"file":"runOnce.d.ts","names":[],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":[],"mappings":";KAIK,cAAA;EAAA;AAkDL;;EAGY,UAAA,CAAA,EAAA,GAAA,GAAA,IAAA,GAjDgB,OAiDhB,CAAA,IAAA,CAAA;EAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAHb,2DAEY,yBACb,mBAAc"}
1
+ {"version":3,"file":"runOnce.d.ts","names":[],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":[],"mappings":";KAIK,cAAA;EAAA;AAyEL;;EAGY,UAAA,CAAA,EAAA,GAAA,GAAA,IAAA,GAxEgB,OAwEhB,CAAA,IAAA,CAAA;EAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAHb,2DAEY,yBACb,mBAAc"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@intlayer/chokidar",
3
- "version": "7.1.1-canary.0",
3
+ "version": "7.1.1",
4
4
  "private": false,
5
5
  "description": "Uses chokidar to scan and build Intlayer declaration files into dictionaries based on Intlayer configuration.",
6
6
  "keywords": [
@@ -74,13 +74,13 @@
74
74
  "typecheck": "tsc --noEmit --project tsconfig.types.json"
75
75
  },
76
76
  "dependencies": {
77
- "@intlayer/api": "7.1.1-canary.0",
78
- "@intlayer/config": "7.1.1-canary.0",
79
- "@intlayer/core": "7.1.1-canary.0",
80
- "@intlayer/dictionaries-entry": "7.1.1-canary.0",
81
- "@intlayer/remote-dictionaries-entry": "7.1.1-canary.0",
82
- "@intlayer/types": "7.1.1-canary.0",
83
- "@intlayer/unmerged-dictionaries-entry": "7.1.1-canary.0",
77
+ "@intlayer/api": "7.1.1",
78
+ "@intlayer/config": "7.1.1",
79
+ "@intlayer/core": "7.1.1",
80
+ "@intlayer/dictionaries-entry": "7.1.1",
81
+ "@intlayer/remote-dictionaries-entry": "7.1.1",
82
+ "@intlayer/types": "7.1.1",
83
+ "@intlayer/unmerged-dictionaries-entry": "7.1.1",
84
84
  "chokidar": "3.6.0",
85
85
  "crypto-js": "4.2.0",
86
86
  "deep-equal": "2.2.3",