@intlayer/chokidar 8.1.3 → 8.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/index.cjs +1 -1
- package/dist/cjs/utils/index.cjs +1 -1
- package/dist/cjs/utils/mergeChunks.cjs +2 -0
- package/dist/cjs/utils/mergeChunks.cjs.map +1 -0
- package/dist/cjs/watcher.cjs +1 -1
- package/dist/cjs/watcher.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs +1 -1
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs +1 -1
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs.map +1 -1
- package/dist/esm/index.mjs +1 -1
- package/dist/esm/utils/index.mjs +1 -1
- package/dist/esm/utils/mergeChunks.mjs +2 -0
- package/dist/esm/utils/mergeChunks.mjs.map +1 -0
- package/dist/esm/watcher.mjs +1 -1
- package/dist/esm/watcher.mjs.map +1 -1
- package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs +1 -1
- package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs.map +1 -1
- package/dist/esm/writeContentDeclaration/writeJSFile.mjs +1 -1
- package/dist/esm/writeContentDeclaration/writeJSFile.mjs.map +1 -1
- package/dist/types/index.d.ts +2 -1
- package/dist/types/utils/index.d.ts +2 -1
- package/dist/types/utils/mergeChunks.d.ts +5 -0
- package/dist/types/utils/mergeChunks.d.ts.map +1 -0
- package/dist/types/watcher.d.ts.map +1 -1
- package/package.json +11 -11
package/dist/cjs/index.cjs
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`});const e=require(`./utils/pLimit.cjs`),t=require(`./utils/parallelize.cjs`),n=require(`./utils/formatter.cjs`),r=require(`./buildIntlayerDictionary/buildIntlayerDictionary.cjs`),i=require(`./utils/autoDecorateContent.cjs`),a=require(`./utils/resolveObjectPromises.cjs`),o=require(`./buildIntlayerDictionary/processContentDeclaration.cjs`),s=require(`./cleanOutputDir.cjs`),c=require(`./utils/getFileHash.cjs`),l=require(`./createDictionaryEntryPoint/generateDictionaryListContent.cjs`),u=require(`./createDictionaryEntryPoint/getBuiltDictionariesPath.cjs`),d=require(`./createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs`),f=require(`./createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs`),p=require(`./createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs`),m=require(`./createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs`),h=require(`./createDictionaryEntryPoint/createDictionaryEntryPoint.cjs`),g=require(`./createType/createModuleAugmentation.cjs`),_=require(`./createType/createType.cjs`),v=require(`./formatDictionary.cjs`),y=require(`./loadDictionaries/loadContentDeclaration.cjs`),b=require(`./utils/sortAlphabetically.cjs`),x=require(`./loadDictionaries/loadRemoteDictionaries.cjs`),S=require(`./loadDictionaries/loadDictionaries.cjs`),C=require(`./loadDictionaries/loadLocalDictionaries.cjs`),w=require(`./utils/runOnce.cjs`),T=require(`./writeConfiguration/index.cjs`),E=require(`./prepareIntlayer.cjs`),D=require(`./writeContentDeclaration/detectExportedComponentName.cjs`),O=require(`./writeContentDeclaration/detectFormatCommand.cjs`),k=require(`./writeContentDeclaration/transformJSFile.cjs`),A=require(`./utils/getFormatFromExtension.cjs`),j=require(`./writeContentDeclaration/writeJSFile.cjs`),M=require(`./writeContentDeclaration/writeContentDeclaration.cjs`),N=require(`./init/index.cjs`),P=require(`./installSkills/index.cjs`),F=require(`./listGitFiles.cjs`),I=require(`./
listProjects.cjs`),L=require(`./transformFiles/extractDictionaryKey.cjs`),R=require(`./transformFiles/transformFiles.cjs`),z=require(`./utils/buildFilesList.cjs`),B=require(`./utils/splitTextByLine.cjs`),V=require(`./utils/getChunk.cjs`),H=require(`./utils/chunkJSON.cjs`),U=require(`./utils/getComponentTransformPattern.cjs`),W=require(`./utils/
|
|
1
|
+
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`});const e=require(`./utils/pLimit.cjs`),t=require(`./utils/parallelize.cjs`),n=require(`./utils/formatter.cjs`),r=require(`./buildIntlayerDictionary/buildIntlayerDictionary.cjs`),i=require(`./utils/autoDecorateContent.cjs`),a=require(`./utils/resolveObjectPromises.cjs`),o=require(`./buildIntlayerDictionary/processContentDeclaration.cjs`),s=require(`./cleanOutputDir.cjs`),c=require(`./utils/getFileHash.cjs`),l=require(`./createDictionaryEntryPoint/generateDictionaryListContent.cjs`),u=require(`./createDictionaryEntryPoint/getBuiltDictionariesPath.cjs`),d=require(`./createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs`),f=require(`./createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs`),p=require(`./createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs`),m=require(`./createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs`),h=require(`./createDictionaryEntryPoint/createDictionaryEntryPoint.cjs`),g=require(`./createType/createModuleAugmentation.cjs`),_=require(`./createType/createType.cjs`),v=require(`./formatDictionary.cjs`),y=require(`./loadDictionaries/loadContentDeclaration.cjs`),b=require(`./utils/sortAlphabetically.cjs`),x=require(`./loadDictionaries/loadRemoteDictionaries.cjs`),S=require(`./loadDictionaries/loadDictionaries.cjs`),C=require(`./loadDictionaries/loadLocalDictionaries.cjs`),w=require(`./utils/runOnce.cjs`),T=require(`./writeConfiguration/index.cjs`),E=require(`./prepareIntlayer.cjs`),D=require(`./writeContentDeclaration/detectExportedComponentName.cjs`),O=require(`./writeContentDeclaration/detectFormatCommand.cjs`),k=require(`./writeContentDeclaration/transformJSFile.cjs`),A=require(`./utils/getFormatFromExtension.cjs`),j=require(`./writeContentDeclaration/writeJSFile.cjs`),M=require(`./writeContentDeclaration/writeContentDeclaration.cjs`),N=require(`./init/index.cjs`),P=require(`./installSkills/index.cjs`),F=require(`./listGitFiles.cjs`),I=require(`./
listProjects.cjs`),L=require(`./transformFiles/extractDictionaryKey.cjs`),R=require(`./transformFiles/transformFiles.cjs`),z=require(`./utils/buildFilesList.cjs`),B=require(`./utils/splitTextByLine.cjs`),V=require(`./utils/getChunk.cjs`),H=require(`./utils/chunkJSON.cjs`),U=require(`./utils/getComponentTransformPattern.cjs`),W=require(`./utils/mergeChunks.cjs`),G=require(`./utils/parallelizeGlobal.cjs`),K=require(`./utils/reduceObjectFormat.cjs`),q=require(`./utils/runParallel/index.cjs`),J=require(`./utils/verifyIdenticObjectFormat.cjs`),Y=require(`./watcher.cjs`);exports.ATTRIBUTES_TO_EXTRACT=R.ATTRIBUTES_TO_EXTRACT,exports.Queue=e.Queue,exports.SKILLS=P.SKILLS,exports.SKILLS_METADATA=P.SKILLS_METADATA,exports.assembleJSON=H.assembleJSON,exports.autoDecorateContent=i.autoDecorateContent,exports.buildAndWatchIntlayer=Y.buildAndWatchIntlayer,exports.buildDictionary=r.buildDictionary,exports.buildFilesList=z.buildFilesList,exports.chunkJSON=H.chunkJSON,exports.cleanOutputDir=s.cleanOutputDir,exports.createDictionaryEntryPoint=h.createDictionaryEntryPoint,exports.createModuleAugmentation=g.createModuleAugmentation,exports.createTypes=_.createTypes,exports.detectExportedComponentName=D.detectExportedComponentName,exports.detectFormatCommand=O.detectFormatCommand,exports.extractDictionaryKey=L.extractDictionaryKey,exports.extractIntlayer=R.extractIntlayer,exports.formatDictionaries=v.formatDictionaries,exports.formatDictionariesOutput=v.formatDictionariesOutput,exports.formatDictionary=v.formatDictionary,exports.formatDictionaryOutput=v.formatDictionaryOutput,exports.formatDistantDictionaries=x.formatDistantDictionaries,exports.formatLocalDictionaries=y.formatLocalDictionaries,exports.formatLocale=n.formatLocale,exports.formatPath=n.formatPath,exports.generateDictionaryListContent=l.generateDictionaryListContent,exports.generateKey=R.generateKey,exports.generateTypeScriptType=_.generateTypeScriptType,exports.getBuiltDictionariesPath=u.getBuiltDictionariesPath,exports.ge
tBuiltDynamicDictionariesPath=d.getBuiltDynamicDictionariesPath,exports.getBuiltFetchDictionariesPath=f.getBuiltFetchDictionariesPath,exports.getBuiltRemoteDictionariesPath=p.getBuiltRemoteDictionariesPath,exports.getBuiltUnmergedDictionariesPath=m.getBuiltUnmergedDictionariesPath,exports.getChunk=V.getChunk,exports.getComponentTransformPattern=U.getComponentTransformPattern,exports.getComponentTransformPatternSync=U.getComponentTransformPatternSync,exports.getExtensionFromFormat=A.getExtensionFromFormat,exports.getFileHash=c.getFileHash,exports.getFormatFromExtension=A.getFormatFromExtension,exports.getGlobalLimiter=G.getGlobalLimiter,exports.getTaskLimiter=G.getTaskLimiter,exports.getTypeName=g.getTypeName,exports.initIntlayer=N.initIntlayer,exports.installSkills=P.installSkills,exports.isCachedConfigurationUpToDate=T.isCachedConfigurationUpToDate,exports.listGitFiles=F.listGitFiles,exports.listGitLines=F.listGitLines,exports.listProjects=I.listProjects,exports.loadContentDeclarations=y.loadContentDeclarations,exports.loadDictionaries=S.loadDictionaries,exports.loadLocalDictionaries=C.loadLocalDictionaries,exports.loadRemoteDictionaries=x.loadRemoteDictionaries,exports.mergeChunks=W.mergeChunks,exports.pLimit=e.pLimit,exports.parallelize=t.parallelize,exports.parallelizeGlobal=G.parallelizeGlobal,exports.prepareIntlayer=E.prepareIntlayer,exports.processContentDeclaration=o.processContentDeclaration,exports.reconstructFromSingleChunk=H.reconstructFromSingleChunk,exports.reduceObjectFormat=K.reduceObjectFormat,exports.resolveObjectPromises=a.resolveObjectPromises,exports.runOnce=w.runOnce,exports.runParallel=q.runParallel,exports.shouldExtract=R.shouldExtract,exports.sortAlphabetically=b.sortAlphabetically,exports.splitTextByLines=B.splitTextByLines,exports.transformFiles=R.transformFiles,exports.transformJSFile=k.transformJSFile,exports.verifyIdenticObjectFormat=J.verifyIdenticObjectFormat,exports.watch=Y.watch,exports.writeConfiguration=T.writeConfiguration,export
s.writeContentDeclaration=M.writeContentDeclaration,exports.writeJSFile=j.writeJSFile;
|
package/dist/cjs/utils/index.cjs
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`});const e=require(`./pLimit.cjs`),t=require(`./parallelize.cjs`),n=require(`./formatter.cjs`),r=require(`./autoDecorateContent.cjs`),i=require(`./resolveObjectPromises.cjs`),a=require(`./getFileHash.cjs`),o=require(`./sortAlphabetically.cjs`),s=require(`./runOnce.cjs`),c=require(`./getFormatFromExtension.cjs`),l=require(`./buildFilesList.cjs`),u=require(`./splitTextByLine.cjs`),d=require(`./getChunk.cjs`),f=require(`./chunkJSON.cjs`),p=require(`./getComponentTransformPattern.cjs`),m=require(`./
|
|
1
|
+
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`});const e=require(`./pLimit.cjs`),t=require(`./parallelize.cjs`),n=require(`./formatter.cjs`),r=require(`./autoDecorateContent.cjs`),i=require(`./resolveObjectPromises.cjs`),a=require(`./getFileHash.cjs`),o=require(`./sortAlphabetically.cjs`),s=require(`./runOnce.cjs`),c=require(`./getFormatFromExtension.cjs`),l=require(`./buildFilesList.cjs`),u=require(`./splitTextByLine.cjs`),d=require(`./getChunk.cjs`),f=require(`./chunkJSON.cjs`),p=require(`./getComponentTransformPattern.cjs`),m=require(`./mergeChunks.cjs`),h=require(`./parallelizeGlobal.cjs`),g=require(`./reduceObjectFormat.cjs`),_=require(`./runParallel/index.cjs`),v=require(`./verifyIdenticObjectFormat.cjs`);exports.Queue=e.Queue,exports.assembleJSON=f.assembleJSON,exports.autoDecorateContent=r.autoDecorateContent,exports.buildFilesList=l.buildFilesList,exports.chunkJSON=f.chunkJSON,exports.formatLocale=n.formatLocale,exports.formatPath=n.formatPath,exports.getChunk=d.getChunk,exports.getComponentTransformPattern=p.getComponentTransformPattern,exports.getComponentTransformPatternSync=p.getComponentTransformPatternSync,exports.getExtensionFromFormat=c.getExtensionFromFormat,exports.getFileHash=a.getFileHash,exports.getFormatFromExtension=c.getFormatFromExtension,exports.getGlobalLimiter=h.getGlobalLimiter,exports.getTaskLimiter=h.getTaskLimiter,exports.mergeChunks=m.mergeChunks,exports.pLimit=e.pLimit,exports.parallelize=t.parallelize,exports.parallelizeGlobal=h.parallelizeGlobal,exports.reconstructFromSingleChunk=f.reconstructFromSingleChunk,exports.reduceObjectFormat=g.reduceObjectFormat,exports.resolveObjectPromises=i.resolveObjectPromises,exports.runOnce=s.runOnce,exports.runParallel=_.runParallel,exports.sortAlphabetically=o.sortAlphabetically,exports.splitTextByLines=u.splitTextByLines,exports.verifyIdenticObjectFormat=v.verifyIdenticObjectFormat;
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`});const e=e=>{if(e.length===0)return{};let n=e[0];for(let r=1;r<e.length;r++)n=t(n,e[r]);return n},t=(e,n)=>{if(e==null)return n;if(n==null)return e;if(Array.isArray(e)&&Array.isArray(n)){let r=Math.max(e.length,n.length),i=[];for(let a=0;a<r;a++)i[a]=t(e[a],n[a]);return i}if(typeof e==`object`&&typeof n==`object`){let r={...e};for(let e of Object.keys(n))r[e]=t(r[e],n[e]);return r}return e};exports.mergeChunks=e;
|
|
2
|
+
//# sourceMappingURL=mergeChunks.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"mergeChunks.cjs","names":[],"sources":["../../../src/utils/mergeChunks.ts"],"sourcesContent":["export const mergeChunks = (chunks: any[]): any => {\n if (chunks.length === 0) return {};\n\n let result = chunks[0];\n\n for (let i = 1; i < chunks.length; i++) {\n result = customChunkMerge(result, chunks[i]);\n }\n\n return result;\n};\n\nconst customChunkMerge = (dest: any, source: any): any => {\n if (dest === undefined || dest === null) return source;\n if (source === undefined || source === null) return dest;\n\n if (Array.isArray(dest) && Array.isArray(source)) {\n const maxLength = Math.max(dest.length, source.length);\n const result = [];\n for (let i = 0; i < maxLength; i++) {\n result[i] = customChunkMerge(dest[i], source[i]);\n }\n return result;\n }\n\n if (typeof dest === 'object' && typeof source === 'object') {\n const result: any = { ...dest };\n for (const key of Object.keys(source)) {\n result[key] = customChunkMerge(result[key], source[key]);\n }\n return result;\n }\n\n // Primitives: if we are here, both are not null.\n // Since chunks shouldn't overlap, we can return dest.\n return dest;\n};\n"],"mappings":"mEAAA,MAAa,EAAe,GAAuB,CACjD,GAAI,EAAO,SAAW,EAAG,MAAO,EAAE,CAElC,IAAI,EAAS,EAAO,GAEpB,IAAK,IAAI,EAAI,EAAG,EAAI,EAAO,OAAQ,IACjC,EAAS,EAAiB,EAAQ,EAAO,GAAG,CAG9C,OAAO,GAGH,GAAoB,EAAW,IAAqB,CACxD,GAAI,GAA+B,KAAM,OAAO,EAChD,GAAI,GAAmC,KAAM,OAAO,EAEpD,GAAI,MAAM,QAAQ,EAAK,EAAI,MAAM,QAAQ,EAAO,CAAE,CAChD,IAAM,EAAY,KAAK,IAAI,EAAK,OAAQ,EAAO,OAAO,CAChD,EAAS,EAAE,CACjB,IAAK,IAAI,EAAI,EAAG,EAAI,EAAW,IAC7B,EAAO,GAAK,EAAiB,EAAK,GAAI,EAAO,GAAG,CAElD,OAAO,EAGT,GAAI,OAAO,GAAS,UAAY,OAAO,GAAW,SAAU,CAC1D,IAAM,EAAc,CAAE,GAAG,EAAM,CAC/B,IAAK,IAAM,KAAO,OAAO,KAAK,EAAO,CACnC,EAAO,GAAO,EAAiB,EAAO,GAAM,EAAO,GAAK,CAE1D,OAAO,EAKT,OAAO"}
|
package/dist/cjs/watcher.cjs
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`}),require(`./_virtual/_rolldown/runtime.cjs`);const e=require(`./prepareIntlayer.cjs`),t=require(`./writeContentDeclaration/writeContentDeclaration.cjs`),n=require(`./handleAdditionalContentDeclarationFile.cjs`),r=require(`./handleContentDeclarationFileChange.cjs`),i=require(`./handleContentDeclarationFileMoved.cjs`),a=require(`./handleUnlinkedContentDeclarationFile.cjs`);let o=require(`@intlayer/config/node`),s=require(`node:fs/promises`),c=require(`node:path`),l=require(`@intlayer/config/logger`),u=require(`chokidar`);const d=new Map
|
|
1
|
+
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`}),require(`./_virtual/_rolldown/runtime.cjs`);const e=require(`./prepareIntlayer.cjs`),t=require(`./writeContentDeclaration/writeContentDeclaration.cjs`),n=require(`./handleAdditionalContentDeclarationFile.cjs`),r=require(`./handleContentDeclarationFileChange.cjs`),i=require(`./handleContentDeclarationFileMoved.cjs`),a=require(`./handleUnlinkedContentDeclarationFile.cjs`);let o=require(`@intlayer/config/node`),s=require(`node:fs/promises`),c=require(`node:path`),l=require(`@intlayer/config/logger`),u=require(`chokidar`);const d=new Map;let f=Promise.resolve();const p=e=>{f=f.then(async()=>{try{await e()}catch(e){console.error(e)}})},m=f=>{let m=f?.configuration??(0,o.getConfiguration)(f?.configOptions),h=(0,l.getAppLogger)(m),{watch:g,watchedFilesPatternWithPath:_,fileExtensions:v}=m.content;return(0,u.watch)(_,{persistent:g,ignoreInitial:!0,awaitWriteFinish:{stabilityThreshold:1e3,pollInterval:100},ignored:[`**/node_modules/**`,`**/dist/**`,`**/build/**`,`**/.intlayer/**`],...f}).on(`add`,async e=>{let r=(0,c.basename)(e),a=!1,o;for(let[e]of d)if((0,c.basename)(e)===r){o=e;break}if(!o&&d.size===1&&(o=d.keys().next().value),o){let t=d.get(o);t&&(clearTimeout(t.timer),d.delete(o)),a=!0,h(`File moved from ${o} to ${e}`)}p(async()=>{if(a&&o)await i.handleContentDeclarationFileMoved(o,e,m);else{if(await(0,s.readFile)(e,`utf-8`)===``){let n=v.map(e=>e.replace(/\./g,`\\.`)).join(`|`);await t.writeContentDeclaration({key:r.replace(RegExp(`(${n})$`),``),content:{},filePath:e},m)}await n.handleAdditionalContentDeclarationFile(e,m)}})}).on(`change`,async e=>p(async()=>r.handleContentDeclarationFileChange(e,m))).on(`unlink`,async e=>{let t=setTimeout(async()=>{d.delete(e),p(async()=>a.handleUnlinkedContentDeclarationFile(e,m))},200);d.set(e,{timer:t,oldPath:e})}).on(`error`,async t=>{h(`Watcher error: ${t}`,{level:`error`}),h(`Restarting watcher`),await e.prepareIntlayer(m)})},h=async 
t=>{let{skipPrepare:n,...r}=t??{},i=t?.configuration??(0,o.getConfiguration)(t?.configOptions);n||await e.prepareIntlayer(i,{forceRun:!0}),(i.content.watch||t?.persistent)&&((0,l.getAppLogger)(i)(`Watching Intlayer content declarations`),m({...r,configuration:i}))};exports.buildAndWatchIntlayer=h,exports.watch=m;
|
|
2
2
|
//# sourceMappingURL=watcher.cjs.map
|
package/dist/cjs/watcher.cjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"watcher.cjs","names":["handleContentDeclarationFileMoved","writeContentDeclaration","handleAdditionalContentDeclarationFile","handleContentDeclarationFileChange","handleUnlinkedContentDeclarationFile","prepareIntlayer"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises';\nimport { basename } from 'node:path';\nimport { getAppLogger } from '@intlayer/config/logger';\nimport {\n type GetConfigurationOptions,\n getConfiguration,\n} from '@intlayer/config/node';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions' */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleContentDeclarationFileMoved } from './handleContentDeclarationFileMoved';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\nimport { writeContentDeclaration } from './writeContentDeclaration';\n\n// Map to track files that were recently unlinked: oldPath -> { timer, timestamp }\nconst pendingUnlinks = new Map<\n string,\n { timer: NodeJS.Timeout; oldPath: string }\n>();\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration: IntlayerConfig =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const {\n watch: isWatchMode,\n watchedFilesPatternWithPath,\n fileExtensions,\n } = configuration.content;\n\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n let isMove = false;\n\n // Check if this Add corresponds to a pending Unlink (Move/Rename detection)\n // Heuristic:\n // - Priority A: Exact basename match (Moved to different folder)\n // - Priority B: Single entry in pendingUnlinks (Renamed file)\n let matchedOldPath: string | undefined;\n\n // Search for basename match\n for (const [oldPath] of pendingUnlinks) {\n if (basename(oldPath) === fileName) {\n matchedOldPath = oldPath;\n break;\n }\n }\n\n // If no basename match, but exactly one file was recently unlinked, assume it's a rename\n if (!matchedOldPath && pendingUnlinks.size === 1) {\n matchedOldPath = pendingUnlinks.keys().next().value;\n }\n\n if (matchedOldPath) {\n // It is a move! Cancel the unlink handler\n const pending = pendingUnlinks.get(matchedOldPath);\n if (pending) {\n clearTimeout(pending.timer);\n pendingUnlinks.delete(matchedOldPath);\n }\n\n isMove = true;\n appLogger(`File moved from ${matchedOldPath} to ${filePath}`);\n\n await handleContentDeclarationFileMoved(\n
|
|
1
|
+
{"version":3,"file":"watcher.cjs","names":["handleContentDeclarationFileMoved","writeContentDeclaration","handleAdditionalContentDeclarationFile","handleContentDeclarationFileChange","handleUnlinkedContentDeclarationFile","prepareIntlayer"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises';\nimport { basename } from 'node:path';\nimport { getAppLogger } from '@intlayer/config/logger';\nimport {\n type GetConfigurationOptions,\n getConfiguration,\n} from '@intlayer/config/node';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions' */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleContentDeclarationFileMoved } from './handleContentDeclarationFileMoved';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\nimport { writeContentDeclaration } from './writeContentDeclaration';\n\n// Map to track files that were recently unlinked: oldPath -> { timer, timestamp }\nconst pendingUnlinks = new Map<\n string,\n { timer: NodeJS.Timeout; oldPath: string }\n>();\n\n// Task queue to ensure sequential processing of file events\nlet processingQueue = Promise.resolve();\nconst processEvent = (task: () => Promise<void>) => {\n processingQueue = processingQueue.then(async () => {\n try {\n await task();\n } catch (error) {\n console.error(error);\n }\n });\n};\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const 
configuration: IntlayerConfig =\n options?.configuration ?? getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const {\n watch: isWatchMode,\n watchedFilesPatternWithPath,\n fileExtensions,\n } = configuration.content;\n\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n let isMove = false;\n\n // Check if this Add corresponds to a pending Unlink (Move/Rename detection)\n // Heuristic:\n // - Priority A: Exact basename match (Moved to different folder)\n // - Priority B: Single entry in pendingUnlinks (Renamed file)\n let matchedOldPath: string | undefined;\n\n // Search for basename match\n for (const [oldPath] of pendingUnlinks) {\n if (basename(oldPath) === fileName) {\n matchedOldPath = oldPath;\n break;\n }\n }\n\n // If no basename match, but exactly one file was recently unlinked, assume it's a rename\n if (!matchedOldPath && pendingUnlinks.size === 1) {\n matchedOldPath = pendingUnlinks.keys().next().value;\n }\n\n if (matchedOldPath) {\n // It is a move! 
Cancel the unlink handler\n const pending = pendingUnlinks.get(matchedOldPath);\n if (pending) {\n clearTimeout(pending.timer);\n pendingUnlinks.delete(matchedOldPath);\n }\n\n isMove = true;\n appLogger(`File moved from ${matchedOldPath} to ${filePath}`);\n }\n\n processEvent(async () => {\n if (isMove && matchedOldPath) {\n await handleContentDeclarationFileMoved(\n matchedOldPath,\n filePath,\n configuration\n );\n } else {\n const fileContent = await readFile(filePath, 'utf-8');\n const isEmpty = fileContent === '';\n\n // Fill template content declaration file if it is empty\n if (isEmpty) {\n const extensionPattern = fileExtensions\n .map((ext) => ext.replace(/\\./g, '\\\\.'))\n .join('|');\n const name = fileName.replace(\n new RegExp(`(${extensionPattern})$`),\n ''\n );\n\n await writeContentDeclaration(\n {\n key: name,\n content: {},\n filePath,\n },\n configuration\n );\n }\n\n await handleAdditionalContentDeclarationFile(filePath, configuration);\n }\n });\n })\n .on('change', async (filePath) =>\n processEvent(async () =>\n handleContentDeclarationFileChange(filePath, configuration)\n )\n )\n .on('unlink', async (filePath) => {\n // Delay unlink processing to see if an 'add' event occurs (indicating a move)\n const timer = setTimeout(async () => {\n // If timer fires, the file was genuinely removed\n pendingUnlinks.delete(filePath);\n processEvent(async () =>\n handleUnlinkedContentDeclarationFile(filePath, configuration)\n );\n }, 200); // 200ms window to catch the 'add' event\n\n pendingUnlinks.set(filePath, { timer, oldPath: filePath });\n })\n .on('error', async (error) => {\n appLogger(`Watcher error: ${error}`, {\n level: 'error',\n });\n\n appLogger('Restarting watcher');\n\n await prepareIntlayer(configuration);\n });\n};\n\nexport const buildAndWatchIntlayer = async (options?: WatchOptions) => {\n const { skipPrepare, ...rest } = options ?? {};\n const configuration =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n\n if (!skipPrepare) {\n await prepareIntlayer(configuration, { forceRun: true });\n }\n\n if (configuration.content.watch || options?.persistent) {\n const appLogger = getAppLogger(configuration);\n\n appLogger('Watching Intlayer content declarations');\n watch({ ...rest, configuration });\n }\n};\n"],"mappings":"+kBAkBA,MAAM,EAAiB,IAAI,IAM3B,IAAI,EAAkB,QAAQ,SAAS,CACvC,MAAM,EAAgB,GAA8B,CAClD,EAAkB,EAAgB,KAAK,SAAY,CACjD,GAAI,CACF,MAAM,GAAM,OACL,EAAO,CACd,QAAQ,MAAM,EAAM,GAEtB,EAUS,EAAS,GAA2B,CAC/C,IAAM,EACJ,GAAS,gBAAA,EAAA,EAAA,kBAAkC,GAAS,cAAc,CAC9D,GAAA,EAAA,EAAA,cAAyB,EAAc,CAEvC,CACJ,MAAO,EACP,8BACA,kBACE,EAAc,QAElB,OAAA,EAAA,EAAA,OAAqB,EAA6B,CAChD,WAAY,EACZ,cAAe,GACf,iBAAkB,CAChB,mBAAoB,IACpB,aAAc,IACf,CACD,QAAS,CACP,qBACA,aACA,cACA,kBACD,CACD,GAAG,EACJ,CAAC,CACC,GAAG,MAAO,KAAO,IAAa,CAC7B,IAAM,GAAA,EAAA,EAAA,UAAoB,EAAS,CAC/B,EAAS,GAMT,EAGJ,IAAK,GAAM,CAAC,KAAY,EACtB,IAAA,EAAA,EAAA,UAAa,EAAQ,GAAK,EAAU,CAClC,EAAiB,EACjB,MASJ,GAJI,CAAC,GAAkB,EAAe,OAAS,IAC7C,EAAiB,EAAe,MAAM,CAAC,MAAM,CAAC,OAG5C,EAAgB,CAElB,IAAM,EAAU,EAAe,IAAI,EAAe,CAC9C,IACF,aAAa,EAAQ,MAAM,CAC3B,EAAe,OAAO,EAAe,EAGvC,EAAS,GACT,EAAU,mBAAmB,EAAe,MAAM,IAAW,CAG/D,EAAa,SAAY,CACvB,GAAI,GAAU,EACZ,MAAMA,EAAAA,kCACJ,EACA,EACA,EACD,KACI,CAKL,GAJoB,MAAA,EAAA,EAAA,UAAe,EAAU,QAAQ,GACrB,GAGnB,CACX,IAAM,EAAmB,EACtB,IAAK,GAAQ,EAAI,QAAQ,MAAO,MAAM,CAAC,CACvC,KAAK,IAAI,CAMZ,MAAMC,EAAAA,wBACJ,CACE,IAPS,EAAS,QAChB,OAAO,IAAI,EAAiB,IAAI,CACpC,GACD,CAKG,QAAS,EAAE,CACX,WACD,CACD,EACD,CAGH,MAAMC,EAAAA,uCAAuC,EAAU,EAAc,GAEvE,EACF,CACD,GAAG,SAAU,KAAO,IACnB,EAAa,SACXC,EAAAA,mCAAmC,EAAU,EAAc,CAC5D,CACF,CACA,GAAG,SAAU,KAAO,IAAa,CAEhC,IAAM,EAAQ,WAAW,SAAY,CAEnC,EAAe,OAAO,EAAS,CAC/B,EAAa,SACXC,EAAAA,qCAAqC,EAAU,EAAc,CAC9D,EACA,IAAI,CAEP,EAAe,IAAI,EAAU,CAAE,QAAO,QAAS,EAAU,CAAC,EAC1D,CACD,GAAG,QAAS,KAAO,IAAU,CAC5B,EAAU,kBAAkB,IAAS,CACnC,MAAO,QACR,CAAC,CAEF,EAAU,qBAAqB,CAE/B,MAAMC,EAAAA,gBAAgB,EAAc,EACpC,EAGO,EAAwB,KAAO,IAA2B,CACrE,GAAM,CAAE,cAAa,GAAG,GAAS,GAAW,EAAE,CACxC,EACJ,GAAS,gBAAA,EAAA,EAA
A,kBAAkC,GAAS,cAAc,CAE/D,GACH,MAAMA,EAAAA,gBAAgB,EAAe,CAAE,SAAU,GAAM,CAAC,EAGtD,EAAc,QAAQ,OAAS,GAAS,eAG1C,EAAA,EAAA,cAF+B,EAAc,CAEnC,yCAAyC,CACnD,EAAM,CAAE,GAAG,EAAM,gBAAe,CAAC"}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`}),require(`../_virtual/_rolldown/runtime.cjs`);const e=require(`../utils/getFormatFromExtension.cjs`),t=require(`./processContentDeclarationContent.cjs`),n=require(`./transformJSONFile.cjs`),r=require(`./writeJSFile.cjs`);let i=require(`@intlayer/unmerged-dictionaries-entry`),a=require(`node:fs/promises`),o=require(`node:path`),s=require(`@intlayer/core/plugins`),c=require(`node:fs`),l=require(`node:util`);const u=async(n,r,i)=>{let a=await t.processContentDeclarationContent(n),c=a.content;n.locale?c=(0,s.getPerLocaleDictionary)(a,n.locale).content:i&&(c=(0,s.getFilteredLocalesDictionary)(a,i).content);let l={...n,content:c};for await(let e of r.plugins??[])if(e.formatOutput){let t=await e.formatOutput?.({dictionary:l,configuration:r});t&&(l=t)}if(!(l.content&&l.key))return l;let u={key:n.key,id:n.id,title:n.title,description:n.description,tags:n.tags,locale:n.locale,fill:n.fill,filled:n.filled,priority:n.priority,importMode:n.importMode,version:n.version,content:c};return e.getFormatFromExtension(n.filePath?(0,o.extname)(n.filePath):`.json`)===`json`&&l.content&&l.key&&(u={$schema:`https://intlayer.org/schema.json`,...u}),u},d={newDictionariesPath:`intlayer-dictionaries`},f=async(e,t,n)=>{let{content:r}=t,{baseDir:a}=r,{newDictionariesPath:s,localeList:c}={...d,...n},f=(0,o.join)(a,s),m=(0,i.getUnmergedDictionaries)(t)[e.key]?.find(t=>t.localId===e.localId),h=await u(e,t,c);if(m?.filePath){let n=(0,l.isDeepStrictEqual)(m,e),r=(0,o.resolve)(t.content.baseDir,m.filePath);return n?{status:`up-to-date`,path:r}:(await p(r,h,t),{status:`updated`,path:r})}if(e.filePath){let n=(0,o.resolve)(t.content.baseDir,e.filePath);return await p(n,h,t),{status:`created`,path:n}}let g=(0,o.join)(f,`${e.key}.content.json`);return await p(g,h,t),{status:`imported`,path:g}},p=async(e,t,i)=>{await(0,a.mkdir)((0,o.dirname)(e),{recursive:!0});let 
s=(0,o.extname)(e);if(!i.content.fileExtensions.map(e=>(0,o.extname)(e)).includes(s))throw Error(`Invalid file extension: ${s}, file: ${e}`);if(s===`.json`){await(0,a.writeFile)(
|
|
1
|
+
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`}),require(`../_virtual/_rolldown/runtime.cjs`);const e=require(`../utils/getFormatFromExtension.cjs`),t=require(`./processContentDeclarationContent.cjs`),n=require(`./transformJSONFile.cjs`),r=require(`./writeJSFile.cjs`);let i=require(`@intlayer/unmerged-dictionaries-entry`),a=require(`node:fs/promises`),o=require(`node:path`),s=require(`@intlayer/core/plugins`),c=require(`node:fs`),l=require(`node:util`);const u=async(n,r,i)=>{let a=await t.processContentDeclarationContent(n),c=a.content;n.locale?c=(0,s.getPerLocaleDictionary)(a,n.locale).content:i&&(c=(0,s.getFilteredLocalesDictionary)(a,i).content);let l={...n,content:c};for await(let e of r.plugins??[])if(e.formatOutput){let t=await e.formatOutput?.({dictionary:l,configuration:r});t&&(l=t)}if(!(l.content&&l.key))return l;let u={key:n.key,id:n.id,title:n.title,description:n.description,tags:n.tags,locale:n.locale,fill:n.fill,filled:n.filled,priority:n.priority,importMode:n.importMode,version:n.version,content:c};return e.getFormatFromExtension(n.filePath?(0,o.extname)(n.filePath):`.json`)===`json`&&l.content&&l.key&&(u={$schema:`https://intlayer.org/schema.json`,...u}),u},d={newDictionariesPath:`intlayer-dictionaries`},f=async(e,t,n)=>{let{content:r}=t,{baseDir:a}=r,{newDictionariesPath:s,localeList:c}={...d,...n},f=(0,o.join)(a,s),m=(0,i.getUnmergedDictionaries)(t)[e.key]?.find(t=>t.localId===e.localId),h=await u(e,t,c);if(m?.filePath){let n=(0,l.isDeepStrictEqual)(m,e),r=(0,o.resolve)(t.content.baseDir,m.filePath);return n?{status:`up-to-date`,path:r}:(await p(r,h,t),{status:`updated`,path:r})}if(e.filePath){let n=(0,o.resolve)(t.content.baseDir,e.filePath);return await p(n,h,t),{status:`created`,path:n}}let g=(0,o.join)(f,`${e.key}.content.json`);return await p(g,h,t),{status:`imported`,path:g}},p=async(e,t,i)=>{await(0,a.mkdir)((0,o.dirname)(e),{recursive:!0});let 
s=(0,o.extname)(e);if(!i.content.fileExtensions.map(e=>(0,o.extname)(e)).includes(s))throw Error(`Invalid file extension: ${s}, file: ${e}`);if(s===`.json`){let n=JSON.stringify(t,null,2),r=`${e}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;await(0,a.writeFile)(r,`${n}\n`),await(0,a.rename)(r,e);return}if([`.jsonc`,`.json5`].includes(s)){let r=`{}`;if((0,c.existsSync)(e))try{r=await(0,a.readFile)(e,`utf-8`)}catch{}let i=n.transformJSONFile(r,t),o=`${e}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;await(0,a.writeFile)(o,i,`utf-8`),await(0,a.rename)(o,e);return}await r.writeJSFile(e,t,i);try{await(0,a.rm)((0,o.join)(i.system.cacheDir,`intlayer-prepared.lock`),{recursive:!0})}catch{}};exports.writeContentDeclaration=f;
|
|
2
2
|
//# sourceMappingURL=writeContentDeclaration.cjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"writeContentDeclaration.cjs","names":["processContentDeclarationContent","getFormatFromExtension","transformJSONFile","writeJSFile"],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { mkdir, readFile, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core/plugins';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { transformJSONFile } from './transformJSONFile';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? 
[]) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n importMode: dictionary.importMode,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n 
] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file 
extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n await writeFile(absoluteFilePath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n\n return;\n }\n\n // Handle JSONC, and JSON5 via the AST transformer\n if (['.jsonc', '.json5'].includes(extension)) {\n let fileContent = '{}';\n\n if (existsSync(absoluteFilePath)) {\n try {\n fileContent = await readFile(absoluteFilePath, 'utf-8');\n } catch {\n // ignore read errors, start with empty object\n }\n }\n\n const transformedContent = transformJSONFile(fileContent, dictionary);\n\n // We use standard writeFile because transformedContent is already a string\n await writeFile(absoluteFilePath, transformedContent, 'utf-8');\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.system.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch 
{}\n};\n"],"mappings":"2dAwBA,MAAM,EAA2B,MAC/B,EACA,EACA,IACG,CAIH,IAAM,EACJ,MAAMA,EAAAA,iCAAiC,EAAW,CAEhD,EAAU,EAAoB,QAM9B,EAAW,OACb,GAAA,EAAA,EAAA,wBACE,EACA,EAAW,OACZ,CAAC,QACO,IACT,GAAA,EAAA,EAAA,8BACE,EACA,EACD,CAAC,SAGJ,IAAI,EAA0B,CAC5B,GAAG,EACH,UACD,CAMD,UAAW,IAAM,KAAU,EAAc,SAAW,EAAE,CACpD,GAAI,EAAO,aAAc,CACvB,IAAM,EAAkB,MAAM,EAAO,eAAe,CAClD,WAAY,EACZ,gBACD,CAAC,CAEE,IACF,EAAqB,GAQ3B,GAAI,EAFF,EAAmB,SAAW,EAAmB,KAE1B,OAAO,EAEhC,IAAI,EAAqB,CACvB,IAAK,EAAW,IAChB,GAAI,EAAW,GACf,MAAO,EAAW,MAClB,YAAa,EAAW,YACxB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,SAAU,EAAW,SACrB,WAAY,EAAW,WACvB,QAAS,EAAW,QACpB,UACD,CAqBD,OAbeC,EAAAA,uBAFb,EAAW,UAAA,EAAA,EAAA,SAAmB,EAAW,SAAS,CAAG,QAEP,GAGnC,QACX,EAAmB,SACnB,EAAmB,MAEnB,EAAS,CACP,QAAS,mCACT,GAAG,EACJ,EAGI,GASH,EAAiB,CACrB,oBAAqB,wBACtB,CAEY,EAA0B,MACrC,EACA,EACA,IACwD,CACxD,GAAM,CAAE,WAAY,EACd,CAAE,WAAY,EACd,CAAE,sBAAqB,cAAe,CAC1C,GAAG,EACH,GAAG,EACJ,CAEK,GAAA,EAAA,EAAA,MAAiC,EAAS,EAAoB,CAO9D,GAAA,EAAA,EAAA,yBALqD,EAAc,CAEvE,EAAW,MAGoC,KAC9C,GAAO,EAAG,UAAY,EAAW,QACnC,CAEK,EAA8B,MAAM,EACxC,EACA,EACA,EACD,CAED,GAAI,GAAoB,SAAU,CAEhC,IAAM,GAAA,EAAA,EAAA,mBAAkC,EAAoB,EAAW,CAEjE,GAAA,EAAA,EAAA,SACJ,EAAc,QAAQ,QACtB,EAAmB,SACpB,CAgBD,OAbI,EACK,CACL,OAAQ,aACR,KAAM,EACP,EAGH,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,EAG9C,GAAI,EAAW,SAAU,CACvB,IAAM,GAAA,EAAA,EAAA,SACJ,EAAc,QAAQ,QACtB,EAAW,SACZ,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,CAI9C,IAAM,GAAA,EAAA,EAAA,MACJ,EACA,GAAG,EAAW,IAAI,eACnB,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CACL,OAAQ,WACR,KAAM,EACP,EAGG,EAA2B,MAC/B,EACA,EACA,IACkB,CAKlB,MAAA,EAAA,EAAA,QAAA,EAAA,EAAA,SAHoB,EAAiB,CAGpB,CAAE,UAAW,GAAM,CAAC,CAErC,IAAM,GAAA,EAAA,EAAA,SAAoB,EAAiB,CAK3C,GAAI,CAJuB,EAAc,QAAQ,eAAe,IAC7D,IAAA,EAAA,EAAA,SAAsB,EAAU,CAClC,CAEuB,SAAS,EAAU,CACzC,MAAU,MACR,2BAA2B,EAAU,UAAU,IAChD,CAGH,GAAI,IAAc,QAAS,CAIzB,MAAA,EAAA,EAAA,WAAgB,EAAkB,GAHX,KAAK,UAAU,EAAY,KAAM,EAAE,CAGN,IAAI,CAExD,OAIF,GAAI,CAAC,SAAU,SAAS,CAAC,SAAS,EAAU,CAAE,CAC5C,IAAI,EAAc,KAEl
B,IAAA,EAAA,EAAA,YAAe,EAAiB,CAC9B,GAAI,CACF,EAAc,MAAA,EAAA,EAAA,UAAe,EAAkB,QAAQ,MACjD,EAQV,MAAA,EAAA,EAAA,WAAgB,EAHWC,EAAAA,kBAAkB,EAAa,EAAW,CAGf,QAAQ,CAC9D,OAGF,MAAMC,EAAAA,YAAY,EAAkB,EAAY,EAAc,CAI9D,GAAI,CAKF,MAAA,EAAA,EAAA,KAAA,EAAA,EAAA,MAHE,EAAc,OAAO,SACrB,yBACD,CACsB,CAAE,UAAW,GAAM,CAAC,MACrC"}
|
|
1
|
+
{"version":3,"file":"writeContentDeclaration.cjs","names":["processContentDeclarationContent","getFormatFromExtension","transformJSONFile","writeJSFile"],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { mkdir, readFile, rename, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core/plugins';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { transformJSONFile } from './transformJSONFile';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? 
[]) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n importMode: dictionary.importMode,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n 
] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file 
extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n const tempPath = `${absoluteFilePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n await rename(tempPath, absoluteFilePath);\n\n return;\n }\n\n // Handle JSONC, and JSON5 via the AST transformer\n if (['.jsonc', '.json5'].includes(extension)) {\n let fileContent = '{}';\n\n if (existsSync(absoluteFilePath)) {\n try {\n fileContent = await readFile(absoluteFilePath, 'utf-8');\n } catch {\n // ignore read errors, start with empty object\n }\n }\n\n const transformedContent = transformJSONFile(fileContent, dictionary);\n\n // We use standard writeFile because transformedContent is already a string\n const tempPath = `${absoluteFilePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, transformedContent, 'utf-8');\n await rename(tempPath, absoluteFilePath);\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.system.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch 
{}\n};\n"],"mappings":"2dAwBA,MAAM,EAA2B,MAC/B,EACA,EACA,IACG,CAIH,IAAM,EACJ,MAAMA,EAAAA,iCAAiC,EAAW,CAEhD,EAAU,EAAoB,QAM9B,EAAW,OACb,GAAA,EAAA,EAAA,wBACE,EACA,EAAW,OACZ,CAAC,QACO,IACT,GAAA,EAAA,EAAA,8BACE,EACA,EACD,CAAC,SAGJ,IAAI,EAA0B,CAC5B,GAAG,EACH,UACD,CAMD,UAAW,IAAM,KAAU,EAAc,SAAW,EAAE,CACpD,GAAI,EAAO,aAAc,CACvB,IAAM,EAAkB,MAAM,EAAO,eAAe,CAClD,WAAY,EACZ,gBACD,CAAC,CAEE,IACF,EAAqB,GAQ3B,GAAI,EAFF,EAAmB,SAAW,EAAmB,KAE1B,OAAO,EAEhC,IAAI,EAAqB,CACvB,IAAK,EAAW,IAChB,GAAI,EAAW,GACf,MAAO,EAAW,MAClB,YAAa,EAAW,YACxB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,SAAU,EAAW,SACrB,WAAY,EAAW,WACvB,QAAS,EAAW,QACpB,UACD,CAqBD,OAbeC,EAAAA,uBAFb,EAAW,UAAA,EAAA,EAAA,SAAmB,EAAW,SAAS,CAAG,QAEP,GAGnC,QACX,EAAmB,SACnB,EAAmB,MAEnB,EAAS,CACP,QAAS,mCACT,GAAG,EACJ,EAGI,GASH,EAAiB,CACrB,oBAAqB,wBACtB,CAEY,EAA0B,MACrC,EACA,EACA,IACwD,CACxD,GAAM,CAAE,WAAY,EACd,CAAE,WAAY,EACd,CAAE,sBAAqB,cAAe,CAC1C,GAAG,EACH,GAAG,EACJ,CAEK,GAAA,EAAA,EAAA,MAAiC,EAAS,EAAoB,CAO9D,GAAA,EAAA,EAAA,yBALqD,EAAc,CAEvE,EAAW,MAGoC,KAC9C,GAAO,EAAG,UAAY,EAAW,QACnC,CAEK,EAA8B,MAAM,EACxC,EACA,EACA,EACD,CAED,GAAI,GAAoB,SAAU,CAEhC,IAAM,GAAA,EAAA,EAAA,mBAAkC,EAAoB,EAAW,CAEjE,GAAA,EAAA,EAAA,SACJ,EAAc,QAAQ,QACtB,EAAmB,SACpB,CAgBD,OAbI,EACK,CACL,OAAQ,aACR,KAAM,EACP,EAGH,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,EAG9C,GAAI,EAAW,SAAU,CACvB,IAAM,GAAA,EAAA,EAAA,SACJ,EAAc,QAAQ,QACtB,EAAW,SACZ,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,CAI9C,IAAM,GAAA,EAAA,EAAA,MACJ,EACA,GAAG,EAAW,IAAI,eACnB,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CACL,OAAQ,WACR,KAAM,EACP,EAGG,EAA2B,MAC/B,EACA,EACA,IACkB,CAKlB,MAAA,EAAA,EAAA,QAAA,EAAA,EAAA,SAHoB,EAAiB,CAGpB,CAAE,UAAW,GAAM,CAAC,CAErC,IAAM,GAAA,EAAA,EAAA,SAAoB,EAAiB,CAK3C,GAAI,CAJuB,EAAc,QAAQ,eAAe,IAC7D,IAAA,EAAA,EAAA,SAAsB,EAAU,CAClC,CAEuB,SAAS,EAAU,CACzC,MAAU,MACR,2BAA2B,EAAU,UAAU,IAChD,CAGH,GAAI,IAAc,QAAS,CACzB,IAAM,EAAiB,KAAK,UAAU,EAAY,KAAM,EAAE,CAGpD,EAAW,GAAG,EAAiB,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAC1F,MAAA
,EAAA,EAAA,WAAgB,EAAU,GAAG,EAAe,IAAI,CAChD,MAAA,EAAA,EAAA,QAAa,EAAU,EAAiB,CAExC,OAIF,GAAI,CAAC,SAAU,SAAS,CAAC,SAAS,EAAU,CAAE,CAC5C,IAAI,EAAc,KAElB,IAAA,EAAA,EAAA,YAAe,EAAiB,CAC9B,GAAI,CACF,EAAc,MAAA,EAAA,EAAA,UAAe,EAAkB,QAAQ,MACjD,EAKV,IAAM,EAAqBC,EAAAA,kBAAkB,EAAa,EAAW,CAG/D,EAAW,GAAG,EAAiB,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAC1F,MAAA,EAAA,EAAA,WAAgB,EAAU,EAAoB,QAAQ,CACtD,MAAA,EAAA,EAAA,QAAa,EAAU,EAAiB,CACxC,OAGF,MAAMC,EAAAA,YAAY,EAAkB,EAAY,EAAc,CAI9D,GAAI,CAKF,MAAA,EAAA,EAAA,KAAA,EAAA,EAAA,MAHE,EAAc,OAAO,SACrB,yBACD,CACsB,CAAE,UAAW,GAAM,CAAC,MACrC"}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`}),require(`../_virtual/_rolldown/runtime.cjs`);const e=require(`./detectFormatCommand.cjs`),t=require(`./transformJSFile.cjs`),n=require(`../utils/getFormatFromExtension.cjs`),r=require(`../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs`);let i=require(`node:fs/promises`),a=require(`node:path`),o=require(`@intlayer/config/logger`),s=require(`node:fs`),c=require(`node:child_process`);const l=async(l,u,d)=>{let f={...d.dictionary,...u},p=(0,o.getAppLogger)(d);if(!(0,s.existsSync)(l)){let e=n.getFormatFromExtension((0,a.extname)(l));p(`File does not exist, creating it`,{isVerbose:!0})
|
|
1
|
+
Object.defineProperty(exports,Symbol.toStringTag,{value:`Module`}),require(`../_virtual/_rolldown/runtime.cjs`);const e=require(`./detectFormatCommand.cjs`),t=require(`./transformJSFile.cjs`),n=require(`../utils/getFormatFromExtension.cjs`),r=require(`../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs`);let i=require(`node:fs/promises`),a=require(`node:path`),o=require(`@intlayer/config/logger`),s=require(`node:fs`),c=require(`node:child_process`);const l=async(l,u,d)=>{let f={...d.dictionary,...u},p=(0,o.getAppLogger)(d);if(!(0,s.existsSync)(l)){let e=n.getFormatFromExtension((0,a.extname)(l));p(`File does not exist, creating it`,{isVerbose:!0});let t=await r.getContentDeclarationFileTemplate(f.key,e,Object.fromEntries(Object.entries({id:f.id,locale:f.locale,filled:f.filled,fill:f.fill,description:f.description,title:f.title,tags:f.tags,version:f.version,priority:f.priority,importMode:f.importMode}).filter(([,e])=>e!==void 0))),o=`${l}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;await(0,i.writeFile)(o,t,`utf-8`),await(0,i.rename)(o,l)}let m=await(0,i.readFile)(l,`utf-8`);if(m===``){let e=n.getFormatFromExtension((0,a.extname)(l));m=await r.getContentDeclarationFileTemplate(f.key,e)}let h=await t.transformJSFile(m,u);try{let e=`${l}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;await(0,i.writeFile)(e,h,`utf-8`),await(0,i.rename)(e,l),(0,o.logger)(`Successfully updated ${l}`,{level:`info`,isVerbose:!0})}catch(e){let t=e;throw(0,o.logger)(`Failed to write updated file: ${l}`,{level:`error`}),Error(`Failed to write updated file ${l}: ${t.message}`)}let g=e.detectFormatCommand(d);if(g)try{(0,c.execSync)(g.replace(`{{file}}`,l),{stdio:`inherit`,cwd:d.content.baseDir})}catch(e){console.error(e)}};exports.writeJSFile=l;
|
|
2
2
|
//# sourceMappingURL=writeJSFile.cjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"writeJSFile.cjs","names":["getFormatFromExtension","getContentDeclarationFileTemplate","transformJSFile","detectFormatCommand"],"sources":["../../../src/writeContentDeclaration/writeJSFile.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { readFile, writeFile } from 'node:fs/promises';\nimport { extname } from 'node:path';\nimport { getAppLogger, logger } from '@intlayer/config/logger';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { getContentDeclarationFileTemplate } from '../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport { detectFormatCommand } from './detectFormatCommand';\nimport { transformJSFile } from './transformJSFile';\n\n/**\n * Updates a JavaScript/TypeScript file based on the provided JSON instructions.\n * It targets a specific dictionary object within the file (identified by its 'key' property)\n * and updates its 'content' entries. 
Currently, it focuses on modifying arguments\n * of 't' (translation) function calls.\n */\nexport const writeJSFile = async (\n filePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n const mergedDictionary = {\n ...configuration.dictionary,\n ...dictionary,\n };\n\n const appLogger = getAppLogger(configuration);\n\n // Check if the file exist\n if (!existsSync(filePath)) {\n const fileExtension = extname(filePath) as Extension;\n\n const format = getFormatFromExtension(fileExtension);\n\n appLogger('File does not exist, creating it', {\n isVerbose: true,\n });\n const template = await getContentDeclarationFileTemplate(\n mergedDictionary.key,\n format,\n // Filter out undefined values\n Object.fromEntries(\n Object.entries({\n id: mergedDictionary.id,\n locale: mergedDictionary.locale,\n filled: mergedDictionary.filled,\n fill: mergedDictionary.fill,\n description: mergedDictionary.description,\n title: mergedDictionary.title,\n tags: mergedDictionary.tags,\n version: mergedDictionary.version,\n priority: mergedDictionary.priority,\n importMode: mergedDictionary.importMode,\n }).filter(([, value]) => value !== undefined)\n )\n );\n\n await writeFile(
|
|
1
|
+
{"version":3,"file":"writeJSFile.cjs","names":["getFormatFromExtension","getContentDeclarationFileTemplate","transformJSFile","detectFormatCommand"],"sources":["../../../src/writeContentDeclaration/writeJSFile.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { readFile, rename, writeFile } from 'node:fs/promises';\nimport { extname } from 'node:path';\nimport { getAppLogger, logger } from '@intlayer/config/logger';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { getContentDeclarationFileTemplate } from '../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport { detectFormatCommand } from './detectFormatCommand';\nimport { transformJSFile } from './transformJSFile';\n\n/**\n * Updates a JavaScript/TypeScript file based on the provided JSON instructions.\n * It targets a specific dictionary object within the file (identified by its 'key' property)\n * and updates its 'content' entries. 
Currently, it focuses on modifying arguments\n * of 't' (translation) function calls.\n */\nexport const writeJSFile = async (\n filePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n const mergedDictionary = {\n ...configuration.dictionary,\n ...dictionary,\n };\n\n const appLogger = getAppLogger(configuration);\n\n // Check if the file exist\n if (!existsSync(filePath)) {\n const fileExtension = extname(filePath) as Extension;\n\n const format = getFormatFromExtension(fileExtension);\n\n appLogger('File does not exist, creating it', {\n isVerbose: true,\n });\n const template = await getContentDeclarationFileTemplate(\n mergedDictionary.key,\n format,\n // Filter out undefined values\n Object.fromEntries(\n Object.entries({\n id: mergedDictionary.id,\n locale: mergedDictionary.locale,\n filled: mergedDictionary.filled,\n fill: mergedDictionary.fill,\n description: mergedDictionary.description,\n title: mergedDictionary.title,\n tags: mergedDictionary.tags,\n version: mergedDictionary.version,\n priority: mergedDictionary.priority,\n importMode: mergedDictionary.importMode,\n }).filter(([, value]) => value !== undefined)\n )\n );\n\n const tempPath = `${filePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, template, 'utf-8');\n await rename(tempPath, filePath);\n }\n\n let fileContent = await readFile(filePath, 'utf-8');\n\n if (fileContent === '') {\n const format = getFormatFromExtension(extname(filePath) as Extension);\n\n fileContent = await getContentDeclarationFileTemplate(\n mergedDictionary.key,\n format\n );\n }\n\n const finalCode = await transformJSFile(fileContent, dictionary);\n\n // Write the modified code back to the file\n try {\n const tempPath = `${filePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, finalCode, 'utf-8');\n await rename(tempPath, filePath);\n logger(`Successfully updated ${filePath}`, {\n level: 
'info',\n isVerbose: true,\n });\n } catch (error) {\n const err = error as Error;\n logger(`Failed to write updated file: ${filePath}`, {\n level: 'error',\n });\n throw new Error(`Failed to write updated file ${filePath}: ${err.message}`);\n }\n\n const formatCommand = detectFormatCommand(configuration);\n\n if (formatCommand) {\n try {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'inherit',\n cwd: configuration.content.baseDir,\n });\n } catch (error) {\n console.error(error);\n }\n }\n};\n"],"mappings":"4dAoBA,MAAa,EAAc,MACzB,EACA,EACA,IACkB,CAClB,IAAM,EAAmB,CACvB,GAAG,EAAc,WACjB,GAAG,EACJ,CAEK,GAAA,EAAA,EAAA,cAAyB,EAAc,CAG7C,GAAI,EAAA,EAAA,EAAA,YAAY,EAAS,CAAE,CAGzB,IAAM,EAASA,EAAAA,wBAAAA,EAAAA,EAAAA,SAFe,EAAS,CAEa,CAEpD,EAAU,mCAAoC,CAC5C,UAAW,GACZ,CAAC,CACF,IAAM,EAAW,MAAMC,EAAAA,kCACrB,EAAiB,IACjB,EAEA,OAAO,YACL,OAAO,QAAQ,CACb,GAAI,EAAiB,GACrB,OAAQ,EAAiB,OACzB,OAAQ,EAAiB,OACzB,KAAM,EAAiB,KACvB,YAAa,EAAiB,YAC9B,MAAO,EAAiB,MACxB,KAAM,EAAiB,KACvB,QAAS,EAAiB,QAC1B,SAAU,EAAiB,SAC3B,WAAY,EAAiB,WAC9B,CAAC,CAAC,QAAQ,EAAG,KAAW,IAAU,IAAA,GAAU,CAC9C,CACF,CAEK,EAAW,GAAG,EAAS,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAClF,MAAA,EAAA,EAAA,WAAgB,EAAU,EAAU,QAAQ,CAC5C,MAAA,EAAA,EAAA,QAAa,EAAU,EAAS,CAGlC,IAAI,EAAc,MAAA,EAAA,EAAA,UAAe,EAAU,QAAQ,CAEnD,GAAI,IAAgB,GAAI,CACtB,IAAM,EAASD,EAAAA,wBAAAA,EAAAA,EAAAA,SAA+B,EAAS,CAAc,CAErE,EAAc,MAAMC,EAAAA,kCAClB,EAAiB,IACjB,EACD,CAGH,IAAM,EAAY,MAAMC,EAAAA,gBAAgB,EAAa,EAAW,CAGhE,GAAI,CACF,IAAM,EAAW,GAAG,EAAS,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAClF,MAAA,EAAA,EAAA,WAAgB,EAAU,EAAW,QAAQ,CAC7C,MAAA,EAAA,EAAA,QAAa,EAAU,EAAS,EAChC,EAAA,EAAA,QAAO,wBAAwB,IAAY,CACzC,MAAO,OACP,UAAW,GACZ,CAAC,OACK,EAAO,CACd,IAAM,EAAM,EAIZ,MAHA,EAAA,EAAA,QAAO,iCAAiC,IAAY,CAClD,MAAO,QACR,CAAC,CACQ,MAAM,gCAAgC,EAAS,IAAI,EAAI,UAAU,CAG7E,IAAM,EAAgBC,EAAAA,oBAAoB,EAAc,CAExD,GAAI,EACF,GAAI,EACF,EAAA,EAAA,UAAS,EAAc,QAAQ,WAAY,EAAS,CAAE,CACpD,MAAO,UACP,IAAK,EAAc,QAAQ,QAC5B,CAAC,OACK,EAAO,CACd,
QAAQ,MAAM,EAAM"}
|
package/dist/esm/index.mjs
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{Queue as e,pLimit as t}from"./utils/pLimit.mjs";import{parallelize as n}from"./utils/parallelize.mjs";import{formatLocale as r,formatPath as i}from"./utils/formatter.mjs";import{buildDictionary as a}from"./buildIntlayerDictionary/buildIntlayerDictionary.mjs";import{autoDecorateContent as o}from"./utils/autoDecorateContent.mjs";import{resolveObjectPromises as s}from"./utils/resolveObjectPromises.mjs";import{processContentDeclaration as c}from"./buildIntlayerDictionary/processContentDeclaration.mjs";import{cleanOutputDir as l}from"./cleanOutputDir.mjs";import{getFileHash as u}from"./utils/getFileHash.mjs";import{generateDictionaryListContent as d}from"./createDictionaryEntryPoint/generateDictionaryListContent.mjs";import{getBuiltDictionariesPath as f}from"./createDictionaryEntryPoint/getBuiltDictionariesPath.mjs";import{getBuiltDynamicDictionariesPath as p}from"./createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.mjs";import{getBuiltFetchDictionariesPath as m}from"./createDictionaryEntryPoint/getBuiltFetchDictionariesPath.mjs";import{getBuiltRemoteDictionariesPath as h}from"./createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.mjs";import{getBuiltUnmergedDictionariesPath as g}from"./createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.mjs";import{createDictionaryEntryPoint as _}from"./createDictionaryEntryPoint/createDictionaryEntryPoint.mjs";import{createModuleAugmentation as v,getTypeName as y}from"./createType/createModuleAugmentation.mjs";import{createTypes as b,generateTypeScriptType as x}from"./createType/createType.mjs";import{formatDictionaries as S,formatDictionariesOutput as C,formatDictionary as w,formatDictionaryOutput as T}from"./formatDictionary.mjs";import{formatLocalDictionaries as E,loadContentDeclarations as D}from"./loadDictionaries/loadContentDeclaration.mjs";import{sortAlphabetically as O}from"./utils/sortAlphabetically.mjs";import{formatDistantDictionaries as k,loadRemoteDictionaries as 
A}from"./loadDictionaries/loadRemoteDictionaries.mjs";import{loadDictionaries as j}from"./loadDictionaries/loadDictionaries.mjs";import{loadLocalDictionaries as M}from"./loadDictionaries/loadLocalDictionaries.mjs";import{runOnce as N}from"./utils/runOnce.mjs";import{isCachedConfigurationUpToDate as P,writeConfiguration as F}from"./writeConfiguration/index.mjs";import{prepareIntlayer as I}from"./prepareIntlayer.mjs";import{detectExportedComponentName as L}from"./writeContentDeclaration/detectExportedComponentName.mjs";import{detectFormatCommand as R}from"./writeContentDeclaration/detectFormatCommand.mjs";import{transformJSFile as z}from"./writeContentDeclaration/transformJSFile.mjs";import{getExtensionFromFormat as B,getFormatFromExtension as V}from"./utils/getFormatFromExtension.mjs";import{writeJSFile as H}from"./writeContentDeclaration/writeJSFile.mjs";import{writeContentDeclaration as U}from"./writeContentDeclaration/writeContentDeclaration.mjs";import{initIntlayer as W}from"./init/index.mjs";import{SKILLS as G,SKILLS_METADATA as K,installSkills as q}from"./installSkills/index.mjs";import{listGitFiles as J,listGitLines as Y}from"./listGitFiles.mjs";import{listProjects as X}from"./listProjects.mjs";import{extractDictionaryKey as Z}from"./transformFiles/extractDictionaryKey.mjs";import{ATTRIBUTES_TO_EXTRACT as Q,extractIntlayer as $,generateKey as ee,shouldExtract as te,transformFiles as ne}from"./transformFiles/transformFiles.mjs";import{buildFilesList as re}from"./utils/buildFilesList.mjs";import{splitTextByLines as ie}from"./utils/splitTextByLine.mjs";import{getChunk as ae}from"./utils/getChunk.mjs";import{assembleJSON as oe,chunkJSON as se,reconstructFromSingleChunk as ce}from"./utils/chunkJSON.mjs";import{getComponentTransformPattern as le,getComponentTransformPatternSync as ue}from"./utils/getComponentTransformPattern.mjs";import{getGlobalLimiter as
|
|
1
|
+
import{Queue as e,pLimit as t}from"./utils/pLimit.mjs";import{parallelize as n}from"./utils/parallelize.mjs";import{formatLocale as r,formatPath as i}from"./utils/formatter.mjs";import{buildDictionary as a}from"./buildIntlayerDictionary/buildIntlayerDictionary.mjs";import{autoDecorateContent as o}from"./utils/autoDecorateContent.mjs";import{resolveObjectPromises as s}from"./utils/resolveObjectPromises.mjs";import{processContentDeclaration as c}from"./buildIntlayerDictionary/processContentDeclaration.mjs";import{cleanOutputDir as l}from"./cleanOutputDir.mjs";import{getFileHash as u}from"./utils/getFileHash.mjs";import{generateDictionaryListContent as d}from"./createDictionaryEntryPoint/generateDictionaryListContent.mjs";import{getBuiltDictionariesPath as f}from"./createDictionaryEntryPoint/getBuiltDictionariesPath.mjs";import{getBuiltDynamicDictionariesPath as p}from"./createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.mjs";import{getBuiltFetchDictionariesPath as m}from"./createDictionaryEntryPoint/getBuiltFetchDictionariesPath.mjs";import{getBuiltRemoteDictionariesPath as h}from"./createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.mjs";import{getBuiltUnmergedDictionariesPath as g}from"./createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.mjs";import{createDictionaryEntryPoint as _}from"./createDictionaryEntryPoint/createDictionaryEntryPoint.mjs";import{createModuleAugmentation as v,getTypeName as y}from"./createType/createModuleAugmentation.mjs";import{createTypes as b,generateTypeScriptType as x}from"./createType/createType.mjs";import{formatDictionaries as S,formatDictionariesOutput as C,formatDictionary as w,formatDictionaryOutput as T}from"./formatDictionary.mjs";import{formatLocalDictionaries as E,loadContentDeclarations as D}from"./loadDictionaries/loadContentDeclaration.mjs";import{sortAlphabetically as O}from"./utils/sortAlphabetically.mjs";import{formatDistantDictionaries as k,loadRemoteDictionaries as 
A}from"./loadDictionaries/loadRemoteDictionaries.mjs";import{loadDictionaries as j}from"./loadDictionaries/loadDictionaries.mjs";import{loadLocalDictionaries as M}from"./loadDictionaries/loadLocalDictionaries.mjs";import{runOnce as N}from"./utils/runOnce.mjs";import{isCachedConfigurationUpToDate as P,writeConfiguration as F}from"./writeConfiguration/index.mjs";import{prepareIntlayer as I}from"./prepareIntlayer.mjs";import{detectExportedComponentName as L}from"./writeContentDeclaration/detectExportedComponentName.mjs";import{detectFormatCommand as R}from"./writeContentDeclaration/detectFormatCommand.mjs";import{transformJSFile as z}from"./writeContentDeclaration/transformJSFile.mjs";import{getExtensionFromFormat as B,getFormatFromExtension as V}from"./utils/getFormatFromExtension.mjs";import{writeJSFile as H}from"./writeContentDeclaration/writeJSFile.mjs";import{writeContentDeclaration as U}from"./writeContentDeclaration/writeContentDeclaration.mjs";import{initIntlayer as W}from"./init/index.mjs";import{SKILLS as G,SKILLS_METADATA as K,installSkills as q}from"./installSkills/index.mjs";import{listGitFiles as J,listGitLines as Y}from"./listGitFiles.mjs";import{listProjects as X}from"./listProjects.mjs";import{extractDictionaryKey as Z}from"./transformFiles/extractDictionaryKey.mjs";import{ATTRIBUTES_TO_EXTRACT as Q,extractIntlayer as $,generateKey as ee,shouldExtract as te,transformFiles as ne}from"./transformFiles/transformFiles.mjs";import{buildFilesList as re}from"./utils/buildFilesList.mjs";import{splitTextByLines as ie}from"./utils/splitTextByLine.mjs";import{getChunk as ae}from"./utils/getChunk.mjs";import{assembleJSON as oe,chunkJSON as se,reconstructFromSingleChunk as ce}from"./utils/chunkJSON.mjs";import{getComponentTransformPattern as le,getComponentTransformPatternSync as ue}from"./utils/getComponentTransformPattern.mjs";import{mergeChunks as de}from"./utils/mergeChunks.mjs";import{getGlobalLimiter as fe,getTaskLimiter as pe,parallelizeGlobal as 
me}from"./utils/parallelizeGlobal.mjs";import{reduceObjectFormat as he}from"./utils/reduceObjectFormat.mjs";import{runParallel as ge}from"./utils/runParallel/index.mjs";import{verifyIdenticObjectFormat as _e}from"./utils/verifyIdenticObjectFormat.mjs";import{buildAndWatchIntlayer as ve,watch as ye}from"./watcher.mjs";export{Q as ATTRIBUTES_TO_EXTRACT,e as Queue,G as SKILLS,K as SKILLS_METADATA,oe as assembleJSON,o as autoDecorateContent,ve as buildAndWatchIntlayer,a as buildDictionary,re as buildFilesList,se as chunkJSON,l as cleanOutputDir,_ as createDictionaryEntryPoint,v as createModuleAugmentation,b as createTypes,L as detectExportedComponentName,R as detectFormatCommand,Z as extractDictionaryKey,$ as extractIntlayer,S as formatDictionaries,C as formatDictionariesOutput,w as formatDictionary,T as formatDictionaryOutput,k as formatDistantDictionaries,E as formatLocalDictionaries,r as formatLocale,i as formatPath,d as generateDictionaryListContent,ee as generateKey,x as generateTypeScriptType,f as getBuiltDictionariesPath,p as getBuiltDynamicDictionariesPath,m as getBuiltFetchDictionariesPath,h as getBuiltRemoteDictionariesPath,g as getBuiltUnmergedDictionariesPath,ae as getChunk,le as getComponentTransformPattern,ue as getComponentTransformPatternSync,B as getExtensionFromFormat,u as getFileHash,V as getFormatFromExtension,fe as getGlobalLimiter,pe as getTaskLimiter,y as getTypeName,W as initIntlayer,q as installSkills,P as isCachedConfigurationUpToDate,J as listGitFiles,Y as listGitLines,X as listProjects,D as loadContentDeclarations,j as loadDictionaries,M as loadLocalDictionaries,A as loadRemoteDictionaries,de as mergeChunks,t as pLimit,n as parallelize,me as parallelizeGlobal,I as prepareIntlayer,c as processContentDeclaration,ce as reconstructFromSingleChunk,he as reduceObjectFormat,s as resolveObjectPromises,N as runOnce,ge as runParallel,te as shouldExtract,O as sortAlphabetically,ie as splitTextByLines,ne as transformFiles,z as transformJSFile,_e as 
verifyIdenticObjectFormat,ye as watch,F as writeConfiguration,U as writeContentDeclaration,H as writeJSFile};
|
package/dist/esm/utils/index.mjs
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
import{Queue as e,pLimit as t}from"./pLimit.mjs";import{parallelize as n}from"./parallelize.mjs";import{formatLocale as r,formatPath as i}from"./formatter.mjs";import{autoDecorateContent as a}from"./autoDecorateContent.mjs";import{resolveObjectPromises as o}from"./resolveObjectPromises.mjs";import{getFileHash as s}from"./getFileHash.mjs";import{sortAlphabetically as c}from"./sortAlphabetically.mjs";import{runOnce as l}from"./runOnce.mjs";import{getExtensionFromFormat as u,getFormatFromExtension as d}from"./getFormatFromExtension.mjs";import{buildFilesList as f}from"./buildFilesList.mjs";import{splitTextByLines as p}from"./splitTextByLine.mjs";import{getChunk as m}from"./getChunk.mjs";import{assembleJSON as h,chunkJSON as g,reconstructFromSingleChunk as _}from"./chunkJSON.mjs";import{getComponentTransformPattern as v,getComponentTransformPatternSync as y}from"./getComponentTransformPattern.mjs";import{getGlobalLimiter as
|
|
1
|
+
import{Queue as e,pLimit as t}from"./pLimit.mjs";import{parallelize as n}from"./parallelize.mjs";import{formatLocale as r,formatPath as i}from"./formatter.mjs";import{autoDecorateContent as a}from"./autoDecorateContent.mjs";import{resolveObjectPromises as o}from"./resolveObjectPromises.mjs";import{getFileHash as s}from"./getFileHash.mjs";import{sortAlphabetically as c}from"./sortAlphabetically.mjs";import{runOnce as l}from"./runOnce.mjs";import{getExtensionFromFormat as u,getFormatFromExtension as d}from"./getFormatFromExtension.mjs";import{buildFilesList as f}from"./buildFilesList.mjs";import{splitTextByLines as p}from"./splitTextByLine.mjs";import{getChunk as m}from"./getChunk.mjs";import{assembleJSON as h,chunkJSON as g,reconstructFromSingleChunk as _}from"./chunkJSON.mjs";import{getComponentTransformPattern as v,getComponentTransformPatternSync as y}from"./getComponentTransformPattern.mjs";import{mergeChunks as b}from"./mergeChunks.mjs";import{getGlobalLimiter as x,getTaskLimiter as S,parallelizeGlobal as C}from"./parallelizeGlobal.mjs";import{reduceObjectFormat as w}from"./reduceObjectFormat.mjs";import{runParallel as T}from"./runParallel/index.mjs";import{verifyIdenticObjectFormat as E}from"./verifyIdenticObjectFormat.mjs";export{e as Queue,h as assembleJSON,a as autoDecorateContent,f as buildFilesList,g as chunkJSON,r as formatLocale,i as formatPath,m as getChunk,v as getComponentTransformPattern,y as getComponentTransformPatternSync,u as getExtensionFromFormat,s as getFileHash,d as getFormatFromExtension,x as getGlobalLimiter,S as getTaskLimiter,b as mergeChunks,t as pLimit,n as parallelize,C as parallelizeGlobal,_ as reconstructFromSingleChunk,w as reduceObjectFormat,o as resolveObjectPromises,l as runOnce,T as runParallel,c as sortAlphabetically,p as splitTextByLines,E as verifyIdenticObjectFormat};
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
const e=e=>{if(e.length===0)return{};let n=e[0];for(let r=1;r<e.length;r++)n=t(n,e[r]);return n},t=(e,n)=>{if(e==null)return n;if(n==null)return e;if(Array.isArray(e)&&Array.isArray(n)){let r=Math.max(e.length,n.length),i=[];for(let a=0;a<r;a++)i[a]=t(e[a],n[a]);return i}if(typeof e==`object`&&typeof n==`object`){let r={...e};for(let e of Object.keys(n))r[e]=t(r[e],n[e]);return r}return e};export{e as mergeChunks};
|
|
2
|
+
//# sourceMappingURL=mergeChunks.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"mergeChunks.mjs","names":[],"sources":["../../../src/utils/mergeChunks.ts"],"sourcesContent":["export const mergeChunks = (chunks: any[]): any => {\n if (chunks.length === 0) return {};\n\n let result = chunks[0];\n\n for (let i = 1; i < chunks.length; i++) {\n result = customChunkMerge(result, chunks[i]);\n }\n\n return result;\n};\n\nconst customChunkMerge = (dest: any, source: any): any => {\n if (dest === undefined || dest === null) return source;\n if (source === undefined || source === null) return dest;\n\n if (Array.isArray(dest) && Array.isArray(source)) {\n const maxLength = Math.max(dest.length, source.length);\n const result = [];\n for (let i = 0; i < maxLength; i++) {\n result[i] = customChunkMerge(dest[i], source[i]);\n }\n return result;\n }\n\n if (typeof dest === 'object' && typeof source === 'object') {\n const result: any = { ...dest };\n for (const key of Object.keys(source)) {\n result[key] = customChunkMerge(result[key], source[key]);\n }\n return result;\n }\n\n // Primitives: if we are here, both are not null.\n // Since chunks shouldn't overlap, we can return dest.\n return dest;\n};\n"],"mappings":"AAAA,MAAa,EAAe,GAAuB,CACjD,GAAI,EAAO,SAAW,EAAG,MAAO,EAAE,CAElC,IAAI,EAAS,EAAO,GAEpB,IAAK,IAAI,EAAI,EAAG,EAAI,EAAO,OAAQ,IACjC,EAAS,EAAiB,EAAQ,EAAO,GAAG,CAG9C,OAAO,GAGH,GAAoB,EAAW,IAAqB,CACxD,GAAI,GAA+B,KAAM,OAAO,EAChD,GAAI,GAAmC,KAAM,OAAO,EAEpD,GAAI,MAAM,QAAQ,EAAK,EAAI,MAAM,QAAQ,EAAO,CAAE,CAChD,IAAM,EAAY,KAAK,IAAI,EAAK,OAAQ,EAAO,OAAO,CAChD,EAAS,EAAE,CACjB,IAAK,IAAI,EAAI,EAAG,EAAI,EAAW,IAC7B,EAAO,GAAK,EAAiB,EAAK,GAAI,EAAO,GAAG,CAElD,OAAO,EAGT,GAAI,OAAO,GAAS,UAAY,OAAO,GAAW,SAAU,CAC1D,IAAM,EAAc,CAAE,GAAG,EAAM,CAC/B,IAAK,IAAM,KAAO,OAAO,KAAK,EAAO,CACnC,EAAO,GAAO,EAAiB,EAAO,GAAM,EAAO,GAAK,CAE1D,OAAO,EAKT,OAAO"}
|
package/dist/esm/watcher.mjs
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import{prepareIntlayer as e}from"./prepareIntlayer.mjs";import{writeContentDeclaration as t}from"./writeContentDeclaration/writeContentDeclaration.mjs";import{handleAdditionalContentDeclarationFile as n}from"./handleAdditionalContentDeclarationFile.mjs";import{handleContentDeclarationFileChange as r}from"./handleContentDeclarationFileChange.mjs";import{handleContentDeclarationFileMoved as i}from"./handleContentDeclarationFileMoved.mjs";import{handleUnlinkedContentDeclarationFile as a}from"./handleUnlinkedContentDeclarationFile.mjs";import{getConfiguration as o}from"@intlayer/config/node";import{readFile as s}from"node:fs/promises";import{basename as c}from"node:path";import{getAppLogger as l}from"@intlayer/config/logger";import{watch as u}from"chokidar";const d=new Map
|
|
1
|
+
import{prepareIntlayer as e}from"./prepareIntlayer.mjs";import{writeContentDeclaration as t}from"./writeContentDeclaration/writeContentDeclaration.mjs";import{handleAdditionalContentDeclarationFile as n}from"./handleAdditionalContentDeclarationFile.mjs";import{handleContentDeclarationFileChange as r}from"./handleContentDeclarationFileChange.mjs";import{handleContentDeclarationFileMoved as i}from"./handleContentDeclarationFileMoved.mjs";import{handleUnlinkedContentDeclarationFile as a}from"./handleUnlinkedContentDeclarationFile.mjs";import{getConfiguration as o}from"@intlayer/config/node";import{readFile as s}from"node:fs/promises";import{basename as c}from"node:path";import{getAppLogger as l}from"@intlayer/config/logger";import{watch as u}from"chokidar";const d=new Map;let f=Promise.resolve();const p=e=>{f=f.then(async()=>{try{await e()}catch(e){console.error(e)}})},m=f=>{let m=f?.configuration??o(f?.configOptions),h=l(m),{watch:g,watchedFilesPatternWithPath:_,fileExtensions:v}=m.content;return u(_,{persistent:g,ignoreInitial:!0,awaitWriteFinish:{stabilityThreshold:1e3,pollInterval:100},ignored:[`**/node_modules/**`,`**/dist/**`,`**/build/**`,`**/.intlayer/**`],...f}).on(`add`,async e=>{let r=c(e),a=!1,o;for(let[e]of d)if(c(e)===r){o=e;break}if(!o&&d.size===1&&(o=d.keys().next().value),o){let t=d.get(o);t&&(clearTimeout(t.timer),d.delete(o)),a=!0,h(`File moved from ${o} to ${e}`)}p(async()=>{if(a&&o)await i(o,e,m);else{if(await s(e,`utf-8`)===``){let n=v.map(e=>e.replace(/\./g,`\\.`)).join(`|`);await t({key:r.replace(RegExp(`(${n})$`),``),content:{},filePath:e},m)}await n(e,m)}})}).on(`change`,async e=>p(async()=>r(e,m))).on(`unlink`,async e=>{let t=setTimeout(async()=>{d.delete(e),p(async()=>a(e,m))},200);d.set(e,{timer:t,oldPath:e})}).on(`error`,async t=>{h(`Watcher error: ${t}`,{level:`error`}),h(`Restarting watcher`),await e(m)})},h=async t=>{let{skipPrepare:n,...r}=t??{},i=t?.configuration??o(t?.configOptions);n||await 
e(i,{forceRun:!0}),(i.content.watch||t?.persistent)&&(l(i)(`Watching Intlayer content declarations`),m({...r,configuration:i}))};export{h as buildAndWatchIntlayer,m as watch};
|
|
2
2
|
//# sourceMappingURL=watcher.mjs.map
|
package/dist/esm/watcher.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"watcher.mjs","names":["chokidarWatch"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises';\nimport { basename } from 'node:path';\nimport { getAppLogger } from '@intlayer/config/logger';\nimport {\n type GetConfigurationOptions,\n getConfiguration,\n} from '@intlayer/config/node';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions' */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleContentDeclarationFileMoved } from './handleContentDeclarationFileMoved';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\nimport { writeContentDeclaration } from './writeContentDeclaration';\n\n// Map to track files that were recently unlinked: oldPath -> { timer, timestamp }\nconst pendingUnlinks = new Map<\n string,\n { timer: NodeJS.Timeout; oldPath: string }\n>();\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration: IntlayerConfig =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const {\n watch: isWatchMode,\n watchedFilesPatternWithPath,\n fileExtensions,\n } = configuration.content;\n\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n let isMove = false;\n\n // Check if this Add corresponds to a pending Unlink (Move/Rename detection)\n // Heuristic:\n // - Priority A: Exact basename match (Moved to different folder)\n // - Priority B: Single entry in pendingUnlinks (Renamed file)\n let matchedOldPath: string | undefined;\n\n // Search for basename match\n for (const [oldPath] of pendingUnlinks) {\n if (basename(oldPath) === fileName) {\n matchedOldPath = oldPath;\n break;\n }\n }\n\n // If no basename match, but exactly one file was recently unlinked, assume it's a rename\n if (!matchedOldPath && pendingUnlinks.size === 1) {\n matchedOldPath = pendingUnlinks.keys().next().value;\n }\n\n if (matchedOldPath) {\n // It is a move! Cancel the unlink handler\n const pending = pendingUnlinks.get(matchedOldPath);\n if (pending) {\n clearTimeout(pending.timer);\n pendingUnlinks.delete(matchedOldPath);\n }\n\n isMove = true;\n appLogger(`File moved from ${matchedOldPath} to ${filePath}`);\n\n await handleContentDeclarationFileMoved(\n
|
|
1
|
+
{"version":3,"file":"watcher.mjs","names":["chokidarWatch"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises';\nimport { basename } from 'node:path';\nimport { getAppLogger } from '@intlayer/config/logger';\nimport {\n type GetConfigurationOptions,\n getConfiguration,\n} from '@intlayer/config/node';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions' */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleContentDeclarationFileMoved } from './handleContentDeclarationFileMoved';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\nimport { writeContentDeclaration } from './writeContentDeclaration';\n\n// Map to track files that were recently unlinked: oldPath -> { timer, timestamp }\nconst pendingUnlinks = new Map<\n string,\n { timer: NodeJS.Timeout; oldPath: string }\n>();\n\n// Task queue to ensure sequential processing of file events\nlet processingQueue = Promise.resolve();\nconst processEvent = (task: () => Promise<void>) => {\n processingQueue = processingQueue.then(async () => {\n try {\n await task();\n } catch (error) {\n console.error(error);\n }\n });\n};\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration: IntlayerConfig =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const {\n watch: isWatchMode,\n watchedFilesPatternWithPath,\n fileExtensions,\n } = configuration.content;\n\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n let isMove = false;\n\n // Check if this Add corresponds to a pending Unlink (Move/Rename detection)\n // Heuristic:\n // - Priority A: Exact basename match (Moved to different folder)\n // - Priority B: Single entry in pendingUnlinks (Renamed file)\n let matchedOldPath: string | undefined;\n\n // Search for basename match\n for (const [oldPath] of pendingUnlinks) {\n if (basename(oldPath) === fileName) {\n matchedOldPath = oldPath;\n break;\n }\n }\n\n // If no basename match, but exactly one file was recently unlinked, assume it's a rename\n if (!matchedOldPath && pendingUnlinks.size === 1) {\n matchedOldPath = pendingUnlinks.keys().next().value;\n }\n\n if (matchedOldPath) {\n // It is a move! 
Cancel the unlink handler\n const pending = pendingUnlinks.get(matchedOldPath);\n if (pending) {\n clearTimeout(pending.timer);\n pendingUnlinks.delete(matchedOldPath);\n }\n\n isMove = true;\n appLogger(`File moved from ${matchedOldPath} to ${filePath}`);\n }\n\n processEvent(async () => {\n if (isMove && matchedOldPath) {\n await handleContentDeclarationFileMoved(\n matchedOldPath,\n filePath,\n configuration\n );\n } else {\n const fileContent = await readFile(filePath, 'utf-8');\n const isEmpty = fileContent === '';\n\n // Fill template content declaration file if it is empty\n if (isEmpty) {\n const extensionPattern = fileExtensions\n .map((ext) => ext.replace(/\\./g, '\\\\.'))\n .join('|');\n const name = fileName.replace(\n new RegExp(`(${extensionPattern})$`),\n ''\n );\n\n await writeContentDeclaration(\n {\n key: name,\n content: {},\n filePath,\n },\n configuration\n );\n }\n\n await handleAdditionalContentDeclarationFile(filePath, configuration);\n }\n });\n })\n .on('change', async (filePath) =>\n processEvent(async () =>\n handleContentDeclarationFileChange(filePath, configuration)\n )\n )\n .on('unlink', async (filePath) => {\n // Delay unlink processing to see if an 'add' event occurs (indicating a move)\n const timer = setTimeout(async () => {\n // If timer fires, the file was genuinely removed\n pendingUnlinks.delete(filePath);\n processEvent(async () =>\n handleUnlinkedContentDeclarationFile(filePath, configuration)\n );\n }, 200); // 200ms window to catch the 'add' event\n\n pendingUnlinks.set(filePath, { timer, oldPath: filePath });\n })\n .on('error', async (error) => {\n appLogger(`Watcher error: ${error}`, {\n level: 'error',\n });\n\n appLogger('Restarting watcher');\n\n await prepareIntlayer(configuration);\n });\n};\n\nexport const buildAndWatchIntlayer = async (options?: WatchOptions) => {\n const { skipPrepare, ...rest } = options ?? {};\n const configuration =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n\n if (!skipPrepare) {\n await prepareIntlayer(configuration, { forceRun: true });\n }\n\n if (configuration.content.watch || options?.persistent) {\n const appLogger = getAppLogger(configuration);\n\n appLogger('Watching Intlayer content declarations');\n watch({ ...rest, configuration });\n }\n};\n"],"mappings":"4vBAkBA,MAAM,EAAiB,IAAI,IAM3B,IAAI,EAAkB,QAAQ,SAAS,CACvC,MAAM,EAAgB,GAA8B,CAClD,EAAkB,EAAgB,KAAK,SAAY,CACjD,GAAI,CACF,MAAM,GAAM,OACL,EAAO,CACd,QAAQ,MAAM,EAAM,GAEtB,EAUS,EAAS,GAA2B,CAC/C,IAAM,EACJ,GAAS,eAAiB,EAAiB,GAAS,cAAc,CAC9D,EAAY,EAAa,EAAc,CAEvC,CACJ,MAAO,EACP,8BACA,kBACE,EAAc,QAElB,OAAOA,EAAc,EAA6B,CAChD,WAAY,EACZ,cAAe,GACf,iBAAkB,CAChB,mBAAoB,IACpB,aAAc,IACf,CACD,QAAS,CACP,qBACA,aACA,cACA,kBACD,CACD,GAAG,EACJ,CAAC,CACC,GAAG,MAAO,KAAO,IAAa,CAC7B,IAAM,EAAW,EAAS,EAAS,CAC/B,EAAS,GAMT,EAGJ,IAAK,GAAM,CAAC,KAAY,EACtB,GAAI,EAAS,EAAQ,GAAK,EAAU,CAClC,EAAiB,EACjB,MASJ,GAJI,CAAC,GAAkB,EAAe,OAAS,IAC7C,EAAiB,EAAe,MAAM,CAAC,MAAM,CAAC,OAG5C,EAAgB,CAElB,IAAM,EAAU,EAAe,IAAI,EAAe,CAC9C,IACF,aAAa,EAAQ,MAAM,CAC3B,EAAe,OAAO,EAAe,EAGvC,EAAS,GACT,EAAU,mBAAmB,EAAe,MAAM,IAAW,CAG/D,EAAa,SAAY,CACvB,GAAI,GAAU,EACZ,MAAM,EACJ,EACA,EACA,EACD,KACI,CAKL,GAJoB,MAAM,EAAS,EAAU,QAAQ,GACrB,GAGnB,CACX,IAAM,EAAmB,EACtB,IAAK,GAAQ,EAAI,QAAQ,MAAO,MAAM,CAAC,CACvC,KAAK,IAAI,CAMZ,MAAM,EACJ,CACE,IAPS,EAAS,QAChB,OAAO,IAAI,EAAiB,IAAI,CACpC,GACD,CAKG,QAAS,EAAE,CACX,WACD,CACD,EACD,CAGH,MAAM,EAAuC,EAAU,EAAc,GAEvE,EACF,CACD,GAAG,SAAU,KAAO,IACnB,EAAa,SACX,EAAmC,EAAU,EAAc,CAC5D,CACF,CACA,GAAG,SAAU,KAAO,IAAa,CAEhC,IAAM,EAAQ,WAAW,SAAY,CAEnC,EAAe,OAAO,EAAS,CAC/B,EAAa,SACX,EAAqC,EAAU,EAAc,CAC9D,EACA,IAAI,CAEP,EAAe,IAAI,EAAU,CAAE,QAAO,QAAS,EAAU,CAAC,EAC1D,CACD,GAAG,QAAS,KAAO,IAAU,CAC5B,EAAU,kBAAkB,IAAS,CACnC,MAAO,QACR,CAAC,CAEF,EAAU,qBAAqB,CAE/B,MAAM,EAAgB,EAAc,EACpC,EAGO,EAAwB,KAAO,IAA2B,CACrE,GAAM,CAAE,cAAa,GAAG,GAAS,GAAW,EAAE,CACxC,EACJ,GAAS,eAAiB,EAAiB,GAAS,cAAc,CAE/D,GACH,MAAM,EAAgB,EAAe,CAAE,SAAU,GAAM,CAAC,EAGtD,EAAc,QAAQ,OAAS,GAAS,cACxB,EAAa,EAAc,CAEnC,yCAAyC,C
ACnD,EAAM,CAAE,GAAG,EAAM,gBAAe,CAAC"}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import{getFormatFromExtension as e}from"../utils/getFormatFromExtension.mjs";import{processContentDeclarationContent as t}from"./processContentDeclarationContent.mjs";import{transformJSONFile as n}from"./transformJSONFile.mjs";import{writeJSFile as r}from"./writeJSFile.mjs";import{getUnmergedDictionaries as i}from"@intlayer/unmerged-dictionaries-entry";import{mkdir as a,readFile as o,
|
|
1
|
+
import{getFormatFromExtension as e}from"../utils/getFormatFromExtension.mjs";import{processContentDeclarationContent as t}from"./processContentDeclarationContent.mjs";import{transformJSONFile as n}from"./transformJSONFile.mjs";import{writeJSFile as r}from"./writeJSFile.mjs";import{getUnmergedDictionaries as i}from"@intlayer/unmerged-dictionaries-entry";import{mkdir as a,readFile as o,rename as s,rm as c,writeFile as l}from"node:fs/promises";import{dirname as u,extname as d,join as f,resolve as p}from"node:path";import{getFilteredLocalesDictionary as m,getPerLocaleDictionary as h}from"@intlayer/core/plugins";import{existsSync as g}from"node:fs";import{isDeepStrictEqual as _}from"node:util";const v=async(n,r,i)=>{let a=await t(n),o=a.content;n.locale?o=h(a,n.locale).content:i&&(o=m(a,i).content);let s={...n,content:o};for await(let e of r.plugins??[])if(e.formatOutput){let t=await e.formatOutput?.({dictionary:s,configuration:r});t&&(s=t)}if(!(s.content&&s.key))return s;let c={key:n.key,id:n.id,title:n.title,description:n.description,tags:n.tags,locale:n.locale,fill:n.fill,filled:n.filled,priority:n.priority,importMode:n.importMode,version:n.version,content:o};return e(n.filePath?d(n.filePath):`.json`)===`json`&&s.content&&s.key&&(c={$schema:`https://intlayer.org/schema.json`,...c}),c},y={newDictionariesPath:`intlayer-dictionaries`},b=async(e,t,n)=>{let{content:r}=t,{baseDir:a}=r,{newDictionariesPath:o,localeList:s}={...y,...n},c=f(a,o),l=i(t)[e.key]?.find(t=>t.localId===e.localId),u=await v(e,t,s);if(l?.filePath){let n=_(l,e),r=p(t.content.baseDir,l.filePath);return n?{status:`up-to-date`,path:r}:(await x(r,u,t),{status:`updated`,path:r})}if(e.filePath){let n=p(t.content.baseDir,e.filePath);return await x(n,u,t),{status:`created`,path:n}}let d=f(c,`${e.key}.content.json`);return await x(d,u,t),{status:`imported`,path:d}},x=async(e,t,i)=>{await a(u(e),{recursive:!0});let p=d(e);if(!i.content.fileExtensions.map(e=>d(e)).includes(p))throw Error(`Invalid file extension: 
${p}, file: ${e}`);if(p===`.json`){let n=JSON.stringify(t,null,2),r=`${e}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;await l(r,`${n}\n`),await s(r,e);return}if([`.jsonc`,`.json5`].includes(p)){let r=`{}`;if(g(e))try{r=await o(e,`utf-8`)}catch{}let i=n(r,t),a=`${e}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;await l(a,i,`utf-8`),await s(a,e);return}await r(e,t,i);try{await c(f(i.system.cacheDir,`intlayer-prepared.lock`),{recursive:!0})}catch{}};export{b as writeContentDeclaration};
|
|
2
2
|
//# sourceMappingURL=writeContentDeclaration.mjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"writeContentDeclaration.mjs","names":[],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { mkdir, readFile, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core/plugins';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { transformJSONFile } from './transformJSONFile';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? 
[]) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n importMode: dictionary.importMode,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n 
] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file 
extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n await writeFile(absoluteFilePath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n\n return;\n }\n\n // Handle JSONC, and JSON5 via the AST transformer\n if (['.jsonc', '.json5'].includes(extension)) {\n let fileContent = '{}';\n\n if (existsSync(absoluteFilePath)) {\n try {\n fileContent = await readFile(absoluteFilePath, 'utf-8');\n } catch {\n // ignore read errors, start with empty object\n }\n }\n\n const transformedContent = transformJSONFile(fileContent, dictionary);\n\n // We use standard writeFile because transformedContent is already a string\n await writeFile(absoluteFilePath, transformedContent, 'utf-8');\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.system.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch 
{}\n};\n"],"mappings":"8qBAwBA,MAAM,EAA2B,MAC/B,EACA,EACA,IACG,CAIH,IAAM,EACJ,MAAM,EAAiC,EAAW,CAEhD,EAAU,EAAoB,QAM9B,EAAW,OACb,EAAU,EACR,EACA,EAAW,OACZ,CAAC,QACO,IACT,EAAU,EACR,EACA,EACD,CAAC,SAGJ,IAAI,EAA0B,CAC5B,GAAG,EACH,UACD,CAMD,UAAW,IAAM,KAAU,EAAc,SAAW,EAAE,CACpD,GAAI,EAAO,aAAc,CACvB,IAAM,EAAkB,MAAM,EAAO,eAAe,CAClD,WAAY,EACZ,gBACD,CAAC,CAEE,IACF,EAAqB,GAQ3B,GAAI,EAFF,EAAmB,SAAW,EAAmB,KAE1B,OAAO,EAEhC,IAAI,EAAqB,CACvB,IAAK,EAAW,IAChB,GAAI,EAAW,GACf,MAAO,EAAW,MAClB,YAAa,EAAW,YACxB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,SAAU,EAAW,SACrB,WAAY,EAAW,WACvB,QAAS,EAAW,QACpB,UACD,CAqBD,OAbe,EAFb,EAAW,SAAW,EAAQ,EAAW,SAAS,CAAG,QAEP,GAGnC,QACX,EAAmB,SACnB,EAAmB,MAEnB,EAAS,CACP,QAAS,mCACT,GAAG,EACJ,EAGI,GASH,EAAiB,CACrB,oBAAqB,wBACtB,CAEY,EAA0B,MACrC,EACA,EACA,IACwD,CACxD,GAAM,CAAE,WAAY,EACd,CAAE,WAAY,EACd,CAAE,sBAAqB,cAAe,CAC1C,GAAG,EACH,GAAG,EACJ,CAEK,EAA4B,EAAK,EAAS,EAAoB,CAO9D,EAL6B,EAAwB,EAAc,CAEvE,EAAW,MAGoC,KAC9C,GAAO,EAAG,UAAY,EAAW,QACnC,CAEK,EAA8B,MAAM,EACxC,EACA,EACA,EACD,CAED,GAAI,GAAoB,SAAU,CAEhC,IAAM,EAAgB,EAAkB,EAAoB,EAAW,CAEjE,EAAW,EACf,EAAc,QAAQ,QACtB,EAAmB,SACpB,CAgBD,OAbI,EACK,CACL,OAAQ,aACR,KAAM,EACP,EAGH,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,EAG9C,GAAI,EAAW,SAAU,CACvB,IAAM,EAAW,EACf,EAAc,QAAQ,QACtB,EAAW,SACZ,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,CAI9C,IAAM,EAAyB,EAC7B,EACA,GAAG,EAAW,IAAI,eACnB,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CACL,OAAQ,WACR,KAAM,EACP,EAGG,EAA2B,MAC/B,EACA,EACA,IACkB,CAKlB,MAAM,EAHM,EAAQ,EAAiB,CAGpB,CAAE,UAAW,GAAM,CAAC,CAErC,IAAM,EAAY,EAAQ,EAAiB,CAK3C,GAAI,CAJuB,EAAc,QAAQ,eAAe,IAC7D,GAAc,EAAQ,EAAU,CAClC,CAEuB,SAAS,EAAU,CACzC,MAAU,MACR,2BAA2B,EAAU,UAAU,IAChD,CAGH,GAAI,IAAc,QAAS,CAIzB,MAAM,EAAU,EAAkB,GAHX,KAAK,UAAU,EAAY,KAAM,EAAE,CAGN,IAAI,CAExD,OAIF,GAAI,CAAC,SAAU,SAAS,CAAC,SAAS,EAAU,CAAE,CAC5C,IAAI,EAAc,KAElB,GAAI,EAAW,EAAiB,CAC9B,GAAI,CACF,EAAc,MAAM,EAAS,EAAkB,QAAQ,MACjD,EAQV,MAAM,EAAU,EAHW,EAAkB,EAAa,EAAW,CAGf,QAAQ,CAC9D,OAGF,MAAM,EAAY,EAAkB,EAAY,EAAc,CAI9D,GAAI,
CAKF,MAAM,EAJe,EACnB,EAAc,OAAO,SACrB,yBACD,CACsB,CAAE,UAAW,GAAM,CAAC,MACrC"}
|
|
1
|
+
{"version":3,"file":"writeContentDeclaration.mjs","names":[],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { mkdir, readFile, rename, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core/plugins';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { transformJSONFile } from './transformJSONFile';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? 
[]) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n importMode: dictionary.importMode,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n 
] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file 
extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n const tempPath = `${absoluteFilePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n await rename(tempPath, absoluteFilePath);\n\n return;\n }\n\n // Handle JSONC, and JSON5 via the AST transformer\n if (['.jsonc', '.json5'].includes(extension)) {\n let fileContent = '{}';\n\n if (existsSync(absoluteFilePath)) {\n try {\n fileContent = await readFile(absoluteFilePath, 'utf-8');\n } catch {\n // ignore read errors, start with empty object\n }\n }\n\n const transformedContent = transformJSONFile(fileContent, dictionary);\n\n // We use standard writeFile because transformedContent is already a string\n const tempPath = `${absoluteFilePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, transformedContent, 'utf-8');\n await rename(tempPath, absoluteFilePath);\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.system.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch 
{}\n};\n"],"mappings":"0rBAwBA,MAAM,EAA2B,MAC/B,EACA,EACA,IACG,CAIH,IAAM,EACJ,MAAM,EAAiC,EAAW,CAEhD,EAAU,EAAoB,QAM9B,EAAW,OACb,EAAU,EACR,EACA,EAAW,OACZ,CAAC,QACO,IACT,EAAU,EACR,EACA,EACD,CAAC,SAGJ,IAAI,EAA0B,CAC5B,GAAG,EACH,UACD,CAMD,UAAW,IAAM,KAAU,EAAc,SAAW,EAAE,CACpD,GAAI,EAAO,aAAc,CACvB,IAAM,EAAkB,MAAM,EAAO,eAAe,CAClD,WAAY,EACZ,gBACD,CAAC,CAEE,IACF,EAAqB,GAQ3B,GAAI,EAFF,EAAmB,SAAW,EAAmB,KAE1B,OAAO,EAEhC,IAAI,EAAqB,CACvB,IAAK,EAAW,IAChB,GAAI,EAAW,GACf,MAAO,EAAW,MAClB,YAAa,EAAW,YACxB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,KAAM,EAAW,KACjB,OAAQ,EAAW,OACnB,SAAU,EAAW,SACrB,WAAY,EAAW,WACvB,QAAS,EAAW,QACpB,UACD,CAqBD,OAbe,EAFb,EAAW,SAAW,EAAQ,EAAW,SAAS,CAAG,QAEP,GAGnC,QACX,EAAmB,SACnB,EAAmB,MAEnB,EAAS,CACP,QAAS,mCACT,GAAG,EACJ,EAGI,GASH,EAAiB,CACrB,oBAAqB,wBACtB,CAEY,EAA0B,MACrC,EACA,EACA,IACwD,CACxD,GAAM,CAAE,WAAY,EACd,CAAE,WAAY,EACd,CAAE,sBAAqB,cAAe,CAC1C,GAAG,EACH,GAAG,EACJ,CAEK,EAA4B,EAAK,EAAS,EAAoB,CAO9D,EAL6B,EAAwB,EAAc,CAEvE,EAAW,MAGoC,KAC9C,GAAO,EAAG,UAAY,EAAW,QACnC,CAEK,EAA8B,MAAM,EACxC,EACA,EACA,EACD,CAED,GAAI,GAAoB,SAAU,CAEhC,IAAM,EAAgB,EAAkB,EAAoB,EAAW,CAEjE,EAAW,EACf,EAAc,QAAQ,QACtB,EAAmB,SACpB,CAgBD,OAbI,EACK,CACL,OAAQ,aACR,KAAM,EACP,EAGH,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,EAG9C,GAAI,EAAW,SAAU,CACvB,IAAM,EAAW,EACf,EAAc,QAAQ,QACtB,EAAW,SACZ,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CAAE,OAAQ,UAAW,KAAM,EAAU,CAI9C,IAAM,EAAyB,EAC7B,EACA,GAAG,EAAW,IAAI,eACnB,CAQD,OANA,MAAM,EACJ,EACA,EACA,EACD,CAEM,CACL,OAAQ,WACR,KAAM,EACP,EAGG,EAA2B,MAC/B,EACA,EACA,IACkB,CAKlB,MAAM,EAHM,EAAQ,EAAiB,CAGpB,CAAE,UAAW,GAAM,CAAC,CAErC,IAAM,EAAY,EAAQ,EAAiB,CAK3C,GAAI,CAJuB,EAAc,QAAQ,eAAe,IAC7D,GAAc,EAAQ,EAAU,CAClC,CAEuB,SAAS,EAAU,CACzC,MAAU,MACR,2BAA2B,EAAU,UAAU,IAChD,CAGH,GAAI,IAAc,QAAS,CACzB,IAAM,EAAiB,KAAK,UAAU,EAAY,KAAM,EAAE,CAGpD,EAAW,GAAG,EAAiB,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAC1F,MAAM,EAAU,EAAU,GAAG,EAAe,IAAI,CAChD,MAAM,EAAO,EAAU,EAAiB,CAExC,OAIF,GAAI,CAAC,SAAU,SAAS,CAAC,SAAS,EAAU,CAAE,CAC5C,IAAI,EAAc,KAElB,GAAI,EAAW,EAAiB,CAC9B,G
AAI,CACF,EAAc,MAAM,EAAS,EAAkB,QAAQ,MACjD,EAKV,IAAM,EAAqB,EAAkB,EAAa,EAAW,CAG/D,EAAW,GAAG,EAAiB,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAC1F,MAAM,EAAU,EAAU,EAAoB,QAAQ,CACtD,MAAM,EAAO,EAAU,EAAiB,CACxC,OAGF,MAAM,EAAY,EAAkB,EAAY,EAAc,CAI9D,GAAI,CAKF,MAAM,EAJe,EACnB,EAAc,OAAO,SACrB,yBACD,CACsB,CAAE,UAAW,GAAM,CAAC,MACrC"}
|
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import{detectFormatCommand as e}from"./detectFormatCommand.mjs";import{transformJSFile as t}from"./transformJSFile.mjs";import{getFormatFromExtension as n}from"../utils/getFormatFromExtension.mjs";import{getContentDeclarationFileTemplate as r}from"../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs";import{readFile as i,writeFile as
|
|
1
|
+
import { detectFormatCommand } from "./detectFormatCommand.mjs";
import { transformJSFile } from "./transformJSFile.mjs";
import { getFormatFromExtension } from "../utils/getFormatFromExtension.mjs";
import { getContentDeclarationFileTemplate } from "../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs";
import { readFile, rename, writeFile } from "node:fs/promises";
import { extname } from "node:path";
import { getAppLogger, logger } from "@intlayer/config/logger";
import { existsSync } from "node:fs";
import { execSync } from "node:child_process";

/**
 * Atomically replace `filePath` with `data`: write to a unique sibling
 * `*.tmp` file, then rename over the target so readers never observe a
 * partially written file. On failure the temp file is removed (best
 * effort) rather than orphaned, and the original error is rethrown.
 */
const writeAtomically = async (filePath, data) => {
  const tempPath = `${filePath}.${Date.now()}-${Math.random()
    .toString(36)
    .slice(2)}.tmp`;

  try {
    await writeFile(tempPath, data, "utf-8");
    await rename(tempPath, filePath);
  } catch (error) {
    // Swallow cleanup failures so the original error surfaces.
    const { rm } = await import("node:fs/promises");
    await rm(tempPath, { force: true }).catch(() => {});
    throw error;
  }
};

/**
 * Updates a JavaScript/TypeScript content-declaration file from the
 * provided dictionary.
 *
 * If the file does not exist (or is empty) it is first seeded from a
 * template for the detected format, then the content is transformed via
 * `transformJSFile` and written back atomically. Finally, the project's
 * configured format command (if any) is run on the file.
 *
 * @param filePath      - absolute path of the file to update
 * @param dictionary    - dictionary content to apply
 * @param configuration - Intlayer configuration (logging, format command, baseDir)
 * @throws Error when the transformed code cannot be written back
 */
const writeJSFile = async (filePath, dictionary, configuration) => {
  const mergedDictionary = {
    ...configuration.dictionary,
    ...dictionary,
  };

  const appLogger = getAppLogger(configuration);

  // Check if the file exists; otherwise create it from a template.
  if (!existsSync(filePath)) {
    const format = getFormatFromExtension(extname(filePath));

    appLogger("File does not exist, creating it", { isVerbose: true });

    const template = await getContentDeclarationFileTemplate(
      mergedDictionary.key,
      format,
      // Filter out undefined metadata values.
      Object.fromEntries(
        Object.entries({
          id: mergedDictionary.id,
          locale: mergedDictionary.locale,
          filled: mergedDictionary.filled,
          fill: mergedDictionary.fill,
          description: mergedDictionary.description,
          title: mergedDictionary.title,
          tags: mergedDictionary.tags,
          version: mergedDictionary.version,
          priority: mergedDictionary.priority,
          importMode: mergedDictionary.importMode,
        }).filter(([, value]) => value !== undefined)
      )
    );

    await writeAtomically(filePath, template);
  }

  let fileContent = await readFile(filePath, "utf-8");

  // An empty file cannot be transformed; seed it from the template first.
  if (fileContent === "") {
    const format = getFormatFromExtension(extname(filePath));
    fileContent = await getContentDeclarationFileTemplate(mergedDictionary.key, format);
  }

  const finalCode = await transformJSFile(fileContent, dictionary);

  // Write the modified code back to the file.
  try {
    await writeAtomically(filePath, finalCode);
    logger(`Successfully updated ${filePath}`, {
      level: "info",
      isVerbose: true,
    });
  } catch (error) {
    const err = error;
    logger(`Failed to write updated file: ${filePath}`, { level: "error" });
    // Preserve the original failure as `cause` for diagnostics.
    throw new Error(`Failed to write updated file ${filePath}: ${err.message}`, {
      cause: err,
    });
  }

  const formatCommand = detectFormatCommand(configuration);

  if (formatCommand) {
    try {
      execSync(formatCommand.replace("{{file}}", filePath), {
        stdio: "inherit",
        cwd: configuration.content.baseDir,
      });
    } catch (error) {
      // Formatting is best effort; report but do not fail the write.
      console.error(error);
    }
  }
};

export { writeJSFile };
|
|
2
2
|
//# sourceMappingURL=writeJSFile.mjs.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"writeJSFile.mjs","names":[],"sources":["../../../src/writeContentDeclaration/writeJSFile.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { readFile, writeFile } from 'node:fs/promises';\nimport { extname } from 'node:path';\nimport { getAppLogger, logger } from '@intlayer/config/logger';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { getContentDeclarationFileTemplate } from '../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport { detectFormatCommand } from './detectFormatCommand';\nimport { transformJSFile } from './transformJSFile';\n\n/**\n * Updates a JavaScript/TypeScript file based on the provided JSON instructions.\n * It targets a specific dictionary object within the file (identified by its 'key' property)\n * and updates its 'content' entries. 
Currently, it focuses on modifying arguments\n * of 't' (translation) function calls.\n */\nexport const writeJSFile = async (\n filePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n const mergedDictionary = {\n ...configuration.dictionary,\n ...dictionary,\n };\n\n const appLogger = getAppLogger(configuration);\n\n // Check if the file exist\n if (!existsSync(filePath)) {\n const fileExtension = extname(filePath) as Extension;\n\n const format = getFormatFromExtension(fileExtension);\n\n appLogger('File does not exist, creating it', {\n isVerbose: true,\n });\n const template = await getContentDeclarationFileTemplate(\n mergedDictionary.key,\n format,\n // Filter out undefined values\n Object.fromEntries(\n Object.entries({\n id: mergedDictionary.id,\n locale: mergedDictionary.locale,\n filled: mergedDictionary.filled,\n fill: mergedDictionary.fill,\n description: mergedDictionary.description,\n title: mergedDictionary.title,\n tags: mergedDictionary.tags,\n version: mergedDictionary.version,\n priority: mergedDictionary.priority,\n importMode: mergedDictionary.importMode,\n }).filter(([, value]) => value !== undefined)\n )\n );\n\n await writeFile(
|
|
1
|
+
{"version":3,"file":"writeJSFile.mjs","names":[],"sources":["../../../src/writeContentDeclaration/writeJSFile.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport { existsSync } from 'node:fs';\nimport { readFile, rename, writeFile } from 'node:fs/promises';\nimport { extname } from 'node:path';\nimport { getAppLogger, logger } from '@intlayer/config/logger';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { getContentDeclarationFileTemplate } from '../getContentDeclarationFileTemplate/getContentDeclarationFileTemplate';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport { detectFormatCommand } from './detectFormatCommand';\nimport { transformJSFile } from './transformJSFile';\n\n/**\n * Updates a JavaScript/TypeScript file based on the provided JSON instructions.\n * It targets a specific dictionary object within the file (identified by its 'key' property)\n * and updates its 'content' entries. 
Currently, it focuses on modifying arguments\n * of 't' (translation) function calls.\n */\nexport const writeJSFile = async (\n filePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n const mergedDictionary = {\n ...configuration.dictionary,\n ...dictionary,\n };\n\n const appLogger = getAppLogger(configuration);\n\n // Check if the file exist\n if (!existsSync(filePath)) {\n const fileExtension = extname(filePath) as Extension;\n\n const format = getFormatFromExtension(fileExtension);\n\n appLogger('File does not exist, creating it', {\n isVerbose: true,\n });\n const template = await getContentDeclarationFileTemplate(\n mergedDictionary.key,\n format,\n // Filter out undefined values\n Object.fromEntries(\n Object.entries({\n id: mergedDictionary.id,\n locale: mergedDictionary.locale,\n filled: mergedDictionary.filled,\n fill: mergedDictionary.fill,\n description: mergedDictionary.description,\n title: mergedDictionary.title,\n tags: mergedDictionary.tags,\n version: mergedDictionary.version,\n priority: mergedDictionary.priority,\n importMode: mergedDictionary.importMode,\n }).filter(([, value]) => value !== undefined)\n )\n );\n\n const tempPath = `${filePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, template, 'utf-8');\n await rename(tempPath, filePath);\n }\n\n let fileContent = await readFile(filePath, 'utf-8');\n\n if (fileContent === '') {\n const format = getFormatFromExtension(extname(filePath) as Extension);\n\n fileContent = await getContentDeclarationFileTemplate(\n mergedDictionary.key,\n format\n );\n }\n\n const finalCode = await transformJSFile(fileContent, dictionary);\n\n // Write the modified code back to the file\n try {\n const tempPath = `${filePath}.${Date.now()}-${Math.random().toString(36).slice(2)}.tmp`;\n await writeFile(tempPath, finalCode, 'utf-8');\n await rename(tempPath, filePath);\n logger(`Successfully updated ${filePath}`, {\n level: 
'info',\n isVerbose: true,\n });\n } catch (error) {\n const err = error as Error;\n logger(`Failed to write updated file: ${filePath}`, {\n level: 'error',\n });\n throw new Error(`Failed to write updated file ${filePath}: ${err.message}`);\n }\n\n const formatCommand = detectFormatCommand(configuration);\n\n if (formatCommand) {\n try {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'inherit',\n cwd: configuration.content.baseDir,\n });\n } catch (error) {\n console.error(error);\n }\n }\n};\n"],"mappings":"qkBAoBA,MAAa,EAAc,MACzB,EACA,EACA,IACkB,CAClB,IAAM,EAAmB,CACvB,GAAG,EAAc,WACjB,GAAG,EACJ,CAEK,EAAY,EAAa,EAAc,CAG7C,GAAI,CAAC,EAAW,EAAS,CAAE,CAGzB,IAAM,EAAS,EAFO,EAAQ,EAAS,CAEa,CAEpD,EAAU,mCAAoC,CAC5C,UAAW,GACZ,CAAC,CACF,IAAM,EAAW,MAAM,EACrB,EAAiB,IACjB,EAEA,OAAO,YACL,OAAO,QAAQ,CACb,GAAI,EAAiB,GACrB,OAAQ,EAAiB,OACzB,OAAQ,EAAiB,OACzB,KAAM,EAAiB,KACvB,YAAa,EAAiB,YAC9B,MAAO,EAAiB,MACxB,KAAM,EAAiB,KACvB,QAAS,EAAiB,QAC1B,SAAU,EAAiB,SAC3B,WAAY,EAAiB,WAC9B,CAAC,CAAC,QAAQ,EAAG,KAAW,IAAU,IAAA,GAAU,CAC9C,CACF,CAEK,EAAW,GAAG,EAAS,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAClF,MAAM,EAAU,EAAU,EAAU,QAAQ,CAC5C,MAAM,EAAO,EAAU,EAAS,CAGlC,IAAI,EAAc,MAAM,EAAS,EAAU,QAAQ,CAEnD,GAAI,IAAgB,GAAI,CACtB,IAAM,EAAS,EAAuB,EAAQ,EAAS,CAAc,CAErE,EAAc,MAAM,EAClB,EAAiB,IACjB,EACD,CAGH,IAAM,EAAY,MAAM,EAAgB,EAAa,EAAW,CAGhE,GAAI,CACF,IAAM,EAAW,GAAG,EAAS,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAE,CAAC,MAClF,MAAM,EAAU,EAAU,EAAW,QAAQ,CAC7C,MAAM,EAAO,EAAU,EAAS,CAChC,EAAO,wBAAwB,IAAY,CACzC,MAAO,OACP,UAAW,GACZ,CAAC,OACK,EAAO,CACd,IAAM,EAAM,EAIZ,MAHA,EAAO,iCAAiC,IAAY,CAClD,MAAO,QACR,CAAC,CACQ,MAAM,gCAAgC,EAAS,IAAI,EAAI,UAAU,CAG7E,IAAM,EAAgB,EAAoB,EAAc,CAExD,GAAI,EACF,GAAI,CACF,EAAS,EAAc,QAAQ,WAAY,EAAS,CAAE,CACpD,MAAO,UACP,IAAK,EAAc,QAAQ,QAC5B,CAAC,OACK,EAAO,CACd,QAAQ,MAAM,EAAM"}
|
package/dist/types/index.d.ts
CHANGED
|
@@ -39,6 +39,7 @@ import { formatLocale, formatPath } from "./utils/formatter.js";
|
|
|
39
39
|
import { getChunk } from "./utils/getChunk.js";
|
|
40
40
|
import { getComponentTransformPattern, getComponentTransformPatternSync } from "./utils/getComponentTransformPattern.js";
|
|
41
41
|
import { getFileHash } from "./utils/getFileHash.js";
|
|
42
|
+
import { mergeChunks } from "./utils/mergeChunks.js";
|
|
42
43
|
import { parallelize } from "./utils/parallelize.js";
|
|
43
44
|
import { Queue, pLimit } from "./utils/pLimit.js";
|
|
44
45
|
import { getGlobalLimiter, getTaskLimiter, parallelizeGlobal } from "./utils/parallelizeGlobal.js";
|
|
@@ -51,4 +52,4 @@ import { splitTextByLines } from "./utils/splitTextByLine.js";
|
|
|
51
52
|
import { verifyIdenticObjectFormat } from "./utils/verifyIdenticObjectFormat.js";
|
|
52
53
|
import "./utils/index.js";
|
|
53
54
|
import { buildAndWatchIntlayer, watch } from "./watcher.js";
|
|
54
|
-
export { ATTRIBUTES_TO_EXTRACT, BuildFilesListOptions, CreateDictionaryEntryPointOptions, DictionariesStatus, DictionaryStatus, DiffMode, Extension, ExtractIntlayerOptions, Format, JSONObject, JsonChunk, ListGitFilesOptions, ListGitLinesOptions, ListProjectsOptions, PackageName, ParallelHandle, Platform, Queue, SKILLS, SKILLS_METADATA, Skill, assembleJSON, autoDecorateContent, buildAndWatchIntlayer, buildDictionary, buildFilesList, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, extractDictionaryKey, extractIntlayer, formatDictionaries, formatDictionariesOutput, formatDictionary, formatDictionaryOutput, formatDistantDictionaries, formatLocalDictionaries, formatLocale, formatPath, generateDictionaryListContent, generateKey, generateTypeScriptType, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getComponentTransformPattern, getComponentTransformPatternSync, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, getTypeName, initIntlayer, installSkills, isCachedConfigurationUpToDate, listGitFiles, listGitLines, listProjects, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, processContentDeclaration, reconstructFromSingleChunk, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, shouldExtract, sortAlphabetically, splitTextByLines, transformFiles, transformJSFile, verifyIdenticObjectFormat, watch, writeConfiguration, writeContentDeclaration, writeJSFile };
|
|
55
|
+
export { ATTRIBUTES_TO_EXTRACT, BuildFilesListOptions, CreateDictionaryEntryPointOptions, DictionariesStatus, DictionaryStatus, DiffMode, Extension, ExtractIntlayerOptions, Format, JSONObject, JsonChunk, ListGitFilesOptions, ListGitLinesOptions, ListProjectsOptions, PackageName, ParallelHandle, Platform, Queue, SKILLS, SKILLS_METADATA, Skill, assembleJSON, autoDecorateContent, buildAndWatchIntlayer, buildDictionary, buildFilesList, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, extractDictionaryKey, extractIntlayer, formatDictionaries, formatDictionariesOutput, formatDictionary, formatDictionaryOutput, formatDistantDictionaries, formatLocalDictionaries, formatLocale, formatPath, generateDictionaryListContent, generateKey, generateTypeScriptType, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getComponentTransformPattern, getComponentTransformPatternSync, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, getTypeName, initIntlayer, installSkills, isCachedConfigurationUpToDate, listGitFiles, listGitLines, listProjects, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, mergeChunks, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, processContentDeclaration, reconstructFromSingleChunk, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, shouldExtract, sortAlphabetically, splitTextByLines, transformFiles, transformJSFile, verifyIdenticObjectFormat, watch, writeConfiguration, writeContentDeclaration, writeJSFile };
|
|
@@ -6,6 +6,7 @@ import { formatLocale, formatPath } from "./formatter.js";
|
|
|
6
6
|
import { getChunk } from "./getChunk.js";
|
|
7
7
|
import { getComponentTransformPattern, getComponentTransformPatternSync } from "./getComponentTransformPattern.js";
|
|
8
8
|
import { getFileHash } from "./getFileHash.js";
|
|
9
|
+
import { mergeChunks } from "./mergeChunks.js";
|
|
9
10
|
import { parallelize } from "./parallelize.js";
|
|
10
11
|
import { Queue, pLimit } from "./pLimit.js";
|
|
11
12
|
import { getGlobalLimiter, getTaskLimiter, parallelizeGlobal } from "./parallelizeGlobal.js";
|
|
@@ -16,4 +17,4 @@ import { ParallelHandle, runParallel } from "./runParallel/index.js";
|
|
|
16
17
|
import { sortAlphabetically } from "./sortAlphabetically.js";
|
|
17
18
|
import { splitTextByLines } from "./splitTextByLine.js";
|
|
18
19
|
import { verifyIdenticObjectFormat } from "./verifyIdenticObjectFormat.js";
|
|
19
|
-
export { BuildFilesListOptions, Extension, Format, JSONObject, JsonChunk, ParallelHandle, Queue, assembleJSON, autoDecorateContent, buildFilesList, chunkJSON, formatLocale, formatPath, getChunk, getComponentTransformPattern, getComponentTransformPatternSync, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, pLimit, parallelize, parallelizeGlobal, reconstructFromSingleChunk, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, sortAlphabetically, splitTextByLines, verifyIdenticObjectFormat };
|
|
20
|
+
export { BuildFilesListOptions, Extension, Format, JSONObject, JsonChunk, ParallelHandle, Queue, assembleJSON, autoDecorateContent, buildFilesList, chunkJSON, formatLocale, formatPath, getChunk, getComponentTransformPattern, getComponentTransformPatternSync, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, mergeChunks, pLimit, parallelize, parallelizeGlobal, reconstructFromSingleChunk, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, sortAlphabetically, splitTextByLines, verifyIdenticObjectFormat };
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"mergeChunks.d.ts","names":[],"sources":["../../../src/utils/mergeChunks.ts"],"mappings":";cAAa,WAAA,GAAe,MAAA"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"watcher.d.ts","names":[],"sources":["../../src/watcher.ts"],"mappings":";;;;;;
|
|
1
|
+
{"version":3,"file":"watcher.d.ts","names":[],"sources":["../../src/watcher.ts"],"mappings":";;;;;;KAmCK,YAAA,GAAe,eAAA;EAClB,aAAA,GAAgB,cAAA;EAChB,aAAA,GAAgB,uBAAA;EAChB,WAAA;AAAA;AAAA,cAIW,KAAA,GAAS,OAAA,GAAU,YAAA,KAAY,QAAA,CAAA,SAAA;AAAA,cA4H/B,qBAAA,GAA+B,OAAA,GAAU,YAAA,KAAY,OAAA"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@intlayer/chokidar",
|
|
3
|
-
"version": "8.1.
|
|
3
|
+
"version": "8.1.4",
|
|
4
4
|
"private": false,
|
|
5
5
|
"description": "Uses chokidar to scan and build Intlayer declaration files into dictionaries based on Intlayer configuration.",
|
|
6
6
|
"keywords": [
|
|
@@ -102,13 +102,13 @@
|
|
|
102
102
|
"typecheck": "tsc --noEmit --project tsconfig.types.json"
|
|
103
103
|
},
|
|
104
104
|
"dependencies": {
|
|
105
|
-
"@intlayer/api": "8.1.
|
|
106
|
-
"@intlayer/config": "8.1.
|
|
107
|
-
"@intlayer/core": "8.1.
|
|
108
|
-
"@intlayer/dictionaries-entry": "8.1.
|
|
109
|
-
"@intlayer/remote-dictionaries-entry": "8.1.
|
|
110
|
-
"@intlayer/types": "8.1.
|
|
111
|
-
"@intlayer/unmerged-dictionaries-entry": "8.1.
|
|
105
|
+
"@intlayer/api": "8.1.4",
|
|
106
|
+
"@intlayer/config": "8.1.4",
|
|
107
|
+
"@intlayer/core": "8.1.4",
|
|
108
|
+
"@intlayer/dictionaries-entry": "8.1.4",
|
|
109
|
+
"@intlayer/remote-dictionaries-entry": "8.1.4",
|
|
110
|
+
"@intlayer/types": "8.1.4",
|
|
111
|
+
"@intlayer/unmerged-dictionaries-entry": "8.1.4",
|
|
112
112
|
"chokidar": "3.6.0",
|
|
113
113
|
"crypto-js": "4.2.0",
|
|
114
114
|
"defu": "6.1.4",
|
|
@@ -119,7 +119,7 @@
|
|
|
119
119
|
},
|
|
120
120
|
"devDependencies": {
|
|
121
121
|
"@types/crypto-js": "4.2.2",
|
|
122
|
-
"@types/node": "25.
|
|
122
|
+
"@types/node": "25.3.0",
|
|
123
123
|
"@utils/ts-config": "1.0.4",
|
|
124
124
|
"@utils/ts-config-types": "1.0.4",
|
|
125
125
|
"@utils/tsdown-config": "1.0.4",
|
|
@@ -130,8 +130,8 @@
|
|
|
130
130
|
"zod": "4.3.6"
|
|
131
131
|
},
|
|
132
132
|
"peerDependencies": {
|
|
133
|
-
"@intlayer/svelte-transformer": "8.1.
|
|
134
|
-
"@intlayer/vue-transformer": "8.1.
|
|
133
|
+
"@intlayer/svelte-transformer": "8.1.4",
|
|
134
|
+
"@intlayer/vue-transformer": "8.1.4"
|
|
135
135
|
},
|
|
136
136
|
"peerDependenciesMeta": {
|
|
137
137
|
"@intlayer/svelte-transformer": {
|