@intlayer/chokidar 5.4.2 → 5.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/checkDictionaryChanges.cjs +10 -8
- package/dist/cjs/checkDictionaryChanges.cjs.map +1 -1
- package/dist/cjs/chokidar/watcher.cjs +26 -27
- package/dist/cjs/chokidar/watcher.cjs.map +1 -1
- package/dist/cjs/cleanOutputDir.cjs +7 -9
- package/dist/cjs/cleanOutputDir.cjs.map +1 -1
- package/dist/cjs/fetchDistantDictionaries.cjs +8 -6
- package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
- package/dist/cjs/filterDictionaryLocales.cjs +58 -0
- package/dist/cjs/filterDictionaryLocales.cjs.map +1 -0
- package/dist/cjs/getBuiltUnmergedDictionariesPath.cjs +51 -0
- package/dist/cjs/getBuiltUnmergedDictionariesPath.cjs.map +1 -0
- package/dist/cjs/getContentDeclarationFileTemplate/cjsTemplate.md +9 -0
- package/dist/cjs/getContentDeclarationFileTemplate/esmTemplate.md +9 -0
- package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs +62 -0
- package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map +1 -0
- package/dist/cjs/getContentDeclarationFileTemplate/tsTemplate.md +8 -0
- package/dist/cjs/index.cjs +21 -6
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/listGitFiles.cjs +100 -0
- package/dist/cjs/listGitFiles.cjs.map +1 -0
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs +4 -25
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadDistantDictionaries.cjs +7 -1
- package/dist/cjs/loadDictionaries/loadDistantDictionaries.cjs.map +1 -1
- package/dist/cjs/mergeDictionaries.cjs +44 -23
- package/dist/cjs/mergeDictionaries.cjs.map +1 -1
- package/dist/cjs/prepareIntlayer.cjs +6 -5
- package/dist/cjs/prepareIntlayer.cjs.map +1 -1
- package/dist/cjs/processPerLocaleDictionary.cjs +49 -0
- package/dist/cjs/processPerLocaleDictionary.cjs.map +1 -0
- package/dist/cjs/reduceDictionaryContent/applyMask.cjs +50 -0
- package/dist/cjs/reduceDictionaryContent/applyMask.cjs.map +1 -0
- package/dist/cjs/reduceDictionaryContent/buildMask.cjs +49 -0
- package/dist/cjs/reduceDictionaryContent/buildMask.cjs.map +1 -0
- package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs +35 -0
- package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs.map +1 -0
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.cjs +4 -4
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.cjs +56 -8
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.cjs +4 -4
- package/dist/cjs/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/dictionary_to_main/createDictionaryEntryPoint.cjs +21 -1
- package/dist/cjs/transpiler/dictionary_to_main/createDictionaryEntryPoint.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/formatCode.cjs +73 -0
- package/dist/cjs/writeContentDeclaration/formatCode.cjs.map +1 -0
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs +36 -37
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs +379 -0
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs.map +1 -0
- package/dist/esm/checkDictionaryChanges.mjs +10 -5
- package/dist/esm/checkDictionaryChanges.mjs.map +1 -1
- package/dist/esm/chokidar/watcher.mjs +18 -19
- package/dist/esm/chokidar/watcher.mjs.map +1 -1
- package/dist/esm/cleanOutputDir.mjs +7 -9
- package/dist/esm/cleanOutputDir.mjs.map +1 -1
- package/dist/esm/fetchDistantDictionaries.mjs +8 -6
- package/dist/esm/fetchDistantDictionaries.mjs.map +1 -1
- package/dist/esm/filterDictionaryLocales.mjs +37 -0
- package/dist/esm/filterDictionaryLocales.mjs.map +1 -0
- package/dist/esm/getBuiltUnmergedDictionariesPath.mjs +17 -0
- package/dist/esm/getBuiltUnmergedDictionariesPath.mjs.map +1 -0
- package/dist/esm/getContentDeclarationFileTemplate/cjsTemplate.md +9 -0
- package/dist/esm/getContentDeclarationFileTemplate/esmTemplate.md +9 -0
- package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs +37 -0
- package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs.map +1 -0
- package/dist/esm/getContentDeclarationFileTemplate/tsTemplate.md +8 -0
- package/dist/esm/index.mjs +21 -9
- package/dist/esm/index.mjs.map +1 -1
- package/dist/esm/listGitFiles.mjs +66 -0
- package/dist/esm/listGitFiles.mjs.map +1 -0
- package/dist/esm/loadDictionaries/loadDictionaries.mjs +7 -14
- package/dist/esm/loadDictionaries/loadDictionaries.mjs.map +1 -1
- package/dist/esm/loadDictionaries/loadDistantDictionaries.mjs +7 -1
- package/dist/esm/loadDictionaries/loadDistantDictionaries.mjs.map +1 -1
- package/dist/esm/mergeDictionaries.mjs +43 -22
- package/dist/esm/mergeDictionaries.mjs.map +1 -1
- package/dist/esm/prepareIntlayer.mjs +5 -4
- package/dist/esm/prepareIntlayer.mjs.map +1 -1
- package/dist/esm/processPerLocaleDictionary.mjs +25 -0
- package/dist/esm/processPerLocaleDictionary.mjs.map +1 -0
- package/dist/esm/reduceDictionaryContent/applyMask.mjs +26 -0
- package/dist/esm/reduceDictionaryContent/applyMask.mjs.map +1 -0
- package/dist/esm/reduceDictionaryContent/buildMask.mjs +24 -0
- package/dist/esm/reduceDictionaryContent/buildMask.mjs.map +1 -0
- package/dist/esm/reduceDictionaryContent/reduceDictionaryContent.mjs +11 -0
- package/dist/esm/reduceDictionaryContent/reduceDictionaryContent.mjs.map +1 -0
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.mjs +4 -4
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.mjs +54 -8
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.mjs +4 -4
- package/dist/esm/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/dictionary_to_main/createDictionaryEntryPoint.mjs +21 -1
- package/dist/esm/transpiler/dictionary_to_main/createDictionaryEntryPoint.mjs.map +1 -1
- package/dist/esm/writeContentDeclaration/formatCode.mjs +39 -0
- package/dist/esm/writeContentDeclaration/formatCode.mjs.map +1 -0
- package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs +35 -36
- package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs.map +1 -1
- package/dist/esm/writeContentDeclaration/writeJSFile.mjs +347 -0
- package/dist/esm/writeContentDeclaration/writeJSFile.mjs.map +1 -0
- package/dist/types/checkDictionaryChanges.d.ts.map +1 -1
- package/dist/types/chokidar/watcher.d.ts +2 -1
- package/dist/types/chokidar/watcher.d.ts.map +1 -1
- package/dist/types/cleanOutputDir.d.ts.map +1 -1
- package/dist/types/fetchDistantDictionaries.d.ts.map +1 -1
- package/dist/types/filterDictionaryLocales.d.ts +4 -0
- package/dist/types/filterDictionaryLocales.d.ts.map +1 -0
- package/dist/types/getBuiltUnmergedDictionariesPath.d.ts +5 -0
- package/dist/types/getBuiltUnmergedDictionariesPath.d.ts.map +1 -0
- package/dist/types/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.d.ts +2 -0
- package/dist/types/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.d.ts.map +1 -0
- package/dist/types/index.d.ts +12 -7
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/listGitFiles.d.ts +9 -0
- package/dist/types/listGitFiles.d.ts.map +1 -0
- package/dist/types/loadDictionaries/loadDictionaries.d.ts.map +1 -1
- package/dist/types/loadDictionaries/loadDistantDictionaries.d.ts.map +1 -1
- package/dist/types/mergeDictionaries.d.ts +2 -4
- package/dist/types/mergeDictionaries.d.ts.map +1 -1
- package/dist/types/prepareIntlayer.d.ts.map +1 -1
- package/dist/types/processPerLocaleDictionary.d.ts +32 -0
- package/dist/types/processPerLocaleDictionary.d.ts.map +1 -0
- package/dist/types/reduceDictionaryContent/applyMask.d.ts +3 -0
- package/dist/types/reduceDictionaryContent/applyMask.d.ts.map +1 -0
- package/dist/types/reduceDictionaryContent/buildMask.d.ts +5 -0
- package/dist/types/reduceDictionaryContent/buildMask.d.ts.map +1 -0
- package/dist/types/reduceDictionaryContent/reduceDictionaryContent.d.ts +3 -0
- package/dist/types/reduceDictionaryContent/reduceDictionaryContent.d.ts.map +1 -0
- package/dist/types/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.d.ts +38 -0
- package/dist/types/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.d.ts.map +1 -1
- package/dist/types/transpiler/dictionary_to_main/createDictionaryEntryPoint.d.ts.map +1 -1
- package/dist/types/writeContentDeclaration/formatCode.d.ts +2 -0
- package/dist/types/writeContentDeclaration/formatCode.d.ts.map +1 -0
- package/dist/types/writeContentDeclaration/writeContentDeclaration.d.ts.map +1 -1
- package/dist/types/writeContentDeclaration/writeJSFile.d.ts +9 -0
- package/dist/types/writeContentDeclaration/writeJSFile.d.ts.map +1 -0
- package/package.json +21 -13
package/dist/cjs/reduceDictionaryContent/buildMask.cjs
ADDED
@@ -0,0 +1,49 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var buildMask_exports = {};
+__export(buildMask_exports, {
+  buildMask: () => buildMask,
+  buildMaskPlugin: () => buildMaskPlugin
+});
+module.exports = __toCommonJS(buildMask_exports);
+var import_core = require("@intlayer/core");
+const passTypedNodePlugin = {
+  id: "pass-typed-node-plugin",
+  canHandle: (node) => typeof node === "object" && typeof node?.nodeType === "string",
+  transform: (node, props, deepTransformNode2) => deepTransformNode2(node[node.nodeType], props)
+};
+const buildMaskPlugin = {
+  id: "build-mask-plugin",
+  canHandle: (node) => typeof node === "string" || typeof node === "number",
+  transform: () => true
+};
+const buildMask = (source) => ({
+  ...source,
+  content: (0, import_core.deepTransformNode)(source.content, {
+    dictionaryKey: source.key,
+    keyPath: [],
+    plugins: [passTypedNodePlugin, buildMaskPlugin]
+  })
+});
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  buildMask,
+  buildMaskPlugin
+});
+//# sourceMappingURL=buildMask.cjs.map
package/dist/cjs/reduceDictionaryContent/buildMask.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/reduceDictionaryContent/buildMask.ts"],"sourcesContent":["import { deepTransformNode, Dictionary, type Plugins } from '@intlayer/core';\n\nconst passTypedNodePlugin: Plugins = {\n id: 'pass-typed-node-plugin',\n canHandle: (node) =>\n typeof node === 'object' && typeof node?.nodeType === 'string',\n transform: (node, props, deepTransformNode) =>\n deepTransformNode(node[node.nodeType], props),\n};\n\n/** Translation plugin. Replaces node with a locale string if nodeType = Translation. */\nexport const buildMaskPlugin: Plugins = {\n id: 'build-mask-plugin',\n canHandle: (node) => typeof node === 'string' || typeof node === 'number',\n transform: () => true,\n};\n\nexport const buildMask = (source: Dictionary): any => ({\n ...source,\n content: deepTransformNode(source.content, {\n dictionaryKey: source.key,\n keyPath: [],\n plugins: [passTypedNodePlugin, buildMaskPlugin],\n }),\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAA4D;AAE5D,MAAM,sBAA+B;AAAA,EACnC,IAAI;AAAA,EACJ,WAAW,CAAC,SACV,OAAO,SAAS,YAAY,OAAO,MAAM,aAAa;AAAA,EACxD,WAAW,CAAC,MAAM,OAAOA,uBACvBA,mBAAkB,KAAK,KAAK,QAAQ,GAAG,KAAK;AAChD;AAGO,MAAM,kBAA2B;AAAA,EACtC,IAAI;AAAA,EACJ,WAAW,CAAC,SAAS,OAAO,SAAS,YAAY,OAAO,SAAS;AAAA,EACjE,WAAW,MAAM;AACnB;AAEO,MAAM,YAAY,CAAC,YAA6B;AAAA,EACrD,GAAG;AAAA,EACH,aAAS,+BAAkB,OAAO,SAAS;AAAA,IACzC,eAAe,OAAO;AAAA,IACtB,SAAS,CAAC;AAAA,IACV,SAAS,CAAC,qBAAqB,eAAe;AAAA,EAChD,CAAC;AACH;","names":["deepTransformNode"]}
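
For orientation, here is a minimal usage sketch of the new `buildMask` helper, reconstructed from the source embedded in the map above. It is illustrative only: the import path assumes the helper is re-exported from the package root, which this diff does not show, and the dictionary shape is simplified.

```ts
import { buildMask } from '@intlayer/chokidar'; // assumed re-export path

// Hypothetical partial content declaration
const partial = {
  key: 'home',
  content: { title: 'Home', nested: { subtitle: 'Welcome' } },
};

// `buildMask` deep-transforms the content: typed nodes are unwrapped via
// `passTypedNodePlugin`, and every string/number leaf becomes `true`,
// yielding a structural mask such as:
// { title: true, nested: { subtitle: true } }
const mask = buildMask(partial as any);
```
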
package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs
ADDED
@@ -0,0 +1,35 @@
+"use strict";
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var reduceDictionaryContent_exports = {};
+__export(reduceDictionaryContent_exports, {
+  reduceDictionaryContent: () => reduceDictionaryContent
+});
+module.exports = __toCommonJS(reduceDictionaryContent_exports);
+var import_applyMask = require('./applyMask.cjs');
+var import_buildMask = require('./buildMask.cjs');
+const reduceDictionaryContent = (fullDictionary, partialDictionary) => {
+  const mask = (0, import_buildMask.buildMask)(partialDictionary);
+  const result = (0, import_applyMask.applyMask)(fullDictionary, mask);
+  return result;
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  reduceDictionaryContent
+});
+//# sourceMappingURL=reduceDictionaryContent.cjs.map
package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/reduceDictionaryContent/reduceDictionaryContent.ts"],"sourcesContent":["import type { Dictionary } from '@intlayer/core';\nimport { applyMask } from './applyMask';\nimport { buildMask } from './buildMask';\n\nexport const reduceDictionaryContent = (\n fullDictionary: Dictionary,\n partialDictionary: Dictionary\n) => {\n const mask = buildMask(partialDictionary);\n const result = applyMask(fullDictionary, mask);\n\n return result;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,uBAA0B;AAC1B,uBAA0B;AAEnB,MAAM,0BAA0B,CACrC,gBACA,sBACG;AACH,QAAM,WAAO,4BAAU,iBAAiB;AACxC,QAAM,aAAS,4BAAU,gBAAgB,IAAI;AAE7C,SAAO;AACT;","names":[]}
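
The composition above is the whole story of `reduceDictionaryContent`: build a boolean mask from a partial dictionary, then apply it to a full dictionary so that only the keys declared in the partial one are kept (`applyMask` is also added in this release, but its body is not shown in this excerpt). A hedged sketch, with simplified shapes and an assumed root re-export:

```ts
import { reduceDictionaryContent } from '@intlayer/chokidar'; // assumed re-export path

const full = {
  key: 'home',
  content: { title: 'Home', subtitle: 'Welcome', footer: 'Bye' },
};
const partial = {
  key: 'home',
  content: { title: 'Home' },
};

// Expected shape of the result: values come from `full`, but only for the
// keys present in `partial`, i.e. { title: 'Home' }.
const reduced = reduceDictionaryContent(full as any, partial as any);
```
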
package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.cjs
CHANGED
@@ -21,9 +21,9 @@ __export(writeDictionary_exports, {
   writeDictionary: () => writeDictionary
 });
 module.exports = __toCommonJS(writeDictionary_exports);
+var import_config = require("@intlayer/config");
 var import_promises = require("fs/promises");
 var import_path = require("path");
-var import_config = require("@intlayer/config");
 const writeDictionary = async (dictionariesDeclaration, configuration = (0, import_config.getConfiguration)()) => {
   const { i18nextResourcesDir } = configuration.content;
   const resultDictionariesPaths = [];
@@ -33,9 +33,9 @@ const writeDictionary = async (dictionariesDeclaration, configuration = (0, impo
     for await (const [locale, content] of Object.entries(localContent)) {
       const contentString = JSON.stringify(content);
       const outputFileName = `${nameSpace}.json`;
-      const
-      const resultFilePath = (0, import_path.resolve)(
-      await (0, import_promises.mkdir)(
+      const dictionariesDirPath = (0, import_path.resolve)(i18nextResourcesDir, locale);
+      const resultFilePath = (0, import_path.resolve)(dictionariesDirPath, outputFileName);
+      await (0, import_promises.mkdir)(dictionariesDirPath, { recursive: true });
       await (0, import_promises.writeFile)(resultFilePath, contentString, "utf8").catch((err) => {
         console.error(`Error creating ${outputFileName}:`, err);
       });
package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.ts"],"sourcesContent":["import {
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18next_dictionary/writeDictionary.ts"],"sourcesContent":["import { getConfiguration } from '@intlayer/config';\nimport { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport type { I18nextDictionariesOutput } from './convertContentDeclarationInto18nDictionaries';\n\nexport type DictionariesDeclaration = Record<string, I18nextDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nexport const writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration,\n configuration = getConfiguration()\n) => {\n const { i18nextResourcesDir } = configuration.content;\n\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const dictionariesDirPath = resolve(i18nextResourcesDir, locale);\n const resultFilePath = resolve(dictionariesDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(dictionariesDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AACjC,sBAAiC;AACjC,kBAAwB;AAQjB,MAAM,kBAAkB,OAC7B,yBACA,oBAAgB,gCAAiB,MAC9B;AACH,QAAM,EAAE,oBAAoB,IAAI,cAAc;AAE9C,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQ,OAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAU,OAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,0BAAsB,qBAAQ,qBAAqB,MAAM;AAC/D,YAAM,qBAAiB,qBAAQ,qBAAqB,cAAc;AAGlE,gBAAM,uBAAM,qBAAqB,EAAE,WAAW,KAAK,CAAC;AAGpD,gBAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;","names":[]}
package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.cjs
CHANGED
@@ -18,30 +18,78 @@ var __copyProps = (to, from, except, desc) => {
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var writeDictionary_exports = {};
 __export(writeDictionary_exports, {
-  writeDictionary: () => writeDictionary
+  writeDictionary: () => writeDictionary,
+  writeFinalDictionaries: () => writeFinalDictionaries,
+  writeUnmergedDictionaries: () => writeUnmergedDictionaries
 });
 module.exports = __toCommonJS(writeDictionary_exports);
+var import_config = require("@intlayer/config");
 var import_promises = require("fs/promises");
 var import_path = require("path");
-var
-
+var import_mergeDictionaries = require('../../../mergeDictionaries.cjs');
+var import_processPerLocaleDictionary = require('../../../processPerLocaleDictionary.cjs');
+const groupDictionariesByKey = (dictionaries) => {
+  return dictionaries.reduce(
+    (acc, dictionary) => {
+      const key = dictionary.key;
+      if (!acc[key]) {
+        acc[key] = [];
+      }
+      acc[key].push(dictionary);
+      return acc;
+    },
+    {}
+  );
+};
+const writeUnmergedDictionaries = async (dictionaries, configuration = (0, import_config.getConfiguration)()) => {
+  const { unmergedDictionariesDir } = configuration.content;
+  await (0, import_promises.mkdir)(unmergedDictionariesDir, { recursive: true });
+  const groupedDictionaries = groupDictionariesByKey(dictionaries);
+  for (const [key, dictionaries2] of Object.entries(groupedDictionaries)) {
+    const isDevelopment = process.env.NODE_ENV === "development";
+    const contentString = isDevelopment ? JSON.stringify(dictionaries2, null, 2) : JSON.stringify(dictionaries2);
+    const outputFileName = `${key}.json`;
+    const unmergedFilePath = (0, import_path.resolve)(unmergedDictionariesDir, outputFileName);
+    await (0, import_promises.writeFile)(unmergedFilePath, contentString, "utf8").catch((err) => {
+      console.error(`Error creating unmerged ${outputFileName}:`, err);
+    });
+  }
+  return groupedDictionaries;
+};
+const writeFinalDictionaries = async (groupedDictionaries, configuration = (0, import_config.getConfiguration)()) => {
   const { dictionariesDir } = configuration.content;
   const resultDictionariesPaths = [];
-  for
+  for (const [key, dictionaries] of Object.entries(groupedDictionaries)) {
+    const multiLocaleDictionaries = dictionaries.map(
+      (dictionary) => (0, import_processPerLocaleDictionary.processPerLocaleDictionary)(dictionary)
+    );
+    const mergedDictionary = (0, import_mergeDictionaries.mergeDictionaries)(multiLocaleDictionaries);
     const isDevelopment = process.env.NODE_ENV === "development";
-    const contentString = isDevelopment ? JSON.stringify(
-    const key = dictionaryContent.key;
+    const contentString = isDevelopment ? JSON.stringify(mergedDictionary, null, 2) : JSON.stringify(mergedDictionary);
     const outputFileName = `${key}.json`;
     const resultFilePath = (0, import_path.resolve)(dictionariesDir, outputFileName);
     await (0, import_promises.writeFile)(resultFilePath, contentString, "utf8").catch((err) => {
-      console.error(`Error creating ${outputFileName}:`, err);
+      console.error(`Error creating merged ${outputFileName}:`, err);
     });
     resultDictionariesPaths.push(resultFilePath);
   }
   return resultDictionariesPaths;
 };
+const writeDictionary = async (dictionaries, configuration = (0, import_config.getConfiguration)()) => {
+  const unmergedDictionaries = await writeUnmergedDictionaries(
+    dictionaries,
+    configuration
+  );
+  const finalDictionaries = await writeFinalDictionaries(
+    unmergedDictionaries,
+    configuration
+  );
+  return finalDictionaries;
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
-  writeDictionary
+  writeDictionary,
+  writeFinalDictionaries,
+  writeUnmergedDictionaries
 });
 //# sourceMappingURL=writeDictionary.cjs.map
package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.ts"],"sourcesContent":["import { writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport {
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/writeDictionary.ts"],"sourcesContent":["import { getConfiguration } from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { mergeDictionaries } from '../../../mergeDictionaries';\nimport { processPerLocaleDictionary } from '../../../processPerLocaleDictionary';\n\nconst groupDictionariesByKey = (\n dictionaries: Dictionary[]\n): Record<string, Dictionary[]> => {\n return dictionaries.reduce(\n (acc, dictionary) => {\n const key = dictionary.key;\n if (!acc[key]) {\n acc[key] = [];\n }\n acc[key].push(dictionary);\n return acc;\n },\n {} as Record<string, Dictionary[]>\n );\n};\n\n/**\n * Write the unmerged dictionaries to the unmergedDictionariesDir\n * @param dictionaries - The dictionaries to write\n * @param configuration - The configuration\n * @returns The grouped dictionaries\n *\n * @example\n * ```ts\n * const unmergedDictionaries = await writeUnmergedDictionaries(dictionaries);\n * console.log(unmergedDictionaries);\n *\n * // .intlayer/unmerged_dictionaries/home.json\n * // {\n * // [\n * // { key: 'home', content: { ... } },\n * // { key: 'home', content: { ... } },\n * // ],\n * // }\n * ```\n */\nexport const writeUnmergedDictionaries = async (\n dictionaries: Dictionary[],\n configuration = getConfiguration()\n): Promise<Record<string, Dictionary[]>> => {\n const { unmergedDictionariesDir } = configuration.content;\n\n // Create the merged dictionaries directory\n await mkdir(unmergedDictionariesDir, { recursive: true });\n\n // Group dictionaries by key and write to unmergedDictionariesDir\n const groupedDictionaries = groupDictionariesByKey(dictionaries);\n\n for (const [key, dictionaries] of Object.entries(groupedDictionaries)) {\n const isDevelopment = process.env.NODE_ENV === 'development';\n const contentString = isDevelopment\n ? JSON.stringify(dictionaries, null, 2)\n : JSON.stringify(dictionaries);\n\n const outputFileName = `${key}.json`;\n const unmergedFilePath = resolve(unmergedDictionariesDir, outputFileName);\n\n // Write the grouped dictionaries\n await writeFile(unmergedFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating unmerged ${outputFileName}:`, err);\n });\n }\n\n return groupedDictionaries;\n};\n\n/**\n * Write the final dictionaries to the dictionariesDir\n * @param groupedDictionaries - The grouped dictionaries\n * @param configuration - The configuration\n * @returns The final dictionaries\n *\n * @example\n * ```ts\n * const unmergedDictionaries = await writeUnmergedDictionaries(dictionaries);\n * const finalDictionaries = await writeFinalDictionaries(unmergedDictionaries);\n * console.log(finalDictionaries);\n *\n * // .intlayer/dictionaries/home.json\n * // { key: 'home', content: { ... 
} },\n * ```\n */\nexport const writeFinalDictionaries = async (\n groupedDictionaries: Record<string, Dictionary[]>,\n configuration = getConfiguration()\n) => {\n const { dictionariesDir } = configuration.content;\n const resultDictionariesPaths: string[] = [];\n\n // Merge dictionaries with the same key and write to dictionariesDir\n for (const [key, dictionaries] of Object.entries(groupedDictionaries)) {\n const multiLocaleDictionaries = dictionaries.map((dictionary) =>\n processPerLocaleDictionary(dictionary)\n );\n const mergedDictionary = mergeDictionaries(multiLocaleDictionaries);\n\n const isDevelopment = process.env.NODE_ENV === 'development';\n const contentString = isDevelopment\n ? JSON.stringify(mergedDictionary, null, 2)\n : JSON.stringify(mergedDictionary);\n\n const outputFileName = `${key}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Write the merged dictionary\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating merged ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\nexport const writeDictionary = async (\n dictionaries: Dictionary[],\n configuration = getConfiguration()\n) => {\n const unmergedDictionaries = await writeUnmergedDictionaries(\n dictionaries,\n configuration\n );\n\n const finalDictionaries = await writeFinalDictionaries(\n unmergedDictionaries,\n configuration\n );\n\n return finalDictionaries;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AAEjC,sBAAiC;AACjC,kBAAwB;AACxB,+BAAkC;AAClC,wCAA2C;AAE3C,MAAM,yBAAyB,CAC7B,iBACiC;AACjC,SAAO,aAAa;AAAA,IAClB,CAAC,KAAK,eAAe;AACnB,YAAM,MAAM,WAAW;AACvB,UAAI,CAAC,IAAI,GAAG,GAAG;AACb,YAAI,GAAG,IAAI,CAAC;AAAA,MACd;AACA,UAAI,GAAG,EAAE,KAAK,UAAU;AACxB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AACF;AAsBO,MAAM,4BAA4B,OACvC,cACA,oBAAgB,gCAAiB,MACS;AAC1C,QAAM,EAAE,wBAAwB,IAAI,cAAc;AAGlD,YAAM,uBAAM,yBAAyB,EAAE,WAAW,KAAK,CAAC;AAGxD,QAAM,sBAAsB,uBAAuB,YAAY;AAE/D,aAAW,CAAC,KAAKA,aAAY,KAAK,OAAO,QAAQ,mBAAmB,GAAG;AACrE,UAAM,gBAAgB,QAAQ,IAAI,aAAa;AAC/C,UAAM,gBAAgB,gBAClB,KAAK,UAAUA,eAAc,MAAM,CAAC,IACpC,KAAK,UAAUA,aAAY;AAE/B,UAAM,iBAAiB,GAAG,GAAG;AAC7B,UAAM,uBAAmB,qBAAQ,yBAAyB,cAAc;AAGxE,cAAM,2BAAU,kBAAkB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACtE,cAAQ,MAAM,2BAA2B,cAAc,KAAK,GAAG;AAAA,IACjE,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAkBO,MAAM,yBAAyB,OACpC,qBACA,oBAAgB,gCAAiB,MAC9B;AACH,QAAM,EAAE,gBAAgB,IAAI,cAAc;AAC1C,QAAM,0BAAoC,CAAC;AAG3C,aAAW,CAAC,KAAK,YAAY,KAAK,OAAO,QAAQ,mBAAmB,GAAG;AACrE,UAAM,0BAA0B,aAAa;AAAA,MAAI,CAAC,mBAChD,8DAA2B,UAAU;AAAA,IACvC;AACA,UAAM,uBAAmB,4CAAkB,uBAAuB;AAElE,UAAM,gBAAgB,QAAQ,IAAI,aAAa;AAC/C,UAAM,gBAAgB,gBAClB,KAAK,UAAU,kBAAkB,MAAM,CAAC,IACxC,KAAK,UAAU,gBAAgB;AAEnC,UAAM,iBAAiB,GAAG,GAAG;AAC7B,UAAM,qBAAiB,qBAAQ,iBAAiB,cAAc;AAG9D,cAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,yBAAyB,cAAc,KAAK,GAAG;AAAA,IAC/D,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAEO,MAAM,kBAAkB,OAC7B,cACA,oBAAgB,gCAAiB,MAC9B;AACH,QAAM,uBAAuB,MAAM;AAAA,IACjC;AAAA,IACA;AAAA,EACF;AAEA,QAAM,oBAAoB,MAAM;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;","names":["dictionaries"]}
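
The embedded source above documents the new two-pass write: `writeUnmergedDictionaries` groups declarations by key and writes `.intlayer/unmerged_dictionaries/<key>.json`, then `writeFinalDictionaries` runs each group through `processPerLocaleDictionary`, merges it with `mergeDictionaries`, and writes `.intlayer/dictionaries/<key>.json`; `writeDictionary` chains the two. A hedged sketch of the flow (the import path and dictionary shapes are assumptions):

```ts
import {
  writeDictionary,
  writeUnmergedDictionaries,
  writeFinalDictionaries,
} from '@intlayer/chokidar'; // assumed re-export path

// Two declarations that share the same key (e.g. a local and a remote one)
const dictionaries = [
  { key: 'home', content: { title: 'Home' } },
  { key: 'home', content: { subtitle: 'Welcome' } },
];

const run = async () => {
  // Pass 1: groups by key, writes .intlayer/unmerged_dictionaries/home.json
  const grouped = await writeUnmergedDictionaries(dictionaries as any);

  // Pass 2: per-locale processing + merge, writes .intlayer/dictionaries/home.json
  const mergedPaths = await writeFinalDictionaries(grouped);

  // Equivalent shortcut: writeDictionary(dictionaries) chains both passes.
  return mergedPaths;
};
```
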
package/dist/cjs/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.cjs
CHANGED
@@ -21,9 +21,9 @@ __export(writeDictionary_exports, {
   writeDictionary: () => writeDictionary
 });
 module.exports = __toCommonJS(writeDictionary_exports);
+var import_config = require("@intlayer/config");
 var import_promises = require("fs/promises");
 var import_path = require("path");
-var import_config = require("@intlayer/config");
 const { content } = (0, import_config.getConfiguration)();
 const { reactIntlMessagesDir } = content;
 const writeDictionary = async (dictionariesDeclaration) => {
@@ -34,9 +34,9 @@ const writeDictionary = async (dictionariesDeclaration) => {
     for await (const [locale, content2] of Object.entries(localContent)) {
       const contentString = JSON.stringify(content2);
       const outputFileName = `${nameSpace}.json`;
-      const
-      const resultFilePath = (0, import_path.resolve)(
-      await (0, import_promises.mkdir)(
+      const dictionariesDirPath = (0, import_path.resolve)(reactIntlMessagesDir, locale);
+      const resultFilePath = (0, import_path.resolve)(dictionariesDirPath, outputFileName);
+      await (0, import_promises.mkdir)(dictionariesDirPath, { recursive: true });
       await (0, import_promises.writeFile)(resultFilePath, contentString, "utf8").catch((err) => {
         console.error(`Error creating ${outputFileName}:`, err);
       });
package/dist/cjs/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.ts"],"sourcesContent":["import {
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/reactIntl_dictionary/writeDictionary.ts"],"sourcesContent":["import { getConfiguration } from '@intlayer/config';\nimport { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport type { I18nextDictionariesOutput } from '../i18next_dictionary/convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { reactIntlMessagesDir } = content;\n\nexport type DictionariesDeclaration = Record<string, I18nextDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nexport const writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const dictionariesDirPath = resolve(reactIntlMessagesDir, locale);\n const resultFilePath = resolve(dictionariesDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(dictionariesDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AACjC,sBAAiC;AACjC,kBAAwB;AAGxB,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,qBAAqB,IAAI;AAO1B,MAAM,kBAAkB,OAC7B,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,0BAAsB,qBAAQ,sBAAsB,MAAM;AAChE,YAAM,qBAAiB,qBAAQ,qBAAqB,cAAc;AAGlE,gBAAM,uBAAM,qBAAqB,EAAE,WAAW,KAAK,CAAC;AAGpD,gBAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;","names":["content"]}
package/dist/cjs/transpiler/dictionary_to_main/createDictionaryEntryPoint.cjs
CHANGED
@@ -21,10 +21,11 @@ __export(createDictionaryEntryPoint_exports, {
   createDictionaryEntryPoint: () => createDictionaryEntryPoint
 });
 module.exports = __toCommonJS(createDictionaryEntryPoint_exports);
+var import_config = require("@intlayer/config");
 var import_fs = require("fs");
 var import_path = require("path");
-var import_config = require("@intlayer/config");
 var import_getBuiltDictionariesPath = require('../../getBuiltDictionariesPath.cjs');
+var import_getBuiltUnmergedDictionariesPath = require('../../getBuiltUnmergedDictionariesPath.cjs');
 var import_utils = require('../../utils.cjs');
 const generateDictionaryListContent = (dictionaries, format = "esm", configuration = (0, import_config.getConfiguration)()) => {
   const { mainDir } = configuration.content;
@@ -76,6 +77,25 @@ const createDictionaryEntryPoint = (configuration = (0, import_config.getConfigu
     configuration
   );
   (0, import_fs.writeFileSync)((0, import_path.resolve)(mainDir, "dictionaries.mjs"), esmContent);
+  const unmergedDictionariesPath = (0, import_getBuiltUnmergedDictionariesPath.getBuiltUnmergedDictionariesPath)(configuration);
+  const unmergedCjsContent = generateDictionaryListContent(
+    unmergedDictionariesPath,
+    "cjs",
+    configuration
+  );
+  (0, import_fs.writeFileSync)(
+    (0, import_path.resolve)(mainDir, "unmerged_dictionaries.cjs"),
+    unmergedCjsContent
+  );
+  const unmergedEsmContent = generateDictionaryListContent(
+    unmergedDictionariesPath,
+    "esm",
+    configuration
+  );
+  (0, import_fs.writeFileSync)(
+    (0, import_path.resolve)(mainDir, "unmerged_dictionaries.mjs"),
+    unmergedEsmContent
+  );
 };
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
package/dist/cjs/transpiler/dictionary_to_main/createDictionaryEntryPoint.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../../src/transpiler/dictionary_to_main/createDictionaryEntryPoint.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, extname, relative, resolve } from 'path';\nimport {
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_main/createDictionaryEntryPoint.ts"],"sourcesContent":["import { getConfiguration } from '@intlayer/config';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { basename, extname, relative, resolve } from 'path';\nimport { getBuiltDictionariesPath } from '../../getBuiltDictionariesPath';\nimport { getBuiltUnmergedDictionariesPath } from '../../getBuiltUnmergedDictionariesPath';\nimport { getFileHash } from '../../utils';\n\n/**\n * This function generates the content of the dictionary list file\n */\nconst generateDictionaryListContent = (\n dictionaries: string[],\n format: 'cjs' | 'esm' = 'esm',\n configuration = getConfiguration()\n): string => {\n const { mainDir } = configuration.content;\n\n let content = '';\n\n const dictionariesRef = dictionaries.map((dictionaryPath) => ({\n relativePath: relative(mainDir, dictionaryPath),\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n if (format === 'esm')\n content += `import ${dictionary.hash} from '${dictionary.relativePath}';\\n`;\n if (format === 'cjs')\n content += `const ${dictionary.hash} = require('${dictionary.relativePath}');\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash}`)\n .join(',\\n');\n\n if (format === 'esm')\n content += `export default {\\n${formattedDictionaryMap}\\n};\\n`;\n if (format === 'cjs')\n content += `module.exports = {\\n${formattedDictionaryMap}\\n};\\n`;\n\n return content;\n};\n\n/**\n * This function generates a list of dictionaries in the main directory\n */\nexport const createDictionaryEntryPoint = (\n configuration = getConfiguration()\n) => {\n const { mainDir } = configuration.content;\n\n // Create main directory if it doesn't exist\n if (!existsSync(mainDir)) {\n mkdirSync(mainDir, { recursive: true });\n }\n\n const dictionariesPath: string[] = getBuiltDictionariesPath(configuration);\n\n // Create the dictionary list file\n const cjsContent = generateDictionaryListContent(\n dictionariesPath,\n 'cjs',\n configuration\n );\n writeFileSync(resolve(mainDir, 'dictionaries.cjs'), cjsContent);\n\n const esmContent = generateDictionaryListContent(\n dictionariesPath,\n 'esm',\n configuration\n );\n writeFileSync(resolve(mainDir, 'dictionaries.mjs'), esmContent);\n\n const unmergedDictionariesPath: string[] =\n getBuiltUnmergedDictionariesPath(configuration);\n\n const unmergedCjsContent = generateDictionaryListContent(\n unmergedDictionariesPath,\n 'cjs',\n configuration\n );\n writeFileSync(\n resolve(mainDir, 'unmerged_dictionaries.cjs'),\n unmergedCjsContent\n );\n\n const unmergedEsmContent = generateDictionaryListContent(\n unmergedDictionariesPath,\n 'esm',\n configuration\n );\n writeFileSync(\n resolve(mainDir, 'unmerged_dictionaries.mjs'),\n unmergedEsmContent\n 
);\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AACjC,gBAAqD;AACrD,kBAAqD;AACrD,sCAAyC;AACzC,8CAAiD;AACjD,mBAA4B;AAK5B,MAAM,gCAAgC,CACpC,cACA,SAAwB,OACxB,oBAAgB,gCAAiB,MACtB;AACX,QAAM,EAAE,QAAQ,IAAI,cAAc;AAElC,MAAI,UAAU;AAEd,QAAM,kBAAkB,aAAa,IAAI,CAAC,oBAAoB;AAAA,IAC5D,kBAAc,sBAAS,SAAS,cAAc;AAAA,IAC9C,QAAI,sBAAS,oBAAgB,qBAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,QAAI,0BAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,QAAI,WAAW;AACb,iBAAW,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AACvE,QAAI,WAAW;AACb,iBAAW,SAAS,WAAW,IAAI,eAAe,WAAW,YAAY;AAAA;AAAA,EAC7E,CAAC;AAED,aAAW;AAGX,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,MAAM,WAAW,EAAE,MAAM,WAAW,IAAI,EAAE,EAC9D,KAAK,KAAK;AAEb,MAAI,WAAW;AACb,eAAW;AAAA,EAAqB,sBAAsB;AAAA;AAAA;AACxD,MAAI,WAAW;AACb,eAAW;AAAA,EAAuB,sBAAsB;AAAA;AAAA;AAE1D,SAAO;AACT;AAKO,MAAM,6BAA6B,CACxC,oBAAgB,gCAAiB,MAC9B;AACH,QAAM,EAAE,QAAQ,IAAI,cAAc;AAGlC,MAAI,KAAC,sBAAW,OAAO,GAAG;AACxB,6BAAU,SAAS,EAAE,WAAW,KAAK,CAAC;AAAA,EACxC;AAEA,QAAM,uBAA6B,0DAAyB,aAAa;AAGzE,QAAM,aAAa;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,mCAAc,qBAAQ,SAAS,kBAAkB,GAAG,UAAU;AAE9D,QAAM,aAAa;AAAA,IACjB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,mCAAc,qBAAQ,SAAS,kBAAkB,GAAG,UAAU;AAE9D,QAAM,+BACJ,0EAAiC,aAAa;AAEhD,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA;AAAA,QACE,qBAAQ,SAAS,2BAA2B;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,qBAAqB;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA;AAAA,QACE,qBAAQ,SAAS,2BAA2B;AAAA,IAC5C;AAAA,EACF;AACF;","names":[]}
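
Based on `generateDictionaryListContent` in the embedded source, the entry-point generator now emits `unmerged_dictionaries.cjs` / `unmerged_dictionaries.mjs` alongside the existing `dictionaries.cjs` / `dictionaries.mjs`. The generated ESM file should look roughly like this; the hashes, keys, and relative paths below are purely illustrative:

```ts
// .intlayer/main/unmerged_dictionaries.mjs (illustrative generated output)
import _a1b2c3 from '../unmerged_dictionaries/home.json';
import _d4e5f6 from '../unmerged_dictionaries/about.json';

export default {
  "home": _a1b2c3,
  "about": _d4e5f6
};
```
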
package/dist/cjs/writeContentDeclaration/formatCode.cjs
ADDED
@@ -0,0 +1,73 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var formatCode_exports = {};
+__export(formatCode_exports, {
+  formatCode: () => formatCode
+});
+module.exports = __toCommonJS(formatCode_exports);
+var import_config = require("@intlayer/config");
+var import_built = __toESM(require("@intlayer/config/built"));
+const formatCode = async (filePath, code) => {
+  const appLogger = (0, import_config.getAppLogger)(import_built.default);
+  let prettier;
+  try {
+    prettier = require("prettier");
+  } catch (error) {
+  }
+  if (prettier) {
+    try {
+      const prettierConfig = await prettier.resolveConfig(filePath ?? "");
+      const formattedCode = await prettier.format(code, {
+        ...prettierConfig,
+        filepath: filePath
+        // Explicitly provide the filepath so Prettier can infer the parser
+      });
+      appLogger(`Applied Prettier formatting to ${filePath}`, {
+        level: "info",
+        isVerbose: true
+      });
+      return formattedCode;
+    } catch (error) {
+      const err = error;
+      appLogger(
+        `Failed to apply Prettier formatting to ${filePath}: ${err.message}`,
+        {
+          level: "warn",
+          isVerbose: true
+        }
+      );
+    }
+  }
+  return code;
+};
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  formatCode
+});
+//# sourceMappingURL=formatCode.cjs.map
package/dist/cjs/writeContentDeclaration/formatCode.cjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/writeContentDeclaration/formatCode.ts"],"sourcesContent":["import { getAppLogger } from '@intlayer/config';\nimport configuration from '@intlayer/config/built';\n\nexport const formatCode = async (filePath: string, code: string) => {\n const appLogger = getAppLogger(configuration);\n // Try to import prettier if it exists\n let prettier: any;\n try {\n prettier = require('prettier');\n } catch (error) {\n // Prettier is not installed, continue without it\n }\n\n // Apply Prettier formatting if it's available\n if (prettier) {\n try {\n // Try to find a prettier config file\n const prettierConfig = await prettier.resolveConfig(filePath ?? '');\n\n // Format the code with Prettier\n const formattedCode = await prettier.format(code, {\n ...prettierConfig,\n filepath: filePath, // Explicitly provide the filepath so Prettier can infer the parser\n });\n\n appLogger(`Applied Prettier formatting to ${filePath}`, {\n level: 'info',\n isVerbose: true,\n });\n\n return formattedCode;\n } catch (error) {\n const err = error as Error;\n appLogger(\n `Failed to apply Prettier formatting to ${filePath}: ${err.message}`,\n {\n level: 'warn',\n isVerbose: true,\n }\n );\n // Continue with unformatted code on prettier error\n }\n }\n\n return code;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA6B;AAC7B,mBAA0B;AAEnB,MAAM,aAAa,OAAO,UAAkB,SAAiB;AAClE,QAAM,gBAAY,4BAAa,aAAAA,OAAa;AAE5C,MAAI;AACJ,MAAI;AACF,eAAW,QAAQ,UAAU;AAAA,EAC/B,SAAS,OAAO;AAAA,EAEhB;AAGA,MAAI,UAAU;AACZ,QAAI;AAEF,YAAM,iBAAiB,MAAM,SAAS,cAAc,YAAY,EAAE;AAGlE,YAAM,gBAAgB,MAAM,SAAS,OAAO,MAAM;AAAA,QAChD,GAAG;AAAA,QACH,UAAU;AAAA;AAAA,MACZ,CAAC;AAED,gBAAU,kCAAkC,QAAQ,IAAI;AAAA,QACtD,OAAO;AAAA,QACP,WAAW;AAAA,MACb,CAAC;AAED,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,MAAM;AACZ;AAAA,QACE,0CAA0C,QAAQ,KAAK,IAAI,OAAO;AAAA,QAClE;AAAA,UACE,OAAO;AAAA,UACP,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IAEF;AAAA,EACF;AAEA,SAAO;AACT;","names":["configuration"]}
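
`formatCode` is a best-effort formatter: it lazily `require`s Prettier, and when it is available it resolves the project's Prettier config for the target path and formats the generated code; otherwise it returns the input unchanged. A minimal sketch (the import path is an assumption):

```ts
import { formatCode } from '@intlayer/chokidar'; // assumed re-export path

const raw = `const homeContent={key:'home',content:{title:'Home'}};`;

// With Prettier installed, `formatted` is pretty-printed using the config
// resolved for the given path; without Prettier, `formatted === raw`.
const formatted = await formatCode('/project/src/home.content.ts', raw);
```
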
package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs
CHANGED
@@ -31,26 +31,32 @@ __export(writeContentDeclaration_exports, {
   writeContentDeclaration: () => writeContentDeclaration
 });
 module.exports = __toCommonJS(writeContentDeclaration_exports);
-var import_fs = require("fs");
-var fsPromises = __toESM(require("fs/promises"));
-var import_path = require("path");
-var import_chokidar = require("@intlayer/chokidar");
 var import_built = __toESM(require("@intlayer/config/built"));
-var
+var import_unmerged_dictionaries_entry = __toESM(require("@intlayer/unmerged-dictionaries-entry"));
 var import_deep_equal = __toESM(require("deep-equal"));
+var import_fs = require("fs");
+var import_promises = require("fs/promises");
+var import_path = require("path");
+var import_prepareContentDeclaration = require('../prepareContentDeclaration.cjs');
+var import_writeJSFile = require('./writeJSFile.cjs');
 const DEFAULT_NEW_DICTIONARY_PATH = "intlayer-dictionaries";
-  const
-  const
-  const { baseDir } = content;
-  const newDictionaryRelativeLocationPath = newDictionariesPath ?? DEFAULT_NEW_DICTIONARY_PATH;
-  const newDictionaryLocationPath = `${baseDir}/${newDictionaryRelativeLocationPath}`;
-  const existingDictionary = import_dictionaries_entry.default[dictionary.key];
-  const preparedContentDeclaration = await (0, import_chokidar.prepareContentDeclaration)(dictionary);
+const formatContentDeclaration = async (dictionary) => {
+  const preparedContentDeclaration = await (0, import_prepareContentDeclaration.prepareContentDeclaration)(dictionary);
   const { filePath, $schema, ...dictionaryWithoutPath } = preparedContentDeclaration;
   const formattedContentDeclaration = {
     $schema: "https://intlayer.org/schema.json",
     ...dictionaryWithoutPath
   };
+  return formattedContentDeclaration;
+};
+const writeContentDeclaration = async (dictionary, config = import_built.default, newDictionariesPath) => {
+  const { content } = config;
+  const { baseDir } = content;
+  const newDictionaryRelativeLocationPath = newDictionariesPath ?? DEFAULT_NEW_DICTIONARY_PATH;
+  const newDictionaryLocationPath = `${baseDir}/${newDictionaryRelativeLocationPath}`;
+  const existingDictionary = import_unmerged_dictionaries_entry.default[dictionary.key].filter((el) => el.filePath === dictionary.filePath);
+  const filePath = dictionary.filePath;
+  const formattedContentDeclaration = await formatContentDeclaration(dictionary);
   if (existingDictionary) {
     if ((0, import_deep_equal.default)(existingDictionary, dictionary)) {
       return {
@@ -59,28 +65,8 @@ const writeContentDeclaration = async (dictionary, config = import_built.default
       };
     } else {
       if (filePath) {
-
-
-        const contentDeclarationPath = `${baseDir}/${filePath}`;
-        await writeFileWithDirectories(
-          contentDeclarationPath,
-          formattedContentDeclaration
-        );
-        return { status: "updated", path: contentDeclarationPath };
-      } else {
-        await fsPromises.rm(filePath);
-        const dictionariesDirPath = (0, import_path.dirname)(filePath);
-        const dictionariesFileName = (0, import_path.basename)(filePath, (0, import_path.extname)(filePath));
-        const newFilePath = `${dictionariesDirPath}/${dictionariesFileName}.json`;
-        await writeFileWithDirectories(
-          newFilePath,
-          formattedContentDeclaration
-        );
-        return {
-          status: "replaced",
-          path: newFilePath
-        };
-      }
+        await writeFileWithDirectories(filePath, formattedContentDeclaration);
+        return { status: "updated", path: filePath };
       } else {
         const contentDeclarationPath = `${newDictionaryLocationPath}/${dictionary.key}.content.json`;
         await writeFileWithDirectories(
@@ -110,10 +96,23 @@ const writeFileWithDirectories = async (filePath, data) => {
     const dir = (0, import_path.dirname)(filePath);
     const directoryExists = (0, import_fs.existsSync)(dir);
     if (!directoryExists) {
-      await
+      await (0, import_promises.mkdir)(dir, { recursive: true });
+    }
+    const extention = (0, import_path.extname)(filePath);
+    const acceptedExtensions = import_built.default.content.fileExtensions.map(
+      (extention2) => (0, import_path.extname)(extention2)
+    );
+    if (!acceptedExtensions.includes(extention)) {
+      throw new Error(
+        `Invalid file extension: ${extention}, file: ${filePath}`
+      );
+    }
+    if (extention === ".json") {
+      const jsonDictionary = JSON.stringify(data, null, 2);
+      await (0, import_promises.writeFile)(filePath, jsonDictionary);
+    } else {
+      await (0, import_writeJSFile.writeJSFile)(filePath, data);
     }
-    const jsonDictionary = JSON.stringify(data, null, 2);
-    await fsPromises.writeFile(filePath, jsonDictionary);
   } catch (error) {
     throw new Error(`Error writing file to ${filePath}: ${error}`);
   }
package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import
{"version":3,"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import configuration from '@intlayer/config/built';\nimport type { IntlayerConfig } from '@intlayer/config/client';\nimport type { Dictionary } from '@intlayer/core';\nimport dictionariesRecord from '@intlayer/unmerged-dictionaries-entry';\nimport deepEqual from 'deep-equal';\nimport { existsSync } from 'fs';\nimport { mkdir, writeFile } from 'fs/promises';\nimport { dirname, extname } from 'path';\nimport { prepareContentDeclaration } from '../prepareContentDeclaration';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { writeJSFile } from './writeJSFile';\n\nconst DEFAULT_NEW_DICTIONARY_PATH = 'intlayer-dictionaries';\n\nconst formatContentDeclaration = async (dictionary: Dictionary) => {\n // Clean Markdown, Insertion, File, etc. node metadata\n const preparedContentDeclaration =\n await prepareContentDeclaration(dictionary);\n\n // Remove the filePath from the dictionary and set $schema\n const { filePath, $schema, ...dictionaryWithoutPath } =\n preparedContentDeclaration;\n\n const formattedContentDeclaration = {\n $schema: 'https://intlayer.org/schema.json',\n ...dictionaryWithoutPath,\n };\n\n return formattedContentDeclaration;\n};\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n config: IntlayerConfig = configuration,\n newDictionariesPath?: string\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = config;\n const { baseDir } = content;\n\n const newDictionaryRelativeLocationPath =\n newDictionariesPath ?? DEFAULT_NEW_DICTIONARY_PATH;\n const newDictionaryLocationPath = `${baseDir}/${newDictionaryRelativeLocationPath}`;\n\n const existingDictionary = (\n dictionariesRecord[dictionary.key] as Dictionary[]\n ).filter((el) => el.filePath === dictionary.filePath);\n\n const filePath = dictionary.filePath;\n const formattedContentDeclaration =\n await formatContentDeclaration(dictionary);\n\n if (existingDictionary) {\n // Compare existing dictionary with distant dictionary\n if (deepEqual(existingDictionary, dictionary)) {\n // Up to date, nothing to do\n return {\n status: 'up-to-date',\n path: filePath!,\n };\n } else {\n if (filePath) {\n await writeFileWithDirectories(filePath, formattedContentDeclaration);\n\n return { status: 'updated', path: filePath };\n } else {\n // Write the dictionary to the intlayer-dictionaries directory\n const contentDeclarationPath = `${newDictionaryLocationPath}/${dictionary.key}.content.json`;\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration\n );\n\n return {\n status: 'reimported in new location',\n path: contentDeclarationPath,\n };\n }\n }\n } else {\n // No existing dictionary, write to new location\n const contentDeclarationPath = `${newDictionaryLocationPath}/${dictionary.key}.content.json`;\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n }\n};\n\nconst writeFileWithDirectories = async (\n filePath: string,\n data: string | Buffer\n): Promise<void> => {\n try {\n // Extract the directory from the file path\n const dir = dirname(filePath);\n\n // Check if the directory exists\n const directoryExists = existsSync(dir);\n\n if (!directoryExists) {\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n }\n\n const extention = extname(filePath);\n const acceptedExtensions 
= configuration.content.fileExtensions.map(\n (extention) => extname(extention)\n );\n\n if (!acceptedExtensions.includes(extention)) {\n throw new Error(\n `Invalid file extension: ${extention}, file: ${filePath}`\n );\n }\n\n if (extention === '.json') {\n const jsonDictionary = JSON.stringify(data, null, 2);\n\n // Write the file\n await writeFile(filePath, jsonDictionary);\n } else {\n await writeJSFile(filePath, data as unknown as Dictionary);\n }\n } catch (error) {\n throw new Error(`Error writing file to ${filePath}: ${error}`);\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAA0B;AAG1B,yCAA+B;AAC/B,wBAAsB;AACtB,gBAA2B;AAC3B,sBAAiC;AACjC,kBAAiC;AACjC,uCAA0C;AAE1C,yBAA4B;AAE5B,MAAM,8BAA8B;AAEpC,MAAM,2BAA2B,OAAO,eAA2B;AAEjE,QAAM,6BACJ,UAAM,4DAA0B,UAAU;AAG5C,QAAM,EAAE,UAAU,SAAS,GAAG,sBAAsB,IAClD;AAEF,QAAM,8BAA8B;AAAA,IAClC,SAAS;AAAA,IACT,GAAG;AAAA,EACL;AAEA,SAAO;AACT;AAEO,MAAM,0BAA0B,OACrC,YACA,SAAyB,aAAAA,SACzB,wBACwD;AACxD,QAAM,EAAE,QAAQ,IAAI;AACpB,QAAM,EAAE,QAAQ,IAAI;AAEpB,QAAM,oCACJ,uBAAuB;AACzB,QAAM,4BAA4B,GAAG,OAAO,IAAI,iCAAiC;AAEjF,QAAM,qBACJ,mCAAAC,QAAmB,WAAW,GAAG,EACjC,OAAO,CAAC,OAAO,GAAG,aAAa,WAAW,QAAQ;AAEpD,QAAM,WAAW,WAAW;AAC5B,QAAM,8BACJ,MAAM,yBAAyB,UAAU;AAE3C,MAAI,oBAAoB;AAEtB,YAAI,kBAAAC,SAAU,oBAAoB,UAAU,GAAG;AAE7C,aAAO;AAAA,QACL,QAAQ;AAAA,QACR,MAAM;AAAA,MACR;AAAA,IACF,OAAO;AACL,UAAI,UAAU;AACZ,cAAM,yBAAyB,UAAU,2BAA2B;AAEpE,eAAO,EAAE,QAAQ,WAAW,MAAM,SAAS;AAAA,MAC7C,OAAO;AAEL,cAAM,yBAAyB,GAAG,yBAAyB,IAAI,WAAW,GAAG;AAE7E,cAAM;AAAA,UACJ;AAAA,UACA;AAAA,QACF;AAEA,eAAO;AAAA,UACL,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AAEL,UAAM,yBAAyB,GAAG,yBAAyB,IAAI,WAAW,GAAG;AAE7E,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,IACF;AAEA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR,MAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,MAAM,2BAA2B,OAC/B,UACA,SACkB;AAClB,MAAI;AAEF,UAAM,UAAM,qBAAQ,QAAQ;AAG5B,UAAM,sBAAkB,sBAAW,GAAG;AAEtC,QAAI,CAAC,iBAAiB;AAEpB,gBAAM,uBAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACtC;AAEA,UAAM,gBAAY,qBAAQ,QAAQ;AAClC,UAAM,qBAAqB,aAAAF,QAAc,QAAQ,eAAe;AAAA,MAC9D,CAACG,mBAAc,qBAAQA,UAAS;AAAA,IAClC;AAEA,QAAI,CAAC,mBAAmB,SAAS,SAAS,GAAG;AAC3C,YAAM,IAAI;AAAA,QACR,2BAA2B,SAAS,WAAW,QAAQ;AAAA,MACzD;AAAA,IACF;AAEA,QAAI,cAAc,SAAS;AACzB,YAAM,iBAAiB,KAAK,UAAU,MAAM,MAAM,CAAC;AAGnD,gBAAM,2BAAU,UAAU,cAAc;AAAA,IAC1C,OAAO;AACL,gBAAM,gCAAY,UAAU,IAA6B;AAAA,IAC3D;AAAA,EACF,SAAS,OAAO;AACd,UAAM,IAAI,MAAM,yBAAyB,QAAQ,KAAK,KAAK,EAAE;AAAA,EAC/D;AACF;","names":["configuration","dictionariesRecord","deepEqual","extention"]}
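
Per the embedded source, `writeContentDeclaration` now resolves the existing declaration from `@intlayer/unmerged-dictionaries-entry` by key and `filePath`, updates the file in place when a path exists, and otherwise imports the dictionary into `<baseDir>/intlayer-dictionaries/<key>.content.json`; JSON files are written directly, while other accepted extensions are delegated to the new `writeJSFile` helper. A hedged usage sketch (the import path and dictionary shape are assumptions):

```ts
import { writeContentDeclaration } from '@intlayer/chokidar'; // assumed re-export path

const run = async () => {
  const { status, path } = await writeContentDeclaration({
    key: 'home',
    filePath: 'src/home.content.ts', // existing declaration to update in place
    content: { title: 'Home' },
  } as any);

  // Statuses seen in the source: 'up-to-date', 'updated', 'imported',
  // 'reimported in new location'
  console.log(status, path);
};
```
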