@intlayer/chokidar 7.5.0-canary.0 → 7.5.0
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs +4 -4
- package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/processContentDeclaration.cjs +2 -2
- package/dist/cjs/buildIntlayerDictionary/processContentDeclaration.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs +7 -7
- package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs +5 -5
- package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeMergedDictionary.cjs +5 -5
- package/dist/cjs/buildIntlayerDictionary/writeMergedDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeRemoteDictionary.cjs +5 -5
- package/dist/cjs/buildIntlayerDictionary/writeRemoteDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeUnmergedDictionary.cjs +4 -4
- package/dist/cjs/buildIntlayerDictionary/writeUnmergedDictionary.cjs.map +1 -1
- package/dist/cjs/cleanOutputDir.cjs +2 -2
- package/dist/cjs/cleanOutputDir.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/createDictionaryEntryPoint.cjs +16 -11
- package/dist/cjs/createDictionaryEntryPoint/createDictionaryEntryPoint.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs +4 -4
- package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDictionariesPath.cjs +2 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs +2 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs +2 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs +2 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs +2 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createType/createModuleAugmentation.cjs +3 -3
- package/dist/cjs/createType/createModuleAugmentation.cjs.map +1 -1
- package/dist/cjs/createType/createType.cjs +3 -3
- package/dist/cjs/createType/createType.cjs.map +1 -1
- package/dist/cjs/fetchDistantDictionaries.cjs +6 -6
- package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
- package/dist/cjs/filterInvalidDictionaries.cjs +3 -3
- package/dist/cjs/filterInvalidDictionaries.cjs.map +1 -1
- package/dist/cjs/formatDictionary.cjs +7 -7
- package/dist/cjs/formatDictionary.cjs.map +1 -1
- package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs +2 -2
- package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map +1 -1
- package/dist/cjs/handleAdditionalContentDeclarationFile.cjs +2 -2
- package/dist/cjs/handleAdditionalContentDeclarationFile.cjs.map +1 -1
- package/dist/cjs/handleContentDeclarationFileChange.cjs +2 -2
- package/dist/cjs/handleContentDeclarationFileChange.cjs.map +1 -1
- package/dist/cjs/handleUnlinkedContentDeclarationFile.cjs +2 -2
- package/dist/cjs/handleUnlinkedContentDeclarationFile.cjs.map +1 -1
- package/dist/cjs/listGitFiles.cjs +4 -4
- package/dist/cjs/listGitFiles.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/getIntlayerBundle.cjs +5 -5
- package/dist/cjs/loadDictionaries/getIntlayerBundle.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs +4 -4
- package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs +11 -11
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs +6 -6
- package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/log.cjs +17 -17
- package/dist/cjs/loadDictionaries/log.cjs.map +1 -1
- package/dist/cjs/prepareIntlayer.cjs +14 -14
- package/dist/cjs/prepareIntlayer.cjs.map +1 -1
- package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs +2 -2
- package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs.map +1 -1
- package/dist/cjs/transformFiles/transformFiles.cjs +9 -9
- package/dist/cjs/transformFiles/transformFiles.cjs.map +1 -1
- package/dist/cjs/utils/formatter.cjs +7 -7
- package/dist/cjs/utils/formatter.cjs.map +1 -1
- package/dist/cjs/utils/runOnce.cjs +4 -4
- package/dist/cjs/utils/runOnce.cjs.map +1 -1
- package/dist/cjs/utils/runParallel/index.cjs +1 -1
- package/dist/cjs/utils/runParallel/index.cjs.map +1 -1
- package/dist/cjs/watcher.cjs +5 -5
- package/dist/cjs/watcher.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs +2 -2
- package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs +14 -14
- package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/transformJSFile.cjs +86 -86
- package/dist/cjs/writeContentDeclaration/transformJSFile.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs +5 -5
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs +4 -4
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs.map +1 -1
- package/dist/esm/createDictionaryEntryPoint/createDictionaryEntryPoint.mjs +14 -9
- package/dist/esm/createDictionaryEntryPoint/createDictionaryEntryPoint.mjs.map +1 -1
- package/dist/esm/createDictionaryEntryPoint/generateDictionaryListContent.mjs +2 -2
- package/dist/esm/createDictionaryEntryPoint/generateDictionaryListContent.mjs.map +1 -1
- package/dist/esm/utils/runParallel/index.mjs +1 -1
- package/dist/esm/utils/runParallel/index.mjs.map +1 -1
- package/dist/types/buildIntlayerDictionary/buildIntlayerDictionary.d.ts +2 -2
- package/dist/types/buildIntlayerDictionary/writeDynamicDictionary.d.ts +3 -3
- package/dist/types/buildIntlayerDictionary/writeFetchDictionary.d.ts +3 -3
- package/dist/types/buildIntlayerDictionary/writeMergedDictionary.d.ts +2 -2
- package/dist/types/buildIntlayerDictionary/writeRemoteDictionary.d.ts +2 -2
- package/dist/types/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts +2 -2
- package/dist/types/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts.map +1 -1
- package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts +2 -2
- package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts.map +1 -1
- package/dist/types/listDictionariesPath.d.ts +2 -2
- package/dist/types/listDictionariesPath.d.ts.map +1 -1
- package/dist/types/loadDictionaries/loadRemoteDictionaries.d.ts +2 -2
- package/package.json +13 -13
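Most of the churn in this release is mechanical: in the 7.5.0 bundles, node-type handling resolves `getNodeType` from `@intlayer/core` and the `NodeType` constants from `@intlayer/types`, and every node-type check references those two modules explicitly. The hunks excerpted below come from `dist/cjs/writeContentDeclaration/transformJSFile.cjs` (+86 -86), where this pattern repeats throughout. A minimal sketch of the recurring pattern (the `_intlayer_core`/`_intlayer_types` binding names are taken from the new bundle; the `describeNode` helper is illustrative only):

```js
// Sketch of the node-type dispatch pattern repeated in the 7.5.0 CJS output.
let _intlayer_core = require("@intlayer/core");
let _intlayer_types = require("@intlayer/types");

const describeNode = (node) => {
  // getNodeType inspects a content node and returns one of the NodeType values (or a falsy value).
  const nodeType = (0, _intlayer_core.getNodeType)(node);
  if (nodeType === _intlayer_types.NodeType.Translation) return "translation";
  if (nodeType === _intlayer_types.NodeType.Markdown) return "markdown";
  if (nodeType === _intlayer_types.NodeType.File) return "file";
  return "other";
};
```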
@@ -1,6 +1,6 @@
 const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
-let …
-let …
+let _intlayer_core = require("@intlayer/core");
+let _intlayer_types = require("@intlayer/types");
 let ts_morph = require("ts-morph");
 
 //#region src/writeContentDeclaration/transformJSFile.ts
@@ -119,8 +119,8 @@ const buildGenderInitializer = (genderMap) => {
 */
 const buildInsertionInitializer = (insertionContent) => {
 if (typeof insertionContent === "string") return `insert(${JSON.stringify(insertionContent)})`;
-if ((0, …
-const translationMap = insertionContent[ …
+if ((0, _intlayer_core.getNodeType)(insertionContent) === _intlayer_types.NodeType.Translation) {
+const translationMap = insertionContent[_intlayer_types.NodeType.Translation] ?? {};
 if (!Object.values(translationMap).every((translationValue) => typeof translationValue === "string")) return void 0;
 return `insert(${buildTranslationInitializer(translationMap)})`;
 }
@@ -144,13 +144,13 @@ const buildFileInitializer = (filePath) => {
 */
 const buildMarkdownInitializer = (markdownContent) => {
 if (typeof markdownContent === "string") return `md(${JSON.stringify(markdownContent)})`;
-if ((0, …
-const translationMap = markdownContent[ …
+if ((0, _intlayer_core.getNodeType)(markdownContent) === _intlayer_types.NodeType.Translation) {
+const translationMap = markdownContent[_intlayer_types.NodeType.Translation] ?? {};
 if (!Object.values(translationMap).every((translationValue) => typeof translationValue === "string")) return void 0;
 return `md(${buildTranslationInitializer(translationMap)})`;
 }
-if ((0, …
-const filePath = markdownContent[ …
+if ((0, _intlayer_core.getNodeType)(markdownContent) === _intlayer_types.NodeType.File) {
+const filePath = markdownContent[_intlayer_types.NodeType.File];
 const fileInitializer = buildFileInitializer(filePath);
 if (!fileInitializer) return void 0;
 return `md(${fileInitializer})`;
@@ -457,21 +457,21 @@ const processArrayContent = (contentObject, propertyKey, arrayValue, existingPro
 }
 serializedArrayElements.push(serializedElementValue);
 }
-const elementNodeType = (0, …
-if (elementNodeType === …
-else if (elementNodeType === …
-else if (elementNodeType === …
-else if (elementNodeType === …
-else if (elementNodeType === …
+const elementNodeType = (0, _intlayer_core.getNodeType)(currentElement);
+if (elementNodeType === _intlayer_types.NodeType.Translation) requiredImports.add("t");
+else if (elementNodeType === _intlayer_types.NodeType.Enumeration) requiredImports.add("enu");
+else if (elementNodeType === _intlayer_types.NodeType.Condition) requiredImports.add("cond");
+else if (elementNodeType === _intlayer_types.NodeType.Gender) requiredImports.add("gender");
+else if (elementNodeType === _intlayer_types.NodeType.Insertion) {
 requiredImports.add("insert");
-const insertionContent = currentElement[ …
-if (typeof insertionContent === "object" && insertionContent !== null && (0, …
-} else if (elementNodeType === …
+const insertionContent = currentElement[_intlayer_types.NodeType.Insertion];
+if (typeof insertionContent === "object" && insertionContent !== null && (0, _intlayer_core.getNodeType)(insertionContent) === _intlayer_types.NodeType.Translation) requiredImports.add("t");
+} else if (elementNodeType === _intlayer_types.NodeType.Markdown) {
 requiredImports.add("md");
-const markdownContent = currentElement[ …
-if (typeof markdownContent === "object" && markdownContent !== null && (0, …
-} else if (elementNodeType === …
-else if (elementNodeType === …
+const markdownContent = currentElement[_intlayer_types.NodeType.Markdown];
+if (typeof markdownContent === "object" && markdownContent !== null && (0, _intlayer_core.getNodeType)(markdownContent) === _intlayer_types.NodeType.File) requiredImports.add("file");
+} else if (elementNodeType === _intlayer_types.NodeType.File) requiredImports.add("file");
+else if (elementNodeType === _intlayer_types.NodeType.Nested) requiredImports.add("nest");
 } else {
 hasUnsupportedContent = true;
 break;
@@ -574,15 +574,15 @@ const processPrimitiveContent = (contentObject, propertyKey, primitiveValue, exi
 * @returns True if the content was modified
 */
 const processComplexContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, effectiveFallbackLocale, requiredImports, sourceFile) => {
-switch ((0, …
-case …
-case …
-case …
-case …
-case …
-case …
-case …
-case …
+switch ((0, _intlayer_core.getNodeType)(contentNode)) {
+case _intlayer_types.NodeType.Translation: return processTranslationContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
+case _intlayer_types.NodeType.Enumeration: return processEnumerationContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
+case _intlayer_types.NodeType.Condition: return processConditionContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
+case _intlayer_types.NodeType.Gender: return processGenderContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
+case _intlayer_types.NodeType.Insertion: return processInsertionContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
+case _intlayer_types.NodeType.Markdown: return processMarkdownContent(contentObject, propertyKey, contentNode, existingPropertyKeys, effectiveFallbackLocale, requiredImports, sourceFile);
+case _intlayer_types.NodeType.File: return processFileContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
+case _intlayer_types.NodeType.Nested: return processNestedContent(contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile);
 default: return false;
 }
 };
@@ -599,9 +599,9 @@ const processComplexContent = (contentObject, propertyKey, contentNode, existing
 * @returns True if the content was modified
 */
 const processTranslationContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const translationMap = contentNode[ …
+const translationMap = contentNode[_intlayer_types.NodeType.Translation] ?? {};
 const areAllValuesStringsOrArrays = Object.values(translationMap).every((translationValue) => typeof translationValue === "string" || Array.isArray(translationValue));
-if (Object.values(translationMap).some((translationValue) => typeof translationValue === "object" && translationValue !== null && !Array.isArray(translationValue) && (0, …
+if (Object.values(translationMap).some((translationValue) => typeof translationValue === "object" && translationValue !== null && !Array.isArray(translationValue) && (0, _intlayer_core.getNodeType)(translationValue) !== _intlayer_types.NodeType.Text) && !areAllValuesStringsOrArrays) {
 if (!existingPropertyKeys.has(propertyKey)) {
 const spreadTargetObject = findSpreadTargetObjectForKey(contentObject, propertyKey, sourceFile);
 if (spreadTargetObject) return processTranslationContent(spreadTargetObject, propertyKey, contentNode, getExistingPropertyNames(spreadTargetObject), requiredImports, sourceFile);
@@ -617,17 +617,17 @@ const processTranslationContent = (contentObject, propertyKey, contentNode, exis
 break;
 }
 translationParts$1.push(`${formattedLocaleKey}: ${serializedValue}`);
-const nodeType = (0, …
-if (nodeType === …
+const nodeType = (0, _intlayer_core.getNodeType)(translationValue);
+if (nodeType === _intlayer_types.NodeType.Markdown) {
 requiredImports.add("md");
-const markdownContent = translationValue[ …
-if (typeof markdownContent === "object" && markdownContent !== null && (0, …
-} else if (nodeType === …
-else if (nodeType === …
-else if (nodeType === …
-else if (nodeType === …
-else if (nodeType === …
-else if (nodeType === …
+const markdownContent = translationValue[_intlayer_types.NodeType.Markdown];
+if (typeof markdownContent === "object" && markdownContent !== null && (0, _intlayer_core.getNodeType)(markdownContent) === _intlayer_types.NodeType.File) requiredImports.add("file");
+} else if (nodeType === _intlayer_types.NodeType.File) requiredImports.add("file");
+else if (nodeType === _intlayer_types.NodeType.Insertion) requiredImports.add("insert");
+else if (nodeType === _intlayer_types.NodeType.Enumeration) requiredImports.add("enu");
+else if (nodeType === _intlayer_types.NodeType.Condition) requiredImports.add("cond");
+else if (nodeType === _intlayer_types.NodeType.Gender) requiredImports.add("gender");
+else if (nodeType === _intlayer_types.NodeType.Nested) requiredImports.add("nest");
 } else if (typeof translationValue === "string") translationParts$1.push(`${formattedLocaleKey}: ${JSON.stringify(translationValue)}`);
 else if (Array.isArray(translationValue)) {
 const serializedArrayElements = translationValue.map((arrayElement) => JSON.stringify(arrayElement)).join(", ");
@@ -700,7 +700,7 @@ const processTranslationContent = (contentObject, propertyKey, contentNode, exis
 * @returns True if the content was modified
 */
 const processEnumerationContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const enumerationMap = contentNode[ …
+const enumerationMap = contentNode[_intlayer_types.NodeType.Enumeration];
 if (!Object.values(enumerationMap).every((enumerationValue) => typeof enumerationValue === "string")) return false;
 const enumerationInitializerText = buildEnumerationInitializer(enumerationMap);
 if (!enumerationInitializerText) return false;
@@ -737,7 +737,7 @@ const processEnumerationContent = (contentObject, propertyKey, contentNode, exis
 * @returns True if the content was modified
 */
 const processConditionContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const conditionMap = contentNode[ …
+const conditionMap = contentNode[_intlayer_types.NodeType.Condition];
 if (Object.values(conditionMap).every((conditionValue) => typeof conditionValue === "string")) {
 const conditionInitializerText = buildConditionInitializer(conditionMap);
 if (!conditionInitializerText) return false;
@@ -777,20 +777,20 @@ const processConditionContent = (contentObject, propertyKey, contentNode, existi
 requiredImports.add("cond");
 let hasModifications = false;
 for (const [conditionKey, conditionValue] of Object.entries(conditionMap)) {
-const nodeType = (0, …
+const nodeType = (0, _intlayer_core.getNodeType)(conditionValue);
 if (!nodeType) continue;
 let condProperty = condArgument.getProperty(conditionKey);
 if (!condProperty) condProperty = condArgument.getProperty(stringifyKey(conditionKey));
 if (!condProperty || !ts_morph.Node.isPropertyAssignment(condProperty)) continue;
 const condValueInitializer = condProperty.getInitializer();
 if (!condValueInitializer) continue;
-if (nodeType === …
+if (nodeType === _intlayer_types.NodeType.Translation) {
 if (!ts_morph.Node.isCallExpression(condValueInitializer)) continue;
 const tCallExpression = condValueInitializer.getExpression();
 if (!ts_morph.Node.isIdentifier(tCallExpression) || tCallExpression.getText() !== "t") continue;
 const tArgument = condValueInitializer.getArguments()[0];
 if (!tArgument || !ts_morph.Node.isObjectLiteralExpression(tArgument)) continue;
-const translationMap = conditionValue[ …
+const translationMap = conditionValue[_intlayer_types.NodeType.Translation];
 if (!translationMap || typeof translationMap !== "object") continue;
 const existingTranslationMap = {};
 for (const propertyAssignment of tArgument.getProperties()) {
@@ -845,7 +845,7 @@ const processConditionContent = (contentObject, propertyKey, contentNode, existi
 * @returns True if the content was modified
 */
 const processGenderContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const genderMap = contentNode[ …
+const genderMap = contentNode[_intlayer_types.NodeType.Gender];
 if (!Object.values(genderMap).every((genderValue) => typeof genderValue === "string")) return false;
 const genderInitializerText = buildGenderInitializer(genderMap);
 if (!genderInitializerText) return false;
@@ -882,14 +882,14 @@ const processGenderContent = (contentObject, propertyKey, contentNode, existingP
 * @returns True if the content was modified
 */
 const processInsertionContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const insertionContent = contentNode[ …
+const insertionContent = contentNode[_intlayer_types.NodeType.Insertion];
 const insertionInitializerText = buildInsertionInitializer(insertionContent);
 if (!insertionInitializerText) return false;
 if (!existingPropertyKeys.has(propertyKey)) {
 const spreadTargetObject = findSpreadTargetObjectForKey(contentObject, propertyKey, sourceFile);
 if (spreadTargetObject) return processInsertionContent(spreadTargetObject, propertyKey, contentNode, getExistingPropertyNames(spreadTargetObject), requiredImports, sourceFile);
 requiredImports.add("insert");
-if (typeof insertionContent === "object" && insertionContent !== null && (0, …
+if (typeof insertionContent === "object" && insertionContent !== null && (0, _intlayer_core.getNodeType)(insertionContent) === _intlayer_types.NodeType.Translation) requiredImports.add("t");
 contentObject.addPropertyAssignment({
 name: propertyKey,
 initializer: insertionInitializerText
@@ -897,9 +897,9 @@ const processInsertionContent = (contentObject, propertyKey, contentNode, existi
 return true;
 }
 const existingInsertion = readExistingInsertion(contentObject, propertyKey);
-if (!(typeof insertionContent === "string" && existingInsertion?.kind === "string" && existingInsertion.value === insertionContent || typeof insertionContent === "object" && insertionContent !== null && (0, …
+if (!(typeof insertionContent === "string" && existingInsertion?.kind === "string" && existingInsertion.value === insertionContent || typeof insertionContent === "object" && insertionContent !== null && (0, _intlayer_core.getNodeType)(insertionContent) === _intlayer_types.NodeType.Translation && existingInsertion?.kind === "translation" && areStringMapsEqual(insertionContent[_intlayer_types.NodeType.Translation] ?? {}, existingInsertion.map))) {
 requiredImports.add("insert");
-if (typeof insertionContent === "object" && insertionContent !== null && (0, …
+if (typeof insertionContent === "object" && insertionContent !== null && (0, _intlayer_core.getNodeType)(insertionContent) === _intlayer_types.NodeType.Translation) requiredImports.add("t");
 const property = contentObject.getProperty(propertyKey);
 if (property && ts_morph.Node.isPropertyAssignment(property)) {
 property.setInitializer(insertionInitializerText);
@@ -922,23 +922,23 @@ const processInsertionContent = (contentObject, propertyKey, contentNode, existi
 * @returns True if the content was modified
 */
 const processMarkdownContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, effectiveFallbackLocale, requiredImports, sourceFile) => {
-const markdownContent = contentNode[ …
+const markdownContent = contentNode[_intlayer_types.NodeType.Markdown];
 const markdownInitializerText = buildMarkdownInitializer(markdownContent);
 if (!markdownInitializerText) return false;
 if (!existingPropertyKeys.has(propertyKey)) {
 const spreadTargetObject = findSpreadTargetObjectForKey(contentObject, propertyKey, sourceFile);
 if (spreadTargetObject) return processMarkdownContent(spreadTargetObject, propertyKey, contentNode, getExistingPropertyNames(spreadTargetObject), effectiveFallbackLocale, requiredImports, sourceFile);
 requiredImports.add("md");
-const markdownNodeType$1 = (0, …
-if (markdownNodeType$1 === …
-else if (markdownNodeType$1 === …
+const markdownNodeType$1 = (0, _intlayer_core.getNodeType)(markdownContent);
+if (markdownNodeType$1 === _intlayer_types.NodeType.File) requiredImports.add("file");
+else if (markdownNodeType$1 === _intlayer_types.NodeType.Translation) requiredImports.add("t");
 contentObject.addPropertyAssignment({
 name: propertyKey,
 initializer: markdownInitializerText
 });
 return true;
 }
-const markdownNodeType = (0, …
+const markdownNodeType = (0, _intlayer_core.getNodeType)(markdownContent);
 const existingSimpleMarkdown = readExistingMarkdown(contentObject, propertyKey);
 const existingMarkdownTranslationMap = readExistingMarkdownTranslationMap(contentObject, propertyKey);
 const existingTranslationTypeArguments = readExistingTypeArgsForCall(contentObject, propertyKey, "t");
@@ -956,8 +956,8 @@ const processMarkdownContent = (contentObject, propertyKey, contentNode, existin
 }
 return false;
 }
-if (markdownNodeType === …
-const markdownTranslationMap = markdownContent[ …
+if (markdownNodeType === _intlayer_types.NodeType.Translation) {
+const markdownTranslationMap = markdownContent[_intlayer_types.NodeType.Translation];
 if (!Object.values(markdownTranslationMap).every((translationValue) => typeof translationValue === "string")) return false;
 if (!areStringMapsEqual(markdownTranslationMap, existingMarkdownTranslationMap)) {
 requiredImports.add("md");
@@ -970,9 +970,9 @@ const processMarkdownContent = (contentObject, propertyKey, contentNode, existin
 }
 return false;
 }
-if (!(typeof markdownContent === "string" && existingSimpleMarkdown?.kind === "string" && existingSimpleMarkdown.value === markdownContent || markdownNodeType === …
+if (!(typeof markdownContent === "string" && existingSimpleMarkdown?.kind === "string" && existingSimpleMarkdown.value === markdownContent || markdownNodeType === _intlayer_types.NodeType.File && existingSimpleMarkdown?.kind === "file" && existingSimpleMarkdown.path === markdownContent[_intlayer_types.NodeType.File])) {
 requiredImports.add("md");
-if (markdownNodeType === …
+if (markdownNodeType === _intlayer_types.NodeType.File) requiredImports.add("file");
 const property = contentObject.getProperty(propertyKey);
 if (property && ts_morph.Node.isPropertyAssignment(property)) {
 property.setInitializer(markdownInitializerText);
@@ -994,7 +994,7 @@ const processMarkdownContent = (contentObject, propertyKey, contentNode, existin
 * @returns True if the content was modified
 */
 const processFileContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const filePath = contentNode[ …
+const filePath = contentNode[_intlayer_types.NodeType.File];
 const fileInitializerText = buildFileInitializer(filePath);
 if (!fileInitializerText) return false;
 if (!existingPropertyKeys.has(propertyKey)) {
@@ -1030,7 +1030,7 @@ const processFileContent = (contentObject, propertyKey, contentNode, existingPro
 * @returns True if the content was modified
 */
 const processNestedContent = (contentObject, propertyKey, contentNode, existingPropertyKeys, requiredImports, sourceFile) => {
-const nestedContent = contentNode[ …
+const nestedContent = contentNode[_intlayer_types.NodeType.Nested];
 const nestedInitializerText = buildNestedInitializer(nestedContent);
 if (!nestedInitializerText) return false;
 if (!existingPropertyKeys.has(propertyKey)) {
@@ -1118,8 +1118,8 @@ const processContentEntries = (contentObject, dictionaryContent, effectiveFallba
 if (processPrimitiveContent(contentObject, propertyKey, propertyValue, existingPropertyKeys, effectiveFallbackLocale, requiredImports, sourceFile)) contentWasChanged = true;
 continue;
 }
-const nodeType = (0, …
-if (nodeType !== …
+const nodeType = (0, _intlayer_core.getNodeType)(propertyValue);
+if (nodeType !== _intlayer_types.NodeType.Text && nodeType !== _intlayer_types.NodeType.Number && nodeType !== _intlayer_types.NodeType.Boolean && nodeType !== _intlayer_types.NodeType.Null) {
 if (processComplexContent(contentObject, propertyKey, propertyValue, existingPropertyKeys, effectiveFallbackLocale, requiredImports, sourceFile)) {
 contentWasChanged = true;
 continue;
@@ -1384,41 +1384,41 @@ const readExistingArraySerialized = (contentObject, propName) => {
 return serialized;
 };
 const serializeValue = (value) => {
-const nodeType = (0, …
-if (nodeType === …
-if (nodeType === …
-if (nodeType === …
-if (nodeType === …
-const translations = value[ …
+const nodeType = (0, _intlayer_core.getNodeType)(value);
+if (nodeType === _intlayer_types.NodeType.Text) return JSON.stringify(value);
+if (nodeType === _intlayer_types.NodeType.Number || nodeType === _intlayer_types.NodeType.Boolean) return String(value);
+if (nodeType === _intlayer_types.NodeType.Null) return "null";
+if (nodeType === _intlayer_types.NodeType.Translation) {
+const translations = value[_intlayer_types.NodeType.Translation] ?? {};
 if (!Object.values(translations).every((v) => typeof v === "string")) return void 0;
 return buildTranslationInitializer(translations);
 }
-if (nodeType === …
-const map = value[ …
+if (nodeType === _intlayer_types.NodeType.Enumeration) {
+const map = value[_intlayer_types.NodeType.Enumeration];
 return buildEnumerationInitializer(map);
 }
-if (nodeType === …
-const map = value[ …
+if (nodeType === _intlayer_types.NodeType.Condition) {
+const map = value[_intlayer_types.NodeType.Condition];
 return buildConditionInitializer(map);
 }
-if (nodeType === …
-const map = value[ …
+if (nodeType === _intlayer_types.NodeType.Gender) {
+const map = value[_intlayer_types.NodeType.Gender];
 return buildGenderInitializer(map);
 }
-if (nodeType === …
-const content = value[ …
+if (nodeType === _intlayer_types.NodeType.Insertion) {
+const content = value[_intlayer_types.NodeType.Insertion];
 return buildInsertionInitializer(content);
 }
-if (nodeType === …
-const content = value[ …
+if (nodeType === _intlayer_types.NodeType.Markdown) {
+const content = value[_intlayer_types.NodeType.Markdown];
 return buildMarkdownInitializer(content);
 }
-if (nodeType === …
-const path = value[ …
+if (nodeType === _intlayer_types.NodeType.File) {
+const path = value[_intlayer_types.NodeType.File];
 return buildFileInitializer(path);
 }
-if (nodeType === …
-const content = value[ …
+if (nodeType === _intlayer_types.NodeType.Nested) {
+const content = value[_intlayer_types.NodeType.Nested];
 return buildNestedInitializer(content);
 }
 };
@@ -1732,7 +1732,7 @@ const transformJSFile = async (fileContent, dictionary, fallbackLocale) => {
 } else if (Array.isArray(dictionary.content) && isContentArrayInSource) {
 if (processArrayContent(rootObject, "content", dictionary.content ?? [], getExistingPropertyNames(rootObject), effectiveFallbackLocale, requiredImports, sourceFile)) changed = true;
 } else if (isContentCallExpression) {
-if ((0, …
+if ((0, _intlayer_core.getNodeType)(dictionary.content)) {
 if (processComplexContent(rootObject, "content", dictionary.content, getExistingPropertyNames(rootObject), effectiveFallbackLocale, requiredImports, sourceFile)) changed = true;
 }
 }
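For context on what these helpers produce: transformJSFile rewrites content-declaration source files with ts-morph, emitting initializer calls such as `t(...)`, `md(...)`, and `insert(...)` and collecting the helper names it needs in `requiredImports`. A hypothetical declaration after such a rewrite might look like the sketch below; only the helper names and initializer shapes appear in this diff, so the `intlayer` import source, the `key` field, and the placeholder syntax are assumptions.

```js
// Hypothetical content declaration of the kind transformJSFile rewrites.
// The helper names (t, md, insert) match the requiredImports set in the diff;
// importing them from "intlayer" and the overall dictionary shape are assumptions.
const { t, md, insert } = require("intlayer");

module.exports = {
  key: "example",
  content: {
    title: t({ en: "Title", fr: "Titre" }), // Translation node -> t({...})
    body: md("## Hello"), // Markdown node -> md("...")
    greeting: insert(t({ en: "Hi {{name}}", fr: "Salut {{name}}" })), // Insertion wrapping a translation -> insert(t({...})); placeholder syntax illustrative
  },
};
```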