@abdess76/i18nkit 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +35 -0
- package/LICENSE +21 -0
- package/README.md +309 -0
- package/bin/cli.js +48 -0
- package/bin/commands/apply.js +48 -0
- package/bin/commands/check-sync.js +35 -0
- package/bin/commands/extract-utils.js +216 -0
- package/bin/commands/extract.js +198 -0
- package/bin/commands/find-orphans.js +36 -0
- package/bin/commands/help.js +34 -0
- package/bin/commands/index.js +79 -0
- package/bin/commands/translate.js +51 -0
- package/bin/commands/version.js +17 -0
- package/bin/commands/watch.js +34 -0
- package/bin/core/applier-utils.js +144 -0
- package/bin/core/applier.js +165 -0
- package/bin/core/args.js +147 -0
- package/bin/core/backup.js +74 -0
- package/bin/core/command-interface.js +69 -0
- package/bin/core/config.js +108 -0
- package/bin/core/context.js +86 -0
- package/bin/core/detector.js +152 -0
- package/bin/core/file-walker.js +159 -0
- package/bin/core/fs-adapter.js +56 -0
- package/bin/core/help-generator.js +208 -0
- package/bin/core/index.js +63 -0
- package/bin/core/json-utils.js +213 -0
- package/bin/core/key-generator.js +75 -0
- package/bin/core/log-utils.js +26 -0
- package/bin/core/orphan-finder.js +208 -0
- package/bin/core/parser-utils.js +187 -0
- package/bin/core/paths.js +60 -0
- package/bin/core/plugin-interface.js +83 -0
- package/bin/core/plugin-resolver-utils.js +166 -0
- package/bin/core/plugin-resolver.js +211 -0
- package/bin/core/sync-checker-utils.js +99 -0
- package/bin/core/sync-checker.js +199 -0
- package/bin/core/translator.js +197 -0
- package/bin/core/types.js +297 -0
- package/bin/core/watcher.js +119 -0
- package/bin/plugins/adapter-transloco.js +156 -0
- package/bin/plugins/parser-angular.js +56 -0
- package/bin/plugins/parser-primeng.js +79 -0
- package/bin/plugins/parser-typescript.js +66 -0
- package/bin/plugins/provider-deepl.js +65 -0
- package/bin/plugins/provider-mymemory.js +192 -0
- package/package.json +123 -0
- package/types/index.d.ts +85 -0
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* @fileoverview i18nkit core library public API.
|
|
5
|
+
* Exposes all extraction, translation, and synchronization utilities.
|
|
6
|
+
* @module i18nkit/core
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const json = require('./json-utils');
|
|
10
|
+
const keys = require('./key-generator');
|
|
11
|
+
const files = require('./file-walker');
|
|
12
|
+
const backup = require('./backup');
|
|
13
|
+
const sync = require('./sync-checker');
|
|
14
|
+
const orphans = require('./orphan-finder');
|
|
15
|
+
const apply = require('./applier');
|
|
16
|
+
const translate = require('./translator');
|
|
17
|
+
const watch = require('./watcher');
|
|
18
|
+
const plugins = require('./plugin-resolver');
|
|
19
|
+
const config = require('./config');
|
|
20
|
+
const detector = require('./detector');
|
|
21
|
+
const help = require('./help-generator');
|
|
22
|
+
const fsAdapter = require('./fs-adapter');
|
|
23
|
+
const logUtils = require('./log-utils');
|
|
24
|
+
|
|
25
|
+
// Aggregated public surface of the core library. Each entry re-exports a
// named function from its implementing module so consumers depend on a
// single require of this index rather than on individual core files.
module.exports = {
  // Filesystem adapter wiring (see fs-adapter for the swap mechanism).
  setFsAdapter: fsAdapter.setAdapter,
  resetFsAdapter: fsAdapter.resetAdapter,

  // CLI/config argument helper.
  getArgValue: config.getArgValue,

  // JSON reading/writing and nested-structure transforms (json-utils).
  readJsonFile: json.readJsonFile,
  readJsonFileSync: json.readJsonFileSync,
  writeJsonFile: json.writeJsonFile,
  flattenJson: json.flattenJson,
  unflattenJson: json.unflattenJson,
  mergeDeep: json.mergeDeep,
  setNestedValue: json.setNestedValue,
  normalizeData: json.normalizeData,

  // CLI output formatting.
  logListWithLimit: logUtils.logListWithLimit,

  // Translation-key generation.
  slugify: keys.slugify,
  pathToScope: keys.pathToScope,

  // Source-tree traversal.
  collectFiles: files.collectFiles,
  getFileContent: files.getFileContent,

  // Backup management.
  restoreBackups: backup.restoreBackups,
  getBackupFiles: backup.getBackupFiles,

  // High-level workflows: sync check, orphan scan, apply, translate, watch.
  checkSync: sync.checkSync,
  findOrphans: orphans.findOrphans,
  applyFindings: apply.applyFindings,
  applyTranslations: apply.applyTranslations,
  translateFile: translate.translateFile,
  watchFiles: watch.watchFiles,

  // Plugin loading and resolution.
  loadPlugins: plugins.loadPlugins,
  getResolver: plugins.getResolver,

  // Project detection and help text generation.
  detectProject: detector.detectProject,
  generateFullHelp: help.generateFullHelp,
};
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* @fileoverview JSON manipulation utilities for nested i18n structures.
|
|
5
|
+
* Handles flattening, unflattening, deep merging, and ICU message detection.
|
|
6
|
+
* @module json-utils
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const fs = require('./fs-adapter');
|
|
10
|
+
const path = require('path');
|
|
11
|
+
|
|
12
|
+
const KEY_SEPARATOR = ':::';
|
|
13
|
+
|
|
14
|
+
/**
 * Synchronously reads and parses a JSON file.
 * @param {string} filePath - File to read.
 * @param {boolean} [verbose=false] - When true, warn about read/parse failures.
 * @returns {Object|null} Parsed data, or null when the file is unreadable or invalid.
 */
function readJsonFileSync(filePath, verbose = false) {
  try {
    const raw = fs.readFileSync(filePath, 'utf-8');
    return JSON.parse(raw);
  } catch (err) {
    if (verbose) {
      console.warn(`Warning: Cannot parse ${filePath}: ${err.message}`);
    }
    return null;
  }
}

/**
 * Asynchronously reads and parses a JSON file.
 * @param {string} filePath - File to read.
 * @param {boolean} [verbose=false] - When true, warn about read/parse failures.
 * @returns {Promise<Object|null>} Parsed data, or null on failure.
 */
async function readJsonFile(filePath, verbose = false) {
  try {
    return JSON.parse(await fs.readFile(filePath, 'utf-8'));
  } catch (err) {
    if (verbose) {
      console.warn(`Warning: Cannot parse ${filePath}: ${err.message}`);
    }
    return null;
  }
}
|
|
46
|
+
|
|
47
|
+
/**
 * Serializes `data` as pretty-printed JSON (2-space indent) to `filePath`,
 * creating missing parent directories first.
 * @param {string} filePath - Destination path.
 * @param {Object} data - Value to serialize.
 * @returns {Promise<void>}
 */
async function writeJsonFile(filePath, data) {
  const payload = JSON.stringify(data, null, 2);
  await fs.mkdir(path.dirname(filePath), { recursive: true });
  await fs.writeFile(filePath, payload, 'utf-8');
}
|
|
56
|
+
|
|
57
|
+
/**
 * Walks `keyPath` (all but the last segment) inside `obj`, creating
 * intermediate objects as needed, and returns the direct parent object of
 * the final segment. Mutates `obj`.
 * @param {Object} obj
 * @param {string[]} keyPath
 * @returns {Object} The parent object of the final segment.
 */
function navigateToParent(obj, keyPath) {
  let current = obj;
  for (let i = 0; i < keyPath.length - 1; i++) {
    current[keyPath[i]] ??= {};
    current = current[keyPath[i]];
  }
  return current;
}

/**
 * Picks the key under which `value` will be stored. When `current` already
 * holds a different value for `key`, a short content hash is appended so
 * the existing entry is never clobbered.
 * Fix: use Object.hasOwn instead of truthiness so falsy existing values
 * ('' / 0 / false) are also protected from silent overwrite.
 * @param {Object} current - Parent object.
 * @param {string} key - Desired key.
 * @param {*} value - Value about to be written.
 * @returns {string} Either `key` or a hash-suffixed variant of it.
 */
function resolveFinalKey(current, key, value) {
  if (Object.hasOwn(current, key) && current[key] !== value) {
    return `${key}_${hashText(value)}`;
  }
  return key;
}

/**
 * Sets `value` at `keyPath` in the nested `obj`, de-duplicating colliding
 * keys via resolveFinalKey. Mutates `obj`.
 * Fix: `isNew` now reflects the key actually written (the possibly
 * hash-suffixed one), so a collision that creates a fresh suffixed entry
 * is correctly reported as new.
 * @param {Object} obj
 * @param {string[]} keyPath
 * @param {*} value
 * @returns {{path: string[], isNew: boolean}} The effective path and
 *   whether the written key did not previously exist.
 */
function setNestedValue(obj, keyPath, value) {
  const current = navigateToParent(obj, keyPath);
  const finalKey = resolveFinalKey(current, keyPath.at(-1), value);
  const isNew = !Object.hasOwn(current, finalKey);
  current[finalKey] = value;
  return { path: [...keyPath.slice(0, -1), finalKey], isNew };
}
|
|
81
|
+
|
|
82
|
+
/**
 * Reads the value at `keyPath` inside a nested object.
 * Returns undefined when any intermediate node is missing or not
 * traversable (null or non-object).
 * @param {Object} obj - Root object.
 * @param {string[]} keyPath - Path segments to follow.
 * @returns {*} The value at the path, or undefined.
 */
function getNestedValue(obj, keyPath) {
  return keyPath.reduce(
    (node, segment) => (node && typeof node === 'object' ? node[segment] : undefined),
    obj,
  );
}
|
|
92
|
+
|
|
93
|
+
// True for any non-null value of type 'object' (arrays included).
const isNestedObject = value => value !== null && typeof value === 'object';
// Joins a parent prefix and a key with a dot; no leading dot for roots.
const buildKey = (prefix, key) => (prefix ? `${prefix}.${key}` : key);

/**
 * Lists every leaf key of a nested object in dot notation.
 * @param {Object} obj - Object to walk.
 * @param {string} [prefix=''] - Dot-path accumulated so far.
 * @returns {string[]} All leaf key paths.
 */
function flattenKeys(obj, prefix = '') {
  const leaves = [];
  for (const [key, value] of Object.entries(obj)) {
    const fullKey = buildKey(prefix, key);
    if (isNestedObject(value)) {
      leaves.push(...flattenKeys(value, fullKey));
    } else {
      leaves.push(fullKey);
    }
  }
  return leaves;
}
|
|
102
|
+
|
|
103
|
+
// Escapes literal dots inside a single key segment (dots otherwise denote
// nesting) so flattening can later be reversed without ambiguity.
const escapeKey = key => (key.includes('.') ? key.replaceAll('.', KEY_SEPARATOR) : key);
// Inverse of escapeKey: restores literal dots in one segment.
const unescapeKey = key => (key.includes(KEY_SEPARATOR) ? key.replaceAll(KEY_SEPARATOR, '.') : key);

// True only for plain objects: excludes null and arrays.
const isPlainObject = value => value !== null && typeof value === 'object' && !Array.isArray(value);
|
|
107
|
+
|
|
108
|
+
/**
 * Flattens nested JSON to dot-notation keys. Literal dots inside a key
 * segment are escaped (see escapeKey) so the transform is reversible.
 * Rewritten to accumulate into a single result object: the previous
 * spread-inside-reduce version re-copied the accumulator on every key,
 * making flattening O(n^2) in the number of leaves.
 * @param {Object} obj
 * @param {string} [prefix='']
 * @returns {Record<string, string>}
 * @example
 * flattenJson({ user: { name: 'John' } }) // { 'user.name': 'John' }
 */
function flattenJson(obj, prefix = '') {
  const flat = {};
  for (const [key, value] of Object.entries(obj)) {
    const fullKey = buildKey(prefix, escapeKey(key));
    if (isPlainObject(value)) {
      Object.assign(flat, flattenJson(value, fullKey));
    } else {
      flat[fullKey] = value;
    }
  }
  return flat;
}
|
|
124
|
+
|
|
125
|
+
/**
 * Creates (when needed) every intermediate object along `parts` except the
 * final segment, and returns the parent that will hold the final key.
 * Non-plain-object values on the way are replaced by fresh objects.
 * Mutates `obj`.
 * @param {Object} obj - Root object.
 * @param {string[]} parts - Full key path; only parts[0..n-2] are walked.
 * @returns {Object} Parent object of the final segment.
 */
function ensurePath(obj, parts) {
  let node = obj;
  for (const part of parts.slice(0, -1)) {
    if (!Object.hasOwn(node, part) || !isPlainObject(node[part])) {
      node[part] = {};
    }
    node = node[part];
  }
  return node;
}
|
|
133
|
+
|
|
134
|
+
/**
 * Restores nested structure from dot-notation keys; escaped segment dots
 * are decoded back via unescapeKey.
 * @param {Record<string, string>} obj
 * @returns {Object}
 * @example
 * unflattenJson({ 'user.name': 'John' }) // { user: { name: 'John' } }
 */
function unflattenJson(obj) {
  return Object.entries(obj).reduce((result, [key, value]) => {
    const parts = key.split('.').map(unescapeKey);
    const parent = ensurePath(result, parts);
    parent[parts.at(-1)] = value;
    return result;
  }, {});
}
|
|
149
|
+
|
|
150
|
+
/**
 * Decides the merged value for a single key.
 * Arrays from the source always win; plain objects are merged recursively;
 * for everything else an already-present target value is preserved.
 * @param {Object} result - Target object being built.
 * @param {string} key - Key being merged.
 * @param {*} value - Incoming source value.
 * @returns {*} The value to store under `key`.
 */
function mergeValue(result, key, value) {
  if (Array.isArray(value)) {
    return value;
  }
  if (isPlainObject(value)) {
    return mergeDeep(result[key] ?? {}, value);
  }
  return Object.hasOwn(result, key) ? result[key] : value;
}

/**
 * Deep merges source into target without mutating either argument;
 * existing target values are preserved, new keys are added.
 * @param {Object} target
 * @param {Object} source
 * @returns {Object} A new merged object.
 */
function mergeDeep(target, source) {
  const merged = { ...target };
  Object.entries(source).forEach(([key, value]) => {
    merged[key] = mergeValue(merged, key, value);
  });
  return merged;
}
|
|
173
|
+
|
|
174
|
+
/**
 * Deterministic short hash of a string: a 32-bit "x*31 + c" fold over
 * UTF-16 code units, rendered as up to six base-36 characters.
 * Used to de-duplicate colliding translation keys.
 * @param {string} text
 * @returns {string} Up to six base-36 characters.
 */
function hashText(text) {
  let hash = 0;
  // Iterate by index so surrogate pairs contribute both UTF-16 units.
  for (let i = 0; i < text.length; i += 1) {
    hash = ((hash << 5) - hash + text.charCodeAt(i)) | 0;
  }
  return Math.abs(hash).toString(36).substring(0, 6);
}
|
|
183
|
+
|
|
184
|
+
/**
 * Detects ICU MessageFormat syntax (plural, select, selectordinal).
 * Non-strings are never ICU messages.
 * @param {*} value
 * @returns {boolean}
 */
function isICUMessage(value) {
  return (
    typeof value === 'string' &&
    /\{[^}]+,\s*(plural|select|selectordinal)\s*,/.test(value)
  );
}
|
|
195
|
+
|
|
196
|
+
/**
 * Coerces translation data into nested form. Data is treated as flat when
 * the caller says so (`format === 'flat'`) or when the first value is a
 * string (heuristic for dot-notation maps).
 * @param {Object} data - Flat or nested translation data.
 * @param {string} format - Declared format ('flat' or anything else).
 * @returns {Object} Nested translation data.
 */
function normalizeData(data, format) {
  if (format === 'flat' || typeof Object.values(data)[0] === 'string') {
    return unflattenJson(data);
  }
  return data;
}
|
|
200
|
+
|
|
201
|
+
// Public API of json-utils. Read helpers resolve to null on read/parse
// failure instead of throwing; setNestedValue mutates its target object,
// the other transforms return new values.
module.exports = {
  readJsonFileSync,
  readJsonFile,
  writeJsonFile,
  setNestedValue,
  getNestedValue,
  flattenKeys,
  flattenJson,
  unflattenJson,
  mergeDeep,
  isICUMessage,
  normalizeData,
};
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* @fileoverview Translation key generation from source text.
|
|
5
|
+
* Slugifies text and derives scope from file paths.
|
|
6
|
+
* @module key-generator
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const path = require('path');
|
|
10
|
+
const { decodeHtmlEntities } = require('./parser-utils');
|
|
11
|
+
|
|
12
|
+
const MAX_KEY_LENGTH = 50;
|
|
13
|
+
|
|
14
|
+
/**
 * Converts text to a translation key: ASCII-folded lowercase words joined
 * by underscores, at most 6 words and MAX_KEY_LENGTH characters.
 * Falls back to 'text' when nothing usable remains.
 * @param {string} text
 * @returns {string}
 * @example
 * slugify("Hello World!") // "hello_world"
 */
function slugify(text) {
  const ascii = decodeHtmlEntities(text)
    .toLowerCase()
    .normalize('NFD')
    .replace(/[\u0300-\u036f]/g, '') // strip combining diacritics
    .replace(/'/g, '')               // drop apostrophes entirely
    .replace(/[^a-z0-9\s]/g, ' ');   // everything else becomes a space
  const words = ascii
    .trim()
    .split(/\s+/)
    .filter(word => word.length > 1) // single characters carry no meaning
    .slice(0, 6);
  return words.join('_').substring(0, MAX_KEY_LENGTH) || 'text';
}
|
|
37
|
+
|
|
38
|
+
// Structural folder names that carry no translation-scope meaning.
const IGNORED_SCOPE_FOLDERS = [
  'components',
  'pages',
  'shared',
  'common',
  'features',
  'dialogs',
  'forms',
  'ui',
  'lib',
];

/**
 * Derives scope array from file path (e.g., "users/profile.component.ts" → ["users", "profile"])
 * Structural folders are skipped; falls back to ['app'] when nothing remains.
 * @param {string} filePath
 * @param {string} baseDir
 * @returns {string[]}
 */
function pathToScope(filePath, baseDir) {
  const parts = path.relative(baseDir, filePath).split(path.sep);
  const fileName = parts
    .pop()
    .replace(/\.(component|html|ts)$/g, '')
    .replace(/\.component$/, '');

  const folders = parts.filter(folder => !IGNORED_SCOPE_FOLDERS.includes(folder));
  // Keep the file name unless it is the generic 'app' entry point or merely
  // repeats the last remaining folder name (parts was mutated by pop()).
  const withFile =
    fileName !== 'app' && fileName !== parts.at(-1) ? [...folders, fileName] : folders;
  const scope = withFile.filter(Boolean);

  return scope.length > 0 ? scope : ['app'];
}
|
|
71
|
+
|
|
72
|
+
// Public API of key-generator: text slugification and path-derived scoping.
module.exports = {
  slugify,
  pathToScope,
};
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* @fileoverview Logging utilities for CLI output formatting.
|
|
5
|
+
* @module log-utils
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
/**
 * Logs a labeled list, truncating after `limit` entries with a
 * "... and N more" footer. Produces no output at all for an empty list.
 * @param {Object} ctx
 * @param {Array} ctx.items - Items to print.
 * @param {string} ctx.label - Heading text (item count is appended).
 * @param {number} ctx.limit - Maximum number of items printed.
 * @param {Function} ctx.log - Sink called once per output line.
 * @param {string} [ctx.prefix=' - '] - Prefix for each item line.
 * @param {Function} [ctx.formatter] - Maps an item to display text.
 */
function logListWithLimit(ctx) {
  const { items, label, limit, log, prefix = ' - ', formatter = item => item } = ctx;
  if (items.length === 0) {
    return;
  }
  log(`${label} (${items.length}):`);
  for (const item of items.slice(0, limit)) {
    log(`${prefix}${formatter(item)}`);
  }
  const hidden = items.length - limit;
  if (hidden > 0) {
    log(` ... and ${hidden} more`);
  }
}
|
|
23
|
+
|
|
24
|
+
// Public API of log-utils.
module.exports = {
  logListWithLimit,
};
|
|
@@ -0,0 +1,208 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* @fileoverview Unused translation key detector.
|
|
5
|
+
* Scans source files to identify orphan keys not referenced in code.
|
|
6
|
+
* @module orphan-finder
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const path = require('path');
|
|
10
|
+
const fs = require('./fs-adapter');
|
|
11
|
+
const { readJsonFile, flattenKeys, normalizeData } = require('./json-utils');
|
|
12
|
+
const { collectFiles } = require('./file-walker');
|
|
13
|
+
const { isTranslationFile, readI18nDirectory } = require('./sync-checker');
|
|
14
|
+
const { logListWithLimit } = require('./log-utils');
|
|
15
|
+
|
|
16
|
+
// Keys built with template interpolation, e.g. translate(`menu.${section}`).
// Captures the static prefix before the `${`.
const DYNAMIC_KEY_RE = /(?:translate|transloco)\s*\(\s*[`'"]([^`'"]*)\$\{/g;
// Keys built by string concatenation, e.g. 'menu.' + section. Fix: the
// closing quote class now includes the backtick (it was missing while the
// opening class had it, so template-literal prefixes such as
// `menu.` + x were never detected).
const CONCAT_KEY_RE = /['"`]([^'"`]+)['"`]\s*\+\s*(?:\w+|['"`])/g;
|
|
18
|
+
|
|
19
|
+
/**
 * Lists translation file names found in the i18n directory.
 * @param {string} i18nDir - Directory containing language files.
 * @returns {Promise<string[]>} File names (not full paths).
 */
async function getLangFiles(i18nDir) {
  const entries = await readI18nDirectory(i18nDir);
  const translations = entries.filter(isTranslationFile);
  return translations.map(entry => entry.name);
}
|
|
23
|
+
|
|
24
|
+
/**
 * Loads one language file and flattens it into the reference key list.
 * @param {string} i18nDir - Directory containing language files.
 * @param {string} refFile - File name inside i18nDir.
 * @param {string} format - Declared data format ('flat' or nested).
 * @returns {Promise<{refFile: string, allKeys: string[]}>}
 * @throws {Error} When the file cannot be read or parsed.
 */
async function loadRefData(i18nDir, refFile, format) {
  const refData = await readJsonFile(path.join(i18nDir, refFile));
  if (!refData) {
    throw new Error(`Cannot parse ${refFile}`);
  }
  const nested = normalizeData(refData, format);
  return { refFile, allKeys: flattenKeys(nested) };
}
|
|
31
|
+
|
|
32
|
+
/**
 * Picks the first available language file as the reference key set.
 * @param {Object} ctx - { i18nDir, format, log }
 * @returns {Promise<{refFile: string, allKeys: string[]}|null>} null when
 *   the i18n directory holds no language files (a message is logged).
 */
async function loadReferenceKeysForOrphans(ctx) {
  const { i18nDir, format, log } = ctx;
  const langFiles = await getLangFiles(i18nDir);
  if (langFiles.length > 0) {
    return loadRefData(i18nDir, langFiles[0], format);
  }
  log('No language files found.');
  return null;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Records key-construction patterns that defeat static matching: template
 * interpolation and string concatenation of dotted prefixes.
 * Appends { file, pattern } entries to `dynamicPatterns` (mutated in place).
 * @param {string} content - Source file text.
 * @param {string} relPath - Path reported in findings.
 * @param {Array<{file: string, pattern: string}>} dynamicPatterns - Accumulator.
 */
function detectDynamicPatterns(content, relPath, dynamicPatterns) {
  for (const [, prefix] of content.matchAll(DYNAMIC_KEY_RE)) {
    dynamicPatterns.push({ file: relPath, pattern: `${prefix}...` });
  }
  for (const [, prefix] of content.matchAll(CONCAT_KEY_RE)) {
    // Only dotted, space-free prefixes look like translation keys.
    if (prefix.includes('.') && !prefix.includes(' ')) {
      dynamicPatterns.push({ file: relPath, pattern: `${prefix}+...` });
    }
  }
}
|
|
52
|
+
|
|
53
|
+
/**
 * Marks every still-unused reference key that appears as a quoted literal
 * in `content`, and records dynamic key patterns for the final report.
 * Mutates `keyUsageMap` (key -> boolean) and `dynamicPatterns` in place.
 * @param {Object} ctx - { content, relPath, allKeys, keyUsageMap, dynamicPatterns }
 */
function scanFileForKeyUsage(ctx) {
  const { content, relPath, allKeys, keyUsageMap, dynamicPatterns } = ctx;
  detectDynamicPatterns(content, relPath, dynamicPatterns);

  // Only search for keys nobody has matched yet; later files do less work.
  const pending = [...new Set(allKeys.filter(key => !keyUsageMap.get(key)))];
  if (pending.length === 0) {
    return;
  }

  // One quoted alternation of all pending keys, metacharacters escaped.
  const escaped = pending.map(key => key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'));
  const keyPattern = new RegExp(`['"\`](${escaped.join('|')})['"\`]`, 'g');
  for (const match of content.matchAll(keyPattern)) {
    keyUsageMap.set(match[1], true);
  }
}
|
|
70
|
+
|
|
71
|
+
/**
 * Reads every source file concurrently and scans it for key usage.
 * Unreadable files are skipped (warned about only in verbose mode).
 * @param {string[]} files - File paths to scan.
 * @param {Object} ctx - { srcDir, allKeys, keyUsageMap, dynamicPatterns, verbose }
 * @returns {Promise<void>}
 */
async function scanAllFilesForKeys(files, ctx) {
  const { srcDir, allKeys, keyUsageMap, dynamicPatterns, verbose } = ctx;
  const scanOne = async filePath => {
    const relPath = path.relative(srcDir, filePath);
    try {
      const content = await fs.readFile(filePath, 'utf-8');
      scanFileForKeyUsage({ content, relPath, allKeys, keyUsageMap, dynamicPatterns });
    } catch (err) {
      if (verbose) {
        console.warn(`Warning: Cannot read ${relPath}: ${err.message}`);
      }
    }
  };
  await Promise.all(files.map(scanOne));
}
|
|
92
|
+
|
|
93
|
+
/**
 * Splits the usage map into used and orphan key lists, preserving
 * the map's insertion order.
 * @param {Map<string, boolean>} keyUsageMap - key -> was-it-seen flag.
 * @returns {{usedKeys: string[], orphanKeys: string[]}}
 */
function partitionKeyUsage(keyUsageMap) {
  const entries = [...keyUsageMap.entries()];
  return {
    usedKeys: entries.filter(([, used]) => used).map(([key]) => key),
    orphanKeys: entries.filter(([, used]) => !used).map(([key]) => key),
  };
}
|
|
101
|
+
|
|
102
|
+
/**
 * Reports unique dynamic key patterns (at most 5 shown) plus a caveat that
 * they can cause false-positive orphans. Silent when none were recorded.
 * @param {Array<{file: string, pattern: string}>} dynamicPatterns
 * @param {Function} log - Line sink.
 */
function logDynamicPatterns(dynamicPatterns, log) {
  if (dynamicPatterns.length === 0) {
    return;
  }
  const uniquePatterns = [...new Set(dynamicPatterns.map(entry => entry.pattern))];
  logListWithLimit({
    items: uniquePatterns,
    label: `Dynamic key patterns detected`,
    limit: 5,
    log,
    prefix: ' [!] ',
  });
  log(' (Some orphans may be false positives due to dynamic keys)\n');
}
|
|
116
|
+
|
|
117
|
+
/**
 * Prints the orphan scan report: dynamic-pattern caveats, the orphan key
 * list (at most 20 shown), and a summary section.
 * @param {Object} results - { dynamicPatterns, orphanKeys, allKeys, usedKeys }
 * @param {Function} log - Line sink.
 */
function logOrphanResults(results, log) {
  const { dynamicPatterns, orphanKeys, allKeys, usedKeys } = results;
  logDynamicPatterns(dynamicPatterns, log);

  if (orphanKeys.length === 0) {
    log('No orphan keys found.');
  } else {
    logListWithLimit({ items: orphanKeys, label: 'Orphan keys', limit: 20, log });
  }

  log(`\nSummary\n${'-'.repeat(50)}`);
  log(`Total keys: ${allKeys.length}\nUsed keys: ${usedKeys.length}`);
  log(`Orphan keys: ${orphanKeys.length}\nDynamic patterns: ${dynamicPatterns.length}`);
}
|
|
131
|
+
|
|
132
|
+
/**
 * Walks the source tree and determines which reference keys are used.
 * @param {Object} ctx - { srcDir, excludedFolders, allKeys, verbose }
 * @returns {Promise<Object>} { usedKeys, orphanKeys, dynamicPatterns, allKeys }
 */
async function scanAndCollect(ctx) {
  const { srcDir, excludedFolders, allKeys, verbose } = ctx;
  // Every key starts out unused; scanning flips entries to true.
  const keyUsageMap = new Map(allKeys.map(key => [key, false]));
  const dynamicPatterns = [];
  const files = await collectFiles(srcDir, excludedFolders);
  await scanAllFilesForKeys(files, { srcDir, allKeys, keyUsageMap, dynamicPatterns, verbose });
  return { ...partitionKeyUsage(keyUsageMap), dynamicPatterns, allKeys };
}
|
|
141
|
+
|
|
142
|
+
/**
 * Builds the public result object; the run fails only in strict mode when
 * orphans were found.
 * @param {Object} ctx - { results, strict, exitCodes, log }
 * @returns {Object} results spread plus { success, exitCode }.
 */
function buildOrphanResult(ctx) {
  const { results, strict, exitCodes, log } = ctx;
  const failed = strict && results.orphanKeys.length > 0;
  if (failed) {
    log('\nOrphan check failed (--strict mode)');
  }
  return {
    success: !failed,
    exitCode: failed ? exitCodes.untranslated : exitCodes.success,
    ...results,
  };
}
|
|
154
|
+
|
|
155
|
+
/**
 * Prints the tool banner for the orphan scan.
 * @param {Function} log - Line sink.
 */
function logOrphanHeader(log) {
  log('Transloco Find Orphan Keys');
  log('='.repeat(50));
}

/**
 * Returns the exit-code table, caller-supplied or defaulted.
 * @param {Object} options
 * @returns {{success: number, untranslated: number}}
 */
function getOrphanExitCodes(options) {
  const fallback = { success: 0, untranslated: 1 };
  return options.exitCodes || fallback;
}

/**
 * Result used when no reference language file exists: trivially successful.
 * @param {Object} exitCodes
 * @returns {Object}
 */
function handleNoRef(exitCodes) {
  return { success: true, exitCode: exitCodes.success, orphanKeys: [] };
}
|
|
167
|
+
|
|
168
|
+
/**
 * Runs the full scan against a loaded reference key set, prints the
 * report, and returns the final result object.
 * @param {Object} ctx - Merged reference data plus parsed options.
 * @returns {Promise<Object>}
 */
async function executeOrphanScan(ctx) {
  const { refFile, log, strict, exitCodes, srcDir, excludedFolders, allKeys, verbose } = ctx;
  log(`Reference: ${refFile} (${allKeys.length} keys)\nScanning: ${srcDir}\n`);
  const results = await scanAndCollect({ srcDir, excludedFolders, allKeys, verbose });
  logOrphanResults(results, log);
  return buildOrphanResult({ results, strict, exitCodes, log });
}
|
|
175
|
+
|
|
176
|
+
/**
 * Baseline option values for findOrphans.
 * @returns {Object}
 */
function getOrphanDefaults() {
  return {
    format: 'nested',
    excludedFolders: [],
    verbose: false,
    strict: false,
    log: console.log,
  };
}

/**
 * Merges caller options over the defaults and resolves the exit-code table.
 * @param {Object} options
 * @returns {Object} Fully populated option object.
 */
function parseOrphanOptions(options) {
  return {
    ...getOrphanDefaults(),
    ...options,
    exitCodes: getOrphanExitCodes(options),
  };
}
|
|
184
|
+
|
|
185
|
+
/**
 * Scans source files to find translation keys not used in code
 * @param {OrphanOptions} [options]
 * @returns {Promise<OrphanResult>}
 * @example
 * const { orphanKeys } = await findOrphans({ i18nDir: './src/i18n', srcDir: './src/app' });
 */
async function findOrphans(options = {}) {
  const opts = parseOrphanOptions(options);
  logOrphanHeader(opts.log);
  const { i18nDir, format, log } = opts;
  const refResult = await loadReferenceKeysForOrphans({ i18nDir, format, log });
  return refResult
    ? executeOrphanScan({ ...refResult, ...opts })
    : handleNoRef(opts.exitCodes);
}
|
|
205
|
+
|
|
206
|
+
// Public API: findOrphans is the single entry point of this module.
module.exports = {
  findOrphans,
};
|