@abdess76/i18nkit 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,6 +10,7 @@ const fs = require('./fs-adapter');
  const path = require('path');

  const DEFAULT_EXCLUDED_FOLDERS = ['node_modules', 'dist', '.git', 'coverage', 'e2e', '.angular'];
+ const DEFAULT_EXCLUDED_SET = new Set(DEFAULT_EXCLUDED_FOLDERS);

  const VALID_SOURCE_RE = /\.(ts|html)$/;
  const EXCLUDED_SOURCE_RE = /\.(spec|test|e2e|mock)\./;
@@ -22,30 +23,36 @@ function validateDir(dir) {
    }
  }

- function shouldSkipEntry(entry, excludedFolders) {
-   return excludedFolders.includes(entry.name);
+ function shouldSkipEntry(entry, excludedSet) {
+   return excludedSet.has(entry.name);
  }

- async function* processEntry(entry, dir, excludedFolders) {
+ async function* processEntry(entry, dir, excludedSet) {
    const filePath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
-     yield* walkDirAsync(filePath, excludedFolders);
+     yield* walkDirRecursive(filePath, excludedSet);
    } else if (isValidSourceFile(entry.name)) {
      yield filePath;
    }
  }

- async function* walkDirAsync(dir, excludedFolders = DEFAULT_EXCLUDED_FOLDERS) {
+ async function* walkDirRecursive(dir, excludedSet) {
    validateDir(dir);
    const entries = await fs.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
-     if (shouldSkipEntry(entry, excludedFolders)) {
+     if (shouldSkipEntry(entry, excludedSet)) {
        continue;
      }
-     yield* processEntry(entry, dir, excludedFolders);
+     yield* processEntry(entry, dir, excludedSet);
    }
  }

+ async function* walkDirAsync(dir, excludedFolders = DEFAULT_EXCLUDED_FOLDERS) {
+   const excludedSet =
+     Array.isArray(excludedFolders) ? new Set(excludedFolders) : DEFAULT_EXCLUDED_SET;
+   yield* walkDirRecursive(dir, excludedSet);
+ }
+
  /**
   * Collects all .ts/.html files recursively, excluding test files
   * @param {string} dir
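
A quick note on the walker split: the recursion now runs on a prebuilt Set (walkDirRecursive), while walkDirAsync keeps the old array-based signature and converts it once up front. A minimal consumer sketch, assuming walkDirAsync is exported by this module; the helper below is invented for illustration:

```js
// Hypothetical caller: walkDirAsync is an async generator that yields file paths,
// so results can be streamed with for await...of instead of buffered all at once.
async function listSourceFiles(rootDir) {
  const found = [];
  // Passing a custom exclusion array still works; it is turned into a Set internally.
  for await (const filePath of walkDirAsync(rootDir, ['node_modules', 'dist'])) {
    found.push(filePath);
  }
  return found;
}
```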
@@ -109,17 +116,24 @@ function handleHtmlFile(content, filePath, processedTemplates) {
    return { template: content, typescript: null, type: 'html' };
  }

+ function removeMatchedSections(content, matches) {
+   const parts = [];
+   let lastIndex = 0;
+   for (const m of matches) {
+     parts.push(content.slice(lastIndex, m.index));
+     lastIndex = m.index + m[0].length;
+   }
+   parts.push(content.slice(lastIndex));
+   return parts.join('');
+ }
+
  function extractInlineTemplate(content) {
    const templateMatches = [...content.matchAll(/template\s*:\s*`([\s\S]*?)`/g)];
    if (templateMatches.length === 0) {
      return { template: null, tsCode: content };
    }
    const template = templateMatches.map(m => m[1]).join('\n');
-   let tsCode = content;
-   for (const m of templateMatches) {
-     tsCode = tsCode.replace(m[0], '');
-   }
-   return { template, tsCode };
+   return { template, tsCode: removeMatchedSections(content, templateMatches) };
  }

  function resolveTemplatePath(content, filePath) {
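
The extracted removeMatchedSections helper rebuilds the string in a single pass from the match indices instead of calling replace once per match. The same slice-and-join technique in standalone form (the function name and sample component string are invented):

```js
// Standalone sketch of the slice-and-join removal technique.
function stripMatches(content, regex) {
  const matches = [...content.matchAll(regex)];
  const parts = [];
  let lastIndex = 0;
  for (const m of matches) {
    parts.push(content.slice(lastIndex, m.index)); // keep text before the match
    lastIndex = m.index + m[0].length;             // skip the matched span
  }
  parts.push(content.slice(lastIndex));            // keep the tail
  return parts.join('');
}

const src = "@Component({ template: `<h1>Hi</h1>` }) export class C {}";
console.log(stripMatches(src, /template\s*:\s*`([\s\S]*?)`/g));
// -> "@Component({  }) export class C {}"
```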
@@ -33,11 +33,16 @@ const readdirSync = (path, options) => fs.readdirSync(path, options);
  const statSync = path => fs.statSync(path);
  const watch = (path, options, listener) => fs.watch(path, options, listener);
  const mkdirSync = (path, options) => fs.mkdirSync(path, options);
+ const copyFileSync = (src, dest) => fs.copyFileSync(src, dest);

  const readFile = (path, encoding = 'utf-8') => fsp.readFile(path, encoding);
  const writeFile = (path, data, encoding = 'utf-8') => fsp.writeFile(path, data, encoding);
  const readdir = (path, options) => fsp.readdir(path, options);
  const mkdir = (path, options) => fsp.mkdir(path, options);
+ const rename = (oldPath, newPath) => fsp.rename(oldPath, newPath);
+ const unlink = path => fsp.unlink(path);
+ const copyFile = (src, dest) => fsp.copyFile(src, dest);
+ const rm = (path, options) => fsp.rm(path, options);

  module.exports = {
    setAdapter,
@@ -47,10 +52,15 @@ module.exports = {
    writeFileSync,
    readdirSync,
    mkdirSync,
+   copyFileSync,
    statSync,
    watch,
    readFile,
    writeFile,
    readdir,
    mkdir,
+   rename,
+   unlink,
+   copyFile,
+   rm,
  };
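
The adapter now wraps the file-manipulation calls (copyFileSync, rename, unlink, copyFile, rm) in addition to the read/write helpers. A hedged usage sketch of the promise-based additions; the backup flow and paths are invented for illustration:

```js
// Hypothetical consumer of the fs-adapter shown above.
const fs = require('./fs-adapter');

async function swapInUpdatedFile(original, updated) {
  await fs.copyFile(original, `${original}.bak`);   // keep a backup of the current file
  await fs.rename(updated, original);               // move the new file into place
  await fs.rm(`${original}.bak`, { force: true });  // drop the backup once the swap succeeded
}
```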
@@ -165,7 +165,7 @@ function generateFooter() {

  PLUGIN LOCATIONS:
    builtin: <package>/bin/plugins/
-   local: .i18n/plugins/
+   local: .i18nkit/plugins/
    npm: i18nkit-* packages`;
  }

package/bin/core/index.js CHANGED
@@ -19,13 +19,9 @@ const plugins = require('./plugin-resolver');
  const config = require('./config');
  const detector = require('./detector');
  const help = require('./help-generator');
- const fsAdapter = require('./fs-adapter');
  const logUtils = require('./log-utils');

  module.exports = {
-   setFsAdapter: fsAdapter.setAdapter,
-   resetFsAdapter: fsAdapter.resetAdapter,
-
    getArgValue: config.getArgValue,

    readJsonFile: json.readJsonFile,
@@ -109,17 +109,21 @@ const isPlainObject = v => typeof v === 'object' && v !== null && !Array.isArray
   * Flattens nested JSON to dot-notation keys
   * @param {Object} obj
   * @param {string} [prefix='']
+  * @param {Object} [result={}]
   * @returns {Record<string, string>}
   * @example
   * flattenJson({ user: { name: 'John' } }) // { 'user.name': 'John' }
   */
- function flattenJson(obj, prefix = '') {
-   return Object.entries(obj).reduce((acc, [key, value]) => {
+ function flattenJson(obj, prefix = '', result = {}) {
+   for (const [key, value] of Object.entries(obj)) {
      const fullKey = buildKey(prefix, escapeKey(key));
-     return isPlainObject(value) ?
-       { ...acc, ...flattenJson(value, fullKey) }
-       : { ...acc, [fullKey]: value };
-   }, {});
+     if (isPlainObject(value)) {
+       flattenJson(value, fullKey, result);
+     } else {
+       result[fullKey] = value;
+     }
+   }
+   return result;
  }

  function ensurePath(obj, parts) {
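
The rewrite accumulates into one shared result object instead of spreading a new object for every key, so flattening stays linear in the number of leaves; the output shape is unchanged. An invented input/output pair, assuming escapeKey leaves plain keys untouched:

```js
// Illustrative only — flattenJson as defined in the hunk above.
flattenJson({ user: { name: 'John', address: { city: 'Paris' } } });
// => { 'user.name': 'John', 'user.address.city': 'Paris' }
```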
@@ -10,6 +10,17 @@ const path = require('path');
  const { decodeHtmlEntities } = require('./parser-utils');

  const MAX_KEY_LENGTH = 50;
+ const IGNORED_SCOPE_FOLDERS = new Set([
+   'components',
+   'pages',
+   'shared',
+   'common',
+   'features',
+   'dialogs',
+   'forms',
+   'ui',
+   'lib',
+ ]);

  /**
   * Converts text to a translation key (lowercase, underscored, max 50 chars)
@@ -49,18 +60,7 @@ function pathToScope(filePath, baseDir) {
      .replace(/\.(component|html|ts)$/g, '')
      .replace(/\.component$/, '');

-   const ignoredFolders = [
-     'components',
-     'pages',
-     'shared',
-     'common',
-     'features',
-     'dialogs',
-     'forms',
-     'ui',
-     'lib',
-   ];
-   const significantParts = parts.filter(p => !ignoredFolders.includes(p));
+   const significantParts = parts.filter(p => !IGNORED_SCOPE_FOLDERS.has(p));

    const scope = significantParts
      .concat(fileName !== 'app' && fileName !== parts.at(-1) ? [fileName] : [])
@@ -50,26 +50,22 @@ function detectDynamicPatterns(content, relPath, dynamicPatterns) {
    }
  }

+ const QUOTED_STRING_RE = /['"`]([a-zA-Z][a-zA-Z0-9_.]+)['"`]/g;
+
  function scanFileForKeyUsage(ctx) {
-   const { content, relPath, allKeys, keyUsageMap, dynamicPatterns } = ctx;
+   const { content, relPath, keySet, keyUsageMap, dynamicPatterns } = ctx;
    detectDynamicPatterns(content, relPath, dynamicPatterns);

-   const keySet = new Set(allKeys.filter(k => !keyUsageMap.get(k)));
-   if (keySet.size === 0) {
-     return;
-   }
-
-   const keyPattern = new RegExp(
-     `['"\`](${[...keySet].map(k => k.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')).join('|')})['"\`]`,
-     'g',
-   );
-   for (const match of content.matchAll(keyPattern)) {
-     keyUsageMap.set(match[1], true);
+   for (const match of content.matchAll(QUOTED_STRING_RE)) {
+     const candidate = match[1];
+     if (keySet.has(candidate)) {
+       keyUsageMap.set(candidate, true);
+     }
    }
  }

  async function scanAllFilesForKeys(files, ctx) {
-   const { srcDir, allKeys, keyUsageMap, dynamicPatterns, verbose } = ctx;
+   const { srcDir, keySet, keyUsageMap, dynamicPatterns, verbose } = ctx;
    await Promise.all(
      files.map(async filePath => {
        try {
@@ -77,7 +73,7 @@ async function scanAllFilesForKeys(files, ctx) {
          scanFileForKeyUsage({
            content,
            relPath: path.relative(srcDir, filePath),
-           allKeys,
+           keySet,
            keyUsageMap,
            dynamicPatterns,
          });
@@ -131,10 +127,11 @@ function logOrphanResults(results, log) {

  async function scanAndCollect(ctx) {
    const { srcDir, excludedFolders, allKeys, verbose } = ctx;
+   const keySet = new Set(allKeys);
    const keyUsageMap = new Map(allKeys.map(k => [k, false]));
    const dynamicPatterns = [];
    const files = await collectFiles(srcDir, excludedFolders);
-   await scanAllFilesForKeys(files, { srcDir, allKeys, keyUsageMap, dynamicPatterns, verbose });
+   await scanAllFilesForKeys(files, { srcDir, keySet, keyUsageMap, dynamicPatterns, verbose });
    const { usedKeys, orphanKeys } = partitionKeyUsage(keyUsageMap);
    return { usedKeys, orphanKeys, dynamicPatterns, allKeys };
  }
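
With keySet built once in scanAndCollect, every file is scanned with the same generic quoted-string regex and a Set lookup, rather than compiling a per-file alternation over the remaining keys. The lookup idea in isolation (the keys and sample content below are invented):

```js
// Standalone sketch of the Set-based usage check from scanFileForKeyUsage.
const QUOTED_STRING_RE = /['"`]([a-zA-Z][a-zA-Z0-9_.]+)['"`]/g;
const keySet = new Set(['home.title', 'home.subtitle']);
const keyUsageMap = new Map([...keySet].map(k => [k, false]));

const content = "this.title = this.transloco.translate('home.title');";
for (const match of content.matchAll(QUOTED_STRING_RE)) {
  if (keySet.has(match[1])) keyUsageMap.set(match[1], true);
}
console.log(keyUsageMap.get('home.title'));    // true
console.log(keyUsageMap.get('home.subtitle')); // false
```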
package/bin/core/paths.js CHANGED
@@ -43,14 +43,14 @@ function resolveOutputFile(ctx) {
  function resolvePaths(ctx) {
    const { args, config, cwd, lang = null } = ctx;
    const i18nDir = resolveI18nDir(args, config, cwd);
-   const reportDir = path.join(cwd, '.i18n');
+   const i18nkitDir = path.join(cwd, '.i18nkit');
    return {
      srcDir: resolveSrcDir(args, config, cwd),
      i18nDir,
-     reportDir,
-     backupDir: path.join(reportDir, 'backup'),
+     reportDir: i18nkitDir,
+     backupDir: path.join(i18nkitDir, 'backups'),
      outputFile: resolveOutputFile({ args, config, i18nDir, lang }),
-     keyMappingFile: path.join(cwd, '.i18n-keys.json'),
+     keyMappingFile: path.join(i18nkitDir, 'keys.json'),
      excludedFolders: config.excludedFolders || DEFAULT_EXCLUDED_FOLDERS,
    };
  }
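
All of the tool's working files now live under a single .i18nkit directory. A small re-derivation of the new layout for a hypothetical project at /app (values simply mirror the diff):

```js
const path = require('path');

const cwd = '/app';                               // invented project root
const i18nkitDir = path.join(cwd, '.i18nkit');
console.log(i18nkitDir);                          // /app/.i18nkit            (was /app/.i18n)
console.log(path.join(i18nkitDir, 'backups'));    // /app/.i18nkit/backups    (was /app/.i18n/backup)
console.log(path.join(i18nkitDir, 'keys.json'));  // /app/.i18nkit/keys.json  (was /app/.i18n-keys.json)
```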
@@ -10,7 +10,7 @@ const fs = require('./fs-adapter');
  const path = require('path');

  const BUILTIN_DIR = path.join(__dirname, '..', 'plugins');
- const LOCAL_DIR = '.i18n/plugins';
+ const LOCAL_DIR = '.i18nkit/plugins';

  /** @type {Record<string, string>} */
  const BUILTIN_ALIASES = {
@@ -50,7 +50,11 @@ function logTranslationSummary(ctx) {

  async function executeDeepLTranslation(ctx) {
    const { uniqueValues, sourceLang, targetLang, provider } = ctx;
-   const translated = await provider.translateBatch(uniqueValues, sourceLang, targetLang);
+   const translated = await provider.translateBatch({
+     texts: uniqueValues,
+     fromLang: sourceLang,
+     toLang: targetLang,
+   });
    return {
      translationMap: new Map(uniqueValues.map((v, i) => [v, translated[i]])),
      failedCount: 0,
@@ -59,13 +63,18 @@ async function executeDeepLTranslation(ctx) {

  function executeMyMemoryTranslation(ctx) {
    const { uniqueValues, sourceLang, targetLang, provider, email, verbose } = ctx;
-   return provider.translateBatch(uniqueValues, sourceLang, targetLang, {
-     email,
-     verbose,
-     onProgress: (processed, total) => {
-       if (processed < total) {
-         process.stdout.write(`\r Progress: ${processed}/${total}`);
-       }
+   return provider.translateBatch({
+     texts: uniqueValues,
+     fromLang: sourceLang,
+     toLang: targetLang,
+     options: {
+       email,
+       verbose,
+       onProgress: (processed, total) => {
+         if (processed < total) {
+           process.stdout.write(`\r Progress: ${processed}/${total}`);
+         }
+       },
      },
    });
  }
@@ -22,6 +22,7 @@ const TRANSLATABLE_ATTRS = [
    'defaultLabel',
    'selectedItemsLabel',
    'text',
+   'value',
  ];

  const ATTR_REPLACEMENT_MAP = Object.fromEntries(
@@ -29,7 +30,7 @@ const ATTR_REPLACEMENT_MAP = Object.fromEntries(
  );

  const TAG_CONTENT_RE =
-   /(<(?:h[1-6]|p|span|div|li|td|th|a|button|label|option)[^>]*>)\s*(.+?)\s*(<\/(?:h[1-6]|p|span|div|li|td|th|a|button|label|option)>)/gi;
+   /(<(?:h[1-6]|p|span|div|li|td|th|a|button|label|option)[^>]*>)\s*([^<]+?)\s*(<\/(?:h[1-6]|p|span|div|li|td|th|a|button|label|option)>)/gi;

  const hasTranslocoPipe = content => /\bTranslocoPipe\b/.test(content);
  const hasTranslocoImport = content => /@jsverse\/transloco/.test(content);
@@ -140,7 +141,7 @@ module.exports = {

  transform(ctx) {
    const { content, rawText, key, context } = ctx;
-   if (context.startsWith('ts_')) {
+   if (context?.startsWith('ts_')) {
      return { content, replacements: 0 };
    }
    return transformTemplate({ content, rawText, key });
@@ -37,7 +37,7 @@ const PRIMENG_PATTERNS = [
      context: 'buttons',
      attr: 'chooseLabel',
    },
-   { regex: /<p-(?:chip|tag)[^>]*\blabel="([^"]+)"/gi, context: 'labels', attr: 'label' },
+   { regex: /<p-(?:chip|tag)[^>]*\b(?:label|value)="([^"]+)"/gi, context: 'labels', attr: 'value' },
    {
      regex: /<p-(?:inputNumber|calendar)[^>]*\b(?:prefix|suffix)="([^"]+)"/gi,
      context: 'labels',
@@ -27,14 +27,13 @@ function buildDeepLRequest(texts, ctx) {
    };
  }

- async function translate(text, ctx) {
-   const result = await translateBatch([text], ctx);
-   return result[0];
- }
-
- async function translateBatch(texts, ctx) {
-   const apiKey = getApiKey(ctx);
-   const response = await fetch(getEndpoint(apiKey), buildDeepLRequest(texts, { ...ctx, apiKey }));
+ async function doTranslateBatch(ctx) {
+   const { texts, fromLang, toLang, options = {} } = ctx;
+   const apiKey = getApiKey(options);
+   const response = await fetch(
+     getEndpoint(apiKey),
+     buildDeepLRequest(texts, { fromLang, toLang, apiKey }),
+   );
    if (!response.ok) {
      throw new Error(`DeepL API error: ${response.status} - ${await response.text()}`);
    }
@@ -42,6 +41,21 @@ async function translateBatch(texts, ctx) {
    return data.translations.map(t => t.text);
  }

+ async function translate(ctx) {
+   const { text, fromLang, toLang, options = {} } = ctx;
+   const result = await doTranslateBatch({ texts: [text], fromLang, toLang, options });
+   return result[0];
+ }
+
+ async function translateBatch(ctx) {
+   const { texts, fromLang, toLang, options = {} } = ctx;
+   const translated = await doTranslateBatch({ texts, fromLang, toLang, options });
+   return {
+     translationMap: new Map(texts.map((t, i) => [t, translated[i]])),
+     failedCount: 0,
+   };
+ }
+
  module.exports = {
    name: 'provider-deepl',
    type: 'provider',
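
Both provider entry points now take a single object argument, and translateBatch returns { translationMap, failedCount } rather than a bare array. A hedged caller sketch; the require path, the apiKey option name, and the assumption that translate/translateBatch are exported are inferred from the hunks above, not confirmed by the diff:

```js
const deepl = require('./provider-deepl'); // path assumed for illustration

async function demo() {
  const single = await deepl.translate({
    text: 'Save',
    fromLang: 'en',
    toLang: 'fr',
    options: { apiKey: 'xxxx' }, // option name assumed; see getApiKey in the source
  });

  const { translationMap, failedCount } = await deepl.translateBatch({
    texts: ['Save', 'Cancel'],
    fromLang: 'en',
    toLang: 'fr',
    options: { apiKey: 'xxxx' },
  });
  console.log(single, translationMap.get('Cancel'), failedCount);
}
```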
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@abdess76/i18nkit",
-   "version": "1.0.3",
+   "version": "1.0.5",
    "description": "Universal i18n CLI - extract translation keys, sync language files, detect missing translations. Extensible plugin architecture.",
    "author": {
      "name": "Abdessamad DERRAZ",