@mxml3gend/gloss 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -40,6 +40,7 @@ export default {
40
40
  exclude: ["**/*.test.tsx"],
41
41
  mode: "regex", // or "ast" for strict parsing
42
42
  },
43
+ strictPlaceholders: true, // default true; set false to treat placeholder mismatches as warnings
43
44
  };
44
45
  ```
45
46
 
@@ -54,6 +55,7 @@ module.exports = {
54
55
  scan: {
55
56
  mode: "ast",
56
57
  },
58
+ strictPlaceholders: false,
57
59
  };
58
60
  ```
59
61
 
@@ -63,8 +65,13 @@ module.exports = {
63
65
  gloss --help
64
66
  gloss --version
65
67
  gloss --no-open
68
+ gloss --no-cache
66
69
  gloss --port 5179
67
70
  gloss open key auth.login.title
71
+ gloss check --no-cache
72
+ gloss cache status
73
+ gloss cache clear
74
+ npm run test:perf
68
75
  ```
69
76
 
70
77
  ## CI Guardrails
@@ -86,6 +93,21 @@ gloss check --format both
86
93
 
87
94
  The local UI also consumes this data through `/api/check` and shows a hardcoded-text status chip.
88
95
 
96
+ ## Performance Regression Gate
97
+
98
+ Gloss ships with a deterministic 1000-key fixture regression test for scanner performance.
99
+
100
+ ```bash
101
+ npm run test:perf
102
+ ```
103
+
104
+ Optional environment overrides:
105
+
106
+ ```bash
107
+ GLOSS_PERF_COLD_MAX_MS=5000
108
+ GLOSS_PERF_WARM_MAX_MS=3500
109
+ ```
110
+
89
111
  ## Typed Key Generation
90
112
 
91
113
  Generate `i18n-keys.d.ts` from current translation keys:
@@ -0,0 +1,118 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
const BASELINE_DIRECTORY = ".gloss";
const BASELINE_FILENAME = "baseline.json";

// Summary counters persisted in the baseline file, in stable output order.
const SUMMARY_KEYS = [
  "missingTranslations",
  "orphanKeys",
  "invalidKeys",
  "placeholderMismatches",
  "hardcodedTexts",
  "errorIssues",
  "warningIssues",
  "totalIssues",
];

// True only for real, finite numbers (rejects NaN, Infinity, and non-numbers).
const isFiniteNumber = (value) => typeof value === "number" && Number.isFinite(value);

// All-zero delta, returned when there is no previous baseline to diff against.
const emptyDelta = () => Object.fromEntries(SUMMARY_KEYS.map((key) => [key, 0]));

// Validate an arbitrary value as a baseline summary. Returns a cleaned copy
// containing exactly SUMMARY_KEYS, or null when the value is not an object
// or any counter is missing / not a finite number.
const normalizeSummary = (value) => {
  if (!value || typeof value !== "object") {
    return null;
  }
  const normalized = {};
  for (const key of SUMMARY_KEYS) {
    const counter = value[key];
    if (!isFiniteNumber(counter)) {
      return null;
    }
    normalized[key] = counter;
  }
  return normalized;
};
41
// Absolute path of the baseline file under the project root.
const baselineFilePath = (rootDir) => path.join(rootDir, BASELINE_DIRECTORY, BASELINE_FILENAME);

/**
 * Read and validate the persisted baseline.
 *
 * Every failure mode — missing file, unreadable file, malformed JSON,
 * unknown schemaVersion, or an invalid summary — is treated as "no
 * baseline" and yields null. The original code's explicit ENOENT check was
 * dead: both the ENOENT branch and the fallthrough returned null, so the
 * catch is collapsed here with behavior unchanged.
 *
 * @param {string} rootDir - Project root containing the `.gloss` directory.
 * @returns {Promise<{schemaVersion: 1, updatedAt: string, summary: object}|null>}
 */
const readBaselineFile = async (rootDir) => {
  const filePath = baselineFilePath(rootDir);
  let parsed;
  try {
    parsed = JSON.parse(await fs.readFile(filePath, "utf8"));
  } catch {
    // ENOENT, permission errors, and JSON syntax errors all mean "no baseline".
    return null;
  }
  // JSON.parse can yield null or a primitive; the original relied on its
  // catch block to absorb the resulting TypeError from property access.
  if (!parsed || typeof parsed !== "object") {
    return null;
  }
  if (parsed.schemaVersion !== 1 || typeof parsed.updatedAt !== "string") {
    return null;
  }
  const summary = normalizeSummary(parsed.summary);
  if (!summary) {
    return null;
  }
  return { schemaVersion: 1, updatedAt: parsed.updatedAt, summary };
};
68
// Per-counter difference between the current summary and the previous one.
// With no previous baseline, every delta is zero.
const computeDelta = (current, previous) => {
  if (!previous) {
    return emptyDelta();
  }
  return Object.fromEntries(
    SUMMARY_KEYS.map((key) => [key, current[key] - previous[key]]),
  );
};
78
/**
 * Persist `summary` as the new baseline and report how it moved relative to
 * the previous one. The file is written atomically (tmp file + rename) so
 * concurrent readers never observe a half-written baseline.
 *
 * @param {string} rootDir - Project root; baseline lives under `.gloss/`.
 * @param {object} summary - Current issue counters (see SUMMARY_KEYS).
 * @returns {Promise<object>} hasPrevious flag, baseline path (relative to
 *   rootDir when possible), previous/current timestamps, and per-counter delta.
 */
export async function updateIssueBaseline(rootDir, summary) {
  const previous = await readBaselineFile(rootDir);
  const delta = computeDelta(summary, previous?.summary ?? null);
  const currentUpdatedAt = new Date().toISOString();
  const filePath = baselineFilePath(rootDir);
  const document = {
    schemaVersion: 1,
    updatedAt: currentUpdatedAt,
    summary,
  };
  await fs.mkdir(path.dirname(filePath), { recursive: true });
  const tmpPath = `${filePath}.tmp`;
  await fs.writeFile(tmpPath, `${JSON.stringify(document, null, 2)}\n`, "utf8");
  await fs.rename(tmpPath, filePath);
  return {
    hasPrevious: Boolean(previous),
    baselinePath: path.relative(rootDir, filePath) || filePath,
    previousUpdatedAt: previous?.updatedAt ?? null,
    currentUpdatedAt,
    delta,
  };
}
99
/**
 * Delete the persisted baseline file if present.
 *
 * @param {string} rootDir - Project root containing the `.gloss` directory.
 * @returns {Promise<{existed: boolean, baselinePath: string}>} Whether a
 *   file was actually removed, and its path relative to rootDir when possible.
 */
export async function resetIssueBaseline(rootDir) {
  const filePath = baselineFilePath(rootDir);
  let existed = true;
  try {
    await fs.rm(filePath);
  } catch (error) {
    // A missing file just means there was nothing to reset; anything else
    // (permissions, I/O) is a real failure and propagates.
    if (error.code !== "ENOENT") {
      throw error;
    }
    existed = false;
  }
  return {
    existed,
    baselinePath: path.relative(rootDir, filePath) || filePath,
  };
}
package/dist/cache.js ADDED
@@ -0,0 +1,78 @@
1
+ import { clearCacheMetrics, readCacheMetrics, } from "./cacheMetrics.js";
2
+ import { clearKeyUsageCache, getKeyUsageCacheStatus, keyUsageCacheKey, } from "./usage.js";
3
+ import { clearUsageScannerCache, getUsageScannerCacheStatus, inferUsageRoot, usageScannerCacheKey, } from "./usageScanner.js";
4
// Shape a status bucket from a persisted metrics entry. A missing entry
// produces an empty bucket tagged "missing" so callers can render every
// bucket uniformly.
const fromMetricsEntry = (cacheKey, entry, staleRelativeToConfig) => {
  if (!entry) {
    return {
      cacheKey,
      fileCount: 0,
      totalSizeBytes: 0,
      oldestMtimeMs: null,
      staleRelativeToConfig,
      source: "missing",
    };
  }
  const { fileCount, totalSizeBytes, oldestMtimeMs } = entry;
  return {
    cacheKey,
    fileCount,
    totalSizeBytes,
    oldestMtimeMs,
    staleRelativeToConfig,
    source: "metrics",
  };
};
24
/**
 * Aggregate cache status across the two cache buckets (usage scanner and
 * key usage). Persisted metrics take precedence; when a bucket has no
 * metrics entry, fall back to the in-memory cache status.
 *
 * @param {string} rootDir - Project root (location of the metrics file).
 * @param {object} cfg - Gloss configuration (used to derive cache keys).
 */
export const getCacheStatus = async (rootDir, cfg) => {
  const usageKey = usageScannerCacheKey(inferUsageRoot(cfg), cfg.scan);
  const keyUsageKey = keyUsageCacheKey(cfg);
  const metrics = await readCacheMetrics(rootDir);
  const metricsUsage = metrics?.usageScanner?.[usageKey] ?? null;
  const metricsKeyUsage = metrics?.keyUsage?.[keyUsageKey] ?? null;
  // Shape an in-memory fallback bucket; with zero cached files it is
  // considered stale relative to the current config.
  const memoryBucket = (memory) => ({
    ...memory,
    staleRelativeToConfig: memory.fileCount === 0,
    source: "memory",
  });
  // When a metrics entry exists the bucket is not stale (the original
  // passed `!metricsUsage`, which is always false on this branch).
  const usageBucket = metricsUsage
    ? fromMetricsEntry(usageKey, metricsUsage, false)
    : memoryBucket(getUsageScannerCacheStatus(inferUsageRoot(cfg), cfg.scan));
  const keyUsageBucket = metricsKeyUsage
    ? fromMetricsEntry(keyUsageKey, metricsKeyUsage, false)
    : memoryBucket(getKeyUsageCacheStatus(cfg));
  const buckets = [usageBucket, keyUsageBucket];
  const totalCachedFiles = buckets.reduce((sum, bucket) => sum + bucket.fileCount, 0);
  const totalCachedSizeBytes = buckets.reduce((sum, bucket) => sum + bucket.totalSizeBytes, 0);
  const mtimeCandidates = buckets
    .map((bucket) => bucket.oldestMtimeMs)
    .filter((value) => typeof value === "number" && Number.isFinite(value));
  const oldestMtimeMs = mtimeCandidates.length > 0 ? Math.min(...mtimeCandidates) : null;
  return {
    metricsFileFound: metrics !== null,
    metricsUpdatedAt: metrics?.updatedAt ?? null,
    usageScanner: usageBucket,
    keyUsage: keyUsageBucket,
    totalCachedFiles,
    totalCachedSizeBytes,
    oldestEntryAgeMs: oldestMtimeMs === null ? null : Math.max(0, Date.now() - oldestMtimeMs),
    staleRelativeToConfig: usageBucket.staleRelativeToConfig || keyUsageBucket.staleRelativeToConfig,
  };
};
69
/**
 * Clear both in-memory caches and the persisted metrics file.
 * Evaluation order matches the original: usage scanner cache first, then
 * key-usage cache, then the on-disk metrics file.
 *
 * @param {string} rootDir - Project root containing the metrics file.
 * @returns {Promise<object>} Per-cache clear results.
 */
export const clearGlossCaches = async (rootDir) => ({
  usage: clearUsageScannerCache(),
  keyUsage: clearKeyUsageCache(),
  metrics: await clearCacheMetrics(rootDir),
});
@@ -0,0 +1,120 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
const CACHE_DIRECTORY = ".gloss";
const CACHE_METRICS_FILENAME = "cache-metrics.json";

// Absolute path of the metrics file under the project root.
const metricsFilePath = (rootDir) => path.join(rootDir, CACHE_DIRECTORY, CACHE_METRICS_FILENAME);

// Fresh, empty metrics document stamped with the current time.
const emptyMetrics = () => ({
  schemaVersion: 1,
  updatedAt: new Date().toISOString(),
  usageScanner: {},
  keyUsage: {},
});
12
// Validate a raw metrics entry. Returns a cleaned copy with exactly the
// known fields, or null when any field is missing or malformed. Note that
// oldestMtimeMs may be null (no files) or any finite number; negative
// values are accepted, matching the original.
const normalizeEntry = (value) => {
  if (!value || typeof value !== "object") {
    return null;
  }
  const { cacheKey, fileCount, totalSizeBytes, oldestMtimeMs, updatedAt } = value;
  // Counters must be finite and non-negative.
  const isCount = (n) => typeof n === "number" && Number.isFinite(n) && n >= 0;
  if (typeof cacheKey !== "string" || cacheKey.trim().length === 0) {
    return null;
  }
  if (!isCount(fileCount) || !isCount(totalSizeBytes)) {
    return null;
  }
  const mtimeOk =
    oldestMtimeMs === null ||
    (typeof oldestMtimeMs === "number" && Number.isFinite(oldestMtimeMs));
  if (!mtimeOk) {
    return null;
  }
  if (typeof updatedAt !== "string") {
    return null;
  }
  return { cacheKey, fileCount, totalSizeBytes, oldestMtimeMs, updatedAt };
};
45
// Validate a map of metrics entries keyed by cache key, silently dropping
// any entry that fails normalizeEntry. A non-object input yields {}.
const normalizeEntries = (value) => {
  if (!value || typeof value !== "object") {
    return {};
  }
  const normalized = {};
  for (const [key, raw] of Object.entries(value)) {
    const entry = normalizeEntry(raw);
    if (entry) {
      normalized[key] = entry;
    }
  }
  return normalized;
};
59
/**
 * Read and validate the persisted cache metrics file.
 *
 * Returns null for every failure mode: missing/unreadable file, malformed
 * JSON, or an unrecognized schemaVersion. The original code's explicit
 * ENOENT check was dead — both the ENOENT branch and the fallthrough
 * returned null — so the catch is collapsed here with behavior unchanged.
 *
 * @param {string} rootDir - Project root containing the `.gloss` directory.
 * @returns {Promise<object|null>} Normalized metrics document, or null.
 */
export const readCacheMetrics = async (rootDir) => {
  const filePath = metricsFilePath(rootDir);
  let parsed;
  try {
    parsed = JSON.parse(await fs.readFile(filePath, "utf8"));
  } catch {
    // ENOENT, permission errors, and JSON syntax errors all mean "no metrics".
    return null;
  }
  // JSON.parse can yield null or a primitive; the original relied on its
  // catch block to absorb the resulting TypeError from property access.
  if (!parsed || typeof parsed !== "object") {
    return null;
  }
  if (parsed.schemaVersion !== 1 || typeof parsed.updatedAt !== "string") {
    return null;
  }
  return {
    schemaVersion: 1,
    updatedAt: parsed.updatedAt,
    usageScanner: normalizeEntries(parsed.usageScanner),
    keyUsage: normalizeEntries(parsed.keyUsage),
  };
};
82
/**
 * Merge one entry into the persisted metrics for the given bucket kind
 * ("usageScanner" or "keyUsage") and write the file atomically
 * (tmp file + rename). Entries under other cache keys are preserved.
 *
 * @param {string} rootDir - Project root; metrics live under `.gloss/`.
 * @param {string} kind - Bucket name to merge into.
 * @param {object} entry - Entry carrying a `cacheKey` used as the map key.
 */
export const updateCacheMetrics = async (rootDir, kind, entry) => {
  const existing = (await readCacheMetrics(rootDir)) ?? emptyMetrics();
  const updatedAt = new Date().toISOString();
  const next = {
    ...existing,
    updatedAt,
    [kind]: {
      ...existing[kind],
      [entry.cacheKey]: { ...entry, updatedAt },
    },
  };
  const filePath = metricsFilePath(rootDir);
  await fs.mkdir(path.dirname(filePath), { recursive: true });
  const tmpPath = `${filePath}.tmp`;
  await fs.writeFile(tmpPath, `${JSON.stringify(next, null, 2)}\n`, "utf8");
  await fs.rename(tmpPath, filePath);
};
101
/**
 * Delete the persisted metrics file if present.
 *
 * @param {string} rootDir - Project root containing the `.gloss` directory.
 * @returns {Promise<{existed: boolean, path: string}>} Whether a file was
 *   actually removed, and its path relative to rootDir when possible.
 */
export const clearCacheMetrics = async (rootDir) => {
  const filePath = metricsFilePath(rootDir);
  let existed = true;
  try {
    await fs.rm(filePath);
  } catch (error) {
    // A missing file just means there was nothing to clear; anything else
    // (permissions, I/O) is a real failure and propagates.
    if (error.code !== "ENOENT") {
      throw error;
    }
    existed = false;
  }
  return {
    existed,
    path: path.relative(rootDir, filePath) || filePath,
  };
};
package/dist/check.js CHANGED
@@ -24,7 +24,26 @@ const SIMPLE_PLACEHOLDER_REGEX = /\{([A-Za-z_][A-Za-z0-9_]*)\}/g;
24
24
  const ICU_PLACEHOLDER_REGEX = /\{([A-Za-z_][A-Za-z0-9_]*)\s*,\s*(plural|select|selectordinal)\s*,/g;
25
25
  const ICU_PLURAL_START_REGEX = /\{([A-Za-z_][A-Za-z0-9_]*)\s*,\s*plural\s*,/g;
26
26
  const ICU_CATEGORY_REGEX = /(?:^|[\s,])(=?\d+|zero|one|two|few|many|other)\s*\{/g;
27
// Inline marker that suppresses a hardcoded-text finding when it appears on
// the flagged line or the line directly above it.
const HARDCODED_IGNORE_MARKER = "gloss-ignore";
// Categories that always fail / always warn, regardless of configuration.
const CHECK_ALWAYS_FAIL_ON = ["missingTranslations", "invalidKeys"];
const CHECK_ALWAYS_WARN_ON = ["orphanKeys", "hardcodedTexts"];
const projectRoot = () => process.env.INIT_CWD || process.cwd();
// Resolve the severity policy for a check run: placeholder mismatches count
// as errors unless cfg.strictPlaceholders is explicitly false (strict by
// default, including when the option is absent).
const getCheckPolicy = (cfg) => {
  const strictPlaceholders = cfg.strictPlaceholders !== false;
  return {
    strictPlaceholders,
    failOn: [
      ...CHECK_ALWAYS_FAIL_ON,
      ...(strictPlaceholders ? ["placeholderMismatches"] : []),
    ],
    warnOn: [
      ...CHECK_ALWAYS_WARN_ON,
      ...(strictPlaceholders ? [] : ["placeholderMismatches"]),
    ],
  };
};
28
47
  const normalizePath = (filePath) => filePath.split(path.sep).join("/");
29
48
  const withCollapsedWhitespace = (value) => value.replace(/\s+/g, " ").trim();
30
49
  const lineNumberAtIndex = (source, index) => {
@@ -39,9 +58,9 @@ const lineNumberAtIndex = (source, index) => {
39
58
  const hasIgnoredPathSegment = (relativePath) => normalizePath(relativePath)
40
59
  .split("/")
41
60
  .some((segment) => HARDCODED_IGNORED_DIRECTORIES.has(segment));
42
- const isLikelyHardcodedText = (value) => {
61
+ const isLikelyHardcodedText = (value, minLength) => {
43
62
  const text = withCollapsedWhitespace(value);
44
- if (text.length < 3) {
63
+ if (text.length < minLength) {
45
64
  return false;
46
65
  }
47
66
  if (!/[A-Za-z]/.test(text)) {
@@ -144,6 +163,33 @@ const scanHardcodedText = async (rootDir, cfg) => {
144
163
  const issues = [];
145
164
  const seen = new Set();
146
165
  const shouldScanFile = createScanMatcher(cfg.scan);
166
+ const hardcodedConfig = cfg.hardcodedText ?? {
167
+ enabled: true,
168
+ minLength: 3,
169
+ excludePatterns: [],
170
+ };
171
+ if (hardcodedConfig.enabled === false) {
172
+ return { issues, suppressedCount: 0 };
173
+ }
174
+ const minLength = typeof hardcodedConfig.minLength === "number" &&
175
+ Number.isFinite(hardcodedConfig.minLength) &&
176
+ hardcodedConfig.minLength >= 1
177
+ ? hardcodedConfig.minLength
178
+ : 3;
179
+ const excludeMatchers = (hardcodedConfig.excludePatterns ?? [])
180
+ .map((pattern) => pattern.trim())
181
+ .filter((pattern) => pattern.length > 0)
182
+ .map((pattern) => new RegExp(pattern));
183
+ let suppressedCount = 0;
184
+ const isSuppressed = (text, line, lines) => {
185
+ const currentLine = lines[line - 1] ?? "";
186
+ const previousLine = lines[line - 2] ?? "";
187
+ if (currentLine.includes(HARDCODED_IGNORE_MARKER) ||
188
+ previousLine.includes(HARDCODED_IGNORE_MARKER)) {
189
+ return true;
190
+ }
191
+ return excludeMatchers.some((matcher) => matcher.test(text));
192
+ };
147
193
  const visitDirectory = async (directory) => {
148
194
  const entries = await fs.readdir(directory, { withFileTypes: true });
149
195
  for (const entry of entries) {
@@ -164,11 +210,17 @@ const scanHardcodedText = async (rootDir, cfg) => {
164
210
  continue;
165
211
  }
166
212
  const source = await fs.readFile(fullPath, "utf8");
213
+ const lines = source.split("\n");
167
214
  let textMatch = JSX_TEXT_REGEX.exec(source);
168
215
  while (textMatch) {
169
216
  const text = withCollapsedWhitespace(textMatch[1]);
170
- if (isLikelyHardcodedText(text)) {
217
+ if (isLikelyHardcodedText(text, minLength)) {
171
218
  const line = lineNumberAtIndex(source, textMatch.index);
219
+ if (isSuppressed(text, line, lines)) {
220
+ suppressedCount += 1;
221
+ textMatch = JSX_TEXT_REGEX.exec(source);
222
+ continue;
223
+ }
172
224
  const dedupeKey = `${relativePath}:${line}:jsx_text:${text}`;
173
225
  if (!seen.has(dedupeKey)) {
174
226
  seen.add(dedupeKey);
@@ -181,8 +233,13 @@ const scanHardcodedText = async (rootDir, cfg) => {
181
233
  let attrMatch = JSX_ATTRIBUTE_REGEX.exec(source);
182
234
  while (attrMatch) {
183
235
  const text = withCollapsedWhitespace(attrMatch[1]);
184
- if (isLikelyHardcodedText(text)) {
236
+ if (isLikelyHardcodedText(text, minLength)) {
185
237
  const line = lineNumberAtIndex(source, attrMatch.index);
238
+ if (isSuppressed(text, line, lines)) {
239
+ suppressedCount += 1;
240
+ attrMatch = JSX_ATTRIBUTE_REGEX.exec(source);
241
+ continue;
242
+ }
186
243
  const dedupeKey = `${relativePath}:${line}:jsx_attribute:${text}`;
187
244
  if (!seen.has(dedupeKey)) {
188
245
  seen.add(dedupeKey);
@@ -204,14 +261,16 @@ const scanHardcodedText = async (rootDir, cfg) => {
204
261
  }
205
262
  return left.text.localeCompare(right.text);
206
263
  });
207
- return issues;
264
+ return { issues, suppressedCount };
208
265
  };
209
- export async function runGlossCheck(cfg) {
266
+ export async function runGlossCheck(cfg, options) {
210
267
  const rootDir = projectRoot();
211
268
  const data = (await readAllTranslations(cfg));
212
269
  const flatByLocale = flattenByLocale(cfg, data);
213
270
  const usageRoot = inferUsageRoot(cfg);
214
- const usage = await scanUsage(usageRoot, cfg.scan);
271
+ const usage = await scanUsage(usageRoot, cfg.scan, {
272
+ useCache: options?.useCache,
273
+ });
215
274
  const usageKeys = new Set(Object.keys(usage));
216
275
  const translationKeys = new Set(cfg.locales.flatMap((locale) => Object.keys(flatByLocale[locale] ?? {})));
217
276
  const allKeys = uniqueSorted([...translationKeys, ...usageKeys]);
@@ -240,6 +299,7 @@ export async function runGlossCheck(cfg) {
240
299
  });
241
300
  orphanKeys.push({ key, localesWithValue });
242
301
  }
302
+ orphanKeys.sort((left, right) => left.key.localeCompare(right.key));
243
303
  const invalidKeys = [];
244
304
  for (const key of translationKeys) {
245
305
  const reason = getInvalidTranslationKeyReason(key);
@@ -247,6 +307,7 @@ export async function runGlossCheck(cfg) {
247
307
  invalidKeys.push({ key, reason });
248
308
  }
249
309
  }
310
+ invalidKeys.sort((left, right) => left.key.localeCompare(right.key));
250
311
  const placeholderMismatches = [];
251
312
  for (const key of translationKeys) {
252
313
  const localesWithValue = cfg.locales.filter((locale) => {
@@ -292,6 +353,12 @@ export async function runGlossCheck(cfg) {
292
353
  }
293
354
  }
294
355
  if (mismatchedLocales.length > 0 || pluralMismatches.length > 0) {
356
+ pluralMismatches.sort((left, right) => {
357
+ if (left.locale !== right.locale) {
358
+ return left.locale.localeCompare(right.locale);
359
+ }
360
+ return left.variable.localeCompare(right.variable);
361
+ });
295
362
  placeholderMismatches.push({
296
363
  key,
297
364
  referenceLocale,
@@ -302,24 +369,43 @@ export async function runGlossCheck(cfg) {
302
369
  });
303
370
  }
304
371
  }
305
- const hardcodedTexts = await scanHardcodedText(usageRoot, cfg);
306
- const summary = {
372
+ placeholderMismatches.sort((left, right) => left.key.localeCompare(right.key));
373
+ const hardcodedScan = await scanHardcodedText(usageRoot, cfg);
374
+ const hardcodedTexts = hardcodedScan.issues;
375
+ const policy = getCheckPolicy(cfg);
376
+ const categoryCounts = {
307
377
  missingTranslations: missingTranslations.length,
308
378
  orphanKeys: orphanKeys.length,
309
379
  invalidKeys: invalidKeys.length,
310
380
  placeholderMismatches: placeholderMismatches.length,
311
381
  hardcodedTexts: hardcodedTexts.length,
312
- totalIssues: missingTranslations.length +
313
- orphanKeys.length +
314
- invalidKeys.length +
315
- placeholderMismatches.length +
316
- hardcodedTexts.length,
382
+ suppressedHardcodedTexts: hardcodedScan.suppressedCount,
383
+ };
384
+ const errorIssues = policy.failOn.reduce((total, category) => {
385
+ return total + categoryCounts[category];
386
+ }, 0);
387
+ const warningIssues = policy.warnOn.reduce((total, category) => {
388
+ return total + categoryCounts[category];
389
+ }, 0);
390
+ const totalIssues = errorIssues + warningIssues;
391
+ const summary = {
392
+ ...categoryCounts,
393
+ errorIssues,
394
+ warningIssues,
395
+ totalIssues,
317
396
  };
397
+ const ok = summary.errorIssues === 0;
318
398
  return {
319
- ok: summary.totalIssues === 0,
399
+ schemaVersion: 1,
400
+ status: ok ? "pass" : "fail",
401
+ ok,
320
402
  generatedAt: new Date().toISOString(),
321
403
  rootDir: rootDir,
322
404
  locales: cfg.locales,
405
+ policy: {
406
+ failOn: [...policy.failOn],
407
+ warnOn: [...policy.warnOn],
408
+ },
323
409
  summary,
324
410
  missingTranslations,
325
411
  orphanKeys,
@@ -345,18 +431,34 @@ const printSample = (title, lines) => {
345
431
  console.log(`- ... +${lines.length - limit} more`);
346
432
  }
347
433
  };
348
- export const printGlossCheck = (result, format) => {
434
+ export const printGlossCheck = (result, format, baseline) => {
435
+ const formatSigned = (value) => (value > 0 ? `+${value}` : `${value}`);
349
436
  if (format === "human" || format === "both") {
350
437
  console.log(`Gloss check for ${result.rootDir}`);
438
+ const placeholderSeverity = result.policy.failOn.includes("placeholderMismatches")
439
+ ? "error"
440
+ : "warning";
351
441
  printTable([
352
- { label: "Missing translations", value: result.summary.missingTranslations },
353
- { label: "Orphan keys", value: result.summary.orphanKeys },
354
- { label: "Invalid keys", value: result.summary.invalidKeys },
355
442
  {
356
- label: "Placeholder mismatches",
443
+ label: "Missing translations (error)",
444
+ value: result.summary.missingTranslations,
445
+ },
446
+ { label: "Orphan keys (warning)", value: result.summary.orphanKeys },
447
+ { label: "Invalid keys (error)", value: result.summary.invalidKeys },
448
+ {
449
+ label: `Placeholder mismatches (${placeholderSeverity})`,
357
450
  value: result.summary.placeholderMismatches,
358
451
  },
359
- { label: "Hardcoded text candidates", value: result.summary.hardcodedTexts },
452
+ {
453
+ label: "Hardcoded text candidates (warning)",
454
+ value: result.summary.hardcodedTexts,
455
+ },
456
+ {
457
+ label: "Hardcoded text suppressed",
458
+ value: result.summary.suppressedHardcodedTexts,
459
+ },
460
+ { label: "Error issues (fail CI)", value: result.summary.errorIssues },
461
+ { label: "Warning issues", value: result.summary.warningIssues },
360
462
  { label: "Total issues", value: result.summary.totalIssues },
361
463
  ]);
362
464
  printSample("Missing translations", result.missingTranslations.map((issue) => `${issue.key} -> missing in [${issue.missingLocales.join(", ")}]${issue.usedInCode ? " (used)" : ""}`));
@@ -369,14 +471,40 @@ export const printGlossCheck = (result, format) => {
369
471
  return `${issue.key} -> expected [${issue.expectedPlaceholders.join(", ")}] from ${issue.referenceLocale}; locales: [${issue.mismatchedLocales.join(", ")}]${pluralInfo}`;
370
472
  }));
371
473
  printSample("Hardcoded text candidates", result.hardcodedTexts.map((issue) => `${issue.file}:${issue.line} [${issue.kind}] ${issue.text}`));
372
- console.log(result.ok
373
- ? "\nResult: PASS"
374
- : "\nResult: FAIL (non-zero exit code for CI guardrails)");
474
+ if (baseline?.hasPrevious) {
475
+ console.log("\nDelta since baseline");
476
+ printTable([
477
+ {
478
+ label: "Missing translations",
479
+ value: formatSigned(baseline.delta.missingTranslations),
480
+ },
481
+ { label: "Orphan keys", value: formatSigned(baseline.delta.orphanKeys) },
482
+ { label: "Invalid keys", value: formatSigned(baseline.delta.invalidKeys) },
483
+ {
484
+ label: "Placeholder mismatches",
485
+ value: formatSigned(baseline.delta.placeholderMismatches),
486
+ },
487
+ {
488
+ label: "Hardcoded text candidates",
489
+ value: formatSigned(baseline.delta.hardcodedTexts),
490
+ },
491
+ { label: "Error issues", value: formatSigned(baseline.delta.errorIssues) },
492
+ {
493
+ label: "Warning issues",
494
+ value: formatSigned(baseline.delta.warningIssues),
495
+ },
496
+ { label: "Total issues", value: formatSigned(baseline.delta.totalIssues) },
497
+ ]);
498
+ }
499
+ else if (baseline) {
500
+ console.log(`\nBaseline initialized at ${baseline.baselinePath}`);
501
+ }
502
+ console.log(result.ok ? "\nResult: PASS" : "\nResult: FAIL (blocking issues found)");
375
503
  }
376
504
  if (format === "json" || format === "both") {
377
505
  if (format === "both") {
378
506
  console.log("\nJSON output:");
379
507
  }
380
- console.log(JSON.stringify(result, null, 2));
508
+ console.log(JSON.stringify({ ...result, baseline }, null, 2));
381
509
  }
382
510
  };