tokvista 1.11.0 → 1.11.2
This diff shows the changes between publicly released versions of this package as they appear in a supported public registry. It is provided for informational purposes only.
- package/dist/bin/tokvista.js +123 -4
- package/package.json +1 -1
package/dist/bin/tokvista.js
CHANGED
|
@@ -1097,6 +1097,45 @@ function isRecord7(value) {
|
|
|
1097
1097
|
// A token-like node is any plain record that carries a `value` entry
// (own or inherited), mirroring the W3C / Token Studio token shape.
function isTokenLike5(obj) {
  if (!isRecord7(obj)) {
    return false;
  }
  return "value" in obj;
}
|
|
1100
|
+
/**
 * Lints a design-token tree for structural smells.
 *
 * Flags tokens that live under a "semantic"/"component" path segment but
 * hold a hardcoded literal (hex/rgb color, or px spacing/sizing) instead of
 * a `{reference}` to a core token.
 *
 * @param {unknown} tokens - Parsed token tree (single-set or Token Studio
 *   multi-set, where top-level keys like "core/colors" name token sets).
 * @returns {{path: string, issue: string, value: string}[]} issues found,
 *   in tree-traversal order; empty when `tokens` is not a record.
 */
function validateTokenStructure(tokens) {
  const issues = [];
  function walk(node, path2 = []) {
    if (!isRecord7(node)) return;
    // Token Studio multi-set files keep sets under top-level keys such as
    // "core/colors". Descend into each set, keeping the set-name segments in
    // the path — otherwise a set named "semantic/colors" would lose its
    // "semantic" marker and its tokens would never be flagged.
    if (path2.length === 0 && Object.keys(node).some((k) => k.includes("/"))) {
      Object.entries(node).forEach(([setName, val]) => walk(val, setName.split("/")));
      return;
    }
    if (isTokenLike5(node)) {
      const tokenPath = path2.join(".");
      // `??` (not `||`) so legitimate falsy values such as 0 or false are
      // stringified rather than collapsed to "".
      const value = String(node.value ?? "");
      const isSemanticToken = path2.some(
        (p) => p.toLowerCase().includes("semantic") || p.toLowerCase().includes("component")
      );
      // `{ref}` values are aliases to core tokens and are always acceptable;
      // only literal values inside semantic/component namespaces are flagged.
      if (isSemanticToken && !value.startsWith("{")) {
        if (/#[0-9A-Fa-f]{3,8}/.test(value) || /rgba?\(/.test(value)) {
          issues.push({
            path: tokenPath,
            issue: "Semantic token uses hardcoded color",
            value
          });
        }
        if (/\d+px/.test(value) && (node.type === "spacing" || node.type === "sizing")) {
          issues.push({
            path: tokenPath,
            issue: "Semantic token uses hardcoded spacing",
            value
          });
        }
      }
      return;
    }
    // Plain group node: recurse into children, extending the path.
    Object.entries(node).forEach(([key, val]) => {
      walk(val, [...path2, key]);
    });
  }
  walk(tokens);
  return issues;
}
|
|
1100
1139
|
function extractTokenNames(tokens) {
|
|
1101
1140
|
const tokenMap = /* @__PURE__ */ new Map();
|
|
1102
1141
|
function walk(node, path2 = []) {
|
|
@@ -1171,6 +1210,10 @@ async function scanDirectory(dir, tokenVars, extensions) {
|
|
|
1171
1210
|
} else if (entry.isFile()) {
|
|
1172
1211
|
const ext = extname(entry.name);
|
|
1173
1212
|
if (extensions.has(ext)) {
|
|
1213
|
+
const fileName = entry.name.toLowerCase();
|
|
1214
|
+
if (fileName.includes("token") && (ext === ".css" || ext === ".scss" || ext === ".js")) {
|
|
1215
|
+
continue;
|
|
1216
|
+
}
|
|
1174
1217
|
const result = await scanFile(fullPath, tokenVars);
|
|
1175
1218
|
filesScanned++;
|
|
1176
1219
|
result.usedVars.forEach((v) => usedVars.add(v));
|
|
@@ -1191,6 +1234,7 @@ async function scanTokenUsage(tokensPath, scanDir, tokens) {
|
|
|
1191
1234
|
if (detection.format !== "token-studio" && detection.format !== "unknown") {
|
|
1192
1235
|
normalizedTokens = normalizeTokenFormat(tokens, detection.format);
|
|
1193
1236
|
}
|
|
1237
|
+
const tokenFileIssues = validateTokenStructure(normalizedTokens);
|
|
1194
1238
|
const tokenMap = extractTokenNames(normalizedTokens);
|
|
1195
1239
|
const tokenVars = new Set(tokenMap.keys());
|
|
1196
1240
|
const extensions = /* @__PURE__ */ new Set([".css", ".scss", ".sass", ".less", ".tsx", ".jsx", ".ts", ".js", ".vue", ".svelte"]);
|
|
@@ -1212,6 +1256,7 @@ async function scanTokenUsage(tokensPath, scanDir, tokens) {
|
|
|
1212
1256
|
// Limit to 50
|
|
1213
1257
|
hardcodedSpacing: scanResult.hardcodedSpacing.slice(0, 50),
|
|
1214
1258
|
// Limit to 50
|
|
1259
|
+
tokenFileIssues,
|
|
1215
1260
|
filesScanned: scanResult.filesScanned
|
|
1216
1261
|
};
|
|
1217
1262
|
}
|
|
@@ -1251,7 +1296,7 @@ Usage:
|
|
|
1251
1296
|
tokvista diff <old.json> <new.json>
|
|
1252
1297
|
tokvista convert <tokens.json> --to <w3c|style-dictionary|supernova> [--output <file>]
|
|
1253
1298
|
tokvista build <tokens.json> --output-dir <dir> [--skip-validation]
|
|
1254
|
-
tokvista scan <directory> [--tokens tokens.json]
|
|
1299
|
+
tokvista scan <directory|tokens.json> [--tokens tokens.json]
|
|
1255
1300
|
|
|
1256
1301
|
Arguments:
|
|
1257
1302
|
tokens.json Path to your tokens file (overrides config.tokens)
|
|
@@ -1266,6 +1311,10 @@ Options:
|
|
|
1266
1311
|
--no-watch Disable live reload (serve only)
|
|
1267
1312
|
--no-preview Skip starting live preview after init
|
|
1268
1313
|
-h, --help Show this help message
|
|
1314
|
+
|
|
1315
|
+
Scan command usage:
|
|
1316
|
+
tokvista scan ./src --tokens tokens.json # Scan directory with specific tokens
|
|
1317
|
+
tokvista scan tokens.json # Scan current directory using tokens file
|
|
1269
1318
|
`);
|
|
1270
1319
|
}
|
|
1271
1320
|
function parsePort(value) {
|
|
@@ -1550,11 +1599,11 @@ function parseScanArgs(args) {
|
|
|
1550
1599
|
throw new Error(`Unknown option: ${arg}`);
|
|
1551
1600
|
}
|
|
1552
1601
|
if (scanDir) {
|
|
1553
|
-
throw new Error(`Only one directory is supported. Unexpected value: "${arg}"`);
|
|
1602
|
+
throw new Error(`Only one directory or token file is supported. Unexpected value: "${arg}"`);
|
|
1554
1603
|
}
|
|
1555
1604
|
scanDir = arg;
|
|
1556
1605
|
}
|
|
1557
|
-
if (!scanDir) throw new Error("Directory to scan is required");
|
|
1606
|
+
if (!scanDir) throw new Error("Directory to scan or token file is required");
|
|
1558
1607
|
return { command: "scan", scanDir, tokenFileArg };
|
|
1559
1608
|
}
|
|
1560
1609
|
function parseExportArgs(args) {
|
|
@@ -2373,7 +2422,70 @@ async function runBuildCommand(cwd, options) {
|
|
|
2373
2422
|
`);
|
|
2374
2423
|
}
|
|
2375
2424
|
async function runScanCommand(cwd, options) {
|
|
2376
|
-
const
|
|
2425
|
+
const resolvedScanPath = path.resolve(cwd, options.scanDir);
|
|
2426
|
+
if (existsSync(resolvedScanPath)) {
|
|
2427
|
+
const fs = await import("fs");
|
|
2428
|
+
if (!fs.statSync(resolvedScanPath).isDirectory()) {
|
|
2429
|
+
const tokenPath2 = resolvedScanPath;
|
|
2430
|
+
const scanDir2 = cwd;
|
|
2431
|
+
const tokens2 = await readTokens(tokenPath2);
|
|
2432
|
+
console.log(`
|
|
2433
|
+
Scanning ${scanDir2} for token usage...
|
|
2434
|
+
`);
|
|
2435
|
+
const result2 = await scanTokenUsage(tokenPath2, scanDir2, tokens2);
|
|
2436
|
+
const usagePercent2 = (result2.usedTokens.length / result2.totalTokens * 100).toFixed(1);
|
|
2437
|
+
console.log(`\u{1F4CA} Token Usage Report
|
|
2438
|
+
`);
|
|
2439
|
+
console.log(`Files scanned: ${result2.filesScanned}`);
|
|
2440
|
+
console.log(`Total tokens: ${result2.totalTokens}`);
|
|
2441
|
+
console.log(`Used tokens: ${result2.usedTokens.length} (${usagePercent2}%)`);
|
|
2442
|
+
console.log(`Unused tokens: ${result2.unusedTokens.length}
|
|
2443
|
+
`);
|
|
2444
|
+
if (result2.unusedTokens.length > 0) {
|
|
2445
|
+
console.log(`\u26A0\uFE0F Unused Tokens (safe to remove):`);
|
|
2446
|
+
result2.unusedTokens.slice(0, 20).forEach((token) => {
|
|
2447
|
+
console.log(` - ${token}`);
|
|
2448
|
+
});
|
|
2449
|
+
if (result2.unusedTokens.length > 20) {
|
|
2450
|
+
console.log(` ... and ${result2.unusedTokens.length - 20} more`);
|
|
2451
|
+
}
|
|
2452
|
+
console.log("");
|
|
2453
|
+
}
|
|
2454
|
+
if (result2.tokenFileIssues.length > 0) {
|
|
2455
|
+
console.log(`\u{1F6A8} Token File Issues:`);
|
|
2456
|
+
result2.tokenFileIssues.forEach(({ path: path2, issue, value }) => {
|
|
2457
|
+
console.log(` ${path2}: ${issue} (${value})`);
|
|
2458
|
+
});
|
|
2459
|
+
console.log("");
|
|
2460
|
+
}
|
|
2461
|
+
if (result2.hardcodedColors.length > 0) {
|
|
2462
|
+
console.log(`\u{1F3A8} Hardcoded Colors (should use tokens):`);
|
|
2463
|
+
result2.hardcodedColors.slice(0, 10).forEach(({ file, line, value }) => {
|
|
2464
|
+
const relPath = path.relative(cwd, file);
|
|
2465
|
+
console.log(` ${relPath}:${line} - ${value}`);
|
|
2466
|
+
});
|
|
2467
|
+
if (result2.hardcodedColors.length > 10) {
|
|
2468
|
+
console.log(` ... and ${result2.hardcodedColors.length - 10} more`);
|
|
2469
|
+
}
|
|
2470
|
+
console.log("");
|
|
2471
|
+
}
|
|
2472
|
+
if (result2.hardcodedSpacing.length > 0) {
|
|
2473
|
+
console.log(`\u{1F4CF} Hardcoded Spacing (should use tokens):`);
|
|
2474
|
+
result2.hardcodedSpacing.slice(0, 10).forEach(({ file, line, value }) => {
|
|
2475
|
+
const relPath = path.relative(cwd, file);
|
|
2476
|
+
console.log(` ${relPath}:${line} - ${value}`);
|
|
2477
|
+
});
|
|
2478
|
+
if (result2.hardcodedSpacing.length > 10) {
|
|
2479
|
+
console.log(` ... and ${result2.hardcodedSpacing.length - 10} more`);
|
|
2480
|
+
}
|
|
2481
|
+
console.log("");
|
|
2482
|
+
}
|
|
2483
|
+
console.log(`\u2705 Scan complete
|
|
2484
|
+
`);
|
|
2485
|
+
return;
|
|
2486
|
+
}
|
|
2487
|
+
}
|
|
2488
|
+
const scanDir = resolvedScanPath;
|
|
2377
2489
|
const tokenPath = options.tokenFileArg ? path.resolve(cwd, options.tokenFileArg) : path.resolve(cwd, "tokens.json");
|
|
2378
2490
|
if (!existsSync(scanDir)) {
|
|
2379
2491
|
throw new Error(`Directory not found: ${scanDir}`);
|
|
@@ -2404,6 +2516,13 @@ Scanning ${scanDir} for token usage...
|
|
|
2404
2516
|
}
|
|
2405
2517
|
console.log("");
|
|
2406
2518
|
}
|
|
2519
|
+
if (result.tokenFileIssues.length > 0) {
|
|
2520
|
+
console.log(`\u{1F6A8} Token File Issues:`);
|
|
2521
|
+
result.tokenFileIssues.forEach(({ path: path2, issue, value }) => {
|
|
2522
|
+
console.log(` ${path2}: ${issue} (${value})`);
|
|
2523
|
+
});
|
|
2524
|
+
console.log("");
|
|
2525
|
+
}
|
|
2407
2526
|
if (result.hardcodedColors.length > 0) {
|
|
2408
2527
|
console.log(`\u{1F3A8} Hardcoded Colors (should use tokens):`);
|
|
2409
2528
|
result.hardcodedColors.slice(0, 10).forEach(({ file, line, value }) => {
|