tokvista 1.11.1 → 1.11.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/tokvista.js +59 -0
- package/package.json +1 -1
package/dist/bin/tokvista.js
CHANGED
|
@@ -1097,6 +1097,45 @@ function isRecord7(value) {
|
|
|
1097
1097
|
// Duck-typing test for a design-token leaf: a record (per isRecord7)
// that exposes a "value" key (own or inherited, matching the `in` operator).
function isTokenLike5(obj) {
  if (!isRecord7(obj)) {
    return false;
  }
  return Reflect.has(obj, "value");
}
|
|
1100
|
+
/**
 * Validates a (normalized) design-token tree for structural anti-patterns.
 *
 * Recursively walks the tree and flags token leaves that live under a
 * "semantic" or "component" path segment but carry a hardcoded value
 * (hex/rgb color, or a px dimension on spacing/sizing tokens) instead of a
 * reference to another token (references start with "{").
 *
 * @param {object} tokens - Token tree. Top-level keys containing "/" are
 *   treated as Token Studio set names and are excluded from token paths.
 * @returns {{path: string, issue: string, value: string}[]} Issues found,
 *   in depth-first key order; empty array when the tree is clean.
 */
function validateTokenStructure(tokens) {
  const issues = [];
  function walk(node, path2 = [], level = 0) {
    if (!isRecord7(node)) return;
    // Token Studio multi-set files keep sets under "group/name" keys at the
    // root; descend into each set without adding the set name to the path.
    if (path2.length === 0 && Object.keys(node).some((k) => k.includes("/"))) {
      Object.values(node).forEach((val) => walk(val, [], level));
      return;
    }
    if (isTokenLike5(node)) {
      const tokenPath = path2.join(".");
      // ?? (not ||) so a literal 0/false value stringifies as "0"/"false"
      // instead of collapsing to "" — only null/undefined fall back.
      const value = String(node.value ?? "");
      const isSemanticToken = path2.some(
        (p) => p.toLowerCase().includes("semantic") || p.toLowerCase().includes("component")
      );
      // Values starting with "{" are references to other tokens — exactly
      // what semantic tokens should contain — so they are never flagged.
      if (isSemanticToken && !value.startsWith("{")) {
        if (/#[0-9A-Fa-f]{3,8}/.test(value) || /rgba?\(/.test(value)) {
          issues.push({
            path: tokenPath,
            issue: "Semantic token uses hardcoded color",
            value
          });
        }
        if (/\d+px/.test(value) && (node.type === "spacing" || node.type === "sizing")) {
          issues.push({
            path: tokenPath,
            issue: "Semantic token uses hardcoded spacing",
            value
          });
        }
      }
      return;
    }
    Object.entries(node).forEach(([key, val]) => {
      walk(val, [...path2, key], level + 1);
    });
  }
  walk(tokens);
  return issues;
}
|
|
1100
1139
|
function extractTokenNames(tokens) {
|
|
1101
1140
|
const tokenMap = /* @__PURE__ */ new Map();
|
|
1102
1141
|
function walk(node, path2 = []) {
|
|
@@ -1171,6 +1210,10 @@ async function scanDirectory(dir, tokenVars, extensions) {
|
|
|
1171
1210
|
} else if (entry.isFile()) {
|
|
1172
1211
|
const ext = extname(entry.name);
|
|
1173
1212
|
if (extensions.has(ext)) {
|
|
1213
|
+
const fileName = entry.name.toLowerCase();
|
|
1214
|
+
if (fileName.includes("token") && (ext === ".css" || ext === ".scss" || ext === ".js")) {
|
|
1215
|
+
continue;
|
|
1216
|
+
}
|
|
1174
1217
|
const result = await scanFile(fullPath, tokenVars);
|
|
1175
1218
|
filesScanned++;
|
|
1176
1219
|
result.usedVars.forEach((v) => usedVars.add(v));
|
|
@@ -1191,6 +1234,7 @@ async function scanTokenUsage(tokensPath, scanDir, tokens) {
|
|
|
1191
1234
|
if (detection.format !== "token-studio" && detection.format !== "unknown") {
|
|
1192
1235
|
normalizedTokens = normalizeTokenFormat(tokens, detection.format);
|
|
1193
1236
|
}
|
|
1237
|
+
const tokenFileIssues = validateTokenStructure(normalizedTokens);
|
|
1194
1238
|
const tokenMap = extractTokenNames(normalizedTokens);
|
|
1195
1239
|
const tokenVars = new Set(tokenMap.keys());
|
|
1196
1240
|
const extensions = /* @__PURE__ */ new Set([".css", ".scss", ".sass", ".less", ".tsx", ".jsx", ".ts", ".js", ".vue", ".svelte"]);
|
|
@@ -1212,6 +1256,7 @@ async function scanTokenUsage(tokensPath, scanDir, tokens) {
|
|
|
1212
1256
|
// Limit to 50
|
|
1213
1257
|
hardcodedSpacing: scanResult.hardcodedSpacing.slice(0, 50),
|
|
1214
1258
|
// Limit to 50
|
|
1259
|
+
tokenFileIssues,
|
|
1215
1260
|
filesScanned: scanResult.filesScanned
|
|
1216
1261
|
};
|
|
1217
1262
|
}
|
|
@@ -2406,6 +2451,13 @@ Scanning ${scanDir2} for token usage...
|
|
|
2406
2451
|
}
|
|
2407
2452
|
console.log("");
|
|
2408
2453
|
}
|
|
2454
|
+
if (result2.tokenFileIssues.length > 0) {
|
|
2455
|
+
console.log(`\u{1F6A8} Token File Issues:`);
|
|
2456
|
+
result2.tokenFileIssues.forEach(({ path: path2, issue, value }) => {
|
|
2457
|
+
console.log(` ${path2}: ${issue} (${value})`);
|
|
2458
|
+
});
|
|
2459
|
+
console.log("");
|
|
2460
|
+
}
|
|
2409
2461
|
if (result2.hardcodedColors.length > 0) {
|
|
2410
2462
|
console.log(`\u{1F3A8} Hardcoded Colors (should use tokens):`);
|
|
2411
2463
|
result2.hardcodedColors.slice(0, 10).forEach(({ file, line, value }) => {
|
|
@@ -2464,6 +2516,13 @@ Scanning ${scanDir} for token usage...
|
|
|
2464
2516
|
}
|
|
2465
2517
|
console.log("");
|
|
2466
2518
|
}
|
|
2519
|
+
if (result.tokenFileIssues.length > 0) {
|
|
2520
|
+
console.log(`\u{1F6A8} Token File Issues:`);
|
|
2521
|
+
result.tokenFileIssues.forEach(({ path: path2, issue, value }) => {
|
|
2522
|
+
console.log(` ${path2}: ${issue} (${value})`);
|
|
2523
|
+
});
|
|
2524
|
+
console.log("");
|
|
2525
|
+
}
|
|
2467
2526
|
if (result.hardcodedColors.length > 0) {
|
|
2468
2527
|
console.log(`\u{1F3A8} Hardcoded Colors (should use tokens):`);
|
|
2469
2528
|
result.hardcodedColors.slice(0, 10).forEach(({ file, line, value }) => {
|