@buoy-design/core 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analysis/audit.d.ts +47 -0
- package/dist/analysis/audit.d.ts.map +1 -0
- package/dist/analysis/audit.js +168 -0
- package/dist/analysis/audit.js.map +1 -0
- package/dist/analysis/index.d.ts +1 -0
- package/dist/analysis/index.d.ts.map +1 -1
- package/dist/analysis/index.js +2 -0
- package/dist/analysis/index.js.map +1 -1
- package/dist/analysis/semantic-diff.d.ts +15 -0
- package/dist/analysis/semantic-diff.d.ts.map +1 -1
- package/dist/analysis/semantic-diff.js +204 -1
- package/dist/analysis/semantic-diff.js.map +1 -1
- package/dist/analysis/token-suggestions.d.ts +8 -1
- package/dist/analysis/token-suggestions.d.ts.map +1 -1
- package/dist/analysis/token-suggestions.js +116 -10
- package/dist/analysis/token-suggestions.js.map +1 -1
- package/dist/extraction/css-parser.d.ts +51 -0
- package/dist/extraction/css-parser.d.ts.map +1 -0
- package/dist/extraction/css-parser.js +303 -0
- package/dist/extraction/css-parser.js.map +1 -0
- package/dist/extraction/index.d.ts +2 -0
- package/dist/extraction/index.d.ts.map +1 -0
- package/dist/extraction/index.js +2 -0
- package/dist/extraction/index.js.map +1 -0
- package/dist/graph/builder.d.ts +118 -0
- package/dist/graph/builder.d.ts.map +1 -0
- package/dist/graph/builder.js +328 -0
- package/dist/graph/builder.js.map +1 -0
- package/dist/graph/collectors/git.d.ts +90 -0
- package/dist/graph/collectors/git.d.ts.map +1 -0
- package/dist/graph/collectors/git.js +253 -0
- package/dist/graph/collectors/git.js.map +1 -0
- package/dist/graph/collectors/imports.d.ts +55 -0
- package/dist/graph/collectors/imports.d.ts.map +1 -0
- package/dist/graph/collectors/imports.js +230 -0
- package/dist/graph/collectors/imports.js.map +1 -0
- package/dist/graph/collectors/index.d.ts +9 -0
- package/dist/graph/collectors/index.d.ts.map +1 -0
- package/dist/graph/collectors/index.js +12 -0
- package/dist/graph/collectors/index.js.map +1 -0
- package/dist/graph/collectors/usages.d.ts +68 -0
- package/dist/graph/collectors/usages.d.ts.map +1 -0
- package/dist/graph/collectors/usages.js +244 -0
- package/dist/graph/collectors/usages.js.map +1 -0
- package/dist/graph/index.d.ts +11 -0
- package/dist/graph/index.d.ts.map +1 -0
- package/dist/graph/index.js +29 -0
- package/dist/graph/index.js.map +1 -0
- package/dist/graph/queries.d.ts +81 -0
- package/dist/graph/queries.d.ts.map +1 -0
- package/dist/graph/queries.js +379 -0
- package/dist/graph/queries.js.map +1 -0
- package/dist/graph/types.d.ts +184 -0
- package/dist/graph/types.d.ts.map +1 -0
- package/dist/graph/types.js +8 -0
- package/dist/graph/types.js.map +1 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -0
- package/dist/index.js.map +1 -1
- package/dist/models/drift.d.ts +20 -4
- package/dist/models/drift.d.ts.map +1 -1
- package/dist/models/drift.js +55 -1
- package/dist/models/drift.js.map +1 -1
- package/dist/models/index.d.ts +3 -3
- package/dist/models/index.d.ts.map +1 -1
- package/dist/models/index.js +2 -2
- package/dist/models/index.js.map +1 -1
- package/dist/models/token.d.ts +57 -0
- package/dist/models/token.d.ts.map +1 -1
- package/dist/models/token.js +9 -0
- package/dist/models/token.js.map +1 -1
- package/dist/tokenization/generator.d.ts +49 -0
- package/dist/tokenization/generator.d.ts.map +1 -0
- package/dist/tokenization/generator.js +886 -0
- package/dist/tokenization/generator.js.map +1 -0
- package/dist/tokenization/index.d.ts +2 -0
- package/dist/tokenization/index.d.ts.map +1 -0
- package/dist/tokenization/index.js +2 -0
- package/dist/tokenization/index.js.map +1 -0
- package/dist/tokens/comparison.d.ts +30 -0
- package/dist/tokens/comparison.d.ts.map +1 -0
- package/dist/tokens/comparison.js +142 -0
- package/dist/tokens/comparison.js.map +1 -0
- package/dist/tokens/index.d.ts +3 -0
- package/dist/tokens/index.d.ts.map +1 -0
- package/dist/tokens/index.js +5 -0
- package/dist/tokens/index.js.map +1 -0
- package/dist/tokens/parser.d.ts +11 -0
- package/dist/tokens/parser.d.ts.map +1 -0
- package/dist/tokens/parser.js +268 -0
- package/dist/tokens/parser.js.map +1 -0
- package/package.json +14 -10
- package/LICENSE +0 -21
- package/dist/analysis/semantic-diff.test.d.ts +0 -2
- package/dist/analysis/semantic-diff.test.d.ts.map +0 -1
- package/dist/analysis/semantic-diff.test.js +0 -188
- package/dist/analysis/semantic-diff.test.js.map +0 -1
- package/dist/models/component.test.d.ts +0 -2
- package/dist/models/component.test.d.ts.map +0 -1
- package/dist/models/component.test.js +0 -55
- package/dist/models/component.test.js.map +0 -1
- package/dist/models/drift.test.d.ts +0 -2
- package/dist/models/drift.test.d.ts.map +0 -1
- package/dist/models/drift.test.js +0 -38
- package/dist/models/drift.test.js.map +0 -1
- package/dist/models/token.test.d.ts +0 -2
- package/dist/models/token.test.d.ts.map +0 -1
- package/dist/models/token.test.js +0 -168
- package/dist/models/token.test.js.map +0 -1
|
@@ -0,0 +1,886 @@
|
|
|
1
|
+
/**
 * Token Generator
 * Clusters extracted design values and generates design tokens.
 */
import { hexToRgb, normalizeHexColor, spacingToPx } from '../extraction/css-parser.js';
// Token generation limits - clusters ranked below these frequency cutoffs are
// still emitted, but flagged as orphan tokens (`isOrphan: true`).
const MAX_SPACING_TOKENS = 10;
const MAX_SIZING_TOKENS = 12;
|
|
9
|
+
/**
 * Generate design tokens from extracted values.
 *
 * Groups raw extracted values by category (color, spacing, sizing,
 * font-size, radius, breakpoint), delegates each group to a
 * category-specific generator, aggregates per-category stats into an
 * overall coverage figure, and renders CSS and JSON output.
 *
 * @param {Array} values - Extracted design values; each item carries at
 *   least `value` and `category`, and optionally `context` (used for
 *   spacing vs. position scales).
 * @param {Object} [options]
 * @param {number} [options.colorThreshold=10] - Similarity threshold passed
 *   to the color clusterer.
 * @param {number} [options.spacingThreshold=4] - Pixel distance threshold for
 *   clustering numeric values.
 * @param {string} [options.prefix=''] - Prefix forwarded to the CSS generator.
 * @returns {{tokens: Array, css: *, json: *, stats: Object}} All generated
 *   tokens (named + orphans), rendered outputs, and coverage statistics.
 */
export function generateTokens(values, options = {}) {
    const { colorThreshold = 10, spacingThreshold = 4, prefix = '', } = options;
    const tokens = [];
    const stats = {
        total: values.length,
        coverage: { total: 0, covered: 0, percentage: 0 },
        byCategory: {},
    };
    // Group values by category
    const byCategory = {};
    for (const value of values) {
        if (!byCategory[value.category]) {
            byCategory[value.category] = [];
        }
        byCategory[value.category].push(value);
    }
    // Extract breakpoint values first to filter from sizing: a media-query
    // width that also shows up as a width value is noise, not a size.
    const breakpointPxValues = new Set();
    if (byCategory['breakpoint']) {
        for (const v of byCategory['breakpoint']) {
            const px = spacingToPx(v.value);
            if (px !== null && px > 0) {
                breakpointPxValues.add(Math.round(px));
            }
        }
    }
    // Generate color tokens
    if (byCategory['color']) {
        const result = generateColorTokens(byCategory['color'], colorThreshold);
        tokens.push(...result.tokens);
        stats.byCategory['color'] = result.stats;
    }
    // Generate spacing tokens (group by context: spacing, sizing, position)
    if (byCategory['spacing']) {
        // Group by context first so each context gets its own scale
        const byContext = {};
        for (const v of byCategory['spacing']) {
            const ctx = v.context || 'spacing';
            if (!byContext[ctx])
                byContext[ctx] = [];
            byContext[ctx].push(v);
        }
        // Generate separate scales for spacing context
        if (byContext['spacing']) {
            const result = generateSpacingTokens(byContext['spacing'], spacingThreshold, 'spacing', 'spacing');
            tokens.push(...result.tokens);
            stats.byCategory['spacing'] = result.stats;
        }
        // Generate separate scale for position context
        if (byContext['position']) {
            const result = generateSpacingTokens(byContext['position'], spacingThreshold, 'position', 'spacing');
            tokens.push(...result.tokens);
            stats.byCategory['position'] = result.stats;
        }
    }
    // Generate sizing tokens - FILTER OUT BREAKPOINT VALUES
    if (byCategory['sizing']) {
        const filteredSizing = byCategory['sizing'].filter(v => {
            const px = spacingToPx(v.value);
            if (px === null)
                return true; // Keep non-numeric
            return !breakpointPxValues.has(Math.round(px));
        });
        if (filteredSizing.length > 0) {
            const result = generateSizingTokens(filteredSizing, spacingThreshold);
            tokens.push(...result.tokens);
            stats.byCategory['sizing'] = result.stats;
        }
    }
    // Generate font-size tokens
    if (byCategory['font-size']) {
        const result = generateFontSizeTokens(byCategory['font-size'], spacingThreshold);
        tokens.push(...result.tokens);
        stats.byCategory['font-size'] = result.stats;
    }
    // Generate radius tokens
    if (byCategory['radius']) {
        const result = generateRadiusTokens(byCategory['radius'], spacingThreshold);
        tokens.push(...result.tokens);
        stats.byCategory['radius'] = result.stats;
    }
    // Generate breakpoint tokens
    if (byCategory['breakpoint']) {
        const result = generateBreakpointTokens(byCategory['breakpoint']);
        tokens.push(...result.tokens);
        stats.byCategory['breakpoint'] = result.stats;
    }
    // Calculate overall coverage
    let totalCovered = 0;
    for (const categoryStats of Object.values(stats.byCategory)) {
        // Coverage is based on how many input values have a token.
        // All values in tokenized clusters are "covered".
        // NOTE(review): this re-derives the covered count from the already
        // rounded percentage, so it can drift by a value or two per category.
        const coveredInCategory = Math.round(categoryStats.input * (categoryStats.coverage / 100));
        totalCovered += coveredInCategory;
    }
    stats.coverage = {
        total: values.length,
        covered: totalCovered,
        percentage: values.length > 0 ? Math.round((totalCovered / values.length) * 100) : 0,
    };
    // Generate CSS output
    const css = generateCss(tokens, prefix);
    // Generate JSON output
    const json = generateJson(tokens);
    return { tokens, css, json, stats };
}
|
|
118
|
+
/**
 * Generate color tokens by clustering similar colors.
 *
 * Colors are normalized and counted, then clustered by similarity
 * (`colorsAreSimilar` with the given threshold). Clusters are split into
 * roles: low-saturation clusters become the neutral scale, the first three
 * saturated clusters (in frequency order) become primaries, and the rest
 * become accents. Overflow in any role is emitted as orphan tokens.
 *
 * @param {Array} values - Extracted color values.
 * @param {number} threshold - Similarity threshold forwarded to colorsAreSimilar.
 * @returns {{tokens: Array, stats: Object}} Named + orphan tokens and stats.
 */
function generateColorTokens(values, threshold) {
    const inputCount = values.length;
    // Count occurrences of each color
    const colorCounts = new Map();
    for (const v of values) {
        const normalized = normalizeColor(v.value);
        colorCounts.set(normalized, (colorCounts.get(normalized) || 0) + 1);
    }
    const uniqueCount = colorCounts.size;
    // Sort by frequency
    const sortedColors = [...colorCounts.entries()]
        .sort((a, b) => b[1] - a[1]);
    // Cluster similar colors
    const clusters = [];
    for (const [color, count] of sortedColors) {
        // Try to find an existing cluster this color belongs to
        let foundCluster = false;
        for (const cluster of clusters) {
            if (colorsAreSimilar(color, cluster.representative, threshold)) {
                cluster.members.push(color);
                cluster.count += count;
                foundCluster = true;
                break;
            }
        }
        if (!foundCluster) {
            // Colors arrive most-frequent first, so a new cluster's
            // representative is the most common member at formation time.
            clusters.push({ representative: color, members: [color], count });
        }
    }
    // Sort clusters by total count
    clusters.sort((a, b) => b.count - a.count);
    // Assign token names
    const tokens = [];
    const tokenizedClusters = [];
    const orphanClusters = [];
    // Categorize colors: low saturation -> neutral; first three saturated
    // clusters (by frequency) -> primaries; everything after -> accents.
    const neutrals = [];
    const primaries = [];
    const accents = [];
    for (const cluster of clusters) {
        const rgb = parseColor(cluster.representative);
        if (!rgb)
            continue;
        const saturation = getColorSaturation(rgb);
        if (saturation < 0.1) {
            neutrals.push(cluster);
        }
        else if (primaries.length < 3) {
            primaries.push(cluster);
        }
        else {
            accents.push(cluster);
        }
    }
    // Generate neutral tokens (gray scale) - limit to 11
    const neutralNames = ['50', '100', '200', '300', '400', '500', '600', '700', '800', '900', '950'];
    neutrals.sort((a, b) => {
        const rgbA = parseColor(a.representative);
        const rgbB = parseColor(b.representative);
        if (!rgbA || !rgbB)
            return 0;
        return getLightness(rgbB) - getLightness(rgbA); // Lighter first
    });
    for (let i = 0; i < neutrals.length; i++) {
        const cluster = neutrals[i];
        if (i < neutralNames.length) {
            tokens.push({
                name: `color-neutral-${neutralNames[i]}`,
                value: cluster.representative,
                category: 'color',
                context: 'color',
                occurrences: cluster.count,
                sources: cluster.members,
            });
            tokenizedClusters.push(cluster);
        }
        else {
            // Extra neutrals become orphans
            tokens.push({
                name: `color-orphan-${orphanClusters.length + 1}`,
                value: cluster.representative,
                category: 'color',
                context: 'color',
                occurrences: cluster.count,
                sources: cluster.members,
                isOrphan: true,
            });
            orphanClusters.push(cluster);
        }
    }
    // Generate primary tokens (most frequent saturated cluster)
    if (primaries.length > 0) {
        const primary = primaries[0];
        tokens.push({
            name: 'color-primary-500',
            value: primary.representative,
            category: 'color',
            context: 'color',
            occurrences: primary.count,
            sources: primary.members,
        });
        tokenizedClusters.push(primary);
    }
    if (primaries.length > 1) {
        const secondary = primaries[1];
        tokens.push({
            name: 'color-secondary-500',
            value: secondary.representative,
            category: 'color',
            context: 'color',
            occurrences: secondary.count,
            sources: secondary.members,
        });
        tokenizedClusters.push(secondary);
    }
    // Additional primaries become orphans (at most one, since the primaries
    // bucket is capped at three above)
    for (let i = 2; i < primaries.length; i++) {
        const cluster = primaries[i];
        tokens.push({
            name: `color-orphan-${orphanClusters.length + 1}`,
            value: cluster.representative,
            category: 'color',
            context: 'color',
            occurrences: cluster.count,
            sources: cluster.members,
            isOrphan: true,
        });
        orphanClusters.push(cluster);
    }
    // Generate accent tokens - first three accents get names, rest become orphans
    for (let i = 0; i < accents.length; i++) {
        const accent = accents[i];
        if (i < 3) {
            tokens.push({
                name: `color-accent-${i + 1}`,
                value: accent.representative,
                category: 'color',
                context: 'color',
                occurrences: accent.count,
                sources: accent.members,
            });
            tokenizedClusters.push(accent);
        }
        else {
            tokens.push({
                name: `color-orphan-${orphanClusters.length + 1}`,
                value: accent.representative,
                category: 'color',
                context: 'color',
                occurrences: accent.count,
                sources: accent.members,
                isOrphan: true,
            });
            orphanClusters.push(accent);
        }
    }
    // Calculate coverage: sum of tokenized cluster counts / total input
    const tokenizedCount = tokenizedClusters.reduce((sum, c) => sum + c.count, 0);
    const coverage = inputCount > 0 ? Math.round((tokenizedCount / inputCount) * 100) : 0;
    return {
        tokens,
        stats: {
            input: inputCount,
            uniqueValues: uniqueCount,
            clustered: clusters.length,
            tokenized: tokenizedClusters.length,
            orphaned: orphanClusters.length,
            coverage,
        },
    };
}
|
|
292
|
+
/**
 * Generate spacing tokens using t-shirt sizing.
 *
 * Tallies each value in rounded pixels, merges values within `threshold` px
 * of an existing cluster (zero never merges with anything), keeps the
 * MAX_SPACING_TOKENS most frequent clusters as the named scale, and emits
 * the remainder as orphan tokens.
 *
 * @param {Array} values - Extracted values for one spacing-like context.
 * @param {number} threshold - Maximum px distance for merging two values.
 * @param {string} [context='spacing'] - Token name prefix and context field.
 * @param {string} [category='spacing'] - Category recorded on each token.
 * @returns {{tokens: Array, stats: Object}}
 */
function generateSpacingTokens(values, threshold, context = 'spacing', category = 'spacing') {
    const inputCount = values.length;
    // Tally occurrences per rounded pixel value, remembering raw source strings.
    const countsByPx = new Map();
    for (const item of values) {
        const px = spacingToPx(item.value);
        if (px === null || px < 0) {
            continue;
        }
        const key = Math.round(px);
        let bucket = countsByPx.get(key);
        if (!bucket) {
            bucket = { count: 0, sources: [] };
            countsByPx.set(key, bucket);
        }
        bucket.count += 1;
        if (!bucket.sources.includes(item.value)) {
            bucket.sources.push(item.value);
        }
    }
    const uniqueCount = countsByPx.size;
    // Merge nearby pixel values into clusters, walking values in ascending order.
    const clusters = [];
    const ascending = [...countsByPx.entries()].sort((a, b) => a[0] - b[0]);
    for (const [px, bucket] of ascending) {
        // Zero always stands alone - it is never merged with non-zero values.
        if (px === 0) {
            clusters.push({ value: 0, count: bucket.count, sources: [...bucket.sources] });
            continue;
        }
        const match = clusters.find((c) => c.value !== 0 && Math.abs(px - c.value) <= threshold);
        if (match) {
            // The more frequent value becomes the cluster representative.
            if (bucket.count > match.count) {
                match.value = px;
            }
            match.count += bucket.count;
            match.sources.push(...bucket.sources);
        }
        else {
            clusters.push({ value: px, count: bucket.count, sources: [...bucket.sources] });
        }
    }
    // Order by value, then keep the most-used clusters as the primary scale.
    clusters.sort((a, b) => a.value - b.value);
    const byFrequency = [...clusters].sort((a, b) => b.count - a.count);
    const topClusters = byFrequency.slice(0, MAX_SPACING_TOKENS).sort((a, b) => a.value - b.value);
    const orphanClusters = byFrequency.slice(MAX_SPACING_TOKENS);
    // T-shirt size names, assigned smallest-to-largest.
    const sizeNames = ['3xs', '2xs', 'xs', 'sm', 'md', 'lg', 'xl', '2xl', '3xl', '4xl'];
    const tokens = topClusters.map((cluster, i) => ({
        name: `${context}-${sizeNames[i] || `${i + 1}`}`,
        value: `${cluster.value}px`,
        category,
        context,
        occurrences: cluster.count,
        sources: [...new Set(cluster.sources)],
    }));
    // Less common values still get tokens, flagged as orphans.
    orphanClusters.forEach((cluster, i) => {
        tokens.push({
            name: `${context}-orphan-${i + 1}`,
            value: `${cluster.value}px`,
            category,
            context,
            occurrences: cluster.count,
            sources: [...new Set(cluster.sources)],
            isOrphan: true,
        });
    });
    // Coverage counts only values absorbed by the primary (non-orphan) tokens.
    const tokenizedCount = topClusters.reduce((sum, c) => sum + c.count, 0);
    const coverage = inputCount > 0 ? Math.round((tokenizedCount / inputCount) * 100) : 0;
    return {
        tokens,
        stats: {
            input: inputCount,
            uniqueValues: uniqueCount,
            clustered: clusters.length,
            tokenized: topClusters.length,
            orphaned: orphanClusters.length,
            coverage,
        },
    };
}
|
|
394
|
+
/**
 * Generate sizing tokens (width, height) - different naming convention.
 *
 * Same clustering pipeline as spacing (tally by rounded px, merge within
 * `threshold`, zero kept separate), but named numerically: the
 * MAX_SIZING_TOKENS most frequent clusters become `size-1` .. `size-N`
 * ordered smallest-to-largest; the remainder become orphans.
 *
 * @param {Array} values - Extracted sizing values (breakpoints pre-filtered).
 * @param {number} threshold - Maximum px distance for merging two values.
 * @returns {{tokens: Array, stats: Object}}
 */
function generateSizingTokens(values, threshold) {
    const inputCount = values.length;
    // Tally occurrences per rounded pixel value.
    const countsByPx = new Map();
    for (const item of values) {
        const px = spacingToPx(item.value);
        if (px === null || px < 0) {
            continue;
        }
        const key = Math.round(px);
        let bucket = countsByPx.get(key);
        if (!bucket) {
            bucket = { count: 0, sources: [] };
            countsByPx.set(key, bucket);
        }
        bucket.count += 1;
        if (!bucket.sources.includes(item.value)) {
            bucket.sources.push(item.value);
        }
    }
    const uniqueCount = countsByPx.size;
    // Merge nearby pixel values into clusters, ascending by value.
    const clusters = [];
    const ascending = [...countsByPx.entries()].sort((a, b) => a[0] - b[0]);
    for (const [px, bucket] of ascending) {
        // Zero always stands alone - never merged with non-zero values.
        if (px === 0) {
            clusters.push({ value: 0, count: bucket.count, sources: [...bucket.sources] });
            continue;
        }
        const match = clusters.find((c) => c.value !== 0 && Math.abs(px - c.value) <= threshold);
        if (match) {
            // The more frequent value becomes the cluster representative.
            if (bucket.count > match.count) {
                match.value = px;
            }
            match.count += bucket.count;
            match.sources.push(...bucket.sources);
        }
        else {
            clusters.push({ value: px, count: bucket.count, sources: [...bucket.sources] });
        }
    }
    clusters.sort((a, b) => a.value - b.value);
    // Sizing uses numeric naming: size-1 through size-N, most-used first.
    const byFrequency = [...clusters].sort((a, b) => b.count - a.count);
    const topClusters = byFrequency.slice(0, MAX_SIZING_TOKENS).sort((a, b) => a.value - b.value);
    const orphanClusters = byFrequency.slice(MAX_SIZING_TOKENS);
    const tokens = topClusters.map((cluster, i) => ({
        name: `size-${i + 1}`,
        value: `${cluster.value}px`,
        category: 'sizing',
        context: 'sizing',
        occurrences: cluster.count,
        sources: [...new Set(cluster.sources)],
    }));
    // Less common values still get tokens, flagged as orphans.
    orphanClusters.forEach((cluster, i) => {
        tokens.push({
            name: `size-orphan-${i + 1}`,
            value: `${cluster.value}px`,
            category: 'sizing',
            context: 'sizing',
            occurrences: cluster.count,
            sources: [...new Set(cluster.sources)],
            isOrphan: true,
        });
    });
    // Coverage counts only values absorbed by the primary (non-orphan) tokens.
    const tokenizedCount = topClusters.reduce((sum, c) => sum + c.count, 0);
    const coverage = inputCount > 0 ? Math.round((tokenizedCount / inputCount) * 100) : 0;
    return {
        tokens,
        stats: {
            input: inputCount,
            uniqueValues: uniqueCount,
            clustered: clusters.length,
            tokenized: topClusters.length,
            orphaned: orphanClusters.length,
            coverage,
        },
    };
}
|
|
490
|
+
/**
 * Generate font-size tokens.
 *
 * Same clustering pipeline as spacing, with two differences: values below
 * MIN_FONT_SIZE_PX are discarded as noise, and the ten most frequent
 * clusters get typographic names (`font-size-2xs` .. `font-size-5xl`)
 * assigned smallest-to-largest.
 *
 * @param {Array} values - Extracted font-size values.
 * @param {number} threshold - Maximum px distance for merging two values.
 * @returns {{tokens: Array, stats: Object}}
 */
function generateFontSizeTokens(values, threshold) {
    const inputCount = values.length;
    // Minimum realistic font size (smaller values are likely border-width or other noise)
    const MIN_FONT_SIZE_PX = 8;
    // Cap on named font-size tokens; the remainder become orphans.
    const MAX_FONT_SIZE_TOKENS = 10;
    // Tally occurrences per rounded pixel value.
    const countsByPx = new Map();
    for (const item of values) {
        const px = spacingToPx(item.value);
        // Filter out unrealistic font sizes.
        if (px === null || px < MIN_FONT_SIZE_PX) {
            continue;
        }
        const key = Math.round(px);
        let bucket = countsByPx.get(key);
        if (!bucket) {
            bucket = { count: 0, sources: [] };
            countsByPx.set(key, bucket);
        }
        bucket.count += 1;
        if (!bucket.sources.includes(item.value)) {
            bucket.sources.push(item.value);
        }
    }
    const uniqueCount = countsByPx.size;
    // Merge nearby pixel values into clusters, ascending by value.
    const clusters = [];
    const ascending = [...countsByPx.entries()].sort((a, b) => a[0] - b[0]);
    for (const [px, bucket] of ascending) {
        // Defensive: keep zero separate (values below MIN_FONT_SIZE_PX are
        // already filtered above, so this branch is a safety net).
        if (px === 0) {
            clusters.push({ value: 0, count: bucket.count, sources: [...bucket.sources] });
            continue;
        }
        const match = clusters.find((c) => c.value !== 0 && Math.abs(px - c.value) <= threshold);
        if (match) {
            // The more frequent value becomes the cluster representative.
            if (bucket.count > match.count) {
                match.value = px;
            }
            match.count += bucket.count;
            match.sources.push(...bucket.sources);
        }
        else {
            clusters.push({ value: px, count: bucket.count, sources: [...bucket.sources] });
        }
    }
    clusters.sort((a, b) => a.value - b.value);
    // Keep the most-used clusters as the named typographic scale.
    const byFrequency = [...clusters].sort((a, b) => b.count - a.count);
    const topClusters = byFrequency.slice(0, MAX_FONT_SIZE_TOKENS).sort((a, b) => a.value - b.value);
    const orphanClusters = byFrequency.slice(MAX_FONT_SIZE_TOKENS);
    // Font-size names, assigned smallest-to-largest.
    const sizeNames = ['2xs', 'xs', 'sm', 'base', 'lg', 'xl', '2xl', '3xl', '4xl', '5xl'];
    const tokens = topClusters.map((cluster, i) => ({
        name: `font-size-${sizeNames[i] || `${i + 1}`}`,
        value: `${cluster.value}px`,
        category: 'font-size',
        context: 'typography',
        occurrences: cluster.count,
        sources: [...new Set(cluster.sources)],
    }));
    // Less common font sizes still get tokens, flagged as orphans.
    orphanClusters.forEach((cluster, i) => {
        tokens.push({
            name: `font-size-orphan-${i + 1}`,
            value: `${cluster.value}px`,
            category: 'font-size',
            context: 'typography',
            occurrences: cluster.count,
            sources: [...new Set(cluster.sources)],
            isOrphan: true,
        });
    });
    // Coverage counts only values absorbed by the primary (non-orphan) tokens.
    const tokenizedCount = topClusters.reduce((sum, c) => sum + c.count, 0);
    const coverage = inputCount > 0 ? Math.round((tokenizedCount / inputCount) * 100) : 0;
    return {
        tokens,
        stats: {
            input: inputCount,
            uniqueValues: uniqueCount,
            clustered: clusters.length,
            tokenized: topClusters.length,
            orphaned: orphanClusters.length,
            coverage,
        },
    };
}
|
|
594
|
+
/**
 * Generate radius tokens.
 *
 * Clusters like the other numeric generators, then names clusters by value
 * rather than frequency: zero (if present) becomes `radius-none`, the six
 * smallest non-zero clusters become `radius-sm` .. `radius-full` (the
 * 'full' slot is always emitted as the 9999px pill radius), and any larger
 * clusters become orphans.
 *
 * @param {Array} values - Extracted border-radius values.
 * @param {number} threshold - Maximum px distance for merging two values.
 * @returns {{tokens: Array, stats: Object}}
 */
function generateRadiusTokens(values, threshold) {
    const inputCount = values.length;
    // Tally occurrences per rounded pixel value.
    const countsByPx = new Map();
    for (const item of values) {
        const px = spacingToPx(item.value);
        if (px === null || px < 0) {
            continue;
        }
        const key = Math.round(px);
        let bucket = countsByPx.get(key);
        if (!bucket) {
            bucket = { count: 0, sources: [] };
            countsByPx.set(key, bucket);
        }
        bucket.count += 1;
        if (!bucket.sources.includes(item.value)) {
            bucket.sources.push(item.value);
        }
    }
    const uniqueCount = countsByPx.size;
    // Merge nearby pixel values into clusters, ascending by value.
    const clusters = [];
    const ascending = [...countsByPx.entries()].sort((a, b) => a[0] - b[0]);
    for (const [px, bucket] of ascending) {
        // Zero always stands alone - never merged with non-zero values.
        if (px === 0) {
            clusters.push({ value: 0, count: bucket.count, sources: [...bucket.sources] });
            continue;
        }
        const match = clusters.find((c) => c.value !== 0 && Math.abs(px - c.value) <= threshold);
        if (match) {
            // The more frequent value becomes the cluster representative.
            if (bucket.count > match.count) {
                match.value = px;
            }
            match.count += bucket.count;
            match.sources.push(...bucket.sources);
        }
        else {
            clusters.push({ value: px, count: bucket.count, sources: [...bucket.sources] });
        }
    }
    clusters.sort((a, b) => a.value - b.value);
    // Separate the zero cluster from the non-zero scale.
    const zeroCluster = clusters.find((c) => c.value === 0);
    const nonZeroClusters = clusters.filter((c) => c.value > 0);
    // Size names for non-zero values only, assigned smallest-to-largest.
    const sizeNames = ['sm', 'md', 'lg', 'xl', '2xl', 'full'];
    const tokens = [];
    const tokenizedClusters = [];
    // Only emit radius-none when a literal 0 radius was actually observed.
    if (zeroCluster) {
        tokens.push({
            name: 'radius-none',
            value: '0',
            category: 'radius',
            context: 'radius',
            occurrences: zeroCluster.count,
            sources: [...new Set(zeroCluster.sources)],
        });
        tokenizedClusters.push(zeroCluster);
    }
    // The smallest clusters fill the named scale; anything beyond it orphans.
    const named = nonZeroClusters.slice(0, sizeNames.length);
    const orphanClusters = nonZeroClusters.slice(sizeNames.length);
    named.forEach((cluster, i) => {
        const sizeName = sizeNames[i];
        tokens.push({
            name: `radius-${sizeName}`,
            // The 'full' slot always renders as the pill radius, not the raw value.
            value: sizeName === 'full' ? '9999px' : `${cluster.value}px`,
            category: 'radius',
            context: 'radius',
            occurrences: cluster.count,
            sources: [...new Set(cluster.sources)],
        });
        tokenizedClusters.push(cluster);
    });
    orphanClusters.forEach((cluster, i) => {
        tokens.push({
            name: `radius-orphan-${i + 1}`,
            value: `${cluster.value}px`,
            category: 'radius',
            context: 'radius',
            occurrences: cluster.count,
            sources: [...new Set(cluster.sources)],
            isOrphan: true,
        });
    });
    // Coverage counts only values absorbed by the primary (non-orphan) tokens.
    const tokenizedCount = tokenizedClusters.reduce((sum, c) => sum + c.count, 0);
    const coverage = inputCount > 0 ? Math.round((tokenizedCount / inputCount) * 100) : 0;
    return {
        tokens,
        stats: {
            input: inputCount,
            uniqueValues: uniqueCount,
            clustered: clusters.length,
            tokenized: tokenizedClusters.length,
            orphaned: orphanClusters.length,
            coverage,
        },
    };
}
|
|
709
|
+
/**
 * Generate breakpoint tokens from media query values.
 *
 * Unlike the spacing/radius generators there is no clustering step: each
 * distinct rounded pixel width becomes its own candidate breakpoint. The
 * smallest widths receive conventional names (xs..3xl); anything beyond
 * the naming convention is emitted as a flagged orphan token.
 */
function generateBreakpointTokens(values) {
    const inputCount = values.length;
    // Bucket raw values by rounded pixel width, tracking occurrence count
    // and the distinct raw source strings that produced each bucket.
    const buckets = new Map();
    for (const entry of values) {
        const px = spacingToPx(entry.value);
        // Unparseable or non-positive widths cannot be breakpoints.
        if (px === null || px <= 0) {
            continue;
        }
        const key = Math.round(px);
        const bucket = buckets.get(key);
        if (bucket === undefined) {
            buckets.set(key, { count: 1, sources: [entry.value] });
        }
        else {
            bucket.count += 1;
            if (!bucket.sources.includes(entry.value)) {
                bucket.sources.push(entry.value);
            }
        }
    }
    const uniqueCount = buckets.size;
    // Ascending order: the smallest width maps to the smallest name.
    const ordered = [...buckets.entries()].sort(([a], [b]) => a - b);
    const breakpointNames = ['xs', 'sm', 'md', 'lg', 'xl', '2xl', '3xl'];
    const named = ordered.slice(0, breakpointNames.length);
    const orphans = ordered.slice(breakpointNames.length);
    const tokens = named.map(([px, data], i) => ({
        name: `breakpoint-${breakpointNames[i]}`,
        value: `${px}px`,
        category: 'breakpoint',
        context: 'breakpoint',
        occurrences: data.count,
        sources: [...new Set(data.sources)],
    }));
    // Widths beyond the naming convention still get tokens, marked as orphans.
    orphans.forEach(([px, data], i) => {
        tokens.push({
            name: `breakpoint-orphan-${i + 1}`,
            value: `${px}px`,
            category: 'breakpoint',
            context: 'breakpoint',
            occurrences: data.count,
            sources: [...new Set(data.sources)],
            isOrphan: true,
        });
    });
    // Coverage counts only occurrences captured by named (non-orphan) tokens.
    let namedOccurrences = 0;
    for (const [, data] of named) {
        namedOccurrences += data.count;
    }
    const coverage = inputCount > 0 ? Math.round((namedOccurrences / inputCount) * 100) : 0;
    return {
        tokens,
        stats: {
            input: inputCount,
            uniqueValues: uniqueCount,
            clustered: uniqueCount, // No clustering for breakpoints
            tokenized: named.length,
            orphaned: orphans.length,
            coverage,
        },
    };
}
|
|
778
|
+
/**
 * Generate CSS custom properties (a `:root` block) from a token list.
 *
 * Tokens are grouped by category and emitted in a fixed canonical order.
 * Bug fix: tokens whose category is outside the canonical list were
 * previously dropped silently (the "<Category>s" fallback display name
 * below was dead code); they are now appended after the canonical
 * categories in first-seen order. Output for known categories is unchanged.
 *
 * @param {Array<{name: string, value: string, category: string}>} tokens
 * @param {string} [prefix] - Optional variable-name prefix
 *   (`--prefix-name` instead of `--name`).
 * @returns {string} The formatted CSS text.
 */
function generateCss(tokens, prefix) {
    const lines = [':root {'];
    // Group by category; insertion order is preserved for unknown categories.
    const byCategory = {};
    for (const token of tokens) {
        if (!byCategory[token.category]) {
            byCategory[token.category] = [];
        }
        byCategory[token.category].push(token);
    }
    const categoryOrder = ['color', 'spacing', 'sizing', 'font-size', 'radius', 'breakpoint'];
    const categoryDisplayNames = {
        'color': 'Colors',
        'spacing': 'Spacing',
        'sizing': 'Sizing',
        'font-size': 'Font Sizes',
        'radius': 'Border Radii',
        'breakpoint': 'Breakpoints',
    };
    // Canonical categories first, then any remaining ones so no token is lost.
    const orderedCategories = [
        ...categoryOrder,
        ...Object.keys(byCategory).filter((c) => !categoryOrder.includes(c)),
    ];
    for (const category of orderedCategories) {
        const categoryTokens = byCategory[category];
        if (!categoryTokens || categoryTokens.length === 0)
            continue;
        // Fallback turns e.g. "shadow" into "Shadows" for unlisted categories.
        const displayName = categoryDisplayNames[category] || `${category.charAt(0).toUpperCase() + category.slice(1)}s`;
        lines.push(`  /* ${displayName} */`);
        for (const token of categoryTokens) {
            const varName = prefix ? `--${prefix}-${token.name}` : `--${token.name}`;
            lines.push(`  ${varName}: ${token.value};`);
        }
        lines.push('');
    }
    lines.push('}');
    return lines.join('\n');
}
|
|
815
|
+
/**
 * Generate a nested JSON token object: `{ category: { tokenName: value } }`.
 *
 * @param {Array<{name: string, category: string, value: string}>} tokens
 * @returns {Object} Token values keyed by category, then by token name.
 */
function generateJson(tokens) {
    const result = {};
    for (const { category, name, value } of tokens) {
        // Get-or-create the category group, then record the token value.
        const group = result[category] || (result[category] = {});
        group[name] = value;
    }
    return result;
}
|
|
828
|
+
// Helper functions
|
|
829
|
+
/**
 * Normalize a CSS color string: lowercase and trim it, map a handful of
 * common named colors to hex, and canonicalize hex notation. Any other
 * form (e.g. rgb()/hsl() strings) is returned lowercased/trimmed as-is.
 */
function normalizeColor(color) {
    const cleaned = color.toLowerCase().trim();
    // Minimal named-color table — only names this codebase expects to meet.
    const NAMED = {
        white: '#ffffff',
        black: '#000000',
        red: '#ff0000',
        green: '#008000',
        blue: '#0000ff',
        transparent: 'transparent',
    };
    const mapped = NAMED[cleaned];
    if (mapped) {
        return mapped;
    }
    return cleaned.startsWith('#') ? normalizeHexColor(cleaned) : cleaned;
}
|
|
849
|
+
/**
 * Parse a CSS color into `{ r, g, b }` integer channels, or null when the
 * color cannot be parsed. Only hex (via hexToRgb) and the simple
 * `rgb(r, g, b)` integer form are supported.
 */
function parseColor(color) {
    const normalized = normalizeColor(color);
    if (normalized.startsWith('#')) {
        return hexToRgb(normalized);
    }
    // Simple rgb(r, g, b) form with integer channels.
    const match = /rgb\((\d+),\s*(\d+),\s*(\d+)\)/.exec(normalized);
    if (!match) {
        return null;
    }
    const [, r, g, b] = match;
    return {
        r: parseInt(r, 10),
        g: parseInt(g, 10),
        b: parseInt(b, 10),
    };
}
|
|
865
|
+
/**
 * Whether two colors fall within a rough perceptual distance of each other.
 * Uses plain Euclidean distance in RGB space; `threshold` is scaled by 10
 * as a crude conversion from the perceptual scale. Colors that cannot be
 * parsed never match anything.
 */
function colorsAreSimilar(color1, color2, threshold) {
    const a = parseColor(color1);
    const b = parseColor(color2);
    if (!a || !b) {
        return false;
    }
    const dr = a.r - b.r;
    const dg = a.g - b.g;
    const db = a.b - b.b;
    const distance = Math.sqrt(dr * dr + dg * dg + db * db);
    return distance < threshold * 10; // Rough conversion from perceptual threshold
}
|
|
876
|
+
/**
 * Saturation of an RGB color in [0, 1], computed HSV-style:
 * (max - min) / max, returning 0 for pure black to avoid dividing by zero.
 */
function getColorSaturation(rgb) {
    const channels = [rgb.r, rgb.g, rgb.b];
    const max = Math.max(...channels);
    if (max === 0) {
        return 0;
    }
    const min = Math.min(...channels);
    return (max - min) / max;
}
|
|
883
|
+
/**
 * Rough lightness of an RGB color in [0, 1]: the mean of the three
 * channels normalized by 255. (Not perceptually weighted.)
 */
function getLightness(rgb) {
    // Same operation order as before so float results stay bit-identical.
    const average = (rgb.r + rgb.g + rgb.b) / 3;
    return average / 255;
}
|
|
886
|
+
//# sourceMappingURL=generator.js.map
|