tokka 0.2.1 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/compiler/generators/css.ts +146 -0
- package/compiler/generators/figma.ts +147 -0
- package/compiler/generators/tailwind.ts +106 -0
- package/compiler/generators/typescript.ts +113 -0
- package/compiler/index.ts +45 -0
- package/compiler/loader.ts +92 -0
- package/compiler/resolver.ts +177 -0
- package/compiler/types.ts +118 -0
- package/compiler/validator.ts +194 -0
- package/dist/index.js +26 -14
- package/package.json +3 -3
package/compiler/generators/css.ts
ADDED
@@ -0,0 +1,146 @@
+import { type ResolvedToken, type System } from "../types.js"
+
+export interface CSSGeneratorOptions {
+  modeSelector?: {
+    strategy: "class" | "data-attribute"
+    selectors?: {
+      light: string
+      dark: string
+      [key: string]: string
+    }
+  }
+}
+
+/**
+ * Convert token ID to CSS variable name
+ * surface.brand → --surface-brand
+ * button.primary.bg → --button-primary-bg
+ */
+export function tokenIdToCSSVar(id: string): string {
+  return `--${id.replace(/\./g, "-")}`
+}
+
+/**
+ * Parse color value to HSL triplet format
+ * Supports: hsl(...), oklch(...), hex, rgb(...)
+ */
+export function parseColorToHSLTriplet(value: string): string {
+  // If already a triplet (e.g., "220 90% 56%"), return as-is
+  if (/^\d+\s+\d+%\s+\d+%$/.test(value.trim())) {
+    return value.trim()
+  }
+
+  // If it's hsl(...), extract the triplet
+  const hslMatch = value.match(/hsl\(([^)]+)\)/)
+  if (hslMatch) {
+    return hslMatch[1].trim()
+  }
+
+  // If it's oklch(...), we'll need conversion (simplified for v1)
+  const oklchMatch = value.match(/oklch\(([^)]+)\)/)
+  if (oklchMatch) {
+    // For v1, just pass through - users should provide HSL
+    // In production, would convert OKLCH to HSL
+    return value
+  }
+
+  // For other formats, pass through as-is
+  return value
+}
+
+/**
+ * Generate CSS variables for a mode
+ */
+export function generateCSSForMode(
+  tokens: ResolvedToken[],
+  mode: string | "default",
+  options: CSSGeneratorOptions = {}
+): string {
+  const lines: string[] = []
+
+  const selector =
+    mode === "default"
+      ? ":root"
+      : options.modeSelector?.strategy === "data-attribute"
+        ? options.modeSelector.selectors?.[mode] || `[data-theme="${mode}"]`
+        : options.modeSelector?.selectors?.[mode] || `.${mode}`
+
+  lines.push(`${selector} {`)
+
+  for (const token of tokens) {
+    const cssVar = tokenIdToCSSVar(token.id)
+    let value: string | undefined
+
+    // Get value for this mode
+    if (mode === "default") {
+      value = token.resolvedValue || token.value
+    } else if (token.resolvedModes?.[mode]) {
+      value = token.resolvedModes[mode]
+    } else if (token.modes?.[mode]) {
+      value = token.modes[mode]
+    }
+
+    if (value === undefined) continue
+
+    // Convert colors to HSL triplet format
+    if (token.type === "color") {
+      const triplet = parseColorToHSLTriplet(String(value))
+      lines.push(`  ${cssVar}: ${triplet};`)
+    } else {
+      lines.push(`  ${cssVar}: ${value};`)
+    }
+  }
+
+  lines.push("}")
+
+  return lines.join("\n")
+}
+
+/**
+ * Generate complete CSS output
+ */
+export function generateCSS(
+  tokens: ResolvedToken[],
+  system: System,
+  options: CSSGeneratorOptions = {}
+): string {
+  const output: string[] = []
+
+  // Header comment
+  output.push("/**")
+  output.push(` * Design tokens for ${system.name}`)
+  output.push(" * Generated by figma-base - do not edit directly")
+  output.push(" */")
+  output.push("")
+
+  // Generate default (light) mode
+  const lightMode = system.modes.includes("light") ? "light" : system.modes[0]
+  output.push(generateCSSForMode(tokens, "default", options))
+
+  // Generate other modes
+  for (const mode of system.modes) {
+    if (mode !== lightMode) {
+      output.push("")
+      output.push(generateCSSForMode(tokens, mode, options))
+    }
+  }
+
+  return output.join("\n")
+}
+
+/**
+ * Generate CSS output files
+ */
+export interface CSSOutput {
+  "tokens.css": string
+}
+
+export function generateCSSOutput(
+  tokens: ResolvedToken[],
+  system: System,
+  options: CSSGeneratorOptions = {}
+): CSSOutput {
+  return {
+    "tokens.css": generateCSS(tokens, system, options),
+  }
+}
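To illustrate the new API: a minimal usage sketch, assuming the shipped compiler/index.js is importable from the consuming code (the CLI itself resolves it with a runtime path.join). The token and system literals are hypothetical, and the casts stand in for fully validated objects.

    import { generateCSS, type ResolvedToken, type System } from "./compiler/index.js"

    // A semantic color with a dark-mode override, plus a primitive radius.
    const tokens = [
      {
        id: "surface.brand", type: "color", source: "semantic",
        description: "Brand surface", value: "hsl(220 90% 56%)",
        modes: { dark: "hsl(220 90% 66%)" },
      },
      {
        id: "radius.md", type: "radius", source: "primitive",
        description: "Medium radius", value: "0.5rem",
      },
    ] as ResolvedToken[]

    const system = {
      id: "demo", name: "Demo", description: "Demo system",
      modes: ["light", "dark"],
    } as System

    console.log(generateCSS(tokens, system))
    // :root gets --surface-brand: 220 90% 56%; and --radius-md: 0.5rem;
    // .dark gets --surface-brand: 220 90% 66%;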
package/compiler/generators/figma.ts
ADDED
@@ -0,0 +1,147 @@
+import { type ResolvedToken, type System } from "../types.js"
+
+/**
+ * Format a color value for Tokens Studio compatibility
+ * Converts Tailwind-style HSL (e.g., "220 90% 56%") to proper hsl() format
+ */
+function formatColorValue(value: string): string {
+  // Check if it's an HSL value without the hsl() wrapper (Tailwind format)
+  if (/^\d+\s+\d+%\s+\d+%$/.test(value.trim())) {
+    return `hsl(${value})`
+  }
+  // Return as-is for hex, rgb(), hsl(), or other formats
+  return value
+}
+
+/**
+ * Format a token value based on its type
+ */
+function formatTokenValue(token: ResolvedToken, value: string): string {
+  // Don't format references
+  if (value.startsWith("{") && value.endsWith("}")) {
+    return value
+  }
+
+  // Format colors
+  if (token.type === "color") {
+    return formatColorValue(value)
+  }
+
+  return value
+}
+
+/**
+ * Generate Tokens Studio compatible format
+ */
+export function generateFigmaTokens(
+  tokens: ResolvedToken[],
+  _system: System
+): Record<string, any> {
+  const output: Record<string, any> = {}
+
+  // Group tokens by source
+  const primitiveTokens = tokens.filter((t) => t.source === "primitive")
+  const semanticTokens = tokens.filter((t) => t.source === "semantic")
+  const componentTokens = tokens.filter((t) => t.source === "component")
+
+  // Build nested structure for primitives
+  const primitiveTree: Record<string, any> = {}
+  for (const token of primitiveTokens) {
+    const parts = token.id.split(".")
+    let current = primitiveTree
+    for (let i = 0; i < parts.length - 1; i++) {
+      if (!current[parts[i]]) {
+        current[parts[i]] = {}
+      }
+      current = current[parts[i]]
+    }
+    const lastPart = parts[parts.length - 1]
+    const rawValue = token.resolvedValue || token.value
+    const formattedValue = formatTokenValue(token, rawValue)
+
+    current[lastPart] = {
+      value: formattedValue,
+      type: token.type,
+      description: token.description,
+    }
+  }
+  output.global = primitiveTree
+
+  // Build nested structure for semantics
+  const semanticTree: Record<string, any> = {}
+  for (const token of semanticTokens) {
+    const parts = token.id.split(".")
+    let current = semanticTree
+    for (let i = 0; i < parts.length - 1; i++) {
+      if (!current[parts[i]]) {
+        current[parts[i]] = {}
+      }
+      current = current[parts[i]]
+    }
+    const lastPart = parts[parts.length - 1]
+
+    // Use reference format if token has references
+    const rawValue =
+      token.references && token.references.length > 0
+        ? `{${token.references[0]}}`
+        : token.resolvedValue || token.value
+
+    const formattedValue = formatTokenValue(token, rawValue)
+
+    current[lastPart] = {
+      value: formattedValue,
+      type: token.type,
+      description: token.description,
+    }
+  }
+  output.semantic = semanticTree
+
+  // Build component tokens if any
+  if (componentTokens.length > 0) {
+    const componentTree: Record<string, any> = {}
+    for (const token of componentTokens) {
+      const parts = token.id.split(".")
+      let current = componentTree
+      for (let i = 0; i < parts.length - 1; i++) {
+        if (!current[parts[i]]) {
+          current[parts[i]] = {}
+        }
+        current = current[parts[i]]
+      }
+      const lastPart = parts[parts.length - 1]
+
+      const rawValue =
+        token.references && token.references.length > 0
+          ? `{${token.references[0]}}`
+          : token.resolvedValue || token.value
+
+      const formattedValue = formatTokenValue(token, rawValue)
+
+      current[lastPart] = {
+        value: formattedValue,
+        type: token.type,
+        description: token.description,
+      }
+    }
+    output.component = componentTree
+  }
+
+  return output
+}
+
+/**
+ * Generate Figma token export file
+ */
+export interface FigmaTokenOutput {
+  "tokka.tokens.json": string
+}
+
+export function generateFigmaTokenOutput(
+  tokens: ResolvedToken[],
+  system: System
+): FigmaTokenOutput {
+  const tokensObject = generateFigmaTokens(tokens, system)
+  return {
+    "tokka.tokens.json": JSON.stringify(tokensObject, null, 2),
+  }
+}
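A sketch of the Tokens Studio shape this produces for one hypothetical primitive and one semantic alias; note that a token's first reference is emitted in `{alias}` syntax rather than its resolved value.

    const output = generateFigmaTokens(
      [
        { id: "blue.500", type: "color", source: "primitive",
          description: "Base blue", resolvedValue: "220 90% 56%" },
        { id: "surface.brand", type: "color", source: "semantic",
          description: "Brand surface", references: ["blue.500"] },
      ] as ResolvedToken[],
      {} as System // the system argument is unused by this generator
    )
    // output.global.blue["500"]     -> { value: "hsl(220 90% 56%)", type: "color", description: "Base blue" }
    // output.semantic.surface.brand -> { value: "{blue.500}", type: "color", description: "Brand surface" }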
package/compiler/generators/tailwind.ts
ADDED
@@ -0,0 +1,106 @@
+import { type ResolvedToken, type System } from "../types.js"
+import { tokenIdToCSSVar } from "./css.js"
+
+/**
+ * Semantic token mapping to Tailwind color names
+ * Maps semantic tokens to Tailwind's expected color scheme
+ */
+const SEMANTIC_TO_TAILWIND_MAPPING: Record<string, string> = {
+  "surface.brand": "primary",
+  "text.on-brand": "primary-foreground",
+  "surface.secondary": "secondary",
+  "text.on-secondary": "secondary-foreground",
+  "surface.destructive": "destructive",
+  "text.on-destructive": "destructive-foreground",
+  "surface.default": "background",
+  "text.default": "foreground",
+  "surface.card": "card",
+  "text.on-card": "card-foreground",
+  "surface.popover": "popover",
+  "text.on-popover": "popover-foreground",
+  "surface.muted": "muted",
+  "text.muted": "muted-foreground",
+  "surface.accent": "accent",
+  "text.on-accent": "accent-foreground",
+  "border.default": "border",
+  "border.input": "input",
+  "focus.ring": "ring",
+}
+
+/**
+ * Generate Tailwind config mapping semantic tokens to CSS vars
+ */
+export function generateTailwindConfig(
+  tokens: ResolvedToken[],
+  _system: System
+): Record<string, any> {
+  const colors: Record<string, string> = {}
+  const spacing: Record<string, string> = {}
+  const borderRadius: Record<string, string> = {}
+  const boxShadow: Record<string, string> = {}
+
+  for (const token of tokens) {
+    // Only map semantic tokens to Tailwind (not primitives)
+    if (token.source !== "semantic") continue
+
+    const cssVar = tokenIdToCSSVar(token.id)
+
+    // Map semantic tokens to Tailwind color names
+    if (token.type === "color") {
+      const tailwindName = SEMANTIC_TO_TAILWIND_MAPPING[token.id]
+      if (tailwindName) {
+        colors[tailwindName] = `hsl(var(${cssVar}))`
+      } else {
+        // Also include the semantic token by its own name
+        const name = token.id.replace(/\./g, "-")
+        colors[name] = `hsl(var(${cssVar}))`
+      }
+    }
+
+    // Map spacing tokens
+    if (token.type === "dimension" && token.id.startsWith("space.")) {
+      const name = token.id.replace("space.", "")
+      spacing[name] = `var(${cssVar})`
+    }
+
+    // Map radius tokens
+    if (token.type === "radius" && token.id.startsWith("radius.")) {
+      const name = token.id.replace("radius.", "")
+      borderRadius[name] = `var(${cssVar})`
+    }
+
+    // Map shadow tokens
+    if (token.type === "shadow" && token.id.startsWith("shadow.")) {
+      const name = token.id.replace("shadow.", "")
+      boxShadow[name] = `var(${cssVar})`
+    }
+  }
+
+  return {
+    theme: {
+      extend: {
+        colors,
+        spacing,
+        borderRadius,
+        boxShadow,
+      },
+    },
+  }
+}
+
+/**
+ * Generate Tailwind config file content
+ */
+export function generateTailwindOutput(
+  tokens: ResolvedToken[],
+  system: System
+): string {
+  const config = generateTailwindConfig(tokens, system)
+
+  return `/**
+ * Tailwind config for ${system.name}
+ * Generated by figma-base - do not edit directly
+ */
+export default ${JSON.stringify(config, null, 2)}
+`
+}
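A sketch of the resulting theme extension for two hypothetical semantic tokens; mapped colors land under their shadcn-style names, everything else resolves through CSS-variable lookups.

    const config = generateTailwindConfig(
      [
        { id: "surface.brand", type: "color", source: "semantic", description: "Brand surface" },
        { id: "space.4", type: "dimension", source: "semantic", description: "Spacing step 4" },
      ] as ResolvedToken[],
      {} as System // unused by this generator
    )
    // config.theme.extend.colors  -> { primary: "hsl(var(--surface-brand))" }
    // config.theme.extend.spacing -> { "4": "var(--space-4)" }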
package/compiler/generators/typescript.ts
ADDED
@@ -0,0 +1,113 @@
+import { type ResolvedToken, type System } from "../types.js"
+
+/**
+ * Generate TypeScript token map with full typing
+ */
+export function generateTypeScript(
+  tokens: ResolvedToken[],
+  system: System
+): string {
+  const lines: string[] = []
+
+  // Header
+  lines.push("/**")
+  lines.push(` * Token type definitions for ${system.name}`)
+  lines.push(" * Generated by figma-base - do not edit directly")
+  lines.push(" */")
+  lines.push("")
+
+  // Token type enum
+  lines.push("export type TokenType =")
+  lines.push('  | "color"')
+  lines.push('  | "number"')
+  lines.push('  | "dimension"')
+  lines.push('  | "radius"')
+  lines.push('  | "shadow"')
+  lines.push('  | "typography"')
+  lines.push('  | "motion"')
+  lines.push('  | "opacity"')
+  lines.push('  | "zIndex"')
+  lines.push("")
+
+  // Token source enum
+  lines.push("export type TokenSource = \"primitive\" | \"semantic\" | \"component\"")
+  lines.push("")
+
+  // Token interface
+  lines.push("export interface Token {")
+  lines.push("  id: string")
+  lines.push("  type: TokenType")
+  lines.push("  source: TokenSource")
+  lines.push("  description: string")
+  lines.push("  value?: string")
+  lines.push("  modes?: Record<string, string>")
+  lines.push("}")
+  lines.push("")
+
+  // All token IDs union type
+  lines.push("export type TokenId =")
+  tokens.forEach((token, index) => {
+    const isLast = index === tokens.length - 1
+    lines.push(`  | "${token.id}"${isLast ? "" : ""}`)
+  })
+  lines.push("")
+
+  // Token map object
+  lines.push("export const tokens: Record<TokenId, Token> = {")
+  for (const token of tokens) {
+    lines.push(`  "${token.id}": {`)
+    lines.push(`    id: "${token.id}",`)
+    lines.push(`    type: "${token.type}",`)
+    lines.push(`    source: "${token.source}",`)
+    lines.push(`    description: ${JSON.stringify(token.description)},`)
+
+    if (token.resolvedValue) {
+      lines.push(`    value: ${JSON.stringify(token.resolvedValue)},`)
+    }
+
+    if (token.resolvedModes && Object.keys(token.resolvedModes).length > 0) {
+      lines.push(`    modes: {`)
+      for (const [mode, value] of Object.entries(token.resolvedModes)) {
+        lines.push(`      "${mode}": ${JSON.stringify(value)},`)
+      }
+      lines.push(`    },`)
+    }
+
+    lines.push(`  },`)
+  }
+  lines.push("} as const")
+  lines.push("")
+
+  // Helper functions
+  lines.push("export function getToken(id: TokenId): Token | undefined {")
+  lines.push("  return tokens[id]")
+  lines.push("}")
+  lines.push("")
+
+  lines.push("export function getTokensByType(type: TokenType): Token[] {")
+  lines.push("  return Object.values(tokens).filter(t => t.type === type)")
+  lines.push("}")
+  lines.push("")
+
+  lines.push("export function getTokensBySource(source: TokenSource): Token[] {")
+  lines.push("  return Object.values(tokens).filter(t => t.source === source)")
+  lines.push("}")
+
+  return lines.join("\n")
+}
+
+/**
+ * Generate TypeScript output
+ */
+export interface TypeScriptOutput {
+  "tokens.ts": string
+}
+
+export function generateTypeScriptOutput(
+  tokens: ResolvedToken[],
+  system: System
+): TypeScriptOutput {
+  return {
+    "tokens.ts": generateTypeScript(tokens, system),
+  }
+}
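For a single hypothetical token with two resolved modes, the generated tokens.ts comes out roughly like this (header comment and type declarations omitted):

    export type TokenId =
      | "surface.brand"

    export const tokens: Record<TokenId, Token> = {
      "surface.brand": {
        id: "surface.brand",
        type: "color",
        source: "semantic",
        description: "Brand surface",
        modes: {
          "light": "hsl(220 90% 56%)",
          "dark": "hsl(220 90% 66%)",
        },
      },
    } as const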
package/compiler/index.ts
ADDED
@@ -0,0 +1,45 @@
+export * from "./types.js"
+export * from "./loader.js"
+export * from "./validator.js"
+export * from "./resolver.js"
+export * from "./generators/css.js"
+export * from "./generators/tailwind.js"
+export * from "./generators/typescript.js"
+export * from "./generators/figma.js"
+
+import { loadTokens, loadSystem, type LoadOptions } from "./loader.js"
+import { validateTokens } from "./validator.js"
+import { buildDependencyGraph, resolveTokens } from "./resolver.js"
+import { type CompilationResult } from "./types.js"
+
+/**
+ * Main compilation function
+ */
+export async function compile(options: LoadOptions): Promise<CompilationResult> {
+  // Load tokens and system
+  const loadedTokens = await loadTokens(options)
+  const system = await loadSystem(options)
+
+  // Validate tokens
+  const validationErrors = validateTokens(loadedTokens.all, system.modes, {
+    strict: false, // Tier 0 default
+  })
+
+  if (validationErrors.length > 0) {
+    return {
+      tokens: [],
+      errors: validationErrors,
+      warnings: [],
+    }
+  }
+
+  // Build dependency graph and resolve
+  const graph = buildDependencyGraph(loadedTokens.all)
+  const { resolved, errors } = resolveTokens(loadedTokens.all, graph)
+
+  return {
+    tokens: resolved,
+    errors,
+    warnings: [],
+  }
+}
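Driving the compiler programmatically looks roughly like this; the relative import path is an assumption (the CLI resolves compiler/index.js against its own dist directory at runtime).

    import { compile } from "./compiler/index.js"

    const result = await compile({ cwd: process.cwd() })
    if (result.errors.length > 0) {
      for (const err of result.errors) {
        console.error(`${err.type}: ${err.message}`)
      }
      process.exit(1)
    }
    console.log(`Resolved ${result.tokens.length} tokens`)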
package/compiler/loader.ts
ADDED
@@ -0,0 +1,92 @@
+import fs from "fs/promises"
+import path from "path"
+import { tokenFileSchema, systemSchema, type Token, type System } from "./types.js"
+
+export interface LoadOptions {
+  cwd: string
+  tokensDir?: string
+}
+
+export interface LoadedTokens {
+  primitives: Token[]
+  semantics: Token[]
+  components: Token[]
+  all: Token[]
+}
+
+/**
+ * Load token files from a project directory
+ */
+export async function loadTokens(options: LoadOptions): Promise<LoadedTokens> {
+  const tokensDir = options.tokensDir || path.join(options.cwd, "tokens")
+
+  // Load primitives (required)
+  const primitivesPath = path.join(tokensDir, "primitives.json")
+  const primitivesContent = await fs.readFile(primitivesPath, "utf-8")
+  const primitivesData = tokenFileSchema.parse(JSON.parse(primitivesContent))
+  const primitives = primitivesData.tokens
+
+  // Load semantics (required)
+  const semanticsPath = path.join(tokensDir, "semantics.json")
+  const semanticsContent = await fs.readFile(semanticsPath, "utf-8")
+  const semanticsData = tokenFileSchema.parse(JSON.parse(semanticsContent))
+  const semantics = semanticsData.tokens
+
+  // Load component tokens (optional - Tier 2)
+  const components: Token[] = []
+  const componentsDir = path.join(tokensDir, "components")
+  try {
+    const componentFiles = await fs.readdir(componentsDir)
+    for (const file of componentFiles) {
+      if (file.endsWith(".json")) {
+        const componentPath = path.join(componentsDir, file)
+        const componentContent = await fs.readFile(componentPath, "utf-8")
+        const componentData = tokenFileSchema.parse(JSON.parse(componentContent))
+        components.push(...componentData.tokens)
+      }
+    }
+  } catch (error) {
+    // Components directory is optional
+  }
+
+  return {
+    primitives,
+    semantics,
+    components,
+    all: [...primitives, ...semantics, ...components],
+  }
+}
+
+/**
+ * Load system metadata
+ */
+export async function loadSystem(options: LoadOptions): Promise<System> {
+  const systemPath = path.join(options.cwd, "system.json")
+  const systemContent = await fs.readFile(systemPath, "utf-8")
+  const system = systemSchema.parse(JSON.parse(systemContent))
+  return system
+}
+
+/**
+ * Check if tokens directory exists
+ */
+export async function hasTokens(cwd: string): Promise<boolean> {
+  try {
+    await fs.access(path.join(cwd, "tokens"))
+    return true
+  } catch {
+    return false
+  }
+}
+
+/**
+ * Check if system.json exists
+ */
+export async function hasSystem(cwd: string): Promise<boolean> {
+  try {
+    await fs.access(path.join(cwd, "system.json"))
+    return true
+  } catch {
+    return false
+  }
+}
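The loader therefore expects a project layout like this (paths come from the code above; the component file name is an illustrative example):

    <cwd>/
      system.json              parsed with systemSchema
      tokens/
        primitives.json        { "tokens": [...] } (required)
        semantics.json         { "tokens": [...] } (required)
        components/            optional *.json files (Tier 2)
          button.json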
package/compiler/resolver.ts
ADDED
@@ -0,0 +1,177 @@
+import { type Token, type ResolvedToken, type CompilationError } from "./types.js"
+
+/**
+ * Build a dependency graph from token references
+ */
+export function buildDependencyGraph(tokens: Token[]): Map<string, Set<string>> {
+  const graph = new Map<string, Set<string>>()
+
+  // Initialize nodes
+  for (const token of tokens) {
+    graph.set(token.id, new Set())
+  }
+
+  // Add edges
+  for (const token of tokens) {
+    if (token.references) {
+      const deps = graph.get(token.id)!
+      for (const ref of token.references) {
+        deps.add(ref)
+      }
+    }
+  }
+
+  return graph
+}
+
+/**
+ * Detect cycles in the dependency graph
+ */
+export function detectCycles(graph: Map<string, Set<string>>): string[][] {
+  const cycles: string[][] = []
+  const visited = new Set<string>()
+  const recursionStack = new Set<string>()
+
+  function dfs(node: string, path: string[]): boolean {
+    visited.add(node)
+    recursionStack.add(node)
+    path.push(node)
+
+    const neighbors = graph.get(node) || new Set()
+    for (const neighbor of neighbors) {
+      if (!visited.has(neighbor)) {
+        if (dfs(neighbor, path)) {
+          return true
+        }
+      } else if (recursionStack.has(neighbor)) {
+        // Found a cycle
+        const cycleStart = path.indexOf(neighbor)
+        cycles.push([...path.slice(cycleStart), neighbor])
+        return true
+      }
+    }
+
+    recursionStack.delete(node)
+    path.pop()
+    return false
+  }
+
+  for (const node of graph.keys()) {
+    if (!visited.has(node)) {
+      dfs(node, [])
+    }
+  }
+
+  return cycles
+}
+
+/**
+ * Topological sort of tokens by dependencies
+ */
+export function topologicalSort(
+  tokens: Token[],
+  graph: Map<string, Set<string>>
+): Token[] {
+  const sorted: Token[] = []
+  const visited = new Set<string>()
+  const tokenMap = new Map(tokens.map((t) => [t.id, t]))
+
+  function visit(tokenId: string) {
+    if (visited.has(tokenId)) return
+
+    visited.add(tokenId)
+    const deps = graph.get(tokenId) || new Set()
+    for (const dep of deps) {
+      visit(dep)
+    }
+
+    const token = tokenMap.get(tokenId)
+    if (token) {
+      sorted.push(token)
+    }
+  }
+
+  for (const token of tokens) {
+    visit(token.id)
+  }
+
+  return sorted
+}
+
+/**
+ * Resolve token values (alias-only in v1)
+ */
+export function resolveTokens(
+  tokens: Token[],
+  graph: Map<string, Set<string>>
+): { resolved: ResolvedToken[]; errors: CompilationError[] } {
+  const errors: CompilationError[] = []
+
+  // Check for cycles first
+  const cycles = detectCycles(graph)
+  if (cycles.length > 0) {
+    for (const cycle of cycles) {
+      errors.push({
+        type: "cycle",
+        message: `Circular reference detected: ${cycle.join(" → ")}`,
+        tokenId: cycle[0],
+      })
+    }
+    return { resolved: [], errors }
+  }
+
+  // Sort topologically
+  const sorted = topologicalSort(tokens, graph)
+  const resolvedMap = new Map<string, ResolvedToken>()
+
+  // Resolve in dependency order
+  for (const token of sorted) {
+    const resolved: ResolvedToken = { ...token }
+
+    // Resolve direct value
+    if (token.value !== undefined) {
+      resolved.resolvedValue = resolveValue(token.value, resolvedMap)
+    }
+
+    // Resolve modes
+    if (token.modes) {
+      resolved.resolvedModes = {}
+      for (const [mode, value] of Object.entries(token.modes)) {
+        resolved.resolvedModes[mode] = resolveValue(value, resolvedMap)
+      }
+    }
+
+    resolvedMap.set(token.id, resolved)
+  }
+
+  return { resolved: Array.from(resolvedMap.values()), errors }
+}
+
+/**
+ * Resolve a single value (supports alias references)
+ */
+function resolveValue(
+  value: any,
+  resolvedMap: Map<string, ResolvedToken>
+): string {
+  if (typeof value !== "string") {
+    return String(value)
+  }
+
+  // Check if it's a reference to another token
+  // References start with a token ID (no special syntax needed in v1)
+  // If value starts with "var(--", it's a CSS var reference
+  if (value.startsWith("var(--")) {
+    // Extract token ID from CSS var
+    const match = value.match(/var\(--([a-z][a-z0-9-]*)\)/)
+    if (match) {
+      const tokenId = match[1].replace(/-/g, ".")
+      const refToken = resolvedMap.get(tokenId)
+      if (refToken?.resolvedValue) {
+        return refToken.resolvedValue
+      }
+    }
+  }
+
+  return value
+}
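A sketch of alias resolution with two hypothetical tokens: the var(--...) value is mapped back to a token ID (dashes become dots) and replaced by that token's already-resolved value.

    const tokens = [
      { id: "blue.500", type: "color", source: "primitive",
        description: "Base blue", value: "hsl(220 90% 56%)" },
      { id: "surface.brand", type: "color", source: "semantic",
        description: "Brand surface", value: "var(--blue-500)", references: ["blue.500"] },
    ] as Token[]

    const graph = buildDependencyGraph(tokens)
    const { resolved, errors } = resolveTokens(tokens, graph)
    // errors -> []
    // resolved.find((t) => t.id === "surface.brand")?.resolvedValue -> "hsl(220 90% 56%)"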
package/compiler/types.ts
ADDED
@@ -0,0 +1,118 @@
+import { z } from "zod"
+
+// Token type
+export const tokenTypeSchema = z.enum([
+  "color",
+  "number",
+  "dimension",
+  "radius",
+  "shadow",
+  "typography",
+  "motion",
+  "opacity",
+  "zIndex",
+])
+
+export const tokenSourceSchema = z.enum(["primitive", "semantic", "component"])
+
+export const tokenSchema = z
+  .object({
+    id: z
+      .string()
+      .regex(
+        /^[a-z][a-z0-9-]*(?:\.[a-z0-9][a-z0-9-]*)*$/,
+        "Token ID must be lowercase, dot-separated"
+      ),
+    type: tokenTypeSchema,
+    description: z.string(),
+    source: tokenSourceSchema,
+    value: z.any().optional(),
+    modes: z.record(z.string(), z.any()).optional(),
+    references: z.array(z.string()).optional(),
+    tags: z.array(z.string()).optional(),
+    deprecated: z.boolean().optional(),
+    replacedBy: z.string().optional(),
+    figma: z
+      .object({
+        collection: z.string().optional(),
+        scopes: z.array(z.string()).optional(),
+        variableName: z.string().optional(),
+      })
+      .optional(),
+  })
+  .refine((data) => data.value !== undefined || data.modes !== undefined, {
+    message: "Token must have either value or modes",
+  })
+
+export type Token = z.infer<typeof tokenSchema>
+export type TokenType = z.infer<typeof tokenTypeSchema>
+export type TokenSource = z.infer<typeof tokenSourceSchema>
+
+// Token file structure
+export const tokenFileSchema = z.object({
+  tokens: z.array(tokenSchema),
+})
+
+export type TokenFile = z.infer<typeof tokenFileSchema>
+
+// System metadata
+export const systemSchema = z.object({
+  id: z.string().regex(/^[a-z][a-z0-9-]*$/, "System ID must be lowercase kebab-case"),
+  name: z.string(),
+  description: z.string(),
+  tags: z.array(z.string()).optional(),
+  modes: z.array(z.string()).min(1),
+  policies: z
+    .object({
+      radius: z.enum(["sharp", "rounded", "pill"]).optional(),
+      density: z.enum(["compact", "comfortable", "spacious"]).optional(),
+      contrast: z.enum(["low", "medium", "high"]).optional(),
+      motion: z.enum(["none", "subtle", "expressive"]).optional(),
+    })
+    .optional(),
+  defaults: z
+    .object({
+      font: z.string().optional(),
+      iconStyle: z.string().optional(),
+    })
+    .optional(),
+  figma: z
+    .object({
+      collections: z.array(z.string()).optional(),
+    })
+    .optional(),
+})
+
+export type System = z.infer<typeof systemSchema>
+
+// Resolved token (after compilation)
+export interface ResolvedToken extends Token {
+  resolvedValue?: string
+  resolvedModes?: Record<string, string>
+}
+
+// Compilation context
+export interface CompilationContext {
+  tokens: Token[]
+  system: System
+  mode: "tier0" | "tier1" | "tier2"
+}
+
+// Compilation result
+export interface CompilationResult {
+  tokens: ResolvedToken[]
+  errors: CompilationError[]
+  warnings: CompilationWarning[]
+}
+
+export interface CompilationError {
+  type: "schema" | "reference" | "cycle" | "naming" | "layering"
+  message: string
+  tokenId?: string
+}
+
+export interface CompilationWarning {
+  type: "unused" | "deprecated" | "missing-mode"
+  message: string
+  tokenId?: string
+}
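A hypothetical token file that satisfies these schemas; the refine step accepts it because modes is present even though value is not.

    import { tokenFileSchema } from "./types.js"

    const file = tokenFileSchema.parse({
      tokens: [
        {
          id: "surface.brand",
          type: "color",
          description: "Primary brand surface",
          source: "semantic",
          modes: { light: "hsl(220 90% 56%)", dark: "hsl(220 90% 66%)" },
          references: ["blue.500"],
        },
      ],
    })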
package/compiler/validator.ts
ADDED
@@ -0,0 +1,194 @@
+import { type Token, type CompilationError } from "./types.js"
+
+// Semantic token category prefixes
+const SEMANTIC_PREFIXES = [
+  "surface.",
+  "text.",
+  "border.",
+  "icon.",
+  "shadow.",
+  "focus.",
+  "overlay.",
+  "motion.",
+  "space.",
+  "radius.",
+  "typography.",
+]
+
+/**
+ * Validate token naming conventions
+ */
+export function validateTokenNaming(tokens: Token[]): CompilationError[] {
+  const errors: CompilationError[] = []
+
+  for (const token of tokens) {
+    // Validate semantic token prefixes
+    if (token.source === "semantic") {
+      const hasValidPrefix = SEMANTIC_PREFIXES.some((prefix) =>
+        token.id.startsWith(prefix)
+      )
+      if (!hasValidPrefix) {
+        errors.push({
+          type: "naming",
+          message: `Semantic token "${token.id}" must start with one of: ${SEMANTIC_PREFIXES.join(", ")}`,
+          tokenId: token.id,
+        })
+      }
+    }
+
+    // Validate component token prefixes (must be component name + dot)
+    if (token.source === "component") {
+      const parts = token.id.split(".")
+      if (parts.length < 2) {
+        errors.push({
+          type: "naming",
+          message: `Component token "${token.id}" must follow format: <component>.<property>`,
+          tokenId: token.id,
+        })
+      }
+    }
+  }
+
+  return errors
+}
+
+/**
+ * Validate token uniqueness
+ */
+export function validateTokenUniqueness(tokens: Token[]): CompilationError[] {
+  const errors: CompilationError[] = []
+  const seen = new Map<string, Token>()
+
+  for (const token of tokens) {
+    if (seen.has(token.id)) {
+      errors.push({
+        type: "schema",
+        message: `Duplicate token ID: "${token.id}"`,
+        tokenId: token.id,
+      })
+    }
+    seen.set(token.id, token)
+  }
+
+  return errors
+}
+
+/**
+ * Validate token references exist
+ */
+export function validateTokenReferences(tokens: Token[]): CompilationError[] {
+  const errors: CompilationError[] = []
+  const tokenIds = new Set(tokens.map((t) => t.id))
+
+  for (const token of tokens) {
+    if (token.references) {
+      for (const ref of token.references) {
+        if (!tokenIds.has(ref)) {
+          errors.push({
+            type: "reference",
+            message: `Token "${token.id}" references non-existent token "${ref}"`,
+            tokenId: token.id,
+          })
+        }
+      }
+    }
+  }
+
+  return errors
+}
+
+/**
+ * Validate token layering rules (Tier 2 only)
+ */
+export function validateTokenLayering(
+  tokens: Token[],
+  strict: boolean = false
+): CompilationError[] {
+  if (!strict) return []
+
+  const errors: CompilationError[] = []
+  const tokenMap = new Map(tokens.map((t) => [t.id, t]))
+
+  for (const token of tokens) {
+    if (!token.references || token.references.length === 0) continue
+
+    // Primitives may not reference anything
+    if (token.source === "primitive") {
+      errors.push({
+        type: "layering",
+        message: `Primitive token "${token.id}" may not reference other tokens`,
+        tokenId: token.id,
+      })
+    }
+
+    // Semantics may reference primitives and other semantics
+    if (token.source === "semantic") {
+      for (const ref of token.references) {
+        const refToken = tokenMap.get(ref)
+        if (refToken && refToken.source === "component") {
+          errors.push({
+            type: "layering",
+            message: `Semantic token "${token.id}" may not reference component token "${ref}"`,
+            tokenId: token.id,
+          })
+        }
+      }
+    }
+
+    // Component tokens may reference semantics and primitives
+    if (token.source === "component") {
+      for (const ref of token.references) {
+        const refToken = tokenMap.get(ref)
+        if (refToken && refToken.source === "component") {
+          // Allow component-to-component references but warn
+          // (could be useful for variants)
+        }
+      }
+    }
+  }
+
+  return errors
+}
+
+/**
+ * Validate mode coverage
+ */
+export function validateModeCoverage(
+  tokens: Token[],
+  requiredModes: string[]
+): CompilationError[] {
+  const errors: CompilationError[] = []
+
+  for (const token of tokens) {
+    if (token.modes) {
+      const tokenModes = Object.keys(token.modes)
+      const missingModes = requiredModes.filter((m) => !tokenModes.includes(m))
+      if (missingModes.length > 0) {
+        errors.push({
+          type: "schema",
+          message: `Token "${token.id}" missing required modes: ${missingModes.join(", ")}`,
+          tokenId: token.id,
+        })
+      }
+    }
+  }
+
+  return errors
+}
+
+/**
+ * Run all validations
+ */
+export function validateTokens(
+  tokens: Token[],
+  requiredModes: string[],
+  options: { strict?: boolean } = {}
+): CompilationError[] {
+  return [
+    ...validateTokenUniqueness(tokens),
+    ...validateTokenNaming(tokens),
+    ...validateTokenReferences(tokens),
+    ...validateModeCoverage(tokens, requiredModes),
+    ...validateTokenLayering(tokens, options.strict),
+  ]
+}
package/dist/index.js
CHANGED
@@ -57,9 +57,9 @@ Setting up ${selectedSystem.name}...
   }
   spinner.start("Building tokens...");
   try {
-    const compilerPath = path.join(__dirname, "
-    const { compile
-    const result = await
+    const compilerPath = path.join(__dirname, "../compiler/index.js");
+    const { compile, generateCSSOutput, generateTailwindOutput, generateTypeScriptOutput } = await import(compilerPath);
+    const result = await compile({ cwd });
     if (result.errors.length > 0) {
       spinner.fail("Build failed");
       for (const error of result.errors) {
@@ -71,9 +71,9 @@ Setting up ${selectedSystem.name}...
     await fs.ensureDir(path.join(cwd, "dist/tailwind"));
     await fs.ensureDir(path.join(cwd, "dist/ts"));
    const systemData = await fs.readJSON(path.join(cwd, "system.json"));
-    const cssOutput =
-    const tailwindOutput =
-    const tsOutput =
+    const cssOutput = generateCSSOutput(result.tokens, systemData);
+    const tailwindOutput = generateTailwindOutput(result.tokens, systemData);
+    const tsOutput = generateTypeScriptOutput(result.tokens, systemData);
     await fs.writeFile(path.join(cwd, "dist/css/tokens.css"), cssOutput["tokens.css"]);
     await fs.writeFile(
       path.join(cwd, "dist/tailwind/tokens.tailwind.js"),
@@ -219,13 +219,17 @@ async function addCommand(components) {
 // src/commands/build.ts
 import fs3 from "fs-extra";
 import path3 from "path";
+import { fileURLToPath as fileURLToPath3 } from "url";
 import ora3 from "ora";
 import kleur3 from "kleur";
-
+var __filename3 = fileURLToPath3(import.meta.url);
+var __dirname3 = path3.dirname(__filename3);
 async function buildCommand(options) {
   const cwd = process.cwd();
   const spinner = ora3("Building tokens...").start();
   try {
+    const compilerPath = path3.join(__dirname3, "../compiler/index.js");
+    const { compile, generateCSSOutput, generateTailwindOutput, generateTypeScriptOutput } = await import(compilerPath);
     const result = await compile({ cwd });
     if (result.errors.length > 0) {
       spinner.fail("Build failed");
@@ -270,7 +274,10 @@ async function buildCommand(options) {
 // src/commands/validate.ts
 import fs4 from "fs-extra";
 import path4 from "path";
+import { fileURLToPath as fileURLToPath4 } from "url";
 import kleur4 from "kleur";
+var __filename4 = fileURLToPath4(import.meta.url);
+var __dirname4 = path4.dirname(__filename4);
 async function validateCommand() {
   const cwd = process.cwd();
   const configPath = path4.join(cwd, ".figmabase");
@@ -284,7 +291,8 @@ async function validateCommand() {
     process.exit(1);
   }
   console.log(kleur4.cyan("\n\u{1F50D} Validating tokens and manifests...\n"));
-  const
+  const compilerPath = path4.join(__dirname4, "../compiler/index.js");
+  const { compile, validateTokens, loadTokens, loadSystem } = await import(compilerPath);
   try {
     const tokens = await loadTokens({ cwd });
     const system = await loadSystem({ cwd });
@@ -309,9 +317,11 @@ async function validateCommand() {
 // src/commands/export.ts
 import fs5 from "fs-extra";
 import path5 from "path";
+import { fileURLToPath as fileURLToPath5 } from "url";
 import ora4 from "ora";
 import kleur5 from "kleur";
-
+var __filename5 = fileURLToPath5(import.meta.url);
+var __dirname5 = path5.dirname(__filename5);
 async function exportCommand(target, options) {
   if (target !== "figma") {
     console.error(kleur5.red(`\u2716 Unknown export target: ${target}`));
@@ -320,7 +330,9 @@ async function exportCommand(target, options) {
   const cwd = process.cwd();
   const spinner = ora4("Exporting Figma artifacts...").start();
   try {
-    const
+    const compilerPath = path5.join(__dirname5, "../compiler/index.js");
+    const { compile, generateFigmaTokenOutput } = await import(compilerPath);
+    const result = await compile({ cwd });
     if (result.errors.length > 0) {
       spinner.fail("Export failed");
       for (const error of result.errors) {
@@ -351,10 +363,10 @@ async function exportCommand(target, options) {
 // src/commands/list.ts
 import fs6 from "fs-extra";
 import path6 from "path";
-import { fileURLToPath as
+import { fileURLToPath as fileURLToPath6 } from "url";
 import kleur6 from "kleur";
-var
-var
+var __filename6 = fileURLToPath6(import.meta.url);
+var __dirname6 = path6.dirname(__filename6);
 async function listCommand(type, options) {
   if (type === "systems") {
     await listSystems(options.json);
@@ -366,7 +378,7 @@ async function listCommand(type, options) {
   }
 }
 async function listSystems(json) {
-  const systemsDir = path6.join(
+  const systemsDir = path6.join(__dirname6, "../../systems");
   const systems = await loadSystemsMetadata(systemsDir);
   if (json) {
     console.log(JSON.stringify(systems, null, 2));
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "tokka",
-  "version": "0.2.1",
+  "version": "0.2.3",
   "type": "module",
   "description": "A shadcn-compatible UI foundation with a real token system and optional Figma exports",
   "bin": "./dist/index.js",
@@ -13,7 +13,8 @@
   "files": [
     "dist",
     "systems",
-    "components"
+    "components",
+    "compiler"
   ],
   "scripts": {
     "build": "tsup src/index.ts --format esm --dts --clean",
@@ -22,7 +23,6 @@
     "typecheck": "tsc --noEmit"
   },
   "dependencies": {
-    "@tokka/compiler": "workspace:*",
     "commander": "^11.1.0",
     "prompts": "^2.4.2",
     "kleur": "^4.1.5",