tokka 0.2.4 → 0.2.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/compiler/index.d.ts +478 -0
- package/compiler/index.js +732 -0
- package/dist/index.js +1 -1
- package/package.json +3 -2
- package/compiler/generators/css.ts +0 -146
- package/compiler/generators/figma.ts +0 -147
- package/compiler/generators/tailwind.ts +0 -106
- package/compiler/generators/typescript.ts +0 -113
- package/compiler/index.ts +0 -45
- package/compiler/loader.ts +0 -92
- package/compiler/resolver.ts +0 -177
- package/compiler/types.ts +0 -118
- package/compiler/validator.ts +0 -194
package/compiler/resolver.ts
DELETED
|
@@ -1,177 +0,0 @@
|
|
|
1
|
-
import { type Token, type ResolvedToken, type CompilationError } from "./types.js"
|
|
2
|
-
|
|
3
|
-
/**
|
|
4
|
-
* Build a dependency graph from token references
|
|
5
|
-
*/
|
|
6
|
-
export function buildDependencyGraph(tokens: Token[]): Map<string, Set<string>> {
|
|
7
|
-
const graph = new Map<string, Set<string>>()
|
|
8
|
-
|
|
9
|
-
// Initialize nodes
|
|
10
|
-
for (const token of tokens) {
|
|
11
|
-
graph.set(token.id, new Set())
|
|
12
|
-
}
|
|
13
|
-
|
|
14
|
-
// Add edges
|
|
15
|
-
for (const token of tokens) {
|
|
16
|
-
if (token.references) {
|
|
17
|
-
const deps = graph.get(token.id)!
|
|
18
|
-
for (const ref of token.references) {
|
|
19
|
-
deps.add(ref)
|
|
20
|
-
}
|
|
21
|
-
}
|
|
22
|
-
}
|
|
23
|
-
|
|
24
|
-
return graph
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
/**
|
|
28
|
-
* Detect cycles in the dependency graph
|
|
29
|
-
*/
|
|
30
|
-
export function detectCycles(graph: Map<string, Set<string>>): string[][] {
|
|
31
|
-
const cycles: string[][] = []
|
|
32
|
-
const visited = new Set<string>()
|
|
33
|
-
const recursionStack = new Set<string>()
|
|
34
|
-
|
|
35
|
-
function dfs(node: string, path: string[]): boolean {
|
|
36
|
-
visited.add(node)
|
|
37
|
-
recursionStack.add(node)
|
|
38
|
-
path.push(node)
|
|
39
|
-
|
|
40
|
-
const neighbors = graph.get(node) || new Set()
|
|
41
|
-
for (const neighbor of neighbors) {
|
|
42
|
-
if (!visited.has(neighbor)) {
|
|
43
|
-
if (dfs(neighbor, path)) {
|
|
44
|
-
return true
|
|
45
|
-
}
|
|
46
|
-
} else if (recursionStack.has(neighbor)) {
|
|
47
|
-
// Found a cycle
|
|
48
|
-
const cycleStart = path.indexOf(neighbor)
|
|
49
|
-
cycles.push([...path.slice(cycleStart), neighbor])
|
|
50
|
-
return true
|
|
51
|
-
}
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
recursionStack.delete(node)
|
|
55
|
-
path.pop()
|
|
56
|
-
return false
|
|
57
|
-
}
|
|
58
|
-
|
|
59
|
-
for (const node of graph.keys()) {
|
|
60
|
-
if (!visited.has(node)) {
|
|
61
|
-
dfs(node, [])
|
|
62
|
-
}
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
return cycles
|
|
66
|
-
}
|
|
67
|
-
|
|
68
|
-
/**
|
|
69
|
-
* Topological sort of tokens by dependencies
|
|
70
|
-
*/
|
|
71
|
-
export function topologicalSort(
|
|
72
|
-
tokens: Token[],
|
|
73
|
-
graph: Map<string, Set<string>>
|
|
74
|
-
): Token[] {
|
|
75
|
-
const sorted: Token[] = []
|
|
76
|
-
const visited = new Set<string>()
|
|
77
|
-
const tokenMap = new Map(tokens.map((t) => [t.id, t]))
|
|
78
|
-
|
|
79
|
-
function visit(tokenId: string) {
|
|
80
|
-
if (visited.has(tokenId)) return
|
|
81
|
-
|
|
82
|
-
visited.add(tokenId)
|
|
83
|
-
const deps = graph.get(tokenId) || new Set()
|
|
84
|
-
for (const dep of deps) {
|
|
85
|
-
visit(dep)
|
|
86
|
-
}
|
|
87
|
-
|
|
88
|
-
const token = tokenMap.get(tokenId)
|
|
89
|
-
if (token) {
|
|
90
|
-
sorted.push(token)
|
|
91
|
-
}
|
|
92
|
-
}
|
|
93
|
-
|
|
94
|
-
for (const token of tokens) {
|
|
95
|
-
visit(token.id)
|
|
96
|
-
}
|
|
97
|
-
|
|
98
|
-
return sorted
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
/**
|
|
102
|
-
* Resolve token values (alias-only in v1)
|
|
103
|
-
*/
|
|
104
|
-
export function resolveTokens(
|
|
105
|
-
tokens: Token[],
|
|
106
|
-
graph: Map<string, Set<string>>
|
|
107
|
-
): { resolved: ResolvedToken[]; errors: CompilationError[] } {
|
|
108
|
-
const errors: CompilationError[] = []
|
|
109
|
-
|
|
110
|
-
// Check for cycles first
|
|
111
|
-
const cycles = detectCycles(graph)
|
|
112
|
-
if (cycles.length > 0) {
|
|
113
|
-
for (const cycle of cycles) {
|
|
114
|
-
errors.push({
|
|
115
|
-
type: "cycle",
|
|
116
|
-
message: `Circular reference detected: ${cycle.join(" → ")}`,
|
|
117
|
-
tokenId: cycle[0],
|
|
118
|
-
})
|
|
119
|
-
}
|
|
120
|
-
return { resolved: [], errors }
|
|
121
|
-
}
|
|
122
|
-
|
|
123
|
-
// Sort topologically
|
|
124
|
-
const sorted = topologicalSort(tokens, graph)
|
|
125
|
-
const resolvedMap = new Map<string, ResolvedToken>()
|
|
126
|
-
|
|
127
|
-
// Resolve in dependency order
|
|
128
|
-
for (const token of sorted) {
|
|
129
|
-
const resolved: ResolvedToken = { ...token }
|
|
130
|
-
|
|
131
|
-
// Resolve direct value
|
|
132
|
-
if (token.value !== undefined) {
|
|
133
|
-
resolved.resolvedValue = resolveValue(token.value, resolvedMap)
|
|
134
|
-
}
|
|
135
|
-
|
|
136
|
-
// Resolve modes
|
|
137
|
-
if (token.modes) {
|
|
138
|
-
resolved.resolvedModes = {}
|
|
139
|
-
for (const [mode, value] of Object.entries(token.modes)) {
|
|
140
|
-
resolved.resolvedModes[mode] = resolveValue(value, resolvedMap)
|
|
141
|
-
}
|
|
142
|
-
}
|
|
143
|
-
|
|
144
|
-
resolvedMap.set(token.id, resolved)
|
|
145
|
-
}
|
|
146
|
-
|
|
147
|
-
return { resolved: Array.from(resolvedMap.values()), errors }
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
/**
|
|
151
|
-
* Resolve a single value (supports alias references)
|
|
152
|
-
*/
|
|
153
|
-
function resolveValue(
|
|
154
|
-
value: any,
|
|
155
|
-
resolvedMap: Map<string, ResolvedToken>
|
|
156
|
-
): string {
|
|
157
|
-
if (typeof value !== "string") {
|
|
158
|
-
return String(value)
|
|
159
|
-
}
|
|
160
|
-
|
|
161
|
-
// Check if it's a reference to another token
|
|
162
|
-
// References start with a token ID (no special syntax needed in v1)
|
|
163
|
-
// If value starts with "var(--", it's a CSS var reference
|
|
164
|
-
if (value.startsWith("var(--")) {
|
|
165
|
-
// Extract token ID from CSS var
|
|
166
|
-
const match = value.match(/var\(--([a-z][a-z0-9-]*)\)/)
|
|
167
|
-
if (match) {
|
|
168
|
-
const tokenId = match[1].replace(/-/g, ".")
|
|
169
|
-
const refToken = resolvedMap.get(tokenId)
|
|
170
|
-
if (refToken?.resolvedValue) {
|
|
171
|
-
return refToken.resolvedValue
|
|
172
|
-
}
|
|
173
|
-
}
|
|
174
|
-
}
|
|
175
|
-
|
|
176
|
-
return value
|
|
177
|
-
}
|
package/compiler/types.ts
DELETED
|
@@ -1,118 +0,0 @@
|
|
|
1
|
-
import { z } from "zod"
|
|
2
|
-
|
|
3
|
-
// Token type

/** Allowed token categories — the kinds of design values the compiler handles. */
export const tokenTypeSchema = z.enum([
  "color",
  "number",
  "dimension",
  "radius",
  "shadow",
  "typography",
  "motion",
  "opacity",
  "zIndex",
])

/** Layer the token belongs to; layering rules are enforced by the validator. */
export const tokenSourceSchema = z.enum(["primitive", "semantic", "component"])
|
|
17
|
-
|
|
18
|
-
/**
 * Schema for a single design token.
 *
 * A token must carry either a direct `value` or a per-mode `modes` map
 * (enforced by the refinement below). `references` lists ids of tokens this
 * one depends on.
 */
export const tokenSchema = z
  .object({
    // Lowercase, dot-separated id, e.g. "surface.primary"; segments may
    // contain dashes and (after the first segment) leading digits.
    id: z
      .string()
      .regex(
        /^[a-z][a-z0-9-]*(?:\.[a-z0-9][a-z0-9-]*)*$/,
        "Token ID must be lowercase, dot-separated"
      ),
    type: tokenTypeSchema,
    description: z.string(),
    source: tokenSourceSchema,
    // Direct value (mutually completing with `modes`, see refine below).
    value: z.any().optional(),
    // Mode name → value, for mode-dependent tokens.
    modes: z.record(z.string(), z.any()).optional(),
    // Ids of tokens this token references (used to build the dependency graph).
    references: z.array(z.string()).optional(),
    tags: z.array(z.string()).optional(),
    deprecated: z.boolean().optional(),
    // Id of the token that supersedes this one when deprecated.
    replacedBy: z.string().optional(),
    // Optional Figma-export metadata.
    figma: z
      .object({
        collection: z.string().optional(),
        scopes: z.array(z.string()).optional(),
        variableName: z.string().optional(),
      })
      .optional(),
  })
  .refine((data) => data.value !== undefined || data.modes !== undefined, {
    message: "Token must have either value or modes",
  })

export type Token = z.infer<typeof tokenSchema>
export type TokenType = z.infer<typeof tokenTypeSchema>
export type TokenSource = z.infer<typeof tokenSourceSchema>
|
|
50
|
-
|
|
51
|
-
// Token file structure

/** Shape of a token definition file: a flat list of tokens. */
export const tokenFileSchema = z.object({
  tokens: z.array(tokenSchema),
})

export type TokenFile = z.infer<typeof tokenFileSchema>
|
|
57
|
-
|
|
58
|
-
// System metadata

/**
 * Schema describing a design system: identity, the set of supported modes,
 * and optional policy / default / Figma configuration.
 */
export const systemSchema = z.object({
  id: z.string().regex(/^[a-z][a-z0-9-]*$/, "System ID must be lowercase kebab-case"),
  name: z.string(),
  description: z.string(),
  tags: z.array(z.string()).optional(),
  // At least one mode is required (used by validateModeCoverage).
  modes: z.array(z.string()).min(1),
  // High-level design policies — presumably consumed by generators; verify
  // against compiler/generators/* (not visible here).
  policies: z
    .object({
      radius: z.enum(["sharp", "rounded", "pill"]).optional(),
      density: z.enum(["compact", "comfortable", "spacious"]).optional(),
      contrast: z.enum(["low", "medium", "high"]).optional(),
      motion: z.enum(["none", "subtle", "expressive"]).optional(),
    })
    .optional(),
  defaults: z
    .object({
      font: z.string().optional(),
      iconStyle: z.string().optional(),
    })
    .optional(),
  figma: z
    .object({
      collections: z.array(z.string()).optional(),
    })
    .optional(),
})

export type System = z.infer<typeof systemSchema>
|
|
87
|
-
|
|
88
|
-
// Resolved token (after compilation)

/** A token augmented with its fully-resolved value(s) after alias resolution. */
export interface ResolvedToken extends Token {
  // Final value when the token has a direct `value`.
  resolvedValue?: string
  // Mode name → final value when the token is mode-dependent.
  resolvedModes?: Record<string, string>
}

// Compilation context

/** Inputs to a compilation run. */
export interface CompilationContext {
  tokens: Token[]
  system: System
  // Compilation tier — NOTE(review): tier semantics defined elsewhere; confirm.
  mode: "tier0" | "tier1" | "tier2"
}

// Compilation result

/** Output of a compilation run: resolved tokens plus diagnostics. */
export interface CompilationResult {
  tokens: ResolvedToken[]
  errors: CompilationError[]
  warnings: CompilationWarning[]
}

/** A fatal validation/resolution problem; `tokenId` set when attributable. */
export interface CompilationError {
  type: "schema" | "reference" | "cycle" | "naming" | "layering"
  message: string
  tokenId?: string
}

/** A non-fatal diagnostic; `tokenId` set when attributable. */
export interface CompilationWarning {
  type: "unused" | "deprecated" | "missing-mode"
  message: string
  tokenId?: string
}
|
package/compiler/validator.ts
DELETED
|
@@ -1,194 +0,0 @@
|
|
|
1
|
-
import { type Token, type CompilationError } from "./types.js"
|
|
2
|
-
|
|
3
|
-
// Semantic token category prefixes
|
|
4
|
-
const SEMANTIC_PREFIXES = [
|
|
5
|
-
"surface.",
|
|
6
|
-
"text.",
|
|
7
|
-
"border.",
|
|
8
|
-
"icon.",
|
|
9
|
-
"shadow.",
|
|
10
|
-
"focus.",
|
|
11
|
-
"overlay.",
|
|
12
|
-
"motion.",
|
|
13
|
-
"space.",
|
|
14
|
-
"radius.",
|
|
15
|
-
"typography.",
|
|
16
|
-
]
|
|
17
|
-
|
|
18
|
-
/**
|
|
19
|
-
* Validate token naming conventions
|
|
20
|
-
*/
|
|
21
|
-
export function validateTokenNaming(tokens: Token[]): CompilationError[] {
|
|
22
|
-
const errors: CompilationError[] = []
|
|
23
|
-
|
|
24
|
-
for (const token of tokens) {
|
|
25
|
-
// Validate semantic token prefixes
|
|
26
|
-
if (token.source === "semantic") {
|
|
27
|
-
const hasValidPrefix = SEMANTIC_PREFIXES.some((prefix) =>
|
|
28
|
-
token.id.startsWith(prefix)
|
|
29
|
-
)
|
|
30
|
-
if (!hasValidPrefix) {
|
|
31
|
-
errors.push({
|
|
32
|
-
type: "naming",
|
|
33
|
-
message: `Semantic token "${token.id}" must start with one of: ${SEMANTIC_PREFIXES.join(", ")}`,
|
|
34
|
-
tokenId: token.id,
|
|
35
|
-
})
|
|
36
|
-
}
|
|
37
|
-
}
|
|
38
|
-
|
|
39
|
-
// Validate component token prefixes (must be component name + dot)
|
|
40
|
-
if (token.source === "component") {
|
|
41
|
-
const parts = token.id.split(".")
|
|
42
|
-
if (parts.length < 2) {
|
|
43
|
-
errors.push({
|
|
44
|
-
type: "naming",
|
|
45
|
-
message: `Component token "${token.id}" must follow format: <component>.<property>`,
|
|
46
|
-
tokenId: token.id,
|
|
47
|
-
})
|
|
48
|
-
}
|
|
49
|
-
}
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
return errors
|
|
53
|
-
}
|
|
54
|
-
|
|
55
|
-
/**
|
|
56
|
-
* Validate token uniqueness
|
|
57
|
-
*/
|
|
58
|
-
export function validateTokenUniqueness(tokens: Token[]): CompilationError[] {
|
|
59
|
-
const errors: CompilationError[] = []
|
|
60
|
-
const seen = new Map<string, Token>()
|
|
61
|
-
|
|
62
|
-
for (const token of tokens) {
|
|
63
|
-
if (seen.has(token.id)) {
|
|
64
|
-
errors.push({
|
|
65
|
-
type: "schema",
|
|
66
|
-
message: `Duplicate token ID: "${token.id}"`,
|
|
67
|
-
tokenId: token.id,
|
|
68
|
-
})
|
|
69
|
-
}
|
|
70
|
-
seen.set(token.id, token)
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
return errors
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
/**
|
|
77
|
-
* Validate token references exist
|
|
78
|
-
*/
|
|
79
|
-
export function validateTokenReferences(tokens: Token[]): CompilationError[] {
|
|
80
|
-
const errors: CompilationError[] = []
|
|
81
|
-
const tokenIds = new Set(tokens.map((t) => t.id))
|
|
82
|
-
|
|
83
|
-
for (const token of tokens) {
|
|
84
|
-
if (token.references) {
|
|
85
|
-
for (const ref of token.references) {
|
|
86
|
-
if (!tokenIds.has(ref)) {
|
|
87
|
-
errors.push({
|
|
88
|
-
type: "reference",
|
|
89
|
-
message: `Token "${token.id}" references non-existent token "${ref}"`,
|
|
90
|
-
tokenId: token.id,
|
|
91
|
-
})
|
|
92
|
-
}
|
|
93
|
-
}
|
|
94
|
-
}
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
return errors
|
|
98
|
-
}
|
|
99
|
-
|
|
100
|
-
/**
|
|
101
|
-
* Validate token layering rules (Tier 2 only)
|
|
102
|
-
*/
|
|
103
|
-
export function validateTokenLayering(
|
|
104
|
-
tokens: Token[],
|
|
105
|
-
strict: boolean = false
|
|
106
|
-
): CompilationError[] {
|
|
107
|
-
if (!strict) return []
|
|
108
|
-
|
|
109
|
-
const errors: CompilationError[] = []
|
|
110
|
-
const tokenMap = new Map(tokens.map((t) => [t.id, t]))
|
|
111
|
-
|
|
112
|
-
for (const token of tokens) {
|
|
113
|
-
if (!token.references || token.references.length === 0) continue
|
|
114
|
-
|
|
115
|
-
// Primitives may not reference anything
|
|
116
|
-
if (token.source === "primitive") {
|
|
117
|
-
errors.push({
|
|
118
|
-
type: "layering",
|
|
119
|
-
message: `Primitive token "${token.id}" may not reference other tokens`,
|
|
120
|
-
tokenId: token.id,
|
|
121
|
-
})
|
|
122
|
-
}
|
|
123
|
-
|
|
124
|
-
// Semantics may reference primitives and other semantics
|
|
125
|
-
if (token.source === "semantic") {
|
|
126
|
-
for (const ref of token.references) {
|
|
127
|
-
const refToken = tokenMap.get(ref)
|
|
128
|
-
if (refToken && refToken.source === "component") {
|
|
129
|
-
errors.push({
|
|
130
|
-
type: "layering",
|
|
131
|
-
message: `Semantic token "${token.id}" may not reference component token "${ref}"`,
|
|
132
|
-
tokenId: token.id,
|
|
133
|
-
})
|
|
134
|
-
}
|
|
135
|
-
}
|
|
136
|
-
}
|
|
137
|
-
|
|
138
|
-
// Component tokens may reference semantics and primitives
|
|
139
|
-
if (token.source === "component") {
|
|
140
|
-
for (const ref of token.references) {
|
|
141
|
-
const refToken = tokenMap.get(ref)
|
|
142
|
-
if (refToken && refToken.source === "component") {
|
|
143
|
-
// Allow component-to-component references but warn
|
|
144
|
-
// (could be useful for variants)
|
|
145
|
-
}
|
|
146
|
-
}
|
|
147
|
-
}
|
|
148
|
-
}
|
|
149
|
-
|
|
150
|
-
return errors
|
|
151
|
-
}
|
|
152
|
-
|
|
153
|
-
/**
|
|
154
|
-
* Validate mode coverage
|
|
155
|
-
*/
|
|
156
|
-
export function validateModeCoverage(
|
|
157
|
-
tokens: Token[],
|
|
158
|
-
requiredModes: string[]
|
|
159
|
-
): CompilationError[] {
|
|
160
|
-
const errors: CompilationError[] = []
|
|
161
|
-
|
|
162
|
-
for (const token of tokens) {
|
|
163
|
-
if (token.modes) {
|
|
164
|
-
const tokenModes = Object.keys(token.modes)
|
|
165
|
-
const missingModes = requiredModes.filter((m) => !tokenModes.includes(m))
|
|
166
|
-
if (missingModes.length > 0) {
|
|
167
|
-
errors.push({
|
|
168
|
-
type: "schema",
|
|
169
|
-
message: `Token "${token.id}" missing required modes: ${missingModes.join(", ")}`,
|
|
170
|
-
tokenId: token.id,
|
|
171
|
-
})
|
|
172
|
-
}
|
|
173
|
-
}
|
|
174
|
-
}
|
|
175
|
-
|
|
176
|
-
return errors
|
|
177
|
-
}
|
|
178
|
-
|
|
179
|
-
/**
|
|
180
|
-
* Run all validations
|
|
181
|
-
*/
|
|
182
|
-
export function validateTokens(
|
|
183
|
-
tokens: Token[],
|
|
184
|
-
requiredModes: string[],
|
|
185
|
-
options: { strict?: boolean } = {}
|
|
186
|
-
): CompilationError[] {
|
|
187
|
-
return [
|
|
188
|
-
...validateTokenUniqueness(tokens),
|
|
189
|
-
...validateTokenNaming(tokens),
|
|
190
|
-
...validateTokenReferences(tokens),
|
|
191
|
-
...validateModeCoverage(tokens, requiredModes),
|
|
192
|
-
...validateTokenLayering(tokens, options.strict),
|
|
193
|
-
]
|
|
194
|
-
}
|