@terrazzo/parser 0.10.3 → 2.0.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.d.ts +82 -333
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2203 -3660
- package/dist/index.js.map +1 -1
- package/package.json +6 -5
- package/src/build/index.ts +32 -41
- package/src/config.ts +13 -6
- package/src/lib/code-frame.ts +5 -2
- package/src/lib/momoa.ts +10 -0
- package/src/lint/index.ts +41 -37
- package/src/lint/plugin-core/index.ts +73 -16
- package/src/lint/plugin-core/rules/colorspace.ts +4 -0
- package/src/lint/plugin-core/rules/duplicate-values.ts +2 -0
- package/src/lint/plugin-core/rules/max-gamut.ts +24 -4
- package/src/lint/plugin-core/rules/no-type-on-alias.ts +29 -0
- package/src/lint/plugin-core/rules/required-modes.ts +2 -0
- package/src/lint/plugin-core/rules/required-typography-properties.ts +13 -3
- package/src/lint/plugin-core/rules/valid-boolean.ts +41 -0
- package/src/lint/plugin-core/rules/valid-border.ts +57 -0
- package/src/lint/plugin-core/rules/valid-color.ts +265 -0
- package/src/lint/plugin-core/rules/valid-cubic-bezier.ts +83 -0
- package/src/lint/plugin-core/rules/valid-dimension.ts +199 -0
- package/src/lint/plugin-core/rules/valid-duration.ts +123 -0
- package/src/lint/plugin-core/rules/valid-font-family.ts +68 -0
- package/src/lint/plugin-core/rules/valid-font-weight.ts +89 -0
- package/src/lint/plugin-core/rules/valid-gradient.ts +79 -0
- package/src/lint/plugin-core/rules/valid-link.ts +41 -0
- package/src/lint/plugin-core/rules/valid-number.ts +63 -0
- package/src/lint/plugin-core/rules/valid-shadow.ts +67 -0
- package/src/lint/plugin-core/rules/valid-string.ts +41 -0
- package/src/lint/plugin-core/rules/valid-stroke-style.ts +104 -0
- package/src/lint/plugin-core/rules/valid-transition.ts +61 -0
- package/src/lint/plugin-core/rules/valid-typography.ts +67 -0
- package/src/logger.ts +70 -59
- package/src/parse/index.ts +23 -318
- package/src/parse/load.ts +257 -0
- package/src/parse/normalize.ts +134 -170
- package/src/parse/token.ts +530 -0
- package/src/types.ts +76 -10
- package/src/parse/alias.ts +0 -369
- package/src/parse/json.ts +0 -211
- package/src/parse/validate.ts +0 -961
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
import * as momoa from '@humanwhocodes/momoa';
|
|
2
|
+
import {
|
|
3
|
+
type BundleOptions,
|
|
4
|
+
bundle,
|
|
5
|
+
getObjMember,
|
|
6
|
+
type RefMap,
|
|
7
|
+
replaceNode,
|
|
8
|
+
traverseAsync,
|
|
9
|
+
} from '@terrazzo/json-schema-tools';
|
|
10
|
+
import type { GroupNormalized, TokenNormalized, TokenNormalizedSet } from '@terrazzo/token-tools';
|
|
11
|
+
import { toMomoa } from '../lib/momoa.js';
|
|
12
|
+
import type Logger from '../logger.js';
|
|
13
|
+
import type { InputSource, ParseOptions, TransformVisitors } from '../types.js';
|
|
14
|
+
import { normalize } from './normalize.js';
|
|
15
|
+
import {
|
|
16
|
+
graphAliases,
|
|
17
|
+
groupFromNode,
|
|
18
|
+
refToTokenID,
|
|
19
|
+
resolveAliases,
|
|
20
|
+
tokenFromNode,
|
|
21
|
+
tokenRawValuesFromNode,
|
|
22
|
+
} from './token.js';
|
|
23
|
+
|
|
24
|
+
/** Ephemeral format that only exists while parsing the document. This is not confirmed to be DTCG yet. */
export interface IntermediaryToken {
  // Token identifier. NOTE(review): presumably the JSON-pointer-derived ID — confirm against tokenFromNode.
  id: string;
  /** Was this token aliasing another? */
  $ref?: string;
  // DTCG metadata fields, copied through from the raw document before validation.
  $type?: string;
  $description?: string;
  $deprecated?: string | boolean;
  // Raw, unvalidated value — could be anything the user wrote.
  $value: unknown;
  $extensions?: Record<string, unknown>;
  group: TokenNormalized['group'];
  // Fully-aliased token: the ID of the token this one points at.
  aliasOf?: string;
  // Composite tokens may alias only some sub-properties; shape mirrors the $value shape.
  partialAliasOf?: Record<string, any> | any[];
  // Per-mode variants of the value, keyed by mode name ("." is the default mode elsewhere in this file).
  mode: Record<
    string,
    {
      $type?: string;
      $value: unknown;
      aliasOf?: string;
      partialAliasOf?: Record<string, any> | any[];
      source?: { filename?: URL; node: momoa.ObjectNode };
    }
  >;
  // Where this token came from: originating file (if any) and its momoa AST node.
  source: {
    filename?: URL;
    node: momoa.ObjectNode;
  };
}
|
|
52
|
+
|
|
53
|
+
/** Options for loadSources(): the subset of ParseOptions relevant to loading, plus a logger. */
export interface LoadOptions extends Pick<ParseOptions, 'config' | 'continueOnError' | 'yamlToMomoa' | 'transform'> {
  logger: Logger;
}
|
|
56
|
+
|
|
57
|
+
/**
 * Load from multiple entries, while resolving remote files.
 *
 * Pipeline: (1) bundle all inputs into one momoa document, (2) populate
 * tokens & groups from the bundled AST, then re-walk each original source to
 * attach originalValue/source info, (3) resolve DTCG aliases and build the
 * alias graph, (4) normalize values, (5) sort alphabetically and drop $defs.
 *
 * @param inputs  User-provided sources (filename and/or src), without parsed ASTs yet.
 * @param options Loading options: config, logger, error behavior, optional YAML support and transform visitors.
 * @returns Sorted, normalized token set plus the sources (with parsed ASTs) in original order.
 */
export async function loadSources(
  inputs: Omit<InputSource, 'document'>[],
  { config, logger, continueOnError, yamlToMomoa, transform }: LoadOptions,
): Promise<{ tokens: TokenNormalizedSet; sources: InputSource[] }> {
  // Common fields attached to every log entry from this function.
  const entry = { group: 'parser' as const, label: 'init' };

  // 1. Bundle root documents together
  const firstLoad = performance.now();
  let document = {} as momoa.DocumentNode;

  /** The original user inputs, in original order, with parsed ASTs */
  const sources = inputs.map((input, i) => ({
    ...input,
    document: {} as momoa.DocumentNode,
    filename: input.filename || new URL(`virtual:${i}`), // for objects created in memory, an index-based ID helps associate tokens with these
  }));
  /** The sources array, indexed by filename */
  let sourceByFilename: Record<string, InputSource> = {};
  /** Mapping of all final $ref resolutions. This will be used to generate the graph later. */
  let refMap: RefMap = {};

  try {
    // bundle() resolves remote $refs and merges everything into a single document.
    const result = await bundle(sources, {
      parse: transform ? transformer(transform) : undefined,
      yamlToMomoa,
    });
    document = result.document;
    sourceByFilename = result.sources;
    refMap = result.refMap;
    // Merge bundler results back into the ordered `sources` array; files the
    // bundler discovered (remote $refs) that weren't in the inputs get appended.
    for (const [filename, source] of Object.entries(result.sources)) {
      const i = sources.findIndex((s) => s.filename.href === filename);
      if (i === -1) {
        sources.push(source);
      } else {
        sources[i]!.src = source.src; // this is a sanitized source that is easier to work with
        sources[i]!.document = source.document;
      }
    }
  } catch (err) {
    // Try to recover the offending file's source text for a useful code frame.
    let src = sources.find((s) => s.filename.href === (err as any).filename)?.src;
    if (src && typeof src !== 'string') {
      src = JSON.stringify(src, undefined, 2);
    }
    // NOTE(review): execution falls through after this with an empty `document`;
    // presumably logger.error() throws when continueOnError is false — confirm in logger.ts.
    logger.error({
      ...entry,
      continueOnError,
      message: (err as Error).message,
      node: (err as any).node,
      src,
    });
  }

  logger.debug({ ...entry, message: `JSON loaded`, timing: performance.now() - firstLoad });
  // A printed view of the bundled document, used as the source for tokens created during the merge.
  const artificialSource = { src: momoa.print(document, { indent: 2 }), document };

  // 2. Parse
  const firstPass = performance.now();
  const tokens: TokenNormalizedSet = {};
  // micro-optimization: while we’re iterating over tokens, keeping a “hot”
  // array in memory saves recreating arrays from object keys over and over again.
  // it does produce a noticeable speedup > 1,000 tokens.
  const tokenIDs: string[] = [];
  const groups: Record<string, GroupNormalized> = {};

  // 2a. Token & group population
  await traverseAsync(document, {
    async enter(node, _parent, path) {
      // Only object nodes can be tokens or groups.
      if (node.type !== 'Object') {
        return;
      }
      groupFromNode(node, { path, groups });
      const token = tokenFromNode(node, {
        groups,
        ignore: config.ignore,
        path,
        source: { src: artificialSource, document },
      });
      if (token) {
        tokenIDs.push(token.jsonID);
        tokens[token.jsonID] = token;
      }
    },
  });

  logger.debug({ ...entry, message: 'Parsing: 1st pass', timing: performance.now() - firstPass });
  const secondPass = performance.now();

  // 2b. Resolve originalValue and original sources
  // Walk each ORIGINAL (pre-bundle) document so tokens point back at where the user actually wrote them.
  for (const source of Object.values(sourceByFilename)) {
    await traverseAsync(source.document, {
      async enter(node, _parent, path) {
        if (node.type !== 'Object') {
          return;
        }

        const tokenRawValues = tokenRawValuesFromNode(node, { filename: source.filename!.href, path });
        if (tokenRawValues && tokens[tokenRawValues?.jsonID]) {
          tokens[tokenRawValues.jsonID]!.originalValue = tokenRawValues.originalValue;
          tokens[tokenRawValues.jsonID]!.source = tokenRawValues.source;
          for (const mode of Object.keys(tokenRawValues.mode)) {
            tokens[tokenRawValues.jsonID]!.mode[mode]!.originalValue = tokenRawValues.mode[mode]!.originalValue;
            tokens[tokenRawValues.jsonID]!.mode[mode]!.source = tokenRawValues.mode[mode]!.source;
          }
        }
      },
    });
  }

  // 2c. DTCG alias resolution
  // Unlike $refs which can be resolved as we go, these can’t happen until the final, flattened set
  resolveAliases(tokens, { logger, sources: sourceByFilename, refMap });
  logger.debug({ ...entry, message: 'Parsing: 2nd pass', timing: performance.now() - secondPass });

  // 3. Alias graph
  // We’ve resolved aliases, but we need this pass for reverse linking i.e. “aliasedBy”
  const aliasStart = performance.now();
  graphAliases(refMap, { tokens, logger, sources: sourceByFilename });
  logger.debug({ ...entry, message: 'Alias graph built', timing: performance.now() - aliasStart });

  // 4. normalize
  // Allow for some minor variance in inputs, and be nice to folks.
  const normalizeStart = performance.now();
  for (const id of tokenIDs) {
    const token = tokens[id]!;
    // NOTE(review): token.source.filename is used as a string key here — presumably a URL href; confirm.
    normalize(token as any, { logger, src: sourceByFilename[token.source.filename!]?.src });
  }
  logger.debug({ ...entry, message: 'Normalized values', timing: performance.now() - normalizeStart });

  // 5. alphabetize & filter
  // This can’t happen until the last step, where we’re 100% sure we’ve resolved everything.
  const tokensSorted: TokenNormalizedSet = {};
  tokenIDs.sort((a, b) => a.localeCompare(b, 'en-us', { numeric: true }));
  for (const path of tokenIDs) {
    // Filter out any tokens in $defs (we needed to reference them earlier, but shouldn’t include them in the final assortment)
    if (path.includes('/$defs/')) {
      continue;
    }
    const id = refToTokenID(path)!;
    tokensSorted[id] = tokens[path]!;
  }
  // Sort group IDs once, too
  for (const group of Object.values(groups)) {
    group.tokens.sort((a, b) => a.localeCompare(b, 'en-us', { numeric: true }));
  }

  return {
    tokens: tokensSorted,
    sources,
  };
}
|
|
208
|
+
|
|
209
|
+
/**
 * Wrap user-supplied TransformVisitors into a bundle() parse hook.
 *
 * Parses `src` to a momoa AST, runs the `root` visitor once on the document,
 * then walks every object node: nodes with a `$value` member get `token` plus
 * the type-specific visitor (keyed by the most recently seen `$type`); other
 * objects (outside any `$value` subtree) get the `group` visitor. Visitors
 * receive a structuredClone of the node; a truthy return value replaces the
 * node in place via replaceNode().
 *
 * @param transform User visitor map.
 * @returns An async (src, filename) => DocumentNode parse function.
 */
function transformer(transform: TransformVisitors): BundleOptions['parse'] {
  return async (src, filename) => {
    let document = toMomoa(src);
    // Track inherited $type: a group's $type applies to descendant tokens.
    // NOTE(review): last$type is only updated when a new $type is seen and is never
    // reset on leaving a subtree, so a sibling subtree without its own $type may
    // inherit a stale $type — confirm this is intended.
    let lastPath = '#/';
    let last$type: string | undefined;

    if (transform.root) {
      const result = transform.root(document, { filename, parent: undefined, path: [] });
      if (result) {
        document = result as momoa.DocumentNode;
      }
    }

    await traverseAsync(document, {
      async enter(node, parent, path) {
        // Skip non-objects and the document root (empty path).
        if (node.type !== 'Object' || !path.length) {
          return;
        }
        const ctx = { filename, parent, path };
        const next$type = getObjMember(node, '$type');
        if (next$type?.type === 'String') {
          const jsonPath = `#/${path.join('/')}`;
          // Only inherit the new $type when we are descending into the subtree that declared it.
          if (jsonPath.startsWith(lastPath)) {
            last$type = next$type.value;
          }
          lastPath = jsonPath;
        }
        if (getObjMember(node, '$value')) {
          // Token node: run the generic `token` visitor first…
          let result: any = transform.token?.(structuredClone(node), ctx);
          if (result) {
            replaceNode(node, result);
            result = undefined;
          }
          // …then the type-specific visitor (e.g. transform.color), on the possibly-replaced node.
          result = transform[last$type as keyof typeof transform]?.(structuredClone(node as any), ctx);
          if (result) {
            replaceNode(node, result);
          }
        } else if (!path.includes('$value')) {
          // Plain object outside any $value subtree: treat as a group.
          const result = transform.group?.(structuredClone(node), ctx);
          if (result) {
            replaceNode(node, result);
          }
        }
      },
    });

    return document;
  };
}
|
package/src/parse/normalize.ts
CHANGED
|
@@ -1,199 +1,163 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
type FontFamilyValue,
|
|
6
|
-
type GradientStopNormalized,
|
|
7
|
-
type GradientValueNormalized,
|
|
8
|
-
isAlias,
|
|
9
|
-
parseColor,
|
|
10
|
-
type ShadowValueNormalized,
|
|
11
|
-
type Token,
|
|
12
|
-
type TransitionValue,
|
|
13
|
-
type TypographyValueNormalized,
|
|
14
|
-
} from '@terrazzo/token-tools';
|
|
1
|
+
import type * as momoa from '@humanwhocodes/momoa';
|
|
2
|
+
import { getObjMember } from '@terrazzo/json-schema-tools';
|
|
3
|
+
import { FONT_WEIGHTS, isAlias, parseColor } from '@terrazzo/token-tools';
|
|
4
|
+
import type Logger from '../logger.js';
|
|
15
5
|
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
normal: 400,
|
|
23
|
-
regular: 400,
|
|
24
|
-
book: 400,
|
|
25
|
-
medium: 500,
|
|
26
|
-
'semi-bold': 600,
|
|
27
|
-
'demi-bold': 600,
|
|
28
|
-
bold: 700,
|
|
29
|
-
'extra-bold': 800,
|
|
30
|
-
'ultra-bold': 800,
|
|
31
|
-
black: 900,
|
|
32
|
-
heavy: 900,
|
|
33
|
-
'extra-black': 950,
|
|
34
|
-
'ultra-black': 950,
|
|
35
|
-
};
|
|
6
|
+
/** Minimal token shape normalize() needs — values are still unvalidated at this point. */
interface PreValidatedToken {
  id: string;
  $type: string;
  // Raw top-level value; overwritten from the "." (default) mode after per-mode normalization.
  $value: unknown;
  // Per-mode values with their originating AST node for diagnostics.
  mode: Record<string, { $value: unknown; source: { node: any; filename: string | undefined } }>;
}
|
|
36
12
|
|
|
37
|
-
|
|
13
|
+
/**
|
|
14
|
+
* Normalize token value.
|
|
15
|
+
* The reason for the “any” typing is this aligns various user-provided inputs to the type
|
|
16
|
+
*/
|
|
17
|
+
export function normalize(token: PreValidatedToken, { logger, src }: { logger: Logger; src: string }) {
|
|
18
|
+
const entry = { group: 'parser' as const, label: 'init', src };
|
|
38
19
|
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
if (typeof token.$value === 'string' && isAlias(token.$value)) {
|
|
42
|
-
return token.$value;
|
|
20
|
+
function normalizeFontFamily(value: unknown): string[] {
|
|
21
|
+
return typeof value === 'string' ? [value] : (value as string[]);
|
|
43
22
|
}
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
return parseColor(token.$value);
|
|
61
|
-
}
|
|
62
|
-
const newValue: ColorValueNormalized = {
|
|
63
|
-
colorSpace: token.$value.colorSpace,
|
|
64
|
-
components: token.$value.components ?? token.$value.channels,
|
|
65
|
-
alpha: token.$value.alpha ?? 1,
|
|
66
|
-
};
|
|
67
|
-
if ('hex' in token.$value) {
|
|
68
|
-
newValue.hex = token.$value.hex;
|
|
69
|
-
}
|
|
70
|
-
return newValue;
|
|
71
|
-
}
|
|
72
|
-
case 'cubicBezier': {
|
|
73
|
-
if (typeof token.$value === 'string') {
|
|
74
|
-
return token.$value;
|
|
75
|
-
}
|
|
76
|
-
return token.$value.map((value) =>
|
|
77
|
-
typeof value === 'number' ? normalizeValue({ $type: 'number', $value: value }) : value,
|
|
78
|
-
) as CubicBezierValue;
|
|
79
|
-
}
|
|
80
|
-
case 'dimension': {
|
|
81
|
-
if ((token as any).$value === 0) {
|
|
82
|
-
return { value: 0, unit: 'px' };
|
|
23
|
+
|
|
24
|
+
function normalizeFontWeight(value: unknown): number {
|
|
25
|
+
return (typeof value === 'string' && FONT_WEIGHTS[value as keyof typeof FONT_WEIGHTS]) || (value as number);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
function normalizeColor(value: unknown, node: momoa.AnyNode | undefined) {
|
|
29
|
+
if (typeof value === 'string' && !isAlias(value)) {
|
|
30
|
+
logger.warn({
|
|
31
|
+
...entry,
|
|
32
|
+
node,
|
|
33
|
+
message: `${token.id}: string colors will be deprecated in a future version. Please update to object notation`,
|
|
34
|
+
});
|
|
35
|
+
try {
|
|
36
|
+
return parseColor(value);
|
|
37
|
+
} catch {
|
|
38
|
+
return { colorSpace: 'srgb', components: [0, 0, 0], alpha: 1 };
|
|
83
39
|
}
|
|
84
|
-
|
|
85
|
-
if (
|
|
86
|
-
|
|
87
|
-
return { value: Number.parseFloat(match?.[1] || token.$value), unit: match?.[2] || 'px' };
|
|
40
|
+
} else if (value && typeof value === 'object') {
|
|
41
|
+
if ((value as any).alpha === undefined) {
|
|
42
|
+
(value as any).alpha = 1;
|
|
88
43
|
}
|
|
89
|
-
return token.$value;
|
|
90
44
|
}
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
return { value: Number.parseFloat(match?.[1] || token.$value), unit: match?.[2] || 'ms' };
|
|
45
|
+
return value;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
switch (token.$type) {
|
|
49
|
+
case 'color': {
|
|
50
|
+
for (const mode of Object.keys(token.mode)) {
|
|
51
|
+
token.mode[mode]!.$value = normalizeColor(token.mode[mode]!.$value, token.mode[mode]!.source.node);
|
|
99
52
|
}
|
|
100
|
-
|
|
53
|
+
token.$value = token.mode['.']!.$value;
|
|
54
|
+
break;
|
|
101
55
|
}
|
|
56
|
+
|
|
102
57
|
case 'fontFamily': {
|
|
103
|
-
|
|
58
|
+
for (const mode of Object.keys(token.mode)) {
|
|
59
|
+
token.mode[mode]!.$value = normalizeFontFamily(token.mode[mode]!.$value);
|
|
60
|
+
}
|
|
61
|
+
token.$value = token.mode['.']!.$value;
|
|
62
|
+
break;
|
|
104
63
|
}
|
|
64
|
+
|
|
105
65
|
case 'fontWeight': {
|
|
106
|
-
|
|
107
|
-
|
|
66
|
+
for (const mode of Object.keys(token.mode)) {
|
|
67
|
+
token.mode[mode]!.$value = normalizeFontWeight(token.mode[mode]!.$value);
|
|
108
68
|
}
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
Math.max(1, typeof token.$value === 'string' ? Number.parseInt(token.$value) : token.$value),
|
|
112
|
-
);
|
|
69
|
+
token.$value = token.mode['.']!.$value;
|
|
70
|
+
break;
|
|
113
71
|
}
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
72
|
+
|
|
73
|
+
case 'border': {
|
|
74
|
+
for (const mode of Object.keys(token.mode)) {
|
|
75
|
+
const border = token.mode[mode]!.$value as any;
|
|
76
|
+
if (!border || typeof border !== 'object') {
|
|
77
|
+
continue;
|
|
78
|
+
}
|
|
79
|
+
if (border.color) {
|
|
80
|
+
border.color = normalizeColor(
|
|
81
|
+
border.color,
|
|
82
|
+
getObjMember(token.mode[mode]!.source.node as momoa.ObjectNode, 'color'),
|
|
83
|
+
);
|
|
124
84
|
}
|
|
125
|
-
output.push(stop);
|
|
126
85
|
}
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
case 'number': {
|
|
130
|
-
return typeof token.$value === 'number' ? token.$value : Number.parseFloat(token.$value);
|
|
86
|
+
token.$value = token.mode['.']!.$value;
|
|
87
|
+
break;
|
|
131
88
|
}
|
|
89
|
+
|
|
132
90
|
case 'shadow': {
|
|
133
|
-
|
|
134
|
-
|
|
91
|
+
for (const mode of Object.keys(token.mode)) {
|
|
92
|
+
// normalize to array
|
|
93
|
+
if (!Array.isArray(token.mode[mode]!.$value)) {
|
|
94
|
+
token.mode[mode]!.$value = [token.mode[mode]!.$value];
|
|
95
|
+
}
|
|
96
|
+
const $value = token.mode[mode]!.$value as any[];
|
|
97
|
+
for (let i = 0; i < $value.length; i++) {
|
|
98
|
+
const shadow = $value[i]!;
|
|
99
|
+
if (!shadow || typeof shadow !== 'object') {
|
|
100
|
+
continue;
|
|
101
|
+
}
|
|
102
|
+
const shadowNode = (
|
|
103
|
+
token.mode[mode]!.source.node.type === 'Array'
|
|
104
|
+
? token.mode[mode]!.source.node.elements[i]!.value
|
|
105
|
+
: token.mode[mode]!.source.node
|
|
106
|
+
) as momoa.ObjectNode;
|
|
107
|
+
if (shadow.color) {
|
|
108
|
+
shadow.color = normalizeColor(shadow.color, getObjMember(shadowNode, 'color'));
|
|
109
|
+
}
|
|
110
|
+
if (!('inset' in shadow)) {
|
|
111
|
+
shadow.inset = false;
|
|
112
|
+
}
|
|
113
|
+
}
|
|
135
114
|
}
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
({
|
|
139
|
-
color: normalizeValue({ $type: 'color', $value: layer.color }),
|
|
140
|
-
offsetX: normalizeValue({ $type: 'dimension', $value: layer.offsetX ?? { value: 0, unit: 'px' } }),
|
|
141
|
-
offsetY: normalizeValue({ $type: 'dimension', $value: layer.offsetY ?? { value: 0, unit: 'px' } }),
|
|
142
|
-
blur: normalizeValue({ $type: 'dimension', $value: layer.blur ?? { value: 0, unit: 'px' } }),
|
|
143
|
-
spread: normalizeValue({ $type: 'dimension', $value: layer.spread ?? { value: 0, unit: 'px' } }),
|
|
144
|
-
inset: layer.inset === true,
|
|
145
|
-
}) as ShadowValueNormalized,
|
|
146
|
-
);
|
|
115
|
+
token.$value = token.mode['.']!.$value;
|
|
116
|
+
break;
|
|
147
117
|
}
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
118
|
+
|
|
119
|
+
case 'gradient': {
|
|
120
|
+
for (const mode of Object.keys(token.mode)) {
|
|
121
|
+
if (!Array.isArray(token.mode[mode]!.$value)) {
|
|
122
|
+
continue;
|
|
123
|
+
}
|
|
124
|
+
const $value = token.mode[mode]!.$value as any[];
|
|
125
|
+
for (let i = 0; i < $value.length; i++) {
|
|
126
|
+
const stop = $value[i]!;
|
|
127
|
+
if (!stop || typeof stop !== 'object') {
|
|
128
|
+
continue;
|
|
129
|
+
}
|
|
130
|
+
const stopNode = (token.mode[mode]!.source.node as momoa.ArrayNode)?.elements?.[i]?.value as momoa.ObjectNode;
|
|
131
|
+
if (stop.color) {
|
|
132
|
+
stop.color = normalizeColor(stop.color, getObjMember(stopNode, 'color'));
|
|
133
|
+
}
|
|
134
|
+
}
|
|
157
135
|
}
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
delay: normalizeValue({ $type: 'duration', $value: token.$value.delay ?? 0 }),
|
|
161
|
-
timingFunction: normalizeValue({ $type: 'cubicBezier', $value: token.$value.timingFunction }),
|
|
162
|
-
} as TransitionValue;
|
|
136
|
+
token.$value = token.mode['.']!.$value;
|
|
137
|
+
break;
|
|
163
138
|
}
|
|
139
|
+
|
|
164
140
|
case 'typography': {
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
case 'lineHeight': {
|
|
181
|
-
output[k] = normalizeValue({
|
|
182
|
-
$type: typeof token.$value === 'number' ? 'number' : 'dimension',
|
|
183
|
-
$value: $value as any,
|
|
184
|
-
});
|
|
185
|
-
break;
|
|
186
|
-
}
|
|
187
|
-
default: {
|
|
188
|
-
output[k] = $value;
|
|
189
|
-
break;
|
|
141
|
+
for (const mode of Object.keys(token.mode)) {
|
|
142
|
+
const $value = token.mode[mode]!.$value as any;
|
|
143
|
+
if (typeof $value !== 'object') {
|
|
144
|
+
return;
|
|
145
|
+
}
|
|
146
|
+
for (const [k, v] of Object.entries($value)) {
|
|
147
|
+
switch (k) {
|
|
148
|
+
case 'fontFamily': {
|
|
149
|
+
$value[k] = normalizeFontFamily(v);
|
|
150
|
+
break;
|
|
151
|
+
}
|
|
152
|
+
case 'fontWeight': {
|
|
153
|
+
$value[k] = normalizeFontWeight(v);
|
|
154
|
+
break;
|
|
155
|
+
}
|
|
190
156
|
}
|
|
191
157
|
}
|
|
192
158
|
}
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
default: {
|
|
196
|
-
return token.$value;
|
|
159
|
+
token.$value = token.mode['.']!.$value;
|
|
160
|
+
break;
|
|
197
161
|
}
|
|
198
162
|
}
|
|
199
163
|
}
|