@terrazzo/parser 2.0.0-alpha.3 → 2.0.0-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@terrazzo/parser",
3
- "version": "2.0.0-alpha.3",
3
+ "version": "2.0.0-alpha.4",
4
4
  "description": "Parser/validator for the Design Tokens Community Group (DTCG) standard.",
5
5
  "license": "MIT",
6
6
  "type": "module",
@@ -43,11 +43,11 @@
43
43
  "picocolors": "^1.1.1",
44
44
  "scule": "^1.3.0",
45
45
  "wildcard-match": "^5.1.4",
46
- "@terrazzo/json-schema-tools": "^0.1.0",
47
- "@terrazzo/token-tools": "^2.0.0-alpha.3"
46
+ "@terrazzo/json-schema-tools": "^0.1.0-alpha.0",
47
+ "@terrazzo/token-tools": "^2.0.0-alpha.4"
48
48
  },
49
49
  "devDependencies": {
50
- "yaml-to-momoa": "0.0.6"
50
+ "yaml-to-momoa": "0.0.8"
51
51
  },
52
52
  "scripts": {
53
53
  "build": "rolldown -c && attw --profile esm-only --pack .",
@@ -1,11 +1,11 @@
1
- import type * as momoa from '@humanwhocodes/momoa';
1
+ import type { InputSourceWithDocument } from '@terrazzo/json-schema-tools';
2
2
  import type { TokenNormalized } from '@terrazzo/token-tools';
3
3
  import wcmatch from 'wildcard-match';
4
4
  import Logger, { type LogEntry } from '../logger.js';
5
5
  import type { BuildRunnerResult, ConfigInit, TokenTransformed, TransformParams } from '../types.js';
6
6
 
7
7
  export interface BuildRunnerOptions {
8
- sources: { filename?: URL; src: string; document: momoa.DocumentNode }[];
8
+ sources: InputSourceWithDocument[];
9
9
  config: ConfigInit;
10
10
  logger?: Logger;
11
11
  }
@@ -0,0 +1,35 @@
1
+ /**
2
+ * If tokens are found inside a resolver, strip out the resolver paths (don’t
3
+ * include "sets"/"modifiers" in the token ID etc.)
4
+ */
5
+ export function filterResolverPaths(path: string[]): string[] {
6
+ switch (path[0]) {
7
+ case 'sets': {
8
+ return path.slice(4);
9
+ }
10
+ case 'modifiers': {
11
+ return path.slice(5);
12
+ }
13
+ case 'resolutionOrder': {
14
+ switch (path[2]) {
15
+ case 'sources': {
16
+ return path.slice(4);
17
+ }
18
+ case 'contexts': {
19
+ return path.slice(5);
20
+ }
21
+ }
22
+ break;
23
+ }
24
+ }
25
+ return path;
26
+ }
27
+
28
+ /**
29
+ * Make a deterministic string from an object
30
+ */
31
+ export function makeInputKey(input: Record<string, string | undefined>): string {
32
+ return JSON.stringify(
33
+ Object.fromEntries(Object.entries(input).sort((a, b) => a[0].localeCompare(b[0], 'en-us', { numeric: true }))),
34
+ );
35
+ }
package/src/lint/index.ts CHANGED
@@ -1,7 +1,8 @@
1
+ import type { InputSourceWithDocument } from '@terrazzo/json-schema-tools';
1
2
  import { pluralize, type TokenNormalizedSet } from '@terrazzo/token-tools';
2
3
  import { merge } from 'merge-anything';
3
4
  import type { LogEntry, default as Logger } from '../logger.js';
4
- import type { ConfigInit, InputSource } from '../types.js';
5
+ import type { ConfigInit } from '../types.js';
5
6
 
6
7
  export { RECOMMENDED_CONFIG } from './plugin-core/index.js';
7
8
 
@@ -9,7 +10,7 @@ export interface LintRunnerOptions {
9
10
  tokens: TokenNormalizedSet;
10
11
  filename?: URL;
11
12
  config: ConfigInit;
12
- sources: InputSource[];
13
+ sources: InputSourceWithDocument[];
13
14
  logger: Logger;
14
15
  }
15
16
 
@@ -21,7 +22,7 @@ export default async function lintRunner({
21
22
  logger,
22
23
  }: LintRunnerOptions): Promise<void> {
23
24
  const { plugins = [], lint } = config;
24
- const sourceByFilename: Record<string, InputSource> = {};
25
+ const sourceByFilename: Record<string, InputSourceWithDocument> = {};
25
26
  for (const source of sources) {
26
27
  sourceByFilename[source.filename!.href] = source;
27
28
  }
@@ -1,20 +1,22 @@
1
1
  import type fsType from 'node:fs/promises';
2
+ import type { InputSource, InputSourceWithDocument } from '@terrazzo/json-schema-tools';
2
3
  import { pluralize, type TokenNormalizedSet } from '@terrazzo/token-tools';
3
4
  import lintRunner from '../lint/index.js';
4
5
  import Logger from '../logger.js';
6
+ import { createSyntheticResolver } from '../resolver/create-synthetic-resolver.js';
5
7
  import { loadResolver } from '../resolver/load.js';
6
- import type { ConfigInit, InputSource, ParseOptions, Resolver } from '../types.js';
8
+ import type { ConfigInit, ParseOptions, Resolver } from '../types.js';
7
9
  import { loadSources } from './load.js';
8
10
 
9
11
  export interface ParseResult {
10
12
  tokens: TokenNormalizedSet;
11
- sources: InputSource[];
12
- resolver?: Resolver | undefined;
13
+ sources: InputSourceWithDocument[];
14
+ resolver: Resolver;
13
15
  }
14
16
 
15
17
  /** Parse */
16
18
  export default async function parse(
17
- _input: Omit<InputSource, 'document'> | Omit<InputSource, 'document'>[],
19
+ _input: InputSource | InputSource[],
18
20
  {
19
21
  logger = new Logger(),
20
22
  req = defaultReq,
@@ -26,22 +28,33 @@ export default async function parse(
26
28
  }: ParseOptions = {} as ParseOptions,
27
29
  ): Promise<ParseResult> {
28
30
  const inputs = Array.isArray(_input) ? _input : [_input];
31
+ let tokens: TokenNormalizedSet = {};
32
+ let resolver: Resolver | undefined;
33
+ let sources: InputSourceWithDocument[] = [];
29
34
 
30
35
  const totalStart = performance.now();
31
36
 
32
- // 1. Resolver
33
- const resolver = await loadResolver(inputs, { logger, req, yamlToMomoa });
34
-
35
- // 2. No resolver (tokens)
37
+ // 1. Load tokens
36
38
  const initStart = performance.now();
37
- const { tokens, sources } = await loadSources(inputs, {
38
- req,
39
- logger,
40
- config,
41
- continueOnError,
42
- yamlToMomoa,
43
- transform,
44
- });
39
+ const resolverResult = await loadResolver(inputs, { config, logger, req, yamlToMomoa });
40
+ // 1a. Resolver
41
+ if (resolverResult.resolver) {
42
+ tokens = resolverResult.tokens;
43
+ sources = resolverResult.sources;
44
+ resolver = resolverResult.resolver;
45
+ } else {
46
+ // 1b. No resolver
47
+ const tokenResult = await loadSources(inputs, {
48
+ req,
49
+ logger,
50
+ config,
51
+ continueOnError,
52
+ yamlToMomoa,
53
+ transform,
54
+ });
55
+ tokens = tokenResult.tokens;
56
+ sources = tokenResult.sources;
57
+ }
45
58
  logger.debug({
46
59
  message: 'Loaded tokens',
47
60
  group: 'parser',
@@ -80,7 +93,7 @@ export default async function parse(
80
93
  return {
81
94
  tokens,
82
95
  sources,
83
- resolver,
96
+ resolver: resolver || (await createSyntheticResolver(tokens, { config, logger, req, sources })),
84
97
  };
85
98
  }
86
99
 
package/src/parse/load.ts CHANGED
@@ -3,23 +3,19 @@ import {
3
3
  type BundleOptions,
4
4
  bundle,
5
5
  getObjMember,
6
+ type InputSource,
7
+ type InputSourceWithDocument,
6
8
  type RefMap,
7
9
  replaceNode,
8
- traverseAsync,
10
+ traverse,
9
11
  } from '@terrazzo/json-schema-tools';
10
- import type { GroupNormalized, TokenNormalized, TokenNormalizedSet } from '@terrazzo/token-tools';
12
+ import type { TokenNormalized, TokenNormalizedSet } from '@terrazzo/token-tools';
11
13
  import { toMomoa } from '../lib/momoa.js';
14
+ import { filterResolverPaths } from '../lib/resolver-utils.js';
12
15
  import type Logger from '../logger.js';
13
- import type { InputSource, ParseOptions, TransformVisitors } from '../types.js';
14
- import { normalize } from './normalize.js';
15
- import {
16
- graphAliases,
17
- groupFromNode,
18
- refToTokenID,
19
- resolveAliases,
20
- tokenFromNode,
21
- tokenRawValuesFromNode,
22
- } from './token.js';
16
+ import { isLikelyResolver } from '../resolver/validate.js';
17
+ import type { ParseOptions, TransformVisitors } from '../types.js';
18
+ import { processTokens } from './process.js';
23
19
 
24
20
  /** Ephemeral format that only exists while parsing the document. This is not confirmed to be DTCG yet. */
25
21
  export interface IntermediaryToken {
@@ -57,12 +53,12 @@ export interface LoadOptions extends Pick<ParseOptions, 'config' | 'continueOnEr
57
53
 
58
54
  export interface LoadSourcesResult {
59
55
  tokens: TokenNormalizedSet;
60
- sources: InputSource[];
56
+ sources: InputSourceWithDocument[];
61
57
  }
62
58
 
63
59
  /** Load from multiple entries, while resolving remote files */
64
60
  export async function loadSources(
65
- inputs: Omit<InputSource, 'document'>[],
61
+ inputs: InputSource[],
66
62
  { config, logger, req, continueOnError, yamlToMomoa, transform }: LoadOptions,
67
63
  ): Promise<LoadSourcesResult> {
68
64
  const entry = { group: 'parser' as const, label: 'init' };
@@ -78,7 +74,7 @@ export async function loadSources(
78
74
  filename: input.filename || new URL(`virtual:${i}`), // for objects created in memory, an index-based ID helps associate tokens with these
79
75
  }));
80
76
  /** The sources array, indexed by filename */
81
- let sourceByFilename: Record<string, InputSource> = {};
77
+ let sourceByFilename: Record<string, InputSourceWithDocument> = {};
82
78
  /** Mapping of all final $ref resolutions. This will be used to generate the graph later. */
83
79
  let refMap: RefMap = {};
84
80
 
@@ -113,101 +109,13 @@ export async function loadSources(
113
109
  src,
114
110
  });
115
111
  }
116
-
117
112
  logger.debug({ ...entry, message: `JSON loaded`, timing: performance.now() - firstLoad });
118
- const artificialSource = { src: momoa.print(document, { indent: 2 }), document };
119
-
120
- // 2. Parse
121
- const firstPass = performance.now();
122
- const tokens: TokenNormalizedSet = {};
123
- // micro-optimization: while we’re iterating over tokens, keeping a “hot”
124
- // array in memory saves recreating arrays from object keys over and over again.
125
- // it does produce a noticeable speedup > 1,000 tokens.
126
- const tokenIDs: string[] = [];
127
- const groups: Record<string, GroupNormalized> = {};
128
-
129
- // 2a. Token & group population
130
- await traverseAsync(document, {
131
- async enter(node, _parent, path) {
132
- if (node.type !== 'Object') {
133
- return;
134
- }
135
- groupFromNode(node, { path, groups });
136
- const token = tokenFromNode(node, {
137
- groups,
138
- ignore: config.ignore,
139
- path,
140
- source: { src: artificialSource, document },
141
- });
142
- if (token) {
143
- tokenIDs.push(token.jsonID);
144
- tokens[token.jsonID] = token;
145
- }
146
- },
147
- });
148
-
149
- logger.debug({ ...entry, message: 'Parsing: 1st pass', timing: performance.now() - firstPass });
150
- const secondPass = performance.now();
151
113
 
152
- // 2b. Resolve originalValue and original sources
153
- for (const source of Object.values(sourceByFilename)) {
154
- await traverseAsync(source.document, {
155
- async enter(node, _parent, path) {
156
- if (node.type !== 'Object') {
157
- return;
158
- }
159
-
160
- const tokenRawValues = tokenRawValuesFromNode(node, { filename: source.filename!.href, path });
161
- if (tokenRawValues && tokens[tokenRawValues?.jsonID]) {
162
- tokens[tokenRawValues.jsonID]!.originalValue = tokenRawValues.originalValue;
163
- tokens[tokenRawValues.jsonID]!.source = tokenRawValues.source;
164
- for (const mode of Object.keys(tokenRawValues.mode)) {
165
- tokens[tokenRawValues.jsonID]!.mode[mode]!.originalValue = tokenRawValues.mode[mode]!.originalValue;
166
- tokens[tokenRawValues.jsonID]!.mode[mode]!.source = tokenRawValues.mode[mode]!.source;
167
- }
168
- }
169
- },
170
- });
171
- }
172
-
173
- // 2c. DTCG alias resolution
174
- // Unlike $refs which can be resolved as we go, these can’t happen until the final, flattened set
175
- resolveAliases(tokens, { logger, sources: sourceByFilename, refMap });
176
- logger.debug({ ...entry, message: 'Parsing: 2nd pass', timing: performance.now() - secondPass });
177
-
178
- // 3. Alias graph
179
- // We’ve resolved aliases, but we need this pass for reverse linking i.e. “aliasedBy”
180
- const aliasStart = performance.now();
181
- graphAliases(refMap, { tokens, logger, sources: sourceByFilename });
182
- logger.debug({ ...entry, message: 'Alias graph built', timing: performance.now() - aliasStart });
183
-
184
- // 4. normalize
185
- // Allow for some minor variance in inputs, and be nice to folks.
186
- const normalizeStart = performance.now();
187
- for (const id of tokenIDs) {
188
- const token = tokens[id]!;
189
- normalize(token as any, { logger, src: sourceByFilename[token.source.filename!]?.src });
190
- }
191
- logger.debug({ ...entry, message: 'Normalized values', timing: performance.now() - normalizeStart });
192
-
193
- // 5. alphabetize & filter
194
- // This can’t happen until the last step, where we’re 100% sure we’ve resolved everything.
195
- const tokensSorted: TokenNormalizedSet = {};
196
- tokenIDs.sort((a, b) => a.localeCompare(b, 'en-us', { numeric: true }));
197
- for (const path of tokenIDs) {
198
- // Filter out any tokens in $defs (we needed to reference them earlier, but shouldn’t include them in the final assortment)
199
- if (path.includes('/$defs/')) {
200
- continue;
201
- }
202
- const id = refToTokenID(path)!;
203
- tokensSorted[id] = tokens[path]!;
204
- }
205
- // Sort group IDs once, too
206
- for (const group of Object.values(groups)) {
207
- group.tokens.sort((a, b) => a.localeCompare(b, 'en-us', { numeric: true }));
208
- }
209
-
210
- return { tokens: tokensSorted, sources };
114
+ const rootSource = { filename: sources[0]!.filename!, document, src: momoa.print(document, { indent: 2 }) };
115
+ return {
116
+ tokens: processTokens(rootSource, { config, logger, refMap, sources, sourceByFilename }),
117
+ sources,
118
+ };
211
119
  }
212
120
 
213
121
  function transformer(transform: TransformVisitors): BundleOptions['parse'] {
@@ -223,8 +131,10 @@ function transformer(transform: TransformVisitors): BundleOptions['parse'] {
223
131
  }
224
132
  }
225
133
 
226
- await traverseAsync(document, {
227
- async enter(node, parent, path) {
134
+ const isResolver = isLikelyResolver(document);
135
+ traverse(document, {
136
+ enter(node, parent, rawPath) {
137
+ const path = isResolver ? filterResolverPaths(rawPath) : rawPath;
228
138
  if (node.type !== 'Object' || !path.length) {
229
139
  return;
230
140
  }
@@ -0,0 +1,124 @@
1
+ import { type InputSourceWithDocument, type RefMap, traverse } from '@terrazzo/json-schema-tools';
2
+ import type { GroupNormalized, TokenNormalizedSet } from '@terrazzo/token-tools';
3
+ import { filterResolverPaths } from '../lib/resolver-utils.js';
4
+ import type Logger from '../logger.js';
5
+ import { isLikelyResolver } from '../resolver/validate.js';
6
+ import type { ConfigInit } from '../types.js';
7
+ import { normalize } from './normalize.js';
8
+ import {
9
+ graphAliases,
10
+ groupFromNode,
11
+ refToTokenID,
12
+ resolveAliases,
13
+ tokenFromNode,
14
+ tokenRawValuesFromNode,
15
+ } from './token.js';
16
+
17
+ export interface ProcessTokensOptions {
18
+ config: ConfigInit;
19
+ logger: Logger;
20
+ sourceByFilename: Record<string, InputSourceWithDocument>;
21
+ refMap: RefMap;
22
+ sources: InputSourceWithDocument[];
23
+ }
24
+
25
+ export function processTokens(
26
+ rootSource: InputSourceWithDocument,
27
+ { config, logger, sourceByFilename, refMap }: ProcessTokensOptions,
28
+ ): TokenNormalizedSet {
29
+ const entry = { group: 'parser' as const, label: 'init' };
30
+
31
+ // 2. Parse
32
+ const firstPass = performance.now();
33
+ const tokens: TokenNormalizedSet = {};
34
+ // micro-optimization: while we’re iterating over tokens, keeping a “hot”
35
+ // array in memory saves recreating arrays from object keys over and over again.
36
+ // it does produce a noticeable speedup > 1,000 tokens.
37
+ const tokenIDs: string[] = [];
38
+ const groups: Record<string, GroupNormalized> = {};
39
+
40
+ // 2a. Token & group population
41
+ const isResolver = isLikelyResolver(rootSource.document);
42
+ traverse(rootSource.document, {
43
+ enter(node, _parent, rawPath) {
44
+ if (node.type !== 'Object') {
45
+ return;
46
+ }
47
+ const path = isResolver ? filterResolverPaths(rawPath) : rawPath;
48
+ groupFromNode(node, { path, groups });
49
+ const token = tokenFromNode(node, {
50
+ groups,
51
+ ignore: config.ignore,
52
+ path,
53
+ source: rootSource,
54
+ });
55
+ if (token) {
56
+ tokenIDs.push(token.jsonID);
57
+ tokens[token.jsonID] = token;
58
+ }
59
+ },
60
+ });
61
+
62
+ logger.debug({ ...entry, message: 'Parsing: 1st pass', timing: performance.now() - firstPass });
63
+ const secondPass = performance.now();
64
+
65
+ // 2b. Resolve originalValue and original sources
66
+ for (const source of Object.values(sourceByFilename)) {
67
+ traverse(source.document, {
68
+ enter(node, _parent, path) {
69
+ if (node.type !== 'Object') {
70
+ return;
71
+ }
72
+
73
+ const tokenRawValues = tokenRawValuesFromNode(node, { filename: source.filename!.href, path });
74
+ if (tokenRawValues && tokens[tokenRawValues?.jsonID]) {
75
+ tokens[tokenRawValues.jsonID]!.originalValue = tokenRawValues.originalValue;
76
+ tokens[tokenRawValues.jsonID]!.source = tokenRawValues.source;
77
+ for (const mode of Object.keys(tokenRawValues.mode)) {
78
+ tokens[tokenRawValues.jsonID]!.mode[mode]!.originalValue = tokenRawValues.mode[mode]!.originalValue;
79
+ tokens[tokenRawValues.jsonID]!.mode[mode]!.source = tokenRawValues.mode[mode]!.source;
80
+ }
81
+ }
82
+ },
83
+ });
84
+ }
85
+
86
+ // 2c. DTCG alias resolution
87
+ // Unlike $refs which can be resolved as we go, these can’t happen until the final, flattened set
88
+ resolveAliases(tokens, { logger, sources: sourceByFilename, refMap });
89
+ logger.debug({ ...entry, message: 'Parsing: 2nd pass', timing: performance.now() - secondPass });
90
+
91
+ // 3. Alias graph
92
+ // We’ve resolved aliases, but we need this pass for reverse linking i.e. “aliasedBy”
93
+ const aliasStart = performance.now();
94
+ graphAliases(refMap, { tokens, logger, sources: sourceByFilename });
95
+ logger.debug({ ...entry, message: 'Alias graph built', timing: performance.now() - aliasStart });
96
+
97
+ // 4. normalize
98
+ // Allow for some minor variance in inputs, and be nice to folks.
99
+ const normalizeStart = performance.now();
100
+ for (const id of tokenIDs) {
101
+ const token = tokens[id]!;
102
+ normalize(token as any, { logger, src: sourceByFilename[token.source.filename!]?.src });
103
+ }
104
+ logger.debug({ ...entry, message: 'Normalized values', timing: performance.now() - normalizeStart });
105
+
106
+ // 5. alphabetize & filter
107
+ // This can’t happen until the last step, where we’re 100% sure we’ve resolved everything.
108
+ const tokensSorted: TokenNormalizedSet = {};
109
+ tokenIDs.sort((a, b) => a.localeCompare(b, 'en-us', { numeric: true }));
110
+ for (const path of tokenIDs) {
111
+ // Filter out any tokens in $defs (we needed to reference them earlier, but shouldn’t include them in the final assortment)
112
+ if (path.includes('/$defs/')) {
113
+ continue;
114
+ }
115
+ const id = refToTokenID(path)!;
116
+ tokensSorted[id] = tokens[path]!;
117
+ }
118
+ // Sort group IDs once, too
119
+ for (const group of Object.values(groups)) {
120
+ group.tokens.sort((a, b) => a.localeCompare(b, 'en-us', { numeric: true }));
121
+ }
122
+
123
+ return tokensSorted;
124
+ }
@@ -1,5 +1,5 @@
1
1
  import * as momoa from '@humanwhocodes/momoa';
2
- import { getObjMember, parseRef, type RefMap } from '@terrazzo/json-schema-tools';
2
+ import { getObjMember, type InputSourceWithDocument, parseRef, type RefMap } from '@terrazzo/json-schema-tools';
3
3
  import {
4
4
  type GroupNormalized,
5
5
  isAlias,
@@ -9,7 +9,7 @@ import {
9
9
  } from '@terrazzo/token-tools';
10
10
  import wcmatch from 'wildcard-match';
11
11
  import type { default as Logger } from '../logger.js';
12
- import type { Config, InputSource, ReferenceObject } from '../types.js';
12
+ import type { Config, ReferenceObject } from '../types.js';
13
13
 
14
14
  /** Convert valid DTCG alias to $ref */
15
15
  export function aliasToRef(alias: string, mode?: string): ReferenceObject | undefined {
@@ -26,7 +26,7 @@ export function aliasToRef(alias: string, mode?: string): ReferenceObject | unde
26
26
  export interface TokenFromNodeOptions {
27
27
  groups: Record<string, GroupNormalized>;
28
28
  path: string[];
29
- source: InputSource;
29
+ source: InputSourceWithDocument;
30
30
  ignore: Config['ignore'];
31
31
  }
32
32
 
@@ -51,7 +51,7 @@ export function tokenFromNode(
51
51
  group.tokens.push(id);
52
52
  }
53
53
 
54
- const nodeSource = { filename: source.filename?.href, node };
54
+ const nodeSource = { filename: source.filename.href, node };
55
55
  const token: TokenNormalized = {
56
56
  id,
57
57
  $type: originalToken.$type || group.$type,
@@ -211,7 +211,7 @@ export function groupFromNode(
211
211
 
212
212
  export interface GraphAliasesOptions {
213
213
  tokens: TokenNormalizedSet;
214
- sources: Record<string, InputSource>;
214
+ sources: Record<string, InputSourceWithDocument>;
215
215
  logger: Logger;
216
216
  }
217
217
 
@@ -425,7 +425,7 @@ const EXPECTED_NESTED_ALIAS: Record<string, Record<string, string[]>> = {
425
425
  */
426
426
  export function resolveAliases(
427
427
  tokens: TokenNormalizedSet,
428
- { logger, refMap, sources }: { logger: Logger; refMap: RefMap; sources: Record<string, InputSource> },
428
+ { logger, refMap, sources }: { logger: Logger; refMap: RefMap; sources: Record<string, InputSourceWithDocument> },
429
429
  ): void {
430
430
  for (const token of Object.values(tokens)) {
431
431
  const aliasEntry = {
@@ -0,0 +1,86 @@
1
+ import * as momoa from '@humanwhocodes/momoa';
2
+ import type { InputSourceWithDocument } from '@terrazzo/json-schema-tools';
3
+ import type Logger from '../logger.js';
4
+ import type { ConfigInit, Group, Resolver, TokenNormalized, TokenNormalizedSet } from '../types.js';
5
+ import { createResolver } from './load.js';
6
+ import { normalizeResolver } from './normalize.js';
7
+
8
+ export interface CreateSyntheticResolverOptions {
9
+ config: ConfigInit;
10
+ logger: Logger;
11
+ req: (url: URL, origin: URL) => Promise<string>;
12
+ sources: InputSourceWithDocument[];
13
+ }
14
+
15
+ /**
16
+ * Interop layer upgrading legacy Terrazzo modes to resolvers
17
+ */
18
+ export async function createSyntheticResolver(
19
+ tokens: TokenNormalizedSet,
20
+ { config, logger, req, sources }: CreateSyntheticResolverOptions,
21
+ ): Promise<Resolver> {
22
+ const contexts: Record<string, any[]> = {};
23
+ for (const token of Object.values(tokens)) {
24
+ for (const [mode, value] of Object.entries(token.mode)) {
25
+ if (mode === '.') {
26
+ continue;
27
+ }
28
+ if (!(mode in contexts)) {
29
+ contexts[mode] = [{}];
30
+ }
31
+ addToken(contexts[mode]![0], { ...token, $value: value.$value }, { logger });
32
+ }
33
+ }
34
+
35
+ const src = JSON.stringify(
36
+ {
37
+ name: 'Terrazzo',
38
+ version: '2025.10',
39
+ resolutionOrder: [{ $ref: '#/sets/allTokens' }, { $ref: '#/modifiers/tzMode' }],
40
+ sets: {
41
+ allTokens: { sources: [simpleFlatten(tokens, { logger })] },
42
+ },
43
+ modifiers: {
44
+ tzMode: {
45
+ description: 'Automatically built from $extensions.mode',
46
+ contexts,
47
+ },
48
+ },
49
+ },
50
+ undefined,
51
+ 2,
52
+ );
53
+ const normalized = await normalizeResolver(momoa.parse(src), {
54
+ filename: new URL('file:///virtual:resolver.json'),
55
+ logger,
56
+ req,
57
+ src,
58
+ });
59
+ return createResolver(normalized, { config, logger, sources });
60
+ }
61
+
62
+ /** Add a normalized token back into an arbitrary, hierarchical structure */
63
+ function addToken(structure: any, token: TokenNormalized, { logger }: { logger: Logger }): void {
64
+ let node = structure;
65
+ const parts = token.id.split('.');
66
+ const localID = parts.pop()!;
67
+ for (const part of parts) {
68
+ if (!(part in node)) {
69
+ node[part] = {};
70
+ }
71
+ node = node[part];
72
+ }
73
+ if (localID in node) {
74
+ logger.error({ group: 'parser', label: 'resolver', message: `${localID} already exists!` });
75
+ }
76
+ node[localID] = { $type: token.$type, $value: token.$value };
77
+ }
78
+
79
+ /** Downconvert normalized tokens back into a simplified, hierarchical shape. This is extremely lossy, and only done to build a resolver. */
80
+ function simpleFlatten(tokens: TokenNormalizedSet, { logger }: { logger: Logger }): Group {
81
+ const group: Group = {};
82
+ for (const token of Object.values(tokens)) {
83
+ addToken(group, token, { logger });
84
+ }
85
+ return group;
86
+ }