@sprig-and-prose/sprig-universe 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/PHILOSOPHY.md +201 -0
- package/README.md +168 -0
- package/REFERENCE.md +355 -0
- package/biome.json +24 -0
- package/package.json +30 -0
- package/repositories/sprig-repository-github/index.js +29 -0
- package/src/ast.js +257 -0
- package/src/cli.js +1510 -0
- package/src/graph.js +950 -0
- package/src/index.js +46 -0
- package/src/ir.js +121 -0
- package/src/parser.js +1656 -0
- package/src/scanner.js +255 -0
- package/src/scene-manifest.js +856 -0
- package/src/util/span.js +46 -0
- package/src/util/text.js +126 -0
- package/src/validator.js +862 -0
- package/src/validators/mysql/connection.js +154 -0
- package/src/validators/mysql/schema.js +209 -0
- package/src/validators/mysql/type-compat.js +219 -0
- package/src/validators/mysql/validator.js +332 -0
- package/test/fixtures/amaranthine-mini.prose +53 -0
- package/test/fixtures/conflicting-universes-a.prose +8 -0
- package/test/fixtures/conflicting-universes-b.prose +8 -0
- package/test/fixtures/duplicate-names.prose +20 -0
- package/test/fixtures/first-line-aware.prose +32 -0
- package/test/fixtures/indented-describe.prose +18 -0
- package/test/fixtures/multi-file-universe-a.prose +15 -0
- package/test/fixtures/multi-file-universe-b.prose +15 -0
- package/test/fixtures/multi-file-universe-conflict-desc.prose +12 -0
- package/test/fixtures/multi-file-universe-conflict-title.prose +4 -0
- package/test/fixtures/multi-file-universe-with-title.prose +10 -0
- package/test/fixtures/named-document.prose +17 -0
- package/test/fixtures/named-duplicate.prose +22 -0
- package/test/fixtures/named-reference.prose +17 -0
- package/test/fixtures/relates-errors.prose +38 -0
- package/test/fixtures/relates-tier1.prose +14 -0
- package/test/fixtures/relates-tier2.prose +16 -0
- package/test/fixtures/relates-tier3.prose +21 -0
- package/test/fixtures/sprig-meta-mini.prose +62 -0
- package/test/fixtures/unresolved-relates.prose +15 -0
- package/test/fixtures/using-in-references.prose +35 -0
- package/test/fixtures/using-unknown.prose +8 -0
- package/test/universe-basic.test.js +804 -0
- package/tsconfig.json +15 -0
@@ -0,0 +1,804 @@
+/**
+ * @fileoverview Basic tests for universe parser
+ */
+
+import { test } from 'node:test';
+import { readFileSync } from 'fs';
+import { join } from 'path';
+import { fileURLToPath } from 'url';
+import { dirname } from 'path';
+import { parseFiles } from '../src/index.js';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+const fixturesDir = join(__dirname, 'fixtures');
+
+test('parses amaranthine-mini.prose', () => {
+  const file = join(fixturesDir, 'amaranthine-mini.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Check universe exists
+  assert(graph.universes.Amaranthine !== undefined, 'Universe Amaranthine should exist');
+  assert(graph.universes.Amaranthine.name === 'Amaranthine', 'Universe name should match');
+
+  // Check nodes exist
+  const universeNodeId = graph.universes.Amaranthine.root;
+  const universeNode = graph.nodes[universeNodeId];
+  assert(universeNode !== undefined, 'Universe node should exist');
+  assert(universeNode.kind === 'universe', 'Universe node kind should be universe');
+  assert(universeNode.name === 'Amaranthine', 'Universe node name should match');
+
+  // Check series nodes
+  const playerNodeId = `${universeNodeId.split(':')[0]}:series:Player`;
+  const playerNode = graph.nodes[playerNodeId];
+  assert(playerNode !== undefined, 'Player series node should exist');
+  assert(playerNode.kind === 'series', 'Player node kind should be series');
+  assert(playerNode.name === 'Player', 'Player node name should match');
+  assert(playerNode.parent === universeNodeId, 'Player should be child of universe');
+
+  const itemNodeId = `${universeNodeId.split(':')[0]}:series:Item`;
+  const itemNode = graph.nodes[itemNodeId];
+  assert(itemNode !== undefined, 'Item series node should exist');
+
+  // Check book nodes
+  const equipmentNodeId = `${universeNodeId.split(':')[0]}:book:Equipment`;
+  const equipmentNode = graph.nodes[equipmentNodeId];
+  assert(equipmentNode !== undefined, 'Equipment book node should exist');
+  assert(equipmentNode.kind === 'book', 'Equipment node kind should be book');
+  assert(equipmentNode.container === itemNodeId, 'Equipment should be in Item');
+  assert('container' in equipmentNode, 'Book node should have container field');
+
+  // Check chapter node
+  const toolNodeId = `${universeNodeId.split(':')[0]}:chapter:Tool`;
+  const toolNode = graph.nodes[toolNodeId];
+  assert(toolNode !== undefined, 'Tool chapter node should exist');
+  assert(toolNode.kind === 'chapter', 'Tool node kind should be chapter');
+  assert(toolNode.container === equipmentNodeId, 'Tool should be in Equipment');
+  assert('container' in toolNode, 'Chapter node should have container field');
+
+  // Check describe blocks
+  assert(universeNode.describe !== undefined, 'Universe should have describe block');
+  assert(
+    universeNode.describe.raw.includes('persistent text-based roleplaying game'),
+    'Universe describe should contain expected text',
+  );
+  assert(universeNode.describe.source !== undefined, 'Describe should have source span');
+  assert(
+    universeNode.describe.normalized !== undefined,
+    'Describe block should have normalized field',
+  );
+  assert(
+    typeof universeNode.describe.normalized === 'string',
+    'Normalized field should be a string',
+  );
+
+  // Check references block (parsed correctly, not as unknown)
+  assert(playerNode.references !== undefined, 'Player should have references');
+  assert(playerNode.references.length > 0, 'Player should have at least one reference');
+  const ref = playerNode.references[0];
+  assert(ref !== undefined, 'Player should have a reference');
+  assert(ref.repository === 'amaranthine', 'Reference should have repository');
+  assert(ref.paths !== undefined, 'Reference should have paths');
+  assert(ref.paths.length > 0, 'Reference should have at least one path');
+  assert(ref.source !== undefined, 'Reference should have source location');
+
+  // Check source spans
+  assert(universeNode.source !== undefined, 'Universe node should have source span');
+  assert(universeNode.source.start !== undefined, 'Source span should have start');
+  assert(universeNode.source.end !== undefined, 'Source span should have end');
+  assert(
+    universeNode.source.start.offset < universeNode.source.end.offset,
+    'Source span offsets should be monotonic',
+  );
+});
+
+test('parses sprig-meta-mini.prose', () => {
+  const file = join(fixturesDir, 'sprig-meta-mini.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Check universe exists
+  assert(graph.universes.Sprig !== undefined, 'Universe Sprig should exist');
+
+  const universeNodeId = graph.universes.Sprig.root;
+  const universeNode = graph.nodes[universeNodeId];
+
+  // Check series nodes
+  const languageNodeId = `${universeNodeId.split(':')[0]}:series:Language`;
+  const structureNodeId = `${universeNodeId.split(':')[0]}:series:Structure`;
+  assert(graph.nodes[languageNodeId] !== undefined, 'Language series should exist');
+  assert(graph.nodes[structureNodeId] !== undefined, 'Structure series should exist');
+
+  // Check book nodes in Structure
+  const universeBookId = `${universeNodeId.split(':')[0]}:book:Universe`;
+  const universeBook = graph.nodes[universeBookId];
+  assert(universeBook !== undefined, 'Universe book should exist');
+  assert(universeBook.container === structureNodeId, 'Universe book should be in Structure');
+
+  // Check relates edges
+  const edgeKeys = Object.keys(graph.edges);
+  assert(edgeKeys.length >= 3, 'Should have at least 3 relates edges');
+
+  const universeSeriesEdge = edgeKeys.find((k) => k.includes('Universe--Series'));
+  assert(universeSeriesEdge !== undefined, 'Should have Universe--Series edge');
+  const edge = graph.edges[universeSeriesEdge];
+  assert(edge.kind === 'relates', 'Edge kind should be relates');
+  assert(edge.a.text === 'Universe', 'Edge A should be Universe');
+  assert(edge.b.text === 'Series', 'Edge B should be Series');
+  assert(edge.describe !== undefined, 'Edge should have describe block');
+  assert(edge.describe.raw.includes('composed'), 'Edge describe should contain expected text');
+  assert(
+    edge.describe.normalized !== undefined,
+    'Edge describe should have normalized field',
+  );
+
+  // Check edge resolution
+  assert(edge.a.target !== undefined, 'Edge A should be resolved');
+  assert(edge.b.target !== undefined, 'Edge B should be resolved');
+});
+
+test('preserves describe block formatting', () => {
+  const file = join(fixturesDir, 'amaranthine-mini.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  const universeNodeId = graph.universes.Amaranthine.root;
+  const universeNode = graph.nodes[universeNodeId];
+
+  // Check that newlines are preserved
+  assert(
+    universeNode.describe.raw.includes('\n'),
+    'Describe block should preserve newlines',
+  );
+  assert(
+    universeNode.describe.raw.includes('semi-afk game'),
+    'Describe block should preserve content',
+  );
+});
+
+test('handles multiple files', () => {
+  const file1 = join(fixturesDir, 'amaranthine-mini.prose');
+  const file2 = join(fixturesDir, 'sprig-meta-mini.prose');
+  const text1 = readFileSync(file1, 'utf-8');
+  const text2 = readFileSync(file2, 'utf-8');
+
+  const graph = parseFiles([
+    { file: file1, text: text1 },
+    { file: file2, text: text2 },
+  ]);
+
+  // Both universes should exist
+  assert(graph.universes.Amaranthine !== undefined, 'Amaranthine universe should exist');
+  assert(graph.universes.Sprig !== undefined, 'Sprig universe should exist');
+
+  // Nodes from both universes should exist
+  const amaranthineRoot = graph.universes.Amaranthine.root;
+  const sprigRoot = graph.universes.Sprig.root;
+  assert(graph.nodes[amaranthineRoot] !== undefined, 'Amaranthine root node should exist');
+  assert(graph.nodes[sprigRoot] !== undefined, 'Sprig root node should exist');
+});
+
+test('source spans are present and valid', () => {
+  const file = join(fixturesDir, 'amaranthine-mini.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Check all nodes have source spans
+  for (const nodeId in graph.nodes) {
+    const node = graph.nodes[nodeId];
+    assert(node.source !== undefined, `Node ${nodeId} should have source span`);
+    assert(node.source.file === file, `Node ${nodeId} source should reference correct file`);
+    assert(node.source.start !== undefined, `Node ${nodeId} source should have start`);
+    assert(node.source.end !== undefined, `Node ${nodeId} source should have end`);
+    assert(
+      node.source.start.offset <= node.source.end.offset,
+      `Node ${nodeId} source offsets should be monotonic`,
+    );
+  }
+
+  // Check all edges have source spans
+  for (const edgeId in graph.edges) {
+    const edge = graph.edges[edgeId];
+    assert(edge.source !== undefined, `Edge ${edgeId} should have source span`);
+  }
+});
+
+test('normalizes prose blocks with first-line-aware dedent', () => {
+  const file = join(fixturesDir, 'first-line-aware.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  const universeNodeId = graph.universes.Test.root;
+  const universeNode = graph.nodes[universeNodeId];
+
+  // Check universe describe: first line has 0 indent, subsequent lines indented
+  assert(
+    universeNode.describe !== undefined,
+    'Universe should have describe block',
+  );
+  const normalized = universeNode.describe.normalized;
+  assert(normalized !== undefined, 'Normalized should exist');
+
+  const normalizedLines = normalized.split('\n');
+  // First line should remain unchanged (starts with "This")
+  assert(
+    normalizedLines[0].trim() === 'This is a test universe',
+    'First line should remain unchanged',
+  );
+  // Subsequent lines should have indentation removed
+  // Line 1 should be "with multiple lines" (no leading spaces after normalization)
+  assert(normalizedLines.length > 1, 'Should have at least 2 lines');
+  const secondLine = normalizedLines[1];
+  // The second line should have had its 4-space indent removed
+  assert(
+    secondLine === 'with multiple lines',
+    `Second line should be 'with multiple lines', got: ${JSON.stringify(secondLine)}`,
+  );
+  // But internal indentation should be preserved
+  const indentedLine = normalizedLines.find((l) => l.includes('and indented content'));
+  assert(
+    indentedLine !== undefined && indentedLine.includes(' '),
+    'Internal indentation should be preserved',
+  );
+
+  // Check series describe block
+  const seriesNodeId = `${universeNodeId.split(':')[0]}:series:Example`;
+  const seriesNode = graph.nodes[seriesNodeId];
+  assert(seriesNode !== undefined, 'Example series should exist');
+  assert(seriesNode.describe !== undefined, 'Series should have describe block');
+
+  const seriesNormalized = seriesNode.describe.normalized;
+  assert(seriesNormalized !== undefined, 'Series normalized should exist');
+
+  const seriesNormalizedLines = seriesNormalized.split('\n');
+  // First non-empty line should start with "This" (no leading spaces)
+  const firstSeriesLine = seriesNormalizedLines.find((l) => l.trim().length > 0);
+  assert(
+    firstSeriesLine !== undefined && firstSeriesLine.trim().startsWith('This'),
+    'First line should start flush-left',
+  );
+  // Subsequent lines should have common indent removed
+  // The minimum indent (4 spaces) should be removed from all lines
+  // Line 1 has 8 spaces, so after removing 4, it should have 4 spaces remaining
+  // But we check that at least some indent was removed (it shouldn't start with 8 spaces)
+  const secondSeriesLine = seriesNormalizedLines[1]; // Second line
+  assert(
+    secondSeriesLine !== undefined,
+    'Should have a second line',
+  );
+  // After normalization, line 1 should have less indent than the raw (8 -> 4 spaces)
+  // So it should NOT start with 8 spaces (which would be '        ')
+  assert(
+    !secondSeriesLine.startsWith('        '),
+    `Subsequent lines should have some indentation removed, got: ${JSON.stringify(secondSeriesLine)}`,
+  );
+
+  // Check single-line describe (should remain unchanged)
+  const singleLineNodeId = `${universeNodeId.split(':')[0]}:book:SingleLine`;
+  const singleLineNode = graph.nodes[singleLineNodeId];
+  assert(singleLineNode !== undefined, 'SingleLine book should exist');
+  assert(singleLineNode.describe !== undefined, 'SingleLine should have describe block');
+  const singleLineNormalized = singleLineNode.describe.normalized;
+  // Single line should remain unchanged (no lines after first non-empty line)
+  assert(
+    singleLineNormalized === singleLineNode.describe.raw,
+    'Single-line describe should remain unchanged',
+  );
+
+  // Check all-indented describe (all lines share same indent)
+  const allIndentedNodeId = `${universeNodeId.split(':')[0]}:book:AllIndented`;
+  const allIndentedNode = graph.nodes[allIndentedNodeId];
+  assert(allIndentedNode !== undefined, 'AllIndented book should exist');
+  assert(allIndentedNode.describe !== undefined, 'AllIndented should have describe block');
+  const allIndentedNormalized = allIndentedNode.describe.normalized;
+  const allIndentedNormalizedLines = allIndentedNormalized.split('\n');
+  // First non-empty line should have indent removed
+  const firstAllIndentedLine = allIndentedNormalizedLines.find((l) => l.trim().length > 0);
+  assert(
+    firstAllIndentedLine !== undefined && !firstAllIndentedLine.startsWith(' '),
+    'First line should have indent removed',
+  );
+});
+
+test('emits warnings for unresolved relates endpoints', () => {
+  const file = join(fixturesDir, 'unresolved-relates.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Should have warnings for unresolved endpoint
+  const warnings = graph.diagnostics.filter((d) => d.severity === 'warning');
+  const unresolvedWarnings = warnings.filter((w) =>
+    w.message.includes('Unresolved relates endpoint'),
+  );
+  assert(
+    unresolvedWarnings.length > 0,
+    'Should have warnings for unresolved relates endpoints',
+  );
+
+  const nonexistentWarning = unresolvedWarnings.find((w) =>
+    w.message.includes('Nonexistent'),
+  );
+  assert(
+    nonexistentWarning !== undefined,
+    'Should have warning for Nonexistent endpoint',
+  );
+  assert(
+    nonexistentWarning.source !== undefined,
+    'Warning should have source span',
+  );
+
+  // Edge should still exist with unresolved target
+  const edgeKeys = Object.keys(graph.edges);
+  const unresolvedEdge = edgeKeys.find((k) => k.includes('Nonexistent'));
+  assert(unresolvedEdge !== undefined, 'Edge with unresolved endpoint should exist');
+
+  const edge = graph.edges[unresolvedEdge];
+  assert(edge.a.target !== undefined, 'Edge A should be resolved');
+  assert(edge.b.target === undefined, 'Edge B should be unresolved');
+});
+
+test('emits warnings for duplicate node names', () => {
+  const file = join(fixturesDir, 'duplicate-names.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Should have warnings for duplicate names
+  const warnings = graph.diagnostics.filter((d) => d.severity === 'warning');
+  const duplicateWarnings = warnings.filter((w) =>
+    w.message.includes('Duplicate concept name'),
+  );
+  assert(
+    duplicateWarnings.length >= 2,
+    'Should have warnings for duplicate concept names',
+  );
+
+  const duplicateSeriesWarning = duplicateWarnings.find((w) =>
+    w.message.includes('Duplicate') && w.message.includes('series'),
+  );
+  assert(
+    duplicateSeriesWarning !== undefined,
+    'Should have warning for duplicate series name',
+  );
+  assert(
+    duplicateSeriesWarning.source !== undefined,
+    'Warning should have source span',
+  );
+  // Verify the warning shows both kinds
+  assert(
+    duplicateSeriesWarning.message.includes('already defined as series'),
+    'Warning should show the first occurrence kind',
+  );
+  assert(
+    duplicateSeriesWarning.message.includes('now also defined as series'),
+    'Warning should show the duplicate kind',
+  );
+
+  // Note: Duplicate nodes have the same ID, so the second overwrites the first in the nodes map
+  // But both are processed and warnings are emitted. The nameMap resolves to the first encountered.
+  const universeNodeId = graph.universes.Test.root;
+  const universePrefix = universeNodeId.split(':')[0];
+  const duplicateId = `${universePrefix}:series:Duplicate`;
+  const duplicateNode = graph.nodes[duplicateId];
+  assert(duplicateNode !== undefined, 'Duplicate node should exist (second overwrites first)');
+
+  // Check that duplicate book names also produce warnings
+  const duplicateBookWarnings = duplicateWarnings.filter((w) =>
+    w.message.includes('Book1') && w.message.includes('book'),
+  );
+  assert(
+    duplicateBookWarnings.length > 0,
+    'Should have warning for duplicate book names',
+  );
+  // Verify the warning shows both kinds
+  assert(
+    duplicateBookWarnings[0].message.includes('already defined as book'),
+    'Warning should show the first occurrence kind for books',
+  );
+  assert(
+    duplicateBookWarnings[0].message.includes('now also defined as book'),
+    'Warning should show the duplicate kind for books',
+  );
+});
+
+test('merges multiple files with same universe name', () => {
+  const fileA = join(fixturesDir, 'multi-file-universe-a.prose');
+  const fileB = join(fixturesDir, 'multi-file-universe-b.prose');
+  const textA = readFileSync(fileA, 'utf-8');
+  const textB = readFileSync(fileB, 'utf-8');
+  const graph = parseFiles([
+    { file: fileA, text: textA },
+    { file: fileB, text: textB },
+  ]);
+
+  // Should have exactly one universe
+  assert(Object.keys(graph.universes).length === 1, 'Should have exactly one universe');
+  assert(graph.universes.Amaranthine !== undefined, 'Universe Amaranthine should exist');
+
+  const universeNodeId = graph.universes.Amaranthine.root;
+  const universeNode = graph.nodes[universeNodeId];
+  assert(universeNode !== undefined, 'Universe node should exist');
+
+  // Both series should exist (merged from both files)
+  const itemsNodeId = `${universeNodeId.split(':')[0]}:series:Items`;
+  const skillsNodeId = `${universeNodeId.split(':')[0]}:series:Skills`;
+  assert(graph.nodes[itemsNodeId] !== undefined, 'Items series should exist');
+  assert(graph.nodes[skillsNodeId] !== undefined, 'Skills series should exist');
+
+  // References should be merged (both files had references)
+  assert(universeNode.references !== undefined, 'Universe should have references');
+  assert(universeNode.references.length === 2, 'Universe should have 2 references (one from each file)');
+
+  // No errors should be present
+  const errors = graph.diagnostics.filter((d) => d.severity === 'error');
+  assert(errors.length === 0, `Should have no errors, but found: ${errors.map((e) => e.message).join('; ')}`);
+});
+
+test('errors on conflicting universe names', () => {
+  const fileA = join(fixturesDir, 'conflicting-universes-a.prose');
+  const fileB = join(fixturesDir, 'conflicting-universes-b.prose');
+  const textA = readFileSync(fileA, 'utf-8');
+  const textB = readFileSync(fileB, 'utf-8');
+  const graph = parseFiles([
+    { file: fileA, text: textA },
+    { file: fileB, text: textB },
+  ]);
+
+  // Should have error about multiple universes
+  const errors = graph.diagnostics.filter((d) => d.severity === 'error');
+  const universeError = errors.find((e) => e.message.includes('Multiple distinct universes'));
+  assert(universeError !== undefined, 'Should have error about multiple distinct universes');
+  assert(
+    universeError.message.includes('Amaranthine'),
+    'Error should mention Amaranthine universe',
+  );
+  assert(
+    universeError.message.includes('OtherUniverse'),
+    'Error should mention OtherUniverse universe',
+  );
+});
+
+test('errors on conflicting describe blocks', () => {
+  const fileA = join(fixturesDir, 'amaranthine-mini.prose');
+  const fileB = join(fixturesDir, 'multi-file-universe-conflict-desc.prose');
+  const textA = readFileSync(fileA, 'utf-8');
+  const textB = readFileSync(fileB, 'utf-8');
+  const graph = parseFiles([
+    { file: fileA, text: textA },
+    { file: fileB, text: textB },
+  ]);
+
+  // Should have error about conflicting describe blocks
+  const errors = graph.diagnostics.filter((d) => d.severity === 'error');
+  const describeError = errors.find((e) => e.message.includes('multiple describe blocks'));
+  assert(describeError !== undefined, 'Should have error about multiple describe blocks');
+  assert(
+    describeError.message.includes('Amaranthine'),
+    'Error should mention universe name',
+  );
+  assert(describeError.source !== undefined, 'Error should have source location');
+});
+
+test('errors on conflicting title blocks', () => {
+  const fileA = join(fixturesDir, 'multi-file-universe-with-title.prose');
+  const fileB = join(fixturesDir, 'multi-file-universe-conflict-title.prose');
+  const textA = readFileSync(fileA, 'utf-8');
+  const textB = readFileSync(fileB, 'utf-8');
+  const graph = parseFiles([
+    { file: fileA, text: textA },
+    { file: fileB, text: textB },
+  ]);
+
+  // Should have error about conflicting title blocks
+  const errors = graph.diagnostics.filter((d) => d.severity === 'error');
+  const titleError = errors.find((e) => e.message.includes('multiple title blocks'));
+  assert(titleError !== undefined, 'Should have error about multiple title blocks');
+  assert(
+    titleError.message.includes('Amaranthine'),
+    'Error should mention universe name',
+  );
+  assert(titleError.source !== undefined, 'Error should have source location');
+});
+
+test('preserves source locations for merged content', () => {
+  const fileA = join(fixturesDir, 'multi-file-universe-a.prose');
+  const fileB = join(fixturesDir, 'multi-file-universe-b.prose');
+  const textA = readFileSync(fileA, 'utf-8');
+  const textB = readFileSync(fileB, 'utf-8');
+  const graph = parseFiles([
+    { file: fileA, text: textA },
+    { file: fileB, text: textB },
+  ]);
+
+  const universeNodeId = graph.universes.Amaranthine.root;
+  const universeNode = graph.nodes[universeNodeId];
+
+  // References should preserve source locations from their respective files
+  assert(universeNode.references !== undefined, 'Universe should have references');
+  assert(universeNode.references.length === 2, 'Should have 2 references');
+
+  // Check that references have source locations
+  for (const ref of universeNode.references) {
+    assert(ref.source !== undefined, 'Reference should have source location');
+    assert(ref.source.file !== undefined, 'Reference source should have file path');
+  }
+
+  // Series nodes should preserve source locations
+  const itemsNodeId = `${universeNodeId.split(':')[0]}:series:Items`;
+  const skillsNodeId = `${universeNodeId.split(':')[0]}:series:Skills`;
+  assert(graph.nodes[itemsNodeId].source !== undefined, 'Items series should have source');
+  assert(graph.nodes[itemsNodeId].source.file === fileA, 'Items series should be from fileA');
+  assert(graph.nodes[skillsNodeId].source !== undefined, 'Skills series should have source');
+  assert(graph.nodes[skillsNodeId].source.file === fileB, 'Skills series should be from fileB');
+});
+
+test('deterministic output order', () => {
+  const fileA = join(fixturesDir, 'multi-file-universe-a.prose');
+  const fileB = join(fixturesDir, 'multi-file-universe-b.prose');
+  const textA = readFileSync(fileA, 'utf-8');
+  const textB = readFileSync(fileB, 'utf-8');
+
+  // Parse in one order
+  const graph1 = parseFiles([
+    { file: fileA, text: textA },
+    { file: fileB, text: textB },
+  ]);
+
+  // Parse in reverse order
+  const graph2 = parseFiles([
+    { file: fileB, text: textB },
+    { file: fileA, text: textA },
+  ]);
+
+  // Both should have the same universe
+  assert(graph1.universes.Amaranthine !== undefined, 'Graph1 should have Amaranthine');
+  assert(graph2.universes.Amaranthine !== undefined, 'Graph2 should have Amaranthine');
+
+  // Both should have the same series (order may differ, but both should exist)
+  const universeNodeId1 = graph1.universes.Amaranthine.root;
+  const universeNodeId2 = graph2.universes.Amaranthine.root;
+  const itemsNodeId = `${universeNodeId1.split(':')[0]}:series:Items`;
+  const skillsNodeId = `${universeNodeId1.split(':')[0]}:series:Skills`;
+
+  assert(graph1.nodes[itemsNodeId] !== undefined, 'Graph1 should have Items');
+  assert(graph1.nodes[skillsNodeId] !== undefined, 'Graph1 should have Skills');
+  assert(graph2.nodes[itemsNodeId] !== undefined, 'Graph2 should have Items');
+  assert(graph2.nodes[skillsNodeId] !== undefined, 'Graph2 should have Skills');
+
+  // References should be merged in both (order may differ, but count should match)
+  assert(
+    graph1.nodes[universeNodeId1].references?.length === 2,
+    'Graph1 should have 2 references',
+  );
+  assert(
+    graph2.nodes[universeNodeId2].references?.length === 2,
+    'Graph2 should have 2 references',
+  );
+});
+
+test('parses named reference blocks at universe scope', () => {
+  const file = join(fixturesDir, 'named-reference.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Check universe exists
+  assert(graph.universes.Test !== undefined, 'Universe Test should exist');
+
+  const universeNodeId = graph.universes.Test.root;
+  const universeNode = graph.nodes[universeNodeId];
+  assert(universeNode !== undefined, 'Universe node should exist');
+
+  // Check named reference exists in registry
+  assert(graph.referencesByName !== undefined, 'referencesByName registry should exist');
+  assert(graph.referencesByName.Test !== undefined, 'Universe Test should have referencesByName');
+  assert(graph.referencesByName.Test.ItemRouter !== undefined, 'ItemRouter named reference should exist');
+
+  const namedRef = graph.referencesByName.Test.ItemRouter;
+  assert(namedRef.name === 'ItemRouter', 'Named reference should have correct name');
+  assert(namedRef.repository === 'amaranthine-backend', 'Named reference should have repository');
+  assert(namedRef.paths.length === 1, 'Named reference should have paths');
+  assert(namedRef.paths[0] === '/src/routers/items.ts', 'Named reference should have correct path');
+  assert(namedRef.describe !== undefined, 'Named reference should have describe');
+  assert(namedRef.describe.raw.includes('Routes that implement item endpoints'), 'Named reference describe should contain expected text');
+  assert(namedRef.source !== undefined, 'Named reference should have source location');
+
+  // Check that inline reference still works
+  const itemsNodeId = `${universeNodeId.split(':')[0]}:series:Items`;
+  const itemsNode = graph.nodes[itemsNodeId];
+  assert(itemsNode !== undefined, 'Items series should exist');
+  assert(itemsNode.references !== undefined, 'Items series should have references');
+  assert(itemsNode.references.length === 1, 'Items series should have one inline reference');
+  assert(itemsNode.references[0].repository === 'amaranthine-backend', 'Inline reference should have repository');
+});
+
+test('parses named document blocks at universe scope', () => {
+  const file = join(fixturesDir, 'named-document.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Check universe exists
+  assert(graph.universes.Test !== undefined, 'Universe Test should exist');
+
+  const universeNodeId = graph.universes.Test.root;
+
+  // Check named document exists in registry
+  assert(graph.documentsByName !== undefined, 'documentsByName registry should exist');
+  assert(graph.documentsByName.Test !== undefined, 'Universe Test should have documentsByName');
+  assert(graph.documentsByName.Test.ItemsDesignDoc !== undefined, 'ItemsDesignDoc named document should exist');
+
+  const namedDoc = graph.documentsByName.Test.ItemsDesignDoc;
+  assert(namedDoc.name === 'ItemsDesignDoc', 'Named document should have correct name');
+  assert(namedDoc.kind === 'internal', 'Named document should have kind');
+  assert(namedDoc.path === '/docs/items/overview.md', 'Named document should have path');
+  assert(namedDoc.describe !== undefined, 'Named document should have describe');
+  assert(namedDoc.describe.raw.includes('High-level items design notes'), 'Named document describe should contain expected text');
+  assert(namedDoc.source !== undefined, 'Named document should have source location');
+
+  // Check that inline document still works
+  const itemsNodeId = `${universeNodeId.split(':')[0]}:series:Items`;
+  const itemsNode = graph.nodes[itemsNodeId];
+  assert(itemsNode !== undefined, 'Items series should exist');
+  assert(itemsNode.documentation !== undefined, 'Items series should have documentation');
+  assert(itemsNode.documentation.length === 1, 'Items series should have one inline document');
+  assert(itemsNode.documentation[0].kind === 'internal', 'Inline document should have kind');
+});
+
+test('detects duplicate named reference and document names', () => {
+  const file = join(fixturesDir, 'named-duplicate.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Should have errors for duplicate names (2 errors per duplicate: one for duplicate, one pointing to first)
+  const errors = graph.diagnostics.filter((d) => d.severity === 'error');
+  const duplicateRefErrors = errors.filter((e) => e.message.includes('Duplicate named reference') || e.message.includes('First declaration of named reference'));
+  const duplicateDocErrors = errors.filter((e) => e.message.includes('Duplicate named document') || e.message.includes('First declaration of named document'));
+
+  assert(duplicateRefErrors.length >= 2, `Should have errors for duplicate named reference (duplicate + first declaration), got ${duplicateRefErrors.length}: ${duplicateRefErrors.map(e => e.message).join('; ')}`);
+  assert(duplicateDocErrors.length >= 2, `Should have errors for duplicate named document (duplicate + first declaration), got ${duplicateDocErrors.length}: ${duplicateDocErrors.map(e => e.message).join('; ')}`);
+
+  // Check that error messages include both locations
+  const refError = duplicateRefErrors.find((e) => e.message.includes('ItemRouter') && e.message.includes('Duplicate'));
+  assert(refError !== undefined, 'Should have error for duplicate ItemRouter');
+  assert(refError.source !== undefined, 'Error should have source location');
+  assert(refError.message.includes('First declared at'), 'Error should mention first declaration location');
+
+  const docError = duplicateDocErrors.find((e) => e.message.includes('ItemsDesignDoc') && e.message.includes('Duplicate'));
+  assert(docError !== undefined, 'Should have error for duplicate ItemsDesignDoc');
+  assert(docError.source !== undefined, 'Error should have source location');
+  assert(docError.message.includes('First declared at'), 'Error should mention first declaration location');
+});
+
+test('preserves source locations for named blocks', () => {
+  const file = join(fixturesDir, 'named-reference.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  const namedRef = graph.referencesByName.Test.ItemRouter;
+  assert(namedRef.source !== undefined, 'Named reference should have source');
+  assert(namedRef.source.file === file, 'Named reference source should reference correct file');
+  assert(namedRef.source.start !== undefined, 'Named reference source should have start');
+  assert(namedRef.source.end !== undefined, 'Named reference source should have end');
+  assert(namedRef.source.start.offset < namedRef.source.end.offset, 'Source offsets should be monotonic');
+
+  const file2 = join(fixturesDir, 'named-document.prose');
+  const text2 = readFileSync(file2, 'utf-8');
+  const graph2 = parseFiles([{ file: file2, text: text2 }]);
+
+  const namedDoc = graph2.documentsByName.Test.ItemsDesignDoc;
+  assert(namedDoc.source !== undefined, 'Named document should have source');
+  assert(namedDoc.source.file === file2, 'Named document source should reference correct file');
+  assert(namedDoc.source.start !== undefined, 'Named document source should have start');
+  assert(namedDoc.source.end !== undefined, 'Named document source should have end');
+});
+
+test('parses using blocks in references', () => {
+  const file = join(fixturesDir, 'using-in-references.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Check universe exists
+  assert(graph.universes.Test !== undefined, 'Universe Test should exist');
+
+  const universeNodeId = graph.universes.Test.root;
+
+  // Check that named references exist
+  assert(graph.referencesByName.Test.ItemRouter !== undefined, 'ItemRouter named reference should exist');
+  assert(graph.referencesByName.Test.PlayerRouter !== undefined, 'PlayerRouter named reference should exist');
+
+  // Check Items series has using block resolved
+  const itemsNodeId = `${universeNodeId.split(':')[0]}:series:Items`;
+  const itemsNode = graph.nodes[itemsNodeId];
+  assert(itemsNode !== undefined, 'Items series should exist');
+  assert(itemsNode.references !== undefined, 'Items series should have references');
+  assert(itemsNode.references.length === 1, 'Items series should have one reference from using');
+  assert(itemsNode.references[0].repository === 'amaranthine-backend', 'Resolved reference should have repository');
+  assert(itemsNode.references[0].paths[0] === '/src/routers/items.ts', 'Resolved reference should have correct path');
+  assert(itemsNode.references[0].describe !== undefined, 'Resolved reference should have describe');
+});
+
+test('parses mixed inline and using blocks in references', () => {
+  const file = join(fixturesDir, 'using-in-references.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  const universeNodeId = graph.universes.Test.root;
+
+  // Check Players series has both inline and using references
+  const playersNodeId = `${universeNodeId.split(':')[0]}:series:Players`;
+  const playersNode = graph.nodes[playersNodeId];
+  assert(playersNode !== undefined, 'Players series should exist');
+  assert(playersNode.references !== undefined, 'Players series should have references');
+  assert(playersNode.references.length === 2, 'Players series should have 2 references (inline + using)');
+
+  // First should be inline reference
+  assert(playersNode.references[0].repository === 'amaranthine-backend', 'First reference should have repository');
+  assert(playersNode.references[0].paths[0] === '/src/players/helpers.ts', 'First reference should be inline');
+
+  // Second should be resolved from using
+  assert(playersNode.references[1].repository === 'amaranthine-backend', 'Second reference should have repository');
+  assert(playersNode.references[1].paths[0] === '/src/routers/players.ts', 'Second reference should be from using');
+});
+
+test('parses multiple names in using block', () => {
+  const file = join(fixturesDir, 'using-in-references.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  const universeNodeId = graph.universes.Test.root;
+
+  // Check Mixed series has multiple references from single using block
+  const mixedNodeId = `${universeNodeId.split(':')[0]}:series:Mixed`;
+  const mixedNode = graph.nodes[mixedNodeId];
+  assert(mixedNode !== undefined, 'Mixed series should exist');
+  assert(mixedNode.references !== undefined, 'Mixed series should have references');
+  assert(mixedNode.references.length === 2, 'Mixed series should have 2 references from using block');
+
+  // Both should be resolved
+  assert(mixedNode.references[0].paths[0] === '/src/routers/items.ts', 'First reference should be ItemRouter');
+  assert(mixedNode.references[1].paths[0] === '/src/routers/players.ts', 'Second reference should be PlayerRouter');
+});
+
+test('errors on unknown reference name in using block', () => {
+  const file = join(fixturesDir, 'using-unknown.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  // Should have error for unknown reference
+  const errors = graph.diagnostics.filter((d) => d.severity === 'error');
+  const unknownRefErrors = errors.filter((e) => e.message.includes('Unknown reference'));
+
+  assert(unknownRefErrors.length > 0, 'Should have error for unknown reference');
+  const error = unknownRefErrors.find((e) => e.message.includes('UnknownRouter'));
+  assert(error !== undefined, 'Should have error for UnknownRouter');
+  assert(error.source !== undefined, 'Error should have source location');
+  assert(error.message.includes("Unknown reference 'UnknownRouter'"), 'Error message should mention UnknownRouter');
+});
+
+test('inline references still work unchanged', () => {
+  const file = join(fixturesDir, 'amaranthine-mini.prose');
+  const text = readFileSync(file, 'utf-8');
+  const graph = parseFiles([{ file, text }]);
+
+  const universeNodeId = graph.universes.Amaranthine.root;
+
+  // Check Player series has inline references (baseline)
+  const playerNodeId = `${universeNodeId.split(':')[0]}:series:Player`;
+  const playerNode = graph.nodes[playerNodeId];
+  assert(playerNode !== undefined, 'Player series should exist');
+  assert(playerNode.references !== undefined, 'Player should have references');
+  assert(playerNode.references.length === 1, 'Player should have one inline reference');
+  assert(playerNode.references[0].repository === 'amaranthine', 'Inline reference should have repository');
+  assert(playerNode.references[0].paths[0] === '/backends/api/src/routers/players.js', 'Inline reference should have path');
+});
+
+/**
+ * Simple assertion helper
+ * @param {boolean} condition
+ * @param {string} message
+ */
+function assert(condition, message) {
+  if (!condition) {
+    throw new Error(message);
+  }
+}
+