@sprig-and-prose/sprig-universe 0.4.1 → 0.4.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/index.js +30 -0
- package/src/universe/graph.js +1619 -0
- package/src/universe/parser.js +1751 -0
- package/src/universe/scanner.js +240 -0
- package/src/universe/scene-manifest.js +856 -0
- package/src/universe/test-graph.js +157 -0
- package/src/universe/test-parser.js +61 -0
- package/src/universe/test-scanner.js +37 -0
- package/src/universe/universe.prose +169 -0
- package/src/universe/validator.js +862 -0
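
The bulk of this release is the new validator module shown in the diff below. It exports one async entry point, validateScenes(config, scenesManifest, configPath), which walks every scene actor's file and MySQL sources and returns a ValidationReport. As a rough orientation, a driver script might look like the following sketch; the import path and the manifest file name are assumptions for illustration, not something this diff specifies (only the validateScenes signature, the sprig.config.json name mentioned in its hints, and the report shape come from the code below).

// Hypothetical usage sketch -- import path and data file names are assumed, not part of the package diff.
import { readFileSync } from 'fs';
import { resolve } from 'path';
// Assumed deep import; the package's src/index.js may re-export this differently.
import { validateScenes } from '@sprig-and-prose/sprig-universe/src/universe/validator.js';

const configPath = resolve(process.cwd(), 'sprig.config.json');   // config with optional `repositories` and `connections`
const config = JSON.parse(readFileSync(configPath, 'utf-8'));
const scenesManifest = JSON.parse(readFileSync('scenes.manifest.json', 'utf-8')); // assumed manifest file name

const report = await validateScenes(config, scenesManifest, configPath);
console.log(`${report.totalErrors} error(s) in ${report.totalFiles} file(s) across ${report.totalActors} actor source(s)`);
for (const err of report.errors) {
  console.log(`[${err.errorKind}] ${err.actorName}: ${err.message} (${err.occurrence.dataFile})`);
}
process.exitCode = report.totalErrors > 0 ? 1 : 0;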
package/src/universe/validator.js
@@ -0,0 +1,862 @@
/**
 * @fileoverview Scene file source validator
 */

import { readFileSync, existsSync } from 'fs';
import { resolve, dirname, join, relative } from 'path';
import { glob } from 'glob';
import { parse as parseYaml } from 'yaml';
import { validateMysqlSource, closeAllConnections as closeMysqlConnections } from '../validators/mysql/validator.js';

/**
 * @typedef {Object} ValidationError
 * @property {string} actorName - Name of the actor
 * @property {string} sourceKind - "file"
 * @property {string} path - File path that failed (legacy, kept for compatibility)
 * @property {string} message - Error message (legacy, kept for compatibility)
 * @property {Object} [location] - Source location (legacy, kept for compatibility)
 * @property {string} [hint] - Optional hint for fixing the error
 * @property {string} [fieldName] - Field name if applicable, or "<root>" for shape errors
 * @property {string} errorKind - Error kind: enum, typeMismatch, missingRequired, identityDuplicate, shapeMismatch, parseError, fileNotFound, etc.
 * @property {'error'|'warning'|'info'} [severity] - Severity level: 'error' (default), 'warning', or 'info'
 * @property {string|Object} [expected] - Expected value or type
 * @property {string|Object} [actual] - Actual value or type
 * @property {Object} schemaLocation - Where the rule is defined (file, start, end)
 * @property {Object} occurrence - Where the failing data is
 * @property {string} occurrence.dataFile - File path (absolute or repo-relative)
 * @property {number|null} occurrence.recordIndex - Record index (null for shape-level errors)
 * @property {string} [occurrence.dataPath] - Optional JSON-pointer path like "/tools/3/tier"
 */

/**
 * @typedef {Object} ValidationReport
 * @property {number} totalActors - Total actors validated
 * @property {number} totalFiles - Total files validated
 * @property {number} totalErrors - Total errors found
 * @property {ValidationError[]} errors - Array of validation errors
 */

/**
 * Validates a value against a type AST
 * @param {any} value - Value to validate
 * @param {Object} typeAst - Type AST from manifest
 * @param {string} [dataPath] - Optional JSON-pointer path for nested errors
 * @param {Map<string, Object>} [actorMap] - Optional map of actor names to actor definitions for reference validation
 * @returns {{ errorKind: string, expected: string, actual: string, dataPath?: string, fieldLocation?: Object }|null} Error info or null if valid
 */
function validateValueAgainstType(value, typeAst, dataPath = '', actorMap = null) {
  if (!typeAst || typeAst.kind === 'unknown') {
    return null; // Skip unknown types
  }

  switch (typeAst.kind) {
    case 'primitive':
      const primitiveError = validatePrimitive(value, typeAst.name);
      if (primitiveError) {
        return {
          errorKind: 'typeMismatch',
          expected: typeAst.name,
          actual: typeof value,
          dataPath: dataPath || undefined,
        };
      }
      return null;
    case 'optional':
      if (value === null || value === undefined) {
        return null; // Optional values can be null/undefined
      }
      return validateValueAgainstType(value, typeAst.of, dataPath, actorMap);
    case 'array':
      if (!Array.isArray(value)) {
        return {
          errorKind: 'typeMismatch',
          expected: 'array',
          actual: typeof value,
          dataPath: dataPath || undefined,
        };
      }
      for (let i = 0; i < value.length; i++) {
        const elementPath = dataPath ? `${dataPath}/${i}` : `/${i}`;
        const error = validateValueAgainstType(value[i], typeAst.elementType, elementPath, actorMap);
        if (error) {
          return error;
        }
      }
      return null;
    case 'oneOf':
      if (!typeAst.values.includes(value)) {
        return {
          errorKind: 'enum',
          expected: `one of: ${typeAst.values.map((v) => JSON.stringify(v)).join(', ')}`,
          actual: JSON.stringify(value),
          dataPath: dataPath || undefined,
        };
      }
      // Also validate base type
      if (typeAst.valueType === 'string' && typeof value !== 'string') {
        return {
          errorKind: 'typeMismatch',
          expected: 'string',
          actual: typeof value,
          dataPath: dataPath || undefined,
        };
      }
      if (typeAst.valueType === 'number' && typeof value !== 'number') {
        return {
          errorKind: 'typeMismatch',
          expected: 'number',
          actual: typeof value,
          dataPath: dataPath || undefined,
        };
      }
      return null;
    case 'reference':
      // Validate against the referenced actor's type definition
      if (!actorMap || !typeAst.name) {
        return null; // Skip if no actor map or no name
      }

      const referencedActor = actorMap.get(typeAst.name);
      if (!referencedActor || !referencedActor.type || !referencedActor.type.fields) {
        return null; // Skip if referenced actor not found or has no type definition
      }

      // Validate that value is an object
      if (typeof value !== 'object' || value === null || Array.isArray(value)) {
        return {
          errorKind: 'typeMismatch',
          expected: 'object',
          actual: Array.isArray(value) ? 'array' : typeof value,
          dataPath: dataPath || undefined,
        };
      }

      // Validate each field in the referenced actor's type
      for (const field of referencedActor.type.fields) {
        if (field.required && !(field.name in value)) {
          return {
            errorKind: 'missingRequired',
            expected: `field '${field.name}'`,
            actual: 'field missing',
            dataPath: dataPath || undefined,
            fieldLocation: field.location || undefined,
          };
        }

        if (field.name in value) {
          const fieldValue = value[field.name];
          const fieldPath = dataPath ? `${dataPath}/${field.name}` : `/${field.name}`;
          const fieldError = validateValueAgainstType(fieldValue, field.type, fieldPath, actorMap);
          if (fieldError) {
            // If the error doesn't have a fieldLocation but this field does, add it
            if (!fieldError.fieldLocation && field.location) {
              fieldError.fieldLocation = field.location;
            }
            return fieldError;
          }
        }
      }

      return null;
    default:
      return null; // Unknown type kind, skip
  }
}

/**
 * Validates a primitive value
 * @param {any} value - Value to validate
 * @param {string} typeName - Primitive type name
 * @returns {string|null} Error message or null if valid
 */
function validatePrimitive(value, typeName) {
  switch (typeName) {
    case 'integer':
      if (typeof value !== 'number' || !Number.isInteger(value)) {
        return `Expected integer, got ${typeof value}`;
      }
      return null;
    case 'string':
      if (typeof value !== 'string') {
        return `Expected string, got ${typeof value}`;
      }
      return null;
    case 'boolean':
      if (typeof value !== 'boolean') {
        return `Expected boolean, got ${typeof value}`;
      }
      return null;
    case 'number':
    case 'float':
    case 'double':
      if (typeof value !== 'number') {
        return `Expected number, got ${typeof value}`;
      }
      return null;
    default:
      return null; // Unknown primitive, skip validation
  }
}

/**
 * Resolves file paths from glob patterns
 * Paths are relative to repo root, even if they start with /
 * @param {string[]} patterns - Array of glob patterns
 * @param {string} repoRoot - Repository root directory
 * @returns {string[]} Resolved file paths (sorted)
 */
async function resolveFilePaths(patterns, repoRoot) {
  const allFiles = [];
  for (const pattern of patterns) {
    // If pattern starts with /, treat it as relative to repo root (not absolute)
    // This is common in config files where /data means repo-root/data
    const normalizedPattern = pattern.startsWith('/') ? pattern.slice(1) : pattern;
    const resolvedPattern = resolve(repoRoot, normalizedPattern);
    const files = await glob(resolvedPattern, { absolute: true });
    allFiles.push(...files);
  }
  // Sort for deterministic ordering
  return allFiles.sort();
}

/**
 * Parses a file based on format
 * @param {string} filePath - Path to file
 * @param {string} format - File format ('yaml', 'json', etc.)
 * @returns {any} Parsed content
 * @throws {Error} If parsing fails
 */
function parseFile(filePath, format) {
  const content = readFileSync(filePath, 'utf-8');

  switch (format) {
    case 'yaml':
    case 'yml':
      try {
        return parseYaml(content);
      } catch (error) {
        throw new Error(`Failed to parse YAML: ${error.message}`);
      }
    case 'json':
      try {
        return JSON.parse(content);
      } catch (error) {
        throw new Error(`Failed to parse JSON: ${error.message}`);
      }
    default:
      throw new Error(`Unsupported format: ${format}`);
  }
}

/**
 * Validates shape of parsed data
 * @param {any} data - Parsed data
 * @param {string} shape - Expected shape ('list' or 'record')
 * @returns {string|null} Error message or null if valid
 */
function validateShape(data, shape) {
  if (shape === 'list') {
    if (!Array.isArray(data)) {
      return `Shape is 'list' but root is not an array (got ${typeof data})`;
    }
    return null;
  } else if (shape === 'record') {
    if (Array.isArray(data)) {
      return `Shape is 'record' but root is an array; change shape to { list } or change the file`;
    }
    if (typeof data !== 'object' || data === null) {
      return `Shape is 'record' but root is not an object (got ${typeof data})`;
    }
    return null;
  }
  return null; // Unknown shape, skip validation
}

/**
 * Extracts identity field name from identity AST
 * @param {string} identityAst - Raw identity AST string
 * @returns {string|null} Field name or null if not parseable
 */
function extractIdentityField(identityAst) {
  if (!identityAst) return null;
  const trimmed = identityAst.trim();
  // Simple case: single identifier
  if (/^[A-Za-z_][A-Za-z0-9_]*$/.test(trimmed)) {
    return trimmed;
  }
  // For now, only support single-field identities
  return null;
}

/**
 * Validates a file source for an actor
 * @param {Object} actor - Actor from manifest
 * @param {Object} fileSource - File source block from manifest
 * @param {string} repoRoot - Repository root directory (fallback if no repository specified)
 * @param {Object} repositories - Repository configuration from config
 * @param {Map<string, Object>} [actorMap] - Optional map of actor names to actor definitions for reference validation
 * @returns {ValidationError[]} Array of validation errors
 */
async function validateFileSource(actor, fileSource, repoRoot, repositories = {}, actorMap = null) {
  const errors = [];

  // Extract source configuration
  const paths = fileSource.paths || [];
  const format = fileSource.format || 'yaml';
  const shape = fileSource.shape || 'list';
  const repository = fileSource.repository;
  const sourceLocation = fileSource.location;

  if (paths.length === 0) {
    errors.push({
      actorName: actor.name,
      sourceKind: 'file',
      path: '<no paths specified>',
      message: 'File source has no paths specified',
      location: sourceLocation,
      fieldName: '<root>',
      errorKind: 'configError',
      expected: 'paths array with at least one pattern',
      actual: 'empty or missing paths',
      schemaLocation: sourceLocation || actor.location,
      occurrence: {
        dataFile: '<no paths specified>',
        recordIndex: null,
      },
    });
    return errors;
  }

  // Determine base path: use repository localPath if specified, otherwise use repoRoot
  let basePath = repoRoot;
  if (repository) {
    const repoConfig = repositories[repository];
    if (repoConfig && repoConfig.localPath) {
      basePath = repoConfig.localPath;
    } else {
      errors.push({
        actorName: actor.name,
        sourceKind: 'file',
        path: paths.join(', '),
        message: `Repository '${repository}' not found in config or missing localPath`,
        location: sourceLocation,
        fieldName: '<root>',
        errorKind: 'configError',
        expected: `repository '${repository}' with localPath`,
        actual: 'repository not found or missing localPath',
        schemaLocation: sourceLocation || actor.location,
        occurrence: {
          dataFile: paths.join(', '),
          recordIndex: null,
        },
        hint: `Add '${repository}' to repositories section in sprig.config.json with a localPath`,
      });
      return errors;
    }
  }

  // Resolve file paths
  let filePaths;
  try {
    filePaths = await resolveFilePaths(paths, basePath);
  } catch (error) {
    errors.push({
      actorName: actor.name,
      sourceKind: 'file',
      path: paths.join(', '),
      message: `Failed to resolve file paths: ${error.message}`,
      location: sourceLocation,
      fieldName: '<root>',
      errorKind: 'configError',
      expected: 'valid glob patterns',
      actual: `error: ${error.message}`,
      schemaLocation: sourceLocation || actor.location,
      occurrence: {
        dataFile: paths.join(', '),
        recordIndex: null,
      },
    });
    return errors;
  }

  if (filePaths.length === 0) {
    errors.push({
      actorName: actor.name,
      sourceKind: 'file',
      path: paths.join(', '),
      message: `No files found matching paths: ${paths.join(', ')}`,
      location: sourceLocation,
      fieldName: '<root>',
      errorKind: 'fileNotFound',
      expected: 'at least one matching file',
      actual: '0 files found',
      schemaLocation: sourceLocation || actor.location,
      occurrence: {
        dataFile: paths.join(', '),
        recordIndex: null,
      },
      hint: 'Check that the glob patterns match existing files',
    });
    return errors;
  }

  // Get actor type fields
  const typeFields = actor.type?.fields || [];
  const fieldsByName = new Map();
  for (const field of typeFields) {
    fieldsByName.set(field.name, field);
  }

  // Extract identity field
  const identityField = actor.identity
    ? extractIdentityField(actor.identity.ast)
    : null;

  // Track identity values for uniqueness check
  const identityValues = new Map(); // value -> { file, recordIndex }

  // Try to get repo-relative path for display
  let repoRelativeBase = repoRoot;
  if (repository) {
    const repoConfig = repositories[repository];
    if (repoConfig && repoConfig.localPath) {
      repoRelativeBase = repoConfig.localPath;
    }
  }

  // Validate each file
  for (const filePath of filePaths) {
    // Try to make path repo-relative for display
    let displayPath = filePath;
    try {
      displayPath = relative(repoRelativeBase, filePath);
    } catch {
      // If relative fails, use absolute path
      displayPath = filePath;
    }

    if (!existsSync(filePath)) {
      errors.push({
        actorName: actor.name,
        sourceKind: 'file',
        path: filePath,
        message: `File does not exist`,
        location: sourceLocation,
        fieldName: '<root>',
        errorKind: 'fileNotFound',
        expected: 'file exists',
        actual: 'file not found',
        schemaLocation: sourceLocation || actor.location,
        occurrence: {
          dataFile: displayPath,
          recordIndex: null,
        },
      });
      continue;
    }

    // Parse file
    let data;
    try {
      data = parseFile(filePath, format);
    } catch (error) {
      errors.push({
        actorName: actor.name,
        sourceKind: 'file',
        path: filePath,
        message: `Failed to parse ${format} file: ${error.message}`,
        location: sourceLocation,
        fieldName: '<root>',
        errorKind: 'parseError',
        expected: `valid ${format} file`,
        actual: `parse error: ${error.message}`,
        schemaLocation: sourceLocation || actor.location,
        occurrence: {
          dataFile: displayPath,
          recordIndex: null,
        },
      });
      continue;
    }

    // Validate shape
    const shapeError = validateShape(data, shape);
    if (shapeError) {
      errors.push({
        actorName: actor.name,
        sourceKind: 'file',
        path: filePath,
        message: shapeError,
        location: sourceLocation,
        fieldName: '<root>',
        errorKind: 'shapeMismatch',
        expected: shape === 'list' ? 'array' : 'object',
        actual: Array.isArray(data) ? 'array' : typeof data,
        schemaLocation: sourceLocation || actor.location,
        occurrence: {
          dataFile: displayPath,
          recordIndex: null,
        },
        hint: shape === 'list'
          ? "Change shape to { record } or ensure the file contains an array"
          : "Change shape to { list } or ensure the file contains a single object",
      });
      continue;
    }

    // Get records based on shape
    const records = shape === 'list' ? data : [data];

    // Validate each record
    for (let recordIndex = 0; recordIndex < records.length; recordIndex++) {
      const record = records[recordIndex];

      if (typeof record !== 'object' || record === null || Array.isArray(record)) {
        errors.push({
          actorName: actor.name,
          sourceKind: 'file',
          path: filePath,
          message: `Record at index ${recordIndex} is not an object`,
          location: fieldsByName.get(typeFields[0]?.name)?.location || sourceLocation,
          fieldName: '<root>',
          errorKind: 'typeMismatch',
          expected: 'object',
          actual: Array.isArray(record) ? 'array' : typeof record,
          schemaLocation: sourceLocation || actor.location,
          occurrence: {
            dataFile: displayPath,
            recordIndex,
          },
        });
        continue;
      }

      // Validate required fields
      for (const field of typeFields) {
        if (field.required && !(field.name in record)) {
          errors.push({
            actorName: actor.name,
            sourceKind: 'file',
            path: filePath,
            message: `Missing required field '${field.name}' in record at index ${recordIndex}`,
            location: field.location || sourceLocation,
            fieldName: field.name,
            errorKind: 'missingRequired',
            expected: `field '${field.name}' (${field.typeExpr || 'required'})`,
            actual: 'field missing',
            schemaLocation: field.location || sourceLocation || actor.location,
            occurrence: {
              dataFile: displayPath,
              recordIndex,
            },
          });
        }

        // Validate field type if present
        if (field.name in record) {
          const value = record[field.name];

          // Handle nested object types
          if (field.type && field.type.kind === 'object' && field.type.nested) {
            if (typeof value !== 'object' || value === null || Array.isArray(value)) {
              errors.push({
                actorName: actor.name,
                sourceKind: 'file',
                path: filePath,
                message: `Field '${field.name}' in record at index ${recordIndex}: Expected object, got ${Array.isArray(value) ? 'array' : typeof value}`,
                location: field.location || sourceLocation,
                fieldName: field.name,
                errorKind: 'typeMismatch',
                expected: 'object',
                actual: Array.isArray(value) ? 'array' : typeof value,
                schemaLocation: field.location || sourceLocation || actor.location,
                occurrence: {
                  dataFile: displayPath,
                  recordIndex,
                },
              });
            } else {
              // Validate nested object fields if they exist
              if (field.nestedFields && Array.isArray(field.nestedFields)) {
                const nestedObject = value;

                // Validate required nested fields
                for (const nestedField of field.nestedFields) {
                  if (nestedField.required && !(nestedField.name in nestedObject)) {
                    errors.push({
                      actorName: actor.name,
                      sourceKind: 'file',
                      path: filePath,
                      message: `Field '${field.name}.${nestedField.name}' in record at index ${recordIndex}: Missing required nested field`,
                      location: nestedField.location || field.location || sourceLocation,
                      fieldName: `${field.name}.${nestedField.name}`,
                      errorKind: 'missingRequired',
                      expected: `field '${nestedField.name}' (${nestedField.typeExpr || 'required'})`,
                      actual: 'field missing',
                      schemaLocation: nestedField.location || field.location || sourceLocation || actor.location,
                      occurrence: {
                        dataFile: displayPath,
                        recordIndex,
                        dataPath: `/${field.name}`,
                      },
                    });
                  }

                  // Validate nested field type if present
                  if (nestedField.name in nestedObject) {
                    const nestedValue = nestedObject[nestedField.name];

                    // Handle nested objects recursively
                    if (nestedField.type && nestedField.type.kind === 'object' && nestedField.type.nested) {
                      if (typeof nestedValue !== 'object' || nestedValue === null || Array.isArray(nestedValue)) {
                        errors.push({
                          actorName: actor.name,
                          sourceKind: 'file',
                          path: filePath,
                          message: `Field '${field.name}.${nestedField.name}' in record at index ${recordIndex}: Expected object, got ${Array.isArray(nestedValue) ? 'array' : typeof nestedValue}`,
                          location: nestedField.location || field.location || sourceLocation,
                          fieldName: `${field.name}.${nestedField.name}`,
                          errorKind: 'typeMismatch',
                          expected: 'object',
                          actual: Array.isArray(nestedValue) ? 'array' : typeof nestedValue,
                          schemaLocation: nestedField.location || field.location || sourceLocation || actor.location,
                          occurrence: {
                            dataFile: displayPath,
                            recordIndex,
                            dataPath: `/${field.name}/${nestedField.name}`,
                          },
                        });
                      }
                      // Could recurse further if needed, but for now stop at 2 levels
                    } else {
                      // Standard type validation for nested field
                      const typeError = validateValueAgainstType(nestedValue, nestedField.type, `/${field.name}/${nestedField.name}`, actorMap);
                      if (typeError) {
                        errors.push({
                          actorName: actor.name,
                          sourceKind: 'file',
                          path: filePath,
                          message: `Field '${field.name}.${nestedField.name}' in record at index ${recordIndex}: ${typeError.errorKind === 'enum' ? `Value "${nestedValue}" is not one of: ${typeError.expected}` : `${typeError.expected} expected, got ${typeError.actual}`}`,
                          location: nestedField.location || field.location || sourceLocation,
                          fieldName: `${field.name}.${nestedField.name}`,
                          errorKind: typeError.errorKind,
                          expected: typeError.expected,
                          actual: typeError.actual,
                          schemaLocation: nestedField.location || field.location || sourceLocation || actor.location,
                          occurrence: {
                            dataFile: displayPath,
                            recordIndex,
                            dataPath: typeError.dataPath || `/${field.name}/${nestedField.name}`,
                          },
                        });
                      }
                    }
                  }
                }
              }
            }
          } else {
            // Standard type validation
            const typeError = validateValueAgainstType(value, field.type, '', actorMap);
            if (typeError) {
              // Use the referenced field's location if available (for reference type validation)
              const errorFieldLocation = typeError.fieldLocation || field.location;
              errors.push({
                actorName: actor.name,
                sourceKind: 'file',
                path: filePath,
                message: `Field '${field.name}' in record at index ${recordIndex}: ${typeError.errorKind === 'enum' ? `Value "${value}" is not one of: ${typeError.expected}` : `${typeError.expected} expected, got ${typeError.actual}`}`,
                location: errorFieldLocation || sourceLocation,
                fieldName: field.name,
                errorKind: typeError.errorKind,
                expected: typeError.expected,
                actual: typeError.actual,
                schemaLocation: errorFieldLocation || sourceLocation || actor.location,
                occurrence: {
                  dataFile: displayPath,
                  recordIndex,
                  dataPath: typeError.dataPath,
                },
              });
            }
          }
        }
      }

      // Check identity field
      if (identityField) {
        const identityFieldDef = fieldsByName.get(identityField);
        if (!identityFieldDef) {
          // Identity field doesn't exist in type - this is a configuration error
          // but we'll skip it for now as per requirements
        } else {
          // Identity field must be present (even if type is optional)
          if (!(identityField in record)) {
            errors.push({
              actorName: actor.name,
              sourceKind: 'file',
              path: filePath,
              message: `Missing identity field '${identityField}' in record at index ${recordIndex} (identity fields are required)`,
              location: identityFieldDef.location || sourceLocation,
              fieldName: identityField,
              errorKind: 'missingRequired',
              expected: `identity field '${identityField}'`,
              actual: 'field missing',
              schemaLocation: actor.identity?.location || identityFieldDef.location || actor.location,
              occurrence: {
                dataFile: displayPath,
                recordIndex,
              },
            });
          } else {
            const identityValue = record[identityField];
            // Check uniqueness
            const identityKey = String(identityValue);
            if (identityValues.has(identityKey)) {
              const existing = identityValues.get(identityKey);
              // Use relative paths for better readability
              let relativeExisting;
              try {
                relativeExisting = relative(repoRelativeBase, existing.file);
              } catch {
                // If relative fails, use absolute path
                relativeExisting = existing.file;
              }
              errors.push({
                actorName: actor.name,
                sourceKind: 'file',
                path: filePath,
                message: `Duplicate identity value '${identityValue}' for field '${identityField}' (already exists in ${relativeExisting} at index ${existing.recordIndex})`,
                location: identityFieldDef.location || sourceLocation,
                fieldName: identityField,
                errorKind: 'identityDuplicate',
                expected: 'unique value',
                actual: `duplicate of value '${identityValue}'`,
                schemaLocation: actor.identity?.location || identityFieldDef.location || actor.location,
                occurrence: {
                  dataFile: displayPath,
                  recordIndex,
                },
                hint: 'Identity fields must be unique across all files for this actor',
              });
            } else {
              identityValues.set(identityKey, { file: filePath, recordIndex });
            }
          }
        }
      }
    }
  }

  return errors;
}

/**
 * Validates scenes from a manifest
 * @param {Object} config - Config object from sprig.config.json
 * @param {Object} scenesManifest - Scenes manifest JSON
 * @param {string} configPath - Path to config file (for resolving repo root)
 * @returns {ValidationReport}
 */
export async function validateScenes(config, scenesManifest, configPath) {
  const repoRoot = dirname(configPath);
  const repositories = config.repositories || {};
  const errors = [];
  let totalActors = 0;
  let totalFiles = 0;

  if (!scenesManifest.scenes || !Array.isArray(scenesManifest.scenes)) {
    return {
      totalActors: 0,
      totalFiles: 0,
      totalErrors: 0,
      errors: [],
    };
  }

  // Collect all file paths we'll validate (for counting)
  const filePathsSet = new Set();

  const connections = config.connections || {};

  // Build a map of actor names to actor definitions for reference validation
  const actorMap = new Map();
  for (const scene of scenesManifest.scenes) {
    if (!scene.actors || !Array.isArray(scene.actors)) {
      continue;
    }
    for (const actor of scene.actors) {
      actorMap.set(actor.name, actor);
    }
  }

  for (const scene of scenesManifest.scenes) {
    if (!scene.actors || !Array.isArray(scene.actors)) {
      continue;
    }

    for (const actor of scene.actors) {
      // Validate file sources
      if (actor.sources && actor.sources.file) {
        totalActors++;

        // Handle both single file source and array of file sources
        const fileSources = Array.isArray(actor.sources.file)
          ? actor.sources.file
          : [actor.sources.file];

        for (const fileSource of fileSources) {
          const sourcePaths = fileSource.paths || [];
          if (sourcePaths.length > 0) {
            // Determine base path for counting
            let basePath = repoRoot;
            if (fileSource.repository) {
              const repoConfig = repositories[fileSource.repository];
              if (repoConfig && repoConfig.localPath) {
                basePath = repoConfig.localPath;
              }
            }

            // Resolve paths to count files
            try {
              const resolvedPaths = await resolveFilePaths(sourcePaths, basePath);
              resolvedPaths.forEach((p) => filePathsSet.add(p));
              totalFiles += resolvedPaths.length;
            } catch {
              // Will be caught in validation
            }
          }

          const sourceErrors = await validateFileSource(actor, fileSource, repoRoot, repositories, actorMap);
          errors.push(...sourceErrors);
        }
      }

      // Validate MySQL sources
      if (actor.sources && actor.sources.mysql) {
        totalActors++;

        // Handle both single mysql source and array of mysql sources
        const mysqlSources = Array.isArray(actor.sources.mysql)
          ? actor.sources.mysql
          : [actor.sources.mysql];

        for (const mysqlSource of mysqlSources) {
          const sourceErrors = await validateMysqlSource(actor, mysqlSource, connections);
          errors.push(...sourceErrors);
        }
      }
    }
  }

  // Close all MySQL connections after validation
  await closeMysqlConnections();

  return {
    totalActors,
    totalFiles: filePathsSet.size,
    totalErrors: errors.length,
    errors,
  };
}
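
For orientation, every errors.push(...) call above produces an object matching the ValidationError typedef at the top of the file. An illustrative instance (actor and field names invented) for a missing required field would look roughly like the sketch below; nested and type errors additionally carry an occurrence.dataPath JSON pointer such as "/tools/3/tier", as documented in the typedef.

// Illustrative only -- values are invented; keys mirror the ValidationError typedef and
// the errors.push(...) calls in validateFileSource above.
const exampleError = {
  actorName: 'tool',
  sourceKind: 'file',
  path: '/abs/checkout/data/tools.yaml',   // legacy, kept for compatibility
  message: "Missing required field 'tier' in record at index 3",
  location: { /* legacy source location */ },
  fieldName: 'tier',
  errorKind: 'missingRequired',
  expected: "field 'tier' (required)",
  actual: 'field missing',
  schemaLocation: { /* file, start, end of the rule in the scene file */ },
  occurrence: {
    dataFile: 'data/tools.yaml',           // repo-relative where possible
    recordIndex: 3,
  },
};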