@unrdf/project-engine 5.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +53 -0
  3. package/package.json +58 -0
  4. package/src/api-contract-validator.mjs +711 -0
  5. package/src/auto-test-generator.mjs +444 -0
  6. package/src/autonomic-mapek.mjs +511 -0
  7. package/src/capabilities-manifest.mjs +125 -0
  8. package/src/code-complexity-js.mjs +368 -0
  9. package/src/dependency-graph.mjs +276 -0
  10. package/src/doc-drift-checker.mjs +172 -0
  11. package/src/doc-generator.mjs +229 -0
  12. package/src/domain-infer.mjs +966 -0
  13. package/src/drift-snapshot.mjs +775 -0
  14. package/src/file-roles.mjs +94 -0
  15. package/src/fs-scan.mjs +305 -0
  16. package/src/gap-finder.mjs +376 -0
  17. package/src/golden-structure.mjs +149 -0
  18. package/src/hotspot-analyzer.mjs +412 -0
  19. package/src/index.mjs +151 -0
  20. package/src/initialize.mjs +957 -0
  21. package/src/lens/project-structure.mjs +74 -0
  22. package/src/mapek-orchestration.mjs +665 -0
  23. package/src/materialize-apply.mjs +505 -0
  24. package/src/materialize-plan.mjs +422 -0
  25. package/src/materialize.mjs +137 -0
  26. package/src/policy-derivation.mjs +869 -0
  27. package/src/project-config.mjs +142 -0
  28. package/src/project-diff.mjs +28 -0
  29. package/src/project-engine/build-utils.mjs +237 -0
  30. package/src/project-engine/code-analyzer.mjs +248 -0
  31. package/src/project-engine/doc-generator.mjs +407 -0
  32. package/src/project-engine/infrastructure.mjs +213 -0
  33. package/src/project-engine/metrics.mjs +146 -0
  34. package/src/project-model.mjs +111 -0
  35. package/src/project-report.mjs +348 -0
  36. package/src/refactoring-guide.mjs +242 -0
  37. package/src/stack-detect.mjs +102 -0
  38. package/src/stack-linter.mjs +213 -0
  39. package/src/template-infer.mjs +674 -0
  40. package/src/type-auditor.mjs +609 -0
@@ -0,0 +1,422 @@
1
+ /**
2
+ * @file Materialization planner - convert ontology + templates to file write plans
3
+ * @module project-engine/materialize-plan
4
+ */
5
+
6
+ import { z } from 'zod';
7
+ import { createHash } from 'crypto';
8
+ import { UnrdfDataFactory as DataFactory } from '@unrdf/core/rdf/n3-justified-only';
9
+
10
+ const { namedNode } = DataFactory;
11
+
12
+ /**
13
+ * @typedef {import('n3').Store} Store
14
+ */
15
+
16
+ /**
17
+ * @typedef {Object} WriteOperation
18
+ * @property {string} path - Output file path
19
+ * @property {string} content - File content to write
20
+ * @property {string} hash - SHA256 hash of content
21
+ * @property {string} templateIri - Source template IRI
22
+ * @property {string} entityIri - Domain entity IRI
23
+ * @property {string} entityType - Type of domain entity
24
+ */
25
+
26
+ /**
27
+ * @typedef {Object} UpdateOperation
28
+ * @property {string} path - File path to update
29
+ * @property {string} content - New content
30
+ * @property {string} oldHash - Hash of existing content
31
+ * @property {string} newHash - Hash of new content
32
+ * @property {string} templateIri - Source template IRI
33
+ * @property {string} entityIri - Domain entity IRI
34
+ */
35
+
36
+ /**
37
+ * @typedef {Object} DeleteOperation
38
+ * @property {string} path - File path to delete
39
+ * @property {string} hash - Hash of content being deleted
40
+ * @property {string} reason - Reason for deletion
41
+ */
42
+
43
+ /**
44
+ * @typedef {Object} MappingRecord
45
+ * @property {string} templateIri - Template that generated this file
46
+ * @property {string} entityIri - Entity used for substitution
47
+ * @property {string} outputPath - Generated file path
48
+ * @property {string} contentHash - Hash of generated content
49
+ */
50
+
51
+ /**
52
+ * @typedef {Object} MaterializationPlan
53
+ * @property {WriteOperation[]} writes - New files to create
54
+ * @property {UpdateOperation[]} updates - Existing files to update
55
+ * @property {DeleteOperation[]} deletes - Files to remove
56
+ */
57
+
58
+ /**
59
+ * @typedef {Object} MaterializationReceipt
60
+ * @property {string} ontologyHash - Hash of input ontology
61
+ * @property {string} templateHash - Hash of template graph
62
+ * @property {string} planHash - Hash of the plan
63
+ * @property {MappingRecord[]} mappings - Template-to-output mappings
64
+ * @property {string} timestamp - ISO timestamp
65
+ */
66
+
67
+ /**
68
+ * @typedef {Object} PlanResult
69
+ * @property {MaterializationPlan} plan
70
+ * @property {MaterializationReceipt} receipt
71
+ */
72
+
73
/**
 * Zod schema describing one template node as extracted from the template
 * graph (extractTemplates builds plain objects of exactly this shape).
 * NOTE(review): underscore prefix marks it as unused — extractTemplates does
 * not currently validate its results against this schema; confirm whether
 * validation was intended.
 */
const _TemplateNodeSchema = z.object({
  iri: z.string(),
  targetsClass: z.string(),
  outputPattern: z.string(),
  content: z.string(),
  extension: z.string().optional(),
});
80
+
81
/**
 * Options accepted by planMaterialization.
 * - outputRoot: base directory prefixed to generated paths ('.' means no prefix)
 * - dryRun: declared here but not consulted anywhere in this module's planning
 * - existingFiles: map of output path -> content hash; used to classify each
 *   generated file as a new write (path absent), an update (hash differs),
 *   or a no-op (hash matches)
 */
const PlanOptionsSchema = z.object({
  outputRoot: z.string().default('.'),
  dryRun: z.boolean().default(false),
  existingFiles: z.record(z.string(), z.string()).default({}),
});
86
+
87
/**
 * Variable substitution patterns for templates.
 *
 * Maps each literal placeholder to a transform applied to the entity name:
 * - '{{entity}}'       -> name unchanged
 * - '{{Entity}}'       -> first character upper-cased ("user" -> "User")
 * - '{{ENTITY}}'       -> fully upper-cased
 * - '{{entity_snake}}' -> snake_case ("UserProfile" -> "user_profile")
 * - '{{entity-kebab}}' -> kebab-case ("UserProfile" -> "user-profile")
 *
 * NOTE(review): the snake/kebab transforms insert a separator before every
 * capital letter, so acronyms expand ("HTTPServer" -> "h_t_t_p_server") —
 * confirm this is acceptable for the expected entity names.
 *
 * @type {Record<string, (name: string) => string>}
 */
const VARIABLE_PATTERNS = {
  '{{entity}}': name => name,
  '{{Entity}}': name => name.charAt(0).toUpperCase() + name.slice(1),
  '{{ENTITY}}': name => name.toUpperCase(),
  '{{entity_snake}}': name =>
    name
      .replace(/([A-Z])/g, '_$1')
      .toLowerCase()
      .replace(/^_/, ''),
  '{{entity-kebab}}': name =>
    name
      .replace(/([A-Z])/g, '-$1')
      .toLowerCase()
      .replace(/^-/, ''),
};
106
+
107
/**
 * Extract domain entities from an ontology store.
 *
 * Collects every rdf:type assertion whose subject is a named node and whose
 * type lies outside the RDF/RDFS vocabularies, yielding the subject IRI, its
 * type IRI, and a display label (rdfs:label when present, otherwise the
 * IRI's local name).
 *
 * @param {Store} store - Ontology store
 * @returns {Array<{iri: string, type: string, label: string}>}
 */
function extractDomainEntities(store) {
  const RDF_TYPE = namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type');
  const RDFS_LABEL = namedNode('http://www.w3.org/2000/01/rdf-schema#label');

  const entities = [];
  for (const quad of store.getQuads(null, RDF_TYPE, null, null)) {
    const iri = quad.subject.value;
    const type = quad.object.value;

    // Blank-node subjects and RDF/RDFS vocabulary types are not domain entities.
    if (iri.startsWith('_:')) continue;
    if (type.includes('rdf-syntax-ns') || type.includes('rdf-schema')) continue;

    // Prefer an explicit rdfs:label; fall back to the IRI's local name.
    const labels = store.getQuads(quad.subject, RDFS_LABEL, null, null);
    const label = labels.length ? labels[0].object.value : extractLocalName(iri);

    entities.push({ iri, type, label });
  }

  return entities;
}
141
+
142
/**
 * Extract templates from the template graph.
 *
 * A template node must carry targetsClass, outputPattern, and content
 * properties to be included; extension is optional. Only the first value
 * of each property is used.
 *
 * @param {Store} store - Template graph store
 * @returns {Array<z.infer<typeof _TemplateNodeSchema>>}
 */
function extractTemplates(store) {
  const RDF_TYPE = namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type');
  const TEMPLATE_TYPE = namedNode('http://example.org/unrdf/template#Template');
  const TARGETS_CLASS = namedNode('http://example.org/unrdf/template#targetsClass');
  const OUTPUT_PATTERN = namedNode('http://example.org/unrdf/template#outputPattern');
  const CONTENT = namedNode('http://example.org/unrdf/template#content');
  const EXTENSION = namedNode('http://example.org/unrdf/template#extension');

  // First object value for (subject, predicate), or undefined when absent.
  const firstValue = (subject, predicate) => {
    const matches = store.getQuads(subject, predicate, null, null);
    return matches.length > 0 ? matches[0].object.value : undefined;
  };

  const templates = [];
  for (const { subject } of store.getQuads(null, RDF_TYPE, TEMPLATE_TYPE, null)) {
    const targetsClass = firstValue(subject, TARGETS_CLASS);
    const outputPattern = firstValue(subject, OUTPUT_PATTERN);
    const content = firstValue(subject, CONTENT);

    // Incomplete template definitions are skipped silently.
    if (targetsClass === undefined || outputPattern === undefined || content === undefined) {
      continue;
    }

    templates.push({
      iri: subject.value,
      targetsClass,
      outputPattern,
      content,
      extension: firstValue(subject, EXTENSION),
    });
  }

  return templates;
}
182
+
183
/**
 * Extract the local name from an IRI.
 *
 * The fragment after the last '#' wins; failing that, the segment after the
 * last '/'; failing both, the IRI itself is returned unchanged.
 *
 * @param {string} iri
 * @returns {string}
 */
function extractLocalName(iri) {
  for (const separator of ['#', '/']) {
    const idx = iri.lastIndexOf(separator);
    if (idx !== -1) {
      return iri.slice(idx + 1);
    }
  }
  return iri;
}
196
+
197
/**
 * Substitute variables in a pattern string.
 *
 * Applies every VARIABLE_PATTERNS transform to entityName and replaces all
 * occurrences of the corresponding placeholder in the pattern.
 *
 * @param {string} pattern - Pattern with {{entity}}-style placeholders
 * @param {string} entityName - Entity name to substitute
 * @returns {string}
 */
function substituteVariables(pattern, entityName) {
  let result = pattern;

  for (const [placeholder, transform] of Object.entries(VARIABLE_PATTERNS)) {
    const matcher = new RegExp(placeholder.replace(/[{}]/g, '\\$&'), 'g');
    // FIX: use a replacer function so '$'-sequences in the substituted value
    // ('$&', '$1', "$'", ...) are inserted literally. Passing the value as a
    // string replacement lets String.prototype.replace interpret them as
    // replacement patterns, corrupting output for entity names containing '$'.
    const value = transform(entityName);
    result = result.replace(matcher, () => value);
  }

  return result;
}
216
+
217
/**
 * Compute the SHA-256 hash of a string, as lowercase hex.
 *
 * @param {string} content
 * @returns {string} 64-character hex digest
 */
function hashContent(content) {
  const hasher = createHash('sha256');
  hasher.update(content);
  return hasher.digest('hex');
}
226
+
227
/**
 * Fingerprint an N3 store cheaply: hash the quad count plus at most the
 * first 10 quads, rather than serializing the whole graph.
 *
 * NOTE: this is a deliberate sampling fingerprint — stores differing only
 * beyond the first 10 quads (with equal size) hash identically.
 *
 * @param {Store} store
 * @returns {string} first 16 hex characters of the SHA-256 digest
 */
function hashStore(store) {
  const digest = createHash('sha256');
  digest.update(`size:${store.size || 0}`);

  // Sample at most the first 10 quads in store order.
  for (const quad of store.getQuads(null, null, null, null).slice(0, 10)) {
    digest.update([quad.subject.value, quad.predicate.value, quad.object.value].join('|'));
  }

  return digest.digest('hex').slice(0, 16);
}
247
+
248
/**
 * Plan materialization from ontology + templates.
 *
 * For each domain entity in ontologyStore:
 *   For each template in templateGraph whose targetsClass matches the
 *   entity's type: substitute name variables into the output pattern and
 *   template content, then classify the result against existingFiles —
 *   absent path => write, differing hash => update, equal hash => no-op.
 *
 * Deletes are never produced by this planner (the array stays empty).
 *
 * @param {Store} ontologyStore - Domain ontology with entities
 * @param {Store} templateGraph - Template definitions
 * @param {Object} [options] - Planning options
 * @param {string} [options.outputRoot] - Base directory for outputs
 * @param {boolean} [options.dryRun] - Accepted but not consulted during planning
 * @param {Record<string, string>} [options.existingFiles] - Map of path -> content hash
 * @returns {PlanResult}
 */
export function planMaterialization(ontologyStore, templateGraph, options = {}) {
  const opts = PlanOptionsSchema.parse(options);

  const entities = extractDomainEntities(ontologyStore);
  const templates = extractTemplates(templateGraph);

  /** @type {WriteOperation[]} */
  const writes = [];
  /** @type {UpdateOperation[]} */
  const updates = [];
  /** @type {DeleteOperation[]} */
  const deletes = [];
  /** @type {MappingRecord[]} */
  const mappings = [];

  for (const entity of entities) {
    for (const template of templates) {
      // Check if template targets this entity type
      if (template.targetsClass !== entity.type) {
        continue;
      }

      const entityName = entity.label;
      const outputPath = substituteVariables(template.outputPattern, entityName);
      const fullPath = opts.outputRoot === '.' ? outputPath : `${opts.outputRoot}/${outputPath}`;

      // Substitute content variables
      const content = substituteVariables(template.content, entityName);
      const contentHash = hashContent(content);

      // Record provenance even for unchanged files.
      mappings.push({
        templateIri: template.iri,
        entityIri: entity.iri,
        outputPath: fullPath,
        contentHash,
      });

      const existingHash = opts.existingFiles[fullPath];

      if (existingHash === undefined) {
        // New file - add to writes
        writes.push({
          path: fullPath,
          content,
          hash: contentHash,
          templateIri: template.iri,
          entityIri: entity.iri,
          entityType: entity.type,
        });
      } else if (existingHash !== contentHash) {
        // File changed - add to updates
        updates.push({
          path: fullPath,
          content,
          oldHash: existingHash,
          newHash: contentHash,
          templateIri: template.iri,
          entityIri: entity.iri,
        });
      }
      // If hashes match, file is unchanged - no action needed
    }
  }

  const plan = { writes, updates, deletes };

  const ontologyHash = hashStore(ontologyStore);
  const templateHash = hashStore(templateGraph);

  // FIX: hash the plan's actual contents (paths + content hashes), not just
  // the op counts. Previously any two plans with equal write/update/delete
  // counts produced the same planHash, defeating the receipt's provenance role.
  const planHash = hashContent(
    JSON.stringify({
      writes: writes.map(w => ({ path: w.path, hash: w.hash })),
      updates: updates.map(u => ({ path: u.path, oldHash: u.oldHash, newHash: u.newHash })),
      deletes: deletes.map(d => ({ path: d.path, hash: d.hash })),
    })
  );

  const receipt = {
    ontologyHash,
    templateHash,
    planHash: planHash.substring(0, 16),
    mappings,
    timestamp: new Date().toISOString(),
  };

  return { plan, receipt };
}
354
+
355
/**
 * Validate a materialization plan.
 *
 * Checks:
 * - No duplicate paths across writes, updates, and deletes
 * - All paths are relative (no POSIX '/' or Windows 'C:\' absolute paths)
 * - No path traversal ('..' as a path segment)
 *
 * @param {MaterializationPlan} plan
 * @returns {{valid: boolean, errors: string[]}}
 */
export function validatePlan(plan) {
  const errors = [];
  const seenPaths = new Set();

  // FIX: deletes carry paths too and were previously never checked; a
  // traversal or absolute path in a delete op is just as dangerous.
  const allOps = [...plan.writes, ...plan.updates, ...plan.deletes];

  for (const op of allOps) {
    // Check for duplicates (including a write/update colliding with a delete)
    if (seenPaths.has(op.path)) {
      errors.push(`Duplicate output path: ${op.path}`);
    }
    seenPaths.add(op.path);

    // Check for absolute paths (POSIX root or Windows drive letter)
    if (op.path.startsWith('/') || /^[A-Za-z]:[\\/]/.test(op.path)) {
      errors.push(`Absolute path not allowed: ${op.path}`);
    }

    // FIX: check '..' segment-wise so filenames like 'notes..old.txt' are not
    // false positives, while 'a/../b' and '..\\x' are still rejected.
    if (op.path.split(/[\\/]/).includes('..')) {
      errors.push(`Path traversal not allowed: ${op.path}`);
    }
  }

  return {
    valid: errors.length === 0,
    errors,
  };
}
395
+
396
/**
 * Create an empty plan with no pending operations.
 *
 * @returns {MaterializationPlan}
 */
export function createEmptyPlan() {
  const writes = [];
  const updates = [];
  const deletes = [];
  return { writes, updates, deletes };
}
408
+
409
/**
 * Merge two plans into a new plan; inputs are not mutated and operation
 * order is plan1's ops followed by plan2's.
 *
 * @param {MaterializationPlan} plan1
 * @param {MaterializationPlan} plan2
 * @returns {MaterializationPlan}
 */
export function mergePlans(plan1, plan2) {
  const writes = plan1.writes.concat(plan2.writes);
  const updates = plan1.updates.concat(plan2.updates);
  const deletes = plan1.deletes.concat(plan2.deletes);
  return { writes, updates, deletes };
}
@@ -0,0 +1,137 @@
1
+ /**
2
+ * @file Artifact materialization - plan and receipt for generation
3
+ * @module project-engine/materialize
4
+ */
5
+
6
+ import { z } from 'zod';
7
+ import { createHash } from 'crypto';
8
+
9
/**
 * Input schema for materializeArtifacts.
 * - ontologyStore: any object (passthrough) — expected to expose .size and
 *   .getQuads(...) like an N3 Store, but not structurally validated here
 * - templateConfig: accepted but not read anywhere in this module
 * - options.dryRun: only recorded on the receipt; no writes happen either way
 * - options.outputRoot: accepted but not read anywhere in this module
 */
const MaterializeOptionsSchema = z.object({
  ontologyStore: z.object({}).passthrough(),
  templateConfig: z.record(z.string(), z.any()).optional(),
  options: z
    .object({
      dryRun: z.boolean().optional(),
      outputRoot: z.string().optional(),
    })
    .optional(),
});
19
+
20
/**
 * Compute a materialization plan from the ontology.
 *
 * Walks every Feature discovered in the store and plans one write per role
 * (Component/Hook/Test/Doc), then returns the plan together with a
 * provenance receipt (store fingerprint before, hash of the plan metadata,
 * and total change count). Nothing is written to disk here.
 *
 * @param {Object} params
 * @param {Store} params.ontologyStore - Current ontology
 * @param {Object} [params.templateConfig] - Template-to-generator mappings
 * @param {Object} [params.options] - Materialization options
 * @returns {{plan: Object, receipt: Object}}
 */
export function materializeArtifacts(params) {
  const { ontologyStore, options = {} } = MaterializeOptionsSchema.parse(params);

  // Plan skeleton; metadata records when and against what store size it was built.
  const plan = {
    writes: [],
    deletes: [],
    moves: [],
    metadata: {
      timestamp: new Date().toISOString(),
      storeSize: ontologyStore.size,
    },
  };

  // One planned write per (feature, role) pair.
  const features = extractFeaturesFromStore(ontologyStore);
  for (const [featureName, feature] of Object.entries(features)) {
    plan.writes.push(
      ...feature.roles.map(role => ({
        path: generatePathForRole(featureName, role),
        type: role,
        feature: featureName,
      }))
    );
  }

  const beforeHash = hashStore(ontologyStore);
  const planHash = hashPlan(plan);

  const receipt = {
    beforeHash,
    afterHash: planHash, // mirrors planHash: there is no post-apply state yet
    planHash,
    changes: plan.writes.length + plan.deletes.length + plan.moves.length,
    dryRun: options.dryRun || false,
    timestamp: new Date().toISOString(),
  };

  return { plan, receipt };
}
70
+
71
/**
 * Extract features from the ontology store.
 *
 * A feature is any subject with an rdf:type whose type IRI contains
 * 'Feature'; each one is assigned the default role set.
 *
 * @private
 * @param {Object} store - N3-like store exposing getQuads
 * @returns {Record<string, {roles: string[]}>} subject IRI -> roles
 */
function extractFeaturesFromStore(store) {
  const features = {};
  const RDF_TYPE = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#type';

  try {
    // FIX: the previous code passed a plain `{ value: ... }` object (no
    // termType) as the predicate to getQuads. That is not a valid RDF/JS
    // term, so with a real N3 store the lookup matched nothing (or threw,
    // swallowed below) and features were always empty. Scan all quads and
    // compare predicate IRIs by string value instead.
    const quads = store.getQuads(null, null, null, null);

    for (const quad of quads) {
      if (quad.predicate.value !== RDF_TYPE) continue;
      if (quad.object.value && quad.object.value.includes('Feature')) {
        features[quad.subject.value] = { roles: ['Component', 'Hook', 'Test', 'Doc'] };
      }
    }
  } catch (e) {
    // Best-effort: a malformed store yields no features rather than failing.
  }

  return features;
}
98
+
99
/**
 * Generate the output file path for a feature and role.
 *
 * Known roles map to fixed file names under the feature directory; any
 * other role falls back to '<role-lowercase>/index.ts'.
 *
 * @private
 * @param {string} featureName
 * @param {string} role
 * @returns {string} path under src/features/<featureName>/
 */
function generatePathForRole(featureName, role) {
  const KNOWN_ROLE_PATHS = {
    Component: 'components/index.tsx',
    Hook: 'hooks/index.ts',
    Test: '__tests__/index.test.tsx',
    Doc: 'README.md',
    Api: 'api/route.ts',
  };

  const tail = KNOWN_ROLE_PATHS[role] ?? `${role.toLowerCase()}/index.ts`;
  return `src/features/${featureName}/${tail}`;
}
116
+
117
/**
 * Fingerprint a store for provenance.
 *
 * NOTE: only the quad count is hashed — two stores of equal size hash
 * identically. This is a cheap size fingerprint, not a content hash.
 *
 * @private
 * @param {Object} store - store exposing .size
 * @returns {string} first 16 hex characters of the SHA-256 digest
 */
function hashStore(store) {
  return createHash('sha256').update(String(store.size)).digest('hex').slice(0, 16);
}
127
+
128
/**
 * Hash a plan for the receipt.
 *
 * NOTE: only plan.metadata is hashed (timestamp + store size), not the
 * planned writes/deletes/moves themselves.
 *
 * @private
 * @param {Object} plan - plan carrying a JSON-serializable .metadata
 * @returns {string} first 16 hex characters of the SHA-256 digest
 */
function hashPlan(plan) {
  const serialized = JSON.stringify(plan.metadata);
  return createHash('sha256').update(serialized).digest('hex').slice(0, 16);
}