@forwardimpact/libuniverse 0.1.2 → 0.1.3

This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -79,7 +79,8 @@ async function main() {
79
79
 
80
80
  const monorepoRoot = resolve(__dirname, "../../..");
81
81
  const schemaDir = join(monorepoRoot, "products/map/schema/json");
82
- const cachePath = join(__dirname, "..", ".prose-cache.json");
82
+ const cachePath =
83
+ args.cache || join(monorepoRoot, "data", "synthetic", "prose-cache.json");
83
84
 
84
85
  const libsyntheticproseDir = dirname(
85
86
  fileURLToPath(import.meta.resolve("@forwardimpact/libsyntheticprose")),
@@ -160,7 +161,7 @@ async function main() {
160
161
 
161
162
  const result = await pipeline.run({
162
163
  universePath:
163
- args.universe || join(monorepoRoot, "examples", "universe.dsl"),
164
+ args.story || join(monorepoRoot, "data", "synthetic", "story.dsl"),
164
165
  only: args.only || null,
165
166
  schemaDir,
166
167
  });
@@ -266,7 +267,8 @@ function parseArgs(argv) {
266
267
  else if (arg === "--dry-run") args.dryRun = true;
267
268
  else if (arg === "--load") args.load = true;
268
269
  else if (arg.startsWith("--only=")) args.only = arg.slice(7);
269
- else if (arg.startsWith("--universe=")) args.universe = arg.slice(11);
270
+ else if (arg.startsWith("--story=")) args.story = arg.slice(8);
271
+ else if (arg.startsWith("--cache=")) args.cache = arg.slice(8);
270
272
  }
271
273
  return args;
272
274
  }
@@ -284,11 +286,12 @@ Options:
284
286
  --dry-run Show what would be written without writing
285
287
  --load Load raw documents to Supabase Storage
286
288
  --only=<type> Render only one content type (html|pathway|raw|markdown)
287
- --universe=<path> Path to a custom universe DSL file
289
+ --story=<path> Path to a custom story DSL file
290
+ --cache=<path> Path to prose cache file
288
291
  -h, --help Show this help message
289
292
 
290
293
  Prose modes:
291
- (default) Use cached prose from .prose-cache.json
294
+ (default) Use cached prose from prose-cache.json
292
295
  --generate Call LLM to generate prose and update the cache
293
296
  --no-prose No prose — produces minimal structural data only
294
297
 
@@ -304,7 +307,7 @@ Examples:
304
307
  npx fit-universe --strict # Cached prose, fail on miss
305
308
  npx fit-universe --no-prose # Structural only, no prose
306
309
  npx fit-universe --only=pathway # Generate pathway data only
307
- npx fit-universe --universe=custom.dsl # Use custom DSL file
310
+ npx fit-universe --story=custom.dsl # Use custom DSL file
308
311
  `);
309
312
  }
310
313
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@forwardimpact/libuniverse",
3
- "version": "0.1.2",
3
+ "version": "0.1.3",
4
4
  "description": "Synthetic data universe DSL and generation engine",
5
5
  "license": "Apache-2.0",
6
6
  "repository": {
package/pipeline.js CHANGED
@@ -66,7 +66,7 @@ export class Pipeline {
66
66
  * Run the full generation pipeline.
67
67
  *
68
68
  * @param {object} options
69
- * @param {string} options.universePath - Path to the universe.dsl file
69
+ * @param {string} options.universePath - Path to the story DSL file
70
70
  * @param {string} [options.only=null] - Render only a specific content type
71
71
  * @param {string|null} [options.schemaDir=null] - Path to JSON schema directory
72
72
  * @returns {Promise<{files: Map<string,string>, rawDocuments: Map<string,string>, entities: object, validation: object, stats: {prose: {hits: number, misses: number, generated: number}, files: number, rawDocuments: number}}>}
@@ -0,0 +1,103 @@
1
+ universe minimal {
2
+ domain "test.example"
3
+ industry "technology"
4
+ seed 42
5
+
6
+ org testorg {
7
+ name "Test Organization"
8
+ }
9
+
10
+ department eng {
11
+ name "Engineering"
12
+ parent testorg
13
+ headcount 5
14
+
15
+ team alpha {
16
+ name "Alpha Team"
17
+ size 5
18
+ manager @alpha_lead
19
+ repos ["alpha-service"]
20
+ }
21
+ }
22
+
23
+ people {
24
+ count 5
25
+ distribution {
26
+ L3 40%
27
+ L4 40%
28
+ L5 20%
29
+ }
30
+ disciplines {
31
+ software_engineering 100%
32
+ }
33
+ }
34
+
35
+ project testproj {
36
+ name "Test Project"
37
+ type "drug"
38
+ teams [alpha]
39
+ prose_topic "Testing synthetic generation"
40
+ prose_tone "technical"
41
+ }
42
+
43
+ framework {
44
+ proficiencies [awareness, foundational, working, practitioner, expert]
45
+ maturities [emerging, developing, practicing, role_modeling, exemplifying]
46
+ stages [specify, plan, code, review]
47
+
48
+ levels {
49
+ J040 { title "Software Engineer" rank 1 experience "0-2 years" }
50
+ J050 { title "Senior Engineer" rank 2 experience "2-5 years" }
51
+ }
52
+
53
+ capabilities {
54
+ coding { name "Coding" skills [python_dev, code_review] }
55
+ }
56
+
57
+ behaviours {
58
+ collaboration { name "Collaboration" }
59
+ }
60
+
61
+ disciplines {
62
+ software_engineering {
63
+ roleTitle "Software Engineer"
64
+ core [python_dev]
65
+ supporting [code_review]
66
+ }
67
+ }
68
+
69
+ tracks {
70
+ backend { name "Backend" }
71
+ }
72
+
73
+ drivers {
74
+ clear_direction {
75
+ name "Clear Direction"
76
+ skills [python_dev]
77
+ behaviours [collaboration]
78
+ }
79
+ }
80
+ }
81
+
82
+ scenario baseline {
83
+ name "Baseline Scenario"
84
+ timerange_start 2025-01
85
+ timerange_end 2025-06
86
+
87
+ affect alpha {
88
+ github_commits "moderate"
89
+ github_prs "moderate"
90
+ dx_drivers {
91
+ clear_direction { trajectory "rising" magnitude 3 }
92
+ }
93
+ evidence_skills [python_dev]
94
+ evidence_floor "foundational"
95
+ }
96
+ }
97
+
98
+ content guide_html {
99
+ courses 2
100
+ events 1
101
+ blogs 3
102
+ }
103
+ }
@@ -0,0 +1,106 @@
1
+ import { describe, test } from "node:test";
2
+ import assert from "node:assert/strict";
3
+ import { join, dirname } from "path";
4
+ import { fileURLToPath } from "url";
5
+ import {
6
+ createDslParser,
7
+ createEntityGenerator,
8
+ } from "@forwardimpact/libsyntheticgen";
9
+ import { validateCrossContent } from "@forwardimpact/libsyntheticrender";
10
+ import { readFileSync } from "fs";
11
+
12
+ const __dirname = dirname(fileURLToPath(import.meta.url));
13
+ const FIXTURE_PATH = join(__dirname, "fixtures", "minimal.dsl");
14
+
15
+ function makeLogger() {
16
+ return {
17
+ info: () => {},
18
+ debug: () => {},
19
+ warn: () => {},
20
+ error: () => {},
21
+ };
22
+ }
23
+
24
+ describe("Pipeline integration", () => {
25
+ test("parses minimal DSL fixture", () => {
26
+ const source = readFileSync(FIXTURE_PATH, "utf-8");
27
+ const parser = createDslParser();
28
+ const ast = parser.parse(source);
29
+
30
+ assert.strictEqual(ast.domain, "test.example");
31
+ assert.strictEqual(ast.industry, "technology");
32
+ assert.ok(ast.people);
33
+ assert.ok(ast.teams.length > 0);
34
+ assert.ok(ast.projects.length > 0);
35
+ });
36
+
37
+ test("generates entities from minimal DSL", () => {
38
+ const source = readFileSync(FIXTURE_PATH, "utf-8");
39
+ const parser = createDslParser();
40
+ const ast = parser.parse(source);
41
+ const generator = createEntityGenerator(makeLogger());
42
+ const entities = generator.generate(ast);
43
+
44
+ assert.ok(entities.orgs.length > 0);
45
+ assert.ok(entities.departments.length > 0);
46
+ assert.ok(entities.teams.length > 0);
47
+ assert.ok(entities.people.length > 0);
48
+ assert.ok(entities.projects.length > 0);
49
+ assert.ok(entities.domain);
50
+ });
51
+
52
+ test("entity IRIs use consistent /id/ namespace", () => {
53
+ const source = readFileSync(FIXTURE_PATH, "utf-8");
54
+ const parser = createDslParser();
55
+ const ast = parser.parse(source);
56
+ const generator = createEntityGenerator(makeLogger());
57
+ const entities = generator.generate(ast);
58
+
59
+ for (const org of entities.orgs) {
60
+ assert.ok(org.iri.includes("/id/org/"), `Bad org IRI: ${org.iri}`);
61
+ }
62
+ for (const dept of entities.departments) {
63
+ assert.ok(
64
+ dept.iri.includes("/id/department/"),
65
+ `Bad dept IRI: ${dept.iri}`,
66
+ );
67
+ }
68
+ for (const team of entities.teams) {
69
+ assert.ok(team.iri.includes("/id/team/"), `Bad team IRI: ${team.iri}`);
70
+ }
71
+ for (const person of entities.people) {
72
+ assert.ok(
73
+ person.iri.includes("/id/person/"),
74
+ `Bad person IRI: ${person.iri}`,
75
+ );
76
+ }
77
+ for (const proj of entities.projects) {
78
+ assert.ok(
79
+ proj.iri.includes("/id/project/"),
80
+ `Bad project IRI: ${proj.iri}`,
81
+ );
82
+ }
83
+ });
84
+
85
+ test("cross-content validation passes on generated entities", () => {
86
+ const source = readFileSync(FIXTURE_PATH, "utf-8");
87
+ const parser = createDslParser();
88
+ const ast = parser.parse(source);
89
+ const generator = createEntityGenerator(makeLogger());
90
+ const entities = generator.generate(ast);
91
+ const result = validateCrossContent(entities);
92
+
93
+ // Minimal fixture has no snapshots block, so snapshot checks are expected to fail
94
+ const snapshotChecks = new Set([
95
+ "getdx_snapshots_list_response",
96
+ "getdx_snapshots_info_responses",
97
+ ]);
98
+ const failures = result.checks.filter(
99
+ (c) => !c.passed && !snapshotChecks.has(c.name),
100
+ );
101
+ if (failures.length > 0) {
102
+ const names = failures.map((f) => f.name).join(", ");
103
+ assert.fail(`Validation failures: ${names}`);
104
+ }
105
+ });
106
+ });