@forwardimpact/pathway 0.25.21 → 0.25.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/fit-pathway.js +117 -325
- package/package.json +2 -2
- package/src/commands/agent-io.js +1 -1
- package/src/commands/agent.js +25 -23
- package/src/commands/behaviour.js +7 -7
- package/src/commands/build-bundle.js +88 -0
- package/src/commands/build-packs.js +566 -0
- package/src/commands/build.js +27 -84
- package/src/commands/command-factory.js +2 -2
- package/src/commands/discipline.js +7 -7
- package/src/commands/driver.js +8 -8
- package/src/commands/index.js +0 -1
- package/src/commands/interview.js +4 -4
- package/src/commands/job.js +24 -18
- package/src/commands/level.js +7 -7
- package/src/commands/progress.js +4 -4
- package/src/commands/questions.js +10 -8
- package/src/commands/skill.js +10 -10
- package/src/commands/stage.js +7 -7
- package/src/commands/tool.js +6 -6
- package/src/commands/track.js +7 -7
- package/src/css/pages/agent-builder.css +48 -0
- package/src/formatters/interview/shared.js +6 -4
- package/src/formatters/progress/shared.js +9 -20
- package/src/formatters/questions/yaml.js +1 -1
- package/src/formatters/skill/shared.js +9 -2
- package/src/formatters/track/shared.js +4 -1
- package/src/index.html +1 -1
- package/src/lib/cli-command.js +33 -33
- package/src/lib/cli-output.js +9 -189
- package/src/pages/agent-builder-install.js +118 -0
- package/src/pages/agent-builder-preview.js +3 -3
- package/src/pages/agent-builder.js +23 -1
- package/src/pages/progress.js +3 -3
- package/src/pages/skill.js +5 -2
- package/src/commands/init.js +0 -64
- package/src/lib/job-cache.js +0 -89
- package/starter/behaviours/systems_thinking.yaml +0 -32
- package/starter/capabilities/delivery.yaml +0 -105
- package/starter/capabilities/reliability.yaml +0 -72
- package/starter/disciplines/software_engineering.yaml +0 -46
- package/starter/drivers.yaml +0 -10
- package/starter/framework.yaml +0 -49
- package/starter/levels.yaml +0 -39
- package/starter/questions/behaviours/.gitkeep +0 -0
- package/starter/questions/capabilities/.gitkeep +0 -0
- package/starter/questions/skills/.gitkeep +0 -0
- package/starter/stages.yaml +0 -21
- package/starter/tracks/forward_deployed.yaml +0 -33
- package/starter/tracks/platform.yaml +0 -33
|
@@ -0,0 +1,566 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pack generation for Pathway distribution.
|
|
3
|
+
*
|
|
4
|
+
* Emits one pre-built agent/skill pack per valid discipline/track combination.
|
|
5
|
+
* Each pack becomes its own `npx skills`-compatible repository at
|
|
6
|
+
* `packs/{name}/.well-known/skills/`, with an aggregate repository at
|
|
7
|
+
* `packs/.well-known/skills/` listing every skill from every pack.
|
|
8
|
+
* An `apm.yml` for Microsoft APM is also written at the site root.
|
|
9
|
+
*
|
|
10
|
+
* See specs/320-pathway-ecosystem-distribution for context.
|
|
11
|
+
*
|
|
12
|
+
* Invoked from build.js after the distribution bundle has been generated.
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import { mkdir, rm, readFile, writeFile, readdir, cp } from "fs/promises";
|
|
16
|
+
import { utimesSync } from "fs";
|
|
17
|
+
import { join } from "path";
|
|
18
|
+
import { execFileSync } from "child_process";
|
|
19
|
+
import { createHash } from "crypto";
|
|
20
|
+
|
|
21
|
+
import { createDataLoader } from "@forwardimpact/map/loader";
|
|
22
|
+
import { createTemplateLoader } from "@forwardimpact/libtemplate";
|
|
23
|
+
import {
|
|
24
|
+
generateStageAgentProfile,
|
|
25
|
+
deriveReferenceLevel,
|
|
26
|
+
deriveAgentSkills,
|
|
27
|
+
generateSkillMarkdown,
|
|
28
|
+
interpolateTeamInstructions,
|
|
29
|
+
getDisciplineAbbreviation,
|
|
30
|
+
toKebabCase,
|
|
31
|
+
} from "@forwardimpact/libskill/agent";
|
|
32
|
+
|
|
33
|
+
import { formatAgentProfile } from "../formatters/agent/profile.js";
|
|
34
|
+
import {
|
|
35
|
+
formatAgentSkill,
|
|
36
|
+
formatInstallScript,
|
|
37
|
+
formatReference,
|
|
38
|
+
} from "../formatters/agent/skill.js";
|
|
39
|
+
import { findValidCombinations } from "./agent.js";
|
|
40
|
+
|
|
41
|
+
/**
 * Turn arbitrary text into a lowercase, hyphen-separated slug suitable
 * for use as a package name.
 * @param {string} text
 * @returns {string}
 */
function slugify(text) {
  const lowered = text.toLowerCase();
  const hyphenated = lowered.replace(/[^a-z0-9]+/g, "-");
  // Strip any hyphens the substitution left at either end.
  return hyphenated.replace(/^-+|-+$/g, "");
}
|
|
52
|
+
|
|
53
|
+
/**
 * Stringify a JSON value with object keys sorted recursively.
 * Produces deterministic output for digest stability.
 *
 * Cycle detection tracks only the objects on the *current* recursion
 * path: an object that appears twice as a shared (diamond) reference is
 * legal JSON and is serialized once per occurrence, while a true cycle
 * still throws. (A permanent "seen" set would wrongly reject shared
 * references.)
 *
 * @param {unknown} value
 * @returns {string} Pretty-printed JSON followed by a trailing newline.
 * @throws {Error} If the value contains a circular reference.
 */
function stringifySorted(value) {
  const active = new WeakSet(); // objects on the current recursion path
  const sort = (v) => {
    if (v === null || typeof v !== "object") return v;
    if (active.has(v)) throw new Error("Cannot stringify circular structure");
    active.add(v);
    try {
      if (Array.isArray(v)) return v.map(sort);
      const out = {};
      for (const key of Object.keys(v).sort()) out[key] = sort(v[key]);
      return out;
    } finally {
      // Leaving this subtree — the object may legitimately appear again
      // elsewhere in the structure.
      active.delete(v);
    }
  };
  return JSON.stringify(sort(value), null, 2) + "\n";
}
|
|
72
|
+
|
|
73
|
+
/**
 * Escape a string for use as a YAML double-quoted scalar.
 *
 * Backslashes and double quotes are escaped as before; newlines,
 * carriage returns, and tabs are additionally escaped because a literal
 * line break inside a YAML double-quoted scalar is folded to a space,
 * silently changing the value. Escaping keeps the scalar on one line
 * with its value intact.
 *
 * @param {string} text - Value to quote (non-strings are stringified).
 * @returns {string} The quoted YAML scalar, including surrounding quotes.
 */
function yamlQuote(text) {
  const escaped = String(text)
    .replace(/\\/g, "\\\\")
    .replace(/"/g, '\\"')
    .replace(/\n/g, "\\n")
    .replace(/\r/g, "\\r")
    .replace(/\t/g, "\\t");
  return `"${escaped}"`;
}
|
|
81
|
+
|
|
82
|
+
/**
 * Write a single pack's files to disk under the staging directory.
 * Calls the formatters directly (silent, no console output).
 *
 * Layout produced:
 *   {packDir}/.claude/agents/{profile.filename}
 *   {packDir}/.claude/skills/{dirname}/SKILL.md (+ scripts/, references/)
 *   {packDir}/.claude/CLAUDE.md        (if teamInstructions given)
 *   {packDir}/.claude/settings.json
 *
 * @param {Object} params
 * @returns {Promise<void>}
 */
async function writePackFiles({
  packDir,
  profiles,
  skillFiles,
  teamInstructions,
  agentTemplate,
  skillTemplates,
  claudeCodeSettings,
}) {
  const claudeDir = join(packDir, ".claude");
  const agentsDir = join(claudeDir, "agents");
  const skillsDir = join(claudeDir, "skills");

  await mkdir(agentsDir, { recursive: true });
  await mkdir(skillsDir, { recursive: true });

  // One markdown file per stage agent profile.
  for (const profile of profiles) {
    await writeFile(
      join(agentsDir, profile.filename),
      formatAgentProfile(profile, agentTemplate),
      "utf-8",
    );
  }

  // Each skill gets its own directory holding SKILL.md plus optional
  // install script and implementation reference.
  for (const skill of skillFiles) {
    const skillDir = join(skillsDir, skill.dirname);
    await mkdir(skillDir, { recursive: true });

    await writeFile(
      join(skillDir, "SKILL.md"),
      formatAgentSkill(skill, skillTemplates.skill),
      "utf-8",
    );

    if (skill.installScript) {
      const scriptsDir = join(skillDir, "scripts");
      await mkdir(scriptsDir, { recursive: true });
      // Executable bit so the script can be run directly after install.
      await writeFile(
        join(scriptsDir, "install.sh"),
        formatInstallScript(skill, skillTemplates.install),
        { mode: 0o755 },
      );
    }

    if (skill.implementationReference) {
      const refsDir = join(skillDir, "references");
      await mkdir(refsDir, { recursive: true });
      await writeFile(
        join(refsDir, "REFERENCE.md"),
        formatReference(skill, skillTemplates.reference),
        "utf-8",
      );
    }
  }

  if (teamInstructions) {
    await writeFile(
      join(claudeDir, "CLAUDE.md"),
      teamInstructions.trim() + "\n",
      "utf-8",
    );
  }

  // Claude Code settings — matches the CLI path's generateClaudeCodeSettings
  // output format (no merge with existing files since the staging dir starts
  // empty).
  await writeFile(
    join(claudeDir, "settings.json"),
    JSON.stringify({ ...(claudeCodeSettings || {}) }, null, 2) + "\n",
    "utf-8",
  );
}
|
|
162
|
+
|
|
163
|
+
/**
 * Derive profiles, skills, and team instructions for a single combination.
 * @param {Object} params
 * @returns {{profiles: Array, skillFiles: Array, teamInstructions: string|null}}
 */
function derivePackContent({
  discipline,
  track,
  humanDiscipline,
  humanTrack,
  data,
  agentData,
  skillsWithAgent,
  level,
}) {
  // Parameters shared by profile generation and skill derivation.
  const shared = {
    discipline: humanDiscipline,
    track: humanTrack,
    level,
    skills: skillsWithAgent,
  };

  // One agent profile per stage.
  const profiles = data.stages.map((stage) =>
    generateStageAgentProfile({
      ...shared,
      behaviours: data.behaviours,
      agentBehaviours: agentData.behaviours,
      agentDiscipline: discipline,
      agentTrack: track,
      stages: data.stages,
      stage,
    }),
  );

  // Resolve derived skill ids back to full skill records, keep only
  // those carrying agent data, then render their markdown.
  const skillFiles = deriveAgentSkills(shared)
    .map(({ skillId }) => skillsWithAgent.find((s) => s.id === skillId))
    .filter((skill) => skill?.agent)
    .map((skill) =>
      generateSkillMarkdown({ skillData: skill, stages: data.stages }),
    );

  const teamInstructions = interpolateTeamInstructions({
    agentTrack: track,
    humanDiscipline,
  });

  return { profiles, skillFiles, teamInstructions };
}
|
|
215
|
+
|
|
216
|
+
/**
 * Recursively collect all paths (files and directories) under `dir`,
 * relative to `dir` and prefixed with `./`. Order follows directory
 * read order (callers that need a stable order sort the result).
 * @param {string} dir
 * @param {string} [prefix]
 * @returns {Promise<string[]>}
 */
async function collectPaths(dir, prefix = ".") {
  const collected = [];
  for (const entry of await readdir(dir, { withFileTypes: true })) {
    const rel = `${prefix}/${entry.name}`;
    collected.push(rel);
    if (entry.isDirectory()) {
      // Directories contribute themselves and everything beneath them.
      collected.push(...(await collectPaths(join(dir, entry.name), rel)));
    }
  }
  return collected;
}
|
|
235
|
+
|
|
236
|
+
/**
 * Set mtime and atime to the Unix epoch for every entry under `dir`,
 * then for `dir` itself, so archives built from it are deterministic.
 * @param {string} dir
 * @returns {Promise<void>}
 */
async function resetTimestamps(dir) {
  const epoch = new Date(0);
  for (const rel of await collectPaths(dir)) {
    utimesSync(join(dir, rel), epoch, epoch);
  }
  // The root itself last — touching children updates the parent's mtime.
  utimesSync(dir, epoch, epoch);
}
|
|
247
|
+
|
|
248
|
+
/**
 * Archive a staged pack directory as a deterministic tar.gz and return its
 * sha256 digest.
 *
 * Determinism strategy (works on GNU tar and BSD tar):
 *   1. Reset all file timestamps to epoch via Node's utimesSync.
 *   2. Collect and sort the file list in JS — no reliance on --sort=name.
 *   3. Create an uncompressed tar to stdout with the sorted list.
 *   4. Pipe through `gzip -n` to suppress the gzip header timestamp.
 *
 * @param {string} packDir - Staging directory containing the pack files
 * @param {string} archivePath - Destination path for the tar.gz
 * @returns {Promise<string>} sha256 digest string (e.g. "sha256:abc...")
 */
async function archivePack(packDir, archivePath) {
  await resetTimestamps(packDir);

  const files = await collectPaths(packDir);
  files.sort();

  // Raise maxBuffer (default is only 1 MiB) so packs larger than the
  // default don't fail with ENOBUFS when captured via stdout.
  const MAX_BUFFER = 256 * 1024 * 1024;
  const tarBuf = execFileSync("tar", ["-cf", "-", "-C", packDir, ...files], {
    maxBuffer: MAX_BUFFER,
  });
  const gzBuf = execFileSync("gzip", ["-n"], {
    input: tarBuf,
    maxBuffer: MAX_BUFFER,
  });
  await writeFile(archivePath, gzBuf);

  // Hash the in-memory buffer — identical bytes to what was just
  // written, without a redundant readFile round-trip.
  return "sha256:" + createHash("sha256").update(gzBuf).digest("hex");
}
|
|
275
|
+
|
|
276
|
+
/**
 * Collect all file paths under `dir`, relative to `dir`, for the manifest
 * `files` array. Returns sorted paths with forward slashes; directories
 * themselves are not listed, only the files within them.
 * @param {string} dir
 * @param {string} [prefix]
 * @returns {Promise<string[]>}
 */
async function collectFileList(dir, prefix = "") {
  const files = [];
  const entries = await readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const rel = prefix === "" ? entry.name : `${prefix}/${entry.name}`;
    if (entry.isDirectory()) {
      const nested = await collectFileList(join(dir, entry.name), rel);
      files.push(...nested);
    } else {
      files.push(rel);
    }
  }
  return files.sort();
}
|
|
296
|
+
|
|
297
|
+
/**
 * Parse YAML frontmatter from a SKILL.md file. Returns an object with
 * the key/value pairs found between the `---` fences. Only flat
 * `key: value` lines are recognized; anything else is ignored.
 * @param {string} content
 * @returns {Record<string, string>}
 */
function parseFrontmatter(content) {
  const fence = content.match(/^---\n([\s\S]*?)\n---/);
  if (!fence) return {};
  const pairs = {};
  for (const line of fence[1].split("\n")) {
    const sep = line.indexOf(":");
    if (sep <= 0) continue; // no separator, or empty key — skip the line
    pairs[line.slice(0, sep).trim()] = line.slice(sep + 1).trim();
  }
  return pairs;
}
|
|
313
|
+
|
|
314
|
+
/**
 * Build a single skill index entry from a staged skill directory.
 * @param {string} skillDir - Path containing SKILL.md and optional extras
 * @param {string} name - Skill name for the manifest
 * @returns {Promise<{name: string, description: string, files: string[]}>}
 */
async function buildSkillEntry(skillDir, name) {
  // Description comes from the SKILL.md frontmatter; missing → empty string.
  const frontmatter = parseFrontmatter(
    await readFile(join(skillDir, "SKILL.md"), "utf-8"),
  );
  return {
    description: frontmatter.description || "",
    files: await collectFileList(skillDir),
    name,
  };
}
|
|
326
|
+
|
|
327
|
+
/**
 * Write a `npx skills`-compatible repository for a single pack.
 *
 * Each pack becomes its own skill repository at
 * `packs/{name}/.well-known/skills/` so that individual skills
 * within the pack can be discovered and installed independently:
 *
 *   npx skills add domain.org/packs/se-platform --all
 *   npx skills add domain.org/packs/se-platform -s architecture-design
 *
 * @param {string} packsOutputDir - The `packs/` output directory
 * @param {string} packStagingDir - Staging directory for this pack
 * @param {string} packName - Pack name (e.g. "se-platform")
 * @returns {Promise<Array<{name: string, description: string, files: string[]}>>}
 *   The skill entries written, for use in the aggregate manifest.
 */
async function writePackRepository(packsOutputDir, packStagingDir, packName) {
  const wellKnownDir = join(packsOutputDir, packName, ".well-known", "skills");
  await mkdir(wellKnownDir, { recursive: true });

  // Discover individual skills from the staged pack's .claude/skills/
  const skillsSrcDir = join(packStagingDir, ".claude", "skills");
  const dirents = await readdir(skillsSrcDir, { withFileTypes: true });

  const entries = [];
  for (const dirent of dirents) {
    if (!dirent.isDirectory()) continue;
    const dest = join(wellKnownDir, dirent.name);
    await cp(join(skillsSrcDir, dirent.name), dest, { recursive: true });
    entries.push(await buildSkillEntry(dest, dirent.name));
  }

  const manifest = {
    $schema: "https://schemas.agentskills.io/discovery/0.2.0/schema.json",
    skills: entries,
  };
  await writeFile(
    join(wellKnownDir, "index.json"),
    stringifySorted(manifest),
    "utf-8",
  );

  return entries;
}
|
|
373
|
+
|
|
374
|
+
/**
 * Write an aggregate `npx skills` repository at `packs/` that lists every
 * unique skill across all packs. Skills with the same name produce identical
 * SKILL.md content regardless of discipline/track, so we deduplicate by name
 * and write one copy.
 *
 *   npx skills add domain.org/packs --list
 *
 * @param {string} packsOutputDir
 * @param {Array<{packName: string, entries: Array}>} allPackEntries
 */
async function writeAggregateRepository(packsOutputDir, allPackEntries) {
  const wellKnownDir = join(packsOutputDir, ".well-known", "skills");
  await mkdir(wellKnownDir, { recursive: true });

  // Deduplicate: first occurrence of each skill name wins (content is identical)
  const uniqueByName = new Map();
  for (const { packName, entries } of allPackEntries) {
    for (const entry of entries) {
      if (!uniqueByName.has(entry.name)) {
        uniqueByName.set(entry.name, { packName, entry });
      }
    }
  }

  // Copy each unique skill out of the pack repository it first appeared in.
  const skills = [];
  for (const { packName, entry } of uniqueByName.values()) {
    const src = join(
      packsOutputDir,
      packName,
      ".well-known",
      "skills",
      entry.name,
    );
    await cp(src, join(wellKnownDir, entry.name), { recursive: true });
    skills.push(entry);
  }

  const manifest = {
    $schema: "https://schemas.agentskills.io/discovery/0.2.0/schema.json",
    skills,
  };
  await writeFile(
    join(wellKnownDir, "index.json"),
    stringifySorted(manifest),
    "utf-8",
  );
}
|
|
422
|
+
|
|
423
|
+
/**
 * Write the Microsoft APM manifest at the site root.
 * @param {string} outputDir
 * @param {Array<{name: string, description: string, url: string, digest: string}>} packs
 * @param {string} version
 * @param {string} frameworkTitle
 */
async function writeApmManifest(outputDir, packs, version, frameworkTitle) {
  const header = [
    `name: ${slugify(frameworkTitle)}`,
    `version: ${version}`,
    `description: ${yamlQuote(`${frameworkTitle} agent teams for Claude Code`)}`,
    "",
    "skills:",
  ];
  // One list item per pack, in the order the packs were generated.
  const body = packs.flatMap((pack) => [
    `  - name: ${pack.name}`,
    `    description: ${yamlQuote(pack.description)}`,
    `    version: ${version}`,
    `    url: ${yamlQuote(pack.url)}`,
    `    digest: ${yamlQuote(pack.digest)}`,
  ]);
  const document = [...header, ...body, ""].join("\n");
  await writeFile(join(outputDir, "apm.yml"), document, "utf-8");
}
|
|
448
|
+
|
|
449
|
+
/**
 * Generate pre-built agent/skill packs for installation through ecosystem
 * tools like `npx skills` and Microsoft APM. One pack per valid
 * discipline/track combination.
 *
 * Pipeline: stage each pack under `{outputDir}/_packs/{name}`, archive it
 * to `{outputDir}/packs/{name}.tar.gz`, expose per-pack and aggregate
 * `.well-known/skills/` repositories, then write `apm.yml` at the root.
 * The staging directory is removed once everything derived from it exists.
 *
 * @param {Object} params
 * @param {string} params.outputDir - Build output directory
 * @param {string} params.dataDir - Source data directory
 * @param {string} params.siteUrl - Base URL for the published site
 * @param {Object} params.framework - Framework configuration
 * @param {string} params.version - Pathway package version
 * @param {string} params.templatesDir - Absolute path to pathway/templates
 */
export async function generatePacks({
  outputDir,
  dataDir,
  siteUrl,
  framework,
  version,
  templatesDir,
}) {
  console.log("📦 Generating agent/skill packs...");

  // Trailing slash would double up when pack URLs are appended below.
  const normalizedSiteUrl = siteUrl.replace(/\/$/, "");
  const frameworkTitle = framework.title || "Engineering Pathway";

  const loader = createDataLoader();
  const templateLoader = createTemplateLoader(templatesDir);

  const data = await loader.loadAllData(dataDir);
  const agentData = await loader.loadAgentData(dataDir);
  const skillsWithAgent = await loader.loadSkillsWithAgentData(dataDir);

  const level = deriveReferenceLevel(data.levels);

  // Templates are loaded once and reused for every pack.
  const agentTemplate = templateLoader.load("agent.template.md", dataDir);
  const skillTemplates = {
    skill: templateLoader.load("skill.template.md", dataDir),
    install: templateLoader.load("skill-install.template.sh", dataDir),
    reference: templateLoader.load("skill-reference.template.md", dataDir),
  };

  // _packs is a temporary staging area; packs/ is the published output.
  const stagingDir = join(outputDir, "_packs");
  const packsDir = join(outputDir, "packs");
  await mkdir(stagingDir, { recursive: true });
  await mkdir(packsDir, { recursive: true });

  const combinations = findValidCombinations(data, agentData);
  if (combinations.length === 0) {
    console.log(" (no valid discipline/track combinations — skipping)");
    await rm(stagingDir, { recursive: true, force: true });
    return;
  }

  const packs = [];

  // Stage and archive one pack per discipline/track combination.
  for (const combination of combinations) {
    const { discipline, track, humanDiscipline, humanTrack } = combination;
    const abbrev = getDisciplineAbbreviation(discipline.id);
    const agentName = `${abbrev}-${toKebabCase(track.id)}`;
    const specName = humanDiscipline.specialization || humanDiscipline.name;
    const description = `${specName} (${humanTrack.name}) — agent team`;

    const { profiles, skillFiles, teamInstructions } = derivePackContent({
      ...combination,
      data,
      agentData,
      skillsWithAgent,
      level,
    });

    const packDir = join(stagingDir, agentName);
    await writePackFiles({
      packDir,
      profiles,
      skillFiles,
      teamInstructions,
      agentTemplate,
      skillTemplates,
      claudeCodeSettings: agentData.claudeCodeSettings,
    });

    const archivePath = join(packsDir, `${agentName}.tar.gz`);
    const digest = await archivePack(packDir, archivePath);

    // Recorded for the APM manifest written at the end.
    packs.push({
      name: agentName,
      description,
      url: `${normalizedSiteUrl}/packs/${agentName}.tar.gz`,
      digest,
    });

    console.log(` ✓ packs/${agentName}.tar.gz`);
  }

  // Write per-pack skill repositories (one per discipline/track combination)
  const allPackEntries = [];
  for (const pack of packs) {
    const entries = await writePackRepository(
      packsDir,
      join(stagingDir, pack.name),
      pack.name,
    );
    allPackEntries.push({ packName: pack.name, entries });
    console.log(
      ` ✓ packs/${pack.name}/.well-known/skills/ (${entries.length} skills)`,
    );
  }

  // Write aggregate repository at packs/ level
  await writeAggregateRepository(packsDir, allPackEntries);
  console.log(" ✓ packs/.well-known/skills/index.json (aggregate)");

  // Everything derived from the staging area now exists under packs/.
  await rm(stagingDir, { recursive: true, force: true });

  await writeApmManifest(outputDir, packs, version, frameworkTitle);
  console.log(" ✓ apm.yml");
}
|
package/src/commands/build.js
CHANGED
|
@@ -3,26 +3,20 @@
|
|
|
3
3
|
*
|
|
4
4
|
* Generates a static site from the Engineering Pathway data.
|
|
5
5
|
* Copies all necessary files (HTML, JS, CSS) and data to an output directory.
|
|
6
|
-
* Optionally
|
|
7
|
-
*
|
|
6
|
+
* Optionally delegates to build-bundle and build-packs to produce the
|
|
7
|
+
* distribution surfaces (bundle.tar.gz + install.sh for the curl|bash flow,
|
|
8
|
+
* and agent/skill packs for ecosystem tools like `npx skills` and APM) when
|
|
9
|
+
* `framework.distribution.siteUrl` is configured.
|
|
8
10
|
*/
|
|
9
11
|
|
|
10
|
-
import {
|
|
11
|
-
cp,
|
|
12
|
-
mkdir,
|
|
13
|
-
rm,
|
|
14
|
-
access,
|
|
15
|
-
realpath,
|
|
16
|
-
readFile,
|
|
17
|
-
writeFile,
|
|
18
|
-
} from "fs/promises";
|
|
12
|
+
import { cp, mkdir, rm, access, realpath, writeFile } from "fs/promises";
|
|
19
13
|
import { readFileSync } from "fs";
|
|
20
14
|
import { join, dirname, relative, resolve } from "path";
|
|
21
15
|
import { fileURLToPath } from "url";
|
|
22
|
-
import { execFileSync } from "child_process";
|
|
23
|
-
import Mustache from "mustache";
|
|
24
16
|
import { createIndexGenerator } from "@forwardimpact/map/index-generator";
|
|
25
17
|
import { createDataLoader } from "@forwardimpact/map/loader";
|
|
18
|
+
import { generateBundle } from "./build-bundle.js";
|
|
19
|
+
import { generatePacks } from "./build-packs.js";
|
|
26
20
|
|
|
27
21
|
const __filename = fileURLToPath(import.meta.url);
|
|
28
22
|
const __dirname = dirname(__filename);
|
|
@@ -201,10 +195,26 @@ ${framework.emojiIcon} Generating ${framework.title} static site...
|
|
|
201
195
|
);
|
|
202
196
|
console.log(` ✓ version.json (${version})`);
|
|
203
197
|
|
|
204
|
-
// Generate distribution
|
|
198
|
+
// Generate distribution surfaces if siteUrl is configured
|
|
205
199
|
const siteUrl = options.url || framework.distribution?.siteUrl;
|
|
206
200
|
if (siteUrl) {
|
|
207
|
-
|
|
201
|
+
const templatesDir = join(appDir, "..", "templates");
|
|
202
|
+
await generateBundle({
|
|
203
|
+
outputDir,
|
|
204
|
+
dataDir,
|
|
205
|
+
siteUrl,
|
|
206
|
+
framework,
|
|
207
|
+
version,
|
|
208
|
+
templatesDir,
|
|
209
|
+
});
|
|
210
|
+
await generatePacks({
|
|
211
|
+
outputDir,
|
|
212
|
+
dataDir,
|
|
213
|
+
siteUrl,
|
|
214
|
+
framework,
|
|
215
|
+
version,
|
|
216
|
+
templatesDir,
|
|
217
|
+
});
|
|
208
218
|
}
|
|
209
219
|
|
|
210
220
|
// Show summary
|
|
@@ -212,10 +222,10 @@ ${framework.emojiIcon} Generating ${framework.title} static site...
|
|
|
212
222
|
✅ Site generated successfully!
|
|
213
223
|
|
|
214
224
|
Output: ${outputDir}
|
|
215
|
-
${siteUrl ? `\nDistribution:\n ${outputDir}/bundle.tar.gz\n ${outputDir}/install.sh\n` : ""}
|
|
225
|
+
${siteUrl ? `\nDistribution:\n ${outputDir}/bundle.tar.gz\n ${outputDir}/install.sh\n ${outputDir}/packs/ (agent/skill packs)\n ${outputDir}/packs/{name}/.well-known/skills/ (per-pack skill repositories)\n ${outputDir}/packs/.well-known/skills/ (aggregate skill repository)\n ${outputDir}/apm.yml\n` : ""}
|
|
216
226
|
To serve locally:
|
|
217
227
|
cd ${relative(process.cwd(), outputDir) || "."}
|
|
218
|
-
|
|
228
|
+
npx serve .
|
|
219
229
|
`);
|
|
220
230
|
}
|
|
221
231
|
|
|
@@ -228,70 +238,3 @@ function getPathwayVersion() {
|
|
|
228
238
|
const pkg = JSON.parse(readFileSync(pkgPath, "utf8"));
|
|
229
239
|
return pkg.version;
|
|
230
240
|
}
|
|
231
|
-
|
|
232
|
-
/**
|
|
233
|
-
* Generate distribution bundle (bundle.tar.gz + install.sh)
|
|
234
|
-
* @param {Object} params
|
|
235
|
-
* @param {string} params.outputDir - Build output directory
|
|
236
|
-
* @param {string} params.dataDir - Source data directory
|
|
237
|
-
* @param {string} params.siteUrl - Base URL for the published site
|
|
238
|
-
* @param {Object} params.framework - Framework configuration
|
|
239
|
-
*/
|
|
240
|
-
async function generateBundle({ outputDir, dataDir, siteUrl, framework }) {
|
|
241
|
-
console.log("📦 Generating distribution bundle...");
|
|
242
|
-
|
|
243
|
-
const version = getPathwayVersion();
|
|
244
|
-
const frameworkTitle = framework.title || "Engineering Pathway";
|
|
245
|
-
|
|
246
|
-
// 1. Create temporary bundle directory
|
|
247
|
-
const bundleDir = join(outputDir, "_bundle");
|
|
248
|
-
await mkdir(bundleDir, { recursive: true });
|
|
249
|
-
|
|
250
|
-
// 2. Generate minimal package.json for the bundle
|
|
251
|
-
const bundlePkg = {
|
|
252
|
-
name: "fit-pathway-local",
|
|
253
|
-
version: version,
|
|
254
|
-
private: true,
|
|
255
|
-
dependencies: {
|
|
256
|
-
"@forwardimpact/pathway": `^${version}`,
|
|
257
|
-
},
|
|
258
|
-
};
|
|
259
|
-
await writeFile(
|
|
260
|
-
join(bundleDir, "package.json"),
|
|
261
|
-
JSON.stringify(bundlePkg, null, 2) + "\n",
|
|
262
|
-
);
|
|
263
|
-
console.log(` ✓ package.json (pathway ^${version})`);
|
|
264
|
-
|
|
265
|
-
// 3. Copy data files into bundle
|
|
266
|
-
await cp(dataDir, join(bundleDir, "data"), {
|
|
267
|
-
recursive: true,
|
|
268
|
-
dereference: true,
|
|
269
|
-
});
|
|
270
|
-
console.log(" ✓ data/");
|
|
271
|
-
|
|
272
|
-
// 4. Create tar.gz from the bundle directory
|
|
273
|
-
execFileSync("tar", [
|
|
274
|
-
"-czf",
|
|
275
|
-
join(outputDir, "bundle.tar.gz"),
|
|
276
|
-
"-C",
|
|
277
|
-
outputDir,
|
|
278
|
-
"_bundle",
|
|
279
|
-
]);
|
|
280
|
-
console.log(" ✓ bundle.tar.gz");
|
|
281
|
-
|
|
282
|
-
// 5. Clean up temporary bundle directory
|
|
283
|
-
await rm(bundleDir, { recursive: true });
|
|
284
|
-
|
|
285
|
-
// 6. Render install.sh from template
|
|
286
|
-
const templatePath = join(appDir, "..", "templates", "install.template.sh");
|
|
287
|
-
const template = await readFile(templatePath, "utf8");
|
|
288
|
-
const installScript = Mustache.render(template, {
|
|
289
|
-
siteUrl: siteUrl.replace(/\/$/, ""),
|
|
290
|
-
version,
|
|
291
|
-
frameworkTitle,
|
|
292
|
-
});
|
|
293
|
-
await writeFile(join(outputDir, "install.sh"), installScript, {
|
|
294
|
-
mode: 0o755,
|
|
295
|
-
});
|
|
296
|
-
console.log(" ✓ install.sh");
|
|
297
|
-
}
|