@fission-ai/openspec 0.17.1 → 0.18.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/dist/cli/index.js +7 -1
  2. package/dist/commands/artifact-workflow.d.ts +17 -0
  3. package/dist/commands/artifact-workflow.js +818 -0
  4. package/dist/commands/validate.d.ts +1 -0
  5. package/dist/commands/validate.js +3 -3
  6. package/dist/core/archive.d.ts +0 -5
  7. package/dist/core/archive.js +4 -257
  8. package/dist/core/artifact-graph/graph.d.ts +56 -0
  9. package/dist/core/artifact-graph/graph.js +141 -0
  10. package/dist/core/artifact-graph/index.d.ts +7 -0
  11. package/dist/core/artifact-graph/index.js +13 -0
  12. package/dist/core/artifact-graph/instruction-loader.d.ts +130 -0
  13. package/dist/core/artifact-graph/instruction-loader.js +173 -0
  14. package/dist/core/artifact-graph/resolver.d.ts +61 -0
  15. package/dist/core/artifact-graph/resolver.js +187 -0
  16. package/dist/core/artifact-graph/schema.d.ts +13 -0
  17. package/dist/core/artifact-graph/schema.js +108 -0
  18. package/dist/core/artifact-graph/state.d.ts +12 -0
  19. package/dist/core/artifact-graph/state.js +54 -0
  20. package/dist/core/artifact-graph/types.d.ts +45 -0
  21. package/dist/core/artifact-graph/types.js +43 -0
  22. package/dist/core/converters/json-converter.js +2 -1
  23. package/dist/core/global-config.d.ts +10 -0
  24. package/dist/core/global-config.js +28 -0
  25. package/dist/core/index.d.ts +1 -1
  26. package/dist/core/index.js +1 -1
  27. package/dist/core/list.d.ts +6 -1
  28. package/dist/core/list.js +88 -6
  29. package/dist/core/specs-apply.d.ts +73 -0
  30. package/dist/core/specs-apply.js +384 -0
  31. package/dist/core/templates/skill-templates.d.ts +76 -0
  32. package/dist/core/templates/skill-templates.js +1472 -0
  33. package/dist/core/update.js +1 -1
  34. package/dist/core/validation/validator.js +2 -1
  35. package/dist/core/view.js +28 -8
  36. package/dist/utils/change-metadata.d.ts +47 -0
  37. package/dist/utils/change-metadata.js +130 -0
  38. package/dist/utils/change-utils.d.ts +51 -0
  39. package/dist/utils/change-utils.js +100 -0
  40. package/dist/utils/file-system.d.ts +5 -0
  41. package/dist/utils/file-system.js +7 -0
  42. package/dist/utils/index.d.ts +3 -1
  43. package/dist/utils/index.js +4 -1
  44. package/dist/utils/interactive.d.ts +7 -2
  45. package/dist/utils/interactive.js +9 -1
  46. package/package.json +4 -1
  47. package/schemas/spec-driven/schema.yaml +148 -0
  48. package/schemas/spec-driven/templates/design.md +19 -0
  49. package/schemas/spec-driven/templates/proposal.md +23 -0
  50. package/schemas/spec-driven/templates/spec.md +8 -0
  51. package/schemas/spec-driven/templates/tasks.md +9 -0
  52. package/schemas/tdd/schema.yaml +213 -0
  53. package/schemas/tdd/templates/docs.md +15 -0
  54. package/schemas/tdd/templates/implementation.md +11 -0
  55. package/schemas/tdd/templates/spec.md +11 -0
  56. package/schemas/tdd/templates/test.md +11 -0
@@ -6,6 +6,7 @@ interface ExecuteOptions {
6
6
  strict?: boolean;
7
7
  json?: boolean;
8
8
  noInteractive?: boolean;
9
+ interactive?: boolean;
9
10
  concurrency?: string;
10
11
  }
11
12
  export declare class ValidateCommand {
@@ -1,7 +1,7 @@
1
1
  import ora from 'ora';
2
2
  import path from 'path';
3
3
  import { Validator } from '../core/validation/validator.js';
4
- import { isInteractive } from '../utils/interactive.js';
4
+ import { isInteractive, resolveNoInteractive } from '../utils/interactive.js';
5
5
  import { getActiveChangeIds, getSpecIds } from '../utils/item-discovery.js';
6
6
  import { nearestMatches } from '../utils/match.js';
7
7
  export class ValidateCommand {
@@ -12,7 +12,7 @@ export class ValidateCommand {
12
12
  await this.runBulkValidation({
13
13
  changes: !!options.all || !!options.changes,
14
14
  specs: !!options.all || !!options.specs,
15
- }, { strict: !!options.strict, json: !!options.json, concurrency: options.concurrency });
15
+ }, { strict: !!options.strict, json: !!options.json, concurrency: options.concurrency, noInteractive: resolveNoInteractive(options) });
16
16
  return;
17
17
  }
18
18
  // No item and no flags
@@ -150,7 +150,7 @@ export class ValidateCommand {
150
150
  bullets.forEach(b => console.error(` ${b}`));
151
151
  }
152
152
  async runBulkValidation(scope, opts) {
153
- const spinner = !opts.json ? ora('Validating...').start() : undefined;
153
+ const spinner = !opts.json && !opts.noInteractive ? ora('Validating...').start() : undefined;
154
154
  const [changeIds, specIds] = await Promise.all([
155
155
  scope.changes ? getActiveChangeIds() : Promise.resolve([]),
156
156
  scope.specs ? getSpecIds() : Promise.resolve([]),
@@ -6,11 +6,6 @@ export declare class ArchiveCommand {
6
6
  validate?: boolean;
7
7
  }): Promise<void>;
8
8
  private selectChange;
9
- private checkIncompleteTasks;
10
- private findSpecUpdates;
11
- private buildUpdatedSpec;
12
- private writeUpdatedSpec;
13
- private buildSpecSkeleton;
14
9
  private getArchiveDate;
15
10
  }
16
11
  //# sourceMappingURL=archive.d.ts.map
@@ -3,7 +3,7 @@ import path from 'path';
3
3
  import { getTaskProgressForChange, formatTaskStatus } from '../utils/task-progress.js';
4
4
  import { Validator } from './validation/validator.js';
5
5
  import chalk from 'chalk';
6
- import { extractRequirementsSection, parseDeltaSpec, normalizeRequirementName, } from './parsers/requirement-blocks.js';
6
+ import { findSpecUpdates, buildUpdatedSpec, writeUpdatedSpec, } from './specs-apply.js';
7
7
  export class ArchiveCommand {
8
8
  async execute(changeName, options = {}) {
9
9
  const targetPath = '.';
@@ -148,7 +148,7 @@ export class ArchiveCommand {
148
148
  }
149
149
  else {
150
150
  // Find specs to update
151
- const specUpdates = await this.findSpecUpdates(changeDir, mainSpecsDir);
151
+ const specUpdates = await findSpecUpdates(changeDir, mainSpecsDir);
152
152
  if (specUpdates.length > 0) {
153
153
  console.log('\nSpecs to update:');
154
154
  for (const update of specUpdates) {
@@ -172,7 +172,7 @@ export class ArchiveCommand {
172
172
  const prepared = [];
173
173
  try {
174
174
  for (const update of specUpdates) {
175
- const built = await this.buildUpdatedSpec(update, changeName);
175
+ const built = await buildUpdatedSpec(update, changeName);
176
176
  prepared.push({ update, rebuilt: built.rebuilt, counts: built.counts });
177
177
  }
178
178
  }
@@ -199,7 +199,7 @@ export class ArchiveCommand {
199
199
  return;
200
200
  }
201
201
  }
202
- await this.writeUpdatedSpec(p.update, p.rebuilt, p.counts);
202
+ await writeUpdatedSpec(p.update, p.rebuilt, p.counts);
203
203
  totals.added += p.counts.added;
204
204
  totals.modified += p.counts.modified;
205
205
  totals.removed += p.counts.removed;
@@ -272,259 +272,6 @@ export class ArchiveCommand {
272
272
  return null;
273
273
  }
274
274
  }
275
- // Deprecated: replaced by shared task-progress utilities
276
- async checkIncompleteTasks(_tasksPath) {
277
- return 0;
278
- }
279
- async findSpecUpdates(changeDir, mainSpecsDir) {
280
- const updates = [];
281
- const changeSpecsDir = path.join(changeDir, 'specs');
282
- try {
283
- const entries = await fs.readdir(changeSpecsDir, { withFileTypes: true });
284
- for (const entry of entries) {
285
- if (entry.isDirectory()) {
286
- const specFile = path.join(changeSpecsDir, entry.name, 'spec.md');
287
- const targetFile = path.join(mainSpecsDir, entry.name, 'spec.md');
288
- try {
289
- await fs.access(specFile);
290
- // Check if target exists
291
- let exists = false;
292
- try {
293
- await fs.access(targetFile);
294
- exists = true;
295
- }
296
- catch {
297
- exists = false;
298
- }
299
- updates.push({
300
- source: specFile,
301
- target: targetFile,
302
- exists
303
- });
304
- }
305
- catch {
306
- // Source spec doesn't exist, skip
307
- }
308
- }
309
- }
310
- }
311
- catch {
312
- // No specs directory in change
313
- }
314
- return updates;
315
- }
316
- async buildUpdatedSpec(update, changeName) {
317
- // Read change spec content (delta-format expected)
318
- const changeContent = await fs.readFile(update.source, 'utf-8');
319
- // Parse deltas from the change spec file
320
- const plan = parseDeltaSpec(changeContent);
321
- const specName = path.basename(path.dirname(update.target));
322
- // Pre-validate duplicates within sections
323
- const addedNames = new Set();
324
- for (const add of plan.added) {
325
- const name = normalizeRequirementName(add.name);
326
- if (addedNames.has(name)) {
327
- throw new Error(`${specName} validation failed - duplicate requirement in ADDED for header "### Requirement: ${add.name}"`);
328
- }
329
- addedNames.add(name);
330
- }
331
- const modifiedNames = new Set();
332
- for (const mod of plan.modified) {
333
- const name = normalizeRequirementName(mod.name);
334
- if (modifiedNames.has(name)) {
335
- throw new Error(`${specName} validation failed - duplicate requirement in MODIFIED for header "### Requirement: ${mod.name}"`);
336
- }
337
- modifiedNames.add(name);
338
- }
339
- const removedNamesSet = new Set();
340
- for (const rem of plan.removed) {
341
- const name = normalizeRequirementName(rem);
342
- if (removedNamesSet.has(name)) {
343
- throw new Error(`${specName} validation failed - duplicate requirement in REMOVED for header "### Requirement: ${rem}"`);
344
- }
345
- removedNamesSet.add(name);
346
- }
347
- const renamedFromSet = new Set();
348
- const renamedToSet = new Set();
349
- for (const { from, to } of plan.renamed) {
350
- const fromNorm = normalizeRequirementName(from);
351
- const toNorm = normalizeRequirementName(to);
352
- if (renamedFromSet.has(fromNorm)) {
353
- throw new Error(`${specName} validation failed - duplicate FROM in RENAMED for header "### Requirement: ${from}"`);
354
- }
355
- if (renamedToSet.has(toNorm)) {
356
- throw new Error(`${specName} validation failed - duplicate TO in RENAMED for header "### Requirement: ${to}"`);
357
- }
358
- renamedFromSet.add(fromNorm);
359
- renamedToSet.add(toNorm);
360
- }
361
- // Pre-validate cross-section conflicts
362
- const conflicts = [];
363
- for (const n of modifiedNames) {
364
- if (removedNamesSet.has(n))
365
- conflicts.push({ name: n, a: 'MODIFIED', b: 'REMOVED' });
366
- if (addedNames.has(n))
367
- conflicts.push({ name: n, a: 'MODIFIED', b: 'ADDED' });
368
- }
369
- for (const n of addedNames) {
370
- if (removedNamesSet.has(n))
371
- conflicts.push({ name: n, a: 'ADDED', b: 'REMOVED' });
372
- }
373
- // Renamed interplay: MODIFIED must reference the NEW header, not FROM
374
- for (const { from, to } of plan.renamed) {
375
- const fromNorm = normalizeRequirementName(from);
376
- const toNorm = normalizeRequirementName(to);
377
- if (modifiedNames.has(fromNorm)) {
378
- throw new Error(`${specName} validation failed - when a rename exists, MODIFIED must reference the NEW header "### Requirement: ${to}"`);
379
- }
380
- // Detect ADDED colliding with a RENAMED TO
381
- if (addedNames.has(toNorm)) {
382
- throw new Error(`${specName} validation failed - RENAMED TO header collides with ADDED for "### Requirement: ${to}"`);
383
- }
384
- }
385
- if (conflicts.length > 0) {
386
- const c = conflicts[0];
387
- throw new Error(`${specName} validation failed - requirement present in multiple sections (${c.a} and ${c.b}) for header "### Requirement: ${c.name}"`);
388
- }
389
- const hasAnyDelta = (plan.added.length + plan.modified.length + plan.removed.length + plan.renamed.length) > 0;
390
- if (!hasAnyDelta) {
391
- throw new Error(`Delta parsing found no operations for ${path.basename(path.dirname(update.source))}. ` +
392
- `Provide ADDED/MODIFIED/REMOVED/RENAMED sections in change spec.`);
393
- }
394
- // Load or create base target content
395
- let targetContent;
396
- try {
397
- targetContent = await fs.readFile(update.target, 'utf-8');
398
- }
399
- catch {
400
- // Target spec does not exist; only ADDED operations are permitted
401
- if (plan.modified.length > 0 || plan.removed.length > 0 || plan.renamed.length > 0) {
402
- throw new Error(`${specName}: target spec does not exist; only ADDED requirements are allowed for new specs.`);
403
- }
404
- targetContent = this.buildSpecSkeleton(specName, changeName);
405
- }
406
- // Extract requirements section and build name->block map
407
- const parts = extractRequirementsSection(targetContent);
408
- const nameToBlock = new Map();
409
- for (const block of parts.bodyBlocks) {
410
- nameToBlock.set(normalizeRequirementName(block.name), block);
411
- }
412
- // Apply operations in order: RENAMED → REMOVED → MODIFIED → ADDED
413
- // RENAMED
414
- for (const r of plan.renamed) {
415
- const from = normalizeRequirementName(r.from);
416
- const to = normalizeRequirementName(r.to);
417
- if (!nameToBlock.has(from)) {
418
- throw new Error(`${specName} RENAMED failed for header "### Requirement: ${r.from}" - source not found`);
419
- }
420
- if (nameToBlock.has(to)) {
421
- throw new Error(`${specName} RENAMED failed for header "### Requirement: ${r.to}" - target already exists`);
422
- }
423
- const block = nameToBlock.get(from);
424
- const newHeader = `### Requirement: ${to}`;
425
- const rawLines = block.raw.split('\n');
426
- rawLines[0] = newHeader;
427
- const renamedBlock = {
428
- headerLine: newHeader,
429
- name: to,
430
- raw: rawLines.join('\n'),
431
- };
432
- nameToBlock.delete(from);
433
- nameToBlock.set(to, renamedBlock);
434
- }
435
- // REMOVED
436
- for (const name of plan.removed) {
437
- const key = normalizeRequirementName(name);
438
- if (!nameToBlock.has(key)) {
439
- throw new Error(`${specName} REMOVED failed for header "### Requirement: ${name}" - not found`);
440
- }
441
- nameToBlock.delete(key);
442
- }
443
- // MODIFIED
444
- for (const mod of plan.modified) {
445
- const key = normalizeRequirementName(mod.name);
446
- if (!nameToBlock.has(key)) {
447
- throw new Error(`${specName} MODIFIED failed for header "### Requirement: ${mod.name}" - not found`);
448
- }
449
- // Replace block with provided raw (ensure header line matches key)
450
- const modHeaderMatch = mod.raw.split('\n')[0].match(/^###\s*Requirement:\s*(.+)\s*$/);
451
- if (!modHeaderMatch || normalizeRequirementName(modHeaderMatch[1]) !== key) {
452
- throw new Error(`${specName} MODIFIED failed for header "### Requirement: ${mod.name}" - header mismatch in content`);
453
- }
454
- nameToBlock.set(key, mod);
455
- }
456
- // ADDED
457
- for (const add of plan.added) {
458
- const key = normalizeRequirementName(add.name);
459
- if (nameToBlock.has(key)) {
460
- throw new Error(`${specName} ADDED failed for header "### Requirement: ${add.name}" - already exists`);
461
- }
462
- nameToBlock.set(key, add);
463
- }
464
- // Duplicates within resulting map are implicitly prevented by key uniqueness.
465
- // Recompose requirements section preserving original ordering where possible
466
- const keptOrder = [];
467
- const seen = new Set();
468
- for (const block of parts.bodyBlocks) {
469
- const key = normalizeRequirementName(block.name);
470
- const replacement = nameToBlock.get(key);
471
- if (replacement) {
472
- keptOrder.push(replacement);
473
- seen.add(key);
474
- }
475
- }
476
- // Append any newly added that were not in original order
477
- for (const [key, block] of nameToBlock.entries()) {
478
- if (!seen.has(key)) {
479
- keptOrder.push(block);
480
- }
481
- }
482
- const reqBody = [
483
- parts.preamble && parts.preamble.trim() ? parts.preamble.trimEnd() : ''
484
- ]
485
- .filter(Boolean)
486
- .concat(keptOrder.map(b => b.raw))
487
- .join('\n\n')
488
- .trimEnd();
489
- const rebuilt = [
490
- parts.before.trimEnd(),
491
- parts.headerLine,
492
- reqBody,
493
- parts.after
494
- ]
495
- .filter((s, idx) => !(idx === 0 && s === ''))
496
- .join('\n')
497
- .replace(/\n{3,}/g, '\n\n');
498
- return {
499
- rebuilt,
500
- counts: {
501
- added: plan.added.length,
502
- modified: plan.modified.length,
503
- removed: plan.removed.length,
504
- renamed: plan.renamed.length,
505
- }
506
- };
507
- }
508
- async writeUpdatedSpec(update, rebuilt, counts) {
509
- // Create target directory if needed
510
- const targetDir = path.dirname(update.target);
511
- await fs.mkdir(targetDir, { recursive: true });
512
- await fs.writeFile(update.target, rebuilt);
513
- const specName = path.basename(path.dirname(update.target));
514
- console.log(`Applying changes to openspec/specs/${specName}/spec.md:`);
515
- if (counts.added)
516
- console.log(` + ${counts.added} added`);
517
- if (counts.modified)
518
- console.log(` ~ ${counts.modified} modified`);
519
- if (counts.removed)
520
- console.log(` - ${counts.removed} removed`);
521
- if (counts.renamed)
522
- console.log(` → ${counts.renamed} renamed`);
523
- }
524
- buildSpecSkeleton(specFolderName, changeName) {
525
- const titleBase = specFolderName;
526
- return `# ${titleBase} Specification\n\n## Purpose\nTBD - created by archiving change ${changeName}. Update Purpose after archive.\n\n## Requirements\n`;
527
- }
528
275
  getArchiveDate() {
529
276
  // Returns date in YYYY-MM-DD format
530
277
  return new Date().toISOString().split('T')[0];
@@ -0,0 +1,56 @@
1
+ import type { Artifact, SchemaYaml, CompletedSet, BlockedArtifacts } from './types.js';
2
+ /**
3
+ * Represents an artifact dependency graph.
4
+ * Provides methods for querying build order, ready artifacts, and completion status.
5
+ */
6
+ export declare class ArtifactGraph {
7
+ private artifacts;
8
+ private schema;
9
+ private constructor();
10
+ /**
11
+ * Creates an ArtifactGraph from a YAML file path.
12
+ */
13
+ static fromYaml(filePath: string): ArtifactGraph;
14
+ /**
15
+ * Creates an ArtifactGraph from YAML content string.
16
+ */
17
+ static fromYamlContent(yamlContent: string): ArtifactGraph;
18
+ /**
19
+ * Creates an ArtifactGraph from a pre-validated schema object.
20
+ */
21
+ static fromSchema(schema: SchemaYaml): ArtifactGraph;
22
+ /**
23
+ * Gets a single artifact by ID.
24
+ */
25
+ getArtifact(id: string): Artifact | undefined;
26
+ /**
27
+ * Gets all artifacts in the graph.
28
+ */
29
+ getAllArtifacts(): Artifact[];
30
+ /**
31
+ * Gets the schema name.
32
+ */
33
+ getName(): string;
34
+ /**
35
+ * Gets the schema version.
36
+ */
37
+ getVersion(): number;
38
+ /**
39
+ * Computes the topological build order using Kahn's algorithm.
40
+ * Returns artifact IDs in the order they should be built.
41
+ */
42
+ getBuildOrder(): string[];
43
+ /**
44
+ * Gets artifacts that are ready to be created (all dependencies completed).
45
+ */
46
+ getNextArtifacts(completed: CompletedSet): string[];
47
+ /**
48
+ * Checks if all artifacts in the graph are completed.
49
+ */
50
+ isComplete(completed: CompletedSet): boolean;
51
+ /**
52
+ * Gets blocked artifacts and their unmet dependencies.
53
+ */
54
+ getBlocked(completed: CompletedSet): BlockedArtifacts;
55
+ }
56
+ //# sourceMappingURL=graph.d.ts.map
@@ -0,0 +1,141 @@
1
+ import { loadSchema, parseSchema } from './schema.js';
2
+ /**
3
+ * Represents an artifact dependency graph.
4
+ * Provides methods for querying build order, ready artifacts, and completion status.
5
+ */
6
+ export class ArtifactGraph {
7
+ artifacts;
8
+ schema;
9
+ constructor(schema) {
10
+ this.schema = schema;
11
+ this.artifacts = new Map(schema.artifacts.map(a => [a.id, a]));
12
+ }
13
+ /**
14
+ * Creates an ArtifactGraph from a YAML file path.
15
+ */
16
+ static fromYaml(filePath) {
17
+ const schema = loadSchema(filePath);
18
+ return new ArtifactGraph(schema);
19
+ }
20
+ /**
21
+ * Creates an ArtifactGraph from YAML content string.
22
+ */
23
+ static fromYamlContent(yamlContent) {
24
+ const schema = parseSchema(yamlContent);
25
+ return new ArtifactGraph(schema);
26
+ }
27
+ /**
28
+ * Creates an ArtifactGraph from a pre-validated schema object.
29
+ */
30
+ static fromSchema(schema) {
31
+ return new ArtifactGraph(schema);
32
+ }
33
+ /**
34
+ * Gets a single artifact by ID.
35
+ */
36
+ getArtifact(id) {
37
+ return this.artifacts.get(id);
38
+ }
39
+ /**
40
+ * Gets all artifacts in the graph.
41
+ */
42
+ getAllArtifacts() {
43
+ return Array.from(this.artifacts.values());
44
+ }
45
+ /**
46
+ * Gets the schema name.
47
+ */
48
+ getName() {
49
+ return this.schema.name;
50
+ }
51
+ /**
52
+ * Gets the schema version.
53
+ */
54
+ getVersion() {
55
+ return this.schema.version;
56
+ }
57
+ /**
58
+ * Computes the topological build order using Kahn's algorithm.
59
+ * Returns artifact IDs in the order they should be built.
60
+ */
61
+ getBuildOrder() {
62
+ const inDegree = new Map();
63
+ const dependents = new Map();
64
+ // Initialize all artifacts
65
+ for (const artifact of this.artifacts.values()) {
66
+ inDegree.set(artifact.id, artifact.requires.length);
67
+ dependents.set(artifact.id, []);
68
+ }
69
+ // Build reverse adjacency (who depends on whom)
70
+ for (const artifact of this.artifacts.values()) {
71
+ for (const req of artifact.requires) {
72
+ dependents.get(req).push(artifact.id);
73
+ }
74
+ }
75
+ // Start with roots (in-degree 0), sorted for determinism
76
+ const queue = [...this.artifacts.keys()]
77
+ .filter(id => inDegree.get(id) === 0)
78
+ .sort();
79
+ const result = [];
80
+ while (queue.length > 0) {
81
+ const current = queue.shift();
82
+ result.push(current);
83
+ // Collect newly ready artifacts, then sort before adding
84
+ const newlyReady = [];
85
+ for (const dep of dependents.get(current)) {
86
+ const newDegree = inDegree.get(dep) - 1;
87
+ inDegree.set(dep, newDegree);
88
+ if (newDegree === 0) {
89
+ newlyReady.push(dep);
90
+ }
91
+ }
92
+ queue.push(...newlyReady.sort());
93
+ }
94
+ return result;
95
+ }
96
+ /**
97
+ * Gets artifacts that are ready to be created (all dependencies completed).
98
+ */
99
+ getNextArtifacts(completed) {
100
+ const ready = [];
101
+ for (const artifact of this.artifacts.values()) {
102
+ if (completed.has(artifact.id)) {
103
+ continue; // Already completed
104
+ }
105
+ const allDepsCompleted = artifact.requires.every(req => completed.has(req));
106
+ if (allDepsCompleted) {
107
+ ready.push(artifact.id);
108
+ }
109
+ }
110
+ // Sort for deterministic ordering
111
+ return ready.sort();
112
+ }
113
+ /**
114
+ * Checks if all artifacts in the graph are completed.
115
+ */
116
+ isComplete(completed) {
117
+ for (const artifact of this.artifacts.values()) {
118
+ if (!completed.has(artifact.id)) {
119
+ return false;
120
+ }
121
+ }
122
+ return true;
123
+ }
124
+ /**
125
+ * Gets blocked artifacts and their unmet dependencies.
126
+ */
127
+ getBlocked(completed) {
128
+ const blocked = {};
129
+ for (const artifact of this.artifacts.values()) {
130
+ if (completed.has(artifact.id)) {
131
+ continue; // Already completed
132
+ }
133
+ const unmetDeps = artifact.requires.filter(req => !completed.has(req));
134
+ if (unmetDeps.length > 0) {
135
+ blocked[artifact.id] = unmetDeps.sort();
136
+ }
137
+ }
138
+ return blocked;
139
+ }
140
+ }
141
+ //# sourceMappingURL=graph.js.map
@@ -0,0 +1,7 @@
1
+ export { ArtifactSchema, SchemaYamlSchema, type Artifact, type SchemaYaml, type CompletedSet, type BlockedArtifacts, } from './types.js';
2
+ export { loadSchema, parseSchema, SchemaValidationError } from './schema.js';
3
+ export { ArtifactGraph } from './graph.js';
4
+ export { detectCompleted } from './state.js';
5
+ export { resolveSchema, listSchemas, listSchemasWithInfo, getSchemaDir, getPackageSchemasDir, getUserSchemasDir, SchemaLoadError, type SchemaInfo, } from './resolver.js';
6
+ export { loadTemplate, loadChangeContext, generateInstructions, formatChangeStatus, TemplateLoadError, type ChangeContext, type ArtifactInstructions, type DependencyInfo, type ArtifactStatus, type ChangeStatus, } from './instruction-loader.js';
7
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1,13 @@
1
+ // Types
2
+ export { ArtifactSchema, SchemaYamlSchema, } from './types.js';
3
+ // Schema loading and validation
4
+ export { loadSchema, parseSchema, SchemaValidationError } from './schema.js';
5
+ // Graph operations
6
+ export { ArtifactGraph } from './graph.js';
7
+ // State detection
8
+ export { detectCompleted } from './state.js';
9
+ // Schema resolution
10
+ export { resolveSchema, listSchemas, listSchemasWithInfo, getSchemaDir, getPackageSchemasDir, getUserSchemasDir, SchemaLoadError, } from './resolver.js';
11
+ // Instruction loading
12
+ export { loadTemplate, loadChangeContext, generateInstructions, formatChangeStatus, TemplateLoadError, } from './instruction-loader.js';
13
+ //# sourceMappingURL=index.js.map