gsd-lite 0.6.1 → 0.6.2

This diff represents the content changes between publicly released versions of this package, as published to a supported public registry. The information contained in this diff is provided for informational purposes only.
@@ -13,7 +13,7 @@
13
13
  "name": "gsd",
14
14
  "source": "./",
15
15
  "description": "AI orchestration tool — GSD management shell + Superpowers quality core. 5 commands, 4 agents, 5 workflows, MCP server, context monitoring.",
16
- "version": "0.6.1",
16
+ "version": "0.6.2",
17
17
  "keywords": [
18
18
  "orchestration",
19
19
  "mcp",
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gsd",
3
- "version": "0.6.1",
3
+ "version": "0.6.2",
4
4
  "description": "AI orchestration tool for Claude Code — GSD management shell + Superpowers quality core",
5
5
  "author": {
6
6
  "name": "sdsrss",
@@ -286,6 +286,7 @@ async function fetchLatestRelease(token) {
286
286
  if (!res.ok) return null;
287
287
 
288
288
  const data = await res.json();
289
+ if (!data.tag_name || !data.tarball_url) return null;
289
290
  return {
290
291
  version: data.tag_name.replace(/^v/, ''),
291
292
  tarballUrl: data.tarball_url,
package/install.js CHANGED
@@ -170,8 +170,13 @@ export function main() {
170
170
  copyDir(localNM, join(RUNTIME_DIR, 'node_modules'), 'runtime/node_modules (copied)');
171
171
  } else if (!DRY_RUN) {
172
172
  log(' ⧗ Installing runtime dependencies...');
173
- execSync('npm ci --omit=dev', { cwd: RUNTIME_DIR, stdio: 'pipe' });
174
- log(' runtime dependencies installed');
173
+ try {
174
+ execSync('npm ci --omit=dev', { cwd: RUNTIME_DIR, stdio: 'pipe' });
175
+ log(' ✓ runtime dependencies installed');
176
+ } catch (err) {
177
+ log(` ✗ Failed to install runtime dependencies: ${err.message}`);
178
+ process.exit(1);
179
+ }
175
180
  } else {
176
181
  log(' [dry-run] Would install runtime dependencies');
177
182
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "gsd-lite",
3
- "version": "0.6.1",
3
+ "version": "0.6.2",
4
4
  "description": "AI orchestration tool for Claude Code — GSD management shell + Superpowers quality core",
5
5
  "type": "module",
6
6
  "bin": {
@@ -208,20 +208,11 @@ async function evaluatePreflight(state, basePath) {
208
208
  }
209
209
  }
210
210
 
211
- const expired_research = collectExpiredResearch(state);
212
- if (expired_research.length > 0) {
213
- hints.push({
214
- workflow_mode: 'research_refresh_needed',
215
- action: 'dispatch_researcher',
216
- updates: { workflow_mode: 'research_refresh_needed' },
217
- expired_research,
218
- message: 'Research cache expired and must be refreshed before execution resumes',
219
- });
220
- }
221
-
222
- // P0-2: Dirty-phase detection — rollback current_phase to earliest phase
211
+ // Dirty-phase detection — rollback current_phase to earliest phase
223
212
  // that has needs_revalidation tasks, ensuring earlier invalidated work
224
213
  // is re-executed before proceeding with later phases.
214
+ // Priority: placed before research expiry because dirty-phase rollback is a
215
+ // safety-critical action (prevents executing later phases on stale foundations).
225
216
  // Use filter+reduce (not .find) to guarantee lowest-ID match regardless of array order.
226
217
  const dirtyPhases = (state.phases || []).filter(p =>
227
218
  p.id < state.current_phase
@@ -245,6 +236,17 @@ async function evaluatePreflight(state, basePath) {
245
236
  });
246
237
  }
247
238
 
239
+ const expired_research = collectExpiredResearch(state);
240
+ if (expired_research.length > 0) {
241
+ hints.push({
242
+ workflow_mode: 'research_refresh_needed',
243
+ action: 'dispatch_researcher',
244
+ updates: { workflow_mode: 'research_refresh_needed' },
245
+ expired_research,
246
+ message: 'Research cache expired and must be refreshed before execution resumes',
247
+ });
248
+ }
249
+
248
250
  if (hints.length === 0) return { override: null };
249
251
 
250
252
  return {
@@ -53,7 +53,7 @@ function _buildResumeSummary(state, response) {
53
53
  };
54
54
  }
55
55
 
56
- async function resumeAwaitingClear(state, basePath) {
56
+ async function resumeAwaitingClear(state, basePath, _depth = 0) {
57
57
  const health = await readContextHealth(basePath);
58
58
  if (health !== null && health < CONTEXT_RESUME_THRESHOLD) {
59
59
  const persistError = await persist(basePath, {
@@ -83,7 +83,7 @@ async function resumeAwaitingClear(state, basePath) {
83
83
  }
84
84
  const persistError = await persist(basePath, updates);
85
85
  if (persistError) return persistError;
86
- return resumeWorkflow({ basePath });
86
+ return resumeWorkflow({ basePath, _depth: _depth + 1 });
87
87
  }
88
88
 
89
89
  async function resumeExecutingTask(state, basePath) {
@@ -319,7 +319,7 @@ export async function resumeWorkflow({ basePath = process.cwd(), _depth = 0, unb
319
319
  result = await resumeExecutingTask(state, basePath);
320
320
  break;
321
321
  case 'awaiting_clear':
322
- result = await resumeAwaitingClear(state, basePath);
322
+ result = await resumeAwaitingClear(state, basePath, _depth);
323
323
  break;
324
324
  case 'awaiting_user': {
325
325
  if (state.current_review?.stage === 'direction_drift') {
@@ -74,6 +74,23 @@ export async function handleReviewerResult({ result, basePath = process.cwd() }
74
74
  const qualityFailed = result.quality_passed === false;
75
75
  const needsRework = hasCritical || specFailed || qualityFailed;
76
76
 
77
+ // Safety: if rework is needed but no tasks were targeted for rework,
78
+ // fall back to marking all non-accepted checkpointed/accepted tasks as needs_revalidation
79
+ // to prevent infinite review loops (no runnable tasks → trigger_review → same result).
80
+ if (needsRework && taskPatches.filter(p => p.lifecycle === 'needs_revalidation').length === 0) {
81
+ for (const task of (phase.todo || [])) {
82
+ if (task.lifecycle === 'checkpointed' || task.lifecycle === 'accepted') {
83
+ taskPatches.push({
84
+ id: task.id,
85
+ lifecycle: 'needs_revalidation',
86
+ retry_count: 0,
87
+ evidence_refs: [],
88
+ last_review_feedback: ['Reviewer indicated rework needed but did not specify tasks; all completed tasks require revalidation'],
89
+ });
90
+ }
91
+ }
92
+ }
93
+
77
94
  // Compute retry count once for both exhaustion check and state update
78
95
  const currentRetryCount = phase.phase_review?.retry_count || 0;
79
96
  const nextRetryCount = needsRework ? currentRetryCount + 1 : 0;
@@ -251,6 +251,11 @@ export async function update({ updates, basePath = process.cwd(), expectedVersio
251
251
  // Deep merge phases by ID instead of shallow replace [I-1]
252
252
  const merged = { ...state, ...updates };
253
253
 
254
+ // Deep merge context by key (preserves plan_hashes, last_session, etc.)
255
+ if (updates.context && isPlainObject(updates.context)) {
256
+ merged.context = { ...(state.context || {}), ...updates.context };
257
+ }
258
+
254
259
  // Deep merge evidence by key (preserves existing entries)
255
260
  if (updates.evidence && isPlainObject(updates.evidence)) {
256
261
  merged.evidence = { ...(state.evidence || {}), ...updates.evidence };
@@ -326,10 +331,13 @@ export async function update({ updates, basePath = process.cwd(), expectedVersio
326
331
  await _pruneEvidenceFromState(merged, merged.current_phase, gsdDir);
327
332
  }
328
333
 
329
- // Use incremental validation for simple updates (no phases changes)
330
- const validation = !updates.phases
331
- ? validateStateUpdate(state, updates)
332
- : validateState(merged);
334
+ // Use incremental validation for simple updates (no phases/propagation/decisions changes)
335
+ const needsFullValidation = updates.phases
336
+ || (_append_decisions?.length > 0)
337
+ || (_propagation_tasks?.length > 0);
338
+ const validation = needsFullValidation
339
+ ? validateState(merged)
340
+ : validateStateUpdate(state, updates);
333
341
  if (!validation.valid) {
334
342
  return {
335
343
  error: true,
@@ -495,6 +503,7 @@ export async function phaseComplete({
495
503
  if (!driftValidation.valid) {
496
504
  return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Validation failed: ${driftValidation.errors.join('; ')}` };
497
505
  }
506
+ state._version = (state._version ?? 0) + 1;
498
507
  await writeJson(statePath, state);
499
508
  return {
500
509
  success: true,
@@ -525,6 +534,8 @@ export async function phaseComplete({
525
534
  } else if (state.current_phase === phase_id && phase_id >= state.total_phases) {
526
535
  // Final phase completed — mark workflow as completed
527
536
  state.workflow_mode = 'completed';
537
+ state.current_task = null;
538
+ state.current_review = null;
528
539
  }
529
540
 
530
541
  // Update git_head to current commit
@@ -539,6 +550,7 @@ export async function phaseComplete({
539
550
  if (!finalValidation.valid) {
540
551
  return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Validation failed: ${finalValidation.errors.join('; ')}` };
541
552
  }
553
+ state._version = (state._version ?? 0) + 1;
542
554
  await writeJson(statePath, state);
543
555
  return { success: true };
544
556
  });
@@ -570,7 +582,7 @@ export async function addEvidence({ id, data, basePath = process.cwd() }) {
570
582
  if (!result.ok) {
571
583
  return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: result.error };
572
584
  }
573
- const state = result.data;
585
+ const state = migrateState(result.data);
574
586
 
575
587
  if (!state.evidence) {
576
588
  state.evidence = {};
@@ -585,6 +597,7 @@ export async function addEvidence({ id, data, basePath = process.cwd() }) {
585
597
  await _pruneEvidenceFromState(state, state.current_phase, gsdDir);
586
598
  }
587
599
 
600
+ state._version = (state._version ?? 0) + 1;
588
601
  await writeJson(statePath, state);
589
602
  return { success: true };
590
603
  });
@@ -650,11 +663,14 @@ export async function pruneEvidence({ currentPhase, basePath = process.cwd() })
650
663
  if (!result.ok) {
651
664
  return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: result.error };
652
665
  }
653
- const state = result.data;
666
+ const state = migrateState(result.data);
654
667
 
655
668
  const gsdDir = dirname(statePath);
656
669
  const archived = await _pruneEvidenceFromState(state, currentPhase, gsdDir);
657
- if (archived > 0) await writeJson(statePath, state);
670
+ if (archived > 0) {
671
+ state._version = (state._version ?? 0) + 1;
672
+ await writeJson(statePath, state);
673
+ }
658
674
 
659
675
  return { success: true, archived };
660
676
  });
@@ -734,6 +750,7 @@ export async function patchPlan({ operations, basePath = process.cwd() } = {}) {
734
750
  return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `Validation failed: ${validation.errors.join('; ')}` };
735
751
  }
736
752
 
753
+ state._version = (state._version ?? 0) + 1;
737
754
  await writeJson(statePath, state);
738
755
  return { success: true, applied, plan_version: state.plan_version };
739
756
  });
@@ -5,6 +5,7 @@ import { writeFile, rename, unlink } from 'node:fs/promises';
5
5
  import { ensureDir, readJson, writeJson, getStatePath } from '../../utils.js';
6
6
  import {
7
7
  TASK_LIFECYCLE,
8
+ migrateState,
8
9
  validateResearchArtifacts,
9
10
  validateResearchDecisionIndex,
10
11
  validateResearcherResult,
@@ -439,7 +440,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
439
440
  return { error: true, code: ERROR_CODES.NO_PROJECT_DIR, message: current.error };
440
441
  }
441
442
 
442
- const state = current.data;
443
+ const state = migrateState(current.data);
443
444
  const gsdDir = dirname(statePath);
444
445
  const researchDir = join(gsdDir, 'research');
445
446
  await ensureDir(researchDir);
@@ -503,6 +504,7 @@ export async function storeResearch({ result, artifacts, decision_index, basePat
503
504
  return { error: true, code: ERROR_CODES.VALIDATION_FAILED, message: `State validation failed: ${validation.errors.join('; ')}` };
504
505
  }
505
506
 
507
+ state._version = (state._version ?? 0) + 1;
506
508
  await writeJson(statePath, state);
507
509
  return {
508
510
  success: true,