gsd-lite 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,926 @@
1
+ // State CRUD tools
2
+
3
+ import { join, dirname } from 'node:path';
4
+ import { stat } from 'node:fs/promises';
5
+ import { ensureDir, readJson, writeJson, writeAtomic, getStatePath, getGitHead } from '../utils.js';
6
+ import {
7
+ CANONICAL_FIELDS,
8
+ TASK_LIFECYCLE,
9
+ validateResearchArtifacts,
10
+ validateResearchDecisionIndex,
11
+ validateResearcherResult,
12
+ validateState,
13
+ validateTransition,
14
+ createInitialState,
15
+ } from '../schema.js';
16
+ import { runAll } from './verify.js';
17
+
18
+ const RESEARCH_FILES = ['STACK.md', 'ARCHITECTURE.md', 'PITFALLS.md', 'SUMMARY.md'];
19
+
20
// C-1: Serialize all state mutations to prevent TOCTOU races.
// Mutations chain onto the tail of a shared promise; the stored tail swallows
// rejections so one failed mutation cannot poison the queue, while the promise
// returned to the caller still surfaces the failure.
let _mutationQueue = Promise.resolve();

/**
 * Run `fn` only after every previously queued mutation has settled.
 * @param {() => Promise<any>} fn - Async mutation to serialize.
 * @returns {Promise<any>} Resolves/rejects with `fn`'s own outcome.
 */
function withStateLock(fn) {
  const pending = _mutationQueue.then(fn);
  _mutationQueue = pending.then(
    () => undefined,
    () => undefined,
  );
  return pending;
}
27
+
28
/**
 * True when `value` is a non-null, non-array object (a plain record).
 */
function isPlainObject(value) {
  if (value === null) return false;
  if (Array.isArray(value)) return false;
  return typeof value === 'object';
}
31
+
32
/**
 * Decide which workflow mode to resume after research completes,
 * based on any in-flight review scope on the state.
 */
function inferWorkflowModeAfterResearch(state) {
  switch (state.current_review?.scope) {
    case 'phase':
      return 'reviewing_phase';
    case 'task':
      return 'reviewing_task';
    default:
      return 'executing_task';
  }
}
37
+
38
/**
 * Ensure each canonical research artifact ends with a trailing newline.
 * Only the RESEARCH_FILES keys are carried over; any extra keys on
 * `artifacts` are dropped. Missing keys throw (callers validate first).
 */
function normalizeResearchArtifacts(artifacts) {
  return Object.fromEntries(
    RESEARCH_FILES.map((fileName) => {
      const content = artifacts[fileName];
      const withNewline = content.endsWith('\n') ? content : `${content}\n`;
      return [fileName, withNewline];
    }),
  );
}
46
+
47
/**
 * Initialize a new GSD project: creates .gsd/, state.json, plan.md, phases/.
 *
 * @param {object} options
 * @param {string} options.project - Project name; becomes the plan.md title.
 * @param {Array} options.phases - Phase descriptors handed to createInitialState.
 * @param {boolean} [options.research] - When truthy, also creates .gsd/research/.
 * @param {boolean} [options.force=false] - Allow re-initializing over an existing state.json.
 * @param {string} [options.basePath=process.cwd()] - Directory that will contain .gsd/.
 * @returns {Promise<object>} { success: true } or { error: true, message }.
 */
export async function init({ project, phases, research, force = false, basePath = process.cwd() }) {
  if (!project || typeof project !== 'string') {
    return { error: true, message: 'project must be a non-empty string' };
  }
  if (!Array.isArray(phases)) {
    return { error: true, message: 'phases must be an array' };
  }
  const gsdDir = join(basePath, '.gsd');
  const statePath = join(gsdDir, 'state.json');

  // Guard: reject re-initialization unless force is set.
  if (!force) {
    try {
      await stat(statePath);
      return { error: true, message: 'state.json already exists; pass force: true to reinitialize' };
    } catch {} // stat threw — treated as "file doesn't exist"; NOTE(review): non-ENOENT stat errors (e.g. EACCES) also fall through here — confirm that is acceptable
  }

  const phasesDir = join(gsdDir, 'phases');

  // ensureDir is assumed to be mkdir -p style (creates .gsd and .gsd/phases).
  await ensureDir(phasesDir);
  if (research) {
    await ensureDir(join(gsdDir, 'research'));
  }

  const state = createInitialState({ project, phases });
  state.git_head = getGitHead(basePath);

  // Create plan.md placeholder (atomic write)
  await writeAtomic(
    join(gsdDir, 'plan.md'),
    `# ${project}\n\nPlan placeholder — populate during planning phase.\n`,
  );

  // Create one placeholder .md per phase (atomic writes, intentionally sequential)
  for (const phase of state.phases) {
    await writeAtomic(
      join(phasesDir, `phase-${phase.id}.md`),
      `# Phase ${phase.id}: ${phase.name}\n\nTasks and details go here.\n`,
    );
  }

  // Record last_session as the newest mtime across the files just written,
  // rounded up to a whole millisecond, then persist the initial state.
  const trackedFiles = [
    join(gsdDir, 'plan.md'),
    ...state.phases.map((phase) => join(phasesDir, `phase-${phase.id}.md`)),
  ];
  const mtimes = await Promise.all(trackedFiles.map(async (filePath) => (await stat(filePath)).mtimeMs));
  state.context.last_session = new Date(Math.ceil(Math.max(...mtimes))).toISOString();
  await writeJson(join(gsdDir, 'state.json'), state);

  return { success: true };
}
102
+
103
/**
 * Read state.json, optionally filtering to specific fields.
 * With a non-empty `fields` array, returns an object containing only the
 * requested keys that exist on the state; otherwise returns the full state.
 * Returns { error: true, message } when no .gsd directory or on read failure.
 */
export async function read({ fields, basePath = process.cwd() } = {}) {
  const statePath = getStatePath(basePath);
  if (!statePath) {
    return { error: true, message: 'No .gsd directory found' };
  }

  const loaded = await readJson(statePath);
  if (!loaded.ok) {
    return { error: true, message: loaded.error };
  }
  const state = loaded.data;

  const wantsSubset = Array.isArray(fields) && fields.length > 0;
  if (!wantsSubset) {
    return state;
  }

  // Keep only the requested keys that actually exist on the state object.
  return Object.fromEntries(
    fields.filter((key) => key in state).map((key) => [key, state[key]]),
  );
}
130
+
131
/**
 * Update state.json with canonical field guard and full validation.
 *
 * Top-level `updates` keys must all be canonical. `updates.phases` entries
 * are deep-merged by phase id (and tasks by task id) instead of replacing
 * arrays wholesale; unknown phases/tasks are appended. Lifecycle changes are
 * checked against the allowed transitions before merging, and the merged
 * state is validated before being written back under the state lock.
 *
 * @param {object} options
 * @param {object} options.updates - Partial state; keys must be canonical.
 * @param {string} [options.basePath=process.cwd()]
 * @returns {Promise<object>} { success: true } or { error: true, message }.
 */
export async function update({ updates, basePath = process.cwd() } = {}) {
  if (!updates || typeof updates !== 'object' || Array.isArray(updates)) {
    return { error: true, message: 'updates must be a non-null object' };
  }
  // Guard: reject non-canonical fields
  const nonCanonical = Object.keys(updates).filter(
    (key) => !CANONICAL_FIELDS.includes(key),
  );
  if (nonCanonical.length > 0) {
    return {
      error: true,
      message: `Non-canonical fields rejected: ${nonCanonical.join(', ')}`,
    };
  }

  const statePath = getStatePath(basePath);
  if (!statePath) {
    return { error: true, message: 'No .gsd directory found' };
  }

  return withStateLock(async () => {
    const result = await readJson(statePath);
    if (!result.ok) {
      return { error: true, message: result.error };
    }
    const state = result.data;

    // Validate lifecycle transitions before merging
    if (updates.phases && Array.isArray(updates.phases)) {
      for (const newPhase of updates.phases) {
        const oldPhase = state.phases.find(p => p.id === newPhase.id);
        if (!oldPhase) continue;

        // Check phase lifecycle transition
        if (newPhase.lifecycle && newPhase.lifecycle !== oldPhase.lifecycle) {
          const tr = validateTransition('phase', oldPhase.lifecycle, newPhase.lifecycle);
          if (!tr.valid) return { error: true, message: tr.error };
        }

        // Check task lifecycle transitions
        if (Array.isArray(newPhase.todo)) {
          for (const newTask of newPhase.todo) {
            const oldTask = (oldPhase.todo || []).find(t => t.id === newTask.id);
            if (!oldTask) continue;
            if (newTask.lifecycle && newTask.lifecycle !== oldTask.lifecycle) {
              const tr = validateTransition('task', oldTask.lifecycle, newTask.lifecycle);
              if (!tr.valid) return { error: true, message: tr.error };
            }
          }
        }
      }
    }

    // Deep merge phases by ID instead of shallow replace [I-1]
    const merged = { ...state, ...updates };
    if (updates.phases && Array.isArray(updates.phases)) {
      merged.phases = state.phases.map(oldPhase => {
        const newPhase = updates.phases.find(p => p.id === oldPhase.id);
        if (!newPhase) return oldPhase;
        const mergedPhase = { ...oldPhase, ...newPhase };
        if (isPlainObject(oldPhase.phase_review) || isPlainObject(newPhase.phase_review)) {
          mergedPhase.phase_review = { ...oldPhase.phase_review, ...newPhase.phase_review };
        }
        if (isPlainObject(oldPhase.phase_handoff) || isPlainObject(newPhase.phase_handoff)) {
          mergedPhase.phase_handoff = { ...oldPhase.phase_handoff, ...newPhase.phase_handoff };
        }
        // Deep merge tasks within phase by ID.
        // BUG FIX: guard oldPhase.todo — the transition checks above tolerate
        // a phase with no todo list ((oldPhase.todo || [])), but this merge
        // previously called oldPhase.todo.map() unconditionally and threw a
        // TypeError when an update introduced the first todo list.
        if (Array.isArray(newPhase.todo)) {
          const oldTodo = Array.isArray(oldPhase.todo) ? oldPhase.todo : [];
          mergedPhase.todo = oldTodo.map(oldTask => {
            const newTask = newPhase.todo.find(t => t.id === oldTask.id);
            return newTask ? { ...oldTask, ...newTask } : oldTask;
          });
          // Add any new tasks not in old phase
          for (const newTask of newPhase.todo) {
            if (!oldTodo.find(t => t.id === newTask.id)) {
              mergedPhase.todo.push(newTask);
            }
          }
        }
        return mergedPhase;
      });
      // Add any new phases not in old state
      for (const newPhase of updates.phases) {
        if (!state.phases.find(p => p.id === newPhase.id)) {
          merged.phases.push(newPhase);
        }
      }
    }

    // Validate full state after merge
    const validation = validateState(merged);
    if (!validation.valid) {
      return {
        error: true,
        message: `Validation failed: ${validation.errors.join('; ')}`,
      };
    }

    await writeJson(statePath, merged);
    return { success: true };
  });
}
236
+
237
/**
 * Decide whether a verification result counts as passing.
 * An explicit `passed` flag wins (strict true only). Otherwise every one of
 * lint/typecheck/test must be present with a numeric exit_code equal to 0.
 * Non-object / missing input never passes.
 */
function verificationPassed(verification) {
  if (!verification || typeof verification !== 'object') return false;
  if ('passed' in verification) return verification.passed === true;
  for (const key of ['lint', 'typecheck', 'test']) {
    const entry = verification[key];
    if (!entry) return false;
    if (typeof entry.exit_code !== 'number') return false;
    if (entry.exit_code !== 0) return false;
  }
  return true;
}
249
+
250
/**
 * Render a short "check:exit_code" summary of a verification result,
 * e.g. "lint:0, typecheck:1". Checks absent from the result are omitted;
 * non-object input yields a fixed placeholder string.
 */
function verificationSummary(verification) {
  if (!verification || typeof verification !== 'object') return 'no verification details';
  const parts = [];
  for (const key of ['lint', 'typecheck', 'test']) {
    const entry = verification[key];
    if (entry) parts.push(`${key}:${entry.exit_code}`);
  }
  return parts.join(', ');
}
257
+
258
/**
 * Complete a phase: checks the handoff gate, transitions the phase lifecycle
 * to 'accepted', and advances current_phase to the next phase.
 *
 * Gate requirements (any failure returns { error: true, message }):
 *  - the phase lifecycle must be allowed to transition to 'accepted';
 *  - every task in phase.todo must have lifecycle 'accepted';
 *  - phase_handoff.critical_issues_open must be 0;
 *  - reviews passed (phase_review.status === 'accepted' or
 *    phase_handoff.required_reviews_passed === true);
 *  - verification passed: an explicit `verification` object, else runAll()
 *    when run_verify is true, else phase_handoff.tests_passed === true;
 *  - no direction drift (direction_ok argument, falling back to
 *    phase_handoff.direction_ok); drift persists an 'awaiting_user' state
 *    and returns an error.
 *
 * On success also refreshes state.git_head, prunes evidence older than the
 * new current_phase - 1, and persists state.json.
 *
 * @param {object} options
 * @param {number} options.phase_id - Phase to complete.
 * @param {string} [options.basePath=process.cwd()]
 * @param {object} [options.verification] - Pre-computed verification result.
 * @param {boolean} [options.run_verify=false] - Run verify.runAll() when no
 *   verification object is supplied.
 * @param {boolean} [options.direction_ok] - Overrides phase_handoff.direction_ok.
 * @returns {Promise<object>} { success: true } or { error: true, message, ... }.
 */
export async function phaseComplete({
  phase_id,
  basePath = process.cwd(),
  verification,
  run_verify = false,
  direction_ok,
} = {}) {
  // Cheap argument validation happens before taking the state lock.
  if (typeof phase_id !== 'number') {
    return { error: true, message: 'phase_id must be a number' };
  }
  if (verification != null && (typeof verification !== 'object' || Array.isArray(verification))) {
    return { error: true, message: 'verification must be an object when provided' };
  }
  if (typeof run_verify !== 'boolean') {
    return { error: true, message: 'run_verify must be a boolean' };
  }
  if (direction_ok !== undefined && typeof direction_ok !== 'boolean') {
    return { error: true, message: 'direction_ok must be a boolean when provided' };
  }
  const statePath = getStatePath(basePath);
  if (!statePath) {
    return { error: true, message: 'No .gsd directory found' };
  }

  return withStateLock(async () => {
    const result = await readJson(statePath);
    if (!result.ok) {
      return { error: true, message: result.error };
    }
    const state = result.data;

    const phase = state.phases.find((p) => p.id === phase_id);
    if (!phase) {
      return { error: true, message: `Phase ${phase_id} not found` };
    }
    if (!Array.isArray(phase.todo)) {
      return { error: true, message: `Phase ${phase_id} has invalid todo list` };
    }
    if (!phase.phase_handoff || typeof phase.phase_handoff !== 'object') {
      return { error: true, message: `Phase ${phase_id} is missing phase_handoff metadata` };
    }

    // Validate phase lifecycle transition FIRST (fail-fast) [I-4]
    const transitionResult = validateTransition(
      'phase',
      phase.lifecycle,
      'accepted',
    );
    if (!transitionResult.valid) {
      return { error: true, message: transitionResult.error };
    }

    // Check handoff gate: all tasks must be accepted
    const pendingTasks = phase.todo.filter((t) => t.lifecycle !== 'accepted');
    if (pendingTasks.length > 0) {
      return {
        error: true,
        message: `Handoff gate not met: ${pendingTasks.length} task(s) not accepted — ${pendingTasks.map((t) => `${t.id}:${t.lifecycle}`).join(', ')}`,
      };
    }

    // Check critical issues
    if (phase.phase_handoff.critical_issues_open > 0) {
      return {
        error: true,
        message: `Handoff gate not met: ${phase.phase_handoff.critical_issues_open} critical issue(s) open`,
      };
    }

    // Either the phase review itself was accepted, or the handoff already
    // recorded the required reviews as passed.
    const reviewPassed = phase.phase_review?.status === 'accepted'
      || phase.phase_handoff.required_reviews_passed === true;
    if (!reviewPassed) {
      return {
        error: true,
        message: 'Handoff gate not met: required reviews not passed',
      };
    }

    // Explicit verification wins; otherwise optionally run the verify suite;
    // otherwise fall back to the recorded tests_passed flag.
    const verificationResult = verification || (run_verify ? await runAll(basePath) : null);
    const testsPassed = verificationResult
      ? verificationPassed(verificationResult)
      : phase.phase_handoff.tests_passed === true;
    if (!testsPassed) {
      return {
        error: true,
        message: `Handoff gate not met: verification checks failed — ${verificationSummary(verificationResult)}`,
      };
    }

    // Direction drift: an explicit false (argument or recorded flag) parks the
    // workflow in awaiting_user and persists that state before erroring out.
    const directionOk = direction_ok ?? phase.phase_handoff.direction_ok;
    if (directionOk === false) {
      state.workflow_mode = 'awaiting_user';
      state.current_task = null;
      state.current_review = {
        scope: 'phase',
        scope_id: phase.id,
        stage: 'direction_drift',
        summary: `Direction drift detected for phase ${phase.id}`,
      };
      phase.phase_handoff.direction_ok = false;
      const driftValidation = validateState(state);
      if (!driftValidation.valid) {
        return { error: true, message: `Validation failed: ${driftValidation.errors.join('; ')}` };
      }
      await writeJson(statePath, state);
      return {
        error: true,
        message: 'Handoff gate not met: direction drift detected, awaiting user decision',
        workflow_mode: 'awaiting_user',
        phase_id: phase.id,
      };
    }

    // Apply transition (reviewPassed/testsPassed are both true here)
    phase.lifecycle = 'accepted';
    phase.phase_handoff.required_reviews_passed = reviewPassed;
    phase.phase_handoff.tests_passed = testsPassed;
    if (direction_ok !== undefined) {
      phase.phase_handoff.direction_ok = direction_ok;
    }

    // Increment current_phase if this was the active phase
    if (state.current_phase === phase_id && phase_id < state.total_phases) {
      state.current_phase = phase_id + 1;
      // Activate the next phase
      const nextPhase = state.phases.find((p) => p.id === state.current_phase);
      if (nextPhase && nextPhase.lifecycle === 'pending') {
        nextPhase.lifecycle = 'active';
      }
    }

    // Update git_head to current commit; gsdDir is .gsd, so its parent is the
    // project root that getGitHead reads from.
    const gsdDir = dirname(statePath);
    state.git_head = getGitHead(dirname(gsdDir));

    // Prune evidence from old phases (in-memory to avoid double read/write)
    await _pruneEvidenceFromState(state, state.current_phase, gsdDir);

    // NOTE(review): unlike update(), the success path writes without a final
    // validateState() pass (only the drift path validates) — confirm intended.
    await writeJson(statePath, state);
    return { success: true };
  });
}
400
+
401
/**
 * Add an evidence entry to state.evidence keyed by id.
 * Overwrites any existing entry under the same id.
 *
 * @param {object} options
 * @param {string} options.id - Evidence key; must be a non-empty string.
 * @param {object} options.data - Evidence payload; must carry a string `scope`.
 * @param {string} [options.basePath=process.cwd()]
 * @returns {Promise<object>} { success: true } or { error: true, message }.
 */
export async function addEvidence({ id, data, basePath = process.cwd() }) {
  // I-8: Validate inputs before touching disk.
  if (typeof id !== 'string' || id.length === 0) {
    return { error: true, message: 'id must be a non-empty string' };
  }
  if (data === null || typeof data !== 'object' || Array.isArray(data)) {
    return { error: true, message: 'data must be a non-null object' };
  }
  if (typeof data.scope !== 'string') {
    return { error: true, message: 'data.scope must be a string' };
  }

  const statePath = getStatePath(basePath);
  if (!statePath) {
    return { error: true, message: 'No .gsd directory found' };
  }

  return withStateLock(async () => {
    const loaded = await readJson(statePath);
    if (!loaded.ok) {
      return { error: true, message: loaded.error };
    }
    const state = loaded.data;

    state.evidence = state.evidence || {};
    state.evidence[id] = data;

    await writeJson(statePath, state);
    return { success: true };
  });
}
437
+
438
/**
 * Internal: prune evidence in-memory and merge archived entries into
 * .gsd/evidence-archive.json. Entries whose scope parses to a phase older
 * than (currentPhase - 1) are archived; all others are kept.
 * Mutates state.evidence only when something was actually archived.
 *
 * @returns {Promise<number>} Count of archived entries.
 */
async function _pruneEvidenceFromState(state, currentPhase, gsdDir) {
  if (!state.evidence) return 0;

  const threshold = currentPhase - 1;
  const kept = {};
  const stale = {};

  for (const [id, entry] of Object.entries(state.evidence)) {
    const phaseNum = parseScopePhase(entry.scope);
    const isStale = phaseNum !== null && phaseNum < threshold;
    (isStale ? stale : kept)[id] = entry;
  }

  const archivedCount = Object.keys(stale).length;
  if (archivedCount === 0) return 0;

  // Merge into any existing archive file rather than overwriting it.
  const archivePath = join(gsdDir, 'evidence-archive.json');
  const existing = await readJson(archivePath);
  const archiveContents = existing.ok ? existing.data : {};
  Object.assign(archiveContents, stale);
  await writeJson(archivePath, archiveContents);

  state.evidence = kept;
  return archivedCount;
}
472
+
473
/**
 * Prune evidence: archive entries from phases older than currentPhase - 1.
 * Scope format is "task:X.Y" where X is the phase number.
 * state.json is rewritten only when at least one entry was archived.
 *
 * @returns {Promise<object>} { success: true, archived } or { error, message }.
 */
export async function pruneEvidence({ currentPhase, basePath = process.cwd() }) {
  const statePath = getStatePath(basePath);
  if (!statePath) {
    return { error: true, message: 'No .gsd directory found' };
  }

  return withStateLock(async () => {
    const loaded = await readJson(statePath);
    if (!loaded.ok) {
      return { error: true, message: loaded.error };
    }

    const state = loaded.data;
    const gsdDir = dirname(statePath);
    const archived = await _pruneEvidenceFromState(state, currentPhase, gsdDir);
    if (archived > 0) {
      await writeJson(statePath, state);
    }
    return { success: true, archived };
  });
}
497
+
498
/**
 * Parse the phase number from a scope string like "task:X.Y" → X.
 * Returns null when the scope is missing, non-string, or doesn't match.
 */
function parseScopePhase(scope) {
  if (typeof scope !== 'string') return null;
  const match = /^task:(?<phase>\d+)\./.exec(scope);
  if (!match) return null;
  return Number.parseInt(match.groups.phase, 10);
}
507
+
508
// ── Automation functions ──

const DEFAULT_MAX_RETRY = 3;

/**
 * Check a single dependency gate for a task.
 * Returns null when the gate is satisfied, otherwise a human-readable reason
 * string (used verbatim in diagnostics output). Dependency kinds other than
 * 'task'/'phase' are ignored (treated as satisfied).
 * Shared by the selection and diagnostics passes of selectRunnableTask so the
 * two can never disagree about what blocks a task.
 */
function _depGateFailure(dep, phase, state) {
  if (dep.kind === 'task') {
    const depTask = phase.todo.find(t => t.id === dep.id);
    const gate = dep.gate || 'accepted';
    if (!depTask) {
      return `dep ${dep.id} not found`;
    }
    if (gate === 'checkpoint' && !['checkpointed', 'accepted'].includes(depTask.lifecycle)) {
      return `dep ${dep.id} needs checkpoint (is ${depTask.lifecycle})`;
    }
    if (gate === 'accepted' && depTask.lifecycle !== 'accepted') {
      return `dep ${dep.id} needs accepted (is ${depTask.lifecycle})`;
    }
    if (gate === 'phase_complete') {
      // phase_complete is only valid on phase-kind deps
      return `dep ${dep.id} has phase_complete gate (invalid for task-kind dependency)`;
    }
    return null;
  }
  if (dep.kind === 'phase') {
    const depPhase = (state.phases || []).find(p => p.id === dep.id);
    if (!depPhase || depPhase.lifecycle !== 'accepted') {
      return `phase dep ${dep.id} not accepted`;
    }
    return null;
  }
  return null;
}

/**
 * Select the next runnable task from a phase, respecting dependency gates.
 * Returns { task } if a runnable task is found,
 * { mode: 'trigger_review' } if all remaining are checkpointed,
 * { mode: 'awaiting_user', blockers } if all are blocked,
 * { task: undefined, diagnostics } if nothing can run.
 * @param {object} phase - Phase object with todo array
 * @param {object} state - Full state object
 * @param {object} [options] - Options
 * @param {number} [options.maxRetry=3] - Maximum retry count before skipping a task
 */
export function selectRunnableTask(phase, state, { maxRetry = DEFAULT_MAX_RETRY } = {}) {
  if (!phase || !Array.isArray(phase.todo)) {
    return { error: true, message: 'Phase todo must be an array' };
  }

  // First pass: tasks whose lifecycle, retry budget, blocked flag, and
  // dependency gates all allow execution right now.
  const runnableTasks = phase.todo.filter((task) => {
    if (!['pending', 'needs_revalidation'].includes(task.lifecycle)) return false;
    if (task.retry_count >= maxRetry) return false;
    if (task.blocked_reason) return false;
    return (task.requires || []).every((dep) => _depGateFailure(dep, phase, state) === null);
  });

  if (runnableTasks.length > 0) {
    return { task: runnableTasks[0] };
  }

  // All remaining actionable work is checkpointed → hand off to review.
  const awaitingReview = phase.todo.filter(t => t.lifecycle === 'checkpointed');
  if (awaitingReview.length > 0) {
    return { mode: 'trigger_review' };
  }

  // Everything actionable is blocked → surface blockers to the user.
  const blockedTasks = phase.todo.filter(t => t.lifecycle === 'blocked');
  if (blockedTasks.length > 0) {
    return { mode: 'awaiting_user', blockers: blockedTasks.map(t => ({ id: t.id, reason: t.blocked_reason })) };
  }

  // Diagnose why no task is runnable (terminal lifecycles are skipped).
  const diagnostics = [];
  for (const task of phase.todo) {
    if (task.lifecycle === 'accepted' || task.lifecycle === 'failed') continue;
    const reasons = [];
    if (!['pending', 'needs_revalidation'].includes(task.lifecycle)) {
      reasons.push(`lifecycle=${task.lifecycle}`);
    }
    if (task.retry_count >= maxRetry) {
      reasons.push(`retry_count=${task.retry_count} >= max=${maxRetry}`);
    }
    if (task.blocked_reason) {
      reasons.push(`blocked: ${task.blocked_reason}`);
    }
    for (const dep of (task.requires || [])) {
      const failure = _depGateFailure(dep, phase, state);
      if (failure) reasons.push(failure);
    }
    if (reasons.length > 0) {
      diagnostics.push({ id: task.id, reasons });
    }
  }

  return { task: undefined, diagnostics };
}
606
+
607
/**
 * Propagate invalidation to downstream dependents when a task is reworked.
 * When contractChanged is true, every transitive dependent of reworkTaskId
 * (but never the reworked task itself) is moved to needs_revalidation — if
 * its current lifecycle permits that transition per TASK_LIFECYCLE — and its
 * evidence_refs are cleared. No-op when contractChanged is falsy.
 */
export function propagateInvalidation(phase, reworkTaskId, contractChanged) {
  if (!contractChanged) return;

  // Walk the reverse dependency graph to collect all transitive dependents.
  const affected = new Set();
  const frontier = [reworkTaskId];
  while (frontier.length > 0) {
    const sourceId = frontier.pop();
    for (const candidate of phase.todo) {
      if (affected.has(candidate.id)) continue;
      const requiresSource = (candidate.requires || []).some(
        (dep) => dep.kind === 'task' && dep.id === sourceId,
      );
      if (requiresSource) {
        affected.add(candidate.id);
        frontier.push(candidate.id);
      }
    }
  }

  // C-2: Only transition tasks whose lifecycle allows needs_revalidation
  const invalidatableStates = new Set();
  for (const [lifecycle, targets] of Object.entries(TASK_LIFECYCLE)) {
    if (targets.includes('needs_revalidation')) invalidatableStates.add(lifecycle);
  }
  for (const task of phase.todo) {
    if (!affected.has(task.id)) continue;
    if (!invalidatableStates.has(task.lifecycle)) continue;
    task.lifecycle = 'needs_revalidation';
    task.evidence_refs = [];
  }
}
645
+
646
/**
 * Build executor context for a task: the 6-field protocol plus debugger
 * guidance. Returns { task_spec, research_decisions, predecessor_outputs,
 * project_conventions, workflows, constraints, debugger_guidance },
 * or { error: true, message } when the phase/task can't be located.
 */
export function buildExecutorContext(state, taskId, phaseId) {
  const phase = state.phases.find(p => p.id === phaseId);
  if (!phase) {
    return { error: true, message: `Phase ${phaseId} not found` };
  }
  if (!Array.isArray(phase.todo)) {
    return { error: true, message: `Phase ${phaseId} has invalid todo list` };
  }
  const task = phase.todo.find(t => t.id === taskId);
  if (!task) {
    return { error: true, message: `Task ${taskId} not found in phase ${phaseId}` };
  }

  // Research decisions backing this task, resolved from the decision index;
  // unresolvable ids are marked rather than dropped.
  const research_decisions = (task.research_basis || []).map((decisionId) => {
    const entry = state.research?.decision_index?.[decisionId];
    if (!entry) return { id: decisionId, summary: 'not found' };
    return { id: decisionId, ...entry };
  });

  // Outputs of task-kind dependencies that exist in this phase.
  const predecessor_outputs = [];
  for (const dep of task.requires || []) {
    if (dep.kind !== 'task') continue;
    const depTask = phase.todo.find(t => t.id === dep.id);
    if (!depTask) continue;
    predecessor_outputs.push({
      files_changed: depTask.files_changed || [],
      checkpoint_commit: depTask.checkpoint_commit,
    });
  }

  // Workflow docs: debugging added on retries, research when research-backed.
  const workflows = ['workflows/tdd-cycle.md', 'workflows/deviation-rules.md'];
  if ((task.retry_count || 0) > 0) workflows.push('workflows/debugging.md');
  if ((task.research_basis || []).length > 0) workflows.push('workflows/research.md');

  const debugger_guidance = task.debug_context
    ? {
        root_cause: task.debug_context.root_cause,
        fix_direction: task.debug_context.fix_direction,
        fix_attempts: task.debug_context.fix_attempts,
        evidence: task.debug_context.evidence || [],
      }
    : null;

  return {
    task_spec: `phases/phase-${phaseId}.md`,
    research_decisions,
    predecessor_outputs,
    project_conventions: 'CLAUDE.md',
    workflows,
    constraints: {
      retry_count: task.retry_count || 0,
      level: task.level || 'L1',
      review_required: task.review_required !== false,
    },
    debugger_guidance,
  };
}
705
+
706
const SENSITIVE_KEYWORDS = /\b(auth|payment|security|public.?api|login|token|credential|session|oauth)\b/i;

/**
 * Reclassify review level at runtime based on executor results.
 * Upgrades L1→L2 when the executor flagged [LEVEL-UP] in its decisions, or
 * when the contract changed and the task name matches a sensitive keyword.
 * Never downgrades an already-elevated level.
 *
 * Robustness fix: `executorResult` may be null/undefined (previously this
 * threw a TypeError dereferencing `.decisions`); it is treated as "no
 * upgrade signals" and the current level is kept.
 *
 * @param {object} task - Task record; reads task.level and task.name.
 * @param {object} [executorResult] - Executor output; reads .decisions and .contract_changed.
 * @returns {string} 'L1' | 'L2' | 'L3'
 */
export function reclassifyReviewLevel(task, executorResult) {
  const currentLevel = task.level || 'L1';

  // Never downgrade
  if (currentLevel === 'L2' || currentLevel === 'L3') {
    return currentLevel;
  }

  // Check for explicit [LEVEL-UP] in decisions
  const hasLevelUp = (executorResult?.decisions || []).some(
    (d) => typeof d === 'string' && d.includes('[LEVEL-UP]'),
  );
  if (hasLevelUp) return 'L2';

  // Check for contract change + sensitive keyword in task name
  if (executorResult?.contract_changed && SENSITIVE_KEYWORDS.test(task.name || '')) {
    return 'L2';
  }

  return currentLevel;
}
734
+
735
const MIN_TOKEN_LENGTH = 2;
const MIN_OVERLAP = 2;

// ASCII whitespace plus Latin and CJK punctuation treated as token separators.
const TOKEN_SEPARATORS = /[\s,.:;!?()[\]{}<>/\\|@#$%^&*+=~`'",。:;!?()【】、]+/;

/**
 * Tokenize a string into lowercase tokens, splitting on whitespace and
 * punctuation. Tokens shorter than MIN_TOKEN_LENGTH are discarded.
 * Null/undefined/empty input yields [].
 */
function tokenize(text) {
  if (!text) return [];
  const pieces = text.toLowerCase().split(TOKEN_SEPARATORS);
  return pieces.filter((piece) => piece.length >= MIN_TOKEN_LENGTH);
}
749
+
750
/**
 * Match a blocked reason against research decisions by keyword overlap.
 * Each decision is scored by how many of its summary tokens (duplicates
 * counted) appear among the blocked reason's tokens. Returns the decision
 * with the highest score of at least MIN_OVERLAP, or null; earlier
 * decisions win ties.
 */
export function matchDecisionForBlocker(decisions, blockedReason) {
  const reasonTokens = new Set(tokenize(blockedReason));
  if (reasonTokens.size === 0) return null;

  let best = null;
  let bestScore = 0;
  for (const decision of decisions) {
    const score = tokenize(decision.summary).reduce(
      (count, token) => (reasonTokens.has(token) ? count + 1 : count),
      0,
    );
    if (score >= MIN_OVERLAP && score > bestScore) {
      bestScore = score;
      best = decision;
    }
  }
  return best;
}
777
+
778
/**
 * Apply research refresh: reconcile a new decision index against the one
 * already on state. Four rules:
 * 1. Same ID, same summary   → merge new metadata (e.g. expires_at) into the
 *                              existing entry in place; tasks untouched.
 * 2. Same ID, new summary    → replace the entry and invalidate dependents.
 * 3. ID absent from new      → invalidate dependents and emit a warning
 *                              (the stale entry remains in the index).
 * 4. ID only in new research → add it; no effect on existing tasks.
 * Dependent tasks move to needs_revalidation only when TASK_LIFECYCLE allows
 * that transition from their current state; their evidence_refs are cleared.
 * Mutates `state`. Returns { warnings: string[] }.
 */
export function applyResearchRefresh(state, newResearch) {
  const warnings = [];
  const oldIndex = state.research?.decision_index || {};
  const newIndex = newResearch?.decision_index || {};
  const invalidatedIds = new Set();

  for (const [id, oldDecision] of Object.entries(oldIndex)) {
    if (!(id in newIndex)) {
      // Rule 3: decision disappeared from the refreshed research.
      invalidatedIds.add(id);
      warnings.push(`Decision "${id}" removed in new research — dependent tasks invalidated`);
      continue;
    }
    const newDecision = newIndex[id];
    if (oldDecision.summary === newDecision.summary) {
      // Rule 1: same conclusion — fold updated metadata into the entry in place.
      Object.assign(oldIndex[id], newDecision);
    } else {
      // Rule 2: conclusion changed — swap the entry and invalidate dependents.
      oldIndex[id] = newDecision;
      invalidatedIds.add(id);
    }
  }

  // Rule 4: brand new decisions simply join the index.
  for (const [id, newDecision] of Object.entries(newIndex)) {
    if (!(id in oldIndex)) oldIndex[id] = newDecision;
  }

  // Ensure the (possibly freshly created) index is attached to state.
  state.research = state.research || {};
  state.research.decision_index = oldIndex;

  if (invalidatedIds.size > 0) {
    // C-3: Only invalidate tasks whose lifecycle allows needs_revalidation
    const invalidatable = new Set(
      Object.entries(TASK_LIFECYCLE)
        .filter(([, targets]) => targets.includes('needs_revalidation'))
        .map(([lifecycle]) => lifecycle),
    );
    for (const phase of state.phases || []) {
      for (const task of phase.todo || []) {
        const dependsOnInvalidated = (task.research_basis || []).some((id) => invalidatedIds.has(id));
        if (!dependsOnInvalidated || !invalidatable.has(task.lifecycle)) continue;
        task.lifecycle = 'needs_revalidation';
        if (task.evidence_refs) task.evidence_refs = [];
      }
    }
  }

  return { warnings };
}
846
+
847
/**
 * Persist researcher output: validates the result, artifacts, and decision
 * index; writes the four research markdown files to .gsd/research/; merges
 * the decision index into state (invalidating dependent tasks for changed or
 * removed decisions via applyResearchRefresh); and moves workflow_mode out
 * of 'research_refresh_needed' when applicable.
 *
 * @param {object} options
 * @param {object} options.result - Researcher result (decision_ids, volatility, expires_at, sources).
 * @param {object} options.artifacts - Map of research file name → markdown content.
 * @param {object} options.decision_index - Decision id → decision record.
 * @param {string} [options.basePath=process.cwd()]
 * @returns {Promise<object>} Success payload with stored_files/warnings/etc., or { error, message }.
 */
export async function storeResearch({ result, artifacts, decision_index, basePath = process.cwd() } = {}) {
  // Validate the researcher's structured result first (all checks precede any I/O).
  const resultValidation = validateResearcherResult(result || {});
  if (!resultValidation.valid) {
    return { error: true, message: `Invalid researcher result: ${resultValidation.errors.join('; ')}` };
  }

  // Artifacts must agree with the result's decision ids / volatility / expiry.
  const artifactsValidation = validateResearchArtifacts(artifacts, {
    decisionIds: result.decision_ids,
    volatility: result.volatility,
    expiresAt: result.expires_at,
  });
  if (!artifactsValidation.valid) {
    return { error: true, message: `Invalid research artifacts: ${artifactsValidation.errors.join('; ')}` };
  }

  // The decision index must cover exactly the declared decision ids.
  const decisionIndexValidation = validateResearchDecisionIndex(decision_index, result.decision_ids);
  if (!decisionIndexValidation.valid) {
    return { error: true, message: `Invalid research decision_index: ${decisionIndexValidation.errors.join('; ')}` };
  }

  const statePath = getStatePath(basePath);
  if (!statePath) {
    return { error: true, message: 'No .gsd directory found' };
  }

  return withStateLock(async () => {
    const current = await readJson(statePath);
    if (!current.ok) {
      return { error: true, message: current.error };
    }

    const state = current.data;
    const gsdDir = dirname(statePath);
    const researchDir = join(gsdDir, 'research');
    await ensureDir(researchDir);

    // Write the four canonical research files (newline-normalized, atomic).
    const normalizedArtifacts = normalizeResearchArtifacts(artifacts);
    for (const fileName of RESEARCH_FILES) {
      await writeAtomic(join(researchDir, fileName), normalizedArtifacts[fileName]);
    }

    const nextResearch = {
      volatility: result.volatility,
      expires_at: result.expires_at,
      sources: result.sources,
      decision_index,
      files: RESEARCH_FILES,
      updated_at: new Date().toISOString(),
    };

    // On a refresh (prior research exists), reconcile decision indexes and
    // invalidate dependent tasks; on first store there is nothing to reconcile.
    const refreshResult = state.research
      ? applyResearchRefresh(state, nextResearch)
      : { warnings: [] };

    // applyResearchRefresh has already merged the index in place on
    // state.research.decision_index; fall back to the incoming decision_index
    // only on first store (no prior research).
    state.research = {
      ...(state.research || {}),
      ...nextResearch,
      decision_index: state.research?.decision_index || decision_index,
    };

    // Research is fresh again: resume whatever mode the in-flight review implies.
    if (state.workflow_mode === 'research_refresh_needed') {
      state.workflow_mode = inferWorkflowModeAfterResearch(state);
    }

    // Validate before persisting (task invalidation above mutated lifecycles).
    const validation = validateState(state);
    if (!validation.valid) {
      return { error: true, message: `State validation failed: ${validation.errors.join('; ')}` };
    }

    await writeJson(statePath, state);
    return {
      success: true,
      workflow_mode: state.workflow_mode,
      stored_files: RESEARCH_FILES,
      decision_ids: result.decision_ids,
      warnings: refreshResult.warnings,
      research: state.research,
    };
  });
}