gsd-lite 0.5.10 → 0.5.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/server.js CHANGED
@@ -3,7 +3,7 @@ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
3
3
  import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js';
4
4
  import { pathToFileURL } from 'node:url';
5
5
  import { createRequire } from 'node:module';
6
- import { init, read, update, phaseComplete } from './tools/state.js';
6
+ import { init, read, update, phaseComplete, patchPlan } from './tools/state/index.js';
7
7
 
8
8
  const _require = createRequire(import.meta.url);
9
9
  const PKG_VERSION = _require('../package.json').version;
@@ -41,7 +41,7 @@ import {
41
41
  handleResearcherResult,
42
42
  handleReviewerResult,
43
43
  resumeWorkflow,
44
- } from './tools/orchestrator.js';
44
+ } from './tools/orchestrator/index.js';
45
45
 
46
46
  const server = new Server(
47
47
  { name: 'gsd', version: PKG_VERSION },
@@ -154,6 +154,32 @@ const TOOLS = [
154
154
  required: ['phase_id'],
155
155
  },
156
156
  },
157
+ {
158
+ name: 'state-patch',
159
+ description: 'Incrementally patch the plan: add/remove/reorder tasks, update fields, add dependencies — without full replan',
160
+ inputSchema: {
161
+ type: 'object',
162
+ properties: {
163
+ operations: {
164
+ type: 'array',
165
+ description: 'Array of patch operations. Each: {op: "add_task"|"remove_task"|"reorder_tasks"|"update_task"|"add_dependency", ...params}',
166
+ items: {
167
+ type: 'object',
168
+ properties: {
169
+ op: { type: 'string', description: 'Operation type' },
170
+ phase_id: { type: 'number', description: 'Phase ID (for add_task, reorder_tasks)' },
171
+ task_id: { type: 'string', description: 'Task ID (for remove_task, update_task, add_dependency)' },
172
+ task: { type: 'object', description: 'Task definition (for add_task): {name, level?, requires?, after?, review_required?, verification_required?}' },
173
+ order: { type: 'array', items: { type: 'string' }, description: 'Ordered task IDs (for reorder_tasks)' },
174
+ requires: { type: 'object', description: 'Dependency (for add_dependency): {kind: "task"|"phase", id, gate?}' },
175
+ },
176
+ required: ['op'],
177
+ },
178
+ },
179
+ },
180
+ required: ['operations'],
181
+ },
182
+ },
157
183
  {
158
184
  name: 'orchestrator-resume',
159
185
  description: 'Resume the minimal orchestration loop from workflow_mode/current_phase state',
@@ -265,6 +291,9 @@ async function dispatchToolCall(name, args) {
265
291
  case 'phase-complete':
266
292
  result = await phaseComplete(args);
267
293
  break;
294
+ case 'state-patch':
295
+ result = await patchPlan(args);
296
+ break;
268
297
  case 'orchestrator-resume':
269
298
  result = await resumeWorkflow(args || {});
270
299
  break;
@@ -0,0 +1,94 @@
1
+ import { read } from '../state/index.js';
2
+ import { validateDebuggerResult } from '../../schema.js';
3
+ import {
4
+ getPhaseAndTask,
5
+ persist,
6
+ persistAndRead,
7
+ buildExecutorDispatch,
8
+ } from './helpers.js';
9
+
10
/**
 * Process a debugger agent's result for a task.
 *
 * On failure (or an architecture concern) the task is marked failed — and the
 * whole phase, when the concern is architectural — then control is routed to
 * the next progressable task or to the user. On success the task's retry
 * counter is reset and the executor is re-dispatched with the debugger's
 * guidance attached.
 *
 * @param {{result: object, basePath?: string}} [args]
 * @returns {Promise<object>} success/dispatch payload, or `{error, message}`.
 */
export async function handleDebuggerResult({ result, basePath = process.cwd() } = {}) {
  // Reject non-object payloads before schema validation.
  if (!result || typeof result !== 'object' || Array.isArray(result)) {
    return { error: true, message: 'result must be an object' };
  }
  const check = validateDebuggerResult(result);
  if (!check.valid) {
    return { error: true, message: `Invalid debugger result: ${check.errors.join('; ')}` };
  }

  const state = await read({ basePath });
  if (state.error) return state;
  const { phase, task } = getPhaseAndTask(state, result.task_id);
  if (!phase || !task) {
    return { error: true, message: `Task ${result.task_id} not found` };
  }

  // Snapshot of the debugger's findings; persisted onto the task in every branch.
  const debug_context = {
    root_cause: result.root_cause,
    fix_direction: result.fix_direction,
    evidence: result.evidence,
    hypothesis_tested: result.hypothesis_tested,
    fix_attempts: result.fix_attempts,
    blockers: result.blockers,
    architecture_concern: result.architecture_concern,
  };

  const escalated = result.architecture_concern === true;
  if (result.outcome === 'failed' || escalated) {
    // An architecture concern fails the entire phase; a plain failure fails
    // only this task. When no sibling task can still make progress, hand
    // control back to the user instead of continuing execution.
    let mode;
    if (escalated) {
      mode = 'failed';
    } else {
      const othersRemain = (phase.todo || []).some(
        (t) => t.id !== task.id && !['accepted', 'failed'].includes(t.lifecycle),
      );
      mode = othersRemain ? 'executing_task' : 'awaiting_user';
    }

    const phasePatch = { id: phase.id };
    if (escalated) {
      phasePatch.lifecycle = 'failed';
    }
    phasePatch.todo = [{ id: task.id, lifecycle: 'failed', debug_context }];

    const persistError = await persist(basePath, {
      workflow_mode: mode,
      current_task: null,
      current_review: null,
      phases: [phasePatch],
    });
    if (persistError) return persistError;

    return {
      success: true,
      action: escalated ? 'phase_failed' : 'task_failed',
      workflow_mode: mode,
      phase_id: phase.id,
      task_id: task.id,
    };
  }

  // Debugging succeeded: zero the retry counter so the executor gets a fresh
  // attempt budget, then re-read the state and dispatch with guidance.
  const refreshed = await persistAndRead(basePath, {
    workflow_mode: 'executing_task',
    current_task: task.id,
    current_review: null,
    phases: [{
      id: phase.id,
      todo: [{ id: task.id, retry_count: 0, debug_context }],
    }],
  });
  if (refreshed.error) return refreshed;

  const again = getPhaseAndTask(refreshed, task.id);
  return buildExecutorDispatch(refreshed, again.phase, again.task, {
    resumed_from_debugger: true,
    debugger_guidance: again.task.debug_context,
  });
}
@@ -0,0 +1,162 @@
1
+ import { read, reclassifyReviewLevel } from '../state/index.js';
2
+ import { validateExecutorResult } from '../../schema.js';
3
+ import {
4
+ MAX_DEBUG_RETRY,
5
+ RESULT_CONTRACTS,
6
+ getPhaseAndTask,
7
+ getBlockedTasks,
8
+ buildDecisionEntries,
9
+ buildErrorFingerprint,
10
+ getBlockedReasonFromResult,
11
+ persist,
12
+ } from './helpers.js';
13
+
14
/**
 * Process an executor agent's result for a task.
 *
 * Three outcomes are handled:
 *  - 'checkpointed': reclassify the review level, auto-accept L0 / no-review
 *    tasks, otherwise park the task awaiting review; evidence is bundled into
 *    the same atomic persist.
 *  - 'blocked': record the blocker and hand control to the user.
 *  - anything else (a failed attempt): bump the retry counter and either
 *    retry the executor or, once MAX_DEBUG_RETRY is reached, dispatch the
 *    debugger. Note the task deliberately stays in lifecycle 'running' here —
 *    only handleDebuggerResult moves it to lifecycle 'failed'.
 *
 * @param {{result: object, basePath?: string}} [args]
 * @returns {Promise<object>} success/dispatch payload, or `{error, message}`.
 */
export async function handleExecutorResult({ result, basePath = process.cwd() } = {}) {
  // Reject non-object payloads before schema validation.
  if (!result || typeof result !== 'object' || Array.isArray(result)) {
    return { error: true, message: 'result must be an object' };
  }
  const check = validateExecutorResult(result);
  if (!check.valid) {
    return { error: true, message: `Invalid executor result: ${check.errors.join('; ')}` };
  }

  const state = await read({ basePath });
  if (state.error) return state;
  const { phase, task } = getPhaseAndTask(state, result.task_id);
  if (!phase || !task) {
    return { error: true, message: `Task ${result.task_id} not found` };
  }

  // Decision entries are only built here; the append itself happens
  // atomically inside update()'s lock via the _append_decisions option.
  const newDecisions = buildDecisionEntries(result.decisions, phase.id, task.id, (state.decisions || []).length);

  if (result.outcome === 'checkpointed') {
    const reviewLevel = reclassifyReviewLevel(task, result);
    const isL0 = reviewLevel === 'L0';
    const autoAccept = isL0 || task.review_required === false;

    const needsReview = !isL0
      && (reviewLevel === 'L2' || reviewLevel === 'L3')
      && task.review_required !== false;
    const current_review = needsReview
      ? { scope: 'task', scope_id: task.id, stage: 'spec' }
      : null;
    const workflow_mode = current_review ? 'reviewing_task' : 'executing_task';

    // One atomic persist: auto-accepted tasks jump running -> accepted;
    // everything else lands on running -> checkpointed (awaiting review).
    // The phase's done counter is recomputed by update() automatically.
    const taskPatch = {
      id: task.id,
      lifecycle: autoAccept ? 'accepted' : 'checkpointed',
      checkpoint_commit: result.checkpoint_commit,
      files_changed: result.files_changed || [],
      evidence_refs: result.evidence || [],
      level: reviewLevel,
      blocked_reason: null,
      unblock_condition: null,
      debug_context: null,
    };

    // Evidence rides along in the same atomic persist so state and evidence
    // can never diverge. Only well-formed entries (string id + scope) count;
    // later duplicates of the same id win, matching plain-object assignment.
    const validEvidence = (result.evidence || []).filter(
      (ev) => ev && typeof ev === 'object' && typeof ev.id === 'string' && typeof ev.scope === 'string',
    );
    const evidenceUpdates = Object.fromEntries(validEvidence.map((ev) => [ev.id, ev]));

    const persistError = await persist(basePath, {
      workflow_mode,
      current_task: null,
      current_review,
      phases: [{ id: phase.id, todo: [taskPatch] }],
      ...(validEvidence.length > 0 ? { evidence: evidenceUpdates } : {}),
    }, { _append_decisions: newDecisions });
    if (persistError) return persistError;

    return {
      success: true,
      action: current_review ? 'dispatch_reviewer' : 'continue_execution',
      workflow_mode,
      task_id: task.id,
      review_level: reviewLevel,
      current_review,
      auto_accepted: autoAccept,
      ...(current_review ? { result_contract: RESULT_CONTRACTS.reviewer } : {}),
    };
  }

  if (result.outcome === 'blocked') {
    const { blocked_reason, unblock_condition } = getBlockedReasonFromResult(result);
    const persistError = await persist(basePath, {
      workflow_mode: 'awaiting_user',
      current_task: null,
      current_review: null,
      phases: [{
        id: phase.id,
        todo: [{
          id: task.id,
          lifecycle: 'blocked',
          blocked_reason,
          unblock_condition,
          evidence_refs: result.evidence || [],
        }],
      }],
    }, { _append_decisions: newDecisions });
    if (persistError) return persistError;

    return {
      success: true,
      action: 'awaiting_user',
      workflow_mode: 'awaiting_user',
      task_id: task.id,
      blockers: getBlockedTasks({ todo: [{ id: task.id, lifecycle: 'blocked', blocked_reason, unblock_condition }] }),
    };
  }

  // Failed attempt. The task intentionally stays in lifecycle 'running':
  // executor outcome 'failed' means "attempt failed, eligible for retry or
  // debugging", NOT lifecycle 'failed'. Only handleDebuggerResult can move
  // the task to lifecycle 'failed' once debugging is exhausted.
  const retry_count = (task.retry_count || 0) + 1;
  const hasFingerprint = typeof result.error_fingerprint === 'string' && result.error_fingerprint.length > 0;
  const error_fingerprint = hasFingerprint
    ? result.error_fingerprint
    : buildErrorFingerprint(result);
  const shouldDebug = retry_count >= MAX_DEBUG_RETRY;
  const current_review = shouldDebug
    ? {
        scope: 'task',
        scope_id: task.id,
        stage: 'debugging',
        retry_count,
        error_fingerprint,
        summary: result.summary,
      }
    : null;

  const persistError = await persist(basePath, {
    workflow_mode: 'executing_task',
    current_task: task.id,
    current_review,
    phases: [{
      id: phase.id,
      todo: [{
        id: task.id,
        retry_count,
        last_error_fingerprint: error_fingerprint,
        last_failure_summary: result.summary,
        last_failure_blockers: result.blockers || [],
        evidence_refs: result.evidence || [],
      }],
    }],
  }, { _append_decisions: newDecisions });
  if (persistError) return persistError;

  return {
    success: true,
    action: shouldDebug ? 'dispatch_debugger' : 'retry_executor',
    workflow_mode: 'executing_task',
    task_id: task.id,
    retry_count,
    current_review,
    result_contract: shouldDebug ? RESULT_CONTRACTS.debugger : RESULT_CONTRACTS.executor,
  };
}