astrocode-workflow 0.1.58 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/README.md +243 -11
  2. package/dist/agents/prompts.d.ts +1 -0
  3. package/dist/agents/prompts.js +159 -0
  4. package/dist/agents/registry.js +11 -1
  5. package/dist/config/loader.js +34 -0
  6. package/dist/config/schema.d.ts +7 -1
  7. package/dist/config/schema.js +2 -0
  8. package/dist/hooks/continuation-enforcer.d.ts +9 -1
  9. package/dist/hooks/continuation-enforcer.js +2 -1
  10. package/dist/hooks/inject-provider.d.ts +9 -1
  11. package/dist/hooks/inject-provider.js +2 -1
  12. package/dist/hooks/tool-output-truncator.d.ts +9 -1
  13. package/dist/hooks/tool-output-truncator.js +2 -1
  14. package/dist/index.js +228 -45
  15. package/dist/state/adapters/index.d.ts +4 -2
  16. package/dist/state/adapters/index.js +23 -27
  17. package/dist/state/db.d.ts +6 -8
  18. package/dist/state/db.js +106 -45
  19. package/dist/tools/index.d.ts +13 -3
  20. package/dist/tools/index.js +14 -31
  21. package/dist/tools/init.d.ts +10 -1
  22. package/dist/tools/init.js +73 -18
  23. package/dist/tools/injects.js +90 -26
  24. package/dist/tools/spec.d.ts +0 -1
  25. package/dist/tools/spec.js +4 -1
  26. package/dist/tools/status.d.ts +1 -1
  27. package/dist/tools/status.js +70 -52
  28. package/dist/tools/workflow.js +2 -2
  29. package/dist/ui/inject.d.ts +16 -2
  30. package/dist/ui/inject.js +104 -33
  31. package/dist/workflow/directives.d.ts +2 -0
  32. package/dist/workflow/directives.js +34 -19
  33. package/dist/workflow/state-machine.d.ts +46 -3
  34. package/dist/workflow/state-machine.js +249 -92
  35. package/package.json +1 -1
  36. package/src/agents/prompts.ts +160 -0
  37. package/src/agents/registry.ts +16 -1
  38. package/src/config/loader.ts +39 -4
  39. package/src/config/schema.ts +3 -0
  40. package/src/hooks/continuation-enforcer.ts +9 -2
  41. package/src/hooks/inject-provider.ts +9 -2
  42. package/src/hooks/tool-output-truncator.ts +9 -2
  43. package/src/index.ts +260 -56
  44. package/src/state/adapters/index.ts +21 -26
  45. package/src/state/db.ts +114 -58
  46. package/src/tools/index.ts +29 -31
  47. package/src/tools/init.ts +91 -22
  48. package/src/tools/injects.ts +147 -53
  49. package/src/tools/spec.ts +6 -2
  50. package/src/tools/status.ts +71 -55
  51. package/src/tools/workflow.ts +3 -3
  52. package/src/ui/inject.ts +115 -41
  53. package/src/workflow/directives.ts +103 -75
  54. package/src/workflow/state-machine.ts +327 -109
@@ -1,6 +1,55 @@
1
+ import { withTx } from "../state/db";
1
2
  import { nowISO } from "../shared/time";
2
3
  import { newEventId, newRunId, newStageRunId } from "../state/ids";
3
4
  import { warn } from "../shared/log";
5
+ import { sha256Hex } from "../shared/hash";
6
+ import { SCHEMA_VERSION } from "../state/schema";
7
+ import { injectChatPrompt } from "../ui/inject";
8
+ export const EVENT_TYPES = {
9
+ RUN_STARTED: "run.started",
10
+ RUN_COMPLETED: "run.completed",
11
+ RUN_FAILED: "run.failed",
12
+ RUN_ABORTED: "run.aborted",
13
+ RUN_GENESIS_PLANNING_ATTACHED: "run.genesis_planning_attached",
14
+ STAGE_STARTED: "stage.started",
15
+ WORKFLOW_PROCEED: "workflow.proceed",
16
+ };
17
+ async function emitUi(ui, text, toast) {
18
+ if (!ui)
19
+ return;
20
+ // Prefer toast (if provided) AND also inject chat (for audit trail / visibility).
21
+ // If you want toast-only, pass a toast function and omit ctx/sessionId.
22
+ if (toast && ui.toast) {
23
+ try {
24
+ await ui.toast(toast);
25
+ }
26
+ catch {
27
+ // non-fatal
28
+ }
29
+ }
30
+ try {
31
+ await injectChatPrompt({
32
+ ctx: ui.ctx,
33
+ sessionId: ui.sessionId,
34
+ text,
35
+ agent: ui.agentName ?? "Astro",
36
+ });
37
+ }
38
+ catch {
39
+ // non-fatal (workflow correctness is DB-based)
40
+ }
41
+ }
42
+ function tableExists(db, tableName) {
43
+ try {
44
+ const row = db
45
+ .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name=?")
46
+ .get(tableName);
47
+ return row?.name === tableName;
48
+ }
49
+ catch {
50
+ return false;
51
+ }
52
+ }
4
53
  export function getActiveRun(db) {
5
54
  const row = db
6
55
  .prepare("SELECT * FROM runs WHERE status = 'running' ORDER BY started_at DESC, created_at DESC LIMIT 1")
@@ -30,9 +79,8 @@ export function decideNextAction(db, config) {
30
79
  }
31
80
  const stageRuns = getStageRuns(db, activeRun.run_id);
32
81
  const current = getCurrentStageRun(stageRuns);
33
- if (!current) {
82
+ if (!current)
34
83
  return { kind: "complete_run", run_id: activeRun.run_id };
35
- }
36
84
  if (current.status === "pending") {
37
85
  return { kind: "delegate_stage", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
38
86
  }
@@ -42,103 +90,212 @@ export function decideNextAction(db, config) {
42
90
  if (current.status === "failed") {
43
91
  return { kind: "failed", run_id: activeRun.run_id, stage_key: current.stage_key, error_text: current.error_text ?? "stage failed" };
44
92
  }
45
- // Should never happen: other statuses are handled above
46
- warn("Unexpected stage status in decideNextAction", { status: current.status, stage_key: current.stage_key });
93
+ warn("Unexpected stage status in decideNextAction", { status: current.status, stage_key: current.status });
47
94
  return { kind: "await_stage_completion", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
48
95
  }
49
- function isInitialStory(db, storyKey) {
50
- // Check if this story has no parent relations (is top-level)
51
- const relations = db.prepare("SELECT COUNT(*) as count FROM story_relations WHERE child_story_key=?").get(storyKey);
52
- return relations.count === 0;
96
+ function getPipelineFromConfig(config) {
97
+ const pipeline = config?.workflow?.pipeline;
98
+ if (!Array.isArray(pipeline) || pipeline.length === 0) {
99
+ throw new Error("Invalid config: workflow.pipeline must be a non-empty array of stage keys.");
100
+ }
101
+ return pipeline;
53
102
  }
54
- export function createRunForStory(db, config, storyKey) {
55
- const story = getStory(db, storyKey);
56
- if (!story)
57
- throw new Error(`Story not found: ${storyKey}`);
58
- if (story.state !== "approved")
59
- throw new Error(`Story must be approved to run: ${storyKey} (state=${story.state})`);
60
- const run_id = newRunId();
103
+ function getGenesisPlanningMode(config) {
104
+ const raw = config?.workflow?.genesis_planning;
105
+ if (raw === "off" || raw === "first_story_only" || raw === "always")
106
+ return raw;
107
+ warn(`Invalid genesis_planning config: ${String(raw)}. Using default "first_story_only".`);
108
+ return "first_story_only";
109
+ }
110
+ function shouldAttachPlanningDirective(config, story) {
111
+ const mode = getGenesisPlanningMode(config);
112
+ if (mode === "off")
113
+ return false;
114
+ if (mode === "always")
115
+ return true;
116
+ return story.story_key === "S-0001";
117
+ }
118
+ function attachRunPlanningDirective(db, runId, story, pipeline) {
119
+ if (!tableExists(db, "injects"))
120
+ return;
61
121
  const now = nowISO();
62
- const pipeline = config.workflow.pipeline;
63
- // Convert to genesis planning story if needed
64
- const isGenesisCandidate = storyKey === 'S-0001' || isInitialStory(db, storyKey) ||
65
- (story.body_md && story.body_md.length > 100 &&
66
- (story.title.toLowerCase().includes('implement') || story.body_md.toLowerCase().includes('implement')));
67
- // Skip conversion if there are already many stories (spec already decomposed)
68
- const existingStoriesCount = db.prepare("SELECT COUNT(*) as count FROM stories").get();
69
- const alreadyDecomposed = existingStoriesCount.count > 10; // Arbitrary threshold
70
- if (isGenesisCandidate && !alreadyDecomposed) {
71
- const planningTitle = `Plan and decompose: ${story.title}`;
72
- const planningBody = `Analyze the requirements and break down "${story.title}" into 50-200 detailed, granular implementation stories. Each story should be focused on a specific, implementable task with clear acceptance criteria.\n\nOriginal request: ${story.body_md || ''}`;
73
- db.prepare("UPDATE stories SET title=?, body_md=? WHERE story_key=?").run(planningTitle, planningBody, storyKey);
122
+ const injectId = `inj_${runId}_genesis_plan`;
123
+ const body = [
124
+ `# Genesis planning directive`,
125
+ ``,
126
+ `This run is configured to perform a planning/decomposition pass before implementation.`,
127
+ `Do not edit the origin story title/body. Create additional stories instead.`,
128
+ ``,
129
+ `## Required output`,
130
+ `- Produce 50–200 granular implementation stories with clear acceptance criteria.`,
131
+ `- Each story: single focused change, explicit done conditions, dependencies listed.`,
132
+ ``,
133
+ `## Context`,
134
+ `- Origin story: ${story.story_key} — ${story.title ?? ""}`,
135
+ `- Pipeline: ${pipeline.join(" → ")}`,
136
+ ``,
137
+ ].join("\n");
138
+ const hash = sha256Hex(body);
139
+ try {
140
+ db.prepare(`
141
+ INSERT OR IGNORE INTO injects (
142
+ inject_id, type, title, body_md, tags_json, scope, source, priority,
143
+ expires_at, sha256, created_at, updated_at
144
+ ) VALUES (
145
+ ?, 'note', ?, ?, '["genesis","planning","decompose"]', ?, 'tool', 100,
146
+ NULL, ?, ?, ?
147
+ )
148
+ `).run(injectId, "Genesis planning: decompose into stories", body, `run:${runId}`, hash, now, now);
149
+ db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), runId, EVENT_TYPES.RUN_GENESIS_PLANNING_ATTACHED, JSON.stringify({ story_key: story.story_key, inject_id: injectId }), now);
74
150
  }
75
- // Lock story
76
- db.prepare("UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=?, updated_at=? WHERE story_key=?").run(run_id, now, now, storyKey);
77
- db.prepare("INSERT INTO runs (run_id, story_key, status, pipeline_stages_json, current_stage_key, created_at, started_at, updated_at) VALUES (?, ?, 'running', ?, ?, ?, ?, ?)").run(run_id, storyKey, JSON.stringify(pipeline), pipeline[0] ?? null, now, now, now);
78
- // Stage runs
79
- const insertStage = db.prepare("INSERT INTO stage_runs (stage_run_id, run_id, stage_key, stage_index, status, updated_at) VALUES (?, ?, ?, ?, 'pending', ?)");
80
- pipeline.forEach((stageKey, idx) => {
81
- insertStage.run(newStageRunId(), run_id, stageKey, idx, now);
151
+ catch (e) {
152
+ warn("Failed to attach genesis planning inject", { run_id: runId, story_key: story.story_key, err: e });
153
+ }
154
+ }
155
+ export function createRunForStory(db, config, storyKey) {
156
+ return withTx(db, () => {
157
+ const story = getStory(db, storyKey);
158
+ if (!story)
159
+ throw new Error(`Story not found: ${storyKey}`);
160
+ if (story.state !== "approved")
161
+ throw new Error(`Story must be approved to run: ${storyKey} (state=${story.state})`);
162
+ const run_id = newRunId();
163
+ const now = nowISO();
164
+ const pipeline = getPipelineFromConfig(config);
165
+ db.prepare("UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=?, updated_at=? WHERE story_key=?").run(run_id, now, now, storyKey);
166
+ db.prepare("INSERT INTO runs (run_id, story_key, status, pipeline_stages_json, current_stage_key, created_at, started_at, updated_at) VALUES (?, ?, 'running', ?, ?, ?, ?, ?)").run(run_id, storyKey, JSON.stringify(pipeline), pipeline[0] ?? null, now, now, now);
167
+ const insertStage = db.prepare("INSERT INTO stage_runs (stage_run_id, run_id, stage_key, stage_index, status, created_at, updated_at) VALUES (?, ?, ?, ?, 'pending', ?, ?)");
168
+ pipeline.forEach((stageKey, idx) => {
169
+ insertStage.run(newStageRunId(), run_id, stageKey, idx, now, now);
170
+ });
171
+ db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), run_id, EVENT_TYPES.RUN_STARTED, JSON.stringify({ story_key: storyKey, pipeline }), now);
172
+ if (shouldAttachPlanningDirective(config, story)) {
173
+ attachRunPlanningDirective(db, run_id, story, pipeline);
174
+ }
175
+ db.prepare(`
176
+ INSERT INTO repo_state (id, schema_version, created_at, updated_at, last_run_id, last_story_key, last_event_at)
177
+ VALUES (1, ?, ?, ?, ?, ?, ?)
178
+ ON CONFLICT(id) DO UPDATE SET
179
+ last_run_id=excluded.last_run_id,
180
+ last_story_key=excluded.last_story_key,
181
+ last_event_at=excluded.last_event_at,
182
+ updated_at=excluded.updated_at
183
+ `).run(SCHEMA_VERSION, now, now, run_id, storyKey, now);
184
+ return { run_id };
82
185
  });
83
- // Event
84
- db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, 'run.started', ?, ?)").run(newEventId(), run_id, JSON.stringify({ story_key: storyKey, pipeline }), now);
85
- db.prepare("UPDATE repo_state SET last_run_id=?, last_story_key=?, last_event_at=?, updated_at=? WHERE id=1").run(run_id, storyKey, now, now);
86
- return { run_id };
87
186
  }
88
- export function startStage(db, runId, stageKey, meta) {
89
- const now = nowISO();
90
- // Ensure run is running
91
- const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
92
- if (!run)
93
- throw new Error(`Run not found: ${runId}`);
94
- if (run.status !== "running")
95
- throw new Error(`Run is not running: ${runId} (status=${run.status})`);
96
- const stage = db
97
- .prepare("SELECT * FROM stage_runs WHERE run_id=? AND stage_key=?")
98
- .get(runId, stageKey);
99
- if (!stage)
100
- throw new Error(`Stage run not found: ${runId}/${stageKey}`);
101
- if (stage.status !== "pending")
102
- throw new Error(`Stage is not pending: ${stageKey} (status=${stage.status})`);
103
- db.prepare("UPDATE stage_runs SET status='running', started_at=?, updated_at=?, subagent_type=COALESCE(?, subagent_type), subagent_session_id=COALESCE(?, subagent_session_id) WHERE stage_run_id=?").run(now, now, meta?.subagent_type ?? null, meta?.subagent_session_id ?? null, stage.stage_run_id);
104
- db.prepare("UPDATE runs SET current_stage_key=?, updated_at=? WHERE run_id=?").run(stageKey, now, runId);
105
- db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, ?, 'stage.started', ?, ?)").run(newEventId(), runId, stageKey, JSON.stringify({ subagent_type: meta?.subagent_type ?? null }), now);
106
- }
107
- export function completeRun(db, runId) {
108
- const now = nowISO();
109
- const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
110
- if (!run)
111
- throw new Error(`Run not found: ${runId}`);
112
- if (run.status !== "running")
113
- throw new Error(`Run not running: ${runId} (status=${run.status})`);
114
- // Ensure all stages completed/skipped
115
- const stageRuns = getStageRuns(db, runId);
116
- const incomplete = stageRuns.find((s) => s.status !== "completed" && s.status !== "skipped");
117
- if (incomplete)
118
- throw new Error(`Cannot complete run: stage ${incomplete.stage_key} is ${incomplete.status}`);
119
- db.prepare("UPDATE runs SET status='completed', completed_at=?, updated_at=?, current_stage_key=NULL WHERE run_id=?").run(now, now, runId);
120
- db.prepare("UPDATE stories SET state='done', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
121
- db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, 'run.completed', ?, ?)").run(newEventId(), runId, JSON.stringify({ story_key: run.story_key }), now);
122
- db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
123
- }
124
- export function failRun(db, runId, stageKey, errorText) {
125
- const now = nowISO();
126
- const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
127
- if (!run)
128
- throw new Error(`Run not found: ${runId}`);
129
- db.prepare("UPDATE runs SET status='failed', error_text=?, updated_at=?, completed_at=? WHERE run_id=?").run(errorText, now, now, runId);
130
- db.prepare("UPDATE stories SET state='blocked', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
131
- db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, ?, 'run.failed', ?, ?)").run(newEventId(), runId, stageKey, JSON.stringify({ error_text: errorText }), now);
132
- db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
187
+ /**
188
+ * STAGE MOVEMENT (START) — now async so UI injection is deterministic.
189
+ */
190
+ export async function startStage(db, runId, stageKey, meta) {
191
+ // Do DB work inside tx, capture what we need for UI outside.
192
+ const payload = withTx(db, () => {
193
+ const now = nowISO();
194
+ const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
195
+ if (!run)
196
+ throw new Error(`Run not found: ${runId}`);
197
+ if (run.status !== "running")
198
+ throw new Error(`Run is not running: ${runId} (status=${run.status})`);
199
+ const stage = db.prepare("SELECT * FROM stage_runs WHERE run_id=? AND stage_key=?").get(runId, stageKey);
200
+ if (!stage)
201
+ throw new Error(`Stage run not found: ${runId}/${stageKey}`);
202
+ if (stage.status !== "pending")
203
+ throw new Error(`Stage is not pending: ${stageKey} (status=${stage.status})`);
204
+ db.prepare("UPDATE stage_runs SET status='running', started_at=?, updated_at=?, subagent_type=COALESCE(?, subagent_type), subagent_session_id=COALESCE(?, subagent_session_id) WHERE stage_run_id=?").run(now, now, meta?.subagent_type ?? null, meta?.subagent_session_id ?? null, stage.stage_run_id);
205
+ db.prepare("UPDATE runs SET current_stage_key=?, updated_at=? WHERE run_id=?").run(stageKey, now, runId);
206
+ db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, ?, ?, ?, ?)").run(newEventId(), runId, stageKey, EVENT_TYPES.STAGE_STARTED, JSON.stringify({ subagent_type: meta?.subagent_type ?? null }), now);
207
+ db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
208
+ const story = db.prepare("SELECT story_key, title FROM stories WHERE story_key=?").get(run.story_key);
209
+ return {
210
+ now,
211
+ story_key: run.story_key,
212
+ story_title: story?.title ?? "",
213
+ };
214
+ });
215
+ // Deterministic UI emission AFTER commit (never inside tx).
216
+ await emitUi(meta?.ui, [
217
+ `🟦 Stage started`,
218
+ `- Run: \`${runId}\``,
219
+ `- Stage: \`${stageKey}\``,
220
+ `- Story: \`${payload.story_key}\` ${payload.story_title || "(untitled)"}`,
221
+ ].join("\n"), {
222
+ title: "Stage started",
223
+ message: `${stageKey} (${payload.story_key})`,
224
+ variant: "info",
225
+ durationMs: 2500,
226
+ });
227
+ }
228
+ /**
229
+ * STAGE CLOSED (RUN COMPLETED) now async so UI injection is deterministic.
230
+ */
231
+ export async function completeRun(db, runId, ui) {
232
+ const payload = withTx(db, () => {
233
+ const now = nowISO();
234
+ const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
235
+ if (!run)
236
+ throw new Error(`Run not found: ${runId}`);
237
+ if (run.status !== "running")
238
+ throw new Error(`Run not running: ${runId} (status=${run.status})`);
239
+ const stageRuns = getStageRuns(db, runId);
240
+ const incomplete = stageRuns.find((s) => s.status !== "completed" && s.status !== "skipped");
241
+ if (incomplete)
242
+ throw new Error(`Cannot complete run: stage ${incomplete.stage_key} is ${incomplete.status}`);
243
+ db.prepare("UPDATE runs SET status='completed', completed_at=?, updated_at=?, current_stage_key=NULL WHERE run_id=?").run(now, now, runId);
244
+ db.prepare("UPDATE stories SET state='done', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
245
+ db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), runId, EVENT_TYPES.RUN_COMPLETED, JSON.stringify({ story_key: run.story_key }), now);
246
+ db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
247
+ const story = db.prepare("SELECT story_key, title FROM stories WHERE story_key=?").get(run.story_key);
248
+ return { now, story_key: run.story_key, story_title: story?.title ?? "" };
249
+ });
250
+ await emitUi(ui, [
251
+ `✅ Run completed`,
252
+ `- Run: \`${runId}\``,
253
+ `- Story: \`${payload.story_key}\` — ${payload.story_title || "(untitled)"}`,
254
+ ].join("\n"), {
255
+ title: "Run completed",
256
+ message: `${payload.story_key} — done`,
257
+ variant: "success",
258
+ durationMs: 3000,
259
+ });
260
+ }
261
+ /**
262
+ * STAGE CLOSED (RUN FAILED) — now async so UI injection is deterministic.
263
+ */
264
+ export async function failRun(db, runId, stageKey, errorText, ui) {
265
+ const payload = withTx(db, () => {
266
+ const now = nowISO();
267
+ const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
268
+ if (!run)
269
+ throw new Error(`Run not found: ${runId}`);
270
+ db.prepare("UPDATE runs SET status='failed', error_text=?, updated_at=?, completed_at=? WHERE run_id=?").run(errorText, now, now, runId);
271
+ db.prepare("UPDATE stories SET state='blocked', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
272
+ db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, ?, ?, ?, ?)").run(newEventId(), runId, stageKey, EVENT_TYPES.RUN_FAILED, JSON.stringify({ error_text: errorText }), now);
273
+ db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
274
+ const story = db.prepare("SELECT story_key, title FROM stories WHERE story_key=?").get(run.story_key);
275
+ return { now, story_key: run.story_key, story_title: story?.title ?? "" };
276
+ });
277
+ await emitUi(ui, [
278
+ `⛔ Run failed`,
279
+ `- Run: \`${runId}\``,
280
+ `- Stage: \`${stageKey}\``,
281
+ `- Story: \`${payload.story_key}\` — ${payload.story_title || "(untitled)"}`,
282
+ `- Error: ${errorText}`,
283
+ ].join("\n"), {
284
+ title: "Run failed",
285
+ message: `${stageKey}: ${errorText}`,
286
+ variant: "error",
287
+ durationMs: 4500,
288
+ });
133
289
  }
134
290
  export function abortRun(db, runId, reason) {
135
- const now = nowISO();
136
- const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
137
- if (!run)
138
- throw new Error(`Run not found: ${runId}`);
139
- db.prepare("UPDATE runs SET status='aborted', error_text=?, updated_at=?, completed_at=? WHERE run_id=?").run(reason, now, now, runId);
140
- // Unlock story back to approved so it can be re-run.
141
- db.prepare("UPDATE stories SET state='approved', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
142
- db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, 'run.aborted', ?, ?)").run(newEventId(), runId, JSON.stringify({ reason }), now);
143
- db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
291
+ return withTx(db, () => {
292
+ const now = nowISO();
293
+ const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
294
+ if (!run)
295
+ throw new Error(`Run not found: ${runId}`);
296
+ db.prepare("UPDATE runs SET status='aborted', error_text=?, updated_at=?, completed_at=? WHERE run_id=?").run(reason, now, now, runId);
297
+ db.prepare("UPDATE stories SET state='approved', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
298
+ db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), runId, EVENT_TYPES.RUN_ABORTED, JSON.stringify({ reason }), now);
299
+ db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
300
+ });
144
301
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "astrocode-workflow",
3
- "version": "0.1.58",
3
+ "version": "0.2.0",
4
4
  "type": "module",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",
@@ -72,3 +72,163 @@ I analyzed the requirements and identified key components for implementation.
72
72
 
73
73
  If blocked, set status="blocked" and add ONE question to questions array. Do not deviate from this format.
74
74
  `;
75
+
76
+ export const QA_AGENT_PROMPT = `🌍 Global Engineering Review Prompt (LOCKED)
77
+
78
+ Use this prompt when reviewing any codebase.
79
+
80
+ 1️⃣ How every file review starts (MANDATORY)
81
+
82
+ Before discussing details, always answer:
83
+
84
+ Simple question this file answers
85
+
86
+ What decision, contract, or responsibility does this file define?
87
+
88
+ Things you have at the end of running this code
89
+
90
+ What objects, capabilities, state, guarantees, or invariants now exist?
91
+
92
+ If you can't answer these clearly, the file is already suspect.
93
+
94
+ 2️⃣ Canonical RULE set (GLOBAL ENGINEERING INVARIANTS)
95
+
96
+ These are engineering physics laws.
97
+ Every serious bug maps to one of these.
98
+ No ad-hoc rules are allowed.
99
+
100
+ enum RULE {
101
+ CAPABILITY_GUARANTEE =
102
+ "Expose a capability only when you can guarantee it will execute safely under current runtime conditions.",
103
+
104
+ RECOVERY_STRICTER =
105
+ "Recovery/degraded paths must be simpler and more conservative than the normal path, and must not introduce new failure modes.",
106
+
107
+ SOURCE_OF_TRUTH =
108
+ "For any piece of state, define exactly one authoritative source and explicit precedence rules for any mirrors, caches, or derivations.",
109
+
110
+ LIFECYCLE_DETERMINISM =
111
+ "Initialization and lifecycle must be deterministic: single construction, stable ordering, controlled side effects, and repeatable outcomes.",
112
+
113
+ SECURITY_BOUNDARIES =
114
+ "Security, authorization, and trust boundaries must be explicit, enforced, and never inferred implicitly."
115
+ }
116
+
117
+ If an issue does not violate one of these rules, it is not a P0/P1 blocker.
118
+
119
+ 3️⃣ Severity model (WHAT "P" MEANS)
120
+
121
+ Severity is about trust, not annoyance.
122
+
123
+ P0 — Trust break
124
+
125
+ Unsafe execution
126
+
127
+ Corrupted or ambiguous state
128
+
129
+ Non-deterministic lifecycle
130
+
131
+ Broken auditability / resumability
132
+
133
+ Security boundary violations
134
+
135
+ P1 — Reliability break
136
+
137
+ Runtime crashes after successful boot
138
+
139
+ Capabilities exposed but unusable
140
+
141
+ Degraded mode that lies or half-works
142
+
143
+ Recovery paths that add fragility
144
+
145
+ P2 — Quality / polish
146
+
147
+ Readability, ergonomics, maintainability
148
+
149
+ No RULE violated
150
+
151
+ 4️⃣ Mandatory P0 / P1 Blocker Format (STRICT)
152
+
153
+ Every P0 / P1 must be written exactly like this:
154
+
155
+ P{0|1} — <short human title>
156
+
157
+ Rule: RULE.<ONE_ENUM_VALUE>
158
+
159
+ Description:
160
+ A human-readable explanation of how this specific code violates the rule in context.
161
+ This is situational and concrete — not a rule.
162
+
163
+ What:
164
+ The exact defect or unsafe behavior.
165
+
166
+ Where:
167
+ Precise file + function + construct / lines.
168
+
169
+ Proposed fix:
170
+ The smallest possible change that restores the rule.
171
+ (Code snippets if helpful.)
172
+
173
+ Why:
174
+ How this fix restores the invariant and what class of failures it prevents.
175
+
176
+ 5️⃣ Recovery / Degraded Mode Lens (AUTO-APPLIED)
177
+
178
+ Whenever code introduces:
179
+
180
+ limited mode
181
+
182
+ fallback
183
+
184
+ catch-based recovery
185
+
186
+ partial initialization
187
+
188
+ Automatically evaluate against:
189
+
190
+ RULE.RECOVERY_STRICTER
191
+
192
+ RULE.CAPABILITY_GUARANTEE
193
+
194
+ RULE.LIFECYCLE_DETERMINISM
195
+
196
+ If recovery adds logic, validation, or ambiguity → it is a blocker.
197
+
198
+ Recovery must:
199
+
200
+ reduce capability surface
201
+
202
+ fail earlier, not later
203
+
204
+ be simpler than the normal path
205
+
206
+ provide a clear path back to normal
207
+
208
+ 6️⃣ How to ask for the next file (TEACHING MODE)
209
+
210
+ Before asking for the next file, always explain:
211
+
212
+ What this next file likely does (human-readable)
213
+
214
+ "This file takes X, registers it with Y, then enforces Z..."
215
+
216
+ Why this file matters next
217
+
218
+ Which RULE it is likely to uphold or violate, and why reviewing it now reduces risk.
219
+
220
+ 7️⃣ What this frame teaches over time
221
+
222
+ After repeated use, you stop seeing "random bugs" and start seeing patterns:
223
+
224
+ CAPABILITY_GUARANTEE violations (exposed but unsafe APIs)
225
+
226
+ RECOVERY_STRICTER violations (clever fallbacks that explode)
227
+
228
+ SOURCE_OF_TRUTH drift (DB vs disk vs memory)
229
+
230
+ LIFECYCLE_DETERMINISM failures (double init, racey wiring)
231
+
232
+ SECURITY_BOUNDARIES leaks (implicit trust)
233
+
234
+ At that point, reviews become portable skills, not project-specific knowledge.`;
@@ -2,7 +2,7 @@ import type { AgentConfig } from "@opencode-ai/sdk";
2
2
  import type { AstrocodeConfig } from "../config/schema";
3
3
  import { deepMerge } from "../shared/deep-merge";
4
4
  import { applyModelTuning } from "../shared/model-tuning";
5
- import { BASE_ORCH_PROMPT, BASE_STAGE_PROMPT } from "./prompts";
5
+ import { BASE_ORCH_PROMPT, BASE_STAGE_PROMPT, QA_AGENT_PROMPT } from "./prompts";
6
6
 
7
7
  type PermissionValue = "ask" | "allow" | "deny";
8
8
  type PermissionMap = Record<string, PermissionValue>;
@@ -136,6 +136,7 @@ export function createAstroAgents(opts: {
136
136
  },
137
137
  librarian_name: "Librarian",
138
138
  explore_name: "Explore",
139
+ qa_name: "QA",
139
140
  agent_variant_overrides: {}
140
141
  };
141
142
  }
@@ -301,6 +302,20 @@ export function createAstroAgents(opts: {
301
302
  "utility"
302
303
  );
303
304
 
305
+ // QA agent for code review and verification
306
+ agents[pluginConfig.agents.qa_name] = mk(
307
+ pluginConfig.agents.qa_name,
308
+ {
309
+ description: "QA agent: Global engineering review with canonical rules and severity model.",
310
+ mode: "subagent",
311
+ hidden: false, // Make it visible for delegation
312
+ temperature: 0.1,
313
+ prompt: QA_AGENT_PROMPT,
314
+ permission: stageReadOnlyPermissions(), // Read-only for safety
315
+ },
316
+ "utility"
317
+ );
318
+
304
319
  // Fallback general agent for delegation failures
305
320
  agents["General"] = mk(
306
321
  "General",
@@ -5,6 +5,37 @@ import { AstrocodeConfigSchema, type AstrocodeConfig } from "./schema";
5
5
  import { deepMerge } from "../shared/deep-merge";
6
6
  import { info, warn } from "../shared/log";
7
7
 
8
+ function validateJsonSerializable(obj: any, path = ""): void {
9
+ if (obj === null || obj === undefined) return;
10
+ if (typeof obj === "boolean" || typeof obj === "number" || typeof obj === "string") return;
11
+
12
+ if (typeof obj === "bigint") {
13
+ throw new Error(`Config contains non-JSON-serializable bigint at ${path}`);
14
+ }
15
+ if (typeof obj === "symbol") {
16
+ throw new Error(`Config contains non-JSON-serializable symbol at ${path}`);
17
+ }
18
+ if (typeof obj === "function") {
19
+ throw new Error(`Config contains non-JSON-serializable function at ${path}`);
20
+ }
21
+ if (obj instanceof Date) {
22
+ throw new Error(`Config contains non-JSON-serializable Date at ${path}`);
23
+ }
24
+ if (obj instanceof Map || obj instanceof Set) {
25
+ throw new Error(`Config contains non-JSON-serializable ${obj.constructor.name} at ${path}`);
26
+ }
27
+
28
+ if (Array.isArray(obj)) {
29
+ for (let i = 0; i < obj.length; i++) {
30
+ validateJsonSerializable(obj[i], `${path}[${i}]`);
31
+ }
32
+ } else if (typeof obj === "object") {
33
+ for (const key of Object.keys(obj)) {
34
+ validateJsonSerializable(obj[key], path ? `${path}.${key}` : key);
35
+ }
36
+ }
37
+ }
38
+
8
39
  export type ConfigFileDetection =
9
40
  | { format: "jsonc" | "json"; path: string }
10
41
  | { format: "none"; path: string };
@@ -46,10 +77,14 @@ export function loadAstrocodeConfig(repoRoot: string): AstrocodeConfig {
46
77
  if (legacyCfg) cfg = deepMerge(cfg, legacyCfg);
47
78
  if (projectCfg) cfg = deepMerge(cfg, projectCfg);
48
79
 
49
- // Ensure the final config is fully validated with all required defaults
50
- cfg = AstrocodeConfigSchema.parse(cfg);
80
+ // Ensure the final config is fully validated with all required defaults
81
+ cfg = AstrocodeConfigSchema.parse(cfg);
82
+
83
+ // CRITICAL CONTRACT: ensure config is JSON-serializable for recovery mode compatibility
84
+ // This prevents silent data corruption in cloneConfig's JSON fallback
85
+ validateJsonSerializable(cfg);
51
86
 
52
- // Config loaded successfully (silent)
87
+ // Config loaded successfully (silent)
53
88
 
54
- return cfg;
89
+ return cfg;
55
90
  }